diff options
Diffstat (limited to 'subversion/tests')
161 files changed, 36447 insertions, 12010 deletions
diff --git a/subversion/tests/cmdline/README b/subversion/tests/cmdline/README index 9674cac..65dcc28 100644 --- a/subversion/tests/cmdline/README +++ b/subversion/tests/cmdline/README @@ -12,7 +12,7 @@ command-line client. It has no access to code internals; it never looks inside the .svn/ directory; it only performs actions that a human user would do. -These tests require Python 2.5 or later. +These tests require Python 2.7 or later. [ For more general information on Subversion's testing system, please read the README in subversion/tests/. ] @@ -83,6 +83,133 @@ paths adjusted appropriately: Require valid-user </Location> + <Location /authz-test-work/anon> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + # This may seem unnecessary but granting access to everyone here is necessary + # to exercise a bug with httpd 2.3.x+. The "Require all granted" syntax is + # new to 2.3.x+ which we can detect with the mod_authz_core.c module + # signature. Use the "Allow from all" syntax with older versions for symmetry. 
+ <IfModule mod_authz_core.c> + Require all granted + </IfModule> + <IfModule !mod_authz_core.c> + Allow from all + </IfMOdule> + </Location> + <Location /authz-test-work/mixed> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + Require valid-user + Satisfy Any + </Location> + <Location /authz-test-work/mixed-noauthwhenanon> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + Require valid-user + AuthzSVNNoAuthWhenAnonymousAllowed On + </Location> + <Location /authz-test-work/authn> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + Require valid-user + </Location> + <Location /authz-test-work/authn-anonoff> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + Require valid-user + AuthzSVNAnonymous Off + </Location> + <Location /authz-test-work/authn-lcuser> + DAV svn + SVNParentPath 
/home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + Require valid-user + AuthzForceUsernameCase Lower + </Location> + <Location /authz-test-work/authn-lcuser> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + Require valid-user + AuthzForceUsernameCase Lower + </Location> + <Location /authz-test-work/authn-group> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + AuthGroupFile /usr/local/apache2/conf/groups + Require group random + AuthzSVNAuthoritative Off + </Location> + <IfModule mod_authz_core.c> + <Location /authz-test-work/sallrany> + DAV svn + SVNParentPath /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + AuthzSendForbiddenOnFailure On + Satisfy All + <RequireAny> + Require valid-user + Require expr req('ALLOW') == '1' + </RequireAny> + </Location> + <Location /authz-test-work/sallrall> + DAV svn + SVNParentPath 
/home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/local_tmp + AuthzSVNAccessFile /home/yourusernamehere/projects/svn/subversion/tests/cmdline/svn-test-work/authz + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile /usr/local/apache2/conf/users + AuthzSendForbiddenOnFailure On + Satisfy All + <RequireAll> + Require valid-user + Require expr req('ALLOW') == '1' + </RequireAll> + </Location> + </IfModule> + + RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)$ /svn-test-work/repositories/$1 RedirectMatch ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)$ /svn-test-work/repositories/$1 @@ -101,6 +228,15 @@ just drop the following 2-line snippet into the ---------------------------- jrandom:xCGl35kV9oWCY jconstant:xCGl35kV9oWCY +JRANDOM:xCGl35kV9oWCY +JCONSTANT:xCGl35kV9oWCY +---------------------------- + +and these lines into the +/usr/local/apache/conf/groups file: +---------------------------- +random: jrandom +constant: jconstant ---------------------------- Now, (re)start Apache and run the tests over mod_dav_svn. @@ -138,6 +274,8 @@ Note [1]: It would be quite too much to expect those password entries ---------------------------- jrandom:$apr1$3p1.....$FQW6RceW5QhJ2blWDQgKn0 jconstant:$apr1$jp1.....$Usrqji1c9H6AbOxOGAzzb0 + JRANDOM:$apr1$3p1.....$FQW6RceW5QhJ2blWDQgKn0 + JCONSTANT:$apr1$jp1.....$Usrqji1c9H6AbOxOGAzzb0 ---------------------------- @@ -151,7 +289,13 @@ it will run just one suite or just one test: $ ./davautocheck.sh basic $ ./davautocheck.sh basic 15 -It also respects some environment variables. +With '--no-tests' argument, it will start httpd but not run any tests. This is +useful for manual testing --- create repositories in +./svn-test-work/repositories/<$repo> and they will be accessible at +<URL>/svn-test-work/repositories/<$repo>. You can also run individual tests by passing the --url option to them, as described above. 
+ +davautocheck.sh also respects some environment variables; see the comments at +the top of the script for details. Running over ra_svn ------------------- @@ -174,8 +318,8 @@ or if you're running an individual test, $ ./basic_tests.py --url=svn://localhost --enable-sasl 3 -Note that to do this you'll have to have a subversion.conf file in your -SASL lib dir (i.e. something like /usr/lib/sasl2/subversion.conf), it +Note that to do this you'll have to have a svn.conf file in your +SASL lib dir (i.e. something like /usr/lib/sasl2/svn.conf), it should contain something like: pwcheck_method: auxprop @@ -189,6 +333,16 @@ $ saslpasswd2 -c -u svntest jconstant As usual, both users should use the password 'rayjandom'. +To enable DUMP_LOAD_CROSS_CHECK to work a third user is required, + +$ saslpasswd2 -c -u svntest __dumpster__ + +with password '__loadster__'. + +The user running the tests will need read access to the sasl database +and on some systems this can be arranged by adding the user to the sasl +group. + There are 'make svnserveautocheck' and ./svnserveautocheck.sh commands, analogous to davautocheck.sh documented above. @@ -245,9 +399,25 @@ Directory Contents /verify.py: Verifies output from Subversion. - /entry.py: Parse an `entries' file (### not used yet) + /testcase.py: Control of test case execution - contains + decorators for expected failures and conditionally + executed tests. + + /sandbox.py: Tools for manipulating a test's working area + ("a sandbox"), those are handy for most simple + actions a test might want to perform on a wc. + /objects.py: Objects that keep track of state during a test. + (not directly used by the test scripts.) + /mergetrees.py: Routines that create merge scenarios. + + /factory.py: Automatically generate a (near-)complete new + cmdline test from a series of shell commands. + + /error.py: Error codes as constants, for convenience. 
+ (auto-generated by tools/dev/gen-py-error.py) + What the Python Tests are Doing =============================== @@ -341,7 +511,7 @@ the bottom of the file. To avoid renumbering of existing tests, you should add new tests to the end of the list. -Testing Compatability With Previous Release +Testing Compatibility With Previous Release =========================================== You can run the Python test suite against older installed versions of diff --git a/subversion/tests/cmdline/atomic-ra-revprop-change.c b/subversion/tests/cmdline/atomic-ra-revprop-change.c index 5ef443f..b2fd956 100644 --- a/subversion/tests/cmdline/atomic-ra-revprop-change.c +++ b/subversion/tests/cmdline/atomic-ra-revprop-change.c @@ -118,13 +118,13 @@ change_rev_prop(const char *url, } else if (! want_error && ! err) /* Expectation was matched. Get out. */ - return SVN_NO_ERROR; + return SVN_NO_ERROR; else if (want_error && ! err) return svn_error_create(SVN_ERR_TEST_FAILED, NULL, "An error was expected but not seen"); else - /* A real (non-SVN_ERR_FS_PROP_BASEVALUE_MISMATCH) error. */ - return svn_error_trace(err); + /* A real (non-SVN_ERR_FS_PROP_BASEVALUE_MISMATCH) error. */ + return svn_error_trace(err); } else /* Running under --server-minor-version? 
*/ diff --git a/subversion/tests/cmdline/authz_tests.py b/subversion/tests/cmdline/authz_tests.py index 20c833d..5e1952e 100755 --- a/subversion/tests/cmdline/authz_tests.py +++ b/subversion/tests/cmdline/authz_tests.py @@ -45,6 +45,7 @@ XFail = svntest.testcase.XFail_deco Issues = svntest.testcase.Issues_deco Issue = svntest.testcase.Issue_deco Wimp = svntest.testcase.Wimp_deco +SkipDumpLoadCrossCheck = svntest.testcase.SkipDumpLoadCrossCheck_deco ###################################################################### # Tests @@ -82,7 +83,7 @@ def authz_open_root(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, - None, + [], mu_path) #---------------------------------------------------------------------- @@ -118,11 +119,10 @@ def authz_open_directory(sbox): # Commit the working copy. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, - None, - wc_dir) + None) @Skip(svntest.main.is_ra_type_file) +@SkipDumpLoadCrossCheck() def broken_authz_file(sbox): "broken authz files cause errors" @@ -169,12 +169,12 @@ def authz_read_access(sbox): fws_empty_folder_url = fws_url + '/empty folder' if sbox.repo_url.startswith("http"): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: expected_err = ".*svn: E170001: Authorization failed.*" # create some folders with spaces in their names - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '-m', 'logmsg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'logmsg', fws_url, fws_empty_folder_url) write_restrictive_svnserve_conf(sbox.repo_dir) @@ -191,73 +191,65 @@ def authz_read_access(sbox): (svntest.main.wc_author + " = r")}) # read a remote file - svntest.actions.run_and_verify_svn(None, ["This is the file 'iota'.\n"], + svntest.actions.run_and_verify_svn(["This is the file 'iota'.\n"], [], 'cat', iota_url) # read a remote file, readably by user specific exception - svntest.actions.run_and_verify_svn(None, ["This is the 
file 'chi'.\n"], + svntest.actions.run_and_verify_svn(["This is the file 'chi'.\n"], [], 'cat', chi_url) # read a remote file, unreadable: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cat', lambda_url) # read a remote file, unreadable through recursion: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cat', alpha_url) # read a remote file, user specific authorization is ignored because * = rw - svntest.actions.run_and_verify_svn(None, ["This is the file 'pi'.\n"], + svntest.actions.run_and_verify_svn(["This is the file 'pi'.\n"], [], 'cat', pi_url) # open a remote folder(ls) - svntest.actions.run_and_verify_svn("ls remote root folder", - ["A/\n", "iota\n"], + svntest.actions.run_and_verify_svn(["A/\n", "iota\n"], [], 'ls', root_url) # open a remote folder(ls), unreadable: should fail - svntest.actions.run_and_verify_svn(None, - None, svntest.verify.AnyOutput, 'ls', + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'ls', B_url) # open a remote folder(ls) with spaces, should succeed - svntest.actions.run_and_verify_svn(None, - None, [], 'ls', + svntest.actions.run_and_verify_svn(None, [], 'ls', fws_empty_folder_url) # open a remote folder(ls), unreadable through recursion: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ls', E_url) # copy a remote file - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', iota_url, D_url, '-m', 'logmsg') # copy a remote file, source is unreadable: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '-m', 'logmsg', lambda_url, D_url) # copy a remote folder - svntest.actions.run_and_verify_svn(None, None, [], 'cp', 
+ svntest.actions.run_and_verify_svn(None, [], 'cp', C_url, D_url, '-m', 'logmsg') # copy a remote folder, source is unreadable: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '-m', 'logmsg', E_url, D_url) @@ -268,15 +260,13 @@ def authz_read_access(sbox): # into two operations, a committed copy followed by a committed # deletion. But the editor drive required to do these atomically # today is prohibitive. - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'mv', '-m', 'logmsg', alpha_url, F_alpha_url) ## copy a remote file, source/target ancestor is readonly ## we fail here due to issue #3242. - #svntest.actions.run_and_verify_svn(None, - # None, [], + #svntest.actions.run_and_verify_svn(# None, [], # 'cp', '-m', 'logmsg', # alpha_url, F_alpha_url) @@ -291,7 +281,7 @@ def authz_write_access(sbox): write_restrictive_svnserve_conf(sbox.repo_dir) if sbox.repo_url.startswith('http'): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: expected_err = ".*svn: E220004: Access denied.*" @@ -310,64 +300,55 @@ def authz_write_access(sbox): D_url = A_url + '/D' # copy a remote file, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '-m', 'logmsg', lambda_url, D_url) # copy a remote folder, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '-m', 'logmsg', E_url, D_url) # delete a file, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'rm', '-m', 'logmsg', iota_url) # delete a folder, target is readonly: should fail - 
svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'rm', '-m', 'logmsg', D_url) # create a folder, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'mkdir', '-m', 'logmsg', A_url+'/newfolder') # move a remote file, source is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'mv', '-m', 'logmsg', mu_url, C_url) # move a remote folder, source is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'mv', '-m', 'logmsg', D_url, C_url) # move a remote file, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'mv', '-m', 'logmsg', lambda_url, D_url) # move a remote folder, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'mv', '-m', 'logmsg', B_url, D_url) @@ -387,14 +368,14 @@ def authz_checkout_test(sbox): # write an authz file with *= on / if sbox.repo_url.startswith('http'): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: expected_err = ".*svn: E170001: Authorization failed.*" write_authz_file(sbox, { "/": "* ="}) # checkout a working copy, should fail - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'co', sbox.repo_url, local_dir) # 2nd part: now enable read access @@ -409,9 +390,9 @@ def authz_checkout_test(sbox): expected_wc = svntest.main.greek_state svntest.actions.run_and_verify_checkout(sbox.repo_url, - local_dir, - expected_output, - expected_wc) + local_dir, + expected_output, + 
expected_wc) @Skip(svntest.main.is_ra_type_file) def authz_checkout_and_update_test(sbox): @@ -473,9 +454,7 @@ def authz_checkout_and_update_test(sbox): expected_output, expected_wc, expected_status, - None, - None, None, - None, None, 1) + [], True) @Skip(svntest.main.is_ra_type_file) def authz_partial_export_test(sbox): @@ -524,7 +503,7 @@ def authz_log_and_tracing_test(sbox): # write an authz file with *=rw on / if sbox.repo_url.startswith('http'): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: expected_err = ".*svn: E170001: Authorization failed.*" @@ -538,24 +517,24 @@ def authz_log_and_tracing_test(sbox): rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho') svntest.main.file_append(rho_path, 'new appended text for rho') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'add file rho', sbox.wc_dir) svntest.main.file_append(rho_path, 'extra change in rho') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'changed file rho', sbox.wc_dir) # copy a remote file - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', rho_path, D_url, '-m', 'copy rho to readable area') # now disable read access on the first version of rho, keep the copy in # /A/D readable. if sbox.repo_url.startswith('http'): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: expected_err = ".*svn: E170001: Authorization failed.*" @@ -568,8 +547,7 @@ def authz_log_and_tracing_test(sbox): # changed file in this rev. 
is not readable anymore, so author and date # should be hidden, like this: # r2 | (no author) | (no date) | 1 line - svntest.actions.run_and_verify_svn(None, - ".*(no author).*(no date).*|-+\n|\n", [], + svntest.actions.run_and_verify_svn(".*(no author).*(no date).*|-+\n|\n", [], 'log', '-r', '2', '--limit', '1', wc_dir) @@ -581,20 +559,19 @@ def authz_log_and_tracing_test(sbox): # if we do the same thing directly on the unreadable file, we get: # svn: Item is not readable - svntest.actions.run_and_verify_svn(None, None, expected_err2, + svntest.actions.run_and_verify_svn(None, expected_err2, 'log', rho_path) # while the HEAD rev of the copy is readable in /A/D, its parent in # /A/D/G is not, so don't spill any info there either. - svntest.actions.run_and_verify_svn(None, - ".*(no author).*(no date).*|-+\n|\n", [], + svntest.actions.run_and_verify_svn(".*(no author).*(no date).*|-+\n|\n", [], 'log', '-r', '2', '--limit', '1', D_url) # Test that only author/date are shown for partially visible revisions. 
svntest.actions.enable_revprop_changes(sbox.repo_dir) write_authz_file(sbox, { "/": "* = rw"}) svntest.actions.run_and_verify_svn( - None, None, [], # message, expected_stdout, expected_stderr + None, [], # expected_stdout, expected_stderr 'ps', '--revprop', '-r1', 'foobar', 'foo bar', sbox.repo_url) svntest.actions.run_and_verify_log_xml( expected_revprops=[{'svn:author': svntest.main.wc_author, 'svn:date': '', @@ -610,7 +587,10 @@ def authz_log_and_tracing_test(sbox): ## cat # now see if we can look at the older version of rho - svntest.actions.run_and_verify_svn(None, None, expected_err, + + expected_err2 = ".*svn: E195012: Unable to find repository location.*" + + svntest.actions.run_and_verify_svn(None, expected_err2, 'cat', '-r', '2', D_url+'/rho') if sbox.repo_url.startswith('http'): @@ -618,19 +598,20 @@ def authz_log_and_tracing_test(sbox): else: expected_err2 = ".*svn: E220001: Unreadable path encountered; access denied.*" - svntest.actions.run_and_verify_svn(None, None, expected_err2, + svntest.actions.run_and_verify_svn(None, expected_err2, 'cat', '-r', '2', G_url+'/rho') ## diff # we shouldn't see the diff of a file in an unreadable path - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'diff', '-r', 'HEAD', G_url+'/rho') - svntest.actions.run_and_verify_svn(None, None, expected_err, + # diff treats the unreadable path as indicating an add so no error + svntest.actions.run_and_verify_svn(None, [], 'diff', '-r', '2', D_url+'/rho') - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, [], 'diff', '-r', '2:4', D_url+'/rho') # test whether read access is correctly granted and denied @@ -644,7 +625,7 @@ def authz_aliases(sbox): write_restrictive_svnserve_conf(sbox.repo_dir) if sbox.repo_url.startswith("http"): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: expected_err = ".*svn: E170001: 
Authorization failed.*" @@ -658,16 +639,14 @@ def authz_aliases(sbox): iota_url = root_url + '/iota' # copy a remote file, target is readonly for jconstant: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '--username', svntest.main.wc_author2, '-m', 'logmsg', iota_url, B_url) # try the same action, but as user jray (alias of jrandom), should work. - svntest.actions.run_and_verify_svn(None, - None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'logmsg', iota_url, B_url) @@ -691,15 +670,14 @@ def authz_validate(sbox): "/A/B" : "@undefined_group = rw" }) if sbox.repo_url.startswith("http"): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" elif sbox.repo_url.startswith("svn"): expected_err = ".*Invalid authz configuration" else: expected_err = ".*@undefined_group.*" # validation of this authz file should fail, so no repo access - svntest.actions.run_and_verify_svn("ls remote folder", - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ls', A_url) @@ -711,15 +689,14 @@ devs2 = @admins, dev2 devs = @devs1, dev3, dev4""" }) if sbox.repo_url.startswith("http"): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" elif sbox.repo_url.startswith("svn"): expected_err = ".*Invalid authz configuration" else: expected_err = ".*Circular dependency.*" # validation of this authz file should fail, so no repo access - svntest.actions.run_and_verify_svn("ls remote folder", - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ls', A_url) @@ -733,8 +710,7 @@ users = @devs1, @devs2, user1, user2""" }) # validation of this authz file should *not* fail (where formerly, # it complained about circular dependencies that do not, in fact, # exist), so this is business as usual. 
- svntest.actions.run_and_verify_svn("ls remote folder", - ['B/\n', 'C/\n', 'D/\n', 'mu\n'], + svntest.actions.run_and_verify_svn(['B/\n', 'C/\n', 'D/\n', 'mu\n'], [], 'ls', A_url) @@ -751,9 +727,9 @@ def authz_locking(sbox): write_restrictive_svnserve_conf(sbox.repo_dir) if sbox.repo_url.startswith('http'): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: - expected_err = ".*svn: E170001: Authorization failed.*" + expected_err = ".*svn: warning: W170001: Authorization failed.*" root_url = sbox.repo_url wc_dir = sbox.wc_dir @@ -763,15 +739,13 @@ def authz_locking(sbox): mu_path = os.path.join(wc_dir, 'A', 'mu') # lock a file url, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'lock', '-m', 'lock msg', iota_url) # lock a file path, target is readonly: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'lock', '-m', 'lock msg', iota_path) @@ -780,8 +754,7 @@ def authz_locking(sbox): # Get a lock on /A/mu and try to commit it. # lock a file path, target is writeable: should succeed - svntest.actions.run_and_verify_svn(None, - None, [], + svntest.actions.run_and_verify_svn(None, [], 'lock', '-m', 'lock msg', mu_path) @@ -795,9 +768,38 @@ def authz_locking(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, [], - None, + [], mu_path) + # Lock two paths one of which fails. First add read access to '/' so + # that OPTIONS on common ancestor works. 
+ write_authz_file(sbox, {"/": "jrandom = r", "/A": "jrandom = rw"}) + + # Two unlocked paths + svntest.actions.run_and_verify_info([{'Lock Token' : None}], + sbox.ospath('iota')) + svntest.actions.run_and_verify_info([{'Lock Token' : None}], + sbox.ospath('A/mu')) + + if sbox.repo_url.startswith('http'): + expected_err = ".*svn: warning: W160039: Unlock.*[Ff]orbidden.*" + else: + expected_err = ".*svn: warning: W170001: Authorization failed.*" + + svntest.actions.run_and_verify_svn(None, expected_err, + 'lock', + '-m', 'lock msg', + mu_path, + iota_path) + + # One path locked, one still unlocked + svntest.actions.run_and_verify_info([{'Lock Token' : None}], + sbox.ospath('iota')) + svntest.actions.run_and_verify_info([{'Lock Token' : 'opaquelocktoken:.*'}], + sbox.ospath('A/mu')) + + + # test for issue #2712: if anon-access == read, svnserve should also check # authz to determine whether a checkout/update is actually allowed for # anonymous users, and, if not, attempt authentication. @@ -824,23 +826,23 @@ def authz_svnserve_anon_access_read(sbox): "/A/D" : "* = r" }) # Perform a checkout of /A/B, expecting to see no errors. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', B_url, B_path) # Anonymous users should be able to check out /A/D. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', D_url, D_path) # Now try a switch. svntest.main.safe_rmtree(D_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'switch', D_url, B_path) # Check out /A/B with an unknown username, expect error. svntest.actions.run_and_verify_svn( - None, None, + None, ".*Authentication error from server: Username not found.*", 'checkout', '--non-interactive', @@ -848,12 +850,12 @@ def authz_svnserve_anon_access_read(sbox): B_url, B_path + '_unsuccessful') # Check out a second copy of /A/B, make changes for later merge. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', B_url, other_B_path) other_alpha_path = os.path.join(other_B_path, 'E', 'alpha') svntest.main.file_append(other_alpha_path, "fish\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'log msg', other_B_path) @@ -862,7 +864,7 @@ def authz_svnserve_anon_access_read(sbox): # authz here, not the semantics of the merge. (Merges had been # failing in authz, for the reasons summarized in # http://subversion.tigris.org/issues/show_bug.cgi?id=2712#desc13.) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c', '2', B_url, B_path) @@ -980,13 +982,13 @@ def multiple_matches(sbox): root_url = sbox.repo_url write_restrictive_svnserve_conf(sbox.repo_dir) if sbox.repo_url.startswith("http"): - expected_err = ".*[Ff]orbidden.*" + expected_err = ".*svn: E175013: .*[Ff]orbidden.*" else: expected_err = ".*svn: E170001: Authorization failed.*" # Prohibit access and commit fails write_authz_file(sbox, {'/': 'jrandom ='}) - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '-m', 'fail copy', root_url, root_url + '/fail') @@ -1033,7 +1035,7 @@ def wc_wc_copy_revert(sbox): expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta') svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: E155035: Cannot copy.*excluded by server', 'cp', sbox.ospath('A'), sbox.ospath('A2')) @@ -1046,17 +1048,17 @@ def wc_wc_copy_revert(sbox): '! - ? ? 
' + sbox.ospath('A2/B/E') + '\n', ]) expected_output.match_all = False - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '--verbose', sbox.ospath('A2')) # Issue 4025, info SEGV on incomplete working node - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: E145000: .*unrecognized node kind', 'info', sbox.ospath('A2/B/E')) # Issue 4026, copy assertion on incomplete working node - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: E145001: cannot handle node kind', 'cp', sbox.ospath('A2/B'), sbox.ospath('B3')) @@ -1065,10 +1067,10 @@ def wc_wc_copy_revert(sbox): '! - ? ? ' + sbox.ospath('B3/E') + '\n', ]) expected_output.match_all = False - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '--verbose', sbox.ospath('B3')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', sbox.ospath('A2'), sbox.ospath('B3')) @@ -1106,8 +1108,7 @@ def authz_recursive_ls(sbox): 'A/D/gamma', 'iota', ] - svntest.actions.run_and_verify_svn('recursive ls from /', - map(lambda x: x + '\n', expected_entries), + svntest.actions.run_and_verify_svn(map(lambda x: x + '\n', expected_entries), [], 'ls', '-R', sbox.repo_url) @@ -1132,7 +1133,7 @@ def case_sensitive_authz(sbox): }) # error messages - expected_error_for_commit = "Commit failed" + expected_error_for_commit = ".*Commit failed.*" if sbox.repo_url.startswith("http"): expected_error_for_cat = ".*[Ff]orbidden.*" @@ -1142,7 +1143,7 @@ def case_sensitive_authz(sbox): # test the case-sensitivity of the path inside the repo write_authz_file(sbox, {"/": "jrandom = r", "/A/mu": "jrandom =", "/a/Mu": "jrandom = rw"}) - svntest.actions.run_and_verify_svn2(None, None, + svntest.actions.run_and_verify_svn2(None, expected_error_for_cat, 
1, 'cat', mu_url) @@ -1175,7 +1176,7 @@ def case_sensitive_authz(sbox): os.path.basename(sbox.repo_dir) + ":/A/mu": "jrandom =", mixed_case_repo_dir + ":/A/mu": "jrandom = rw"} write_authz_file(sbox, {}, sec_mixed_case) - svntest.actions.run_and_verify_svn2(None, None, + svntest.actions.run_and_verify_svn2(None, expected_error_for_cat, 1, 'cat', mu_url) @@ -1195,14 +1196,13 @@ def case_sensitive_authz(sbox): write_authz_file(sbox, {"/": "jrandom = r", "/A": "jrandom = r", "/A/mu": "jrandom = rw"}) - svntest.actions.run_and_verify_svn2('No error', - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn2(svntest.verify.AnyOutput, [], 0, 'cat', mu_url) # Commit the file. svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, - None, + [], mu_path) @Skip(svntest.main.is_ra_type_file) @@ -1221,17 +1221,17 @@ def authz_tree_conflict(sbox): # And now create an obstruction sbox.simple_mkdir('A/C') - expected_output = svntest.wc.State(wc_dir, {}) - expected_status = svntest.actions.get_virginal_state(wc_dir, 2) - expected_status.tweak('A/C', status='A ', wc_rev='0') - expected_status.tweak('A', '', status='! 
', wc_rev='1') + expected_output = svntest.wc.State(wc_dir, { + 'A/C' : Item(status=' ', treeconflict='C'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/C', status='R ', treeconflict='C') svntest.actions.run_and_verify_update(wc_dir, expected_output, None, expected_status, - "Failed to mark '.*C' (server|absent):", - None, None, None, None, 0, + [], False, '-r', '1', wc_dir) @Issue(3900) @@ -1260,9 +1260,9 @@ def wc_delete(sbox): expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1) expected_err = ".*svn: E155035: .*excluded by server*" - svntest.actions.run_and_verify_svn(None, None, expected_err, - 'rm', sbox.ospath('A/B/E')) - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, + 'rm', sbox.ospath('A/B/E'), '--force') + svntest.actions.run_and_verify_svn(None, expected_err, 'rm', sbox.ospath('A')) expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1) @@ -1281,9 +1281,9 @@ def wc_commit_error_handling(sbox): write_authz_file(sbox, {'/' : '* = r', }) # Creating editor fail: unfriendly error - expected_err = "(svn: E175013: .*orbidden.*)|" + \ + expected_err = "(svn: E175013: .*[Ff]orbidden.*)|" + \ "(svn: E170001: Authorization failed)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') write_authz_file(sbox, {'/' : '* = rw', @@ -1294,7 +1294,7 @@ def wc_commit_error_handling(sbox): expected_err = "(svn: E195023: Changing directory '.*Z' is forbidden)|" + \ "(svn: E220004: Access denied)|" + \ "(svn: E175013: Access to '.*Z' forbidden)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') sbox.simple_revert('A/Z') @@ -1307,7 +1307,7 @@ def wc_commit_error_handling(sbox): expected_err = "(svn: E195023: Changing file '.*zeta' is 
forbidden)|" + \ "(svn: E220004: Access denied)|" + \ "(svn: E175013: Access to '.*zeta' forbidden)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') sbox.simple_revert('A/zeta') @@ -1315,9 +1315,9 @@ def wc_commit_error_handling(sbox): # Allow a generic dav error and the ra_svn specific one that is returned # on editor->edit_close(). - expected_err = "(svn: E175013: .*orbidden.*)|" + \ + expected_err = "(svn: E175013: .*[Ff]orbidden.*)|" + \ "(svn: E220004: Access denied)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') sbox.simple_revert('A/D') @@ -1329,7 +1329,7 @@ def wc_commit_error_handling(sbox): expected_err = "(svn: E195023: Changing file '.*lambda' is forbidden.*)|" + \ "(svn: E220004: Access denied)|" + \ "(svn: E175013: Access to '.*lambda' forbidden)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') sbox.simple_revert('A/B/lambda') @@ -1340,7 +1340,7 @@ def wc_commit_error_handling(sbox): expected_err = "(svn: E195023: Changing file '.*lambda' is forbidden.*)|" + \ "(svn: E220004: Access denied)|" + \ "(svn: E175013: Access to '.*lambda' forbidden)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') sbox.simple_revert('A/B/lambda') @@ -1351,7 +1351,7 @@ def wc_commit_error_handling(sbox): expected_err = "(svn: E195023: Changing directory '.*F' is forbidden.*)|" + \ "(svn: E220004: Access denied)|" + \ "(svn: E175013: Access to '.*F' forbidden)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') sbox.simple_revert('A/B/F') @@ -1361,7 +1361,7 @@ def wc_commit_error_handling(sbox): 
expected_err = "(svn: E195023: Changing file '.*mu' is forbidden.*)|" + \ "(svn: E220004: Access denied)|" + \ "(svn: E175013: Access to '.*mu' forbidden)" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'ci', wc_dir, '-m', '') @@ -1381,15 +1381,15 @@ def upgrade_absent(sbox): # Attempt to use the working copy, this should give an error expected_stderr = wc_is_too_old_regex - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'info', sbox.wc_dir) # Now upgrade the working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) # Relocate to allow finding the repository - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', 'svn://127.0.0.1/authz_tests-2', sbox.repo_url, sbox.wc_dir) @@ -1430,9 +1430,7 @@ def remove_subdir_with_authz_and_tc(sbox): expected_output, None, expected_status, - None, - None, None, - None, None, False, + [], False, wc_dir, '-r', '1') # Perform some edit operation to introduce a tree conflict @@ -1447,11 +1445,7 @@ def remove_subdir_with_authz_and_tc(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, None, - None, - None, - None, None, - None, None, False, - wc_dir) + None) @SkipUnless(svntest.main.is_ra_type_svn) def authz_svnserve_groups(sbox): @@ -1481,37 +1475,34 @@ def authz_svnserve_groups(sbox): expected_err = ".*svn: E170001: Authorization failed.*" # read a remote file - svntest.actions.run_and_verify_svn(None, ["This is the file 'lambda'.\n"], + svntest.actions.run_and_verify_svn(["This is the file 'lambda'.\n"], [], 'cat', lambda_url) # read a remote file - svntest.actions.run_and_verify_svn(None, ["This is the file 'pi'.\n"], + svntest.actions.run_and_verify_svn(["This is the file 'pi'.\n"], [], 'cat', pi_url) # read a remote 
file, unreadable: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cat', alpha_url) # copy a remote file, source is unreadable: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '-m', 'logmsg', alpha_url, B_url) # copy a remote folder - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'logmsg', F_url, D_url) # copy a remote folder, source is unreadable: should fail - svntest.actions.run_and_verify_svn(None, - None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'cp', '-m', 'logmsg', E_url, D_url) @@ -1527,27 +1518,26 @@ def authz_del_from_subdir(sbox): write_restrictive_svnserve_conf(sbox.repo_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/A/mu', '-m', '') -@XFail() @SkipUnless(svntest.main.is_ra_type_dav) # dontdothat is dav only -@SkipUnless(svntest.main.is_os_windows) # until the buildbots are configured def log_diff_dontdothat(sbox): "log --diff on dontdothat" sbox.build(create_wc = False) ddt_url = sbox.repo_url.replace('/svn-test-work/', '/ddt-test-work/') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'log', sbox.repo_url, '-c', 1, '--diff') # We should expect a PASS or a proper error message instead of # svn: E175009: XML parsing failed: (403 Forbidden) - svntest.actions.run_and_verify_svn(None, None, [], + expected_err = ".*E175013: Access to '.*authz_tests-28.*' forbidden" + svntest.actions.run_and_verify_svn(None, expected_err, 'log', ddt_url, '-c', 1, '--diff') @@ -1573,7 +1563,7 @@ def authz_file_external_to_authz(sbox): svntest.actions.run_and_verify_update(wc_dir, None, None, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], + 
svntest.actions.run_and_verify_svn(None, [], 'cp', repo_url + '/A', repo_url + '/Z', '-m', 'Add Z') @@ -1589,6 +1579,88 @@ def authz_file_external_to_authz(sbox): svntest.actions.run_and_verify_update(wc_dir, None, None, expected_status) +@Skip(svntest.main.is_ra_type_file) +def authz_log_censor_revprops(sbox): + "log censors revprops for partially visible revs" + + sbox.build(create_wc = False) + + svntest.actions.enable_revprop_changes(sbox.repo_dir) + write_restrictive_svnserve_conf(sbox.repo_dir) + write_authz_file(sbox, {"/" : "* = rw"}) + + # Add the revision property 's'. + svntest.actions.run_and_verify_svn(None, [], 'ps', '--revprop', + '-r1', 's', 'secret', sbox.repo_url) + + # With blanket access, both 'svn:author' and 's' are a part of the output. + svntest.actions.run_and_verify_log_xml( + expected_revprops=[{'svn:author': svntest.main.wc_author, 's': 'secret'}], + args=['--with-revprop', 'svn:author', '--with-revprop', 's', + '-r1', sbox.repo_url]) + + # Make the revision partially visible, but ask for both 'svn:author' and + # 's'. The second revision property should be censored out, as we only + # allow 'svn:author' and 'svn:date' for partially visible revisions. + # This used to fail around trunk@1658379. 
+ write_authz_file(sbox, {"/" : "* = rw", "/A/B" : "* = "}) + + svntest.actions.run_and_verify_log_xml( + expected_revprops=[{'svn:author': svntest.main.wc_author}], + args=['--with-revprop', 'svn:author', '--with-revprop', 's', + '-r1', sbox.repo_url]) + +@Skip(svntest.main.is_ra_type_file) +def remove_access_after_commit(sbox): + "remove a subdir with authz file" + + sbox.build() + wc_dir = sbox.wc_dir + + svntest.main.write_restrictive_svnserve_conf(sbox.repo_dir) + svntest.main.write_authz_file(sbox, { "/" : "*=rw"}) + + # Modification in subtree + sbox.simple_append('A/B/E/alpha', 'appended\n') + sbox.simple_append('A/D/G/rho', 'appended\n') + sbox.simple_commit() + + svntest.main.write_authz_file(sbox, { "/" : "*=rw", + "/A/B" : "*=", + "/A/D" : "*="}) + + # Local modification + sbox.simple_append('A/D/G/pi', 'appended\n') + + expected_output = svntest.wc.State(wc_dir, { + 'A/B' : Item(status='D '), + 'A/D' : Item(status=' ', treeconflict='C'), + }) + expected_disk = svntest.main.greek_state.copy() + expected_disk.tweak('A/D/G/rho', + contents="This is the file 'rho'.\nappended\n") + expected_disk.tweak('A/D/G/pi', + contents="This is the file 'pi'.\nappended\n") + expected_disk.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', + 'A/B/F', 'A/B/lambda') + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + + expected_status.tweak('A/D', status='R ',treeconflict='C', ) + expected_status.tweak('A/D', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', + 'A/D/H', 'A/D/H/omega', 'A/D/H/chi', 'A/D/H/psi', + 'A/D/gamma', copied='+', wc_rev='-') + expected_status.tweak('A/D/G/pi', status='M ') + expected_status.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', + 'A/B/lambda') + + # And expect a mixed rev copy + expected_status.tweak('A/D/G/rho', status='A ', entry_status=' ') + svntest.actions.run_and_verify_update(wc_dir, + expected_output, + expected_disk, + expected_status, + [], True) + 
######################################################################## # Run the tests @@ -1624,6 +1696,8 @@ test_list = [ None, authz_del_from_subdir, log_diff_dontdothat, authz_file_external_to_authz, + authz_log_censor_revprops, + remove_access_after_commit, ] serial_only = True diff --git a/subversion/tests/cmdline/autoprop_tests.py b/subversion/tests/cmdline/autoprop_tests.py index f27504f..d6a65a2 100755 --- a/subversion/tests/cmdline/autoprop_tests.py +++ b/subversion/tests/cmdline/autoprop_tests.py @@ -102,9 +102,7 @@ def autoprops_test(sbox, cmd, cfgenable, clienable, subdir): # some directories wc_dir = sbox.wc_dir - tmp_dir = os.path.join(os.path.abspath(svntest.main.temp_dir), sbox.name) - if not os.path.isdir(tmp_dir): - os.makedirs(tmp_dir) + tmp_dir = os.path.abspath(sbox.add_wc_path('autoprops')) config_dir = os.path.join(tmp_dir, 'autoprops_config_' + sbox.name) repos_url = sbox.repo_url @@ -320,7 +318,7 @@ def fail_add_mixed_eol_style(sbox): expected_stderr = "svn: E200009: File '.*" + filename + \ "' has inconsistent newlines" + \ "|" + "svn: E135000: Inconsistent line ending style\n" - run_and_verify_svn(None, [], expected_stderr, + run_and_verify_svn([], expected_stderr, 'add', filepath, *parameters) expected_status = svntest.wc.State(sbox.wc_dir, @@ -417,7 +415,7 @@ def inheritable_autoprops_test(sbox, cmd, cfgenable, clienable, subdir, # some directories wc_dir = sbox.wc_dir - tmp_dir = os.path.abspath(svntest.main.temp_dir) + tmp_dir = os.path.abspath(sbox.add_wc_path('iautoprops')) config_dir = os.path.join(tmp_dir, 'autoprops_config_' + sbox.name) repos_url = sbox.repo_url @@ -468,7 +466,7 @@ def inheritable_autoprops_test(sbox, cmd, cfgenable, clienable, subdir, sbox.simple_propset(SVN_PROP_INHERITABLE_AUTOPROPS, '*.py = svn:mime-type=text/x-python', 'A/D') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add some ' + SVN_PROP_INHERITABLE_AUTOPROPS + ' 
properties', wc_dir) @@ -648,7 +646,7 @@ def svn_prop_inheritable_autoprops_add_versioned_target(sbox): # # Then revert the previous additions and add again, only the # svn:auto-props should be applied. - tmp_dir = os.path.abspath(svntest.main.temp_dir) + tmp_dir = os.path.abspath(sbox.add_wc_path('temp')) config_dir = os.path.join(tmp_dir, 'autoprops_config_disabled_' + sbox.name) create_inherited_autoprops_config(config_dir, False) @@ -676,7 +674,7 @@ def svn_prop_inheritable_autoprops_propset_file_target(sbox): sbox.build() svntest.actions.run_and_verify_svn( - None, None, + None, ".*Cannot set '" + SVN_PROP_INHERITABLE_AUTOPROPS + "' on a file.*", 'ps', SVN_PROP_INHERITABLE_AUTOPROPS, '*.c=svn:eol-style=native', sbox.ospath('iota')) @@ -697,8 +695,7 @@ def svn_prop_inheritable_autoprops_unversioned_subtrees_versioned_target(sbox): '*.c=svn:eol-style=CR', sbox.ospath('A/B')) svntest.main.run_svn(None, 'ps', SVN_PROP_INHERITABLE_AUTOPROPS, '*.c=svn:eol-style=native', sbox.ospath('A/D')) - svntest.main.run_svn(None, 'ci', '-m', 'Add inheritable autoprops', - sbox.wc_dir) + sbox.simple_commit(message='Add inheritable autoprops') # Create two subtrees, each with one new file. os.mkdir(Z_path) @@ -721,9 +718,9 @@ def svn_prop_inheritable_autoprops_unversioned_subtrees_versioned_target(sbox): os.chdir(saved_wd) # Check the resulting autoprops. 
- svntest.actions.run_and_verify_svn(None, 'native\n', [], + svntest.actions.run_and_verify_svn('native\n', [], 'pg', 'svn:eol-style', foo_path) - svntest.actions.run_and_verify_svn(None, 'CR\n', [], + svntest.actions.run_and_verify_svn('CR\n', [], 'pg', 'svn:eol-style', bar_path) ######################################################################## diff --git a/subversion/tests/cmdline/basic_tests.py b/subversion/tests/cmdline/basic_tests.py index c51d80a..58f7554 100755 --- a/subversion/tests/cmdline/basic_tests.py +++ b/subversion/tests/cmdline/basic_tests.py @@ -60,8 +60,7 @@ def basic_checkout(sbox): # Checkout of a different URL into a working copy fails A_url = sbox.repo_url + '/A' - svntest.actions.run_and_verify_svn("No error where some expected", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, # "Obstructed update", 'co', A_url, wc_dir) @@ -73,7 +72,7 @@ def basic_checkout(sbox): os.remove(lambda_path) G_path = sbox.ospath('A/D/G') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', G_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', G_path) expected_output = svntest.actions.get_virginal_state(wc_dir, 1) expected_output.tweak('A/mu', status='M ') @@ -88,7 +87,7 @@ def basic_checkout(sbox): # Repeat checkout of original URL into working copy with modifications url = sbox.repo_url - svntest.actions.run_and_verify_svn("Repeat checkout failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', url, wc_dir) @@ -141,9 +140,7 @@ def basic_commit(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -177,7 +174,7 @@ def basic_update(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create expected output tree for an update of the wc_backup. 
expected_output = wc.State(wc_backup, { @@ -204,17 +201,30 @@ def basic_update(sbox): expected_status) # Unversioned paths, those that are not immediate children of a versioned - # path, are skipped and do not raise an error + # path, are skipped and do raise an error if they are the only targets xx_path = sbox.ospath('xx/xx') - exit_code, out, err = svntest.actions.run_and_verify_svn( - "update xx/xx", - ["Skipped '"+xx_path+"'\n", - ] + svntest.main.summary_of_conflicts(skipped_paths=1), - [], 'update', xx_path) - exit_code, out, err = svntest.actions.run_and_verify_svn( - "update xx/xx", [], [], + expected_err = "svn: E155007: " + svntest.actions.run_and_verify_svn( + ["Skipped '"+xx_path+"'\n", ], + expected_err, + 'update', xx_path) + svntest.actions.run_and_verify_svn( + [], expected_err, 'update', '--quiet', xx_path) + # Unversioned paths, that are not the only targets of the command are + # skipped without an error + svntest.actions.run_and_verify_svn( + ["Updating '"+mu_path+"':\n", + "At revision 2.\n", + "Skipped '"+xx_path+"'\n", + "Summary of updates:\n", + " Updated '"+mu_path+"' to r2.\n" + ] + svntest.main.summary_of_conflicts(skipped_paths=1), + [], 'update', mu_path, xx_path) + svntest.actions.run_and_verify_svn( + [], [], 'update', '--quiet', mu_path, xx_path) + #---------------------------------------------------------------------- def basic_mkdir_url(sbox): "basic mkdir URL" @@ -224,8 +234,8 @@ def basic_mkdir_url(sbox): Y_url = sbox.repo_url + '/Y' Y_Z_url = sbox.repo_url + '/Y/Z' - svntest.actions.run_and_verify_svn("mkdir URL URL/subdir", - ["\n", "Committed revision 2.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 2.\n"], [], 'mkdir', '-m', 'log_msg', Y_url, Y_Z_url) expected_output = wc.State(sbox.wc_dir, { @@ -262,19 +272,18 @@ def basic_mkdir_url_with_parents(sbox): U_url = sbox.repo_url + '/U' U_V_url = sbox.repo_url + '/U/V' U_V_W_url = sbox.repo_url + '/U/V/W' - 
svntest.actions.run_and_verify_svn("erroneous mkdir sans --parents", - [], + svntest.actions.run_and_verify_svn(None, ".*Try 'svn mkdir --parents' instead.*", 'mkdir', '-m', 'log_msg', X_Y_Z_url, X_Y_Z2_url, X_T_C_url, U_V_W_url) - svntest.actions.run_and_verify_svn("mkdir", - ["\n", "Committed revision 2.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 2.\n"], [], 'mkdir', '-m', 'log_msg', X_url, U_url) - svntest.actions.run_and_verify_svn("mkdir --parents", - ["\n", "Committed revision 3.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 3.\n"], [], 'mkdir', '-m', 'log_msg', '--parents', X_Y_Z_url, X_Y_Z2_url, X_T_C_url, U_V_W_url) @@ -328,11 +337,11 @@ def basic_mkdir_wc_with_parents(sbox): Y_Z_path = sbox.ospath('Y/Z') - svntest.actions.run_and_verify_svn("erroneous mkdir dir/subdir", [], + svntest.actions.run_and_verify_svn([], ".*Try 'svn mkdir --parents' instead.*", 'mkdir', Y_Z_path) - svntest.actions.run_and_verify_svn("mkdir dir/subdir", None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '--parents', Y_Z_path) # Verify the WC status, because there was a regression in which parts of @@ -395,8 +404,7 @@ def basic_commit_corruption(sbox): # This commit should fail due to text base corruption. svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, # expected_status, - "svn: E200014: Checksum", - wc_dir) + "svn: E200014: Checksum") # Restore the uncorrupted text base. os.chmod(tb_dir_path, 0777) @@ -408,7 +416,7 @@ def basic_commit_corruption(sbox): # This commit should succeed. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) #---------------------------------------------------------------------- def basic_update_corruption(sbox): @@ -433,7 +441,7 @@ def basic_update_corruption(sbox): # Make the "other" working copy other_wc = sbox.add_wc_path('other') - svntest.actions.run_and_verify_svn("Checkout to wc2", None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url, other_wc) # Make a local mod to mu @@ -452,7 +460,7 @@ def basic_update_corruption(sbox): # This commit should succeed. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create expected output tree for an update of the other_wc. expected_output = wc.State(other_wc, { @@ -488,7 +496,7 @@ def basic_update_corruption(sbox): fail_output, expected_disk, fail_status, - "svn: E155017: Checksum", other_wc) + "svn: E155017: Checksum") # Restore the uncorrupted text base. os.chmod(tb_dir_path, 0777) @@ -545,7 +553,7 @@ def basic_merging_update(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir) # Make a backup copy of the working copy @@ -571,7 +579,7 @@ def basic_merging_update(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir) # Make local mods to wc_backup by recreating mu and rho @@ -652,7 +660,7 @@ def basic_conflict(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create expected output tree for an update of the wc_backup. 
expected_output = wc.State(wc_backup, { @@ -666,6 +674,7 @@ def basic_conflict(sbox): contents="\n".join(["This is the file 'mu'.", "<<<<<<< .mine", "Conflicting appended text for mu", + "||||||| .r1", "=======", "Original appended text for mu", ">>>>>>> .r2", @@ -674,6 +683,7 @@ def basic_conflict(sbox): contents="\n".join(["This is the file 'rho'.", "<<<<<<< .mine", "Conflicting appended text for rho", + "||||||| .r1", "=======", "Original appended text for rho", ">>>>>>> .r2", @@ -694,9 +704,7 @@ def basic_conflict(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files) + extra_files=extra_files) # verify that the extra_files list is now empty. if len(extra_files) != 0: @@ -753,7 +761,7 @@ def basic_cleanup(sbox): svntest.main.safe_rmtree(tmp_path) # Run cleanup (### todo: cleanup doesn't currently print anything) - svntest.actions.run_and_verify_svn("Cleanup command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir) # Verify unlocked status. 
@@ -781,12 +789,12 @@ def basic_revert(sbox): svntest.main.file_append(rho_path, "Added some text to 'rho'.\n") svntest.main.file_append(zeta_path, "Added some text to 'zeta'.\n") - svntest.actions.run_and_verify_svn("Add command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', zeta_path) - svntest.actions.run_and_verify_svn("Add prop command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', gamma_path) - svntest.actions.run_and_verify_svn("Add prop command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', iota_path) @@ -802,19 +810,19 @@ def basic_revert(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_output) # Run revert (### todo: revert doesn't currently print anything) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', beta_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', gamma_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', iota_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', rho_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', zeta_path) # Verify unmodified status. @@ -846,7 +854,7 @@ def basic_revert(sbox): # Finally, check that reverted file is not readonly os.remove(beta_path) - svntest.actions.run_and_verify_svn(None, None, [], 'revert', beta_path) + svntest.actions.run_and_verify_svn(None, [], 'revert', beta_path) if not (open(beta_path, 'r+')): raise svntest.Failure @@ -854,7 +862,7 @@ def basic_revert(sbox): # removed, can be reverted. 
X_path = sbox.ospath('X') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', X_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', X_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.add({ @@ -863,7 +871,7 @@ def basic_revert(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) svntest.main.safe_rmtree(X_path) - svntest.actions.run_and_verify_svn(None, None, [], 'revert', X_path) + svntest.actions.run_and_verify_svn(None, [], 'revert', X_path) expected_status.remove('X') svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -875,7 +883,7 @@ def basic_revert(sbox): ### Most of the rest of this test is ineffective, due to the ### problems described in issue #1611. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', E_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', E_path) svntest.main.safe_rmtree(E_path) expected_status.tweak('A/B/E', status='D ') expected_status.tweak('A/B/E', wc_rev='?') @@ -916,7 +924,7 @@ def basic_revert(sbox): # try updating instead." # # ...but due to test suite lossage, it'll still look like success. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', E_path) + svntest.actions.run_and_verify_svn(None, [], 'revert', E_path) ### FIXME: Again, the problem described in issue #1611 bites us here. # @@ -956,7 +964,7 @@ def basic_switch(sbox): # First, try the switch without the --ignore-ancestry flag, # expecting failure. expected_error = "svn: E195012: .*no common ancestry.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'switch', gamma_url, iota_path) # Now ignore ancestry so we can ge through this switch. @@ -964,7 +972,7 @@ def basic_switch(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') ### Switch the directory `A/D/H' to `A/D/G'. 
@@ -1016,7 +1024,7 @@ def basic_switch(sbox): # First, try the switch without the --ignore-ancestry flag, # expecting failure. expected_error = "svn: E195012: .*no common ancestry.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'switch', ADG_url, ADH_path) # Do the switch and check the results in three ways. @@ -1024,7 +1032,7 @@ def basic_switch(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') #---------------------------------------------------------------------- @@ -1108,31 +1116,31 @@ def basic_delete(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_output) # 'svn rm' that should fail - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', chi_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', chi_parent_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', rho_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', rho_parent_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', F_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', F_parent_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', sigma_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + 
svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', sigma_parent_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', X_path) # check status has not changed @@ -1140,22 +1148,22 @@ def basic_delete(sbox): # 'svn rm' that should work E_path = sbox.ospath('A/B/E') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', E_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', E_path) # 'svn rm --force' that should work - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--force', + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', chi_parent_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', rho_parent_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', F_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', sigma_parent_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', X_path) # Deleting an unchanged copy shouldn't error. 
@@ -1167,18 +1175,18 @@ def basic_delete(sbox): # Deleting already removed from wc versioned item with --force iota_path = sbox.ospath('iota') os.remove(iota_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', iota_path) # and without --force gamma_path = sbox.ospath('A/D/gamma') os.remove(gamma_path) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', gamma_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', gamma_path) # Deleting already scheduled for deletion doesn't require --force - svntest.actions.run_and_verify_svn(None, None, [], 'rm', gamma_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', gamma_path) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', E_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', E_path) # check status expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1204,7 +1212,7 @@ def basic_delete(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # issue 687 delete directory with uncommitted directory child - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', Y_parent_path) expected_status.tweak('A/D', status='D ') @@ -1237,21 +1245,21 @@ def basic_delete(sbox): # Deleting unversioned file explicitly foo_path = sbox.ospath('foo') svntest.main.file_append(foo_path, 'unversioned foo') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', foo_path) verify_file_deleted("Failed to remove unversioned file foo", foo_path) # At one stage deleting a URL dumped core iota_URL = sbox.repo_url + '/iota' - svntest.actions.run_and_verify_svn(None, - ["\n", "Committed revision 2.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 2.\n"], [], 'rm', '-m', 'delete iota URL', iota_URL) # Issue 4074, deleting a root url SEGV. 
expected_error = 'svn: E170000: .*not within a repository' - svntest.actions.run_and_verify_svn(None, [], expected_error, + svntest.actions.run_and_verify_svn([], expected_error, 'rm', sbox.repo_url, '--message', 'delete root') @@ -1265,8 +1273,7 @@ def basic_checkout_deleted(sbox): # Delete A/D and commit. D_path = sbox.ospath('A/D') - svntest.actions.run_and_verify_svn("error scheduling A/D for deletion", - None, [], 'rm', '--force', D_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', D_path) expected_output = wc.State(wc_dir, { 'A/D' : Item(verb='Deleting'), @@ -1278,14 +1285,12 @@ def basic_checkout_deleted(sbox): 'A/D/gamma') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Now try to checkout revision 1 of A/D. url = sbox.repo_url + '/A/D' wc2 = sbox.ospath('new_D') - svntest.actions.run_and_verify_svn("error checking out r1 of A/D", - None, [], 'co', '-r', '1', + svntest.actions.run_and_verify_svn(None, [], 'co', '-r', '1', url + "@1", wc2) #---------------------------------------------------------------------- @@ -1309,7 +1314,7 @@ def basic_node_kind_change(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Try and fail to create a directory (file scheduled for deletion) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', gamma_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', gamma_path) # Status is replaced expected_status.tweak('A/D/gamma', status='R ') @@ -1322,23 +1327,21 @@ def basic_node_kind_change(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/gamma', status=' ', wc_rev='2') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Try and fail to create a directory (file deleted) - svntest.actions.run_and_verify_svn(None, - None, svntest.verify.AnyOutput, + 
svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'mkdir', gamma_path) # Status is unchanged svntest.actions.run_and_verify_status(wc_dir, expected_status) # Update to finally get rid of file - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # mkdir should succeed - svntest.actions.run_and_verify_svn(None, None, [], 'rm', gamma_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', gamma_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', gamma_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', gamma_path) expected_status.tweak(wc_rev=2) expected_status.add({ @@ -1362,7 +1365,7 @@ def basic_import(sbox): # import new files into repository url = sbox.repo_url + "/dirA/dirB/new_file" exit_code, output, errput = svntest.actions.run_and_verify_svn( - 'Cannot change node kind', None, [], 'import', + None, [], 'import', '-m', 'Log message for new import', new_path, url) lastline = output.pop().strip() @@ -1402,8 +1405,7 @@ def basic_import(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + [], True) #---------------------------------------------------------------------- @@ -1417,7 +1419,7 @@ def basic_cat(sbox): # Get repository text even if wc is modified svntest.main.file_append(mu_path, "some text") - svntest.actions.run_and_verify_svn(None, ["This is the file 'mu'.\n"], + svntest.actions.run_and_verify_svn(["This is the file 'mu'.\n"], [], 'cat', ###TODO is user/pass really necessary? 
mu_path) @@ -1436,33 +1438,27 @@ def basic_ls(sbox): cwd = os.getcwd() os.chdir(wc_dir) - svntest.actions.run_and_verify_svn("ls implicit current directory", - ["A/\n", "iota\n"], + svntest.actions.run_and_verify_svn(["A/\n", "iota\n"], [], 'ls') os.chdir(cwd) - svntest.actions.run_and_verify_svn('ls the root of working copy', - ['A/\n', 'iota\n'], + svntest.actions.run_and_verify_svn(['A/\n', 'iota\n'], [], 'ls', wc_dir) - svntest.actions.run_and_verify_svn('ls a working copy directory', - ['B/\n', 'C/\n', 'D/\n', 'mu\n'], + svntest.actions.run_and_verify_svn(['B/\n', 'C/\n', 'D/\n', 'mu\n'], [], 'ls', sbox.ospath('A')) - svntest.actions.run_and_verify_svn('ls working copy directory with -r BASE', - ['B/\n', 'C/\n', 'D/\n', 'mu\n'], + svntest.actions.run_and_verify_svn(['B/\n', 'C/\n', 'D/\n', 'mu\n'], [], 'ls', '-r', 'BASE', sbox.ospath('A')) - svntest.actions.run_and_verify_svn('ls a single file', - ['mu\n'], + svntest.actions.run_and_verify_svn(['mu\n'], [], 'ls', sbox.ospath('A/mu')) - svntest.actions.run_and_verify_svn('recursive ls', - ['E/\n', 'E/alpha\n', 'E/beta\n', 'F/\n', + svntest.actions.run_and_verify_svn(['E/\n', 'E/alpha\n', 'E/beta\n', 'F/\n', 'lambda\n' ], [], 'ls', '-R', sbox.ospath('A/B')) @@ -1497,11 +1493,11 @@ def nonexistent_repository(sbox): # the root directory, the test could fail, and that's just too bad :-). exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, svntest.verify.AnyOutput, + None, svntest.verify.AnyOutput, 'log', 'file:///nonexistent_path') for line in errput: - if re.match(".*Unable to open an ra_local session to URL.*", line): + if re.match(".*Unable to connect to a repository at URL.*", line): return # Else never matched the expected error output, so the test failed. 
@@ -1525,18 +1521,18 @@ def basic_auth_cache(sbox): svntest.main.safe_rmtree(wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) # Failed with "not locked" error on missing directory svntest.main.safe_rmtree(sbox.ospath('A/B/E')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'status', '-u', sbox.ospath('A/B')) # Failed with "already locked" error on new dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', repo_url + '/A/B/E', sbox.ospath('A/D/G')) @@ -1565,7 +1561,7 @@ def basic_add_ignores(sbox): open(foo_o_path, 'w') exit_code, output, err = svntest.actions.run_and_verify_svn( - "No output where some expected", svntest.verify.AnyOutput, [], + svntest.verify.AnyOutput, [], 'add', dir_path) for line in output: @@ -1589,11 +1585,11 @@ def basic_add_local_ignores(sbox): dir_path = sbox.ospath('dir') file_path = os.path.join(dir_path, 'app.lock') - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'mkdir', dir_path) svntest.main.run_svn(None, 'propset', 'svn:ignore', '*.lock', dir_path) open(file_path, 'w') - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'add', '--force', dir_path) #---------------------------------------------------------------------- @@ -1618,7 +1614,7 @@ def basic_add_no_ignores(sbox): open(foo_rej_path, 'w') exit_code, output, err = svntest.actions.run_and_verify_svn( - "No output where some expected", svntest.verify.AnyOutput, [], + svntest.verify.AnyOutput, [], 'add', '--no-ignore', dir_path) for line in output: @@ -1653,7 +1649,7 @@ def basic_add_parents(sbox): o.close() # Add the file, with it's parents - svntest.actions.run_and_verify_svn(None, None, [], 'add', '--parents', + 
svntest.actions.run_and_verify_svn(None, [], 'add', '--parents', zeta_path) # Build expected state @@ -1675,14 +1671,12 @@ def basic_add_parents(sbox): # Commit and verify svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', X_path, '--keep-local') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', '--parents', zeta_path) #---------------------------------------------------------------------- @@ -1696,8 +1690,7 @@ def uri_syntax(sbox): url = sbox.repo_url scheme = url[:url.find(":")] url = scheme + "://some_nonexistent_host_with_no_trailing_slash" - svntest.actions.run_and_verify_svn("No error where one expected", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'co', url, local_dir) # Different RA layers give different errors for failed checkouts; @@ -1793,15 +1786,15 @@ def basic_peg_revision(sbox): # Without the trailing "@", expect failure. exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, None, ".*Syntax error parsing peg revision 'abc'", 'cat', wc_file) + None, ".*Syntax error parsing peg revision 'abc'", 'cat', wc_file) exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, None, ".*Syntax error parsing peg revision 'abc'", 'cat', url) + None, ".*Syntax error parsing peg revision 'abc'", 'cat', url) # With the trailing "@", expect success. exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, ["xyz\n"], [], 'cat', wc_file + '@') + ["xyz\n"], [], 'cat', wc_file + '@') exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, ["xyz\n"], [], 'cat', url + '@') + ["xyz\n"], [], 'cat', url + '@') # Test with leading @ character in filename. 
filename = '@abc' @@ -1810,23 +1803,23 @@ def basic_peg_revision(sbox): svntest.main.file_append(wc_file, 'xyz\n') exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, None, [], 'add', wc_file + '@') + None, [], 'add', wc_file + '@') exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, None, [], 'ci', '-m', 'secret log msg', wc_file + '@') + None, [], 'ci', '-m', 'secret log msg', wc_file + '@') # With a leading "@" which isn't escaped, expect failure. # Note that we just test with filename starting with '@', because # wc_file + '@' + filename is a different situation where svn # will try to parse filename as a peg revision. exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, None, ".*'%s' is just a peg revision.*" % filename, + None, ".*'%s' is just a peg revision.*" % filename, 'cat', filename) # With a leading "@" which is escaped, expect success. exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, ["xyz\n"], [], 'cat', wc_file + '@') + ["xyz\n"], [], 'cat', wc_file + '@') exit_code, output, errlines = svntest.actions.run_and_verify_svn( - None, ["xyz\n"], [], 'cat', repos_dir + '/' + filename + '@') + ["xyz\n"], [], 'cat', repos_dir + '/' + filename + '@') def info_nonhead(sbox): "info on file not existing in HEAD" @@ -1838,7 +1831,7 @@ def info_nonhead(sbox): furl = repo_url + "/iota" # Remove iota and commit. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "delete", fname) expected_output = svntest.wc.State(wc_dir, { 'iota' : Item(verb='Deleting'), @@ -1847,9 +1840,7 @@ def info_nonhead(sbox): expected_status.remove("iota") svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Get info for old iota at r1. expected_infos = [ { 'URL' : '.*' }, @@ -1868,8 +1859,7 @@ def ls_nonhead(sbox): # Delete A/D/rho and commit. 
G_path = sbox.ospath('A/D/G') - svntest.actions.run_and_verify_svn("error scheduling A/D/G for deletion", - None, [], 'rm', G_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', G_path) expected_output = wc.State(wc_dir, { 'A/D/G' : Item(verb='Deleting'), @@ -1879,12 +1869,11 @@ def ls_nonhead(sbox): expected_status.remove('A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau',) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Check that we can list a file in A/D/G at revision 1. rho_url = sbox.repo_url + "/A/D/G/rho" - svntest.actions.run_and_verify_svn(None, '.* rho\n', [], + svntest.actions.run_and_verify_svn('.* rho\n', [], 'ls', '--verbose', rho_url + '@1') @@ -1900,12 +1889,10 @@ def cat_added_PREV(sbox): # Create and add a file. svntest.main.file_append(f_path, 'new text') - svntest.actions.run_and_verify_svn("adding file", - None, [], 'add', f_path) + svntest.actions.run_and_verify_svn(None, [], 'add', f_path) # Cat'ing the previous version should fail. - svntest.actions.run_and_verify_svn("cat PREV version of file", - None, ".*has no committed revision.*", + svntest.actions.run_and_verify_svn(None, ".*has no committed revision.*", 'cat', '-rPREV', f_path) # Issue #2612. 
@@ -1916,8 +1903,7 @@ def ls_space_in_repo_name(sbox): sbox.build(name = "repo with spaces") wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn('ls the root of the repository', - ['A/\n', 'iota\n'], + svntest.actions.run_and_verify_svn(['A/\n', 'iota\n'], [], 'ls', sbox.repo_url) @@ -1931,11 +1917,11 @@ def delete_keep_local(sbox): C_path = sbox.ospath('A/C') # Remove file iota - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--keep-local', + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', iota_path) # Remove directory 'A/C' - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--keep-local', + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', C_path) # Commit changes @@ -1950,9 +1936,7 @@ def delete_keep_local(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Update working copy to check disk state still greek tree expected_disk = svntest.main.greek_state.copy() @@ -1972,31 +1956,72 @@ def delete_keep_local_twice(sbox): dir = sbox.ospath('dir') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', dir) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', dir) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--keep-local', dir) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--keep-local', dir) + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', dir) + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', dir) if not os.path.isdir(dir): logger.warn('Directory was really deleted') raise svntest.Failure -def windows_paths_in_repos(sbox): +@XFail(svntest.main.is_mod_dav_url_quoting_broken) +def special_paths_in_repos(sbox): "use folders with names like 'c:hi'" sbox.build(create_wc = False) + test_file_source = os.path.join(sbox.repo_dir, 'format') repo_url = sbox.repo_url - chi_url = sbox.repo_url + '/c:hi' + test_urls = [ sbox.repo_url + '/c:hi', + sbox.repo_url + 
'/C:', + sbox.repo_url + '/C&', + sbox.repo_url + '/C<', + sbox.repo_url + '/C# hi', + sbox.repo_url + '/C?', + sbox.repo_url + '/C+', + sbox.repo_url + '/C%'] - # do some manipulations on a folder containing a windows drive name. - svntest.actions.run_and_verify_svn(None, None, [], - 'mkdir', '-m', 'log_msg', - chi_url) + # On Windows Apache HTTPD breaks '\' for us :( + if not (svntest.main.is_os_windows() and + svntest.main.is_ra_type_dav()): + test_urls += [ sbox.repo_url + '/C\\ri' ] + + for test_url in test_urls: + test_file_url = test_url + '/' + test_url[test_url.rindex('/')+1:] + + # do some manipulations on a folder which problematic names + svntest.actions.run_and_verify_svn(None, [], + 'mkdir', '-m', 'log_msg', + test_url) + + svntest.actions.run_and_verify_svnmucc(None, [], + '-m', 'log_msg', + 'put', test_file_source, + test_file_url) + + svntest.actions.run_and_verify_svnmucc(None, [], + 'propset', '-m', 'log_msg', + 'propname', 'propvalue', test_url) + + svntest.actions.run_and_verify_svn('propvalue', [], + 'propget', 'propname', test_url) + + svntest.actions.run_and_verify_svnmucc(None, [], + 'propset', '-m', 'log_msg', + 'propname', 'propvalue', test_file_url) + + svntest.actions.run_and_verify_svn('propvalue', [], + 'propget', 'propname', test_file_url) + + svntest.actions.run_and_verify_svn(None, [], + 'rm', '-m', 'log_msg', + test_file_url) + + svntest.actions.run_and_verify_svn(None, [], + 'rm', '-m', 'log_msg', + test_url) - svntest.actions.run_and_verify_svn(None, None, [], - 'rm', '-m', 'log_msg', - chi_url) def basic_rm_urls_one_repo(sbox): "remotely remove directories from one repository" @@ -2008,7 +2033,7 @@ def basic_rm_urls_one_repo(sbox): # Test 1: remotely delete one directory E_url = repo_url + '/A/B/E' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', 'log_msg', E_url) @@ -2031,7 +2056,7 @@ def basic_rm_urls_one_repo(sbox): F_url = repo_url + '/A/B/F' C_url = repo_url + 
'/A/C' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', 'log_msg', F_url, C_url) @@ -2068,8 +2093,7 @@ def basic_rm_urls_multi_repos(sbox): other_repo_dir, other_repo_url = sbox.add_repo_path("other") svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1) other_wc_dir = sbox.add_wc_path("other") - svntest.actions.run_and_verify_svn("Unexpected error during co", - svntest.verify.AnyOutput, [], "co", + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "co", other_repo_url, other_wc_dir) @@ -2079,7 +2103,7 @@ def basic_rm_urls_multi_repos(sbox): F2_url = other_repo_url + '/A/B/F' C2_url = other_repo_url + '/A/C' - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '-m', 'log_msg', + svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', 'log_msg', F_url, C_url, F2_url, C2_url) # Check that the two rm's to each of the repositories were handled in one @@ -2167,7 +2191,7 @@ def automatic_conflict_resolution(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create expected output tree for an update of the wc_backup. 
expected_output = wc.State(wc_backup, { @@ -2184,6 +2208,7 @@ def automatic_conflict_resolution(sbox): contents="\n".join(["This is the file 'lambda'.", "<<<<<<< .mine", "Conflicting appended text for lambda", + "||||||| .r1", "=======", "Original appended text for lambda", ">>>>>>> .r2", @@ -2192,6 +2217,7 @@ def automatic_conflict_resolution(sbox): contents="\n".join(["This is the file 'mu'.", "<<<<<<< .mine", "Conflicting appended text for mu", + "||||||| .r1", "=======", "Original appended text for mu", ">>>>>>> .r2", @@ -2200,6 +2226,7 @@ def automatic_conflict_resolution(sbox): contents="\n".join(["This is the file 'rho'.", "<<<<<<< .mine", "Conflicting appended text for rho", + "||||||| .r1", "=======", "Original appended text for rho", ">>>>>>> .r2", @@ -2208,6 +2235,7 @@ def automatic_conflict_resolution(sbox): contents="\n".join(["This is the file 'tau'.", "<<<<<<< .mine", "Conflicting appended text for tau", + "||||||| .r1", "=======", "Original appended text for tau", ">>>>>>> .r2", @@ -2216,6 +2244,7 @@ def automatic_conflict_resolution(sbox): contents="\n".join(["This is the file 'omega'.", "<<<<<<< .mine", "Conflicting appended text for omega", + "||||||| .r1", "=======", "Original appended text for omega", ">>>>>>> .r2", @@ -2241,66 +2270,49 @@ def automatic_conflict_resolution(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files) - - # verify that the extra_files list is now empty. - if len(extra_files) != 0: - # Because we want to be a well-behaved test, we silently raise if - # the test fails. However, these two print statements would - # probably reveal the cause for the failure, if they were - # uncommented: - # - # logger.warn("Not all extra reject files have been accounted for:") - # logger.warn(extra_files) - ### we should raise a less generic error here. which? - raise svntest.Failure + extra_files=extra_files) # So now lambda, mu and rho are all in a "conflicted" state. 
Run 'svn # resolve' with the respective "--accept[mine|orig|repo]" flag. # But first, check --accept actions resolved does not accept. - svntest.actions.run_and_verify_svn(None, - # stdout, stderr + svntest.actions.run_and_verify_svn(# stdout, stderr None, ".*invalid 'accept' ARG", 'resolve', '--accept=postpone') - svntest.actions.run_and_verify_svn(None, - # stdout, stderr + svntest.actions.run_and_verify_svn(# stdout, stderr None, ".*invalid 'accept' ARG", 'resolve', '--accept=edit', '--force-interactive') - svntest.actions.run_and_verify_svn(None, - # stdout, stderr + svntest.actions.run_and_verify_svn(# stdout, stderr None, ".*invalid 'accept' ARG", 'resolve', '--accept=launch', '--force-interactive') # Run 'svn resolved --accept=NOPE. Using omega for the test. - svntest.actions.run_and_verify_svn("Resolve command", None, + svntest.actions.run_and_verify_svn(None, ".*NOPE' is not a valid --accept value", 'resolve', '--accept=NOPE', omega_path_backup) # Resolve lambda, mu, and rho with different --accept options. 
- svntest.actions.run_and_verify_svn("Resolve command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=base', lambda_path_backup) - svntest.actions.run_and_verify_svn("Resolve command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-full', mu_path_backup) - svntest.actions.run_and_verify_svn("Resolve command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=theirs-full', rho_path_backup) fp = open(tau_path_backup, 'w') fp.write("Resolution text for 'tau'.\n") fp.close() - svntest.actions.run_and_verify_svn("Resolve command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=working', tau_path_backup) @@ -2318,6 +2330,7 @@ def automatic_conflict_resolution(sbox): contents="\n".join(["This is the file 'omega'.", "<<<<<<< .mine", "Conflicting appended text for omega", + "||||||| .r1", "=======", "Original appended text for omega", ">>>>>>> .r2", @@ -2345,9 +2358,7 @@ def automatic_conflict_resolution(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files) + extra_files=extra_files) def info_nonexisting_file(sbox): "get info on a file not in the repo" @@ -2390,8 +2401,7 @@ def basic_relative_url_using_current_dir(sbox): orig_dir = os.getcwd() os.chdir(sbox.wc_dir) - exit_code, output, error = svntest.actions.run_and_verify_svn(None, - expected_output, [], + exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'blame', '^/iota') os.chdir(orig_dir) @@ -2423,8 +2433,7 @@ def basic_relative_url_using_other_targets(sbox): " 3 jrandom New contents for mu\n", ] - exit_code, output, error = svntest.actions.run_and_verify_svn(None, - expected_output, [], 'blame', + exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'blame', '^/iota', mu_url) def basic_relative_url_multi_repo(sbox): @@ -2438,8 +2447,7 @@ def 
basic_relative_url_multi_repo(sbox): repo_dir2, repo_url2 = sbox.add_repo_path("other") svntest.main.copy_repos(repo_dir1, repo_dir2, 1, 1) wc_dir2 = sbox.add_wc_path("other") - svntest.actions.run_and_verify_svn("Unexpected error during co", - svntest.verify.AnyOutput, [], "co", + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "co", repo_url2, wc_dir2) @@ -2447,7 +2455,7 @@ def basic_relative_url_multi_repo(sbox): iota_url_repo1 = repo_url1 + '/iota' iota_url_repo2 = repo_url2 + '/iota' - exit_code, output, error = svntest.actions.run_and_verify_svn(None, [], + exit_code, output, error = svntest.actions.run_and_verify_svn([], svntest.verify.AnyOutput, 'blame', '^/A/mu', iota_url_repo1, iota_url_repo2) @@ -2466,12 +2474,10 @@ def basic_relative_url_non_canonical(sbox): "iota\n" ] - exit_code, output, error = svntest.actions.run_and_verify_svn(None, - expected_output, [], 'ls', + exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls', '^/A/', iota_url) - exit_code, output, error = svntest.actions.run_and_verify_svn(None, - expected_output, [], 'ls', + exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls', '^//A/', iota_url) def basic_relative_url_with_peg_revisions(sbox): @@ -2506,13 +2512,11 @@ def basic_relative_url_with_peg_revisions(sbox): ] # Canonical version with peg revision - exit_code, output, error = svntest.actions.run_and_verify_svn(None, - expected_output, [], 'ls', '-r3', + exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls', '-r3', '^/A/@3', iota_url) # Non-canonical version with peg revision - exit_code, output, error = svntest.actions.run_and_verify_svn(None, - expected_output, [], 'ls', '-r3', + exit_code, output, error = svntest.actions.run_and_verify_svn(expected_output, [], 'ls', '-r3', '^//A/@3', iota_url) @@ -2578,7 +2582,7 @@ def basic_add_svn_format_file(sbox): # The .svn directory and the format file should not be added 
as this # breaks the administrative area handling, so we expect some error here - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, ".*reserved name.*", 'add', '--parents', entries_path) @@ -2595,7 +2599,7 @@ def basic_mkdir_mix_targets(sbox): Y_url = sbox.repo_url + '/Y' expected_error = "svn: E200009: Cannot mix repository and working copy targets" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'mkdir', '-m', 'log_msg', Y_url, 'subdir') def delete_from_url_with_spaces(sbox): @@ -2606,15 +2610,15 @@ def delete_from_url_with_spaces(sbox): sbox.simple_mkdir('Dir With') sbox.simple_mkdir('Dir With/Spaces') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', sbox.wc_dir, '-m', 'Added dir') # This fails on 1.6.11 with an escaping error. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/Dir%20With%20Spaces', '-m', 'Deleted') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/Dir%20With/Spaces', '-m', 'Deleted') @@ -2623,8 +2627,7 @@ def meta_correct_library_being_used(sbox): "verify that neon/serf are compiled if tested" expected_re = (r'^\* ra_%s :' % svntest.main.options.http_library) expected_output = svntest.verify.RegexOutput(expected_re, match_all=False) - svntest.actions.run_and_verify_svn("is $http_library available", - expected_output, [], '--version') + svntest.actions.run_and_verify_svn(expected_output, [], '--version') def delete_and_add_same_file(sbox): "commit deletes a file and adds one with same text" @@ -2653,9 +2656,7 @@ def delete_and_add_same_file(sbox): # not found". 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def delete_child_parent_update(sbox): "rm child, commit, rm parent" @@ -2674,9 +2675,7 @@ def delete_child_parent_update(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) svntest.main.run_svn(wc_dir, 'rm', sbox.ospath('A/B/E')) expected_status.tweak('A/B/E', 'A/B/E/beta', status='D ') @@ -2720,29 +2719,29 @@ def basic_relocate(sbox): # No-op relocation of just the scheme. scheme = repo_url[:repo_url.index('://')+3] - svntest.actions.run_and_verify_svn(None, None, [], 'switch', '--relocate', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate', scheme, scheme, wc_dir) _verify_url(wc_dir, repo_url) # No-op relocation of a bit more of the URL. substring = repo_url[:repo_url.index('://')+7] - svntest.actions.run_and_verify_svn(None, None, [], 'switch', '--relocate', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate', substring, substring, wc_dir) _verify_url(wc_dir, repo_url) # Real relocation to OTHER_REPO_URL. - svntest.actions.run_and_verify_svn(None, None, [], 'switch', '--relocate', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate', repo_url, other_repo_url, wc_dir) _verify_url(wc_dir, other_repo_url) # ... and back again, using the newer 'svn relocate' subcommand. - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', other_repo_url, repo_url, wc_dir) _verify_url(wc_dir, repo_url) # To OTHER_REPO_URL again, this time with the single-URL form of # 'svn relocate'. 
- svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', other_repo_url, wc_dir) _verify_url(wc_dir, other_repo_url) @@ -2756,19 +2755,19 @@ def delete_urls_with_spaces(sbox): sbox.build(create_wc = False) # Create three directories with a space in their name - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', sbox.repo_url + '/A spaced', sbox.repo_url + '/B spaced', sbox.repo_url + '/C spaced', '-m', 'Created dirs') # Try to delete the first - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/A spaced', '-m', 'Deleted A') # And then two at once - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/B spaced', sbox.repo_url + '/C spaced', '-m', 'Deleted B and C') @@ -2781,8 +2780,7 @@ def ls_url_special_characters(sbox): sbox.repo_url + '%2F' + 'A'] for url in special_urls: - svntest.actions.run_and_verify_svn('ls URL with special characters', - ['B/\n', 'C/\n', 'D/\n', 'mu\n'], + svntest.actions.run_and_verify_svn(['B/\n', 'C/\n', 'D/\n', 'mu\n'], [], 'ls', url) @@ -2797,7 +2795,7 @@ def ls_multiple_and_non_existent_targets(sbox): non_existent_path = sbox.ospath('non-existent') expected_err = ".*W155010.*" - svntest.actions.run_and_verify_svn2(None, None, expected_err, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'ls', non_existent_path) def non_existent_url_target(): @@ -2805,7 +2803,7 @@ def ls_multiple_and_non_existent_targets(sbox): non_existent_url = sbox.repo_url + '/non-existent' expected_err = ".*W160013.*" - svntest.actions.run_and_verify_svn2(None, None, expected_err, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'ls', non_existent_url) def multiple_wc_targets(): "multiple wc targets" @@ -2815,7 +2813,7 @@ def 
ls_multiple_and_non_existent_targets(sbox): non_existent_path = sbox.ospath('non-existent') # All targets are existing - svntest.actions.run_and_verify_svn2(None, None, [], + svntest.actions.run_and_verify_svn2(None, [], 0, 'ls', alpha, beta) # One non-existing target @@ -2838,7 +2836,7 @@ def ls_multiple_and_non_existent_targets(sbox): non_existent_url = sbox.repo_url + '/non-existent' # All targets are existing - svntest.actions.run_and_verify_svn2(None, None, [], + svntest.actions.run_and_verify_svn2(None, [], 0, 'ls', alpha, beta) # One non-existing target @@ -2893,7 +2891,7 @@ def add_multiple_targets(sbox): expected_status = svntest.verify.UnorderedOutput( ['A ' + file1 + '\n', 'A ' + file2 + '\n']) - svntest.actions.run_and_verify_svn(None, expected_status, [], + svntest.actions.run_and_verify_svn(expected_status, [], 'status', wc_dir) @@ -2904,29 +2902,29 @@ def quiet_commits(sbox): svntest.main.file_append(sbox.ospath('A/mu'), 'xxx') - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'commit', sbox.wc_dir, '--message', 'commit', '--quiet') - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'mkdir', sbox.repo_url + '/X', '--message', 'mkdir URL', '--quiet') - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'import', sbox.ospath('A/mu'), sbox.repo_url + '/f', '--message', 'import', '--quiet') - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'rm', sbox.repo_url + '/f', '--message', 'rm URL', '--quiet') - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'copy', sbox.repo_url + '/X', sbox.repo_url + '/Y', '--message', 'cp URL URL', '--quiet') - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'move', sbox.repo_url + '/Y', sbox.repo_url + '/Z', '--message', 'mv URL URL', '--quiet') @@ -2980,7 
+2978,7 @@ def rm_missing_with_case_clashing_ondisk_item(sbox): # Verify that the casing is not updated, because the path is on-disk. expected_output = [ 'D %s\n' % iota_path ] # 'svn rm' iota, should leave IOTA alone. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'rm', iota_path) # Test status: the unversioned IOTA should still be there. @@ -3001,14 +2999,14 @@ def delete_conflicts_one_of_many(sbox): svntest.main.file_append(sbox.ospath('A/D/G/rho'), 'new rho') sbox.simple_commit() svntest.main.file_append(sbox.ospath('A/D/G/rho'), 'conflict rho') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', '-r1', '--accept', 'postpone', wc_dir) if not os.path.exists(sbox.ospath('A/D/G/rho.mine')): raise svntest.Failure("conflict file rho.mine missing") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', sbox.ospath('A/D/G/rho'), sbox.ospath('A/D/G/tau')) @@ -3043,12 +3041,38 @@ def peg_rev_on_non_existent_wc_path(sbox): # test something. sbox.simple_update() # This currently fails with ENOENT on A/mu3. - svntest.actions.run_and_verify_svn(None, ['r2\n'], [], + svntest.actions.run_and_verify_svn(['r2\n'], [], 'cat', '-r2', sbox.ospath('A3/mu3') + '@3') os.chdir(sbox.ospath('A4')) - svntest.actions.run_and_verify_svn(None, ['r2\n'], [], + svntest.actions.run_and_verify_svn(['r2\n'], [], 'cat', '-r2', sbox.ospath('mu3') + '@3') + +# With 'svn mkdir --parents' the target directory may already exist on disk. +# In that case it was wrongly performing a recursive 'add' on its contents. 
+def mkdir_parents_target_exists_on_disk(sbox): + "mkdir parents target exists on disk" + + sbox.build() + wc_dir = sbox.wc_dir + + Y_path = sbox.ospath('Y') + Y_Z_path = sbox.ospath('Y/Z') + + os.mkdir(Y_path) + os.mkdir(Y_Z_path) + svntest.actions.run_and_verify_svn(None, [], + 'mkdir', '--parents', Y_path) + + # Y should be added, and Z should not. There was a regression in which Z + # was also added. + expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1) + expected_status.add({ + 'Y' : Item(status='A ', wc_rev=0), + }) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + ######################################################################## # Run the tests @@ -3091,7 +3115,7 @@ test_list = [ None, ls_space_in_repo_name, delete_keep_local, delete_keep_local_twice, - windows_paths_in_repos, + special_paths_in_repos, basic_rm_urls_one_repo, basic_rm_urls_multi_repos, automatic_conflict_resolution, @@ -3117,6 +3141,7 @@ test_list = [ None, rm_missing_with_case_clashing_ondisk_item, delete_conflicts_one_of_many, peg_rev_on_non_existent_wc_path, + mkdir_parents_target_exists_on_disk, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/blame_tests.py b/subversion/tests/cmdline/blame_tests.py index fdbb688..f824d25 100755 --- a/subversion/tests/cmdline/blame_tests.py +++ b/subversion/tests/cmdline/blame_tests.py @@ -34,7 +34,7 @@ from svntest.main import server_has_mergeinfo from prop_tests import binary_mime_type_on_text_file_warning # For some basic merge setup used by blame -g tests. 
-from merge_tests import set_up_branch +from svntest.mergetrees import set_up_branch # (abbreviation) Skip = svntest.testcase.Skip_deco @@ -213,11 +213,11 @@ def blame_in_xml(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # Retrieve last changed date from svn info exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'log', file_path, '--xml', '-r1:2') date1 = None @@ -257,7 +257,7 @@ def blame_in_xml(sbox): '</blame>\n'] exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'blame', file_path, '--xml') for i in range(0, len(output)): @@ -284,17 +284,17 @@ def blame_on_unknown_revision(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'blame', file_path, '-rHEAD:HEAD') if output[0].find(" - This is the file 'iota'.") == -1: raise svntest.Failure exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'blame', file_path, '--verbose', '-rHEAD:HEAD') if output[0].find(" - This is the file 'iota'.") == -1: @@ -325,12 +325,12 @@ def blame_peg_rev(sbox): # Check that we get a blame of r1 when we specify a peg revision of r1 # and no explicit revision. - svntest.actions.run_and_verify_svn(None, expected_output_r1, [], + svntest.actions.run_and_verify_svn(expected_output_r1, [], 'blame', 'iota@1') # Check that an explicit revision overrides the default provided by # the peg revision. 
- svntest.actions.run_and_verify_svn(None, expected_output_r1, [], + svntest.actions.run_and_verify_svn(expected_output_r1, [], 'blame', 'iota@2', '-r1') def blame_eol_styles(sbox): @@ -355,16 +355,16 @@ def blame_eol_styles(sbox): for i in range(1,3): svntest.main.file_append(file_path, "Extra line %d" % (i) + "\n") svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) svntest.main.run_svn(None, 'propset', 'svn:eol-style', eol, file_path) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'blame', file_path, '-r1:HEAD') # output is a list of lines, there should be 3 lines @@ -389,7 +389,7 @@ def blame_ignore_whitespace(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # commit only whitespace changes svntest.main.file_write(file_path, @@ -400,7 +400,7 @@ def blame_ignore_whitespace(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # match the blame output, as defined in the blame code: # "%6ld %10s %s %s%s", rev, author ? 
author : " -", @@ -412,7 +412,7 @@ def blame_ignore_whitespace(sbox): ] exit_code, output, error = svntest.actions.run_and_verify_svn( - None, expected_output, [], + expected_output, [], 'blame', '-x', '-w', file_path) # commit some changes @@ -425,7 +425,7 @@ def blame_ignore_whitespace(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) expected_output = [ " 2 jrandom A a \n", @@ -434,7 +434,7 @@ def blame_ignore_whitespace(sbox): " 2 jrandom C c \n", ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', '-x', '-w', file_path) def blame_ignore_eolstyle(sbox): @@ -454,7 +454,7 @@ def blame_ignore_eolstyle(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # commit only eol changes svntest.main.file_write(file_path, @@ -465,7 +465,7 @@ def blame_ignore_eolstyle(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) expected_output = [ " 2 jrandom Aa\n", @@ -474,7 +474,7 @@ def blame_ignore_eolstyle(sbox): ] exit_code, output, error = svntest.actions.run_and_verify_svn( - None, expected_output, [], + expected_output, [], 'blame', '-x', '--ignore-eol-style', file_path) @@ -490,7 +490,7 @@ def blame_merge_info(sbox): mu_path = os.path.join(wc_dir, 'trunk', 'A', 'mu') exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], 'blame', '-g', iota_path) + None, [], 'blame', '-g', iota_path) expected_blame = [ { 'revision' : 2, @@ -507,7 +507,7 @@ def blame_merge_info(sbox): parse_and_verify_blame(output, expected_blame, 1) exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], 'blame', '-g', '-r10:11', iota_path) + None, [], 'blame', '-g', '-r10:11', iota_path) expected_blame = [ { 'revision' : None, @@ 
-524,7 +524,7 @@ def blame_merge_info(sbox): parse_and_verify_blame(output, expected_blame, 1) exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], 'blame', '-g', '-r16:17', mu_path) + None, [], 'blame', '-g', '-r16:17', mu_path) expected_blame = [ { 'revision' : None, @@ -557,7 +557,7 @@ def blame_merge_out_of_range(sbox): upsilon_path = os.path.join(wc_dir, 'trunk', 'A', 'upsilon') exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'blame', '-g', upsilon_path) expected_blame = [ @@ -599,12 +599,12 @@ def blame_peg_rev_file_not_in_head(sbox): # Check that we get a blame of r1 when we specify a peg revision of r1 # and no explicit revision. - svntest.actions.run_and_verify_svn(None, expected_output_r1, [], + svntest.actions.run_and_verify_svn(expected_output_r1, [], 'blame', 'iota@1') # Check that an explicit revision overrides the default provided by # the peg revision. - svntest.actions.run_and_verify_svn(None, expected_output_r1, [], + svntest.actions.run_and_verify_svn(expected_output_r1, [], 'blame', 'iota@2', '-r1') def blame_file_not_in_head(sbox): @@ -616,9 +616,10 @@ def blame_file_not_in_head(sbox): # Check that a correct error message is printed when blaming a target that # doesn't exist (in HEAD). expected_err = ".*notexisting' (is not a file.*|path not found|does not exist)" - svntest.actions.run_and_verify_svn(None, [], expected_err, + svntest.actions.run_and_verify_svn([], expected_err, 'blame', notexisting_url) +@SkipUnless(server_has_mergeinfo) def blame_output_after_merge(sbox): "blame -g output with inserted lines" @@ -630,7 +631,8 @@ def blame_output_after_merge(sbox): A_url = sbox.repo_url + '/A' # r2: mv greek tree in trunk. 
- svntest.actions.run_and_verify_svn(None, ["\n","Committed revision 2.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 2.\n"], [], 'mv', "--parents", A_url, trunk_A_url, "-m", "move greek tree to trunk") @@ -650,11 +652,12 @@ def blame_output_after_merge(sbox): 'trunk/A/mu' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # r4: create branches/br from trunk branches_br_url = sbox.repo_url + "/branches/br" - svntest.actions.run_and_verify_svn(None, ["\n","Committed revision 4.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 4.\n"], [], 'cp', '--parents', trunk_url, branches_br_url, "-m", "create branch") @@ -675,7 +678,7 @@ def blame_output_after_merge(sbox): 'branches/br/A/mu' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # r6: Insert a single line in branches/A/mu svntest.main.file_write(branch_mu_path, @@ -691,11 +694,11 @@ def blame_output_after_merge(sbox): 'branches/br/A/mu' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # r7: merge branches/br back to trunk trunk_path = os.path.join(wc_dir, "trunk") - svntest.actions.run_and_verify_svn(wc_dir, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-r', '4:HEAD', branches_br_url, trunk_path) expected_output = svntest.wc.State(wc_dir, { @@ -703,7 +706,7 @@ def blame_output_after_merge(sbox): 'trunk/A/mu' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # Now test blame, first without the -g option expected_output = [ " 3 jrandom New version of file 'mu'.\n", @@ -714,7 +717,7 @@ def blame_output_after_merge(sbox): " 3 jrandom 5th line in file 'mu'.\n", " 3 jrandom 6th line in file 'mu'.\n"] - 
svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', mu_path) # Next test with the -g option @@ -727,7 +730,7 @@ def blame_output_after_merge(sbox): " 3 jrandom 5th line in file 'mu'.\n", " 3 jrandom 6th line in file 'mu'.\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', '-g', mu_path) # Now test with -rN:M @@ -739,19 +742,19 @@ def blame_output_after_merge(sbox): " - - 5th line in file 'mu'.\n", " - - 6th line in file 'mu'.\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', '-r', '4:head', mu_path) # Next test with the -g option with -rN:M expected_output = [ " - - New version of file 'mu'.\n", " - - 2nd line in file 'mu'.\n", - "G - - new 3rd line in file 'mu'.\n", + "G 5 jrandom new 3rd line in file 'mu'.\n", "G 6 jrandom add 3.5 line in file 'mu'.\n", " - - 4th line in file 'mu'.\n", " - - 5th line in file 'mu'.\n", " - - 6th line in file 'mu'.\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', '-g', '-r', '6:head', mu_path) #---------------------------------------------------------------------- @@ -772,7 +775,7 @@ def merge_sensitive_blame_and_empty_mergeinfo(sbox): # Make an edit to A/D/H/psi in r3. svntest.main.file_append(psi_path, "trunk edit in revision three.\n") - svntest.main.run_svn(None, 'ci', '-m', 'trunk edit', wc_dir) + sbox.simple_commit(message='trunk edit') # Merge r3 from A to A_COPY, reverse merge r3 from A/D/H/psi # to A_COPY/D/H/psi, and commit as r4. 
This results in empty @@ -782,21 +785,18 @@ def merge_sensitive_blame_and_empty_mergeinfo(sbox): sbox.repo_url + '/A', A_COPY_path) svntest.main.run_svn(None, 'merge', '-c-3', sbox.repo_url + '/A/D/H/psi', psi_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', - 'Sync merge A to A_COPY excepting A_COPY/D/H/psi', - wc_dir) + sbox.simple_commit(message='Sync merge A to A_COPY excepting A_COPY/D/H/psi') # Make an edit to A/D/H/psi in r5. svntest.main.file_append(psi_path, "trunk edit in revision five.\n") - svntest.main.run_svn(None, 'ci', '-m', 'trunk edit', wc_dir) + sbox.simple_commit(message='trunk edit') # Sync merge A/D/H/psi to A_COPY/D/H/psi and commit as r6. This replaces # the empty mergeinfo on A_COPY/D/H/psi with '/A/D/H/psi:2-5'. svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A/D/H/psi', psi_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', - 'Sync merge A/D/H/psi to A_COPY/D/H/psi', wc_dir) + sbox.simple_commit(message='Sync merge A/D/H/psi to A_COPY/D/H/psi') # Check the blame -g output: # Currently this test fails because the trunk edit done in r3 is @@ -810,7 +810,7 @@ def merge_sensitive_blame_and_empty_mergeinfo(sbox): " 1 jrandom This is the file 'psi'.\n", "G 3 jrandom trunk edit in revision three.\n", "G 5 jrandom trunk edit in revision five.\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', '-g', psi_COPY_path) def blame_multiple_targets(sbox): @@ -819,53 +819,33 @@ def blame_multiple_targets(sbox): sbox.build() # First, make a new revision of iota. 
- iota = os.path.join(sbox.wc_dir, 'iota') - svntest.main.file_append(iota, "New contents for iota\n") - svntest.main.run_svn(None, 'ci', '-m', '', iota) + sbox.simple_append('iota', "New contents for iota\n") + sbox.simple_commit() + iota = sbox.ospath('iota') expected_output = [ " 1 jrandom This is the file 'iota'.\n", " 2 jrandom New contents for iota\n", ] - def multiple_wc_targets(): - "multiple wc targets" - - non_existent = os.path.join(sbox.wc_dir, 'non-existent') - - expected_err = ".*W155010.*\n.*E200009.*" - expected_err_re = re.compile(expected_err, re.DOTALL) - - exit_code, output, error = svntest.main.run_svn(1, 'blame', - non_existent, iota) + # We use --force to avoid an early bail from the current blame code, + # that performs a property check before the actual blame. - # Verify error - if not expected_err_re.match("".join(error)): - raise svntest.Failure('blame failed: expected error "%s", but received ' - '"%s"' % (expected_err, "".join(error))) - svntest.verify.verify_outputs(None, output, None, expected_output, None) + non_existent = os.path.join(sbox.wc_dir, 'non-existent') + svntest.actions.run_and_verify_svn(None, + ".*W155010: The node.*non-existent'.*", + 'blame', non_existent, iota, + '--force') - def multiple_url_targets(): - "multiple url targets" + iota_url = sbox.repo_url + '/iota' + non_existent_url = sbox.repo_url + '/non-existent' - iota_url = sbox.repo_url + '/iota' - non_existent = sbox.repo_url + '/non-existent' + # SVN_ERR_FS_NOT_FILE | SVN_ERR_FS_NOT_FOUND + svntest.actions.run_and_verify_svn(None, + ".*W1600(13|17): '.*non-existent' .*not", + 'blame', non_existent_url, iota_url, + '--force') - expected_err = ".*(W160017|W160013|W150000).*\n.*E200009.*" - expected_err_re = re.compile(expected_err, re.DOTALL) - - exit_code, output, error = svntest.main.run_svn(1, 'blame', - non_existent, iota_url) - - # Verify error - if not expected_err_re.match("".join(error)): - raise svntest.Failure('blame failed: expected error "%s", but 
received ' - '"%s"' % (expected_err, "".join(error))) - svntest.verify.verify_outputs(None, output, None, expected_output, None) - - # Test one by one - multiple_wc_targets() - multiple_url_targets() @Issue(4034) def blame_eol_handling(sbox): @@ -923,10 +903,10 @@ def blame_eol_handling(sbox): ' %d jrandom line 5 \n' % rev, ] - svntest.actions.run_and_verify_svn(f1 + '-base', expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', f1) - svntest.actions.run_and_verify_svn(f2 + '-base', expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', f2) file_data = 'line 1 ' + eol + \ @@ -952,13 +932,133 @@ def blame_eol_handling(sbox): ' %d jrandom line 5 \n' % rev, ] - svntest.actions.run_and_verify_svn(f1 + '-modified', expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', f1) - svntest.actions.run_and_verify_svn(f2 + '-modified', expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'blame', f2) +@SkipUnless(svntest.main.server_has_reverse_get_file_revs) +def blame_youngest_to_oldest(sbox): + "blame_youngest_to_oldest" + + sbox.build() + + # First, make a new revision of iota. + iota = sbox.ospath('iota') + orig_line = open(iota).read() + line = "New contents for iota\n" + svntest.main.file_append(iota, line) + sbox.simple_commit() #r2 + + # Move the file, to check that the operation will peg correctly. + iota_moved = sbox.ospath('iota_moved') + sbox.simple_move('iota', 'iota_moved') + sbox.simple_commit() #r3 + + # Delete a line. 
+ open(iota_moved, 'w').write(line) + sbox.simple_commit() #r4 + + expected_output = [ + ' %d jrandom %s\n' % (4, orig_line[:-1]), + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-r4:1', iota_moved) + + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-rHEAD:1', iota_moved) + + expected_output = [ + ' %d jrandom %s\n' % (2, line[:-1]), + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-r1:HEAD', iota_moved) + +@Issue(4467) +def blame_reverse_no_change(sbox): + "blame reverse towards a revision with no change" + + sbox.build() + + # Introduce a revision where iota doesn't change! + sbox.simple_propset('a', 'b', 'A') + sbox.simple_commit('') #r2 + + sbox.simple_append('iota', 'new line\n') + sbox.simple_commit('') #r3 + + sbox.simple_append('iota', 'another new line\n') + sbox.simple_commit('') #r4 + + expected_output = [ + ' - - This is the file \'iota\'.\n', + ' 3 jrandom new line\n', + ' 4 jrandom another new line\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-r2:HEAD', sbox.ospath('iota')) + + expected_output = [ + ' - - This is the file \'iota\'.\n', + ] + # This used to trigger an assertion on 1.9.x before 1.9.0 + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-rHEAD:2', sbox.ospath('iota')) + + # Drop the middle line + sbox.simple_append('iota', 'This is the file \'iota\'.\n' + 'another new line\n', truncate=True) + sbox.simple_commit('') #r5 + + # Back to start + sbox.simple_append('iota', 'This is the file \'iota\'.\n', truncate=True) + sbox.simple_commit('') #r6 + + expected_output = [ + ' - - This is the file \'iota\'.\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-rHEAD:2', sbox.ospath('iota')) + + expected_output = [ + ' - - This is the file \'iota\'.\n', + ' 5 jrandom new line\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-rHEAD:3', sbox.ospath('iota')) + + 
expected_output = [ + ' - - This is the file \'iota\'.\n', + ' 5 jrandom new line\n', + ' 6 jrandom another new line\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-rHEAD:4', sbox.ospath('iota')) + + expected_output = [ + ' - - This is the file \'iota\'.\n', + ' 6 jrandom another new line\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-rHEAD:5', sbox.ospath('iota')) + + expected_output = [ + ' - - This is the file \'iota\'.\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-rHEAD:6', sbox.ospath('iota')) + + + expected_output = [ + ' - - This is the file \'iota\'.\n', + ' 5 jrandom new line\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'blame', '-r5:3', sbox.ospath('iota')) + + ######################################################################## # Run the tests @@ -982,6 +1082,8 @@ test_list = [ None, merge_sensitive_blame_and_empty_mergeinfo, blame_multiple_targets, blame_eol_handling, + blame_youngest_to_oldest, + blame_reverse_no_change, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/cat_tests.py b/subversion/tests/cmdline/cat_tests.py index b3227cb..8246bc2 100755 --- a/subversion/tests/cmdline/cat_tests.py +++ b/subversion/tests/cmdline/cat_tests.py @@ -59,7 +59,7 @@ def cat_local_directory(sbox): re.escape(os.path.abspath(A_path)) + \ "' refers to a directory" - svntest.actions.run_and_verify_svn2(None, None, expected_err, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'cat', A_path) def cat_remote_directory(sbox): @@ -70,7 +70,7 @@ def cat_remote_directory(sbox): expected_err = "svn: warning: W195007: URL '" + A_url + \ "' refers to a directory\n.*" - svntest.actions.run_and_verify_svn2(None, None, expected_err, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'cat', A_url) def cat_base(sbox): @@ -101,7 +101,7 @@ def cat_nonexistent_file(sbox): re.escape(os.path.abspath(bogus_path)) + \ "' is not 
under version control" - svntest.actions.run_and_verify_svn2(None, None, expected_err, 1, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'cat', bogus_path) def cat_skip_uncattable(sbox): @@ -128,18 +128,17 @@ def cat_skip_uncattable(sbox): expected_err = "svn: warning: W200005: '" + \ re.escape(os.path.abspath(item_to_cat)) + \ "' is not under version control" - svntest.actions.run_and_verify_svn2(None, None, expected_err, 1, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'cat', item_to_cat) elif os.path.isdir(item_to_cat): expected_err = "svn: warning: W195007: '" + \ re.escape(os.path.abspath(item_to_cat)) + \ "' refers to a directory" - svntest.actions.run_and_verify_svn2(None, None, expected_err, 1, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'cat', item_to_cat) else: - svntest.actions.run_and_verify_svn(None, - ["This is the file '"+file+"'.\n"], + svntest.actions.run_and_verify_svn(["This is the file '"+file+"'.\n"], [], 'cat', item_to_cat) G_path = os.path.join(dir_path, 'G') @@ -149,13 +148,13 @@ def cat_skip_uncattable(sbox): expected_err1 = "svn: warning: W195007: '" + \ re.escape(os.path.abspath(G_path)) + \ "' refers to a directory\n" - svntest.actions.run_and_verify_svn2(None, expected_out, expected_err1, 1, + svntest.actions.run_and_verify_svn2(expected_out, expected_err1, 1, 'cat', rho_path, G_path) expected_err2 = "svn: warning: W200005: '" + \ re.escape(os.path.abspath(new_file_path)) + \ "' is not under version control\n" - svntest.actions.run_and_verify_svn2(None, expected_out, expected_err2, 1, + svntest.actions.run_and_verify_svn2(expected_out, expected_err2, 1, 'cat', rho_path, new_file_path) expected_err3 = expected_err1 + expected_err2 + \ @@ -163,6 +162,8 @@ def cat_skip_uncattable(sbox): expected_err_re = re.compile(expected_err3, re.DOTALL) exit_code, output, error = svntest.main.run_svn(1, 'cat', rho_path, G_path, new_file_path) + error = [line for line in error + if not 
re.compile(svntest.main.stack_trace_regexp).match(line)] # Verify output if output[0] != expected_out: @@ -184,9 +185,9 @@ def cat_unversioned_file(sbox): iota_path = os.path.join(wc_dir, 'iota') # Delete a file an commit the deletion. - svntest.actions.run_and_verify_svn2(None, None, [], 0, + svntest.actions.run_and_verify_svn2(None, [], 0, 'delete', iota_path) - svntest.actions.run_and_verify_svn2(None, None, [], 0, + svntest.actions.run_and_verify_svn2(None, [], 0, 'commit', '-m', 'delete a file', iota_path) @@ -194,13 +195,13 @@ def cat_unversioned_file(sbox): expected_error = "svn: warning: W200005: '" + \ re.escape(os.path.abspath(iota_path)) + \ "' is not under version control" - svntest.actions.run_and_verify_svn2(None, [], expected_error, 1, + svntest.actions.run_and_verify_svn2([], expected_error, 1, 'cat', iota_path) # Put an unversioned file at 'iota' and try to cat it again, the result # should still be the same. svntest.main.file_write(iota_path, "This the unversioned file 'iota'.\n") - svntest.actions.run_and_verify_svn2(None, [], expected_error, 1, + svntest.actions.run_and_verify_svn2([], expected_error, 1, 'cat', iota_path) def cat_keywords(sbox): @@ -209,20 +210,18 @@ def cat_keywords(sbox): wc_dir = sbox.wc_dir iota_path = os.path.join(wc_dir, 'iota') - svntest.actions.run_and_verify_svn(None, - ["This is the file 'iota'.\n"], + svntest.actions.run_and_verify_svn(["This is the file 'iota'.\n"], [], 'cat', iota_path) svntest.main.file_append(iota_path, "$Revision$\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:keywords', 'Revision', iota_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'r2', wc_dir) - svntest.actions.run_and_verify_svn(None, - ["This is the file 'iota'.\n", "$Revision: 2 $\n"], + svntest.actions.run_and_verify_svn(["This is the file 'iota'.\n", "$Revision: 2 $\n"], [], 'cat', iota_path) def 
cat_url_special_characters(sbox): @@ -237,7 +236,7 @@ def cat_url_special_characters(sbox): "' refers to a directory\n.*" for url in special_urls: - svntest.actions.run_and_verify_svn2(None, None, expected_err, 1, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'cat', url) def cat_non_existing_remote_file(sbox): @@ -249,7 +248,7 @@ def cat_non_existing_remote_file(sbox): non_existing_path.split('/')[1] # cat operation on non-existing remote path should return 1 - svntest.actions.run_and_verify_svn2(None, None, expected_err, 1, + svntest.actions.run_and_verify_svn2(None, expected_err, 1, 'cat', non_existing_path) ######################################################################## diff --git a/subversion/tests/cmdline/changelist_tests.py b/subversion/tests/cmdline/changelist_tests.py index 4652acc..1bbb4a6 100755 --- a/subversion/tests/cmdline/changelist_tests.py +++ b/subversion/tests/cmdline/changelist_tests.py @@ -442,7 +442,7 @@ def commit_one_changelist(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir, "--changelist", "a") @@ -483,7 +483,7 @@ def commit_multiple_changelists(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir, "--changelist", "a", "--changelist", "i") @@ -814,8 +814,7 @@ def update_with_changelists(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, "-r", "1", "--changelist", "a", "--changelist", "i", @@ -856,8 +855,7 @@ def update_with_changelists(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, "-r", "1", "--changelist", "a", "--changelist", "i", @@ -907,7 +905,7 @@ def tree_conflicts_and_changelists_on_commit1(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir, "--changelist", "list") @@ -930,23 +928,23 @@ def 
tree_conflicts_and_changelists_on_commit2(sbox): expected_output = svntest.verify.RegexOutput( "Deleting.*" + re.escape(C), False) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'commit', '-m', 'delete A/C', C) expected_output = svntest.verify.RegexOutput( "A.*" + re.escape(C), False) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'update', C, "-r1") expected_output = svntest.verify.RegexOutput( ".*'propname' set on '" + re.escape(C) + "'", False) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propset', 'propname', 'propval', C) expected_output = svntest.verify.RegexOutput( " C " + re.escape(C), False) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'update', wc_dir) @@ -980,7 +978,7 @@ def tree_conflicts_and_changelists_on_commit2(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir, "--changelist", "list") @@ -1031,7 +1029,7 @@ def move_added_keeps_changelist(sbox): svntest.main.run_svn(None, "rename", kappa_path, kappa2_path) # kappa not under version control - svntest.actions.run_and_verify_svnversion(None, kappa_path, repo_url, + svntest.actions.run_and_verify_svnversion(kappa_path, repo_url, [], ".*doesn't exist.*") # kappa2 in a changelist expected_infos = [ @@ -1054,17 +1052,17 @@ def change_to_dir(sbox): svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu visible in changelist - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'changelist', 'qq', sbox.ospath('A/mu')) expected_infos = [{'Name' : 'mu', 'Changelist' : 'qq'}] svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu still visible after delete - 
svntest.actions.run_and_verify_svn(None, None, [], 'rm', sbox.ospath('A/mu')) + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/mu')) svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu removed from changelist after replace with directory - svntest.actions.run_and_verify_svn(None, '^A|' + _re_cl_rem_pattern, [], + svntest.actions.run_and_verify_svn('^A|' + _re_cl_rem_pattern, [], 'mkdir', sbox.ospath('A/mu')) expected_infos = [{'Changelist' : None}] # No Name for directories? svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) @@ -1078,7 +1076,7 @@ def change_to_dir(sbox): svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu visible in changelist - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'changelist', 'qq', sbox.ospath('A/mu')) expected_infos = [{'Name' : 'mu', 'Changelist' : 'qq'}] svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) @@ -1101,26 +1099,26 @@ def revert_deleted_in_changelist(sbox): svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu visible in changelist - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'changelist', 'qq', sbox.ospath('A/mu')) expected_infos = [{'Name' : 'mu', 'Changelist' : 'qq'}] svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu still visible after delete - svntest.actions.run_and_verify_svn(None, None, [], 'rm', sbox.ospath('A/mu')) + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/mu')) svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu still visible after revert - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', sbox.ospath('A/mu')) svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu still visible after parent delete - 
svntest.actions.run_and_verify_svn(None, None, [], 'rm', sbox.ospath('A')) + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A')) svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) # A/mu still visible after revert - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', sbox.ospath('A')) svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('A/mu')) @@ -1135,11 +1133,11 @@ def add_remove_non_existent_target(sbox): re.escape(os.path.abspath(bogus_path)) + \ "' was not found" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'changelist', 'testlist', bogus_path) - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'changelist', bogus_path, '--remove') @@ -1154,11 +1152,11 @@ def add_remove_unversioned_target(sbox): re.escape(os.path.abspath(unversioned)) + \ "' was not found" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'changelist', 'testlist', unversioned) - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'changelist', unversioned, '--remove') @@ -1171,7 +1169,7 @@ def readd_after_revert(sbox): svntest.main.file_write(dummy, "dummy contents") sbox.simple_add('dummy') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'changelist', 'testlist', dummy) @@ -1179,7 +1177,7 @@ def readd_after_revert(sbox): svntest.main.file_write(dummy, "dummy contents") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', dummy) diff --git a/subversion/tests/cmdline/checkout_tests.py b/subversion/tests/cmdline/checkout_tests.py index 7456221..4d939c3 100755 --- 
a/subversion/tests/cmdline/checkout_tests.py +++ b/subversion/tests/cmdline/checkout_tests.py @@ -206,8 +206,8 @@ def checkout_with_obstructions(sbox): 'A/B/lambda', 'A/D', 'A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau', 'A/D/H', 'A/D/H/psi', 'A/D/H/omega', 'A/D/H/chi', 'A/D/gamma', 'A/C') - actions.run_and_verify_checkout2(False, url, wc_dir, expected_output, - expected_disk, None, None, None, None) + actions.run_and_verify_checkout(url, wc_dir, expected_output, + expected_disk) # svn status expected_status = actions.get_virginal_state(wc_dir, 1) @@ -243,7 +243,7 @@ def checkout_with_obstructions(sbox): expected_status = actions.get_virginal_state(wc_dir, 1) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status,) @@ -291,7 +291,7 @@ def forced_checkout_of_file_with_dir_obstructions(sbox): expected_disk.tweak('iota', contents=None) actions.run_and_verify_checkout(url, wc_dir_other, expected_output, - expected_disk, None, None, None, None, '--force') + expected_disk, [], '--force') #---------------------------------------------------------------------- @@ -356,7 +356,7 @@ def forced_checkout_of_dir_with_file_obstructions(sbox): expected_disk.tweak('A', contents='The file A\n') actions.run_and_verify_checkout(url, wc_dir_other, expected_output, - expected_disk, None, None, None, None, '--force') + expected_disk, [], '--force') # Now see to it that we can recover from the obstructions. 
@@ -374,7 +374,7 @@ def forced_checkout_of_dir_with_file_obstructions(sbox): svntest.main.run_svn(None, 'revert', '-R', os.path.join(wc_dir_other, 'A')) actions.run_and_verify_update(wc_dir_other, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir_other) + expected_status) #---------------------------------------------------------------------- @@ -390,8 +390,7 @@ def forced_checkout_with_faux_obstructions(sbox): svntest.actions.run_and_verify_checkout(sbox.repo_url, sbox.wc_dir, expected_output, - expected_wc, None, None, None, - None, '--force') + expected_wc, [], '--force') #---------------------------------------------------------------------- @@ -411,8 +410,7 @@ def forced_checkout_with_real_obstructions(sbox): svntest.actions.run_and_verify_checkout(sbox.repo_url, sbox.wc_dir, expected_output, - expected_wc, None, None, None, - None, '--force') + expected_wc, [], '--force') #---------------------------------------------------------------------- @@ -436,8 +434,7 @@ def forced_checkout_with_real_obstructions_and_unversioned_files(sbox): svntest.actions.run_and_verify_checkout(sbox.repo_url, sbox.wc_dir, expected_output, - expected_wc, None, None, None, - None, '--force') + expected_wc, [], '--force') #---------------------------------------------------------------------- @@ -462,14 +459,12 @@ def forced_checkout_with_versioned_obstruction(sbox): os.mkdir(other_wc_dir) # Checkout "A" from the first repos to a fresh dir. - svntest.actions.run_and_verify_svn("Unexpected error during co", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "co", repo_url + "/A", fresh_wc_dir_A) # Checkout "A" from the second repos to the other dir. 
- svntest.actions.run_and_verify_svn("Unexpected error during co", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "co", other_repo_url + "/A", other_wc_dir_A) @@ -483,8 +478,7 @@ def forced_checkout_with_versioned_obstruction(sbox): expected_wc = svntest.main.greek_state.copy() svntest.actions.run_and_verify_checkout(repo_url, fresh_wc_dir, expected_output, expected_wc, - None, None, None, None, - '--force') + [], '--force') # Checkout the entire first repos into the other dir. This should # fail because it's a different repository. @@ -495,14 +489,13 @@ def forced_checkout_with_versioned_obstruction(sbox): expected_wc = svntest.main.greek_state.copy() svntest.actions.run_and_verify_checkout(repo_url, other_wc_dir, expected_output, expected_wc, - None, None, None, None, - '--force') + [], '--force') #ensure that other_wc_dir_A is not affected by this forced checkout. - svntest.actions.run_and_verify_svn("empty status output", None, + svntest.actions.run_and_verify_svn(None, [], "st", other_wc_dir_A) exit_code, sout, serr = svntest.actions.run_and_verify_svn( - "it should still point to other_repo_url/A", None, [], "info", + None, [], "info", other_wc_dir_A) #TODO rename test_stderr to test_regex or something. @@ -511,7 +504,7 @@ def forced_checkout_with_versioned_obstruction(sbox): #ensure that other_wc_dir is in a consistent state though it may be #missing few items. exit_code, sout, serr = svntest.actions.run_and_verify_svn( - "it should still point to other_repo_url", None, [], "info", + None, [], "info", other_wc_dir) #TODO rename test_stderr to test_regex or something. 
test_stderr("URL: " + sbox.repo_url + '$', sout) @@ -544,7 +537,7 @@ def import_and_checkout(sbox): # Import import_from_dir to the other repos expected_output = svntest.wc.State(sbox.wc_dir, {}) - svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '-m', 'import', import_from_dir, other_repo_url) @@ -575,8 +568,7 @@ def import_and_checkout(sbox): svntest.actions.run_and_verify_checkout(other_repo_url, import_from_dir, expected_output, expected_wc, - None, None, None, None, - '--force') + [], '--force') #---------------------------------------------------------------------- # Issue #2529. @@ -586,7 +578,8 @@ def checkout_broken_eol(sbox): svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]), 'update_tests_data', - 'checkout_broken_eol.dump')) + 'checkout_broken_eol.dump'), + create_wc=False) URL = sbox.repo_url @@ -633,7 +626,7 @@ def checkout_peg_rev(sbox): mu_path = os.path.join(wc_dir, 'A', 'mu') svntest.main.file_append(mu_path, 'appended mu text') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'changed file mu', wc_dir) # now checkout the repo@1 in another folder, this should create our initial @@ -664,7 +657,7 @@ def checkout_peg_rev_date(sbox): ## Get svn:date. 
exit_code, output, errput = svntest.main.run_svn(None, 'propget', 'svn:date', '--revprop', '-r1', - '--strict', + '--no-newline', sbox.repo_url) if exit_code or errput != [] or len(output) != 1: raise svntest.Failure("svn:date propget failed") @@ -688,7 +681,7 @@ def checkout_peg_rev_date(sbox): mu_path = os.path.join(wc_dir, 'A', 'mu') svntest.main.file_append(mu_path, 'appended mu text') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'changed file mu', wc_dir) # now checkout the repo@peg_string in another folder, this should create our @@ -709,7 +702,7 @@ def checkout_peg_rev_date(sbox): expected_output, expected_wc) - # now try another checkout with repo@r1_string + # now try another checkout with repo@r1_string checkout_target = sbox.add_wc_path('checkout2') os.mkdir(checkout_target) @@ -809,7 +802,7 @@ def co_with_obstructing_local_adds(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create various paths scheduled for addition which will obstruct # the adds coming from the repos. @@ -858,6 +851,7 @@ def co_with_obstructing_local_adds(sbox): 'A/D/H/I/K/xi' : Item("This is file 'xi'\n"), 'A/D/H/I/K/eta' : Item("\n".join(["<<<<<<< .mine", "This is WC file 'eta'", + "||||||| .r0", "=======", "This is REPOS file 'eta'", ">>>>>>> .r2", @@ -865,6 +859,7 @@ def co_with_obstructing_local_adds(sbox): 'A/D/H/I/L' : Item(), 'A/D/kappa' : Item("\n".join(["<<<<<<< .mine", "This is WC file 'kappa'", + "||||||| .r0", "=======", "This is REPOS file 'kappa'", ">>>>>>> .r2", @@ -895,9 +890,8 @@ def co_with_obstructing_local_adds(sbox): # wc_backup before performing the checkout otherwise. 
svntest.actions.run_and_verify_checkout(sbox.repo_url, wc_backup, expected_output, expected_disk, - svntest.tree.detect_conflict_files, - extra_files, None, None, - '--force') + [], '--force', + extra_files=extra_files) svntest.actions.run_and_verify_status(wc_backup, expected_status) @@ -909,7 +903,7 @@ def co_with_obstructing_local_adds(sbox): # URL to URL copy of A/D/G to A/D/M. G_URL = sbox.repo_url + '/A/D/G' M_URL = sbox.repo_url + '/A/D/M' - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', G_URL, M_URL, '-m', '') # WC to WC copy of A/D/H to A/D/M. (M is now scheduled for addition @@ -918,13 +912,13 @@ def co_with_obstructing_local_adds(sbox): H_path = os.path.join(wc_dir, 'A', 'D', 'H') M_path = os.path.join(wc_dir, 'A', 'D', 'M') - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', H_path, M_path) # URL to URL copy of A/B/E/alpha to A/B/F/omicron. omega_URL = sbox.repo_url + '/A/B/E/alpha' omicron_URL = sbox.repo_url + '/A/B/F/omicron' - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', omega_URL, omicron_URL, '-m', '') @@ -935,7 +929,7 @@ def co_with_obstructing_local_adds(sbox): omicron_path = os.path.join(wc_dir, 'A', 'B', 'F', 'omicron') chi_path = os.path.join(wc_dir, 'A', 'D', 'H', 'chi') - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', chi_path, omicron_path) @@ -1001,8 +995,7 @@ def co_with_obstructing_local_adds(sbox): D_path, expected_output, expected_disk, - None, None, None, None, - '--force') + [], '--force') expected_status.tweak('A/D/M', treeconflict='C', status='R ') expected_status.tweak( @@ -1041,8 +1034,7 @@ def co_with_obstructing_local_adds(sbox): F_path, expected_output, expected_disk, - None, None, None, None, - '--force') + [], '--force') expected_status.tweak('A/B/F/omicron', 
treeconflict='C', status='R ') expected_status.add({ @@ -1122,8 +1114,7 @@ def checkout_wc_from_drive(sbox): 'iota' : Item(status='A '), }) svntest.actions.run_and_verify_checkout(repo_url, wc_dir, - expected_output, expected_wc, - None, None, None, None) + expected_output, expected_wc) wc2_dir = sbox.add_wc_path('2') expected_output = wc.State(wc2_dir, { @@ -1146,9 +1137,25 @@ def checkout_wc_from_drive(sbox): 'B/F' : Item(status='A '), 'B/lambda' : Item(status='A '), }) + + expected_wc = wc.State('', { + 'C' : Item(), + 'B/E/beta' : Item(contents="This is the file 'beta'.\n"), + 'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"), + 'B/lambda' : Item(contents="This is the file 'lambda'.\n"), + 'B/F' : Item(), + 'D/H/omega' : Item(contents="This is the file 'omega'.\n"), + 'D/H/psi' : Item(contents="This is the file 'psi'.\n"), + 'D/H/chi' : Item(contents="This is the file 'chi'.\n"), + 'D/G/rho' : Item(contents="This is the file 'rho'.\n"), + 'D/G/tau' : Item(contents="This is the file 'tau'.\n"), + 'D/G/pi' : Item(contents="This is the file 'pi'.\n"), + 'D/gamma' : Item(contents="This is the file 'gamma'.\n"), + 'mu' : Item(contents="This is the file 'mu'.\n"), + }) + svntest.actions.run_and_verify_checkout(repo_url + '/A', wc2_dir, - expected_output, None, - None, None, None, None) + expected_output, expected_wc) wc3_dir = sbox.add_wc_path('3') expected_output = wc.State(wc3_dir, { @@ -1163,9 +1170,18 @@ def checkout_wc_from_drive(sbox): 'gamma' : Item(status='A '), }) + expected_wc = wc.State('', { + 'H/chi' : Item(contents="This is the file 'chi'.\n"), + 'H/psi' : Item(contents="This is the file 'psi'.\n"), + 'H/omega' : Item(contents="This is the file 'omega'.\n"), + 'G/pi' : Item(contents="This is the file 'pi'.\n"), + 'G/tau' : Item(contents="This is the file 'tau'.\n"), + 'G/rho' : Item(contents="This is the file 'rho'.\n"), + 'gamma' : Item(contents="This is the file 'gamma'.\n"), + }) + svntest.actions.run_and_verify_checkout(repo_url + '/A/D', 
wc3_dir, - expected_output, None, - None, None, None, None) + expected_output, expected_wc) finally: os.chdir(was_cwd) diff --git a/subversion/tests/cmdline/commit_tests.py b/subversion/tests/cmdline/commit_tests.py index 02ba0c6..b9a7037 100755 --- a/subversion/tests/cmdline/commit_tests.py +++ b/subversion/tests/cmdline/commit_tests.py @@ -174,7 +174,7 @@ def commit_one_file(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], omega_path) @@ -200,7 +200,7 @@ def commit_one_new_file(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], gloo_path) @@ -229,7 +229,7 @@ def commit_one_new_binary_file(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], gloo_path) @@ -283,7 +283,7 @@ def commit_multiple_targets(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], psi_path, AB_path, pi_path) #---------------------------------------------------------------------- @@ -339,7 +339,7 @@ def commit_multiple_targets_2(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], psi_path, AB_path, omega_path, pi_path) @@ -375,7 +375,7 @@ def commit_inclusive_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], D_path) #---------------------------------------------------------------------- @@ -417,7 +417,7 @@ def commit_top_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir) #---------------------------------------------------------------------- @@ -449,7 +449,7 @@ def commit_unversioned_thing(sbox): svntest.actions.run_and_verify_commit(wc_dir, None, None, - "is not under version control", + ".*is not under version control.*", os.path.join(wc_dir,'blorg')) #---------------------------------------------------------------------- @@ -527,9 
+527,7 @@ def nested_dir_replacements(sbox): # Commit from the top of the working copy and verify output & status. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -560,8 +558,7 @@ def hudson_part_1(sbox): # Commit the deletion of gamma and verify. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now gamma should be marked as `deleted' under the hood. When we # update, we should no output, and a perfect, virginal status list @@ -611,8 +608,7 @@ def hudson_part_1_variation_1(sbox): # Commit the deletion of H and verify. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now H should be marked as `deleted' under the hood. When we # update, we should no see output, and a perfect, virginal status @@ -662,8 +658,7 @@ def hudson_part_1_variation_2(sbox): # Commit the deletion of gamma and verify. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now gamma should be marked as `deleted' under the hood. # Go ahead and re-add gamma, so that is *also* scheduled for addition. @@ -688,8 +683,7 @@ def hudson_part_1_variation_2(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -722,8 +716,7 @@ def hudson_part_2(sbox): # Commit the deletion of gamma and verify. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now gamma should be marked as `deleted' under the hood, at # revision 2. Meanwhile, A/D is still lagging at revision 1. 
@@ -735,8 +728,7 @@ def hudson_part_2(sbox): svntest.actions.run_and_verify_commit(wc_dir, None, None, - "[Oo]ut.of.date", - wc_dir) + ".*[Oo]ut of date.*") #---------------------------------------------------------------------- @@ -787,8 +779,7 @@ def hudson_part_2_1(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now, assuming all three files in H are marked as 'deleted', an # update of H should print absolutely nothing. @@ -849,7 +840,7 @@ fp.close()""" svntest.main.file_append(iota_path, "More stuff in iota") # Commit, no output expected. - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'ci', '--quiet', '-m', 'log msg', wc_dir) @@ -905,8 +896,7 @@ def merge_mixed_revisions(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # 2. svn up A/D/H @@ -940,8 +930,7 @@ def merge_mixed_revisions(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # 4. echo "moo" >> A/D/H/chi; svn ci A/D/H/chi @@ -955,8 +944,7 @@ def merge_mixed_revisions(sbox): expected_status.tweak('iota', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # 5. 
echo "moo" >> iota; svn ci iota svntest.main.file_append(iota_path, "moomoo") @@ -969,8 +957,7 @@ def merge_mixed_revisions(sbox): expected_status.tweak('iota', wc_rev=5) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # At this point, here is what our tree should look like: # _ 1 ( 5) working_copies/commit_tests-10 @@ -1011,8 +998,7 @@ def merge_mixed_revisions(sbox): expected_status.tweak('A/D/H/chi', wc_rev=4) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -1089,8 +1075,7 @@ def commit_uri_unsafe(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -1126,8 +1111,7 @@ def commit_deleted_edited(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -1202,8 +1186,7 @@ def commit_rmd_and_deleted_file(sbox): svntest.main.run_svn(None, 'rm', mu_path) # Commit, hoping to see no errors - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'commit', '-m', 'logmsg', mu_path) #---------------------------------------------------------------------- @@ -1235,9 +1218,7 @@ def commit_add_file_twice(sbox): # Commit should succeed svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Update to state before commit svntest.main.run_svn(None, 'up', '-r', '1', wc_dir) @@ -1250,8 +1231,7 @@ def commit_add_file_twice(sbox): svntest.actions.run_and_verify_commit(wc_dir, None, None, - "E160020: File.*already 
exists", - wc_dir) + ".*E160020: File.*already exists.*") #---------------------------------------------------------------------- @@ -1287,9 +1267,7 @@ def commit_from_long_dir(sbox): svntest.actions.run_and_verify_commit(abs_wc_dir, expected_output, - None, - None, - abs_wc_dir) + None) #---------------------------------------------------------------------- @@ -1310,12 +1288,10 @@ def commit_with_lock(sbox): None, None, 'svn: E155004: ' - 'Working copy \'.*\' locked', - wc_dir) + 'Working copy \'.*\' locked') # unlock directory - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - [], [], + svntest.actions.run_and_verify_svn([], [], 'cleanup', D_path) # this commit should succeed @@ -1326,9 +1302,7 @@ def commit_with_lock(sbox): expected_status.tweak('A/D/gamma', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -1352,9 +1326,7 @@ def commit_current_dir(sbox): }) svntest.actions.run_and_verify_commit('.', expected_output, - None, - None, - '.') + None) os.chdir(was_cwd) # I can't get the status check to work as part of run_and_verify_commit. @@ -1385,23 +1357,15 @@ def failed_commit(sbox): svntest.main.file_append(other_iota_path, "More different stuff in iota") # Commit both working copies. The second commit should fail. - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'commit', '-m', 'log', wc_dir) - svntest.actions.run_and_verify_svn("Output on stderr expected", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'commit', '-m', 'log', other_wc_dir) # Now list the txns in the repo. The list should be empty. 
- exit_code, output, errput = svntest.main.run_svnadmin('lstxns', - sbox.repo_dir) - svntest.verify.compare_and_display_lines( - "Error running 'svnadmin lstxns'.", - 'STDERR', [], errput) - svntest.verify.compare_and_display_lines( - "Output of 'svnadmin lstxns' is unexpected.", - 'STDOUT', [], output) + svntest.actions.run_and_verify_svnadmin([], [], + 'lstxns', sbox.repo_dir) #---------------------------------------------------------------------- @@ -1419,8 +1383,7 @@ def commit_multiple_wc_nested(sbox): # Checkout a second working copy wc2_dir = sbox.ospath('A/wc2') url = sbox.repo_url - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'checkout', url, wc2_dir) @@ -1439,8 +1402,7 @@ def commit_multiple_wc_nested(sbox): svntest.actions.run_and_verify_status(wc2_dir, expected_status2) # Commit should succeed, even though one target is a "child" of the other. - svntest.actions.run_and_verify_svn("Ouput on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'commit', '-m', 'log', wc_dir, wc2_dir) @@ -1465,12 +1427,10 @@ def commit_multiple_wc(sbox): wc1_dir = sbox.ospath('wc1') wc2_dir = sbox.ospath('wc2') url = sbox.repo_url - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'checkout', url, wc1_dir) - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'checkout', url, wc2_dir) @@ -1489,8 +1449,7 @@ def commit_multiple_wc(sbox): svntest.actions.run_and_verify_status(wc2_dir, expected_status2) # Commit should succeed. 
- svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'commit', '-m', 'log', wc1_dir, wc2_dir) @@ -1520,12 +1479,10 @@ def commit_multiple_wc_multiple_repos(sbox): # Checkout two wcs wc1_dir = sbox.ospath('wc1') wc2_dir = sbox.ospath('wc2') - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'checkout', sbox.repo_url, wc1_dir) - svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'checkout', url2, wc2_dir) @@ -1549,8 +1506,7 @@ def commit_multiple_wc_multiple_repos(sbox): error_re = ( ".*(is not a working copy" + "|Are all targets part of the same working copy" + "|was not found).*" ) - svntest.actions.run_and_verify_svn("Expected output on stderr doesn't match", - [], error_re, + svntest.actions.run_and_verify_svn([], error_re, 'commit', '-m', 'log', wc1_dir, wc2_dir) @@ -1606,7 +1562,7 @@ def commit_nonrecursive(sbox): svntest.main.file_append(sbox.ospath(file4_path), 'this is file4') # Add them to version control. - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'add', '--depth=empty', sbox.ospath(file1_path), sbox.ospath(dir1_path), @@ -1640,7 +1596,7 @@ def commit_nonrecursive(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], '-N', sbox.ospath(file1_path), sbox.ospath(dir1_path), @@ -1725,7 +1681,7 @@ def commit_nonrecursive(sbox): svntest.main.file_append(sbox.ospath(nope_2_path), 'nope_2') # Add them to version control. 
- svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'add', '-N', sbox.ospath(dirA_path), sbox.ospath(fileA_path), @@ -1767,7 +1723,7 @@ def commit_nonrecursive(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], '-N', sbox.ospath(dirA_path)) #---------------------------------------------------------------------- @@ -1797,7 +1753,7 @@ def commit_out_of_date_deletions(sbox): I_path = sbox.ospath('A/I') os.mkdir(I_path) svntest.main.run_svn(None, 'add', I_path) - svntest.main.run_svn(None, 'ci', '-m', 'prep', wc_dir) + sbox.simple_commit(message='prep') svntest.main.run_svn(None, 'up', wc_dir) # Make a backup copy of the working copy @@ -1837,7 +1793,7 @@ def commit_out_of_date_deletions(sbox): status=' ') expected_status.remove('A/B/F', 'A/D/H/chi', 'A/B/E/beta', 'A/D/H/psi') commit = svntest.actions.run_and_verify_commit - commit(wc_dir, expected_output, expected_status, None, wc_dir) + commit(wc_dir, expected_output, expected_status, [], wc_dir) # Edits in wc backup I_path = os.path.join(wc_backup, 'A', 'I') @@ -1858,7 +1814,7 @@ def commit_out_of_date_deletions(sbox): # A commit of any one of these files or dirs should fail, preferably # with an out-of-date error message. - error_re = "(out of date|not found)" + error_re = ".*(out of date|not found).*" commit(wc_backup, None, None, error_re, C_path) commit(wc_backup, None, None, error_re, I_path) commit(wc_backup, None, None, error_re, F_path) @@ -1886,7 +1842,7 @@ def commit_with_bad_log_message(sbox): # Commit and expect an error. svntest.actions.run_and_verify_commit(wc_dir, None, None, - "contains a zero byte", + ".*contains a zero byte.*", '-F', log_msg_path, iota_path) @@ -1908,9 +1864,10 @@ def commit_with_mixed_line_endings(sbox): svntest.main.file_append(log_msg_path, "test\nthis\n\rcase\r\n--This line, and those below, will be ignored--\n") # Commit and expect an error. 
+ expected_stderr = ".*E135000: Error normalizing log message to internal format.*" svntest.actions.run_and_verify_commit(wc_dir, None, None, - "Error normalizing log message to internal format", + expected_stderr, '-F', log_msg_path, iota_path) @@ -1941,7 +1898,7 @@ def commit_with_mixed_line_endings_in_ignored_part(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], iota_path) def from_wc_top_with_bad_editor(sbox): @@ -1970,12 +1927,10 @@ def from_wc_top_with_bad_editor(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn("Unexpected failure from propset.", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'pset', 'fish', 'food', wc_dir) os.chdir(wc_dir) exit_code, out, err = svntest.actions.run_and_verify_svn( - "Commit succeeded when should have failed.", None, svntest.verify.AnyOutput, '--force-interactive', 'ci', '--editor-cmd', 'no_such-editor') @@ -1995,7 +1950,7 @@ def mods_in_schedule_delete(sbox): # Schedule a delete, then put in local mods C_path = sbox.ospath('A/C') - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'rm', C_path) if not os.path.exists(C_path): @@ -2011,8 +1966,7 @@ def mods_in_schedule_delete(sbox): 'A/C' : Item(verb='Deleting'), }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Unversioned file still exists actual_contents = open(foo_path).read() @@ -2093,29 +2047,29 @@ def local_mods_are_not_commits(sbox): expected_error = '.*Local, non-commit operations do not take a log message.*' # copy wc->wc - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'cp', '-m', 'log msg', sbox.ospath('iota'), sbox.ospath('iota2')) # copy repos->wc - 
svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'cp', '-m', 'log msg', sbox.repo_url + "/iota", sbox.ospath('iota2')) # delete - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'rm', '-m', 'log msg', sbox.ospath('A/D/gamma')) # mkdir - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'mkdir', '-m', 'log msg', sbox.ospath('newdir')) # rename - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'cp', '-m', 'log msg', sbox.ospath('A/mu'), sbox.ospath('A/yu')) @@ -2147,7 +2101,8 @@ def post_commit_hook_test(sbox): # filesystem will report an absolute path because that's the way the # filesystem is created by this test suite. expected_output = [ "Sending "+ iota_path + "\n", - "Transmitting file data .\n", + "Transmitting file data .done\n", + "Committing transaction...\n", "Committed revision 2.\n", "\n", "Warning: " + @@ -2155,7 +2110,7 @@ def post_commit_hook_test(sbox): error_msg + "\n", ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'ci', '-m', 'log msg', iota_path) #---------------------------------------------------------------------- @@ -2184,7 +2139,7 @@ def commit_same_folder_in_targets(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], '-N', wc_dir, iota_path) @@ -2212,7 +2167,7 @@ def commit_inconsistent_eol(sbox): expected_err = ".*iota.*" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'commit', '-m', 'log message', wc_dir) @@ -2224,16 +2179,16 @@ def mkdir_with_revprop(sbox): sbox.build() remote_dir = sbox.repo_url + "/dir" - svntest.actions.run_and_verify_svn(None, None, 
[], 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg', '--with-revprop', 'bug=42', remote_dir) expected = svntest.verify.UnorderedOutput( ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '42', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('42', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) @@ -2243,19 +2198,19 @@ def delete_with_revprop(sbox): sbox.build() remote_dir = sbox.repo_url + "/dir" - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg', remote_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'delete', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'delete', '-m', 'msg', '--with-revprop', 'bug=52', remote_dir) expected = svntest.verify.UnorderedOutput( ['Unversioned properties on revision 3:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 3, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '52', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('52', [], 'propget', 'bug', '--revprop', '-r', 3, sbox.repo_url) @@ -2280,7 +2235,7 @@ def commit_with_revprop(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], '-m', 'msg', '--with-revprop', 'bug=62', omega_path, gloo_path) @@ -2289,9 +2244,9 @@ def commit_with_revprop(sbox): ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + 
svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '62', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('62', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) @@ -2305,7 +2260,7 @@ def import_with_revprop(sbox): os.mkdir(local_dir) svntest.main.file_write(local_file, "xxxx") - svntest.actions.run_and_verify_svn(None, None, [], 'import', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'import', '-m', 'msg', '--with-revprop', 'bug=72', local_dir, sbox.repo_url) @@ -2313,9 +2268,9 @@ def import_with_revprop(sbox): ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '72', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('72', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) @@ -2326,10 +2281,10 @@ def copy_R2R_with_revprop(sbox): sbox.build() remote_dir1 = sbox.repo_url + "/dir1" remote_dir2 = sbox.repo_url + "/dir2" - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg', remote_dir1) - svntest.actions.run_and_verify_svn(None, None, [], 'copy', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'msg', '--with-revprop', 'bug=82', remote_dir1, remote_dir2) @@ -2337,9 +2292,9 @@ def copy_R2R_with_revprop(sbox): ['Unversioned properties on revision 3:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 3, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '82', [], 'propget', 'bug', + 
svntest.actions.run_and_verify_svn('82', [], 'propget', 'bug', '--revprop', '-r', 3, sbox.repo_url) @@ -2350,10 +2305,10 @@ def copy_WC2R_with_revprop(sbox): sbox.build() remote_dir = sbox.repo_url + "/dir" local_dir = sbox.ospath('folder') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', local_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'copy', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'msg', '--with-revprop', 'bug=92', local_dir, remote_dir) @@ -2361,9 +2316,9 @@ def copy_WC2R_with_revprop(sbox): ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '92', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('92', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) @@ -2374,10 +2329,10 @@ def move_R2R_with_revprop(sbox): sbox.build() remote_dir1 = sbox.repo_url + "/dir1" remote_dir2 = sbox.repo_url + "/dir2" - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg', remote_dir1) - svntest.actions.run_and_verify_svn(None, None, [], 'move', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'move', '-m', 'msg', '--with-revprop', 'bug=102', remote_dir1, remote_dir2) @@ -2385,9 +2340,9 @@ def move_R2R_with_revprop(sbox): ['Unversioned properties on revision 3:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 3, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '102', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('102', [], 
'propget', 'bug', '--revprop', '-r', 3, sbox.repo_url) @@ -2398,7 +2353,7 @@ def propedit_with_revprop(sbox): sbox.build() svntest.main.use_editor('append_foo') - svntest.actions.run_and_verify_svn(None, None, [], 'propedit', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'propedit', '-m', 'msg', '--with-revprop', 'bug=112', 'prop', sbox.repo_url) @@ -2406,9 +2361,9 @@ def propedit_with_revprop(sbox): ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '112', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('112', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) @@ -2419,7 +2374,7 @@ def set_multiple_props_with_revprop(sbox): sbox.build() remote_dir = sbox.repo_url + "/dir" - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg', '--with-revprop', 'bug=32', '--with-revprop', 'ref=22', remote_dir) @@ -2427,11 +2382,11 @@ def set_multiple_props_with_revprop(sbox): ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n', ' ref\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '32', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('32', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '22', [], 'propget', 'ref', + svntest.actions.run_and_verify_svn('22', [], 'propget', 'ref', '--revprop', '-r', 2, sbox.repo_url) @@ -2442,7 +2397,7 @@ def use_empty_value_in_revprop_pair(sbox): sbox.build() remote_dir = sbox.repo_url + "/dir" - 
svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg', '--with-revprop', 'bug=', '--with-revprop', 'ref=', remote_dir) @@ -2450,11 +2405,11 @@ def use_empty_value_in_revprop_pair(sbox): ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n', ' ref\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '', [], 'propget', 'ref', + svntest.actions.run_and_verify_svn('', [], 'propget', 'ref', '--revprop', '-r', 2, sbox.repo_url) @@ -2464,7 +2419,7 @@ def no_equals_in_revprop_pair(sbox): sbox.build() remote_dir = sbox.repo_url + "/dir" - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'msg', '--with-revprop', 'bug', '--with-revprop', 'ref', remote_dir) @@ -2472,11 +2427,11 @@ def no_equals_in_revprop_pair(sbox): ['Unversioned properties on revision 2:\n', ' svn:author\n',' svn:date\n', ' svn:log\n', ' bug\n', ' ref\n']) - svntest.actions.run_and_verify_svn(None, expected, [], 'proplist', + svntest.actions.run_and_verify_svn(expected, [], 'proplist', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '', [], 'propget', 'bug', + svntest.actions.run_and_verify_svn('', [], 'propget', 'bug', '--revprop', '-r', 2, sbox.repo_url) - svntest.actions.run_and_verify_svn(None, '', [], 'propget', 'ref', + svntest.actions.run_and_verify_svn('', [], 'propget', 'ref', '--revprop', '-r', 2, sbox.repo_url) @@ -2488,17 +2443,17 @@ def set_invalid_revprops(sbox): remote_dir = sbox.repo_url + "/dir" # Try to set svn: 
revprops. expected = '.*Standard properties can\'t.*' - svntest.actions.run_and_verify_svn(None, [], expected, 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg', '--with-revprop', 'svn:author=42', remote_dir) - svntest.actions.run_and_verify_svn(None, [], expected, 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg', '--with-revprop', 'svn:log=42', remote_dir) - svntest.actions.run_and_verify_svn(None, [], expected, 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg', '--with-revprop', 'svn:date=42', remote_dir) - svntest.actions.run_and_verify_svn(None, [], expected, 'mkdir', '-m', 'msg', + svntest.actions.run_and_verify_svn([], expected, 'mkdir', '-m', 'msg', '--with-revprop', 'svn:foo=bar', remote_dir) # Empty revprop pair. - svntest.actions.run_and_verify_svn(None, [], + svntest.actions.run_and_verify_svn([], 'svn: E205000: ' 'Revision property pair is empty', 'mkdir', '-m', 'msg', @@ -2546,14 +2501,8 @@ def start_commit_hook_test(sbox): expected_stderr, actual_stderr) # Now list the txns in the repo. The list should be empty. - exit_code, output, errput = svntest.main.run_svnadmin('lstxns', - sbox.repo_dir) - svntest.verify.compare_and_display_lines( - "Error running 'svnadmin lstxns'.", - 'STDERR', [], errput) - svntest.verify.compare_and_display_lines( - "Output of 'svnadmin lstxns' is unexpected.", - 'STDOUT', [], output) + svntest.actions.run_and_verify_svnadmin([], [], + 'lstxns', sbox.repo_dir) #---------------------------------------------------------------------- @Issue(3553) @@ -2611,21 +2560,21 @@ def versioned_log_message(sbox): svntest.main.file_append(iota_path, "2") # try to check in a change using a versioned file as your log entry. - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'ci', '-F', log_path) # force it. 
should not produce any errors. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-F', log_path, '--force-log') svntest.main.file_append(mu_path, "2") # try the same thing, but specifying the file to commit explicitly. - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'ci', '-F', log_path, mu_path) # force it... should succeed. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-F', log_path, '--force-log', mu_path) @@ -2646,7 +2595,7 @@ def changelist_near_conflict(sbox): # Create a changelist. changelist_name = "logical-changeset" - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "changelist", changelist_name, mu_path, gloo_path) @@ -2662,7 +2611,7 @@ def changelist_near_conflict(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], "--changelist=" + changelist_name, "-m", "msg", wc_dir) @@ -2690,13 +2639,11 @@ def commit_out_of_date_file(sbox): expected_status.tweak("A/D/G/pi", wc_rev=2, status=" ") svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - "-m", "log message", wc_dir) + expected_status) svntest.main.file_append(backup_pi_path, "hello") expected_err = ".*(pi.*out of date|Out of date.*pi).*" - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'commit', '-m', 'log message', wc_backup) @@ -2724,7 +2671,7 @@ def start_commit_detect_capabilities(sbox): # Commit something. iota_path = sbox.ospath('iota') svntest.main.file_append(iota_path, "More stuff in iota") - svntest.actions.run_and_verify_svn(None, [], [], 'ci', '--quiet', + svntest.actions.run_and_verify_svn([], [], 'ci', '--quiet', '-m', 'log msg', wc_dir) # Check that "mergeinfo" was detected. 
@@ -2756,8 +2703,7 @@ def commit_added_missing(sbox): os.remove(b_path) # Commit, hoping to see an error - svntest.actions.run_and_verify_svn("Commit should have failed", - [], ".* is scheduled for addition, but is missing", + svntest.actions.run_and_verify_svn([], ".* is scheduled for addition, but is missing", 'commit', '-m', 'logmsg', wc_dir) #---------------------------------------------------------------------- @@ -2783,7 +2729,7 @@ def tree_conflicts_block_commit(sbox): D = sbox.ospath('A/D') G = sbox.ospath('A/D/G') - error_re = "remains in conflict" + error_re = ".*remains in conflict.*" commit_fails_at_path(wc_dir, wc_dir, error_re) commit_fails_at_path(A, A, error_re) commit_fails_at_path(D, D, error_re) @@ -2857,9 +2803,7 @@ def commit_incomplete(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- # Reported here: @@ -2909,8 +2853,7 @@ def commit_danglers(sbox): svntest.actions.run_and_verify_commit(mu_copied, None, None, - expected_error, - mu_copied) + expected_error) # But now do the same thing via changelist filtering svntest.main.run_svn(None, 'changelist', 'L', mu_copied, sbox.ospath('A/mu')) @@ -2955,6 +2898,22 @@ def last_changed_of_copied_subdir(sbox): } svntest.actions.run_and_verify_info([expected], E_copied) +def commit_unversioned(sbox): + "verify behavior on unversioned targets" + + sbox.build(read_only=True) + wc_dir = sbox.wc_dir + + expected_err = '.*E200009: .*existing.*\' is not under version control.*' + + # Unversioned, but existing file + svntest.main.file_write(sbox.ospath('existing'), "xxxx") + svntest.actions.run_and_verify_commit(wc_dir, None, None, expected_err, + sbox.ospath('existing')) + + # Unversioned, not existing + svntest.actions.run_and_verify_commit(wc_dir, None, None, expected_err, + sbox.ospath('not-existing')) @Issue(4400) def commit_cp_with_deep_delete(sbox): @@ -3004,9 
+2963,7 @@ def commit_cp_with_deep_delete(sbox): # Commit the copy without the one dir. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def commit_deep_deleted(sbox): "try to commit a deep descendant of a deleted node" @@ -3018,9 +2975,9 @@ def commit_deep_deleted(sbox): sbox.simple_propset('k', 'v', 'AA/D/G') # Committing some added descendant returns a proper error - expected_err = ('svn: E200009: \'%s\' is not known to exist in the ' + + expected_err = ('.*svn: E200009: \'%s\' is not known to exist in the ' + 'repository and is not part of the commit, yet its ' + - 'child \'%s\' is part of the commit') % ( + 'child \'%s\' is part of the commit.*') % ( re.escape(os.path.abspath(sbox.ospath('AA'))), re.escape(os.path.abspath(sbox.ospath('AA/D/G')))) @@ -3095,7 +3052,7 @@ def commit_mergeinfo_ood(sbox): ' into \'%s\':\n' % sbox.ospath('branch'), ' U %s\n' % sbox.ospath('branch'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c4', '^/trunk', sbox.ospath('branch')) @@ -3109,15 +3066,53 @@ def commit_mergeinfo_ood(sbox): '--- Recording mergeinfo for merge of r5 into \'%s\':\n' % sbox.ospath('branch'), ' U %s\n' % sbox.ospath('branch'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c5', '^/trunk', sbox.ospath('branch')) # Currently this commit succeeds with dav over HTTPv2, while it should really fail expected_err = '.*out of date.*' - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'commit', sbox.ospath(''), '-m', 'M') +@Issue(2295) +def mkdir_conflict_proper_error(sbox): + "mkdir conflict should produce a proper error" + + sbox.build(create_wc=False) + repo_url = sbox.repo_url + + expected_error = "svn: E160020: .* already exists.*'/A'" + 
svntest.actions.run_and_verify_svn(None, expected_error, + 'mkdir', repo_url + '/A', + '-m', '') + +def commit_xml(sbox): + "commit an xml file" + + sbox.build() + + sbox.simple_add_text('index.xml', 'index.xml') + sbox.simple_add_text('index.html', 'index.html') + sbox.simple_propset('svn:mime-type', 'text/xml', 'index.xml') + sbox.simple_propset('svn:mime-type', 'text/html', 'index.html') + + # This currently (2015-04-09) makes mod_dav return a 'HTTP/1.1 201 Created' + # result with content type text/xml (copied from file), which used to + # invoke the error parsing. + # + # Depending on the Apache version and config, this may cause an xml error. + sbox.simple_commit() + + # This currently (2015-04-09) makes mod_dav return a 'HTTP/1.1 204 Updated' + # result with content type text/xml (copied from file), which used to + # invoke the error parsing. + # + # Depending on the Apache version and config, this may cause an xml error. + sbox.simple_append('index.xml', '<Q></R>', True) + sbox.simple_append('index.html', '<Q></R>', True) + sbox.simple_commit() + ######################################################################## # Run the tests @@ -3189,9 +3184,12 @@ test_list = [ None, commit_add_subadd, commit_danglers, last_changed_of_copied_subdir, + commit_unversioned, commit_cp_with_deep_delete, commit_deep_deleted, commit_mergeinfo_ood, + mkdir_conflict_proper_error, + commit_xml, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/copy_tests.py b/subversion/tests/cmdline/copy_tests.py index 39a686b..e873ea2 100755 --- a/subversion/tests/cmdline/copy_tests.py +++ b/subversion/tests/cmdline/copy_tests.py @@ -25,7 +25,7 @@ ###################################################################### # General modules -import stat, os, re, shutil, logging, sys +import stat, os, re, shutil, logging logger = logging.getLogger() @@ -64,7 +64,7 @@ or a url (when false) copy source is used.""" # File scheduled for deletion rho_path = sbox.ospath('A/D/G/rho') - 
svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) # Status before attempting copies expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -77,7 +77,7 @@ or a url (when false) copy source is used.""" else: pi_src = sbox.repo_url + '/A/D/G/pi' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path) # Now commit @@ -91,8 +91,7 @@ or a url (when false) copy source is used.""" }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Helper for wc_copy_replace_with_props and # repos_to_wc_copy_replace_with_props @@ -113,11 +112,11 @@ def copy_replace_with_props(sbox, wc_copy): # Set props on file which is copy-source later on pi_path = sbox.ospath('A/D/G/pi') rho_path = sbox.ospath('A/D/G/rho') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'phony-prop', '-F', prop_path, pi_path) os.remove(prop_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:eol-style', 'LF', rho_path) # Verify props having been set @@ -139,14 +138,13 @@ def copy_replace_with_props(sbox, wc_copy): expected_status.tweak('A/D/G/rho', wc_rev='2') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Bring wc into sync - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # File scheduled for deletion - svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) # Status before attempting copies expected_status = svntest.actions.get_virginal_state(wc_dir, 2) @@ -159,7 +157,7 @@ def copy_replace_with_props(sbox, wc_copy): else: pi_src = sbox.repo_url + 
'/A/D/G/pi' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path) # Verify both content and props have been copied @@ -184,8 +182,7 @@ def copy_replace_with_props(sbox, wc_copy): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) ###################################################################### @@ -282,18 +279,18 @@ def basic_copy_and_move_files(sbox): svntest.main.file_append(rho_path, 'new appended text for rho') # Copy rho to D -- local mods - svntest.actions.run_and_verify_svn(None, None, [], 'cp', rho_path, D_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', rho_path, D_path) # Copy alpha to C -- no local mods, and rename it to 'alpha2' also - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', alpha_path, alpha2_path) # Move mu to H -- local mods - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', mu_path, H_path) # Move iota to F -- no local mods - svntest.actions.run_and_verify_svn(None, None, [], 'mv', iota_path, F_path) + svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, F_path) # Created expected output tree for 'svn ci': # We should see four adds, two deletes, and one change in total. @@ -324,9 +321,7 @@ def basic_copy_and_move_files(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Issue 1091, alpha2 would now have the wrong checksum and so a # subsequent commit would fail @@ -337,17 +332,15 @@ def basic_copy_and_move_files(sbox): expected_status.tweak('A/C/alpha2', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Assure that attempts at local copy and move fail when a log # message is provided. 
expected_stderr = \ ".*Local, non-commit operations do not take a log message" - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'cp', '-m', 'op fails', rho_path, D_path) - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'mv', '-m', 'op fails', rho_path, D_path) @@ -373,7 +366,7 @@ def receive_copy_in_update(sbox): newG_path = sbox.ospath('A/B/newG') # Copy directory A/D to A/B/newG - svntest.actions.run_and_verify_svn(None, None, [], 'cp', G_path, newG_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', G_path, newG_path) # Created expected output tree for 'svn ci': expected_output = svntest.wc.State(wc_dir, { @@ -391,9 +384,7 @@ def receive_copy_in_update(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Now update the other working copy; it should receive a full add of # the newG directory and its contents. @@ -450,7 +441,7 @@ def resurrect_deleted_dir(sbox): G_path = sbox.ospath('A/D/G') # Delete directory A/D/G, commit that as r2. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--force', + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', G_path) expected_output = svntest.wc.State(wc_dir, { @@ -465,14 +456,12 @@ def resurrect_deleted_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Use 'svn cp URL@1 URL' to resurrect the deleted directory, where # the two URLs are identical. This used to trigger a failure. url = sbox.repo_url + '/A/D/G' - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', url + '@1', url, '-m', 'logmsg') @@ -503,7 +492,7 @@ def copy_deleted_dir_into_prefix(sbox): D_path = sbox.ospath('A/D') # Delete directory A/D, commit that as r2. 
- svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--force', + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', D_path) expected_output = svntest.wc.State(wc_dir, { @@ -512,15 +501,13 @@ def copy_deleted_dir_into_prefix(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, - None, - wc_dir) + None) # Ok, copy from a deleted URL into a prefix of that URL, this used to # result in an assert failing. url1 = sbox.repo_url + '/A/D/G' url2 = sbox.repo_url + '/A/D' - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', url1 + '@1', url2, '-m', 'logmsg') @@ -547,20 +534,18 @@ def no_copy_overwrites(sbox): dirURL2 = sbox.repo_url + "/A/D/H" # Expect out-of-date failure if 'svn cp URL URL' tries to overwrite a file - svntest.actions.run_and_verify_svn("Whoa, I was able to overwrite a file!", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'cp', fileURL1, fileURL2, '-m', 'fooogle') # Create A/D/H/G by running 'svn cp ...A/D/G .../A/D/H' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', dirURL1, dirURL2, '-m', 'fooogle') # Repeat the last command. It should *fail* because A/D/H/G already exists. 
svntest.actions.run_and_verify_svn( - "Whoa, I was able to overwrite a directory!", - None, svntest.verify.AnyOutput, + None, ".*'/A/D/H/G'.*", 'cp', dirURL1, dirURL2, '-m', 'fooogle') @@ -586,9 +571,9 @@ def no_wc_copy_overwrites(sbox): # These copies should fail pi_path = sbox.ospath('A/D/G/pi') rho_path = sbox.ospath('A/D/G/rho') - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'cp', pi_path, rho_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'cp', pi_path, tau_path) # Status after failed copies should not have changed @@ -607,7 +592,7 @@ def copy_modify_commit(sbox): B_path = sbox.ospath('A/B') B2_path = sbox.ospath('A/B2') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', B_path, B2_path) alpha_path = sbox.ospath('A/B2/E/alpha') @@ -620,9 +605,7 @@ def copy_modify_commit(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, - None, - wc_dir) + None) #---------------------------------------------------------------------- @@ -637,7 +620,7 @@ def copy_files_with_properties(sbox): # Set a property on a file rho_path = sbox.ospath('A/D/G/rho') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'pname', 'pval', rho_path) # and commit it @@ -647,22 +630,21 @@ def copy_files_with_properties(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/rho', status=' ', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Set another property, but don't commit it yet - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'pname2', 'pval2', 
rho_path) # WC to WC copy of file with committed and uncommitted properties rho_wc_path = sbox.ospath('A/D/G/rho_wc') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', rho_path, rho_wc_path) # REPOS to WC copy of file with properties rho_url_path = sbox.ospath('A/D/G/rho_url') rho_url = sbox.repo_url + '/A/D/G/rho' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', rho_url, rho_url_path) # Properties are not visible in WC status 'A' @@ -674,11 +656,11 @@ def copy_files_with_properties(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Check properties explicitly - svntest.actions.run_and_verify_svn(None, ['pval\n'], [], + svntest.actions.run_and_verify_svn(['pval\n'], [], 'propget', 'pname', rho_wc_path) - svntest.actions.run_and_verify_svn(None, ['pval2\n'], [], + svntest.actions.run_and_verify_svn(['pval2\n'], [], 'propget', 'pname2', rho_wc_path) - svntest.actions.run_and_verify_svn(None, ['pval\n'], [], + svntest.actions.run_and_verify_svn(['pval\n'], [], 'propget', 'pname', rho_url_path) # Commit and properties are visible in status @@ -694,8 +676,7 @@ def copy_files_with_properties(sbox): 'A/D/G/rho_url' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) #---------------------------------------------------------------------- @@ -710,13 +691,13 @@ def copy_delete_commit(sbox): B2_path = sbox.ospath('A/B2') # copy a tree - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', B_path, B2_path) # delete two files lambda_path = sbox.ospath('A/B2/lambda') alpha_path = sbox.ospath('A/B2/E/alpha') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path, lambda_path) # commit copied tree containing a 
deleted file @@ -727,18 +708,16 @@ def copy_delete_commit(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, - None, - wc_dir) + None) # copy a tree - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A/B'), sbox.ospath('A/B3')) # delete a directory E_path = sbox.ospath('A/B3/E') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', E_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', E_path) # commit copied tree containing a deleted directory expected_output = svntest.wc.State(wc_dir, { @@ -747,9 +726,7 @@ def copy_delete_commit(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, - None, - wc_dir) + None) #---------------------------------------------------------------------- @@ -764,7 +741,7 @@ def mv_and_revert_directory(sbox): new_E_path = os.path.join(F_path, 'E') # Issue 931: move failed to lock the directory being deleted - svntest.actions.run_and_verify_svn(None, None, [], 'move', + svntest.actions.run_and_verify_svn(None, [], 'move', E_path, F_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='D ') @@ -778,7 +755,7 @@ def mv_and_revert_directory(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Issue 932: revert failed to lock the parent directory - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '--recursive', + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', new_E_path) expected_status.remove('A/B/F/E', 'A/B/F/E/alpha', 'A/B/F/E/beta') expected_status.tweak('A/B/E', moved_to=None) @@ -803,13 +780,13 @@ def copy_preserve_executable_bit(sbox): # Create the first file. 
svntest.main.file_append(newpath1, "a new file") - svntest.actions.run_and_verify_svn(None, None, [], 'add', newpath1) + svntest.actions.run_and_verify_svn(None, [], 'add', newpath1) mode1 = os.stat(newpath1)[stat.ST_MODE] # Doing this to get the executable bit set on systems that support # that -- the property itself is not the point. - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:executable', 'on', newpath1) mode2 = os.stat(newpath1)[stat.ST_MODE] @@ -819,12 +796,12 @@ def copy_preserve_executable_bit(sbox): raise svntest.Failure # Commit the file - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'create file and set svn:executable', wc_dir) # Copy the file - svntest.actions.run_and_verify_svn(None, None, [], 'cp', newpath1, newpath2) + svntest.actions.run_and_verify_svn(None, [], 'cp', newpath1, newpath2) mode3 = os.stat(newpath2)[stat.ST_MODE] @@ -850,17 +827,17 @@ def wc_to_repos(sbox): # modify some items to be copied svntest.main.file_append(sbox.ospath('A/D/H/omega'), "new otext\n") - svntest.actions.run_and_verify_svn(None, None, [], 'propset', 'foo', 'bar', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar', beta_path) # copy a file - svntest.actions.run_and_verify_svn(None, None, [], '-m', 'fumble file', + svntest.actions.run_and_verify_svn(None, [], '-m', 'fumble file', 'copy', beta_path, beta2_url) # and a directory - svntest.actions.run_and_verify_svn(None, None, [], '-m', 'fumble dir', + svntest.actions.run_and_verify_svn(None, [], '-m', 'fumble dir', 'copy', H_path, H2_url) # copy a file to a directory - svntest.actions.run_and_verify_svn(None, None, [], '-m', 'fumble file', + svntest.actions.run_and_verify_svn(None, [], '-m', 'fumble file', 'copy', beta_path, H2_url) # update the working copy. 
post-update mereinfo elision will remove @@ -900,7 +877,7 @@ def wc_to_repos(sbox): expected_status) # check local property was copied - svntest.actions.run_and_verify_svn(None, ['bar\n'], [], + svntest.actions.run_and_verify_svn(['bar\n'], [], 'propget', 'foo', beta_path + "2") @@ -927,8 +904,8 @@ def repos_to_wc(sbox): pi_url = sbox.repo_url + "/A/D/G/pi" pi_path = sbox.ospath('pi') - svntest.actions.run_and_verify_svn(None, None, [], 'copy', E_url, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'copy', pi_url, wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'copy', E_url, wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'copy', pi_url, wc_dir) # Extra test: modify file ASAP to check there was a timestamp sleep svntest.main.file_append(pi_path, 'zig\n') @@ -955,7 +932,7 @@ def repos_to_wc(sbox): raise svntest.Failure # Revert everything and verify. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) svntest.main.safe_rmtree(sbox.ospath('E')) @@ -966,7 +943,7 @@ def repos_to_wc(sbox): # Copy an empty directory from the same repository, see issue #1444. C_url = sbox.repo_url + "/A/C" - svntest.actions.run_and_verify_svn(None, None, [], 'copy', C_url, wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'copy', C_url, wc_dir) expected_output = svntest.actions.get_virginal_state(wc_dir, 1) expected_output.add({ @@ -975,7 +952,7 @@ def repos_to_wc(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_output) # Revert everything and verify. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) svntest.main.safe_rmtree(sbox.ospath('C')) @@ -995,14 +972,14 @@ def repos_to_wc(sbox): 'A %s\n' % sbox.ospath('E/beta'), 'A %s\n' % sbox.ospath('E/alpha'), ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'copy', E_url, wc_dir) expected_output = [ '--- Copying from foreign repository URL \'%s\':\n' % pi_url, 'A %s\n' % sbox.ospath('pi'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'copy', pi_url, wc_dir) expected_output = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1015,7 +992,7 @@ def repos_to_wc(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_output) # Revert everything and verify. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) expected_output = svntest.actions.get_virginal_state(wc_dir, 1) # URL->wc copy: @@ -1024,7 +1001,7 @@ def repos_to_wc(sbox): B_url = sbox.repo_url + "/A/B" D_dir = sbox.ospath('A/D') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', B_url, D_dir) expected_output = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1038,8 +1015,8 @@ def repos_to_wc(sbox): }) svntest.actions.run_and_verify_status(wc_dir, expected_output) - # Validate the merge info of the copy destination (we expect none) - svntest.actions.run_and_verify_svn(None, [], [], + # Validate the mergeinfo of the copy destination (we expect none) + svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found', 'propget', SVN_PROP_MERGEINFO, os.path.join(D_dir, 'B')) @@ -1055,7 +1032,7 @@ def copy_to_root(sbox): root = sbox.repo_url mu = root + '/A/mu' - svntest.actions.run_and_verify_svn(None, None, 
[], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', '', mu, root) @@ -1093,8 +1070,8 @@ def url_copy_parent_into_child(sbox): F_url = sbox.repo_url + "/A/B/F" # Issue 1367 parent/child URL-to-URL was rejected. - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'cp', '-m', 'a can of worms', B_url, F_url) @@ -1144,7 +1121,7 @@ def wc_copy_parent_into_child(sbox): # Want a smaller WC svntest.main.safe_rmtree(wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', B_url, wc_dir) @@ -1154,9 +1131,9 @@ def wc_copy_parent_into_child(sbox): was_cwd = os.getcwd() os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, - ['Adding copy of .\n', - '\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Adding copy of .\n', + 'Committing transaction...\n', + 'Committed revision 2.\n'], [], 'cp', '-m', 'a larger can', '.', F_B_url) @@ -1217,7 +1194,7 @@ def resurrect_deleted_file(sbox): # Delete a file in the repository via immediate commit rho_url = sbox.repo_url + '/A/D/G/rho' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_url, '-m', 'rev 2') # Update the wc to HEAD (r2) @@ -1234,7 +1211,7 @@ def resurrect_deleted_file(sbox): expected_status) # repos->wc copy, to resurrect deleted file. - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', rho_url + '@1', wc_dir) # status should now show the file scheduled for addition-with-history @@ -1258,11 +1235,11 @@ def diff_repos_to_wc_copy(sbox): target_wc_path = sbox.ospath('new_file') # Copy a file from the repository to the working copy. 
- svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', iota_repos_path, target_wc_path) # Run diff. - svntest.actions.run_and_verify_svn(None, None, [], 'diff', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'diff', wc_dir) #------------------------------------------------------------- @@ -1287,20 +1264,20 @@ def repos_to_wc_copy_eol_keywords(sbox): "Hello\nSubversion\n$LastChangedRevision$\n", "ab") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'CRLF', iota_wc_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:keywords', 'Rev', iota_wc_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'log msg', wc_dir) # Copy a file from the repository to the working copy. - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', iota_repos_path, target_wc_path) # The original bug was that the copy would seg fault. 
So we test @@ -1342,12 +1319,10 @@ def revision_kinds_local_source(sbox): expected_output = svntest.wc.State(wc_dir, { 'A/mu' : Item(verb='Sending'), }) svntest.main.file_append(mu_path, "New r2 text.\n") - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) svntest.main.file_append(mu_path, "New r3 text.\n") - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r2', mu_path) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r2', mu_path) svntest.main.file_append(mu_path, "Working copy.\n") r1 = "This is the file 'mu'.\n" @@ -1369,10 +1344,10 @@ def revision_kinds_local_source(sbox): for dst, from_rev, text, peg_rev in sub_tests: dst_path = os.path.join(wc_dir, dst) if peg_rev is None: - svntest.actions.run_and_verify_svn(None, None, [], "copy", + svntest.actions.run_and_verify_svn(None, [], "copy", mu_path, dst_path) else: - svntest.actions.run_and_verify_svn(None, None, [], "copy", + svntest.actions.run_and_verify_svn(None, [], "copy", mu_path + "@" + peg_rev, dst_path) expected_disk.add({ dst: Item(contents=text) }) @@ -1405,10 +1380,10 @@ def copy_over_missing_file(sbox): os.remove(mu_path) # Try both wc->wc copy and repos->wc copy, expect failures: - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'cp', iota_path, mu_path) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'cp', iota_url, mu_path) # Make sure that the working copy is not corrupted: @@ -1433,7 +1408,7 @@ def repos_to_wc_1634(sbox): # First delete a subdirectory and commit. 
E_path = sbox.ospath('A/B/E') - svntest.actions.run_and_verify_svn(None, None, [], 'delete', E_path) + svntest.actions.run_and_verify_svn(None, [], 'delete', E_path) expected_output = svntest.wc.State(wc_dir, { 'A/B/E' : Item(verb='Deleting'), }) @@ -1441,12 +1416,11 @@ def repos_to_wc_1634(sbox): expected_status.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now copy the directory back. E_url = sbox.repo_url + "/A/B/E@1" - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', E_url, E_path) expected_status.add({ 'A/B/E' : Item(status='A ', copied='+', wc_rev='-'), @@ -1455,7 +1429,7 @@ def repos_to_wc_1634(sbox): }) svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_status = svntest.actions.get_virginal_state(wc_dir, 2) expected_status.add({ 'A/B/E' : Item(status='A ', copied='+', wc_rev='-'), @@ -1475,27 +1449,27 @@ def double_uri_escaping_1814(sbox): base_url = sbox.repo_url + '/base' # rev. 2 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'mybase', base_url) orig_url = base_url + '/foo%20bar' # rev. 3 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'r1', orig_url) orig_rev = 3 # rev. 
4 new_url = base_url + '/foo_bar' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', '-m', 'r2', orig_url, new_url) # This had failed with ra_neon because "foo bar" would be double-encoded # "foo bar" ==> "foo%20bar" ==> "foo%2520bar" - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ls', ('-r'+str(orig_rev)), '-R', base_url) @@ -1536,7 +1510,7 @@ def wc_to_wc_copy_deleted(sbox): B2_path = sbox.ospath('A/B2') # Schedule for delete - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', os.path.join(B_path, 'E', 'alpha'), os.path.join(B_path, 'lambda'), os.path.join(B_path, 'F')) @@ -1553,11 +1527,10 @@ def wc_to_wc_copy_deleted(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Copy including stuff in state not-present - svntest.actions.run_and_verify_svn(None, None, [], 'copy', B_path, B2_path) + svntest.actions.run_and_verify_svn(None, [], 'copy', B_path, B2_path) expected_status.add({ 'A/B2' : Item(status='A ', wc_rev='-', copied='+'), 'A/B2/E' : Item(status=' ', wc_rev='-', copied='+'), @@ -1574,7 +1547,7 @@ def wc_to_wc_copy_deleted(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Revert the entire copy including the schedule not-present bits - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '--recursive', + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', B2_path) expected_status.remove('A/B2', 'A/B2/E', @@ -1586,7 +1559,7 @@ def wc_to_wc_copy_deleted(sbox): svntest.main.safe_rmtree(B2_path) # Copy again and commit - svntest.actions.run_and_verify_svn(None, None, [], 'copy', B_path, B2_path) + svntest.actions.run_and_verify_svn(None, [], 'copy', B_path, B2_path) expected_status.add({ 'A/B2' : Item(status='A ', copied='+', wc_rev='-'), @@ -1610,8 +1583,7 @@ def 
wc_to_wc_copy_deleted(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- # Test for copy into a non-existent URL path @@ -1655,11 +1627,11 @@ def non_existent_url_to_url(sbox): pi_url = sbox.repo_url + '/A/D/G/pi' new_url = sbox.repo_url + '/newfile' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'delete', adg_url, '-m', '') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', pi_url + '@1', new_url, '-m', '') @@ -1676,19 +1648,19 @@ def old_dir_url_to_url(sbox): new_url = sbox.repo_url + '/newfile' # Delete a directory - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'delete', adg_url, '-m', '') # Copy a file to where the directory used to be - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', iota_url, adg_url, '-m', '') # Try copying a file that was in the deleted directory that is now a # file - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', pi_url + '@1', new_url, '-m', '') @@ -1710,7 +1682,7 @@ def wc_copy_dir_to_itself(sbox): dir_path = os.path.join(wc_dir, dirname) # try to copy dir to itself - svntest.actions.run_and_verify_svn(None, [], + svntest.actions.run_and_verify_svn([], '.*Cannot copy .* into its own child.*', 'copy', dir_path, dir_path) @@ -1742,28 +1714,55 @@ def mixed_wc_to_url(sbox): rho_path = sbox.ospath('A/D/G/rho') # Remove A/D/G/pi, then commit that removal. 
- svntest.actions.run_and_verify_svn(None, None, [], 'rm', pi_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', pi_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Delete pi.", wc_dir) # Make a modification to A/D/G/rho, then commit that modification. svntest.main.file_append(rho_path, "\nFirst modification to rho.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Modify rho.", wc_dir) # Make another modification to A/D/G/rho, but don't commit it. svntest.main.file_append(rho_path, "Second modification to rho.\n") # Copy into the source, delete part of the copy, add a non-copied directory - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', B_path, X_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', Y_path) # Now copy local A/D/G to create new directory A/D/Z the repository. 
- svntest.actions.run_and_verify_svn(None, None, [], + + expected_status = svntest.wc.State(G_path, { + '' : Item(status=' ', wc_rev='1'), + 'X' : Item(status='A ', copied='+', wc_rev='-'), + 'X/F' : Item(status=' ', copied='+', wc_rev='-'), + 'X/E' : Item(status=' ', copied='+', wc_rev='-'), + 'X/E/alpha' : Item(status='D ', copied='+', wc_rev='-'), + 'X/E/beta' : Item(status=' ', copied='+', wc_rev='-'), + 'X/lambda' : Item(status=' ', copied='+', wc_rev='-'), + 'Y' : Item(status='A ', wc_rev='-'), + 'rho' : Item(status='M ', wc_rev='3'), + 'tau' : Item(status=' ', wc_rev='1'), + }) + + svntest.actions.run_and_verify_status(G_path, expected_status) + + expected_output = svntest.verify.UnorderedOutput([ + 'Adding copy of %s\n' % sbox.ospath('A/D/G'), + 'Adding copy of %s\n' % sbox.ospath('A/D/G/X'), + 'Deleting copy of %s\n' % sbox.ospath('A/D/G/X/E/alpha'), + 'Adding copy of %s\n' % sbox.ospath('A/D/G/Y'), + 'Deleting copy of %s\n' % sbox.ospath('A/D/G/pi'), + 'Replacing copy of %s\n' % sbox.ospath('A/D/G/rho'), + 'Transmitting file data .done\n', + 'Committing transaction...\n', + 'Committed revision 4.\n', + ]) + svntest.actions.run_and_verify_svn(expected_output, [], 'cp', '-m', "Make a copy.", G_path, Z_url) expected_output = svntest.verify.UnorderedOutput([ @@ -1778,12 +1777,12 @@ def mixed_wc_to_url(sbox): 'A + A/D/Z/rho\n', ' (from A/D/G/rho:r3)\n', ]) - svntest.actions.run_and_verify_svnlook(None, expected_output, [], + svntest.actions.run_and_verify_svnlook(expected_output, [], 'changed', sbox.repo_dir, '--copy-info') # Copy from copied source - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', "Make a copy.", E_path, Z2_url) expected_output = svntest.verify.UnorderedOutput([ @@ -1791,14 +1790,14 @@ def mixed_wc_to_url(sbox): ' (from A/B/E/:r1)\n', 'D A/D/Z2/alpha\n', ]) - svntest.actions.run_and_verify_svnlook(None, expected_output, [], + svntest.actions.run_and_verify_svnlook(expected_output, 
[], 'changed', sbox.repo_dir, '--copy-info') # Check out A/D/Z. If it has pi, that's a bug; or if its rho does # not have the second local mod, that's also a bug. svntest.main.safe_rmtree(wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', Z_url, wc_dir) if os.path.exists(sbox.ospath('pi')): @@ -1848,7 +1847,7 @@ def delete_replaced_file(sbox): # File scheduled for deletion. rho_path = sbox.ospath('A/D/G/rho') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) # Status before attempting copies expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1857,14 +1856,14 @@ def delete_replaced_file(sbox): # Copy 'pi' over 'rho' with history. pi_src = sbox.ospath('A/D/G/pi') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', pi_src, rho_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path) # Check that file copied. expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-') svntest.actions.run_and_verify_status(wc_dir, expected_status) # Now delete replaced file. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', rho_path) # Verify status after deletion. @@ -1889,13 +1888,13 @@ def mv_unversioned_file(sbox): svntest.main.file_append(unver_path_2, "another unversioned file") # Try to move an unversioned file. - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, ".*unversioned1' " + "(does not exist|is not under version control)", 'mv', unver_path_1, dest_path_1) # Try to forcibly move an unversioned file. 
- svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, ".*unversioned2.* " + "(does not exist|is not under version control)", 'mv', @@ -1932,7 +1931,7 @@ def force_move(sbox): was_cwd = os.getcwd() os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, move_output, + svntest.actions.run_and_verify_svn(move_output, [], 'move', file_name, "dest") @@ -1959,9 +1958,8 @@ def force_move(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) - svntest.actions.run_and_verify_svn('Cat file', expected_file_content, [], + expected_status) + svntest.actions.run_and_verify_svn(expected_file_content, [], 'cat', sbox.repo_url + '/dest') @@ -1979,11 +1977,11 @@ def copy_copied_file_and_dir(sbox): rho_copy_path_2 = sbox.ospath('A/B/F/rho_copy_2') # Copy A/D/G/rho to A/D/rho_copy_1 - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', rho_path, rho_copy_path_1) # Copy the copied file: A/D/rho_copy_1 to A/B/F/rho_copy_2 - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', rho_copy_path_1, rho_copy_path_2) E_path = sbox.ospath('A/B/E') @@ -1991,11 +1989,11 @@ def copy_copied_file_and_dir(sbox): E_path_copy_2 = sbox.ospath('A/D/G/E_copy_2') # Copy A/B/E to A/B/F/E_copy_1 - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', E_path, E_path_copy_1) # Copy the copied dir: A/B/F/E_copy_1 to A/D/G/E_copy_2 - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', E_path_copy_1, E_path_copy_2) # Created expected output tree for 'svn ci': @@ -2021,9 +2019,7 @@ def copy_copied_file_and_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def move_copied_file_and_dir(sbox): @@ -2037,11 +2033,11 @@ def 
move_copied_file_and_dir(sbox): rho_copy_move_path = sbox.ospath('A/B/F/rho_copy_moved') # Copy A/D/G/rho to A/D/rho_copy - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', rho_path, rho_copy_path) # Move the copied file: A/D/rho_copy to A/B/F/rho_copy_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', rho_copy_path, rho_copy_move_path) E_path = sbox.ospath('A/B/E') @@ -2049,11 +2045,11 @@ def move_copied_file_and_dir(sbox): E_path_copy_move = sbox.ospath('A/D/G/E_copy_moved') # Copy A/B/E to A/B/F/E_copy - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', E_path, E_path_copy) # Move the copied file: A/B/F/E_copy to A/D/G/E_copy_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', E_path_copy, E_path_copy_move) # Created expected output tree for 'svn ci': @@ -2076,9 +2072,7 @@ def move_copied_file_and_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def move_moved_file_and_dir(sbox): @@ -2092,11 +2086,11 @@ def move_moved_file_and_dir(sbox): rho_move_moved_path = sbox.ospath('A/B/F/rho_move_moved') # Move A/D/G/rho to A/D/rho_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', rho_path, rho_move_path) # Move the moved file: A/D/rho_moved to A/B/F/rho_move_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', rho_move_path, rho_move_moved_path) E_path = sbox.ospath('A/B/E') @@ -2104,11 +2098,11 @@ def move_moved_file_and_dir(sbox): E_path_move_moved = sbox.ospath('A/D/G/E_move_moved') # Copy A/B/E to A/B/F/E_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + 
svntest.actions.run_and_verify_svn(None, [], 'mv', E_path, E_path_moved) # Move the moved file: A/B/F/E_moved to A/D/G/E_move_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', E_path_moved, E_path_move_moved) # Created expected output tree for 'svn ci': @@ -2135,9 +2129,7 @@ def move_moved_file_and_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def move_file_within_moved_dir(sbox): @@ -2150,7 +2142,7 @@ def move_file_within_moved_dir(sbox): D_path_moved = sbox.ospath('A/B/F/D_moved') # Move A/B/D to A/B/F/D_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, D_path_moved) chi_path = sbox.ospath('A/B/F/D_moved/H/chi') @@ -2161,9 +2153,9 @@ def move_file_within_moved_dir(sbox): # Move A/B/F/D_moved/H/chi to A/B/F/D_moved/H/chi_moved # then move that to A/B/F/D_moved/H/chi_moved_again - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', chi_path, chi_moved_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', chi_moved_path, chi_moved_again_path) @@ -2204,9 +2196,7 @@ def move_file_within_moved_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def move_file_out_of_moved_dir(sbox): @@ -2219,7 +2209,7 @@ def move_file_out_of_moved_dir(sbox): D_path_moved = sbox.ospath('A/B/F/D_moved') # Move A/B/D to A/B/F/D_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, D_path_moved) chi_path = sbox.ospath('A/B/F/D_moved/H/chi') @@ -2229,9 +2219,9 @@ def move_file_out_of_moved_dir(sbox): # Move A/B/F/D_moved/H/chi to A/B/F/D_moved/H/chi_moved # then move that to A/C/chi_moved_again - 
svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', chi_path, chi_moved_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', chi_moved_path, chi_moved_again_path) @@ -2272,9 +2262,7 @@ def move_file_out_of_moved_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def move_dir_within_moved_dir(sbox): @@ -2287,7 +2275,7 @@ def move_dir_within_moved_dir(sbox): D_path_moved = sbox.ospath('A/B/F/D_moved') # Move A/D to A/B/F/D_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, D_path_moved) H_path = sbox.ospath('A/B/F/D_moved/H') @@ -2297,9 +2285,9 @@ def move_dir_within_moved_dir(sbox): # Move A/B/F/D_moved/H to A/B/F/D_moved/H_moved # then move that to A/B/F/D_moved/H_moved_again - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', H_path, H_moved_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', H_moved_path, H_moved_again_path) @@ -2340,9 +2328,7 @@ def move_dir_within_moved_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def move_dir_out_of_moved_dir(sbox): @@ -2355,7 +2341,7 @@ def move_dir_out_of_moved_dir(sbox): D_path_moved = sbox.ospath('A/B/F/D_moved') # Move A/D to A/B/F/D_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, D_path_moved) H_path = sbox.ospath('A/B/F/D_moved/H') @@ -2364,9 +2350,9 @@ def move_dir_out_of_moved_dir(sbox): # Move A/B/F/D_moved/H to A/B/F/D_moved/H_moved # then move that to A/C/H_moved_again - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + 
svntest.actions.run_and_verify_svn(None, [], 'mv', H_path, H_moved_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', H_moved_path, H_moved_again_path) @@ -2407,9 +2393,7 @@ def move_dir_out_of_moved_dir(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Includes regression testing for issue #3429 ("svn mv A B; svn mv B A" # generates replace without history). @@ -2424,9 +2408,9 @@ def move_file_back_and_forth(sbox): rho_move_path = sbox.ospath('A/D/rho_moved') # Move A/D/G/rho away from and then back to its original path - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', rho_path, rho_move_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', rho_move_path, rho_path) # Check expected status before commit @@ -2438,9 +2422,7 @@ def move_file_back_and_forth(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Includes regression testing for issue #3429 ("svn mv A B; svn mv B A" @@ -2456,13 +2438,13 @@ def move_dir_back_and_forth(sbox): D_move_path = sbox.ospath('D_moved') # Move A/D to D_moved - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, D_move_path) # Move the moved dir: D_moved back to its starting # location at A/D. 
- svntest.actions.run_and_verify_svn(None, None, [], 'mv', D_move_path, D_path) + svntest.actions.run_and_verify_svn(None, [], 'mv', D_move_path, D_path) # Verify that the status indicates a replace with history expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -2477,7 +2459,7 @@ def copy_move_added_paths(sbox): # Create a new file and schedule it for addition upsilon_path = sbox.ospath('A/D/upsilon') svntest.main.file_write(upsilon_path, "This is the file 'upsilon'\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', upsilon_path) + svntest.actions.run_and_verify_svn(None, [], 'add', upsilon_path) # Create a dir with children and schedule it for addition I_path = sbox.ospath('A/D/I') @@ -2490,12 +2472,12 @@ def copy_move_added_paths(sbox): svntest.main.file_write(eta_path, "This is the file 'eta'\n") svntest.main.file_write(theta_path, "This is the file 'theta'\n") svntest.main.file_write(kappa_path, "This is the file 'kappa'\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', I_path) + svntest.actions.run_and_verify_svn(None, [], 'add', I_path) # Create another dir and schedule it for addition K_path = sbox.ospath('K') os.mkdir(K_path) - svntest.actions.run_and_verify_svn(None, None, [], 'add', K_path) + svntest.actions.run_and_verify_svn(None, [], 'add', K_path) # Verify all the adds took place correctly. 
expected_status_after_adds = svntest.actions.get_virginal_state(wc_dir, 1) @@ -2522,30 +2504,30 @@ def copy_move_added_paths(sbox): # Copy added dir A/D/I to added dir K/I I_copy_path = os.path.join(K_path, 'I') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', I_path, I_copy_path) # Copy added file A/D/upsilon into added dir K upsilon_copy_path = os.path.join(K_path, 'upsilon') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', upsilon_path, upsilon_copy_path) # Move added file A/D/upsilon to upsilon, # then move it again to A/upsilon upsilon_move_path = sbox.ospath('upsilon') upsilon_move_path_2 = sbox.ospath('A/upsilon') - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', upsilon_path, upsilon_move_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', upsilon_move_path, upsilon_move_path_2) # Move added dir A/D/I to A/B/I, # then move it again to A/D/H/I I_move_path = sbox.ospath('A/B/I') I_move_path_2 = sbox.ospath('A/D/H/I') - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', I_path, I_move_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', I_move_path, I_move_path_2) # Created expected output tree for 'svn ci' @@ -2585,9 +2567,7 @@ def copy_move_added_paths(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Run_and_verify_commit() doesn't handle status of unversioned paths # so manually confirm unversioned paths got copied and moved too. 
@@ -2614,15 +2594,15 @@ def copy_added_paths_with_props(sbox): # Create a new file, schedule it for addition and set properties upsilon_path = sbox.ospath('A/D/upsilon') svntest.main.file_write(upsilon_path, "This is the file 'upsilon'\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', upsilon_path) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'add', upsilon_path) + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar', upsilon_path) # Create a dir and schedule it for addition and set properties I_path = sbox.ospath('A/D/I') os.mkdir(I_path) - svntest.actions.run_and_verify_svn(None, None, [], 'add', I_path) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'add', I_path) + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar', I_path) # Verify all the adds took place correctly. @@ -2644,12 +2624,12 @@ def copy_added_paths_with_props(sbox): # Copy added dir I to dir A/C I_copy_path = sbox.ospath('A/C/I') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', I_path, I_copy_path) # Copy added file A/upsilon into dir A/C upsilon_copy_path = sbox.ospath('A/C/upsilon') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', upsilon_path, upsilon_copy_path) # Created expected output tree for 'svn ci' @@ -2678,9 +2658,7 @@ def copy_added_paths_with_props(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True) def copy_added_paths_to_URL(sbox): @@ -2692,7 +2670,7 @@ def copy_added_paths_to_URL(sbox): # Create a new file and schedule it for addition upsilon_path = sbox.ospath('A/D/upsilon') svntest.main.file_write(upsilon_path, "This is the file 'upsilon'\n") 
- svntest.actions.run_and_verify_svn(None, None, [], 'add', upsilon_path) + svntest.actions.run_and_verify_svn(None, [], 'add', upsilon_path) # Create a dir with children and schedule it for addition I_path = sbox.ospath('A/D/I') @@ -2705,7 +2683,7 @@ def copy_added_paths_to_URL(sbox): svntest.main.file_write(eta_path, "This is the file 'eta'\n") svntest.main.file_write(theta_path, "This is the file 'theta'\n") svntest.main.file_write(kappa_path, "This is the file 'kappa'\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', I_path) + svntest.actions.run_and_verify_svn(None, [], 'add', I_path) # Verify all the adds took place correctly. expected_status_after_adds = svntest.actions.get_virginal_state(wc_dir, 1) @@ -2732,18 +2710,18 @@ def copy_added_paths_to_URL(sbox): # Copy added file A/D/upsilon to URL://A/C/upsilon upsilon_copy_URL = sbox.repo_url + '/A/C/upsilon' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', '', upsilon_path, upsilon_copy_URL) - # Validate the merge info of the copy destination (we expect none). - svntest.actions.run_and_verify_svn(None, [], [], + # Validate the mergeinfo of the copy destination (we expect none). 
+ svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found', 'propget', SVN_PROP_MERGEINFO, upsilon_copy_URL) # Copy added dir A/D/I to URL://A/D/G/I I_copy_URL = sbox.repo_url + '/A/D/G/I' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', '', I_path, I_copy_URL) @@ -2770,9 +2748,7 @@ def copy_added_paths_to_URL(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Created expected output for update expected_output = svntest.wc.State(wc_dir, { @@ -2931,7 +2907,7 @@ def move_multiple_wc(sbox): C_path = sbox.ospath('A/C') # Move chi, psi, omega and E to A/C - svntest.actions.run_and_verify_svn(None, None, [], 'mv', chi_path, psi_path, + svntest.actions.run_and_verify_svn(None, [], 'mv', chi_path, psi_path, omega_path, E_path, C_path) # Create expected output @@ -2965,9 +2941,7 @@ def move_multiple_wc(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -2986,7 +2960,7 @@ def copy_multiple_wc(sbox): C_path = sbox.ospath('A/C') # Copy chi, psi, omega and E to A/C - svntest.actions.run_and_verify_svn(None, None, [], 'cp', chi_path, psi_path, + svntest.actions.run_and_verify_svn(None, [], 'cp', chi_path, psi_path, omega_path, E_path, C_path) # Create expected output @@ -3012,9 +2986,7 @@ def copy_multiple_wc(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -3034,7 +3006,7 @@ def move_multiple_repo(sbox): # Move three files and a directory in the repo to a different location # in the repo - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', chi_url, psi_url, omega_url, 
E_url, C_url, '-m', 'logmsg') @@ -3100,7 +3072,7 @@ def copy_multiple_repo(sbox): # Copy three files and a directory in the repo to a different location # in the repo - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', chi_url, psi_url, omega_url, E_url, C_url, '-m', 'logmsg') @@ -3158,12 +3130,12 @@ def copy_multiple_repo_wc(sbox): # We need this in order to check that we don't end up with URI-encoded # paths in the WC (issue #2955) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', '-m', 'log_msg', + svntest.actions.run_and_verify_svn(None, [], 'mv', '-m', 'log_msg', sbox.repo_url + '/A/D/H/omega', omega_with_space_url) # Perform the copy and check the output - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', chi_url, psi_url, omega_with_space_url, E_url, C_path) @@ -3187,8 +3159,7 @@ def copy_multiple_repo_wc(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -3207,7 +3178,7 @@ def copy_multiple_wc_repo(sbox): C_url = sbox.repo_url + '/A/C' # Perform the copy and check the output - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', chi_path, psi_path, omega_path, E_path, C_url, '-m', 'logmsg') @@ -3268,27 +3239,27 @@ def copy_peg_rev_local_files(sbox): # Play a shell game with some WC files, then commit the changes back # to the repository (making r2). 
- svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', psi_path, new_iota_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, psi_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', new_iota_path, iota_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 2', wc_dir) # Copy using a peg rev (remember, the object at iota_path at HEAD # was at psi_path back at r1). - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', iota_path + '@HEAD', '-r', '1', sigma_path) # Commit and verify disk contents - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'rev 3') @@ -3321,46 +3292,46 @@ def copy_peg_rev_local_dirs(sbox): alpha_path = os.path.join(E_path, 'alpha') # Make some changes to the repository - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 2', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', E_path, I_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 3', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', G_path, E_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 4', wc_dir) - 
svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', I_path, G_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 5', wc_dir) # Copy using a peg rev - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', G_path + '@HEAD', '-r', '1', J_path) # Commit and verify disk contents - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'rev 6') @@ -3406,14 +3377,14 @@ def copy_peg_rev_url(sbox): iota_text = "This is the file 'iota'.\n" # Make some changes to the repository - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', psi_path, new_iota_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, psi_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', new_iota_path, iota_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 2', wc_dir) @@ -3421,13 +3392,13 @@ def copy_peg_rev_url(sbox): # Copy using a peg rev # Add an empty peg specifier ('@') to sigma_url when copying, to test for # issue #3651 "svn copy does not eat peg revision within copy target path". - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', iota_url + '@HEAD', '-r', '1', sigma_url + '@', '-m', 'rev 3') # Validate the copy destination's mergeinfo (we expect none). 
- svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found', 'propget', SVN_PROP_MERGEINFO, sigma_url) # Update to HEAD and verify disk contents @@ -3465,15 +3436,15 @@ def old_dir_wc_to_wc(sbox): alpha_url = E_url + '/alpha' # delete E/alpha in r2 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', '', alpha_url) # delete E in r3 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', '', E_url) # Copy an old revision of E into a new path in the WC - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-r1', E, E2) # Create expected output tree. @@ -3491,9 +3462,7 @@ def old_dir_wc_to_wc(sbox): # Commit the one file. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -3509,7 +3478,7 @@ def copy_make_parents_wc_wc(sbox): new_iota_path = sbox.ospath('X/Y/Z/iota') # Copy iota - svntest.actions.run_and_verify_svn(None, None, [], 'cp', '--parents', + svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', iota_path, new_iota_path) # Create expected output @@ -3533,9 +3502,7 @@ def copy_make_parents_wc_wc(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- # Test copying and creating parents from the repo to the wc @@ -3550,7 +3517,7 @@ def copy_make_parents_repo_wc(sbox): new_iota_path = sbox.ospath('X/Y/Z/iota') # Copy iota - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', iota_url, new_iota_path) @@ -3575,9 +3542,7 @@ def copy_make_parents_repo_wc(sbox): 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -3593,7 +3558,7 @@ def copy_make_parents_wc_repo(sbox): new_iota_url = sbox.repo_url + '/X/Y/Z/iota' # Copy iota - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', '-m', 'log msg', iota_path, new_iota_url) @@ -3641,7 +3606,7 @@ def copy_make_parents_repo_repo(sbox): new_iota_url = sbox.repo_url + '/X/Y/Z/iota' # Copy iota - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', '-m', 'log msg', iota_url, new_iota_url) @@ -3752,7 +3717,7 @@ def URI_encoded_repos_to_wc(sbox): }) # Make a copy - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'copy', sbox.repo_url + '/' + URL_rel_path, os.path.join(wc_dir, @@ -3762,8 +3727,7 @@ def URI_encoded_repos_to_wc(sbox): {dest_name : Item(verb='Adding')}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) copy_URL_to_WC('A', 'A COPY', 2) copy_URL_to_WC('A COPY', 'A_COPY_2', 3) @@ -3784,7 +3748,6 @@ def allow_unversioned_parent_for_copy_src(sbox): # Copy the wc-in-unversioned-parent working copy to our original wc. svntest.actions.run_and_verify_svn(None, - None, [], 'cp', wc2_dir, @@ -3830,7 +3793,7 @@ def unneeded_parents(sbox): A_url = sbox.repo_url + '/A' # The --parents is unnecessary, but should still work (not segfault). - svntest.actions.run_and_verify_svn(None, None, [], 'cp', '--parents', + svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', '-m', 'log msg', iota_url, A_url) # Verify that it worked. @@ -3859,7 +3822,7 @@ def double_parents_with_url(sbox): Z_url = sbox.repo_url + '/A/B/Z' # --parents shouldn't result in a double commit of the same directory. 
- svntest.actions.run_and_verify_svn(None, None, [], 'cp', '--parents', + svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', '-m', 'log msg', E_url, Z_url) # Verify that it worked. @@ -3901,12 +3864,11 @@ def copy_into_missing_dir(sbox): # svn: Error processing command 'modify-entry' in '.' # svn: Error modifying entry for 'A' # svn: Entry 'A' is already under version control - svntest.actions.run_and_verify_svn(None, - None, ".*: Path '.*' is not a directory", + svntest.actions.run_and_verify_svn(None, ".*: Path '.*' is not a directory", 'cp', iota_path, A_path) # 'cleanup' should not error. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir) @@ -3946,12 +3908,12 @@ def find_copyfrom_information_upstairs(sbox): A2_path = sbox.ospath('A2') B2_path = os.path.join(A2_path, 'B') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', A_path, A2_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', A_path, A2_path) saved_cwd = os.getcwd() try: os.chdir(B2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', 'blah') - svntest.actions.run_and_verify_svn(None, None, [], 'mv', 'lambda', 'blah') + svntest.actions.run_and_verify_svn(None, [], 'mkdir', 'blah') + svntest.actions.run_and_verify_svn(None, [], 'mv', 'lambda', 'blah') finally: os.chdir(saved_cwd) @@ -3989,9 +3951,9 @@ def path_move_and_copy_between_wcs_2475(sbox): # Do a checkout, and verify the resulting output and disk contents. 
svntest.actions.run_and_verify_checkout(repo_url2, - wc2_dir, - expected_output, - expected_wc) + wc2_dir, + expected_output, + expected_wc) # Copy a file from wc to wc2 mu_path = sbox.ospath('A/mu') @@ -4071,10 +4033,10 @@ def commit_copy_depth_empty(sbox): a = sbox.ospath('A') new_a = sbox.ospath('new_A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', a, new_a) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', new_a, '--depth', 'empty', '-m', 'Copied directory') @@ -4089,12 +4051,12 @@ def copy_below_copy(sbox): new_A_mu = os.path.join(new_A, 'mu') new_A_new_mu = os.path.join(new_A, 'new_mu') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', A, new_A) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', new_A_D, new_A_new_D) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', new_A_mu, new_A_new_mu) expected_output = svntest.wc.State(sbox.wc_dir, { @@ -4139,8 +4101,7 @@ def copy_below_copy(sbox): svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output, - expected_status, - None, sbox.wc_dir) + expected_status) def move_below_move(sbox): "move a dir below a moved dir" @@ -4153,12 +4114,12 @@ def move_below_move(sbox): new_A_mu = os.path.join(new_A, 'mu') new_A_new_mu = os.path.join(new_A, 'new_mu') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', A, new_A) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', new_A_D, new_A_new_D) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', new_A_mu, new_A_new_mu) expected_output = svntest.wc.State(sbox.wc_dir, { @@ -4201,8 +4162,7 @@ def move_below_move(sbox): 
svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output, - expected_status, - None, sbox.wc_dir) + expected_status) def reverse_merge_move(sbox): @@ -4218,10 +4178,10 @@ def reverse_merge_move(sbox): # Create another working copy path and checkout. wc2_dir = sbox.add_wc_path('2') - rav_svn(None, None, [], 'co', sbox.repo_url, wc2_dir) + rav_svn(None, [], 'co', sbox.repo_url, wc2_dir) # Update working directory and ensure that we are at revision 1. - rav_svn(None, exp_noop_up_out(1), [], 'up', wc_dir) + rav_svn(exp_noop_up_out(1), [], 'up', wc_dir) # Add new folder and file, later commit new_path = os.path.join(a_dir, 'New') @@ -4229,21 +4189,21 @@ def reverse_merge_move(sbox): first_path = os.path.join(new_path, 'first') svntest.main.file_append(first_path, 'appended first text') svntest.main.run_svn(None, "add", new_path) - rav_svn(None, None, [], 'ci', wc_dir, '-m', 'Add new folder %s' % new_path) - rav_svn(None, exp_noop_up_out(2), [], 'up', wc_dir) + rav_svn(None, [], 'ci', wc_dir, '-m', 'Add new folder %s' % new_path) + rav_svn(exp_noop_up_out(2), [], 'up', wc_dir) # Reverse merge to revert previous changes and commit - rav_svn(None, None, [], 'merge', '-c', '-2', a_repo_url, a_dir) - rav_svn(None, None, [], 'ci', '-m', 'Reverting svn merge -c -2.', a_dir) - rav_svn(None, exp_noop_up_out(3), [], 'up', wc_dir) + rav_svn(None, [], 'merge', '-c', '-2', a_repo_url, a_dir) + rav_svn(None, [], 'ci', '-m', 'Reverting svn merge -c -2.', a_dir) + rav_svn(exp_noop_up_out(3), [], 'up', wc_dir) # Reverse merge again to undo last revert. - rav_svn(None, None, [], 'merge', '-c', '-3', a_repo_url, a_dir) + rav_svn(None, [], 'merge', '-c', '-3', a_repo_url, a_dir) # Move new added file to another one and commit. 
second_path = os.path.join(new_path, 'second') - rav_svn(None, None, [], 'move', first_path, second_path) - rav_svn(None, "Adding.*New|Adding.*first||Committed revision 4.", [], + rav_svn(None, [], 'move', first_path, second_path) + rav_svn("Adding.*New|Adding.*first||Committed revision 4.", [], 'ci', '-m', 'Revert svn merge. svn mv %s %s.' % (first_path, second_path), a_dir) @@ -4323,7 +4283,7 @@ def nonrecursive_commit_of_copy(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir, '--depth', 'immediates') # Regression test for issue #3474 - making a new subdir, moving files into it @@ -4342,9 +4302,9 @@ def copy_added_dir_with_copy(sbox): # Alias for svntest.actions.run_and_verify_svn rav_svn = svntest.actions.run_and_verify_svn - rav_svn(None, None, [], 'mkdir', new_dir) - rav_svn(None, None, [], 'cp', sbox.ospath('A/mu'), new_dir) - rav_svn(None, None, [], 'cp', new_dir, new_dir2) + rav_svn(None, [], 'mkdir', new_dir) + rav_svn(None, [], 'cp', sbox.ospath('A/mu'), new_dir) + rav_svn(None, [], 'cp', new_dir, new_dir2) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -4359,7 +4319,7 @@ def copy_added_dir_with_copy(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # move of added dir also retains copy history of children - rav_svn(None, None, [], 'mv', new_dir, new_dir3) + rav_svn(None, [], 'mv', new_dir, new_dir3) expected_status.remove('NewDir', 'NewDir/mu') expected_status.add( { @@ -4386,7 +4346,7 @@ def copy_broken_symlink(sbox): sbox.simple_add_symlink('linktarget', 'new_symlink') - rav_svn(None, None, [], 'cp', new_symlink, copied_symlink) + rav_svn(None, [], 'cp', new_symlink, copied_symlink) # Check whether both new_symlink and copied_symlink are added to the # working copy @@ -4405,15 +4365,15 @@ def move_dir_containing_move(sbox): """move a directory containing moved node""" sbox.build() - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + 
svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('A/B/E/alpha'), sbox.ospath('A/B/E/alpha_moved')) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('A/B/F'), sbox.ospath('A/B/F_moved')) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('A/B'), sbox.ospath('A/B_tmp')) @@ -4444,7 +4404,7 @@ def move_dir_containing_move(sbox): svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('A/B_tmp'), sbox.ospath('A/B_moved')) expected_status.tweak('A/B', moved_to='A/B_moved') @@ -4502,8 +4462,7 @@ def move_dir_containing_move(sbox): 'A/B_moved/F_moved', moved_from=None) svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output, - expected_status, - None, sbox.wc_dir) + expected_status) def copy_dir_with_space(sbox): """copy a directory with whitespace to one without""" @@ -4511,11 +4470,11 @@ def copy_dir_with_space(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A/B/E'), sbox.ospath('E with spaces')) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A/B/E/alpha'), sbox.ospath('E with spaces/al pha')) @@ -4532,15 +4491,14 @@ def copy_dir_with_space(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('E with spaces'), sbox.ospath('E also spaces') ) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('E with spaces/al 
pha'), sbox.ospath('E also spaces/al b') ) @@ -4558,15 +4516,14 @@ def copy_dir_with_space(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('E with spaces'), sbox.ospath('E new spaces') ) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('E new spaces/al pha'), sbox.ospath('E also spaces/al c') ) @@ -4591,8 +4548,7 @@ def copy_dir_with_space(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Regression test for issue #3676 @Issue(3676) @@ -4606,13 +4562,13 @@ def changed_data_should_match_checkout(sbox): verify_dir = sbox.add_wc_path('verify') - svntest.actions.run_and_verify_svn(None, None, [], 'copy', A_B_E, E_new) + svntest.actions.run_and_verify_svn(None, [], 'copy', A_B_E, E_new) sbox.simple_commit() - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'co', sbox.repo_url, verify_dir) + svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url, verify_dir) was_cwd = os.getcwd() os.chdir(verify_dir) @@ -4622,7 +4578,7 @@ def changed_data_should_match_checkout(sbox): os.chdir(was_cwd) os.chdir(wc_dir) verify_out = svntest.verify.UnorderedOutput(verify_out) - svntest.actions.run_and_verify_svn(None, verify_out, [], 'status', '-v') + svntest.actions.run_and_verify_svn(verify_out, [], 'status', '-v') os.chdir(was_cwd) # Regression test for issue #3676 for copies including directories @@ -4637,13 +4593,13 @@ def changed_dir_data_should_match_checkout(sbox): verify_dir = sbox.add_wc_path('verify') - svntest.actions.run_and_verify_svn(None, None, [], 'copy', A_B, B_new) + 
svntest.actions.run_and_verify_svn(None, [], 'copy', A_B, B_new) sbox.simple_commit() - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'co', sbox.repo_url, verify_dir) + svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url, verify_dir) was_cwd = os.getcwd() os.chdir(verify_dir) @@ -4666,7 +4622,7 @@ def move_added_nodes(sbox): sbox.build(read_only=True) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', sbox.ospath('X'), sbox.ospath('X/Y')) @@ -4677,21 +4633,21 @@ def move_added_nodes(sbox): }) svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('X/Y'), sbox.ospath('X/Z')) expected_status.remove('X/Y') expected_status.add({'X/Z' : Item(status='A ', wc_rev='0')}) svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('X/Z'), sbox.ospath('Z')) expected_status.remove('X/Z') expected_status.add({'Z' : Item(status='A ', wc_rev='0')}) svntest.actions.run_and_verify_status(sbox.wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('Z'), sbox.ospath('X/Z')) expected_status.remove('Z') @@ -4724,9 +4680,7 @@ def mixed_rev_copy_del(sbox): expected_status.remove('A/B/E/alpha') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Update to r2, then update A/B/E/alpha and A/B/E/beta to r1 svntest.main.run_svn(None, 'up', wc_dir) @@ -4743,8 +4697,7 @@ def mixed_rev_copy_del(sbox): expected_status.tweak('A/B/E/beta', wc_rev=1) 
svntest.actions.run_and_verify_update(wc_dir, expected_output, None, - expected_status, [], - None, None, None, None, None, + expected_status, [], False, '-r1', sbox.ospath('A/B/E/alpha'), sbox.ospath('A/B/E/beta')) @@ -4752,7 +4705,7 @@ def mixed_rev_copy_del(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Copy A/B/E to A/B/E_copy - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A/B/E'), sbox.ospath('A/B/E_copy')) expected_status.add({ @@ -4781,9 +4734,7 @@ def mixed_rev_copy_del(sbox): expected_status.remove('A/B/E_copy/alpha', 'A/B/E_copy/beta') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def copy_delete_undo(sbox, use_revert): "copy, delete child, undo" @@ -4963,12 +4914,12 @@ def move_wc_and_repo_dir_to_itself(sbox): repo_url = sbox.repo_url + '/A' # try to move wc dir to itself - svntest.actions.run_and_verify_svn(None, [], + svntest.actions.run_and_verify_svn([], '.*Cannot move path.* into itself.*', 'move', wc_dir, wc_dir) # try to move repo dir to itself - svntest.actions.run_and_verify_svn(None, [], + svntest.actions.run_and_verify_svn([], '.*Cannot move URL.* into itself.*', 'move', repo_url, repo_url) @@ -5058,7 +5009,7 @@ def copy_wc_url_with_absent(sbox): expected_output, None, None, - None, None, None, None, None, False, + [], False, wc_dir, '--set-depth', 'infinity') # Except for A/no, the 3 directories should now have the same children @@ -5102,10 +5053,10 @@ def copy_url_shortcut(sbox): wc_dir = sbox.wc_dir # Can't use ^/A/D/G shortcut here because wc/X is unversioned. 
- svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.ospath('A/D/G'), sbox.ospath('X')) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('X/pi')) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -5118,7 +5069,7 @@ def copy_url_shortcut(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Can use ^/A/D/G even though X/pi is a delete within a copy. - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', '^/A/D/G/pi', sbox.ospath('X/pi')) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -5181,12 +5132,12 @@ def copy_base_of_deleted(sbox): sbox.build(read_only = True) wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], 'rm', sbox.ospath('A/mu')) + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/mu')) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', status='D ') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', '-rBASE', + svntest.actions.run_and_verify_svn(None, [], 'cp', '-rBASE', sbox.ospath('A/mu'), sbox.ospath('A/mu2')) expected_status.add({ 'A/mu2' : Item(status='A ', copied='+', wc_rev='-'), @@ -5433,6 +5384,13 @@ def copy_and_move_conflicts(sbox): 'D/G/pi', 'D/G/rho', 'D/G/tau') + expected_status.tweak('B', moved_from='../A/B') + expected_status.tweak('D', moved_from='../A/D') + expected_status.tweak('H', moved_from='D/H') + expected_status.tweak('Q', moved_from='../A/Q') + expected_status.tweak('D/H', moved_to='H') + expected_status.tweak('alpha', moved_from='B/E/alpha') + expected_status.tweak('B/E/alpha', moved_to='alpha') svntest.actions.run_and_verify_status(wc('move-dest'), expected_status) expected_disk = svntest.wc.State('', { @@ -5467,14 +5425,14 
@@ def copy_deleted_dir(sbox): # E155035 - SVN_ERR_WC_PATH_UNEXPECTED_STATUS # E155010 - SVN_ERR_WC_PATH_NOT_FOUND - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: (E145000|E155035|E155010): ' + '(Path \'.*iota\' does not exist)|' + '(Deleted node .*iota\' copied)', 'cp', sbox.ospath('iota'), sbox.ospath('new_iota')) - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: (E145000|E155035|E155010): ' + '(Path \'.*D\' does not exist)|' + '(Deleted node .*D\' copied)', @@ -5486,13 +5444,13 @@ def copy_deleted_dir(sbox): os.mkdir(sbox.ospath('A/D')) # At one time these two invocations raised an assertion. - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: (E155035|E155010): ' + '(Path \'.*iota\' does not exist)|' + '(Deleted node.* .*iota\' can\'t be.*)', 'cp', sbox.ospath('iota'), sbox.ospath('new_iota')) - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: (E155035|E155010): ' + '(Path \'.*D\' does not exist)|' + '(Deleted node.* .*D\' can\'t be.*)', @@ -5509,11 +5467,11 @@ def commit_copied_half_of_move(sbox): D_path = sbox.ospath('A/D') # iota -> A/D/iota; verify we cannot commit just A/D/iota - svntest.actions.run_and_verify_svn(None, None, [], 'mv', iota_path, D_path) + svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, D_path) expected_error = "svn: E200009: Cannot commit '.*%s' because it was " \ "moved from '.*%s'" % (re.escape(sbox.ospath('A/D/iota')), re.escape(iota_path)) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'commit', '-m', 'foo', os.path.join(D_path, 'iota')) @@ -5521,28 +5479,28 @@ def commit_copied_half_of_move(sbox): expected_error = "svn: E200009: Cannot commit '.*%s' because it was " \ "moved from '.*%s'" % (re.escape(sbox.ospath('A/D/iota')), re.escape(iota_path)) - 
svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'commit', '-m', 'foo', D_path) # A/D -> A/C/D; verify we cannot commit just A/C C_path = sbox.ospath('A/C') - svntest.actions.run_and_verify_svn(None, None, [], 'mv', D_path, C_path) + svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, C_path) expected_error = "svn: E200009: Cannot commit '.*%s' because it was moved " \ "from '.*%s'" % (re.escape(os.path.join(C_path, "D")), re.escape(D_path)) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'commit', '-m', 'foo', C_path) # A/C/D/iota -> A/iota; verify that iota's moved-from hasn't changed D_iota_path = sbox.ospath('A/C/D/iota') A_iota_path = sbox.ospath('A/iota') - svntest.actions.run_and_verify_svn(None, None, [], 'mv', D_iota_path, + svntest.actions.run_and_verify_svn(None, [], 'mv', D_iota_path, A_iota_path) expected_error = "svn: E200009: Cannot commit '.*%s' because it was " \ "moved from '.*%s'" % (re.escape(A_iota_path), re.escape(iota_path)) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'commit', '-m', 'foo', A_iota_path) @@ -5557,21 +5515,21 @@ def commit_deleted_half_of_move(sbox): D_path = sbox.ospath('A/D') # iota -> A/D/iota; verify we cannot commit just iota - svntest.actions.run_and_verify_svn(None, None, [], 'mv', iota_path, D_path) + svntest.actions.run_and_verify_svn(None, [], 'mv', iota_path, D_path) expected_error = "svn: E200009: Cannot commit '.*%s' because it was moved " \ "to '.*%s'" % (re.escape(iota_path), re.escape(os.path.join(D_path, "iota"))) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'commit', '-m', 'foo', iota_path) # A/D -> C; verify we cannot commit just A C_path = sbox.ospath('C') - svntest.actions.run_and_verify_svn(None, 
None, [], 'mv', D_path, C_path) + svntest.actions.run_and_verify_svn(None, [], 'mv', D_path, C_path) expected_error = "svn: E200009: Cannot commit '.*%s' because it was moved " \ "to '.*%s'" % (re.escape(D_path), re.escape(C_path)) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'commit', '-m', 'foo', A_path) @Issue(4026) @@ -5594,7 +5552,7 @@ def wc_wc_copy_incomplete(sbox): svntest.actions.set_incomplete(sbox.ospath('A/B/F'), 2) # Copy fails with no changes to wc - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: E155035: Cannot handle status', 'copy', sbox.ospath('A/B/E'), @@ -5605,7 +5563,7 @@ def wc_wc_copy_incomplete(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Copy fails part way through - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, 'svn: E155035: Cannot handle status', 'copy', sbox.ospath('A/B'), @@ -5632,8 +5590,7 @@ def wc_wc_copy_incomplete(sbox): expected_status.remove('A/B2/E', 'A/B2/F') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) expected_status.add({ 'A/B2/E' : Item(status='! ', wc_rev=3), 'A/B2/F' : Item(status='! 
', wc_rev=3), @@ -5662,13 +5619,13 @@ def three_nested_moves(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('A/B'), sbox.ospath('A/B2')) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('A/B2/E'), sbox.ospath('A/B2/E2')) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', sbox.ospath('A/B2/E2/alpha'), sbox.ospath('A/B2/E2/alpha2')) @@ -5694,8 +5651,7 @@ def three_nested_moves(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) def copy_to_unversioned_parent(sbox): "copy to unversioned parent" @@ -5703,14 +5659,14 @@ def copy_to_unversioned_parent(sbox): sbox.build() # This succeeds - #svntest.actions.run_and_verify_svn(None, None, [], 'cp', '--parents', + #svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', # sbox.ospath('A/B'), # sbox.ospath('New/B2')) # And this currently fails with The node '.*Unversioned' was not found, # while it should succeed or returns some error that a GUI client can use. 
os.mkdir(sbox.ospath('Unversioned')) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', '--parents', + svntest.actions.run_and_verify_svn(None, [], 'cp', '--parents', sbox.ospath('A/B'), sbox.ospath('Unversioned/B2')) @@ -5728,7 +5684,7 @@ def copy_text_conflict(sbox): sbox.simple_update(revision='1') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A'), sbox.ospath('A_copied')) @@ -5769,19 +5725,19 @@ def copy_over_excluded(sbox): sbox.build(read_only = True) wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', '--set-depth', 'exclude', sbox.ospath('A/D')) expected_error = "svn: E155000: Path '.*D' exists.*excluded.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'cp', sbox.repo_url + '/A/C', sbox.ospath('A/D')) expected_error = "svn: E155000: Path '.*D' exists.*excluded.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'cp', sbox.ospath('A/C'), sbox.ospath('A/D')) @@ -5792,25 +5748,18 @@ def copy_relocate(sbox): sbox.build() wc_dir = sbox.wc_dir - tmp_dir = sbox.add_wc_path('relocated') + tmp_dir, url = sbox.add_repo_path('relocated') shutil.copytree(sbox.repo_dir, tmp_dir) - url = 'file://' - - if sys.platform == 'win32': - url += '/' - - url += os.path.abspath(tmp_dir).replace(os.path.sep, '/') - - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'relocate', url, wc_dir) copiedpath = sbox.ospath('AA') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', url + '/A', copiedpath) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'info', copiedpath) def ext_wc_copy_deleted(sbox): @@ -5824,14 +5773,14 @@ 
def ext_wc_copy_deleted(sbox): sbox.simple_rm('A/B') sbox.simple_commit() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'exclude', sbox.ospath('A/D')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url, wc2_dir, '-r', 1) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.path('A'), os.path.join(wc2_dir,'AA')) expected_output = expected_output = svntest.wc.State(wc2_dir, { @@ -5840,8 +5789,7 @@ def ext_wc_copy_deleted(sbox): }) svntest.actions.run_and_verify_commit(wc2_dir, - expected_output, None, None, - wc2_dir) + expected_output, None) def copy_subtree_deleted(sbox): "copy to-be-deleted subtree" @@ -5860,11 +5808,11 @@ def copy_subtree_deleted(sbox): 'AA/B' : Item(verb='Deleting'), }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, None, None, + expected_output, None, [], sbox.ospath('AA')) # Commit copy between working copies - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.path('A'), os.path.join(wc2_dir,'AA2')) expected_output = expected_output = svntest.wc.State(wc2_dir, { @@ -5872,9 +5820,19 @@ def copy_subtree_deleted(sbox): 'AA2/B' : Item(verb='Deleting'), }) svntest.actions.run_and_verify_commit(wc2_dir, - expected_output, None, None, - wc2_dir) + expected_output, None) + +def resurrect_at_root(sbox): + "resurrect directory at root" + + sbox.build(create_wc=False) + + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/A', + '-m', '') + svntest.actions.run_and_verify_svn(None, [], 'cp', + sbox.repo_url + '/A/D/H@1', + sbox.repo_url + '/A', '-m', '') ######################################################################## # Run the tests @@ -5996,6 +5954,7 @@ test_list = [ None, copy_relocate, ext_wc_copy_deleted, copy_subtree_deleted, + resurrect_at_root, ] if 
__name__ == '__main__': diff --git a/subversion/tests/cmdline/dav-mirror-autocheck.sh b/subversion/tests/cmdline/dav-mirror-autocheck.sh index 42afa28..298a8ba 100755 --- a/subversion/tests/cmdline/dav-mirror-autocheck.sh +++ b/subversion/tests/cmdline/dav-mirror-autocheck.sh @@ -23,7 +23,7 @@ # to the slave. The test should be able to throw all kinds # of svn operations at one or the other, and master/slave # verified as identical in the end. -# +# # Master / slave setup is achieved in a single httpd process # using virtual hosts bound to different addresses on the # loopback network (127.0.0.1, 127.0.0.2) for slave and @@ -34,7 +34,7 @@ # http://subversion.tigris.org/issues/show_bug.cgi?id=2939 # But of course, any svn traffic liable to break over # mirroring would be a good addition. -# +# # Most of the httpd setup was lifted from davautocheck.sh. # The common boilerplate snippets to setup/start/stop httpd # between the two could be factored out and shared. @@ -95,7 +95,7 @@ function get_prog_name() { return 1 } -# splat out httpd config +# splat out httpd config function setup_config() { say "setting up config: " $1 @@ -152,7 +152,7 @@ CustomLog "${HTTPD_ROOT}/ops" "%t %u %{SVN-REPOS-NAME}e %{SVN-ACTION}e ServerName ${SLAVE_HOST} CustomLog "${HTTPD_ROOT}/slave_access_log" common ErrorLog "${HTTPD_ROOT}/slave_error_log" -# slave 'normal' location +# slave 'normal' location <Location "/${SLAVE_LOCATION}"> DAV svn SVNPath "${SLAVE_REPOS}" @@ -196,7 +196,7 @@ function usage() { echo echo " " '<test-work-directory>' must not exist, \ I will not clobber it for you 1>&2 - exit 1 + exit 1 } ### Start execution here ### @@ -403,7 +403,7 @@ $SVNSYNC initialize --non-interactive "$SYNC_URL" "$MASTER_URL" \ # # reproducible test case from: # http://subversion.tigris.org/issues/show_bug.cgi?id=2939 -# +# BASE_URL="$SLAVE_URL" say running svnmucc test to $BASE_URL svnmucc="$SVNMUCC --non-interactive --username jrandom --password rayjandom -mm" @@ -492,7 +492,7 @@ say "Some 
house-keeping..." say "Re-activating the post-commit hook on the master repo: $MASTER_REPOS." mv "$MASTER_REPOS/hooks/post-commit_" "$MASTER_REPOS/hooks/post-commit" say "Syncing slave with master." -$SVNSYNC --non-interactive sync "$SYNC_URL" --username=svnsync --password=svnsync +$SVNSYNC --non-interactive sync "$SYNC_URL" --username=svnsync --password=svnsync # shut it down echo -n "${SCRIPT}: stopping httpd: " $HTTPD -f $HTTPD_CONFIG -k stop diff --git a/subversion/tests/cmdline/davautocheck.sh b/subversion/tests/cmdline/davautocheck.sh index 21fe418..31b2057 100755 --- a/subversion/tests/cmdline/davautocheck.sh +++ b/subversion/tests/cmdline/davautocheck.sh @@ -25,7 +25,7 @@ # This script simplifies preparation of environment for Subversion client # communicating with a server via DAV protocol. The prerequisites of such # testing are: -# - Subversion built using --enable-shared --enable-dso --with-apxs options, +# - Subversion built using --enable-shared --with-apxs options, # - Working Apache 2 HTTPD Server with the apxs program reachable through # PATH or specified via the APXS Makefile variable or environment variable, # - Modules dav_module and log_config_module compiled as DSO or built into @@ -62,7 +62,7 @@ # one version's client against another version's server) specify both APXS # *and* MODULE_PATH for the other server: # -# APXS=/opt/svn/1.4.x/bin/apxs MODULE_PATH=/opt/svn/1.4.x/modules \ +# APXS=/opt/svn/1.4.x/bin/apxs MODULE_PATH=/opt/svn/1.4.x/modules \ # subversion/tests/cmdline/davautocheck.sh # # To prevent the server from advertising httpv2, pass USE_HTTPV1 in @@ -71,7 +71,7 @@ # To enable "SVNCacheRevProps on" set CACHE_REVPROPS in the environment. # # To test over https set USE_SSL in the environment. -# +# # To use value for "SVNPathAuthz" directive set SVN_PATH_AUTHZ with # appropriate value in the environment. # @@ -79,12 +79,14 @@ # environment. 
# # Passing --no-tests as argv[1] will have the script start a server -# but not run any tests. +# but not run any tests. Passing --gdb will do the same, and in addition +# spawn gdb in the foreground attached to the running server. PYTHON=${PYTHON:-python} SCRIPTDIR=$(dirname $0) SCRIPT=$(basename $0) +STOPSCRIPT=$SCRIPTDIR/.$SCRIPT.stop trap stop_httpd_and_die HUP TERM INT @@ -112,14 +114,18 @@ query() { if [ -n "$BASH_VERSION" ]; then read -n 1 -t 32 else - # - prog=$(cat) <<'EOF' + # + prog=" import select as s import sys +import tty, termios +tty.setcbreak(sys.stdin.fileno(), termios.TCSANOW) if s.select([sys.stdin.fileno()], [], [], 32)[0]: sys.stdout.write(sys.stdin.read(1)) -EOF - REPLY=`stty cbreak; $PYTHON -c "$prog" "$@"; stty -cbreak` +" + stty_state=`stty -g` + REPLY=`$PYTHON -u -c "$prog" "$@"` + stty $stty_state fi echo [ "${REPLY:-$2}" = 'y' ] @@ -136,7 +142,7 @@ get_loadmodule_config() { fi # maybe it's built-in? - "$HTTPD" -l | grep -q "$1\\.c" && return + "$HTTPD" -l | grep "$1\\.c" >/dev/null && return return 1 } @@ -157,6 +163,7 @@ get_prog_name() { } # Don't assume sbin is in the PATH. +# ### Presumably this is used to locate /usr/sbin/apxs or /usr/sbin/apache2 PATH="$PATH:/usr/sbin:/usr/local/sbin" # Find the source and build directories. 
The build dir can be found if it is @@ -217,15 +224,19 @@ fi if [ ${MODULE_PATH:+set} ]; then MOD_DAV_SVN="$MODULE_PATH/mod_dav_svn.so" MOD_AUTHZ_SVN="$MODULE_PATH/mod_authz_svn.so" + MOD_DONTDOTHAT="$MODULE_PATH/mod_dontdothat.so" else MOD_DAV_SVN="$ABS_BUILDDIR/subversion/mod_dav_svn/.libs/mod_dav_svn.so" MOD_AUTHZ_SVN="$ABS_BUILDDIR/subversion/mod_authz_svn/.libs/mod_authz_svn.so" + MOD_DONTDOTHAT="$ABS_BUILDDIR/tools/server-side/mod_dontdothat/.libs/mod_dontdothat.so" fi [ -r "$MOD_DAV_SVN" ] \ - || fail "dav_svn_module not found, please use '--enable-shared --enable-dso --with-apxs' with your 'configure' script" + || fail "dav_svn_module not found, please use '--enable-shared --with-apxs' with your 'configure' script" [ -r "$MOD_AUTHZ_SVN" ] \ - || fail "authz_svn_module not found, please use '--enable-shared --enable-dso --with-apxs' with your 'configure' script" + || fail "authz_svn_module not found, please use '--enable-shared --with-apxs' with your 'configure' script" +[ -r "$MOD_DONTDOTHAT" ] \ + || fail "dontdothat_module not found, please use '--enable-shared --with-apxs' with your 'configure' script" for d in "$ABS_BUILDDIR"/subversion/*/.libs; do if [ -z "$BUILDDIR_LIBRARY_PATH" ]; then @@ -237,12 +248,10 @@ done case "`uname`" in Darwin*) - LDD='otool -L' DYLD_LIBRARY_PATH="$BUILDDIR_LIBRARY_PATH:$DYLD_LIBRARY_PATH" export DYLD_LIBRARY_PATH ;; *) - LDD='ldd' LD_LIBRARY_PATH="$BUILDDIR_LIBRARY_PATH:$LD_LIBRARY_PATH" export LD_LIBRARY_PATH ;; @@ -255,8 +264,6 @@ HTTPD=$(get_prog_name $httpd) || fail "HTTPD '$HTTPD' not found" "$HTTPD" -v 1>/dev/null 2>&1 \ || fail "HTTPD '$HTTPD' doesn't start properly" -say "Using '$HTTPD'..." - HTPASSWD=$(get_prog_name htpasswd htpasswd2) \ || fail "Could not find htpasswd or htpasswd2" [ -x $HTPASSWD ] \ @@ -289,8 +296,6 @@ LOAD_MOD_AUTHN_CORE="$(get_loadmodule_config mod_authn_core)" \ || fail "Authn_Core module not found." 
LOAD_MOD_AUTHZ_CORE="$(get_loadmodule_config mod_authz_core)" \ || fail "Authz_Core module not found." -LOAD_MOD_AUTHZ_HOST="$(get_loadmodule_config mod_authz_host)" \ - || fail "Authz_Host module not found." LOAD_MOD_UNIXD=$(get_loadmodule_config mod_unixd) \ || fail "UnixD module not found" } @@ -298,6 +303,10 @@ LOAD_MOD_AUTHN_FILE="$(get_loadmodule_config mod_authn_file)" \ || fail "Authn_File module not found." LOAD_MOD_AUTHZ_USER="$(get_loadmodule_config mod_authz_user)" \ || fail "Authz_User module not found." +LOAD_MOD_AUTHZ_GROUPFILE="$(get_loadmodule_config mod_authz_groupfile)" \ + || fail "Authz_GroupFile module not found." +LOAD_MOD_AUTHZ_HOST="$(get_loadmodule_config mod_authz_host)" \ + || fail "Authz_Host module not found." } if [ ${APACHE_MPM:+set} ]; then LOAD_MOD_MPM=$(get_loadmodule_config mod_mpm_$APACHE_MPM) \ @@ -308,17 +317,16 @@ if [ ${USE_SSL:+set} ]; then || fail "SSL module not found" fi -random_port() { - if [ -n "$BASH_VERSION" ]; then - echo $(($RANDOM+1024)) - else - $PYTHON -c 'import random; print random.randint(1024, 2**16-1)' - fi -} +# Stop any previous instances, os we can re-use the port. +if [ -x $STOPSCRIPT ]; then $STOPSCRIPT ; sleep 1; fi -HTTPD_PORT=$(random_port) -while netstat -an | grep $HTTPD_PORT | grep 'LISTEN'; do - HTTPD_PORT=$(random_port) +HTTPD_PORT=3691 +while netstat -an | grep $HTTPD_PORT | grep 'LISTEN' >/dev/null; do + HTTPD_PORT=$(( HTTPD_PORT + 1 )) + if [ $HTTPD_PORT -eq 65536 ]; then + # Most likely the loop condition is true regardless of $HTTPD_PORT + fail "netstat claims you have no free ports for httpd to listen on." 
+ fi done HTTPD_ROOT="$ABS_BUILDDIR/subversion/tests/cmdline/httpd-$(date '+%Y%m%d-%H%M%S')" HTTPD_CFG="$HTTPD_ROOT/cfg" @@ -326,8 +334,16 @@ HTTPD_PID="$HTTPD_ROOT/pid" HTTPD_ACCESS_LOG="$HTTPD_ROOT/access_log" HTTPD_ERROR_LOG="$HTTPD_ROOT/error_log" HTTPD_MIME_TYPES="$HTTPD_ROOT/mime.types" -BASE_URL="http://localhost:$HTTPD_PORT" +HTTPD_DONTDOTHAT="$HTTPD_ROOT/dontdothat" +if [ -z "$BASE_URL" ]; then + BASE_URL="http://localhost:$HTTPD_PORT" +else + # Specify the public name of the host when using a proxy on another host, the + # port number will be appended. + BASE_URL="$BASE_URL:$HTTPD_PORT" +fi HTTPD_USERS="$HTTPD_ROOT/users" +HTTPD_GROUPS="$HTTPD_ROOT/groups" mkdir "$HTTPD_ROOT" \ || fail "couldn't create temporary directory '$HTTPD_ROOT'" @@ -388,9 +404,24 @@ fi say "Adding users for lock authentication" $HTPASSWD -bc $HTTPD_USERS jrandom rayjandom $HTPASSWD -b $HTTPD_USERS jconstant rayjandom +$HTPASSWD -b $HTTPD_USERS __dumpster__ __loadster__ +$HTPASSWD -b $HTTPD_USERS JRANDOM rayjandom +$HTPASSWD -b $HTTPD_USERS JCONSTANT rayjandom + +say "Adding groups for mod_authz_svn tests" +cat > "$HTTPD_GROUPS" <<__EOF__ +random: jrandom +constant: jconstant +__EOF__ touch $HTTPD_MIME_TYPES +cat > "$HTTPD_DONTDOTHAT" <<__EOF__ +[recursive-actions] +/ = deny + +__EOF__ + cat > "$HTTPD_CFG" <<__EOF__ $LOAD_MOD_MPM $LOAD_MOD_SSL @@ -405,8 +436,11 @@ $LOAD_MOD_AUTHN_CORE $LOAD_MOD_AUTHN_FILE $LOAD_MOD_AUTHZ_CORE $LOAD_MOD_AUTHZ_USER +$LOAD_MOD_AUTHZ_GROUPFILE $LOAD_MOD_AUTHZ_HOST +$LOAD_MOD_ACCESS_COMPAT LoadModule authz_svn_module "$MOD_AUTHZ_SVN" +LoadModule dontdothat_module "$MOD_DONTDOTHAT" __EOF__ @@ -443,10 +477,10 @@ cat >> "$HTTPD_CFG" <<__EOF__ Listen $HTTPD_PORT ServerName localhost PidFile "$HTTPD_PID" -LogFormat "%h %l %u %t \"%r\" %>s %b" common +LogFormat "%h %l %u %t \"%r\" %>s %b \"%f\"" common CustomLog "$HTTPD_ACCESS_LOG" common ErrorLog "$HTTPD_ERROR_LOG" -LogLevel Debug +LogLevel debug ServerRoot "$HTTPD_ROOT" DocumentRoot "$HTTPD_ROOT" 
ScoreBoardFile "$HTTPD_ROOT/run" @@ -486,6 +520,19 @@ CustomLog "$HTTPD_ROOT/ops" "%t %u %{SVN-REPOS-NAME}e %{SVN-ACTION}e" SVNCacheRevProps ${CACHE_REVPROPS_SETTING} ${SVN_PATH_AUTHZ_LINE} </Location> +<Location /ddt-test-work/repositories> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/repositories" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + Require valid-user + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + ${SVN_PATH_AUTHZ_LINE} + DontDoThatConfigFile "$HTTPD_DONTDOTHAT" +</Location> <Location /svn-test-work/local_tmp/repos> DAV svn SVNPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp/repos" @@ -497,11 +544,180 @@ CustomLog "$HTTPD_ROOT/ops" "%t %u %{SVN-REPOS-NAME}e %{SVN-ACTION}e" SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} ${SVN_PATH_AUTHZ_LINE} </Location> +<Location /authz-test-work/anon> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + # This may seem unnecessary but granting access to everyone here is necessary + # to exercise a bug with httpd 2.3.x+. The "Require all granted" syntax is + # new to 2.3.x+ which we can detect with the mod_authz_core.c module + # signature. Use the "Allow from all" syntax with older versions for symmetry. 
+ <IfModule mod_authz_core.c> + Require all granted + </IfModule> + <IfModule !mod_authz_core.c> + Allow from all + </IfMOdule> + ${SVN_PATH_AUTHZ_LINE} +</Location> +<Location /authz-test-work/mixed> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + Require valid-user + Satisfy Any + ${SVN_PATH_AUTHZ_LINE} +</Location> +<Location /authz-test-work/mixed-noauthwhenanon> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + Require valid-user + AuthzSVNNoAuthWhenAnonymousAllowed On + SVNPathAuthz On +</Location> +<Location /authz-test-work/authn> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + Require valid-user + ${SVN_PATH_AUTHZ_LINE} +</Location> +<Location /authz-test-work/authn-anonoff> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion 
Repository" + AuthUserFile $HTTPD_USERS + Require valid-user + AuthzSVNAnonymous Off + SVNPathAuthz On +</Location> +<Location /authz-test-work/authn-lcuser> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + Require valid-user + AuthzForceUsernameCase Lower + ${SVN_PATH_AUTHZ_LINE} +</Location> +<Location /authz-test-work/authn-lcuser> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + Require valid-user + AuthzForceUsernameCase Lower + ${SVN_PATH_AUTHZ_LINE} +</Location> +<Location /authz-test-work/authn-group> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + AuthGroupFile $HTTPD_GROUPS + Require group random + AuthzSVNAuthoritative Off + SVNPathAuthz On +</Location> +<IfModule mod_authz_core.c> + <Location /authz-test-work/sallrany> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + 
SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + AuthzSendForbiddenOnFailure On + Satisfy All + <RequireAny> + Require valid-user + Require expr req('ALLOW') == '1' + </RequireAny> + ${SVN_PATH_AUTHZ_LINE} + </Location> + <Location /authz-test-work/sallrall> + DAV svn + SVNParentPath "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/local_tmp" + AuthzSVNAccessFile "$ABS_BUILDDIR/subversion/tests/cmdline/svn-test-work/authz" + SVNAdvertiseV2Protocol ${ADVERTISE_V2_PROTOCOL} + SVNCacheRevProps ${CACHE_REVPROPS_SETTING} + SVNListParentPath On + AuthType Basic + AuthName "Subversion Repository" + AuthUserFile $HTTPD_USERS + AuthzSendForbiddenOnFailure On + Satisfy All + <RequireAll> + Require valid-user + Require expr req('ALLOW') == '1' + </RequireAll> + ${SVN_PATH_AUTHZ_LINE} + </Location> +</IfModule> RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)\$ /svn-test-work/repositories/\$1 RedirectMatch ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)\$ /svn-test-work/repositories/\$1 __EOF__ START="$HTTPD -f $HTTPD_CFG" +printf \ +'#!/bin/sh +if [ -d "%s" ]; then + printf "Stopping previous HTTPD instance..." + if %s -k stop; then + # httpd had no output; echo a newline. + echo "" + elif [ -s "%s" ]; then + # httpd would have printed an error terminated by a newline. + kill -9 "`cat %s`" + fi +fi +' >$STOPSCRIPT "$HTTPD_ROOT" "$START" "$HTTPD_PID" "$HTTPD_PID" +chmod +x $STOPSCRIPT $START -t \ || fail "Configuration file didn't pass the check, most likely modules couldn't be loaded" @@ -531,41 +747,48 @@ if [ $? 
-ne 0 ]; then HTTP_FETCH_OUTPUT='-s -k -o' fi $HTTP_FETCH $HTTP_FETCH_OUTPUT "$HTTPD_CFG-copy" "$BASE_URL/cfg" -diff -q "$HTTPD_CFG" "$HTTPD_CFG-copy" > /dev/null \ +diff "$HTTPD_CFG" "$HTTPD_CFG-copy" > /dev/null \ || fail "HTTPD doesn't operate according to the generated configuration" rm "$HTTPD_CFG-copy" say "HTTPD is good" if [ $# -eq 1 ] && [ "x$1" = 'x--no-tests' ]; then - echo "http://localhost:$HTTPD_PORT" + echo "http://localhost:$HTTPD_PORT/svn-test-work/repositories" + exit +fi + +if [ $# -eq 1 ] && [ "x$1" = 'x--gdb' ]; then + echo "http://localhost:$HTTPD_PORT/svn-test-work/repositories" + $STOPSCRIPT && gdb -silent -ex r -args $START -X exit fi + if type time > /dev/null; then TIME_CMD=time else TIME_CMD="" fi +MAKE=${MAKE:-make} + say "starting the tests..." CLIENT_CMD="$ABS_BUILDDIR/subversion/svn/svn" -$LDD "$CLIENT_CMD" | grep -q 'not found' \ - && fail "Subversion client couldn't be fully linked at run-time" if [ "$HTTP_LIBRARY" = "" ]; then say "Using default dav library" - "$CLIENT_CMD" --version | egrep -q '^[*] ra_(neon|serf)' \ + "$CLIENT_CMD" --version | egrep '^[*] ra_(neon|serf)' >/dev/null \ || fail "Subversion client couldn't find and/or load ra_dav library" else say "Requesting dav library '$HTTP_LIBRARY'" - "$CLIENT_CMD" --version | egrep -q "^[*] ra_$HTTP_LIBRARY" \ + "$CLIENT_CMD" --version | egrep "^[*] ra_$HTTP_LIBRARY" >/dev/null \ || fail "Subversion client couldn't find and/or load ra_dav library '$HTTP_LIBRARY'" fi if [ $# = 0 ]; then - $TIME_CMD make check "BASE_URL=$BASE_URL" $SSL_MAKE_VAR + $TIME_CMD "$MAKE" check "BASE_URL=$BASE_URL" $SSL_MAKE_VAR r=$? 
else (cd "$ABS_BUILDDIR/subversion/tests/cmdline/" diff --git a/subversion/tests/cmdline/depth_tests.py b/subversion/tests/cmdline/depth_tests.py index 18af743..5a1d2c7 100755 --- a/subversion/tests/cmdline/depth_tests.py +++ b/subversion/tests/cmdline/depth_tests.py @@ -73,7 +73,6 @@ def set_up_depthy_working_copies(sbox, empty=False, files=False, wc_empty = sbox.wc_dir + '-depth-empty' sbox.add_test_path(wc_empty, True) svntest.actions.run_and_verify_svn( - "Unexpected error from co --depth=empty", svntest.verify.AnyOutput, [], "co", "--depth", "empty", sbox.repo_url, wc_empty) @@ -82,7 +81,6 @@ def set_up_depthy_working_copies(sbox, empty=False, files=False, wc_files = sbox.wc_dir + '-depth-files' sbox.add_test_path(wc_files, True) svntest.actions.run_and_verify_svn( - "Unexpected error from co --depth=files", svntest.verify.AnyOutput, [], "co", "--depth", "files", sbox.repo_url, wc_files) @@ -91,7 +89,6 @@ def set_up_depthy_working_copies(sbox, empty=False, files=False, wc_immediates = sbox.wc_dir + '-depth-immediates' sbox.add_test_path(wc_immediates, True) svntest.actions.run_and_verify_svn( - "Unexpected error from co --depth=immediates", svntest.verify.AnyOutput, [], "co", "--depth", "immediates", sbox.repo_url, wc_immediates) @@ -102,7 +99,7 @@ def verify_depth(msg, depth, path="."): """Verifies that PATH has depth DEPTH. MSG is the failure message.""" if depth == "infinity": # Check for absence of depth line. 
- exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], "info", path) for line in out: if line.startswith("Depth:"): @@ -111,7 +108,7 @@ def verify_depth(msg, depth, path="."): expected_stdout = svntest.verify.ExpectedOutput("Depth: %s\n" % depth, match_all=False) svntest.actions.run_and_verify_svn( - msg, expected_stdout, [], "info", path) + expected_stdout, [], "info", path) #---------------------------------------------------------------------- # Ensure that 'checkout --depth=empty' results in a depth-empty working copy. @@ -144,8 +141,7 @@ def depth_files_same_as_nonrecursive(sbox, opt): if os.path.exists(sbox.wc_dir): svntest.main.safe_rmtree(sbox.wc_dir) - svntest.actions.run_and_verify_svn("Unexpected error during co %s" % opt, - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "co", opt, sbox.repo_url, sbox.wc_dir) # Should create a depth-files top directory, so both iota and A @@ -187,8 +183,7 @@ def depth_empty_update_bypass_single_file(sbox): expected_status.tweak('iota', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update the depth-empty wc, expecting not to receive the change to iota. expected_output = svntest.wc.State(wc_empty, { }) @@ -198,8 +193,7 @@ def depth_empty_update_bypass_single_file(sbox): svntest.actions.run_and_verify_update(wc_empty, expected_output, expected_disk, - expected_status, - None, None, None, None, None) + expected_status) # And the wc should still be depth-empty. verify_depth(None, "empty", wc_empty) @@ -210,7 +204,7 @@ def depth_empty_update_bypass_single_file(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, "--depth=infinity", wc_empty) # And the wc should still be depth-empty. 
@@ -239,8 +233,7 @@ def depth_immediates_get_top_file_mod_only(sbox): expected_status.tweak('A/mu', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update the depth-immediates wc, expecting to receive only the # change to iota. @@ -259,8 +252,7 @@ def depth_immediates_get_top_file_mod_only(sbox): svntest.actions.run_and_verify_update(wc_immediates, expected_output, expected_disk, - expected_status, - None, None, None, None, None) + expected_status) verify_depth(None, "immediates", wc_immediates) @@ -275,7 +267,7 @@ def depth_empty_commit(sbox): wc_empty_iota = os.path.join(wc_empty, 'iota') # Update 'iota' in the depth-empty working copy and modify it - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_empty_iota) svntest.main.file_write(wc_empty_iota, "iota modified") @@ -290,9 +282,7 @@ def depth_empty_commit(sbox): }) svntest.actions.run_and_verify_commit(wc_empty, expected_output, - expected_status, - None, - wc_empty) + expected_status) #---------------------------------------------------------------------- def depth_empty_with_file(sbox): @@ -310,7 +300,7 @@ def depth_empty_with_file(sbox): ### minutes of trying to figure out how, I decided to compromise. # Update iota by name, expecting to receive it. - svntest.actions.run_and_verify_svn(None, None, [], 'up', iota_path) + svntest.actions.run_and_verify_svn(None, [], 'up', iota_path) # Test that we did receive it. if not os.path.exists(iota_path): @@ -324,19 +314,17 @@ def depth_empty_with_file(sbox): expected_status.tweak('iota', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Delete iota in the "other" wc. 
other_iota_path = os.path.join(wc, 'iota') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', other_iota_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', other_iota_path) expected_output = svntest.wc.State(wc, { 'iota' : Item(verb='Deleting'), }) expected_status = svntest.actions.get_virginal_state(wc, 1) expected_status.remove('iota') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update the depth-empty wc just a little, expecting to receive # the change in iota. @@ -351,7 +339,7 @@ def depth_empty_with_file(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '-r2', wc_empty) # Update the depth-empty wc all the way, expecting to receive the deletion @@ -364,8 +352,7 @@ def depth_empty_with_file(sbox): svntest.actions.run_and_verify_update(wc_empty, expected_output, expected_disk, - expected_status, - None, None, None, None, None) + expected_status) #---------------------------------------------------------------------- @@ -409,8 +396,7 @@ def depth_empty_with_dir(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, A_path) # Commit a change to A/mu in the "other" wc. @@ -421,8 +407,7 @@ def depth_empty_with_dir(sbox): expected_status.tweak('A/mu', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update "A" by name in wc_empty, expect to receive the change to A/mu. expected_output = svntest.wc.State(wc_empty, { 'A/mu' : Item(status='U ') }) @@ -436,20 +421,18 @@ def depth_empty_with_dir(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, A_path) # Commit the deletion of A/mu from the "other" wc. 
svntest.main.file_write(other_mu_path, "new text\n") - svntest.actions.run_and_verify_svn(None, None, [], 'rm', other_mu_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', other_mu_path) expected_output = svntest.wc.State(wc, { 'A/mu' : Item(verb='Deleting'), }) expected_status = svntest.actions.get_virginal_state(wc, 1) expected_status.remove('A/mu') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update "A" by name in wc_empty, expect to A/mu to disappear. @@ -465,8 +448,7 @@ def depth_empty_with_dir(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, A_path) @@ -499,31 +481,13 @@ def depth_immediates_bring_in_file(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, None, + [], False, A_mu_path) # Run 'svn up A/D/gamma' to test the edge case 'Skipped'. - expected_output = svntest.wc.State(wc_imm, { - 'A/D/gamma' : Item(verb='Skipped'), - }) - expected_disk = svntest.main.greek_state.copy() - expected_disk.remove('A/C', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha', - 'A/B/E/beta', 'A/B/F', 'A/B', 'A/D/gamma', 'A/D/G', - 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 'A/D/H/chi', - 'A/D/H/psi', 'A/D/H/omega', 'A/D/H', 'A/D') - expected_status = svntest.actions.get_virginal_state(wc_imm, 1) - expected_status.remove('A/C', 'A/B/lambda', 'A/B/E', 'A/B/E/alpha', - 'A/B/E/beta', 'A/B/F', 'A/B', 'A/D/gamma', 'A/D/G', - 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 'A/D/H/chi', - 'A/D/H/psi', 'A/D/H/omega', 'A/D/H', 'A/D') - svntest.actions.run_and_verify_update(wc_imm, - expected_output, - expected_disk, - expected_status, - None, None, None, - None, None, None, - gamma_path) + svntest.actions.run_and_verify_svn(["Skipped '"+gamma_path+"'\n", ], + "svn: E155007: ", 'update', gamma_path) + svntest.actions.run_and_verify_status(wc_imm, expected_status) #---------------------------------------------------------------------- def 
depth_immediates_fill_in_dir(sbox): @@ -560,8 +524,7 @@ def depth_immediates_fill_in_dir(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'infinity', A_path) @@ -593,8 +556,7 @@ def depth_mixed_bring_in_dir(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files', A_path) # Check that A was added at depth=files. @@ -621,8 +583,7 @@ def depth_mixed_bring_in_dir(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', B_path) # Check that A/B was added at depth=immediates. @@ -646,8 +607,7 @@ def depth_mixed_bring_in_dir(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'empty', C_path) # Check that A/C was added at depth=empty. @@ -664,14 +624,13 @@ def depth_empty_unreceive_delete(sbox): iota_path = os.path.join(wc, 'iota') # Commit in the "other" wc. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', iota_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path) expected_output = svntest.wc.State(wc, { 'iota' : Item(verb='Deleting'), }) expected_status = svntest.actions.get_virginal_state(wc, 1) expected_status.remove('iota') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update the depth-empty wc, expecting not to receive the deletion of iota. 
expected_output = svntest.wc.State(wc_empty, { }) @@ -681,8 +640,7 @@ def depth_empty_unreceive_delete(sbox): svntest.actions.run_and_verify_update(wc_empty, expected_output, expected_disk, - expected_status, - None, None, None, None, None) + expected_status) #---------------------------------------------------------------------- @@ -698,14 +656,13 @@ def depth_immediates_unreceive_delete(sbox): mu_path = os.path.join(wc, 'A', 'mu') # Commit in the "other" wc. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', mu_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', mu_path) expected_output = svntest.wc.State(wc, { 'A/mu' : Item(verb='Deleting'), }) expected_status = svntest.actions.get_virginal_state(wc, 1) expected_status.remove('A/mu') svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update the depth-immediates wc, expecting not to receive the deletion # of A/mu. @@ -722,8 +679,7 @@ def depth_immediates_unreceive_delete(sbox): svntest.actions.run_and_verify_update(wc_immed, expected_output, expected_disk, - expected_status, - None, None, None, None, None) + expected_status) #---------------------------------------------------------------------- def depth_immediates_receive_delete(sbox): @@ -738,7 +694,7 @@ def depth_immediates_receive_delete(sbox): A_path = os.path.join(wc, 'A') # Commit in the "other" wc. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', A_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', A_path) expected_output = svntest.wc.State(wc, { 'A' : Item(verb='Deleting'), }) expected_status = svntest.wc.State(wc, { '' : Item(status=' ', wc_rev=1), @@ -746,8 +702,7 @@ def depth_immediates_receive_delete(sbox): }) svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update the depth-immediates wc, expecting to receive the deletion of A. 
expected_output = svntest.wc.State(wc_immed, { @@ -763,8 +718,7 @@ def depth_immediates_receive_delete(sbox): svntest.actions.run_and_verify_update(wc_immed, expected_output, expected_disk, - expected_status, - None, None, None, None, None) + expected_status) #---------------------------------------------------------------------- def depth_immediates_subdir_propset_1(sbox): @@ -775,7 +729,7 @@ def depth_immediates_subdir_propset_1(sbox): A_path = os.path.join(wc_immediates, 'A') # Set a property on an immediate subdirectory of the working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'pset', 'foo', 'bar', A_path) @@ -795,7 +749,7 @@ def depth_immediates_subdir_propset_1(sbox): svntest.actions.run_and_verify_commit(wc_immediates, expected_output, expected_status, - None, + [], A_path) # Create expected output tree for the update. @@ -814,7 +768,7 @@ def depth_immediates_subdir_propset_1(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1) + check_props=True) #---------------------------------------------------------------------- def depth_immediates_subdir_propset_2(sbox): @@ -829,16 +783,15 @@ def depth_immediates_subdir_propset_2(sbox): A_path = sbox.ospath('A') # Set a property on an immediate subdirectory of the working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'pset', 'foo', 'bar', A_path) # Commit. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'logmsg', A_path) # Update at depth=immediates in the other wc, expecting to see no errors. 
- svntest.actions.run_and_verify_svn("Output on stderr where none expected", - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'update', '--depth', 'immediates', other_wc) @@ -865,8 +818,7 @@ def depth_update_to_more_depth(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files') verify_depth(None, "files") @@ -887,8 +839,7 @@ def depth_update_to_more_depth(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates') verify_depth(None, "immediates") verify_depth(None, "empty", "A") @@ -912,8 +863,7 @@ def depth_update_to_more_depth(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files', 'A') verify_depth(None, "immediates") verify_depth(None, "files", "A") @@ -944,8 +894,7 @@ def depth_update_to_more_depth(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'infinity') verify_depth("Non-infinity depth detected after an upgrade to depth-infinity", "infinity") @@ -969,13 +918,13 @@ def commit_propmods_with_depth_empty_helper(sbox, depth_arg): chi_path = os.path.join(H_path, 'chi') # Set some properties, modify some files. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo-val', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bar', 'bar-val', D_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'baz', 'baz-val', G_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'qux', 'qux-val', H_path) svntest.main.file_append(iota_path, "new iota\n") svntest.main.file_append(gamma_path, "new gamma\n") @@ -1003,7 +952,7 @@ def commit_propmods_with_depth_empty_helper(sbox, depth_arg): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], depth_arg, wc_dir, D_path) @@ -1032,18 +981,18 @@ def diff_in_depthy_wc(sbox): gamma_path = os.path.join(wc, 'A', 'D', 'gamma') # Make some changes in the depth-infinity wc, and commit them - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo-val', wc) svntest.main.file_write(iota_path, "new text\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bar', 'bar-val', A_path) svntest.main.file_write(mu_path, "new text\n") svntest.main.file_write(gamma_path, "new text\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', wc) - from diff_tests import make_diff_header, make_diff_prop_header - from diff_tests import make_diff_prop_deleted, make_diff_prop_added + from svntest.verify import make_diff_header, make_diff_prop_header, \ + make_diff_prop_deleted, make_diff_prop_added diff_mu = make_diff_header('A/mu', 'revision 2', 'working copy') + [ "@@ -1 +1 @@\n", "-new text\n", @@ -1063,48 +1012,48 @@ def diff_in_depthy_wc(sbox): expected_output = 
svntest.verify.UnorderedOutput(diff_dot) # The diff should contain only the propchange on '.' - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-rHEAD') # Upgrade to depth-files. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'files', '-r1') # The diff should contain only the propchange on '.' and the # contents change on iota. expected_output = svntest.verify.UnorderedOutput(diff_iota + diff_dot) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-rHEAD') # Do a diff at --depth empty. expected_output = svntest.verify.UnorderedOutput(diff_dot) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--depth', 'empty', '-rHEAD') # Upgrade to depth-immediates. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'immediates', '-r1') # The diff should contain the propchanges on '.' and 'A' and the # contents change on iota. expected_output = svntest.verify.UnorderedOutput(diff_A + diff_iota + diff_dot) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-rHEAD') # Do a diff at --depth files. expected_output = svntest.verify.UnorderedOutput(diff_iota + diff_dot) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--depth', 'files', '-rHEAD') # Upgrade A to depth-files. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'files', '-r1', 'A') # The diff should contain everything but the contents change on # gamma (which does not exist in this working copy). 
expected_output = svntest.verify.UnorderedOutput(diff_mu + diff_A + diff_iota + diff_dot) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-rHEAD') # Do a diff at --depth immediates. expected_output = svntest.verify.UnorderedOutput(diff_A + diff_iota + diff_dot) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--depth', 'immediates', '-rHEAD') @Issue(2882) @@ -1151,6 +1100,7 @@ def commit_depth_immediates(sbox): # Sending A/D/G/rho # Sending iota # Transmitting file data .. + # Committing transaction... # Committed revision 2. iota_path = sbox.ospath('iota') @@ -1173,7 +1123,7 @@ def commit_depth_immediates(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], '--depth', 'immediates', wc_dir, G_path) @@ -1192,7 +1142,7 @@ def depth_immediates_receive_new_dir(sbox): svntest.main.file_write(zeta_path, "This is the file 'zeta'.\n") # Commit in the "other" wc. - svntest.actions.run_and_verify_svn(None, None, [], 'add', I_path) + svntest.actions.run_and_verify_svn(None, [], 'add', I_path) expected_output = svntest.wc.State(wc, { 'I' : Item(verb='Adding'), 'I/zeta' : Item(verb='Adding'), @@ -1204,8 +1154,7 @@ def depth_immediates_receive_new_dir(sbox): }) svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, wc) + expected_status) # Update the depth-immediates wc, expecting to receive just the # new directory, without the file. @@ -1226,8 +1175,7 @@ def depth_immediates_receive_new_dir(sbox): svntest.actions.run_and_verify_update(wc_immed, expected_output, expected_disk, - expected_status, - None, None, None, None, None) + expected_status) # Check that the new directory was added at depth=empty. 
verify_depth(None, "empty", other_I_path) @@ -1245,20 +1193,20 @@ def add_tree_with_depth(sbox): os.mkdir(new3_path) os.mkdir(new4_path) # Simple case, add new1 only, set depth to files - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "add", "--depth", "files", new1_path) verify_depth(None, "infinity", new1_path) # Force add new1 at new1 again, should include new2 at empty, the depth of # new1 should not change - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "add", "--depth", "immediates", "--force", new1_path) verify_depth(None, "infinity", new1_path) verify_depth(None, "infinity", new2_path) # add new4 with intermediate path, the intermediate path is added at empty - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "add", "--depth", "immediates", "--parents", new4_path) verify_depth(None, "infinity", new3_path) @@ -1302,8 +1250,8 @@ def upgrade_from_above(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, None, - '--set-depth=files') + [], False, + '--set-depth=files', '.') verify_depth(None, "files") finally: os.chdir(saved_cwd) @@ -1324,7 +1272,7 @@ def upgrade_from_above(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, None, + [], False, '--set-depth=files', wc) verify_depth(None, "files", wc) @@ -1340,14 +1288,14 @@ def status_in_depthy_wc(sbox): gamma_path = os.path.join(wc, 'A', 'D', 'gamma') # Make some changes in the depth-infinity wc, and commit them - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo-val', wc) svntest.main.file_write(iota_path, "new text\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bar', 'bar-val', A_path) svntest.main.file_write(mu_path, "new text\n") 
svntest.main.file_write(gamma_path, "new text\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', wc) status = [ @@ -1362,44 +1310,44 @@ def status_in_depthy_wc(sbox): expected_output = svntest.verify.UnorderedOutput(status[:2]) # The output should contain only the change on '.'. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '-u') # Upgrade to depth-files. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'files', '-r1') # The output should contain only the changes on '.' and 'iota'. expected_output = svntest.verify.UnorderedOutput(status[:3]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '-u') # Do a status -u at --depth empty. expected_output = svntest.verify.UnorderedOutput(status[:2]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '-u', '--depth', 'empty') # Upgrade to depth-immediates. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'immediates', '-r1') # The output should contain the changes on '.', 'A' and 'iota'. expected_output = svntest.verify.UnorderedOutput(status[:4]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '-u') # Do a status -u at --depth files. expected_output = svntest.verify.UnorderedOutput(status[:3]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '-u', '--depth', 'files') # Upgrade A to depth-files. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'files', '-r1', 'A') # The output should contain everything but the change on # gamma (which does not exist in this working copy). expected_output = svntest.verify.UnorderedOutput(status) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '-u') # Do a status -u at --depth immediates. expected_output = svntest.verify.UnorderedOutput(status[:4]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '-u', '--depth', 'immediates') #---------------------------------------------------------------------- @@ -1417,7 +1365,7 @@ def depthy_update_above_dir_to_be_deleted(sbox): } exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], "delete", "-m", "Delete A.", sbox.repo_url + "/A") def empty_output(wc_dir): @@ -1466,8 +1414,7 @@ def depthy_update_above_dir_to_be_deleted(sbox): expected_output_func(wc_dir), expected_disk, expected_status_func(wc_dir), - None, None, None, None, None, - False, + [], False, "--depth=%s" % depth, wc_dir) @@ -1512,8 +1459,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', E_path) verify_depth(None, "immediates", E_path) @@ -1523,8 +1469,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files', E_path) verify_depth(None, "files", E_path) @@ -1540,8 +1485,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', B_path) verify_depth(None, "immediates", B_path) verify_depth(None, "empty", 
E_path) @@ -1560,8 +1504,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'empty', H_path) verify_depth(None, "empty", H_path) @@ -1578,8 +1521,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', D_path) verify_depth(None, "immediates", D_path) verify_depth(None, "empty", G_path) @@ -1597,8 +1539,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'empty', D_path) verify_depth(None, "empty", D_path) @@ -1615,8 +1556,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', A_path) verify_depth(None, "immediates", A_path) verify_depth(None, "empty", C_path) @@ -1635,8 +1575,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files', A_path) verify_depth(None, "files", A_path) @@ -1651,8 +1590,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'empty', A_path) verify_depth(None, "empty", A_path) @@ -1667,8 +1605,7 @@ def depth_folding_clean_trees_1(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files', wc_dir) verify_depth(None, "files", wc_dir) @@ -1693,13 +1630,13 @@ def depth_folding_clean_trees_2(sbox): G_path = os.path.join(D_path, 'G') # pull in directory A at immediates - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--depth', 'immediates', A_path) # check 
to see if it's really at immediates verify_depth(None, "immediates", A_path) # pull in directory D at infinity - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'infinity', D_path) # Run 'svn up --set-depth=immediates' to directory A/D. @@ -1739,8 +1676,7 @@ def depth_folding_clean_trees_2(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', D_path) verify_depth(None, "immediates", D_path) verify_depth(None, "empty", G_path) @@ -1759,14 +1695,13 @@ def depth_folding_clean_trees_2(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', A_path) verify_depth(None, "immediates", A_path) verify_depth(None, "empty", D_path) # pull in directory D at infinity - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'infinity', D_path) # Run 'svn up --set-depth=immediates' to directory A. @@ -1780,14 +1715,13 @@ def depth_folding_clean_trees_2(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', A_path) verify_depth(None, "immediates", A_path) verify_depth(None, "empty", D_path) # pull in directory D at files - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'files', D_path) # Run 'svn up --set-depth=immediates' to directory A. 
@@ -1799,8 +1733,7 @@ def depth_folding_clean_trees_2(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', A_path) verify_depth(None, "immediates", A_path) verify_depth(None, "empty", D_path) @@ -1826,8 +1759,7 @@ def depth_folding_clean_trees_2(sbox): # expected_output, # expected_disk, # expected_status, -# None, None, -# None, None, None, None, +# [], False, # '--set-depth', 'empty', A_path) def depth_fold_expand_clean_trees(sbox): @@ -1843,12 +1775,12 @@ def depth_fold_expand_clean_trees(sbox): D_path = os.path.join(A_path, 'D') # pull in directory A at empty - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--depth', 'empty', A_path) verify_depth(None, "empty", A_path) # pull in directory D at infinity - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', D_path) # Make the other working copy. @@ -1884,8 +1816,7 @@ def depth_fold_expand_clean_trees(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', A_path) verify_depth(None, "immediates", A_path) verify_depth(None, "empty", B_path) @@ -1912,8 +1843,7 @@ def depth_fold_expand_clean_trees(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files', Other_A_path) verify_depth(None, "files", Other_A_path) @@ -1950,7 +1880,7 @@ def pull_in_tree_with_depth_option(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, "--depth=immediates", A_path) # Check that the A directory was pull ed in at depth=immediates. 
@@ -1976,22 +1906,29 @@ def fold_tree_with_unversioned_modified_items(sbox): # Fold the A dir to empty, expect the modified & unversioned ones left # unversioned rather than removed, along with paths to those items. - # Even though the directory B and D is not deleted because of local - # modificatoin or unversioned items, there will be only one notification at - # B and D. + # Directories B and D won't be deleted, because that would remove their + # local modifications. Their unmodified descendants are deleted though. expected_output = svntest.wc.State(wc_dir, { - 'A/B' : Item(status='D '), + 'A/B/E' : Item(status='D '), + 'A/B/F' : Item(status='D '), + 'A/B/lambda' : Item(status='D '), 'A/C' : Item(status='D '), - 'A/D' : Item(status='D '), - 'A/mu' : Item(status='D '), + 'A/D/G/rho' : Item(status='D '), + 'A/D/G/tau' : Item(status='D '), + 'A/D/H' : Item(status='D '), + 'A/D/gamma' : Item(status='D '), }) # unversioned items will be ignored in in the status tree, since the # run_and_verify_update() function uses a quiet version of svn status - # Dir A is still versioned, since the wc root is in depth-infinity expected_status = svntest.wc.State(wc_dir, { '' : Item(status=' ', wc_rev=1), 'iota' : Item(status=' ', wc_rev=1), - 'A' : Item(status=' ', wc_rev=1) + 'A' : Item(status=' ', wc_rev=1), + 'A/D' : Item(status=' ', wc_rev='1'), + 'A/D/G' : Item(status=' ', wc_rev='1'), + 'A/D/G/pi' : Item(status='M ', wc_rev='1'), + 'A/B' : Item(status=' ', wc_rev='1'), + 'A/mu' : Item(status='M ', wc_rev='1'), }) expected_disk = svntest.wc.State('', { 'iota' : Item(contents="This is the file 'iota'.\n"), @@ -2007,8 +1944,7 @@ def fold_tree_with_unversioned_modified_items(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'empty', A_path) verify_depth(None, "empty", A_path) @@ -2026,8 +1962,7 @@ def depth_empty_update_on_file(sbox): expected_status.tweak('iota', wc_rev=2, status=' ') 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Update iota with depth=empty. expected_output = svntest.wc.State(wc_dir, @@ -2038,7 +1973,7 @@ def depth_empty_update_on_file(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '--depth=empty', '-r1', iota_path) # Check the revision and created rev. @@ -2074,8 +2009,7 @@ def excluded_path_update_operation(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'exclude', E_path) # verify_depth exclude? not implemented yet @@ -2093,13 +2027,12 @@ def excluded_path_update_operation(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', B_path) verify_depth(None, "immediates", B_path) # Exclude A/B/E again - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'exclude', E_path) # Exclude path B totally, in which contains an excluded subtree. @@ -2112,8 +2045,7 @@ def excluded_path_update_operation(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'exclude', B_path) # Explicitly pull in excluded path B. @@ -2131,8 +2063,7 @@ def excluded_path_update_operation(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, B_path) # Test issue # @@ -2146,8 +2077,7 @@ def excluded_path_update_operation(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'exclude', iota_path) # Update the whole WC to depth=infinity. 
@@ -2163,8 +2093,7 @@ def excluded_path_update_operation(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'infinity', wc_dir) def excluded_path_misc_operation(sbox): @@ -2192,13 +2121,12 @@ def excluded_path_misc_operation(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'exclude', E_path) # copy A/B to A/L, excluded entry should be copied too expected_output = ['A '+L_path+'\n'] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'cp', B_path, L_path) # verify_depth exclude? not implemented yet #verify_depth(None, "empty", LE_path) @@ -2209,16 +2137,16 @@ def excluded_path_misc_operation(sbox): expected_output = svntest.verify.UnorderedOutput([ "Reverted '%s'\n" % path for path in revert_paths]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '--depth=infinity', L_path) # copy A/B to A/L and then cp A/L to A/M, excluded entry should be # copied both times expected_output = ['A '+L_path+'\n'] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'cp', B_path, L_path) expected_output = ['A '+M_path+'\n'] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'cp', L_path, M_path) # commit this copy, with an excluded item. @@ -2236,9 +2164,7 @@ def excluded_path_misc_operation(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Relocate wc, with excluded items in it. 
repo_dir = sbox.repo_dir @@ -2246,17 +2172,17 @@ def excluded_path_misc_operation(sbox): other_repo_dir, other_repo_url = sbox.add_repo_path('other') svntest.main.copy_repos(repo_dir, other_repo_dir, 2, 0) svntest.main.safe_rmtree(repo_dir, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'switch', '--relocate', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate', repo_url, other_repo_url, wc_dir) # remove the new directory A/L, with an excluded item. # If successful, no error will be thrown - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', L_path) # revert the delete # If successful, no error will be thrown - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--depth=infinity', L_path) @@ -2283,12 +2209,11 @@ def excluded_receive_remote_removal(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, "--set-depth", "exclude", B_path) # Remove path B in the repos. - svntest.actions.run_and_verify_svn(None, None, [], "delete", "-m", + svntest.actions.run_and_verify_svn(None, [], "delete", "-m", "Delete B.", sbox.repo_url + "/A/B") # Update wc, should receive the removal of excluded path B @@ -2299,15 +2224,13 @@ def excluded_receive_remote_removal(sbox): svntest.actions.run_and_verify_update(wc, None, expected_disk, - expected_status, - None, None, - None, None, None, None) + expected_status) # Introduce a new path with the same name B. # This should succeed if the exclude entry is gone with the update, # otherwise a name conflict will arise. expected_output = ['A '+B_path+'\n'] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'cp', C_path, B_path) @@ -2327,7 +2250,7 @@ def exclude_keeps_hidden_entries(sbox): # we could grep the 'entries' file, but... # or we could use 'info', but info_excluded() is XFail.
expected_stderr = ".*svn: E150002: '.*C' is already under version control.*" - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'mkdir', 'C') @@ -2383,7 +2306,7 @@ def make_depth_tree_conflicts(sbox): g = j(D, 'gamma') # Store node modifications as rev 2 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo-val', B) svntest.main.file_append(m, "Modified mu.\n") svntest.main.file_append(g, "Modified gamma.\n") @@ -2401,7 +2324,7 @@ def make_depth_tree_conflicts(sbox): svntest.actions.run_and_verify_commit(wc, expected_output, expected_status, - None, + [], A) # Go back to rev 1 @@ -2416,12 +2339,12 @@ def make_depth_tree_conflicts(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '-r1', A) # Perform node deletions so that items become unversioned and # will have tree-conflicts upon update. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', m, B, g) # Update so that conflicts appear @@ -2458,7 +2381,7 @@ def make_depth_tree_conflicts(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, wc) @@ -2538,8 +2461,7 @@ def update_excluded_path_sticky_depths(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'exclude', B_path) # Update to depth 'empty' for the excluded path A/B @@ -2556,8 +2478,7 @@ def update_excluded_path_sticky_depths(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'empty', B_path) verify_depth(None, "empty", B_path) expected_info = { @@ -2569,7 +2490,7 @@ def update_excluded_path_sticky_depths(sbox): svntest.actions.run_and_verify_info([expected_info], B_path) # Exclude A/B again - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'exclude', B_path) # Update to depth 'files' for the excluded path A/B @@ -2589,8 +2510,7 @@ def update_excluded_path_sticky_depths(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'files', B_path) verify_depth(None, "files", B_path) expected_info = { @@ -2602,7 +2522,7 @@ def update_excluded_path_sticky_depths(sbox): svntest.actions.run_and_verify_info([expected_info], B_path) # Exclude A/B again - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'exclude', B_path) # Update to depth 'immediates' for the excluded path A/B @@ -2628,8 +2548,7 @@ def update_excluded_path_sticky_depths(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'immediates', B_path) verify_depth(None, 
"immediates", B_path) expected_info = { @@ -2641,7 +2560,7 @@ def update_excluded_path_sticky_depths(sbox): svntest.actions.run_and_verify_info([expected_info], B_path) # Exclude A/B again - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'exclude', B_path) # Update to depth 'infinity' for the excluded path A/B @@ -2659,8 +2578,7 @@ def update_excluded_path_sticky_depths(sbox): expected_output, expected_disk, expected_status, - None, None, - None, None, None, None, + [], False, '--set-depth', 'infinity', B_path) verify_depth(None, "infinity", B_path) expected_info = { @@ -2681,7 +2599,7 @@ def update_depth_empty_root_of_infinite_children(sbox): A_path = os.path.join(wc_dir, 'A') # Update A to depth 'infinity' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'infinity', A_path) # Tweak some files in the full working copy and commit. @@ -2689,7 +2607,7 @@ def update_depth_empty_root_of_infinite_children(sbox): "Modified alpha.\n") svntest.main.file_append(os.path.join(wc_other, 'A', 'D', 'G', 'rho'), "Modified rho.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', '', wc_other) # Now update the original working copy and make sure we get those changes. @@ -2706,9 +2624,7 @@ def update_depth_empty_root_of_infinite_children(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, - None, None, - None, None, None, None, wc_dir) + expected_status) def sparse_update_with_dash_dash_parents(sbox): """update --parents""" @@ -2721,7 +2637,6 @@ def sparse_update_with_dash_dash_parents(sbox): # Start with a depth=empty root checkout. 
svntest.actions.run_and_verify_svn( - "Unexpected error from co --depth=empty", svntest.verify.AnyOutput, [], "co", "--depth", "empty", sbox.repo_url, sbox.wc_dir) @@ -2749,7 +2664,7 @@ def sparse_update_with_dash_dash_parents(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '--parents', alpha_path) expected_output = svntest.wc.State(sbox.wc_dir, { @@ -2771,7 +2686,7 @@ def sparse_update_with_dash_dash_parents(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '--parents', pi_path) expected_output = svntest.wc.State(sbox.wc_dir, { @@ -2790,7 +2705,7 @@ def sparse_update_with_dash_dash_parents(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '--parents', omega_path) def update_below_depth_empty(sbox): @@ -2807,11 +2722,11 @@ def update_below_depth_empty(sbox): 'A/D' : Item(status='D '), }) svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None, - None, None, None, None, None, None, - False, + None, + [], False, '--set-depth', 'empty', A) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', repo_url + '/iota', repo_url + '/A/B', '-m', 'remote copy') @@ -2821,7 +2736,7 @@ def update_below_depth_empty(sbox): # This update should just update the revision of the working copy svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None, - None, None) + None) # Test for issue #4136. @Issue(4136) @@ -2842,9 +2757,7 @@ def commit_then_immediates_update(sbox): expected_status.tweak('A/mu', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Now, update --depth immediates in the root of the working copy. 
expected_output = svntest.wc.State(wc_dir, { }) @@ -2860,7 +2773,7 @@ def commit_then_immediates_update(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, "--depth=immediates", wc_dir) def revert_depth_files(sbox): @@ -2875,7 +2788,7 @@ def revert_depth_files(sbox): sbox.simple_rm('A/mu') # Expect reversion of just 'mu' - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '--depth=immediates', sbox.ospath('A')) # Apply an unrelated directory delete @@ -2883,7 +2796,7 @@ def revert_depth_files(sbox): sbox.simple_rm('A/mu') # Expect reversion of just 'mu' - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '--depth=files', sbox.ospath('A')) @Issue(4257) @@ -2901,13 +2814,88 @@ def spurious_nodes_row(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, "--depth=empty", sbox.wc_dir) val2 = svntest.wc.sqlite_stmt(sbox.wc_dir, "select count(*) from nodes") if (val1 != val2): # ra_neon added a spurious not-present row that does not show up in status raise svntest.Failure("count changed from '%s' to '%s'" % (val1, val2)) +def commit_excluded(sbox): + "commit an excluded node" + + sbox.build() + wc_dir = sbox.wc_dir + + expected_output = svntest.wc.State(wc_dir, { + 'A/D/G' : Item(status='D '), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau') + + svntest.actions.run_and_verify_update(wc_dir, + expected_output, + None, + expected_status, + [], False, + "--set-depth=exclude", + sbox.ospath('A/D/G')) + + sbox.simple_copy('A/D', 'D') + + expected_output = svntest.wc.State(wc_dir, { + 'D' : Item(verb='Adding'), + }) + + expected_status.add({ + 'D' : Item(status=' ', wc_rev='2'), + 'D/H' : Item(status=' ', wc_rev='2'), + 
'D/H/chi' : Item(status=' ', wc_rev='2'), + 'D/H/psi' : Item(status=' ', wc_rev='2'), + 'D/H/omega' : Item(status=' ', wc_rev='2'), + 'D/gamma' : Item(status=' ', wc_rev='2') + }) + + svntest.actions.run_and_verify_commit(wc_dir, + expected_output, + expected_status) + + expected_output = svntest.wc.State(wc_dir, { + 'A/D/G' : Item(status='A '), + 'A/D/G/pi' : Item(status='A '), + 'A/D/G/tau' : Item(status='A '), + 'A/D/G/rho' : Item(status='A '), + 'D/G' : Item(status='A '), + 'D/G/pi' : Item(status='A '), + 'D/G/tau' : Item(status='A '), + 'D/G/rho' : Item(status='A ') + }) + + expected_status.tweak(wc_rev=2) + + expected_status.add({ + 'D' : Item(status=' ', wc_rev='2'), + 'D/G' : Item(status=' ', wc_rev='2'), + 'D/G/pi' : Item(status=' ', wc_rev='2'), + 'D/G/rho' : Item(status=' ', wc_rev='2'), + 'D/G/tau' : Item(status=' ', wc_rev='2'), + 'D/H' : Item(status=' ', wc_rev='2'), + 'D/H/chi' : Item(status=' ', wc_rev='2'), + 'D/H/psi' : Item(status=' ', wc_rev='2'), + 'D/H/omega' : Item(status=' ', wc_rev='2'), + 'D/gamma' : Item(status=' ', wc_rev='2'), + 'A/D/G' : Item(status=' ', wc_rev='2'), + 'A/D/G/rho' : Item(status=' ', wc_rev='2'), + 'A/D/G/tau' : Item(status=' ', wc_rev='2'), + 'A/D/G/pi' : Item(status=' ', wc_rev='2') + }) + + svntest.actions.run_and_verify_update(wc_dir, + expected_output, + None, + expected_status, + [], False, + "--set-depth=infinity", wc_dir) #---------------------------------------------------------------------- # list all tests here, starting with None: @@ -2959,6 +2947,7 @@ test_list = [ None, commit_then_immediates_update, revert_depth_files, spurious_nodes_row, + commit_excluded, ] if __name__ == "__main__": diff --git a/subversion/tests/cmdline/diff_tests.py b/subversion/tests/cmdline/diff_tests.py index aae8c46..f21f887 100755 --- a/subversion/tests/cmdline/diff_tests.py +++ b/subversion/tests/cmdline/diff_tests.py @@ -35,6 +35,11 @@ import svntest from svntest import err, wc from prop_tests import 
binary_mime_type_on_text_file_warning +from svntest.verify import make_diff_header, make_no_diff_deleted_header, \ + make_diff_header, make_no_diff_deleted_header, \ + make_git_diff_header, make_diff_prop_header, \ + make_diff_prop_val, make_diff_prop_deleted, \ + make_diff_prop_added, make_diff_prop_modified # (abbreviation) Skip = svntest.testcase.Skip_deco @@ -49,172 +54,6 @@ Item = svntest.wc.StateItem ###################################################################### # Generate expected output -def make_diff_header(path, old_tag, new_tag, src_label=None, dst_label=None): - """Generate the expected diff header for file PATH, with its old and new - versions described in parentheses by OLD_TAG and NEW_TAG. SRC_LABEL and - DST_LABEL are paths or urls that are added to the diff labels if we're - diffing against the repository or diffing two arbitrary paths. - Return the header as an array of newline-terminated strings.""" - if src_label: - src_label = src_label.replace('\\', '/') - src_label = '\t(.../' + src_label + ')' - else: - src_label = '' - if dst_label: - dst_label = dst_label.replace('\\', '/') - dst_label = '\t(.../' + dst_label + ')' - else: - dst_label = '' - path_as_shown = path.replace('\\', '/') - return [ - "Index: " + path_as_shown + "\n", - "===================================================================\n", - "--- " + path_as_shown + src_label + "\t(" + old_tag + ")\n", - "+++ " + path_as_shown + dst_label + "\t(" + new_tag + ")\n", - ] - -def make_no_diff_deleted_header(path, old_tag, new_tag): - """Generate the expected diff header for a deleted file PATH when in - 'no-diff-deleted' mode. (In that mode, no further details appear after the - header.) 
Return the header as an array of newline-terminated strings.""" - path_as_shown = path.replace('\\', '/') - return [ - "Index: " + path_as_shown + " (deleted)\n", - "===================================================================\n", - ] - -def make_git_diff_header(target_path, repos_relpath, - old_tag, new_tag, add=False, src_label=None, - dst_label=None, delete=False, text_changes=True, - cp=False, mv=False, copyfrom_path=None, - copyfrom_rev=None): - """ Generate the expected 'git diff' header for file TARGET_PATH. - REPOS_RELPATH is the location of the path relative to the repository root. - The old and new versions ("revision X", or "working copy") must be - specified in OLD_TAG and NEW_TAG. - SRC_LABEL and DST_LABEL are paths or urls that are added to the diff - labels if we're diffing against the repository. ADD, DELETE, CP and MV - denotes the operations performed on the file. COPYFROM_PATH is the source - of a copy or move. Return the header as an array of newline-terminated - strings.""" - - path_as_shown = target_path.replace('\\', '/') - if src_label: - src_label = src_label.replace('\\', '/') - src_label = '\t(.../' + src_label + ')' - else: - src_label = '' - if dst_label: - dst_label = dst_label.replace('\\', '/') - dst_label = '\t(.../' + dst_label + ')' - else: - dst_label = '' - - output = [ - "Index: " + path_as_shown + "\n", - "===================================================================\n" - ] - if add: - output.extend([ - "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n", - "new file mode 10644\n", - ]) - if text_changes: - output.extend([ - "--- /dev/null\t(" + old_tag + ")\n", - "+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n" - ]) - elif delete: - output.extend([ - "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n", - "deleted file mode 10644\n", - ]) - if text_changes: - output.extend([ - "--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n", - "+++ /dev/null\t(" + new_tag + ")\n" 
- ]) - elif cp: - if copyfrom_rev: - copyfrom_rev = '@' + copyfrom_rev - else: - copyfrom_rev = '' - output.extend([ - "diff --git a/" + copyfrom_path + " b/" + repos_relpath + "\n", - "copy from " + copyfrom_path + copyfrom_rev + "\n", - "copy to " + repos_relpath + "\n", - ]) - if text_changes: - output.extend([ - "--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n", - "+++ b/" + repos_relpath + "\t(" + new_tag + ")\n" - ]) - elif mv: - output.extend([ - "diff --git a/" + copyfrom_path + " b/" + path_as_shown + "\n", - "rename from " + copyfrom_path + "\n", - "rename to " + repos_relpath + "\n", - ]) - if text_changes: - output.extend([ - "--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n", - "+++ b/" + repos_relpath + "\t(" + new_tag + ")\n" - ]) - else: - output.extend([ - "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n", - "--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n", - "+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n", - ]) - return output - -def make_diff_prop_header(path): - """Return a property diff sub-header, as a list of newline-terminated - strings.""" - return [ - "\n", - "Property changes on: " + path.replace('\\', '/') + "\n", - "___________________________________________________________________\n" - ] - -def make_diff_prop_val(plus_minus, pval): - "Return diff for prop value PVAL, with leading PLUS_MINUS (+ or -)." - if len(pval) > 0 and pval[-1] != '\n': - return [plus_minus + pval + "\n","\\ No newline at end of property\n"] - return [plus_minus + pval] - -def make_diff_prop_deleted(pname, pval): - """Return a property diff for deletion of property PNAME, old value PVAL. - PVAL is a single string with no embedded newlines. 
Return the result - as a list of newline-terminated strings.""" - return [ - "Deleted: " + pname + "\n", - "## -1 +0,0 ##\n" - ] + make_diff_prop_val("-", pval) - -def make_diff_prop_added(pname, pval): - """Return a property diff for addition of property PNAME, new value PVAL. - PVAL is a single string with no embedded newlines. Return the result - as a list of newline-terminated strings.""" - return [ - "Added: " + pname + "\n", - "## -0,0 +1 ##\n", - ] + make_diff_prop_val("+", pval) - -def make_diff_prop_modified(pname, pval1, pval2): - """Return a property diff for modification of property PNAME, old value - PVAL1, new value PVAL2. - - PVAL is a single string with no embedded newlines. A newline at the - end is significant: without it, we add an extra line saying '\ No - newline at end of property'. - - Return the result as a list of newline-terminated strings. - """ - return [ - "Modified: " + pname + "\n", - "## -1 +1 ##\n", - ] + make_diff_prop_val("-", pval1) + make_diff_prop_val("+", pval2) ###################################################################### # Diff output checker @@ -721,20 +560,9 @@ def diff_non_version_controlled_file(sbox): svntest.main.file_append(sbox.ospath('A/D/foo'), "a new file") - exit_code, diff_output, err_output = svntest.main.run_svn( - 1, 'diff', sbox.ospath('A/D/foo')) - - if count_diff_output(diff_output) != 0: raise svntest.Failure - - # At one point this would crash, so we would only get a 'Segmentation Fault' - # error message. The appropriate response is a few lines of errors. I wish - # there was a way to figure out if svn crashed, but all run_svn gives us is - # the output, so here we are... 
- for line in err_output: - if re.search("foo' is not under version control$", line): - break - else: - raise svntest.Failure + svntest.actions.run_and_verify_svn(None, + 'svn: E155010: .*foo\' was not found.', + 'diff', sbox.ospath('A/D/foo')) # test 9 def diff_pure_repository_update_a_file(sbox): @@ -822,29 +650,29 @@ def diff_only_property_change(sbox): make_diff_prop_added("svn:eol-style", "native") os.chdir(sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'native', 'iota') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'empty-msg') - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-r', '1:2') - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-c', '2') - svntest.actions.run_and_verify_svn(None, expected_reverse_output, [], + svntest.actions.run_and_verify_svn(expected_reverse_output, [], 'diff', '-r', '2:1') - svntest.actions.run_and_verify_svn(None, expected_reverse_output, [], + svntest.actions.run_and_verify_svn(expected_reverse_output, [], 'diff', '-c', '-2') - svntest.actions.run_and_verify_svn(None, expected_rev1_output, [], + svntest.actions.run_and_verify_svn(expected_rev1_output, [], 'diff', '-r', '1') - svntest.actions.run_and_verify_svn(None, expected_rev1_output, [], + svntest.actions.run_and_verify_svn(expected_rev1_output, [], 'diff', '-r', 'PREV', 'iota') @@ -881,7 +709,7 @@ def dont_diff_binary_file(sbox): # Commit the new binary file, creating revision 2. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update the whole working copy to HEAD (rev 2) expected_output = svntest.wc.State(wc_dir, {}) @@ -901,8 +729,7 @@ def dont_diff_binary_file(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - 1) # verify props, too. + check_props=True) # Make a local mod to the binary file. svntest.main.file_append(theta_path, "some extra junk") @@ -944,7 +771,7 @@ def dont_diff_binary_file(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Third diff use-case: 'svn diff -r2:3 wc' will compare two # repository trees. @@ -1010,7 +837,7 @@ def diff_head_of_moved_file(sbox): '\ No newline at end of file\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-r', 'HEAD', new_mu_path) @@ -1041,7 +868,7 @@ def diff_base_to_repos(sbox): expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_output = svntest.wc.State(wc_dir, {}) expected_disk = svntest.main.greek_state.copy() @@ -1059,7 +886,7 @@ def diff_base_to_repos(sbox): # the rev2 changes and local mods. That's because the working files # are being compared to the repository. exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '1', wc_dir) + None, [], 'diff', '-r', '1', wc_dir) # Makes diff output look the same on all platforms. def strip_eols(lines): @@ -1079,7 +906,7 @@ def diff_base_to_repos(sbox): # the rev2 changes and NOT the local mods. That's because the # text-bases are being compared to the repository. 
exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', 'BASE:1', wc_dir) + None, [], 'diff', '-r', 'BASE:1', wc_dir) expected_output_lines = make_diff_header(iota_path, "working copy", "revision 1") + [ @@ -1097,7 +924,7 @@ def diff_base_to_repos(sbox): # look exactly the same as 'svn diff -r2:1'. (If you remove the # header commentary) exit_code, diff_output2, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '2:1', wc_dir) + None, [], 'diff', '-r', '2:1', wc_dir) diff_output[2:4] = [] diff_output2[2:4] = [] @@ -1107,10 +934,10 @@ def diff_base_to_repos(sbox): # and similarly, does 'svn diff -r1:2' == 'svn diff -r1:BASE' ? exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '1:2', wc_dir) + None, [], 'diff', '-r', '1:2', wc_dir) exit_code, diff_output2, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '1:BASE', wc_dir) + None, [], 'diff', '-r', '1:BASE', wc_dir) diff_output[2:4] = [] diff_output2[2:4] = [] @@ -1135,16 +962,16 @@ def diff_base_to_repos(sbox): # -r2:1 and -rBASE:1. None of these diffs should mention the # scheduled addition or deletion. 
exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '1:2', wc_dir) + None, [], 'diff', '-r', '1:2', wc_dir) exit_code, diff_output2, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '1:BASE', wc_dir) + None, [], 'diff', '-r', '1:BASE', wc_dir) exit_code, diff_output3, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '2:1', wc_dir) + None, [], 'diff', '-r', '2:1', wc_dir) exit_code, diff_output4, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', 'BASE:1', wc_dir) + None, [], 'diff', '-r', 'BASE:1', wc_dir) diff_output[2:4] = [] diff_output2[2:4] = [] @@ -1171,7 +998,7 @@ def diff_base_to_repos(sbox): 'A/D/newfile' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_output = svntest.wc.State(wc_dir, {}) expected_disk = svntest.main.greek_state.copy() @@ -1192,10 +1019,10 @@ def diff_base_to_repos(sbox): # Now 'svn diff -r3:2' should == 'svn diff -rBASE:2', showing the # removal of changes to iota, the adding of mu, and deletion of newfile. 
exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', '3:2', wc_dir) + None, [], 'diff', '-r', '3:2', wc_dir) exit_code, diff_output2, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', 'BASE:2', wc_dir) + None, [], 'diff', '-r', 'BASE:2', wc_dir) # to do the comparison, remove all output lines starting with +++ or --- re_infoline = re.compile('^(\+\+\+|---).*$') @@ -1239,7 +1066,7 @@ def diff_deleted_in_head(sbox): expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_output = svntest.wc.State(wc_dir, {}) expected_disk = svntest.main.greek_state.copy() @@ -1262,12 +1089,12 @@ def diff_deleted_in_head(sbox): 'A/C') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Doing an 'svn diff -r1:2' on the URL of directory A should work, # especially over the DAV layer. 
the_url = sbox.repo_url + '/A' - diff_output = svntest.actions.run_and_verify_svn(None, None, [], + diff_output = svntest.actions.run_and_verify_svn(None, [], 'diff', '-r', '1:2', the_url + "@2") @@ -1351,22 +1178,22 @@ def diff_branches(sbox): A_url = sbox.repo_url + '/A' A2_url = sbox.repo_url + '/A2' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'log msg', A_url, A2_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', sbox.wc_dir) A_alpha = sbox.ospath('A/B/E/alpha') A2_alpha = sbox.ospath('A2/B/E/alpha') svntest.main.file_append(A_alpha, "\nfoo\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', sbox.wc_dir) svntest.main.file_append(A2_alpha, "\nbar\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', sbox.wc_dir) svntest.main.file_append(A_alpha, "zig\n") @@ -1375,21 +1202,21 @@ def diff_branches(sbox): # another branch rel_path = os.path.join('B', 'E', 'alpha') exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '--old', A_url, '--new', A2_url, rel_path) + None, [], 'diff', '--old', A_url, '--new', A2_url, rel_path) verify_expected_output(diff_output, "-foo") verify_expected_output(diff_output, "+bar") # Same again but using whole branch exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '--old', A_url, '--new', A2_url) + None, [], 'diff', '--old', A_url, '--new', A2_url) verify_expected_output(diff_output, "-foo") verify_expected_output(diff_output, "+bar") # Compare two repository files on different branches exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'diff', A_url + '/B/E/alpha', A2_url + '/B/E/alpha') verify_expected_output(diff_output, "-foo") @@ -1397,14 
+1224,14 @@ def diff_branches(sbox): # Compare two versions of a file on a single branch exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'diff', A_url + '/B/E/alpha@2', A_url + '/B/E/alpha@3') verify_expected_output(diff_output, "+foo") # Compare identical files on different branches exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, [], [], + [], [], 'diff', A_url + '/B/E/alpha@2', A2_url + '/B/E/alpha@3') @@ -1417,22 +1244,22 @@ def diff_repos_and_wc(sbox): A_url = sbox.repo_url + '/A' A2_url = sbox.repo_url + '/A2' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'log msg', A_url, A2_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', sbox.wc_dir) A_alpha = sbox.ospath('A/B/E/alpha') A2_alpha = sbox.ospath('A2/B/E/alpha') svntest.main.file_append(A_alpha, "\nfoo\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', sbox.wc_dir) svntest.main.file_append(A2_alpha, "\nbar\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', sbox.wc_dir) svntest.main.file_append(A_alpha, "zig\n") @@ -1442,7 +1269,7 @@ def diff_repos_and_wc(sbox): A_path = sbox.ospath('A') rel_path = os.path.join('B', 'E', 'alpha') exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'diff', '--old', A2_url, '--new', A_path, rel_path) verify_expected_output(diff_output, "-bar") @@ -1451,7 +1278,7 @@ def diff_repos_and_wc(sbox): # Same again but using whole branch exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'diff', '--old', A2_url, '--new', A_path) verify_expected_output(diff_output, "-bar") @@ -1475,32 +1302,32 @@ def diff_file_urls(sbox): os.remove(iota_path) 
svntest.main.file_append(iota_path, "foo\nbar\nsnafu\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', iota_path) # Now, copy the file elsewhere, twice. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'log msg', iota_url, iota_copy_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'log msg', iota_url, iota_copy2_url) # Update (to get the copies) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', sbox.wc_dir) # Now, make edits to one of the copies of iota, and commit. os.remove(iota_copy_path) svntest.main.file_append(iota_copy_path, "foo\nsnafu\nabcdefg\nopqrstuv\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', iota_copy_path) # Finally, do a diff between the first and second copies of iota, # and verify that we got the expected lines. And then do it in reverse! - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'diff', iota_copy_url, iota_copy2_url) @@ -1509,7 +1336,7 @@ def diff_file_urls(sbox): verify_expected_output(out, "-abcdefg") verify_expected_output(out, "-opqrstuv") - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'diff', iota_copy2_url, iota_copy_url) @@ -1528,16 +1355,16 @@ def diff_prop_change_local_edit(sbox): iota_url = sbox.repo_url + '/iota' # Change a property on iota, and commit. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'pname', 'pvalue', iota_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', iota_path) # Make local edits to iota. svntest.main.file_append(iota_path, "\nMore text.\n") # diff r1:COMMITTED should show the property change but not the local edit. - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'diff', '-r1:COMMITTED', iota_path) @@ -1547,7 +1374,7 @@ def diff_prop_change_local_edit(sbox): verify_expected_output(out, "+pvalue") # diff r1:BASE should show the property change but not the local edit. - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'diff', '-r1:BASE', iota_path) for line in out: @@ -1556,7 +1383,7 @@ def diff_prop_change_local_edit(sbox): verify_expected_output(out, "+pvalue") # fails at r7481 # diff r1:WC should show the local edit as well as the property change. 
- exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'diff', '-r1', iota_path) verify_expected_output(out, "+More text.") # fails at r7481 @@ -1570,37 +1397,37 @@ def check_for_omitted_prefix_in_path_component(sbox): svntest.actions.do_sleep_for_timestamps() prefix_path = sbox.ospath('prefix_mydir') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', prefix_path) other_prefix_path = sbox.ospath('prefix_other') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', other_prefix_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', sbox.wc_dir) file_path = os.path.join(prefix_path, "test.txt") svntest.main.file_write(file_path, "Hello\nThere\nIota\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', file_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', sbox.wc_dir) prefix_url = sbox.repo_url + "/prefix_mydir" other_prefix_url = sbox.repo_url + "/prefix_other/mytag" - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'log msg', prefix_url, other_prefix_url) svntest.main.file_write(file_path, "Hello\nWorld\nIota\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', prefix_path) - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'diff', prefix_url, other_prefix_url) @@ -1627,12 +1454,12 @@ def diff_renamed_file(sbox): pi2_path = os.path.join('A', 'D', 'pi2') svntest.main.file_write(pi_path, "new pi") - 
svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg') svntest.main.file_append(pi_path, "even more pi") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg') svntest.main.run_svn(None, 'mv', pi_path, pi2_path) @@ -1703,7 +1530,7 @@ def diff_renamed_file(sbox): raise svntest.Failure - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg') # Repos->WC diff of file after the rename @@ -1758,7 +1585,7 @@ def diff_within_renamed_dir(sbox): 'M') : raise svntest.Failure - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg') # Check repos->wc after commit @@ -1801,15 +1628,15 @@ def diff_prop_on_named_dir(sbox): os.chdir(sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'p', 'v', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', '') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propdel', 'p', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', '') exit_code, diff_output, err_output = svntest.main.run_svn(None, 'diff', @@ -1825,7 +1652,7 @@ def diff_keywords(sbox): iota_path = sbox.ospath('iota') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:keywords', 'Id Rev Date', @@ -1840,18 +1667,18 @@ def diff_keywords(sbox): fp.write("$Rev::%s$\n" % (' ' * 80)) fp.close() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'keywords', sbox.wc_dir) svntest.main.file_append(iota_path, "bar\n") - svntest.actions.run_and_verify_svn(None, None, 
[], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'added bar', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', sbox.wc_dir) exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', 'prev:head', sbox.wc_dir) + None, [], 'diff', '-r', 'prev:head', sbox.wc_dir) verify_expected_output(diff_output, "+bar") verify_excluded_output(diff_output, "$Date:") @@ -1859,7 +1686,7 @@ def diff_keywords(sbox): verify_excluded_output(diff_output, "$Id:") exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', 'head:prev', sbox.wc_dir) + None, [], 'diff', '-r', 'head:prev', sbox.wc_dir) verify_expected_output(diff_output, "-bar") verify_excluded_output(diff_output, "$Date:") @@ -1877,13 +1704,13 @@ def diff_keywords(sbox): fp.write("$Rev::%s$\n" % (' ' * 79)) fp.close() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'keywords 2', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', sbox.wc_dir) exit_code, diff_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', 'prev:head', sbox.wc_dir) + None, [], 'diff', '-r', 'prev:head', sbox.wc_dir) # these should show up verify_expected_output(diff_output, "+$Id:: ") @@ -1899,7 +1726,7 @@ def diff_keywords(sbox): def diff_force(sbox): - "show diffs for binary files with --force" + "show diffs for binary files" sbox.build() wc_dir = sbox.wc_dir @@ -1925,7 +1752,7 @@ def diff_force(sbox): # Commit iota, creating revision 2. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Add another line, while keeping he file as binary. 
svntest.main.file_append(iota_path, "another line") @@ -1941,36 +1768,22 @@ def diff_force(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) - # Check that we get diff when the first, the second and both files are - # marked as binary. + # Check that we get diff when the first, the second and both files + # are marked as binary. First we'll use --force. Then we'll use + # the configuration option 'diff-ignore-content-type'. re_nodisplay = re.compile('^Cannot display:') - exit_code, stdout, stderr = svntest.main.run_svn(None, - 'diff', '-r1:2', iota_path, - '--force') - - for line in stdout: - if (re_nodisplay.match(line)): - raise svntest.Failure - - exit_code, stdout, stderr = svntest.main.run_svn(None, - 'diff', '-r2:1', iota_path, - '--force') - - for line in stdout: - if (re_nodisplay.match(line)): - raise svntest.Failure - - exit_code, stdout, stderr = svntest.main.run_svn(None, - 'diff', '-r2:3', iota_path, - '--force') - - for line in stdout: - if (re_nodisplay.match(line)): - raise svntest.Failure + for opt in ['--force', + '--config-option=config:miscellany:diff-ignore-content-type=yes']: + for range in ['-r1:2', '-r2:1', '-r2:3']: + exit_code, stdout, stderr = svntest.main.run_svn(None, 'diff', range, + iota_path, opt) + for line in stdout: + if (re_nodisplay.match(line)): + raise svntest.Failure #---------------------------------------------------------------------- # Regression test for issue #2333: Renaming a directory should produce @@ -2017,7 +1830,7 @@ def diff_renamed_dir(sbox): raise svntest.Failure # Commit - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg') # Check repos->wc after commit @@ -2144,61 +1957,61 @@ def diff_property_changes_to_base(sbox): os.chdir(sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'fileprop', 
'r2value', 'iota') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'dirprop', 'r2value', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'empty-msg') # Check that forward and reverse repos-repos diffs are as expected. expected = svntest.verify.UnorderedOutput(expected_output_r1_r2) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', '1:2') expected = svntest.verify.UnorderedOutput(expected_output_r2_r1) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', '2:1') # Now check repos->WORKING, repos->BASE, and BASE->repos. # (BASE is r1, and WORKING has no local mods, so this should produce # the same output as above). expected = svntest.verify.UnorderedOutput(expected_output_r1) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', '1') - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', '1:BASE') expected = svntest.verify.UnorderedOutput(expected_output_base_r1) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', 'BASE:1') # Modify some properties. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'fileprop', 'workingvalue', 'iota') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'dirprop', 'workingvalue', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'fileprop', 'workingvalue', 'A/mu') # Check that the earlier diffs against BASE are unaffected by the # presence of local mods (with the exception of diff header changes). expected = svntest.verify.UnorderedOutput(expected_output_r1) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', '1:BASE') expected = svntest.verify.UnorderedOutput(expected_output_base_r1) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', 'BASE:1') def diff_schedule_delete(sbox): @@ -2207,37 +2020,37 @@ def diff_schedule_delete(sbox): sbox.build() expected_output_r2_working = make_diff_header("foo", "revision 2", - "working copy") + [ + "nonexistent") + [ "@@ -1 +0,0 @@\n", "-xxx\n" ] expected_output_r2_base = make_diff_header("foo", "revision 2", - "working copy") + [ + "nonexistent") + [ "@@ -1 +0,0 @@\n", "-xxx\n", ] - expected_output_base_r2 = make_diff_header("foo", "revision 0", + expected_output_base_r2 = make_diff_header("foo", "nonexistent", "revision 2") + [ "@@ -0,0 +1 @@\n", "+xxx\n", ] - expected_output_r1_base = make_diff_header("foo", "revision 0", + expected_output_r1_base = make_diff_header("foo", "nonexistent", "working copy") + [ "@@ -0,0 +1,2 @@\n", "+xxx\n", "+yyy\n" ] expected_output_base_r1 = make_diff_header("foo", "working copy", - "revision 1") + [ + "nonexistent") + [ "@@ -1,2 +0,0 @@\n", "-xxx\n", "-yyy\n" ] expected_output_base_working = expected_output_base_r1[:] expected_output_base_working[2] = "--- foo\t(revision 3)\n" - 
expected_output_base_working[3] = "+++ foo\t(working copy)\n" + expected_output_base_working[3] = "+++ foo\t(nonexistent)\n" wc_dir = sbox.wc_dir os.chdir(wc_dir) @@ -2260,23 +2073,23 @@ def diff_schedule_delete(sbox): # in WORKING, but diffs against BASE should remain unaffected. # 1. repos-wc diff: file not present in repos. - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'diff', '-r', '1') - svntest.actions.run_and_verify_svn(None, expected_output_r1_base, [], + svntest.actions.run_and_verify_svn(expected_output_r1_base, [], 'diff', '-r', '1:BASE') - svntest.actions.run_and_verify_svn(None, expected_output_base_r1, [], + svntest.actions.run_and_verify_svn(expected_output_base_r1, [], 'diff', '-r', 'BASE:1') # 2. repos-wc diff: file present in repos. - svntest.actions.run_and_verify_svn(None, expected_output_r2_working, [], + svntest.actions.run_and_verify_svn(expected_output_r2_working, [], 'diff', '-r', '2') - svntest.actions.run_and_verify_svn(None, expected_output_r2_base, [], + svntest.actions.run_and_verify_svn(expected_output_r2_base, [], 'diff', '-r', '2:BASE') - svntest.actions.run_and_verify_svn(None, expected_output_base_r2, [], + svntest.actions.run_and_verify_svn(expected_output_base_r2, [], 'diff', '-r', 'BASE:2') # 3. wc-wc diff. - svntest.actions.run_and_verify_svn(None, expected_output_base_working, [], + svntest.actions.run_and_verify_svn(expected_output_base_working, [], 'diff') #---------------------------------------------------------------------- @@ -2303,40 +2116,40 @@ def diff_mime_type_changes(sbox): # Append some text to iota (r2). svntest.main.file_append('iota', "revision 2 text.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log_msg') # Check that forward and reverse repos-BASE diffs are as expected. 
- svntest.actions.run_and_verify_svn(None, expected_output_r1_wc, [], + svntest.actions.run_and_verify_svn(expected_output_r1_wc, [], 'diff', '-r', '1:BASE') - svntest.actions.run_and_verify_svn(None, expected_output_wc_r1, [], + svntest.actions.run_and_verify_svn(expected_output_wc_r1, [], 'diff', '-r', 'BASE:1') # Mark iota as a binary file in the working copy. - svntest.actions.run_and_verify_svn2(None, None, + svntest.actions.run_and_verify_svn2(None, binary_mime_type_on_text_file_warning, 0, 'propset', 'svn:mime-type', 'application/octet-stream', 'iota') # Check that the earlier diffs against BASE are unaffected by the # presence of local svn:mime-type property mods. - svntest.actions.run_and_verify_svn(None, expected_output_r1_wc, [], + svntest.actions.run_and_verify_svn(expected_output_r1_wc, [], 'diff', '-r', '1:BASE') - svntest.actions.run_and_verify_svn(None, expected_output_wc_r1, [], + svntest.actions.run_and_verify_svn(expected_output_wc_r1, [], 'diff', '-r', 'BASE:1') # Commit the change (r3) (so that BASE has the binary MIME type), then # mark iota as a text file again in the working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log_msg') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propdel', 'svn:mime-type', 'iota') # Now diffs against BASE will fail, but diffs against WORKNG should be # fine. - svntest.actions.run_and_verify_svn(None, expected_output_r1_wc, [], + svntest.actions.run_and_verify_svn(expected_output_r1_wc, [], 'diff', '-r', '1') @@ -2361,37 +2174,37 @@ def diff_prop_change_local_propmod(sbox): os.chdir(sbox.wc_dir) # Set a property on A/ and iota, and commit them (r2). 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'dirprop', 'r2value', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'fileprop', 'r2value', 'iota') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log_msg') # Change the property values on A/ and iota, and commit them (r3). - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'dirprop', 'r3value', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'fileprop', 'r3value', 'iota') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log_msg') # Finally, change the property values one last time. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'dirprop', 'workingvalue', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'fileprop', 'workingvalue', 'iota') # And also add some properties that only exist in WORKING. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'newdirprop', 'newworkingvalue', 'A') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'newfileprop', 'newworkingvalue', 'iota') @@ -2404,7 +2217,7 @@ def diff_prop_change_local_propmod(sbox): # are included in the output, since they won't be listed in a simple # BASE->r2 diff. 
expected = svntest.verify.UnorderedOutput(expected_output_r2_wc) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], 'diff', '-r', '2') @@ -2430,17 +2243,17 @@ def diff_repos_wc_add_with_props(sbox): ] + make_diff_prop_header("X/bar") + \ make_diff_prop_added("propname", "propvalue") - diff_X_r1_base = make_diff_header("X", "revision 0", + diff_X_r1_base = make_diff_header("X", "nonexistent", "working copy") + diff_X - diff_X_base_r3 = make_diff_header("X", "revision 0", + diff_X_base_r3 = make_diff_header("X", "nonexistent", "revision 3") + diff_X - diff_foo_r1_base = make_diff_header("foo", "revision 0", + diff_foo_r1_base = make_diff_header("foo", "nonexistent", "revision 3") + diff_foo - diff_foo_base_r3 = make_diff_header("foo", "revision 0", + diff_foo_base_r3 = make_diff_header("foo", "nonexistent", "revision 3") + diff_foo - diff_X_bar_r1_base = make_diff_header("X/bar", "revision 0", + diff_X_bar_r1_base = make_diff_header("X/bar", "nonexistent", "revision 3") + diff_X_bar - diff_X_bar_base_r3 = make_diff_header("X/bar", "revision 0", + diff_X_bar_base_r3 = make_diff_header("X/bar", "nonexistent", "revision 3") + diff_X_bar expected_output_r1_base = svntest.verify.UnorderedOutput(diff_X_r1_base + @@ -2456,32 +2269,32 @@ def diff_repos_wc_add_with_props(sbox): os.makedirs('X') svntest.main.file_append('foo', "content\n") svntest.main.file_append(os.path.join('X', 'bar'), "content\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', 'X', 'foo') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log_msg') # Set a property on all three items, and commit them (r3). 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'propname', 'propvalue', 'X', 'foo', os.path.join('X', 'bar')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log_msg') # Now, if we diff r1 to WORKING or BASE, we should see the content # addition for foo and X/bar, and property additions for all three. - svntest.actions.run_and_verify_svn(None, expected_output_r1_base, [], + svntest.actions.run_and_verify_svn(expected_output_r1_base, [], 'diff', '-r', '1') - svntest.actions.run_and_verify_svn(None, expected_output_r1_base, [], + svntest.actions.run_and_verify_svn(expected_output_r1_base, [], 'diff', '-r', '1:BASE') # Update the BASE and WORKING revisions to r1. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1') # If we diff BASE to r3, we should see the same output as above. - svntest.actions.run_and_verify_svn(None, expected_output_base_r3, [], + svntest.actions.run_and_verify_svn(expected_output_base_r3, [], 'diff', '-r', 'BASE:3') @@ -2526,11 +2339,11 @@ def diff_repos_working_added_dir(sbox): sbox.build() - expected_output_r1_BASE = make_diff_header("X/bar", "revision 0", + expected_output_r1_BASE = make_diff_header("X/bar", "nonexistent", "revision 2") + [ "@@ -0,0 +1 @@\n", "+content\n" ] - expected_output_r1_WORKING = make_diff_header("X/bar", "revision 0", + expected_output_r1_WORKING = make_diff_header("X/bar", "nonexistent", "working copy") + [ "@@ -0,0 +1,2 @@\n", "+content\n", @@ -2541,9 +2354,9 @@ def diff_repos_working_added_dir(sbox): # Create directory X and file X/bar, and commit them (r2). 
os.makedirs('X') svntest.main.file_append(os.path.join('X', 'bar'), "content\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', 'X') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log_msg') # Make a local modification to X/bar. @@ -2551,9 +2364,9 @@ def diff_repos_working_added_dir(sbox): # Now, if we diff r1 to WORKING or BASE, we should see the content # addition for X/bar, and (for WORKING) the local modification. - svntest.actions.run_and_verify_svn(None, expected_output_r1_BASE, [], + svntest.actions.run_and_verify_svn(expected_output_r1_BASE, [], 'diff', '-r', '1:BASE') - svntest.actions.run_and_verify_svn(None, expected_output_r1_WORKING, [], + svntest.actions.run_and_verify_svn(expected_output_r1_WORKING, [], 'diff', '-r', '1') @@ -2572,12 +2385,12 @@ def diff_base_repos_moved(sbox): # Move, modify and commit a file svntest.main.run_svn(None, 'mv', oldfile, newfile) svntest.main.file_write(newfile, "new content\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', '') # Check that a base->repos diff with copyfrom shows deleted and added lines. exit_code, out, err = svntest.actions.run_and_verify_svn( - None, svntest.verify.AnyOutput, [], 'diff', '-rBASE:1', newfile) + svntest.verify.AnyOutput, [], 'diff', '-rBASE:1', newfile) if check_diff_output(out, newfile, 'M'): raise svntest.Failure @@ -2599,14 +2412,14 @@ def diff_added_subtree(sbox): os.chdir(sbox.wc_dir) # Roll the wc back to r0 (i.e. an empty wc). - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r0') # We shouldn't get any errors when we request a diff showing the # addition of the greek tree. The diff contains additions of files # and directories with parents that don't currently exist in the wc, # which is what we're testing here. 
- svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'diff', '-r', 'BASE:1') #---------------------------------------------------------------------- @@ -2766,6 +2579,20 @@ def basic_diff_summarize(sbox): svntest.actions.run_and_verify_diff_summarize(expected_reverse_diff, wc_dir, '-c-3') + # Get the differences between a deep newly added dir Issue(4421) + expected_diff = svntest.wc.State(wc_dir, { + 'Q/R' : Item(status='A '), + 'Q/R/newfile' : Item(status='A '), + }) + expected_reverse_diff = svntest.wc.State(wc_dir, { + 'Q/R' : Item(status='D '), + 'Q/R/newfile' : Item(status='D '), + }) + svntest.actions.run_and_verify_diff_summarize(expected_diff, + p('Q/R'), '-c3') + svntest.actions.run_and_verify_diff_summarize(expected_reverse_diff, + p('Q/R'), '-c-3') + #---------------------------------------------------------------------- def diff_weird_author(sbox): "diff with svn:author that has < in it" @@ -2785,14 +2612,13 @@ def diff_weird_author(sbox): expected_status.tweak("A/mu", wc_rev=2) svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output, - expected_status, None, sbox.wc_dir) + expected_status) svntest.main.run_svn(None, "propset", "--revprop", "-r", "2", "svn:author", "J. Random <jrandom@example.com>", sbox.repo_url) - svntest.actions.run_and_verify_svn(None, - ["J. Random <jrandom@example.com>\n"], + svntest.actions.run_and_verify_svn(["J. 
Random <jrandom@example.com>\n"], [], "pget", "--revprop", "-r" "2", "svn:author", sbox.repo_url) @@ -2803,7 +2629,7 @@ def diff_weird_author(sbox): "+new content\n" ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-r1:2', sbox.repo_url) # test for issue 2121, use -x -w option for ignoring whitespace during diff @@ -2825,7 +2651,7 @@ def diff_ignore_whitespace(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None,) # only whitespace changes, should return no changes svntest.main.file_write(file_path, @@ -2833,7 +2659,7 @@ def diff_ignore_whitespace(sbox): " B b \n" " C c \n") - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'diff', '-x', '-w', file_path) # some changes + whitespace @@ -2851,7 +2677,7 @@ def diff_ignore_whitespace(sbox): "+ Bb b \n", " Cc\n" ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-x', '-w', file_path) def diff_ignore_eolstyle(sbox): @@ -2871,7 +2697,7 @@ def diff_ignore_eolstyle(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # commit only eol changes svntest.main.file_write(file_path, @@ -2888,7 +2714,7 @@ def diff_ignore_eolstyle(sbox): "+Cc\n", "\ No newline at end of file\n" ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-x', '--ignore-eol-style', file_path) @@ -2919,7 +2745,7 @@ def diff_in_renamed_folder(sbox): ### child of the A/D/C copy. thus, it appears in the status output as a ### (M)odified child. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) expected_output = svntest.wc.State(wc_dir, { 'A/D/C/kappa' : Item(verb='Sending'), @@ -2929,7 +2755,7 @@ def diff_in_renamed_folder(sbox): for i in range(3, 5): svntest.main.file_append(kappa_path, str(i) + "\n") svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) expected_output = make_diff_header(kappa_path, "revision 3", "revision 4") + [ @@ -2939,7 +2765,7 @@ def diff_in_renamed_folder(sbox): "+4\n" ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-r3:4', kappa_path) def diff_with_depth(sbox): @@ -2989,17 +2815,17 @@ def diff_with_depth(sbox): # Test wc-wc diff. expected_diffs = create_expected_diffs("revision 1", "working copy") for depth in ['empty', 'files', 'immediates', 'infinity']: - svntest.actions.run_and_verify_svn(None, expected_diffs[depth], [], + svntest.actions.run_and_verify_svn(expected_diffs[depth], [], 'diff', '--depth', depth) # Commit the changes. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', '') # Test repos-repos diff. expected_diffs = create_expected_diffs("revision 1", "revision 2") for depth in ['empty', 'files', 'immediates', 'infinity']: - svntest.actions.run_and_verify_svn(None, expected_diffs[depth], [], + svntest.actions.run_and_verify_svn(expected_diffs[depth], [], 'diff', '-c2', '--depth', depth) def create_expected_repos_wc_diffs(): @@ -3042,7 +2868,7 @@ def diff_with_depth(sbox): diff_dot) return expected - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r1') sbox.simple_propset('foo1', 'baz1', '.') @@ -3055,7 +2881,7 @@ def diff_with_depth(sbox): # Test wc-repos diff. 
expected_diffs = create_expected_repos_wc_diffs() for depth in ['empty', 'files', 'immediates', 'infinity']: - svntest.actions.run_and_verify_svn(None, expected_diffs[depth], [], + svntest.actions.run_and_verify_svn(expected_diffs[depth], [], 'diff', '-rHEAD', '--depth', depth) # test for issue 2920: ignore eol-style on empty lines @@ -3079,7 +2905,7 @@ def diff_ignore_eolstyle_empty_lines(sbox): 'iota' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # sleep to guarantee timestamp change time.sleep(1.1) @@ -3093,7 +2919,7 @@ def diff_ignore_eolstyle_empty_lines(sbox): "Cc\012", mode="wb") - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'diff', '-x', '--ignore-eol-style', file_path) @@ -3115,13 +2941,13 @@ def diff_backward_repos_wc_copy(sbox): svntest.main.run_svn(None, 'up', '-r1') # diff r2 against working copy - diff_repos_wc = make_diff_header("A/mucopy", "revision 2", "working copy") + diff_repos_wc = make_diff_header("A/mucopy", "revision 2", "nonexistent") diff_repos_wc += [ "@@ -1 +0,0 @@\n", "-This is the file 'mu'.\n", ] - svntest.actions.run_and_verify_svn(None, diff_repos_wc, [], + svntest.actions.run_and_verify_svn(diff_repos_wc, [], 'diff', '-r' , '2') #---------------------------------------------------------------------- @@ -3186,11 +3012,11 @@ def diff_summarize_xml(sbox): [], wc_dir, paths, items, props, kinds, wc_dir) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # 1) Test --xml without --summarize svntest.actions.run_and_verify_svn( - None, None, ".*--xml' option only valid with '--summarize' option", + None, ".*--xml' option only valid with '--summarize' option", 'diff', wc_dir, '--xml') # 2) Test --xml on invalid revision @@ -3234,7 +3060,7 @@ def diff_wrong_extension_type(sbox): "'svn diff -x wc -r#' should return error" sbox.build(read_only = True) - 
svntest.actions.run_and_verify_svn(None, [], err.INVALID_DIFF_OPTION, + svntest.actions.run_and_verify_svn([], err.INVALID_DIFF_OPTION, 'diff', '-x', sbox.wc_dir, '-r', '1') # Check the order of the arguments for an external diff tool @@ -3270,7 +3096,7 @@ def diff_external_diffcmd(sbox): # Check that the output of diff corresponds with the expected arguments, # in the correct order. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--diff-cmd', diff_script_path, iota_path) @@ -3309,16 +3135,16 @@ def diff_url_against_local_mods(sbox): A2 = 'A2' A2_url = sbox.repo_url + '/A2' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'log msg', A_url, A2_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up') # In A, add, remove and change a file, and commit. make_file_edit_del_add(A) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'committing A') # In A2, do the same changes but leave uncommitted. @@ -3327,12 +3153,12 @@ def diff_url_against_local_mods(sbox): # Diff Path of A against working copy of A2. # Output using arbritrary diff handling should be empty. expected_output = [] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--old', A, '--new', A2) # Diff URL of A against working copy of A2. Output should be empty. 
expected_output = [] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--old', A_url, '--new', A2) @@ -3349,7 +3175,7 @@ def diff_preexisting_rev_against_local_add(sbox): # remove svntest.main.run_svn(None, 'remove', beta) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'removing beta') # re-add, without committing @@ -3387,7 +3213,7 @@ def diff_git_format_wc_wc(sbox): expected_output = make_git_diff_header( alpha_copied_path, "A/B/E/alpha_copied", - "revision 0", "working copy", + "revision 1", "working copy", copyfrom_path="A/B/E/alpha", copyfrom_rev='1', cp=True, text_changes=True) + [ @@ -3401,7 +3227,7 @@ def diff_git_format_wc_wc(sbox): copyfrom_rev='1', cp=True, text_changes=False) \ + make_git_diff_header(mu_path, "A/mu", "revision 1", - "working copy", + "nonexistent", delete=True) + [ "@@ -1 +0,0 @@\n", "-This is the file 'mu'.\n", @@ -3410,7 +3236,7 @@ def diff_git_format_wc_wc(sbox): "@@ -1 +1,2 @@\n", " This is the file 'iota'.\n", "+Changed 'iota'.\n", - ] + make_git_diff_header(new_path, "new", "revision 0", + ] + make_git_diff_header(new_path, "new", "nonexistent", "working copy", add=True) + [ "@@ -0,0 +1 @@\n", "+This is the file 'new'.\n", @@ -3418,7 +3244,7 @@ def diff_git_format_wc_wc(sbox): expected = expected_output - svntest.actions.run_and_verify_svn(None, expected, [], 'diff', + svntest.actions.run_and_verify_svn(expected, [], 'diff', '--git', wc_dir) @Issue(4294) @@ -3438,19 +3264,19 @@ def diff_git_format_wc_wc_dir_mv(sbox): svntest.main.run_svn(None, 'mv', g_path, g2_path) expected_output = make_git_diff_header(pi_path, "A/D/G/pi", - "revision 1", "working copy", + "revision 1", "nonexistent", delete=True) \ + [ "@@ -1 +0,0 @@\n", "-This is the file 'pi'.\n" ] + make_git_diff_header(rho_path, "A/D/G/rho", - "revision 1", "working copy", + "revision 1", "nonexistent", delete=True) \ + [ "@@ -1 
+0,0 @@\n", "-This is the file 'rho'.\n" ] + make_git_diff_header(tau_path, "A/D/G/tau", - "revision 1", "working copy", + "revision 1", "nonexistent", delete=True) \ + [ "@@ -1 +0,0 @@\n", @@ -3464,7 +3290,7 @@ def diff_git_format_wc_wc_dir_mv(sbox): expected = expected_output - svntest.actions.run_and_verify_svn(None, expected, [], 'diff', + svntest.actions.run_and_verify_svn(expected, [], 'diff', '--git', wc_dir) def diff_git_format_url_wc(sbox): @@ -3485,11 +3311,11 @@ def diff_git_format_url_wc(sbox): svntest.main.run_svn(None, 'commit', '-m', 'Committing changes', wc_dir) svntest.main.run_svn(None, 'up', wc_dir) - expected_output = make_git_diff_header(new_path, "new", "revision 0", + expected_output = make_git_diff_header(new_path, "new", "nonexistent", "revision 2", add=True) + [ "@@ -0,0 +1 @@\n", "+This is the file 'new'.\n", - ] + make_git_diff_header(mu_path, "A/mu", "revision 1", "working copy", + ] + make_git_diff_header(mu_path, "A/mu", "revision 1", "nonexistent", delete=True) + [ "@@ -1 +0,0 @@\n", "-This is the file 'mu'.\n", @@ -3502,7 +3328,7 @@ def diff_git_format_url_wc(sbox): expected = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, expected, [], 'diff', + svntest.actions.run_and_verify_svn(expected, [], 'diff', '--git', '--old', repo_url + '@1', '--new', wc_dir) @@ -3527,11 +3353,11 @@ def diff_git_format_url_url(sbox): svntest.main.run_svn(None, 'up', wc_dir) expected_output = make_git_diff_header("A/mu", "A/mu", "revision 1", - "revision 2", + "nonexistent", delete=True) + [ "@@ -1 +0,0 @@\n", "-This is the file 'mu'.\n", - ] + make_git_diff_header("new", "new", "revision 0", "revision 2", + ] + make_git_diff_header("new", "new", "nonexistent", "revision 2", add=True) + [ "@@ -0,0 +1 @@\n", "+This is the file 'new'.\n", @@ -3544,7 +3370,7 @@ def diff_git_format_url_url(sbox): expected = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, expected, [], 
'diff', + svntest.actions.run_and_verify_svn(expected, [], 'diff', '--git', '--old', repo_url + '@1', '--new', repo_url + '@2') @@ -3575,7 +3401,7 @@ def diff_prop_missing_context(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) prop_val = "".join([ "line 3\n", @@ -3599,7 +3425,7 @@ def diff_prop_missing_context(sbox): "-line 7\n", ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', iota_path) def diff_prop_multiple_hunks(sbox): @@ -3632,7 +3458,7 @@ def diff_prop_multiple_hunks(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) prop_val = "".join([ "line 1\n", @@ -3675,7 +3501,7 @@ def diff_prop_multiple_hunks(sbox): " line 13\n", ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', iota_path) def diff_git_empty_files(sbox): "create a diff in git format for empty files" @@ -3696,13 +3522,13 @@ def diff_git_empty_files(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) svntest.main.file_write(new_path, "") svntest.main.run_svn(None, 'add', new_path) svntest.main.run_svn(None, 'rm', iota_path) - expected_output = make_git_diff_header(new_path, "new", "revision 0", + expected_output = make_git_diff_header(new_path, "new", "nonexistent", "working copy", add=True, text_changes=False) + [ ] + make_git_diff_header(iota_path, "iota", "revision 2", "working copy", @@ -3711,7 +3537,7 @@ def diff_git_empty_files(sbox): # Two files in diff may be in any order. 
expected_output = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--git', wc_dir) def diff_git_with_props(sbox): @@ -3733,7 +3559,7 @@ def diff_git_with_props(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) svntest.main.file_write(new_path, "") svntest.main.run_svn(None, 'add', new_path) @@ -3741,7 +3567,7 @@ def diff_git_with_props(sbox): svntest.main.run_svn(None, 'propset', 'svn:keywords', 'Id', iota_path) expected_output = make_git_diff_header(new_path, "new", - "revision 0", "working copy", + "nonexistent", "working copy", add=True, text_changes=False) + \ make_diff_prop_header("new") + \ make_diff_prop_added("svn:eol-style", "native") + \ @@ -3754,7 +3580,7 @@ def diff_git_with_props(sbox): # Files in diff may be in any order. expected_output = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--git', wc_dir) @Issue(4010) @@ -3775,7 +3601,7 @@ def diff_correct_wc_base_revnum(sbox): 'iota' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Child's base is now 2; parent's is still 1. # Make a local mod. @@ -3787,12 +3613,12 @@ def diff_correct_wc_base_revnum(sbox): make_diff_prop_added("svn:keywords", "Id") # Diff the parent. - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--git', wc_dir) - # The same again, but specifying the target explicity. This should + # The same again, but specifying the target explicitly. This should # give the same output. 
- svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--git', iota_path) def diff_git_with_props_on_dir(sbox): @@ -3814,7 +3640,7 @@ def diff_git_with_props_on_dir(sbox): sbox.simple_propset('k','v', '', 'A') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) was_cwd = os.getcwd() os.chdir(wc_dir) @@ -3829,7 +3655,7 @@ def diff_git_with_props_on_dir(sbox): make_diff_prop_header("") + \ make_diff_prop_added("k", "v") - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-c2', '--git') os.chdir(was_cwd) @@ -3842,7 +3668,7 @@ def diff_abs_localpath_from_wc_folder(sbox): A_path = sbox.ospath('A') B_abs_path = os.path.abspath(sbox.ospath('A/B')) os.chdir(os.path.abspath(A_path)) - svntest.actions.run_and_verify_svn(None, None, [], 'diff', B_abs_path) + svntest.actions.run_and_verify_svn(None, [], 'diff', B_abs_path) @Issue(3449) def no_spurious_conflict(sbox): @@ -3855,7 +3681,7 @@ def no_spurious_conflict(sbox): data_dir = os.path.join(os.path.dirname(sys.argv[0]), 'diff_tests_data') shutil.copyfile(os.path.join(data_dir, '3449_spurious_v1'), sbox.ospath('3449_spurious')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', sbox.ospath('3449_spurious')) sbox.simple_commit() shutil.copyfile(os.path.join(data_dir, '3449_spurious_v2'), @@ -3865,9 +3691,9 @@ def no_spurious_conflict(sbox): sbox.ospath('3449_spurious')) sbox.simple_commit() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', '-r2', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c4', '^/', wc_dir) expected_status = svntest.actions.get_virginal_state(wc_dir, 2) @@ -3878,7 +3704,7 @@ def 
no_spurious_conflict(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # This update produces a conflict in 1.6 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', '--accept', 'postpone', wc_dir) expected_status.tweak(wc_rev=4) expected_status.tweak('3449_spurious', status=' ') @@ -3924,23 +3750,23 @@ def diff_two_working_copies(sbox): src_label = os.path.basename(wc_dir_old) dst_label = os.path.basename(wc_dir) - expected_output = make_diff_header('newdir/newfile', 'working copy', + expected_output = make_diff_header('newdir/newfile', 'nonexistent', 'working copy', src_label, dst_label) + [ "@@ -0,0 +1 @@\n", "+new text\n", ] + make_diff_header('A/mu', 'working copy', - 'working copy', + 'nonexistent', src_label, dst_label) + [ "@@ -1 +0,0 @@\n", "-This is the file 'mu'.\n", - ] + make_diff_header('A/B/F', 'working copy', + ] + make_diff_header('A/B/F', 'nonexistent', 'working copy', src_label, dst_label) + [ "@@ -0,0 +1 @@\n", "+new text\n", ] + make_diff_prop_header('A/B/F') + \ - make_diff_prop_modified("newprop", "propval-old\n", + make_diff_prop_added("newprop", "propval-new\n") + \ make_diff_header('A/B/lambda', 'working copy', 'working copy', @@ -3958,35 +3784,40 @@ def diff_two_working_copies(sbox): make_diff_prop_header('A/D/gamma') + \ make_diff_prop_added("newprop", "propval") + \ make_diff_header('A/D/G/pi', 'working copy', - 'working copy', + 'nonexistent', src_label, dst_label) + [ "@@ -1 +0,0 @@\n", "-This is the file 'pi'.\n", - ] + make_diff_header('A/D/G/pi', 'working copy', + ] + make_diff_header('A/D/G/pi', 'nonexistent', 'working copy', src_label, dst_label) + \ make_diff_prop_header('A/D/G/pi') + \ make_diff_prop_added("newprop", "propval") + \ make_diff_header('A/D/H/chi', 'working copy', - 'working copy', + 'nonexistent', src_label, dst_label) + [ "@@ -1 +0,0 @@\n", "-This is the file 'chi'.\n", ] + make_diff_header('A/D/H/omega', 'working copy', - 'working copy', 
+ 'nonexistent', src_label, dst_label) + [ "@@ -1 +0,0 @@\n", "-This is the file 'omega'.\n", ] + make_diff_header('A/D/H/psi', 'working copy', - 'working copy', + 'nonexistent', src_label, dst_label) + [ "@@ -1 +0,0 @@\n", "-This is the file 'psi'.\n", - ] + ] + make_diff_header('A/B/F', 'working copy', + 'nonexistent', + src_label, dst_label) + \ + make_diff_prop_header('A/B/F') + \ + make_diff_prop_deleted('newprop', 'propval-old\n') - # Files in diff may be in any order. + + # Files in diff may be in any order. #### Not any more, but test order is wrong. expected_output = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--old', wc_dir_old, '--new', wc_dir) @@ -4000,22 +3831,22 @@ def diff_deleted_url(sbox): sbox.simple_commit() # A diff of r2 with target A/D/H should show the removed children - expected_output = make_diff_header("chi", "revision 1", "revision 2") + [ + expected_output = make_diff_header("chi", "revision 1", "nonexistent") + [ "@@ -1 +0,0 @@\n", "-This is the file 'chi'.\n", ] + make_diff_header("omega", "revision 1", - "revision 2") + [ + "nonexistent") + [ "@@ -1 +0,0 @@\n", "-This is the file 'omega'.\n", ] + make_diff_header("psi", "revision 1", - "revision 2") + [ + "nonexistent") + [ "@@ -1 +0,0 @@\n", "-This is the file 'psi'.\n", ] # Files in diff may be in any order. 
expected_output = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-c2', sbox.repo_url + '/A/D/H') @@ -4031,44 +3862,44 @@ def diff_arbitrary_files_and_dirs(sbox): "-This is the file 'iota'.\n", "+This is the file 'mu'.\n" ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--old', sbox.ospath('iota'), '--new', sbox.ospath('A/mu')) # diff A/B/E with A/D - expected_output = make_diff_header("G/pi", "working copy", "working copy", + expected_output = make_diff_header("G/pi", "nonexistent", "working copy", "B/E", "D") + [ "@@ -0,0 +1 @@\n", "+This is the file 'pi'.\n" - ] + make_diff_header("G/rho", "working copy", + ] + make_diff_header("G/rho", "nonexistent", "working copy", "B/E", "D") + [ "@@ -0,0 +1 @@\n", "+This is the file 'rho'.\n" - ] + make_diff_header("G/tau", "working copy", + ] + make_diff_header("G/tau", "nonexistent", "working copy", "B/E", "D") + [ "@@ -0,0 +1 @@\n", "+This is the file 'tau'.\n" - ] + make_diff_header("H/chi", "working copy", + ] + make_diff_header("H/chi", "nonexistent", "working copy", "B/E", "D") + [ "@@ -0,0 +1 @@\n", "+This is the file 'chi'.\n" - ] + make_diff_header("H/omega", "working copy", + ] + make_diff_header("H/omega", "nonexistent", "working copy", "B/E", "D") + [ "@@ -0,0 +1 @@\n", "+This is the file 'omega'.\n" - ] + make_diff_header("H/psi", "working copy", + ] + make_diff_header("H/psi", "nonexistent", "working copy", "B/E", "D") + [ "@@ -0,0 +1 @@\n", "+This is the file 'psi'.\n" ] + make_diff_header("alpha", "working copy", - "working copy", "B/E", "D") + [ + "nonexistent", "B/E", "D") + [ "@@ -1 +0,0 @@\n", "-This is the file 'alpha'.\n" ] + make_diff_header("beta", "working copy", - "working copy", "B/E", "D") + [ + "nonexistent", "B/E", "D") + [ "@@ -1 +0,0 @@\n", "-This is the file 'beta'.\n" - ] + 
make_diff_header("gamma", "working copy", + ] + make_diff_header("gamma", "nonexistent", "working copy", "B/E", "D") + [ "@@ -0,0 +1 @@\n", "+This is the file 'gamma'.\n" @@ -4076,7 +3907,7 @@ def diff_arbitrary_files_and_dirs(sbox): # Files in diff may be in any order. expected_output = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--old', sbox.ospath('A/B/E'), '--new', sbox.ospath('A/D')) @@ -4108,20 +3939,20 @@ def diff_properties_only(sbox): sbox.simple_commit() # r2 - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--properties-only', '-r', '1:2', sbox.repo_url + '/iota') - svntest.actions.run_and_verify_svn(None, expected_reverse_output, [], + svntest.actions.run_and_verify_svn(expected_reverse_output, [], 'diff', '--properties-only', '-r', '2:1', sbox.repo_url + '/iota') os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, expected_rev1_output, [], + svntest.actions.run_and_verify_svn(expected_rev1_output, [], 'diff', '--properties-only', '-r', '1', 'iota') - svntest.actions.run_and_verify_svn(None, expected_rev1_output, [], + svntest.actions.run_and_verify_svn(expected_rev1_output, [], 'diff', '--properties-only', '-r', 'PREV', 'iota') @@ -4157,8 +3988,8 @@ def diff_properties_no_newline(sbox): make_diff_prop_modified(pname, old_val, new_val) sbox.simple_propset(pname, new_val, 'iota') - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff') - svntest.actions.run_and_verify_svn(None, None, [], 'revert', 'iota') + svntest.actions.run_and_verify_svn(expected_output, [], 'diff') + svntest.actions.run_and_verify_svn(None, [], 'revert', 'iota') os.chdir(old_cwd) @@ -4173,12 +4004,12 @@ def diff_arbitrary_same(sbox): sbox.simple_copy('A', 'A2') - svntest.actions.run_and_verify_svn(None, [], [], + 
svntest.actions.run_and_verify_svn([], [], 'diff', '--old', sbox.ospath('A'), '--new', sbox.ospath('A2')) - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'diff', '--summarize', '--old', sbox.ospath('A'), '--new', sbox.ospath('A2')) @@ -4216,7 +4047,7 @@ def simple_ancestry(sbox): line, ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', sbox.wc_dir, '-r', '1', '--notice-ancestry', @@ -4249,7 +4080,7 @@ def simple_ancestry(sbox): line, ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', sbox.wc_dir, '-r', 'HEAD', '--notice-ancestry', @@ -4287,7 +4118,7 @@ def simple_ancestry(sbox): line, ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', sbox.wc_dir, '-r', '1', '--notice-ancestry', @@ -4298,7 +4129,7 @@ def simple_ancestry(sbox): sbox.simple_commit() sbox.simple_update() - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', sbox.wc_dir, '-r', '1', '--notice-ancestry', @@ -4315,10 +4146,10 @@ def local_tree_replace(sbox): sbox.simple_add_text('extra', 'A/B/F/extra') sbox.simple_commit() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', sbox.ospath('A/B')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', sbox.ospath('A/B')) # And now check with ancestry @@ -4344,7 +4175,7 @@ def local_tree_replace(sbox): line, ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir, '-r', '2', '--notice-ancestry', @@ -4357,7 +4188,7 @@ def local_tree_replace(sbox): cwd = os.getcwd() os.chdir(wc_dir) - _, out, _ = 
svntest.actions.run_and_verify_svn(None, None, [], + _, out, _ = svntest.actions.run_and_verify_svn(None, [], 'diff', '.', '-r', '2', '--notice-ancestry', @@ -4366,7 +4197,7 @@ def local_tree_replace(sbox): os.chdir(cwd) # And try to apply it - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) expected_output = svntest.verify.UnorderedOutput([ 'D %s\n' % sbox.ospath('A/B/F/extra'), @@ -4386,7 +4217,7 @@ def local_tree_replace(sbox): ]) # And this currently fails because the ordering is broken, but also # because it hits an issue in 'svn patch' - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'patch', patch, wc_dir) def diff_dir_replaced_by_file(sbox): @@ -4402,25 +4233,25 @@ def diff_dir_replaced_by_file(sbox): 'Index: %s\n' % sbox.path('A/B/E/alpha'), '===================================================================\n', '--- %s\t(revision 1)\n' % sbox.path('A/B/E/alpha'), - '+++ %s\t(working copy)\n' % sbox.path('A/B/E/alpha'), + '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/alpha'), '@@ -1 +0,0 @@\n', '-This is the file \'alpha\'.\n', 'Index: %s\n' % sbox.path('A/B/E/beta'), '===================================================================\n', '--- %s\t(revision 1)\n' % sbox.path('A/B/E/beta'), - '+++ %s\t(working copy)\n' % sbox.path('A/B/E/beta'), + '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/beta'), '@@ -1 +0,0 @@\n', '-This is the file \'beta\'.\n', 'Index: %s\n' % sbox.path('A/B/E'), '===================================================================\n', - '--- %s\t(revision 0)\n' % sbox.path('A/B/E'), + '--- %s\t(nonexistent)\n' % sbox.path('A/B/E'), '+++ %s\t(working copy)\n' % sbox.path('A/B/E'), '@@ -0,0 +1 @@\n', '+text\n', '\ No newline at end of file\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 
'diff', wc_dir) def diff_dir_replaced_by_dir(sbox): @@ -4440,24 +4271,24 @@ def diff_dir_replaced_by_dir(sbox): 'Index: %s\n' % sbox.path('A/B/E/alpha'), '===================================================================\n', '--- %s\t(revision 1)\n' % sbox.path('A/B/E/alpha'), - '+++ %s\t(working copy)\n' % sbox.path('A/B/E/alpha'), + '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/alpha'), '@@ -1 +0,0 @@\n', '-This is the file \'alpha\'.\n', 'Index: %s\n' % sbox.path('A/B/E/beta'), '===================================================================\n', '--- %s\t(revision 1)\n' % sbox.path('A/B/E/beta'), - '+++ %s\t(working copy)\n' % sbox.path('A/B/E/beta'), + '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/beta'), '@@ -1 +0,0 @@\n', '-This is the file \'beta\'.\n', 'Index: %s\n' % sbox.path('A/B/E/beta'), '===================================================================\n', - '--- %s\t(revision 0)\n' % sbox.path('A/B/E/beta'), + '--- %s\t(nonexistent)\n' % sbox.path('A/B/E/beta'), '+++ %s\t(working copy)\n' % sbox.path('A/B/E/beta'), '@@ -0,0 +1 @@\n', '+New beta\n', 'Index: %s\n' % sbox.path('A/B/E'), '===================================================================\n', - '--- %s\t(revision 0)\n' % sbox.path('A/B/E'), + '--- %s\t(nonexistent)\n' % sbox.path('A/B/E'), '+++ %s\t(working copy)\n' % sbox.path('A/B/E'), '\n', 'Property changes on: %s\n' % sbox.path('A/B/E'), @@ -4467,7 +4298,7 @@ def diff_dir_replaced_by_dir(sbox): '+b\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--notice-ancestry', wc_dir) # And summarized. 
Currently produces directory adds after their children @@ -4478,7 +4309,7 @@ def diff_dir_replaced_by_dir(sbox): 'A %s\n' % sbox.ospath('A/B/E'), 'A %s\n' % sbox.ospath('A/B/E/beta'), ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--summarize', wc_dir, '--notice-ancestry') @@ -4487,7 +4318,7 @@ def diff_dir_replaced_by_dir(sbox): 'Index: %s\n' % sbox.path('A/B/E/alpha'), '===================================================================\n', '--- %s\t(revision 1)\n' % sbox.path('A/B/E/alpha'), - '+++ %s\t(working copy)\n' % sbox.path('A/B/E/alpha'), + '+++ %s\t(nonexistent)\n' % sbox.path('A/B/E/alpha'), '@@ -1 +0,0 @@\n', '-This is the file \'alpha\'.\n', 'Index: %s\n' % sbox.path('A/B/E/beta'), @@ -4509,7 +4340,7 @@ def diff_dir_replaced_by_dir(sbox): '+b\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir) expected_output = [ @@ -4517,7 +4348,7 @@ def diff_dir_replaced_by_dir(sbox): 'M %s\n' % sbox.ospath('A/B/E/beta'), ' M %s\n' % sbox.ospath('A/B/E'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--summarize', wc_dir) @@ -4539,14 +4370,14 @@ def diff_repos_empty_file_addition(sbox): 'newfile' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output, - expected_status, None, sbox.wc_dir) + expected_status) # Now diff the revision that added the empty file. 
expected_output = [ 'Index: newfile\n', '===================================================================\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '-c', '2', sbox.repo_url) def diff_missing_tree_conflict_victim(sbox): @@ -4586,13 +4417,13 @@ def diff_missing_tree_conflict_victim(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, None, - False, '--ignore-ancestry', wc_dir) + [], False, False, + '--ignore-ancestry', wc_dir) # 'svn diff' should show no change for the working copy # This currently fails because svn errors out with a 'node not found' error expected_output = [ ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir) @Issue(4396) def diff_local_missing_obstruction(sbox): @@ -4608,7 +4439,7 @@ def diff_local_missing_obstruction(sbox): # Expect no output for missing and obstructed files expected_output = [ ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir) sbox.simple_propset('K', 'V', 'iota', 'A/mu') sbox.simple_append('IotA', 'Content') @@ -4638,13 +4469,13 @@ def diff_local_missing_obstruction(sbox): '+V\n', '\ No newline at end of property\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', wc_dir) # Create an external. This produces an error in 1.8.0. 
sbox.simple_propset('svn:externals', 'AA/BB ' + sbox.repo_url + '/A', '.') sbox.simple_update() - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'diff', wc_dir) @@ -4665,8 +4496,290 @@ def diff_move_inside_copy(sbox): sbox.simple_append(chi_moved, 'a new line') # Bug: Diffing the copied-along parent directory asserts - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'diff', sbox.ospath(h_path)) +@XFail() +@Issue(4464) +def diff_repo_wc_copies(sbox): + "diff repo to wc of a copy" + sbox.build() + wc_dir = sbox.wc_dir + iota_copy = sbox.ospath('iota_copy') + iota_url = sbox.repo_url + '/iota' + + sbox.simple_copy('iota', 'iota_copy') + expected_output = make_diff_header(iota_copy, "nonexistent", "working copy", + iota_url, iota_copy) + [ + "@@ -0,0 +1 @@\n", + "+This is the file 'iota'.\n" ] + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', + '--show-copies-as-adds', + iota_url, iota_copy) + +@Issue(4460) +def diff_repo_wc_file_props(sbox): + "diff repo to wc file target with props" + sbox.build() + iota = sbox.ospath('iota') + + # add a mime-type and a line to iota to test the binary check + sbox.simple_propset('svn:mime-type', 'text/plain', 'iota') + sbox.simple_append('iota','second line\n') + + # test that we get the line and the property add + expected_output = make_diff_header(iota, 'revision 1', 'working copy') + \ + [ '@@ -1 +1,2 @@\n', + " This is the file 'iota'.\n", + "+second line\n", ] + \ + make_diff_prop_header(iota) + \ + make_diff_prop_added('svn:mime-type', 'text/plain') + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', '-r1', iota) + + # reverse the diff, should get a property delete and line delete + expected_output = make_diff_header(iota, 'working copy', 'revision 1') + \ + [ '@@ -1,2 +1 @@\n', + " This is the file 'iota'.\n", + 
"-second line\n", ] + \ + make_diff_prop_header(iota) + \ + make_diff_prop_deleted('svn:mime-type', 'text/plain') + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', '--old', iota, + '--new', iota + '@1') + + # copy iota to test with --show-copies as adds + sbox.simple_copy('iota', 'iota_copy') + iota_copy = sbox.ospath('iota_copy') + + # test that we get all lines as added and the property added + # TODO: We only test that this test doesn't error out because of Issue #4464 + # if and when that issue is fixed this test should check output + svntest.actions.run_and_verify_svn(None, [], 'diff', + '--show-copies-as-adds', '-r1', iota_copy) + + # reverse the diff, should get all lines as a delete and no property + # TODO: We only test that this test doesn't error out because of Issue #4464 + # if and when that issue is fixed this test should check output + svntest.actions.run_and_verify_svn(None, [], 'diff', + '--show-copies-as-adds', + '--old', iota_copy, + '--new', iota + '@1') + + # revert and commit with the eol-style of LF and then update so + # that we can see a change on either windows or *nix. + sbox.simple_revert('iota', 'iota_copy') + sbox.simple_propset('svn:eol-style', 'LF', 'iota') + sbox.simple_commit() #r2 + sbox.simple_update() + + # now that we have a LF file on disk switch to CRLF + sbox.simple_propset('svn:eol-style', 'CRLF', 'iota') + + # test that not only the property but also the file changes + # i.e. that the line endings substitution works + if svntest.main.is_os_windows(): + # test suite normalizes crlf output into just lf on Windows. + # so we have to assume it worked because there is an add and + # remove line with the same content. Fortunately, it doesn't + # do this on *nix so we can be pretty sure that it works right. 
+ # TODO: Provide a way to handle this better + crlf = '\n' + else: + crlf = '\r\n' + expected_output = make_diff_header(iota, 'revision 1', 'working copy') + \ + [ '@@ -1 +1 @@\n', + "-This is the file 'iota'.\n", + "+This is the file 'iota'." + crlf ] + \ + make_diff_prop_header(iota) + \ + make_diff_prop_added('svn:eol-style', 'CRLF') + + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', '-r1', iota) + + +@Issue(4460) +def diff_repo_repo_added_file_mime_type(sbox): + "diff repo to repo added file with mime-type" + sbox.build() + wc_dir = sbox.wc_dir + newfile = sbox.ospath('newfile') + + # add a file with a mime-type + sbox.simple_append('newfile', "This is the file 'newfile'.\n") + sbox.simple_add('newfile') + sbox.simple_propset('svn:mime-type', 'text/plain', 'newfile') + sbox.simple_commit() # r2 + + # try to diff across the addition + expected_output = make_diff_header(newfile, 'nonexistent', 'revision 2') + \ + [ '@@ -0,0 +1 @@\n', + "+This is the file 'newfile'.\n" ] + \ + make_diff_prop_header(newfile) + \ + make_diff_prop_added('svn:mime-type', 'text/plain') + + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', + '-r1:2', newfile) + + # reverse the diff to diff across a deletion + expected_output = make_diff_header(newfile, 'revision 2', 'nonexistent') + \ + [ '@@ -1 +0,0 @@\n', + "-This is the file 'newfile'.\n", + '\n', + 'Property changes on: %s\n' % sbox.path('newfile'), + '__________________________________________________' + + '_________________\n', + 'Deleted: svn:mime-type\n', + '## -1 +0,0 ##\n', + '-text/plain\n', + '\ No newline at end of property\n'] + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', + '-r2:1', newfile) + +def diff_switched_file(sbox): + "diff a switched file against repository" + + sbox.build() + svntest.actions.run_and_verify_svn(None, [], 'switch', + sbox.repo_url + '/A/mu', + sbox.ospath('iota'), '--ignore-ancestry') + sbox.simple_append('iota', 'Mu????') + + # This diffs the 
file against its origin + expected_output = [ + 'Index: %s\n' % sbox.path('iota'), + '===================================================================\n', + '--- %s\t(.../A/mu)\t(revision 1)\n' % sbox.path('iota'), + '+++ %s\t(.../iota)\t(working copy)\n' % sbox.path('iota'), + '@@ -1 +1,2 @@\n', + ' This is the file \'mu\'.\n', + '+Mu????\n', + '\ No newline at end of file\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', '-r', '1', sbox.ospath('iota')) + + # And this undoes the switch for the diff + expected_output = [ + 'Index: %s\n' % sbox.path('iota'), + '===================================================================\n', + '--- %s\t(revision 1)\n' % sbox.path('iota'), + '+++ %s\t(working copy)\n' % sbox.path('iota'), + '@@ -1 +1,2 @@\n', + '-This is the file \'iota\'.\n', + '+This is the file \'mu\'.\n', + '+Mu????\n', + '\ No newline at end of file\n', + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', '-r', '1', sbox.ospath('')) + +def diff_parent_dir(sbox): + "diff parent directory" + + sbox.build() + wc_dir = sbox.wc_dir + + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, '-m', 'Q', + 'mkdir', 'A/ZZZ', + 'propset', 'A', 'B', 'A/ZZZ') + + was_cwd = os.getcwd() + os.chdir(os.path.join(wc_dir, 'A', 'B')) + try: + # This currently (1.8.9, 1.9.0 development) triggers an assertion failure + # as a non canonical relpath ".." 
is used as diff target + + expected_output = [ + 'Index: ../ZZZ\n', + '===================================================================\n', + '--- ../ZZZ (revision 2)\n', + '+++ ../ZZZ (nonexistent)\n', + '\n', + 'Property changes on: ../ZZZ\n', + '___________________________________________________________________\n', + 'Deleted: A\n', + '## -1 +0,0 ##\n', + '-B\n', + '\ No newline at end of property\n', + ] + + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', '-r', '2', '..') + + expected_output = [ + 'Index: ../../A/ZZZ\n', + '===================================================================\n', + '--- ../../A/ZZZ (revision 2)\n', + '+++ ../../A/ZZZ (nonexistent)\n', + '\n', + 'Property changes on: ../../A/ZZZ\n', + '___________________________________________________________________\n', + 'Deleted: A\n', + '## -1 +0,0 ##\n', + '-B\n', + '\ No newline at end of property\n', + ] + + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', '-r', '2', '../..') + finally: + os.chdir(was_cwd) + +def diff_deleted_in_move_against_repos(sbox): + "diff deleted in move against repository" + + sbox.build() + sbox.simple_move('A/B', 'BB') + sbox.simple_move('BB/E/alpha', 'BB/q') + sbox.simple_rm('BB/E/beta') + + svntest.actions.run_and_verify_svn(None, [], + 'mkdir', sbox.repo_url + '/BB/E', + '--parents', '-m', 'Create dir') + + # OK. Local diff + svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.wc_dir) + + # OK. Walks nodes locally from wc-root, notices ancestry + svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.wc_dir, '-r1', + '--notice-ancestry') + + # OK. Walks nodes locally from BB, notices ancestry + svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.wc_dir, '-r2', + '--notice-ancestry') + + # OK. Walks nodes locally from wc-root + svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.wc_dir, '-r1') + + # Assertion. Walks nodes locally from BB. 
+ svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.wc_dir, '-r2') + +def diff_replaced_moved(sbox): + "diff against a replaced moved node" + + sbox.build(read_only=True) + sbox.simple_move('A', 'AA') + sbox.simple_rm('AA/B') + sbox.simple_move('AA/D', 'AA/B') + + # Ok + svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.ospath('.'), '-r1') + + # Ok (rhuijben: Works through a hack assuming some BASE knowledge) + svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.ospath('AA'), '-r1') + + # Error (misses BASE node because the diff editor is driven incorrectly) + svntest.actions.run_and_verify_svn(None, [], + 'diff', sbox.ospath('AA/B'), '-r1') + # Regression test for the fix in r1619380. Prior to this (and in releases # 1.8.0 through 1.8.10) a local diff incorrectly showed a copied dir's # properties as added, whereas it should show only the changes against the @@ -4705,13 +4818,56 @@ def diff_local_copied_dir(sbox): '\ No newline at end of property\n', ] - svntest.actions.run_and_verify_svn(None, expected_output_C2, [], + svntest.actions.run_and_verify_svn(expected_output_C2, [], 'diff', 'C2') - svntest.actions.run_and_verify_svn(None, expected_output_C3, [], + svntest.actions.run_and_verify_svn(expected_output_C3, [], 'diff', 'C3') finally: os.chdir(was_cwd) + +def diff_summarize_ignore_properties(sbox): + "diff --summarize --ignore-properties" + + sbox.build() + wc_dir = sbox.wc_dir + + # Make a property change and a content change to 'iota' + sbox.simple_propset('svn:eol-style', 'native', 'iota') + svntest.main.file_append(sbox.ospath('iota'), 'new text') + + # Make a property change to 'A/mu' + sbox.simple_propset('svn:eol-style', 'native', 'A/mu') + + # Make a content change to 'A/B/lambda' + svntest.main.file_append(sbox.ospath('A/B/lambda'), 'new text') + + # Add a file. 
+ svntest.main.file_write(sbox.ospath('new'), 'new text') + sbox.simple_add('new') + + # Delete a file + sbox.simple_rm('A/B/E/alpha') + + expected_diff = svntest.wc.State(wc_dir, { + 'iota': Item(status='M '), + 'new': Item(status='A '), + 'A/B/lambda': Item(status='M '), + 'A/B/E/alpha': Item(status='D '), + }) + svntest.actions.run_and_verify_diff_summarize(expected_diff, + '--ignore-properties', + sbox.wc_dir) + + # test with --xml, too + paths = ['iota', 'new', 'A/B/lambda', 'A/B/E/alpha'] + items = ['modified', 'added', 'modified', 'deleted' ] + kinds = ['file','file', 'file', 'file'] + props = ['none', 'none', 'none', 'none'] + svntest.actions.run_and_verify_diff_summarize_xml( + [], wc_dir, paths, items, props, kinds, wc_dir, '--ignore-properties') + + ######################################################################## #Run the tests @@ -4794,7 +4950,15 @@ test_list = [ None, diff_missing_tree_conflict_victim, diff_local_missing_obstruction, diff_move_inside_copy, + diff_repo_wc_copies, + diff_repo_wc_file_props, + diff_repo_repo_added_file_mime_type, + diff_switched_file, + diff_parent_dir, + diff_deleted_in_move_against_repos, + diff_replaced_moved, diff_local_copied_dir, + diff_summarize_ignore_properties, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/entries-dump.c b/subversion/tests/cmdline/entries-dump.c index e4edc61..cfdb1e0 100644 --- a/subversion/tests/cmdline/entries-dump.c +++ b/subversion/tests/cmdline/entries-dump.c @@ -118,15 +118,15 @@ entries_dump(const char *dir_path, svn_wc_adm_access_t *related, apr_pool_t *poo SVN_ERR(svn_wc__read_entries_old(&entries, dir_abspath, pool, pool)); lockfile_path = svn_dirent_join_many(pool, dir_path, svn_wc_get_adm_dir(pool), - "lock", NULL); + "lock", SVN_VA_NULL); SVN_ERR(svn_io_check_path(lockfile_path, &kind, pool)); locked = (kind == svn_node_file); } for (hi = apr_hash_first(pool, entries); hi; hi = apr_hash_next(hi)) { - const char *key = svn__apr_hash_index_key(hi); - 
const svn_wc_entry_t *entry = svn__apr_hash_index_val(hi); + const char *key = apr_hash_this_key(hi); + const svn_wc_entry_t *entry = apr_hash_this_val(hi); SVN_ERR_ASSERT(strcmp(key, entry->name) == 0); @@ -230,7 +230,7 @@ directory_dump_old(struct directory_walk_baton *bt, scratch_pool, scratch_pool)); for (hi = apr_hash_first(scratch_pool, entries); hi; hi = apr_hash_next(hi)) { - const svn_wc_entry_t *entry = svn__apr_hash_index_val(hi); + const svn_wc_entry_t *entry = apr_hash_this_val(hi); const char *local_abspath; if (entry->deleted || entry->absent || entry->kind != svn_node_dir) @@ -287,6 +287,16 @@ tree_dump_dir(const char *local_abspath, if (kind != svn_node_dir) return SVN_NO_ERROR; + if (strcmp(local_abspath, bt->root_abspath) != 0) + { + svn_boolean_t is_wcroot; + SVN_ERR(svn_wc__db_is_wcroot(&is_wcroot, bt->wc_ctx->db, + local_abspath, scratch_pool)); + + if (is_wcroot) + return SVN_NO_ERROR; /* Report the stub, but not the data */ + } + /* If LOCAL_ABSPATH a child of or equal to ROOT_ABSPATH, then display a relative path starting with PREFIX_PATH. 
*/ path = svn_dirent_skip_ancestor(bt->root_abspath, local_abspath); @@ -304,19 +314,6 @@ tree_dump_dir(const char *local_abspath, } static svn_error_t * -tree_dump_txn(void *baton, svn_sqlite__db_t *db, apr_pool_t *scratch_pool) -{ - struct directory_walk_baton *bt = baton; - - SVN_ERR(svn_wc__internal_walk_children(bt->wc_ctx->db, bt->root_abspath, FALSE, - NULL, tree_dump_dir, bt, - svn_depth_infinity, - NULL, NULL, scratch_pool)); - - return SVN_NO_ERROR; -} - -static svn_error_t * tree_dump(const char *path, apr_pool_t *scratch_pool) { @@ -341,7 +338,12 @@ tree_dump(const char *path, SVN_ERR(svn_wc__db_temp_borrow_sdb(&sdb, bt.wc_ctx->db, bt.root_abspath, scratch_pool)); - SVN_ERR(svn_sqlite__with_lock(sdb, tree_dump_txn, &bt, scratch_pool)); + SVN_SQLITE__WITH_LOCK( + svn_wc__internal_walk_children(db, bt.root_abspath, FALSE, + NULL, tree_dump_dir, &bt, + svn_depth_infinity, + NULL, NULL, scratch_pool), + sdb); /* And close everything we've opened */ SVN_ERR(svn_wc_context_destroy(bt.wc_ctx)); diff --git a/subversion/tests/cmdline/entries_tests.py b/subversion/tests/cmdline/entries_tests.py index 92baa7e..0fcdc1a 100755 --- a/subversion/tests/cmdline/entries_tests.py +++ b/subversion/tests/cmdline/entries_tests.py @@ -85,11 +85,11 @@ def basic_entries(sbox): iota2_path = os.path.join(wc_dir, 'A', 'B', 'E', 'iota2') # Remove 'alpha'. When it is committed, it will be marked DELETED. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', alpha_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path) # Tweak 'beta' in order to bump its revision to ensure the replacement # gets the new revision (2), not the value from the parent (1). 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', beta_path) @@ -102,11 +102,11 @@ def basic_entries(sbox): expected_status.tweak('A/B/E/beta', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], alpha_path, beta_path) # bump 'G' and iota another revision (3) for later testing - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', G_path, iota_path) @@ -117,23 +117,23 @@ def basic_entries(sbox): expected_status.tweak('A/D/G', 'iota', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], G_path, iota_path) # Add a file over the DELETED 'alpha'. It should be schedule-add. open(alpha_path, 'w').write('New alpha contents\n') # Delete 'beta', then add a file over it. Should be schedule-replace. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', beta_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', beta_path) open(beta_path, 'w').write('New beta contents\n') # Plain old add. Should have revision == 0. 
open(added_path, 'w').write('Added file contents\n') - svntest.actions.run_and_verify_svn(None, None, [], 'add', + svntest.actions.run_and_verify_svn(None, [], 'add', alpha_path, beta_path, added_path) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', iota_path, iota2_path) entries = svntest.main.run_entriesdump(os.path.join(wc_dir, 'A', 'B', 'E')) @@ -153,7 +153,7 @@ def basic_entries(sbox): validate(entries['iota2'], schedule=SCHEDULE_ADD, revision=1, copied=True, copyfrom_rev=3) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', G_path, G2_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', G_path, G2_path) entries = svntest.main.run_entriesdump(G2_path) check_names(entries, 'pi', 'rho', 'tau') @@ -210,7 +210,7 @@ def deletion_details(sbox): # blast iota, then verify the now-deleted entry still contains much of # the same information. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', iota_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path) entries = svntest.main.run_entriesdump(wc_dir) check_names(entries, 'iota') validate(entries['iota'], revision=iota.revision, @@ -219,8 +219,8 @@ def deletion_details(sbox): # even deleted nodes have a URL validate(entries['iota'], url='%s/iota' % sbox.repo_url) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', D_path, D2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', D2_G_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', D_path, D2_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', D2_G_path) entries = svntest.main.run_entriesdump(D2_path) check_names(entries, 'gamma', 'G') @@ -244,8 +244,8 @@ def deletion_details(sbox): ### for now... this test case is done. 
just return return - svntest.actions.run_and_verify_svn(None, None, [], 'rm', E_path) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', H_path, E_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', E_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', H_path, E_path) entries = svntest.main.run_entriesdump(E_path) check_names(entries, 'chi', 'omega', 'psi', 'alpha', 'beta') diff --git a/subversion/tests/cmdline/export_tests.py b/subversion/tests/cmdline/export_tests.py index d108847..92d1d52 100755 --- a/subversion/tests/cmdline/export_tests.py +++ b/subversion/tests/cmdline/export_tests.py @@ -84,8 +84,7 @@ def export_nonexistent_url(sbox): svntest.main.safe_rmtree(sbox.wc_dir) export_target = os.path.join(sbox.wc_dir, 'nonexistent') nonexistent_url = sbox.repo_url + "/nonexistent" - svntest.actions.run_and_verify_svn("Error about nonexistent URL expected", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'export', nonexistent_url, export_target) def export_working_copy(sbox): @@ -189,8 +188,7 @@ def export_over_existing_dir(sbox): # the export operation to fail. 
os.mkdir(export_target) - svntest.actions.run_and_verify_svn("No error where one is expected", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'export', sbox.wc_dir, export_target) # As an extra precaution, make sure export_target doesn't have @@ -470,8 +468,7 @@ def export_nonexistent_file(sbox): export_target = sbox.add_wc_path('export') - svntest.actions.run_and_verify_svn("No error where one is expected", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'export', kappa_path, export_target) def export_unversioned_file(sbox): @@ -485,8 +482,7 @@ def export_unversioned_file(sbox): export_target = sbox.add_wc_path('export') - svntest.actions.run_and_verify_svn("No error where one is expected", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'export', kappa_path, export_target) def export_with_state_deleted(sbox): @@ -497,15 +493,14 @@ def export_with_state_deleted(sbox): # state deleted=true caused export to crash alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', alpha_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path) expected_output = svntest.wc.State(wc_dir, { 'A/B/E/alpha' : Item(verb='Deleting'), }) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.remove('A/B/E/alpha') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) export_target = sbox.add_wc_path('export') expected_output = svntest.wc.State(export_target, { @@ -557,7 +552,7 @@ def export_HEADplus1_fails(sbox): sbox.build(create_wc = False, read_only = True) - svntest.actions.run_and_verify_svn(None, None, '.*No such revision.*', + svntest.actions.run_and_verify_svn(None, '.*No such revision.*', 'export', sbox.repo_url, sbox.wc_dir, '-r', 38956) @@ 
-613,7 +608,7 @@ def export_file_overwrite_fails(sbox): # Run it for source local open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents) - svntest.actions.run_and_verify_svn(None, [], '.*exist.*', + svntest.actions.run_and_verify_svn([], '.*exist.*', 'export', iota_path, tmpdir) # Verify it failed @@ -624,7 +619,7 @@ def export_file_overwrite_fails(sbox): # Run it for source URL open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents) - svntest.actions.run_and_verify_svn(None, [], '.*exist.*', + svntest.actions.run_and_verify_svn([], '.*exist.*', 'export', iota_url, tmpdir) # Verify it failed @@ -736,11 +731,11 @@ def export_with_url_unsafe_characters(sbox): # Create the file with special name and commit it. svntest.main.file_write(url_unsafe_path, 'This is URL unsafe path file.') svntest.main.run_svn(None, 'add', url_unsafe_path + '@') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', 'log msg', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', '--quiet', wc_dir) # Export the file and verify it. 
- svntest.actions.run_and_verify_svn(None, None, [], 'export', + svntest.actions.run_and_verify_svn(None, [], 'export', url_unsafe_path_url, export_target + '@') if not os.path.exists(export_target): @@ -906,14 +901,14 @@ def export_file_overwrite_with_force(sbox): # Run it for WC export open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'export', '--force', iota_path, tmpdir) svntest.actions.verify_disk(tmpdir, expected_disk) # Run it for URL export open(os.path.join(tmpdir, 'iota'), 'w').write(not_iota_contents) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'export', '--force', iota_url, tmpdir) svntest.actions.verify_disk(tmpdir, expected_disk) @@ -956,7 +951,7 @@ def export_custom_keywords(sbox): export_file = os.path.join(export_target, 'alpha') os.remove(export_file) expected_output = ['A %s\n' % export_file, 'Export complete.\n'] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'export', '--force', sbox.repo_url + '/A/B/E/alpha', export_target) @@ -1020,18 +1015,18 @@ def export_file_external(sbox): @Issue(4427) def export_file_externals2(sbox): "exporting file externals" - + sbox.build() sbox.simple_mkdir('DIR', 'DIR2') - + sbox.simple_propset('svn:externals', '^/iota file', 'DIR') sbox.simple_propset('svn:externals', '^/DIR TheDir', 'DIR2') sbox.simple_commit() sbox.simple_update() - + tmp = sbox.add_wc_path('tmp') os.mkdir(tmp) - + expected_output = svntest.wc.State(tmp, { 'file' : Item(status='A '), }) @@ -1044,7 +1039,7 @@ def export_file_externals2(sbox): tmp, expected_output, expected_disk) - + expected_output = svntest.wc.State(tmp, { 'DIR/file' : Item(status='A '), }) @@ -1056,7 +1051,7 @@ def export_file_externals2(sbox): os.path.join(tmp, 
'DIR'), expected_output, expected_disk) - + expected_output = svntest.wc.State(tmp, { 'DIR2/TheDir/file' : Item(status='A '), }) diff --git a/subversion/tests/cmdline/externals_tests.py b/subversion/tests/cmdline/externals_tests.py index 36ccaa6..34f471f 100755 --- a/subversion/tests/cmdline/externals_tests.py +++ b/subversion/tests/cmdline/externals_tests.py @@ -115,7 +115,7 @@ def externals_test_setup(sbox): D_path = os.path.join(wc_init_dir, "A/D") # Create a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_init_dir) @@ -123,22 +123,22 @@ def externals_test_setup(sbox): # post-commit status checks. svntest.main.file_append(mu_path, "Added to mu in revision 2.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', '--quiet', wc_init_dir) svntest.main.file_append(pi_path, "Added to pi in revision 3.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', '--quiet', wc_init_dir) svntest.main.file_append(lambda_path, "Added to lambda in revision 4.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', '--quiet', wc_init_dir) svntest.main.file_append(omega_path, "Added to omega in revision 5.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', '--quiet', wc_init_dir) @@ -206,8 +206,7 @@ def externals_test_setup(sbox): svntest.actions.run_and_verify_commit(wc_init_dir, expected_output, - expected_status, - None, wc_init_dir) + expected_status) return external_url_for @@ -217,7 +216,7 @@ def change_external(path, new_val, commit=True): svntest.actions.set_prop('svn:externals', new_val, path) if commit: - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + 
svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', '--quiet', path) def change_external_expect_error(path, new_val, expected_err): @@ -269,7 +268,7 @@ def checkout_with_externals(sbox): repo_url = sbox.repo_url # Create a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -310,11 +309,11 @@ def update_receive_new_external(sbox): other_repo_url = repo_url + ".other" # Checkout two working copies. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, other_wc_dir) @@ -358,11 +357,11 @@ def update_lose_external(sbox): repo_url = sbox.repo_url # Checkout two working copies. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, other_wc_dir) @@ -433,11 +432,11 @@ def update_change_pristine_external(sbox): other_repo_url = repo_url + ".other" # Checkout two working copies. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, other_wc_dir) @@ -486,11 +485,11 @@ def update_change_modified_external(sbox): other_repo_url = repo_url + ".other" # Checkout two working copies. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, other_wc_dir) @@ -548,11 +547,11 @@ def update_receive_change_under_external(sbox): other_repo_url = repo_url + ".other" # Checkout two working copies. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', other_repo_url, other_wc_dir) @@ -567,8 +566,7 @@ def update_receive_change_under_external(sbox): expected_status.tweak('A/D/gamma', wc_rev=6) svntest.actions.run_and_verify_commit(other_wc_dir, expected_output, - expected_status, - None, other_wc_dir) + expected_status) # Now update the regular wc to see if we get the change. Note that # none of the module *properties* in this wc have been changed; only @@ -603,8 +601,7 @@ def update_receive_change_under_external(sbox): expected_status.tweak('A/D/G/rho', wc_rev=7) svntest.actions.run_and_verify_commit(other_wc_dir, expected_output, - expected_status, - None, other_wc_dir) + expected_status) expected_output = svntest.wc.State(sbox.ospath('A/C'), { 'exdir_G/rho' : Item(status='U '), @@ -629,7 +626,7 @@ def modify_and_update_receive_new_external(sbox): repo_url = sbox.repo_url # Checkout a working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -651,8 +648,7 @@ def modify_and_update_receive_new_external(sbox): # Once upon a time there was a core-dump here - svntest.actions.run_and_verify_svn("update failed", - svntest.verify.AnyOutput, [], 'up' ) + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'up' ) os.chdir(was_cwd) @@ -668,7 +664,7 @@ def 
disallow_dot_or_dotdot_directory_reference(sbox): repo_url = sbox.repo_url # Checkout a working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -739,7 +735,7 @@ def export_with_externals(sbox): repo_url = sbox.repo_url # Create a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'export', repo_url, wc_dir) @@ -786,11 +782,11 @@ def export_wc_with_externals(sbox): export_target = sbox.add_wc_path('export') # Create a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) # Export the working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'export', wc_dir, export_target) ### We should be able to check exactly the paths that externals_test_setup() @@ -815,7 +811,7 @@ def export_wc_with_externals(sbox): svntest.main.safe_rmtree(export_target) # Export it again, without externals. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'export', '--ignore-externals', wc_dir, export_target) probe_paths_missing(paths) @@ -830,12 +826,12 @@ def external_with_peg_and_op_revision(sbox): repo_url = sbox.repo_url # Checkout a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) # remove A/D/H in the other repo - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', external_url_for["A/D/exdir_A/H"], '-m', 'remove original A/D/H') @@ -875,7 +871,7 @@ def new_style_externals(sbox): repo_url = sbox.repo_url # Checkout a working copy. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -975,7 +971,7 @@ def old_style_externals_ignore_peg_reg(sbox): repo_url = sbox.repo_url # Checkout a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -998,8 +994,7 @@ def old_style_externals_ignore_peg_reg(sbox): expected_error = "|".join([".*Error handling externals definition.*", ".*URL .*/A/D/G@HEAD' .* doesn't exist.*", ]) - svntest.actions.run_and_verify_svn2("External '%s' used pegs" % ext.strip(), - None, + svntest.actions.run_and_verify_svn2(None, expected_error, 1, 'up', @@ -1016,13 +1011,12 @@ def cannot_move_or_remove_file_externals(sbox): repo_url = sbox.repo_url # Checkout a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) # Should not be able to delete the file external. - svntest.actions.run_and_verify_svn("Able to delete file external", - None, + svntest.actions.run_and_verify_svn(None, ".*Cannot remove the external at " ".*gamma.*; please .* " "the svn:externals .*", @@ -1030,8 +1024,7 @@ def cannot_move_or_remove_file_externals(sbox): sbox.ospath('A/B/gamma')) # Should not be able to move the file external. - svntest.actions.run_and_verify_svn("Able to move file external", - None, + svntest.actions.run_and_verify_svn(None, ".*Cannot move the external at " ".*gamma.*; please .*edit.*" "svn:externals.*", @@ -1042,7 +1035,7 @@ def cannot_move_or_remove_file_externals(sbox): # But the directory that contains it can be deleted. 
expected_status = svntest.actions.get_virginal_state(wc_dir, 6) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/B')) @@ -1103,8 +1096,7 @@ def cannot_move_or_remove_file_externals(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Bring the working copy up to date and check that the file the file # external is switched to still exists. @@ -1126,7 +1118,7 @@ def cant_place_file_external_into_dir_external(sbox): other_repo_url = repo_url + ".other" # Checkout a working copy. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -1138,7 +1130,7 @@ def cant_place_file_external_into_dir_external(sbox): # Bring the working copy up to date and check that the file the file # external is switched to still exists. - svntest.actions.run_and_verify_svn(None, None, 'svn: E205011: ' + + svntest.actions.run_and_verify_svn(None, 'svn: E205011: ' + 'Failure occurred.*definitions', 'up', wc_dir) @@ -1212,7 +1204,7 @@ def binary_file_externals(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create a file external on the binary file A/theta @@ -1252,8 +1244,7 @@ def binary_file_externals(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) #---------------------------------------------------------------------- @@ -1297,11 +1288,10 @@ def update_lose_file_external(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) # now remove the svn:external prop - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propdel', 'svn:externals', C) # commit the property change @@ -1313,7 +1303,7 @@ def 
update_lose_file_external(sbox): expected_status.tweak('A/C', wc_rev = 3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # try to actually get rid of the external via an update expected_output = svntest.wc.State(wc_dir, { @@ -1334,8 +1324,7 @@ def update_lose_file_external(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) probe_paths_missing([sbox.ospath('A/C/external')]) @@ -1370,15 +1359,15 @@ def switch_relative_external(sbox): }) svntest.actions.run_and_verify_update(wc_dir, expected_output, None, None) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', '--quiet', A_path, A_copy_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', '--quiet', wc_dir) # Okay. We now want to switch A to A_copy, which *should* cause # A/D/ext to point to the URL for A_copy/B (instead of A/B). - svntest.actions.run_and_verify_svn(None, None, [], 'sw', + svntest.actions.run_and_verify_svn(None, [], 'sw', A_copy_url, A_path) expected_infos = [ @@ -1415,13 +1404,13 @@ def export_sparse_wc_with_externals(sbox): # Create a working copy with depth=empty itself but children that are # depth=infinity. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', '--depth=empty', repo_url, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', *child_paths) # Export the working copy. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'export', wc_dir, export_target) # It failed with "'gamma' is not under version control" because the # depth-infinity children led it wrongly to try to process externals @@ -1482,8 +1471,7 @@ def relegate_external(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) #---------------------------------------------------------------------- @@ -1514,7 +1502,7 @@ def wc_repos_file_externals(sbox): # Commit the new file, creating revision 2. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create a file external on the file A/theta @@ -1550,8 +1538,7 @@ def wc_repos_file_externals(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) # Copy A/C to a new tag in the repos tag_url = repo_url + '/A/I' @@ -1584,10 +1571,10 @@ def wc_repos_file_externals(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) #---------------------------------------------------------------------- +@SkipUnless(svntest.main.server_has_mergeinfo) @Issue(3843) def merge_target_with_externals(sbox): "merge target with externals" @@ -1604,7 +1591,7 @@ def merge_target_with_externals(sbox): A_branch_path = sbox.ospath('A-branch') A_gamma_branch_path = sbox.ospath('A-branch/D/gamma') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -1614,12 +1601,12 @@ def merge_target_with_externals(sbox): change_external(sbox.ospath('A'), externals_prop) # Branch A@1 to A-branch and make a simple text change on the latter in r8. 
- svntest.actions.run_and_verify_svn(None, None, [], 'copy', A_path + '@1', + svntest.actions.run_and_verify_svn(None, [], 'copy', A_path + '@1', A_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'make a copy', wc_dir) svntest.main.file_write(A_gamma_branch_path, "The new gamma!\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'branch edit', wc_dir) expected_output = svntest.wc.State(wc_dir, { 'A/external' : Item(status='A '), @@ -1630,10 +1617,9 @@ def merge_target_with_externals(sbox): # Merge r8 from A-branch back to A. There should be explicit mergeinfo # only at the root of A; the externals should not get any. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c8', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c8', repo_url + '/A-branch', A_path) svntest.actions.run_and_verify_svn( - "Unexpected subtree mergeinfo created", ["Properties on '" + A_path + "':\n", " svn:mergeinfo\n", " /A-branch:8\n"], @@ -1664,8 +1650,7 @@ def update_modify_file_external(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) # Modify A/mu svntest.main.file_append(sbox.ospath('A/mu'), 'appended mu text') @@ -1675,9 +1660,7 @@ def update_modify_file_external(sbox): expected_status.tweak('A/mu', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Update to modify the file external, this asserts in update_editor.c expected_output = svntest.wc.State(wc_dir, { @@ -1694,8 +1677,7 @@ def update_modify_file_external(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) # Test for issue #2267 @Issue(2267) @@ -1709,7 +1691,7 @@ def update_external_on_locally_added_dir(sbox): other_repo_url = repo_url + ".other" # 
Checkout a working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -1786,13 +1768,13 @@ def switch_external_on_locally_added_dir(sbox): # Create a branch of A # Checkout a working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', A_path, A_copy_path, '-m', 'Create branch of A') # Checkout a working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', A_path, wc_dir) @@ -1816,7 +1798,7 @@ def switch_external_on_locally_added_dir(sbox): change_external(new_dir, new_externals_desc, commit=False) # Switch the working copy to the branch, see if we get the new item. - svntest.actions.run_and_verify_svn(None, None, [], 'sw', A_copy_path, wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'sw', A_copy_path, wc_dir) probe_paths_exist([sbox.ospath('foo/exdir_E')]) @@ -1834,8 +1816,7 @@ def file_external_in_sibling(sbox): sbox.simple_update() os.chdir(sbox.ospath("A")) - svntest.actions.run_and_verify_svn(None, - svntest.actions.expected_noop_update_output(2), + svntest.actions.run_and_verify_svn(svntest.actions.expected_noop_update_output(2), [], 'update') @Issue(3823) @@ -1894,7 +1875,7 @@ def exclude_externals(sbox): repo_url = sbox.repo_url # Checkout two working copies. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -1902,8 +1883,7 @@ def exclude_externals(sbox): # or register the file external as excluded (preferred behavior) svntest.actions.run_and_verify_update(sbox.ospath('A/B/gamma'), None, None, None, - '.*Cannot exclude.*', - None, None, None, None, False, + '.*Cannot exclude.*', False, '--set-depth', 'exclude', sbox.ospath('A/B/gamma')) @@ -1911,8 +1891,7 @@ def exclude_externals(sbox): # or register the directory external as excluded (preferred behavior) svntest.actions.run_and_verify_update(sbox.ospath('A/C/exdir_G'), None, None, None, - '.*Cannot exclude.*', - None, None, None, None, False, + '.*Cannot exclude.*', False, '--set-depth', 'exclude', sbox.ospath('A/C/exdir_G')) @@ -1969,8 +1948,8 @@ def exclude_externals(sbox): 'A/D/x/y/z/blah/F' : Item(status=' ', wc_rev='5'), }) svntest.actions.run_and_verify_update(wc_dir, - None, None, expected_status, None, - None, None, None, None, False, + None, None, expected_status, + [], False, '--set-depth', 'infinity', wc_dir) def file_externals_different_url(sbox): @@ -2013,7 +1992,7 @@ def file_externals_different_url(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, None, - expected_status, None) + expected_status) # Verify that all file external URLs are descendants of r1_url for e in ['r1-e-1', 'r1-e-2', 'r2-e-1', 'r2-e-2', 'rr-e-1']: @@ -2021,7 +2000,7 @@ def file_externals_different_url(sbox): os.path.join(sbox.wc_dir, e)) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'relocate', r1_url, r2_url, wc_dir) @@ -2031,7 +2010,7 @@ def file_externals_different_url(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, None, - expected_status, None) + expected_status) # Verify that all file external URLs are descendants of r2_url for e in ['r1-e-1', 'r1-e-2', 'r2-e-1', 'r2-e-2', 'rr-e-1']: @@ -2053,7 +2032,7 
@@ def file_external_in_unversioned(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, None, None) # At one point this failed with SVN_DEBUG wcng consistency checks enabled - svntest.actions.run_and_verify_svn(None, None, [], 'cleanup', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir) from svntest import verify, actions, main @@ -2076,12 +2055,12 @@ def copy_file_externals(sbox): # svn mkdir X expected_stdout = ['A ' + X + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'mkdir', X) + actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', X) # svn ps svn:externals "^/iota xiota" X expected_stdout = ["property 'svn:externals' set on '" + X + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'svn:externals', ''' ^/iota xiota ^/A/mu xmu @@ -2116,8 +2095,7 @@ def copy_file_externals(sbox): 'X' : Item(status=' ', wc_rev='2'), }) - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # svn up expected_output = svntest.wc.State(wc_dir, { @@ -2139,7 +2117,7 @@ def copy_file_externals(sbox): expected_status.tweak(wc_rev='2') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # have a commit on one of the files # echo mod >> X/xmu @@ -2153,7 +2131,7 @@ def copy_file_externals(sbox): expected_status.tweak('X/xmu', wc_rev='3') actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, X_xmu) + [], X_xmu) # svn up expected_output = svntest.wc.State(wc_dir, { @@ -2166,13 +2144,13 @@ def copy_file_externals(sbox): expected_status.tweak(wc_rev='3') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) 
# now perform the WC->WC copy # svn cp X X_copy expected_stdout = ['A ' + X_copy + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'cp', X, + actions.run_and_verify_svn2(expected_stdout, [], 0, 'cp', X, X_copy) # svn ci @@ -2184,8 +2162,7 @@ def copy_file_externals(sbox): 'X_copy' : Item(status=' ', wc_rev='4'), }) - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # verify disk state, also verifying props expected_disk.add({ @@ -2214,7 +2191,7 @@ def copy_file_externals(sbox): expected_status.tweak(wc_rev='4') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, True, wc_dir) + expected_status, check_props=True) def commit_include_externals(sbox): "commit --include-externals" @@ -2259,7 +2236,7 @@ def commit_include_externals(sbox): 'A ' + Z_zeta + '\n', ]) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'add', Z) + actions.run_and_verify_svn2(expected_stdout, [], 0, 'add', Z) # svn mkdir --parents Xpegged X/Y expected_stdout = verify.UnorderedOutput([ @@ -2268,7 +2245,7 @@ def commit_include_externals(sbox): 'A ' + X_Y + '\n', ]) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'mkdir', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', '--parents', Xpegged, X_Y) # svn ci @@ -2289,8 +2266,7 @@ def commit_include_externals(sbox): 'Xpegged' : Item(status=' ', wc_rev='2'), }) - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # svn up expected_output = svntest.wc.State(wc_dir, {}) @@ -2307,18 +2283,18 @@ def commit_include_externals(sbox): expected_status.tweak(wc_rev='2') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + 
expected_status) # svn ps svn:externals "^/Z xZ" A/D/H expected_stdout = ["property 'svn:externals' set on '" + A_D_H + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'svn:externals', '^/Z xZ', A_D_H) # svn ps svn:externals "^/iota@1 Xpegged/xiota" wc_dir expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'svn:externals', ''' ^/iota@1 Xpegged/xiota @@ -2394,8 +2370,7 @@ def commit_include_externals(sbox): expected_status.tweak('', 'A/D/H', wc_rev='3') - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # svn up expected_output = svntest.wc.State(wc_dir, { @@ -2464,7 +2439,7 @@ def commit_include_externals(sbox): expected_status.tweak('Xpegged/xiota', wc_rev='1') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # echo mod >> Xpegged/xE/alpha main.file_append(Xpegged_xE_alpha, 'mod\n') @@ -2495,22 +2470,21 @@ def commit_include_externals(sbox): # svn ci expected_output = svntest.wc.State(wc_dir, {}) - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # Expect no externals to be committed, because pegged # svn ci --include-externals Xpegged expected_output = svntest.wc.State(wc_dir, {}) actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, '--include-externals', Xpegged) + [], '--include-externals', Xpegged) # Expect no externals to be committed, because of depth # svn ci --depth=immediates --include-externals expected_output = svntest.wc.State(wc_dir, {}) 
actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, '--depth=immediates', '--include-externals', wc_dir) + [], '--depth=immediates', '--include-externals', wc_dir) # Expect only unpegged externals to be committed (those in X/) # svn ci --include-externals @@ -2529,7 +2503,7 @@ def commit_include_externals(sbox): expected_status.tweak('Xpegged/xE/alpha', status='M ') actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, '--include-externals', wc_dir) + [], '--include-externals', wc_dir) # svn up expected_output = svntest.wc.State(wc_dir, { @@ -2561,7 +2535,7 @@ def commit_include_externals(sbox): 'Xpegged/xE/beta', wc_rev=1) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # new mods to check more cases # echo mod >> X/xmu @@ -2590,7 +2564,7 @@ def commit_include_externals(sbox): expected_output = svntest.wc.State(wc_dir, {}) actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, '--include-externals', '--depth=empty', X) + [], '--include-externals', '--depth=empty', X) # Expect only file external xmu to be committed, because of depth # svn ci --include-externals --depth=files X @@ -2604,7 +2578,7 @@ def commit_include_externals(sbox): 'X/Y/xH/xZ/zeta', 'Xpegged/xE/alpha', status='M ') actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, '--include-externals', '--depth=files', X) + [], '--include-externals', '--depth=files', X) # svn status actions.run_and_verify_unquiet_status(wc_dir, expected_status) @@ -2633,7 +2607,7 @@ def commit_include_externals(sbox): expected_status.tweak('X/Y/xH/chi', status='M ') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # echo mod >> X/xG/pi main.file_append(X_xG_pi, 'mod\n') @@ -2654,7 +2628,7 @@ def 
commit_include_externals(sbox): status='M ') actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, X_Y_xlambda, X_xG) + [], X_Y_xlambda, X_xG) # svn status actions.run_and_verify_unquiet_status(wc_dir, expected_status) @@ -2693,7 +2667,7 @@ def include_immediate_dir_externals(sbox): # svn mkdir X expected_stdout = ['A ' + X + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'mkdir', X) + actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', X) # svn ci expected_output = svntest.wc.State(wc_dir, { @@ -2705,8 +2679,7 @@ def include_immediate_dir_externals(sbox): 'X' : Item(status=' ', wc_rev='2'), }) - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # svn up expected_output = svntest.wc.State(wc_dir, {}) @@ -2719,12 +2692,12 @@ def include_immediate_dir_externals(sbox): expected_status.tweak(wc_rev='2') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # svn ps svn:externals "^/A/B/E X/XE" wc_dir expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'svn:externals', '^/A/B/E X/XE', wc_dir) # svn ci @@ -2734,8 +2707,7 @@ def include_immediate_dir_externals(sbox): expected_status.tweak('', wc_rev='3') - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # svn up expected_output = svntest.wc.State(wc_dir, { @@ -2757,7 +2729,7 @@ def include_immediate_dir_externals(sbox): }) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) 
sbox.simple_propset('some', 'change', 'X/XE') @@ -2790,7 +2762,7 @@ def include_immediate_dir_externals(sbox): # # > actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, '--include-externals', '--depth=immediates', X) + [], '--include-externals', '--depth=immediates', X) @Issue(4085) @@ -2828,17 +2800,17 @@ def remap_file_external_with_prop_del(sbox): mu_path = sbox.ospath('A/mu') # Add a property to A/mu - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'propname', 'propval', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'New property on a file', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Add a new file external A/external pointing to ^/A/mu externals_prop = "^/A/mu external\n" change_external(A_path, externals_prop) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Change A/external to point to ^/iota externals_prop = "^/iota external\n" @@ -2847,7 +2819,7 @@ def remap_file_external_with_prop_del(sbox): # Now update to bring the new external down. # This previously segfaulted as described in # http://subversion.tigris.org/issues/show_bug.cgi?id=4093#desc1 - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Test for issue #4053 'svn:externals with explicit rev checks out HEAD' @@ -2884,13 +2856,12 @@ def dir_external_with_dash_r_only(sbox): expected_status = actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/B/E/alpha', wc_rev='2') - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # svn ps svn:externals ' -r1 ^/A/B/E E_ext' . 
expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'svn:externals', ' -r1 ^/A/B/E E_ext', wc_dir) # svn up @@ -2916,7 +2887,7 @@ def dir_external_with_dash_r_only(sbox): }) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # svn info E_ext/alpha expected_info = { 'Revision': '1' } @@ -2933,12 +2904,12 @@ def url_to_wc_copy_of_externals(sbox): repo_url = sbox.repo_url # Create an external A/C/external pointing to ^/A/D/G. - svntest.actions.run_and_verify_svn(None, None, [], 'ps', + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:externals', '^/A/D/G external', sbox.ospath('A/C')) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'create an external', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Copy ^/A/C to External-WC-to-URL-Copy. # @@ -2987,7 +2958,7 @@ def url_to_wc_copy_of_externals(sbox): "A " + external_root_path + "\n" ]) exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2( - "OUTPUT", expected_stdout, [], 0, 'copy', repo_url + '/A/C', + expected_stdout, [], 0, 'copy', repo_url + '/A/C', sbox.ospath('External-WC-to-URL-Copy')) @Issue(4227) @@ -3015,31 +2986,31 @@ def duplicate_targets(sbox): match_all=False) # svn ps svn:externals "^/A/B/E barf\n^/A/B/E barf" . - actions.run_and_verify_svn2('OUTPUT', [], expected_stderr, 1, 'ps', + actions.run_and_verify_svn2([], expected_stderr, 1, 'ps', 'svn:externals', '^/A/B/E barf\n^/A/B/E barf', wc_dir) # svn ps svn:externals "^/A/B/E barf\n^/A/D/G barf" . 
- actions.run_and_verify_svn2('OUTPUT', [], expected_stderr, 1, 'ps', + actions.run_and_verify_svn2([], expected_stderr, 1, 'ps', 'svn:externals', '^/A/B/E barf\n^/A/D/G barf', wc_dir) # svn ps svn:externals "^/A/B/E barf/.\n^/A/D/G ./barf" . - actions.run_and_verify_svn2('OUTPUT', [], expected_stderr, 1, 'ps', + actions.run_and_verify_svn2([], expected_stderr, 1, 'ps', 'svn:externals', '^/A/B/E barf/.\n^/A/D/G ./barf', wc_dir) # svn ps svn:externals "^/A/B/E ././barf\n^/A/D/G .//barf" . - actions.run_and_verify_svn2('OUTPUT', [], expected_stderr, 1, 'ps', + actions.run_and_verify_svn2([], expected_stderr, 1, 'ps', 'svn:externals', '^/A/B/E ././barf\n^/A/D/G .//barf', wc_dir) # svn pg svn:externals . - expected_stdout = [] + expected_stderr = '.*W200017: Property.*not found' - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'pg', + actions.run_and_verify_svn2([], expected_stderr, 1, 'pg', 'svn:externals', wc_dir) # svn ps svn:externals "^/A/B/E ok" . expected_stdout = ["property 'svn:externals' set on '" + wc_dir + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'svn:externals', '^/A/B/E ok', wc_dir) # svn pg svn:externals . 
@@ -3048,7 +3019,7 @@ def duplicate_targets(sbox): '\n' ]) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'pg', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'pg', 'svn:externals', wc_dir) @Issue(4225) @@ -3060,7 +3031,7 @@ def list_include_externals(sbox): wc_dir = sbox.wc_dir repo_url = sbox.repo_url - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -3083,10 +3054,10 @@ def list_include_externals(sbox): "gamma" + "\n"]) exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2( - "OUTPUT", expected_stdout, [], 0, 'ls', '--include-externals', B_path) + expected_stdout, [], 0, 'ls', '--include-externals', B_path) exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2( - "OUTPUT", expected_stdout, [], 0, 'ls', '--include-externals', B_url) + expected_stdout, [], 0, 'ls', '--include-externals', B_url) expected_stdout = verify.UnorderedOutput([ list_external_string("exdir_G", C_url)+ "\n", @@ -3099,10 +3070,10 @@ def list_include_externals(sbox): "psi" + "\n"]) exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2( - "OUTPUT", expected_stdout, [], 0, 'ls', '--include-externals', C_path) + expected_stdout, [], 0, 'ls', '--include-externals', C_path) exit_code, stdout, stderr = svntest.actions.run_and_verify_svn2( - "OUTPUT", expected_stdout, [], 0, 'ls', '--include-externals', C_url) + expected_stdout, [], 0, 'ls', '--include-externals', C_url) @Issue(4293) def move_with_file_externals(sbox): @@ -3120,7 +3091,7 @@ def move_with_file_externals(sbox): sbox.simple_commit() sbox.simple_update() -@Issue(4185) +@Issue(4185,4529) def pinned_externals(sbox): "pinned external" @@ -3133,13 +3104,14 @@ def pinned_externals(sbox): sbox.simple_mkdir('Z') sbox.simple_commit('') + repo_X_C = repo_url + '/X/C' repo_X_mu = repo_url + '/X/mu' expected_output = verify.RegexOutput( '^ 1 jrandom .* mu$' ) - svntest.actions.run_and_verify_svn(None, 
expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'list', repo_X_mu, '-v') # So, we copied A/mu to X/mu in r2, but its last changed revision is @@ -3152,14 +3124,10 @@ def pinned_externals(sbox): 'old-rev -r 1 ' + repo_X_mu + '\n' + repo_X_mu + ' new-plain\n' + '-r1 ' + repo_X_mu + ' new-rev\n' + - repo_X_mu + '@1 new-peg\n', + repo_X_mu + '@1 new-peg\n' + '-r1 ' + repo_X_C + ' new-dir-rev\n', 'Z') - expected_output = svntest.wc.State(wc_dir, { - 'A/D' : Item(status=' U'), - 'A/D/exdir_E/beta' : Item(status='A '), - 'A/D/exdir_E/alpha' : Item(status='A '), - }) expected_error = "svn: E205011: Failure.*externals" expected_disk = svntest.main.greek_state.copy() expected_disk.add({ @@ -3167,6 +3135,7 @@ def pinned_externals(sbox): 'Z/old-plain' : Item(contents="This is the file 'mu'.\n"), 'Z/new-plain' : Item(contents="This is the file 'mu'.\n"), 'Z/new-rev' : Item(contents="This is the file 'mu'.\n"), + 'Z/new-dir-rev' : Item(), # And verifying X 'X/D/H/psi' : Item(contents="This is the file 'psi'.\n"), @@ -3212,7 +3181,7 @@ def update_dir_external_shallow(sbox): }) svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None, None, - None, None, None, None, None, False, + [], False, '--set-depth=empty', sbox.ospath('A/B/E')) @@ -3226,7 +3195,7 @@ def update_dir_external_shallow(sbox): }) svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None, None, - None, None, None, None, None, False, + [], False, '--set-depth=infinity', sbox.ospath('A/B/E')) @@ -3249,7 +3218,7 @@ def switch_parent_relative_file_external(sbox): # Check out A/B_copy to a new working copy branch_wc = sbox.add_wc_path("branch") branch_url = sbox.repo_url + '/A_copy' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', branch_url, branch_wc) @@ -3259,14 +3228,161 @@ def switch_parent_relative_file_external(sbox): # Switch the branch working copy to the new branch URL new_branch_url = 
sbox.repo_url + '/A_copy2' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'switch', new_branch_url, branch_wc) # Bug: The branch working copy can no longer be updated. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', branch_wc) +@Issue(4420) +def file_external_unversioned_obstruction(sbox): + """file externals unversioned obstruction""" + + sbox.build() + wc_dir = sbox.wc_dir + + expected_output = verify.RegexOutput('r2 committed .*') + svntest.actions.run_and_verify_svnmucc(expected_output, [], + '-U', sbox.repo_url, '-m', 'r2: set external', + 'propset', 'svn:externals', '^/A/mu mu-ext', 'A') + + sbox.simple_append('A/mu-ext', 'unversioned obstruction') + + # Update reports a tree-conflict but status doesn't show any such + # conflict. I'm no sure whether this is correct. + expected_output = svntest.wc.State(wc_dir, { + 'A' : Item(status=' U'), + 'A/mu-ext' : Item(status=' ', treeconflict='A'), + }) + expected_disk = svntest.main.greek_state.copy() + expected_disk.add({ + 'A/mu-ext' : Item('unversioned obstruction'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'A/mu-ext' : Item(status='M ', wc_rev='2', switched='X'), + }) + svntest.actions.run_and_verify_update(wc_dir, + expected_output, expected_disk, + expected_status) + +@Issue(4001) +@XFail() +def file_external_versioned_obstruction(sbox): + """file externals versioned obstruction""" + + sbox.build() + wc_dir = sbox.wc_dir + + expected_output = verify.RegexOutput('r2 committed .*') + svntest.actions.run_and_verify_svnmucc(expected_output, [], + '-U', sbox.repo_url, '-m', 'r2: set external', + 'propset', 'svn:externals', '^/A/mu mu-ext', 'A') + + expected_output = svntest.wc.State(wc_dir, { + 'A' : Item(status=' U'), + 'A/mu-ext' : Item(status='A '), + }) + expected_disk = svntest.main.greek_state.copy() + expected_disk.add({ + 'A/mu-ext' : 
Item('This is the file \'mu\'.\n'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'A/mu-ext' : Item(status=' ', wc_rev='2', switched='X'), + }) + svntest.actions.run_and_verify_update(wc_dir, + expected_output, expected_disk, + expected_status) + + # Update skips adding the versioned node because of the file + # external obstruction then when the external is deleted the + # versioned node is missing from disk and wc.db. Not really sure + # what should happen, perhaps a not-present node? + expected_output = verify.RegexOutput('r3 committed .*') + svntest.actions.run_and_verify_svnmucc(expected_output, [], + '-U', sbox.repo_url, '-m', 'r3: copy file', + 'cp', 'head', 'A/mu', 'A/mu-ext', + 'propdel', 'svn:externals', 'A') + + expected_output = svntest.wc.State(wc_dir, { + 'A' : Item(status=' U'), + 'A/mu-ext' : Item(verb='Removed external', prev_verb='Skipped'), + }) + expected_disk.tweak('A/mu-ext', content='This is the file \'mu\'.\n') + expected_status.tweak(wc_rev=3) + expected_status.tweak('A/mu-ext', switched=None) + svntest.actions.run_and_verify_update(wc_dir, + expected_output, expected_disk, + expected_status) + +@Issue(4495) +def update_external_peg_rev(sbox): + "update external peg rev" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_rm('A/B/E/alpha') + sbox.simple_commit() + sbox.simple_update() + + sbox.simple_propset('svn:externals', '^/A/B/E@1 xE', 'A/B/F') + sbox.simple_commit() + + expected_output = svntest.wc.State(wc_dir, { + 'A/B/F/xE/alpha' : Item(status='A '), + 'A/B/F/xE/beta' : Item(status='A '), + }) + expected_disk = svntest.main.greek_state.copy() + expected_disk.remove('A/B/E/alpha') + expected_disk.add({ + 'A/B/F/xE' : Item(), + 'A/B/F/xE/alpha' : Item('This is the file \'alpha\'.\n'), + 'A/B/F/xE/beta' : Item('This is the file \'beta\'.\n'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 3) + expected_status.remove('A/B/E/alpha') + expected_status.add({ + 
'A/B/F/xE' : Item(status=' ', wc_rev='1', prev_status='X '), + 'A/B/F/xE/alpha' : Item(status=' ', wc_rev='1'), + 'A/B/F/xE/beta' : Item(status=' ', wc_rev='1'), + }) + svntest.actions.run_and_verify_update(wc_dir, + expected_output, + expected_disk, + expected_status) + + sbox.simple_propset('svn:externals', '^/A/B/E@2 xE', 'A/B/F') + sbox.simple_commit() + + expected_output = svntest.wc.State(wc_dir, { + 'A/B/F/xE/alpha' : Item(status='D '), + }) + expected_disk.remove('A/B/F/xE/alpha') + expected_status.remove('A/B/F/xE/alpha') + expected_status.tweak(wc_rev=4) + expected_status.tweak('A/B/F/xE', 'A/B/F/xE/beta', wc_rev=2) + svntest.actions.run_and_verify_update(wc_dir, + expected_output, + expected_disk, + expected_status) + + # XFAIL: EXTERNALS.def_revision and EXTERNALS.def_operational_revision + # are still r1 for 'A/B/F/xE' so status is not against the expected r2. + # No testsuite support for ood marker so examine status output manually. + expected_output = [ + "X %s\n" % sbox.ospath('A/B/F/xE'), + "Status against revision: 4\n", + "\n", + "Performing status on external item at '%s':\n" % sbox.ospath('A/B/F/xE'), + "Status against revision: 2\n", + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'status', '-u', sbox.wc_dir) + def update_deletes_file_external(sbox): "update deletes a file external" @@ -3278,18 +3394,17 @@ def update_deletes_file_external(sbox): sbox.simple_update() # Create a branch - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'create branch', sbox.repo_url + '/A', sbox.repo_url + '/A_copy') # Update the working copy - sbox.simple_commit() sbox.simple_update() # Remove the branch - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', 'remove branch', sbox.repo_url + '/A_copy') @@ -3297,7 +3412,7 @@ def update_deletes_file_external(sbox): # As of r1448345, this update fails: # E000002: Can't remove 
directory '.../A_copy/C': No such file or directory sbox.simple_update() - + @Issue(4519) def switch_relative_externals(sbox): @@ -3305,7 +3420,7 @@ def switch_relative_externals(sbox): sbox.build(create_wc=False) - svntest.actions.run_and_verify_svnmucc(None, None, [], + svntest.actions.run_and_verify_svnmucc(None, [], '-U', sbox.repo_url, '-m', 'Q', 'mkdir', 'branches', 'cp', '1', 'A', 'trunk', @@ -3319,21 +3434,926 @@ def switch_relative_externals(sbox): wc = sbox.add_wc_path('wc') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url + '/trunk', wc) # This forgets to update some externals data - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'switch', sbox.repo_url + '/branches/A', wc) # This upgrade makes the following update fail - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', wc) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc) +def copy_file_external_to_repo(sbox): + "explicitly copy file external to repo" + + sbox.build() + wc_dir = sbox.wc_dir + + change_external(sbox.ospath('A'), '^/A/mu ext') + sbox.simple_update() + + svntest.actions.run_and_verify_svn(None, [], 'cp', + '--message', 'external copy', + sbox.ospath('A/ext'), + sbox.repo_url + '/ext_copy') + + expected_output = svntest.wc.State(wc_dir, { + 'ext_copy' : Item(status='A '), + }) + expected_disk = svntest.main.greek_state.copy() + expected_disk.add({ + 'A/ext' : Item('This is the file \'mu\'.\n'), + 'ext_copy' : Item('This is the file \'mu\'.\n'), + }) + svntest.actions.run_and_verify_update(wc_dir, + expected_output, expected_disk, None) + +@Issue(4550) +def replace_tree_with_foreign_external(sbox): + "replace tree with foreign external" + + sbox.build() + wc_dir = sbox.wc_dir + repo_dir = sbox.repo_dir + + other_repo_dir, other_repo_url = 
sbox.add_repo_path('other') + svntest.main.copy_repos(repo_dir, other_repo_dir, 1) + + sbox.simple_propset('svn:externals', other_repo_url + '/A/B X', 'A') + sbox.simple_commit() + sbox.simple_propdel('svn:externals', 'A') + sbox.simple_mkdir('A/X') + sbox.simple_mkdir('A/X/E') + sbox.simple_commit() + sbox.simple_update() + + expected_output = svntest.wc.State(wc_dir, { + 'A/X' : Item(status='D '), + 'A' : Item(status=' U'), + 'A/X/lambda' : Item(status='A '), + 'A/X/E' : Item(status='A '), + 'A/X/E/alpha' : Item(status='A '), + 'A/X/E/beta' : Item(status='A '), + 'A/X/F' : Item(status='A '), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'A/X' : Item(status=' ', wc_rev=1, prev_status='X '), + 'A/X/E' : Item(status=' ', wc_rev=1, prev_status=' '), + 'A/X/E/alpha' : Item(status=' ', wc_rev=1), + 'A/X/E/beta' : Item(status=' ', wc_rev=1), + 'A/X/F' : Item(status=' ', wc_rev=1), + 'A/X/lambda' : Item(status=' ', wc_rev=1), + }) + svntest.actions.run_and_verify_update(wc_dir, + expected_output, None, expected_status, + [], True, + '-r', '2', wc_dir) + + +def verify_pinned_externals(sbox, external_url_for, base_path_or_url, + external_youngest_rev, other_external_youngest_rev): + "helper for pin-externals tests" + + expected_output = [ + '%s@%d gamma\n' % (external_url_for["A/B/gamma"], + external_youngest_rev), + '\n', + ] + if svntest.sandbox.is_url(base_path_or_url): + target = base_path_or_url + '/A_copy/B' + else: + target = sbox.ospath('A_copy/B') + svntest.actions.run_and_verify_svn(expected_output, [], + 'propget', 'svn:externals', + target) + expected_output = [ + 'exdir_G -r%d %s\n' % (other_external_youngest_rev, + external_url_for["A/C/exdir_G"]), + '%s exdir_H\n' % external_url_for["A/C/exdir_H"], + '\n', + ] + if svntest.sandbox.is_url(base_path_or_url): + target = base_path_or_url + '/A_copy/C' + else: + target = sbox.ospath('A_copy/C') + svntest.actions.run_and_verify_svn(expected_output, [], + 
'propget', 'svn:externals', + target) + expected_output = [ + '%s@%d exdir_A\n' % (external_url_for["A/D/exdir_A"], + other_external_youngest_rev), + '%s@%d exdir_A/G\n' % (external_url_for["A/D/exdir_A/G/"], + other_external_youngest_rev), + 'exdir_A/H -r1 %s\n' % external_url_for["A/D/exdir_A/H"], + '%s@%d x/y/z/blah\n' % (external_url_for["A/D/x/y/z/blah"], + other_external_youngest_rev), + '\n', + ] + if svntest.sandbox.is_url(base_path_or_url): + target = base_path_or_url + '/A_copy/D' + else: + target = sbox.ospath('A_copy/D') + svntest.actions.run_and_verify_svn(expected_output, [], + 'propget', 'svn:externals', + target) + + +def copy_pin_externals_repos_repos(sbox): + "svn copy --pin-externals repos->repos" + + external_url_for = externals_test_setup(sbox) + + repo_url = sbox.repo_url + repo_dir = sbox.repo_dir + other_repo_dir = repo_dir + ".other" + + external_youngest_rev = svntest.main.youngest(repo_dir) + other_external_youngest_rev = svntest.main.youngest(other_repo_dir) + + # Perform a repos->repos copy, pinning externals + svntest.actions.run_and_verify_svn(None, [], + 'copy', + repo_url + '/A', + repo_url + '/A_copy', + '-m', 'copy', + '--pin-externals') + verify_pinned_externals(sbox, external_url_for, repo_url, + external_youngest_rev, other_external_youngest_rev) + + +def copy_pin_externals_repos_wc(sbox): + "svn copy --pin-externals repos->wc" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + repo_dir = sbox.repo_dir + other_repo_dir = repo_dir + ".other" + + external_youngest_rev = svntest.main.youngest(repo_dir) + other_external_youngest_rev = svntest.main.youngest(other_repo_dir) + + # Create a working copy. 
+ svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + # Perform a repos->wc copy, pinning externals + external_youngest_rev = svntest.main.youngest(repo_dir) + svntest.actions.run_and_verify_svn(None, [], + 'copy', + repo_url + '/A', + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + verify_pinned_externals(sbox, external_url_for, wc_dir, + external_youngest_rev, other_external_youngest_rev) + + +def copy_pin_externals_wc_repos(sbox): + "svn copy --pin-externals wc->repos" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + repo_dir = sbox.repo_dir + other_repo_dir = repo_dir + ".other" + + external_youngest_rev = svntest.main.youngest(repo_dir) + other_external_youngest_rev = svntest.main.youngest(other_repo_dir) + + # Create a working copy. + svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + # Perform a wc->repos copy, pinning externals + svntest.actions.run_and_verify_svn(None, [], + 'copy', + os.path.join(wc_dir, 'A'), + repo_url + '/A_copy', + '-m', 'copy', + '--pin-externals') + verify_pinned_externals(sbox, external_url_for, repo_url, + external_youngest_rev, other_external_youngest_rev) + + +def copy_pin_externals_wc_wc(sbox): + "svn copy --pin-externals wc->wc" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + repo_dir = sbox.repo_dir + other_repo_dir = repo_dir + ".other" + + external_youngest_rev = svntest.main.youngest(repo_dir) + other_external_youngest_rev = svntest.main.youngest(other_repo_dir) + + # Create a working copy. 
+ svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + # Perform a wc->wc copy, pinning externals + svntest.actions.run_and_verify_svn(None, [], + 'copy', + os.path.join(wc_dir, 'A'), + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + verify_pinned_externals(sbox, external_url_for, wc_dir, + external_youngest_rev, other_external_youngest_rev) + + +def copy_pin_externals_moved_external(sbox): + "pin externals which were moved since last changed" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + repo_dir = sbox.repo_dir + other_repo_dir = repo_dir + ".other" + + external_youngest_rev = svntest.main.youngest(repo_dir) + other_external_youngest_rev = svntest.main.youngest(other_repo_dir) + + # Create a working copy. + svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + # Test behaviour for external URLs which were moved since + # their last-changed revision. + sbox.simple_move('A/D/gamma', 'A/D/gamma-moved') + sbox.simple_commit() + change_external(sbox.ospath('A/B'), '^/A/D/gamma-moved gamma', commit=True) + sbox.simple_update() + external_youngest_rev = svntest.main.youngest(repo_dir) + svntest.actions.run_and_verify_svn(None, [], + 'copy', + os.path.join(wc_dir, 'A'), + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + external_url_for["A/B/gamma"] = '^/A/D/gamma-moved' + verify_pinned_externals(sbox, external_url_for, wc_dir, + external_youngest_rev, other_external_youngest_rev) + + +def copy_pin_externals_removed_in_head(sbox): + "already pinned external which was removed in HEAD" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + repo_dir = sbox.repo_dir + other_repo_url = repo_url + ".other" + other_repo_dir = repo_dir + ".other" + + # Create a working copy. 
+ svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + # Test an already pinned external which was removed in HEAD. + svntest.actions.run_and_verify_svn(None, [], + 'rm', + other_repo_url + '/A/D/H', + '-m', 'remove A/D/H') + sbox.simple_update() + external_youngest_rev = svntest.main.youngest(repo_dir) + other_external_youngest_rev = svntest.main.youngest(other_repo_dir) + svntest.actions.run_and_verify_svn(None, [], + 'copy', + os.path.join(wc_dir, 'A'), + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + verify_pinned_externals(sbox, external_url_for, wc_dir, + external_youngest_rev, other_external_youngest_rev) + + +def copy_pin_externals_from_old_rev(sbox): + "copy from an old revision with pinning" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + repo_dir = sbox.repo_dir + other_repo_url = repo_url + ".other" + other_repo_dir = repo_dir + ".other" + + # Create a working copy. + svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + # Create a couple of revisions affecting 'A'. + for i in range(5): + svntest.main.file_append(sbox.ospath('A/mu'), 'a new line') + sbox.simple_commit() + sbox.simple_update() + + # Test a copy from an old revision with pinning. + external_youngest_rev = svntest.main.youngest(repo_dir) + other_external_youngest_rev = svntest.main.youngest(other_repo_dir) + svntest.actions.run_and_verify_svn(None, [], + 'copy', + os.path.join(wc_dir, 'A@6'), + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + external_url_for["A/B/gamma"] = '^/A/D/gamma' + verify_pinned_externals(sbox, external_url_for, wc_dir, + external_youngest_rev, other_external_youngest_rev) + + +def copy_pin_externals_wc_local_mods(sbox): + "cannot pin WC externals with local mods" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + + # Create a working copy. 
+ svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + svntest.main.file_append(sbox.ospath('A/C/exdir_G/pi'), 'this file changed') + expected_stderr = verify.RegexOutput(".*Cannot pin.*local modifications.*", + match_all=False) + svntest.actions.run_and_verify_svn(None, expected_stderr, + 'copy', + os.path.join(wc_dir, 'A'), + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + + +def copy_pin_externals_wc_switched_subtrees(sbox): + "cannot pin WC externals with switched subtrees" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + + # Create a working copy. + svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + svntest.actions.run_and_verify_svn(None, [], + 'switch', '--ignore-ancestry', '^/A/B', + sbox.ospath('A/D/exdir_A/C')) + expected_stderr = verify.RegexOutput(".*Cannot pin.*switched subtree.*", + match_all=False) + svntest.actions.run_and_verify_svn(None, expected_stderr, + 'copy', + os.path.join(wc_dir, 'A'), + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + + +def copy_pin_externals_wc_mixed_revisions(sbox): + "cannot pin WC externals with mixed revisions" + + external_url_for = externals_test_setup(sbox) + + wc_dir = sbox.wc_dir + repo_url = sbox.repo_url + + # Create a working copy. 
+ svntest.actions.run_and_verify_svn(None, [], + 'checkout', + repo_url, wc_dir) + + svntest.actions.run_and_verify_svn(None, [], + 'update', '-r1', + sbox.ospath('A/D/exdir_A/mu')) + expected_stderr = verify.RegexOutput(".*Cannot pin.*mixed-revision.*", + match_all=False) + svntest.actions.run_and_verify_svn(None, expected_stderr, + 'copy', + os.path.join(wc_dir, 'A'), + os.path.join(wc_dir, 'A_copy'), + '--pin-externals') + +@Issue(4558) +def copy_pin_externals_whitepace_dir(sbox): + "copy --pin-externals with whitepace dir" + + sbox.build(empty=True) + repo_url = sbox.repo_url + wc_dir = sbox.wc_dir + ss_path = repo_url[repo_url.find('//'):] + + extdef = sbox.get_tempname('extdef') + info = sbox.get_tempname('info') + + open(extdef, 'w').write( + '"' + ss_path +'/deps/sqlite" ext/sqlite\n' + + '"^/deps/A P R" \'ext/A P R\'\n' + + '^/deps/B\ D\ B\' ext/B\ D\ B\'\n' + + repo_url + '/deps/wors%23+t ext/wors#+t') + open(info, 'w').write('info\n') + + svntest.actions.run_and_verify_svnmucc(None, [], '-U', repo_url, + 'mkdir', 'trunk', + 'mkdir', 'branches', + 'mkdir', 'deps', + 'mkdir', 'deps/sqlite', + 'put', info, 'deps/sqlite/readme', + 'mkdir', 'deps/A P R', + 'put', info, 'deps/A P R/about', + 'mkdir', 'deps/B D B\'', + 'put', info, 'deps/B D B\'/copying', + 'mkdir', 'deps/wors#+t', + 'put', info, 'deps/wors#+t/brood', + 'propsetf', 'svn:externals', extdef, + 'trunk', + '-mm' + ) + + svntest.actions.run_and_verify_svn(None, [], 'update', sbox.ospath('trunk'), + '--ignore-externals') + sbox.simple_update('branches') + + expected_status = svntest.wc.State(wc_dir, { + '' : Item(status=' ', wc_rev='0'), + 'trunk' : Item(status=' ', wc_rev='1'), + 'branches' : Item(status=' ', wc_rev='1'), + }) + + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + trunk_url = repo_url + '/trunk' + branches_url = repo_url + '/branches' + trunk_wc = sbox.ospath('trunk') + + # Create a new revision to creat interesting pinning revisions + sbox.simple_propset('A', 'B', 
'trunk') + sbox.simple_commit('trunk') + + # And let's copy/pin + svntest.actions.run_and_verify_svn(None, [], + 'copy', '--pin-externals', + trunk_url, branches_url + '/url-url', '-mm') + + svntest.actions.run_and_verify_svn(None, [], + 'copy', '--pin-externals', + trunk_url, sbox.ospath('branches/url-wc')) + sbox.simple_commit('branches/url-wc') + + # Now try to copy without externals in the WC + expected_err = '.*E155035: Cannot pin external.*' + svntest.actions.run_and_verify_svn(None, expected_err, + 'copy', '--pin-externals', + trunk_wc, branches_url + '/wc-url', '-mm') + + svntest.actions.run_and_verify_svn(None, expected_err, + 'copy', '--pin-externals', + trunk_wc, sbox.ospath('branches/wc-wc')) + + # Bring in the externals on trunk + svntest.actions.run_and_verify_svn(None, [], 'update', sbox.ospath('trunk')) + expected_status = svntest.wc.State(wc_dir, { + 'trunk' : Item(status=' ', wc_rev='4'), + 'trunk/ext' : Item(status='X '), + 'trunk/ext/sqlite' : Item(status=' ', wc_rev='4'), + 'trunk/ext/sqlite/readme' : Item(status=' ', wc_rev='4'), + 'trunk/ext/A P R' : Item(status=' ', wc_rev='4'), + 'trunk/ext/A P R/about' : Item(status=' ', wc_rev='4'), + 'trunk/ext/B D B\'' : Item(status=' ', wc_rev='4'), + 'trunk/ext/B D B\'/copying' : Item(status=' ', wc_rev='4'), + 'trunk/ext/wors#+t' : Item(status=' ', wc_rev='4'), + 'trunk/ext/wors#+t/brood' : Item(status=' ', wc_rev='4'), + }) + svntest.actions.run_and_verify_status(sbox.ospath('trunk'), expected_status) + + # And copy again + svntest.actions.run_and_verify_svn(None, [], + 'copy', '--pin-externals', + trunk_wc, branches_url + '/wc-url', '-mm') + + svntest.actions.run_and_verify_svn(None, [], + 'copy', '--pin-externals', + trunk_wc, sbox.ospath('branches/wc-wc')) + sbox.simple_commit('branches/wc-wc') + + + expected_output = svntest.wc.State(wc_dir, { + 'branches/url-url' : Item(status='A '), + 'branches/url-url/ext/A P R/about' : Item(status='A '), + 'branches/url-url/ext/B D B\'/copying' : 
Item(status='A '), + 'branches/url-url/ext/wors#+t/brood' : Item(status='A '), + 'branches/url-url/ext/sqlite/readme' : Item(status='A '), + + # url-wc is already up to date + + 'branches/wc-url' : Item(status='A '), + 'branches/wc-url/ext/wors#+t/brood' : Item(status='A '), + 'branches/wc-url/ext/sqlite/readme' : Item(status='A '), + 'branches/wc-url/ext/B D B\'/copying' : Item(status='A '), + 'branches/wc-url/ext/A P R/about' : Item(status='A '), + + ## branches/wc-wc should checkout its externals here + }) + expected_status = svntest.wc.State(wc_dir, { + 'branches' : Item(status=' ', wc_rev='6'), + + 'branches/url-url' : Item(status=' ', wc_rev='6'), + 'branches/url-url/ext' : Item(status='X '), + 'branches/url-url/ext/A P R' : Item(status=' ', wc_rev='2'), + 'branches/url-url/ext/A P R/about' : Item(status=' ', wc_rev='2'), + 'branches/url-url/ext/sqlite' : Item(status=' ', wc_rev='2'), + 'branches/url-url/ext/sqlite/readme' : Item(status=' ', wc_rev='2'), + 'branches/url-url/ext/wors#+t' : Item(status=' ', wc_rev='2'), + 'branches/url-url/ext/wors#+t/brood' : Item(status=' ', wc_rev='2'), + 'branches/url-url/ext/B D B\'' : Item(status=' ', wc_rev='2'), + 'branches/url-url/ext/B D B\'/copying' : Item(status=' ', wc_rev='2'), + + 'branches/url-wc' : Item(status=' ', wc_rev='6'), + 'branches/url-wc/ext' : Item(status='X '), + 'branches/url-wc/ext/wors#+t' : Item(status=' ', wc_rev='3'), + 'branches/url-wc/ext/wors#+t/brood' : Item(status=' ', wc_rev='3'), + 'branches/url-wc/ext/B D B\'' : Item(status=' ', wc_rev='3'), + 'branches/url-wc/ext/B D B\'/copying' : Item(status=' ', wc_rev='3'), + 'branches/url-wc/ext/sqlite' : Item(status=' ', wc_rev='3'), + 'branches/url-wc/ext/sqlite/readme' : Item(status=' ', wc_rev='3'), + 'branches/url-wc/ext/A P R' : Item(status=' ', wc_rev='3'), + 'branches/url-wc/ext/A P R/about' : Item(status=' ', wc_rev='3'), + + 'branches/wc-url' : Item(status=' ', wc_rev='6'), + 'branches/wc-url/ext' : Item(status='X '), + 
'branches/wc-url/ext/wors#+t' : Item(status=' ', wc_rev='4'), + 'branches/wc-url/ext/wors#+t/brood' : Item(status=' ', wc_rev='4'), + 'branches/wc-url/ext/sqlite' : Item(status=' ', wc_rev='4'), + 'branches/wc-url/ext/sqlite/readme' : Item(status=' ', wc_rev='4'), + 'branches/wc-url/ext/B D B\'' : Item(status=' ', wc_rev='4'), + 'branches/wc-url/ext/B D B\'/copying' : Item(status=' ', wc_rev='4'), + 'branches/wc-url/ext/A P R' : Item(status=' ', wc_rev='4'), + 'branches/wc-url/ext/A P R/about' : Item(status=' ', wc_rev='4'), + + 'branches/wc-wc' : Item(status=' ', wc_rev='6'), + 'branches/wc-wc/ext' : Item(status='X '), + 'branches/wc-wc/ext/wors#+t' : Item(status=' ', wc_rev='4'), + 'branches/wc-wc/ext/wors#+t/brood' : Item(status=' ', wc_rev='4'), + 'branches/wc-wc/ext/sqlite' : Item(status=' ', wc_rev='4'), + 'branches/wc-wc/ext/sqlite/readme' : Item(status=' ', wc_rev='4'), + 'branches/wc-wc/ext/B D B\'' : Item(status=' ', wc_rev='4'), + 'branches/wc-wc/ext/B D B\'/copying' : Item(status=' ', wc_rev='4'), + 'branches/wc-wc/ext/A P R' : Item(status=' ', wc_rev='4'), + 'branches/wc-wc/ext/A P R/about' : Item(status=' ', wc_rev='4'), + }) + svntest.actions.run_and_verify_update(wc_dir + '/branches', expected_output, + None, expected_status) + + # Now let's use our existing setup to perform some copies with dynamic + # destinations + svntest.actions.run_and_verify_svn(None, [], + 'copy', '--parents', '--pin-externals', + repo_url + '/branches/wc-url', + repo_url + '/branches/url-url', + trunk_url, + branches_url + '/3x-url-url', + '-mm') + + svntest.actions.run_and_verify_svn(None, [], + 'copy', '--parents', '--pin-externals', + repo_url + '/branches/wc-url', + repo_url + '/branches/url-url', + trunk_url, + sbox.ospath('branches/3x-url-wc')) + + svntest.actions.run_and_verify_svn(None, [], + 'copy', '--parents', '--pin-externals', + sbox.ospath('branches/wc-url'), + sbox.ospath('branches/url-url'), + sbox.ospath('trunk'), + branches_url + '/3x-wc-url', + '-mm') + + 
svntest.actions.run_and_verify_svn(None, [], + 'copy', '--parents', '--pin-externals', + sbox.ospath('branches/wc-url'), + sbox.ospath('branches/url-url'), + sbox.ospath('trunk'), + sbox.ospath('branches/3x-wc-wc')) + +def nested_notification(sbox): + "notification for nested externals" + + sbox.build() + wc_dir = sbox.wc_dir + repo_dir = sbox.repo_dir + + sbox.simple_mkdir('D1') + sbox.simple_mkdir('D2') + sbox.simple_mkdir('D3') + sbox.simple_mkdir('D4') + sbox.simple_propset('svn:externals', '^/D2 X', 'D1') + sbox.simple_propset('svn:externals', '^/D3 X', 'D2') + sbox.simple_propset('svn:externals', '^/D4 X', 'D3') + sbox.simple_commit() + expected_output = [ + 'Updating \'' + sbox.ospath('D1') + '\':\n', + '\n', + 'Fetching external item into \'' + sbox.ospath('D1/X') + '\':\n', + ' U ' + sbox.ospath('D1/X') + '\n', + '\n', + 'Fetching external item into \'' + sbox.ospath('D1/X/X') + '\':\n', + ' U ' + sbox.ospath('D1/X/X') + '\n', + '\n', + 'Fetching external item into \'' + sbox.ospath('D1/X/X/X') + '\':\n', + 'Updated external to revision 2.\n', + '\n', + 'External at revision 2.\n', + '\n', + 'External at revision 2.\n', + '\n', + 'At revision 2.\n' + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'update', sbox.ospath('D1')) + +def file_external_to_normal_file(sbox): + "change a file external to a normal file" + + sbox.build() + wc_dir = sbox.wc_dir + sbox.simple_propset('svn:externals', '^/iota iota', 'A') + sbox.simple_commit() + + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'A/iota' : Item(status=' ', wc_rev='2', switched='X'), + }) + expected_output = svntest.wc.State(wc_dir, { + 'A/iota' : Item(status='A '), + }) + + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status) + + # Create second working copy in this state + sbox2 = sbox.clone_dependent(copy_wc=True) + + sbox.simple_propdel('svn:externals', 'A') + + expected_output = svntest.wc.State(wc_dir, { + 
'A/iota' : Item(verb='Removed external'), + }) + expected_status.remove('A/iota') + expected_status.tweak('A', status=' M') + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status) + + sbox.simple_copy('iota', 'A/iota') + sbox.simple_commit() + + expected_output = svntest.wc.State(wc_dir, { + }) + expected_status.tweak(wc_rev=3) + expected_status.tweak('A', status=' ') + expected_status.add({ + # This case used to triggered a switched status in 1.8.x before this + # test (and the fix for this problem) where added. + 'A/iota' : Item(status=' ', wc_rev='3'), + }) + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status) + + + wc_dir = sbox2.wc_dir + + expected_status = svntest.actions.get_virginal_state(wc_dir, 3) + expected_output = svntest.wc.State(wc_dir, { + 'A' : Item(status=' U'), + 'A/iota' : Item(verb='Removed external', prev_verb='Skipped'), + }) + # This reports an obstruction and removes the file external + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status) + + expected_status.add({ + 'A/iota' : Item(status=' ', wc_rev='3'), + }) + expected_output = svntest.wc.State(wc_dir, { + 'A/iota' : Item(status='A '), + }) + # This should bring in the new file + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status) + +@Issue(4580) +def file_external_recorded_info(sbox): + "check file external recorded info" + + sbox.build() + wc_dir = sbox.wc_dir + + # r2 - Create file external + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, + '-m', '', + 'propset', 'svn:externals', + '^/iota i', '') + + expected_output = svntest.wc.State(wc_dir, { + '' : Item(status=' U'), + 'i' : Item(status='A '), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'i' : Item(status=' ', wc_rev='2', switched='X') + }) + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + 
expected_status, [], False, + '-r', 2, wc_dir) + + expected_infos = [{ + 'Path': re.escape(sbox.ospath('i')), + 'Relative URL': re.escape('^/iota'), + 'Revision': '2', + 'Last Changed Rev': '1', + 'Last Changed Author': 'jrandom' + }] + svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i')) + + # r3 - No-op change + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, + '-m', '', + 'cp', '1', 'iota', 'iotb') + + expected_output = svntest.wc.State(wc_dir, { + 'iotb' : Item(status='A '), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 3) + expected_status.add({ + 'i' : Item(status=' ', wc_rev='3', switched='X'), + 'iotb' : Item(status=' ', wc_rev='3') + }) + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status, [], False, + '-r', 3, wc_dir) + + expected_infos = [{ + 'Path': re.escape(sbox.ospath('i')), + 'Relative URL': re.escape('^/iota'), + 'Revision': '3', + 'Last Changed Rev': '1', + 'Last Changed Author': 'jrandom' + }] + svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i')) + + # r4 - Update url + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, + '-m', '', + 'propset', 'svn:externals', + '^/iotb i', '') + + + expected_output = svntest.wc.State(wc_dir, { + '' : Item(status=' U'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 4) + expected_status.add({ + 'i' : Item(status=' ', wc_rev='4', switched='X'), + 'iotb' : Item(status=' ', wc_rev='4') + }) + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status, [], False, + '-r', 4, wc_dir) + + expected_infos = [{ + 'Path': re.escape(sbox.ospath('i')), + 'Relative URL': re.escape('^/iotb'), + 'Revision': '4', + 'Last Changed Rev': '3', + 'Last Changed Author': 'jrandom' + }] + svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i')) + + # r5 - Replace file + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, 
+ '-m', '', + 'rm', 'iotb', + 'cp', '3', 'A/mu', 'iotb') + + expected_output = svntest.wc.State(wc_dir, { + 'i' : Item(status='U '), + 'iotb' : Item(status='A ', prev_status='D '), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 5) + expected_status.add({ + 'i' : Item(status=' ', wc_rev='5', switched='X'), + 'iotb' : Item(status=' ', wc_rev='5') + }) + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status, [], False, + '-r', 5, wc_dir) + + expected_infos = [{ + 'Path': re.escape(sbox.ospath('i')), + 'Relative URL': re.escape('^/iotb'), + 'Revision': '5', + 'Last Changed Rev': '5', + 'Last Changed Author': 'jrandom' + }] + svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i')) + + # Back to r2. But with a conflict + sbox.simple_append('i', 'i') + expected_output = svntest.wc.State(wc_dir, { + '' : Item(status=' U'), + 'iotb' : Item(status='D '), + 'i' : Item(status='C '), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'i' : Item(status='C ', wc_rev='5', switched='X'), + }) + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status, [], False, + '-r', 2, wc_dir) + + expected_infos = [{ + 'Path': re.escape(sbox.ospath('i')), + 'Relative URL': re.escape('^/iota'), + 'Revision': '5', + 'Last Changed Rev': '1', + 'Last Changed Author': 'jrandom', + 'Conflict Details': re.escape('incoming file edit upon switch' + ' Source left: (file) ^/iotb@5' + ' Source right: (file) ^/iota@5') + }] + svntest.actions.run_and_verify_info(expected_infos, sbox.ospath('i')) + +def external_externally_removed(sbox): + "external externally removed" + + sbox.build(read_only = True) + + sbox.simple_propset('svn:externals', '^/A/B B', '') + + # Try fetching the external with a versioned obstruction + sbox.simple_mkdir('B') + expected_err = ".*W155035: The external.*B' is already a versioned path" + svntest.actions.run_and_verify_svn(None, 
expected_err, + 'up', sbox.wc_dir) + sbox.simple_rm('B') + + + os.makedirs(sbox.ospath('B')) + expected_err2 = "svn: warning: W155007:.*B'" + svntest.actions.run_and_verify_svn(None, expected_err2, + 'up', sbox.wc_dir) + os.rmdir(sbox.ospath('B')) + + # Fetch the external + sbox.simple_update() + + svntest.main.safe_rmtree(sbox.ospath('B')) + sbox.simple_update() # Fetched again + if not os.path.isdir(sbox.ospath('B')): + raise svntest.Failure("B not recreated") + + svntest.main.safe_rmtree(sbox.ospath('B')) + sbox.simple_propdel('svn:externals', '') + + expected_output = [ + "Updating '%s':\n" % sbox.wc_dir, + "Removed external '%s'\n" % sbox.ospath('B'), + "Updated to revision 1.\n" + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'up', sbox.wc_dir) + + + sbox.simple_propset('svn:externals', '^/A/B B', '') + sbox.simple_update() + svntest.main.safe_rmtree(sbox.ospath('B')) + sbox.simple_mkdir('B') + + svntest.actions.run_and_verify_svn(None, expected_err, + 'up', sbox.wc_dir) + + sbox.simple_propdel('svn:externals', '') + sbox.simple_update() # Should succeed + ######################################################################## # Run the tests @@ -3387,8 +4407,28 @@ test_list = [ None, pinned_externals, update_dir_external_shallow, switch_parent_relative_file_external, + file_external_unversioned_obstruction, + file_external_versioned_obstruction, + update_external_peg_rev, update_deletes_file_external, switch_relative_externals, + copy_file_external_to_repo, + replace_tree_with_foreign_external, + copy_pin_externals_repos_repos, + copy_pin_externals_repos_wc, + copy_pin_externals_wc_repos, + copy_pin_externals_wc_wc, + copy_pin_externals_moved_external, + copy_pin_externals_removed_in_head, + copy_pin_externals_from_old_rev, + copy_pin_externals_wc_local_mods, + copy_pin_externals_wc_switched_subtrees, + copy_pin_externals_wc_mixed_revisions, + copy_pin_externals_whitepace_dir, + nested_notification, + file_external_to_normal_file, + 
file_external_recorded_info, + external_externally_removed, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/getopt_tests.py b/subversion/tests/cmdline/getopt_tests.py index a258c6b..f4e6398 100755 --- a/subversion/tests/cmdline/getopt_tests.py +++ b/subversion/tests/cmdline/getopt_tests.py @@ -74,6 +74,15 @@ del_lines_res = [ re.compile(r" - with Cyrus SASL authentication"), re.compile(r" - using serf \d+\.\d+\.\d+"), re.compile(r"\* fs_(base|fs) :"), + + # Remove 'svn --version' list of platform-specific + # auth cache providers. + re.compile(r"\* Wincrypt cache.*"), + re.compile(r"\* Plaintext cache.*"), + re.compile(r"\* Gnome Keyring"), + re.compile(r"\* GPG-Agent"), + re.compile(r"\* Mac OS X Keychain"), + re.compile(r"\* KWallet \(KDE\)"), ] # This is a list of lines to search and replace text on. @@ -90,13 +99,6 @@ rep_lines_res = [ # In 'svn --version --quiet', we print only the version # number in a single line. (re.compile(r'^\d+\.\d+\.\d+(-[a-zA-Z0-9]+)?$'), 'X.Y.Z\n'), - # 'svn --help' has a line with the version number. - # It can vary, for example: - # "Subversion command-line client, version 1.1.0." - # "Subversion command-line client, version 1.1.0-dev." 
- (re.compile(r'Subversion command-line client, ' - 'version \d+\.\d+\.\d+(.|-[a-zA-Z0-9]+\.)$'), - 'Subversion command-line client, version X.Y.Z.'), ] # This is a trigger pattern that selects the secondary set of @@ -221,6 +223,18 @@ def getopt_help_bogus_cmd(sbox): "run svn help bogus-cmd" run_one_test(sbox, 'svn_help_bogus-cmd', 'help', 'bogus-cmd') +def getopt_config_option(sbox): + "--config-option's spell checking" + sbox.build(create_wc=False, read_only=True) + expected_stderr = '.*W205000.*did you mean.*' + expected_stdout = svntest.verify.AnyOutput + svntest.actions.run_and_verify_svn2(expected_stdout, expected_stderr, 0, + 'info', + '--config-option', + 'config:miscellanous:diff-extensions=' + + '-u -p', + sbox.repo_url) + ######################################################################## # Run the tests @@ -235,6 +249,7 @@ test_list = [ None, getopt_help, getopt_help_bogus_cmd, getopt_help_log_switch, + getopt_config_option, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--help_stdout b/subversion/tests/cmdline/getopt_tests_data/svn--help_stdout index 0cd234d..06e5e03 100644 --- a/subversion/tests/cmdline/getopt_tests_data/svn--help_stdout +++ b/subversion/tests/cmdline/getopt_tests_data/svn--help_stdout @@ -1,5 +1,5 @@ usage: svn <subcommand> [options] [args] -Subversion command-line client, version X.Y.Z. +Subversion command-line client. Type 'svn help <subcommand>' for help on a specific subcommand. Type 'svn --version' to see the program version and RA modules or 'svn --version --quiet' to see just the version number. @@ -10,6 +10,7 @@ command, it recurses on the current directory (inclusive) by default. 
Available subcommands: add + auth blame (praise, annotate, ann) cat changelist (cl) diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout b/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout index ab183ee..bb30055 100644 --- a/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout +++ b/subversion/tests/cmdline/getopt_tests_data/svn--version--verbose_stdout @@ -1,5 +1,5 @@ -svn, version 1.8.0-dev (under development) - compiled Sep 10 2012, 14:00:24 on i386-apple-darwin11.4.0 +svn, version 1.9.0-dev (under development) + compiled Feb 26 2014, 15:15:42 on x86_64-unknown-openbsd5.5 Copyright (C) 2012 The Apache Software Foundation. This software consists of contributions made by many people; @@ -14,9 +14,16 @@ The following repository access (RA) modules are available: * ra_local : Module for accessing a repository on local disk. - handles 'file' scheme * ra_serf : Module for accessing a repository via WebDAV protocol using serf. + - using serf 1.3.3 - handles 'http' scheme - handles 'https' scheme +The following authentication credential caches are available: + +* Plaintext cache in /home/stsp/.subversion +* Gnome Keyring +* GPG-Agent + System information: * running on i386-apple-darwin11.4.0 diff --git a/subversion/tests/cmdline/getopt_tests_data/svn--version_stdout b/subversion/tests/cmdline/getopt_tests_data/svn--version_stdout index 346f218..513ec11 100644 --- a/subversion/tests/cmdline/getopt_tests_data/svn--version_stdout +++ b/subversion/tests/cmdline/getopt_tests_data/svn--version_stdout @@ -1,18 +1,26 @@ -svn, version 0.16.0 (r3987) - compiled Dec 5 2002, 00:02:51 +svn, version 1.9.0-dev (under development) + compiled Feb 26 2014, 15:15:42 on x86_64-unknown-openbsd5.5 -Copyright (C) 2010 The Apache Software Foundation. +Copyright (C) 2014 The Apache Software Foundation. This software consists of contributions made by many people; see the NOTICE file for more information. 
Subversion is open source software, see http://subversion.apache.org/ The following repository access (RA) modules are available: -* ra_neon : Module for accessing a repository via WebDAV protocol using Neon. - - handles 'http' scheme - - handles 'https' scheme -* ra_local : Module for accessing a repository on local disk. - - handles 'file' scheme * ra_svn : Module for accessing a repository using the svn network protocol. + - with Cyrus SASL authentication - handles 'svn' scheme +* ra_local : Module for accessing a repository on local disk. + - handles 'file' scheme +* ra_serf : Module for accessing a repository via WebDAV protocol using serf. + - using serf 1.3.3 + - handles 'http' scheme + - handles 'https' scheme + +The following authentication credential caches are available: + +* Plaintext cache in /home/stsp/.subversion +* Gnome Keyring +* GPG-Agent diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout b/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout index 59b92aa..4d06339 100644 --- a/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout +++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_log_switch_stdout @@ -82,11 +82,11 @@ Valid options: 'BASE' base rev of item's working copy 'COMMITTED' last commit at or before BASE 'PREV' revision just before COMMITTED - -q [--quiet] : print nothing, or only summary information - -v [--verbose] : print extra information + -c [--change] ARG : the change made in revision ARG + -q [--quiet] : do not print the log message + -v [--verbose] : also print all affected paths -g [--use-merge-history] : use/display additional information from merge history - -c [--change] ARG : the change made in revision ARG --targets ARG : pass contents of file ARG as additional args --stop-on-copy : do not cross copies while traversing history --incremental : give output suitable for concatenation @@ -108,21 +108,29 @@ Valid options: amount of white space -w, 
--ignore-all-space: Ignore all white space --ignore-eol-style: Ignore changes in EOL style + -U ARG, --context ARG: Show ARG lines of context -p, --show-c-function: Show C function name --search ARG : use ARG as search pattern (glob syntax) --search-and ARG : combine ARG with the previous search pattern Global options: --username ARG : specify a username ARG - --password ARG : specify a password ARG + --password ARG : specify a password ARG (caution: on many operating + systems, other users will be able to see this) --no-auth-cache : do not cache authentication tokens --non-interactive : do no interactive prompting (default is to prompt only if standard input is a terminal device) --force-interactive : do interactive prompting even if standard input is not a terminal device - --trust-server-cert : accept SSL server certificates from unknown - certificate authorities without prompting (but only - with '--non-interactive') + --trust-server-cert : deprecated; same as + --trust-server-cert-failures=unknown-ca + --trust-server-cert-failures ARG : with --non-interactive, accept SSL server + certificates with failures; ARG is comma-separated + list of 'unknown-ca' (Unknown Authority), + 'cn-mismatch' (Hostname mismatch), 'expired' + (Expired certificate), 'not-yet-valid' (Not yet + valid certificate) and 'other' (all other not + separately classified certificate errors). 
--config-dir ARG : read user configuration files from directory ARG --config-option ARG : set user configuration option in the format: FILE:SECTION:OPTION=[VALUE] @@ -188,7 +196,7 @@ Valid options: --relocate : relocate via URL-rewriting --ignore-externals : ignore externals definitions --ignore-ancestry : allow switching to a node with no common ancestor - --force : force operation to run + --force : handle unversioned obstructions as changes --accept ARG : specify automatic conflict resolution action ('postpone', 'working', 'base', 'mine-conflict', 'theirs-conflict', 'mine-full', 'theirs-full', @@ -197,15 +205,22 @@ Valid options: Global options: --username ARG : specify a username ARG - --password ARG : specify a password ARG + --password ARG : specify a password ARG (caution: on many operating + systems, other users will be able to see this) --no-auth-cache : do not cache authentication tokens --non-interactive : do no interactive prompting (default is to prompt only if standard input is a terminal device) --force-interactive : do interactive prompting even if standard input is not a terminal device - --trust-server-cert : accept SSL server certificates from unknown - certificate authorities without prompting (but only - with '--non-interactive') + --trust-server-cert : deprecated; same as + --trust-server-cert-failures=unknown-ca + --trust-server-cert-failures ARG : with --non-interactive, accept SSL server + certificates with failures; ARG is comma-separated + list of 'unknown-ca' (Unknown Authority), + 'cn-mismatch' (Hostname mismatch), 'expired' + (Expired certificate), 'not-yet-valid' (Not yet + valid certificate) and 'other' (all other not + separately classified certificate errors). 
--config-dir ARG : read user configuration files from directory ARG --config-option ARG : set user configuration option in the format: FILE:SECTION:OPTION=[VALUE] diff --git a/subversion/tests/cmdline/getopt_tests_data/svn_help_stdout b/subversion/tests/cmdline/getopt_tests_data/svn_help_stdout index 0cd234d..06e5e03 100644 --- a/subversion/tests/cmdline/getopt_tests_data/svn_help_stdout +++ b/subversion/tests/cmdline/getopt_tests_data/svn_help_stdout @@ -1,5 +1,5 @@ usage: svn <subcommand> [options] [args] -Subversion command-line client, version X.Y.Z. +Subversion command-line client. Type 'svn help <subcommand>' for help on a specific subcommand. Type 'svn --version' to see the program version and RA modules or 'svn --version --quiet' to see just the version number. @@ -10,6 +10,7 @@ command, it recurses on the current directory (inclusive) by default. Available subcommands: add + auth blame (praise, annotate, ann) cat changelist (cl) diff --git a/subversion/tests/cmdline/history_tests.py b/subversion/tests/cmdline/history_tests.py index 8fd9c2c..31588e2 100755 --- a/subversion/tests/cmdline/history_tests.py +++ b/subversion/tests/cmdline/history_tests.py @@ -70,16 +70,13 @@ def cat_traces_renames(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # rename pi to rho. commit r3. svntest.main.run_svn(None, 'mv', pi_path, rho_path) # svn cat -r1 rho --> should show pi's contents. 
- svntest.actions.run_and_verify_svn(None, - [ "This is the file 'pi'.\n"], [], + svntest.actions.run_and_verify_svn([ "This is the file 'pi'.\n"], [], 'cat', '-r', '1', rho_path) expected_output = svntest.wc.State(wc_dir, { @@ -94,9 +91,7 @@ def cat_traces_renames(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # update whole wc to HEAD expected_output = svntest.wc.State(wc_dir, { }) # no output @@ -115,28 +110,24 @@ def cat_traces_renames(sbox): expected_status) # 'svn cat bloo' --> should show rho's contents. - svntest.actions.run_and_verify_svn(None, - [ "This is the file 'rho'.\n"], [], + svntest.actions.run_and_verify_svn([ "This is the file 'rho'.\n"], [], 'cat', bloo_path) # svn cat -r1 bloo --> should still show rho's contents. - svntest.actions.run_and_verify_svn(None, - [ "This is the file 'rho'.\n"], [], + svntest.actions.run_and_verify_svn([ "This is the file 'rho'.\n"], [], 'cat', '-r', '1', bloo_path) # svn cat -r1 rho --> should show pi's contents. - svntest.actions.run_and_verify_svn(None, - [ "This is the file 'pi'.\n"], [], + svntest.actions.run_and_verify_svn([ "This is the file 'pi'.\n"], [], 'cat', '-r', '1', rho_path) # svn up -r1 - svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r', '1', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', wc_dir) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) svntest.actions.run_and_verify_status(wc_dir, expected_status) # svn cat -rHEAD rho --> should see 'unrelated object' error. 
- svntest.actions.run_and_verify_svn("unrelated object", - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'cat', '-r', 'HEAD', rho_path) @Issue(1970) @@ -189,26 +180,23 @@ def cat_avoids_false_identities(sbox): svntest.main.run_svn(None, 'del', iota_path) svntest.main.file_append(iota_path, "YOU SHOULD NOT SEE THIS\n") svntest.main.run_svn(None, 'add', iota_path) - svntest.main.run_svn(None, 'ci', '-m', 'log msg', - wc_dir) + sbox.simple_commit(message='log msg') svntest.main.run_svn(None, 'up', wc_dir) # r3 svntest.main.run_svn(None, 'del', iota_path) - svntest.main.run_svn(None, 'ci', '-m', 'log msg', - wc_dir) + sbox.simple_commit(message='log msg') svntest.main.run_svn(None, 'up', wc_dir) # r4 svntest.main.run_svn(None, 'cp', iota_url + '@1', wc_dir) - svntest.main.run_svn(None, 'ci', '-m', 'log msg', - wc_dir) + sbox.simple_commit(message='log msg') svntest.main.run_svn(None, 'up', wc_dir) # 'svn cat -r2 iota' should error, because the line of history # currently identified by /iota did not exist in r2, even though a # totally unrelated file of the same name did. 
- svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'cat', '-r', '2', iota_path) diff --git a/subversion/tests/cmdline/import_tests.py b/subversion/tests/cmdline/import_tests.py index 7adf4a8..0d1ccad 100755 --- a/subversion/tests/cmdline/import_tests.py +++ b/subversion/tests/cmdline/import_tests.py @@ -79,7 +79,7 @@ def import_executable(sbox): # import new files into repository url = sbox.repo_url exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'import', + None, [], 'import', '-m', 'Log message for new import', xt_path, url) lastline = output.pop().strip() @@ -126,8 +126,7 @@ def import_executable(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) #---------------------------------------------------------------------- def import_ignores(sbox): @@ -158,7 +157,7 @@ def import_ignores(sbox): url = sbox.repo_url + '/dir' exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'import', + None, [], 'import', '-m', 'Log message for new import', dir_path, url) @@ -197,8 +196,7 @@ def import_ignores(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) #---------------------------------------------------------------------- def import_no_ignores(sbox): @@ -225,7 +223,7 @@ def import_no_ignores(sbox): url = sbox.repo_url + '/dir' exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'import', + None, [], 'import', '-m', 'Log message for new import', '--no-ignore', dir_path, url) @@ -272,8 +270,7 @@ def import_no_ignores(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) #---------------------------------------------------------------------- def import_avoid_empty_revision(sbox): "avoid creating empty revisions with 
import" @@ -286,15 +283,14 @@ def import_avoid_empty_revision(sbox): os.makedirs(empty_dir) url = sbox.repo_url - svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '-m', 'Log message for new import', empty_dir, url) svntest.main.safe_rmtree(empty_dir) # Verify that an empty revision has not been created - svntest.actions.run_and_verify_svn(None, - exp_noop_up_out(1), + svntest.actions.run_and_verify_svn(exp_noop_up_out(1), [], "update", empty_dir) #---------------------------------------------------------------------- @@ -333,7 +329,7 @@ enable-auto-props = yes os.mkdir(imp_dir_path, 0755) svntest.main.file_write(imp_file_path, "This is file test.dsp.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '-m', 'Log message for new import', imp_dir_path, sbox.repo_url, @@ -371,7 +367,7 @@ enable-auto-props = yes "+Extra line" + crlf ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', file_path, '--config-dir', config_dir) @@ -389,7 +385,7 @@ enable-auto-props = yes "The third line.\r") # The import should succeed and not error out - svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '-m', 'Log message for new import', imp_dir_path, sbox.repo_url, @@ -407,7 +403,7 @@ def import_into_foreign_repo(sbox): svntest.main.safe_rmtree(other_repo_dir, 1) svntest.main.create_repos(other_repo_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '-m', 'Log message for new import', sbox.ospath('A/mu'), other_repo_url + '/f') @@ -478,7 +474,7 @@ def import_inherited_ignores(sbox): # global-ignores config. 
Lastly, ^/A/B/E should not get any *.foo paths # because of the svn:ignore property on ^/A/B/E, but non-immediate children # of ^/A/B/E are permitted *.foo paths. - svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '--config-dir', config_dir, import_tree_dir, sbox.repo_url + '/A/B/E', @@ -493,13 +489,13 @@ def import_inherited_ignores(sbox): 'A ' + os.path.join(E_path, dir7_path) + '\n', 'A ' + os.path.join(E_path, file7_path) + '\n', 'Updated to revision 3.\n']) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir) # Import the tree to ^/A/B/E/Z. The only difference from above is that # DIR3.foo and its child file2.txt are also imported. Why? Because now # we are creating a new directory in ^/A/B/E, so the svn:ignore property # set on ^/A/B/E doesn't apply. - svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '--config-dir', config_dir, import_tree_dir, sbox.repo_url + '/A/B/E/Z', @@ -517,12 +513,12 @@ def import_inherited_ignores(sbox): 'A ' + os.path.join(Z_path, dir3_path) + '\n', 'A ' + os.path.join(Z_path, file2_path) + '\n', 'Updated to revision 4.\n']) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir) # Import the tree to ^/A/B/F with the --no-ignore option. # No ignores should be considered and the whole tree should # be imported. 
- svntest.actions.run_and_verify_svn(None, None, [], 'import', + svntest.actions.run_and_verify_svn(None, [], 'import', '--config-dir', config_dir, '--no-ignore', import_tree_dir, sbox.repo_url + '/A/B/F', @@ -546,12 +542,12 @@ def import_inherited_ignores(sbox): 'A ' + os.path.join(F_path, file7_path) + '\n', 'A ' + os.path.join(F_path, dir8_path) + '\n', 'Updated to revision 5.\n']) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir) # Try importing a single file into a directory which has svn:ignore set # on it with a matching pattern of the imported file. The import should # be a no-op. - svntest.actions.run_and_verify_svn(None, [], [], 'import', + svntest.actions.run_and_verify_svn([], [], 'import', '--config-dir', config_dir, os.path.join(import_tree_dir, 'DIR6', 'file6.foo'), @@ -560,7 +556,7 @@ def import_inherited_ignores(sbox): # Try the above, but this time with --no-ignore, this time the import # should succeed. 
- svntest.actions.run_and_verify_svn(None, None, [], 'import', '--no-ignore', + svntest.actions.run_and_verify_svn(None, [], 'import', '--no-ignore', '--config-dir', config_dir, os.path.join(import_tree_dir, 'DIR6', 'file6.foo'), @@ -570,7 +566,7 @@ def import_inherited_ignores(sbox): ["Updating '" + wc_dir + "':\n", 'A ' + os.path.join(E_path, 'file6.foo') + '\n', 'Updated to revision 6.\n']) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir) #---------------------------------------------------------------------- diff --git a/subversion/tests/cmdline/info_tests.py b/subversion/tests/cmdline/info_tests.py index 8feaf46..187cd7f 100755 --- a/subversion/tests/cmdline/info_tests.py +++ b/subversion/tests/cmdline/info_tests.py @@ -180,7 +180,7 @@ def info_with_tree_conflicts(sbox): svntest.actions.run_and_verify_info([expected_info], path) # check XML info - exit_code, output, error = svntest.actions.run_and_verify_svn(None, None, + exit_code, output, error = svntest.actions.run_and_verify_svn(None, [], 'info', path, '--xml') @@ -208,7 +208,7 @@ def info_with_tree_conflicts(sbox): def info_on_added_file(sbox): """info on added file""" - svntest.actions.make_repo_and_wc(sbox) + sbox.build() wc_dir = sbox.wc_dir # create new file @@ -233,7 +233,7 @@ def info_on_added_file(sbox): svntest.actions.run_and_verify_info([expected], new_file) # check XML info - exit_code, output, error = svntest.actions.run_and_verify_svn(None, None, + exit_code, output, error = svntest.actions.run_and_verify_svn(None, [], 'info', new_file, '--xml') @@ -251,7 +251,7 @@ def info_on_added_file(sbox): def info_on_mkdir(sbox): """info on new dir with mkdir""" - svntest.actions.make_repo_and_wc(sbox) + sbox.build() wc_dir = sbox.wc_dir # create a new directory using svn mkdir @@ -273,7 +273,7 @@ def info_on_mkdir(sbox): svntest.actions.run_and_verify_info([expected], new_dir) # check XML info - 
exit_code, output, error = svntest.actions.run_and_verify_svn(None, None, + exit_code, output, error = svntest.actions.run_and_verify_svn(None, [], 'info', new_dir, '--xml') @@ -351,7 +351,7 @@ def info_multiple_targets(sbox): non_existent_path = os.path.join(wc_dir, 'non-existent') # All targets are existing - svntest.actions.run_and_verify_svn2(None, None, [], + svntest.actions.run_and_verify_svn2(None, [], 0, 'info', alpha, beta) # One non-existing target @@ -374,7 +374,7 @@ def info_multiple_targets(sbox): non_existent_url = sbox.repo_url + '/non-existent' # All targets are existing - svntest.actions.run_and_verify_svn2(None, None, [], + svntest.actions.run_and_verify_svn2(None, [], 0, 'info', alpha, beta) # One non-existing target @@ -483,12 +483,12 @@ def info_show_exclude(sbox): expected_error = 'svn: E200009: Could not display info for all targets.*' # Expect error on iota (status = not-present) - svntest.actions.run_and_verify_svn(None, [], expected_error, 'info', iota) + svntest.actions.run_and_verify_svn([], expected_error, 'info', iota) sbox.simple_update() # Expect error on iota (unversioned) - svntest.actions.run_and_verify_svn(None, [], expected_error, 'info', iota) + svntest.actions.run_and_verify_svn([], expected_error, 'info', iota) @Issue(3998) def binary_tree_conflict(sbox): @@ -516,7 +516,7 @@ def binary_tree_conflict(sbox): }) svntest.actions.run_and_verify_update(iota, expected_output, None, expected_status, - None, None, None, None, None, False, + [], False, iota, '-r', '2') expected_info = [{ @@ -558,7 +558,7 @@ def relpath_escaping(sbox): # Also test the local path (to help resolving the relative path) and an # unescaped path which the client should automatically encode - svntest.actions.run_and_verify_svn(None, None, [], 'info', + svntest.actions.run_and_verify_svn(None, [], 'info', info[0]['Relative URL'], info[0]['URL'], testpath, @@ -591,6 +591,162 @@ def relpath_escaping(sbox): svntest.actions.run_and_verify_update(wc_dir, 
expected_output, None, None) +def node_hidden_info(sbox): + "fetch svn info on 'hidden' nodes" + + sbox.build() + + sbox.simple_rm('A/B/E/alpha') + sbox.simple_commit() + svntest.actions.run_and_verify_svn(None, [], + 'up', '--set-depth', 'exclude', + sbox.ospath('A/B/E/beta')) + + sbox.simple_copy('A/B/E', 'E') + + # Running info on BASE not-present fails + expected_err = '.*(E|W)155010: The node \'.*alpha\' was not found.*' + svntest.actions.run_and_verify_svn(None, expected_err, + 'info', sbox.ospath('A/B/E/alpha')) + + expected_info = [ + { + 'Path': re.escape(sbox.ospath('A/B/E/beta')), + 'Schedule': 'normal', + 'Depth': 'exclude', + 'Node Kind': 'file', + }, + { + 'Path': re.escape(sbox.ospath('E/alpha')), + 'Schedule': 'delete', + 'Depth': 'exclude', + 'Node Kind': 'unknown', + }, + { + 'Path': re.escape(sbox.ospath('E/beta')), + 'Schedule': 'normal', + 'Depth': 'exclude', + 'Node Kind': 'file', + } + ] + + svntest.actions.run_and_verify_info(expected_info, + sbox.ospath('A/B/E/beta'), + sbox.ospath('E/alpha'), + sbox.ospath('E/beta')) + + +def info_item_simple(sbox): + "show one info item" + + sbox.build(read_only=True) + svntest.actions.run_and_verify_svn( + ['1'], [], + 'info', '--show-item=revision', '--no-newline', + sbox.ospath('')) + + +def info_item_simple_multiple(sbox): + "show one info item with multiple targets" + + sbox.build(read_only=True) + + svntest.actions.run_and_verify_svn( + r'^jrandom\s+\S+(/|\\)info_tests-\d+((/|\\)[^/\\]+)?$', [], + 'info', '--show-item=last-changed-author', + '--depth=immediates', sbox.ospath('')) + + svntest.actions.run_and_verify_svn( + r'^1\s+\S+(/|\\)info_tests-\d+(/|\\)[^/\\]+$', [], + 'info', '--show-item=last-changed-revision', + sbox.ospath('A'), sbox.ospath('iota')) + + +def info_item_url(sbox): + "show one info item with URL targets" + + sbox.build(create_wc=False, read_only=True) + + svntest.actions.run_and_verify_svn( + '1', [], + 'info', '--show-item=last-changed-revision', + sbox.repo_url) + + + 
svntest.actions.run_and_verify_svn( + r'^1\s+[^/:]+://.+/repos/[^/]+$', [], + 'info', '--show-item=last-changed-revision', + sbox.repo_url + '/A', sbox.repo_url + '/iota') + + + # Empty working copy root on URL targets + svntest.actions.run_and_verify_svn( + '', [], + 'info', '--show-item=wc-root', + sbox.repo_url) + + +def info_item_uncommmitted(sbox): + "show one info item on uncommitted targets" + + sbox.build() + + svntest.main.file_write(sbox.ospath('newfile'), 'newfile') + sbox.simple_add('newfile') + sbox.simple_mkdir('newdir') + + svntest.actions.run_and_verify_svn( + '', [], + 'info', '--show-item=last-changed-revision', + sbox.ospath('newfile')) + + svntest.actions.run_and_verify_svn( + '', [], + 'info', '--show-item=last-changed-author', + sbox.ospath('newdir')) + + svntest.actions.run_and_verify_svn( + r'\s+\S+(/|\\)new(file|dir)', [], + 'info', '--show-item=last-changed-date', + sbox.ospath('newfile'), sbox.ospath('newdir')) + + svntest.actions.run_and_verify_svn( + r'\^/new(file|dir)\s+\S+(/|\\)new(file|dir)', [], + 'info', '--show-item=relative-url', + sbox.ospath('newfile'), sbox.ospath('newdir')) + + +def info_item_failures(sbox): + "failure modes of 'svn info --show-item'" + + sbox.build(read_only=True) + + svntest.actions.run_and_verify_svn( + None, r'.*E200009:.*', + 'info', '--show-item=revision', + sbox.ospath('not-there')) + + svntest.actions.run_and_verify_svn( + None, r".*E205000: .*; did you mean 'wc-root'\?", + 'info', '--show-item=root', + sbox.ospath('')) + + svntest.actions.run_and_verify_svn( + None, (r".*E205000: --show-item is not valid in --xml mode"), + 'info', '--show-item=revision', '--xml', + sbox.ospath('')) + + svntest.actions.run_and_verify_svn( + None, (r".*E205000: --incremental is only valid in --xml mode"), + 'info', '--show-item=revision', '--incremental', + sbox.ospath('')) + + svntest.actions.run_and_verify_svn( + None, (r".*E205000: --no-newline is only available.*"), + 'info', '--show-item=revision', '--no-newline', 
+ sbox.ospath('A'), sbox.ospath('iota')) + + ######################################################################## # Run the tests @@ -606,6 +762,12 @@ test_list = [ None, info_show_exclude, binary_tree_conflict, relpath_escaping, + node_hidden_info, + info_item_simple, + info_item_simple_multiple, + info_item_url, + info_item_uncommmitted, + info_item_failures, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/input_validation_tests.py b/subversion/tests/cmdline/input_validation_tests.py index a454ac9..e1d74a6 100755 --- a/subversion/tests/cmdline/input_validation_tests.py +++ b/subversion/tests/cmdline/input_validation_tests.py @@ -52,7 +52,7 @@ def run_and_verify_svn_in_wc(sbox, expected_stderr, *varargs): old_dir = os.getcwd() try: os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, [], expected_stderr, + svntest.actions.run_and_verify_svn([], expected_stderr, *varargs) finally: os.chdir(old_dir) @@ -266,31 +266,31 @@ def delete_repos_root(sbox): svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A/D/G'), repo_url, None, None, expected_status, - None, None, None, None, None, None, + [], False, '--set-depth', 'empty', '--ignore-ancestry') expected_status.tweak('A/B/F', switched='S') svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A/B/F'), repo_url, None, None, expected_status, - None, None, None, None, None, None, + [], False, '--depth', 'empty', '--ignore-ancestry') # Delete the wcroot (which happens to be the repository root) expected_error = 'svn: E155035: \'.*\' is the root of a working copy ' + \ 'and cannot be deleted' - svntest.actions.run_and_verify_svn('Delete root', [], expected_error, + svntest.actions.run_and_verify_svn([], expected_error, 'rm', wc_dir) # This should produce some error, because we can never commit this expected_error = '.*repository root.*' - svntest.actions.run_and_verify_svn('Move root', None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'mv', 
sbox.ospath('A/D/G'), sbox.ospath('Z')) # And this currently fails with another nasty error about a wc-lock expected_error = '.*repository root.*' - svntest.actions.run_and_verify_svn('Delete root', [], expected_error, + svntest.actions.run_and_verify_svn([], expected_error, 'rm', sbox.ospath('A/B/F')) ######################################################################## diff --git a/subversion/tests/cmdline/iprop_authz_tests.py b/subversion/tests/cmdline/iprop_authz_tests.py index e306373..835cd37 100755 --- a/subversion/tests/cmdline/iprop_authz_tests.py +++ b/subversion/tests/cmdline/iprop_authz_tests.py @@ -110,7 +110,7 @@ def iprops_authz(sbox): else: expected_err = ".*svn: E170001: Authorization failed.*" svntest.actions.run_and_verify_svn( - None, None, expected_err, 'proplist', '-v', + None, expected_err, 'proplist', '-v', '--show-inherited-props', sbox.repo_url + '/A/D/H/psi') ######################################################################## diff --git a/subversion/tests/cmdline/iprop_tests.py b/subversion/tests/cmdline/iprop_tests.py index cc57e59..bff8a38 100755 --- a/subversion/tests/cmdline/iprop_tests.py +++ b/subversion/tests/cmdline/iprop_tests.py @@ -160,7 +160,7 @@ def iprops_basic_working(sbox): psi_path, expected_iprops, expected_explicit_props) # Proplist file target with only explicit props. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', wc_dir) expected_iprops = {} expected_explicit_props = {'FileProp1' : 'File-Prop-Val1'} svntest.actions.run_and_verify_inherited_prop_xml( @@ -184,7 +184,7 @@ def iprops_basic_repos(sbox): svntest.main.run_svn(None, 'commit', '-m', 'Add some file properties', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) sbox.simple_propset('RootProp1', 'Root-Prop-Val1', '.') sbox.simple_propset('RootProp2', 'Root-Prop-Val2', '.') sbox.simple_propset('DirProp2', 'Dir-Prop-Val-Root', '.') @@ -317,7 +317,7 @@ def iprops_switched_subtrees(sbox): sbox.repo_url + '/branch2', '-m', 'Make branch2') # Create a root property and two branch properties - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) sbox.simple_propset('Root-Prop-1', 'Root-Prop-Val1', '.') sbox.simple_propset('Branch-Name', 'Feature #1', 'branch1') sbox.simple_propset('Branch-Name', 'Feature #2', 'branch2') @@ -348,7 +348,7 @@ def iprops_switched_subtrees(sbox): # which does inherit properties from ^/branch1 and ^/. The inherited # properties cache should be updated to reflect this when asking what # properties branch2/B/lambda inherits. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_iprops = { sbox.repo_url : {'Root-Prop-1' : 'Root-Prop-Val1'}, sbox.repo_url + '/branch1' : {'Branch-Name' : 'Feature #1'}} @@ -359,20 +359,20 @@ def iprops_switched_subtrees(sbox): # Now update the WC back to r3, where there are no properties. The # inheritable properties cache for the WC-root at branch2/B should be # cleared and no inheritable properties found for branch2/B/lambda. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r3', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r3', wc_dir) expected_iprops = {} expected_explicit_props = {} svntest.actions.run_and_verify_inherited_prop_xml( branch2_lambda_path, expected_iprops, expected_explicit_props) # Update back to HEAD=r4 before continuing. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Now unswitch branch2/B and check branch2/B/lambda's inherited props. # Now no iprop cache for branch2/B should exist and branch2/B/lambda # should inherit from branch2 and '.'. svntest.main.run_svn(None, 'switch', sbox.repo_url + '/branch2/B', branch2_B_path) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_iprops = { ### Working copy parents! ### wc_dir : {'Root-Prop-1' : 'Root-Prop-Val1'}, @@ -893,8 +893,8 @@ def iprops_pegged_wc_targets(sbox): # Inherited props should always come from the repository parent of # ^/A/B/E/alpha and so should not include the property (working or # otherwise) on A/D. - svntest.actions.run_and_verify_svn(None, None, [], 'delete', G_path) - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'delete', G_path) + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.repo_url + '/A/B', G_path) # Operation | Target | Peg Revision | Operative Revision @@ -1138,11 +1138,11 @@ def iprops_pegged_wc_targets(sbox): # Revert the replacement with history of A/D/G and once again # replace A/D/G, but this time without history (using and export # of A/B. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', G_path, '-R') - svntest.actions.run_and_verify_svn(None, None, [], 'delete', G_path) - svntest.actions.run_and_verify_svn(None, None, [], 'export', + svntest.actions.run_and_verify_svn(None, [], 'revert', G_path, '-R') + svntest.actions.run_and_verify_svn(None, [], 'delete', G_path) + svntest.actions.run_and_verify_svn(None, [], 'export', sbox.repo_url + '/A/B', G_path) - svntest.actions.run_and_verify_svn(None, None, [], 'add', G_path) + svntest.actions.run_and_verify_svn(None, [], 'add', G_path) # Set a working prop on a file within the replaced tree, we should *never* # see this property if asking about the # file@[HEAD | PREV | COMMITTED | BASE] @@ -1156,21 +1156,21 @@ def iprops_pegged_wc_targets(sbox): # # proplist/propget WC-PATH@HEAD svntest.actions.run_and_verify_svn( - None, None, + None, ".*Unknown node kind for '" + sbox.repo_url + "/A/D/G/E/alpha'\n", 'pl', '-v', '--show-inherited-props', replaced_alpha_path + '@HEAD') svntest.actions.run_and_verify_svn( - None, None, + None, ".*'" + sbox.repo_url + "/A/D/G/E/alpha' does not exist in revision 3\n", 'pg', 'RootProp1', '-v', '--show-inherited-props', replaced_alpha_path + '@HEAD') # proplist/propget WC-PATH@PREV svntest.actions.run_and_verify_svn( - None, None, + None, ".*Path '.*alpha' has no committed revision\n", 'pl', '-v', '--show-inherited-props', replaced_alpha_path + '@PREV') svntest.actions.run_and_verify_svn( - None, None, + None, ".*Path '.*alpha' has no committed revision\n", 'pg', 'RootProp1', '-v', '--show-inherited-props', replaced_alpha_path + '@PREV') # proplist/propget WC-PATH@COMMITTED @@ -1421,7 +1421,7 @@ def iprops_shallow_operative_depths(sbox): # r2 - Create a branch.. 
svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A', sbox.repo_url + '/branch1', '-m', 'Make branch1') - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # r3 - Create a root property and some branch properties sbox.simple_propset('Root-Prop-1', 'Root-Prop-Val1', '.') @@ -1437,7 +1437,7 @@ def iprops_shallow_operative_depths(sbox): svntest.main.run_svn(None, 'commit', '-m', 'Change some properties', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Switch the WC to ^/branch1: svntest.main.run_svn(None, 'switch', '--ignore-ancestry', @@ -1449,7 +1449,7 @@ def iprops_shallow_operative_depths(sbox): svntest.main.run_svn(None, 'switch', sbox.repo_url + '/A/mu', sbox.ospath('mu')) # Update the whole WC back to r3. - svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r3', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r3', wc_dir) # Check the inherited props on B/E within the switched subtree # and the switched file mu. The props should all be inherited @@ -1467,7 +1467,7 @@ def iprops_shallow_operative_depths(sbox): # Again check the inherited props on B/E. This shouldn't affect the # switched subtree at all, the props it inherits should still reflect # the values at r3. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--depth=empty', wc_dir) svntest.actions.run_and_verify_inherited_prop_xml( sbox.ospath('B/E'), expected_iprops, expected_explicit_props) @@ -1476,7 +1476,7 @@ def iprops_shallow_operative_depths(sbox): # Update the root of the WC (to HEAD=r4) at depth=files. B/E should # still inherit vales from r3, but mu should now inherit props from r4. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--depth=files', wc_dir) svntest.actions.run_and_verify_inherited_prop_xml( sbox.ospath('B/E'), expected_iprops, expected_explicit_props) @@ -1489,7 +1489,7 @@ def iprops_shallow_operative_depths(sbox): # Update the root of the WC (to HEAD=r4) at depth=immediates. Now both B/E # and mu inherit props from r4. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--depth=immediates', wc_dir) svntest.actions.run_and_verify_inherited_prop_xml( sbox.ospath('B/E'), expected_iprops, expected_explicit_props) @@ -1509,7 +1509,7 @@ def iprops_with_directory_externals(sbox): other_repo_dir, other_repo_url = sbox.add_repo_path("other") other_wc_dir = sbox.add_wc_path("other") svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'co', other_repo_url, + svntest.actions.run_and_verify_svn(None, [], 'co', other_repo_url, other_wc_dir) # Create a root property on the first WC. @@ -1518,7 +1518,7 @@ def iprops_with_directory_externals(sbox): wc_dir) # Create a root property on the "other" WC. 
- svntest.actions.run_and_verify_svn(None, None, [], 'ps', 'Other-Root-Prop', + svntest.actions.run_and_verify_svn(None, [], 'ps', 'Other-Root-Prop', 'Root-Prop-Val-from-other', other_wc_dir) svntest.main.run_svn(None, 'commit', '-m', 'Add a root property', other_wc_dir) @@ -1533,20 +1533,20 @@ def iprops_with_directory_externals(sbox): sbox.simple_propset('svn:externals', other_repo_url + '/A/D/G X-Other-Repos', 'E') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add external point to other WC', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Create an external in the first WC that points to a location in the # same WC. sbox.simple_propset('svn:externals', sbox.repo_url + '/A/D/H X-Same-Repos', 'F') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add external pointing to same repos', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Check the properties inherited by the external from the same repository. # It should inherit the props from the root of the same repository. @@ -1590,9 +1590,9 @@ def iprops_with_file_externals(sbox): sbox.simple_propset('svn:externals', sbox.repo_url + '/A/D/H/psi@4 file-external-pegged', 'A/B/F') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add a file external', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Check the properties inherited by the external files. Both should # inherit the properties from ^/ and ^/A/D. 
@@ -1622,7 +1622,7 @@ def iprops_with_file_externals(sbox): expected_explicit_props) # ...We update the external: - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # The pegged file external's iprops should remain unchanged. svntest.actions.run_and_verify_inherited_prop_xml( sbox.ospath('A/B/F/file-external-pegged'), expected_iprops, diff --git a/subversion/tests/libsvn_subr/named_atomic-test-proc.c b/subversion/tests/cmdline/lock-helper.c index 534247c..47a333c 100644 --- a/subversion/tests/libsvn_subr/named_atomic-test-proc.c +++ b/subversion/tests/cmdline/lock-helper.c @@ -1,5 +1,5 @@ /* - * named_atomic-test-proc.c: a collection of svn_named_atomic__t tests + * lock-helper.c : create locks with an expiry date * * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one @@ -21,66 +21,55 @@ * ==================================================================== */ -/* ==================================================================== - To add tests, look toward the bottom of this file. 
-*/ +#include <stdlib.h> - -#include <stdio.h> +#include "svn_pools.h" +#include "svn_dirent_uri.h" +#include "svn_fs.h" -/* shared test implementation */ -#include "named_atomic-test-common.h" - -/* Very simple process frame around the actual test code */ int main(int argc, const char *argv[]) { - svn_boolean_t got_error = FALSE; apr_pool_t *pool; - svn_error_t *err; + const char *fs_path, *file_path, *username; + apr_int64_t seconds; + apr_time_t expiration_date; + svn_fs_t *fs; + svn_fs_access_t *access; + svn_lock_t *lock; - int id = 0; - int count = 0; - int iterations = 0; + if (argc != 5) + { + fprintf(stderr, "usage: lock-helper repo_path file_path user seconds\n"); + exit(1); + } - /* Initialize APR (Apache pools) */ if (apr_initialize() != APR_SUCCESS) { - printf("apr_initialize() failed.\n"); + fprintf(stderr, "apr_initialize() failed.\n"); exit(1); } pool = svn_pool_create(NULL); - /* lean & mean parameter parsing */ - if (argc != 5) - { - if (argc == 1) /* used to test that this executable can be started */ - exit(0); - - printf("Usage: named_atomic-proc-test ID COUNT ITERATIONS NS.\n"); - exit(1); - } + fs_path = svn_dirent_internal_style(argv[1], pool); + fs_path = svn_dirent_join(fs_path, "db", pool); + file_path = svn_dirent_canonicalize(argv[2], pool); + username = argv[3]; + SVN_INT_ERR(svn_cstring_atoi64(&seconds, argv[4])); - id = (int)apr_atoi64(argv[1]); - count = (int)apr_atoi64(argv[2]); - iterations = (int)apr_atoi64(argv[3]); - name_namespace = argv[4]; + SVN_INT_ERR(svn_fs_open2(&fs, fs_path, NULL, pool, pool)); + SVN_INT_ERR(svn_fs_create_access(&access, username, pool)); + SVN_INT_ERR(svn_fs_set_access(fs, access)); - /* run test routine */ + expiration_date = apr_time_now() + apr_time_from_sec(seconds); - err = test_pipeline(id, count, iterations, pool); - if (err) - { - const char *prefix = apr_psprintf(pool, "Process %d: ", id); - got_error = TRUE; - svn_handle_error2(err, stdout, FALSE, prefix); - svn_error_clear(err); - } + 
SVN_INT_ERR(svn_fs_lock(&lock, fs, file_path, NULL, "created by lock-helper", + FALSE, expiration_date, SVN_INVALID_REVNUM, FALSE, + pool)); - /* Clean up APR */ svn_pool_destroy(pool); apr_terminate(); - return got_error; + return EXIT_SUCCESS; } diff --git a/subversion/tests/cmdline/lock_tests.py b/subversion/tests/cmdline/lock_tests.py index aaca991..c27a4f4 100755 --- a/subversion/tests/cmdline/lock_tests.py +++ b/subversion/tests/cmdline/lock_tests.py @@ -87,7 +87,7 @@ def lock_file(sbox): svntest.main.file_append(file_path, "This represents a binary file\n") svntest.main.run_svn(None, 'commit', '-m', '', file_path) - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', file_path) # --- Meanwhile, in our other working copy... --- @@ -135,7 +135,7 @@ def commit_file_keep_lock(sbox): wc_dir = sbox.wc_dir # lock 'A/mu' as wc_author - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', 'some lock comment', sbox.ospath('A/mu')) @@ -158,21 +158,25 @@ def commit_file_unlock(sbox): wc_dir = sbox.wc_dir # lock A/mu and iota as wc_author - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', 'some lock comment', sbox.ospath('A/mu'), sbox.ospath('iota')) # make a change and commit it, allowing lock to be released sbox.simple_append('A/mu', 'Tweak!\n') - sbox.simple_commit() + + expected_output = svntest.wc.State(wc_dir, { + 'A/mu' : Item(verb='Sending'), + }) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) - expected_status.tweak('iota', wc_rev=2) - # Make sure the file is unlocked - svntest.actions.run_and_verify_status(wc_dir, expected_status) + # Make sure both iota an mu are unlocked, but only mu is bumped + 
svntest.actions.run_and_verify_commit(wc_dir, + expected_output, + expected_status) #---------------------------------------------------------------------- def commit_propchange(sbox): @@ -182,7 +186,7 @@ def commit_propchange(sbox): wc_dir = sbox.wc_dir # lock A/mu as wc_author - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', 'some lock comment', sbox.ospath('A/mu')) @@ -220,7 +224,7 @@ def break_lock(sbox): file_path = sbox.ospath('iota') file_path_b = sbox.ospath('iota', wc_dir=wc_b) - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', file_path) # --- Meanwhile, in our other working copy... --- @@ -230,11 +234,11 @@ def break_lock(sbox): # attempt (and fail) to unlock file # This should give a "iota' is not locked in this working copy" error - svntest.actions.run_and_verify_svn(None, None, ".*not locked", + svntest.actions.run_and_verify_svn(None, ".*not locked", 'unlock', file_path_b) - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', '--force', file_path_b) @@ -262,7 +266,7 @@ def steal_lock(sbox): file_path = sbox.ospath('iota') file_path_b = sbox.ospath('iota', wc_dir=wc_b) - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', file_path) # --- Meanwhile, in our other working copy... --- @@ -271,13 +275,13 @@ def steal_lock(sbox): # attempt (and fail) to lock file - # This should give a "iota' is already locked... error, but exits 0. 
- svntest.actions.run_and_verify_svn2(None, None, - ".*already locked", 0, - 'lock', - '-m', 'trying to break', file_path_b) + # This should give a "iota' is already locked error + svntest.actions.run_and_verify_svn(None, + ".*already locked", + 'lock', + '-m', 'trying to break', file_path_b) - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '--force', '-m', 'trying to break', file_path_b) @@ -307,7 +311,7 @@ def handle_defunct_lock(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) # lock the file - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', sbox.ospath('iota')) # Make a second copy of the working copy @@ -318,7 +322,7 @@ def handle_defunct_lock(sbox): # --- Meanwhile, in our other working copy... --- # Try unlocking the file in the second wc. - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', file_path_b) @@ -370,7 +374,7 @@ def enforce_lock(sbox): raise svntest.Failure # obtain a lock on one of these files... - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', iota_path) # ...and verify that the write bit gets set... @@ -379,7 +383,7 @@ def enforce_lock(sbox): raise svntest.Failure # ...and unlock it... - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', iota_path) # ...and verify that the write bit gets unset @@ -408,7 +412,7 @@ def update_while_needing_lock(sbox): sbox.simple_update() # Lock, modify, commit, unlock, to create r3. 
- svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', sbox.ospath('iota')) sbox.simple_append('iota', 'This line added in r2.\n') sbox.simple_commit('iota') # auto-unlocks @@ -447,12 +451,12 @@ def defunct_lock(sbox): svntest.main.run_svn(None, 'update', wc_b) # lock iota in wc_b - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', iota_path_b) # break the lock iota in wc a - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', '--force', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '--force', '-m', '', iota_path) # update wc_b svntest.main.run_svn(None, 'update', wc_b) @@ -476,16 +480,16 @@ def deleted_path_lock(sbox): iota_path = sbox.ospath('iota') iota_url = sbox.repo_url + '/iota' - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', iota_path) sbox.simple_rm('iota') - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '--no-unlock', '-m', '', iota_path) # Now make sure that we can delete the lock from iota via a URL - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', iota_url) @@ -505,14 +509,14 @@ def lock_unlock(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', writelocked='K') - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', pi_path, rho_path, tau_path) svntest.actions.run_and_verify_status(wc_dir, expected_status) expected_status.tweak('A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau', 
writelocked=None) - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', pi_path, rho_path, tau_path) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -529,11 +533,11 @@ def deleted_dir_lock(sbox): rho_path = sbox.ospath('A/D/G/rho') tau_path = sbox.ospath('A/D/G/tau') - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', pi_path, rho_path, tau_path) sbox.simple_rm('A/D/G') # the parent directory - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '--no-unlock', '-m', '', sbox.ospath('A/D/G')) @@ -699,11 +703,11 @@ def out_of_date(sbox): '-m', '', file_path) # --- Meanwhile, in our other working copy... --- - svntest.actions.run_and_verify_svn2(None, None, - ".*newer version of '/iota' exists", 0, - 'lock', - '--username', svntest.main.wc_author2, - '-m', '', file_path_b) + svntest.actions.run_and_verify_svn(None, + ".*newer version of '/iota' exists", + 'lock', + '--username', svntest.main.wc_author2, + '-m', '', file_path_b) #---------------------------------------------------------------------- # Tests reverting a svn:needs-lock file @@ -718,11 +722,11 @@ def revert_lock(sbox): mode = stat.S_IWGRP | stat.S_IWOTH | stat.S_IWRITE # set the prop in wc - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:needs-lock', 'foo', iota_path) # commit r2 - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', iota_path) # make sure that iota got set to read-only @@ -737,7 +741,7 @@ def revert_lock(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # remove read-only-ness - svntest.actions.run_and_verify_svn(None, None, [], 'propdel', + 
svntest.actions.run_and_verify_svn(None, [], 'propdel', 'svn:needs-lock', iota_path) # make sure that iota got read-only-ness removed @@ -747,7 +751,7 @@ def revert_lock(sbox): raise svntest.Failure # revert the change - svntest.actions.run_and_verify_svn(None, None, [], 'revert', iota_path) + svntest.actions.run_and_verify_svn(None, [], 'revert', iota_path) # make sure that iota got set back to read-only if (os.stat(iota_path)[0] & mode): @@ -760,7 +764,7 @@ def revert_lock(sbox): extra_name = 'xx' # now lock the file - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', iota_path) # modify it @@ -772,7 +776,7 @@ def revert_lock(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # revert it - svntest.actions.run_and_verify_svn(None, None, [], 'revert', iota_path) + svntest.actions.run_and_verify_svn(None, [], 'revert', iota_path) # make sure it is still writable since we have the lock if (os.stat(iota_path)[0] & mode == 0): @@ -809,7 +813,7 @@ def lock_several_files(sbox): lambda_path = os.path.join(sbox.wc_dir, 'A', 'B', 'lambda') alpha_path = os.path.join(sbox.wc_dir, 'A', 'B', 'E', 'alpha') - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '--username', svntest.main.wc_author2, '-m', 'lock several', iota_path, lambda_path, alpha_path) @@ -818,7 +822,7 @@ def lock_several_files(sbox): expected_status.tweak('iota', 'A/B/lambda', 'A/B/E/alpha', writelocked='K') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', '--username', svntest.main.wc_author2, iota_path, lambda_path, alpha_path) @@ -837,10 +841,10 @@ def lock_switched_files(sbox): iota_URL = sbox.repo_url + '/iota' alpha_URL = sbox.repo_url + 
'/A/B/E/alpha' - svntest.actions.run_and_verify_svn(None, None, [], 'switch', + svntest.actions.run_and_verify_svn(None, [], 'switch', iota_URL, gamma_path, '--ignore-ancestry') - svntest.actions.run_and_verify_svn(None, None, [], 'switch', + svntest.actions.run_and_verify_svn(None, [], 'switch', alpha_URL, lambda_path, '--ignore-ancestry') @@ -848,7 +852,7 @@ def lock_switched_files(sbox): expected_status.tweak('A/D/gamma', 'A/B/lambda', switched='S') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', 'lock several', gamma_path, lambda_path) @@ -859,7 +863,7 @@ def lock_switched_files(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', gamma_path, lambda_path) expected_status.tweak('A/D/gamma', 'A/B/lambda', writelocked=None) @@ -877,7 +881,7 @@ def lock_uri_encoded(sbox): file_path = sbox.ospath(fname) svntest.main.file_append(file_path, "This represents a binary file\n") - svntest.actions.run_and_verify_svn(None, None, [], "add", file_path) + svntest.actions.run_and_verify_svn(None, [], "add", file_path) expected_output = svntest.wc.State(wc_dir, { fname : Item(verb='Adding'), @@ -890,17 +894,17 @@ def lock_uri_encoded(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], file_path) - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', file_path) # Make sure that the file was locked. 
expected_status.tweak(fname, writelocked='K') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', file_path) # Make sure it was successfully unlocked again. @@ -909,14 +913,14 @@ def lock_uri_encoded(sbox): # And now the URL case. file_url = sbox.repo_url + '/' + fname - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', file_url) # Make sure that the file was locked. expected_status.tweak(fname, writelocked='O') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', file_url) # Make sure it was successfully unlocked again. @@ -941,15 +945,15 @@ def lock_and_exebit1(sbox): gamma_path = sbox.ospath('A/D/gamma') expected_err = ".*svn: warning: W125005: To turn off the svn:needs-lock property,.*" - svntest.actions.run_and_verify_svn2(None, None, expected_err, 0, + svntest.actions.run_and_verify_svn2(None, expected_err, 0, 'ps', 'svn:needs-lock', ' ', gamma_path) expected_err = ".*svn: warning: W125005: To turn off the svn:executable property,.*" - svntest.actions.run_and_verify_svn2(None, None, expected_err, 0, + svntest.actions.run_and_verify_svn2(None, expected_err, 0, 'ps', 'svn:executable', ' ', gamma_path) # commit - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', gamma_path) # mode should be +r, -w, +x gamma_stat = os.stat(gamma_path)[0] @@ -961,7 +965,7 @@ def lock_and_exebit1(sbox): raise svntest.Failure # lock - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', gamma_path) # mode should be +r, 
+w, +x gamma_stat = os.stat(gamma_path)[0] @@ -976,7 +980,7 @@ def lock_and_exebit1(sbox): svntest.main.file_append(gamma_path, "check stat output after mod & unlock") # unlock - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', gamma_path) # Mode should be +r, -w, +x @@ -989,7 +993,7 @@ def lock_and_exebit1(sbox): raise svntest.Failure # ci - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', gamma_path) # Mode should be still +r, -w, +x @@ -1018,15 +1022,15 @@ def lock_and_exebit2(sbox): gamma_path = sbox.ospath('A/D/gamma') expected_err = ".*svn: warning: W125005: To turn off the svn:needs-lock property,.*" - svntest.actions.run_and_verify_svn2(None, None, expected_err, 0, + svntest.actions.run_and_verify_svn2(None, expected_err, 0, 'ps', 'svn:needs-lock', ' ', gamma_path) expected_err = ".*svn: warning: W125005: To turn off the svn:executable property,.*" - svntest.actions.run_and_verify_svn2(None, None, expected_err, 0, + svntest.actions.run_and_verify_svn2(None, expected_err, 0, 'ps', 'svn:executable', ' ', gamma_path) # commit - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', gamma_path) # mode should be +r, -w, +x gamma_stat = os.stat(gamma_path)[0] @@ -1038,7 +1042,7 @@ def lock_and_exebit2(sbox): raise svntest.Failure # lock - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', gamma_path) # mode should be +r, +w, +x gamma_stat = os.stat(gamma_path)[0] @@ -1053,7 +1057,7 @@ def lock_and_exebit2(sbox): svntest.main.file_append(gamma_path, "check stat output after mod & unlock") # commit - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', 
gamma_path) # Mode should be +r, -w, +x @@ -1079,7 +1083,7 @@ def commit_xml_unsafe_file_unlock(sbox): 'commit', '-m', '', file_path) # lock fname as wc_author - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', 'some lock comment', file_path) # make a change and commit it, allowing lock to be released @@ -1106,7 +1110,7 @@ def repos_lock_with_info(sbox): file_url = sbox.repo_url + '/' + fname # lock wc file - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '--username', svntest.main.wc_author2, '-m', comment, file_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1114,7 +1118,7 @@ def repos_lock_with_info(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Steal lock on wc file - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '--username', svntest.main.wc_author2, '--force', '-m', comment, file_url) @@ -1149,7 +1153,7 @@ def unlock_already_unlocked_files(sbox): alpha_path = sbox.ospath('A/B/E/alpha') gamma_path = sbox.ospath('A/D/gamma') - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '--username', svntest.main.wc_author2, '-m', 'lock several', iota_path, lambda_path, alpha_path) @@ -1160,14 +1164,14 @@ def unlock_already_unlocked_files(sbox): error_msg = ".*Path '/A/B/E/alpha' is already locked by user '" + \ svntest.main.wc_author2 + "'.*" - svntest.actions.run_and_verify_svn2(None, None, error_msg, 0, - 'lock', - '--username', svntest.main.wc_author2, - alpha_path, gamma_path) + svntest.actions.run_and_verify_svn(None, error_msg, + 'lock', + '--username', svntest.main.wc_author2, + alpha_path, gamma_path) expected_status.tweak('A/D/gamma', 
writelocked='K') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, ".*unlocked", [], 'unlock', + svntest.actions.run_and_verify_svn(".*unlocked", [], 'unlock', '--username', svntest.main.wc_author2, lambda_path) @@ -1176,11 +1180,11 @@ def unlock_already_unlocked_files(sbox): error_msg = "(.*No lock on path '/A/B/lambda'.*)" + \ "|(.*'A/B/lambda' is not locked.*)" - svntest.actions.run_and_verify_svn2(None, None, error_msg, 0, - 'unlock', - '--username', svntest.main.wc_author2, - '--force', - iota_path, lambda_path, alpha_path) + svntest.actions.run_and_verify_svn(None, error_msg, + 'unlock', + '--username', svntest.main.wc_author2, + '--force', + iota_path, lambda_path, alpha_path) expected_status.tweak('iota', 'A/B/E/alpha', writelocked=None) @@ -1196,7 +1200,7 @@ def info_moved_path(sbox): fname2 = sbox.ospath("iota2") # Move iota, creating r2. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "mv", fname, fname2) expected_output = svntest.wc.State(wc_dir, { 'iota2' : Item(verb='Adding'), @@ -1209,13 +1213,11 @@ def info_moved_path(sbox): expected_status.remove("iota") svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Create a new, unrelated iota, creating r3. svntest.main.file_append(fname, "Another iota") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "add", fname) expected_output = svntest.wc.State(wc_dir, { 'iota' : Item(verb='Adding'), @@ -1225,12 +1227,10 @@ def info_moved_path(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Lock the new iota. 
- svntest.actions.run_and_verify_svn(None, ".*locked by user", [], + svntest.actions.run_and_verify_svn(".*locked by user", [], "lock", fname) expected_status.tweak("iota", writelocked="K") svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -1252,10 +1252,10 @@ def ls_url_encoded(sbox): fname = os.path.join(dirname, "f") # Create a dir with a space in its name and a file therein. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "mkdir", dirname) svntest.main.file_append(fname, "someone was here") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "add", fname) expected_output = svntest.wc.State(wc_dir, { 'space dir' : Item(verb='Adding'), @@ -1268,19 +1268,16 @@ def ls_url_encoded(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Lock the file. - svntest.actions.run_and_verify_svn("Lock space dir/f", ".*locked by user", + svntest.actions.run_and_verify_svn(".*locked by user", [], "lock", fname) # Make sure ls shows it being locked. expected_output = " +2 " + re.escape(svntest.main.wc_author) + " +O .+f|" \ " +2 " + re.escape(svntest.main.wc_author) + " .+\./" - svntest.actions.run_and_verify_svn("List space dir", - expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], "list", "-v", dirname) #---------------------------------------------------------------------- @@ -1297,20 +1294,19 @@ def unlock_wrong_token(sbox): file_path = os.path.join(sbox.wc_dir, fname) file_url = sbox.repo_url + "/iota" - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', file_path) # Steal the lock as the same author, but using a URL to keep the old token # in the WC. 
- svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', "--force", file_url) # Then, unlocking the WC path should fail. ### The error message returned is actually this, but let's worry about that ### another day... - svntest.actions.run_and_verify_svn2( - None, None, ".*((No lock on path)|(400 Bad Request))", 0, - 'unlock', file_path) + svntest.actions.run_and_verify_svn(None, ".*(No lock on path)", + 'unlock', file_path) #---------------------------------------------------------------------- # Verify that info shows lock info for locked files with URI-unsafe names @@ -1325,7 +1321,7 @@ def examine_lock_encoded_recurse(sbox): file_path = os.path.join(sbox.wc_dir, fname) svntest.main.file_append(file_path, "This represents a binary file\n") - svntest.actions.run_and_verify_svn(None, None, [], "add", file_path) + svntest.actions.run_and_verify_svn(None, [], "add", file_path) expected_output = svntest.wc.State(wc_dir, { fname : Item(verb='Adding'), @@ -1338,7 +1334,7 @@ def examine_lock_encoded_recurse(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], file_path) # lock the file and validate the contents @@ -1358,21 +1354,21 @@ def unlocked_lock_of_other_user(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/pi', writelocked='K') - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', pi_path) svntest.actions.run_and_verify_status(wc_dir, expected_status) # now try to unlock with user jconstant, should fail but exit 0. 
if sbox.repo_url.startswith("http"): - expected_err = ".*403 Forbidden.*" + expected_err = "svn: warning: W160039: .*[Uu]nlock of .*403 Forbidden.*" else: expected_err = "svn: warning: W160039: User '%s' is trying to use a lock owned by "\ "'%s'.*" % (svntest.main.wc_author2, svntest.main.wc_author) - svntest.actions.run_and_verify_svn2(None, [], expected_err, 0, - 'unlock', - '--username', svntest.main.wc_author2, - pi_path) + svntest.actions.run_and_verify_svn([], expected_err, + 'unlock', + '--username', svntest.main.wc_author2, + pi_path) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -1390,7 +1386,7 @@ def lock_funky_comment_chars(sbox): svntest.main.file_append(file_path, "This represents a binary file\n") svntest.main.run_svn(None, 'commit', '-m', '', file_path) - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', 'lock & load', file_path) #---------------------------------------------------------------------- @@ -1407,26 +1403,26 @@ def lock_twice_in_one_wc(sbox): # Create a needs-lock file svntest.actions.set_prop('svn:needs-lock', '*', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', wc_dir, '-m', '') # Mark the file readonly - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Switch a second location for the same file in the same working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'switch', sbox.repo_url + '/A', sbox.ospath('A/B'), '--ignore-ancestry') # Lock location 1 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'lock', mu_path, '-m', 'Locked here') - # Locking in location 2 should fail ### Currently returns exitcode 0 - svntest.actions.run_and_verify_svn2(None, None, ".*is already 
locked.*", 0, - 'lock', '-m', '', mu2_path) + # Locking in location 2 should fail + svntest.actions.run_and_verify_svn(None, ".*is already locked.*", + 'lock', '-m', '', mu2_path) # Change the file anyway os.chmod(mu2_path, 0700) @@ -1435,7 +1431,7 @@ def lock_twice_in_one_wc(sbox): # Commit will just succeed as the DB owns the lock. It's a user decision # to commit the other target instead of the one originally locked - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', mu2_path, '-m', '') #---------------------------------------------------------------------- @@ -1454,14 +1450,13 @@ def lock_path_not_in_head(sbox): # Commit deletion of A/D and A/B/lambda as r2, then update the WC # back to r1. Then attempt to lock some paths that no longer exist # in HEAD. These should fail gracefully. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'delete', lambda_path, D_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Some deletions', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r1', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r1', wc_dir) expected_lock_fail_err_re = "svn: warning: W160042: " \ - "((Path .* doesn't exist in HEAD revision)" \ - "|(L(ock|OCK) request (on '.*' )?failed: 405 Method Not Allowed))" + "(Path .* doesn't exist in HEAD revision)" # Issue #3524 These lock attemtps were triggering an assert over ra_serf: # # working_copies\lock_tests-37>svn lock A\D @@ -1473,11 +1468,11 @@ def lock_path_not_in_head(sbox): # ..\..\..\subversion\libsvn_client\ra.c:275: (apr_err=235000) # svn: In file '..\..\..\subversion\libsvn_ra_serf\util.c' line 1120: # assertion failed (ctx->status_code) - svntest.actions.run_and_verify_svn2(None, None, expected_lock_fail_err_re, - 0, 'lock', lambda_path) + svntest.actions.run_and_verify_svn(None, 
expected_lock_fail_err_re, + 'lock', lambda_path) expected_err = 'svn: E155008: The node \'.*D\' is not a file' - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'lock', D_path) @@ -1500,7 +1495,7 @@ def verify_path_escaping(sbox): svntest.main.run_svn(None, 'add', file1, file2, file3) - svntest.main.run_svn(None, 'ci', '-m', 'commit', wc_dir) + sbox.simple_commit(message='commit') svntest.main.run_svn(None, 'lock', '-m', 'lock 1', file1) svntest.main.run_svn(None, 'lock', '-m', 'lock 2', sbox.repo_url + '/file%20%232') @@ -1533,37 +1528,37 @@ def replace_and_propset_locked_path(sbox): rho_path = sbox.ospath('A/D/G/rho') # Lock mu and A/D/G/rho. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'lock', mu_path, rho_path, '-m', 'Locked') # Now replace and propset on mu. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar', mu_path) # Commit mu. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', mu_path) # Let's try this again where directories are involved, shall we? # Replace A/D/G and A/D/G/rho, propset on A/D/G/rho. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', G_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', G_path) svntest.main.file_append(rho_path, "This is the new file 'rho'.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', rho_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar', rho_path) # And commit G. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', '', G_path) @@ -1676,15 +1671,15 @@ def block_unlock_if_pre_unlock_hook_fails(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/pi', writelocked='K') - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', pi_path) svntest.actions.run_and_verify_status(wc_dir, expected_status) # Make sure the unlock operation fails as pre-unlock hook blocks it. 
- expected_unlock_fail_err_re = ".*error text|.*500 Internal Server Error" - svntest.actions.run_and_verify_svn2(None, None, expected_unlock_fail_err_re, - 1, 'unlock', pi_path) + expected_unlock_fail_err_re = ".*error text" + svntest.actions.run_and_verify_svn(None, expected_unlock_fail_err_re, + 'unlock', pi_path) svntest.actions.run_and_verify_status(wc_dir, expected_status) #---------------------------------------------------------------------- @@ -1703,10 +1698,10 @@ def lock_invalid_token(sbox): fname = 'iota' file_path = os.path.join(sbox.wc_dir, fname) - svntest.actions.run_and_verify_svn2(None, None, - "svn: warning: W160037: " \ - ".*scheme.*'opaquelocktoken'", 0, - 'lock', '-m', '', file_path) + svntest.actions.run_and_verify_svn(None, + "svn: warning: W160037: " \ + ".*scheme.*'opaquelocktoken'", + 'lock', '-m', '', file_path) @Issue(3105) def lock_multi_wc(sbox): @@ -1720,19 +1715,19 @@ def lock_multi_wc(sbox): wc2_name = os.path.basename(sbox2.wc_dir) expected_output = svntest.verify.UnorderedOutput([ - '\'%s\' locked by user \'jrandom\'.\n' % os.path.join(wc_name, 'iota'), - '\'%s\' locked by user \'jrandom\'.\n' % os.path.join(wc2_name, 'A', 'mu'), + '\'%s\' locked by user \'jrandom\'.\n' % sbox.ospath('iota'), + '\'%s\' locked by user \'jrandom\'.\n' % sbox2.ospath('A/mu'), ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'lock', sbox.ospath('iota'), sbox2.ospath('A/mu')) expected_output = svntest.verify.UnorderedOutput([ - '\'%s\' unlocked.\n' % os.path.join(wc_name, 'iota'), - '\'%s\' unlocked.\n' % os.path.join(wc2_name, 'A', 'mu'), + '\'%s\' unlocked.\n' % sbox.ospath('iota'), + '\'%s\' unlocked.\n' % sbox2.ospath('A/mu'), ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'unlock', sbox.ospath('iota'), sbox2.ospath('A/mu')) @@ -1744,7 +1739,7 @@ def locks_stick_over_switch(sbox): wc_dir = 
sbox.wc_dir repo_url = sbox.repo_url - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A'), repo_url + '/AA', '-m', '') @@ -1753,7 +1748,7 @@ def locks_stick_over_switch(sbox): '\'%s\' locked by user \'jrandom\'.\n' % os.path.join('A', 'D', 'H', 'chi'), '\'%s\' locked by user \'jrandom\'.\n' % os.path.join('A', 'mu'), ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'lock', sbox.ospath('A/D/H/chi'), sbox.ospath('A/mu'), sbox.ospath('iota')) @@ -1793,7 +1788,7 @@ def lock_unlock_deleted(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/mu')) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1801,13 +1796,13 @@ def lock_unlock_deleted(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) expected_output = '\'mu\' locked by user \'jrandom\'.' - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'lock', sbox.ospath('A/mu')) expected_status.tweak('A/mu', writelocked='K') svntest.actions.run_and_verify_status(wc_dir, expected_status) expected_output = '\'mu\' unlocked.' - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'unlock', sbox.ospath('A/mu')) expected_status.tweak('A/mu', writelocked=None) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -1822,8 +1817,8 @@ def commit_stolen_lock(sbox): sbox.simple_append('A/mu', 'zig-zag') sbox.simple_lock('A/mu') - expected_output = '\'mu\' locked by user \'jrandom\'.' - svntest.actions.run_and_verify_svn(None, expected_output, [], + expected_output = '\'.*mu\' locked by user \'jrandom\'.' 
+ svntest.actions.run_and_verify_svn(expected_output, [], 'lock', '--force', sbox.repo_url + '/A/mu') @@ -1834,11 +1829,10 @@ def commit_stolen_lock(sbox): svntest.actions.run_and_verify_commit(wc_dir, [], expected_status, - err_re, - wc_dir) + err_re) -# When removing directories, the locks of contained files were not -# correctly removed from the working copy database, thus they later +# When removing directories, the locks of contained files were not +# correctly removed from the working copy database, thus they later # magically reappeared when new files or directories with the same # pathes were added. @Issue(4364) @@ -1853,15 +1847,13 @@ def drop_locks_on_parent_deletion(sbox): sbox.simple_lock('A/B/E/alpha') sbox.simple_lock('A/B/E/beta') sbox.simple_rm('A/B') - + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.remove_subtree('A/B') - + svntest.actions.run_and_verify_commit(wc_dir, [], - expected_status, - None, - wc_dir) + expected_status) # now re-add entities to the deleted pathes. sbox.simple_mkdir('A/B') @@ -1870,32 +1862,231 @@ def drop_locks_on_parent_deletion(sbox): # The bug also resurrected locks on directories when their path # matched a former file. 
sbox.simple_mkdir('A/B/E', 'A/B/E/alpha') - + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/B', - 'A/B/E', - 'A/B/E/alpha', - 'A/B/F', - 'A/B/lambda', - wc_rev='3') + 'A/B/E', + 'A/B/E/alpha', + 'A/B/F', + 'A/B/lambda', + wc_rev='3') expected_status.remove('A/B/E/beta') - + svntest.actions.run_and_verify_commit(wc_dir, [], - expected_status, - None, - wc_dir) - - -@SkipUnless(svntest.main.is_ra_type_dav) + expected_status) + + +def copy_with_lock(sbox): + """copy with lock on source""" + + sbox.build() + wc_dir = sbox.wc_dir + lock_url = sbox.repo_url + '/A/B/E/alpha' + + svntest.actions.run_and_validate_lock(lock_url, svntest.main.wc_author) + sbox.simple_copy('A/B/E', 'A/B/E2') + + expected_output = svntest.wc.State(wc_dir, { + 'A/B/E2' : Item(verb='Adding'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/B/E/alpha', writelocked='O') + expected_status.add({ + 'A/B/E2' : Item(status=' ', wc_rev=2), + 'A/B/E2/alpha' : Item(status=' ', wc_rev=2), + 'A/B/E2/beta' : Item(status=' ', wc_rev=2), + }) + + # This is really a regression test for httpd: 2.2.25 and 2.4.6, and + # earlier, have a bug that causes mod_dav to check for locks on the + # copy source and so the commit fails. 
+ svntest.actions.run_and_verify_commit(wc_dir, + expected_output, + expected_status) + +def lock_hook_messages(sbox): + "verify (un)lock message is transferred correctly" + + sbox.build(create_wc = False) + repo_dir = sbox.repo_dir + + iota_url = sbox.repo_url + "/iota" + mu_url = sbox.repo_url + "/A/mu" + + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', + iota_url) + + error_msg = "Text with <angle brackets> & ampersand" + svntest.actions.create_failing_hook(repo_dir, "pre-lock", error_msg) + svntest.actions.create_failing_hook(repo_dir, "pre-unlock", error_msg) + + _, _, actual_stderr = svntest.actions.run_and_verify_svn( + [], svntest.verify.AnyOutput, + 'lock', mu_url) + if len(actual_stderr) > 4: + actual_stderr = actual_stderr[-4:-2] + actual_stderr[-1:] + expected_err = [ + 'svn: warning: W165001: ' + svntest.actions.hook_failure_message('pre-lock'), + error_msg + "\n", + "svn: E200009: One or more locks could not be obtained\n", + ] + svntest.verify.compare_and_display_lines(None, 'STDERR', + expected_err, actual_stderr) + + + _, _, actual_stderr = svntest.actions.run_and_verify_svn( + [], svntest.verify.AnyOutput, + 'unlock', iota_url) + if len(actual_stderr) > 4: + actual_stderr = actual_stderr[-4:-2] + actual_stderr[-1:] + expected_err = [ + 'svn: warning: W165001: ' + svntest.actions.hook_failure_message('pre-unlock'), + error_msg + "\n", + "svn: E200009: One or more locks could not be released\n", + ] + svntest.verify.compare_and_display_lines(None, 'STDERR', + expected_err, actual_stderr) + + +def failing_post_hooks(sbox): + "locking with failing post-lock and post-unlock" + + sbox.build() + wc_dir = sbox.wc_dir + repo_dir = sbox.repo_dir + + svntest.actions.create_failing_hook(repo_dir, "post-lock", "error text") + svntest.actions.create_failing_hook(repo_dir, "post-unlock", "error text") + + pi_path = sbox.ospath('A/D/G/pi') + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/D/G/pi', 
writelocked='K') + + if svntest.main.is_ra_type_dav(): + expected_lock_err = [] + expected_unlock_err = '.*svn: E165009: Unlock succeeded.*' # + else: + expected_unlock_err = expected_lock_err = ".*error text" + + # Failing post-lock doesn't stop lock being created. + svntest.actions.run_and_verify_svn("'pi' locked by user", + expected_lock_err, + 'lock', '-m', '', pi_path) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + expected_status.tweak('A/D/G/pi', writelocked=None) + + # Failing post-unlock doesn't stop lock being removed. + svntest.actions.run_and_verify_svn("'pi' unlocked", + expected_unlock_err, + 'unlock', pi_path) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + +def break_delete_add(sbox): + "break a lock, delete and add the file" + + sbox.build() + wc_dir = sbox.wc_dir + + svntest.actions.run_and_verify_svn(".*locked by user", [], + 'lock', + '-m', 'some lock comment', + sbox.ospath('A/mu')) + + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', writelocked='K') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + svntest.actions.run_and_verify_svn(".*unlocked", [], + 'unlock', '--force', + sbox.repo_url + '/A/mu') + + svntest.actions.run_and_verify_svn(None, [], + 'rm', + '-m', 'delete file', + sbox.repo_url + '/A/mu') + + # Update removes the locked file and should remove the lock token. + sbox.simple_update() + + # Lock token not visible on newly added file. + sbox.simple_append('A/mu', 'another mu') + sbox.simple_add('A/mu') + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.tweak('A/mu', status='A ', wc_rev='-') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + ### XFAIL Broken lock token now visible in status. 
+ sbox.simple_commit() + expected_status.tweak('A/mu', status=' ', wc_rev=3) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + def dav_lock_timeout(sbox): "unlock a lock with timeout" + # Locks with timeouts are only created by generic DAV clients but a + # Subversion client may need to view or unlock one over any RA + # layer. + + sbox.build() + wc_dir = sbox.wc_dir + + svntest.main.run_lock_helper(sbox.repo_dir, 'iota', 'some_user', 999) + # Lock should have an expiration date + expiration_date = svntest.actions.run_and_parse_info(sbox.repo_url + '/iota')[0]['Lock Expires'] + + # Verify that there is a lock, by trying to obtain one + svntest.actions.run_and_verify_svn(None, ".*locked by user", + 'lock', '-m', '', sbox.ospath('iota')) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('iota', writelocked='O') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + # This used to fail over serf with a parse error of the timeout. + expected_err = "svn: warning: W160039:" + svntest.actions.run_and_verify_svn(None, expected_err, + 'unlock', sbox.repo_url + '/iota') + + # Force unlock via working copy, this also used to fail over serf. 
+ svntest.actions.run_and_verify_svn(None, [], + 'unlock', sbox.ospath('iota'), '--force') + expected_status.tweak('iota', writelocked=None) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + # Lock again + svntest.main.run_lock_helper(sbox.repo_dir, 'iota', 'some_user', 999) + expected_status.tweak('iota', writelocked='O') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + # Force unlock via URL, this also used to fail over serf + svntest.actions.run_and_verify_svn(None, [], + 'unlock', sbox.repo_url + '/iota', + '--force') + expected_status.tweak('iota', writelocked=None) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + # Lock again + svntest.main.run_lock_helper(sbox.repo_dir, 'iota', 'some_user', 999) + expected_status.tweak('iota', writelocked='O') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + # Force lock via working copy, this also used to fail over serf. + svntest.actions.run_and_verify_svn(None, [], + 'lock', sbox.ospath('iota'), '--force') + expected_status.tweak('iota', writelocked='K') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + +@SkipUnless(svntest.main.is_ra_type_dav) +def create_dav_lock_timeout(sbox): + "create generic DAV lock with timeout" + import httplib from urlparse import urlparse import base64 sbox.build() + wc_dir = sbox.wc_dir loc = urlparse(sbox.repo_url) if loc.scheme == 'http': @@ -1924,18 +2115,12 @@ def dav_lock_timeout(sbox): r = h.getresponse() - # Verify that there is a lock, by trying to obtain one - svntest.actions.run_and_verify_svn2(None, None, ".*locked by user", 0, - 'lock', '-m', '', sbox.ospath('iota')) - - # Before this patch this used to fail with a parse error of the timeout - svntest.actions.run_and_verify_svn2(None, None, ".*W160039.*Unlock.*403", 0, - 'unlock', sbox.repo_url + '/iota') - - svntest.actions.run_and_verify_svn(None, None, [], - 'unlock', sbox.ospath('iota'), '--force') - + expected_status = 
svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('iota', writelocked='O') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + # Lock should have an expiration date + expiration_date = svntest.actions.run_and_parse_info(sbox.repo_url + '/iota')[0]['Lock Expires'] def non_root_locks(sbox): "locks for working copies not at repos root" @@ -1943,16 +2128,16 @@ def non_root_locks(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.repo_url, sbox.repo_url + '/X', '-m', 'copy greek tree') - sbox.simple_switch(sbox.repo_url + '/X') + sbox.simple_switch(sbox.repo_url + '/X') expected_status = svntest.actions.get_virginal_state(wc_dir, 2) svntest.actions.run_and_verify_status(wc_dir, expected_status) # Lock a file - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', sbox.ospath('A/D/G/pi'), '-m', '') expected_status.tweak('A/D/G/pi', writelocked='K') @@ -1965,7 +2150,7 @@ def non_root_locks(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Break the lock - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'unlock', sbox.repo_url + '/X/A/D/G/pi') # Subdir update reports the break @@ -1974,12 +2159,12 @@ def non_root_locks(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Relock and break - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', sbox.ospath('A/D/G/pi'), '-m', '') expected_status.tweak('A/D/G/pi', writelocked='K') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'unlock', sbox.repo_url + '/X/A/D/G/pi') # Root update reports the break @@ -1987,6 +2172,56 @@ def 
non_root_locks(sbox): expected_status.tweak('A/D/G/pi', writelocked=None) svntest.actions.run_and_verify_status(wc_dir, expected_status) +def many_locks_hooks(sbox): + "many locks with hooks" + + sbox.build() + wc_dir = sbox.wc_dir + + # Prevent locking '/A/D/G/pi'. + svntest.main.create_python_hook_script(os.path.join(sbox.repo_dir, + 'hooks', 'pre-lock'), + 'import sys\n' + 'if sys.argv[2] == "/A/D/G/pi":\n' + ' sys.exit(1)\n' + 'sys.exit(0)\n') + + # Prevent unlocking '/A/mu'. + svntest.main.create_python_hook_script(os.path.join(sbox.repo_dir, + 'hooks', 'pre-unlock'), + 'import sys\n' + 'if sys.argv[2] == "/A/mu":\n' + ' sys.exit(1)\n' + 'sys.exit(0)\n') + + svntest.actions.run_and_verify_svn(".* locked", + "svn: warning: W165001: .*", + 'lock', + sbox.ospath('iota'), + sbox.ospath('A/mu'), + sbox.ospath('A/B/E/alpha'), + sbox.ospath('A/D/G/pi'), + sbox.ospath('A/D/G/rho')) + + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('iota', 'A/mu', 'A/B/E/alpha', 'A/D/G/rho', + writelocked='K') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + svntest.actions.run_and_verify_svn(".* unlocked", + "svn: warning: W165001: .*", + 'unlock', + sbox.ospath('iota'), + sbox.ospath('A/mu'), + sbox.ospath('A/B/E/alpha'), + sbox.ospath('A/D/G/rho')) + + expected_status.tweak('iota', 'A/B/E/alpha', 'A/D/G/rho', + writelocked=None) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + + @Issue(3515) @SkipUnless(svntest.main.is_ra_type_dav) def dav_lock_refresh(sbox): @@ -1999,7 +2234,7 @@ def dav_lock_refresh(sbox): sbox.build(create_wc = False) # Acquire lock on 'iota' - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', sbox.repo_url + '/iota') # Try to refresh lock using 'If' header @@ -2033,14 +2268,27 @@ def delete_locked_file_with_percent(sbox): "lock and delete a file called 'a %( ) .txt'" sbox.build() + wc_dir = 
sbox.wc_dir locked_filename = 'a %( ) .txt' locked_path = sbox.ospath(locked_filename) svntest.main.file_write(locked_path, "content\n") sbox.simple_add(locked_filename) sbox.simple_commit() - + sbox.simple_lock(locked_filename) + + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.add({ + 'a %( ) .txt' : Item(status=' ', wc_rev='2', writelocked='K') + }) + expected_infos = [ + { 'Lock Owner' : 'jrandom' }, + ] + svntest.actions.run_and_verify_info(expected_infos, sbox.path('a %( ) .txt'), + '-rHEAD') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + sbox.simple_rm(locked_filename) # XFAIL: With a 1.8.x client, this commit fails with: @@ -2049,6 +2297,200 @@ def delete_locked_file_with_percent(sbox): # Invalid percent encoded URI in tagged If-header [400, #104] sbox.simple_commit() +def lock_commit_bump(sbox): + "a commit should not bump just locked files" + + sbox.build() + wc_dir = sbox.wc_dir + sbox.simple_lock('iota') + + changed_file = sbox.ospath('changed') + sbox.simple_append('changed', 'Changed!') + + svntest.actions.run_and_verify_svn(None, [], 'unlock', '--force', + sbox.repo_url + '/iota') + + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, '-m', 'Q', + 'put', changed_file, 'iota') + + sbox.simple_append('A/mu', 'GOAAAAAAAAL!') + + expected_output = svntest.wc.State(wc_dir, { + 'A/mu' : Item(verb='Sending'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', wc_rev=3) + + svntest.actions.run_and_verify_commit(wc_dir, + expected_output, + expected_status) + + # We explicitly check both the Revision and Last Changed Revision. 
+ expected_infos = [ { + 'Revision' : '1' , + 'Last Changed Rev' : '1' , + 'URL' : '.*', + 'Lock Token' : None, } + ] + svntest.actions.run_and_verify_info(expected_infos, + sbox.ospath('iota')) + +def copy_dir_with_locked_file(sbox): + "copy a directory containing a locked file" + + sbox.build() + AA_url = sbox.repo_url + '/AA' + AA2_url = sbox.repo_url + '/AA2' + A_url = sbox.repo_url + '/A' + mu_url = A_url + '/mu' + + svntest.main.run_svn(None, 'lock', '-m', 'locked', mu_url) + + svntest.actions.run_and_verify_svn(None, [], + 'cp', A_url, AA_url, + '-m', '') + + expected_err = "svn: E160037: .*no matching lock-token available" + svntest.actions.run_and_verify_svn(None, expected_err, + 'mv', A_url, AA2_url, + '-m', '') + +@Issue(4557) +def delete_dir_with_lots_of_locked_files(sbox): + "delete a directory containing lots of locked files" + + sbox.build() + wc_dir = sbox.wc_dir + + # A lot of paths. + nfiles = 75 # NOTE: test XPASSES with 50 files!!! + locked_paths = [] + for i in range(nfiles): + locked_paths.append(sbox.ospath("A/locked_files/file-%i" % i)) + + # Create files at these paths + os.mkdir(sbox.ospath("A/locked_files")) + for file_path in locked_paths: + svntest.main.file_write(file_path, "This is '%s'.\n" % (file_path,)) + sbox.simple_add("A/locked_files") + sbox.simple_commit() + sbox.simple_update() + + # lock all the files + svntest.actions.run_and_verify_svn(None, [], 'lock', + '-m', 'All locks', + *locked_paths) + # Locally delete A (regression against earlier versions, which + # always used a special non-standard request) + sbox.simple_rm("A") + + # Commit the deletion + # XFAIL: As of 1.8.10, this commit fails with: + # svn: E175002: Unexpected HTTP status 400 'Bad Request' on '<path>' + # and the following error in the httpd error log: + # request failed: error reading the headers + # This problem was introduced on the 1.8.x branch in r1606976. 
+ sbox.simple_commit() + +def delete_locks_on_depth_commit(sbox): + "delete locks on depth-limited commit" + + sbox.build() + wc_dir = sbox.wc_dir + + svntest.actions.run_and_verify_svn(None, [], 'lock', + '-m', 'All files', + *(sbox.ospath(x) + for x in ['iota', 'A/B/E/alpha', + 'A/B/E/beta', 'A/B/lambda', + 'A/D/G/pi', 'A/D/G/rho', + 'A/D/G/tau', 'A/D/H/chi', + 'A/D/H/omega', 'A/D/H/psi', + 'A/D/gamma', 'A/mu'])) + + sbox.simple_rm("A") + + expected_output = svntest.wc.State(wc_dir, { + 'A' : Item(verb='Deleting'), + }) + + expected_status = svntest.wc.State(wc_dir, { + '' : Item(status=' ', wc_rev='1'), + 'iota' : Item(status=' ', wc_rev='1'), + }) + + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status, [], + wc_dir, '--depth', 'immediates') + + sbox.simple_update() # r2 + + svntest.actions.run_and_verify_svn(None, [], 'cp', + sbox.repo_url + '/A@1', sbox.ospath('A')) + + expected_output = [ + 'Adding %s\n' % sbox.ospath('A'), + 'svn: The depth of this commit is \'immediates\', but copies ' \ + 'are always performed recursively in the repository.\n', + 'Committing transaction...\n', + 'Committed revision 3.\n', + ] + + # Verifying the warning line... so can't use verify_commit() + svntest.actions.run_and_verify_svn(expected_output, [], + 'commit', wc_dir, '--depth', 'immediates', + '-mm') + + # Verify that all locks are gone at the server and at the client + expected_status = svntest.actions.get_virginal_state(wc_dir, 3) + expected_status.tweak('', 'iota', wc_rev=2) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + +@Issue(4557) +@XFail(svntest.main.is_ra_type_dav) +def replace_dir_with_lots_of_locked_files(sbox): + "replace directory containing lots of locked files" + + sbox.build() + wc_dir = sbox.wc_dir + + # A lot of paths. + nfiles = 75 # NOTE: test XPASSES with 50 files!!! 
+ locked_paths = [] + for i in range(nfiles): + locked_paths.append(sbox.ospath("A/locked_files/file-%i" % i)) + + # Create files at these paths + os.mkdir(sbox.ospath("A/locked_files")) + for file_path in locked_paths: + svntest.main.file_write(file_path, "This is '%s'.\n" % (file_path,)) + sbox.simple_add("A/locked_files") + sbox.simple_commit() + sbox.simple_update() + + # lock all the files + svntest.actions.run_and_verify_svn(None, [], 'lock', + '-m', 'All locks', + *locked_paths) + # Locally delete A (regression against earlier versions, which + # always used a special non-standard request) + sbox.simple_rm("A") + + # But a further replacement never worked + sbox.simple_mkdir("A") + # And an additional propset didn't work either + # (but doesn't require all lock tokens recursively) + sbox.simple_propset("k", "v", "A") + + # Commit the deletion + # XFAIL: As of 1.8.10, this commit fails with: + # svn: E175002: Unexpected HTTP status 400 'Bad Request' on '<path>' + # and the following error in the httpd error log: + # request failed: error reading the headers + # This problem was introduced on the 1.8.x branch in r1606976. 
+ sbox.simple_commit() + ######################################################################## # Run the tests @@ -2102,10 +2544,21 @@ test_list = [ None, lock_unlock_deleted, commit_stolen_lock, drop_locks_on_parent_deletion, + copy_with_lock, + lock_hook_messages, + failing_post_hooks, + break_delete_add, dav_lock_timeout, + create_dav_lock_timeout, non_root_locks, + many_locks_hooks, dav_lock_refresh, delete_locked_file_with_percent, + lock_commit_bump, + copy_dir_with_locked_file, + delete_dir_with_lots_of_locked_files, + delete_locks_on_depth_commit, + replace_dir_with_lots_of_locked_files, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/log_tests.py b/subversion/tests/cmdline/log_tests.py index ffcdace..bdb749c 100755 --- a/subversion/tests/cmdline/log_tests.py +++ b/subversion/tests/cmdline/log_tests.py @@ -33,8 +33,8 @@ from svntest import wc from svntest.main import server_has_mergeinfo from svntest.main import SVN_PROP_MERGEINFO -from merge_tests import set_up_branch -from diff_tests import make_diff_header, make_no_diff_deleted_header +from svntest.mergetrees import set_up_branch +from svntest.verify import make_diff_header, make_no_diff_deleted_header # (abbreviation) Skip = svntest.testcase.Skip_deco @@ -227,12 +227,7 @@ def merge_history_repos(sbox): branch_c = os.path.join('branches', 'c') # Create an empty repository - r0 - svntest.main.safe_rmtree(sbox.repo_dir, 1) - svntest.main.safe_rmtree(sbox.wc_dir, 1) - svntest.main.create_repos(sbox.repo_dir) - - svntest.actions.run_and_verify_svn(None, None, [], "co", sbox.repo_url, - sbox.wc_dir) + sbox.build(empty=True) was_cwd = os.getcwd() os.chdir(sbox.wc_dir) @@ -407,7 +402,6 @@ def merge_history_repos(sbox): # Restore working directory os.chdir(was_cwd) - # For errors seen while parsing log data. 
class SVNLogParseError(Exception): pass @@ -718,6 +712,13 @@ def compare_diff_output(expected_diffs, output): if diffs.issubset(expected_diffs) and diffs.issuperset(expected_diffs): return + print("=============== DIFFS NOT EQUAL ===================") + print("Expected") + for line in expected_diffs: + print(line) + print("Actual:") + for line in output: + print(line) raise svntest.Failure("Diffs not equal") @@ -733,7 +734,7 @@ def plain_log(sbox): os.chdir(sbox.wc_dir) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log') log_chain = parse_log_output(output) @@ -745,10 +746,9 @@ def log_with_empty_repos(sbox): "'svn log' on an empty repository" # Create virgin repos - svntest.main.safe_rmtree(sbox.repo_dir, 1) - svntest.main.create_repos(sbox.repo_dir) + sbox.build(create_wc=False, empty=True) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'log', sbox.repo_url) @@ -768,13 +768,13 @@ def log_where_nothing_changed(sbox): rho_path = os.path.join(sbox.wc_dir, 'A', 'D', 'G', 'rho') svntest.main.file_append(rho_path, "some new material in rho") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', rho_path) # Now run 'svn log -r2' on a directory unaffected by revision 2. H_path = os.path.join(sbox.wc_dir, 'A', 'D', 'H') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'log', '-r', '2', H_path) @@ -785,7 +785,7 @@ def log_to_revision_zero(sbox): # This used to segfault the server. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'log', '-v', '-r', '1:0', sbox.wc_dir) @@ -798,7 +798,7 @@ def log_with_path_args(sbox): os.chdir(sbox.wc_dir) exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'log', sbox.repo_url, 'A/D/G', 'A/D/H') log_chain = parse_log_output(output) @@ -827,7 +827,7 @@ def log_wc_with_peg_revision(sbox): "'svn log wc_target@N'" guarantee_repos_and_wc(sbox) my_path = os.path.join(sbox.wc_dir, "A", "B", "E", "beta") + "@8" - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', my_path) check_log_chain(parse_log_output(output), [1]) @@ -839,7 +839,7 @@ def url_missing_in_head(sbox): my_url = sbox.repo_url + "/A/B/E/alpha" + "@8" - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', my_url) check_log_chain(parse_log_output(output), [3, 1]) @@ -870,38 +870,38 @@ def log_through_copyfrom_history(sbox): svntest.main.file_write(msg_file, msg2) svntest.main.file_append(mu_path, "2") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-F', msg_file) svntest.main.file_append(mu2_path, "this is mu2") - svntest.actions.run_and_verify_svn(None, None, [], 'add', mu2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', mu2_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', "Log message for revision 3") - svntest.actions.run_and_verify_svn(None, None, [], 'rm', mu2_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', mu2_path) svntest.main.file_write(msg_file, msg4) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-F', msg_file) 
svntest.main.file_append(mu_path, "5") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', "Log message for revision 5") svntest.main.file_write(msg_file, msg6) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-r', '5', mu_URL, mu2_URL, '-F', msg_file) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # The full log for mu2 is relatively unsurprising - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', mu2_path) log_chain = parse_log_output(output) check_log_chain(log_chain, [6, 5, 2, 1]) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', mu2_URL) log_chain = parse_log_output(output) check_log_chain(log_chain, [6, 5, 2, 1]) @@ -909,25 +909,25 @@ def log_through_copyfrom_history(sbox): # First "oddity", the full log for mu2 doesn't include r3, but the -r3 # log works! 
peg_mu2_path = mu2_path + "@3" - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-r', '3', peg_mu2_path) log_chain = parse_log_output(output) check_log_chain(log_chain, [3]) peg_mu2_URL = mu2_URL + "@3" - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-r', '3', peg_mu2_URL) log_chain = parse_log_output(output) check_log_chain(log_chain, [3]) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-r', '2', mu2_path) log_chain = parse_log_output(output) check_log_chain(log_chain, [2]) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-r', '2', mu2_URL) log_chain = parse_log_output(output) @@ -980,7 +980,7 @@ PROPS-END # run log exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'log', URL) + None, [], 'log', URL) # Verify the output contains either the expected fuzzy escape # sequence, or the literal control char. @@ -1010,7 +1010,7 @@ def log_xml_empty_date(sbox): # Ensure that we get a date before we delete the property. exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '--xml', '-r1', sbox.wc_dir) + None, [], 'log', '--xml', '-r1', sbox.wc_dir) matched = 0 for line in output: @@ -1020,12 +1020,12 @@ def log_xml_empty_date(sbox): raise svntest.Failure("log contains no date element") # Set the svn:date revprop to the empty string on revision 1. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'pdel', '--revprop', '-r1', 'svn:date', sbox.wc_dir) exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '--xml', '-r1', sbox.wc_dir) + None, [], 'log', '--xml', '-r1', sbox.wc_dir) for line in output: if date_re.search(line): @@ -1036,14 +1036,14 @@ def log_limit(sbox): "svn log --limit" guarantee_repos_and_wc(sbox) - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--limit', '2', sbox.repo_url) log_chain = parse_log_output(out) check_log_chain(log_chain, [9, 8]) - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--limit', '2', sbox.repo_url, @@ -1052,7 +1052,7 @@ def log_limit(sbox): check_log_chain(log_chain, [9, 6]) exit_code, out, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'log', '--limit', '2', '--revision', '2:HEAD', sbox.repo_url, 'A/B') log_chain = parse_log_output(out) @@ -1060,7 +1060,7 @@ def log_limit(sbox): # Use -l instead of --limit to test both option forms. 
exit_code, out, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'log', '-l', '2', '--revision', '1', sbox.repo_url, 'A/B') log_chain = parse_log_output(out) @@ -1069,11 +1069,11 @@ def log_limit(sbox): must_be_positive = ".*Argument to --limit must be positive.*" # error expected when limit <= 0 - svntest.actions.run_and_verify_svn(None, None, must_be_positive, + svntest.actions.run_and_verify_svn(None, must_be_positive, 'log', '--limit', '0', '--revision', '1', sbox.repo_url, 'A/B') - svntest.actions.run_and_verify_svn(None, None, must_be_positive, + svntest.actions.run_and_verify_svn(None, must_be_positive, 'log', '--limit', '-1', '--revision', '1', sbox.repo_url, 'A/B') @@ -1083,16 +1083,16 @@ def log_base_peg(sbox): target = os.path.join(sbox.wc_dir, 'A', 'B', 'E', 'beta') + '@BASE' - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'log', target) log_chain = parse_log_output(out) check_log_chain(log_chain, [9, 1]) - svntest.actions.run_and_verify_svn(None, None, [], 'update', '-r', '1', + svntest.actions.run_and_verify_svn(None, [], 'update', '-r', '1', sbox.wc_dir) - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'log', target) log_chain = parse_log_output(out) @@ -1103,7 +1103,7 @@ def log_verbose(sbox): "run log with verbose output" guarantee_repos_and_wc(sbox) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-v', sbox.wc_dir) @@ -1262,7 +1262,7 @@ def merge_sensitive_log_single_revision(sbox): } os.chdir(TRUNK_path) # First try a single rev using -rN - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r14') @@ -1270,7 +1270,7 @@ 
def merge_sensitive_log_single_revision(sbox): log_chain = parse_log_output(output) check_merge_results(log_chain, expected_merges) # Then try a single rev using --limit 1 - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '--limit', '1', '-r14:1') @@ -1285,13 +1285,13 @@ def merge_sensitive_log_single_revision(sbox): 11 : [12], } # First try a single rev using -rN - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r12', BRANCH_B_path) log_chain = parse_log_output(output) check_merge_results(log_chain, expected_merges) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '--limit', '1', '-r12:1', @@ -1311,7 +1311,7 @@ def merge_sensitive_log_branching_revision(sbox): BRANCH_B_path = os.path.join(wc_dir, "branches", "b") # Run log on a copying revision - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r10', BRANCH_B_path) @@ -1333,7 +1333,7 @@ def merge_sensitive_log_non_branching_revision(sbox): TRUNK_path = os.path.join(sbox.wc_dir, "trunk") # Run log on a non-copying revision that adds mergeinfo - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r6', TRUNK_path) @@ -1357,7 +1357,7 @@ def merge_sensitive_log_added_path(sbox): XI_path = os.path.join(sbox.wc_dir, "trunk", "A", "xi") # Run log on a non-copying revision that adds mergeinfo - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', XI_path) @@ 
-1387,7 +1387,7 @@ def log_single_change(sbox): guarantee_repos_and_wc(sbox) repo_url = sbox.repo_url - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-c', 4, repo_url) log_chain = parse_log_output(output) @@ -1399,7 +1399,7 @@ def log_changes_range(sbox): guarantee_repos_and_wc(sbox) repo_url = sbox.repo_url - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-c', '2-5', repo_url) @@ -1412,7 +1412,7 @@ def log_changes_list(sbox): guarantee_repos_and_wc(sbox) repo_url = sbox.repo_url - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-c', '2,5,7', repo_url) @@ -1426,7 +1426,7 @@ def log_changes_complex(sbox): guarantee_repos_and_wc(sbox) repo_url = sbox.repo_url - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-c', '2,5-3,-8,6-7', repo_url) @@ -1474,7 +1474,7 @@ def retrieve_revprops(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], '-m', msg2, omega_path) @@ -1482,10 +1482,10 @@ def retrieve_revprops(sbox): # Set custom property on r1 and r2. svntest.actions.run_and_verify_svn( - None, None, [], # message, expected_stdout, expected_stderr + None, [], # message, expected_stdout, expected_stderr 'ps', '--revprop', '-r1', custom_name, custom_value, sbox.repo_url) svntest.actions.run_and_verify_svn( - None, None, [], # message, expected_stdout, expected_stderr + None, [], # message, expected_stdout, expected_stderr 'ps', '--revprop', '-r2', custom_name, custom_value, sbox.repo_url) # Can't set revprops with log. 
@@ -1580,8 +1580,8 @@ def merge_sensitive_log_target_with_bogus_mergeinfo(sbox): svntest.main.run_svn(None, 'ci', '-m', 'setting bogus mergeinfo', wc_path) # The tests: Check that 'svn log -g' doesn't error on these. - svntest.actions.run_and_verify_svn(None, None, [], 'log', '-g', C_path) - svntest.actions.run_and_verify_svn(None, None, [], 'log', '-g', D_path) + svntest.actions.run_and_verify_svn(None, [], 'log', '-g', C_path) + svntest.actions.run_and_verify_svn(None, [], 'log', '-g', D_path) @SkipUnless(server_has_mergeinfo) @Issue(3235) @@ -1646,8 +1646,7 @@ def merge_sensitive_log_added_mergeinfo_replaces_inherited(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Commit the merge. expected_output = svntest.wc.State(wc_dir, { @@ -1661,8 +1660,7 @@ def merge_sensitive_log_added_mergeinfo_replaces_inherited(sbox): 'A_COPY/D/H/omega', 'A_COPY/D/H/psi', wc_rev=7) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) wc_disk.tweak("A_COPY/D", props={SVN_PROP_MERGEINFO : '/A/D:2-6'}) wc_disk.tweak("A_COPY/D/G/rho", "A_COPY/D/H/omega", "A_COPY/D/H/psi", @@ -1671,8 +1669,7 @@ def merge_sensitive_log_added_mergeinfo_replaces_inherited(sbox): # Reverse merge r3 from 'A/D/H' to 'A_COPY/D/H' and commit as r8. # First update the wc so mergeinfo inheritance can occur. This is # necessary so A_COPY/D/H 'knows' that r3 has been merged into it. - svntest.actions.run_and_verify_svn(None, - exp_noop_up_out(7), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', wc_dir) wc_status.tweak(wc_rev=7) expected_output = wc.State(H_COPY_path, { @@ -1703,7 +1700,7 @@ def merge_sensitive_log_added_mergeinfo_replaces_inherited(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Commit the merge. 
expected_output = svntest.wc.State(wc_dir, { @@ -1713,8 +1710,7 @@ def merge_sensitive_log_added_mergeinfo_replaces_inherited(sbox): wc_status.tweak('A_COPY/D/H', 'A_COPY/D/H/psi', wc_rev=8) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) wc_disk.tweak("A_COPY/D/H", props={SVN_PROP_MERGEINFO : '/A/D:2,4-6'}) wc_disk.tweak("A_COPY/D/G/rho", "A_COPY/D/H/omega", "A_COPY/D/H/psi", @@ -1735,7 +1731,7 @@ def merge_sensitive_log_added_mergeinfo_replaces_inherited(sbox): 8 : []} expected_reverse_merges = { 3 : [8]} - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r8', @@ -1788,7 +1784,7 @@ def merge_sensitive_log_propmod_merge_inheriting_path(sbox): 3 : [7], } exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-g', '-r7', log_target) + None, [], 'log', '-g', '-r7', log_target) log_chain = parse_log_output(output) check_merge_results(log_chain, expected_merges) run_log_g_r7(wc_dir) @@ -1798,7 +1794,7 @@ def merge_sensitive_log_propmod_merge_inheriting_path(sbox): def run_log_g_r8(log_target): expected_merges = { 8 : [] } exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-g', '-r8', log_target) + None, [], 'log', '-g', '-r8', log_target) log_chain = parse_log_output(output) check_merge_results(log_chain, expected_merges) run_log_g_r8(wc_dir) @@ -1820,14 +1816,14 @@ def log_of_local_copy(sbox): # Get the logs for a directory and a file. exit_code, C_log_out, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-v', C_path) + None, [], 'log', '-v', C_path) exit_code, psi_log_out, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-v', psi_path) + None, [], 'log', '-v', psi_path) # Move that directory and file. 
- svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', C_path, C_moved_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', psi_path, psi_moved_path) # Get the logs for the move destinations. @@ -1843,9 +1839,9 @@ def log_of_local_copy(sbox): # svn: File not found: revision 9, path '/A/C_MOVED' # exit_code, C_moved_log_out, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-v', C_moved_path) + None, [], 'log', '-v', C_moved_path) exit_code, psi_moved_log_out, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-v', psi_moved_path) + None, [], 'log', '-v', psi_moved_path) # The logs of the move source and destinations should be the same. if C_log_out != C_moved_log_out: @@ -1875,20 +1871,18 @@ def merge_sensitive_log_reverse_merges(sbox): # Merge -c3,5 from A to A_COPY, commit as r7 svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', '-c3,5', A_path, A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Merge -c3,5 from A to A_COPY', - wc_dir) + sbox.simple_commit(message='Merge -c3,5 from A to A_COPY') # Merge -c-3,-5,4,6 from A to A_COPY, commit as r8 svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', '-c-3,4,-5,6', A_path, A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Merge -c-3,-5,4,6 from A to A_COPY', - wc_dir) + sbox.simple_commit(message='Merge -c-3,-5,4,6 from A to A_COPY') # Update so svntest.main.run_svn(None, 'up', wc_dir) # Run log -g on path with explicit mergeinfo (A_COPY). 
- exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r8', A_COPY_path) log_chain = parse_log_output(out) @@ -1904,7 +1898,7 @@ def merge_sensitive_log_reverse_merges(sbox): check_merge_results(log_chain, expected_merges, expected_reverse_merges) # Run log -g on path with inherited mergeinfo (A_COPY/D). - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r8', D_COPY_path) log_chain = parse_log_output(out) @@ -1938,58 +1932,55 @@ def merge_sensitive_log_ignores_cyclic_merges(sbox): # Make an edit on the "branch" to A_COPY/mu, commit as r7. svntest.main.file_write(mu_COPY_path, "Branch edit.\n") - svntest.main.run_svn(None, 'ci', '-m', 'Branch edit', wc_dir) + sbox.simple_commit(message='Branch edit') # Make an edit on both the "trunk" and the "branch", commit as r8. svntest.main.file_write(chi_path, "Trunk edit.\n") svntest.main.file_write(tau_COPY_path, "Branch edit.\n") - svntest.main.run_svn(None, 'ci', '-m', 'Branch and trunk edits in one rev', - wc_dir) + sbox.simple_commit(message='Branch and trunk edits in one rev') # Sync merge A to A_COPY, commit as r9 svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Sync merge A to A_COPY', wc_dir) + sbox.simple_commit(message='Sync merge A to A_COPY') # Reintegrate A_COPY to A, commit as r10 svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', '--reintegrate', sbox.repo_url + '/A_COPY', A_path) - svntest.main.run_svn(None, 'ci', '-m', 'Reintegrate A_COPY to A', wc_dir) + sbox.simple_commit(message='Reintegrate A_COPY to A') # Do a --record-only merge of r10 from A to A_COPY, commit as r11. # This will allow us to continue using the branch without deleting it. 
svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', - '--record-only merge r10 from A to A_COPY', wc_dir) + sbox.simple_commit(message='--record-only merge r10 from A to A_COPY') # Make an edit on the "branch"; add A_COPY/C and A_COPY/C/Z/nu, # commit as r12. svntest.main.run_svn(None, 'mkdir', Z_COPY_path) svntest.main.file_write(nu_COPY_path, "A new branch file.\n") svntest.main.run_svn(None, 'add', nu_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Branch edit: Add a subtree', wc_dir) + sbox.simple_commit(message='Branch edit: Add a subtree') # Make an edit on the "trunk"; add A/C/X and A/C/X/kappa, # commit as r13. svntest.main.run_svn(None, 'mkdir', X_path) svntest.main.file_write(kappa_path, "A new trunk file.\n") svntest.main.run_svn(None, 'add', kappa_path) - svntest.main.run_svn(None, 'ci', '-m', 'Trunk edit: Add a subtree', wc_dir) + sbox.simple_commit(message='Trunk edit: Add a subtree') svntest.main.run_svn(None, 'up', wc_dir) # Sync merge A to A_COPY, commit as r14 svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Sync merge A to A_COPY', wc_dir) + sbox.simple_commit(message='Sync merge A to A_COPY') # Reintegrate A_COPY to A, commit as r15 svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', '--reintegrate', sbox.repo_url + '/A_COPY', A_path) - svntest.main.run_svn(None, 'ci', '-m', '2nd reintegrate of A_COPY to A', - wc_dir) + sbox.simple_commit(message='2nd reintegrate of A_COPY to A') # Run 'svn log -g A'. We expect to see r13, r10, r6, r5, r4, and r3 only # once, as part of A's own history, not as merged in from A_COPY. 
@@ -2011,7 +2002,7 @@ def merge_sensitive_log_ignores_cyclic_merges(sbox): 1 : [], } svntest.main.run_svn(None, 'up', wc_dir) - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', A_path) log_chain = parse_log_output(out) @@ -2028,21 +2019,21 @@ def log_with_unrelated_peg_and_operative_revs(sbox): # log for /A/D/G/rho, deleted in revision 5, recreated in revision 8 expected_error = ".*(File|path) not found.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-r', '6:7', target) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-r', '7:6', target) expected_error = ".*Unable to find repository location for.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-r', '2:9', target) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-r', '9:2', target) expected_error = ".*Unable to find repository location for.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-r', '2:HEAD', target) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-r', 'HEAD:2', target) #---------------------------------------------------------------------- @@ -2057,24 +2048,25 @@ def log_on_nonexistent_path_and_valid_rev(sbox): bad_path_real_rev = sbox.repo_url + '/Z@1' bad_path_default_rev = sbox.repo_url + '/Z' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'log', '-q', real_path_real_rev) expected_error = ".*No such revision 99*" - 
svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-q', real_path_bad_rev) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-q', bad_url_bad_rev) expected_error = ".*not found.*" - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-q', bad_path_real_rev) - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'log', '-q', bad_path_default_rev) #---------------------------------------------------------------------- # Test for issue #4022 'svn log -g interprets change in inherited mergeinfo # due to move as a merge'. +@SkipUnless(server_has_mergeinfo) @Issue(4022) def merge_sensitive_log_copied_path_inherited_mergeinfo(sbox): "log -g on copied path with inherited mergeinfo" @@ -2090,17 +2082,17 @@ def merge_sensitive_log_copied_path_inherited_mergeinfo(sbox): # r3 - Modify a file (A_COPY/D/gamma) on the branch svntest.main.file_write(gamma_COPY_path, "Branch edit.\n") - svntest.main.run_svn(None, 'ci', '-m', 'Branch edit', wc_dir) + sbox.simple_commit(message='Branch edit') # r4 - Reintegrate A_COPY to A svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', '--reintegrate', sbox.repo_url + '/A_COPY', A_path) - svntest.main.run_svn(None, 'ci', '-m', 'Reintegrate A_COPY to A', wc_dir) + sbox.simple_commit(message='Reintegrate A_COPY to A') # r5 - Move file modified by reintegrate (A/D/gamma to A/C/gamma). 
svntest.main.run_svn(None, 'move', old_gamma_path, new_gamma_path) - svntest.main.run_svn(None, 'ci', '-m', 'Move file', wc_dir) + sbox.simple_commit(message='Move file') # 'svn log -g --stop-on-copy ^/A/C/gamma' hould return *only* r5 # Previously this test failed because the change in gamma's inherited @@ -2132,7 +2124,7 @@ def merge_sensitive_log_copied_path_inherited_mergeinfo(sbox): expected_merges = {5 : []} svntest.main.run_svn(None, 'up', wc_dir) exit_code, out, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-g', '--stop-on-copy', + None, [], 'log', '-g', '--stop-on-copy', sbox.repo_url + '/A/C/gamma') log_chain = parse_log_output(out) check_merge_results(log_chain, expected_merges) @@ -2146,7 +2138,7 @@ def log_diff(sbox): was_cwd = os.getcwd() os.chdir(sbox.wc_dir) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--diff') os.chdir(was_cwd) @@ -2164,7 +2156,7 @@ def log_diff(sbox): sbox.simple_commit() os.chdir(sbox.wc_dir) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--diff', '-r10:8', 'A2') os.chdir(was_cwd) @@ -2177,7 +2169,7 @@ def log_diff(sbox): "\ No newline at end of file\n", ] ] - r8diff = [ make_diff_header('A2/D/G/rho', 'revision 0', 'revision 8') + r8diff = [ make_diff_header('A2/D/G/rho', 'nonexistent', 'revision 8') + [ "@@ -0,0 +1 @@\n", "+88\n", "\ No newline at end of file\n", @@ -2189,6 +2181,7 @@ def log_diff(sbox): compare_diff_output(r9diff, log_chain[1]['diff_lines']) compare_diff_output(r8diff, log_chain[2]['diff_lines']) +@Skip(svntest.main.is_fs_type_fsx) def log_xml_old(sbox): "log --xml shows kind for old style repository" @@ -2224,7 +2217,7 @@ def log_diff_moved(sbox): mu_at_1 = sbox.repo_url + '/A/mu@1' mu3_at_3 = sbox.repo_url + '/A/mu3@3' - r1diff = [make_diff_header('mu', 'revision 0', 
'revision 1') + r1diff = [make_diff_header('mu', 'nonexistent', 'revision 1') + ["@@ -0,0 +1 @@\n", "+This is the file 'mu'.\n"]] @@ -2244,7 +2237,7 @@ def log_diff_moved(sbox): " now mu2\n", "+now mu3\n"]] - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--diff', mu_at_1) log_chain = parse_log_output(output, with_diffs=True) @@ -2252,7 +2245,7 @@ def log_diff_moved(sbox): raise SVNLogParseError("%d logs found, 1 expected" % len(log_chain)) compare_diff_output(r1diff, log_chain[0]['diff_lines']) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--diff', '-r3', mu3_at_3) log_chain = parse_log_output(output, with_diffs=True) @@ -2260,7 +2253,7 @@ def log_diff_moved(sbox): raise SVNLogParseError("%d logs found, 1 expected" % len(log_chain)) compare_diff_output(r3diff, log_chain[0]['diff_lines']) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--diff', '-r3:2', mu3_at_3) log_chain = parse_log_output(output, with_diffs=True) @@ -2269,8 +2262,7 @@ def log_diff_moved(sbox): compare_diff_output(r3diff, log_chain[0]['diff_lines']) compare_diff_output(r2diff, log_chain[1]['diff_lines']) - # XFAIL mu3 not found at revisions 0 and 1 - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '--diff', mu3_at_3) log_chain = parse_log_output(output, with_diffs=True) @@ -2290,7 +2282,7 @@ def log_search(sbox): os.chdir(sbox.wc_dir) exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '--search', + None, [], 'log', '--search', 'for revision [367]') log_chain = parse_log_output(output) @@ -2298,7 +2290,7 @@ def log_search(sbox): # 
search is case-sensitive exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '--search', + None, [], 'log', '--search', 'FOR REVISION [367]') log_chain = parse_log_output(output) @@ -2306,7 +2298,7 @@ def log_search(sbox): # multi-pattern search exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', + None, [], 'log', '--search', 'for revision 3', '--search', 'for revision 6', '--search', 'for revision 7') @@ -2316,7 +2308,7 @@ def log_search(sbox): # combined pattern search exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '--verbose', + None, [], 'log', '--verbose', '--search', 'for revision 8', '--search-and', 'test the code', '--search', 'for revision 7', @@ -2329,7 +2321,7 @@ def log_search(sbox): check_log_chain(log_chain, [8, 6]) exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '--verbose', + None, [], 'log', '--verbose', '--search', 'for revision 8', '--search-and', 'this won\'t match ', '--search', 'for revision 7', @@ -2351,7 +2343,7 @@ def merge_sensitive_log_with_search(sbox): # Run log -g on a non-copying revision that adds mergeinfo, # and perform a search that only matches the merged revision - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'log', '-g', '-r6', '--search', @@ -2365,13 +2357,9 @@ def merge_sensitive_log_with_search(sbox): } check_merge_results(log_chain, expected_merges) -#---------------------------------------------------------------------- -# Test for issue #4355 'svn_client_log5 broken with multiple revisions -# which span a rename'. 
-@Issue(4355) -@SkipUnless(server_has_mergeinfo) -def log_multiple_revs_spanning_rename(sbox): - "log for multiple revs which span a rename" +# Helper function for a few tests +def create_renaming_history_repos(sbox): + "create a repository containing renames and a suitable working copy" sbox.build() wc_dir = sbox.wc_dir @@ -2379,7 +2367,6 @@ def log_multiple_revs_spanning_rename(sbox): msg_file=os.path.abspath(msg_file) mu_path1 = os.path.join(wc_dir, 'A', 'mu') mu_path2 = os.path.join(wc_dir, 'trunk', 'mu') - trunk_path = os.path.join(wc_dir, 'trunk') # r2 - Change a file. msg=""" Log message for revision 2 @@ -2404,21 +2391,42 @@ def log_multiple_revs_spanning_rename(sbox): svntest.main.run_svn(None, 'ci', '-F', msg_file, wc_dir) svntest.main.run_svn(None, 'up', wc_dir) + # r5 - Cyclic exchange. + svntest.main.run_svn(None, 'up', wc_dir) + sbox.simple_move(os.path.join('trunk', 'D'), os.path.join('trunk', 'X')) + sbox.simple_move(os.path.join('trunk', 'C'), os.path.join('trunk', 'D')) + sbox.simple_move(os.path.join('trunk', 'X'), os.path.join('trunk', 'C')) + svntest.main.run_svn(None, 'ci', '-m', "Log message for revision 5", + wc_dir) + + +#---------------------------------------------------------------------- +# Test for issue #4355 'svn_client_log5 broken with multiple revisions +# which span a rename'. +@Issue(4355) +@SkipUnless(server_has_mergeinfo) +def log_multiple_revs_spanning_rename(sbox): + "log for multiple revs which span a rename" + + trunk_path = sbox.ospath('trunk') + + create_renaming_history_repos(sbox) + # Check that log can handle a revision range that spans a rename. exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-r2:4', sbox.repo_url + '/trunk/mu') + None, [], 'log', '-r2:4', sbox.repo_url + '/trunk/mu') log_chain = parse_log_output(output) check_log_chain(log_chain, [2,3,4]) # Check that log can handle discrete revisions that don't span a rename. 
exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-c3,4', sbox.repo_url + '/trunk/mu') + None, [], 'log', '-c3,4', sbox.repo_url + '/trunk/mu') log_chain = parse_log_output(output) check_log_chain(log_chain, [3,4]) # Check that log can handle discrete revisions that span a rename. # - # Currently this fails with: + # Previously this failed with: # # >svn log ^/trunk -c2,3,1 # ------------------------------------------------------------------------ @@ -2440,19 +2448,21 @@ def log_multiple_revs_spanning_rename(sbox): # (apr_err=SVN_ERR_FS_NOT_FOUND) # svn: E160013: File not found: revision 1, path '/trunk' exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-c2,3,1', sbox.repo_url + '/trunk/mu') + None, [], 'log', '-c2,3,1', sbox.repo_url + '/trunk/mu') log_chain = parse_log_output(output) check_log_chain(log_chain, [2,3,1]) + mu_path2 = sbox.ospath('trunk/mu') + # Should work with a WC target too. exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-c2,3,1', mu_path2) + None, [], 'log', '-c2,3,1', mu_path2) log_chain = parse_log_output(output) check_log_chain(log_chain, [2,3,1]) # Discreet revision *ranges* which span a rename should work too. 
exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-r1', '-r4:2', sbox.repo_url + '/trunk') + None, [], 'log', '-r1', '-r4:2', sbox.repo_url + '/trunk') log_chain = parse_log_output(output) check_log_chain(log_chain, [1,4,3,2]) @@ -2473,26 +2483,210 @@ def log_multiple_revs_spanning_rename(sbox): # (apr_err=SVN_ERR_FS_NOT_FOUND) # svn: E160013: File not found: revision 4, path '/A' exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-r1', '-r2:4', sbox.repo_url + '/trunk') + None, [], 'log', '-r1', '-r2:4', sbox.repo_url + '/trunk') log_chain = parse_log_output(output) check_log_chain(log_chain, [1,2,3,4]) # Discrete revs with WC-only opt revs shouldn't cause any problems. exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-r1', '-rPREV', trunk_path) + None, [], 'log', '-r1', '-rPREV', trunk_path) log_chain = parse_log_output(output) check_log_chain(log_chain, [1,3]) exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-r1', '-rCOMMITTED', trunk_path) + None, [], 'log', '-r1', '-rCOMMITTED', trunk_path) log_chain = parse_log_output(output) check_log_chain(log_chain, [1,4]) exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'log', '-r1', '-rBASE', trunk_path) + None, [], 'log', '-r1', '-rBASE', trunk_path) log_chain = parse_log_output(output) check_log_chain(log_chain, [1,4]) +@SkipUnless(server_has_mergeinfo) +def mergeinfo_log(sbox): + "'mergeinfo --log' on a path with mergeinfo" + + sbox.build() + wc_dir = sbox.wc_dir + + # make a branch 'A2' + sbox.simple_repo_copy('A', 'A2') # r2 + # make a change in branch 'A' + sbox.simple_mkdir('A/newdir') + sbox.simple_commit(message="Log message for revision 3.") # r3 + sbox.simple_update() + + # Dummy up some mergeinfo. 
+ svntest.actions.run_and_verify_svn(None, [], + 'ps', SVN_PROP_MERGEINFO, '/A:3', + sbox.ospath('A2')) + + # test --log + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], + 'mergeinfo', '--show-revs=merged', + '--log', sbox.repo_url + '/A', + sbox.ospath('A2')) + check_log_chain(parse_log_output(output), [3]) + + # test --log -v + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], + 'mergeinfo', '--show-revs=merged', + '--log', '-v', sbox.repo_url + '/A', + sbox.ospath('A2')) + check_log_chain(parse_log_output(output), [3], [1]) + + # test --log -q + svntest.actions.run_and_verify_svn(None, [], + 'mergeinfo', '--show-revs=merged', + '--log', '-q', sbox.repo_url + '/A', + sbox.ospath('A2')) + # TODO: Validate the output, the check_log_chain() function assumes it + # gets the output of the message + +@SkipUnless(server_has_mergeinfo) +@Issue(4463) +def merge_sensitive_log_xml_reverse_merges(sbox): + "log -g --xml differentiates forward/reverse merges" + + sbox.build() + wc_dir = sbox.wc_dir + wc_disk, wc_status = set_up_branch(sbox) + + A_path = os.path.join(wc_dir, 'A') + A_COPY_path = os.path.join(wc_dir, 'A_COPY') + D_COPY_path = os.path.join(wc_dir, 'A_COPY', 'D') + + # Merge -c3,5 from A to A_COPY, commit as r7 + svntest.main.run_svn(None, 'up', wc_dir) + svntest.main.run_svn(None, 'merge', '-c3,5', A_path, A_COPY_path) + sbox.simple_commit(message='Merge -c3,5 from A to A_COPY') + + # Merge -c-3,-5,4,6 from A to A_COPY, commit as r8 + svntest.main.run_svn(None, 'up', wc_dir) + svntest.main.run_svn(None, 'merge', '-c-3,4,-5,6', A_path, A_COPY_path) + sbox.simple_commit(message='Merge -c-3,-5,4,6 from A to A_COPY') + + # Update so + svntest.main.run_svn(None, 'up', wc_dir) + + # Run log -g --xml on path with explicit mergeinfo (A_COPY). 
+ log_attrs = [ + { + u'revision': u'8', + }, + { + u'revision': u'6', + u'reverse-merge': u'false', + }, + { + u'revision': u'5', + u'reverse-merge': u'true', + }, + { + u'revision': u'4', + u'reverse-merge': u'false', + }, + { + u'revision': u'3', + u'reverse-merge': u'true', + }] + svntest.actions.run_and_verify_log_xml(expected_log_attrs=log_attrs, + args=['-g', '-r8', A_COPY_path]) + + # Run log -g --xml on path with inherited mergeinfo (A_COPY/D). + # r5 only affects A_COPY/B/E/beta so not listed + log_attrs = [ + { + u'revision': u'8', + }, + { + u'revision': u'6', + u'reverse-merge': u'false', + }, + { + u'revision': u'4', + u'reverse-merge': u'false', + }, + { + u'revision': u'3', + u'reverse-merge': u'true', + }] + svntest.actions.run_and_verify_log_xml(expected_log_attrs=log_attrs, + args=['-g', '-r8', D_COPY_path]) + +def log_revision_move_copy(sbox): + "log revision handling over move/copy" + + sbox.build() + + sbox.simple_move('iota', 'iotb') + sbox.simple_append('iotb', 'new line\n') + + sbox.simple_copy('A/mu', 'mutb') + sbox.simple_append('mutb', 'mutb\n') + + sbox.simple_move('A/B/E', 'E') + sbox.simple_move('E/alpha', 'alpha') + + #r2 + svntest.actions.run_and_verify_svn(None, [], + 'rm', sbox.repo_url + '/A/D', '-mm') + + sbox.simple_commit() #r3 + + # This introduces a copy and a move in r3, but check how the history + # of these nodes behaves in r2. 
+ + # This one might change behavior once we improve move handling + expected_output = [ + '------------------------------------------------------------------------\n' + ] + expected_err = [] + svntest.actions.run_and_verify_svn(expected_output, expected_err, + 'log', '-v',sbox.ospath('iotb'), + '-r2') + + # While this one + expected_output = [] + expected_err = '.*E195012: Unable to find repository location.*' + svntest.actions.run_and_verify_svn(expected_output, expected_err, + 'log', '-v', sbox.ospath('mutb'), + '-r2') + + # And just for fun, do the same thing for blame + expected_output = [ + ' 1 jrandom This is the file \'iota\'.\n' + ] + expected_err = [] + svntest.actions.run_and_verify_svn(expected_output, expected_err, + 'blame', sbox.ospath('iotb'), + '-r2') + + expected_output = None + expected_err = '.*E195012: Unable to find repository location.*' + svntest.actions.run_and_verify_svn(expected_output, expected_err, + 'blame', sbox.ospath('mutb'), + '-r2') + + expected_output = svntest.verify.RegexListOutput([ + '-+\\n', + 'r3\ .*\n', + re.escape('Changed paths:\n'), + re.escape(' D /A/B/E\n'), + re.escape(' A /E (from /A/B/E:2)\n'), # Patched - Direct move + re.escape(' D /E/alpha\n'), + re.escape(' A /alpha (from /A/B/E/alpha:1)\n'), # Indirect move - Not patched + re.escape(' D /iota\n'), + re.escape(' A /iotb (from /iota:2)\n'), # Patched - Direct move + re.escape(' A /mutb (from /A/mu:1)\n'), # Copy (always r1) + '-+\\n' + ]) + svntest.actions.run_and_verify_svn(expected_output, [], + 'log', '-v', '-q', sbox.wc_dir, + '-c3') + + ######################################################################## # Run the tests @@ -2539,6 +2733,9 @@ test_list = [ None, log_search, merge_sensitive_log_with_search, log_multiple_revs_spanning_rename, + mergeinfo_log, + merge_sensitive_log_xml_reverse_merges, + log_revision_move_copy, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/merge_authz_tests.py 
b/subversion/tests/cmdline/merge_authz_tests.py index 8bf1968..8e14089 100755 --- a/subversion/tests/cmdline/merge_authz_tests.py +++ b/subversion/tests/cmdline/merge_authz_tests.py @@ -41,8 +41,8 @@ Issues = svntest.testcase.Issues_deco Issue = svntest.testcase.Issue_deco Wimp = svntest.testcase.Wimp_deco -from merge_tests import set_up_branch -from merge_tests import expected_merge_output +from svntest.mergetrees import set_up_branch +from svntest.mergetrees import expected_merge_output from svntest.main import SVN_PROP_MERGEINFO from svntest.main import write_restrictive_svnserve_conf from svntest.main import write_authz_file @@ -112,7 +112,7 @@ def mergeinfo_and_skipped_paths(sbox): # Checkout just the branch under the newly restricted authz. wc_restricted = sbox.add_wc_path('restricted') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc_restricted) @@ -194,8 +194,7 @@ def mergeinfo_and_skipped_paths(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge r4:8 into the restricted WC's A_COPY_2. # @@ -270,8 +269,7 @@ def mergeinfo_and_skipped_paths(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) + check_props=True) # Merge r5:7 into the restricted WC's A_COPY_3. # @@ -335,9 +333,8 @@ def mergeinfo_and_skipped_paths(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '--recursive', + check_props=True) + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_restricted) # Test issue #2997. 
If a merge requires two separate editor drives and the @@ -379,23 +376,23 @@ def mergeinfo_and_skipped_paths(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0, '-c5', '-c8', + [], True, False, + '-c5', '-c8', A_COPY_2_H_path) # Test issue #2829 'Improve handling for skipped paths encountered # during a merge' # Revert previous changes to restricted WC - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '--recursive', + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_restricted) # Add new path 'A/D/H/zeta' svntest.main.file_write(zeta_path, "This is the file 'zeta'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', zeta_path) + svntest.actions.run_and_verify_svn(None, [], 'add', zeta_path) expected_output = wc.State(wc_dir, {'A/D/H/zeta' : Item(verb='Adding')}) wc_status.add({'A/D/H/zeta' : Item(status=' ', wc_rev=9)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Merge -r7:9 to the restricted WC's A_COPY_2/D/H. # @@ -436,8 +433,7 @@ def mergeinfo_and_skipped_paths(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) + check_props=True) # Merge -r4:9 to the restricted WC's A_COPY_2/D/H. # @@ -445,7 +441,7 @@ def mergeinfo_and_skipped_paths(sbox): # non-inheritable mergeinfo (due to the fact 'A_COPY_2/D/H/psi' is missing # and skipped). 'A_COPY_2/D/H/zeta' must therefore get its own explicit # mergeinfo from this merge. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', '--recursive', + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_restricted) expected_output = wc.State(A_COPY_2_H_path, { 'omega' : Item(status='U '), @@ -483,8 +479,7 @@ def mergeinfo_and_skipped_paths(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) + check_props=True) @SkipUnless(server_has_mergeinfo) @Issue(2876) @@ -526,13 +521,13 @@ def merge_fails_if_subtree_is_deleted_on_src(sbox): # Commit the new content svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', A_url, Acopy_url, + svntest.actions.run_and_verify_svn(None, [], 'cp', A_url, Acopy_url, '-m', 'create a new copy of A') # Update working copy - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) svntest.main.file_substitute(gamma_path, "line1", "this is line1") # Create expected output tree for commit @@ -566,10 +561,10 @@ def merge_fails_if_subtree_is_deleted_on_src(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Delete A/D/gamma from working copy - svntest.actions.run_and_verify_svn(None, None, [], 'delete', gamma_path) + svntest.actions.run_and_verify_svn(None, [], 'delete', gamma_path) # Create expected output tree for commit expected_output = wc.State(wc_dir, { 'A/D/gamma' : Item(verb='Deleting'), @@ -580,10 +575,9 @@ def merge_fails_if_subtree_is_deleted_on_src(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, + [], wc_dir, wc_dir) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3,4]], ['U ' + Acopy_gamma_path + '\n', ' U ' + Acopy_gamma_path + '\n']), @@ -603,7 +597,6 @@ def merge_fails_if_subtree_is_deleted_on_src(sbox): # see 
notes/tree-conflicts/detection.txt, but --force currently avoids # this. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3,6]], ['D ' + Acopy_gamma_path + '\n', ' U ' + Acopy_path + '\n']), @@ -642,7 +635,7 @@ def reintegrate_fails_if_no_root_access(sbox): expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')}) expected_status.tweak('A_COPY/mu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.') # Update the WC. @@ -657,9 +650,9 @@ def reintegrate_fails_if_no_root_access(sbox): 'U ' + psi_COPY_path + '\n', # Mergeinfo notification ' U ' + A_COPY_path + '\n']) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'merge', + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'synch A_COPY with A', wc_dir) + sbox.simple_commit(message='synch A_COPY with A') # Update so we are ready for reintegrate. 
svntest.main.run_svn(None, 'up', wc_dir) @@ -733,10 +726,177 @@ def reintegrate_fails_if_no_root_access(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, True, True, + [], True, True, '--reintegrate', A_path) +def diff_unauth_parent(sbox): + "diff directory without reading parent" + + sbox.build(create_wc=False) + + # Create r2: Change A a bit + svntest.actions.run_and_verify_svnmucc(None, [], + 'propset', 'k', 'v', + sbox.repo_url + '/A', + '-m', 'set prop') + + # Create r3 Mark E and G + svntest.actions.run_and_verify_svnmucc(None, [], + 'propset', 'this-is', 'E', + sbox.repo_url + '/A/B/E', + 'propset', 'this-is', 'G', + sbox.repo_url + '/A/D/G', + '-m', 'set prop') + + # Create r4: Replace A/B/E with A/D/G + svntest.actions.run_and_verify_svnmucc(None, [], + 'rm', sbox.repo_url + '/A/B/E', + 'cp', '3', sbox.repo_url + '/A/D/G', + sbox.repo_url + '/A/B/E', + '-m', 'replace A/B/E') + + + if is_ra_type_svn() or is_ra_type_dav(): + write_restrictive_svnserve_conf(sbox.repo_dir) + write_authz_file(sbox, {"/" : "* =", + "/A" : "* = rw"}) + + # Diff the property change + expected_output = [ + 'Index: .\n', + '===================================================================\n', + '--- .\t(revision 1)\n', + '+++ .\t(revision 2)\n', + '\n', + 'Property changes on: .\n', + '___________________________________________________________________\n', + 'Added: k\n', + '## -0,0 +1 ##\n', + '+v\n', + '\ No newline at end of property\n' + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', sbox.repo_url + '/A', '-c', '2') + + if is_ra_type_svn() or is_ra_type_dav(): + write_authz_file(sbox, {"/" : "* =", + "/A/B/E" : "* = rw"}) + + # Diff the replacement + expected_output = [ + 'Index: alpha\n', + '===================================================================\n', + '--- alpha\t(revision 3)\n', + '+++ alpha\t(nonexistent)\n', + '@@ -1 +0,0 @@\n', + '-This is the file \'alpha\'.\n', + 'Index: beta\n', + 
'===================================================================\n', + '--- beta\t(revision 3)\n', + '+++ beta\t(nonexistent)\n', + '@@ -1 +0,0 @@\n', + '-This is the file \'beta\'.\n', + 'Index: tau\n', + '===================================================================\n', + '--- tau\t(nonexistent)\n', + '+++ tau\t(revision 4)\n', + '@@ -0,0 +1 @@\n', + '+This is the file \'tau\'.\n', + 'Index: rho\n', + '===================================================================\n', + '--- rho\t(nonexistent)\n', + '+++ rho\t(revision 4)\n', + '@@ -0,0 +1 @@\n', + '+This is the file \'rho\'.\n', + 'Index: pi\n', + '===================================================================\n', + '--- pi\t(nonexistent)\n', + '+++ pi\t(revision 4)\n', + '@@ -0,0 +1 @@\n', + '+This is the file \'pi\'.\n', + ] + + if is_ra_type_svn() or is_ra_type_dav(): + # Because we can't anchor above C we see just a changed C, not a + # replacement + expected_output += [ + 'Index: .\n', + '===================================================================\n', + '--- .\t(revision 3)\n', + '+++ .\t(revision 4)\n', + '\n', + 'Property changes on: .\n', + '___________________________________________________________________\n', + 'Modified: this-is\n', + '## -1 +1 ##\n', + '-E\n', + '\ No newline at end of property\n', + '+G\n', + '\ No newline at end of property\n', + ] + else: + # ### We should also see a property deletion here! 
+ expected_output += [ + 'Index: .\n', + '===================================================================\n', + '--- .\t(revision 3)\n', + '+++ .\t(nonexistent)\n', + '\n', + 'Property changes on: .\n', + '___________________________________________________________________\n', + 'Deleted: this-is\n', + '## -1 +0,0 ##\n', + '-E\n', + '\ No newline at end of property\n', + 'Index: .\n', + '===================================================================\n', + '--- .\t(nonexistent)\n', + '+++ .\t(revision 4)\n', + '\n', + 'Property changes on: .\n', + '___________________________________________________________________\n', + 'Added: this-is\n', + '## -0,0 +1 ##\n', + '+G\n', + '\ No newline at end of property\n', + ] + + # Use two url diff, because 'svn diff url -c' uses copyfrom to diff against + expected_output = svntest.verify.UnorderedOutput(expected_output) + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', sbox.repo_url + '/A/B/E@3', + sbox.repo_url + '/A/B/E@4', + '--notice-ancestry') + + # Do the same thing with summarize to really see directory deletes and adds + if is_ra_type_svn() or is_ra_type_dav(): + # With no rights on the parent directory we just see a property change on E + expected_output = [ + 'D %s/A/B/E/alpha\n' % sbox.repo_url, + 'D %s/A/B/E/beta\n' % sbox.repo_url, + 'A %s/A/B/E/tau\n' % sbox.repo_url, + 'A %s/A/B/E/rho\n' % sbox.repo_url, + 'A %s/A/B/E/pi\n' % sbox.repo_url, + ' M %s/A/B/E\n' % sbox.repo_url, + ] + else: + # But with rights on the parent we see a replacement of E + expected_output = [ + 'D %s/A/B/E/alpha\n' % sbox.repo_url, + 'D %s/A/B/E/beta\n' % sbox.repo_url, + 'D %s/A/B/E\n' % sbox.repo_url, + 'A %s/A/B/E/tau\n' % sbox.repo_url, + 'A %s/A/B/E/rho\n' % sbox.repo_url, + 'A %s/A/B/E/pi\n' % sbox.repo_url, + 'A %s/A/B/E\n' % sbox.repo_url, + ] + + expected_output = svntest.verify.UnorderedOutput(expected_output) + svntest.actions.run_and_verify_svn(expected_output, [], + 'diff', sbox.repo_url + 
'/A/B/E@3', + sbox.repo_url + '/A/B/E@4', + '--notice-ancestry', '--summarize') + ######################################################################## # Run the tests @@ -746,6 +906,7 @@ test_list = [ None, mergeinfo_and_skipped_paths, merge_fails_if_subtree_is_deleted_on_src, reintegrate_fails_if_no_root_access, + diff_unauth_parent, ] serial_only = True diff --git a/subversion/tests/cmdline/merge_automatic_tests.py b/subversion/tests/cmdline/merge_automatic_tests.py index 350ae3e..fbdf395 100755 --- a/subversion/tests/cmdline/merge_automatic_tests.py +++ b/subversion/tests/cmdline/merge_automatic_tests.py @@ -43,10 +43,10 @@ Wimp = svntest.testcase.Wimp_deco from svntest.main import SVN_PROP_MERGEINFO from svntest.main import server_has_mergeinfo -from merge_tests import local_path -from merge_tests import expected_merge_output -from merge_tests import svn_merge -from merge_tests import set_up_branch +from svntest.mergetrees import local_path +from svntest.mergetrees import expected_merge_output +from svntest.mergetrees import svn_merge +from svntest.mergetrees import set_up_branch #---------------------------------------------------------------------- @@ -188,7 +188,7 @@ def logical_changes_in_branch(sbox, branch): def get_mergeinfo_change(sbox, target): """Return a list of revision numbers representing the mergeinfo change on TARGET (working version against base). 
Non-recursive.""" - exit, out, err = actions.run_and_verify_svn(None, None, [], + exit, out, err = actions.run_and_verify_svn(None, [], 'diff', '--depth=empty', sbox.ospath(target)) merged_revs = [] @@ -302,7 +302,7 @@ def automatic_merge(sbox, source, target, args=[], before_changes = logical_changes_in_branch(sbox, target) exp_out = expected_automatic_merge_output(target, expect_3ways) - exit, out, err = svntest.actions.run_and_verify_svn(None, exp_out, [], + exit, out, err = svntest.actions.run_and_verify_svn(exp_out, [], 'merge', '^/' + source, target, *args) @@ -334,7 +334,6 @@ def three_way_merge_no_op(base_node, source_right_node): def cherry_pick(sbox, rev, source, target): """Cherry-pick merge revision REV from branch SOURCE to branch TARGET (both WC-relative paths), and commit.""" - sbox.simple_update(target) svn_merge(rev, source, target) sbox.simple_commit() @@ -691,6 +690,7 @@ def cherry1_fwd(sbox): # 2 34 5 67 8 9 0 1 init_mod_merge_mod(sbox, mod_6=True, mod_7=False) + sbox.simple_update() modify_branch(sbox, 'A', 8) cherry_pick(sbox, 8, 'A', 'B') modify_branch(sbox, 'A', 10) @@ -782,20 +782,20 @@ def subtree_to_and_fro(sbox): # r7 - Edit a file on the branch. svntest.main.file_write(A_COPY_gamma_path, "Branch edit to 'gamma'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Edit a file on our branch') # r8 - Do a subtree sync merge from ^/A/D to A_COPY/D. # Note that among other things this changes A_COPY/D/H/psi. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A/D', A_COPY_D_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Automatic subtree merge') # r9 - Make an edit to A/D/H/psi. svntest.main.file_write(psi_path, "Trunk Edit to 'psi'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Edit a file on our trunk') # Now reintegrate ^/A_COPY back to A. Prior to issue #4258's fix, the @@ -820,9 +820,9 @@ def subtree_to_and_fro(sbox): # U A # Summary of conflicts: # Text conflicts: 1 - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) exit_code, out, err = svntest.actions.run_and_verify_svn( - None, [], svntest.verify.AnyOutput, + [], svntest.verify.AnyOutput, 'merge', sbox.repo_url + '/A_COPY', A_path) # Better to produce the same warning that explicitly using the @@ -876,13 +876,13 @@ def merge_to_reverse_cherry_subtree_to_merge_to(sbox): # Properties on 'A_COPY\B': # svn:mergeinfo # /A/B:2-4,6 - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c-5', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c-5', sbox.repo_url + '/A/B', A_COPY_B_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'sync merge and reverse subtree 
merge') # Try an automatic sync merge from ^/A to A_COPY. Revision 5 should be @@ -917,7 +917,7 @@ def merge_to_reverse_cherry_subtree_to_merge_to(sbox): # ___________________________________________________________________ # Modified: svn:mergeinfo # Merged /A:r7 - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_COPY_path, { 'B/E/beta' : Item(status='U '), }) @@ -980,8 +980,8 @@ def merge_to_reverse_cherry_subtree_to_merge_to(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0, A_COPY_path) + [], True, False, + A_COPY_path) #---------------------------------------------------------------------- # Automatic merges should notice ancestory for replaced files @@ -1073,8 +1073,8 @@ def merge_replacement(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0, A_path) + [], True, False, + A_path) @SkipUnless(server_has_mergeinfo) @Issue(4313) @@ -1117,7 +1117,6 @@ def auto_merge_handles_replacements_in_merge_source(sbox): # unusual way. # Please contact the application's support team for more information. svntest.actions.run_and_verify_svn( - None, ["--- Recording mergeinfo for merge of r2 into '" + branch2_path + "':\n", " U " + branch2_path + "\n", "--- Recording mergeinfo for merge of r3 into '" + branch2_path + "':\n", @@ -1175,7 +1174,7 @@ def effective_sync_results_in_reintegrate(sbox): "--- Recording mergeinfo for merge between repository URLs into '" + A_path + "':\n", " U " + A_path + "\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'merge', + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/branch', A_path, '--reintegrate') @@ -1198,8 +1197,8 @@ def effective_sync_results_in_reintegrate(sbox): # Conflict discovered in file 'A\mu'. 
# Select: (p) postpone, (df) diff-full, (e) edit, (m) merge, # (mc) mine-conflict, (tc) theirs-conflict, (s) show all options: p - svntest.actions.run_and_verify_svn(None, None, [], 'revert', A_path, '-R') - svntest.actions.run_and_verify_svn(None, expected_output, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'revert', A_path, '-R') + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/branch', A_path) @Issue(4481) @@ -1225,7 +1224,7 @@ def reintegrate_subtree_not_updated(sbox): + sbox.ospath('A/D/G') + "':\n", " U " + sbox.ospath('A/D/G') + "\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/D_1/G', sbox.ospath('A/D/G')) @@ -1238,7 +1237,7 @@ def reintegrate_subtree_not_updated(sbox): + sbox.ospath('A/D/H') + "':\n", " U " + sbox.ospath('A/D/H') + "\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/D_1/H', sbox.ospath('A/D/H')) @@ -1274,7 +1273,7 @@ def reintegrate_subtree_not_updated(sbox): + sbox.ospath('D_2/G') + "':\n", " U " + sbox.ospath('D_2/G') + "\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/A/D', sbox.ospath('D_2')) @@ -1295,7 +1294,7 @@ def reintegrate_subtree_not_updated(sbox): + sbox.ospath('A/D') + "\n", " U " + sbox.ospath('A/D/G') + "\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/D_2', sbox.ospath('A/D')) @@ -1321,13 +1320,85 @@ def reintegrate_subtree_not_updated(sbox): + sbox.ospath('D_2') + "\n", " G " + sbox.ospath('D_2/G') + "\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/A/D', 
sbox.ospath('D_2')) sbox.simple_commit() sbox.simple_update() +def merge_to_copy_and_add(sbox): + "merge peg to a copy and add" + + sbox.build() + + sbox.simple_copy('A', 'AA') + sbox.simple_append('A/mu', 'A/mu') + sbox.simple_commit('A') + + # This is the scenario the code is supposed to support; a copy + svntest.actions.run_and_verify_svn(None, [], + 'merge', '^/A', sbox.ospath('AA')) + + sbox.simple_mkdir('A3') + # And this case currently segfaults, because merge doesn't check + # if the path has a repository location + expected_err = ".*svn: E195012: Can't perform .*A3'.*added.*" + svntest.actions.run_and_verify_svn(None, expected_err, + 'merge', '^/A', sbox.ospath('A3')) + # Try the same merge with --reintegrate, for completeness' sake. + expected_err = ".*svn: E195012: Can't reintegrate into .*A3'.*added.*" + svntest.actions.run_and_verify_svn(None, expected_err, + 'merge', '--reintegrate', '^/A', + sbox.ospath('A3')) + +def merge_delete_crlf_file(sbox): + "merge the deletion of a strict CRLF file" + + sbox.build() + + sbox.simple_copy('A', 'AA') + + # Let commit fix the eols + sbox.simple_add_text('with\rCRLF\rhere!', 'A/crlf') + sbox.simple_add_text('with\rnative\r\eol', 'A/native') + sbox.simple_add_text('with\rCR\r\eol', 'A/cr') + sbox.simple_add_text('with\rLF\r\eol', 'A/lf') + + # And apply the magic property + sbox.simple_propset('svn:eol-style', 'CRLF', 'A/crlf') + sbox.simple_propset('svn:eol-style', 'native', 'A/native') + sbox.simple_propset('svn:eol-style', 'CR', 'A/cr') + sbox.simple_propset('svn:eol-style', 'LF', 'A/lf') + + sbox.simple_commit('A') # r2 + + # Merge the addition of the files + svntest.actions.run_and_verify_svn(None, [], + 'merge', '^/A', sbox.ospath('AA')) + sbox.simple_commit('AA') # r3 + + sbox.simple_rm('A/D', 'A/mu', 'A/crlf', 'A/native', 'A/cr', 'A/lf') + sbox.simple_commit('A') # r4 + + sbox.simple_update('') # Make single revision r4 + + # And now merge the deletes + expected_output = svntest.verify.UnorderedOutput([ + 
'--- Merging r3 through r4 into \'%s\':\n' % sbox.ospath('AA'), + 'D %s\n' % sbox.ospath('AA/cr'), + 'D %s\n' % sbox.ospath('AA/crlf'), + 'D %s\n' % sbox.ospath('AA/lf'), + 'D %s\n' % sbox.ospath('AA/native'), + 'D %s\n' % sbox.ospath('AA/mu'), + 'D %s\n' % sbox.ospath('AA/D'), + '--- Recording mergeinfo for merge of r3 through r4 into \'%s\':\n' + % sbox.ospath('AA'), + ' U %s\n' % sbox.ospath('AA') + ]) + svntest.actions.run_and_verify_svn(expected_output, [], + 'merge', '^/A', sbox.ospath('AA')) + ######################################################################## # Run the tests @@ -1357,6 +1428,8 @@ test_list = [ None, auto_merge_handles_replacements_in_merge_source, effective_sync_results_in_reintegrate, reintegrate_subtree_not_updated, + merge_to_copy_and_add, + merge_delete_crlf_file ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/merge_reintegrate_tests.py b/subversion/tests/cmdline/merge_reintegrate_tests.py index 8b3e91f..553a2dc 100755 --- a/subversion/tests/cmdline/merge_reintegrate_tests.py +++ b/subversion/tests/cmdline/merge_reintegrate_tests.py @@ -44,8 +44,8 @@ exp_noop_up_out = svntest.actions.expected_noop_update_output from svntest.main import SVN_PROP_MERGEINFO from svntest.main import server_has_mergeinfo -from merge_tests import set_up_branch -from merge_tests import expected_merge_output +from svntest.mergetrees import set_up_branch +from svntest.mergetrees import expected_merge_output #---------------------------------------------------------------------- def run_reintegrate(src_url, tgt_path): @@ -53,7 +53,7 @@ def run_reintegrate(src_url, tgt_path): there is nothing on stdout, anything on stderr, or a non-zero exit code. 
""" - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', '--reintegrate', src_url, tgt_path) @@ -71,7 +71,7 @@ def run_reintegrate_expect_error(src_url, tgt_path, # ourselves, but as the 'actual_stdout' argument, that way each line of # error must match the regex. exit_code, out, err = svntest.actions.run_and_verify_svn( - None, expected_stdout, svntest.verify.AnyOutput, + expected_stdout, svntest.verify.AnyOutput, 'merge', '--reintegrate', src_url, tgt_path) assert exit_code @@ -86,7 +86,7 @@ def run_and_verify_reintegrate(tgt_dir, src_url, mergeinfo_output_tree, elision_output_tree, disk_tree, status_tree, skip_tree, - error_re_string = None, + expected_stderr = [], check_props = True, dry_run = True): """Run 'svn merge --reintegrate SRC_URL TGT_DIR'. Raise an error if @@ -98,8 +98,7 @@ def run_and_verify_reintegrate(tgt_dir, src_url, tgt_dir, None, None, src_url, None, output_tree, mergeinfo_output_tree, elision_output_tree, disk_tree, status_tree, skip_tree, - error_re_string, - None, None, None, None, check_props, dry_run, + expected_stderr, check_props, dry_run, '--reintegrate', tgt_dir) @@ -122,7 +121,7 @@ def basic_reintegrate(sbox): expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')}) expected_status.tweak('A_COPY/mu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.') # Update the wcs. @@ -130,7 +129,7 @@ def basic_reintegrate(sbox): expected_status.tweak(wc_rev='7') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) # Merge from trunk to branch (ie, r3-6), using normal cherry-harvest. 
A_COPY_path = sbox.ospath('A_COPY') @@ -196,8 +195,7 @@ def basic_reintegrate(sbox): k_expected_disk, k_expected_status, expected_skip, - None, None, None, None, - None, True) + [], True) expected_disk.tweak('A_COPY', props={SVN_PROP_MERGEINFO: '/A:2-7'}) expected_disk.tweak('A_COPY/B/E/beta', contents="New content") expected_disk.tweak('A_COPY/D/G/rho', contents="New content") @@ -215,14 +213,14 @@ def basic_reintegrate(sbox): expected_status.tweak('A_COPY', 'A_COPY/D/H/psi', 'A_COPY/D/G/rho', 'A_COPY/B/E/beta', 'A_COPY/D/H/omega', wc_rev=8) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update the wcs again. expected_output = wc.State(wc_dir, {}) expected_status.tweak(wc_rev='8') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) # *finally*, actually run merge --reintegrate in trunk with the @@ -268,7 +266,7 @@ def basic_reintegrate(sbox): k_expected_disk, k_expected_status, expected_skip, - None, True, True) + [], True, True) # Test issue #3640: # @@ -276,14 +274,14 @@ def basic_reintegrate(sbox): # targeting A_MOVED this time. This should work with almost the same # results. The only differences being the inclusion of r9 in the # mergeinfo and the A-->A_MOVED path difference. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 9.\n'], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 9.\n'], [], 'move', sbox.repo_url + '/A', sbox.repo_url + '/A_MOVED', '-m', 'Copy A to A_MOVED') - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) A_MOVED_path = sbox.ospath('A_MOVED') expected_output = wc.State(A_MOVED_path, { 'mu' : Item(status='U '), @@ -325,9 +323,10 @@ def basic_reintegrate(sbox): k_expected_disk, expected_status, expected_skip, - None, True, True) + [], True, True) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def reintegrate_with_rename(sbox): "merge --reintegrate with renamed file on branch" @@ -342,7 +341,7 @@ def reintegrate_with_rename(sbox): expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')}) expected_status.tweak('A_COPY/mu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.') # Update the wcs. @@ -350,7 +349,7 @@ def reintegrate_with_rename(sbox): expected_status.tweak(wc_rev='7') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) # Merge from trunk to branch (ie, r3-6), using normal cherry-harvest. 
A_COPY_path = sbox.ospath('A_COPY') @@ -416,8 +415,7 @@ def reintegrate_with_rename(sbox): k_expected_disk, k_expected_status, expected_skip, - None, None, None, None, - None, True) + [], True) expected_disk.tweak('A_COPY', props={SVN_PROP_MERGEINFO: '/A:2-7'}) expected_disk.tweak('A_COPY/B/E/beta', contents="New content") expected_disk.tweak('A_COPY/D/G/rho', contents="New content") @@ -435,7 +433,7 @@ def reintegrate_with_rename(sbox): expected_status.tweak('A_COPY', 'A_COPY/D/H/psi', 'A_COPY/D/G/rho', 'A_COPY/B/E/beta', 'A_COPY/D/H/omega', wc_rev=8) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update the wcs again. @@ -473,11 +471,11 @@ def reintegrate_with_rename(sbox): expected_status.tweak(wc_rev='8') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) # Make another change on the branch: copy tau to tauprime. Commit # in r9. - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.repo_url + '/A_COPY/D/G/tau', sbox.repo_url + '/A_COPY/D/G/tauprime', '-m', @@ -499,7 +497,7 @@ def reintegrate_with_rename(sbox): expected_status.tweak(wc_rev='9') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) # *finally*, actually run merge --reintegrate in trunk with the # branch URL. This should bring in the mu change and the tauprime @@ -552,7 +550,7 @@ def reintegrate_with_rename(sbox): k_expected_disk, k_expected_status, expected_skip, - None, True, True) + [], True, True) # Finally, commit the result of the merge (r10). 
expected_output = wc.State(wc_dir, { @@ -565,9 +563,10 @@ def reintegrate_with_rename(sbox): }) expected_status.tweak('A', 'A/mu', wc_rev=10) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def reintegrate_branch_never_merged_to(sbox): "merge --reintegrate on a never-updated branch" @@ -582,7 +581,7 @@ def reintegrate_branch_never_merged_to(sbox): expected_output = wc.State(wc_dir, {'A_COPY/mu' : Item(verb='Sending')}) expected_status.tweak('A_COPY/mu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A_COPY/mu', contents='Changed on the branch.') # Update the wcs. @@ -590,11 +589,11 @@ def reintegrate_branch_never_merged_to(sbox): expected_status.tweak(wc_rev='7') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) # Make another change on the branch: copy tau to tauprime. Commit # in r8. 
- svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', os.path.join(wc_dir, 'A_COPY', 'D', 'G', 'tau'), os.path.join(wc_dir, 'A_COPY', 'D', 'G', @@ -604,7 +603,7 @@ def reintegrate_branch_never_merged_to(sbox): }) expected_status.add({'A_COPY/D/G/tauprime': Item(status=' ', wc_rev=8)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.add({ 'A_COPY/D/G/tauprime' : Item(contents="This is the file 'tau'.\n") }) @@ -615,7 +614,7 @@ def reintegrate_branch_never_merged_to(sbox): expected_status.tweak(wc_rev='8') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) # *finally*, actually run merge --reintegrate in trunk with the # branch URL. This should bring in the mu change and the tauprime @@ -683,7 +682,7 @@ def reintegrate_branch_never_merged_to(sbox): k_expected_disk, k_expected_status, expected_skip, - None, True, True) + [], True, True) # Finally, commit the result of the merge (r9). 
expected_output = wc.State(wc_dir, { @@ -696,9 +695,10 @@ def reintegrate_branch_never_merged_to(sbox): }) expected_status.tweak('A', 'A/mu', wc_rev=9) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def reintegrate_fail_on_modified_wc(sbox): "merge --reintegrate should fail in modified wc" sbox.build() @@ -714,14 +714,16 @@ def reintegrate_fail_on_modified_wc(sbox): sbox.simple_commit() svntest.main.file_write(mu_path, "Changed on 'trunk' (the merge target).") + expected_skip = wc.State(wc_dir, {}) sbox.simple_update() # avoid mixed-revision error run_and_verify_reintegrate( A_path, sbox.repo_url + '/A_COPY', None, None, None, - None, None, None, + None, None, expected_skip, ".*Cannot merge into a working copy that has local modifications.*", True, False) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def reintegrate_fail_on_mixed_rev_wc(sbox): "merge --reintegrate should fail in mixed-rev wc" sbox.build() @@ -736,15 +738,17 @@ def reintegrate_fail_on_mixed_rev_wc(sbox): }) expected_status.tweak('A/mu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) + expected_skip = wc.State(wc_dir, {}) # Try merging into that same wc, expecting failure. 
run_and_verify_reintegrate( A_path, sbox.repo_url + '/A_COPY', None, None, None, - None, None, None, + None, None, expected_skip, ".*Cannot merge into mixed-revision working copy.*", True, False) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def reintegrate_fail_on_switched_wc(sbox): "merge --reintegrate should fail in switched wc" sbox.build() @@ -801,18 +805,20 @@ def reintegrate_fail_on_switched_wc(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') sbox.simple_update() # avoid mixed-revision error + expected_skip = wc.State(wc_dir, {}) run_and_verify_reintegrate( A_path, sbox.repo_url + '/A_COPY', None, None, None, - None, None, None, + None, None, expected_skip, ".*Cannot merge into a working copy with a switched subtree.*", True, False) #---------------------------------------------------------------------- # Test for issue #3603 'allow reintegrate merges into WCs with # missing subtrees'. +@SkipUnless(server_has_mergeinfo) @Issue(3603) def reintegrate_on_shallow_wc(sbox): "merge --reintegrate in shallow wc" @@ -840,8 +846,8 @@ def reintegrate_on_shallow_wc(sbox): # depth of A/D to empty. Since the only change made on the branch # since the branch point is to A_COPY/mu, the reintegrate should # simply work and update A/mu with the branch's contents. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'empty', A_D_path) expected_output = wc.State(A_path, { 'mu' : Item(status='U '), @@ -886,13 +892,13 @@ def reintegrate_on_shallow_wc(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, 1, 1) + [], 1, 1) # Now revert the reintegrate and make a second change on the # branch in r4, but this time change a subtree that corresponds # to the missing (shallow) portion of the source. The reintegrate # should still succeed. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) svntest.main.file_write(psi_COPY_path, "more branch work") svntest.main.run_svn(None, 'commit', '-m', 'Some more work on the A_COPY branch', wc_dir) @@ -923,7 +929,7 @@ def reintegrate_on_shallow_wc(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, 1, 1) + [], 1, 1) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -935,10 +941,10 @@ def reintegrate_fail_on_stale_source(sbox): A_path = sbox.ospath('A') mu_path = os.path.join(A_path, "mu") svntest.main.file_append(mu_path, 'some text appended to mu\n') - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'a change to mu', mu_path) # Unmix the revisions in the working copy. - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # The merge --reintegrate succeeds but since there were no changes # on A_COPY after it was branched the only result is updated mergeinfo # on the reintegrate target. 
@@ -1003,6 +1009,7 @@ def reintegrate_fail_on_stale_source(sbox): [], True, True) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def merge_file_with_space_in_its_path(sbox): "merge a file with space in its path" @@ -1016,17 +1023,17 @@ def merge_file_with_space_in_its_path(sbox): os.mkdir(some_dir) svntest.main.file_append(file1, "Initial text in the file.\n") svntest.main.run_svn(None, "add", some_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "ci", "-m", "r2", wc_dir) # Make r3. svntest.main.run_svn(None, "copy", file1, file2) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "ci", "-m", "r3", wc_dir) # Make r4. svntest.main.file_append(file2, "Next line of text in the file.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "ci", "-m", "r4", wc_dir) target_url = sbox.repo_url + '/some%20dir/file2' @@ -1096,16 +1103,15 @@ def reintegrate_with_subtree_mergeinfo(sbox): expected_output = wc.State(wc_dir, {'A_COPY_3/D/gamma' : Item(verb='Sending')}) expected_status.tweak('A_COPY_3/D/gamma', wc_rev=9) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r10 - Merge r9 from A_COPY_3/D to A/D, creating explicit subtree # mergeinfo under A. For this and every subsequent merge we update the WC # first to allow full inheritance and elision. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) expected_status.tweak(wc_rev=9) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[9]], ['U ' + gamma_path + '\n', ' U ' + D_path + '\n',]), @@ -1115,21 +1121,20 @@ def reintegrate_with_subtree_mergeinfo(sbox): 'A/D/gamma' : Item(verb='Sending')}) expected_status.tweak('A/D', 'A/D/gamma', wc_rev=10) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r11 - Make a text change to A_COPY_2/mu svntest.main.file_write(mu_COPY_2_path, "New content") expected_output = wc.State(wc_dir, {'A_COPY_2/mu' : Item(verb='Sending')}) expected_status.tweak('A_COPY_2/mu', wc_rev=11) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r12 - Merge r11 from A_COPY_2/mu to A_COPY/mu - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(11), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(11), [], 'up', wc_dir) expected_status.tweak(wc_rev=11) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[11]], ['U ' + mu_COPY_path + '\n', ' U ' + mu_COPY_path + '\n',]), @@ -1138,15 +1143,14 @@ def reintegrate_with_subtree_mergeinfo(sbox): {'A_COPY/mu' : Item(verb='Sending')}) expected_status.tweak('A_COPY/mu', wc_rev=12) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r13 - Do a 'synch' cherry harvest merge of all available revisions # from A to A_COPY - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(12), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(12), [], 'up', wc_dir) expected_status.tweak(wc_rev=12) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[2,12]], ['U ' + beta_COPY_path + '\n', 'U ' + gamma_COPY_path + '\n', @@ -1176,17 +1180,17 @@ def 
reintegrate_with_subtree_mergeinfo(sbox): 'A_COPY/D/gamma', wc_rev=13) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r14 - Make a text change on A_COPY/B/E/alpha svntest.main.file_write(alpha_COPY_path, "New content") expected_output = wc.State(wc_dir, {'A_COPY/B/E/alpha' : Item(verb='Sending')}) expected_status.tweak('A_COPY/B/E/alpha', wc_rev=14) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now, reintegrate A_COPY to A. This should succeed. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(14), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(14), [], 'up', wc_dir) expected_status.tweak(wc_rev=14) expected_output = wc.State(A_path, { @@ -1256,7 +1260,7 @@ def reintegrate_with_subtree_mergeinfo(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, 1, 1) + [], 1, 1) # Make some more changes to A_COPY so that the same revisions have *not* # been uniformly applied from A to A_COPY. In this case the reintegrate @@ -1275,12 +1279,11 @@ def reintegrate_with_subtree_mergeinfo(sbox): # A_COPY_2 3-------------[11]-- # First revert the previous reintegrate merge - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # r15 - Reverse Merge r8 from A/D to A_COPY/D. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-8]], ['U ' + omega_COPY_path + '\n', ' U ' + D_COPY_path + '\n',]), @@ -1290,7 +1293,7 @@ def reintegrate_with_subtree_mergeinfo(sbox): 'A_COPY/D/H/omega' : Item(verb='Sending')}) expected_status.tweak('A_COPY/D', 'A_COPY/D/H/omega', wc_rev=15) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now reintegrate A_COPY back to A. Since A_COPY/D no longer has r8 merged # to it from A, the merge should fail. 
Further we expect an error message @@ -1325,7 +1328,7 @@ def reintegrate_with_subtree_mergeinfo(sbox): # # D) Synch merge the changes in C) from 'trunk' to 'branch' and commit in # rev N+3. The renamed subtree on 'branch' now has additional explicit - # mergeinfo decribing the synch merge from trunk@N+1 to trunk@N+2. + # mergeinfo describing the synch merge from trunk@N+1 to trunk@N+2. # # E) Reintegrate 'branch' to 'trunk'. # @@ -1345,12 +1348,12 @@ def reintegrate_with_subtree_mergeinfo(sbox): # r874258 WC-to-WC moves won't create mergeinfo on the dest if the source # doesn't have any. So do a repos-to-repos move so explicit mergeinfo # *is* created on the destination. - svntest.actions.run_and_verify_svn(None, None,[], 'move', + svntest.actions.run_and_verify_svn(None,[], 'move', sbox.repo_url + '/A/D/gamma', sbox.repo_url + '/A/D/gamma_moved', '-m', 'REPOS-to-REPOS move' ) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_status.tweak(wc_rev=16) expected_status.remove('A/D/gamma') expected_status.add({'A/D/gamma_moved' : Item(status=' ', wc_rev=16)}) @@ -1365,7 +1368,6 @@ def reintegrate_with_subtree_mergeinfo(sbox): # behavior pass. 
# r17 - B) Synch merge from A to A_COPY svntest.actions.run_and_verify_svn( - None, expected_merge_output([[8], [13,16], [2,16]], ['U ' + omega_COPY_path + '\n', 'A ' + gamma_moved_COPY_path + '\n', @@ -1391,18 +1393,17 @@ def reintegrate_with_subtree_mergeinfo(sbox): wc_rev=17) expected_status.add({'A_COPY/D/gamma_moved' : Item(status=' ', wc_rev=17)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r18 - C) Text mod to A/D/gamma_moved svntest.main.file_write(gamma_moved_path, "Even newer content") expected_output = wc.State(wc_dir, {'A/D/gamma_moved' : Item(verb='Sending')}) expected_status.tweak('A/D/gamma_moved', wc_rev=18) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r19 - D) Synch merge from A to A_COPY svntest.actions.run_and_verify_svn( - None, expected_merge_output([[17,18], [2,18]], ['U ' + gamma_moved_COPY_path + '\n', ' U ' + A_COPY_path + '\n', @@ -1420,7 +1421,7 @@ def reintegrate_with_subtree_mergeinfo(sbox): 'A_COPY/D/gamma_moved', wc_rev=19) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Reintegrate A_COPY to A, this should work since # A_COPY/D/gamma_moved's natural history, @@ -1430,7 +1431,7 @@ def reintegrate_with_subtree_mergeinfo(sbox): # /A_COPY/D/gamma_moved:17-19 # # shows that it is fully synched up with trunk. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(19), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(19), [], 'up', wc_dir) expected_output = wc.State(A_path, { 'B/E/alpha' : Item(status='U '), @@ -1528,7 +1529,7 @@ def reintegrate_with_subtree_mergeinfo(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, 1, 1) + [], 1, 1) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -1555,71 +1556,71 @@ def multiple_reintegrates_from_the_same_branch(sbox): # the reintegrate target. # # r7 - Create the feature branch. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'copy', A_path, Feature_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Make a feature branch', wc_dir) # r8 - Make a change under 'A'. svntest.main.file_write(mu_path, "New trunk content.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "A text change under 'A'", wc_dir) # r9 - Make a change on the feature branch. svntest.main.file_write(Feature_beta_path, "New branch content.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "A text change on the feature branch", wc_dir) # r10 - Sync merge all changes from 'A' to the feature branch. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', Feature_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Sync merge 'A' to feature branch", wc_dir) # r11 - Reintegrate the feature branch back to 'A'. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) run_reintegrate(sbox.repo_url + '/A_FEATURE_BRANCH', A_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Reintegrate feature branch back to 'A'", wc_dir) # r12 - Do a --record-only merge from 'A' to the feature branch so we # don't try to merge r11 from trunk during the next sync merge. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c11', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c11', '--record-only', sbox.repo_url + '/A', Feature_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Sync merge 'A' to feature branch", wc_dir) # r13 - Make another change on the feature branch. svntest.main.file_write(Feature_beta_path, "Even newer branch content.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Different text on the feature branch", wc_dir) # r14 - Sync merge all changes from 'A' to the feature branch in # preparation for a second reintegrate from this branch. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', Feature_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "2nd Sync merge 'A' to feature branch", wc_dir) # r15 - Reintegrate the feature branch back to 'A' a second time. # No self-referential mergeinfo should be applied on 'A'. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_path, { #'' : Item(status=' U'), #<-- no self-referential mergeinfo applied! 'B/E/beta' : Item(status='U '), @@ -1685,25 +1686,24 @@ def multiple_reintegrates_from_the_same_branch(sbox): expected_disk, expected_status, expected_skip, - None, 1, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + [], 1, 1) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "2nd Reintegrate feature branch back to 'A'", wc_dir) # Demonstrate the danger of any self-referential mergeinfo on trunk. # # Merge all available revisions except r3 from 'A' to 'A_COPY'. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-r3:HEAD', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-r3:HEAD', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Merge -r3:HEAD from 'A' to 'A_COPY'", wc_dir) # No self-referential mergeinfo should have been carried on 'A_COPY' from # 'A' that would prevent the following merge from being operative. 
svntest.actions.run_and_verify_svn( - None, expected_merge_output([[2,3],[2,16]], ['U ' + psi_COPY_path + '\n', ' U ' + A_COPY_path + '\n',]), @@ -1716,6 +1716,7 @@ def multiple_reintegrates_from_the_same_branch(sbox): # # Also tests Issue #3591 'reintegrate merges update subtree mergeinfo # unconditionally'. +@SkipUnless(server_has_mergeinfo) @Issue(3591) def reintegrate_with_self_referential_mergeinfo(sbox): "source has target's history as explicit mergeinfo" @@ -1735,38 +1736,38 @@ def reintegrate_with_self_referential_mergeinfo(sbox): # r6 Copy A to A2 and then manually set some self-referential mergeinfo on # A2/B and A2. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(5), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(5), [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', A_path, A2_path) # /A:3 describes A2's natural history, a.k.a. it's implicit mergeinfo, so # it is self-referential. Same for /A/B:4 and A2/B. Normally this is # redundant but not harmful. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:mergeinfo', '/A:3', A2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:mergeinfo', '/A/B:4', A2_B_path) svntest.actions.run_and_verify_svn( - None, None, [], 'ci', '-m', + None, [], 'ci', '-m', 'copy A to A2 and set some self-referential mergeinfo on the latter.', wc_dir) # r7 Copy A2 to A2.1 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', A2_path, A2_1_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'copy A2to A2.1.', wc_dir) # r8 Make a change on A2.1/mu svntest.main.file_write(A2_1_mu_path, 'New A2.1 stuff') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Work done on the A2.1 branch.', wc_dir) # Update to uniform revision and reintegrate A2.1 back to A2. # Note that the mergeinfo on A2/B is not changed by the reintegration # and so is not expected to by updated to describe the merge. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) expected_output = wc.State(A2_path, { 'mu' : Item(status='U '), @@ -1841,7 +1842,7 @@ def reintegrate_with_self_referential_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, 1, 0) + [], 1, 0) #---------------------------------------------------------------------- # Test for issue #3577 '1.7 subtree mergeinfo recording breaks reintegrate' @@ -1877,20 +1878,20 @@ def reintegrate_with_subtree_merges(sbox): # from A to A_COPY, but do it via subtree merges so the mergeinfo # record of the merges insn't neatly reflected in the root of the # branch. Commit the merge as r8. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c5', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5', sbox.repo_url + '/A/B', B_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c4', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c4', sbox.repo_url + '/A/D/G/rho', rho_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c3', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c3', sbox.repo_url + '/A/D/H', H_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c6', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c6', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', '-m', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Merge everything from A to A_COPY', wc_dir) @@ -1915,7 +1916,7 @@ def reintegrate_with_subtree_merges(sbox): # Missing ranges: /A/D/G/rho:2-3,5-6 # A_COPY/D/H # Missing ranges: /A/D/H:2,4-5 - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_path, { 'mu' : Item(status='U '), }) @@ -1976,14 +1977,14 @@ def reintegrate_with_subtree_merges(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, 1, 1) + [], 1, 1) # Test issue #4329. Revert previous merge and commit a new edit to # A/D/H/psi. Attempt the same merge without the --reintegrate option. # It should succeed because the automatic merge code should detect that # a reintegrate-style merge is required, that merge should succeed and # there should be not conflict on A/D/H/psi. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) svntest.main.file_write(psi_path, "Non-conflicting trunk edit.\n") svntest.main.run_svn(None, 'commit', '-m', 'An edit on trunk prior to reintegrate.', wc_dir) @@ -1997,11 +1998,12 @@ def reintegrate_with_subtree_merges(sbox): expected_mergeinfo_output, expected_elision_output, expected_A_disk, expected_A_status, - expected_A_skip, None, None, None, - None, None, True, False, A_path) + expected_A_skip, + [], True, False, A_path) #---------------------------------------------------------------------- # Test for issue #3654 'added subtrees with mergeinfo break reintegrate'. +@SkipUnless(server_has_mergeinfo) @Issue(3654) def added_subtrees_with_mergeinfo_break_reintegrate(sbox): "added subtrees with mergeinfo break reintegrate" @@ -2024,93 +2026,93 @@ def added_subtrees_with_mergeinfo_break_reintegrate(sbox): # r8 - Add a new file A_COPY_2/C/nu. svntest.main.file_write(nu_COPY_2_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'add', nu_COPY_2_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add new file in A_COPY_2 branch', wc_dir) # r9 - Cyclic cherry pick merge r8 from A_COPY_2 back to A. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', '-c', '8', sbox.repo_url + '/A_COPY_2', A_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r8 from A_COPY_2 to A.', wc_dir) # r10 - Make an edit to A_COPY_2/C/nu. 
svntest.main.file_write(nu_COPY_2_path, "A_COPY_2 edit to file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Edit new file on A_COPY_2 branch', wc_dir) # r11 - Cyclic subtree cherry pick merge r10 from A_COPY_2/C/nu # back to A/C/nu. - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', '-c', '10', sbox.repo_url + '/A_COPY_2/C/nu', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r8 from A_COPY_2/C/nu to A/C/nu.', wc_dir) # r12 - Edit under A_COPY. svntest.main.file_write(mu_path, "mu edits on A_COPY.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Work on A_COPY branch.', wc_dir) # r13 - Sync merge A to A_COPY in preparation for reintegrate. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Prep for reintegrate: Sync A to A_COPY.', wc_dir) # r14 - Reintegrate A_COPY to A. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) run_reintegrate(sbox.repo_url + '/A_COPY', A_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Reintegrate A_COPY to A.', wc_dir) # r15 - Delete A_COPY. 
- svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'delete', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Delete A_COPY branch', wc_dir) # r16 - Create new A_COPY from A@HEAD=15. # # Update so we copy HEAD: - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'copy', A_path, A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Create new A_COPY branch from A', wc_dir) # r17 - Unrelated edits under both A and A_COPY. svntest.main.file_write(nu_path, "Trunk work on nu.\n") svntest.main.file_write(lambda_COPY_path, "lambda edit on A_COPY.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Unrelated edits on A and A_COPY branch.', wc_dir) # r18 - Sync A to A_COPY in preparation for another reintegrate. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Prep for reintegrate: Sync A to A_COPY.', wc_dir) # Reintegrate A_COPY back to A. We just synced A_COPY with A, so this # should work. The only text change should be the change made to # A_COPY/B/lambda in r17 after the new A_COPY was created. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_path, { '' : Item(status=' U'), 'B/lambda' : Item(status='U '), @@ -2190,11 +2192,12 @@ def added_subtrees_with_mergeinfo_break_reintegrate(sbox): expected_disk, expected_status, expected_skip, - None, 1, 1) + [], 1, 1) #---------------------------------------------------------------------- # Test for issue #3648 '2-URL merges incorrectly reverse-merge mergeinfo # for merge target'. +@SkipUnless(server_has_mergeinfo) @Issue(3648) def two_URL_merge_removes_valid_mergeinfo_from_target(sbox): "2-URL merge removes valid mergeinfo from target" @@ -2215,42 +2218,40 @@ def two_URL_merge_removes_valid_mergeinfo_from_target(sbox): # r8 - A simple text edit on the A_COPY branch. svntest.main.file_write(lambda_COPY_path, "Edit on 'branch 1'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Work on 'branch 1'.", wc_dir) # r9 - Sync the A_COPY branch with A up the HEAD (r8). Now A_COPY # differs from A only by the change made in r8 and by the mergeinfo # '/A:2-8' on A_COPY which was set to describe the merge. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Sync A to A_COPY.', wc_dir) # r10 - A simple text edit on our "trunk" A. 
svntest.main.file_write(mu_path, "Edit on 'trunk'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Work on 'trunk'", wc_dir) # r11 - Sync the A_COPY_2 branch with A up to HEAD (r10). - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Sync A to A_COPY_2.', wc_dir) # Confirm that the mergeinfo on each branch is what we expect. - svntest.actions.run_and_verify_svn(None, - [A_COPY_path + ' - /A:2-8\n'], + svntest.actions.run_and_verify_svn([A_COPY_path + ' - /A:2-8\n'], [], 'pg', SVN_PROP_MERGEINFO, '-R', A_COPY_path) - svntest.actions.run_and_verify_svn(None, - [A_COPY_2_path + ' - /A:3-10\n'], + svntest.actions.run_and_verify_svn([A_COPY_2_path + ' - /A:3-10\n'], [], 'pg', SVN_PROP_MERGEINFO, '-R', A_COPY_2_path) @@ -2285,7 +2286,7 @@ def two_URL_merge_removes_valid_mergeinfo_from_target(sbox): # # Before issue #3648 was fixed this test failed because the valid mergeinfo # '/A:r3-8' on A_COPY_2 was removed by the merge. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_COPY_2_path, { '' : Item(status=' G'), 'B/lambda' : Item(status='U '), @@ -2349,12 +2350,12 @@ def two_URL_merge_removes_valid_mergeinfo_from_target(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 1) + [], True, True) #---------------------------------------------------------------------- # Test for issue #3867 'reintegrate merges create mergeinfo for # non-existent paths'. 
+@SkipUnless(server_has_mergeinfo) @Issue(3867) def reintegrate_creates_bogus_mergeinfo(sbox): "reintegrate creates bogus mergeinfo" @@ -2436,7 +2437,7 @@ def reintegrate_creates_bogus_mergeinfo(sbox): expected_mergeinfo_output, expected_elision_output, expected_disk, None, expected_skip, - None, + [], 1, 1) @@ -2445,6 +2446,7 @@ def reintegrate_creates_bogus_mergeinfo(sbox): # subtree mergeinfo is reintegrated into a target with subtree # mergeinfo. Deliberately written in a style that works with the 1.6 # testsuite. +@SkipUnless(server_has_mergeinfo) @Issue(3957) def no_source_subtree_mergeinfo(sbox): "source without subtree mergeinfo" @@ -2516,7 +2518,9 @@ def no_source_subtree_mergeinfo(sbox): svntest.main.run_svn(None, 'update', wc_dir) # Verify that merge results in no subtree mergeinfo - svntest.actions.run_and_verify_svn(None, [], [], 'propget', 'svn:mergeinfo', + expected_stderr = '.*W200017: Property.*not found' + svntest.actions.run_and_verify_svn([], expected_stderr, + 'propget', 'svn:mergeinfo', sbox.repo_url + '/A/B2/E') # Merge trunk to branch-2 @@ -2525,7 +2529,8 @@ def no_source_subtree_mergeinfo(sbox): svntest.main.run_svn(None, 'update', wc_dir) # Verify that there is still no subtree mergeinfo - svntest.actions.run_and_verify_svn(None, [], [], 'propget', 'svn:mergeinfo', + svntest.actions.run_and_verify_svn([], expected_stderr, + 'propget', 'svn:mergeinfo', sbox.repo_url + '/A/B2/E') # Reintegrate branch-2 to trunk, this fails in 1.6.x from 1.6.13. 
@@ -2558,7 +2563,7 @@ def no_source_subtree_mergeinfo(sbox): expected_output, expected_mergeinfo, expected_elision, expected_disk, None, expected_skip, - None, + [], 1, 1) #---------------------------------------------------------------------- @@ -2584,13 +2589,13 @@ def reintegrate_replaced_source(sbox): svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path, '-c3') - svntest.main.run_svn(None, 'ci', '-m', 'Merge r3 from A to A_COPY', wc_dir) + sbox.simple_commit(message='Merge r3 from A to A_COPY') # r8 - Merge r4 from A to A_COPY svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path, '-c4') - svntest.main.run_svn(None, 'ci', '-m', 'Merge r4 from A to A_COPY', wc_dir) + sbox.simple_commit(message='Merge r4 from A to A_COPY') # r9 - Merge r5 from A to A_COPY. Make an additional edit to # A_COPY/B/E/beta. @@ -2598,7 +2603,7 @@ def reintegrate_replaced_source(sbox): svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path, '-c5') svntest.main.file_write(beta_COPY_path, "Branch edit mistake.\n") - svntest.main.run_svn(None, 'ci', '-m', 'Merge r5 from A to A_COPY', wc_dir) + sbox.simple_commit(message='Merge r5 from A to A_COPY') # r10 - Delete A_COPY and replace it with A_COPY@8. This removes the edit # we made above in r9 to A_COPY/B/E/beta. @@ -2606,19 +2611,17 @@ def reintegrate_replaced_source(sbox): svntest.main.run_svn(None, 'delete', A_COPY_path) svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A_COPY@8', A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Replace A_COPY with A_COPY@8', - wc_dir) + sbox.simple_commit(message='Replace A_COPY with A_COPY@8') # r11 - Make an edit on A_COPY/mu. 
svntest.main.file_write(mu_COPY_path, "Branch edit.\n") - svntest.main.run_svn(None, 'ci', '-m', 'Branch edit', - wc_dir) + sbox.simple_commit(message='Branch edit') # r12 - Do a final sync merge of A to A_COPY in preparation for # reintegration. svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Sycn A_COPY with A', wc_dir) + sbox.simple_commit(message='Sync A_COPY with A') # Reintegrate A_COPY to A. The resulting mergeinfo should be # '/A_COPY:2-8,10-12' because of the replacement which removed /A_COPY:9 @@ -2688,6 +2691,7 @@ def reintegrate_replaced_source(sbox): #---------------------------------------------------------------------- @SkipUnless(svntest.main.is_posix_os) +@SkipUnless(server_has_mergeinfo) @Issue(4052) def reintegrate_symlink_deletion(sbox): "reintegrate symlink deletion" @@ -2724,6 +2728,7 @@ def reintegrate_symlink_deletion(sbox): run_reintegrate(A_COPY_url, A_path) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def no_op_reintegrate(sbox): """no-op reintegrate""" @@ -2744,6 +2749,7 @@ def no_op_reintegrate(sbox): run_reintegrate(sbox.repo_url + '/A_COPY', A_path) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) def renamed_branch_reintegrate(sbox): """reintegrate a branch that has been renamed""" @@ -2782,6 +2788,38 @@ def renamed_branch_reintegrate(sbox): # ### TODO: Check the result more carefully than merely that it completed. run_reintegrate(sbox.repo_url + '/RENAMED@8', A_path) +@SkipUnless(server_has_mergeinfo) +def reintegrate_noop_branch_into_renamed_branch(sbox): + """reintegrate no-op branch into renamed branch""" + # In this test, the branch has no unique changes but contains a + # revision cherry-picked from trunk. Reintegrating such a branch + # should work, but used to error out when this test was written. 
+ + # Make A_COPY branch in r2, and do a few more commits to A in r3-6. + sbox.build() + + wc_dir = sbox.wc_dir + A_path = sbox.ospath('A') + A_COPY_path = sbox.ospath('A_COPY') + expected_disk, expected_status = set_up_branch(sbox) + + # Cherry-pick merge from trunk to branch + youngest_rev = sbox.youngest() + svntest.main.run_svn(None, 'merge', '-c', youngest_rev, + sbox.repo_url + '/A', A_COPY_path) + sbox.simple_commit() + sbox.simple_update() + + # Rename the trunk + sbox.simple_move('A', 'A_RENAMED') + sbox.simple_commit() + sbox.simple_update() + + # Try to reintegrate the branch. This should work but used to fail with: + # svn: E160013: File not found: revision 5, path '/A_RENAMED' + run_reintegrate(sbox.repo_url + '/A_COPY', sbox.ospath('A_RENAMED')) + + ######################################################################## # Run the tests @@ -2809,6 +2847,7 @@ test_list = [ None, reintegrate_symlink_deletion, no_op_reintegrate, renamed_branch_reintegrate, + reintegrate_noop_branch_into_renamed_branch, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/merge_tests.py b/subversion/tests/cmdline/merge_tests.py index d116118..4321e37 100755 --- a/subversion/tests/cmdline/merge_tests.py +++ b/subversion/tests/cmdline/merge_tests.py @@ -51,86 +51,16 @@ from svntest.actions import make_conflict_marker_text from svntest.actions import inject_conflict_into_expected_state from svntest.verify import RegexListOutput -def expected_merge_output(rev_ranges, additional_lines=[], foreign=False, - elides=False, two_url=False, target=None, - text_conflicts=0, prop_conflicts=0, tree_conflicts=0, - text_resolved=0, prop_resolved=0, tree_resolved=0, - skipped_paths=0): - """Generate an (inefficient) regex representing the expected merge - output and mergeinfo notifications from REV_RANGES and ADDITIONAL_LINES. - - REV_RANGES is a list of revision ranges for which mergeinfo is being - recorded. 
Each range is of the form [start, end] (where both START and - END are inclusive, unlike in '-rX:Y') or the form [single_rev] (which is - like '-c SINGLE_REV'). If REV_RANGES is None then only the standard - notification for a 3-way merge is expected. - - ADDITIONAL_LINES is a list of strings to match the other lines of output; - these are basically regular expressions except that backslashes will be - escaped herein. If ADDITIONAL_LINES is a single string, it is interpreted - the same as a list containing that string. - - If ELIDES is true, add to the regex an expression representing elision - notification. If TWO_URL is true, tweak the regex to expect the - appropriate mergeinfo notification for a 3-way merge. - - TARGET is the local path to the target, as it should appear in - notifications; if None, it is not checked. - - TEXT_CONFLICTS, PROP_CONFLICTS, TREE_CONFLICTS and SKIPPED_PATHS specify - the number of each kind of conflict to expect. - """ - - if rev_ranges is None: - lines = [svntest.main.merge_notify_line(None, None, False, foreign)] - else: - lines = [] - for rng in rev_ranges: - start_rev = rng[0] - if len(rng) > 1: - end_rev = rng[1] - else: - end_rev = None - lines += [svntest.main.merge_notify_line(start_rev, end_rev, - True, foreign, target)] - lines += [svntest.main.mergeinfo_notify_line(start_rev, end_rev, target)] - - if (elides): - lines += ["--- Eliding mergeinfo from .*\n"] - - if (two_url): - lines += ["--- Recording mergeinfo for merge between repository URLs .*\n"] - - # Address "The Backslash Plague" - # - # If ADDITIONAL_LINES are present there are possibly paths in it with - # multiple components and on Windows these components are separated with - # '\'. These need to be escaped properly in the regexp for the match to - # work correctly. See http://aspn.activestate.com/ASPN/docs/ActivePython - # /2.2/howto/regex/regex.html#SECTION000420000000000000000. 
- if isinstance(additional_lines, str): - additional_lines = [additional_lines] - if sys.platform == 'win32': - additional_lines = [line.replace("\\", "\\\\") for line in additional_lines] - lines += additional_lines - - lines += svntest.main.summary_of_conflicts( - text_conflicts, prop_conflicts, tree_conflicts, - text_resolved, prop_resolved, tree_resolved, - skipped_paths, - as_regex=True) - - return "|".join(lines) - -def check_mergeinfo_recursively(root_path, subpaths_mergeinfo): - """Check that the mergeinfo properties on and under ROOT_PATH are those in - SUBPATHS_MERGEINFO, a {path: mergeinfo-prop-val} dictionary.""" - expected = svntest.verify.UnorderedOutput( - [path + ' - ' + subpaths_mergeinfo[path] + '\n' - for path in subpaths_mergeinfo]) - svntest.actions.run_and_verify_svn(None, expected, [], - 'propget', '-R', SVN_PROP_MERGEINFO, - root_path) +from svntest.mergetrees import expected_merge_output, \ + check_mergeinfo_recursively, \ + set_up_dir_replace, \ + set_up_branch, \ + local_path, \ + svn_mkfile, \ + svn_modfile, \ + svn_copy, \ + svn_merge, \ + noninheritable_mergeinfo_test_set_up ###################################################################### # Tests @@ -224,9 +154,7 @@ def textual_merges_galore(sbox): # Initial commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Make the "other" working copy other_wc = sbox.add_wc_path('other') @@ -260,9 +188,7 @@ def textual_merges_galore(sbox): # Commit revision 3. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Make local mods in wc.other other_pi_path = os.path.join(other_wc, 'A', 'D', 'G', 'pi') @@ -278,7 +204,7 @@ def textual_merges_galore(sbox): # We skip A/D/G/rho in this merge; it will be tested with a separate # merge command. Temporarily put it back to revision 1, so this # merge succeeds cleanly. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', other_rho_path) # For A/D/G/tau, we append few different lines, to conflict with the @@ -317,7 +243,7 @@ def textual_merges_galore(sbox): inject_conflict_into_expected_state('A/D/G/tau', expected_disk, expected_status, other_tau_text, tau_text, - 3) + 1, 3) expected_skip = wc.State('', { }) @@ -333,12 +259,9 @@ def textual_merges_galore(sbox): expected_disk, expected_status, expected_skip, - None, - svntest.tree.detect_conflict_files, - (list(tau_conflict_support_files)), - None, None, False, True, - '--allow-mixed-revisions', - other_wc) + [], False, True, + '--allow-mixed-revisions', other_wc, + extra_files=list(tau_conflict_support_files)) # Now reverse merge r3 into A/D/G/rho, give it non-conflicting local # mods, then merge in the 2:3 change. ### Not bothering to do the @@ -350,7 +273,6 @@ def textual_merges_galore(sbox): # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. 
svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-3]], ['G ' + other_rho_path + '\n', ' G ' + other_rho_path + '\n',]), @@ -406,7 +328,7 @@ def textual_merges_galore(sbox): }) inject_conflict_into_expected_state('tau', expected_disk, expected_status, - other_tau_text, tau_text, 3) + other_tau_text, tau_text, 1, 3) # Do the merge, but check svn:mergeinfo props separately since # run_and_verify_merge would attempt to proplist tau's conflict @@ -421,11 +343,10 @@ def textual_merges_galore(sbox): expected_disk, expected_status, expected_skip, - None, - svntest.tree.detect_conflict_files, list(tau_conflict_support_files)) + extra_files=list(tau_conflict_support_files)) - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found', 'propget', SVN_PROP_MERGEINFO, os.path.join(other_wc, "A", "D", "G", "rho")) @@ -481,9 +402,7 @@ def add_with_history(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) expected_output = wc.State(C_path, { 'Q' : Item(status='A '), @@ -533,8 +452,7 @@ def add_with_history(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1) # check props + check_props=True) expected_output = svntest.wc.State(wc_dir, { 'A/C' : Item(verb='Sending'), @@ -561,9 +479,7 @@ def add_with_history(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- # Issue 953 @@ -580,14 +496,14 @@ def simple_property_merges(sbox): beta_path = sbox.ospath('A/B/E/beta') E_path = sbox.ospath('A/B/E') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', alpha_path) # A binary, non-UTF8 property value - svntest.actions.run_and_verify_svn(None, None, [], + 
svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo\201val', beta_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', E_path) @@ -601,31 +517,30 @@ def simple_property_merges(sbox): expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_output, expected_status) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Copy B to B2 as rev 3 B_url = sbox.repo_url + '/A/B' B2_url = sbox.repo_url + '/A/B2' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'copy B to B2', B_url, B2_url) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Modify a property and add a property for the file and directory - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'mod_foo', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bar', 'bar_val', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'mod\201foo', beta_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bar', 'bar\201val', beta_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'mod_foo', E_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bar', 'bar_val', E_path) # Commit change as rev 4 @@ -641,9 +556,8 @@ def simple_property_merges(sbox): 'A/B2/lambda' : 
Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_output, expected_status) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) pristine_status = expected_status pristine_status.tweak(wc_rev=4) @@ -689,10 +603,10 @@ def simple_property_merges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Revert merge - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) svntest.actions.run_and_verify_status(wc_dir, pristine_status) @@ -710,13 +624,15 @@ def simple_property_merges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) def error_message(property, old_value, new_value): return "Trying to change property '%s'\n" \ "but the property has been locally deleted.\n" \ - "<<<<<<< (local property value)\n=======\n" \ - "%s>>>>>>> (incoming property value)\n" % (property, new_value) + "<<<<<<< (local property value)\n" \ + "||||||| (incoming 'changed from' value)\n" \ + "%s=======\n" \ + "%s>>>>>>> (incoming 'changed to' value)\n" % (property, old_value, new_value) # Merge B 3:4 into B2 now causes a conflict expected_disk.add({ @@ -726,7 +642,7 @@ def simple_property_merges(sbox): 'E/alpha.prej' : Item(error_message('foo', 'foo_val', 'mod_foo')), 'E/beta.prej' - : Item(error_message('foo', 'foo?\\129val', 'mod?\\129foo')), + : Item(error_message('foo', 'foo?\\81val', 'mod?\\81foo')), }) expected_disk.tweak('E', 'E/alpha', props={'bar' : 'bar_val'}) expected_disk.tweak('E/beta', props={'bar' : 'bar\201val'}) @@ -742,25 +658,25 @@ def simple_property_merges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # issue 1109 : single file property merge. 
This test performs a merge # that should be a no-op (adding properties that are already present). - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) svntest.actions.run_and_verify_status(wc_dir, pristine_status) # Copy A at rev 4 to A2 to make revision 5. A_url = sbox.repo_url + '/A' A2_url = sbox.repo_url + '/A2' - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 5.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 5.\n'], [], 'copy', '-m', 'copy A to A2', A_url, A2_url) # Re-root the WC at A2. svntest.main.safe_rmtree(wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', A2_url, wc_dir) # Attempt to re-merge rev 4 of the original A's alpha. Mergeinfo @@ -770,10 +686,10 @@ def simple_property_merges(sbox): alpha_path = sbox.ospath('B/E/alpha') # Cannot use run_and_verify_merge with a file target - svntest.actions.run_and_verify_svn(None, [], [], 'merge', '-r', '3:4', + svntest.actions.run_and_verify_svn([], [], 'merge', '-r', '3:4', alpha_url, alpha_path) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'pl', alpha_path) saw_foo = 0 @@ -832,10 +748,10 @@ def merge_similar_unrelated_trees(sbox): os.rename(os.path.join(base2_path, 'A', 'B', 'beta'), os.path.join(base2_path, 'A', 'B', 'zeta')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', base1_path, base2_path, apply_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 2', wc_dir) expected_output = wc.State(apply_path, { @@ -846,7 +762,7 @@ def merge_similar_unrelated_trees(sbox): # run_and_verify_merge doesn't support 'svn merge URL URL path' ### TODO: We can 
use run_and_verify_merge() here now. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '--ignore-ancestry', base1_url, base2_url, @@ -890,12 +806,10 @@ def merge_one_file_helper(sbox, arg_flav, record_only = 0): expected_status.tweak('A/D/G/rho', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Backdate rho to revision 1, so we can merge in the rev 2 changes. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', rho_path) # Try one merge with an explicit target; it should succeed. @@ -904,21 +818,18 @@ def merge_one_file_helper(sbox, arg_flav, record_only = 0): # right. I think something is still assuming a directory target. if arg_flav == 'r': svntest.actions.run_and_verify_svn( - None, expected_merge_output([[2]], ['U ' + rho_path + '\n', ' U ' + rho_path + '\n']), [], 'merge', '-r', '1:2', rho_url, rho_path) elif arg_flav == 'c': svntest.actions.run_and_verify_svn( - None, expected_merge_output([[2]], ['U ' + rho_path + '\n', ' U ' + rho_path + '\n']), [], 'merge', '-c', '2', rho_url, rho_path) elif arg_flav == '*': svntest.actions.run_and_verify_svn( - None, expected_merge_output([[2]], ['U ' + rho_path + '\n', ' U ' + rho_path + '\n']), @@ -934,7 +845,7 @@ def merge_one_file_helper(sbox, arg_flav, record_only = 0): raise svntest.Failure("Unexpected text in merged '" + rho_path + "'") # Restore rho to pristine revision 1, for another merge. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'revert', rho_path) expected_status.tweak('A/D/G/rho', status=' ') svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -962,7 +873,7 @@ def merge_one_file_helper(sbox, arg_flav, record_only = 0): rho_expected_status = 'MM' merge_cmd.append(rho_url) - svntest.actions.run_and_verify_svn(None, expected_output, [], *merge_cmd) + svntest.actions.run_and_verify_svn(expected_output, [], *merge_cmd) # Inspect rho, make sure it's right. rho_text = svntest.tree.get_text('rho') @@ -1041,9 +952,7 @@ def merge_with_implicit_target_helper(sbox, arg_flav): # Initial commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Make the "other" working copy, at r1 other_wc = sbox.add_wc_path('other') @@ -1059,15 +968,13 @@ def merge_with_implicit_target_helper(sbox, arg_flav): # merge using filename for sourcepath # Cannot use run_and_verify_merge with a file target if arg_flav == 'r': - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[2]], + svntest.actions.run_and_verify_svn(expected_merge_output([[2]], ['U mu\n', ' U mu\n']), [], 'merge', '-r', '1:2', 'mu') elif arg_flav == 'c': - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[2]], + svntest.actions.run_and_verify_svn(expected_merge_output([[2]], ['U mu\n', ' U mu\n']), [], @@ -1077,9 +984,8 @@ def merge_with_implicit_target_helper(sbox, arg_flav): # Without a peg revision, the default merge range of BASE:1 (which # is a no-op) will be chosen. Let's do it both ways (no-op first, # of course). 
- svntest.actions.run_and_verify_svn(None, None, [], 'merge', 'mu') - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[2]], + svntest.actions.run_and_verify_svn(None, [], 'merge', 'mu') + svntest.actions.run_and_verify_svn(expected_merge_output([[2]], ['U mu\n', ' U mu\n']), [], @@ -1091,8 +997,7 @@ def merge_with_implicit_target_helper(sbox, arg_flav): # merge using URL for sourcepath if arg_flav == 'r': - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[-2]], + svntest.actions.run_and_verify_svn(expected_merge_output([[-2]], ['G mu\n', ' U mu\n', ' G mu\n',], @@ -1100,8 +1005,7 @@ def merge_with_implicit_target_helper(sbox, arg_flav): [], 'merge', '-r', '2:1', mu_url) elif arg_flav == 'c': - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[-2]], + svntest.actions.run_and_verify_svn(expected_merge_output([[-2]], ['G mu\n', ' U mu\n', ' G mu\n'], @@ -1112,8 +1016,7 @@ def merge_with_implicit_target_helper(sbox, arg_flav): # Implicit merge source URL and revision range detection is for # forward merges only (e.g. non-reverts). Undo application of # r2 to enable continuation of the test case. - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[-2]], + svntest.actions.run_and_verify_svn(expected_merge_output([[-2]], ['G mu\n', ' U mu\n', ' G mu\n'], @@ -1127,6 +1030,7 @@ def merge_with_implicit_target_helper(sbox, arg_flav): (svntest.tree.get_text('mu'), orig_mu_text)) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) @Issue(785) def merge_with_implicit_target_using_r(sbox): "merging a file w/no explicit target path using -r" @@ -1180,9 +1084,7 @@ def merge_with_prev(sbox): # Initial commit. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Make some other working copies other_wc = sbox.add_wc_path('other') @@ -1197,8 +1099,7 @@ def merge_with_prev(sbox): # Try to revert the last change to mu via svn merge # Cannot use run_and_verify_merge with a file target - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[-2]], + svntest.actions.run_and_verify_svn(expected_merge_output([[-2]], ['U mu\n', ' U mu\n'], elides=True), @@ -1220,11 +1121,11 @@ def merge_with_prev(sbox): os.chdir(another_wc) # ensure 'A' will be at revision 2 - svntest.actions.run_and_verify_svn(None, None, [], 'up') + svntest.actions.run_and_verify_svn(None, [], 'up') # now try a revert on a directory, and verify that it removed the zot # file we had added previously - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-r', 'COMMITTED:PREV', 'A', 'A') @@ -1269,8 +1170,7 @@ def merge_binary_file(sbox): 'A/theta' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # Make the "other" working copy other_wc = sbox.add_wc_path('other') @@ -1286,8 +1186,7 @@ def merge_binary_file(sbox): 'A/theta' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # In second working copy, attempt to 'svn merge -r 2:3'. # We should *not* see a conflict during the update, but a 'U'. 
@@ -1321,7 +1220,7 @@ def merge_binary_file(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, + [], True, True, '--allow-mixed-revisions', other_wc) @@ -1340,21 +1239,21 @@ def merge_in_new_file_and_diff(sbox): trunk_url = sbox.repo_url + '/A/B/E' # Create a branch - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', trunk_url, sbox.repo_url + '/branch', '-m', "Creating the Branch") # Update to revision 2. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) new_file_path = sbox.ospath('A/B/E/newfile') svntest.main.file_write(new_file_path, "newfile\n") # Add the new file, and commit revision 3. - svntest.actions.run_and_verify_svn(None, None, [], "add", new_file_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "add", new_file_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', "Changing the trunk.", wc_dir) @@ -1396,7 +1295,7 @@ def merge_in_new_file_and_diff(sbox): expected_output = [ "Index: " + url_branch_path + "/newfile\n", "===================================================================\n", - "--- "+ url_branch_path + "/newfile (revision 0)\n", + "--- "+ url_branch_path + "/newfile (nonexistent)\n", "+++ "+ url_branch_path + "/newfile (working copy)\n", "@@ -0,0 +1 @@\n", "+newfile\n", @@ -1409,9 +1308,10 @@ def merge_in_new_file_and_diff(sbox): "Property changes on: " + url_branch_path + "\n", "___________________________________________________________________\n", "Added: " + SVN_PROP_MERGEINFO + "\n", + "## -0,0 +0,1 ##\n", " Merged /A/B/E:r2-3\n", ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '--show-copies-as-adds', branch_path) @@ -1453,9 +1353,7 @@ def merge_skips_obstructions(sbox): }) 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) pre_merge_status = expected_status @@ -1497,14 +1395,13 @@ def merge_skips_obstructions(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 0) + [], True) # Revert the local mods, and this time make "Q" obstructed. An # unversioned file called "Q" will obstruct the adding of the # directory of the same name. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) os.unlink(os.path.join(C_path, "foo")) svntest.main.safe_rmtree(os.path.join(C_path, "Q")) @@ -1540,17 +1437,16 @@ def merge_skips_obstructions(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 0) + [], True) # Revert the local mods, and commit the deletion of iota and A/D/G. (r3) os.unlink(os.path.join(C_path, "foo")) - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) svntest.actions.run_and_verify_status(wc_dir, pre_merge_status) iota_path = sbox.ospath('iota') G_path = sbox.ospath('A/D/G') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', iota_path, G_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path, G_path) expected_output = wc.State(wc_dir, { 'A/D/G' : Item(verb='Deleting'), @@ -1560,8 +1456,7 @@ def merge_skips_obstructions(sbox): expected_status.remove('iota', 'A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now create unversioned iota and A/D/G, try running a merge -r2:3. 
# The merge process should skip over these targets, since they're @@ -1601,14 +1496,14 @@ def merge_skips_obstructions(sbox): expected_disk, expected_status.copy(wc_dir), expected_skip, - None, None, None, None, None, + [], True, False, '--allow-mixed-revisions', wc_dir) # Revert the local mods, and commit a change to A/B/lambda (r4), and then # commit the deletion of the same file. (r5) svntest.main.safe_rmtree(G_path) - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) expected_status.tweak('', status=' ') svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -1620,10 +1515,9 @@ def merge_skips_obstructions(sbox): expected_status.tweak('A/B/lambda', wc_rev=4) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', lambda_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', lambda_path) expected_output = wc.State(wc_dir, { 'A/B/lambda' : Item(verb='Deleting'), @@ -1631,8 +1525,7 @@ def merge_skips_obstructions(sbox): expected_status.remove('A/B/lambda') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # lambda is gone, so create an unversioned lambda in its place. # Then attempt to merge -r3:4, which is a change to lambda. The merge @@ -1666,7 +1559,7 @@ def merge_skips_obstructions(sbox): expected_disk, expected_status_short, expected_skip, - None, None, None, None, None, + [], True, False, '--allow-mixed-revisions', wc_dir) @@ -1674,10 +1567,10 @@ def merge_skips_obstructions(sbox): # working file. Then re-run the -r3:4 merge, and see how svn deals # with a file being under version control, but missing. 
- svntest.actions.run_and_verify_svn(None, None, [], 'add', lambda_path) + svntest.actions.run_and_verify_svn(None, [], 'add', lambda_path) # Mergeinfo prop changed so update to avoid out of date error. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(wc_dir, { '' : Item(verb='Sending'), @@ -1692,8 +1585,7 @@ def merge_skips_obstructions(sbox): expected_status.tweak('', status=' ', wc_rev=6) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) os.unlink(lambda_path) expected_output = wc.State(wc_dir, { }) @@ -1716,7 +1608,7 @@ def merge_skips_obstructions(sbox): expected_disk, expected_status.copy(wc_dir), expected_skip, - None, None, None, None, None, + [], 1, 0, '--ignore-ancestry', '--allow-mixed-revisions', wc_dir) @@ -1738,9 +1630,9 @@ def merge_into_missing(sbox): Q_path = os.path.join(F_path, 'Q') foo_path = os.path.join(F_path, 'foo') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', Q_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', Q_path) svntest.main.file_append(foo_path, "foo") - svntest.actions.run_and_verify_svn(None, None, [], 'add', foo_path) + svntest.actions.run_and_verify_svn(None, [], 'add', foo_path) expected_output = wc.State(wc_dir, { 'A/B/F/Q' : Item(verb='Adding'), @@ -1753,17 +1645,16 @@ def merge_into_missing(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) R_path = os.path.join(Q_path, 'R') bar_path = os.path.join(R_path, 'bar') baz_path = os.path.join(Q_path, 'baz') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', R_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', R_path) svntest.main.file_append(bar_path, "bar") - svntest.actions.run_and_verify_svn(None, None, [], 'add', bar_path) + svntest.actions.run_and_verify_svn(None, [], 'add', 
bar_path) svntest.main.file_append(baz_path, "baz") - svntest.actions.run_and_verify_svn(None, None, [], 'add', baz_path) + svntest.actions.run_and_verify_svn(None, [], 'add', baz_path) expected_output = wc.State(wc_dir, { 'A/B/F/Q/R' : Item(verb='Adding'), @@ -1777,8 +1668,7 @@ def merge_into_missing(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) os.unlink(foo_path) svntest.main.safe_rmtree(Q_path) @@ -1817,8 +1707,8 @@ def merge_into_missing(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 0, 0, '--dry-run', + [], False, False, + '--dry-run', '--ignore-ancestry', '--allow-mixed-revisions', F_path) @@ -1842,8 +1732,7 @@ def merge_into_missing(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 0, 0, + [], False, False, '--ignore-ancestry', '--allow-mixed-revisions', F_path) @@ -1884,8 +1773,8 @@ def dry_run_adds_file_with_prop(sbox): # Commit a new file which has a property. zig_path = sbox.ospath('A/B/E/zig') svntest.main.file_append(zig_path, "zig contents") - svntest.actions.run_and_verify_svn(None, None, [], 'add', zig_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', zig_path) + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', zig_path) @@ -1898,8 +1787,7 @@ def dry_run_adds_file_with_prop(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Do a regular merge of that change into a different dir. 
F_path = sbox.ospath('A/B/F') @@ -1927,9 +1815,7 @@ def dry_run_adds_file_with_prop(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, # please check props - 1) # and do a dry-run also) + [], True, True) #---------------------------------------------------------------------- # Regression test for issue #1673 @@ -1966,9 +1852,7 @@ def merge_binary_with_common_ancestry(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, - wc_dir) + expected_output, expected_status) # Create the first branch J_path = sbox.ospath('J') @@ -1985,9 +1869,7 @@ def merge_binary_with_common_ancestry(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, - wc_dir) + expected_output, expected_status) # Create the path where the files will be merged K_path = sbox.ospath('K') @@ -2003,9 +1885,7 @@ def merge_binary_with_common_ancestry(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, - wc_dir) + expected_output, expected_status) # Copy 'I/theta' to 'K/'. This file will be merged later. 
theta_K_path = os.path.join(K_path, 'theta') @@ -2021,9 +1901,7 @@ def merge_binary_with_common_ancestry(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, - wc_dir) + expected_output, expected_status) # Modify the original ancestry 'I/theta' svntest.main.file_append(theta_I_path, "some extra junk") @@ -2036,9 +1914,7 @@ def merge_binary_with_common_ancestry(sbox): expected_status.tweak('I/theta', wc_rev=6) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, - wc_dir) + expected_output, expected_status) # Create the second branch from the modified ancestry L_path = sbox.ospath('L') @@ -2056,9 +1932,7 @@ def merge_binary_with_common_ancestry(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, - wc_dir) + expected_output, expected_status) # Now merge first ('J/') and second ('L/') branches into 'K/' saved_cwd = os.getcwd() @@ -2066,8 +1940,7 @@ def merge_binary_with_common_ancestry(sbox): os.chdir(K_path) theta_J_url = sbox.repo_url + '/J/theta' theta_L_url = sbox.repo_url + '/L/theta' - svntest.actions.run_and_verify_svn(None, - expected_merge_output(None, + svntest.actions.run_and_verify_svn(expected_merge_output(None, ['U theta\n', ' U theta\n', ' G theta\n',], @@ -2115,11 +1988,11 @@ def merge_funny_chars_on_path(sbox): target_path = os.path.join(wc_dir, 'A', 'B', 'E', '%s' % target[1], target[2]) svntest.main.file_append(target_path, "%s/%s" % (target[1], target[2])) - svntest.actions.run_and_verify_svn(None, None, [], 'add', target_dir) + svntest.actions.run_and_verify_svn(None, [], 'add', target_dir) elif target[0] == 'f': target_path = os.path.join(wc_dir, 'A', 'B', 'E', '%s' % target[1]) svntest.main.file_append(target_path, "%s" % target[1]) - svntest.actions.run_and_verify_svn(None, None, [], 'add', target_path) + svntest.actions.run_and_verify_svn(None, [], 'add', target_path) else: raise svntest.Failure @@ -2127,12 
+2000,12 @@ def merge_funny_chars_on_path(sbox): for target in add_by_mkdir: if target[0] == 'd': target_dir = os.path.join(wc_dir, 'A', 'B', 'E', target[1]) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', target_dir) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', target_dir) if target[2]: target_path = os.path.join(wc_dir, 'A', 'B', 'E', '%s' % target[1], target[2]) svntest.main.file_append(target_path, "%s/%s" % (target[1], target[2])) - svntest.actions.run_and_verify_svn(None, None, [], 'add', target_path) + svntest.actions.run_and_verify_svn(None, [], 'add', target_path) expected_output_dic = {} expected_status_dic = {} @@ -2156,8 +2029,7 @@ def merge_funny_chars_on_path(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Do a regular merge of that change into a different dir. F_path = sbox.ospath('A/B/F') @@ -2198,9 +2070,9 @@ def merge_funny_chars_on_path(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 0, # don't check props - 1) # but do a dry-run + [], + False, # don't check props + True) # but do a dry-run expected_output_dic = {} @@ -2216,8 +2088,7 @@ def merge_funny_chars_on_path(sbox): svntest.actions.run_and_verify_commit(F_path, expected_output, - None, - None, wc_dir) + None) #----------------------------------------------------------------------- # Regression test for issue #2064 @@ -2236,29 +2107,29 @@ def merge_keyword_expansions(sbox): os.mkdir(tpath) svntest.main.run_svn(None, "add", tpath) # Commit r2. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "ci", "-m", "r2", wcpath) # Copy t to b. svntest.main.run_svn(None, "cp", tpath, bpath) # Commit r3 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "ci", "-m", "r3", wcpath) # Add a file to t. 
svntest.main.file_append(t_fpath, "$Revision$") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', t_fpath) # Ask for keyword expansion in the file. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:keywords', 'Revision', t_fpath) # Commit r4 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'r4', wcpath) # Update the wc before the merge. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', wcpath) expected_status = svntest.actions.get_virginal_state(wcpath, 4) @@ -2307,7 +2178,7 @@ def merge_prop_change_to_deleted_target(sbox): # Add a property to alpha. alpha_path = sbox.ospath('A/B/E/alpha') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', alpha_path) @@ -2318,13 +2189,12 @@ def merge_prop_change_to_deleted_target(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/B/E/alpha', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + expected_output, expected_status) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Remove alpha entirely. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', alpha_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path) expected_output = wc.State(wc_dir, { 'A/B/E/alpha' : Item(verb='Deleting'), }) @@ -2333,7 +2203,7 @@ def merge_prop_change_to_deleted_target(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, alpha_path) + [], alpha_path) # Try merging the original propset, which applies to a target that # no longer exists. 
The bug would only reproduce when run from @@ -2341,123 +2211,10 @@ def merge_prop_change_to_deleted_target(sbox): # --ignore-ancestry here because our merge logic will otherwise # prevent a merge of changes we already have. os.chdir(wc_dir) - svntest.actions.run_and_verify_svn("Merge errored unexpectedly", - svntest.verify.AnyOutput, [], 'merge', + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'merge', '-r1:2', '--ignore-ancestry', '.') #---------------------------------------------------------------------- -def set_up_dir_replace(sbox): - """Set up the working copy for directory replace tests, creating - directory 'A/B/F/foo' with files 'new file' and 'new file2' within - it (r2), and merging 'foo' onto 'C' (r3), then deleting 'A/B/F/foo' - (r4).""" - - sbox.build() - wc_dir = sbox.wc_dir - - C_path = sbox.ospath('A/C') - F_path = sbox.ospath('A/B/F') - F_url = sbox.repo_url + '/A/B/F' - - foo_path = os.path.join(F_path, 'foo') - new_file = os.path.join(foo_path, "new file") - new_file2 = os.path.join(foo_path, "new file 2") - - # Make directory foo in F, and add some files within it. - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', foo_path) - svntest.main.file_append(new_file, "Initial text in new file.\n") - svntest.main.file_append(new_file2, "Initial text in new file 2.\n") - svntest.main.run_svn(None, "add", new_file) - svntest.main.run_svn(None, "add", new_file2) - - # Commit all the new content, creating r2. 
- expected_output = wc.State(wc_dir, { - 'A/B/F/foo' : Item(verb='Adding'), - 'A/B/F/foo/new file' : Item(verb='Adding'), - 'A/B/F/foo/new file 2' : Item(verb='Adding'), - }) - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - expected_status.add({ - 'A/B/F/foo' : Item(status=' ', wc_rev=2), - 'A/B/F/foo/new file' : Item(status=' ', wc_rev=2), - 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=2), - }) - svntest.actions.run_and_verify_commit(wc_dir, - expected_output, - expected_status, - None, wc_dir) - - # Merge foo onto C - expected_output = wc.State(C_path, { - 'foo' : Item(status='A '), - 'foo/new file' : Item(status='A '), - 'foo/new file 2' : Item(status='A '), - }) - expected_mergeinfo_output = wc.State(C_path, { - '' : Item(status=' U'), - }) - expected_elision_output = wc.State(C_path, { - }) - expected_disk = wc.State('', { - '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2'}), - 'foo' : Item(), - 'foo/new file' : Item("Initial text in new file.\n"), - 'foo/new file 2' : Item("Initial text in new file 2.\n"), - }) - expected_status = wc.State(C_path, { - '' : Item(status=' M', wc_rev=1), - 'foo' : Item(status='A ', wc_rev='-', copied='+'), - 'foo/new file' : Item(status=' ', wc_rev='-', copied='+'), - 'foo/new file 2' : Item(status=' ', wc_rev='-', copied='+'), - }) - expected_skip = wc.State(C_path, { }) - svntest.actions.run_and_verify_merge(C_path, '1', '2', F_url, None, - expected_output, - expected_mergeinfo_output, - expected_elision_output, - expected_disk, - expected_status, - expected_skip, - None, None, None, None, None, 1) - # Commit merge of foo onto C, creating r3. 
- expected_output = svntest.wc.State(wc_dir, { - 'A/C' : Item(verb='Sending'), - 'A/C/foo' : Item(verb='Adding'), - }) - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - expected_status.add({ - 'A/B/F/foo' : Item(status=' ', wc_rev=2), - 'A/C' : Item(status=' ', wc_rev=3), - 'A/B/F/foo/new file' : Item(status=' ', wc_rev=2), - 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=2), - 'A/C/foo' : Item(status=' ', wc_rev=3), - 'A/C/foo/new file' : Item(status=' ', wc_rev=3), - 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3), - - }) - svntest.actions.run_and_verify_commit(wc_dir, - expected_output, - expected_status, - None, wc_dir) - - # Delete foo on F, creating r4. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', foo_path) - expected_output = svntest.wc.State(wc_dir, { - 'A/B/F/foo' : Item(verb='Deleting'), - }) - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - expected_status.add({ - 'A/C' : Item(status=' ', wc_rev=3), - 'A/C/foo' : Item(status=' ', wc_rev=3), - 'A/C/foo/new file' : Item(status=' ', wc_rev=3), - 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3), - }) - svntest.actions.run_and_verify_commit(wc_dir, - expected_output, - expected_status, - None, wc_dir) - -#---------------------------------------------------------------------- # A merge that replaces a directory # Tests for Issue #2144 and Issue #2607 @SkipUnless(server_has_mergeinfo) @@ -2480,8 +2237,8 @@ def merge_dir_replace(sbox): new_file3 = os.path.join(bar_path, "new file 3") # Make a couple of directories, and add some files within them. 
- svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', foo_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', bar_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', bar_path) svntest.main.file_append(new_file3, "Initial text in new file 3.\n") svntest.main.run_svn(None, "add", new_file3) svntest.main.file_append(foo_file, "Initial text in file foo.\n") @@ -2507,8 +2264,7 @@ def merge_dir_replace(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Merge replacement of foo onto C expected_output = wc.State(C_path, { 'foo' : Item(status='R '), @@ -2545,10 +2301,9 @@ def merge_dir_replace(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, - 0) # don't do a dry-run - # the output differs + [], True, + False) # don't do a dry-run + # the output differs # Commit merge of foo onto C expected_output = svntest.wc.State(wc_dir, { @@ -2569,8 +2324,7 @@ def merge_dir_replace(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- # A merge that replaces a directory and one of its children @@ -2591,8 +2345,8 @@ def merge_dir_and_file_replace(sbox): # Recreate foo and 'new file 2' in F and add a new folder with a file bar_path = os.path.join(foo_path, 'bar') new_file3 = os.path.join(bar_path, "new file 3") - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', foo_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', bar_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', bar_path) svntest.main.file_append(new_file3, "Initial text in new file 3.\n") svntest.main.run_svn(None, "add", new_file3) svntest.main.file_append(new_file2, 
"New text in new file 2.\n") @@ -2617,8 +2371,7 @@ def merge_dir_and_file_replace(sbox): expected_status.tweak('A/C', wc_rev=3) # From mergeinfo svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Merge replacement of foo onto C expected_output = wc.State(C_path, { 'foo' : Item(status='R '), @@ -2654,10 +2407,10 @@ def merge_dir_and_file_replace(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, - 0) # don't do a dry-run - # the output differs + [], + True, + False) # don't do a dry-run + # the output differs # Commit merge of foo onto C expected_output = svntest.wc.State(wc_dir, { @@ -2679,16 +2432,15 @@ def merge_dir_and_file_replace(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Confirm the files are present in the repository. new_file_2_url = sbox.repo_url + '/A/C/foo/new file 2' - svntest.actions.run_and_verify_svn(None, ["New text in new file 2.\n"], + svntest.actions.run_and_verify_svn(["New text in new file 2.\n"], [], 'cat', new_file_2_url) new_file_3_url = sbox.repo_url + '/A/C/foo/bar/new file 3' - svntest.actions.run_and_verify_svn(None, ["Initial text in new file 3.\n"], + svntest.actions.run_and_verify_svn(["Initial text in new file 3.\n"], [], 'cat', new_file_3_url) @@ -2704,12 +2456,12 @@ def merge_file_with_space_in_its_name(sbox): # Make r2. svntest.main.file_append(new_file, "Initial text in the file.\n") svntest.main.run_svn(None, "add", new_file) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "ci", "-m", "r2", wc_dir) # Make r3. svntest.main.file_append(new_file, "Next line of text in the file.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "ci", "-m", "r3", wc_dir) # Try to reverse merge. 
@@ -2719,7 +2471,7 @@ def merge_file_with_space_in_its_name(sbox): # file (i.e., the URL basename) lives. os.chdir(wc_dir) target_url = sbox.repo_url + '/new%20file' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "merge", "-r3:2", target_url) #---------------------------------------------------------------------- @@ -2740,7 +2492,7 @@ def merge_dir_branches(sbox): # Create foo in F foo_path = os.path.join(F_path, 'foo') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', foo_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path) expected_output = wc.State(wc_dir, { 'A/B/F/foo' : Item(verb='Adding'), @@ -2751,8 +2503,7 @@ def merge_dir_branches(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create an unversioned foo foo_path = sbox.ospath('foo') @@ -2763,7 +2514,7 @@ def merge_dir_branches(sbox): # syntax of the merge command. ### TODO: We can use run_and_verify_merge() here now. 
expected_output = expected_merge_output(None, "A " + foo_path + "\n") - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '--allow-mixed-revisions', C_url, F_url, wc_dir) @@ -2793,10 +2544,10 @@ def safe_property_merge(sbox): beta_path = sbox.ospath('A/B/E/beta') E_path = sbox.ospath('A/B/E') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', alpha_path, beta_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', E_path) @@ -2809,27 +2560,26 @@ def safe_property_merge(sbox): expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_output, expected_status) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Copy B to B2 as rev 3 (making a branch) B_url = sbox.repo_url + '/A/B' B2_url = sbox.repo_url + '/A/B2' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'copy B to B2', B_url, B2_url) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Change the properties underneath B again, and commit as r4 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val2', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propdel', 'foo', beta_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val2', E_path) expected_output = svntest.wc.State(wc_dir, { @@ -2838,19 +2588,18 @@ def 
safe_property_merge(sbox): 'A/B/E/beta' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, None, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_output, None) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Make local propchanges to E, alpha and beta in the branch. alpha_path2 = sbox.ospath('A/B2/E/alpha') beta_path2 = sbox.ospath('A/B2/E/beta') E_path2 = sbox.ospath('A/B2/E') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'branchval', alpha_path2, beta_path2) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'branchval', E_path2) @@ -2901,12 +2650,8 @@ def safe_property_merge(sbox): expected_disk, expected_status, expected_skip, - None, # expected error string - svntest.tree.detect_conflict_files, - extra_files, - None, None, # no B singleton handler - 1, # check props - 0) # dry_run + [], True, False, + extra_files=extra_files) #---------------------------------------------------------------------- # Test for issue 2035, whereby 'svn merge' wouldn't always mark @@ -2923,10 +2668,10 @@ def property_merge_from_branch(sbox): alpha_path = sbox.ospath('A/B/E/alpha') E_path = sbox.ospath('A/B/E') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', E_path) @@ -2937,24 +2682,23 @@ def property_merge_from_branch(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/B/E', 'A/B/E/alpha', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + 
expected_output, expected_status) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Copy B to B2 as rev 3 (making a branch) B_url = sbox.repo_url + '/A/B' B2_url = sbox.repo_url + '/A/B2' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'copy B to B2', B_url, B2_url) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Change the properties underneath B again, and commit as r4 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val2', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val2', E_path) expected_output = svntest.wc.State(wc_dir, { @@ -2962,18 +2706,17 @@ def property_merge_from_branch(sbox): 'A/B/E/alpha' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, None, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_output, None) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Make different propchanges changes to the B2 branch and commit as r5. 
alpha_path2 = sbox.ospath('A/B2/E/alpha') E_path2 = sbox.ospath('A/B2/E') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'branchval', alpha_path2) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'branchval', E_path2) expected_output = svntest.wc.State(wc_dir, { @@ -2981,9 +2724,8 @@ def property_merge_from_branch(sbox): 'A/B2/E/alpha' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, None, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_output, None) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Now merge the recent B change to the branch. There are no local # mods anywhere, but we should still get property conflicts anyway! @@ -3031,12 +2773,8 @@ def property_merge_from_branch(sbox): expected_disk, expected_status, expected_skip, - None, # expected error string - svntest.tree.detect_conflict_files, - extra_files, - None, None, # no B singleton handler - 1, # check props - 0) # dry_run + [], True, False, + extra_files=extra_files) #---------------------------------------------------------------------- # Another test for issue 2035, whereby sometimes 'svn merge' marked @@ -3050,7 +2788,7 @@ def property_merge_undo_redo(sbox): # Add a property to a file, commit as r2. 
alpha_path = sbox.ospath('A/B/E/alpha') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', alpha_path) @@ -3061,9 +2799,8 @@ def property_merge_undo_redo(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/B/E/alpha', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_output, expected_status) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Use 'svn merge' to undo the commit. ('svn merge -r2:1') # Result should be a single local-prop-mod. @@ -3089,11 +2826,7 @@ def property_merge_undo_redo(sbox): expected_disk, expected_status, expected_skip, - None, # expected error string - None, None, # no A singleton handler - None, None, # no B singleton handler - 1, # check props - 0) # dry_run + [], True, False) # Change mind, re-apply the change ('svn merge -r1:2'). # This should merge cleanly into existing prop-mod, status shows nothing. @@ -3123,11 +2856,7 @@ def property_merge_undo_redo(sbox): expected_disk, expected_status, expected_skip, - None, # expected error string - None, None, # no A singleton handler - None, None, # no B singleton handler - 1, # check props - 0, # dry_run + [], True, False, '--ignore-ancestry', wc_dir) @@ -3147,25 +2876,24 @@ def cherry_pick_text_conflict(sbox): branch_mu_path = sbox.ospath('copy-of-A/mu') # Create a branch of A. - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', A_url, branch_A_url, '-m', "Creating copy-of-A") # Update to get the branch. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Change mu's text on the branch, producing r3 through r6. 
for rev in range(3, 7): svntest.main.file_append(branch_mu_path, ("r%d\n" % rev) * 3) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add lines to mu in r%d.' % rev, wc_dir) # Mark r5 as merged into trunk, to create disparate revision ranges # which need to be merged. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], [' U ' + A_path + '\n']), [], 'merge', '-c5', '--record-only', @@ -3176,12 +2904,15 @@ def cherry_pick_text_conflict(sbox): expected_output = wc.State(A_path, { 'mu' : Item(status='C '), }) - expected_mergeinfo_output = wc.State(A_path, {}) + expected_mergeinfo_output = wc.State(A_path, { + '' : Item(status=' G') + }) expected_elision_output = wc.State(A_path, { }) expected_disk = wc.State('', { 'mu' : Item("This is the file 'mu'.\n" - + make_conflict_marker_text("r3\n" * 3, "r4\n" * 3, 4)), + + make_conflict_marker_text('', "r3\n" * 3 + "r4\n" * 3, 3, 4, + old_text='r3\n' * 3)), 'B' : Item(), 'B/lambda' : Item("This is the file 'lambda'.\n"), 'B/E' : Item(), @@ -3223,7 +2954,7 @@ def cherry_pick_text_conflict(sbox): }) expected_status.tweak(wc_rev=2) expected_skip = wc.State('', { }) - expected_error = "conflicts were produced while merging r3:4" + expected_error = ".*conflicts were produced while merging r3:4.*" svntest.actions.run_and_verify_merge(A_path, '3', '6', branch_A_url, None, expected_output, expected_mergeinfo_output, @@ -3232,13 +2963,10 @@ def cherry_pick_text_conflict(sbox): expected_status, expected_skip, expected_error, - svntest.tree.detect_conflict_files, + extra_files= ["mu\.working", "mu\.merge-right\.r4", - "mu\.merge-left\.r3"], - None, None, # no singleton handler - 0, # don't check props - 0) # not a dry_run + "mu\.merge-left\.r3"]) #---------------------------------------------------------------------- # Test for issue 2135 @@ -3251,7 +2979,7 @@ def merge_file_replace(sbox): # File scheduled for deletion rho_path = sbox.ospath('A/D/G/rho') 
- svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/rho', status='D ') @@ -3266,11 +2994,10 @@ def merge_file_replace(sbox): # Commit rev 2 svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create and add a new file. svntest.main.file_write(rho_path, "new rho\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'add', rho_path) # Commit revsion 3 expected_status.add({ @@ -3283,8 +3010,7 @@ def merge_file_replace(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, - None, wc_dir) + None) # Update working copy expected_output = svntest.wc.State(wc_dir, {}) @@ -3327,9 +3053,7 @@ def merge_file_replace(sbox): expected_status.tweak('A/D/G/rho', status=' ', copied=None, wc_rev='4') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- # Test for issue 2522 @@ -3343,7 +3067,7 @@ def merge_file_replace_to_mixed_rev_wc(sbox): # File scheduled for deletion rho_path = sbox.ospath('A/D/G/rho') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/rho', status='D ') @@ -3358,8 +3082,7 @@ def merge_file_replace_to_mixed_rev_wc(sbox): # Commit rev 2 svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Update working copy expected_disk = svntest.main.greek_state.copy() @@ -3374,7 +3097,7 @@ def merge_file_replace_to_mixed_rev_wc(sbox): # Create and add a new file. 
svntest.main.file_write(rho_path, "new rho\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'add', rho_path) # Commit revsion 3 expected_status.add({ @@ -3391,8 +3114,7 @@ def merge_file_replace_to_mixed_rev_wc(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # merge changes from r3:1... # @@ -3404,9 +3126,9 @@ def merge_file_replace_to_mixed_rev_wc(sbox): # Normally we'd simply update the whole working copy, but since that would # defeat the purpose of this test (see the comment below), instead we'll # update only "." using --depth empty. This preserves the intent of the - # orginal mixed-rev test for this issue, but allows the merge tracking + # original mixed-rev test for this issue, but allows the merge tracking # logic to consider r3 as valid for reverse merging. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--depth', 'empty', wc_dir) expected_status.tweak('', wc_rev=3) expected_output = svntest.wc.State(wc_dir, { @@ -3429,7 +3151,7 @@ def merge_file_replace_to_mixed_rev_wc(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, + [], True, False, '--allow-mixed-revisions', wc_dir) @@ -3444,9 +3166,7 @@ def merge_file_replace_to_mixed_rev_wc(sbox): expected_status.tweak('A/D/G/rho', status=' ', copied=None, wc_rev='4') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) #---------------------------------------------------------------------- # use -x -w option for ignoring whitespace during merge @@ -3469,8 +3189,7 @@ def merge_ignore_whitespace(sbox): expected_output = svntest.wc.State(wc_dir, { 'iota' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, 
expected_output, None) # change the file, mostly whitespace changes + an extra line svntest.main.file_write(file_path, "A a\nBb \n Cc\nNew line in iota\n") @@ -3479,12 +3198,10 @@ def merge_ignore_whitespace(sbox): expected_status.tweak(file_name, wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Backdate iota to revision 2, so we can merge in the rev 3 changes. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '2', file_path) # Make some local whitespace changes, these should not conflict # with the remote whitespace changes as both will be ignored. @@ -3517,8 +3234,8 @@ def merge_ignore_whitespace(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 0, 0, '--allow-mixed-revisions', + [], False, False, + '--allow-mixed-revisions', '-x', '-w', wc_dir) #---------------------------------------------------------------------- @@ -3543,8 +3260,7 @@ def merge_ignore_eolstyle(sbox): expected_output = svntest.wc.State(wc_dir, { 'iota' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) # change the file, mostly eol changes + an extra line svntest.main.file_write(file_path, @@ -3558,12 +3274,10 @@ def merge_ignore_eolstyle(sbox): expected_status.tweak(file_name, wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Backdate iota to revision 2, so we can merge in the rev 3 changes. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '2', file_path) # Make some local eol changes, these should not conflict # with the remote eol changes as both will be ignored. 
@@ -3600,8 +3314,8 @@ def merge_ignore_eolstyle(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 0, 0, '--allow-mixed-revisions', + [], False, False, + '--allow-mixed-revisions', '-x', '--ignore-eol-style', wc_dir) #---------------------------------------------------------------------- @@ -3630,7 +3344,7 @@ def merge_conflict_markers_matching_eol(sbox): # Checkout a second working copy wc_backup = sbox.add_wc_path('backup') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc_backup) # set starting revision @@ -3663,8 +3377,7 @@ def merge_conflict_markers_matching_eol(sbox): # Commit the original change and note the 'base' revision number svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) cur_rev = cur_rev + 1 base_rev = cur_rev @@ -3696,6 +3409,7 @@ def merge_conflict_markers_matching_eol(sbox): 'A/mu' : Item(contents= "This is the file 'mu'." 
+ eolchar + "<<<<<<< .working" + eolchar + "Conflicting appended text for mu" + eolchar + + "||||||| .merge-left.r" + str(cur_rev - 1) + eolchar + "=======" + eolchar + "Original appended text for mu" + eolchar + ">>>>>>> .merge-right.r" + str(cur_rev) + eolchar), @@ -3771,7 +3485,7 @@ def merge_eolstyle_handling(sbox): # Checkout a second working copy wc_backup = sbox.add_wc_path('backup') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc_backup) path_backup = os.path.join(wc_backup, 'A', 'mu') @@ -3895,8 +3609,7 @@ def create_deep_trees(wc_dir): 'A/B/F/E/beta' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) svntest.main.run_svn(None, 'cp', A_B_F_E_path, A_B_F_E1_path) @@ -3910,11 +3623,10 @@ def create_deep_trees(wc_dir): 'A/B/F/E1/beta' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # Bring the entire WC up to date with rev 3. - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) expected_status.tweak(wc_rev=3) # Copy B and commit, creating revision 4. @@ -3935,8 +3647,7 @@ def create_deep_trees(wc_dir): 'A/copy-of-B/lambda' : Item(status=' ', wc_rev=4), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) expected_disk = svntest.main.greek_state.copy() expected_disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta') @@ -3960,7 +3671,7 @@ def create_deep_trees(wc_dir): svntest.actions.verify_disk(wc_dir, expected_disk, True) # Bring the entire WC up to date with rev 4. 
- svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) svntest.actions.verify_disk(wc_dir, expected_disk, True) @@ -3994,11 +3705,10 @@ def avoid_repeated_merge_using_inherited_merge_info(sbox): }) expected_status.tweak('A/B/F/E/alpha', wc_rev=5) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # Bring the entire WC up to date with rev 5. - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Merge changes from rev 5 of B (to alpha) into copy_of_B. expected_output = wc.State(copy_of_B_path, { @@ -4041,11 +3751,7 @@ def avoid_repeated_merge_using_inherited_merge_info(sbox): expected_disk, expected_status, expected_skip, - None, - None, - None, - None, - None, 1) + check_props=True) # Commit the result of the merge, creating revision 6. expected_output = svntest.wc.State(copy_of_B_path, { @@ -4053,14 +3759,14 @@ def avoid_repeated_merge_using_inherited_merge_info(sbox): 'F/E/alpha' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(copy_of_B_path, expected_output, - None, None, wc_dir) + None) # Update the WC to bring /A/copy_of_B/F from rev 4 to rev 6. # Without this update, a subsequent merge will not find any merge # info for /A/copy_of_B/F -- nor its parent dir in the repos -- at # rev 4. Mergeinfo wasn't introduced until rev 6. copy_of_B_F_E_path = os.path.join(copy_of_B_path, 'F', 'E') - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Attempt to re-merge changes to alpha from rev 4. 
Use the merge # info inherited from the grandparent (copy-of-B) of our merge @@ -4071,7 +3777,6 @@ def avoid_repeated_merge_using_inherited_merge_info(sbox): 'beta' : Item(status=' ', wc_rev=6), }) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], [' U ' + copy_of_B_F_E_path + '\n', ' G ' + copy_of_B_F_E_path + '\n'], @@ -4118,7 +3823,7 @@ def avoid_repeated_merge_on_subtree_with_merge_info(sbox): }) expected_status.tweak('A/B/F/E/alpha', wc_rev=5) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) for path_and_mergeinfo in (('E', '/A/B/F/E:5'), ('E1', '/A/B/F/E:5')): @@ -4152,11 +3857,7 @@ def avoid_repeated_merge_on_subtree_with_merge_info(sbox): expected_disk, expected_status, expected_skip, - None, - None, - None, - None, - None, 1) + check_props=True) # Commit the result of the merge, creating new revision. expected_output = svntest.wc.State(path_name, { @@ -4164,7 +3865,7 @@ def avoid_repeated_merge_on_subtree_with_merge_info(sbox): 'alpha' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(path_name, - expected_output, None, None, wc_dir) + expected_output, None, [], wc_dir) # Edit A/B/F/E/alpha and commit it, creating revision 8. new_content_for_alpha = 'new content to alpha\none more line\n' @@ -4179,12 +3880,12 @@ def avoid_repeated_merge_on_subtree_with_merge_info(sbox): 'beta' : Item(status=' ', wc_rev=4), }) svntest.actions.run_and_verify_commit(A_B_F_E_path, expected_output, - expected_status, None, wc_dir) + expected_status, [], wc_dir) # Update the WC to bring /A/copy_of_B to rev 8. # Without this update expected_status tree would be cumbersome to # understand. - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Merge changes from rev 4:8 of A/B into A/copy_of_B. 
A/copy_of_B/F/E1 # has explicit mergeinfo and exists at r4 in the merge source, so it @@ -4234,17 +3935,13 @@ def avoid_repeated_merge_on_subtree_with_merge_info(sbox): expected_disk, expected_status, expected_skip, - None, - None, - None, - None, - None, 1) + check_props=True) # Test for part of Issue #2821, see # http://subversion.tigris.org/issues/show_bug.cgi?id=2821#desc22 # # Revert all local changes. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Make a text mod to A/copy-of-B/F/E/alpha newer_content_for_alpha = "Conflicting content" @@ -4292,11 +3989,7 @@ def avoid_repeated_merge_on_subtree_with_merge_info(sbox): expected_disk, expected_status, expected_skip, - None, - None, - None, - None, - None, 1) + check_props=True) #---------------------------------------------------------------------- def tweak_src_then_merge_to_dest(sbox, src_path, dst_path, @@ -4316,11 +4009,11 @@ def tweak_src_then_merge_to_dest(sbox, src_path, dst_path, { '': Item(wc_rev=new_rev, status=' ')}) svntest.actions.run_and_verify_commit(src_path, expected_output, - expected_status, None, src_path) + expected_status) # Update the WC to new_rev so that it would be easier to expect everyone # to be at new_rev. - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Merge new_rev of src_path to dst_path. 
@@ -4332,7 +4025,6 @@ def tweak_src_then_merge_to_dest(sbox, src_path, dst_path, merge_url = merge_url.replace('\\', '/') svntest.actions.run_and_verify_svn( - None, expected_merge_output([[new_rev]], ['U ' + dst_path + '\n', ' U ' + dst_path + '\n']), @@ -4380,7 +4072,7 @@ def obey_reporter_api_semantics_while_doing_subtree_merges(sbox): 'A/copy-of-D/gamma' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) cur_rev = 2 @@ -4403,8 +4095,7 @@ def obey_reporter_api_semantics_while_doing_subtree_merges(sbox): file_contents, cur_rev) copy_of_A_D_wc_rev = cur_rev - svntest.actions.run_and_verify_svn(None, - ['\n', + svntest.actions.run_and_verify_svn(['Committing transaction...\n', 'Committed revision ' + str(cur_rev+1) + '.\n'], [], @@ -4472,158 +4163,7 @@ def obey_reporter_api_semantics_while_doing_subtree_merges(sbox): expected_disk, expected_status, expected_skip, - None, - None, - None, - None, - None, 1) - -#---------------------------------------------------------------------- -def set_up_branch(sbox, branch_only = False, nbr_of_branches = 1): - '''Starting with standard greek tree, copy 'A' NBR_OF_BRANCHES times - to A_COPY, A_COPY_2, A_COPY_3, and so on. 
Then make four modifications - (setting file contents to "New content") under A: - r(2 + NBR_OF_BRANCHES) - A/D/H/psi - r(3 + NBR_OF_BRANCHES) - A/D/G/rho - r(4 + NBR_OF_BRANCHES) - A/B/E/beta - r(5 + NBR_OF_BRANCHES) - A/D/H/omega - Return (expected_disk, expected_status).''' - - # With the default parameters, the branching looks like this: - # - # A -1-----3-4-5-6-- - # \ - # A_COPY 2----------- - - wc_dir = sbox.wc_dir - - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - expected_disk = svntest.main.greek_state.copy() - - def copy_A(dest_name, rev): - expected = svntest.verify.UnorderedOutput( - ["A " + os.path.join(wc_dir, dest_name, "B") + "\n", - "A " + os.path.join(wc_dir, dest_name, "B", "lambda") + "\n", - "A " + os.path.join(wc_dir, dest_name, "B", "E") + "\n", - "A " + os.path.join(wc_dir, dest_name, "B", "E", "alpha") + "\n", - "A " + os.path.join(wc_dir, dest_name, "B", "E", "beta") + "\n", - "A " + os.path.join(wc_dir, dest_name, "B", "F") + "\n", - "A " + os.path.join(wc_dir, dest_name, "mu") + "\n", - "A " + os.path.join(wc_dir, dest_name, "C") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "gamma") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "G") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "G", "pi") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "G", "rho") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "G", "tau") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "H") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "H", "chi") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "H", "omega") + "\n", - "A " + os.path.join(wc_dir, dest_name, "D", "H", "psi") + "\n", - "Checked out revision " + str(rev - 1) + ".\n", - "A " + os.path.join(wc_dir, dest_name) + "\n"]) - expected_status.add({ - dest_name + "/B" : Item(status=' ', wc_rev=rev), - dest_name + "/B/lambda" : Item(status=' ', wc_rev=rev), - dest_name + "/B/E" : 
Item(status=' ', wc_rev=rev), - dest_name + "/B/E/alpha" : Item(status=' ', wc_rev=rev), - dest_name + "/B/E/beta" : Item(status=' ', wc_rev=rev), - dest_name + "/B/F" : Item(status=' ', wc_rev=rev), - dest_name + "/mu" : Item(status=' ', wc_rev=rev), - dest_name + "/C" : Item(status=' ', wc_rev=rev), - dest_name + "/D" : Item(status=' ', wc_rev=rev), - dest_name + "/D/gamma" : Item(status=' ', wc_rev=rev), - dest_name + "/D/G" : Item(status=' ', wc_rev=rev), - dest_name + "/D/G/pi" : Item(status=' ', wc_rev=rev), - dest_name + "/D/G/rho" : Item(status=' ', wc_rev=rev), - dest_name + "/D/G/tau" : Item(status=' ', wc_rev=rev), - dest_name + "/D/H" : Item(status=' ', wc_rev=rev), - dest_name + "/D/H/chi" : Item(status=' ', wc_rev=rev), - dest_name + "/D/H/omega" : Item(status=' ', wc_rev=rev), - dest_name + "/D/H/psi" : Item(status=' ', wc_rev=rev), - dest_name : Item(status=' ', wc_rev=rev)}) - expected_disk.add({ - dest_name : Item(), - dest_name + '/B' : Item(), - dest_name + '/B/lambda' : Item("This is the file 'lambda'.\n"), - dest_name + '/B/E' : Item(), - dest_name + '/B/E/alpha' : Item("This is the file 'alpha'.\n"), - dest_name + '/B/E/beta' : Item("This is the file 'beta'.\n"), - dest_name + '/B/F' : Item(), - dest_name + '/mu' : Item("This is the file 'mu'.\n"), - dest_name + '/C' : Item(), - dest_name + '/D' : Item(), - dest_name + '/D/gamma' : Item("This is the file 'gamma'.\n"), - dest_name + '/D/G' : Item(), - dest_name + '/D/G/pi' : Item("This is the file 'pi'.\n"), - dest_name + '/D/G/rho' : Item("This is the file 'rho'.\n"), - dest_name + '/D/G/tau' : Item("This is the file 'tau'.\n"), - dest_name + '/D/H' : Item(), - dest_name + '/D/H/chi' : Item("This is the file 'chi'.\n"), - dest_name + '/D/H/omega' : Item("This is the file 'omega'.\n"), - dest_name + '/D/H/psi' : Item("This is the file 'psi'.\n"), - }) - - # Make a branch A_COPY to merge into. 
- svntest.actions.run_and_verify_svn(None, expected, [], 'copy', - sbox.repo_url + "/A", - os.path.join(wc_dir, - dest_name)) - - expected_output = wc.State(wc_dir, {dest_name : Item(verb='Adding')}) - svntest.actions.run_and_verify_commit(wc_dir, - expected_output, - expected_status, - None, - wc_dir) - for i in range(nbr_of_branches): - if i == 0: - copy_A('A_COPY', i + 2) - else: - copy_A('A_COPY_' + str(i + 1), i + 2) - - if branch_only: - return expected_disk, expected_status - - # Make some changes under A which we'll later merge under A_COPY: - - # r(nbr_of_branches + 2) - modify and commit A/D/H/psi - svntest.main.file_write(sbox.ospath('A/D/H/psi'), - "New content") - expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')}) - expected_status.tweak('A/D/H/psi', wc_rev=nbr_of_branches + 2) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) - expected_disk.tweak('A/D/H/psi', contents="New content") - - # r(nbr_of_branches + 3) - modify and commit A/D/G/rho - svntest.main.file_write(sbox.ospath('A/D/G/rho'), - "New content") - expected_output = wc.State(wc_dir, {'A/D/G/rho' : Item(verb='Sending')}) - expected_status.tweak('A/D/G/rho', wc_rev=nbr_of_branches + 3) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) - expected_disk.tweak('A/D/G/rho', contents="New content") - - # r(nbr_of_branches + 4) - modify and commit A/B/E/beta - svntest.main.file_write(sbox.ospath('A/B/E/beta'), - "New content") - expected_output = wc.State(wc_dir, {'A/B/E/beta' : Item(verb='Sending')}) - expected_status.tweak('A/B/E/beta', wc_rev=nbr_of_branches + 4) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) - expected_disk.tweak('A/B/E/beta', contents="New content") - - # r(nbr_of_branches + 5) - modify and commit A/D/H/omega - svntest.main.file_write(sbox.ospath('A/D/H/omega'), - "New content") - expected_output = 
wc.State(wc_dir, {'A/D/H/omega' : Item(verb='Sending')}) - expected_status.tweak('A/D/H/omega', wc_rev=nbr_of_branches + 5) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) - expected_disk.tweak('A/D/H/omega', contents="New content") - - return expected_disk, expected_status + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -4705,8 +4245,7 @@ def mergeinfo_inheritance(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge r4 again, this time into A_COPY/D/G. An ancestor directory # (A_COPY/D) exists with identical local mergeinfo, so the merge @@ -4740,8 +4279,7 @@ def mergeinfo_inheritance(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge r5 into A_COPY/B. Again, r1 should be inherited from # A_COPY (Issue #2733) @@ -4779,8 +4317,7 @@ def mergeinfo_inheritance(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge r5 again, this time into A_COPY/B/E/beta. An ancestor # directory (A_COPY/B) exists with identical local mergeinfo, so @@ -4791,7 +4328,7 @@ def mergeinfo_inheritance(sbox): # run_and_verify_merge doesn't support merging to a file WCPATH # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. 
- svntest.actions.run_and_verify_svn(None, [], [], 'merge', '-c5', + svntest.actions.run_and_verify_svn([], [], 'merge', '-c5', sbox.repo_url + '/A/B/E/beta', beta_COPY_path) @@ -4866,8 +4403,7 @@ def mergeinfo_inheritance(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge r6 into A_COPY/D/H/omega, it should inherit it's nearest # ancestor's (A_COPY/D) mergeinfo (Issue #2733 with a file as the @@ -4877,7 +4413,6 @@ def mergeinfo_inheritance(sbox): # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[6]], ['U ' + omega_COPY_path + '\n', ' G ' + omega_COPY_path + '\n']), @@ -4886,8 +4421,7 @@ def mergeinfo_inheritance(sbox): omega_COPY_path) # Check that mergeinfo was properly set on A_COPY/D/H/omega - svntest.actions.run_and_verify_svn(None, - ["/A/D/H/omega:3-4,6\n"], + svntest.actions.run_and_verify_svn(["/A/D/H/omega:3-4,6\n"], [], 'propget', SVN_PROP_MERGEINFO, omega_COPY_path) @@ -4916,9 +4450,7 @@ def mergeinfo_inheritance(sbox): wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, - wc_dir) + wc_status) # In single-db mode you can't create a disconnected working copy by just # copying a subdir @@ -4930,7 +4462,7 @@ def mergeinfo_inheritance(sbox): # ## Update the disconnected WC it so it will get the most recent mergeinfo ## from the repos when merging. - #svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], 'up', + #svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', # other_wc) # ## Merge r5:4 into the root of the disconnected WC. 
@@ -4968,8 +4500,7 @@ def mergeinfo_inheritance(sbox): # expected_disk, # expected_status, # expected_skip, - # None, None, None, None, - # None, 1) + # check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -5000,7 +4531,6 @@ def mergeinfo_elision(sbox): # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['U ' + beta_COPY_path + '\n', ' U ' + beta_COPY_path + '\n']), @@ -5014,7 +4544,7 @@ def mergeinfo_elision(sbox): }) svntest.actions.run_and_verify_status(beta_COPY_path, expected_status) - svntest.actions.run_and_verify_svn(None, ["/A/B/E/beta:5\n"], [], + svntest.actions.run_and_verify_svn(["/A/B/E/beta:5\n"], [], 'propget', SVN_PROP_MERGEINFO, beta_COPY_path) @@ -5025,12 +4555,10 @@ def mergeinfo_elision(sbox): wc_status.tweak('A_COPY/B/E/beta', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, - wc_dir) + wc_status) # Update A_COPY to get all paths to the same working revision. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', wc_dir) wc_status.tweak(wc_rev=7) @@ -5065,8 +4593,7 @@ def mergeinfo_elision(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge r3:6 into A_COPY. The merge doesn't touch either of A_COPY's # subtrees with explicit mergeinfo, so those are left alone. @@ -5130,8 +4657,7 @@ def mergeinfo_elision(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # New repeat the above merge but with the --record-only option. 
# This would result in identical mergeinfo # (r4-6) on A_COPY and two of its descendants, A_COPY/D/G and @@ -5165,8 +4691,8 @@ def mergeinfo_elision(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 1, '--record-only', + [], True, True, + '--record-only', A_COPY_path) # Reverse merge r5 out of A_COPY/B/E/beta. The mergeinfo on @@ -5178,7 +4704,6 @@ def mergeinfo_elision(sbox): # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-5]], ['U ' + beta_COPY_path + '\n', ' G ' + beta_COPY_path + '\n']), @@ -5192,7 +4717,7 @@ def mergeinfo_elision(sbox): }) svntest.actions.run_and_verify_status(beta_COPY_path, expected_status) - svntest.actions.run_and_verify_svn(None, ["/A/B/E/beta:4,6\n"], [], + svntest.actions.run_and_verify_svn(["/A/B/E/beta:4,6\n"], [], 'propget', SVN_PROP_MERGEINFO, beta_COPY_path) @@ -5202,7 +4727,6 @@ def mergeinfo_elision(sbox): # run_and_verify_merge doesn't support merging to a file WCPATH # so use run_and_verify_svn. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['G ' + beta_COPY_path + '\n', ' G ' + beta_COPY_path + '\n', # Update mergeinfo @@ -5219,7 +4743,7 @@ def mergeinfo_elision(sbox): svntest.actions.run_and_verify_status(beta_COPY_path, expected_status) # Once again A_COPY/B/E/beta has no mergeinfo. 
- svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found', 'propget', SVN_PROP_MERGEINFO, beta_COPY_path) @@ -5252,7 +4776,6 @@ def mergeinfo_inheritance_and_discontinuous_ranges(sbox): os.chdir(A_COPY_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[4]], ['U ' + os.path.join("D", "G", "rho") + '\n', ' U .\n']), @@ -5263,7 +4786,7 @@ def mergeinfo_inheritance_and_discontinuous_ranges(sbox): expected_status.tweak("A_COPY", status=' M') expected_status.tweak("A_COPY/D/G/rho", status='M ') svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, ["/A:4\n"], [], + svntest.actions.run_and_verify_svn(["/A:4\n"], [], 'propget', SVN_PROP_MERGEINFO, A_COPY_path) @@ -5315,15 +4838,13 @@ def mergeinfo_inheritance_and_discontinuous_ranges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Wipe the memory of a portion of the previous merge... ### It'd be nice to use 'merge --record-only' here, but we can't (yet) ### wipe all ranges for a file due to the bug pointed out in r864719. mu_copy_path = os.path.join(A_COPY_path, 'mu') - svntest.actions.run_and_verify_svn(None, - ["property '" + SVN_PROP_MERGEINFO + svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO + "' set on '" + mu_copy_path + "'\n"], [], 'propset', SVN_PROP_MERGEINFO, '', mu_copy_path) @@ -5334,11 +4855,11 @@ def mergeinfo_inheritance_and_discontinuous_ranges(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, - None, + [], mu_copy_path) # ...and that the presence of the property is retained, even when # the value has been wiped. 
- svntest.actions.run_and_verify_svn(None, ['\n'], [], 'propget', + svntest.actions.run_and_verify_svn(['\n'], [], 'propget', SVN_PROP_MERGEINFO, mu_copy_path) #---------------------------------------------------------------------- @@ -5359,7 +4880,7 @@ def merge_to_target_with_copied_children(sbox): rho_COPY_COPY_path = sbox.ospath('A_COPY/D/G/rho_copy') # URL to URL copy A_COPY/D/G/rho to A_COPY/D/G/rho_copy - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.repo_url + '/A_COPY/D/G/rho', sbox.repo_url + '/A_COPY/D/G/rho_copy', '-m', 'copy') @@ -5376,12 +4897,10 @@ def merge_to_target_with_copied_children(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) # Merge r4 into A_COPY/D/G/rho_copy. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[4]], ['U ' + rho_COPY_COPY_path + '\n', ' U ' + rho_COPY_COPY_path + '\n']), @@ -5422,8 +4941,7 @@ def merge_to_target_with_copied_children(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -5454,7 +4972,7 @@ def merge_to_switched_path(sbox): "A " + G_COPY_path + "\n"]) # r7 - Copy A/D/G to A/D/G_COPY and commit. 
- svntest.actions.run_and_verify_svn(None, expected, [], 'copy', + svntest.actions.run_and_verify_svn(expected, [], 'copy', sbox.repo_url + "/A/D/G", G_COPY_path) @@ -5466,16 +4984,14 @@ def merge_to_switched_path(sbox): "A/D/G_COPY/tau" : Item(status=' ', wc_rev=7), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # r8 - modify and commit A/D/G_COPY/rho svntest.main.file_write(sbox.ospath('A/D/G_COPY/rho'), "New *and* improved rho content") expected_output = wc.State(wc_dir, {'A/D/G_COPY/rho' : Item(verb='Sending')}) wc_status.tweak('A/D/G_COPY/rho', wc_rev=8) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Switch A_COPY/D/G to A/D/G. wc_disk.add({ @@ -5496,17 +5012,16 @@ def merge_to_switched_path(sbox): svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_D_G_path, sbox.repo_url + "/A/D/G", expected_output, wc_disk, wc_status, - None, None, None, None, None, 1) + [], 1) # Update working copy to allow elision (if any). - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) # Set some mergeinfo on a working copy parent of our switched subtree # A_COPY/D/G. Because the subtree is switched it should *not* inherit # this mergeinfo. - svntest.actions.run_and_verify_svn(None, - ["property '" + SVN_PROP_MERGEINFO + + svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO + "' set on '" + A_COPY_path + "'" + "\n"], [], 'ps', SVN_PROP_MERGEINFO, '/A:4', A_COPY_path) @@ -5546,7 +5061,7 @@ def merge_to_switched_path(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Check that the mergeinfo set on a switched target can elide to the # repository. 
@@ -5562,19 +5077,16 @@ def merge_to_switched_path(sbox): # but as it is switched this empty mergeinfo just elides to the # repository (empty mergeinfo on a path can elide if that path doesn't # inherit *any* mergeinfo). - svntest.actions.run_and_verify_svn(None, - ["Reverted '" + A_COPY_path+ "'\n", + svntest.actions.run_and_verify_svn(["Reverted '" + A_COPY_path+ "'\n", "Reverted '" + A_COPY_D_G_path+ "'\n", "Reverted '" + A_COPY_D_G_rho_path + "'\n"], [], 'revert', '-R', wc_dir) - svntest.actions.run_and_verify_svn(None, - ["property '" + SVN_PROP_MERGEINFO + + svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO + "' set on '" + A_COPY_D_path+ "'" + "\n"], [], 'ps', SVN_PROP_MERGEINFO, '/A/D:4', A_COPY_D_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-4]], ['U ' + A_COPY_D_G_rho_path + '\n', ' U ' + A_COPY_D_G_path + '\n'], @@ -5646,7 +5158,7 @@ def merge_to_path_with_switched_children(sbox): A_COPY_gamma_path = sbox.ospath('A_COPY/D/gamma') H_COPY_2_path = sbox.ospath('A_COPY_2/D/H') - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) wc_status.tweak(wc_rev=8) @@ -5658,7 +5170,7 @@ def merge_to_path_with_switched_children(sbox): svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_G_path, sbox.repo_url + "/A_COPY_2/D/G", expected_output, wc_disk, wc_status, - None, None, None, None, None, 1) + [], 1) # Switch A_COPY/D/G/rho to A_COPY_3/D/G/rho. wc_status.tweak("A_COPY/D/G/rho", switched='S') @@ -5666,7 +5178,7 @@ def merge_to_path_with_switched_children(sbox): svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_rho_path, sbox.repo_url + "/A_COPY_3/D/G/rho", expected_output, wc_disk, wc_status, - None, None, None, None, None, 1) + [], 1) # Switch A_COPY/D/H/psi to A_COPY_2/D/H/psi. 
wc_status.tweak("A_COPY/D/H/psi", switched='S') @@ -5674,7 +5186,7 @@ def merge_to_path_with_switched_children(sbox): svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_psi_path, sbox.repo_url + "/A_COPY_2/D/H/psi", expected_output, wc_disk, wc_status, - None, None, None, None, None, 1) + [], 1) # Target with switched file child: # @@ -5721,7 +5233,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Target with switched dir child: # @@ -5772,7 +5284,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk_D, expected_status_D, expected_skip_D, - None, None, None, None, None, 1) + check_props=True) # Merge r5 from A/D into A_COPY/D. This updates the mergeinfo on the @@ -5801,7 +5313,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk_D, expected_status_D, expected_skip_D, - None, None, None, None, None, 1) + check_props=True) # Finally, merge r4:8 into A_COPY. A_COPY gets mergeinfo for r5-8 added but # since none of A_COPY's subtrees with mergeinfo are affected, none of them @@ -5866,7 +5378,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Commit changes thus far. expected_output = svntest.wc.State(wc_dir, { 'A_COPY' : Item(verb='Sending'), @@ -5881,8 +5393,7 @@ def merge_to_path_with_switched_children(sbox): wc_status.tweak('A_COPY', 'A_COPY/B/E/beta', 'A_COPY/D', 'A_COPY/D/G', 'A_COPY/D/G/rho', 'A_COPY/D/H', 'A_COPY/D/H/omega', 'A_COPY/D/H/psi', wc_rev=9) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Unswitch A_COPY/D/H/psi. 
expected_output = svntest.wc.State(wc_dir, { @@ -5907,7 +5418,7 @@ def merge_to_path_with_switched_children(sbox): svntest.actions.run_and_verify_switch(sbox.wc_dir, A_COPY_psi_path, sbox.repo_url + "/A_COPY/D/H/psi", expected_output, wc_disk, wc_status, - None, None, None, None, None, 1) + [], 1) # Non-inheritable mergeinfo ranges on a target don't prevent repeat # merges of that range on the target's children. @@ -5952,7 +5463,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, + [], True, False, '--allow-mixed-revisions', A_COPY_H_path) @@ -5963,8 +5474,7 @@ def merge_to_path_with_switched_children(sbox): # A_COPY/D has a switched child it gets r10 added as a non-inheritable # range. Repeat the same merge checking that no repeat merge is # attempted on A_COPY/D. - svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + D_path + "'\n"], [], 'ps', 'prop:name', 'propval', D_path) expected_output = svntest.wc.State(wc_dir, { @@ -5974,8 +5484,7 @@ def merge_to_path_with_switched_children(sbox): }) wc_status.tweak('A_COPY/D', wc_rev=9) wc_status.tweak('A/D', 'A_COPY/D/H', 'A_COPY/D/H/psi', wc_rev=10) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) expected_output = wc.State(A_COPY_D_path, { '' : Item(status=' U') }) @@ -6004,7 +5513,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk_D, expected_status_D, expected_skip_D, - None, None, None, None, None, + [], True, False, '--allow-mixed-revisions', A_COPY_D_path) # Repeated merge is a no-op, though we still see the notification reporting @@ -6021,7 +5530,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk_D, expected_status_D, 
expected_skip_D, - None, None, None, None, None, + [], True, False, '--allow-mixed-revisions', A_COPY_D_path) @@ -6036,11 +5545,11 @@ def merge_to_path_with_switched_children(sbox): # Revert all local changes. This leaves just the mergeinfo for r5-8 # on A_COPY and its various subtrees. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Update merge target so working revisions are uniform and all # possible elision occurs. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(10), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [], 'up', A_COPY_path) # Do the reverse merge. @@ -6113,7 +5622,7 @@ def merge_to_path_with_switched_children(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) #---------------------------------------------------------------------- # Test for issue 2047: Merge from parent dir fails while it succeeds from @@ -6139,16 +5648,16 @@ def merge_with_implicit_target_file(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update to revision 2. - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Revert the change committed in r2 os.chdir(wc_dir) # run_and_verify_merge doesn't accept file paths. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-r', '2:1', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-r', '2:1', 'A/mu') #---------------------------------------------------------------------- @@ -6173,7 +5682,7 @@ def empty_mergeinfo(sbox): # B) Empty mergeinfo elides to empty mergeinfo. 
# # C) If a merge sets empty mergeinfo on its target and that target has - # no ancestor in either the WC or the repository with explict + # no ancestor in either the WC or the repository with explicit # mergeinfo, then the target's mergeinfo is removed (a.k.a. elides # to nothing). sbox.build() @@ -6249,8 +5758,7 @@ def empty_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Now do the reverse merge into the subtree. expected_output = wc.State(H_COPY_path, { 'psi' : Item(status='G '), @@ -6281,8 +5789,7 @@ def empty_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Test areas B and C -- Reverse merge r3 into A_COPY, this would result in # empty mergeinfo on A_COPY and A_COPY/D/H, but the empty mergeinfo on the @@ -6296,13 +5803,14 @@ def empty_mergeinfo(sbox): ' U ' + H_COPY_path + '\n', ' U ' + A_COPY_path + '\n',], elides=True) - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-r4:2', sbox.repo_url + '/A', A_COPY_path) svntest.actions.run_and_verify_status(wc_dir, wc_status) # Check that A_COPY's mergeinfo is gone. - svntest.actions.run_and_verify_svn(None, [], [], 'pg', 'svn:mergeinfo', + svntest.actions.run_and_verify_svn([], '.*W200017: Property.*not found', + 'pg', 'svn:mergeinfo', A_COPY_path) #---------------------------------------------------------------------- @@ -6324,8 +5832,7 @@ def prop_add_to_child_with_mergeinfo(sbox): B_COPY_path = sbox.ospath('A_COPY/B') # Set a non-mergeinfo prop on a file. 
- svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + beta_path + "'\n"], [], 'ps', 'prop:name', 'propval', beta_path) expected_disk.tweak('A/B/E/beta', props={'prop:name' : 'propval'}) @@ -6334,13 +5841,10 @@ def prop_add_to_child_with_mergeinfo(sbox): {'A/B/E/beta' : Item(verb='Sending')}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Merge r4:5 from A/B/E/beta into A_COPY/B/E/beta. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['U ' + beta_COPY_path +'\n', ' U ' + beta_COPY_path +'\n',]), @@ -6386,8 +5890,7 @@ def prop_add_to_child_with_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- @Issue(2788,3383) @@ -6411,8 +5914,7 @@ def foreign_repos_does_not_update_mergeinfo(sbox): # Merge r3:4 (using implied peg revisions) from 'other' repos into # A_COPY/D/G. Merge should succeed, but no mergeinfo should be set. G_COPY_path = sbox.ospath('A_COPY/D/G') - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[4]], + svntest.actions.run_and_verify_svn(expected_merge_output([[4]], 'U ' + os.path.join(G_COPY_path, "rho") + '\n', True), @@ -6423,8 +5925,7 @@ def foreign_repos_does_not_update_mergeinfo(sbox): # Merge r4:5 (using explicit peg revisions) from 'other' repos into # A_COPY/B/E. Merge should succeed, but no mergeinfo should be set. 
E_COPY_path = sbox.ospath('A_COPY/B/E') - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[5]], + svntest.actions.run_and_verify_svn(expected_merge_output([[5]], 'U ' + os.path.join(E_COPY_path, "beta") +'\n', True), @@ -6437,10 +5938,10 @@ def foreign_repos_does_not_update_mergeinfo(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Set up for test of issue #3383. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Get a working copy for the foreign repos. - svntest.actions.run_and_verify_svn(None, None, [], 'co', other_repo_url, + svntest.actions.run_and_verify_svn(None, [], 'co', other_repo_url, other_wc_dir) # Create mergeinfo on the foreign repos on an existing directory and @@ -6449,13 +5950,13 @@ def foreign_repos_does_not_update_mergeinfo(sbox): # simple merges to *death* elsewhere. # Create mergeinfo on an existing directory. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', other_repo_url + '/A', os.path.join(other_wc_dir, 'A_COPY'), '-c5') # Create mergeinfo on an existing file. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', other_repo_url + '/A/D/H/psi', os.path.join(other_wc_dir, 'A_COPY', 'D', 'H', 'psi'), @@ -6463,15 +5964,15 @@ def foreign_repos_does_not_update_mergeinfo(sbox): # Add a new directory with mergeinfo in the foreign repos. new_dir = os.path.join(other_wc_dir, 'A_COPY', 'N') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', new_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'ps', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', new_dir) + svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO, '', new_dir) # Add a new file with mergeinfo in the foreign repos. 
new_file = os.path.join(other_wc_dir, 'A_COPY', 'nu') svntest.main.file_write(new_file, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', new_file) - svntest.actions.run_and_verify_svn(None, None, [], 'ps', + svntest.actions.run_and_verify_svn(None, [], 'add', new_file) + svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO, '', new_file) expected_output = wc.State(other_wc_dir,{ @@ -6482,19 +5983,19 @@ def foreign_repos_does_not_update_mergeinfo(sbox): 'A_COPY/nu' : Item(verb='Adding'), # Has empty mergeinfo }) svntest.actions.run_and_verify_commit(other_wc_dir, expected_output, - None, None, other_wc_dir, + None, [], other_wc_dir, '-m', 'create mergeinfo on foreign repos') # Now merge a diff from the foreign repos that contains the mergeinfo # addition in r7 to A_COPY. The mergeinfo diff should *not* be applied # to A_COPY since it refers to a foreign repository... - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', other_repo_url + '/A@1', other_repo_url + '/A_COPY@7', sbox.ospath('A_COPY')) #...which means there should be no mergeinfo anywhere in WC_DIR, since # this test never created any. - svntest.actions.run_and_verify_svn(None, [], [], 'pg', + svntest.actions.run_and_verify_svn([], [], 'pg', SVN_PROP_MERGEINFO, '-vR', wc_dir) @@ -6505,7 +6006,18 @@ def foreign_repos_does_not_update_mergeinfo(sbox): def avoid_reflected_revs(sbox): "avoid repeated merges for cyclic merging" - ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2897. ## + # See <http://subversion.tigris.org/issues/show_bug.cgi?id=2897>. + # + # This test cherry-picks some changes (all of them, in fact) from the + # parent branch 'A' to the child branch 'A_COPY', and then tries to + # reintegrate 'A_COPY' to 'A' (explicitly specifying a revision range + # on the source branch). It expects the changes that are unique to the + # branch 'A_COPY' to be merged to 'A'. 
+ # + # A --1----[3]---[5]----------? + # \ \_____\___ / + # \ \ \ / + # A_COPY 2-[---4-----6--7--8]- # Create a WC with a single branch sbox.build() @@ -6529,27 +6041,27 @@ def avoid_reflected_revs(sbox): # We'll consider A as the trunk and A_COPY as the feature branch # r3 - Create a tfile1 in A svntest.main.file_write(tfile1_path, tfile1_content) - svntest.actions.run_and_verify_svn(None, None, [], 'add', tfile1_path) + svntest.actions.run_and_verify_svn(None, [], 'add', tfile1_path) expected_output = wc.State(wc_dir, {'A/tfile1' : Item(verb='Adding')}) wc_status.add({'A/tfile1' : Item(status=' ', wc_rev=3)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r4 - Create a bfile1 in A_COPY svntest.main.file_write(bfile1_path, bfile1_content) - svntest.actions.run_and_verify_svn(None, None, [], 'add', bfile1_path) + svntest.actions.run_and_verify_svn(None, [], 'add', bfile1_path) expected_output = wc.State(wc_dir, {'A_COPY/bfile1' : Item(verb='Adding')}) wc_status.add({'A_COPY/bfile1' : Item(status=' ', wc_rev=4)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r5 - Create one more file in A svntest.main.file_write(tfile2_path, tfile2_content) - svntest.actions.run_and_verify_svn(None, None, [], 'add', tfile2_path) + svntest.actions.run_and_verify_svn(None, [], 'add', tfile2_path) expected_output = wc.State(wc_dir, {'A/tfile2' : Item(verb='Adding')}) wc_status.add({'A/tfile2' : Item(status=' ', wc_rev=5)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Merge r5 from /A to /A_COPY, creating r6 expected_output = wc.State(A_COPY_path, { @@ -6616,17 +6128,17 @@ def avoid_reflected_revs(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, - None, A_COPY_path, + [], True, False, + A_COPY_path, '--allow-mixed-revisions') - svntest.actions.run_and_verify_svn(None, 
None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(wc_dir, { 'A_COPY' : Item(verb='Sending'), 'A_COPY/tfile2' : Item(verb='Adding'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # Merge r3 from /A to /A_COPY, creating r7 expected_output = wc.State(A_COPY_path, { @@ -6656,21 +6168,21 @@ def avoid_reflected_revs(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, - None, A_COPY_path, + [], True, False, + A_COPY_path, '--allow-mixed-revisions') - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(wc_dir, { 'A_COPY' : Item(verb='Sending'), 'A_COPY/tfile1' : Item(verb='Adding'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # r8 - Add bfile2 to A_COPY svntest.main.file_write(bfile2_path, bfile2_content) - svntest.actions.run_and_verify_svn(None, None, [], 'add', bfile2_path) + svntest.actions.run_and_verify_svn(None, [], 'add', bfile2_path) expected_output = wc.State(wc_dir, {'A_COPY/bfile2' : Item(verb='Adding')}) wc_status.tweak(wc_rev=6) wc_status.add({ @@ -6680,7 +6192,7 @@ def avoid_reflected_revs(sbox): 'A_COPY/tfile1' : Item(status=' ', wc_rev=7), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Merge 2:8 from A_COPY(feature branch) to A(trunk). 
expected_output = wc.State(A_path, { @@ -6753,7 +6265,7 @@ def avoid_reflected_revs(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -6772,10 +6284,12 @@ def update_loses_mergeinfo(sbox): A_B_url = sbox.repo_url + '/A/B' A_B_J_url = sbox.repo_url + '/A/B/J' A_B_K_url = sbox.repo_url + '/A/B/K' - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 2.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'mkdir', '-m', 'rev 2', A_B_J_url) - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 3.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 3.\n'], [], 'mkdir', '-m', 'rev 3', A_B_K_url) @@ -6819,9 +6333,7 @@ def update_loses_mergeinfo(sbox): ) svntest.actions.run_and_verify_commit(A_C_wc_dir, expected_output, - expected_status, - None, - A_C_wc_dir) + expected_status) other_A_C_wc_dir = os.path.join(other_wc, 'A', 'C') expected_output = wc.State(other_A_C_wc_dir, {'K' : Item(status='A ')}) @@ -6871,7 +6383,7 @@ def update_loses_mergeinfo(sbox): expected_output, expected_disk, expected_status, - check_props=1) + check_props=True) #---------------------------------------------------------------------- # Tests part of issue# 2829. 
@@ -6892,10 +6404,12 @@ def merge_loses_mergeinfo(sbox): A_B_url = sbox.repo_url + '/A/B' A_B_J_url = sbox.repo_url + '/A/B/J' A_B_K_url = sbox.repo_url + '/A/B/K' - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 2.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'mkdir', '-m', 'rev 2', A_B_J_url) - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 3.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 3.\n'], [], 'mkdir', '-m', 'rev 3', A_B_K_url) @@ -6936,9 +6450,7 @@ def merge_loses_mergeinfo(sbox): ) svntest.actions.run_and_verify_commit(A_C_wc_dir, expected_output, - expected_status, - None, - A_C_wc_dir) + expected_status) expected_output = wc.State(A_C_wc_dir, {'J' : Item(status='D ')}) expected_elision_output = wc.State(A_C_wc_dir, { '' : Item(status=' U'), @@ -7012,12 +6524,10 @@ def single_file_replace_style_merge_capability(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Merge the file mu alone to rev1 - svntest.actions.run_and_verify_svn(None, - expected_merge_output(None, + svntest.actions.run_and_verify_svn(expected_merge_output(None, ['R ' + mu_path + '\n']), [], 'merge', @@ -7074,7 +6584,7 @@ def merge_to_out_of_date_target(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Commit merge to first WC. wc_status.tweak('A_COPY/D/H/psi', 'A_COPY/D/H', wc_rev=7) @@ -7084,8 +6594,7 @@ def merge_to_out_of_date_target(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, wc_dir) + wc_status) # Merge -c6 into A_COPY/D/H of other WC. 
expected_output = wc.State(other_A_COPY_H_path, { @@ -7116,7 +6625,7 @@ def merge_to_out_of_date_target(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=1) # Update A_COPY/D/H in other WC. Local mergeinfo for r6 on A_COPY/D/H # should be *merged* with r3 from first WC. @@ -7140,7 +6649,7 @@ def merge_to_out_of_date_target(sbox): expected_output, other_disk, other_status, - check_props=1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -7159,7 +6668,7 @@ def merge_with_depth_files(sbox): Acopy_url = sbox.repo_url + '/A_copy' # Copy A_url to A_copy_url - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', A_url, Acopy_url, '-m', 'create a new copy of A') @@ -7182,12 +6691,10 @@ def merge_with_depth_files(sbox): # Commit the modified contents svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Update working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', Acopy_path) # Merge r1:3 into A_copy with --depth files. The merge only affects @@ -7255,7 +6762,7 @@ def merge_with_depth_files(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1, + [], True, True, '--depth', 'files', Acopy_path) #---------------------------------------------------------------------- @@ -7292,18 +6799,17 @@ def merge_away_subtrees_noninheritable_ranges(sbox): nu_COPY_path = sbox.ospath('A_COPY/nu') # Make a change to directory A/D/H and commit as r8. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'update', wc_dir) svntest.actions.run_and_verify_svn( - None, ["property 'prop:name' set on '" + H_path + "'\n"], [], + ["property 'prop:name' set on '" + H_path + "'\n"], [], 'ps', 'prop:name', 'propval', H_path) expected_output = svntest.wc.State(wc_dir, { 'A/D/H' : Item(verb='Sending'),}) wc_status.tweak(wc_rev=7) wc_status.tweak('A/D/H', wc_rev=8) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Merge r6:8 --depth immediates to A_COPY/D. This should merge the # prop change from r8 to A_COPY/H but not the change to A_COPY/D/H/omega @@ -7352,7 +6858,7 @@ def merge_away_subtrees_noninheritable_ranges(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1, + [], True, True, '--depth', 'immediates', D_COPY_path) # Repeat the previous merge but at default depth of infinity. The change @@ -7384,36 +6890,35 @@ def merge_away_subtrees_noninheritable_ranges(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) + [], True, True) # Now test the problem described in # http://svn.haxx.se/dev/archive-2008-12/0133.shtml. # # First revert all local mods. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # r9: Merge all available revisions from A to A_COPY at a depth of empty # this will create non-inheritable mergeinfo on A_COPY. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) wc_status.tweak(wc_rev=8) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '--depth', 'empty', sbox.repo_url + '/A', A_COPY_path) wc_status.tweak('A_COPY', wc_rev=9) expected_output = wc.State(wc_dir, {'A_COPY' : Item(verb='Sending')}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # r10: Add the file A/nu. svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) expected_output = wc.State(wc_dir, {'A/nu' : Item(verb='Adding')}) wc_status.add({'A/nu' : Item(status=' ', wc_rev=10)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Now merge -c10 from A to A_COPY. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State('', { 'nu': Item(status='A '), }) @@ -7481,8 +6986,7 @@ def merge_away_subtrees_noninheritable_ranges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) os.chdir(saved_cwd) # If a merge target has inheritable and non-inheritable ranges and has a @@ -7492,21 +6996,20 @@ def merge_away_subtrees_noninheritable_ranges(sbox): # *and* the mergeinfo inherited from it's parent. # # First revert all local changes and remove A_COPY/C/nu from disk. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Make a text change to A_COPY_2/mu in r11 and then merge that # change to A/mu in r12. 
This will create mergeinfo of '/A_COPY_2/mu:11' # on A/mu. svntest.main.file_write(mu_2_path, 'new content') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', 'log msg', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', wc_dir) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[11]], ['U ' + mu_path + '\n', ' U ' + mu_path + '\n']), [], 'merge', '-c11', sbox.repo_url + '/A_COPY_2/mu', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', 'log msg', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', wc_dir) # Now merge r12 from A to A_COPY. A_COPY/mu should get the mergeinfo from @@ -7579,16 +7082,14 @@ def merge_away_subtrees_noninheritable_ranges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, - False) # No dry-run. + [], True, False) os.chdir(saved_cwd) # Test for issue #3392 # # Revert local changes and update. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Merge r8 from A/D/H to A_COPY_D/H at depth empty. Since r8 affects only # A_COPY/D/H itself, the resulting mergeinfo is inheritable. Commit this @@ -7622,11 +7123,11 @@ def merge_away_subtrees_noninheritable_ranges(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1, + [], True, True, '--depth', 'empty', H_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', '-m', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'log msg', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Now reverse the prior merge. 
Issue #3392 manifests itself here with # a mergeinfo parsing error: # >svn merge %url%/A/D/H merge_tests-62\A_COPY_2\D\H -c-8 @@ -7651,29 +7152,27 @@ def merge_away_subtrees_noninheritable_ranges(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Test issue #3407 'Shallow merges incorrectly set mergeinfo on children'. # # Revert all local mods. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Merge all available changes from A to A_COPY at --depth empty. Only the # mergeinfo on A_COPY should be affected. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[9,13]], [' U ' + A_COPY_path + '\n']), [], 'merge', '--depth', 'empty', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, - [A_COPY_path + ' - /A:2-13*\n'], + svntest.actions.run_and_verify_svn([A_COPY_path + ' - /A:2-13*\n'], [], 'pg', SVN_PROP_MERGEINFO, '-R', A_COPY_path) # Merge all available changes from A to A_COPY at --depth files. Only the # mergeinfo on A_COPY and its file children should be affected. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Revisions 2-13 are already merged to A_COPY and now they will be merged # to A_COPY's file children. 
Due to the way we drive the merge editor # r2-3, which are inoperative on A_COPY's file children, do not show up @@ -7687,15 +7186,14 @@ def merge_away_subtrees_noninheritable_ranges(sbox): ' U %s\n' % (A_COPY_path), ' G %s\n' % (mu_COPY_path), ' U %s\n' % (nu_COPY_path),]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '--depth', 'files', sbox.repo_url + '/A', A_COPY_path) expected_output = svntest.verify.UnorderedOutput( [A_COPY_path + ' - /A:2-13*\n', mu_COPY_path + ' - /A/mu:2-13\n', nu_COPY_path + ' - /A/nu:10-13\n',]) - svntest.actions.run_and_verify_svn(None, - expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'pg', SVN_PROP_MERGEINFO, '-R', A_COPY_path) @@ -7729,14 +7227,13 @@ def merge_to_sparse_directories(sbox): expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')}) wc_status.tweak('A/mu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) wc_disk.tweak('A/mu', contents="New content") # r8 - Add a prop to A/D and commit. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + D_path + "'\n"], [], 'ps', 'prop:name', 'propval', D_path) expected_output = svntest.wc.State(wc_dir, { @@ -7744,14 +7241,12 @@ def merge_to_sparse_directories(sbox): }) wc_status.tweak(wc_rev=7) wc_status.tweak('A/D', wc_rev=8) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # r9 - Add a prop to A and commit. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + A_path + "'\n"], [], 'ps', 'prop:name', 'propval', A_path) expected_output = svntest.wc.State(wc_dir, { @@ -7759,8 +7254,7 @@ def merge_to_sparse_directories(sbox): }) wc_status.tweak(wc_rev=8) wc_status.tweak('A', wc_rev=9) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Do an --immediates checkout of A_COPY immediates_dir = sbox.add_wc_path('immediates') @@ -7779,7 +7273,7 @@ def merge_to_sparse_directories(sbox): svntest.actions.run_and_verify_checkout(sbox.repo_url + "/A_COPY", immediates_dir, expected_output, expected_disk, - None, None, None, None, + [], "--depth", "immediates") # Merge r4:9 into the immediates WC. @@ -7836,8 +7330,7 @@ def merge_to_sparse_directories(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Do a --files checkout of A_COPY files_dir = sbox.add_wc_path('files') @@ -7850,7 +7343,7 @@ def merge_to_sparse_directories(sbox): svntest.actions.run_and_verify_checkout(sbox.repo_url + "/A_COPY", files_dir, expected_output, expected_disk, - None, None, None, None, + [], "--depth", "files") # Merge r4:9 into the files WC. 
@@ -7893,8 +7386,7 @@ def merge_to_sparse_directories(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Do an --empty checkout of A_COPY empty_dir = sbox.add_wc_path('empty') @@ -7903,7 +7395,7 @@ def merge_to_sparse_directories(sbox): svntest.actions.run_and_verify_checkout(sbox.repo_url + "/A_COPY", empty_dir, expected_output, expected_disk, - None, None, None, None, + [], "--depth", "empty") # Merge r4:9 into the empty WC. @@ -7940,16 +7432,15 @@ def merge_to_sparse_directories(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Check that default depth for merge is infinity. # # Revert the previous changes to the immediates WC and update one # child in that WC to depth infinity. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', immediates_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', '--set-depth', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'infinity', os.path.join(immediates_dir, 'D')) # Now merge r6 into the immediates WC, even though the root of the @@ -8004,8 +7495,7 @@ def merge_to_sparse_directories(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -8032,16 +7522,16 @@ def merge_old_and_new_revs_from_renamed_dir(sbox): svntest.main.file_write(mu_path, "This is the file 'mu' modified.\n") expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')}) wc_status.add({'A/mu' : Item(status=' ', wc_rev=3)}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Move A to A_MOVED - svntest.actions.run_and_verify_svn(None, ['\n', 
'Committed revision 4.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 4.\n'], [], 'mv', '-m', 'mv A to A_MOVED', A_url, A_MOVED_url) # Update the working copy to get A_MOVED - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Make a modification to A_MOVED/mu svntest.main.file_write(A_MOVED_mu_path, "This is 'mu' in A_MOVED.\n") @@ -8095,7 +7585,7 @@ def merge_old_and_new_revs_from_renamed_dir(sbox): 'A_COPY/D/H/psi' : Item(status=' ', wc_rev=4), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Merge /A_MOVED to /A_COPY - this happens in multiple passes # because /A_MOVED has renames in its history between the boundaries @@ -8162,8 +7652,7 @@ def merge_old_and_new_revs_from_renamed_dir(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - True, False) + [], True, False) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -8205,11 +7694,11 @@ def merge_with_child_having_different_rev_ranges_to_merge(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + expected_status) + svntest.actions.run_and_verify_svn(None, [], 'cp', A_url, A_COPY_url, '-m', 'rev 3') # Update the working copy to get A_COPY - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_status.add({'A_COPY' : Item(status=' '), 'A_COPY/mu' : Item(status=' '), 'A_COPY/C' : Item(status=' '), @@ -8234,8 +7723,8 @@ def merge_with_child_having_different_rev_ranges_to_merge(sbox): svntest.main.file_write(mu_path, tweaked_7th_line) 
expected_status.tweak('A/mu', wc_rev=4) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + expected_status) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_status.tweak(wc_rev=4) tweaked_17th_line = tweaked_7th_line.replace('line17', 'LINE 17') svntest.main.file_write(mu_path, tweaked_17th_line) @@ -8249,16 +7738,15 @@ def merge_with_child_having_different_rev_ranges_to_merge(sbox): expected_status.tweak('A', wc_rev=5) expected_status.tweak('A/mu', wc_rev=5) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) tweaked_27th_line = tweaked_17th_line.replace('line27', 'LINE 27') svntest.main.file_write(mu_path, tweaked_27th_line) expected_status.tweak('A/mu', wc_rev=6) expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Merge r5 to A_COPY/mu svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['U ' + A_COPY_mu_path + '\n', ' U ' + A_COPY_mu_path + '\n']), @@ -8327,10 +7815,9 @@ def merge_with_child_having_different_rev_ranges_to_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Revert r5 and r6 on A_COPY/mu svntest.actions.run_and_verify_svn( - None, expected_merge_output([[6,5]], ['G ' + A_COPY_mu_path + '\n', ' G ' + A_COPY_mu_path + '\n']), @@ -8361,7 +7848,7 @@ def merge_with_child_having_different_rev_ranges_to_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) expected_disk.add({'' : Item(props={SVN_PROP_MERGEINFO : '/A:4-6', 'prop1' : 'val1'})}) @@ -8385,10 +7872,9 @@ def merge_with_child_having_different_rev_ranges_to_merge(sbox): expected_disk, expected_status, expected_skip, - None, 
None, None, None, None, 1) + check_props=True) #Revert r5 on A_COPY/mu svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-5]], ['G ' + A_COPY_mu_path + '\n', ' G ' + A_COPY_mu_path + '\n']), @@ -8422,7 +7908,7 @@ def merge_with_child_having_different_rev_ranges_to_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -8444,7 +7930,8 @@ def merge_old_and_new_revs_from_renamed_file(sbox): mu_MOVED_path = sbox.ospath('A/mu_MOVED') # Copy mu to mu_COPY - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 2.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'cp', '-m', 'cp mu to mu_COPY', mu_url, mu_COPY_url) @@ -8454,15 +7941,16 @@ def merge_old_and_new_revs_from_renamed_file(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Move mu to mu_MOVED - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 4.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 4.\n'], [], 'mv', '-m', 'mv mu to mu_MOVED', mu_url, mu_MOVED_url) # Update the working copy to get mu_MOVED - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Make a modification to mu_MOVED svntest.main.file_write(mu_MOVED_path, "This is 'mu' in mu_MOVED.\n") @@ -8474,7 +7962,7 @@ def merge_old_and_new_revs_from_renamed_file(sbox): 'A/mu_COPY' : Item(status=' ', wc_rev=4), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Merge A/mu_MOVED to A/mu_COPY - this happens in multiple 
passes # because A/mu_MOVED has renames in its history between the @@ -8484,11 +7972,11 @@ def merge_old_and_new_revs_from_renamed_file(sbox): ' U %s\n' % (mu_COPY_path), 'G %s\n' % (mu_COPY_path), ' G %s\n' % (mu_COPY_path),]) - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-r', '1:5', mu_MOVED_url, mu_COPY_path) - svntest.actions.run_and_verify_svn(None, ['/A/mu:2-3\n', + svntest.actions.run_and_verify_svn(['/A/mu:2-3\n', '/A/mu_MOVED:4-5\n'], [], 'propget', SVN_PROP_MERGEINFO, mu_COPY_path) @@ -8512,13 +8000,13 @@ def merge_with_auto_rev_range_detection(sbox): A_COPY_path = sbox.ospath('A_COPY') # Create B1 inside A - svntest.actions.run_and_verify_svn(None, ["A " + B1_path + "\n"], + svntest.actions.run_and_verify_svn(["A " + B1_path + "\n"], [], 'mkdir', B1_path) # Add a file mu inside B1 svntest.main.file_write(B1_mu_path, "This is the file 'mu'.\n") - svntest.actions.run_and_verify_svn(None, ["A " + B1_mu_path + "\n"], + svntest.actions.run_and_verify_svn(["A " + B1_mu_path + "\n"], [], 'add', B1_mu_path) # Commit B1 and B1/mu @@ -8532,10 +8020,11 @@ def merge_with_auto_rev_range_detection(sbox): 'A/B1/mu' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Copy A to A_COPY - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 3.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 3.\n'], [], 'cp', '-m', 'cp A to A_COPY', A_url, A_COPY_url) @@ -8548,10 +8037,10 @@ def merge_with_auto_rev_range_detection(sbox): 'A/B1/mu' : Item(status=' ', wc_rev=4), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update the working copy to get A_COPY - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', 
wc_dir) # Merge /A to /A_COPY expected_output = wc.State(A_COPY_path, { @@ -8617,8 +8106,7 @@ def merge_with_auto_rev_range_detection(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1) + [], True, True) #---------------------------------------------------------------------- # Test for issue 2818: Provide a 'merge' API which allows for merging of @@ -8648,28 +8136,24 @@ def cherry_picking(sbox): wc_status.tweak(wc_rev='6') svntest.actions.run_and_verify_update(wc_dir, expected_output, wc_disk, wc_status, - None, None, None, None, None, True) + check_props=True) # Make some prop changes to some dirs. - svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + G_path + "'\n"], [], 'ps', 'prop:name', 'propval', G_path) expected_output = svntest.wc.State(wc_dir, {'A/D/G': Item(verb='Sending'),}) wc_status.tweak('A/D/G', wc_rev=7) wc_disk.tweak('A/D/G', props={'prop:name' : 'propval'}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) - svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + H_path + "'\n"], [], 'ps', 'prop:name', 'propval', H_path) expected_output = svntest.wc.State(wc_dir, {'A/D/H': Item(verb='Sending'),}) wc_status.tweak('A/D/H', wc_rev=8) wc_disk.tweak('A/D/H', props={'prop:name' : 'propval'}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Do multiple additive merges to a file" # Merge -r2:4 -c6 into A_COPY/D/G/rho. @@ -8678,7 +8162,6 @@ def cherry_picking(sbox): # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. 
svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3,4],[6]], ['U ' + rho_COPY_path + '\n', ' U ' + rho_COPY_path + '\n', @@ -8690,7 +8173,7 @@ def cherry_picking(sbox): expected_status = wc.State(rho_COPY_path, {'' : Item(status='MM', wc_rev=6)}) svntest.actions.run_and_verify_status(rho_COPY_path, expected_status) - svntest.actions.run_and_verify_svn(None, ["/A/D/G/rho:3-4,6\n"], [], + svntest.actions.run_and_verify_svn(["/A/D/G/rho:3-4,6\n"], [], 'propget', SVN_PROP_MERGEINFO, rho_COPY_path) @@ -8701,7 +8184,7 @@ def cherry_picking(sbox): ['U ' + omega_COPY_path + '\n', ' U ' + H_COPY_path + '\n', ' G ' + H_COPY_path + '\n',]) - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c6', '-c8', sbox.repo_url + '/A/D/H', H_COPY_path) @@ -8713,8 +8196,7 @@ def cherry_picking(sbox): 'chi' : Item(status=' ', wc_rev=6), 'omega': Item(status='M ', wc_rev=6),}) svntest.actions.run_and_verify_status(H_COPY_path, expected_status) - svntest.actions.run_and_verify_svn(None, - [H_COPY_path + " - /A/D/H:6,8\n"], + svntest.actions.run_and_verify_svn([H_COPY_path + " - /A/D/H:6,8\n"], [], 'propget', '-R', SVN_PROP_MERGEINFO, H_COPY_path) @@ -8728,7 +8210,7 @@ def cherry_picking(sbox): ' G ' + A_COPY_path + '\n', ' G ' + H_COPY_path + '\n',], elides=True) - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c-3', '-c-6', sbox.repo_url + '/A', A_COPY_path) @@ -8760,7 +8242,7 @@ def cherry_picking(sbox): # Construct proper regex for '\' infested Windows paths. 
if sys.platform == 'win32': expected_out = expected_out.replace("\\", "\\\\") - svntest.actions.run_and_verify_svn(None, expected_out, [], + svntest.actions.run_and_verify_svn(expected_out, [], 'propget', '-R', SVN_PROP_MERGEINFO, A_COPY_path) @@ -8779,7 +8261,7 @@ def cherry_picking(sbox): ' U ' + rho_COPY_path + '\n', ' G ' + rho_COPY_path + '\n'], elides=True) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'merge', + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-r2:3', '-c-4', '-r4:7', sbox.repo_url + '/A/D', D_COPY_path) @@ -8801,7 +8283,7 @@ def cherry_picking(sbox): # Construct proper regex for '\' infested Windows paths. if sys.platform == 'win32': expected_out = expected_out.replace("\\", "\\\\") - svntest.actions.run_and_verify_svn(None, expected_out, [], + svntest.actions.run_and_verify_svn(expected_out, [], 'propget', '-R', SVN_PROP_MERGEINFO, D_COPY_path) @@ -8839,7 +8321,7 @@ def propchange_of_subdir_raises_conflict(sbox): 'A/B/E' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Merge /A/B to /A_COPY/B ie., r1 to r3 with depth files expected_output = wc.State(A_COPY_B_path, { @@ -8878,8 +8360,8 @@ def propchange_of_subdir_raises_conflict(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1, '--depth', 'files', + [], True, True, + '--depth', 'files', A_COPY_B_path) # Merge /A/B to /A_COPY/B ie., r1 to r3 with infinite depth @@ -8918,8 +8400,7 @@ def propchange_of_subdir_raises_conflict(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1) + [], 1, 1) #---------------------------------------------------------------------- # Test for issue #2971: Reverse merge of prop add segfaults if @@ -8939,16 +8420,14 @@ def reverse_merge_prop_add_on_child(sbox): G_COPY_path = sbox.ospath('A_COPY/D/G') # Make some prop changes to some dirs. 
- svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + G_path + "'\n"], [], 'ps', 'prop:name', 'propval', G_path) expected_output = svntest.wc.State(wc_dir, {'A/D/G': Item(verb='Sending'),}) wc_status.tweak('A/D/G', wc_rev=3) wc_disk.tweak('A/D/G', props={'prop:name' : 'propval'}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Merge -c3's prop add to A_COPY/D/G expected_output = wc.State(G_COPY_path, { @@ -8981,8 +8460,7 @@ def reverse_merge_prop_add_on_child(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Now merge -c-3 but target the previous target's parent instead. expected_output = wc.State(D_COPY_path, { @@ -9028,8 +8506,7 @@ def reverse_merge_prop_add_on_child(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- @XFail() @@ -9053,7 +8530,7 @@ def merge_target_with_non_inheritable_mergeinfo(sbox): # Make a modifications to A/B/lambda and add A/B/E/newfile svntest.main.file_write(lambda_path, "This is the file 'lambda' modified.\n") svntest.main.file_write(newfile_path, "This is the file 'newfile'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', newfile_path) + svntest.actions.run_and_verify_svn(None, [], 'add', newfile_path) expected_output = wc.State(wc_dir, { 'A/B/lambda' : Item(verb='Sending'), 'A/B/E/newfile' : Item(verb='Adding'), @@ -9063,7 +8540,7 @@ def merge_target_with_non_inheritable_mergeinfo(sbox): 'A/B/E/newfile' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Merge /A/B to /A_COPY/B ie., r1 to r3 with depth immediates expected_output 
= wc.State(A_COPY_B_path, { @@ -9106,8 +8583,8 @@ def merge_target_with_non_inheritable_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1, '--depth', 'immediates', + [], True, True, + '--depth', 'immediates', A_COPY_B_path) # Merge /A/B to /A_COPY/B ie., r1 to r3 with infinite depth @@ -9146,8 +8623,7 @@ def merge_target_with_non_inheritable_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1) + [], True, True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -9173,13 +8649,11 @@ def self_reverse_merge(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # update to HEAD so that the to-be-undone revision is found in the # implicit mergeinfo (the natural history) of the target. - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) expected_output = wc.State(wc_dir, { 'A/mu' : Item(status='U ') @@ -9200,12 +8674,12 @@ def self_reverse_merge(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + [], True, True) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # record dummy self mergeinfo to test the fact that self-reversal should work # irrespective of mergeinfo. - svntest.actions.run_and_verify_svn(None, None, [], 'ps', SVN_PROP_MERGEINFO, + svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO, '/:1', wc_dir) # Bad svntest.main.greek_state does not have '', so adding it explicitly. 
@@ -9224,7 +8698,7 @@ def self_reverse_merge(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) + [], True, True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -9251,9 +8725,9 @@ def ignore_ancestry_and_mergeinfo(sbox): 'A/B/lambda' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Merge /A/B to /A_COPY/B ie., r1 to r3 with depth immediates expected_output = wc.State(A_COPY_B_path, { @@ -9290,10 +8764,10 @@ def ignore_ancestry_and_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) + [], True, True) # Now, revert lambda and repeat the merge. Nothing should happen. - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', A_COPY_lambda_path) expected_output.remove('lambda') expected_disk.tweak('lambda', contents="This is the file 'lambda'.\n") @@ -9309,7 +8783,7 @@ def ignore_ancestry_and_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) + [], True, True) # Now, try the merge again with --ignore-ancestry. We should get # lambda re-modified. 
*/ @@ -9330,7 +8804,7 @@ def ignore_ancestry_and_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1, + [], True, True, '--ignore-ancestry', A_COPY_B_path) #---------------------------------------------------------------------- @@ -9361,7 +8835,7 @@ def merge_from_renamed_branch_fails_while_avoiding_repeat_merge(sbox): svntest.main.run_svn(None, 'cp', A_C_url, A_COPY_C_url, '-m', 'copy...') svntest.main.run_svn(None, 'mv', A_COPY_C_url, A_RENAMED_C_url, '-m', 'rename...') - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) svntest.main.file_write(A_RENAMED_C_file1_path, "This is the file1.\n") svntest.main.run_svn(None, 'add', A_RENAMED_C_file1_path) @@ -9373,8 +8847,7 @@ def merge_from_renamed_branch_fails_while_avoiding_repeat_merge(sbox): 'file1' : Item(status=' ', wc_rev=4), }) svntest.actions.run_and_verify_commit(A_RENAMED_C_path, expected_output, - expected_status, None, - A_RENAMED_C_path) + expected_status) svntest.main.file_write(A_RENAMED_C_file1_path, "This is the file1 modified.\n") expected_output = wc.State(A_RENAMED_C_path, { @@ -9382,8 +8855,7 @@ def merge_from_renamed_branch_fails_while_avoiding_repeat_merge(sbox): }) expected_status.tweak('file1', wc_rev=5) svntest.actions.run_and_verify_commit(A_RENAMED_C_path, expected_output, - expected_status, None, - A_RENAMED_C_path) + expected_status) expected_skip = wc.State(A_C_path, {}) expected_output = wc.State(A_C_path, { @@ -9410,7 +8882,7 @@ def merge_from_renamed_branch_fails_while_avoiding_repeat_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) + [], True, True) expected_output = wc.State(A_C_path, { 'file1' : Item(status='U '), @@ -9434,7 +8906,7 @@ def merge_from_renamed_branch_fails_while_avoiding_repeat_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) + [], True, True) 
#---------------------------------------------------------------------- # Test for part of issue #2877: 'do subtree merge only if subtree has @@ -9455,7 +8927,8 @@ def merge_source_normalization_and_subtree_merges(sbox): wc_disk, wc_status = set_up_branch(sbox) # r7 - Move A to A_MOVED - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 7.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 7.\n'], [], 'mv', '-m', 'mv A to A_MOVED', sbox.repo_url + '/A', sbox.repo_url + '/A_MOVED') @@ -9487,7 +8960,7 @@ def merge_source_normalization_and_subtree_merges(sbox): wc_status.tweak(status=' ', wc_rev=7) # Update the WC - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # r8 - Make a text mod to 'A_MOVED/D/G/tau' @@ -9496,8 +8969,7 @@ def merge_source_normalization_and_subtree_merges(sbox): expected_output = wc.State(wc_dir, {'A_MOVED/D/G/tau' : Item(verb='Sending')}) wc_status.tweak('A_MOVED/D/G/tau', status=' ', wc_rev=8) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Merge -c4 URL/A_MOVED/D/G A_COPY/D/G. # @@ -9535,8 +9007,7 @@ def merge_source_normalization_and_subtree_merges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge -c8 URL/A_MOVED/D A_COPY/D. # @@ -9587,8 +9058,7 @@ def merge_source_normalization_and_subtree_merges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # Tests for issue #3067: 'subtrees with intersecting mergeinfo, that don't @@ -9612,16 +9082,14 @@ def new_subtrees_should_not_break_merge(sbox): # Create 'A/D/H/nu', commit it as r7, make a text mod to it in r8. 
svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')}) wc_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=7)}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) svntest.main.file_write(nu_path, "New content") expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')}) wc_status.tweak('A/D/H/nu', wc_rev=8) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Merge r7 to A_COPY/D/H, then, so it has it's own explicit mergeinfo, # then merge r8 to A_COPY/D/H/nu so it too has explicit mergeinfo. @@ -9655,12 +9123,11 @@ def new_subtrees_should_not_break_merge(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # run_and_verify_merge doesn't support merging to a file WCPATH # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[8]], ['U ' + nu_COPY_path + '\n', ' G ' + nu_COPY_path + '\n']), @@ -9732,8 +9199,7 @@ def new_subtrees_should_not_break_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) expected_output = wc.State(D_COPY_path, { 'H/omega': Item(status='G '), }) @@ -9778,8 +9244,7 @@ def new_subtrees_should_not_break_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Now once again merge r6 to A_COPY. 
A_COPY already has r6 in its mergeinfo # so we expect only subtree merges on A_COPY/D, A_COPY_D_H, and # A_COPY/D/H/nu. The fact that A/D/H/nu doesn't exist at r6 should not cause @@ -9850,13 +9315,12 @@ def new_subtrees_should_not_break_merge(sbox): expected_disk_1, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Commit this merge as r9. # # Update the wc first to make setting the expected status a bit easier. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) wc_status.tweak(wc_rev=8) expected_output = wc.State(wc_dir, { @@ -9872,15 +9336,14 @@ def new_subtrees_should_not_break_merge(sbox): 'A_COPY/D/H/omega', wc_rev=9) wc_status.add({'A_COPY/D/H/nu' : Item(status=' ', wc_rev=9)}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Update the WC. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) wc_status.tweak(wc_rev=9) # Yet another test for issue #3067. Merge -rX:Y, where X>Y (reverse merge) - # and the merge target has a subtree that came into existance at some rev + # and the merge target has a subtree that came into existence at some rev # N where X < N < Y. This merge should simply delete the subtree. # # For this test merge -r9:2 to A_COPY. This should revert all the merges @@ -9950,14 +9413,12 @@ def new_subtrees_should_not_break_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Revert the previous merge, then merge r4 to A_COPY/D/G/rho. Commit # this merge as r10. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[4]], ['U ' + rho_COPY_path + '\n', ' G ' + rho_COPY_path + '\n']), @@ -9965,9 +9426,8 @@ def new_subtrees_should_not_break_merge(sbox): expected_output = wc.State(wc_dir, { 'A_COPY/D/G/rho' : Item(verb='Sending'),}) wc_status.tweak('A_COPY/D/G/rho', wc_rev=10) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(10), [], + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) + svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [], 'up', wc_dir) wc_status.tweak(wc_rev=10) @@ -10022,8 +9482,7 @@ def new_subtrees_should_not_break_merge(sbox): expected_disk_1, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -10106,8 +9565,7 @@ def dont_add_mergeinfo_from_own_history(sbox): expected_A_COPY_disk, expected_A_COPY_status, expected_A_COPY_skip, - None, None, None, None, - None, 1) + check_props=True) # Change 'A_COPY/mu' svntest.main.file_write(mu_COPY_path, "New content") @@ -10121,9 +9579,7 @@ def dont_add_mergeinfo_from_own_history(sbox): wc_status.tweak('A_COPY', 'A_COPY/D/H/psi', 'A_COPY/mu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, - wc_dir) + wc_status) # Merge r7 back to the 'A' expected_output = wc.State(A_path, { @@ -10185,13 +9641,11 @@ def dont_add_mergeinfo_from_own_history(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, None, None, None, - None, True, False, + [], True, False, '--allow-mixed-revisions', A_path) # Revert all local mods - svntest.actions.run_and_verify_svn(None, - ["Reverted '" + A_path + 
"'\n", + svntest.actions.run_and_verify_svn(["Reverted '" + A_path + "'\n", "Reverted '" + mu_path + "'\n"], [], 'revert', '-R', wc_dir) @@ -10200,8 +9654,8 @@ def dont_add_mergeinfo_from_own_history(sbox): # 'A_MOVED', but 'A_MOVED@3' is 'A', so again this mergeinfo is filtered # out, leaving the only the mergeinfo created from the merge itself: # '/A_COPY:7'. - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 8.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 8.\n'], [], 'move', sbox.repo_url + '/A', sbox.repo_url + '/A_MOVED', @@ -10285,8 +9739,7 @@ def dont_add_mergeinfo_from_own_history(sbox): expected_output, wc_disk, wc_status, - None, None, None, None, None, - True) + check_props=True) expected_output = wc.State(A_MOVED_path, { 'mu' : Item(status='U '), @@ -10326,12 +9779,10 @@ def dont_add_mergeinfo_from_own_history(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, None, None, None, - None, 1) + check_props=True) # Revert all local mods - svntest.actions.run_and_verify_svn(None, - ["Reverted '" + A_MOVED_path + "'\n", + svntest.actions.run_and_verify_svn(["Reverted '" + A_MOVED_path + "'\n", "Reverted '" + mu_MOVED_path + "'\n"], [], 'revert', '-R', wc_dir) @@ -10364,7 +9815,7 @@ def dont_add_mergeinfo_from_own_history(sbox): "A " + sbox.ospath('A/D/H/psi') + "\n", "Exported revision 1.\n",] ) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'export', sbox.repo_url + '/A@1', A_path) expected_output = svntest.verify.UnorderedOutput( @@ -10388,7 +9839,7 @@ def dont_add_mergeinfo_from_own_history(sbox): "A " + sbox.ospath('A/D/H/omega') + "\n", "A " + sbox.ospath('A/D/H/psi') + "\n",] ) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'add', A_path) # Commit the new 'A' as r9 expected_output = wc.State(wc_dir, { @@ -10437,9 
+9888,7 @@ def dont_add_mergeinfo_from_own_history(sbox): wc_status.tweak(status=' ') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, - wc_dir) + wc_status) expected_output = wc.State(A_path, { 'mu' : Item(status='U '), @@ -10502,10 +9951,10 @@ def dont_add_mergeinfo_from_own_history(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) @Issue(3094) def merge_range_predates_history(sbox): "merge range predates history" @@ -10523,7 +9972,7 @@ def merge_range_predates_history(sbox): # Tweak a file and commit. (r2) svntest.main.file_append(iota_path, "More data.\n") - svntest.main.run_svn(None, 'ci', '-m', 'tweak iota', wc_dir) + sbox.simple_commit(message='tweak iota') # Create our trunk and branches directory, and update working copy. (r3) svntest.main.run_svn(None, 'mkdir', trunk_url, branches_url, @@ -10533,7 +9982,7 @@ def merge_range_predates_history(sbox): # Add a file to the trunk and commit. (r4) svntest.main.file_append(trunk_file_path, "This is the file 'file'.\n") svntest.main.run_svn(None, 'add', trunk_file_path) - svntest.main.run_svn(None, 'ci', '-m', 'add trunk file', wc_dir) + sbox.simple_commit(message='add trunk file') # Branch trunk from r3, and update working copy. 
(r5) svntest.main.run_svn(None, 'cp', trunk_url, branch_url, '-r3', @@ -10545,7 +9994,7 @@ def merge_range_predates_history(sbox): expected_output = expected_merge_output([[4,5]], ['A ' + branch_file_path + '\n', ' U ' + branch_path + '\n']) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'merge', + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', trunk_url, branch_path) #---------------------------------------------------------------------- @@ -10636,9 +10085,7 @@ def foreign_repos(sbox): + added_contents) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) svntest.actions.verify_disk(wc_dir, expected_disk, True) # Now, merge our committed revision into a working copy of another @@ -10647,7 +10094,7 @@ def foreign_repos(sbox): ### TODO: Use run_and_verify_merge() ### svntest.main.run_svn(None, 'merge', '-c2', sbox.repo_url, wc_dir2) - svntest.main.run_svn(None, 'ci', '-m', 'Merge from foreign repos', wc_dir2) + sbox2.simple_commit(message='Merge from foreign repo') svntest.actions.verify_disk(wc_dir2, expected_disk, True) # Now, let's make a third checkout -- our second from the original @@ -10656,7 +10103,7 @@ def foreign_repos(sbox): # This is a regression test for issue #3623 in which wc_dir2 had the # correct state but the committed state was wrong. 
wc_dir3 = sbox.add_wc_path('wc3') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox2.repo_url, wc_dir3) svntest.actions.verify_disk(wc_dir3, expected_disk, True) @@ -10703,13 +10150,11 @@ def foreign_repos_uuid(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) svntest.actions.verify_disk(wc_dir, expected_disk, True) svntest.main.run_svn(None, 'merge', '-c2', sbox.repo_url, wc_dir2) - svntest.main.run_svn(None, 'ci', '-m', 'Merge from foreign repos', wc_dir2) + sbox2.simple_commit(message='Merge from foreign repos') # Run info to check the copied rev to make sure it's right zeta2_path = os.path.join(wc_dir2, 'A', 'D', 'G', 'zeta') @@ -10813,9 +10258,7 @@ def foreign_repos_2_url(sbox): + added_contents) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) svntest.actions.verify_disk(wc_dir, expected_disk, True) # Now, "tag" the new state of the repository. 
@@ -10831,7 +10274,7 @@ def foreign_repos_2_url(sbox): svntest.main.run_svn(None, 'merge', sbox.repo_url + '/A-tag1', sbox.repo_url + '/A-tag2', os.path.join(wc_dir2, 'A')) - svntest.main.run_svn(None, 'ci', '-m', 'Merge from foreign repos', wc_dir2) + sbox2.simple_commit(message='Merge from foreign repos') svntest.actions.verify_disk(wc_dir2, expected_disk, True) #---------------------------------------------------------------------- @@ -10854,9 +10297,11 @@ def merge_added_subtree(sbox): A_COPY_url = url + "/A_COPY" A_path = sbox.ospath('A') - svntest.actions.run_and_verify_svn("",["\n", "Committed revision 2.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 2.\n"], [], "cp", "-m", "", A_url, A_COPY_url) - svntest.actions.run_and_verify_svn("",["\n", "Committed revision 3.\n"], [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 3.\n"], [], "cp", "-m", "", A_COPY_url + '/D', A_COPY_url + '/D2') @@ -10905,14 +10350,14 @@ def merge_added_subtree(sbox): }) # Using the above information, verify a REPO->WC copy - svntest.actions.run_and_verify_svn("", None, [], + svntest.actions.run_and_verify_svn(None, [], "cp", A_COPY_url + '/D2', os.path.join(A_path, "D2")) svntest.actions.verify_disk(A_path, expected_disk) svntest.actions.run_and_verify_status(A_path, expected_status) # Remove the copy artifacts - svntest.actions.run_and_verify_svn("", None, [], + svntest.actions.run_and_verify_svn(None, [], "revert", "-R", A_path) svntest.main.safe_rmtree(os.path.join(A_path, "D2")) @@ -10943,14 +10388,14 @@ def merge_unknown_url(sbox): # remove a path from the repo and commit. 
iota_path = sbox.ospath('iota') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', iota_path) - svntest.actions.run_and_verify_svn("", None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path) + svntest.actions.run_and_verify_svn(None, [], "ci", wc_dir, "-m", "log message") url = sbox.repo_url + "/iota" expected_err = ".*File not found.*iota.*|.*iota.*path not found.*" - svntest.actions.run_and_verify_svn("", None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, "merge", url, wc_dir) #---------------------------------------------------------------------- @@ -10995,7 +10440,7 @@ def reverse_merge_away_all_mergeinfo(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Commit the merge as r7 expected_output = wc.State(wc_dir, { @@ -11007,9 +10452,7 @@ def reverse_merge_away_all_mergeinfo(sbox): wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, - wc_dir) + wc_status) # Now reverse merge r7 from itself, all mergeinfo should be removed. expected_output = wc.State(A_COPY_H_path, { @@ -11042,7 +10485,7 @@ def reverse_merge_away_all_mergeinfo(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, + [], True, False, '--allow-mixed-revisions', A_COPY_H_path) @@ -11087,35 +10530,35 @@ def dont_merge_revs_into_subtree_that_predate_it(sbox): expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')}) expected_status.tweak('A/D/H/psi', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A/D/H/psi', contents="New content") # Create 'A/D/H/nu' and commit it as r3. 
svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')}) expected_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=3)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Delete 'A/D/H/nu' and commit it as r4. - svntest.actions.run_and_verify_svn(None, None, [], 'rm', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', nu_path) expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Deleting')}) expected_status.remove('A/D/H/nu') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Copy 'A/D/H/nu' from r3 and commit it as r5. - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.repo_url + '/A/D/H/nu@3', nu_path) expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')}) expected_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=5)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Copy 'A/D/H' to 'H_COPY' in r6. - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 6.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 6.\n'], [], 'copy', sbox.repo_url + "/A/D/H", sbox.repo_url + "/H_COPY", @@ -11136,7 +10579,7 @@ def dont_merge_revs_into_subtree_that_predate_it(sbox): expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')}) expected_status.tweak('A/D/H/nu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Remove A/D/H/nu and commit it as r8. 
# We do this deletion so that following cherry harvest has a *tough* @@ -11145,7 +10588,7 @@ def dont_merge_revs_into_subtree_that_predate_it(sbox): expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Deleting')}) expected_status.remove('A/D/H/nu') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Make another text mod to 'A/D/H/psi' that can be merged to 'H_COPY' # during a cherry harvest and commit it as r9. @@ -11153,7 +10596,7 @@ def dont_merge_revs_into_subtree_that_predate_it(sbox): expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')}) expected_status.tweak('A/D/H/psi', wc_rev=9) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A/D/H/psi', contents="Even *newer* content") # Update WC so elision occurs smoothly. @@ -11162,7 +10605,6 @@ def dont_merge_revs_into_subtree_that_predate_it(sbox): # Merge r7 from 'A/D/H/nu' to 'H_COPY/nu'. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[7]], ['U ' + nu_COPY_path + '\n', ' U ' + nu_COPY_path + '\n']), @@ -11178,13 +10620,12 @@ def dont_merge_revs_into_subtree_that_predate_it(sbox): expected_skip = wc.State(H_COPY_path, { }) #Cherry pick r2 prior to cherry harvest. - svntest.actions.run_and_verify_svn(None, [], [], 'merge', '-c2', + svntest.actions.run_and_verify_svn([], [], 'merge', '-c2', sbox.repo_url + '/A/D/H', H_COPY_path) # H_COPY needs r6-9 applied while H_COPY/nu needs only 6,8-9. 
svntest.actions.run_and_verify_svn( - None, expected_merge_output( [[7,9], # Merge notification [6,9]], # Mergeinfo notification @@ -11233,7 +10674,7 @@ def set_up_renamed_subtree(sbox): expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')}) expected_status.tweak('A/D/H/psi', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A/D/H/psi', contents="New content") # Make a text mod to 'A/D/H/omega' and commit it as r3 @@ -11241,11 +10682,11 @@ def set_up_renamed_subtree(sbox): expected_output = wc.State(wc_dir, {'A/D/H/omega' : Item(verb='Sending')}) expected_status.tweak('A/D/H/omega', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.tweak('A/D/H/omega', contents="New omega") # Move 'A/D/H/psi' to 'A/D/H/psi_moved' and commit it as r4. - svntest.actions.run_and_verify_svn(None, None, [], 'move', + svntest.actions.run_and_verify_svn(None, [], 'move', psi_path, psi_moved_path) expected_output = wc.State(wc_dir, { 'A/D/H/psi' : Item(verb='Deleting'), @@ -11259,15 +10700,15 @@ def set_up_renamed_subtree(sbox): # this so we still want to test that the issue #3067 fixes tested by # merge_chokes_on_renamed_subtrees and subtrees_with_empty_mergeinfo # still work. - svntest.actions.run_and_verify_svn(None, None, [], 'ps', SVN_PROP_MERGEINFO, + svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO, "", psi_moved_path) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Copy 'A/D/H' to 'H_COPY' in r5. 
- svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 5.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 5.\n'], [], 'copy', sbox.repo_url + "/A/D/H", sbox.repo_url + "/H_COPY", @@ -11288,7 +10729,7 @@ def set_up_renamed_subtree(sbox): {'A/D/H/psi_moved' : Item(verb='Sending')}) expected_status.tweak('A/D/H/psi_moved', wc_rev=6) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) expected_disk.remove('A/D/H/psi') expected_disk.add({ 'A/D/H/psi_moved' : Item("Even *Newer* content"), @@ -11321,7 +10762,6 @@ def merge_chokes_on_renamed_subtrees(sbox): # Here is where issue #3174 appears, the merge fails with: # svn: svn: File not found: revision 3, path '/A/D/H/psi' svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5,6],[3,6]], ['U ' + psi_COPY_moved_path + '\n', ' U ' + psi_COPY_moved_path + '\n', @@ -11353,7 +10793,7 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): nu_copy_path = sbox.ospath('A_copy/D/H/nu') def _commit_and_update(rev, action): - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'r%d - %s' % (rev, action), sbox.wc_dir) svntest.main.run_svn(None, 'up', wc_dir) @@ -11381,7 +10821,6 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): # a propget. ### TODO: We can use run_and_verify_merge() here now. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['U ' + A_copy_mu_path + '\n', ' U ' + A_copy_mu_path + '\n']), [], 'merge', '-c5', sbox.repo_url + '/A_copy2/mu', A_copy_mu_path) @@ -11450,13 +10889,13 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # Revert the previous merges and try a cherry harvest merge where # the subtree's natural history is a proper subset of the merge. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) wc_status = svntest.actions.get_virginal_state(wc_dir, 5) wc_status.add({ 'A_copy' : Item(), @@ -11502,18 +10941,18 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): # r6 - Add the file 'A/D/H/nu'. svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding')}) wc_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=6)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r7 - Make a change to 'A/D/H/nu'. svntest.main.file_write(nu_path, "Nu content") expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')}) wc_status.tweak('A/D/H/nu', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r8 - Merge r6 to 'A_copy'. expected_output = wc.State(A_copy_path, { @@ -11577,8 +11016,7 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): expected_A_copy_disk, expected_A_copy_status, expected_A_copy_skip, - None, None, None, None, - None, 1) + check_props=True) wc_status.add({'A_copy/D/H/nu' : Item(status=' ', wc_rev=8)}) wc_status.tweak('A_copy', wc_rev=8) expected_output = wc.State(wc_dir, { @@ -11586,7 +11024,7 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): 'A_copy' : Item(verb='Sending'), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r9 - Merge r7 to 'A_copy/D/H/nu'. 
expected_skip = wc.State(nu_copy_path, { }) @@ -11594,7 +11032,6 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[7]], ['U ' + nu_copy_path + '\n', ' G ' + nu_copy_path + '\n',]), @@ -11602,10 +11039,10 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): expected_output = wc.State(wc_dir, {'A_copy/D/H/nu' : Item(verb='Sending')}) wc_status.tweak('A_copy/D/H/nu', wc_rev=9) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Update WC - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) wc_status.tweak(wc_rev=9) # r10 - Make another change to 'A/D/H/nu'. @@ -11613,10 +11050,10 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Sending')}) wc_status.tweak('A/D/H/nu', wc_rev=10) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Update WC - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) wc_status.tweak(wc_rev=10) # Now do a cherry harvest merge to 'A_copy'. 
@@ -11683,8 +11120,7 @@ def dont_explicitly_record_implicit_mergeinfo(sbox): expected_A_copy_disk, expected_A_copy_status, expected_A_copy_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # Test for issue where merging a change to a broken link fails @@ -11709,7 +11145,6 @@ def merge_broken_link(sbox): os.symlink('beta', link_path) svntest.main.run_svn(None, 'commit', '-m', 'Fix a broken link', link_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[4]], ['U ' + copy_path + '/beta_link\n', ' U ' + copy_path + '\n']), @@ -11750,12 +11185,12 @@ def subtree_merges_dont_intersect_with_targets(sbox): }) wc_status.tweak('A/D/gamma', 'A/D/H/psi', wc_rev=8) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) wc_disk.tweak('A/D/gamma', contents="New content") wc_disk.tweak('A/D/H/psi', contents="Even newer content") # Update the WC. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'update', wc_dir) wc_status.tweak(wc_rev=8) @@ -11774,27 +11209,27 @@ def subtree_merges_dont_intersect_with_targets(sbox): # run_and_verify_merge() because these types of simple merges are # tested to death elsewhere and this is just setup for the "real" # test. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c4', sbox.repo_url + '/A/D/H/psi', psi_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c8', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c-8', sbox.repo_url + '/A/D/H/psi', psi_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', A_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c-5', sbox.repo_url + '/A', A_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5', '-c-8', sbox.repo_url + '/A/D/H', H_COPY_2_path) @@ -11823,12 +11258,10 @@ def subtree_merges_dont_intersect_with_targets(sbox): wc_rev=9) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, - wc_dir) + wc_status) # Update the WC. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'update', wc_dir) # Make sure we have mergeinfo that meets the two criteria set out above. @@ -11912,8 +11345,7 @@ def subtree_merges_dont_intersect_with_targets(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merging to the criterion 1 branch. # @@ -11986,8 +11418,7 @@ def subtree_merges_dont_intersect_with_targets(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Test the notification portion of issue #3199. # @@ -12004,7 +11435,7 @@ def subtree_merges_dont_intersect_with_targets(sbox): ### shortcomings (and allowed merges to file targets). # # Revert the previous merges. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Repeat the forward merge expected_output = expected_merge_output( @@ -12014,7 +11445,7 @@ def subtree_merges_dont_intersect_with_targets(sbox): ' U %s\n' % (H_COPY_2_path), ' U %s\n' % (A_COPY_2_path),], elides=True) - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-r', '3:9', sbox.repo_url + '/A', A_COPY_2_path) @@ -12026,7 +11457,7 @@ def subtree_merges_dont_intersect_with_targets(sbox): ' U %s\n' % (A_COPY_path), ' U %s\n' % (psi_COPY_path)], elides=True) - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-r', '9:3', sbox.repo_url + '/A', A_COPY_path) @@ -12053,7 +11484,7 @@ def subtree_source_missing_in_requested_range(sbox): omega_COPY_path = sbox.ospath('A_COPY/D/H/omega') # r7 Delete A/D/H/psi. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'delete', psi_path) sbox.simple_commit(message='delete psi') @@ -12065,7 +11496,7 @@ def subtree_source_missing_in_requested_range(sbox): expected_output = expected_merge_output( [[3]], ['U %s\n' % (psi_COPY_path), ' U %s\n' % (psi_COPY_path),]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c', '3', sbox.repo_url + '/A/D/H/psi@3', psi_COPY_path) @@ -12075,12 +11506,12 @@ def subtree_source_missing_in_requested_range(sbox): expected_output = expected_merge_output( [[6]], ['U %s\n' % (omega_COPY_path), ' U %s\n' % (omega_COPY_path),]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c', '6', sbox.repo_url + '/A/D/H/omega', omega_COPY_path) sbox.simple_commit(message='merge r6 to A_COPY') - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(10), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [], 'up', wc_dir) # r11 - Merge r8 to A_COPY. 
@@ -12088,7 +11519,7 @@ def subtree_source_missing_in_requested_range(sbox): [[8]], ['U %s\n' % (omega_COPY_path), ' U %s\n' % (omega_COPY_path), ' U %s\n' % (A_COPY_path)]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c', '8', sbox.repo_url + '/A', A_COPY_path) @@ -12102,12 +11533,12 @@ def subtree_source_missing_in_requested_range(sbox): [[8]], [' G %s\n' % (omega_COPY_path), ' U %s\n' % (psi_COPY_path), ' G %s\n' % (A_COPY_path)]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c', '8', sbox.repo_url + '/A', A_COPY_path, '--record-only') sbox.simple_commit(message='merge r8 to A_COPY/D/H/omega') - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(11), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(11), [], 'up', wc_dir) # r12 - modify A/D/H/omega yet again. @@ -12119,12 +11550,12 @@ def subtree_source_missing_in_requested_range(sbox): expected_output = expected_merge_output( [[9,12],[2,12]], ['U %s\n' % (omega_COPY_path), ' U %s\n' % (omega_COPY_path)]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/A/D/H/omega', omega_COPY_path) sbox.simple_commit(message='cherry harvest to A_COPY/D/H/omega') - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(13), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(13), [], 'up', wc_dir) # Check that svn:mergeinfo is as expected. @@ -12134,7 +11565,7 @@ def subtree_source_missing_in_requested_range(sbox): psi_COPY_path : '/A/D/H/psi:3,8' }) # Now test a reverse merge where part of the requested range postdates - # a subtree's existance. Merge -r12:1 to A_COPY. This should revert + # a subtree's existence. Merge -r12:1 to A_COPY. This should revert # all of the merges done thus far. 
The fact that A/D/H/psi no longer # exists after r7 shouldn't break the subtree merge into A_COPY/D/H/psi. # A_COPY/D/H/psi should simply have r3 reverse merged. No paths under @@ -12204,11 +11635,10 @@ def subtree_source_missing_in_requested_range(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, True, False) + [], True, False) # Revert the previous merge. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Merge r12 to A_COPY and commit as r14. expected_output = wc.State(A_COPY_path, {}) @@ -12270,8 +11700,7 @@ def subtree_source_missing_in_requested_range(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, True, False) + [], True, False) # As we did earlier, repeat the merge with the --record-only option to # preserve the old behavior of recording mergeinfo on every subtree, thus # allowing this test to actually test the issue #3067 fixes. @@ -12280,7 +11709,7 @@ def subtree_source_missing_in_requested_range(sbox): ' G %s\n' % (A_COPY_path), ' U %s\n' % (psi_COPY_path), ' U %s\n' % (omega_COPY_path),]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c', '12', sbox.repo_url + '/A', A_COPY_path, '--record-only') @@ -12289,7 +11718,7 @@ def subtree_source_missing_in_requested_range(sbox): # Update A_COPY/D/H/rho back to r13 so it's mergeinfo doesn't include # r12. Then merge a range, -r6:12 which should delete a subtree # (A_COPY/D/H/psi). 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(14), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(14), [], 'up', wc_dir) expected_output = wc.State(A_COPY_path, { 'D/H/psi' : Item(status='D '), @@ -12350,8 +11779,7 @@ def subtree_source_missing_in_requested_range(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, True, False) + [], True, False) #---------------------------------------------------------------------- # Another test for issue #3067: 'subtrees that don't exist at the start @@ -12406,11 +11834,12 @@ def subtrees_with_empty_mergeinfo(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) #---------------------------------------------------------------------- # Test for issue #3240 'commits to subtrees added by merge # corrupt working copy and repos'. +@SkipUnless(server_has_mergeinfo) @Issue(3240) def commit_to_subtree_added_by_merge(sbox): "commits to subtrees added by merge wreak havoc" @@ -12432,14 +11861,14 @@ def commit_to_subtree_added_by_merge(sbox): # subtree as r3. os.mkdir(N_path) svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', N_path) + svntest.actions.run_and_verify_svn(None, [], 'add', N_path) expected_output = wc.State(wc_dir, {'A/D/H/N' : Item(verb='Adding'), 'A/D/H/N/nu' : Item(verb='Adding')}) wc_status.add({'A/D/H/N' : Item(status=' ', wc_rev=3), 'A/D/H/N/nu' : Item(status=' ', wc_rev=3)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Merge r3 to 'A_COPY/D/H', creating A_COPY/D/H/N' and 'A_COPY/D/H/N/nu'. # Commit the merge as r4. 
@@ -12477,7 +11906,7 @@ def commit_to_subtree_added_by_merge(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) expected_output = wc.State(wc_dir, { 'A_COPY/D/H' : Item(verb='Sending'), 'A_COPY/D/H/N' : Item(verb='Adding'), @@ -12486,7 +11915,7 @@ def commit_to_subtree_added_by_merge(sbox): 'A_COPY/D/H/N/nu' : Item(status=' ', wc_rev=4)}) wc_status.tweak('A_COPY/D/H', wc_rev=4) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Make a text change to 'A_COPY/D/H/N/nu' and commit it as r5. This # is the first place issue #3240 appears over DAV layers, and the @@ -12505,94 +11934,16 @@ def commit_to_subtree_added_by_merge(sbox): {'A_COPY/D/H/N/nu' : Item(verb='Sending')}) wc_status.tweak('A_COPY/D/H/N/nu', wc_rev=5) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # The second place issue #3240 shows up is in the fact that the commit # *did* succeed, but the wrong path ('A/D/H/nu' rather than 'A_COPY/D/H/nu') # is affected. We can see this by running an update; since we just # committed there shouldn't be any incoming changes. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(5), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(5), [], 'up', wc_dir) #---------------------------------------------------------------------- -# Helper functions. These take local paths using '/' separators. - -def local_path(path): - "Convert a path from '/' separators to the local style." - return os.sep.join(path.split('/')) - -def svn_mkfile(path): - "Make and add a file with some default content, and keyword expansion." 
- path = local_path(path) - dirname, filename = os.path.split(path) - svntest.main.file_write(path, "This is the file '" + filename + "'.\n" + - "Last changed in '$Revision$'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', path) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', - 'svn:keywords', 'Revision', path) - -def svn_modfile(path): - "Make text and property mods to a WC file." - path = local_path(path) - svntest.main.file_append(path, "An extra line.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'propset', - 'newprop', 'v', path) - -def svn_copy(s_rev, path1, path2): - "Copy a WC path locally." - path1 = local_path(path1) - path2 = local_path(path2) - svntest.actions.run_and_verify_svn(None, None, [], 'copy', '--parents', - '-r', s_rev, path1, path2) - -def svn_merge(rev_range, source, target, lines=None, elides=[], - text_conflicts=0, prop_conflicts=0, tree_conflicts=0, - text_resolved=0, prop_resolved=0, tree_resolved=0, - args=[]): - """Merge a single change from path SOURCE to path TARGET and verify the - output and that there is no error. (The changes made are not verified.) - - REV_RANGE is either a number (to cherry-pick that specific change) or a - two-element list [X,Y] to pick the revision range '-r(X-1):Y'. - - LINES is a list of regular expressions to match other lines of output; if - LINES is 'None' then match all normal (non-conflicting) merges. - - ELIDES is a list of paths on which mergeinfo elision should be reported. - - TEXT_CONFLICTS, PROP_CONFLICTS and TREE_CONFLICTS specify the number of - each kind of conflict to expect. - - ARGS are additional arguments passed to svn merge. 
- """ - - source = local_path(source) - target = local_path(target) - elides = [local_path(p) for p in elides] - if isinstance(rev_range, int): - mi_rev_range = [rev_range] - rev_arg = '-c' + str(rev_range) - else: - mi_rev_range = rev_range - rev_arg = '-r' + str(rev_range[0] - 1) + ':' + str(rev_range[1]) - if lines is None: - lines = ["(A |D |[UG] | [UG]|[UG][UG]) " + target + ".*\n"] - else: - # Expect mergeinfo on the target; caller must supply matches for any - # subtree mergeinfo paths. - lines.append(" [UG] " + target + "\n") - exp_out = expected_merge_output([mi_rev_range], lines, target=target, - elides=elides, - text_conflicts=text_conflicts, - prop_conflicts=prop_conflicts, - tree_conflicts=tree_conflicts, - text_resolved=text_resolved, - prop_resolved=prop_resolved, - tree_resolved=tree_resolved) - svntest.actions.run_and_verify_svn(None, exp_out, [], - 'merge', rev_arg, source, target, *args) - -#---------------------------------------------------------------------- # Tests for merging the deletion of a node, where the node to be deleted # is the same as or different from the node that was deleted. @@ -12697,19 +12048,18 @@ def subtree_merges_dont_cause_spurious_conflicts(sbox): expected_output = wc.State(wc_dir, {'A/D/G/rho' : Item(verb='Sending')}) wc_status.tweak('A/D/G/rho', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) wc_disk.tweak('A/D/G/rho', contents="Newer content") # r8 Make another text change to A/D/G/rho. svntest.main.file_write(rho_path, "Even *newer* content") expected_output = wc.State(wc_dir, {'A/D/G/rho' : Item(verb='Sending')}) wc_status.tweak('A/D/G/rho', wc_rev=8) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) wc_disk.tweak('A/D/G/rho', contents="Even *newer* content") # Update the WC to allow full mergeinfo inheritance and elision. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) wc_status.tweak(wc_rev=8) @@ -12776,12 +12126,11 @@ def subtree_merges_dont_cause_spurious_conflicts(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1) + check_props=True) # run_and_verify_merge doesn't support merging to a file WCPATH # so use run_and_verify_svn. ### TODO: We can use run_and_verify_merge() here now. - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[-3]], + svntest.actions.run_and_verify_svn(expected_merge_output([[-3]], ['G ' + psi_COPY_path + '\n', ' G ' + psi_COPY_path + '\n']), [], 'merge', '-c-3', @@ -12802,10 +12151,10 @@ def subtree_merges_dont_cause_spurious_conflicts(sbox): 'A_COPY/D/H/omega', wc_rev=9) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Update the WC to allow full mergeinfo inheritance and elision. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) wc_status.tweak(wc_rev=9) @@ -12892,7 +12241,7 @@ def subtree_merges_dont_cause_spurious_conflicts(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 0) + check_props=True) #---------------------------------------------------------------------- # Test for yet another variant of issue #3067. @@ -12918,18 +12267,18 @@ def merge_target_and_subtrees_need_nonintersecting_ranges(sbox): # Add file A/D/G/nu in r7. 
svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) expected_output = wc.State(wc_dir, {'A/D/G/nu' : Item(verb='Adding')}) wc_status.add({'A/D/G/nu' : Item(status=' ', wc_rev=7)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Make a text mod to A/D/G/nu in r8. svntest.main.file_write(nu_path, "New content") expected_output = wc.State(wc_dir, {'A/D/G/nu' : Item(verb='Sending')}) wc_status.tweak('A/D/G/nu', wc_rev=8) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Do several merges to setup a situation where the merge # target and two of its subtrees need non-intersecting ranges @@ -12945,7 +12294,7 @@ def merge_target_and_subtrees_need_nonintersecting_ranges(sbox): # of merges to death we don't use run_and_verify_merge() on these # intermediate merges. 
svntest.actions.run_and_verify_svn( - None, expected_merge_output([[2,7]], + expected_merge_output([[2,7]], ['U ' + beta_COPY_path + '\n', 'A ' + nu_COPY_path + '\n', 'U ' + rho_COPY_path + '\n', @@ -12955,12 +12304,12 @@ def merge_target_and_subtrees_need_nonintersecting_ranges(sbox): ), [], 'merge', '-r0:7', sbox.repo_url + '/A', A_COPY_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[8]], ['U ' + nu_COPY_path + '\n', + expected_merge_output([[8]], ['U ' + nu_COPY_path + '\n', ' G ' + nu_COPY_path + '\n']), [], 'merge', '-c8', sbox.repo_url + '/A/D/G/nu', nu_COPY_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-6]], ['G ' + omega_COPY_path + '\n', + expected_merge_output([[-6]], ['G ' + omega_COPY_path + '\n', ' G ' + omega_COPY_path + '\n']), [], 'merge', '-c-6', sbox.repo_url + '/A/D/H/omega', omega_COPY_path) wc_status.add({'A_COPY/D/G/nu' : Item(status=' ', wc_rev=9)}) @@ -12978,11 +12327,10 @@ def merge_target_and_subtrees_need_nonintersecting_ranges(sbox): 'A_COPY/D/H/omega' : Item(verb='Sending'), 'A_COPY/D/H/psi' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Update the WC to allow full mergeinfo inheritance and elision. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) # Merge all available revisions from A to A_COPY, the merge logic @@ -13055,8 +12403,7 @@ def merge_target_and_subtrees_need_nonintersecting_ranges(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # Part of this test is a regression test for issue #3250 "Repeated merging @@ -13118,7 +12465,7 @@ def merge_two_edits_to_same_prop(sbox): # the conflict, so it no longer tests the original #3250 scenario. # # Revert changes to branch wc - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', A_COPY_path) # In the branch, make two successive changes to the same property @@ -13135,7 +12482,7 @@ def merge_two_edits_to_same_prop(sbox): ], prop_conflicts=1, args=['--allow-mixed-revisions']) # Revert changes to trunk wc, to test next scenario of #3250 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', A_path) # Merge the first change, then the second, to trunk. @@ -13235,7 +12582,7 @@ def merge_adds_mergeinfo_correctly(sbox): D_COPY_2_path = sbox.ospath('A_COPY_2/D') # Update working copy to allow full inheritance and elision. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', wc_dir) wc_status.tweak(wc_rev=7) @@ -13300,8 +12647,7 @@ def merge_adds_mergeinfo_correctly(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) wc_status.tweak('A_COPY', 'A_COPY/D/G/rho', wc_rev=8) @@ -13309,8 +12655,7 @@ def merge_adds_mergeinfo_correctly(sbox): 'A_COPY' : Item(verb='Sending'), 'A_COPY/D/G/rho' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Merge r7 from A/D to A_COPY_2/D and commit as r9. # This creates explicit mergeinfo on A_COPY_2/D of '/A/D:7'. @@ -13355,8 +12700,7 @@ def merge_adds_mergeinfo_correctly(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) wc_status.tweak('A_COPY_2/D', 'A_COPY_2/D/H/omega', wc_rev=9) @@ -13364,8 +12708,7 @@ def merge_adds_mergeinfo_correctly(sbox): 'A_COPY_2/D' : Item(verb='Sending'), 'A_COPY_2/D/H/omega' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Merge r9 from A_COPY_2 to A_COPY. A_COPY/D gets the explicit mergeinfo # '/A/D/:7' added from r9. But it prior to the merge it inherited '/A/D:5' @@ -13373,7 +12716,7 @@ def merge_adds_mergeinfo_correctly(sbox): # the mergeinfo describing this merge '/A_COPY_2:9' should also be present # in A_COPY's explicit mergeinfo. # Update working copy to allow full inheritance and elision. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) expected_output = wc.State(A_COPY_path, { 'D' : Item(status=' U'), @@ -13436,16 +12779,15 @@ def merge_adds_mergeinfo_correctly(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Revert and repeat the above merge, but this time create some # uncommitted mergeinfo on A_COPY/D, this should not cause a write # lock error as was seen in http://subversion.tigris.org/ # ds/viewMessage.do?dsForumId=462&dsMessageId=103945 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO, '', D_COPY_path) expected_output = wc.State(A_COPY_path, { @@ -13466,8 +12808,7 @@ def merge_adds_mergeinfo_correctly(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -13581,27 +12922,25 @@ def natural_history_filtering(sbox): "A_COPY_2" + '/D/H/omega' : Item("This is the file 'omega'.\n"), "A_COPY_2" + '/D/H/psi' : Item("New content"), }) - svntest.actions.run_and_verify_svn(None, expected, [], 'copy', + svntest.actions.run_and_verify_svn(expected, [], 'copy', sbox.repo_url + "/A", A_COPY_2_path) expected_output = wc.State(wc_dir, {"A_COPY_2" : Item(verb='Adding')}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, - None, - wc_dir) + wc_status) # r8: Make a text change under A, to A/D/H/chi. 
svntest.main.file_write(chi_path, "New content") expected_output = wc.State(wc_dir, {'A/D/H/chi' : Item(verb='Sending')}) wc_status.tweak('A/D/H/chi', wc_rev=8) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) wc_disk.tweak('A/D/H/psi', contents="New content") # r9: Merge all available revisions from A to A_COPY. But first # update working copy to allow full inheritance and elision. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) wc_status.tweak(wc_rev=8) expected_output = wc.State(A_COPY_path, { @@ -13667,8 +13006,7 @@ def natural_history_filtering(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) wc_status.tweak('A_COPY', 'A_COPY/B/E/beta', 'A_COPY/D/G/rho', @@ -13684,11 +13022,10 @@ def natural_history_filtering(sbox): 'A_COPY/D/H/psi' : Item(verb='Sending'), 'A_COPY/D/H/omega' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # Again update the working copy to allow full inheritance and elision. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) wc_status.tweak(wc_rev=9) @@ -13757,8 +13094,7 @@ def natural_history_filtering(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -13797,7 +13133,7 @@ def subtree_gets_changes_even_if_ultimately_deleted(sbox): sbox.simple_commit(message='mod psi') # r8: Delete A/D/H/psi. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'delete', psi_path) sbox.simple_commit(message='delete psi') @@ -13836,10 +13172,9 @@ def subtree_gets_changes_even_if_ultimately_deleted(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 0, + [], True, False, '-c3,7', H_COPY_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-7]], ['G ' + psi_COPY_path + '\n', ' G ' + psi_COPY_path + '\n',]), @@ -13886,7 +13221,7 @@ def subtree_gets_changes_even_if_ultimately_deleted(sbox): expected_elision_output, expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 0) + [], True, False) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -13906,8 +13241,7 @@ def no_self_referential_filtering_on_added_path(sbox): wc_disk, wc_status = set_up_branch(sbox, False, 2) # r8: Make a prop change on A_COPY/C. - svntest.actions.run_and_verify_svn(None, - ["property 'propname' set on '" + + svntest.actions.run_and_verify_svn(["property 'propname' set on '" + C_COPY_path + "'\n"], [], 'ps', 'propname', 'propval', C_COPY_path) @@ -13916,17 +13250,15 @@ def no_self_referential_filtering_on_added_path(sbox): wc_status.tweak('A_COPY/C', wc_rev=8) wc_disk.tweak("A_COPY/C", props={'propname' : 'propval'}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # r9: Merge r8 from A_COPY to A. # # Update first to avoid an out of date error. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) wc_status.tweak(wc_rev=8) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[8]], [' U ' + C_path + '\n', ' U ' + A_path + '\n',]), @@ -13935,8 +13267,7 @@ def no_self_referential_filtering_on_added_path(sbox): {'A' : Item(verb='Sending'), 'A/C' : Item(verb='Sending')}) wc_status.tweak('A', 'A/C', wc_rev=9) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) wc_disk.tweak("A/C", props={'propname' : 'propval'}) @@ -13944,13 +13275,13 @@ def no_self_referential_filtering_on_added_path(sbox): props={SVN_PROP_MERGEINFO : '/A_COPY:8'}) # r10: Move A/C to A/C_MOVED. - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 10.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 10.\n'], [], 'move', sbox.repo_url + '/A/C', sbox.repo_url + '/A/C_MOVED', '-m', 'Copy A/C to A/C_MOVED') - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Now try to merge all available revisions from A to A_COPY_2. 
@@ -14038,8 +13369,7 @@ def no_self_referential_filtering_on_added_path(sbox): expected_A_COPY_2_disk, expected_A_COPY_2_status, expected_A_COPY_2_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # Test for issue #3324 @@ -14087,27 +13417,26 @@ def merge_range_prior_to_rename_source_existence(sbox): # r8 - Text change to A/B/E/alpha svntest.main.file_write(alpha_path, "New content") wc_status.tweak('A/B/E/alpha', wc_rev=8) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Text change', wc_dir) # r9 - Add the file A/D/H/nu and make another change to A/B/E/alpha. svntest.main.file_write(alpha_path, "Even newer content") svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) expected_output = wc.State(wc_dir, {'A/D/H/nu' : Item(verb='Adding'), 'A/B/E/alpha' : Item(verb='Sending')}) wc_status.add({'A/D/H/nu' : Item(status=' ', wc_rev=9)}) wc_status.tweak('A/B/E/alpha', wc_rev=9) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r10 - Merge all available revisions (i.e. -r1:9) from A to A_COPY. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) wc_status.tweak(wc_rev=9) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[2,9]], ['A ' + nu_COPY_path + '\n', 'U ' + alpha_COPY_path + '\n', @@ -14134,14 +13463,13 @@ def merge_range_prior_to_rename_source_existence(sbox): wc_rev=10) wc_status.add({'A_COPY/D/H/nu' : Item(status=' ', wc_rev=10)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r11 - Reverse merge -r9:1 from A/B to A_COPY/B - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(10), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(10), [], 'up', wc_dir) wc_status.tweak(wc_rev=10) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[9,2]], ['U ' + alpha_COPY_path + '\n', 'U ' + beta_COPY_path + '\n', ' G ' + B_COPY_path + '\n',]), @@ -14155,10 +13483,10 @@ def merge_range_prior_to_rename_source_existence(sbox): 'A_COPY/B/E/beta', wc_rev=11) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # r12 - Move A/D/H/nu to A/D/H/nu_moved - svntest.actions.run_and_verify_svn(None, ["\n", + svntest.actions.run_and_verify_svn(["Committing transaction...\n", "Committed revision 12.\n"], [], 'move', sbox.repo_url + '/A/D/H/nu', sbox.repo_url + '/A/D/H/nu_moved', @@ -14169,7 +13497,7 @@ def merge_range_prior_to_rename_source_existence(sbox): "A " + nu_moved_path + "\n", "Updated to revision 12.\n"], ) - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'up', wc_dir) # Now merge -r7:12 from A to A_COPY. 
@@ -14246,9 +13574,8 @@ def merge_range_prior_to_rename_source_existence(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + check_props=True) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge -r7:12 from A to A_COPY', wc_dir) # Now run a similar scenario as above on the second branch, but with @@ -14264,20 +13591,19 @@ def merge_range_prior_to_rename_source_existence(sbox): # Properties on 'A_COPY_2\B': # svn:mergeinfo # /A/B:3-13 - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(13), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(13), [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, - None, # Don't check stdout, we test this + svntest.actions.run_and_verify_svn(None, # Don't check stdout, we test this # type of merge to death elsewhere. [], 'merge', sbox.repo_url + '/A/B', B_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None,[], 'merge', '-r', '2:9', + svntest.actions.run_and_verify_svn(None,[], 'merge', '-r', '2:9', sbox.repo_url + '/A', A_COPY_2_path) svntest.actions.run_and_verify_svn( - None, None, [], 'ci', '-m', + None, [], 'ci', '-m', 'Merge all from A/B to A_COPY_2/B\nMerge -r2:9 from A to A_COPY_2', wc_dir) - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(14), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(14), [], 'up', wc_dir) # Now reverse merge -r13:7 from A to A_COPY_2. 
@@ -14377,8 +13703,7 @@ def merge_range_prior_to_rename_source_existence(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 1) + [], True, True) #---------------------------------------------------------------------- def set_up_natural_history_gap(sbox): @@ -14408,23 +13733,26 @@ def set_up_natural_history_gap(sbox): # r6: Delete 'A' exit_code, out, err = svntest.actions.run_and_verify_svn( - None, "(Committed revision 6.)|(\n)", [], + ["Committing transaction...\n", + "Committed revision 6.\n"], [], 'delete', sbox.repo_url + '/A', '-m', 'Delete A') # r7: Resurrect 'A' by copying 'A@2' to 'A'. exit_code, out, err = svntest.actions.run_and_verify_svn( - None, "(Committed revision 7.)|(\n)", [], + ["Committing transaction...\n", + "Committed revision 7.\n"], [], 'copy', sbox.repo_url + '/A@2', sbox.repo_url + '/A', '-m', 'Resurrect A from A@2') # r8: Branch the resurrected 'A' to 'A_COPY'. exit_code, out, err = svntest.actions.run_and_verify_svn( - None, "(Committed revision 8.)|(\n)", [], + ["Committing transaction...\n", + "Committed revision 8.\n"], [], 'copy', sbox.repo_url + '/A', sbox.repo_url + '/A_COPY', '-m', 'Copy A to A_COPY') # Update to bring all the repos side changes down. - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) wc_status.add({ "A_COPY/B" : Item(status=' '), @@ -14455,8 +13783,8 @@ def set_up_natural_history_gap(sbox): # Update the WC to a uniform revision. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], + wc_status) + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) return wc_disk, wc_status @@ -14548,8 +13876,7 @@ def dont_merge_gaps_in_history(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # Test for issue #3432 'Merge can record mergeinfo from natural history @@ -14643,8 +13970,7 @@ def handle_gaps_in_implicit_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Now reverse merge -r9:2 from 'A@HEAD' to 'A_COPY'. This should be # a no-op since the only operative change made on 'A@HEAD' between r2:9 @@ -14666,8 +13992,7 @@ def handle_gaps_in_implicit_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Now merge all available revisions from 'A' to 'A_COPY'. # The mergeinfo '/A:4' on 'A_COPY' should have no impact on this merge @@ -14687,8 +14012,7 @@ def handle_gaps_in_implicit_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # Test for issue #3323 'Mergeinfo deleted by a merge should disappear' @@ -14712,43 +14036,42 @@ def mergeinfo_deleted_by_a_merge_should_disappear(sbox): # r7: Merge all available revisions from A/D to A_COPY/D, this creates # mergeinfo on A_COPY/D. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, - None, # Don't check stdout, we test this + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, # Don't check stdout, we test this # type of merge to death elsewhere. [], 'merge', sbox.repo_url + '/A/D', D_COPY_path) svntest.actions.run_and_verify_svn( - None, None, [], 'ci', '-m', + None, [], 'ci', '-m', 'Merge all available revisions from A/D to A_COPY/D', wc_dir) # r8: Copy A_COPY to A_COPY_2, this carries the mergeinf on A_COPY/D # to A_COPY_2/D. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None,[], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None,[], 'copy', A_COPY_path, A_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Copy A_COPY to A_COPY_2', wc_dir) # r9: Propdel the mergeinfo on A_COPY/D. - svntest.actions.run_and_verify_svn(None, None,[], + svntest.actions.run_and_verify_svn(None,[], 'pd', SVN_PROP_MERGEINFO, D_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Propdel the mergeinfo on A_COPY/D', wc_dir) # r10: Merge r5 from A to A_COPY_2 so the latter gets some explicit # mergeinfo. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c5', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5', sbox.repo_url + '/A', A_COPY_2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r5 from A to A_COPY_2', wc_dir) # Now merge r9 from A_COPY to A_COPY_2. 
Since the merge itself cleanly # removes all explicit mergeinfo from A_COPY_2/D, we should not set any # mergeinfo on that subtree describing the merge. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_COPY_2_path, { 'D' : Item(status=' U'), }) @@ -14809,8 +14132,7 @@ def mergeinfo_deleted_by_a_merge_should_disappear(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # File merge optimization caused segfault during noop file merge @@ -14836,35 +14158,33 @@ def noop_file_merge(sbox): # eligible ranges to be merged to A_COPY/D/H/chi into two discrete # sets: r1-4 and r5-HEAD svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['U ' + beta_COPY_path + '\n', ' U ' + A_COPY_path + '\n',]), [], 'merge', '-c5', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', '-m', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Merge r5 from A to A_COPY', wc_dir) # Update working copy to allow full inheritance and elision. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', wc_dir) # Merge all available revisions from A/D/H/chi to A_COPY/D/H/chi. # There are no operative changes in the source, so this should # not produce any output other than mergeinfo updates on # A_COPY/D/H/chi. This is where the segfault occurred. 
- svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A/D/H/chi', chi_COPY_path) - svntest.actions.run_and_verify_svn(None, - [' M ' + chi_COPY_path + '\n'], + svntest.actions.run_and_verify_svn([' M ' + chi_COPY_path + '\n'], [], 'st', chi_COPY_path) - svntest.actions.run_and_verify_svn(None, - ['/A/D/H/chi:2-7\n'], + svntest.actions.run_and_verify_svn(['/A/D/H/chi:2-7\n'], [], 'pg', SVN_PROP_MERGEINFO, chi_COPY_path) #---------------------------------------------------------------------- +@SkipUnless(server_has_mergeinfo) @Issue(2690) def copy_then_replace_via_merge(sbox): "copy then replace via merge" @@ -14901,17 +14221,17 @@ def copy_then_replace_via_merge(sbox): main.file_append(AJK_zeta, 'new text') main.file_append(AJL_zeta, 'new text') main.run_svn(None, 'add', AJ) - main.run_svn(None, 'ci', wc_dir, '-m', 'create tree J') # r3 + sbox.simple_commit(message='create tree J') # r3 main.run_svn(None, 'up', wc_dir) # Copy J to the branch via merge main.run_svn(None, 'merge', url_A, branch) - main.run_svn(None, 'ci', wc_dir, '-m', 'merge to branch') # r4 + sbox.simple_commit(message='merge to branch') # r4 main.run_svn(None, 'up', wc_dir) # In A, replace J with a slightly different tree main.run_svn(None, 'rm', AJ) - main.run_svn(None, 'ci', wc_dir, '-m', 'rm AJ') # r5 + sbox.simple_commit(message='rm AJ') # r5 main.run_svn(None, 'up', wc_dir) os.makedirs(AJL) @@ -14921,7 +14241,7 @@ def copy_then_replace_via_merge(sbox): main.file_append(AJL_zeta, 'really new text') main.file_append(AJM_zeta, 'really new text') main.run_svn(None, 'add', AJ) - main.run_svn(None, 'ci', wc_dir, '-m', 'create tree J again') # r6 + sbox.simple_commit(message='create tree J again') # r6 main.run_svn(None, 'up', wc_dir) # Run merge to replace /branch/J in one swell foop. 
@@ -14961,8 +14281,7 @@ def copy_then_replace_via_merge(sbox): }) actions.run_and_verify_commit(branch_J, expected_output, - expected_status, - None, branch_J) + expected_status) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -14985,33 +14304,32 @@ def record_only_merge(sbox): nu_COPY_path = sbox.ospath('A_COPY/C/nu') # r7 - Copy the branch A_COPY@2 to A2 and update the WC. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', A_COPY_path, A2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Branch the branch', wc_dir) # r8 - Add A/C/nu and A/B/Z. # Add a new file with mergeinfo in the foreign repos. svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', Z_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', Z_path) + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Add subtrees', wc_dir) # r9 - Edit A/C/nu and add a random property on A/B/Z. svntest.main.file_write(nu_path, "New content.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'propname', 'propval', Z_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Subtree changes', wc_dir) # r10 - Merge r8 from A to A_COPY. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, - expected_merge_output( + svntest.actions.run_and_verify_svn(expected_merge_output( [[8]], ['A ' + Z_COPY_path + '\n', 'A ' + nu_COPY_path + '\n', @@ -15019,7 +14337,7 @@ def record_only_merge(sbox): [], 'merge', '-c8', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Root merge of r8', wc_dir) @@ -15029,8 +14347,7 @@ def record_only_merge(sbox): # r6 from A/D/H to A_COPY/D/H # r9 from A/C/nu to A_COPY/C/nu # r9 from A/B/Z to A_COPY/B/Z - svntest.actions.run_and_verify_svn(None, - expected_merge_output( + svntest.actions.run_and_verify_svn(expected_merge_output( [[4]], ['U ' + rho_COPY_path + '\n', ' U ' + rho_COPY_path + '\n',]), @@ -15038,32 +14355,29 @@ def record_only_merge(sbox): sbox.repo_url + '/A/D/G/rho', rho_COPY_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[6]], ['U ' + omega_COPY_path + '\n', ' U ' + H_COPY_path + '\n',]), [], 'merge', '-c6', sbox.repo_url + '/A/D/H', H_COPY_path) - svntest.actions.run_and_verify_svn(None, - expected_merge_output( + svntest.actions.run_and_verify_svn(expected_merge_output( [[9]], ['U ' + nu_COPY_path + '\n', ' G ' + nu_COPY_path + '\n',]), [], 'merge', '-c9', sbox.repo_url + '/A/C/nu', nu_COPY_path) - svntest.actions.run_and_verify_svn(None, - expected_merge_output( + svntest.actions.run_and_verify_svn(expected_merge_output( [[9]], [' U ' + Z_COPY_path + '\n', ' G ' + Z_COPY_path + '\n']), [], 'merge', '-c9', sbox.repo_url + '/A/B/Z', Z_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Several subtree merges', wc_dir) - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(11), [], 'up', + 
svntest.actions.run_and_verify_svn(exp_noop_up_out(11), [], 'up', wc_dir) # Now do a --record-only merge of r10 and r11 from A_COPY to A2. @@ -15150,7 +14464,7 @@ def record_only_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 0, + [], True, False, '--record-only', A2_path) #---------------------------------------------------------------------- @@ -15171,7 +14485,7 @@ def merge_automatic_conflict_resolution(sbox): # r7 - Make a change on A_COPY that will conflict with r3 on A svntest.main.file_write(psi_COPY_path, "BASE.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'log msg', wc_dir) # Set up our base expectations, we'll tweak accordingly for each option. @@ -15228,6 +14542,8 @@ def merge_automatic_conflict_resolution(sbox): }) expected_disk.tweak('D/H/psi', contents="<<<<<<< .working\n" "BASE.\n" + "||||||| .merge-left.r2\n" + "This is the file 'psi'.\n" "=======\n" "New content>>>>>>> .merge-right.r3\n") expected_status.tweak('D/H/psi', status='C ') @@ -15242,14 +14558,13 @@ def merge_automatic_conflict_resolution(sbox): expected_disk, expected_status, expected_skip, - None, - svntest.tree.detect_conflict_files, - list(psi_conflict_support_files), - None, None, 1, 1, + [], True, True, '--accept', 'postpone', '--allow-mixed-revisions', - A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + A_COPY_path, + extra_files= + list(psi_conflict_support_files)) + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) # Test --accept mine-conflict and mine-full @@ -15265,12 +14580,11 @@ def merge_automatic_conflict_resolution(sbox): expected_disk, expected_status, expected_skip, - None, None, None, - None, None, 1, 0, + [], True, False, '--accept', 'mine-conflict', '--allow-mixed-revisions', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', 
'--recursive', wc_dir) svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3', sbox.repo_url + '/A', None, @@ -15280,12 +14594,11 @@ def merge_automatic_conflict_resolution(sbox): expected_disk, expected_status, expected_skip, - None, None, None, - None, None, 1, 0, + [], True, False, '--accept', 'mine-full', '--allow-mixed-revisions', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) # Test --accept theirs-conflict and theirs-full @@ -15301,12 +14614,11 @@ def merge_automatic_conflict_resolution(sbox): expected_disk, expected_status, expected_skip, - None, None, None, - None, None, 1, 0, + [], True, False, '--accept', 'theirs-conflict', '--allow-mixed-revisions', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) svntest.actions.run_and_verify_merge(A_COPY_path, '2', '3', sbox.repo_url + '/A', None, @@ -15316,12 +14628,11 @@ def merge_automatic_conflict_resolution(sbox): expected_disk, expected_status, expected_skip, - None, None, None, - None, None, 1, 0, + [], True, False, '--accept', 'theirs-full', '--allow-mixed-revisions', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) # Test --accept base ### TODO: Also test that the output has a 'Resolved' line for this path. @@ -15338,8 +14649,7 @@ def merge_automatic_conflict_resolution(sbox): expected_disk, expected_status, expected_skip, - None, None, None, - None, None, 1, 0, + [], True, False, '--accept', 'base', '--allow-mixed-revisions', A_COPY_path) @@ -15347,6 +14657,7 @@ def merge_automatic_conflict_resolution(sbox): #---------------------------------------------------------------------- # Test for issue #3440 'Skipped paths get incorrect override mergeinfo # during merge'. 
+@SkipUnless(server_has_mergeinfo) @Issue(3440) def skipped_files_get_correct_mergeinfo(sbox): "skipped files get correct mergeinfo set" @@ -15377,7 +14688,6 @@ def skipped_files_get_correct_mergeinfo(sbox): # Merge r3 from A to A_COPY, this will create explicit mergeinfo of # '/A:3' on A_COPY. Commit this merge as r8. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3]], ['U ' + psi_COPY_path + '\n', ' U ' + A_COPY_path + '\n',]), @@ -15395,9 +14705,9 @@ def skipped_files_get_correct_mergeinfo(sbox): # # Issue #3440 occurred when empty mergeinfo was set on A_COPY/D/H, making # it appear that r3 was never merged. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth=empty', H_COPY_path) expected_status = wc.State(A_COPY_path, { '' : Item(status=' M'), @@ -15458,8 +14768,7 @@ def skipped_files_get_correct_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1) + [], True, True) #---------------------------------------------------------------------- # Test for issue #3115 'Case only renames resulting from merges don't @@ -15476,8 +14785,8 @@ def committed_case_only_move_and_revert(sbox): A_COPY_path = sbox.ospath('A_COPY') # r3: A case-only file rename on the server - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 3.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 3.\n'], [], 'move', sbox.repo_url + '/A/mu', sbox.repo_url + '/A/MU', @@ -15545,8 +14854,7 @@ def committed_case_only_move_and_revert(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) + [], True, False) # Commit the merge expected_output = svntest.wc.State(wc_dir, { @@ -15558,8 +14866,7 @@ def 
committed_case_only_move_and_revert(sbox): wc_status.remove('A_COPY/mu') wc_status.add({'A_COPY/MU': Item(status=' ', wc_rev=4)}) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status, - None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, wc_status) # In issue #3115 the WC gets corrupted and any subsequent revert # attempts fail with this error: @@ -15577,11 +14884,11 @@ def committed_case_only_move_and_revert(sbox): # ..\..\..\subversion\libsvn_wc\workqueue.c:490: (apr_err=2) # svn: Error restoring text for 'C:\SVN\src-trunk\Debug\subversion\tests # \cmdline\svn-test-work\working_copies\merge_tests-139\A_COPY\MU' - svntest.actions.run_and_verify_svn(None, [], [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn([], [], 'revert', '-R', wc_dir) # r5: A case-only directory rename on the server - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 5.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 5.\n'], [], 'move', sbox.repo_url + '/A/C', sbox.repo_url + '/A/c', @@ -15614,13 +14921,13 @@ def committed_case_only_move_and_revert(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0, + [], True, False, '--allow-mixed-revisions', A_COPY_path) #---------------------------------------------------------------------- # This is a test for issue #3221 'Unable to merge into working copy of # deleted branch'. +@SkipUnless(server_has_mergeinfo) @Issue(3221) def merge_into_wc_for_deleted_branch(sbox): "merge into WC of deleted branch should work" @@ -15637,7 +14944,7 @@ def merge_into_wc_for_deleted_branch(sbox): # r7 - Delete the branch on the repository, obviously it still # exists in our WC. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'delete', sbox.repo_url + '/A_COPY', '-m', 'Delete A_COPY directly in repos') @@ -15725,8 +15032,7 @@ def merge_into_wc_for_deleted_branch(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) + check_props=True) #---------------------------------------------------------------------- def foreign_repos_del_and_props(sbox): @@ -15739,32 +15045,32 @@ def foreign_repos_del_and_props(sbox): (r2_path, r2_url) = sbox.add_repo_path('fgn') svntest.main.create_repos(r2_path) - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', r2_url, wc2_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'native', sbox.ospath('iota')) - svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A/D'), sbox.ospath('D')) - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/D'), sbox.ospath('D/G')) new_file = sbox.ospath('new-file') svntest.main.file_write(new_file, 'new-file') - svntest.actions.run_and_verify_svn(None, None, [], 'add', new_file) + svntest.actions.run_and_verify_svn(None, [], 'add', new_file) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'native', new_file) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'commit', wc_dir, '-m', 'changed') - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url, wc2_dir, '-r', '0:1') @@ -15778,10 +15084,10 @@ def foreign_repos_del_and_props(sbox): expected_status = 
svntest.actions.get_virginal_state(wc2_dir, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', wc2_dir, + svntest.actions.run_and_verify_svn(None, [], 'commit', wc2_dir, '-m', 'Merged r1') - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url, wc2_dir, '-r', '1:2', '--allow-mixed-revisions') @@ -15809,7 +15115,7 @@ def foreign_repos_del_and_props(sbox): " svn:eol-style\n", "Properties on '%s':\n" % (os.path.join(wc2_dir, 'new-file')), " svn:eol-style\n" ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'proplist', + svntest.actions.run_and_verify_svn(expected_output, [], 'proplist', os.path.join(wc2_dir, 'iota'), os.path.join(wc2_dir, 'new-file')) @@ -15828,7 +15134,7 @@ def immediate_depth_merge_creates_minimal_subtree_mergeinfo(sbox): B_COPY_path = sbox.ospath('A_COPY/B') - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Merge -c5 from A/B to A_COPY/B at --depth immediates. # This should create only the minimum subtree mergeinfo @@ -15875,13 +15181,14 @@ def immediate_depth_merge_creates_minimal_subtree_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1, '--depth', 'immediates', + [], True, True, + '--depth', 'immediates', B_COPY_path) #---------------------------------------------------------------------- # Test for issue #3646 'cyclic --record-only merges create self-referential # mergeinfo' +@SkipUnless(server_has_mergeinfo) @Issue(3646) def record_only_merge_creates_self_referential_mergeinfo(sbox): "merge creates self referential mergeinfo" @@ -15902,15 +15209,15 @@ def record_only_merge_creates_self_referential_mergeinfo(sbox): # Make a change to A/mu in r2. 
svntest.main.file_write(mu_path, "Trunk edit\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', 'trunk edit', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'trunk edit', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Copy A to A-branch in r3 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', A_path, A_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Branch A to A-branch', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Merge A-branch back to A. This should record the mergeinfo '/A-branch:3' # on A. @@ -15971,7 +15278,7 @@ def record_only_merge_creates_self_referential_mergeinfo(sbox): expected_A_disk, expected_A_status, expected_A_skip, - None, None, None, None, None, 1, 1, + [], True, True, '--record-only', A_path) #---------------------------------------------------------------------- @@ -15991,44 +15298,44 @@ def dav_skelta_mode_causes_spurious_conflicts(sbox): A_branch_path = sbox.ospath('A-branch') C_branch_path = sbox.ospath('A-branch/C') - # r2 - Set some intial properties: + # r2 - Set some initial properties: # # 'dir-prop'='value1' on A/C. # 'svn:eol-style'='native' on A/mu. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'dir-prop', 'initial-val', C_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:eol-style', 'native', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Set some properties', wc_dir) # r3 - Branch 'A' to 'A-branch': - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'copy', A_path, A_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Create a branch of A', wc_dir) # r4 - Make a text mod to 'A/mu' and add new props to 'A/mu' and 'A/C': svntest.main.file_write(mu_path, "The new mu!\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'prop-name', 'prop-val', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'another-dir-prop', 'initial-val', C_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Edit a file and make some prop changes', wc_dir) # r5 - Modify the sole property on 'A-branch/C': - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'dir-prop', 'branch-val', C_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'prop mod on branch', wc_dir) # Now merge r4 from 'A' to 'A-branch'. 
@@ -16055,7 +15362,7 @@ def dav_skelta_mode_causes_spurious_conflicts(sbox): # Summary of conflicts: # Text conflicts: 1 # Property conflicts: 1 - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_branch_path, { 'mu' : Item(status='UU'), 'C' : Item(status=' U'), @@ -16121,7 +15428,7 @@ def dav_skelta_mode_causes_spurious_conflicts(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1) + [], True, True) #---------------------------------------------------------------------- @@ -16165,8 +15472,8 @@ def merge_into_locally_added_file(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - True, True, new_path) + [], True, True, + new_path) sbox.simple_commit() #---------------------------------------------------------------------- @@ -16226,13 +15533,14 @@ def merge_into_locally_added_directory(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - True, True, new_dir_path) + [], True, True, + new_dir_path) sbox.simple_commit() #---------------------------------------------------------------------- # Test for issue #2915 'Handle mergeinfo for subtrees missing due to removal # by non-svn command' +@SkipUnless(server_has_mergeinfo) @Issue(2915) def merge_with_os_deleted_subtrees(sbox): "merge tracking fails if target missing subtrees" @@ -16277,7 +15585,7 @@ def merge_with_os_deleted_subtrees(sbox): "|(.*A_COPY" + re_sep + "C\n)" + \ "|(.*A_COPY" + re_sep + "D" + re_sep + "H" + re_sep + "psi\n)" exit_code, out, err = svntest.actions.run_and_verify_svn( - "Missing subtrees should raise error", [], svntest.verify.AnyOutput, + [], svntest.verify.AnyOutput, 'merge', sbox.repo_url + '/A', A_COPY_path) svntest.verify.verify_outputs("Merge failed but not in the way expected", err, None, err_re + missing, None, @@ -16289,7 +15597,7 @@ def merge_with_os_deleted_subtrees(sbox): 
missing = "|(.*A_COPY" + re_sep + "mu\n)" + \ "|(.*A_COPY" + re_sep + "C\n)" exit_code, out, err = svntest.actions.run_and_verify_svn( - "Missing subtrees should raise error", [], svntest.verify.AnyOutput, + [], svntest.verify.AnyOutput, 'merge', sbox.repo_url + '/A', A_COPY_path, '--depth=immediates') svntest.verify.verify_outputs("Merge failed but not in the way expected", err, None, err_re + missing, None, True) @@ -16299,7 +15607,7 @@ def merge_with_os_deleted_subtrees(sbox): # as missing. missing = "|(.*A_COPY" + re_sep + "mu\n)" exit_code, out, err = svntest.actions.run_and_verify_svn( - "Missing subtrees should raise error", [], svntest.verify.AnyOutput, + [], svntest.verify.AnyOutput, 'merge', sbox.repo_url + '/A', A_COPY_path, '--depth=files') svntest.verify.verify_outputs("Merge failed but not in the way expected", err, None, err_re + missing, None, True) @@ -16308,7 +15616,6 @@ def merge_with_os_deleted_subtrees(sbox): # Only the...oh, wait, the target is present and that is as deep # as the merge goes, so this merge should succeed! 
svntest.actions.run_and_verify_svn( - "Depth empty merge should succeed as long at the target is present", svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path, '--depth=empty') @@ -16338,45 +15645,43 @@ def no_self_referential_or_nonexistent_inherited_mergeinfo(sbox): # r7 - Add the file A/C/nu svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Add file', wc_dir) # r8 - Sync merge A to A_COPY svntest.actions.run_and_verify_svn( - "Synch merge failed unexpectedly", svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Sync A_COPY with A', wc_dir) # r9 - Add the subtree A/D/J # A/D/J/zeta - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', J_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', J_path) svntest.main.file_write(zeta_path, "This is the file 'zeta'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', zeta_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'add', zeta_path) + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Add subtree', wc_dir) # Update the WC in preparation for merges. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # r10 - Sync merge A to A_COPY svntest.actions.run_and_verify_svn( - "Synch merge failed unexpectedly", svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Sync A_COPY with A', wc_dir) # r11 - Text changes to A/C/nu and A/D/J/zeta. svntest.main.file_write(nu_path, "This is the EDITED file 'nu'.\n") svntest.main.file_write(zeta_path, "This is the EDITED file 'zeta'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Edit added files', wc_dir) # Update the WC in preparation for merges. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # This test is marked as XFail because the following two merges # create mergeinfo with both non-existent path-revs and self-referential @@ -16412,8 +15717,7 @@ def no_self_referential_or_nonexistent_inherited_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Merge all available revisions from A/D/J to A_COPY/D/J. 
Like the # previous merge, the target should not have any non-existent ('/A/D/J:2-8') @@ -16443,8 +15747,7 @@ def no_self_referential_or_nonexistent_inherited_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) #---------------------------------------------------------------------- # Test for issue #3756 'subtree merge can inherit invalid working mergeinfo', @@ -16470,26 +15773,25 @@ def subtree_merges_inherit_invalid_working_mergeinfo(sbox): # r7 - Add the file A/C/nu svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Add file', wc_dir) # r8 Merge c7 from A to A_COPY. svntest.actions.run_and_verify_svn( - "Merge failed unexpectedly", svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path, '-c7') - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Merge subtree file addition', wc_dir) # r9 - A text change to A/C/nu. svntest.main.file_write(nu_path, "This is the EDITED file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'Edit added file', wc_dir) # Update the WC in preparation for merges. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Now do two merges. The first, r3 to the root of the branch A_COPY. # This creates working mergeinfo '/A:3,7' on A_COPY. 
Then do a subtree @@ -16503,15 +15805,12 @@ def subtree_merges_inherit_invalid_working_mergeinfo(sbox): # Currently this test is marked as XFail because the resulting mergeinfo is # '/A/C/nu:3,7,9' and thus includes a non-existent path-rev. svntest.actions.run_and_verify_svn( - "Merge failed unexpectedly", svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A', A_COPY_path, '-c3') svntest.actions.run_and_verify_svn( - "Merge failed unexpectedly", svntest.verify.AnyOutput, [], 'merge', sbox.repo_url + '/A/C/nu', nu_COPY_path, '-c9') svntest.actions.run_and_verify_svn( - "Subtree merge under working merge produced the wrong mergeinfo", '/A/C/nu:9', [], 'pg', SVN_PROP_MERGEINFO, nu_COPY_path) @@ -16519,6 +15818,7 @@ def subtree_merges_inherit_invalid_working_mergeinfo(sbox): # Test for issue #3686 'executable flag not correctly set on merge' # See http://subversion.tigris.org/issues/show_bug.cgi?id=3686 @Issue(3686) +@SkipUnless(server_has_mergeinfo) @SkipUnless(svntest.main.is_posix_os) def merge_change_to_file_with_executable(sbox): "executable flag is maintained during binary merge" @@ -16534,18 +15834,18 @@ def merge_change_to_file_with_executable(sbox): beta_path = sbox.ospath('A/B/E/beta') # Force one of the files to be a binary type - svntest.actions.run_and_verify_svn2(None, None, + svntest.actions.run_and_verify_svn2(None, binary_mime_type_on_text_file_warning, 0, 'propset', 'svn:mime-type', 'application/octet-stream', alpha_path) # Set the 'svn:executable' property on both files - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:executable', 'ON', beta_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:executable', 'ON', alpha_path) @@ -16565,7 +15865,7 @@ def merge_change_to_file_with_executable(sbox): raise svntest.Failure("beta is not marked as executable before commit") # Create the branch - 
svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', trunk_url, sbox.repo_url + '/branch', '-m', "Creating the Branch") @@ -16577,7 +15877,7 @@ def merge_change_to_file_with_executable(sbox): # Re-root the WC at the branch svntest.main.safe_rmtree(wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url + '/branch', wc_dir) # Recalculate the paths @@ -16617,8 +15917,7 @@ def merge_change_to_file_with_executable(sbox): expected_disk, expected_status, expected_skip, - None, None, None, - None, None, True, True) + [], True, True) # Verify the executable bit has been set @@ -16662,8 +15961,7 @@ def dry_run_merge_conflicting_binary(sbox): 'A/theta' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # Make the "other" working copy other_wc = sbox.add_wc_path('other') @@ -16679,8 +15977,7 @@ def dry_run_merge_conflicting_binary(sbox): 'A/theta' : Item(status=' ', wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # In second working copy, append different content to the binary # and attempt to 'svn merge -r 2:3'. 
@@ -16728,8 +16025,8 @@ def dry_run_merge_conflicting_binary(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - True, True, '--allow-mixed-revisions', + [], True, True, + '--allow-mixed-revisions', other_wc) #---------------------------------------------------------------------- @@ -16746,19 +16043,19 @@ def foreign_repos_prop_conflict(sbox): other_repo_dir, other_repo_url = sbox.add_repo_path("other") other_wc_dir = sbox.add_wc_path("other") svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'co', other_repo_url, + svntest.actions.run_and_verify_svn(None, [], 'co', other_repo_url, other_wc_dir) # Add properties in the first repos and commit. sbox.simple_propset('red', 'rojo', 'A/D/G') sbox.simple_propset('yellow', 'amarillo', 'A/D/G') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'spenglish', wc_dir) # Tweak properties in the first repos and commit. sbox.simple_propset('red', 'rosso', 'A/D/G') sbox.simple_propset('yellow', 'giallo', 'A/D/G') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'engtalian', wc_dir) # Now, merge the propchange to the *second* working copy. @@ -16766,8 +16063,7 @@ def foreign_repos_prop_conflict(sbox): "A", "D", "G"))] expected_output = expected_merge_output([[3]], expected_output, True, prop_conflicts=1) - svntest.actions.run_and_verify_svn(None, - expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-c3', sbox.repo_url, other_wc_dir) @@ -16793,36 +16089,36 @@ def merge_adds_subtree_with_mergeinfo(sbox): # r8 - Add the file A_COPY/C/nu. 
svntest.main.file_write(nu_COPY_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'add', nu_COPY_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add a file on the A_COPY branch', wc_dir) # r9 - Cherry pick r8 from A_COPY to A. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A_COPY', A_path, '-c8') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r8 from A_COPY to A', wc_dir) # r10 - Make a modification to A_COPY/C/nu svntest.main.file_append(nu_COPY_path, "More work on the A_COPY branch.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Some work on the A_COPY branch', wc_dir) # r9 - Cherry pick r10 from A_COPY/C/nu to A/C/nu. Make some # changes to A/C/nu before committing the merge. 
- svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A_COPY/C/nu', nu_path, '-c10') svntest.main.file_append(nu_path, "A faux conflict resolution.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r8 from A_COPY to A', wc_dir) # Sync merge A to A_COPY_2 - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_COPY2_path, { 'B/E/beta' : Item(status='U '), 'C/nu' : Item(status='A '), @@ -16900,8 +16196,7 @@ def merge_adds_subtree_with_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, False) + check_props=True) #---------------------------------------------------------------------- # A test for issue #3978 'reverse merge which adds subtree fails'. @@ -16922,26 +16217,26 @@ def reverse_merge_adds_subtree(sbox): H_COPY_path = sbox.ospath('A_COPY/D/H') # r7 - Delete A\D\H\chi - svntest.actions.run_and_verify_svn(None, None, [], 'delete', chi_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'delete', chi_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Delete a file', wc_dir) # r8 - Merge r7 from A to A_COPY - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', A_COPY_path, '-c7') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Cherry-pick r7 from A to A_COPY', wc_dir) # r9 - File depth sync merge from A/D/H to A_COPY/D/H/ # This shallow merge does not create non-inheritable mergeinfo because of # the issue #4057 fix; all subtrees affected by the diff are present, so # non-inheritable mergeinfo is not required. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A/D/H', H_COPY_path, '--depth', 'files') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Cherry-pick r7 from A to A_COPY', wc_dir) # Reverse merge r7 from A to A_COPY @@ -16973,7 +16268,7 @@ def reverse_merge_adds_subtree(sbox): # ..\..\..\subversion\libsvn_subr\mergeinfo.c:504: (apr_err=200022) # ..\..\..\subversion\libsvn_subr\kitchensink.c:57: (apr_err=200022) # svn: E200022: Negative revision number found parsing '-7' - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_COPY_path, { 'D/H/chi' : Item(status='A '), }) @@ -17036,8 +16331,7 @@ def reverse_merge_adds_subtree(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, False) + check_props=True) #---------------------------------------------------------------------- # A test for issue #3989 'merge which deletes file with native eol-style @@ -17057,22 +16351,22 @@ def merged_deletion_causes_tree_conflict(sbox): H_branch_path = sbox.ospath('branch/D/H') # r2 - Set svn:eol-style native on A/D/H/psi - svntest.actions.run_and_verify_svn(None, None, [], 'ps', 'svn:eol-style', + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:eol-style', 'native', psi_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Set eol-style native on a path', wc_dir) # r3 - Branch ^/A to ^/branch - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.repo_url + '/A', sbox.repo_url + '/branch', '-m', 'Copy ^/A to 
^/branch') - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # r4 - Delete A/D/H/psi - svntest.actions.run_and_verify_svn(None, None, [], 'delete', psi_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'delete', psi_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Delete a a path with native eol-style', wc_dir) @@ -17081,7 +16375,7 @@ def merged_deletion_causes_tree_conflict(sbox): # branch/D/H/psi is, ignoring differences caused by svn:eol-style, identical # to ^/A/D/H/psi when the latter was deleted, so the deletion should merge # cleanly. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(H_branch_path, { 'psi' : Item(status='D '), }) @@ -17110,8 +16404,7 @@ def merged_deletion_causes_tree_conflict(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, False) + check_props=True) #---------------------------------------------------------------------- # A test for issue #3976 'record-only merges which add new subtree mergeinfo @@ -17132,7 +16425,7 @@ def record_only_merge_adds_new_subtree_mergeinfo(sbox): H_COPY2_path = sbox.ospath('A_COPY_2/D/H') # r7 - Copy ^/A_COPY to ^/A_COPY_2 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', '-m', 'copy A_COPY to A_COPY_2', sbox.repo_url + '/A_COPY', sbox.repo_url + '/A_COPY_2') @@ -17144,13 +16437,13 @@ def record_only_merge_adds_new_subtree_mergeinfo(sbox): svntest.main.run_svn(None, 'commit', '-m', 'set svn:eol-style', wc_dir) # r9 - Merge r3 from ^/A/D/H/psi to A_COPY/D/H/psi. 
- svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A/D/H/psi', psi_COPY_path, '-c3') svntest.main.run_svn(None, 'commit', '-m', 'Subtree merge', wc_dir) # r10 - Merge r8 from ^/A/D/H/psi to A_COPY/D/H/psi. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A/D/H/psi', psi_COPY_path, '-c8') svntest.main.run_svn(None, 'commit', '-m', 'Subtree merge', wc_dir) @@ -17163,7 +16456,7 @@ def record_only_merge_adds_new_subtree_mergeinfo(sbox): # 2) The mergeinfo '/A/D/H/psi:8' from r10. # # 3) The mergeinfo '/A_COPY/D/H/psi:10' describing the merge itself. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(H_COPY2_path, { 'psi' : Item(status=' U'), }) @@ -17197,64 +16490,7 @@ def record_only_merge_adds_new_subtree_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, False) - -#---------------------------------------------------------------------- -# Setup helper for issue #4056 and issue #4057 tests. -def noninheritable_mergeinfo_test_set_up(sbox): - '''Starting with standard greek tree, copy 'A' to 'branch' in r2 and - then made a file edit to A/B/lambda in r3. - Return (expected_output, expected_mergeinfo_output, expected_elision_output, - expected_status, expected_disk, expected_skip) for a merge of - r3 from ^/A/B to branch/B.''' - - sbox.build() - wc_dir = sbox.wc_dir - - lambda_path = sbox.ospath('A/B/lambda') - B_branch_path = sbox.ospath('branch/B') - - # r2 - Branch ^/A to ^/branch. - svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A', - sbox.repo_url + '/branch', '-m', 'make a branch') - - # r3 - Make an edit to A/B/lambda. 
- svntest.main.file_write(lambda_path, "trunk edit.\n") - svntest.main.run_svn(None, 'commit', '-m', 'file edit', wc_dir) - svntest.main.run_svn(None, 'up', wc_dir) - - expected_output = wc.State(B_branch_path, { - 'lambda' : Item(status='U '), - }) - expected_mergeinfo_output = wc.State(B_branch_path, { - '' : Item(status=' U'), - 'lambda' : Item(status=' U'), - }) - expected_elision_output = wc.State(B_branch_path, { - 'lambda' : Item(status=' U'), - }) - expected_status = wc.State(B_branch_path, { - '' : Item(status=' M'), - 'lambda' : Item(status='M '), - 'E' : Item(status=' '), - 'E/alpha' : Item(status=' '), - 'E/beta' : Item(status=' '), - 'F' : Item(status=' '), - }) - expected_status.tweak(wc_rev='3') - expected_disk = wc.State('', { - '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3'}), - 'lambda' : Item("trunk edit.\n"), - 'E' : Item(), - 'E/alpha' : Item("This is the file 'alpha'.\n"), - 'E/beta' : Item("This is the file 'beta'.\n"), - 'F' : Item(), - }) - expected_skip = wc.State(B_branch_path, {}) - - return expected_output, expected_mergeinfo_output, expected_elision_output, \ - expected_status, expected_disk, expected_skip + check_props=True) #---------------------------------------------------------------------- @@ -17298,7 +16534,7 @@ def unnecessary_noninheritable_mergeinfo_missing_subtrees(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1, + [], True, True, B_branch_path) #---------------------------------------------------------------------- @@ -17341,20 +16577,19 @@ def unnecessary_noninheritable_mergeinfo_shallow_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1, + [], True, True, '--depth', 'files', B_branch_path) # Revert the merge and then make a prop change to A/B/E in r4. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, - ["property 'prop:name' set on '" + + svntest.actions.run_and_verify_svn(["property 'prop:name' set on '" + E_path + "'\n"], [], 'ps', 'prop:name', 'propval', E_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'A new property on a dir', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', sbox.wc_dir) # Merge r4 from ^/A/B to branch/B at operational depth=immediates @@ -17406,7 +16641,7 @@ def unnecessary_noninheritable_mergeinfo_shallow_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, 1, 1, + [], True, True, '--depth', 'immediates', B_branch_path) #---------------------------------------------------------------------- @@ -17436,14 +16671,14 @@ def svnmucc_abuse_1(sbox): sbox.simple_commit() ## r5: fail to revert it - svntest.actions.run_and_verify_svnmucc(None, None, [], + svntest.actions.run_and_verify_svnmucc(None, [], '-m', 'r5', '-U', sbox.repo_url, 'rm', 'A', 'cp', 'HEAD', 'A', 'A') ## r6: really revert it - svntest.actions.run_and_verify_svnmucc(None, None, [], + svntest.actions.run_and_verify_svnmucc(None, [], '-m', 'r6', '-U', sbox.repo_url, 'rm', 'A', @@ -17496,12 +16731,11 @@ def merge_source_with_replacement(sbox): svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'del', A_path) svntest.main.run_svn(None, 'copy', sbox.repo_url + '/A@5', A_path) - svntest.main.run_svn(None, 'ci', '-m', - 'Replace A with older version of itself', wc_dir) + sbox.simple_commit(message='Replace A with older version of itself') # r8: Make an edit to A/D/H/omega: svntest.main.file_write(omega_path, "New content for 'omega'.\n") - svntest.main.run_svn(None, 'ci', '-m', 'file edit', wc_dir) + 
sbox.simple_commit(message='file edit') # Update and sync merge ^/A to A_COPY. # @@ -17540,14 +16774,13 @@ def merge_source_with_replacement(sbox): 'U ' + psi_COPY_path + '\n', ' U ' + A_COPY_path + '\n', ' G ' + A_COPY_path + '\n',]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/A', A_COPY_path) # Misleading notifications are one thing, incorrect mergeinfo is quite # another. - svntest.actions.run_and_verify_svn(None, - [A_COPY_path + ' - /A:2-5,7-8\n'], + svntest.actions.run_and_verify_svn([A_COPY_path + ' - /A:2-5,7-8\n'], [], 'pg', SVN_PROP_MERGEINFO, '-R', A_COPY_path) @@ -17564,7 +16797,7 @@ def merge_source_with_replacement(sbox): ' U ' + A_COPY_path + '\n', ' G ' + A_COPY_path + '\n',], elides=True) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/A', A_COPY_path, '-r8:1') @@ -17594,26 +16827,25 @@ def reverse_merge_with_rename(sbox): wc_disk, wc_status = set_up_branch(sbox) # r7 - Rename ^/A to ^/trunk. - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 7.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 7.\n'], [], 'move', sbox.repo_url + '/A', sbox.repo_url + '/trunk', '-m', "Rename 'A' to 'trunk'") - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # r8 - Make and edit to trunk/D/H/omega (which was also edited in r6). svntest.main.file_write(omega_path, "Edit 'omega' on trunk.\n") - svntest.main.run_svn(None, 'ci', '-m', 'Another omega edit', wc_dir) + sbox.simple_commit(message='Another omega edit') # r9 - Sync merge ^/trunk to A_COPY. 
- svntest.actions.run_and_verify_svn(None, - None, # Don't check stdout, we test this + svntest.actions.run_and_verify_svn(None, # Don't check stdout, we test this # type of merge to death elsewhere. [], 'merge', sbox.repo_url + '/trunk', A_COPY_path) - svntest.main.run_svn(None, 'ci', '-m', 'Sync A_COPY with ^/trunk', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + sbox.simple_commit(message='Sync A_COPY with ^/trunk') + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Reverse merge -r9:1 from ^/trunk to A_COPY. This should return # A_COPY to the same state it had prior to the sync merge in r2. @@ -17649,7 +16881,7 @@ def reverse_merge_with_rename(sbox): 'U ' + psi_COPY_path + '\n', ' U ' + A_COPY_path + '\n', ' G ' + A_COPY_path + '\n',], elides=True) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/trunk', A_COPY_path, '-r9:1') @@ -17673,18 +16905,18 @@ def merge_adds_then_deletes_subtree(sbox): nu_branch_path = sbox.ospath('branch/C/nu') # Make a branch. - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.repo_url + '/A', sbox.repo_url + '/branch', '-m', 'Make a branch.') # On the branch parent: Add a file in r3 and then delete it in r4. 
svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Add a file') - svntest.actions.run_and_verify_svn(None, None, [], 'delete', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'delete', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Delete a file') # Merge r3 and r4 from ^/A/C to branch/C as part of one merge @@ -17715,9 +16947,8 @@ def merge_adds_then_deletes_subtree(sbox): # svn: E155010: The node 'C:\SVN\src-trunk\Debug\subversion\tests # \cmdline\svn-test-work\working_copies\merge_tests-128\branch\C\nu' # was not found. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3],[4]], ['A ' + nu_branch_path + '\n', 'D ' + nu_branch_path + '\n', @@ -17752,35 +16983,35 @@ def merge_with_added_subtrees_with_mergeinfo(sbox): wc_disk, wc_status = set_up_branch(sbox, nbr_of_branches=2) # r8 - Add a subtree under A. - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', '--parents', + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '--parents', Z_path) svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Add a subtree on our "trunk"') # r9 - Sync ^/A to the first branch A_COPY. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Sync ^/A to ^/A_COPY') # r10 - Make some edits on the first branch. - svntest.actions.run_and_verify_svn(None, None, [], 'ps', 'branch-prop-foo', + svntest.actions.run_and_verify_svn(None, [], 'ps', 'branch-prop-foo', 'bar', Y_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', W_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'mkdir', W_COPY_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Make some edits on "branch 1"') # r11 - Cherry-pick r10 on the first branch back to A, but # do so at depth=empty so non-inheritable mergeinfo is created. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c10', '--depth=empty', sbox.repo_url + '/A_COPY/C/X/Y', Y_path) svntest.actions.run_and_verify_svn( - None, None, [], 'ci', wc_dir, + None, [], 'ci', wc_dir, '-m', 'Depth empty subtree cherry pick from "branch 1" to "trunk"') # Sync ^/A to the second branch A_COPY_2. 
@@ -17821,7 +17052,7 @@ def merge_with_added_subtrees_with_mergeinfo(sbox): # svn:mergeinfo # /A/C/X/Y/Z:8-11 # ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_output = wc.State(A_COPY2_path, { 'B/E/beta' : Item(status='U '), 'D/G/rho' : Item(status='U '), @@ -17899,8 +17130,7 @@ def merge_with_added_subtrees_with_mergeinfo(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) + check_props=True) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -17923,34 +17153,33 @@ def merge_with_externals_with_mergeinfo(sbox): wc_disk, wc_status = set_up_branch(sbox) svntest.main.file_write(mu_COPY_path, "branch edit") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'file edit on the branch', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Create a file external under 'A' and set some bogus mergeinfo # on it (the fact that this mergeinfo is bogus has no bearing on # this test). 
- svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:externals', '^/iota file-external', A_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'set file external', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'ps', SVN_PROP_MERGEINFO, + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO, "/bogus-mergeinfo:5", file_external_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'set mergeinfo on file external', file_external_path) # Sync merge ^/A to A_COPY and then reintegrate A_COPY back to A. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'sync merge', wc_dir) # This was segfaulting, see # http://svn.haxx.se/dev/archive-2012-10/0364.shtml svntest.actions.run_and_verify_svn( - None, expected_merge_output(None, ['U ' + mu_path + '\n', ' U ' + A_path + '\n'], @@ -18020,7 +17249,6 @@ def merge_binary_file_with_keywords(sbox): # merge back svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3,4]], ['C ' + sbox.ospath(file_mod_both) + '\n', 'U ' + sbox.ospath(file_mod_src) + '\n', @@ -18063,7 +17291,6 @@ def merge_conflict_when_keywords_removed(sbox): # sync merge svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3,4]], ['U '+ sbox.ospath('A2/keyfile') + '\n', ' U A2\n']), @@ -18078,7 +17305,6 @@ def merge_conflict_when_keywords_removed(sbox): # sync merge again svntest.actions.run_and_verify_svn( - None, 
expected_merge_output([[5,6]], ['UU ' + sbox.ospath('A2/keyfile') + '\n', ' U A2\n']), @@ -18119,7 +17345,7 @@ def merge_target_selection(sbox): '--- Recording mergeinfo for merge of r4 into \'.\':\n', ' U .\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '^/dir', '-c', '4') svntest.main.run_svn(None, 'revert', '-R', '.') @@ -18131,7 +17357,7 @@ def merge_target_selection(sbox): '--- Recording mergeinfo for merge of r4 into \'binary-file\':\n', ' U binary-file\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '^/dir/binary-file', '-c', '4') svntest.main.run_svn(None, 'revert', '-R', '.') @@ -18143,7 +17369,7 @@ def merge_target_selection(sbox): '--- Recording mergeinfo for merge of r4 into \'.\':\n', ' U .\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '^/dir', '-c', '4', '.') svntest.main.run_svn(None, 'revert', '-R', '.') @@ -18155,7 +17381,7 @@ def merge_target_selection(sbox): '--- Recording mergeinfo for merge of r4 into \'binary-file\':\n', ' U binary-file\n', ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '^/dir/binary-file', '-c', '4', 'binary-file') svntest.main.run_svn(None, 'revert', '-R', '.') @@ -18167,7 +17393,7 @@ def merge_target_selection(sbox): '--- Recording mergeinfo for merge of r4 into \'.\':\n', ' U .\n', ] + svntest.main.summary_of_conflicts(tree_conflicts=1) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '^/dir/binary-file', '-c', '4', '.') svntest.main.run_svn(None, 'revert', '-R', '.') @@ -18179,10 +17405,11 @@ def merge_target_selection(sbox): '--- Recording mergeinfo for merge of r4 into \'binary-file\':\n', 
' U binary-file\n', ] + svntest.main.summary_of_conflicts(tree_conflicts=1) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '^/dir', '-c', '4', 'binary-file') -@Issue(3405) +@SkipUnless(server_has_mergeinfo) +@Issue(3405) # seems to be the wrong issue number def merge_properties_on_adds(sbox): "merged directory properties are added" @@ -18200,7 +17427,7 @@ def merge_properties_on_adds(sbox): sbox.simple_commit() sbox.simple_update() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A/D/G', sbox.ospath('G')) expected_output = svntest.verify.UnorderedOutput([ @@ -18215,7 +17442,7 @@ def merge_properties_on_adds(sbox): 'Properties on \'%s\':\n' % sbox.ospath('G/M/file'), ' key\n', ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'proplist', '-R', sbox.ospath('G')) expected_output = svntest.verify.UnorderedOutput([ @@ -18232,7 +17459,7 @@ def merge_properties_on_adds(sbox): # I merged the tree, which should include history but only the files have # the properties stored in PRISTINE. All directories have the properties # as local changes in ACTUAL. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'proplist', '-R', sbox.ospath('G'), '-r', 'BASE') @@ -18404,7 +17631,7 @@ def check_mergeinfo(expected_mergeinfo, tgt_ospath): EXPECTED_MERGEINFO (list of lines). 
""" svntest.actions.run_and_verify_svn( - None, expected_mergeinfo, [], 'pg', SVN_PROP_MERGEINFO, tgt_ospath) + expected_mergeinfo, [], 'pg', SVN_PROP_MERGEINFO, tgt_ospath) def simple_merge(src_path, tgt_ospath, rev_args): """Merge from ^/SRC_PATH to TGT_OSPATH using revision arguments REV_ARGS @@ -18419,7 +17646,7 @@ def simple_merge(src_path, tgt_ospath, rev_args): target=tgt_ospath) src_url = '^/' + src_path svntest.actions.run_and_verify_svn( - None, expected_out, [], + expected_out, [], 'merge', src_url, tgt_ospath, '--accept', 'postpone', *rev_args) @SkipUnless(server_has_mergeinfo) @@ -18463,14 +17690,14 @@ def conflict_aborted_mergeinfo_described_partial_merge(sbox): sbox.simple_commit() # r14: merge some changes to the branch so that later merges will be split - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c5,9', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5,9', '^/' + trunk, sbox.ospath(branch), '--accept', 'theirs-conflict') sbox.simple_commit() sbox.simple_update() def revert_branch(): - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', sbox.ospath(branch)) def try_merge(relpath, conflict_rev, rev_args, @@ -18501,7 +17728,7 @@ def conflict_aborted_mergeinfo_described_partial_merge(sbox): src_url = '^/' + src_path svntest.actions.run_and_verify_svn( - None, expected_out, expected_err, + expected_out, expected_err, 'merge', src_url, tgt_ospath, '--accept', 'postpone', *rev_args) @@ -18645,9 +17872,9 @@ def multiple_editor_drive_merge_notifications(sbox): sbox.simple_commit() # Cherry pick merge r5 and r6 to each branch and commit. 
- svntest.actions.run_and_verify_svn(None, None, [], 'merge', '^/iota', + svntest.actions.run_and_verify_svn(None, [], 'merge', '^/iota', '-c', '5,7', iota_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '^/A/C', + svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A/C', '-c', '5,7', C_branch_path) sbox.simple_commit() @@ -18658,7 +17885,6 @@ def multiple_editor_drive_merge_notifications(sbox): # re checking the merge notification headers -- which need to be improved # at some point. svntest.actions.run_and_verify_svn( - None, ["--- Merging r2 through r4 into '" + C_branch_path + "':\n", " U " + C_branch_path + "\n", "--- Merging r6 into '" + C_branch_path + "':\n", @@ -18693,7 +17919,6 @@ def multiple_editor_drive_merge_notifications(sbox): # --- Recording mergeinfo for merge of r2 through r9 into 'iota-copy': # U iota-copy svntest.actions.run_and_verify_svn( - None, ["--- Merging r2 through r4 into '" + iota_branch_path + "':\n", " U " + iota_branch_path + "\n", "--- Merging r6 into '" + iota_branch_path + "':\n", @@ -18726,7 +17951,7 @@ def single_editor_drive_merge_notifications(sbox): set_up_branch(sbox) # r7 - Subtree merge - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '^/A/D', + svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A/D', '-c4', D_copy_path) sbox.simple_commit() sbox.simple_update() @@ -18766,13 +17991,13 @@ def single_editor_drive_merge_notifications(sbox): " U " + D_copy_path + "\n", "--- Eliding mergeinfo from '" + D_copy_path + "':\n", " U " + D_copy_path + "\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'merge', + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', sbox.repo_url + '/A', A_copy_path) # r8 and r9 - Commit and do reverse subtree merge. 
sbox.simple_commit() sbox.simple_update() - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '^/A/D', + svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A/D', '-c-4', D_copy_path) sbox.simple_commit() @@ -18793,7 +18018,7 @@ def single_editor_drive_merge_notifications(sbox): " U " + D_copy_path + "\n", "--- Eliding mergeinfo from '" + D_copy_path + "':\n", " U " + D_copy_path + "\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'merge', + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '-r9:2', sbox.repo_url + '/A', A_copy_path) @@ -18841,14 +18066,14 @@ def conflicted_split_merge_with_resolve(sbox): sbox.simple_commit() # r14: merge some changes to the branch so that later merges will be split - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '-c5,9', + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5,9', '^/' + trunk, sbox.ospath(branch), '--accept', 'theirs-conflict') sbox.simple_commit() sbox.simple_update() def revert_branch(): - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', sbox.ospath(branch)) def try_merge(relpath, conflict_rev, rev_args, @@ -18879,7 +18104,7 @@ def conflicted_split_merge_with_resolve(sbox): src_url = '^/' + src_path + '@11' svntest.actions.run_and_verify_svn( - None, expected_out, expected_err, + expected_out, expected_err, 'merge', src_url, tgt_ospath, '--accept', 'mine-full', *rev_args) @@ -19021,7 +18246,7 @@ def merge_to_empty_target_merge_to_infinite_target(sbox): C1_path = sbox.ospath('A/C/C1') test3_path = sbox.ospath('A/C/C1/test.txt') - + # r3 - Add some subtrees: # A /A/B/B1 # A /A/B/B1/B1a @@ -19048,16 +18273,16 @@ def merge_to_empty_target_merge_to_infinite_target(sbox): sbox.simple_commit() # r6 - Set depth of A_COPY to empty, merge all available revs from ^/A. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth=empty', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth=infinity', C_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'merge', '^/A', + svntest.actions.run_and_verify_svn(None, [], 'merge', '^/A', A_COPY_path) sbox.simple_commit() # Update A_COPY back to depth infinity and retry the prior merge. - svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth=infinity', A_COPY_path) expected_output = wc.State(A_COPY_path, { @@ -19137,36 +18362,11 @@ def merge_to_empty_target_merge_to_infinite_target(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, - None, 1, 0) + [], True, False) # Commit the merge. #sbox.simple_commit() -def merge_dir_delete_force(sbox): - "merge a directory delete with --force" - - sbox.build() - - sbox.simple_rm('A/D/G') - sbox.simple_commit() # r2 - - sbox.simple_update(revision=1) - - # Just merging r2 on r1 succeeds - svntest.actions.run_and_verify_svn(sbox.wc_dir, None, [], - 'merge', '-c2', '^/', sbox.wc_dir, - '--ignore-ancestry') - - # Bring working copy to r1 again - svntest.actions.run_and_verify_svn(sbox.wc_dir, None, [], - 'revert', '-R', sbox.wc_dir) - - # But when using --force this same merge caused a segfault in 1.8.0-1.8.8 - svntest.actions.run_and_verify_svn(sbox.wc_dir, None, [], - 'merge', '-c2', '^/', sbox.wc_dir, - '--ignore-ancestry', '--force') - def conflict_naming(sbox): "verify conflict file naming" @@ -19196,6 +18396,8 @@ def conflict_naming(sbox): 'file.txt.r2' : Item(contents="This is the initial content\n"), 'file.txt' : Item(contents="<<<<<<< .mine\n" \ "This is conflicting content\n" \ + "||||||| .r3\n" \ + "This is the new content\n" \ "=======\n" \ "This is the initial content\n" \ ">>>>>>> 
.r2\n"), @@ -19204,8 +18406,7 @@ def conflict_naming(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, - None, None, None, + [], False, wc_dir, '-r', '2') sbox.simple_revert('file.txt') @@ -19227,6 +18428,8 @@ def conflict_naming(sbox): 'file.txt.r2.txt' : Item(contents="This is the initial content\n"), 'file.txt' : Item(contents="<<<<<<< .mine.txt\n" \ "This is conflicting content\n" \ + "||||||| .r3.txt\n" \ + "This is the new content\n" \ "=======\n" \ "This is the initial content\n" \ ">>>>>>> .r2.txt\n"), @@ -19235,7 +18438,7 @@ def conflict_naming(sbox): svntest.actions.run_and_verify_update( wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, None, + [], False, wc_dir, '-r', '2', '--config-option', 'config:miscellany:preserved-conflict-file-exts=' + @@ -19256,13 +18459,15 @@ def conflict_naming(sbox): 'file.txt.merge-right.r2': Item(contents="This is the initial content\n"), 'file.txt' : Item(contents="<<<<<<< .working\n" \ "This is conflicting content\n" \ + "||||||| .merge-left.r3\n" \ + "This is the new content\n" \ "=======\n" \ "This is the initial content\n" \ ">>>>>>> .merge-right.r2\n"), 'file.txt.working' : Item(contents="This is conflicting content\n"), }) - svntest.actions.run_and_verify_svn(wc_dir, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c-3', '^/', sbox.ospath('')) svntest.actions.run_and_verify_status(wc_dir, expected_status) svntest.actions.verify_disk(wc_dir, expected_disk) @@ -19281,6 +18486,8 @@ def conflict_naming(sbox): 'file.txt.merge-right.r2.txt': Item(contents="This is the initial content\n"), 'file.txt' : Item(contents="<<<<<<< .working.txt\n" \ "This is conflicting content\n" \ + "||||||| .merge-left.r3.txt\n" \ + "This is the new content\n" \ "=======\n" \ "This is the initial content\n" \ ">>>>>>> .merge-right.r2.txt\n"), @@ -19288,7 +18495,7 @@ def conflict_naming(sbox): }) 
svntest.actions.run_and_verify_svn( - wc_dir, None, [], + None, [], 'merge', '-c-3', '^/', sbox.ospath(''), '--config-option', 'config:miscellany:preserved-conflict-file-exts=' + @@ -19296,6 +18503,30 @@ def conflict_naming(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) svntest.actions.verify_disk(wc_dir, expected_disk) +def merge_dir_delete_force(sbox): + "merge a directory delete with --force" + + sbox.build() + + sbox.simple_rm('A/D/G') + sbox.simple_commit() # r2 + + sbox.simple_update(revision=1) + + # Just merging r2 on r1 succeeds + svntest.actions.run_and_verify_svn(None, [], + 'merge', '-c2', '^/', sbox.wc_dir, + '--ignore-ancestry') + + # Bring working copy to r1 again + svntest.actions.run_and_verify_svn(None, [], + 'revert', '-R', sbox.wc_dir) + + # But when using --force this same merge caused a segfault in 1.8.0-1.8.8 + svntest.actions.run_and_verify_svn(None, [], + 'merge', '-c2', '^/', sbox.wc_dir, + '--ignore-ancestry', '--force') + ######################################################################## # Run the tests @@ -19441,8 +18672,8 @@ test_list = [ None, single_editor_drive_merge_notifications, conflicted_split_merge_with_resolve, merge_to_empty_target_merge_to_infinite_target, - merge_dir_delete_force, conflict_naming, + merge_dir_delete_force, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/merge_tree_conflict_tests.py b/subversion/tests/cmdline/merge_tree_conflict_tests.py index 15287d3..28c1f33 100755 --- a/subversion/tests/cmdline/merge_tree_conflict_tests.py +++ b/subversion/tests/cmdline/merge_tree_conflict_tests.py @@ -43,10 +43,10 @@ Wimp = svntest.testcase.Wimp_deco from svntest.main import SVN_PROP_MERGEINFO from svntest.main import server_has_mergeinfo -from merge_tests import set_up_branch -from merge_tests import svn_copy -from merge_tests import svn_merge -from merge_tests import expected_merge_output +from svntest.mergetrees import set_up_branch +from svntest.mergetrees import 
svn_copy +from svntest.mergetrees import svn_merge +from svntest.mergetrees import expected_merge_output #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -61,7 +61,7 @@ def delete_file_and_dir(sbox): B2_path = os.path.join(wc_dir, 'A', 'B2') B_url = sbox.repo_url + '/A/B' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', B_path, B2_path) expected_output = wc.State(wc_dir, { @@ -78,14 +78,12 @@ def delete_file_and_dir(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Rev 3 delete E and lambda from B E_path = os.path.join(B_path, 'E') lambda_path = os.path.join(B_path, 'lambda') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'delete', E_path, lambda_path) expected_output = wc.State(wc_dir, { @@ -98,15 +96,13 @@ def delete_file_and_dir(sbox): 'A/B/lambda') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) def modify_B2(): # Local mods in B2 B2_E_path = os.path.join(B2_path, 'E') B2_lambda_path = os.path.join(B2_path, 'lambda') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo_val', B2_E_path, B2_lambda_path) expected_status.tweak( @@ -157,8 +153,7 @@ def delete_file_and_dir(sbox): expected_disk, expected_status2, expected_skip, - None, None, None, None, None, - True) + check_props=True) #---------------------------------------------------------------------- # This is a regression for issue #1176. 
@@ -180,10 +175,10 @@ def merge_catches_nonexistent_target(sbox): Q_url = sbox.repo_url + '/A/D/Q' # Copy dir A/D/G to A/D/Q - svntest.actions.run_and_verify_svn(None, None, [], 'cp', G_path, Q_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', G_path, Q_path) svntest.main.file_append(newfile_path, 'This is newfile.\n') - svntest.actions.run_and_verify_svn(None, None, [], 'add', newfile_path) + svntest.actions.run_and_verify_svn(None, [], 'add', newfile_path) # Add newfile to dir G, creating r2. expected_output = wc.State(wc_dir, { @@ -203,8 +198,7 @@ def merge_catches_nonexistent_target(sbox): ### (M)odified child. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Change newfile, creating r3. svntest.main.file_append(newfile_path, 'A change to newfile.\n') @@ -214,8 +208,7 @@ def merge_catches_nonexistent_target(sbox): expected_status.tweak('A/D/Q/newfile', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Merge the change to newfile (from r3) into G, where newfile # doesn't exist. This is a tree conflict (use case 4, see @@ -258,7 +251,7 @@ def merge_catches_nonexistent_target(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, True) + check_props=True) expected_status.add({ 'newfile' : Item(status='! ', treeconflict='C'), @@ -284,21 +277,21 @@ def merge_tree_deleted_in_target(sbox): # Copy B to I, creating r1. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', B_url, I_url, '-m', 'rev 2') # Change some files, creating r2. svntest.main.file_append(alpha_path, 'A change to alpha.\n') svntest.main.file_append(os.path.join(B_path, 'lambda'), 'change lambda.\n') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'rev 3', B_path) # Remove E, creating r3. 
E_url = sbox.repo_url + '/A/I/E' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', E_url, '-m', 'rev 4') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', os.path.join(wc_dir,'A')) expected_output = wc.State(I_path, { @@ -333,8 +326,7 @@ def merge_tree_deleted_in_target(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 0) + check_props=True) expected_status.add({ 'E' : Item(status='! ', treeconflict='C'), }) @@ -354,7 +346,7 @@ def three_way_merge_add_of_existing_binary_file(sbox): # Create a branch of A, creating revision 2. A_url = sbox.repo_url + "/A" branch_A_url = sbox.repo_url + "/copy-of-A" - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], "cp", A_url, branch_A_url, "-m", "Creating copy-of-A") @@ -377,8 +369,7 @@ def three_way_merge_add_of_existing_binary_file(sbox): "A/theta" : Item(status=" ", wc_rev=3), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # In the working copy, attempt to 'svn merge branch_A_url@2 A_url@3 A'. # We should *not* see a conflict during the merge, but an 'A'. 
@@ -423,8 +414,8 @@ def three_way_merge_add_of_existing_binary_file(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 0, '--allow-mixed-revisions', A_path) + [], True, False, + '--allow-mixed-revisions', A_path) #---------------------------------------------------------------------- # Issue #2515 @@ -447,16 +438,16 @@ def merge_added_dir_to_deleted_in_target(sbox): I_path = os.path.join(wc_dir, 'A', 'I') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', B_url, I_url, '-m', 'rev 2') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', F_url, '-m', 'rev 3') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'rev 4', J_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', os.path.join(wc_dir,'A')) expected_output = wc.State(I_path, { @@ -482,9 +473,7 @@ def merge_added_dir_to_deleted_in_target(sbox): expected_elision_output, expected_disk, None, - expected_skip, - None, None, None, None, None, - 0, 0) + expected_skip) #---------------------------------------------------------------------- # Issue 2584 @@ -514,8 +503,7 @@ def merge_add_over_versioned_file_conflicts(sbox): 'A/C/alpha' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) # Merge r1:2 from A/C to A/B/E. This will attempt to add A/C/alpha, # but since A/B/E/alpha already exists we get a tree conflict. 
@@ -573,17 +561,17 @@ def mergeinfo_recording_in_skipped_merge(sbox): expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')}) wc_status.add({'A/mu' : Item(status=' ', wc_rev=3)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Make a modification to A/B/E/alpha svntest.main.file_write(alpha_path, "This is the file 'alpha' modified.\n") expected_output = wc.State(wc_dir, {'A/B/E/alpha' : Item(verb='Sending')}) wc_status.add({'A/B/E/alpha' : Item(status=' ', wc_rev=4)}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - wc_status, None, wc_dir) + wc_status) # Delete A_COPY/B/E - svntest.actions.run_and_verify_svn(None, None, [], 'rm', + svntest.actions.run_and_verify_svn(None, [], 'rm', A_COPY_B_E_path) # Merge /A to /A_COPY ie., r1 to r4 @@ -644,8 +632,7 @@ def mergeinfo_recording_in_skipped_merge(sbox): expected_disk, expected_status, expected_skip, - None, None, None, None, None, - 1, 1) + [], True, True) #---------------------------------------------------------------------- def del_differing_file(sbox): @@ -673,7 +660,7 @@ def del_differing_file(sbox): target = 'A/D/G2' svn_copy(s_rev_orig, source, target) svntest.main.file_append(target+"/tau", "An extra line in the target.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'newprop', 'v', target+"/pi") dir_D = os.path.join('A','D') @@ -693,7 +680,7 @@ def del_differing_file(sbox): target = 'A/D/G3' svn_copy(s_rev_orig, source, target) svntest.main.file_append(target+"/tau", "An extra line in the target.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'newprop', 'v', target+"/pi") sbox.simple_commit(target) @@ -728,18 +715,18 @@ def tree_conflicts_and_obstructions(sbox): br_alpha_moved = os.path.join(branch_path, 'alpha-moved') # Create a branch - 
svntest.actions.run_and_verify_svn(None, None, [], 'cp', + svntest.actions.run_and_verify_svn(None, [], 'cp', trunk_url, sbox.repo_url + '/branch', '-m', "Creating the Branch") - svntest.actions.run_and_verify_svn(None, None, [], 'mv', + svntest.actions.run_and_verify_svn(None, [], 'mv', trunk_url + '/alpha', trunk_url + '/alpha-moved', '-m', "Move alpha to alpha-moved") # Update to revision 2. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) svntest.main.file_write(br_alpha_moved, "I am blocking myself from trunk\n") @@ -1456,12 +1443,12 @@ def merge_replace_setup(sbox): # make a branch of A # svn cp $URL/A $URL/branch - expected_stdout = verify.UnorderedOutput([ - '\n', + expected_stdout = [ + 'Committing transaction...\n', 'Committed revision 2.\n', - ]) + ] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'cp', url_A, + actions.run_and_verify_svn2(expected_stdout, [], 0, 'cp', url_A, url_branch, '-m', 'copy log') # svn up @@ -1534,7 +1521,7 @@ def merge_replace_setup(sbox): }) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # ACTIONS ON THE MERGE SOURCE (branch) # various deletes of files and dirs @@ -1551,7 +1538,7 @@ def merge_replace_setup(sbox): 'D ' + branch_D_H + '\n', ]) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'delete', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'delete', branch_mu, branch_B_E, branch_D_G_pi, branch_D_H) # svn ci @@ -1566,8 +1553,7 @@ def merge_replace_setup(sbox): 'branch/D/H/chi', 'branch/D/H/psi', 'branch/D/G/pi', 'branch/B/E', 'branch/B/E/beta', 'branch/B/E/alpha') - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # svn up expected_output = svntest.wc.State(wc_dir, {}) @@ -1579,7 +1565,7 @@ def 
merge_replace_setup(sbox): expected_status.tweak(wc_rev='3') actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # replacements. # file-with-file @@ -1589,33 +1575,33 @@ def merge_replace_setup(sbox): # svn add branch/mu expected_stdout = ['A ' + branch_mu + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'add', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'add', branch_mu) # dir-with-dir # svn mkdir branch/B/E expected_stdout = ['A ' + branch_B_E + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'mkdir', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', branch_B_E) # svn ps propname propval branch/B/E expected_stdout = ["property 'propname' set on '" + branch_B_E + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'propname', 'propval', branch_B_E) # file-with-dir # svn mkdir branch/D/G/pi expected_stdout = ['A ' + branch_D_G_pi + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'mkdir', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', branch_D_G_pi) # svn ps propname propval branch/D/G/pi expected_stdout = ["property 'propname' set on '" + branch_D_G_pi + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'propname', 'propval', branch_D_G_pi) # dir-with-file @@ -1625,7 +1611,7 @@ def merge_replace_setup(sbox): # svn add branch/D/H expected_stdout = ['A ' + branch_D_H + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'add', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'add', branch_D_H) # svn ci @@ -1643,8 +1629,7 @@ def merge_replace_setup(sbox): 'branch/mu' : Item(status=' ', wc_rev='4'), }) - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, 
- None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) return expected_disk, expected_status @@ -1686,7 +1671,7 @@ def merge_replace_causes_tree_conflict(sbox): # svn ps propname otherpropval A/B/E expected_stdout = ["property 'propname' set on '" + A_B_E + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'propname', 'otherpropval', A_B_E) # echo modified > A/D/G/pi @@ -1695,7 +1680,7 @@ def merge_replace_causes_tree_conflict(sbox): # svn ps propname propval A/D/H expected_stdout = ["property 'propname' set on '" + A_D_H + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'ps', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'ps', 'propname', 'propval', A_D_H) # svn merge $URL/A $URL/branch A @@ -1709,7 +1694,7 @@ def merge_replace_causes_tree_conflict(sbox): ' U ' + A + '\n', ], target=A, two_url=True, tree_conflicts=4) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'merge', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge', url_A, url_branch, A) # svn st @@ -1775,7 +1760,7 @@ def merge_replace_causes_tree_conflict2(sbox): 'D ' + os.path.join(A_D_H, 'psi') + '\n', 'D ' + A_D_H + '\n', ]) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'delete', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'delete', A_mu, A_B_E, A_D_G_pi, A_D_H) expected_status.tweak('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/D/G/pi', 'A/D/H', 'A/D/H/chi', 'A/D/H/omega', 'A/D/H/psi', @@ -1795,7 +1780,7 @@ def merge_replace_causes_tree_conflict2(sbox): " U " + A_mu + "\n", # mergeinfo -> 'RM' status ], target=A, two_url=True, tree_conflicts=1) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'merge', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge', url_A, url_branch, A, '--depth=files') # New mergeinfo describing the merge. 
expected_status.tweak('A', status=' M') @@ -1815,7 +1800,7 @@ def merge_replace_causes_tree_conflict2(sbox): " U " + A_B + "\n", # mergeinfo ], target=A_B, two_url=True, tree_conflicts=1) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'merge', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge', url_A_B, url_branch_B, A_B) # New mergeinfo describing the merge. expected_status.tweak('A/B', status=' M') @@ -1835,7 +1820,7 @@ def merge_replace_causes_tree_conflict2(sbox): " U " + A_D + "\n", # mergeinfo ], target=A_D, two_url=True, tree_conflicts=1) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'merge', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge', '--depth=immediates', url_A_D, url_branch_D, A_D) # New mergeinfo describing the merge. expected_status.tweak('A/D', 'A/D/G', status=' M') @@ -1855,7 +1840,7 @@ def merge_replace_causes_tree_conflict2(sbox): " U " + A_D_G + "\n", # mergeinfo ], target=A_D_G, two_url=True, tree_conflicts=1) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'merge', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge', url_A_D_G, url_branch_D_G, A_D_G) # New mergeinfo describing the merge. 
expected_status.tweak('A/D/G', status=' M') @@ -1876,7 +1861,7 @@ def merge_replace_causes_tree_conflict2(sbox): '|( \>.*)' tree_conflicted_path = [A_B_E, A_mu, A_D_G_pi, A_D_H] for path in tree_conflicted_path: - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'st', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'st', '--depth=empty', path) #---------------------------------------------------------------------- @@ -1894,25 +1879,25 @@ def merge_replace_on_del_fails(sbox): C_branch_path = os.path.join(wc_dir, 'branch', 'C') # r2 - Copy ^/A to ^/branch - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.repo_url + '/A', sbox.repo_url + '/branch', '-m', 'Create a branch') # r3 - Replace A/C - svntest.actions.run_and_verify_svn(None, None, [], 'del', C_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', C_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'del', C_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', C_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Replace A/C', wc_dir) # r4 - Delete branch/C - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'del', C_branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'del', C_branch_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Delete branch/C', wc_dir) # Sync merge ^/A to branch - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_stdout = expected_merge_output([[2,4]], [ ' C ' + C_branch_path + '\n', # merge ' U ' + branch_path + '\n', # mergeinfo @@ -1946,38 +1931,190 @@ def merge_replace_on_del_fails(sbox): # svn: E155010: The 
node 'C:\SVN\src-trunk\Debug\subversion\tests\ # cmdline\svn-test-work\working_copies\merge_tree_conflict_tests-24\ # branch\C' was not found. - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'merge', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'merge', sbox.repo_url + '/A', branch_path) +def merge_conflict_details(sbox): + "merge conflict details" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_append('A/B/E/new', 'new\n') + sbox.simple_add('A/B/E/new') + sbox.simple_append('A/B/E/alpha', '\nextra\nlines\n') + sbox.simple_rm('A/B/E/beta', 'A/B/F') + sbox.simple_propset('key', 'VAL', 'A/B/E', 'A/B') + sbox.simple_mkdir('A/B/E/new-dir1') + sbox.simple_mkdir('A/B/E/new-dir2') + sbox.simple_mkdir('A/B/E/new-dir3') + sbox.simple_rm('A/B/lambda') + sbox.simple_mkdir('A/B/lambda') + sbox.simple_commit() + + sbox.simple_update('', 1) + + sbox.simple_move('A/B', 'B') + + sbox.simple_propset('key', 'vAl', 'B') + sbox.simple_move('B/E/beta', 'beta') + sbox.simple_propset('a', 'b', 'B/F', 'B/lambda') + sbox.simple_append('B/E/alpha', 'other\nnew\nlines') + sbox.simple_mkdir('B/E/new') + sbox.simple_mkdir('B/E/new-dir1') + sbox.simple_append('B/E/new-dir2', 'something') + sbox.simple_append('B/E/new-dir3', 'something') + sbox.simple_add('B/E/new-dir3') + + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.add({ + 'B' : Item(status=' C', copied='+', moved_from='A/B', + wc_rev='-', entry_status='AC'), + 'B/E' : Item(status=' M', copied='+', wc_rev='-'), + 'B/E/new' : Item(status='A ', treeconflict='C', wc_rev='-'), + 'B/E/beta' : Item(status='D ', copied='+', treeconflict='C', + wc_rev='-', moved_to='beta'), + 'B/E/alpha' : Item(status='C ', copied='+', wc_rev='-'), + 'B/E/new-dir3' : Item(status='A ', treeconflict='C', wc_rev='-'), + 'B/E/new-dir1' : Item(status='A ', treeconflict='C', wc_rev='-'), + 'B/F' : Item(status=' M', copied='+', treeconflict='C', + wc_rev='-'), + 'B/lambda' : Item(status=' M', copied='+', 
treeconflict='C', + wc_rev='-'), + 'beta' : Item(status='A ', copied='+', + moved_from='B/E/beta', wc_rev='-') + }) + expected_status.tweak('A/B', status='D ', wc_rev='1', moved_to='B') + expected_status.tweak('A/B/lambda', 'A/B/E', 'A/B/E/beta', 'A/B/E/alpha', + 'A/B/F', status='D ') + + expected_output = svntest.wc.State(wc_dir, { + 'B' : Item(status=' C'), + 'B/E' : Item(status=' U'), + 'B/E/new' : Item(status=' ', treeconflict='C'), + 'B/E/beta' : Item(status=' ', treeconflict='C'), + 'B/E/alpha' : Item(status='C '), + 'B/E/new-dir3' : Item(status=' ', treeconflict='C'), + 'B/E/new-dir1' : Item(status=' ', treeconflict='C'), + 'B/F' : Item(status=' ', treeconflict='C'), + 'B/lambda' : Item(status=' ', treeconflict='C'), + }) + expected_skip = wc.State(wc_dir, { + 'B/E/new-dir2' : Item(verb='Skipped'), + }) + svntest.actions.run_and_verify_merge(sbox.ospath('B'), + 1, 2, '^/A/B', '^/A/B', + expected_output, + None, None, + None, None, expected_skip) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + expected_info = [ + { + "Path" : re.escape(sbox.ospath('B')), + + "Conflict Properties File" : + re.escape(sbox.ospath('B/dir_conflicts.prej')) + '.*', + "Conflict Details": re.escape( + 'incoming dir edit upon merge' + + ' Source left: (dir) ^/A/B@1' + + ' Source right: (dir) ^/A/B@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E')), + }, + { + "Path" : re.escape(sbox.ospath('B/E/alpha')), + "Conflict Previous Base File" : '.*alpha.*', + "Conflict Previous Working File" : '.*alpha.*', + "Conflict Current Base File": '.*alpha.*', + "Conflict Details": re.escape( + 'incoming file edit upon merge' + + ' Source left: (file) ^/A/B/E/alpha@1' + + ' Source right: (file) ^/A/B/E/alpha@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E/beta')), + "Tree conflict": re.escape( + 'local file moved away, incoming file delete or move upon merge' + + ' Source left: (file) ^/A/B/E/beta@1' + + ' Source right: (none) ^/A/B/E/beta@2') + }, + { + "Path" : 
re.escape(sbox.ospath('B/E/new')), + "Tree conflict": re.escape( + 'local dir add, incoming file add upon merge' + + ' Source left: (none) ^/A/B/E/new@1' + + ' Source right: (file) ^/A/B/E/new@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E/new-dir1')), + "Tree conflict": re.escape( + 'local dir add, incoming dir add upon merge' + + ' Source left: (none) ^/A/B/E/new-dir1@1' + + ' Source right: (dir) ^/A/B/E/new-dir1@2') + }, + #{ ### Skipped + # "Path" : re.escape(sbox.ospath('B/E/new-dir2')), + # "Tree conflict": re.escape( + # 'local file unversioned, incoming dir add upon merge' + + # ' Source left: (none) ^/A/B/E/new-dir2@1' + + # ' Source right: (dir) ^/A/B/E/new-dir2@2') + #}, + { + "Path" : re.escape(sbox.ospath('B/E/new-dir3')), + "Tree conflict": re.escape( + 'local file add, incoming dir add upon merge' + + ' Source left: (none) ^/A/B/E/new-dir3@1' + + ' Source right: (dir) ^/A/B/E/new-dir3@2') + }, + { + "Path" : re.escape(sbox.ospath('B/F')), + "Tree conflict": re.escape( + 'local dir edit, incoming dir delete or move upon merge' + + ' Source left: (dir) ^/A/B/F@1' + + ' Source right: (none) ^/A/B/F@2') + }, + { + "Path" : re.escape(sbox.ospath('B/lambda')), + "Tree conflict": re.escape( + 'local file edit, incoming replace with dir upon merge' + + ' Source left: (file) ^/A/B/lambda@1' + + ' Source right: (dir) ^/A/B/lambda@2') + }, + ] + + svntest.actions.run_and_verify_info(expected_info, sbox.ospath('B'), + '--depth', 'infinity') + def merge_obstruction_recording(sbox): "merge obstruction recording" - sbox.build() + sbox.build(empty=True) wc_dir = sbox.wc_dir - sbox.simple_rm('iota', 'A') sbox.simple_mkdir('trunk') sbox.simple_mkdir('branches') - sbox.simple_commit() #r2 + sbox.simple_commit() #r1 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.repo_url + '/trunk', sbox.repo_url + '/branches/branch', - '-mCopy') # r3 + '-mCopy') # r2 sbox.simple_mkdir('trunk/dir') 
sbox.simple_add_text('The file on trunk\n', 'trunk/dir/file.txt') - sbox.simple_commit() #r4 + sbox.simple_commit() #r3 sbox.simple_update() sbox.simple_mkdir('branches/branch/dir') sbox.simple_add_text('The file on branch\n', 'branches/branch/dir/file.txt') - sbox.simple_commit() #r5 + sbox.simple_commit() #r4 sbox.simple_update() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'switch', '^/branches/branch', wc_dir, '--ignore-ancestry') @@ -1987,23 +2124,21 @@ def merge_obstruction_recording(sbox): }) expected_mergeinfo_output = wc.State(wc_dir, { '' : Item(status=' U'), - 'dir' : Item(status=' U'), # Because dir already exists }) expected_elision_output = wc.State(wc_dir, { }) expected_disk = wc.State('', { 'dir/file.txt' : Item(contents="The file on branch\n"), - 'dir' : Item(props={'svn:mergeinfo':''}), - '.' : Item(props={'svn:mergeinfo':'/trunk:3-5'}), + '.' : Item(props={'svn:mergeinfo':'/trunk:2-4'}), }) expected_status = wc.State(wc_dir, { - '' : Item(status=' M', wc_rev='5'), - 'dir' : Item(status=' M', treeconflict='C', wc_rev='5'), - 'dir/file.txt' : Item(status=' ', wc_rev='5'), + '' : Item(status=' M', wc_rev='4'), + 'dir' : Item(status=' ', treeconflict='C', wc_rev='4'), + 'dir/file.txt' : Item(status=' ', wc_rev='4'), }) expected_skip = wc.State('', { }) - svntest.actions.run_and_verify_merge(wc_dir, '2', '5', sbox.repo_url + '/trunk', + svntest.actions.run_and_verify_merge(wc_dir, '1', '4', sbox.repo_url + '/trunk', None, expected_output, expected_mergeinfo_output, @@ -2017,8 +2152,8 @@ def merge_obstruction_recording(sbox): "Path" : re.escape(sbox.ospath('dir')), "Tree conflict": re.escape( 'local dir obstruction, incoming dir add upon merge' + - ' Source left: (dir) ^/trunk/dir@2' + # Should be '(none)' - ' Source right: (dir) ^/trunk/dir@5') + ' Source left: (none) ^/trunk/dir@1' + + ' Source right: (dir) ^/trunk/dir@4') }, ] @@ -2027,38 +2162,210 @@ def merge_obstruction_recording(sbox): # How 
should the user handle this conflict? # ### Would be nice if we could just accept mine (leave as is, fix mergeinfo) # ### or accept theirs (delete what is here and insert copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=working', sbox.ospath('dir')) # Redo the skipped merge as record only merge expected_output = [ - '--- Recording mergeinfo for merge of r5 into \'%s\':\n' % \ + '--- Recording mergeinfo for merge of r4 into \'%s\':\n' % \ sbox.ospath('dir'), - ' G %s\n' % sbox.ospath('dir'), + ' U %s\n' % sbox.ospath('dir'), ] # ### Why are r1-r3 not recorded? # ### Guess: Because dir's history only exists since r4. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'merge', '--record-only', sbox.repo_url + '/trunk/dir', sbox.ospath('dir'), - '-c', '2-5') + '-c', '1-4') expected_disk = wc.State('', { - 'dir' : Item(props={'svn:mergeinfo':'/trunk/dir:5'}), + 'dir' : Item(props={'svn:mergeinfo':'/trunk/dir:4'}), 'dir/file.txt' : Item(contents="The file on branch\n"), - '.' : Item(props={'svn:mergeinfo':'/trunk:3-5'}), + '.' : Item(props={'svn:mergeinfo':'/trunk:2-4'}), }) svntest.actions.verify_disk(wc_dir, expected_disk, check_props=True) - # Because r2-r4 are not recorded, the mergeinfo is not elided :( + # Because r1-r3 are not recorded, the mergeinfo is not elided :( # Even something like a two url merge wouldn't work, because dir - # didn't exist below trunk in r2 either. + # didn't exist below trunk in r1 either. # A resolver action could be smarter though... 
+def added_revision_recording_in_tree_conflict(sbox): + "tree conflict stores added revision for victim" + + sbox.build(empty=True) + wc_dir = sbox.wc_dir + + sbox.simple_mkdir('trunk') + sbox.simple_commit() #r1 + + # Create a branch + svntest.actions.run_and_verify_svn(None, [], + 'copy', sbox.repo_url + '/trunk', + sbox.repo_url + '/branch', + '-mcopy') # r2 + + sbox.simple_add_text('The file on trunk\n', 'trunk/foo') + sbox.simple_commit() #r3 + + sbox.simple_update() + + # Merge ^/trunk into ^/branch + expected_output = svntest.wc.State(sbox.ospath('branch'), { + 'foo' : Item(status='A '), + }) + expected_mergeinfo_output = wc.State(sbox.ospath('branch'), { + '' : Item(status=' U') + }) + expected_elision_output = wc.State(wc_dir, { + }) + expected_disk = wc.State('', { + 'foo' : Item(contents="The file on trunk\n"), + '.' : Item(props={u'svn:mergeinfo': u'/trunk:2-3'}), + }) + expected_status = wc.State(sbox.ospath('branch'), { + '' : Item(status=' M', wc_rev='3'), + 'foo' : Item(status='A ', copied='+', wc_rev='-'), + }) + expected_skip = wc.State('', { + }) + svntest.actions.run_and_verify_merge(sbox.ospath('branch'), None, None, + sbox.repo_url + '/trunk', + None, + expected_output, + expected_mergeinfo_output, + expected_elision_output, + expected_disk, + expected_status, + expected_skip, + check_props=True) + + sbox.simple_commit() #r4 + + # Edit the file on the branch + sbox.simple_append('branch/foo', 'The file on the branch\n') + sbox.simple_commit() #r5 + + # Replace file with a directory on trunk + sbox.simple_rm('trunk/foo') + sbox.simple_mkdir('trunk/foo') + sbox.simple_commit() #r6 + + sbox.simple_update() + + # Merge ^/trunk into ^/branch + expected_output = svntest.wc.State(sbox.ospath('branch'), { + 'foo' : Item(status=' ', treeconflict='C') + }) + expected_mergeinfo_output = wc.State(sbox.ospath('branch'), { + '' : Item(status=' U'), + }) + expected_elision_output = wc.State(wc_dir, { + }) + expected_disk = wc.State('', { + 'foo' : 
Item(contents="The file on trunk\nThe file on the branch\n"), + '.' : Item(props={u'svn:mergeinfo': u'/trunk:2-6'}), + }) + expected_status = wc.State(sbox.ospath('branch'), { + '' : Item(status=' M', wc_rev='6'), + 'foo' : Item(status=' ', treeconflict='C', wc_rev='6'), + }) + expected_skip = wc.State('', { + }) + svntest.actions.run_and_verify_merge(sbox.ospath('branch'), None, None, + sbox.repo_url + '/trunk', + None, + expected_output, + expected_mergeinfo_output, + expected_elision_output, + expected_disk, + expected_status, + expected_skip, + check_props=True) + + # Ensure that revisions in tree conflict info match what we expect. + # We used to record source left as ^/trunk/foo@1 instead of ^/trunk/foo@3. + # Note that foo was first added in r3. + expected_info = [ + { + "Path" : re.escape(sbox.ospath('branch/foo')), + "Tree conflict": re.escape( + 'local file edit, incoming replace with dir upon merge' + + ' Source left: (file) ^/trunk/foo@3' + + ' Source right: (dir) ^/trunk/foo@6'), + }, + ] + svntest.actions.run_and_verify_info(expected_info, sbox.ospath('branch/foo')) + +def spurios_tree_conflict_with_added_file(sbox): + "spurious tree conflict with unmodified added file" + + sbox.build() + wc_dir = sbox.wc_dir + + # Create a branch of A, A_copy + sbox.simple_copy('A', 'A_branch') + sbox.simple_commit() + + # Create a new file on the trunk + sbox.simple_append('A/new', 'new\n') + sbox.simple_add('A/new') + sbox.simple_commit() + + # Sync the branch with the trunk + sbox.simple_update() + expected_output = wc.State(wc_dir, { + "A_branch/new" : Item(status="A "), + }) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_merge(sbox.ospath('A_branch'), + None, None, '^/A', None, + expected_output, + None, None, + None, None, expected_skip) + sbox.simple_commit() + + # Reintegrate the branch (a no-op change, but users are free to do this) + sbox.simple_update() + expected_output = wc.State(wc_dir, { }) + 
svntest.actions.run_and_verify_merge(sbox.ospath('A'), + None, None, '^/A_branch', None, + expected_output, + None, None, + None, None, expected_skip, + [], False, True, '--reintegrate', + sbox.ospath('A')) + + # Delete the new file on the branch + sbox.simple_rm('A_branch/new') + sbox.simple_commit() + + # Make an unrelated change on the trunk + sbox.simple_append('A/mu', 'more text\n') + sbox.simple_commit() + + # Merge the trunk to the branch. Forcing a reintegrate merge here since + # this is what the automatic merge does, as of the time this test was written. + # This merge would raise an 'local missing vs incoming edit' tree conflict + # on the new file, which is bogus since there are no incoming edits. + expected_output = wc.State(wc_dir, { + 'A_branch/mu' : Item(status='U '), + }) + expected_mergeinfo_output = wc.State(wc_dir, { + 'A_branch' : Item(status=' U'), + }) + svntest.actions.run_and_verify_merge(sbox.ospath('A_branch'), + None, None, '^/A', None, + expected_output, + expected_mergeinfo_output, None, + None, None, expected_skip, + [], False, True, '--reintegrate', + sbox.ospath('A_branch')) + ######################################################################## # Run the tests @@ -2090,7 +2397,10 @@ test_list = [ None, merge_replace_causes_tree_conflict, merge_replace_causes_tree_conflict2, merge_replace_on_del_fails, + merge_conflict_details, merge_obstruction_recording, + added_revision_recording_in_tree_conflict, + spurios_tree_conflict_with_added_file, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/mergeinfo_tests.py b/subversion/tests/cmdline/mergeinfo_tests.py index adb7925..328a9f2 100755 --- a/subversion/tests/cmdline/mergeinfo_tests.py +++ b/subversion/tests/cmdline/mergeinfo_tests.py @@ -44,10 +44,9 @@ exp_noop_up_out = svntest.actions.expected_noop_update_output from svntest.main import SVN_PROP_MERGEINFO from svntest.main import server_has_mergeinfo -# Get a couple merge helpers from merge_tests.py -import 
merge_tests -from merge_tests import set_up_branch -from merge_tests import expected_merge_output +# Get a couple merge helpers +from svntest.mergetrees import set_up_branch +from svntest.mergetrees import expected_merge_output def adjust_error_for_server_version(expected_err): "Return the expected error regexp appropriate for the server version." @@ -75,6 +74,7 @@ def no_mergeinfo(sbox): sbox.repo_url + '/A2', "--show-revs=merged") +@SkipUnless(server_has_mergeinfo) def mergeinfo(sbox): "'mergeinfo' on a path with mergeinfo" @@ -89,7 +89,7 @@ def mergeinfo(sbox): sbox.simple_update() # Dummy up some mergeinfo. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', SVN_PROP_MERGEINFO, '/A:3', sbox.ospath('A2')) svntest.actions.run_and_verify_mergeinfo(adjust_error_for_server_version(""), @@ -173,6 +173,7 @@ def mergeinfo_non_source(sbox): #---------------------------------------------------------------------- # Issue #3138 +@SkipUnless(server_has_mergeinfo) @Issue(3138) def mergeinfo_on_unknown_url(sbox): "mergeinfo of an unknown url should return error" @@ -182,20 +183,21 @@ def mergeinfo_on_unknown_url(sbox): # remove a path from the repo and commit. iota_path = os.path.join(wc_dir, 'iota') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', iota_path) - svntest.actions.run_and_verify_svn("", None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', iota_path) + svntest.actions.run_and_verify_svn(None, [], "ci", wc_dir, "-m", "log message") url = sbox.repo_url + "/iota" expected_err = adjust_error_for_server_version(".*File not found.*iota.*|" ".*iota.*path not found.*") - svntest.actions.run_and_verify_svn("", None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, "mergeinfo", "--show-revs", "eligible", url, wc_dir) # Test for issue #3126 'svn mergeinfo shows too few or too many # eligible revisions'. Specifically # http://subversion.tigris.org/issues/show_bug.cgi?id=3126#desc5. 
+@SkipUnless(server_has_mergeinfo) @Issue(3126) def non_inheritable_mergeinfo(sbox): "non-inheritable mergeinfo shows as merged" @@ -211,11 +213,10 @@ def non_inheritable_mergeinfo(sbox): # Update the WC, then merge r4 from A to A_COPY and r6 from A to A_COPY # at --depth empty and commit the merges as r7. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(6), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(6), [], 'up', wc_dir) expected_status.tweak(wc_rev=6) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[4]], ['U ' + rho_COPY_path + '\n', ' U ' + A_COPY_path + '\n',]), @@ -223,7 +224,6 @@ def non_inheritable_mergeinfo(sbox): sbox.repo_url + '/A', A_COPY_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[6]], ' G ' + A_COPY_path + '\n'), [], 'merge', '-c6', sbox.repo_url + '/A', @@ -234,10 +234,10 @@ def non_inheritable_mergeinfo(sbox): }) expected_status.tweak('A_COPY', 'A_COPY/D/G/rho', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update the WC a last time to ensure full inheritance. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', wc_dir) # Despite being non-inheritable, r6 should still show as merged to A_COPY @@ -294,26 +294,25 @@ def recursive_mergeinfo(sbox): nu2_path = os.path.join(wc_dir, "A2", "C", "nu2") # Rename A to A2 in r7. - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(6), [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(6), [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'ren', A_path, A2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'rename A to A2') # Add the files A/B/F/nu and A/C/nu2 and commit them as r8. 
svntest.main.file_write(nu_path, "A new file.\n") svntest.main.file_write(nu2_path, "Another new file.\n") svntest.main.run_svn(None, "add", nu_path, nu2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Add 2 new files') # Do several merges to create varied subtree mergeinfo # Merge r4 from A2 to A_COPY at depth empty - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(8), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(8), [], 'up', wc_dir) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[4]], ' U ' + A_COPY_path + '\n'), [], 'merge', '-c4', '--depth', 'empty', sbox.repo_url + '/A2', @@ -321,7 +320,6 @@ def recursive_mergeinfo(sbox): # Merge r6 from A2/D/H to A_COPY/D/H svntest.actions.run_and_verify_svn( - None, expected_merge_output([[6]], ['U ' + omega_COPY_path + '\n', ' G ' + H_COPY_path + '\n']), @@ -331,7 +329,6 @@ def recursive_mergeinfo(sbox): # Merge r5 from A2 to A_COPY svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['U ' + beta_COPY_path + '\n', ' G ' + A_COPY_path + '\n', @@ -345,7 +342,6 @@ def recursive_mergeinfo(sbox): # Reverse merge -r5 from A2/C to A_COPY/C leaving empty mergeinfo on # A_COPY/C. 
svntest.actions.run_and_verify_svn( - None, expected_merge_output([[-5]], ' G ' + C_COPY_path + '\n'), [], 'merge', '-c-5', @@ -353,7 +349,6 @@ def recursive_mergeinfo(sbox): # Merge r8 from A2/B/F to A_COPY/B/F svntest.actions.run_and_verify_svn( - None, expected_merge_output([[8]], ['A ' + nu_COPY_path + '\n', ' G ' + F_COPY_path + '\n']), @@ -362,9 +357,9 @@ def recursive_mergeinfo(sbox): F_COPY_path) # Commit everything this far as r9 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Many merges') - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(9), [], 'up', + svntest.actions.run_and_verify_svn(exp_noop_up_out(9), [], 'up', wc_dir) # Test svn mergeinfo -R / --depth infinity. @@ -459,26 +454,24 @@ def mergeinfo_on_pegged_wc_path(sbox): # # r7 - Merge -c3,6 from A to A_COPY. svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3],[6]], ['U ' + psi_COPY_path + '\n', 'U ' + omega_COPY_path + '\n', ' U ' + A_COPY_path + '\n', ' G ' + A_COPY_path + '\n',]), [], 'merge', '-c3,6', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Merge r3 and r6') # r8 - Merge -c5 from A to A_COPY. 
svntest.actions.run_and_verify_svn( - None, expected_merge_output([[5]], ['U ' + beta_COPY_path + '\n', ' U ' + A_COPY_path + '\n']), [], 'merge', '-c5', '--allow-mixed-revisions', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Merge r5') @@ -572,19 +565,19 @@ def wc_target_inherits_mergeinfo_from_repos(sbox): # Properties on 'A_COPY': # svn:mergeinfo # /A:7 - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A/D/G/rho', rho_COPY_path, '-c5') - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', A_COPY_path, '-c7') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Cherrypicks to branch subtree and root', wc_dir) # Checkout a new wc rooted at ^/A_COPY/D. subtree_wc = sbox.add_wc_path('D_COPY') - svntest.actions.run_and_verify_svn(None, None, [], 'co', + svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url + '/A_COPY/D', subtree_wc) @@ -620,10 +613,10 @@ def wc_target_inherits_mergeinfo_from_repos(sbox): # source. 
# # In r9 make a change that effects two branches: - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) svntest.main.file_write(gamma_2_path, "New content.\n") svntest.main.file_write(tau_path, "New content.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Make changes under both A and A_COPY_2', wc_dir) @@ -638,10 +631,10 @@ def wc_target_inherits_mergeinfo_from_repos(sbox): # Properties on 'A_COPY\D\G\rho': # svn:mergeinfo # /A/D/G/rho:5 - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A_COPY_2', A_COPY_path, '-c9') - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r8 from A_COPY_2 to A_COPY', wc_dir) @@ -671,8 +664,8 @@ def wc_target_inherits_mergeinfo_from_repos(sbox): '--show-revs', 'merged', '-R') # Test while the target is the full WC and then with the subtree WC: - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', subtree_wc) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', subtree_wc) test_svn_mergeinfo_4_way(D_COPY_path) test_svn_mergeinfo_4_way(subtree_wc) @@ -695,27 +688,27 @@ def natural_history_is_not_eligible_nor_merged(sbox): # r7 - Add a new file A/C/nu svntest.main.file_write(nu_path, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', nu_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'add', nu_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add a file', wc_dir) # r8 - Sync merge ^/A to A_COPY - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + 
svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add a file', wc_dir) # r9 - Modify the file added in r7 svntest.main.file_write(nu_path, "Modification to file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Modify added file', wc_dir) # r10 - Merge ^/A/C/nu to A_COPY/C/nu, creating subtree mergeinfo. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A/C/nu', nu_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Add a file', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # We've effectively merged everything from ^/A to A_COPY, check # that svn mergeinfo -R agrees. @@ -756,13 +749,13 @@ def noninheritable_mergeinfo_not_always_eligible(sbox): svntest.main.run_svn(None, 'up', wc_dir) # r4 - Merge r3 from ^/A to branch at depth=empty. - svntest.actions.run_and_verify_svn(None, None, [], 'merge', + svntest.actions.run_and_verify_svn(None, [], 'merge', sbox.repo_url + '/A', branch_path, '-c3', '--depth=empty') # Forcibly set non-inheritable mergeinfo to replicate the pre-1.8 behavior, # where prior to the fix for issue #4057, non-inheritable mergeinfo was # unconditionally set for merges with shallow operational depths. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', SVN_PROP_MERGEINFO, '/A:3*\n', branch_path) svntest.main.run_svn(None, 'commit', '-m', 'shallow merge', wc_dir) @@ -780,9 +773,184 @@ def noninheritable_mergeinfo_not_always_eligible(sbox): [], sbox.repo_url + '/A', sbox.repo_url + '/branch', '--show-revs', 'eligible', '-R') +@SkipUnless(server_has_mergeinfo) +@Issue(4301) +def mergeinfo_local_move(sbox): + "'mergeinfo' on a locally moved path" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_move('A', 'A2') + svntest.actions.run_and_verify_svn(None, [], + 'mergeinfo', sbox.repo_url + '/A', + sbox.ospath('A2')) + +@SkipUnless(server_has_mergeinfo) +@Issue(4582) +def no_mergeinfo_on_tree_conflict_victim(sbox): + "do not record mergeinfo on tree conflict victims" + sbox.build() + + # Create a branch of A called A_copy + sbox.simple_copy('A', 'A_copy') + sbox.simple_commit() + + # Add a new directory and file on both branches + sbox.simple_mkdir('A/dir') + sbox.simple_add_text('new file', 'A/dir/f') + sbox.simple_commit() + + sbox.simple_mkdir('A_copy/dir') + sbox.simple_add_text('new file', 'A_copy/dir/f') + sbox.simple_commit() + + # Run a merge from A to A_copy + expected_output = wc.State(sbox.ospath('A_copy'), { + 'dir' : Item(status=' ', treeconflict='C'), + 'dir/f' : Item(status=' ', treeconflict='A'), + }) + expected_mergeinfo_output = wc.State(sbox.ospath('A_copy'), { + '' : Item(status=' U'), + }) + expected_elision_output = wc.State(sbox.ospath('A_copy'), { + }) + + expected_disk = svntest.wc.State('', { + 'C' : Item(), + 'B/E/beta' : Item(contents="This is the file 'beta'.\n"), + 'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"), + 'B/lambda' : Item(contents="This is the file 'lambda'.\n"), + 'B/F' : Item(), + 'D/H/omega' : Item(contents="This is the file 'omega'.\n"), + 'D/H/psi' : Item(contents="This is the file 'psi'.\n"), + 'D/H/chi' : Item(contents="This is the file 
'chi'.\n"), + 'D/G/tau' : Item(contents="This is the file 'tau'.\n"), + 'D/G/pi' : Item(contents="This is the file 'pi'.\n"), + 'D/G/rho' : Item(contents="This is the file 'rho'.\n"), + 'D/gamma' : Item(contents="This is the file 'gamma'.\n"), + 'dir/f' : Item(contents="new file"), + 'mu' : Item(contents="This is the file 'mu'.\n"), + }) + + # The merge will create an add vs add tree conflict on A_copy/dir + expected_status = svntest.wc.State(sbox.ospath('A_copy'), { + '' : Item(status=' M', wc_rev='4'), + 'D' : Item(status=' ', wc_rev='4'), + 'D/G' : Item(status=' ', wc_rev='4'), + 'D/G/pi' : Item(status=' ', wc_rev='4'), + 'D/G/rho' : Item(status=' ', wc_rev='4'), + 'D/G/tau' : Item(status=' ', wc_rev='4'), + 'D/H' : Item(status=' ', wc_rev='4'), + 'D/H/psi' : Item(status=' ', wc_rev='4'), + 'D/H/omega' : Item(status=' ', wc_rev='4'), + 'D/H/chi' : Item(status=' ', wc_rev='4'), + 'D/gamma' : Item(status=' ', wc_rev='4'), + 'B' : Item(status=' ', wc_rev='4'), + 'B/F' : Item(status=' ', wc_rev='4'), + 'B/E' : Item(status=' ', wc_rev='4'), + 'B/E/alpha' : Item(status=' ', wc_rev='4'), + 'B/E/beta' : Item(status=' ', wc_rev='4'), + 'B/lambda' : Item(status=' ', wc_rev='4'), + 'C' : Item(status=' ', wc_rev='4'), + 'dir' : Item(status=' ', treeconflict='C', wc_rev='4'), + 'dir/f' : Item(status=' ', wc_rev='4'), + 'mu' : Item(status=' ', wc_rev='4'), + }) + + expected_skip = wc.State('', { }) + + sbox.simple_update('A_copy') + svntest.actions.run_and_verify_merge(sbox.ospath('A_copy'), + None, None, # rev1, rev2 + '^/A', + None, # URL2 + expected_output, + expected_mergeinfo_output, + expected_elision_output, + expected_disk, + expected_status, + expected_skip) + + # Resolve the tree conflict by accepting the working copy state left + # behind by the merge. 
This preserves the line of history of A_copy/dir, + which originated on the branch 'A_copy', rather than replacing it with + the line of history of A/dir which originated on branch 'A' + svntest.actions.run_and_verify_resolve([sbox.ospath('A_copy/dir')], + '--accept', 'working', + sbox.ospath('A_copy/dir')) + sbox.simple_commit('A_copy') + + # Now try to merge the 'A_copy' branch back to 'A' + expected_output = wc.State(sbox.ospath('A'), { + 'dir' : Item(status='R '), # changes line of history of A/dir + 'dir/f' : Item(status='A '), + }) + expected_mergeinfo_output = wc.State(sbox.ospath('A'), { + '' : Item(status=' U'), + }) + expected_elision_output = wc.State(sbox.ospath('A'), { + }) + + expected_disk = svntest.wc.State('', { + 'C' : Item(), + 'B/E/beta' : Item(contents="This is the file 'beta'.\n"), + 'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"), + 'B/F' : Item(), + 'B/lambda' : Item(contents="This is the file 'lambda'.\n"), + 'D/H/omega' : Item(contents="This is the file 'omega'.\n"), + 'D/H/psi' : Item(contents="This is the file 'psi'.\n"), + 'D/H/chi' : Item(contents="This is the file 'chi'.\n"), + 'D/G/tau' : Item(contents="This is the file 'tau'.\n"), + 'D/G/pi' : Item(contents="This is the file 'pi'.\n"), + 'D/G/rho' : Item(contents="This is the file 'rho'.\n"), + 'D/gamma' : Item(contents="This is the file 'gamma'.\n"), + 'dir/f' : Item(contents="new file"), + 'mu' : Item(contents="This is the file 'mu'.\n"), + }) + + expected_status = svntest.wc.State(sbox.ospath('A'), { + '' : Item(status=' M', wc_rev='5'), + 'dir' : Item(status='R ', copied='+', wc_rev='-'), + 'dir/f' : Item(status=' ', copied='+', wc_rev='-'), + 'D' : Item(status=' ', wc_rev='5'), + 'D/H' : Item(status=' ', wc_rev='5'), + 'D/H/chi' : Item(status=' ', wc_rev='5'), + 'D/H/omega' : Item(status=' ', wc_rev='5'), + 'D/H/psi' : Item(status=' ', wc_rev='5'), + 'D/G' : Item(status=' ', wc_rev='5'), + 'D/G/pi' : Item(status=' ', wc_rev='5'), + 'D/G/rho' : Item(status=' ', 
wc_rev='5'), + 'D/G/tau' : Item(status=' ', wc_rev='5'), + 'D/gamma' : Item(status=' ', wc_rev='5'), + 'B' : Item(status=' ', wc_rev='5'), + 'B/E' : Item(status=' ', wc_rev='5'), + 'B/E/beta' : Item(status=' ', wc_rev='5'), + 'B/E/alpha' : Item(status=' ', wc_rev='5'), + 'B/lambda' : Item(status=' ', wc_rev='5'), + 'B/F' : Item(status=' ', wc_rev='5'), + 'mu' : Item(status=' ', wc_rev='5'), + 'C' : Item(status=' ', wc_rev='5'), + }) + + expected_skip = wc.State('', { }) + sbox.simple_update('A') + svntest.actions.run_and_verify_merge(sbox.ospath('A'), + None, None, # rev1, rev2 + '^/A_copy', + None, # URL2 + expected_output, + expected_mergeinfo_output, + expected_elision_output, + expected_disk, + expected_status, + expected_skip) + sbox.simple_commit('A') + ######################################################################## # Run the tests +# Note that mergeinfo --log is tested in log_tests.py # list all tests here, starting with None: test_list = [ None, @@ -797,6 +965,8 @@ test_list = [ None, wc_target_inherits_mergeinfo_from_repos, natural_history_is_not_eligible_nor_merged, noninheritable_mergeinfo_not_always_eligible, + mergeinfo_local_move, + no_mergeinfo_on_tree_conflict_victim, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/mod_authz_svn_tests.py b/subversion/tests/cmdline/mod_authz_svn_tests.py new file mode 100644 index 0000000..d04690f --- /dev/null +++ b/subversion/tests/cmdline/mod_authz_svn_tests.py @@ -0,0 +1,1073 @@ +#!/usr/bin/env python +# +# mod_authz_svn_tests.py: testing mod_authz_svn +# +# Subversion is a tool for revision control. +# See http://subversion.apache.org for more information. +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +###################################################################### + +# General modules +import os, re, logging + +logger = logging.getLogger() + +# Our testing module +import svntest + +# (abbreviation) +Skip = svntest.testcase.Skip_deco +SkipUnless = svntest.testcase.SkipUnless_deco +XFail = svntest.testcase.XFail_deco +Issues = svntest.testcase.Issues_deco +Issue = svntest.testcase.Issue_deco +Wimp = svntest.testcase.Wimp_deco + +ls_of_D_no_H = '''<html><head><title>repos - Revision 1: /A/D</title></head> +<body> + <h2>repos - Revision 1: /A/D</h2> + <ul> + <li><a href="../">..</a></li> + <li><a href="G/">G/</a></li> + <li><a href="gamma">gamma</a></li> + </ul> +</body></html>''' + +ls_of_D_H = '''<html><head><title>repos - Revision 1: /A/D</title></head> +<body> + <h2>repos - Revision 1: /A/D</h2> + <ul> + <li><a href="../">..</a></li> + <li><a href="G/">G/</a></li> + <li><a href="H/">H/</a></li> + <li><a href="gamma">gamma</a></li> + </ul> +</body></html>''' + +ls_of_H = '''<html><head><title>repos - Revision 1: /A/D/H</title></head> +<body> + <h2>repos - Revision 1: /A/D/H</h2> + <ul> + <li><a href="../">..</a></li> + <li><a href="chi">chi</a></li> + <li><a href="omega">omega</a></li> + <li><a href="psi">psi</a></li> + </ul> +</body></html>''' + +user1 = svntest.main.wc_author +user1_upper = user1.upper() +user1_pass = svntest.main.wc_passwd +user1_badpass = 'XXX' +assert user1_pass != 
user1_badpass, "Passwords can't match" +user2 = svntest.main.wc_author2 +user2_upper = user2.upper() +user2_pass = svntest.main.wc_passwd +user2_badpass = 'XXX' +assert user2_pass != user2_badpass, "Passwords can't match" + +def write_authz_file(sbox): + svntest.main.write_authz_file(sbox, { + '/': '$anonymous = r\n' + + 'jrandom = rw\n' + + 'jconstant = rw', + '/A/D/H': '$anonymous =\n' + + '$authenticated =\n' + + 'jrandom = rw' + }) + +def write_authz_file_groups(sbox): + authz_name = sbox.authz_name() + svntest.main.write_authz_file(sbox,{ + '/': '* =', + }) + +def verify_get(test_area_url, path, user, pw, + expected_status, expected_body, headers): + import httplib + from urlparse import urlparse + import base64 + + req_url = test_area_url + path + + loc = urlparse(req_url) + + if loc.scheme == 'http': + h = httplib.HTTPConnection(loc.hostname, loc.port) + else: + h = httplib.HTTPSConnection(loc.hostname, loc.port) + + if headers is None: + headers = {} + + if user and pw: + auth_info = user + ':' + pw + headers['Authorization'] = 'Basic ' + base64.b64encode(auth_info) + else: + auth_info = "anonymous" + + h.request('GET', req_url, None, headers) + + r = h.getresponse() + + actual_status = r.status + if expected_status and expected_status != actual_status: + + logger.warn("Expected status '" + str(expected_status) + + "' but got '" + str(actual_status) + + "' on url '" + req_url + "' (" + + auth_info + ").") + raise svntest.Failure + + if expected_body: + actual_body = r.read() + if expected_body != actual_body: + logger.warn("Expected body:") + logger.warn(expected_body) + logger.warn("But got:") + logger.warn(actual_body) + logger.warn("on url '" + req_url + "' (" + auth_info + ").") + raise svntest.Failure + +def verify_gets(test_area_url, tests): + for test in tests: + verify_get(test_area_url, test['path'], test.get('user'), test.get('pw'), + test['status'], test.get('body'), test.get('headers')) + + 
+###################################################################### +# Tests +# +# Each test must return on success or raise on failure. + + +#---------------------------------------------------------------------- + + +@SkipUnless(svntest.main.is_ra_type_dav) +def anon(sbox): + "test anonymous access" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/anon') + + write_authz_file(sbox) + + anon_tests = ( + { 'path': '', 'status': 301 }, + { 'path': '/', 'status': 200 }, + { 'path': '/repos', 'status': 301 }, + { 'path': '/repos/', 'status': 200 }, + { 'path': '/repos/A', 'status': 301 }, + { 'path': '/repos/A/', 'status': 200 }, + { 'path': '/repos/A/D', 'status': 301 }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H }, + { 'path': '/repos/A/D/gamma', 'status': 200 }, + { 'path': '/repos/A/D/H', 'status': 403 }, + { 'path': '/repos/A/D/H/', 'status': 403 }, + { 'path': '/repos/A/D/H/chi', 'status': 403 }, + # auth isn't configured so nothing should change when passing + # authn details + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 
'user': user1, 'pw': user1_pass}, + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '', 'status': 301, 'user': user2, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/', 'status': 200, 
'user': user2, 'pw': user2_badpass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_badpass}, + ) + + verify_gets(test_area_url, anon_tests) + + +@SkipUnless(svntest.main.is_ra_type_dav) +def mixed(sbox): + "test mixed anonymous and authenticated access" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/mixed') + + write_authz_file(sbox) + + mixed_tests = ( + { 'path': '', 'status': 301, }, + { 'path': '/', 'status': 200, }, + { 'path': '/repos', 'status': 301, }, + { 'path': '/repos/', 'status': 200, }, + { 'path': '/repos/A', 'status': 301, }, + { 'path': '/repos/A/', 'status': 200, }, + { 'path': '/repos/A/D', 'status': 301, }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + }, + { 'path': '/repos/A/D/gamma', 'status': 200, }, + { 'path': '/repos/A/D/H', 'status': 401, }, + { 'path': '/repos/A/D/H/', 'status': 401, }, + { 'path': '/repos/A/D/H/chi', 'status': 401, }, + # auth is configured and user1 is allowed access to H + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': 
user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass}, + # try with the wrong password for user1 + { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass}, + # auth is configured and user2 is not allowed access to H + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 
200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass}, + # try with the wrong password for user2 + { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass}, + ) + + verify_gets(test_area_url, mixed_tests) + +@SkipUnless(svntest.main.is_ra_type_dav) +@XFail(svntest.main.is_httpd_authz_provider_enabled) +# uses the AuthzSVNNoAuthWhenAnonymousAllowed On directive +# this is broken with httpd 2.3.x+ since it requires the auth system to accept +# r->user == NULL and there is a test for this in server/request.c 
now. It +# was intended as a workaround for the lack of Satisfy Any in 2.3.x+ which +# was resolved by httpd with mod_access_compat in 2.3.x+. +def mixed_noauthwhenanon(sbox): + "test mixed with noauthwhenanon directive" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/mixed-noauthwhenanon') + + write_authz_file(sbox) + + noauthwhenanon_tests = ( + { 'path': '', 'status': 301, }, + { 'path': '/', 'status': 200, }, + { 'path': '/repos', 'status': 301, }, + { 'path': '/repos/', 'status': 200, }, + { 'path': '/repos/A', 'status': 301, }, + { 'path': '/repos/A/', 'status': 200, }, + { 'path': '/repos/A/D', 'status': 301, }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + }, + { 'path': '/repos/A/D/gamma', 'status': 200, }, + { 'path': '/repos/A/D/H', 'status': 401, }, + { 'path': '/repos/A/D/H/', 'status': 401, }, + { 'path': '/repos/A/D/H/chi', 'status': 401, }, + # auth is configured and user1 is allowed access to H + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass}, + # try with the wrong 
password for user1 + # note that unlike doing this with Satisfy Any this case + # actually provides anon access when provided with an invalid + # password + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass}, + # auth is configured and user2 is not allowed access to H + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': 
'/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass}, + # try with the wrong password for user2 + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass}, + ) + + verify_gets(test_area_url, noauthwhenanon_tests) + + +@SkipUnless(svntest.main.is_ra_type_dav) +def authn(sbox): + "test authenticated only access" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/authn') + + write_authz_file(sbox) + + authn_tests = ( + { 'path': '', 'status': 401, }, + { 'path': '/', 'status': 401, }, + { 'path': '/repos', 'status': 401, }, + { 'path': '/repos/', 'status': 401, }, + { 'path': '/repos/A', 'status': 401, }, + { 'path': '/repos/A/', 'status': 401, }, + { 'path': '/repos/A/D', 'status': 401, }, + { 'path': '/repos/A/D/', 'status': 401, }, + { 'path': '/repos/A/D/gamma', 'status': 401, }, + { 'path': '/repos/A/D/H', 'status': 401, }, + { 'path': '/repos/A/D/H/', 'status': 401, }, + { 'path': '/repos/A/D/H/chi', 'status': 401, }, + # auth is configured and user1 is allowed access to H + { 'path': '', 'status': 301, 
'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass}, + # try with upper case username for user1 + { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + # try with the wrong password for user1 + { 'path': '', 'status': 401, 'user': user1, 'pw': 
user1_badpass}, + { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass}, + # auth is configured and user2 is not allowed access to H + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass}, + # try with upper case username for user2 + { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': 
user2_upper, 'pw': user2_pass}, + { 'path': '/repos', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + # try with the wrong password for user2 + { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass}, + ) + + verify_gets(test_area_url, authn_tests) + +@SkipUnless(svntest.main.is_ra_type_dav) +def authn_anonoff(sbox): + "test authenticated only access with anonoff" + 
sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/authn-anonoff') + + write_authz_file(sbox) + + anonoff_tests = ( + { 'path': '', 'status': 401, }, + { 'path': '/', 'status': 401, }, + { 'path': '/repos', 'status': 401, }, + { 'path': '/repos/', 'status': 401, }, + { 'path': '/repos/A', 'status': 401, }, + { 'path': '/repos/A/', 'status': 401, }, + { 'path': '/repos/A/D', 'status': 401, }, + { 'path': '/repos/A/D/', 'status': 401, }, + { 'path': '/repos/A/D/gamma', 'status': 401, }, + { 'path': '/repos/A/D/H', 'status': 401, }, + { 'path': '/repos/A/D/H/', 'status': 401, }, + { 'path': '/repos/A/D/H/chi', 'status': 401, }, + # auth is configured and user1 is allowed access to H + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass}, + # try with upper case username for user1 + { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/', 
'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1_upper, 'pw': user1_pass}, + # try with the wrong password for user1 + { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass}, + # auth is configured and user2 is not allowed access to H + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 200, 'user': user2, 
'pw': user2_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass}, + # try with upper case username for user2 + { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + # try with the wrong password for user2 + { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 
'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass}, + ) + + verify_gets(test_area_url, anonoff_tests) + +@SkipUnless(svntest.main.is_ra_type_dav) +def authn_lcuser(sbox): + "test authenticated only access with lcuser" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/authn-lcuser') + + write_authz_file(sbox) + + lcuser_tests = ( + # try with upper case username for user1 (works due to lcuser option) + { 'path': '', 'status': 301, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1_upper, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1_upper, 'pw': user1_pass}, + { 
'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1_upper, 'pw': user1_pass}, + # try with upper case username for user2 (works due to lcuser option) + { 'path': '', 'status': 301, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos', 'status': 301, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 200, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2_upper, 'pw': user2_pass}, + ) + + verify_gets(test_area_url, lcuser_tests) + +# authenticated access only by group - a excuse to use AuthzSVNAuthoritative Off +# this is terribly messed up, Require group runs after mod_authz_svn. +# so if mod_authz_svn grants the access then it doesn't matter what the group +# requirement says. If we reject the access then you can use the AuthzSVNAuthoritative Off +# directive to fall through to the group check. Overall the behavior of setups like this +# is almost guaranteed to not be what users expect. 
+@SkipUnless(svntest.main.is_ra_type_dav) +def authn_group(sbox): + "test authenticated only access via groups" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/authn-group') + + # Can't use write_authz_file() as most tests because we want to deny all + # access with mod_authz_svn so the tests fall through to the group handling + authz_name = sbox.authz_name() + svntest.main.write_authz_file(sbox, { + '/': '* =', + }) + + group_tests = ( + { 'path': '', 'status': 401, }, + { 'path': '/', 'status': 401, }, + { 'path': '/repos', 'status': 401, }, + { 'path': '/repos/', 'status': 401, }, + { 'path': '/repos/A', 'status': 401, }, + { 'path': '/repos/A/', 'status': 401, }, + { 'path': '/repos/A/D', 'status': 401, }, + { 'path': '/repos/A/D/', 'status': 401, }, + { 'path': '/repos/A/D/gamma', 'status': 401, }, + { 'path': '/repos/A/D/H', 'status': 401, }, + { 'path': '/repos/A/D/H/', 'status': 401, }, + { 'path': '/repos/A/D/H/chi', 'status': 401, }, + # auth is configured and user1 is allowed access repo including H + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass}, + { 'path': 
'/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass}, + ) + + verify_gets(test_area_url, group_tests) + +# This test exists to validate our behavior when used with the new authz +# provider system introduced in httpd 2.3.x. The Satisfy directive +# determines how older authz hooks are combined and the RequireA(ll|ny) +# blocks handles how new authz providers are combined. The overall results of +# all the authz providers (combined per the Require* blocks) are then +# combined with the other authz hooks via the Satisfy directive. +# Meaning this test requires that mod_authz_svn says yes and there is +# either a valid user or the ALLOW header is 1. The header may seem +# like a silly test but it's easier to excercise than say a host directive +# in a repeatable test. +@SkipUnless(svntest.main.is_httpd_authz_provider_enabled) +def authn_sallrany(sbox): + "test satisfy all require any config" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/sallrany') + + write_authz_file(sbox) + + allow_header = { 'ALLOW': '1' } + + sallrany_tests = ( + #anon access isn't allowed without ALLOW header + { 'path': '', 'status': 401, }, + { 'path': '/', 'status': 401, }, + { 'path': '/repos', 'status': 401, }, + { 'path': '/repos/', 'status': 401, }, + { 'path': '/repos/A', 'status': 401, }, + { 'path': '/repos/A/', 'status': 401, }, + { 'path': '/repos/A/D', 'status': 401, }, + { 'path': '/repos/A/D/', 'status': 401, }, + { 'path': '/repos/A/D/gamma', 'status': 401, }, + { 'path': '/repos/A/D/H', 'status': 401, }, + { 'path': '/repos/A/D/H/', 'status': 401, }, + { 'path': '/repos/A/D/H/chi', 'status': 401, }, + # auth is configured and user1 is allowed access repo including H + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass}, + 
{ 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass}, + # try with the wrong password for user1 + { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass}, + # auth is configured and user2 is not allowed access to H + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 200, 'user': 
user2, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass}, + # try with the wrong password for user2 + { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass}, + # anon is allowed with the ALLOW header + { 'path': '', 'status': 301, 'headers': allow_header }, + { 'path': '/', 'status': 200, 'headers': allow_header }, + { 'path': '/repos', 'status': 301, 'headers': allow_header }, + { 'path': '/repos/', 'status': 200, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 301, 'headers': allow_header }, + { 'path': 
'/repos/A/', 'status': 200, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 301, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 200, 'headers': allow_header }, + # these 3 tests return 403 instead of 401 becasue the config allows + # the anon user with the ALLOW header without any auth and the old hook + # system has no way of knowing it should return 401 since authentication is + # configured and can change the behavior. It could decide to return 401 just on + # the basis of authentication being configured but then that leaks info in other + # cases so it's better for this case to be "broken". + { 'path': '/repos/A/D/H', 'status': 403, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 403, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'headers': allow_header }, + # auth is configured and user1 is allowed access repo including H + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass, 
'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + # try with the wrong password for user1 + { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + # auth is configured and user2 is not allowed access to H + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A', 
'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + # try with the wrong password for user2 + { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 
'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + + ) + + verify_gets(test_area_url, sallrany_tests) + +# See comments on authn_sallrany test for some background on the interaction +# of Satisfy Any and the newer Require blocks. +@SkipUnless(svntest.main.is_httpd_authz_provider_enabled) +def authn_sallrall(sbox): + "test satisfy all require all config" + sbox.build(read_only = True, create_wc = False) + + test_area_url = sbox.repo_url.replace('/svn-test-work/local_tmp/repos', + '/authz-test-work/sallrall') + + write_authz_file(sbox) + + allow_header = { 'ALLOW': '1' } + + sallrall_tests = ( + #anon access isn't allowed without ALLOW header + { 'path': '', 'status': 403, }, + { 'path': '/', 'status': 403, }, + { 'path': '/repos', 'status': 403, }, + { 'path': '/repos/', 'status': 403, }, + { 'path': '/repos/A', 'status': 403, }, + { 'path': '/repos/A/', 'status': 403, }, + { 'path': '/repos/A/D', 'status': 403, }, + { 'path': '/repos/A/D/', 'status': 403, }, + { 'path': '/repos/A/D/gamma', 'status': 403, }, + { 'path': '/repos/A/D/H', 'status': 403, }, + { 'path': '/repos/A/D/H/', 'status': 403, }, + { 'path': '/repos/A/D/H/chi', 'status': 403, }, + # auth is configured but no access is allowed without the ALLOW header + { 'path': '', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': 
user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_pass}, + # try with the wrong password for user1 + { 'path': '', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user1, 'pw': user1_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user1, 'pw': user1_badpass}, + # auth is configured but no access is allowed without the ALLOW header + { 'path': '', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/', 'status': 403, 
'user': user2, 'pw': user2_pass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass}, + # try with the wrong password for user2 + { 'path': '', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/gamma', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_badpass}, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_badpass}, + # anon is not allowed even with ALLOW header + { 'path': '', 'status': 401, 'headers': allow_header }, + { 'path': '/', 'status': 401, 'headers': allow_header }, + { 'path': '/repos', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 401, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'headers': allow_header }, + # auth is configured and user1 is allowed access repo including 
H + { 'path': '', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_H, + 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 301, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 200, 'body': ls_of_H, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 200, 'user': user1, 'pw': user1_pass, 'headers': allow_header }, + # try with the wrong password for user1 + { 'path': '', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': 
'/repos/A/D/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user1, 'pw': user1_badpass, 'headers': allow_header }, + # auth is configured and user2 is not allowed access to H + { 'path': '', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 301, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 200, 'body': ls_of_D_no_H, + 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 200, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 403, 'user': user2, 'pw': user2_pass, 'headers': allow_header }, + # try with the wrong password for user2 + { 'path': '', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/', 'status': 401, 'user': user2, 'pw': 
user2_badpass, 'headers': allow_header }, + { 'path': '/repos', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/gamma', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + { 'path': '/repos/A/D/H/chi', 'status': 401, 'user': user2, 'pw': user2_badpass, 'headers': allow_header }, + + ) + + verify_gets(test_area_url, sallrall_tests) + + +######################################################################## +# Run the tests + + +# list all tests here, starting with None: +test_list = [ None, + anon, + mixed, + mixed_noauthwhenanon, + authn, + authn_anonoff, + authn_lcuser, + authn_group, + authn_sallrany, + authn_sallrall, + ] +serial_only = True + +if __name__ == '__main__': + svntest.main.run_tests(test_list) + # NOTREACHED + + +### End of file. 
diff --git a/subversion/tests/cmdline/move_tests.py b/subversion/tests/cmdline/move_tests.py index 4495b59..b9f921c 100755 --- a/subversion/tests/cmdline/move_tests.py +++ b/subversion/tests/cmdline/move_tests.py @@ -152,7 +152,7 @@ def move_file_test(sbox, source, dest, move_func, test): # update to start_rev svntest.actions.run_and_verify_update(wc_dir, test['start_output'], test['start_disk'], test['start_status'], - None, None, None, None, None, False, + [], False, '-r', test['start_rev'], wc_dir) # execute the move move_func(test['start_rev']) @@ -162,7 +162,7 @@ def move_file_test(sbox, source, dest, move_func, test): # properties. svntest.actions.run_and_verify_update(wc_dir, test['up_output'], test['up_disk'], test['up_status'], - None, None, None, None, None, True, + [], True, '-r', test['end_rev'], wc_dir) revert_paths = None @@ -182,8 +182,7 @@ def move_file_test(sbox, source, dest, move_func, test): resolve['disk'] = None if 'revert_paths' in resolve: revert_paths = resolve['revert_paths'] - svntest.actions.run_and_verify_svn('Resolve modification to source of move', - resolve['output'], resolve['error'], + svntest.actions.run_and_verify_svn(resolve['output'], resolve['error'], 'resolve', '--accept', resolve_accept, '-R', wc_dir) @@ -246,11 +245,6 @@ def build_simple_file_move_tests(sbox, source, dest): copied='+', wc_rev='-')}) mc['disk'] = test['up_disk'].copy() mc['disk'].tweak(dest, contents="This is the file 'lambda'.\nmodified\n") - # theirs-conflict doesn't work - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] # working breaks the move working = {} working['output'] = svntest.verify.ExpectedOutput( @@ -263,7 +257,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].tweak(source, status='D ') working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] 
= {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -286,15 +280,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'] = svntest.actions.get_virginal_state(wc_dir, test['end_rev']) test['up_status'].tweak(source, status='! ', treeconflict='C', wc_rev=None) test['up_status'].add({dest: Item(status='A ', copied='+', wc_rev='-')}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doen't work. mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Doesn't say it broke the move it should. working['output'] = svntest.verify.ExpectedOutput( @@ -306,7 +296,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].remove(source) working['disk'] = test['up_disk'] working['revert_paths'] = [dest_path] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [dest_path, source_path] tests.append(test) @@ -331,15 +321,11 @@ def build_simple_file_move_tests(sbox, source, dest): # XXX: Is entry_status=' ' really right here? test['up_status'].tweak(source, status='! ', treeconflict='C', entry_status=' ') test['up_status'].add({dest: Item(status='A ', copied='+', wc_rev='-')}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. 
mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Broke the move but doesn't notify that it does. working['output'] = svntest.verify.ExpectedOutput( @@ -350,7 +336,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].tweak(source, status='! ') working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -374,15 +360,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='D ', moved_to=dest) test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C', wc_rev='-', moved_from=source)}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Doesn't say what it did. 
working['output'] = svntest.verify.ExpectedOutput( @@ -394,7 +376,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].add({dest: Item(status='R ', moved_from=source, copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -419,15 +401,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='D ', moved_to=dest) test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C', wc_rev='-', moved_from=source)}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} working['accept'] = 'working' # XXX: Doesn't say what it did. @@ -440,7 +418,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].add({dest: Item(status='R ', moved_from=source, copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -488,15 +466,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='D ', moved_to=dest) test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C', wc_rev='-', moved_from=source)}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. 
mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Doesn't say what it did. working['output'] = svntest.verify.ExpectedOutput( @@ -508,7 +482,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].add({dest: Item(status='R ', moved_from=source, copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -532,15 +506,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='D ', moved_to=dest) test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C', wc_rev='-', moved_from=source)}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Doesn't say what it did. 
working['output'] = svntest.verify.ExpectedOutput( @@ -552,7 +522,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].add({dest: Item(status='R ', moved_from=source, copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -576,15 +546,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='D ', moved_to=dest) test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C', wc_rev='-', moved_from=source)}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Didn't tell us what it did. working['output'] = svntest.verify.ExpectedOutput( @@ -596,7 +562,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].add({dest: Item(status='R ', moved_from=source, copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -620,15 +586,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='D ', moved_to=dest) test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C', wc_rev='-', moved_from=source)}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. 
mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Doesn't tell you what it did. working['output'] = svntest.verify.ExpectedOutput( @@ -640,7 +602,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].add({dest: Item(status='R ', moved_from=source, copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -664,15 +626,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='D ', moved_to=dest) test['up_status'].add({dest: Item(status='R ', copied='+', treeconflict='C', wc_rev='-', moved_from=source)}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} # XXX: Doesn't tell you what it did. 
working['output'] = svntest.verify.ExpectedOutput( @@ -684,7 +642,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].add({dest: Item(status='R ', moved_from=source, copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -720,11 +678,6 @@ def build_simple_file_move_tests(sbox, source, dest): copied='+', wc_rev='-')}) mc['disk'] = test['up_disk'].copy() mc['disk'].tweak(dest, props={u'foo': u'bar'}) - # theirs-conflict doesn't work - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} working['output'] = svntest.verify.ExpectedOutput( [ @@ -737,7 +690,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].tweak(source, status='D ') working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -773,11 +726,6 @@ def build_simple_file_move_tests(sbox, source, dest): copied='+', wc_rev='-')}) mc['disk'] = test['up_disk'].copy() mc['disk'].tweak(dest, props={u'foo': u'baz'}) - # theirs-conflict doesn't work - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} working['output'] = svntest.verify.ExpectedOutput( [ @@ -790,7 +738,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].tweak(source, status='D ') working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = 
{'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -826,11 +774,6 @@ def build_simple_file_move_tests(sbox, source, dest): copied='+', wc_rev='-')}) mc['disk'] = test['up_disk'].copy() mc['disk'].tweak(dest, props={}) - # theirs-conflict doesn't work - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} working['output'] = svntest.verify.ExpectedOutput( [ @@ -843,7 +786,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].tweak(source, status='D ') working['status'].add({dest: Item(status='A ', copied='+', wc_rev='-')}) working['disk'] = test['up_disk'] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [source_path, dest_path] tests.append(test) @@ -870,15 +813,11 @@ def build_simple_file_move_tests(sbox, source, dest): test['up_status'].tweak(source, status='! ', treeconflict='C', wc_rev=None) test['up_status'].add({dest: Item(status='R ', copied='+', wc_rev='-', treeconflict='C')}) - # mine-conflict and theirs-conflict don't work. + # mine-conflict doesn't work. 
mc = {} mc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) mc['status'] = test['up_status'] mc['disk'] = test['up_disk'] - tc = {} - tc['error'] = svntest.verify.RegexOutput(".*: .*: W155027:.*", match_all=False) - tc['status'] = test['up_status'] - tc['disk'] = test['up_disk'] working = {} working['output'] = svntest.verify.ExpectedOutput( "Resolved conflicted state of '%s'\n" % source_path, match_all=False @@ -889,7 +828,7 @@ def build_simple_file_move_tests(sbox, source, dest): working['status'].remove(source) working['disk'] = test['up_disk'] working['revert_paths'] = [dest_path] - test['resolves'] = {'mine-conflict': mc, 'theirs-conflict': tc, + test['resolves'] = {'mine-conflict': mc, 'working': working} test['revert_paths'] = [dest_path, source_path] tests.append(test) @@ -904,7 +843,7 @@ def build_simple_file_move_func(sbox, source, dest): # Setup the move function def move_func(rev): # execute the move - svntest.actions.run_and_verify_svn(None, None, [], "move", + svntest.actions.run_and_verify_svn(None, [], "move", source_path, dest_path) if move_func.extra_mv_tests: mv_status = svntest.actions.get_virginal_state(wc_dir, rev) @@ -914,13 +853,13 @@ def build_simple_file_move_func(sbox, source, dest): mv_info_src = [ { 'Path' : re.escape(source_path), - 'Moved To' : re.escape(dest), + 'Moved To' : re.escape(sbox.ospath(dest)), } ] mv_info_dst = [ { 'Path' : re.escape(dest_path), - 'Moved From' : re.escape(source), + 'Moved From' : re.escape(sbox.ospath(source)), } ] @@ -1098,7 +1037,7 @@ def property_merge(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) sbox.simple_update() - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', sbox.ospath('A/C')) @@ -1140,29 +1079,33 @@ def property_merge(sbox): """Trying to add new property 'key1' but the property already exists. 
<<<<<<< (local property value) -value2======= -value3>>>>>>> (incoming property value) +value2||||||| (incoming 'changed from' value) +======= +value3>>>>>>> (incoming 'changed to' value) """), 'A/C2/D5/dir_conflicts.prej' : Item(contents= """Trying to change property 'key1' but the property has already been locally changed to a different value. <<<<<<< (local property value) -value2======= -value3>>>>>>> (incoming property value) +value2||||||| (incoming 'changed from' value) +value1======= +value3>>>>>>> (incoming 'changed to' value) """), 'A/C2/f4.prej' : Item(contents= """Trying to add new property 'key1' but the property already exists. <<<<<<< (local property value) -value2======= -value3>>>>>>> (incoming property value) +value2||||||| (incoming 'changed from' value) +======= +value3>>>>>>> (incoming 'changed to' value) """), 'A/C2/f5.prej' : Item(contents= """Trying to change property 'key1' but the property has already been locally changed to a different value. <<<<<<< (local property value) -value2======= -value3>>>>>>> (incoming property value) +value2||||||| (incoming 'changed from' value) +value1======= +value3>>>>>>> (incoming 'changed to' value) """), }) @@ -1205,7 +1148,7 @@ def move_missing(sbox): # This move currently fails halfway between adding the dest and # deleting the source - svntest.actions.run_and_verify_svn(None, None, expected_err, + svntest.actions.run_and_verify_svn(None, expected_err, 'mv', sbox.ospath('A/D/G'), sbox.ospath('R')) @@ -1213,27 +1156,119 @@ def move_missing(sbox): expected_status.tweak('A/D/G', 'A/D/G/tau', 'A/D/G/pi', 'A/D/G/rho', status='! ', entry_status=' ') - expected_status.add({ - 'R' : Item(status='! ', wc_rev='-', - entry_status='A ', entry_copied='+'), - 'R/pi' : Item(status='! ', wc_rev='-', - entry_status=' ', entry_copied='+'), - 'R/tau' : Item(status='! ', wc_rev='-', - entry_status=' ', entry_copied='+'), - 'R/rho' : Item(status='! 
', wc_rev='-', - entry_status=' ', entry_copied='+'), - }) - # Verify that the status processing doesn't crash svntest.actions.run_and_verify_status(wc_dir, expected_status) # The issue is a crash when the destination is present os.mkdir(sbox.ospath('R')) - expected_status.tweak('R', status='A ', copied='+') svntest.actions.run_and_verify_status(wc_dir, expected_status) +def nested_replaces(sbox): + "nested replaces" + + sbox.build(create_wc=False, empty=True) + repo_url = sbox.repo_url + wc_dir = sbox.wc_dir + ospath = sbox.ospath + + ## r1: setup + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', repo_url, + '-m', 'r1: create tree', + 'mkdir', 'A', 'mkdir', 'A/B', 'mkdir', 'A/B/C', + 'mkdir', 'X', 'mkdir', 'X/Y', 'mkdir', 'X/Y/Z', + # sentinel files + 'put', os.devnull, 'A/a', + 'put', os.devnull, 'A/B/b', + 'put', os.devnull, 'A/B/C/c', + 'put', os.devnull, 'X/x', + 'put', os.devnull, 'X/Y/y', + 'put', os.devnull, 'X/Y/Z/z') + + svntest.main.run_svn(None, 'checkout', '-q', repo_url, wc_dir) + r1_status = svntest.wc.State(wc_dir, { + '' : Item(status=' ', wc_rev='1'), + 'A' : Item(status=' ', wc_rev='1'), + 'A/B' : Item(status=' ', wc_rev='1'), + 'A/B/C' : Item(status=' ', wc_rev='1'), + 'X' : Item(status=' ', wc_rev='1'), + 'X/Y' : Item(status=' ', wc_rev='1'), + 'X/Y/Z' : Item(status=' ', wc_rev='1'), + 'A/a' : Item(status=' ', wc_rev='1'), + 'A/B/b' : Item(status=' ', wc_rev='1'), + 'A/B/C/c' : Item(status=' ', wc_rev='1'), + 'X/x' : Item(status=' ', wc_rev='1'), + 'X/Y/y' : Item(status=' ', wc_rev='1'), + 'X/Y/Z/z' : Item(status=' ', wc_rev='1'), + }) + svntest.actions.run_and_verify_status(wc_dir, r1_status) + + ## r2: juggling + moves = [ + ('A', 'A2'), + ('X', 'X2'), + ('A2/B/C', 'X'), + ('X2/Y/Z', 'A'), + ('A2/B', 'A/B'), + ('X2/Y', 'X/Y'), + ('A2', 'X/Y/Z'), + ('X2', 'A/B/C'), + ] + for src, dst in moves: + svntest.main.run_svn(None, 'mv', ospath(src), ospath(dst)) + r2_status = svntest.wc.State(wc_dir, { + '' : Item(status=' ', wc_rev='1'), + 
'A' : Item(status='R ', copied='+', moved_from='X/Y/Z', moved_to='X/Y/Z', wc_rev='-'), + 'A/B' : Item(status='A ', copied='+', moved_from='X/Y/Z/B', wc_rev='-', entry_status='R '), + 'A/B/C' : Item(status='R ', copied='+', moved_from='X', moved_to='X', wc_rev='-'), + 'A/B/C/Y' : Item(status='D ', copied='+', wc_rev='-', moved_to='X/Y'), + 'A/B/C/Y/y' : Item(status='D ', copied='+', wc_rev='-'), + 'A/B/C/Y/Z' : Item(status='D ', copied='+', wc_rev='-'), + 'A/B/C/Y/Z/z':Item(status='D ', copied='+', wc_rev='-'), + 'X' : Item(status='R ', copied='+', moved_from='A/B/C', moved_to='A/B/C', wc_rev='-'), + 'X/Y' : Item(status='A ', copied='+', moved_from='A/B/C/Y', wc_rev='-', entry_status='R '), + 'X/Y/Z' : Item(status='R ', copied='+', moved_from='A', moved_to='A', wc_rev='-'), + 'X/Y/Z/B' : Item(status='D ', copied='+', wc_rev='-', moved_to='A/B'), + 'X/Y/Z/B/b' : Item(status='D ', copied='+', wc_rev='-'), + 'X/Y/Z/B/C' : Item(status='D ', copied='+', wc_rev='-'), + 'X/Y/Z/B/C/c':Item(status='D ', copied='+', wc_rev='-'), + 'A/a' : Item(status='D ', wc_rev='1'), + 'A/B/b' : Item(status='D ', wc_rev='1'), + 'A/B/C/c' : Item(status='D ', copied='+', wc_rev='-'), + 'X/x' : Item(status='D ', wc_rev='1'), + 'X/Y/y' : Item(status='D ', wc_rev='1'), + 'X/Y/Z/z' : Item(status='D ', copied='+', wc_rev='-'), + 'X/c' : Item(status=' ', copied='+', wc_rev='-'), + 'A/z' : Item(status=' ', copied='+', wc_rev='-'), + 'A/B/b' : Item(status=' ', copied='+', wc_rev='-'), + 'X/Y/y' : Item(status=' ', copied='+', wc_rev='-'), + 'X/Y/Z/a' : Item(status=' ', copied='+', wc_rev='-'), + 'A/B/C/x' : Item(status=' ', copied='+', wc_rev='-'), + }) + svntest.actions.run_and_verify_status(wc_dir, r2_status) + + svntest.main.run_svn(None, 'commit', '-m', 'r2: juggle the tree', wc_dir) + expected_output = svntest.verify.UnorderedRegexListOutput(map(re.escape, [ + ' R /A (from /X/Y/Z:1)', + ' A /A/B (from /A/B:1)', + ' R /A/B/C (from /X:1)', + ' R /X (from /A/B/C:1)', + ' A /X/Y (from /X/Y:1)', + ' R 
/X/Y/Z (from /A:1)', + ' D /X/Y/Z/B', + ' D /A/B/C/Y', + ]) + [ + '^-', '^r2', '^-', '^Changed paths:', + ]) + svntest.actions.run_and_verify_svn(expected_output, [], + 'log', '-qvr2', repo_url) + + ## Test updating to r1. + svntest.main.run_svn(None, 'update', '-r1', wc_dir) + svntest.actions.run_and_verify_status(wc_dir, r1_status) + def setup_move_many(sbox): "helper function which creates a wc with node A/A/A which is moved 3 times" @@ -1309,7 +1344,7 @@ def move_many_update_delete(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # And now create a tree conflict - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/B', '-m', '') @@ -1326,10 +1361,9 @@ def move_many_update_delete(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, None, expected_status) - # Would be nice if we could run the resolver as a separate step, + # Would be nice if we could run the resolver as a separate step, # but 'svn resolve' just fails for any value but working -@XFail() def move_many_update_add(sbox): "move many and add-on-update" @@ -1343,7 +1377,7 @@ def move_many_update_add(sbox): #svntest.actions.run_and_verify_status(wc_dir, expected_status) # And now create a tree conflict - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', sbox.repo_url + '/B/A/A/BB', '-m', '') @@ -1353,7 +1387,7 @@ def move_many_update_add(sbox): 'B/A/A' : Item(status=' ', treeconflict='U'), 'B/A/A/BB' : Item(status=' ', treeconflict='A'), # And while resolving - 'A/A/' : Item(status=' ', treeconflict='C') + 'A/A' : Item(status=' ', treeconflict='C') }) expected_status.tweak('', @@ -1369,21 +1403,28 @@ def move_many_update_add(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, None, expected_status, - None, None, None, - None, None, None, + [], False, wc_dir, '--accept', 'mine-conflict') # And another one - 
svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', sbox.repo_url + '/C/A/A/BB', '-m', '') expected_status.tweak('', 'B', 'B/A', 'B/A/A', 'B/A/A/A', 'C', 'C/A', 'C/A/A', 'C/A/A/A', + 'B/A/A/BB', wc_rev='4') + expected_status.add({ + 'C/A/A/BB' : Item(status='D ', wc_rev='4'), + }) + + expected_status.tweak('A/A/A', treeconflict='C') + expected_output = svntest.wc.State(wc_dir, { + 'A/A/A' : Item(status=' ', treeconflict='C'), 'C/A' : Item(status=' ', treeconflict='C'), 'C/A/A' : Item(status=' ', treeconflict='U'), 'C/A/A/BB' : Item(status=' ', treeconflict='A'), @@ -1392,13 +1433,13 @@ def move_many_update_add(sbox): # This currently triggers an assertion failure svntest.actions.run_and_verify_update(wc_dir, expected_output, None, expected_status, - None, None, None, - None, None, None, + [], False, wc_dir, '--accept', 'mine-conflict') @Issue(4437) def move_del_moved(sbox): "delete moved node, still a move" + sbox.build() wc_dir = sbox.wc_dir @@ -1428,7 +1469,7 @@ def copy_move_commit(sbox): # create table bbb (Id int not null) # - Commit # Repro Issue 2 - # - Copy folder aaa under same parent folder (i.e. as a sibling). (using Ctrl drag/drop). + # - Copy folder aaa under same parent folder (i.e. as a sibling). (using Ctrl drag/drop). 
# Creates Copy of aaa # - Rename Copy of aaa to eee # - Commit @@ -1441,7 +1482,6 @@ def copy_move_commit(sbox): sbox.simple_move('A/D/GG', 'A/D/GG-moved') sbox.simple_commit('A/D/GG-moved') - def move_to_from_external(sbox): "move to and from an external" @@ -1449,87 +1489,211 @@ def move_to_from_external(sbox): sbox.simple_propset('svn:externals', '^/A/D/G GG', '') sbox.simple_update() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'move', sbox.ospath('GG/tau'), sbox.ospath('tau')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'move', sbox.ospath('iota'), sbox.ospath('GG/tau')) - - svntest.actions.run_and_verify_svn(None, None, [], + + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Commit both', sbox.ospath(''), sbox.ospath('GG')) - -def move_conflict_markers(sbox): - "move conflict markers" + +def revert_del_root_of_move(sbox): + "revert delete root of move" + + sbox.build() + wc_dir = sbox.wc_dir + sbox.simple_copy('A/mu', 'A/B/E/mu') + sbox.simple_copy('A/mu', 'A/B/F/mu') + sbox.simple_commit() + sbox.simple_update('', 1) + sbox.simple_move('A/B/E', 'E') + sbox.simple_rm('A/B') + + expected_output = svntest.wc.State(wc_dir, { + 'A/B' : Item(status=' ', treeconflict='C'), + 'A/B/E' : Item(status=' ', treeconflict='U'), + 'A/B/E/mu' : Item(status=' ', treeconflict='A'), + 'A/B/F' : Item(status=' ', treeconflict='U'), + 'A/B/F/mu' : Item(status=' ', treeconflict='A'), + }) + + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.tweak('A/B', status='D ', treeconflict='C') + expected_status.tweak('A/B/E', status='D ', moved_to='E') + expected_status.tweak('A/B/F', 'A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta', + status='D ') + expected_status.add({ + 'A/B/F/mu' : Item(status='D ', wc_rev='2'), + 'A/B/E/mu' : Item(status='D ', wc_rev='2'), + 'E' : Item(status='A ', copied='+', moved_from='A/B/E', wc_rev='-'), + 'E/beta' : 
Item(status=' ', copied='+', wc_rev='-'), + 'E/alpha' : Item(status=' ', copied='+', wc_rev='-'), + }) + + svntest.actions.run_and_verify_update(wc_dir, expected_output, None, + expected_status) + + expected_output = [ + "Reverted '%s'\n" % sbox.ospath('A/B'), # Reverted + " C %s\n" % sbox.ospath('A/B/E') # New tree conflict + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'revert', sbox.ospath('A/B'), + '--depth', 'empty') + + expected_status.tweak('A/B', status=' ', treeconflict=None) + expected_status.tweak('A/B/E', treeconflict='C') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + +def move_conflict_details(sbox): + "move conflict details" sbox.build() - wc_dir = sbox.wc_dir - sbox.simple_propset('key','val', 'iota', 'A/B/E', 'A/B/E/beta') + + sbox.simple_append('A/B/E/new', 'new\n') + sbox.simple_add('A/B/E/new') + sbox.simple_append('A/B/E/alpha', '\nextra\nlines\n') + sbox.simple_rm('A/B/E/beta', 'A/B/F') + sbox.simple_propset('key', 'VAL', 'A/B/E', 'A/B') + sbox.simple_mkdir('A/B/E/new-dir1') + sbox.simple_mkdir('A/B/E/new-dir2') + sbox.simple_mkdir('A/B/E/new-dir3') + sbox.simple_rm('A/B/lambda') + sbox.simple_mkdir('A/B/lambda') sbox.simple_commit() + sbox.simple_update('', 1) - sbox.simple_propset('key','false', 'iota', 'A/B/E', 'A/B/E/beta') - expected_output = svntest.wc.State(wc_dir, { - 'A/B/E' : Item(status=' C'), - 'A/B/E/beta' : Item(status=' C'), - 'iota' : Item(status=' C'), - }) - expected_status = svntest.actions.get_virginal_state(wc_dir, 2) - expected_status.tweak('iota', 'A/B/E', 'A/B/E/beta', status=' C') - expected_disk = svntest.main.greek_state.copy() - expected_disk.add({ - 'A/B/E/dir_conflicts.prej' : Item(contents= - "Trying to add new property 'key'\n" - "but the property already exists.\n" - "<<<<<<< (local property value)\n" - "false=======\n" - "val>>>>>>> (incoming property value)\n"), - 'A/B/E/beta.prej' : Item(contents= - "Trying to add new property 'key'\n" - "but the property already 
exists.\n" - "<<<<<<< (local property value)\n" - "false=======\n" - "val>>>>>>> (incoming property value)\n"), - 'iota.prej' : Item(contents= - "Trying to add new property 'key'\n" - "but the property already exists.\n" - "<<<<<<< (local property value)\n" - "false=======\n" - "val>>>>>>> (incoming property value)\n"), - }) - svntest.actions.run_and_verify_update(wc_dir, - expected_output, - expected_disk, - expected_status) + sbox.simple_move('A/B', 'B') - sbox.simple_move('iota', 'A/iotb') - sbox.simple_move('A/B/E', 'E') + sbox.simple_update('', 2) + + expected_info = [ + { + "Moved To": re.escape(sbox.ospath("B")), + "Tree conflict": re.escape( + 'local dir moved away, incoming dir edit upon update' + + ' Source left: (dir) ^/A/B@1' + + ' Source right: (dir) ^/A/B@2') + } + ] + svntest.actions.run_and_verify_info(expected_info, sbox.ospath('A/B')) + + sbox.simple_propset('key', 'vAl', 'B') + sbox.simple_move('B/E/beta', 'beta') + sbox.simple_propset('a', 'b', 'B/F', 'B/lambda') + sbox.simple_append('B/E/alpha', 'other\nnew\nlines') + sbox.simple_mkdir('B/E/new') + sbox.simple_mkdir('B/E/new-dir1') + sbox.simple_append('B/E/new-dir2', 'something') + sbox.simple_append('B/E/new-dir3', 'something') + sbox.simple_add('B/E/new-dir3') + + + expected_output = [ + " C %s\n" % sbox.ospath('B'), # Property conflicted + " U %s\n" % sbox.ospath('B/E'), # Just updated + "C %s\n" % sbox.ospath('B/E/alpha'), # Text conflicted + " C %s\n" % sbox.ospath('B/E/beta'), + " C %s\n" % sbox.ospath('B/E/new'), + " C %s\n" % sbox.ospath('B/E/new-dir1'), + " C %s\n" % sbox.ospath('B/E/new-dir2'), + " C %s\n" % sbox.ospath('B/E/new-dir3'), + " C %s\n" % sbox.ospath('B/F'), + " C %s\n" % sbox.ospath('B/lambda'), + "Updated to revision 2.\n", + "Resolved conflicted state of '%s'\n" % sbox.ospath('A/B') + ] + svntest.actions.run_and_verify_svn(expected_output, [], + 'resolve', sbox.ospath('A/B'), + '--depth', 'empty', + '--accept', 'mine-conflict') + + expected_info = [ + { + "Path" : 
re.escape(sbox.ospath('B')), + + "Conflict Properties File" : + re.escape(sbox.ospath('B/dir_conflicts.prej')) + '.*', + "Conflict Details": re.escape( + 'incoming dir edit upon update' + + ' Source left: (dir) ^/A/B@1' + + ' Source right: (dir) ^/A/B@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E')), + }, + { + "Path" : re.escape(sbox.ospath('B/E/alpha')), + "Conflict Previous Base File" : '.*alpha.*', + "Conflict Previous Working File" : '.*alpha.*', + "Conflict Current Base File": '.*alpha.*', + "Conflict Details": re.escape( + 'incoming file edit upon update' + + ' Source left: (file) ^/A/B/E/alpha@1' + + ' Source right: (file) ^/A/B/E/alpha@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E/beta')), + "Tree conflict": re.escape( + 'local file moved away, incoming file delete or move upon update' + + ' Source left: (file) ^/A/B/E/beta@1' + + ' Source right: (none) ^/A/B/E/beta@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E/new')), + "Tree conflict": re.escape( + 'local dir add, incoming file add upon update' + + ' Source left: (none) ^/A/B/E/new@1' + + ' Source right: (file) ^/A/B/E/new@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E/new-dir1')), + "Tree conflict": re.escape( + 'local dir add, incoming dir add upon update' + + ' Source left: (none) ^/A/B/E/new-dir1@1' + + ' Source right: (dir) ^/A/B/E/new-dir1@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E/new-dir2')), + "Tree conflict": re.escape( + 'local file unversioned, incoming dir add upon update' + + ' Source left: (none) ^/A/B/E/new-dir2@1' + + ' Source right: (dir) ^/A/B/E/new-dir2@2') + }, + { + "Path" : re.escape(sbox.ospath('B/E/new-dir3')), + "Tree conflict": re.escape( + 'local file add, incoming dir add upon update' + + ' Source left: (none) ^/A/B/E/new-dir3@1' + + ' Source right: (dir) ^/A/B/E/new-dir3@2') + }, + { + "Path" : re.escape(sbox.ospath('B/F')), + "Tree conflict": re.escape( + 'local dir edit, incoming dir delete or move upon update' + + ' Source left: (dir) ^/A/B/F@1' 
+ + ' Source right: (none) ^/A/B/F@2') + }, + { + "Path" : re.escape(sbox.ospath('B/lambda')), + "Tree conflict": re.escape( + 'local file edit, incoming replace with dir upon update' + + ' Source left: (file) ^/A/B/lambda@1' + + ' Source right: (dir) ^/A/B/lambda@2') + }, + ] + + svntest.actions.run_and_verify_info(expected_info, sbox.ospath('B'), + '--depth', 'infinity') - expected_status.tweak('iota', status='D ', moved_to='A/iotb') - expected_status.tweak('A/B/E', status='D ', moved_to='E') - expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status='D ') - expected_status.add({ - 'A/iotb' : Item(status='A ', copied='+', moved_from='iota', wc_rev='-'), - 'E' : Item(status='A ', copied='+', moved_from='A/B/E', wc_rev='-'), - 'E/beta' : Item(status=' M', copied='+', wc_rev='-'), - 'E/alpha' : Item(status=' ', copied='+', wc_rev='-'), - }) - expected_disk.remove('iota', 'iota.prej', - 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', - 'A/B/E/dir_conflicts.prej', - 'A/B/E/beta.prej') - expected_disk.add({ - 'A/iotb' : Item(contents="This is the file 'iota'.\n"), - 'E/beta' : Item(contents="This is the file 'beta'.\n"), - 'E/alpha' : Item(contents="This is the file 'alpha'.\n"), - }) - svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.verify_disk(wc_dir, expected_disk) ####################################################################### # Run the tests @@ -1542,12 +1706,14 @@ test_list = [ None, deeper_move_file_test, property_merge, move_missing, + nested_replaces, move_many_update_delete, move_many_update_add, move_del_moved, copy_move_commit, move_to_from_external, - move_conflict_markers, + revert_del_root_of_move, + move_conflict_details, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/patch_tests.py b/subversion/tests/cmdline/patch_tests.py index b17da63..8e16adc 100755 --- a/subversion/tests/cmdline/patch_tests.py +++ b/subversion/tests/cmdline/patch_tests.py @@ -102,7 +102,7 @@ def patch(sbox): expected_status = 
svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -377,7 +377,7 @@ def patch_offset(sbox): expected_status.tweak('A/mu', wc_rev=2) expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -546,7 +546,7 @@ def patch_chopped_leading_spaces(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -716,7 +716,7 @@ def patch_strip1(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -869,7 +869,7 @@ def patch_no_index_line(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/gamma', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) unidiff_patch = [ "--- A/D/gamma\t(revision 1)\n", "+++ A/D/gamma\t(working copy)\n", @@ -1079,9 +1079,9 @@ def patch_remove_empty_dirs(sbox): svntest.main.file_write(patch_file_path, ''.join(unidiff_patch)) F_path = sbox.ospath('A/B/F') - svntest.actions.run_and_verify_svn("Deleting F failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', F_path) - svntest.actions.run_and_verify_svn("Update failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # We should be able to handle one path beeing missing. 
@@ -1148,7 +1148,7 @@ def patch_reject(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/gamma', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) patch_file_path = make_patch_path(sbox) @@ -1219,7 +1219,7 @@ def patch_keywords(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/gamma', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) patch_file_path = make_patch_path(sbox) @@ -1308,7 +1308,7 @@ def patch_with_fuzz(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) unidiff_patch = [ "Index: mu\n", @@ -1444,7 +1444,7 @@ def patch_reverse(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -1673,7 +1673,7 @@ def patch_no_svn_eol_style(sbox): 1) # dry-run expected_output = ["Reverted '" + mu_path + "'\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '-R', wc_dir) def patch_with_svn_eol_style(sbox): "patch target with svn:eol-style" @@ -1788,7 +1788,7 @@ def patch_with_svn_eol_style(sbox): 1) # dry-run expected_output = ["Reverted '" + mu_path + "'\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '-R', wc_dir) def patch_with_svn_eol_style_uncommitted(sbox): "patch target with uncommitted svn:eol-style" @@ -1897,7 +1897,7 @@ def 
patch_with_svn_eol_style_uncommitted(sbox): 1) # dry-run expected_output = ["Reverted '" + mu_path + "'\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '-R', wc_dir) def patch_with_ignore_whitespace(sbox): "ignore whitespace when patching" @@ -1943,7 +1943,7 @@ def patch_with_ignore_whitespace(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch with leading and trailing spaces removed and tabs transformed # to spaces. The patch should match and the hunks should be written to the @@ -2077,7 +2077,7 @@ def patch_replace_locally_deleted_file(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Locally delete mu svntest.main.run_svn(None, 'rm', mu_path) @@ -2143,7 +2143,7 @@ def patch_no_eol_at_eof(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) unidiff_patch = [ "--- iota\t(revision 1)\n", "+++ iota\t(working copy)\n", @@ -2213,7 +2213,7 @@ def patch_with_properties(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch unidiff_patch = [ @@ -2306,7 +2306,7 @@ def patch_same_twice(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, 
expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -2491,7 +2491,7 @@ def patch_dir_properties(sbox): expected_status.tweak('', wc_rev=2) expected_status.tweak('A/B', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch unidiff_patch = [ @@ -2713,7 +2713,7 @@ def patch_prop_offset(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -2888,7 +2888,7 @@ def patch_prop_with_fuzz(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) unidiff_patch = [ "Index: mu\n", @@ -3072,7 +3072,7 @@ def patch_old_target_names(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -3198,7 +3198,7 @@ def patch_reverse_revert(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch @@ -3520,7 +3520,7 @@ def patch_moved_away(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Move mu away sbox.simple_move("A/mu", "A/mu2") @@ -3681,7 +3681,7 @@ def patch_deletes_prop(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('iota', wc_rev=2) 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Apply patch unidiff_patch = [ @@ -3828,7 +3828,7 @@ def patch_reversed_add_with_props2(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.add({'newfile' : Item(wc_rev=2, status=' ')}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now, we'll try to reverse-apply the very diff we just created. We # expect the original state of the working copy in r1 plus 'newfile' @@ -4019,7 +4019,7 @@ def patch_target_no_eol_at_eof(sbox): expected_status.tweak('iota', wc_rev=2) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) unidiff_patch = [ "Index: A/mu\n", "===================================================================\n", @@ -4222,22 +4222,20 @@ def patch_change_symlink_target(sbox): expected_output = svntest.wc.State(wc_dir, { 'link' : Item(verb='Adding'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) patch_output = [ 'U %s\n' % sbox.ospath('link'), ] - svntest.actions.run_and_verify_svn(None, patch_output, [], + svntest.actions.run_and_verify_svn(patch_output, [], 'patch', patch_file_path, wc_dir) # r3 - Store result expected_output = svntest.wc.State(wc_dir, { 'link' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) # r4 - Now as symlink sbox.simple_rm('link') @@ -4245,10 +4243,9 @@ def patch_change_symlink_target(sbox): expected_output = svntest.wc.State(wc_dir, { 'link' : Item(verb='Replacing'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) - svntest.actions.run_and_verify_svn(None, patch_output, [], + svntest.actions.run_and_verify_svn(patch_output, [], 'patch', patch_file_path, wc_dir) # TODO: when it passes, verify that the on-disk 'link' is correct --- @@ -4357,7 +4354,7 @@ def patch_replace_dir_with_file_and_vv(sbox): 'A %s\n' % sbox.ospath('iota'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'patch', patch_file_path, sbox.wc_dir) @Issue(4297) @@ -4391,7 +4388,7 @@ def single_line_mismatch(sbox): '> rejected hunk @@ -1,1 +1,1 @@\n', ] + svntest.main.summary_of_conflicts(text_conflicts=1) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'patch', patch_file_path, wc_dir) @Issue(3644) @@ -4442,7 +4439,7 @@ def patch_empty_file(sbox): ] # Current result: lf.txt patched ok, new created, empty succeeds with offset. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'patch', patch_file_path, wc_dir) expected_disk = svntest.main.greek_state.copy() @@ -4556,7 +4553,7 @@ def patch_apply_no_fuz(sbox): ] # Current result: lf.txt patched ok, new created, empty succeeds with offset. 
- svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'patch', patch_path, wc_dir) if not filecmp.cmp(sbox.ospath('test.txt'), sbox.ospath('test_v2.txt')): @@ -4657,8 +4654,38 @@ def patch_with_custom_keywords(sbox): expected_output, expected_disk, expected_status, expected_skip) +def patch_git_rename(sbox): + """--git patch with rename header""" + + sbox.build() + wc_dir = sbox.wc_dir + + # a simple --git rename patch + unidiff_patch = [ + "diff --git a/iota b/iota2\n", + "similarity index 100%\n", + "rename from iota\n", + "rename to iota2\n", + ] + + patch_file_path = make_patch_path(sbox) + svntest.main.file_write(patch_file_path, ''.join(unidiff_patch)) + + expected_output = [ 'A %s\n' % sbox.ospath('iota2'), + 'D %s\n' % sbox.ospath('iota')] + expected_disk = svntest.main.greek_state.copy() + expected_disk.remove('iota') + expected_disk.add({'iota2' : Item(contents="This is the file 'iota'.\n")}) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.add({ + 'iota2' : Item(status='A ', copied='+', wc_rev='-', moved_from='iota'), + }) + expected_status.tweak('iota', status='D ', wc_rev=1, moved_to='iota2') + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) -@XFail() @Issue(4533) def patch_hunk_avoid_reorder(sbox): """avoid reordering hunks""" @@ -4787,6 +4814,27 @@ def patch_hunk_avoid_reorder(sbox): sbox.simple_revert('A/mu') +@Issue(4533) +def patch_hunk_avoid_reorder2(sbox): + """avoid reordering hunks 2""" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_append('A/mu', + 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n' + 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' 'YY\n' + 'GG\n' 'HH\n' 'II\n' 'JJ\n' 'KK\n' 'LL\n' + '33333\n' '33333\n' '33333\n' + '33333\n' '33333\n' '33333\n' + '33333\n' '33333\n' '33333\n' + '33333\n' '33333\n' 
'33333\n' + 'MM\n' 'NN\n' 'OO\n' 'PP\n' 'QQ\n' 'RR\n' + 'SS\n' 'TT\n' 'UU\n' 'VV\n' 'WW\n' 'XX\n' + 'YY\n' 'ZZ\n', truncate=True) + sbox.simple_commit() + + # two hunks, first matches at offset +18, second matches at both -13 # change patch so second hunk matches at both -12 and +19, we still # want the second match unidiff_patch = [ @@ -4895,6 +4943,500 @@ def patch_hunk_reorder(sbox): expected_output, expected_disk, expected_status, expected_skip) + # In the following case the reordered hunk2 is smaller offset + # magnitude than hunk2 at the end and the reorder is preferred. + sbox.simple_revert('A/mu') + sbox.simple_append('A/mu', + 'x\n' * 2 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 2 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 10 + + '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' + + 'x\n' * 100 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n', + truncate=True) + sbox.simple_commit() + + unidiff_patch = [ + "Index: A/mu\n" + "===================================================================\n", + "--- A/mu\t(revision 2)\n", + "+++ A/mu\t(working copy)\n", + "@@ -28,7 +28,7 @@\n", + " 1\n", + " 2\n", + " 3\n", + "-hunk1\n", + "+hunk1-mod\n", + " 4\n", + " 5\n", + " 6\n", + "@@ -44,7 +44,7 @@\n", + " 1\n", + " 2\n", + " 3\n", + "-hunk2\n", + "+hunk2-mod\n", + " 4\n", + " 5\n", + " 6\n", + ] + + patch_file_path = make_patch_path(sbox) + svntest.main.file_write(patch_file_path, ''.join(unidiff_patch)) + + expected_output = [ + 'U %s\n' % sbox.ospath('A/mu'), + '> applied hunk @@ -44,7 +44,7 @@ with offset -32\n', + '> applied hunk @@ -28,7 +28,7 @@ with offset 1\n', + ] + expected_disk.tweak('A/mu', contents= + 'x\n' * 2 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 2 + + '1\n' '2\n' '3\n' 'hunk2-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 10 + + '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 100 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n') + + expected_status.tweak('A/mu', status='M ', 
wc_rev=3) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + sbox.simple_revert('A/mu') + + # In this case the reordered hunk2 is further than hunk2 at the end + # and the reordered is not preferred. + unidiff_patch = [ + "Index: A/mu\n" + "===================================================================\n", + "--- A/mu\t(revision 2)\n", + "+++ A/mu\t(working copy)\n", + "@@ -28,7 +28,7 @@\n", + " 1\n", + " 2\n", + " 3\n", + "-hunk1\n", + "+hunk1-mod\n", + " 4\n", + " 5\n", + " 6\n", + "@@ -110,7 +110,7 @@\n", + " 1\n", + " 2\n", + " 3\n", + "-hunk2\n", + "+hunk2-mod\n", + " 4\n", + " 5\n", + " 6\n", + ] + + patch_file_path = make_patch_path(sbox) + svntest.main.file_write(patch_file_path, ''.join(unidiff_patch)) + + expected_output = [ + 'U %s\n' % sbox.ospath('A/mu'), + '> applied hunk @@ -28,7 +28,7 @@ with offset 1\n', + '> applied hunk @@ -110,7 +110,7 @@ with offset 26\n', + ] + expected_disk.tweak('A/mu', contents= + 'x\n' * 2 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 2 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 10 + + '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 100 + + '1\n' '2\n' '3\n' 'hunk2-mod\n' '4\n' '5\n' '6\n') + + expected_status.tweak('A/mu', status='M ', wc_rev=3) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + +@XFail() +def patch_hunk_overlap(sbox): + """hunks that overlap""" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_append('A/mu', + 'AA\n' 'BB\n' 'CC\n' 'DD\n' 'EE\n' 'FF\n' + 'GG\n' 'HH\n' 'II\n', truncate=True) + sbox.simple_commit() + + # Two hunks that overlap when applied, GNU patch can apply both hunks. 
+ unidiff_patch = [ + "Index: A/mu\n" + "===================================================================\n", + "--- A/mu\t(revision 1)\n", + "+++ A/mu\t(working copy)\n", + "@@ -2,6 +2,7 @@\n", + " BB\n", + " CC\n", + " DD\n", + "+11111\n", + " EE\n", + " FF\n", + " GG\n", + "@@ -9,6 +10,7 @@\n", + " DD\n", + " EE\n", + " FF\n", + "+22222\n", + " GG\n", + " HH\n", + " II\n", + ] + + patch_file_path = make_patch_path(sbox) + svntest.main.file_write(patch_file_path, ''.join(unidiff_patch)) + + expected_output = [ + 'U %s\n' % sbox.ospath('A/mu'), + '> applied hunk @@ -9,6 +10,7 @@ with offset -5\n', + ] + expected_disk = svntest.main.greek_state.copy() + expected_disk.tweak('A/mu', contents= + 'AA\n' 'BB\n' 'CC\n' 'DD\n' '11111\n' 'EE\n' 'FF\n' + '22222\n' 'GG\n' 'HH\n' 'II\n') + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', status='M ', wc_rev=2) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + +def patch_delete_modified(sbox): + """patch delete modified""" + + sbox.build() + wc_dir = sbox.wc_dir + + # A patch that deletes beta. 
+ unidiff_patch = [ + "Index: A/B/E/beta\n", + "===================================================================\n", + "--- A/B/E/beta (revision 1)\n", + "+++ A/B/E/beta (working copy)\n", + "@@ -1 +0,0 @@\n", + "-This is the file 'beta'.\n", + ] + + patch_file_path = make_patch_path(sbox) + svntest.main.file_write(patch_file_path, ''.join(unidiff_patch)) + + # First application deletes beta + expected_output = [ + 'D %s\n' % sbox.ospath('A/B/E/beta'), + ] + expected_disk = svntest.main.greek_state.copy() + expected_disk.remove('A/B/E/beta') + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/B/E/beta', status='D ') + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + + # Second application skips + expected_output = [ + 'Skipped \'%s\'\n' % sbox.ospath('A/B/E/beta'), + ] + svntest.main.summary_of_conflicts(skipped_paths=1) + expected_skip = wc.State('', { + sbox.ospath('A/B/E/beta') : Item(verb='Skipped'), + }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + + # Third application, with file present even though state is 'D', also skips + sbox.simple_append('A/B/E/beta', 'Modified', truncate=True) + expected_disk.add({'A/B/E/beta' : Item(contents='Modified')}) + expected_output = [ + 'Skipped \'%s\'\n' % sbox.ospath('A/B/E/beta'), + ] + svntest.main.summary_of_conflicts(skipped_paths=1) + expected_skip = wc.State('', { + sbox.ospath('A/B/E/beta') : Item(verb='Skipped'), + }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + + # Revert and modify beta, fourth application gives a text conflict. 
+ sbox.simple_revert('A/B/E/beta') + sbox.simple_append('A/B/E/beta', 'Modified', truncate=True) + + expected_output = [ + 'C %s\n' % sbox.ospath('A/B/E/beta'), + '> rejected hunk @@ -1,1 +0,0 @@\n', + ] + svntest.main.summary_of_conflicts(text_conflicts=1) + expected_skip = wc.State('', { }) + reject_file_contents = [ + "--- A/B/E/beta\n", + "+++ A/B/E/beta\n", + "@@ -1,1 +0,0 @@\n", + "-This is the file 'beta'.\n", + ] + expected_disk.add({'A/B/E/beta.svnpatch.rej' + : Item(contents=''.join(reject_file_contents)) + }) + expected_status.tweak('A/B/E/beta', status='M ') + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + +def patch_closest(sbox): + "find closest hunk" + + sbox.build() + wc_dir = sbox.wc_dir + + unidiff_patch = [ + "Index: A/mu\n" + "===================================================================\n", + "--- A/mu\t(revision 2)\n", + "+++ A/mu\t(working copy)\n", + "@@ -47,7 +47,7 @@\n", + " 1\n", + " 2\n", + " 3\n", + "-hunk1\n", + "+hunk1-mod\n", + " 4\n", + " 5\n", + " 6\n", + "@@ -66,7 +66,7 @@\n", + " 1\n", + " 2\n", + " 3\n", + "-rejected-hunk2-\n", + "+rejected-hunk2-mod\n", + " 4\n", + " 5\n", + " 6\n", + "@@ -180,7 +180,7 @@\n", + " 1\n", + " 2\n", + " 3\n", + "-hunk3\n", + "+hunk3-mod\n", + " 4\n", + " 5\n", + " 6\n", + ] + patch_file_path = make_patch_path(sbox) + svntest.main.file_write(patch_file_path, ''.join(unidiff_patch)) + + # Previous offset for hunk3 is +4, hunk3 matches at relative offsets + # of -19 and +18, prefer +18 gives final offset +22 + sbox.simple_append('A/mu', + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 30 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10, + truncate=True) + sbox.simple_commit() + + expected_output = [ + 'C %s\n' % 
sbox.ospath('A/mu'), + '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n', + '> applied hunk @@ -180,7 +180,7 @@ with offset 22\n', + '> rejected hunk @@ -66,7 +66,7 @@\n', + ] + svntest.main.summary_of_conflicts(text_conflicts=1) + expected_disk = svntest.main.greek_state.copy() + expected_disk.add({'A/mu.svnpatch.rej' : Item(contents= + "--- A/mu\n" + + "+++ A/mu\n" + + "@@ -66,7 +66,7 @@\n" + + " 1\n" + + " 2\n" + + " 3\n" + + "-rejected-hunk2-\n" + + "+rejected-hunk2-mod\n" + + " 4\n" + + " 5\n" + + " 6\n")}) + expected_disk.tweak('A/mu', contents= + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 30 + + '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 10) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', status='M ', wc_rev=2) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + + # Previous offset for hunk3 is +4, hunk3 matches at relative offsets + # of -19 and +20, prefer -19 gives final offset -15 + sbox.simple_append('A/mu', + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 32 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10, + truncate=True) + sbox.simple_commit() + + expected_output = [ + 'C %s\n' % sbox.ospath('A/mu'), + '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n', + '> applied hunk @@ -180,7 +180,7 @@ with offset -15\n', + '> rejected hunk @@ -66,7 +66,7 @@\n', + ] + svntest.main.summary_of_conflicts(text_conflicts=1) + expected_disk.tweak('A/mu', contents= + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' + + 
'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 32 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', status='M ', wc_rev=3) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + + # Previous offset for hunk3 is +4, hunk3 matches at relative offsets + # of -19 and +19, prefer -19 gives final offset -15 + sbox.simple_append('A/mu', + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 31 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10, + truncate=True) + sbox.simple_commit() + + expected_output = [ + 'C %s\n' % sbox.ospath('A/mu'), + '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n', + '> applied hunk @@ -180,7 +180,7 @@ with offset -15\n', + '> rejected hunk @@ -66,7 +66,7 @@\n', + ] + svntest.main.summary_of_conflicts(text_conflicts=1) + expected_disk.tweak('A/mu', contents= + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 31 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', status='M ', wc_rev=4) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + + # Previous offset for hunk3 is +4, hunk3 matches at relative offsets + # of +173 and -173, prefer +173 gives 
final offset +177 + sbox.simple_append('A/mu', + 'x\n' * 10 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 33 + + '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 242 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10, + truncate=True) + sbox.simple_commit() + + expected_output = [ + 'C %s\n' % sbox.ospath('A/mu'), + '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n', + '> applied hunk @@ -180,7 +180,7 @@ with offset 177\n', + '> rejected hunk @@ -66,7 +66,7 @@\n', + ] + svntest.main.summary_of_conflicts(text_conflicts=1) + expected_disk.tweak('A/mu', contents= + 'x\n' * 10 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 33 + + '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 242 + + '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 10) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', status='M ', wc_rev=5) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + + # Previous offset for hunk3 is +4, hunk3 matches at relative offsets + # of +174 and -173, prefer -173 gives final offset -169 + sbox.simple_append('A/mu', + 'x\n' * 10 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 33 + + '1\n' '2\n' '3\n' 'hunk1\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 243 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10, + truncate=True) + sbox.simple_commit() + + expected_output = [ + 'C %s\n' % sbox.ospath('A/mu'), + '> applied hunk @@ -180,7 +180,7 @@ with offset -169\n', + '> applied hunk @@ -47,7 +47,7 @@ with offset 4\n', + '> rejected hunk @@ -66,7 +66,7 @@\n', + ] + 
svntest.main.summary_of_conflicts(text_conflicts=1) + expected_disk.tweak('A/mu', contents= + 'x\n' * 10 + + '1\n' '2\n' '3\n' 'hunk3-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 33 + + '1\n' '2\n' '3\n' 'hunk1-mod\n' '4\n' '5\n' '6\n' + + 'x\n' * 50 + + '1\n' '2\n' '3\n' 'hunk2\n' '4\n' '5\n' '6\n' + + 'x\n' * 243 + + '1\n' '2\n' '3\n' 'hunk3\n' '4\n' '5\n' '6\n' + + 'x\n' * 10) + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/mu', status='M ', wc_rev=6) + expected_skip = wc.State('', { }) + svntest.actions.run_and_verify_patch(wc_dir, os.path.abspath(patch_file_path), + expected_output, expected_disk, + expected_status, expected_skip) + ######################################################################## #Run the tests @@ -4947,8 +5489,13 @@ test_list = [ None, patch_apply_no_fuz, patch_lacking_trailing_eol_on_context, patch_with_custom_keywords, + patch_git_rename, patch_hunk_avoid_reorder, + patch_hunk_avoid_reorder2, patch_hunk_reorder, + patch_hunk_overlap, + patch_delete_modified, + patch_closest, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/prop_tests.py b/subversion/tests/cmdline/prop_tests.py index e3f963e..0d8d89b 100755 --- a/subversion/tests/cmdline/prop_tests.py +++ b/subversion/tests/cmdline/prop_tests.py @@ -100,8 +100,7 @@ def make_local_props(sbox): # Edit without actually changing the property svntest.main.use_editor('identity') - svntest.actions.run_and_verify_svn(None, - "No changes to property 'editme' on '.*'", + svntest.actions.run_and_verify_svn("No changes to property 'editme' on '.*'", [], 'propedit', 'editme', os.path.join(wc_dir, 'A', 'mu')) @@ -134,9 +133,7 @@ def commit_props(sbox): # Commit the one file. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) @@ -170,16 +167,14 @@ def update_props(sbox): # Commit property mods svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Add more properties sbox.simple_propset('blue2', 'azul2', 'A/mu') sbox.simple_propset('red2', 'rojo2', 'A/D/H') expected_status.tweak('A/mu', 'A/D/H', wc_rev=3, status=' ') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create expected output tree for an update of the wc_backup. expected_output = svntest.wc.State(wc_backup, { @@ -202,7 +197,7 @@ def update_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, + [], 1, '-r', '2', wc_backup) # This adds properties to nodes that have properties @@ -215,7 +210,7 @@ def update_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, + [], 1, '-r', '3', wc_backup) @@ -244,8 +239,7 @@ def downdate_props(sbox): # Commit the one file. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Make some mod (something to commit) svntest.main.file_append(mu_path, "some mod") @@ -262,8 +256,7 @@ def downdate_props(sbox): # Commit the one file. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create expected output tree for an update. expected_output = svntest.wc.State(wc_dir, { @@ -282,7 +275,7 @@ def downdate_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, + [], 1, '-r', '1', wc_dir) #---------------------------------------------------------------------- @@ -314,8 +307,7 @@ def remove_props(sbox): # Commit the one file. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -363,14 +355,8 @@ def update_conflict_props(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files, - None, None, 1) - - if len(extra_files) != 0: - logger.warn("didn't get expected conflict files") - raise svntest.verify.SVNUnexpectedOutput + check_props=True, + extra_files=extra_files) # Resolve the conflicts svntest.actions.run_and_verify_resolved([mu_path, A_path]) @@ -405,8 +391,7 @@ def commit_conflict_dirprops(sbox): sbox.simple_propset('foo', 'eek', '') svntest.actions.run_and_verify_commit(wc_dir, None, None, - "[oO]ut[- ]of[- ]date", - wc_dir) + ".*[oO]ut[- ]of[- ]date.*") #---------------------------------------------------------------------- @@ -465,8 +450,7 @@ def commit_replacement_props(sbox): expected_status.tweak('A/B/lambda', wc_rev=3, status=' ') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -530,8 +514,7 @@ def revert_replacement_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - 1) + check_props=True) #---------------------------------------------------------------------- @Issues(920,2065) @@ -550,51 +533,50 @@ def inappropriate_props(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # These should produce an error - svntest.actions.run_and_verify_svn('Illegal target', - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:executable', 'on', A_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:keywords', 'LastChangedDate', 
A_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:eol-style', 'native', A_path) - svntest.actions.run_and_verify_svn('Invalid svn:eol-style', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:eol-style', 'invalid value', os.path.join(A_path, 'mu')) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:mime-type', 'image/png', A_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:ignore', '*.o', iota_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:externals', 'foo http://host.com/repos', iota_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:author', 'socrates', iota_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:log', 'log message', iota_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:date', 'Tue Jan 19 04:14:07 2038', iota_path) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:original-date', 'Thu Jan 1 01:00:00 1970', iota_path) @@ -603,7 +585,7 @@ def inappropriate_props(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Recursive setting of inappropriate dir prop should work on files - svntest.actions.run_and_verify_svn(None, None, [], 'propset', '-R', + svntest.actions.run_and_verify_svn(None, [], 
'propset', '-R', 'svn:executable', 'on', E_path) expected_status.tweak('A/B/E/alpha', 'A/B/E/beta', status=' M') @@ -620,7 +602,7 @@ def inappropriate_props(sbox): 'propset', 'svn:mime-type', 'application/octet-stream', sbox.ospath('binary')) - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:eol-style', 'CRLF', path) @@ -629,7 +611,7 @@ def inappropriate_props(sbox): svntest.main.file_append(path, "line1\rline2\n") sbox.simple_add('multi-eol') - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:eol-style', 'LF', path) @@ -638,7 +620,7 @@ def inappropriate_props(sbox): svntest.main.file_append(path, "line1\n\r") sbox.simple_add('backwards-eol') - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:eol-style', 'native', path) @@ -647,7 +629,7 @@ def inappropriate_props(sbox): svntest.main.file_append(path, "line1\r\n\r") sbox.simple_add('incomplete-eol') - svntest.actions.run_and_verify_svn('Illegal target', None, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'propset', 'svn:eol-style', 'CR', path) @@ -657,25 +639,25 @@ def inappropriate_props(sbox): path = sbox.ospath('binary') svntest.main.file_append(path, "binary") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '--force', 'svn:eol-style', 'CRLF', path) path = sbox.ospath('multi-eol') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '--force', 'svn:eol-style', 'LF', path) path = sbox.ospath('backwards-eol') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '--force', 'svn:eol-style', 'native', path) path = 
sbox.ospath('incomplete-eol') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '--force', 'svn:eol-style', 'CR', path) @@ -684,18 +666,18 @@ def inappropriate_props(sbox): path = sbox.ospath('A/D') # ...grammatically incorrect - svntest.actions.run_and_verify_svn('illegal grammar', None, + svntest.actions.run_and_verify_svn(None, "svn: E200020: Pathname not terminated by ':'\n", 'propset', SVN_PROP_MERGEINFO, '/trunk', path) - svntest.actions.run_and_verify_svn('illegal grammar', None, + svntest.actions.run_and_verify_svn(None, "svn: E200022: Invalid revision number found " "parsing 'one'\n", 'propset', SVN_PROP_MERGEINFO, '/trunk:one', path) # ...contain overlapping revision ranges of differing inheritability. - svntest.actions.run_and_verify_svn('overlapping ranges', None, + svntest.actions.run_and_verify_svn(None, "svn: E200020: Unable to parse overlapping " "revision ranges '9-20\\*' and " "'18-22' with different " @@ -703,7 +685,7 @@ def inappropriate_props(sbox): 'propset', SVN_PROP_MERGEINFO, '/branch:5-7,9-20*,18-22', path) - svntest.actions.run_and_verify_svn('overlapping ranges', None, + svntest.actions.run_and_verify_svn(None, "svn: E200020: Unable to parse overlapping " "revision ranges " "(('3' and '3\\*')|('3\\*' and '3')) " @@ -714,21 +696,21 @@ def inappropriate_props(sbox): # ...contain revision ranges with start revisions greater than or # equal to end revisions. 
- svntest.actions.run_and_verify_svn('range start >= range end', None, + svntest.actions.run_and_verify_svn(None, "svn: E200020: Unable to parse reversed " "revision range '20-5'\n", 'propset', SVN_PROP_MERGEINFO, '/featureX:4,20-5', path) # ...contain paths mapped to empty revision ranges - svntest.actions.run_and_verify_svn('empty ranges', None, + svntest.actions.run_and_verify_svn(None, "svn: E200020: Mergeinfo for '/trunk' maps to " "an empty revision range\n", 'propset', SVN_PROP_MERGEINFO, '/trunk:', path) # ...contain non-inheritable ranges when the target is a file. - svntest.actions.run_and_verify_svn('empty ranges', None, + svntest.actions.run_and_verify_svn(None, "svn: E200020: Cannot set non-inheritable " "mergeinfo on a non-directory*", 'propset', SVN_PROP_MERGEINFO, @@ -808,54 +790,49 @@ def copy_inherits_special_props(sbox): # non-Posix platforms, we won't have to skip here: @Skip(is_non_posix_and_non_windows_os) @Issue(3086) -@XFail(svntest.main.is_ra_type_dav) def revprop_change(sbox): "set, get, and delete a revprop change" sbox.build() # First test the error when no revprop-change hook exists. - svntest.actions.run_and_verify_svn(None, None, '.*pre-revprop-change', + svntest.actions.run_and_verify_svn(None, '.*pre-revprop-change', 'propset', '--revprop', '-r', '0', 'cash-sound', 'cha-ching!', sbox.wc_dir) # Now test error output from revprop-change hook. 
svntest.actions.disable_revprop_changes(sbox.repo_dir) - svntest.actions.run_and_verify_svn(None, None, '.*pre-revprop-change.* 0 jrandom cash-sound A', + svntest.actions.run_and_verify_svn(None, '.*pre-revprop-change.* 0 jrandom cash-sound A', 'propset', '--revprop', '-r', '0', 'cash-sound', 'cha-ching!', sbox.wc_dir) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '--revprop', '-r', '0', 'cash-sound', 'cha-ching!', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propget', '--revprop', '-r', '0', 'cash-sound', sbox.wc_dir) # Now test that blocking the revprop delete. svntest.actions.disable_revprop_changes(sbox.repo_dir) - svntest.actions.run_and_verify_svn(None, None, '.*pre-revprop-change.* 0 jrandom cash-sound D', + svntest.actions.run_and_verify_svn(None, '.*pre-revprop-change.* 0 jrandom cash-sound D', 'propdel', '--revprop', '-r', '0', 'cash-sound', sbox.wc_dir) # Now test actually deleting the revprop. svntest.actions.enable_revprop_changes(sbox.repo_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propdel', '--revprop', '-r', '0', 'cash-sound', sbox.wc_dir) - actual_exit, actual_stdout, actual_stderr = svntest.main.run_svn( - None, 'pg', '--revprop', '-r', '0', 'cash-sound', sbox.wc_dir) - # The property should have been deleted. 
- regex = 'cha-ching' - for line in actual_stdout: - if re.match(regex, line): - raise svntest.Failure + svntest.actions.run_and_verify_svn(None, + '.*(E195011|E200017).*cash-sound.*', + 'propget', '--revprop', '-r', '0', 'cash-sound', sbox.wc_dir) #---------------------------------------------------------------------- @@ -910,7 +887,7 @@ def prop_value_conversions(sbox): svntest.actions.set_prop('some-prop', 'bar\n', iota_path) # NOTE: When writing out multi-line prop values in svn:* props, the - # client converts to local encoding and local eoln style. + # client converts to local encoding and local eol style. # Therefore, the expected output must contain the right kind of eoln # strings. That's why we use os.linesep in the tests below, not just # plain '\n'. The _last_ \n is also from the client, but it's not @@ -1008,9 +985,7 @@ def binary_props(sbox): # Commit the propsets. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Create expected output, disk, and status trees for an update of # the wc_backup. @@ -1028,8 +1003,7 @@ def binary_props(sbox): svntest.actions.run_and_verify_update(wc_backup, expected_output, expected_disk, - expected_status, - None, None, None, None, None, 0) + expected_status) # Now, check those properties. 
svntest.actions.check_prop('prop_zb', B_path_bak, [prop_zb]) @@ -1121,7 +1095,7 @@ def recursive_base_wc_ops(sbox): }) svntest.actions.run_and_verify_status(wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '-R', 'svn:keywords', 'Date', os.path.join(wc_dir, 'A', 'B')) expected_status.tweak('A/B/lambda', 'A/B/E/alpha', 'A/B/E/beta', status=' M') @@ -1159,9 +1133,9 @@ def url_props_ops(sbox): sbox.simple_commit() # Test propget - svntest.actions.run_and_verify_svn(None, [ propval1 + '\n' ], [], + svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [], 'propget', prop1, iota_url) - svntest.actions.run_and_verify_svn(None, [ propval1 + '\n' ], [], + svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [], 'propget', prop1, A_url) # Test normal proplist @@ -1197,15 +1171,14 @@ def url_props_ops(sbox): 'propedit', prop1, '-m', 'editlog', iota_url) svntest.main.run_svn(None, 'propedit', prop1, '-m', 'editlog', A_url) - svntest.actions.run_and_verify_svn(None, [ propval1 + '\n' ], [], + svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [], 'propget', prop1, iota_url) - svntest.actions.run_and_verify_svn(None, [ propval1 + '\n' ], [], + svntest.actions.run_and_verify_svn([ propval1 + '\n' ], [], 'propget', prop1, A_url) # Edit without actually changing the property svntest.main.use_editor('identity') - svntest.actions.run_and_verify_svn(None, - "No changes to property '%s' on '.*'" + svntest.actions.run_and_verify_svn("No changes to property '%s' on '.*'" % prop1, [], 'propedit', prop1, '-m', 'nocommit', @@ -1233,20 +1206,20 @@ def removal_schedule_added_props(sbox): # create new fs file open(newfile_path, 'w').close() # Add it and set a property - svntest.actions.run_and_verify_svn(None, file_add_output, [], 'add', newfile_path) - svntest.actions.run_and_verify_svn(None, propset_output, [], 'propset', + svntest.actions.run_and_verify_svn(file_add_output, [], 'add', 
newfile_path) + svntest.actions.run_and_verify_svn(propset_output, [], 'propset', 'newprop', 'newvalue', newfile_path) - svntest.actions.run_and_verify_svn(None, propls_output, [], + svntest.actions.run_and_verify_svn(propls_output, [], 'proplist', '-v', newfile_path) # remove the file - svntest.actions.run_and_verify_svn(None, file_rm_output, [], + svntest.actions.run_and_verify_svn(file_rm_output, [], 'rm', '--force', newfile_path) # recreate the file and add it again open(newfile_path, 'w').close() - svntest.actions.run_and_verify_svn(None, file_add_output, [], 'add', newfile_path) + svntest.actions.run_and_verify_svn(file_add_output, [], 'add', newfile_path) # Now there should be NO properties leftover... - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'proplist', '-v', newfile_path) #---------------------------------------------------------------------- @@ -1276,8 +1249,7 @@ def update_props_on_wc_root(sbox): # Commit the working copy svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create expected output tree for an update of the wc_backup. 
expected_output = svntest.wc.State(wc_backup, { @@ -1297,7 +1269,7 @@ def update_props_on_wc_root(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1) + check_props=True) # test for issue 2743 @Issue(2743) @@ -1449,23 +1421,23 @@ def invalid_propnames(sbox): expected_stdout = (".*Attempting to delete nonexistent property " "'%s'.*" % (propname,)) - svntest.actions.run_and_verify_svn(None, expected_stdout, [], + svntest.actions.run_and_verify_svn(expected_stdout, [], 'propdel', propname) expected_stderr = (".*'%s' is not a valid Subversion" ' property name' % (propname,)) - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'propedit', propname) - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'propget', propname) - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'propset', propname, propval) - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'commit', '--with-revprop', '='.join([propname, propval])) # Now swap them: --with-revprop should accept propname as a property # value; no concept of validity there. 
- svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'commit', '--with-revprop', '='.join([propval, propname])) @@ -1486,15 +1458,15 @@ def perms_on_symlink(sbox): saved_cwd = os.getcwd() os.chdir(sbox.wc_dir) try: - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', 'newdir') + svntest.actions.run_and_verify_svn(None, [], 'mkdir', 'newdir') os.symlink('newdir', 'symlink') - svntest.actions.run_and_verify_svn(None, None, [], 'add', 'symlink') + svntest.actions.run_and_verify_svn(None, [], 'add', 'symlink') old_mode = os.stat('newdir')[stat.ST_MODE] # The only property on 'symlink' is svn:special, so attempting to remove # 'svn:executable' should result in an error expected_stdout = (".*Attempting to delete nonexistent property " "'svn:executable'.*") - svntest.actions.run_and_verify_svn(None, expected_stdout, [], 'propdel', + svntest.actions.run_and_verify_svn(expected_stdout, [], 'propdel', 'svn:executable', 'symlink') new_mode = os.stat('newdir')[stat.ST_MODE] if not old_mode == new_mode: @@ -1535,8 +1507,7 @@ def remove_custom_ns_props(sbox): # Commit the one file. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create expected trees for the update. 
expected_output = svntest.wc.State(wc_backup, { @@ -1551,7 +1522,7 @@ def remove_custom_ns_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1) + check_props=True) def props_over_time(sbox): "property retrieval with peg and operative revs" @@ -1606,12 +1577,13 @@ def props_over_time(sbox): pget_expected = expected if pget_expected: pget_expected = [ pget_expected + "\n" ] + expected_err = [] if expected else '.*W200017: Property.*not found.*' if op_rev != 0: - svntest.actions.run_and_verify_svn(None, pget_expected, [], + svntest.actions.run_and_verify_svn(pget_expected, expected_err, 'propget', 'revision', peg_path, '-r', str(op_rev)) else: - svntest.actions.run_and_verify_svn(None, pget_expected, [], + svntest.actions.run_and_verify_svn(pget_expected, expected_err, 'propget', 'revision', peg_path) ### Test 'svn proplist -v' @@ -1624,17 +1596,16 @@ def props_over_time(sbox): " " + expected + "\n" ] if op_rev != 0: - svntest.actions.run_and_verify_svn(None, plist_expected, [], + svntest.actions.run_and_verify_svn(plist_expected, [], 'proplist', '-v', peg_path, '-r', str(op_rev)) else: - svntest.actions.run_and_verify_svn(None, plist_expected, [], + svntest.actions.run_and_verify_svn(plist_expected, [], 'proplist', '-v', peg_path) # XFail the same reason revprop_change() is. 
@SkipUnless(svntest.main.server_enforces_date_syntax) -@XFail(svntest.main.is_ra_type_dav) @Issue(3086) def invalid_propvalues(sbox): "test handling invalid svn:* property values" @@ -1646,7 +1617,7 @@ def invalid_propvalues(sbox): svntest.actions.enable_revprop_changes(repo_dir) expected_stderr = '.*unexpected property value.*|.*Bogus date.*' - svntest.actions.run_and_verify_svn(None, [], expected_stderr, + svntest.actions.run_and_verify_svn([], expected_stderr, 'propset', '--revprop', '-r', '0', 'svn:date', 'Sat May 10 12:12:31 2008', repo_url) @@ -1678,7 +1649,7 @@ def same_replacement_props(sbox): expected_out = [ "Properties on '" + foo_url + "':\n", " someprop\n", " someval\n" ] - svntest.actions.run_and_verify_svn(None, expected_out, [], + svntest.actions.run_and_verify_svn(expected_out, [], 'proplist', '-v', foo_url) def added_moved_file(sbox): @@ -1720,7 +1691,7 @@ def delete_nonexistent_property(sbox): # Remove one property expected_stdout = ".*Attempting to delete nonexistent property 'yellow'.*" - svntest.actions.run_and_verify_svn(None, expected_stdout, [], + svntest.actions.run_and_verify_svn(expected_stdout, [], 'propdel', 'yellow', os.path.join(wc_dir, 'A', 'D', 'G')) @@ -1740,17 +1711,17 @@ def post_revprop_change_hook(sbox): svntest.actions.create_failing_hook(repo_dir, 'post-revprop-change', error_msg) - # serf/neon/mod_dav_svn give SVN_ERR_RA_DAV_REQUEST_FAILED + # serf/mod_dav_svn give SVN_ERR_RA_DAV_PROPPATCH_FAILED # file/svn give SVN_ERR_REPOS_HOOK_FAILURE - expected_error = 'svn: (E175002|E165001).*post-revprop-change hook failed' + expected_error = 'svn: (E175008|E165001).*post-revprop-change hook failed' - svntest.actions.run_and_verify_svn(None, [], expected_error, + svntest.actions.run_and_verify_svn([], expected_error, 'ps', '--revprop', '-r0', 'p', 'v', wc_dir) # Verify change has stuck -- at one time mod_dav_svn would rollback # revprop changes on post-revprop-change hook errors - svntest.actions.run_and_verify_svn(None, 'v', [], + 
svntest.actions.run_and_verify_svn('v', [], 'pg', '--revprop', '-r0', 'p', wc_dir) @@ -1786,7 +1757,7 @@ def rm_of_replaced_file(sbox): svntest.main.run_svn(None, 'rm', '--force', mu_path) svntest.actions.run_and_verify_svn( - None, [], + [], 'svn: E200009.*some targets are not versioned.*', 'proplist', '-v', mu_path) @@ -1884,8 +1855,9 @@ def prop_reject_grind(sbox): "Trying to change property 'edit.none'\n" "but the property does not exist locally.\n" "<<<<<<< (local property value)\n" - "=======\n" - "repos.changed>>>>>>> (incoming property value)\n", + "||||||| (incoming 'changed from' value)\n" + "repos=======\n" + "repos.changed>>>>>>> (incoming 'changed to' value)\n", "Trying to delete property 'del.del'\n" "but the property has been locally deleted and had a different value.\n", @@ -1893,75 +1865,84 @@ def prop_reject_grind(sbox): "Trying to delete property 'del.edit'\n" "but the local property value is different.\n" "<<<<<<< (local property value)\n" - "local.changed=======\n" - ">>>>>>> (incoming property value)\n", + "local.changed||||||| (incoming 'changed from' value)\n" + "repos=======\n" + ">>>>>>> (incoming 'changed to' value)\n", "Trying to change property 'edit.del'\n" "but the property has been locally deleted.\n" "<<<<<<< (local property value)\n" - "=======\n" - "repos.changed>>>>>>> (incoming property value)\n", + "||||||| (incoming 'changed from' value)\n" + "repos=======\n" + "repos.changed>>>>>>> (incoming 'changed to' value)\n", "Trying to change property 'edit.edit'\n" "but the property has already been locally changed to a different value.\n" "<<<<<<< (local property value)\n" - "local.changed=======\n" - "repos.changed>>>>>>> (incoming property value)\n", + "local.changed||||||| (incoming 'changed from' value)\n" + "repos=======\n" + "repos.changed>>>>>>> (incoming 'changed to' value)\n", "Trying to delete property 'del.edit2'\n" "but the property has been locally modified.\n" "<<<<<<< (local property value)\n" - 
"repos.changed=======\n" - ">>>>>>> (incoming property value)\n", + "repos.changed||||||| (incoming 'changed from' value)\n" + "repos=======\n" + ">>>>>>> (incoming 'changed to' value)\n", "Trying to delete property 'del.add'\n" "but the property has been locally added.\n" "<<<<<<< (local property value)\n" - "local=======\n" - ">>>>>>> (incoming property value)\n", + "local||||||| (incoming 'changed from' value)\n" + "repos=======\n" + ">>>>>>> (incoming 'changed to' value)\n", "Trying to delete property 'del.diff'\n" "but the local property value is different.\n" "<<<<<<< (local property value)\n" - "local=======\n" - ">>>>>>> (incoming property value)\n", + "local||||||| (incoming 'changed from' value)\n" + "repos=======\n" + ">>>>>>> (incoming 'changed to' value)\n", "Trying to change property 'edit.add'\n" "but the property has been locally added with a different value.\n" "<<<<<<< (local property value)\n" - "local=======\n" - "repos.changed>>>>>>> (incoming property value)\n", + "local||||||| (incoming 'changed from' value)\n" + "repos=======\n" + "repos.changed>>>>>>> (incoming 'changed to' value)\n", "Trying to change property 'edit.diff'\n" "but the local property value conflicts with the incoming change.\n" "<<<<<<< (local property value)\n" - "local=======\n" - "repos.changed>>>>>>> (incoming property value)\n", + "local||||||| (incoming 'changed from' value)\n" + "repos=======\n" + "repos.changed>>>>>>> (incoming 'changed to' value)\n", "Trying to add new property 'add.add'\n" "but the property already exists.\n" "<<<<<<< (local property value)\n" - "local=======\n" - "repos>>>>>>> (incoming property value)\n", + "local||||||| (incoming 'changed from' value)\n" + "=======\n" + "repos>>>>>>> (incoming 'changed to' value)\n", "Trying to add new property 'add.diff'\n" "but the property already exists.\n" - "Local property value:\n" - "local\n" - "Incoming property value:\n" - "repos\n", + "<<<<<<< (local property value)\n" + "local||||||| (incoming 
'changed from' value)\n" + "=======\n" + "repos>>>>>>> (incoming 'changed to' value)\n", "Trying to add new property 'add.del'\n" "but the property has been locally deleted.\n" - "<<<<<<< (local property value)\n" - "=======\n" - "repos>>>>>>> (incoming property value)\n", + "Incoming property value:\n" + "repos\n", "Trying to add new property 'add.edit'\n" "but the property already exists.\n" "<<<<<<< (local property value)\n" - "local.changed=======\n" - "repos>>>>>>> (incoming property value)\n", + "local.changed||||||| (incoming 'changed from' value)\n" + "=======\n" + "repos>>>>>>> (incoming 'changed to' value)\n", ] # Get the contents of mu.prej. The error messages are in the prej file @@ -2048,7 +2029,7 @@ def atomic_over_ra(sbox): # Initial state. svntest.actions.enable_revprop_changes(sbox.repo_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '--revprop', '-r', '0', 'flower', s1, repo_url) @@ -2059,8 +2040,8 @@ def atomic_over_ra(sbox): # so we should fail. expected_stderr = ".*doesn't advertise.*ATOMIC_REVPROP" svntest.actions.run_and_verify_atomic_ra_revprop_change( - None, None, expected_stderr, 1, repo_url, 0, 'flower', - old_value, proposed_value) + None, expected_stderr, 1, repo_url, 0, 'flower', + old_value, proposed_value, True) # The original value is still there. 
svntest.actions.check_prop('flower', repo_url, [s1], 0) @@ -2068,7 +2049,7 @@ def atomic_over_ra(sbox): def FAILS_WITH_BPV(not_the_old_value, proposed_value): if svntest.main.server_has_atomic_revprop(): svntest.actions.run_and_verify_atomic_ra_revprop_change( - None, None, [], 0, repo_url, 0, 'flower', + None, [], 0, repo_url, 0, 'flower', not_the_old_value, proposed_value, True) else: expect_old_server_fail(not_the_old_value, proposed_value) @@ -2076,7 +2057,7 @@ def atomic_over_ra(sbox): def PASSES_WITHOUT_BPV(yes_the_old_value, proposed_value): if svntest.main.server_has_atomic_revprop(): svntest.actions.run_and_verify_atomic_ra_revprop_change( - None, None, [], 0, repo_url, 0, 'flower', + None, [], 0, repo_url, 0, 'flower', yes_the_old_value, proposed_value, False) else: expect_old_server_fail(yes_the_old_value, proposed_value) @@ -2247,16 +2228,16 @@ def propget_redirection(sbox): # Set the 'big' mergeinfo prop on A/B, A/C, and A/D. svntest.main.file_write(prop_val_file, big_prop_val) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', SVN_PROP_MERGEINFO, '-F', prop_val_file, B_path) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', SVN_PROP_MERGEINFO, '-F', prop_val_file, C_path) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', SVN_PROP_MERGEINFO, '-F', prop_val_file, D_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'ps some large svn:mergeinfos', wc_dir) # Run propget -vR svn:mergeinfo, redirecting the stdout to a file. 
@@ -2300,7 +2281,7 @@ def file_matching_dir_prop_reject(sbox): # Add file with awkward name svntest.main.file_append(sbox.ospath('A/dir_conflicts'), "some content\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', sbox.ospath('A/dir_conflicts')) sbox.simple_propset('prop', 'val1', 'A/dir_conflicts') sbox.simple_propset('prop', 'val1', 'A') @@ -2314,7 +2295,7 @@ def file_matching_dir_prop_reject(sbox): 'A/dir_conflicts' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Modify/commit property change sbox.simple_propset('prop', 'val2', 'A/dir_conflicts') @@ -2325,7 +2306,7 @@ def file_matching_dir_prop_reject(sbox): }) expected_status.tweak('A', 'A/dir_conflicts', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Local property mod sbox.simple_propset('prop', 'val3', 'A/dir_conflicts') @@ -2344,21 +2325,18 @@ def file_matching_dir_prop_reject(sbox): expected_status.tweak(wc_rev=2) expected_status.tweak('A', 'A/dir_conflicts', status=' C') + # Conflict: BASE=val2 WORKING=val3 INCOMING_OLD=val2 INCOMING_NEW=val1 extra_files = ['dir_conflicts.prej', 'dir_conflicts.2.prej'] svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files, - None, None, True, '-r', '2', wc_dir) - if len(extra_files) != 0: - logger.warn("didn't get expected conflict files") - raise svntest.verify.SVNUnexpectedOutput + [], True, + '-r', '2', wc_dir, + extra_files=extra_files) # Revert and update to check that conflict files are removed - svntest.actions.run_and_verify_svn(None, None, [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) expected_status.tweak('A', 'A/dir_conflicts', status=' ') 
svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -2372,7 +2350,7 @@ def file_matching_dir_prop_reject(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, True) + check_props=True) def pristine_props_listed(sbox): "check if pristine properties are visible" @@ -2386,13 +2364,13 @@ def pristine_props_listed(sbox): expected_output = ["Properties on '" + sbox.ospath('A') + "':\n", " prop\n"] # Now we see the pristine properties - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'proplist', '-R', wc_dir, '-r', 'BASE') sbox.simple_propset('prop', 'needs-fix', 'A') # And now we see no property at all - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'proplist', '-R', wc_dir, '-r', 'BASE') def create_inherited_ignores_config(config_dir): @@ -2470,7 +2448,7 @@ def inheritable_ignores(sbox): ['? ' + X_dir_path + '\n', '? ' + Y_dir_path + '\n', '? ' + Z_dir_path + '\n',]) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'st', + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '--config-dir', config_dir, wc_dir) # Check status without the custom config. @@ -2482,7 +2460,7 @@ def inheritable_ignores(sbox): '? ' + Z_dir_path + '\n', '? ' + boo_dir_path + '\n', '? ' + goo_file_path + '\n',]) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'st', wc_dir) + svntest.actions.run_and_verify_svn(expected_output, [], 'st', wc_dir) # Check status with the custom config and --no-ignore. 
expected_output = svntest.verify.UnorderedOutput( @@ -2497,7 +2475,7 @@ def inheritable_ignores(sbox): 'I ' + goo_file_path + '\n', 'I ' + moo_file_path + '\n', 'I ' + foo_file_path + '\n',]) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'st', + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '--config-dir', config_dir, '--no-ignore', wc_dir) @@ -2516,7 +2494,7 @@ def inheritable_ignores(sbox): '? ' + goo_file_path + '\n', 'I ' + moo_file_path + '\n', 'I ' + foo_file_path + '\n',]) - svntest.actions.run_and_verify_svn(None, expected_output, [], 'st', + svntest.actions.run_and_verify_svn(expected_output, [], 'st', '--no-ignore', wc_dir) # Perform the add with the --force flag, targeting the root of the WC. @@ -2531,14 +2509,14 @@ def inheritable_ignores(sbox): ['A ' + 'ADD-ME-DIR-X\n', 'A ' + os.path.join('A', 'ADD-ME-DIR-Y.doo') + '\n', 'A ' + os.path.join('A', 'D', 'G', 'ADD-ME-DIR-Z.doo') + '\n']) - svntest.actions.run_and_verify_svn("Adds in spite of ignores", expected, + svntest.actions.run_and_verify_svn(expected, [], 'add', '.', '--force', '--config-dir', config_dir) os.chdir(saved_wd) # Now revert and try the add with the --no-ignore flag, nothing should # be ignored. 
- svntest.actions.run_and_verify_svn(None, None, [], 'revert', wc_dir, '-R') + svntest.actions.run_and_verify_svn(None, [], 'revert', wc_dir, '-R') saved_wd = os.getcwd() os.chdir(sbox.wc_dir) expected = svntest.verify.UnorderedOutput( @@ -2561,8 +2539,7 @@ def inheritable_ignores(sbox): 'ignore-me-file.roo') + '\n', 'A ' + os.path.join('A', 'D', 'IGNORE-ME-DIR.moo') + '\n', 'A ' + os.path.join('A', 'D', 'ignore-me-file.moo') + '\n']) - svntest.actions.run_and_verify_svn("Files ignored with --no-ignore", - expected, [], 'add', '.', '--force', + svntest.actions.run_and_verify_svn(expected, [], 'add', '.', '--force', '--no-ignore', '--config-dir', config_dir) @@ -2595,7 +2572,7 @@ def almost_known_prop_names(sbox): svntest.actions.set_prop('svn:foobar', 'x', iota_path, "svn: E195011: 'svn:foobar'" " is not a valid svn: property name;" - " re-run with '--force' to set it") + " use '--force' to set it") @Issue(3231) def peg_rev_base_working(sbox): @@ -2608,10 +2585,173 @@ def peg_rev_base_working(sbox): svntest.actions.set_prop('ordinal', 'ninth\n', sbox.ospath('iota')) sbox.simple_commit(message='r2') svntest.actions.set_prop('cardinal', 'nine\n', sbox.ospath('iota')) - svntest.actions.run_and_verify_svn(None, ['ninth\n'], [], - 'propget', '--strict', 'ordinal', + svntest.actions.run_and_verify_svn(['ninth\n'], [], + 'propget', '--no-newline', 'ordinal', sbox.ospath('iota') + '@BASE') +@Issue(4415) +def xml_unsafe_author(sbox): + "svn:author with XML unsafe chars" + + sbox.build() + wc_dir = sbox.wc_dir + + svntest.actions.enable_revprop_changes(sbox.repo_dir) + + # client sends svn:author (via PROPPATCH for DAV) + svntest.actions.run_and_verify_svn(None, [], + 'propset', '--revprop', '-r', '1', + 'svn:author', 'foo\bbar', wc_dir) + + # mod_dav_svn sends svn:author (via REPORT for DAV) + sbox.simple_update(revision=0) + sbox.simple_update(revision=1) + expected_info = [{ + 'Path' : re.escape(wc_dir), + 'Repository Root' : sbox.repo_url, + 'Repository UUID' : 
svntest.actions.get_wc_uuid(wc_dir), + 'Last Changed Author' : 'foo\bbar', + }] + svntest.actions.run_and_verify_info(expected_info, wc_dir) + + # mod_dav_svn sends svn:author (via PROPFIND for DAV) + # Since r1553367 this works correctly on ra_serf, since we now request + # a single property value which skips creating the creator-displayname property + svntest.actions.run_and_verify_svn(['foo\bbar'], [], + 'propget', '--revprop', '-r', '1', + 'svn:author', '--no-newline', wc_dir) + + # Ensure a stable date + svntest.actions.run_and_verify_svn(None, [], + 'propset', '--revprop', '-r', '1', + 'svn:date', '2015-01-01T00:00:00.0Z', wc_dir) + + # But a proplist of this property value still fails via DAV. + expected_output = svntest.verify.UnorderedOutput([ + 'Unversioned properties on revision 1:\n', + ' svn:author\n', + ' foo\bbar\n', + ' svn:date\n', + ' 2015-01-01T00:00:00.0Z\n', + ' svn:log\n', + ' Log message for revision 1.\n' + ]) + svntest.actions.run_and_verify_svn(expected_output, [], + 'proplist', '--revprop', '-r', '1', '-v', + wc_dir) + +@Issue(4415) +def xml_unsafe_author2(sbox): + "svn:author with XML unsafe chars 2" + + sbox.build(create_wc = False) + repo_url = sbox.repo_url + + svntest.actions.enable_revprop_changes(sbox.repo_dir) + + # client sends svn:author (via PROPPATCH for DAV) + svntest.actions.run_and_verify_svn(None, [], + 'propset', '--revprop', '-r', '1', + 'svn:author', 'foo\bbar', repo_url) + + # Ensure a stable date + svntest.actions.run_and_verify_svn(None, [], + 'propset', '--revprop', '-r', '1', + 'svn:date', '2000-01-01T12:00:00.0Z', + repo_url) + + if svntest.main.is_ra_type_dav(): + # This receives the filtered author (but that is better than an Xml fail) + expected_author = 'foobar' + else: + expected_author = 'foo\bbar' + + # Use svn ls in --xml mode to test locale independent output. 
+ expected_output = [ + '<?xml version="1.0" encoding="UTF-8"?>\n', + '<lists>\n', + '<list\n', + ' path="%s">\n' % sbox.repo_url, + '<entry\n', + ' kind="dir">\n', + '<name>A</name>\n', + '<commit\n', + ' revision="1">\n', + '<author>%s</author>\n' % expected_author, + '<date>2000-01-01T12:00:00.000000Z</date>\n', + '</commit>\n', + '</entry>\n', + '<entry\n', + ' kind="file">\n', + '<name>iota</name>\n', + '<size>25</size>\n', + '<commit\n', + ' revision="1">\n', + '<author>%s</author>\n' % expected_author, + '<date>2000-01-01T12:00:00.000000Z</date>\n', + '</commit>\n', + '</entry>\n', + '</list>\n', + '</lists>\n' + ] + + svntest.actions.run_and_verify_svn(expected_output, [], + 'ls', '--xml', repo_url) + + expected_info = [{ + 'Repository Root' : sbox.repo_url, + 'Last Changed Author' : expected_author, + }] + svntest.actions.run_and_verify_info(expected_info, repo_url) + +def dir_prop_conflict_details(sbox): + "verify dir property conflict details" + + sbox.build() + wc_dir = sbox.wc_dir + + # Apply some changes + sbox.simple_propset('svn:mergeinfo', '/B:1', 'A') + sbox.simple_propset('my-prop', 'my-val', 'A') + sbox.simple_commit() + + # Revert to r1 + sbox.simple_update('', revision=1) + + # Apply some incompatible changes + sbox.simple_propset('svn:mergeinfo', '/C:1', 'A') + sbox.simple_propset('my-prop', 'other-val', 'A') + + # This should report out of date because there are incompatible property + # changes that can't be merged on the server + svntest.actions.run_and_verify_commit(wc_dir, + None, + None, + '.*[Oo]ut of date.*') + + expected_output = svntest.wc.State(wc_dir, { + 'A' : Item(status=' C'), + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.tweak('A', status=' C') + + svntest.actions.run_and_verify_update(wc_dir, + expected_output, + None, + expected_status, + check_props=True) + + # The conflict properties file line was shown for previous versions, but the + # conflict source urls are new since 1.8. 
+ expected_info = { + 'Conflict Properties File' : re.escape(sbox.ospath('A/dir_conflicts.prej')), + 'Conflict Details': re.escape('incoming dir edit upon update' + + ' Source left: (dir) ^/A@1' + + ' Source right: (dir) ^/A@2') + } + svntest.actions.run_and_verify_info([expected_info], sbox.path('A')) + + def iprops_list_abspath(sbox): "test listing iprops via abspath" @@ -2620,14 +2760,14 @@ def iprops_list_abspath(sbox): sbox.simple_propset('im', 'root', '') sbox.simple_commit() - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'switch', '^/A/D', sbox.ospath(''), '--ignore-ancestry') sbox.simple_propset('im', 'GammA', 'gamma') expected_output = [ - 'Inherited properties on \'%s\',\n' % sbox.ospath('')[:-1], + 'Inherited properties on \'%s\',\n' % sbox.ospath(''), 'from \'%s\':\n' % sbox.repo_url, ' im\n', ' root\n', @@ -2635,7 +2775,7 @@ def iprops_list_abspath(sbox): ' im\n', ' GammA\n' ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'pl', '-R', '--show-inherited-props', '-v', sbox.ospath('')) @@ -2649,7 +2789,7 @@ def iprops_list_abspath(sbox): ' im\n', ' GammA\n' ] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'pl', '-R', '--show-inherited-props', '-v', os.path.abspath(sbox.ospath(''))) @@ -2659,14 +2799,39 @@ def wc_propop_on_url(sbox): sbox.build(create_wc = False) - svntest.actions.run_and_verify_svn(None, None, '.*E195000:.*path', + svntest.actions.run_and_verify_svn(None, '.*E195000:.*path', 'pl', '-r', 'PREV', sbox.repo_url) - svntest.actions.run_and_verify_svn(None, None, '.*E195000:.*path', + svntest.actions.run_and_verify_svn(None, '.*E195000:.*path', 'pg', 'my:Q', '-r', 'PREV', sbox.repo_url) +def prop_conflict_root(sbox): + """property conflict on wc root""" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_propset('propname', 'propval1', '') + 
sbox.simple_commit() + sbox.simple_propset('propname', 'propval2', '') + sbox.simple_commit() + sbox.simple_update(revision=2) + sbox.simple_propset('propname', 'propvalconflict', '') + + expected_output = svntest.wc.State(wc_dir, { + '' : Item(status=' C'), + }) + expected_disk = svntest.main.greek_state.copy() + expected_status = svntest.actions.get_virginal_state(wc_dir, 3) + expected_status.tweak('', status=' C') + extra_files = ['dir_conflicts.prej'] + svntest.actions.run_and_verify_update(wc_dir, + expected_output, + expected_disk, + expected_status, + extra_files=extra_files) ######################################################################## # Run the tests @@ -2713,8 +2878,12 @@ test_list = [ None, inheritable_ignores, almost_known_prop_names, peg_rev_base_working, + xml_unsafe_author, + xml_unsafe_author2, + dir_prop_conflict_details, iprops_list_abspath, wc_propop_on_url, + prop_conflict_root, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/redirect_tests.py b/subversion/tests/cmdline/redirect_tests.py index 3262c1b..e8196a3 100755 --- a/subversion/tests/cmdline/redirect_tests.py +++ b/subversion/tests/cmdline/redirect_tests.py @@ -154,7 +154,7 @@ def redirected_nonroot_update(sbox): checkout_url, wc_dir) if err: raise svntest.Failure - + # Relocate (by cheating) the working copy to the redirect URL. When # we then update, we'll expect to find ourselves automagically back # to the original URL. 
(This is because we can't easily introduce a @@ -178,6 +178,88 @@ def redirected_nonroot_update(sbox): verify_url(wc_dir, checkout_url) #---------------------------------------------------------------------- +@SkipUnless(svntest.main.is_ra_type_dav) +def redirected_externals(sbox): + "redirected externals" + + sbox.build() + + sbox.simple_propset('svn:externals', + '^/A/B/E/alpha fileX\n' + '^/A/B/F dirX', + 'A/C') + sbox.simple_commit() + sbox.simple_update() + + wc_dir = sbox.add_wc_path("my") + co_url = sbox.redirected_root_url() + exit_code, out, err = svntest.main.run_svn(None, 'co', co_url, wc_dir) + if err: + raise svntest.Failure + if not redirect_regex.match(out[0]): + raise svntest.Failure + + verify_url(wc_dir, sbox.repo_url) + verify_url(sbox.ospath('A/C/fileX'), sbox.repo_url + '/A/B/E/alpha', + wc_path_is_file=True) + verify_url(sbox.ospath('A/C/dirX'), sbox.repo_url + '/A/B/F') + +#---------------------------------------------------------------------- +@SkipUnless(svntest.main.is_ra_type_dav) +def redirected_copy(sbox): + "redirected copy" + + sbox.build(create_wc=False) + + # E170011 = SVN_ERR_RA_SESSION_URL_MISMATCH + expected_error = "svn: E170011: Repository moved permanently" + + # This tests the actual copy handling + svntest.actions.run_and_verify_svn(None, expected_error, + 'cp', '-m', 'failed copy', + sbox.redirected_root_url() + '/A', + sbox.redirected_root_url() + '/A_copied') + + # This tests the cmdline handling of '^/copy-of-A' + svntest.actions.run_and_verify_svn(None, expected_error, + 'cp', '-m', 'failed copy', + sbox.redirected_root_url() + '/A', + '^/copy-of-A') + + # E170011 = SVN_ERR_RA_SESSION_URL_MISMATCH + expected_error = "svn: E170011: Repository moved temporarily" + + # This tests the actual copy handling + svntest.actions.run_and_verify_svn(None, expected_error, + 'cp', '-m', 'failed copy', + sbox.redirected_root_url(temporary=True) + '/A', + sbox.redirected_root_url(temporary=True) + '/A_copied') + + # This tests the 
cmdline handling of '^/copy-of-A' + svntest.actions.run_and_verify_svn(None, expected_error, + 'cp', '-m', 'failed copy', + sbox.redirected_root_url(temporary=True) + '/A', + '^/copy-of-A') +#---------------------------------------------------------------------- +@SkipUnless(svntest.main.is_ra_type_dav) +def redirected_commands(sbox): + "redirected commands" + + sbox.build(create_wc=False) + + svntest.actions.run_and_verify_svn(None, [], + 'log', + sbox.redirected_root_url() + '/A') + + svntest.actions.run_and_verify_svn(None, [], + 'ls', + sbox.redirected_root_url() + '/A') + + svntest.actions.run_and_verify_svn(None, [], + 'info', + sbox.redirected_root_url() + '/A') + +#---------------------------------------------------------------------- ######################################################################## # Run the tests @@ -188,6 +270,9 @@ test_list = [ None, redirected_checkout, redirected_update, redirected_nonroot_update, + redirected_externals, + redirected_copy, + redirected_commands, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/relocate_tests.py b/subversion/tests/cmdline/relocate_tests.py index 7e6ea19..40ffb49 100755 --- a/subversion/tests/cmdline/relocate_tests.py +++ b/subversion/tests/cmdline/relocate_tests.py @@ -42,7 +42,7 @@ Item = svntest.wc.StateItem from svntest.main import SVN_PROP_MERGEINFO, server_has_mergeinfo from externals_tests import change_external -from switch_tests import do_routine_switching +from svntest.deeptrees import do_routine_switching #---------------------------------------------------------------------- @@ -53,7 +53,7 @@ def relocate_deleted_missing_copied(sbox): # Delete A/mu to create a deleted entry for mu in A/.svn/entries mu_path = os.path.join(wc_dir, 'A', 'mu') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', mu_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', mu_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.remove('A/mu') 
expected_output = svntest.wc.State(wc_dir, { @@ -61,8 +61,7 @@ def relocate_deleted_missing_copied(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Remove A/B/F to create a missing entry svntest.main.safe_rmtree(os.path.join(wc_dir, 'A', 'B', 'F')) @@ -70,11 +69,11 @@ def relocate_deleted_missing_copied(sbox): # Copy A/D to A/D2 D_path = os.path.join(wc_dir, 'A', 'D') D2_path = os.path.join(wc_dir, 'A', 'D2') - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', D_path, D2_path) # Delete within the copy D2G_path = os.path.join(wc_dir, 'A', 'D2', 'G') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', D2G_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', D2G_path) expected_status.add({ 'A/D2' : Item(status='A ', wc_rev='-', copied='+'), @@ -97,7 +96,7 @@ def relocate_deleted_missing_copied(sbox): other_repo_dir, other_repo_url = sbox.add_repo_path('other') svntest.main.copy_repos(repo_dir, other_repo_dir, 2, 0) svntest.main.safe_rmtree(repo_dir, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'switch', '--relocate', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--relocate', repo_url, other_repo_url, wc_dir) # Deleted and missing entries should be preserved, so update should @@ -141,8 +140,7 @@ def relocate_deleted_missing_copied(sbox): status=' ', wc_rev='3', copied=None) expected_status.remove('A/D2/G', 'A/D2/G/pi', 'A/D2/G/rho', 'A/D2/G/tau') svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) #---------------------------------------------------------------------- @@ -163,26 +161,26 @@ def relocate_beyond_repos_root(sbox): other_B_url = other_repo_url + "/B" svntest.main.safe_rmtree(wc_dir, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 
'checkout', repo_url + '/A', wc_dir) svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0) # A relocate that changes the repo path part of the URL shouldn't work. # This tests for issue #2380. - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, ".*Invalid relocation destination.*", 'relocate', A_url, other_B_url, A_wc_dir) # Another way of trying to change the fs path, leading to an invalid # repository root. - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, ".*is not the root.*", 'relocate', repo_url, other_B_url, A_wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'relocate', A_url, other_A_url, A_wc_dir) @@ -208,21 +206,16 @@ def relocate_and_propset(sbox): # Create virgin repos and working copy svntest.main.safe_rmtree(sbox.repo_dir, 1) svntest.main.create_repos(sbox.repo_dir) + svntest.actions.guarantee_greek_repository( + sbox.repo_dir, svntest.main.options.server_minor_version) wc_dir = sbox.wc_dir repo_dir = sbox.repo_dir repo_url = sbox.repo_url - # import the greek tree - svntest.main.greek_state.write_to_disk(svntest.main.greek_dump_dir) - exit_code, output, errput = svntest.main.run_svn( - None, 'import', '-m', 'Log message for revision 1.', - svntest.main.greek_dump_dir, sbox.repo_url) - # checkout svntest.main.safe_rmtree(wc_dir, 1) - svntest.actions.run_and_verify_svn(None, - None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -230,7 +223,7 @@ def relocate_and_propset(sbox): other_repo_dir, other_repo_url = sbox.add_repo_path('other') svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0) svntest.main.safe_rmtree(repo_dir, 1) - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', repo_url, other_repo_url, wc_dir) # Remove gamma from the working copy. 
@@ -250,8 +243,7 @@ def relocate_and_propset(sbox): # Commit the deletion of gamma and verify. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Now gamma should be marked as `deleted' under the hood, at # revision 2. Meanwhile, A/D is still lagging at revision 1. @@ -263,8 +255,7 @@ def relocate_and_propset(sbox): svntest.actions.run_and_verify_commit(wc_dir, None, None, - "[Oo]ut.of.date", - wc_dir) + ".*[Oo]ut of date.*") #---------------------------------------------------------------------- @@ -273,24 +264,19 @@ def single_file_relocate(sbox): # Create virgin repos and working copy svntest.main.safe_rmtree(sbox.repo_dir, 1) - svntest.main.create_repos(sbox.repo_dir) + svntest.actions.guarantee_greek_repository( + sbox.repo_dir, svntest.main.options.server_minor_version) wc_dir = sbox.wc_dir iota_path = os.path.join(sbox.wc_dir, 'iota') repo_dir = sbox.repo_dir repo_url = sbox.repo_url iota_url = repo_url + '/iota' - - # import the greek tree - svntest.main.greek_state.write_to_disk(svntest.main.greek_dump_dir) - exit_code, output, errput = svntest.main.run_svn( - None, 'import', '-m', 'Log message for revision 1.', - svntest.main.greek_dump_dir, sbox.repo_url) + greek_dump_dir = sbox.add_wc_path('greek-dump') # checkout svntest.main.safe_rmtree(wc_dir, 1) - svntest.actions.run_and_verify_svn(None, - None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', repo_url, wc_dir) @@ -299,7 +285,7 @@ def single_file_relocate(sbox): other_iota_url = other_repo_url + '/iota' svntest.main.copy_repos(repo_dir, other_repo_dir, 1, 0) svntest.main.safe_rmtree(repo_dir, 1) - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, ".*Cannot relocate.*", 'relocate', iota_url, other_iota_url, iota_path) @@ -322,7 +308,7 @@ def relocate_with_switched_children(sbox): svntest.main.safe_rmtree(repo_dir, 1) # Do the switch and check the results in three ways. 
- svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', repo_url, other_repo_url, wc_dir) # Attempt to commit changes and examine results @@ -340,8 +326,7 @@ def relocate_with_switched_children(sbox): # This won't actually do a commit, because nothing should be modified. svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Check the URLs of various nodes. info_output = { @@ -369,7 +354,7 @@ def relocate_with_relative_externals(sbox): # Add a relative external. change_external(os.path.join(wc_dir, 'A', 'B'), "^/A/D/G G-ext\n../D/H H-ext", commit=True) - svntest.actions.run_and_verify_svn(None, None, [], 'update', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) # Move our repository to another location. repo_dir = sbox.repo_dir @@ -379,7 +364,7 @@ def relocate_with_relative_externals(sbox): svntest.main.safe_rmtree(repo_dir, 1) # Now relocate our working copy. 
- svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', repo_url, other_repo_url, wc_dir) # Check the URLs of the externals -- were they updated to point to the diff --git a/subversion/tests/cmdline/resolve_tests.py b/subversion/tests/cmdline/resolve_tests.py index c032b06..0d3b28a 100755 --- a/subversion/tests/cmdline/resolve_tests.py +++ b/subversion/tests/cmdline/resolve_tests.py @@ -41,8 +41,8 @@ Issues = svntest.testcase.Issues_deco Issue = svntest.testcase.Issue_deco Wimp = svntest.testcase.Wimp_deco -from merge_tests import set_up_branch -from merge_tests import expected_merge_output +from svntest.mergetrees import set_up_branch +from svntest.mergetrees import expected_merge_output ###################################################################### @@ -70,13 +70,12 @@ def automatic_conflict_resolution(sbox): # Make a change on the A_COPY branch such that a subsequent merge # conflicts. svntest.main.file_write(psi_COPY_path, "Branch content.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'log msg', wc_dir) def do_text_conflicting_merge(): - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', A_COPY_path) svntest.actions.run_and_verify_svn( - None, expected_merge_output([[3]], [ "C %s\n" % psi_COPY_path, " U %s\n" % A_COPY_path], @@ -126,10 +125,10 @@ def prop_conflict_resolution(sbox): psi_path = os.path.join(wc_dir, "A", "D", "H", "psi") # r2 - Set property 'propname:propval' on iota, A/mu, and A/D/gamma. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'propname', 'propval', iota_path, mu_path, gamma_path) - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'create some new properties', wc_dir) @@ -138,15 +137,15 @@ def prop_conflict_resolution(sbox): # iota : Delete property 'propname' # A/mu : Change property 'propname' to 'incoming-conflict' # A/D/gamma : Change property 'propname' to 'incoming-no-conflict' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'pd', 'propname', iota_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'propname', 'incoming-conflict', mu_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'propname', 'incoming-no-conflict', gamma_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', '-m', 'delete a property', wc_dir) @@ -156,7 +155,7 @@ def prop_conflict_resolution(sbox): """Revert the WC, update it to r2, and set the following properties: - itoa : 'propname' = 'local_edit' + iota : 'propname' = 'local_edit' 'newprop' = 'new-val-no-incoming' A/mu : 'propname' = 'local_edit' A/D/gamma : 'propname' = 'incoming-no-conflict' @@ -168,9 +167,9 @@ def prop_conflict_resolution(sbox): Using svn propget, check that the resolution results in the following properties: - itoa : 'propname' = RESOLVED_EDITED_PROP_VAL_OUTPUT + iota : 'propname' = RESOLVED_DELETED_PROP_VAL_OUTPUT 'newprop' = 'new-val-no-incoming' - A/mu : 'propname' = RESOLVED_DELETED_PROP_VAL_OUTPUT + A/mu : 'propname' = RESOLVED_EDITED_PROP_VAL_OUTPUT A/D/gamma : 'propname' = 'incoming-no-conflict' A/D/H/psi : 'newprop' = 'new-val-no-incoming' @@ -178,50 +177,46 @@ def prop_conflict_resolution(sbox): both follow the rules for 
the expected_stdout arg to run_and_verify_svn2()""" - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r2', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r2', wc_dir) # Set some properties that will conflict when we update. - svntest.actions.run_and_verify_svn(None, None, [], 'ps', + svntest.actions.run_and_verify_svn(None, [], 'ps', 'propname', 'local_edit', iota_path, mu_path) # Set a property that should always merge cleanly with the update. - svntest.actions.run_and_verify_svn(None, None, [], 'ps', + svntest.actions.run_and_verify_svn(None, [], 'ps', 'propname', 'incoming-no-conflict', gamma_path) # Set a property that has no update coming. - svntest.actions.run_and_verify_svn(None, None, [], 'ps', + svntest.actions.run_and_verify_svn(None, [], 'ps', 'newprop', 'new-val-no-incoming', psi_path, iota_path) # Update, postponing all conflict resolution. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', + svntest.actions.run_and_verify_svn(None, [], 'up', '--accept=postpone', wc_dir) svntest.actions.run_and_verify_resolve([iota_path, mu_path], '-R', '--accept', resolve_accept, wc_dir) + if resolved_deleted_prop_val_output: + expected_deleted_stderr = [] + else: + expected_deleted_stderr = '.*W200017: Property.*not found' + svntest.actions.run_and_verify_svn( - 'svn revolve -R --accept=' + resolve_accept + ' of prop conflict ' - 'not resolved as expected;', - resolved_deleted_prop_val_output, [], 'pg', 'propname', iota_path) + resolved_deleted_prop_val_output, expected_deleted_stderr, + 'pg', 'propname', iota_path) svntest.actions.run_and_verify_svn( - 'svn revolve -R --accept=' + resolve_accept + ' of prop conflict ' - 'not resolved as expected;', ['new-val-no-incoming\n'], [], 'pg', 'newprop', iota_path) svntest.actions.run_and_verify_svn( - 'svn revolve -R --accept=' + resolve_accept + ' of prop conflict ' - 'not resolved as expected;', resolved_edited_prop_val_output, [], 'pg', 'propname', mu_path) svntest.actions.run_and_verify_svn( - 'svn revolve -R --accept=' + resolve_accept + ' modified a ' - 'non-conflicted property', ['incoming-no-conflict\n'], [], 'pg', 'propname', gamma_path) svntest.actions.run_and_verify_svn( - 'svn revolve -R --accept=' + resolve_accept + ' modified a ' - 'non-conflicted property', ['new-val-no-incoming\n'], [], 'pg', 'newprop', psi_path) # Test how svn resolve deals with prop conflicts and other local @@ -294,7 +289,7 @@ def resolved_on_wc_root(sbox): # Commit mods svntest.main.file_append(i, "changed iota.\n") svntest.main.file_append(g, "changed gamma.\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo-val', B) expected_output = svntest.wc.State(wc, { @@ -308,9 +303,7 @@ def resolved_on_wc_root(sbox): svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, - wc) + 
expected_status) # Go back to rev 1 expected_output = svntest.wc.State(wc, { @@ -324,12 +317,12 @@ def resolved_on_wc_root(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '-r1', wc) # Deletions so that the item becomes unversioned and # will have a tree-conflict upon update. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', i, B, g) # Update so that conflicts appear @@ -356,7 +349,7 @@ def resolved_on_wc_root(sbox): expected_output, expected_disk, None, - None, None, None, None, None, False, + [], False, wc) svntest.actions.run_and_verify_unquiet_status(wc, expected_status) @@ -367,6 +360,7 @@ def resolved_on_wc_root(sbox): svntest.actions.run_and_verify_unquiet_status(wc, expected_status) #---------------------------------------------------------------------- +@SkipUnless(svntest.main.server_has_mergeinfo) def resolved_on_deleted_item(sbox): "resolved on deleted item" @@ -384,7 +378,7 @@ def resolved_on_deleted_item(sbox): A2_url = sbox.repo_url + '/A2' # make a copy of A - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', A_url, A2_url, '-m', 'm') expected_output = svntest.wc.State(wc, { @@ -454,13 +448,13 @@ def resolved_on_deleted_item(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, wc) # Create some conflicts... # Modify the paths in the one directory. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'foo-val', B) svntest.main.file_append(g, "Modified gamma.\n") @@ -473,12 +467,10 @@ def resolved_on_deleted_item(sbox): svntest.actions.run_and_verify_commit(wc, expected_output, - expected_status, - None, - wc) + expected_status) # Delete the paths in the second directory. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', B2, g2) expected_output = svntest.wc.State(wc, { @@ -494,7 +486,7 @@ def resolved_on_deleted_item(sbox): svntest.actions.run_and_verify_commit(wc, expected_output, expected_status, - None, + [], A2) # Now merge A to A2, creating conflicts... @@ -547,7 +539,7 @@ def resolved_on_deleted_item(sbox): expected_mergeinfo_output, expected_elision_output, expected_disk, None, expected_skip, - None, dry_run = False) + [], dry_run = False) svntest.actions.run_and_verify_unquiet_status(A2, expected_status) # Now resolve by recursing on the working copy root. @@ -570,7 +562,7 @@ def theirs_conflict_in_subdir(sbox): alpha_path2 = os.path.join(wc2, 'A', 'B', 'E', 'alpha') svntest.main.file_append(alpha_path, "Modified alpha.\n") - svntest.main.run_svn(None, 'ci', '-m', 'logmsg', wc) + sbox.simple_commit(message='logmsg') svntest.main.file_append(alpha_path2, "Modified alpha, too.\n") svntest.main.run_svn(None, 'up', wc2) @@ -606,6 +598,66 @@ def multi_range_merge_with_accept(sbox): svntest.main.run_svn(None, 'merge', '-c4,3', '^/iota', 'iota', '--accept=theirs-conflict') +#---------------------------------------------------------------------- + +# Test for issue #4647 'auto resolution mine-full fails on binary file' +@Issue(4647) +def automatic_binary_conflict_resolution(sbox): + "resolve -R --accept [base | mf | tf] binary file" + + sbox.build() + wc_dir = sbox.wc_dir + + # Some paths we'll care about + A_COPY_path = os.path.join(wc_dir, "A_COPY") + + # Add a binary file to the project in revision 2. + theta_contents = open(os.path.join(sys.path[0], "theta.bin"), 'rb').read() + theta_path = sbox.ospath('A/theta') + svntest.main.file_write(theta_path, theta_contents, 'wb') + svntest.main.run_svn(None, 'add', theta_path) + svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir) + + # Branch A to A_COPY in revision 3. 
+ svntest.main.run_svn(None, 'copy', wc_dir + "/A", A_COPY_path) + svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir) + + # Modify the binary file on trunk and in the branch, so that both versions + # differ. + theta_branch_path = sbox.ospath('A_COPY/theta') + svntest.main.file_append_binary(theta_path, theta_contents) + svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir) + svntest.main.file_append_binary(theta_branch_path, theta_contents) + svntest.main.file_append_binary(theta_branch_path, theta_contents) + svntest.main.run_svn(None, 'commit', '-m', 'log msg', wc_dir) + + # Run an svn update now to prevent mixed-revision working copy [1:4] error. + svntest.main.run_svn(None, 'update', wc_dir) + + + def do_binary_conflicting_merge(): + svntest.actions.run_and_verify_svn(None, [], + 'revert', '--recursive', A_COPY_path) + svntest.main.run_svn(None, 'merge', sbox.repo_url + "/A/theta", wc_dir + "/A_COPY/theta") + + # Test 'svn resolve -R --accept base' + do_binary_conflicting_merge() + svntest.actions.run_and_verify_resolve([theta_branch_path], + '-R', '--accept', 'base', + A_COPY_path) + + # Test 'svn resolve -R --accept mine-full' + do_binary_conflicting_merge() + svntest.actions.run_and_verify_resolve([theta_branch_path], + '-R', '--accept', 'mine-full', + A_COPY_path) + + # Test 'svn resolve -R --accept theirs-full' + do_binary_conflicting_merge() + svntest.actions.run_and_verify_resolve([theta_branch_path], + '-R', '--accept', 'tf', + A_COPY_path) + ######################################################################## # Run the tests @@ -619,6 +671,7 @@ test_list = [ None, resolved_on_deleted_item, theirs_conflict_in_subdir, multi_range_merge_with_accept, + automatic_binary_conflict_resolution, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/revert_tests.py b/subversion/tests/cmdline/revert_tests.py index cf7b1fc..3d718e6 100755 --- a/subversion/tests/cmdline/revert_tests.py +++ 
b/subversion/tests/cmdline/revert_tests.py @@ -64,11 +64,11 @@ def revert_replacement_with_props(sbox, wc_copy): # Set props on file which is copy-source later on pi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi') rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'phony-prop', '-F', prop_path, pi_path) os.remove(prop_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:eol-style', 'LF', rho_path) # Verify props having been set @@ -91,14 +91,13 @@ def revert_replacement_with_props(sbox, wc_copy): expected_status.tweak('A/D/G/rho', wc_rev='2') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Bring wc into sync - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # File scheduled for deletion - svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) # Status before attempting copies expected_status = svntest.actions.get_virginal_state(wc_dir, 2) @@ -111,7 +110,7 @@ def revert_replacement_with_props(sbox, wc_copy): else: pi_src = sbox.repo_url + '/A/D/G/pi' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path) # Verify both content and props have been copied @@ -131,7 +130,7 @@ def revert_replacement_with_props(sbox, wc_copy): expected_status.tweak('A/D/G/rho', status=' ', copied=None, wc_rev='2') expected_output = ["Reverted '" + rho_path + "'\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '-R', wc_dir) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -175,18 +174,18 @@ def revert_from_wc_root(sbox): 
svntest.main.file_append(rho_path, "Added some text to 'rho'.\n") svntest.main.file_append(zeta_path, "Added some text to 'zeta'.\n") - svntest.actions.run_and_verify_svn("Add command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', zeta_path) - svntest.actions.run_and_verify_svn("Add prop command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', gamma_path) - svntest.actions.run_and_verify_svn("Add prop command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', iota_path) - svntest.actions.run_and_verify_svn("Add prop command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', '.') - svntest.actions.run_and_verify_svn("Add prop command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'random-prop', 'propvalue', 'A') @@ -202,25 +201,25 @@ def revert_from_wc_root(sbox): svntest.actions.run_and_verify_status('', expected_output) # Run revert - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', beta_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', gamma_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', iota_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', rho_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', zeta_path) - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '.') - svntest.actions.run_and_verify_svn("Revert command", None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', 'A') # Verify unmodified status. 
@@ -303,13 +302,13 @@ def revert_reexpand_keyword(sbox): # This commit fails because newfile2_path is missing, but only after # we call svn_wc__internal_file_modified_p() on new_file. - svntest.actions.run_and_verify_commit(wc_dir, None, None, "2' is scheduled"+ - " for addition, but is missing", + svntest.actions.run_and_verify_commit(wc_dir, None, None, ".*2' is scheduled"+ + " for addition, but is missing.*", newfile_path, newfile2_path, '-m', "Shouldn't be committed") # Revert the file. The file is not reverted! - svntest.actions.run_and_verify_svn(None, [], [], 'revert', newfile_path) + svntest.actions.run_and_verify_svn([], [], 'revert', newfile_path) #---------------------------------------------------------------------- @@ -326,7 +325,7 @@ def revert_replaced_file_without_props(sbox): # Add a new file, file1, that has no prop-base svntest.main.file_append(file1_path, "This is the file 'file1' revision 2.") - svntest.actions.run_and_verify_svn(None, None, [], 'add', file1_path) + svntest.actions.run_and_verify_svn(None, [], 'add', file1_path) # commit file1 expected_output = svntest.wc.State(wc_dir, { @@ -339,10 +338,10 @@ def revert_replaced_file_without_props(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # delete file1 - svntest.actions.run_and_verify_svn(None, None, [], 'rm', file1_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', file1_path) # test that file1 is scheduled for deletion. 
expected_status.tweak('file1', status='D ') @@ -350,14 +349,14 @@ def revert_replaced_file_without_props(sbox): # recreate and add file1 svntest.main.file_append(file1_path, "This is the file 'file1' revision 3.") - svntest.actions.run_and_verify_svn(None, None, [], 'add', file1_path) + svntest.actions.run_and_verify_svn(None, [], 'add', file1_path) # Test to see if file1 is schedule for replacement expected_status.tweak('file1', status='R ') svntest.actions.run_and_verify_status(wc_dir, expected_status) # revert file1 - svntest.actions.run_and_verify_svn(None, ["Reverted '" + file1_path + "'\n"], + svntest.actions.run_and_verify_svn(["Reverted '" + file1_path + "'\n"], [], 'revert', file1_path) # test that file1 really was reverted @@ -389,7 +388,7 @@ def revert_moved_file(sbox): 'D ' + iota + '\n', ]) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'mv', iota, + actions.run_and_verify_svn2(expected_stdout, [], 0, 'mv', iota, iota_moved) # svn st @@ -405,7 +404,7 @@ def revert_moved_file(sbox): # svn revert iota expected_stdout = ["Reverted '" + iota + "'\n"] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'revert', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'revert', iota) # svn st @@ -429,7 +428,7 @@ def revert_file_merge_replace_with_history(sbox): # File scheduled for deletion rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/rho', status='D ') @@ -444,13 +443,12 @@ def revert_file_merge_replace_with_history(sbox): # Commit rev 2 svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # create new rho file svntest.main.file_write(rho_path, "new rho\n") # Add the new file - svntest.actions.run_and_verify_svn(None, None, [], 'add', 
rho_path) + svntest.actions.run_and_verify_svn(None, [], 'add', rho_path) # Commit revsion 3 expected_status.add({ @@ -463,7 +461,7 @@ def revert_file_merge_replace_with_history(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # Update working copy expected_output = svntest.wc.State(wc_dir, {}) @@ -501,7 +499,6 @@ def revert_file_merge_replace_with_history(sbox): # Now revert svntest.actions.run_and_verify_svn(None, - None, [], 'revert', rho_path) # test that rho really was reverted @@ -535,7 +532,7 @@ def revert_after_second_replace(sbox): # File scheduled for deletion rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', rho_path) # Status before attempting copy expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -545,14 +542,14 @@ def revert_after_second_replace(sbox): # Replace file for the first time pi_src = os.path.join(wc_dir, 'A', 'D', 'G', 'pi') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path) expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-') svntest.actions.run_and_verify_status(wc_dir, expected_status) # Now delete replaced file. 
- svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--force', rho_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', rho_path) # Status should be same as after first delete expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -562,13 +559,13 @@ def revert_after_second_replace(sbox): # Replace file for the second time pi_src = os.path.join(wc_dir, 'A', 'D', 'G', 'pi') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', pi_src, rho_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', pi_src, rho_path) expected_status.tweak('A/D/G/rho', status='R ', copied='+', wc_rev='-') svntest.actions.run_and_verify_status(wc_dir, expected_status) # Now revert - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', wc_dir) # Check disk status @@ -608,9 +605,9 @@ def revert_after_manual_conflict_resolution__text(sbox): os.remove(iota_path_2 + '.r2') # Verify no output from status, diff, or revert - svntest.actions.run_and_verify_svn(None, [], [], "status", wc_dir_2) - svntest.actions.run_and_verify_svn(None, [], [], "diff", wc_dir_2) - svntest.actions.run_and_verify_svn(None, [], [], "revert", "-R", wc_dir_2) + svntest.actions.run_and_verify_svn([], [], "status", wc_dir_2) + svntest.actions.run_and_verify_svn([], [], "diff", wc_dir_2) + svntest.actions.run_and_verify_svn([], [], "revert", "-R", wc_dir_2) def revert_after_manual_conflict_resolution__prop(sbox): "revert after manual property-conflict resolution" @@ -636,9 +633,9 @@ def revert_after_manual_conflict_resolution__prop(sbox): os.remove(iota_path_2 + '.prej') # Verify no output from status, diff, or revert - svntest.actions.run_and_verify_svn(None, [], [], "status", wc_dir_2) - svntest.actions.run_and_verify_svn(None, [], [], "diff", wc_dir_2) - svntest.actions.run_and_verify_svn(None, [], [], "revert", "-R", wc_dir_2) + svntest.actions.run_and_verify_svn([], [], "status", wc_dir_2) + 
svntest.actions.run_and_verify_svn([], [], "diff", wc_dir_2) + svntest.actions.run_and_verify_svn([], [], "revert", "-R", wc_dir_2) def revert_propset__dir(sbox): "revert a simple propset on a dir" @@ -648,7 +645,7 @@ def revert_propset__dir(sbox): a_path = os.path.join(wc_dir, 'A') svntest.main.run_svn(None, 'propset', 'foo', 'x', a_path) expected_output = re.escape("Reverted '" + a_path + "'") - svntest.actions.run_and_verify_svn(None, expected_output, [], "revert", + svntest.actions.run_and_verify_svn(expected_output, [], "revert", a_path) def revert_propset__file(sbox): @@ -659,7 +656,7 @@ def revert_propset__file(sbox): iota_path = os.path.join(wc_dir, 'iota') svntest.main.run_svn(None, 'propset', 'foo', 'x', iota_path) expected_output = re.escape("Reverted '" + iota_path + "'") - svntest.actions.run_and_verify_svn(None, expected_output, [], "revert", + svntest.actions.run_and_verify_svn(expected_output, [], "revert", iota_path) def revert_propdel__dir(sbox): @@ -673,7 +670,7 @@ def revert_propdel__dir(sbox): 'commit', '-m', 'ps', a_path) svntest.main.run_svn(None, 'propdel', 'foo', a_path) expected_output = re.escape("Reverted '" + a_path + "'") - svntest.actions.run_and_verify_svn(None, expected_output, [], "revert", + svntest.actions.run_and_verify_svn(expected_output, [], "revert", a_path) def revert_propdel__file(sbox): @@ -687,7 +684,7 @@ def revert_propdel__file(sbox): 'commit', '-m', 'ps', iota_path) svntest.main.run_svn(None, 'propdel', 'foo', iota_path) expected_output = re.escape("Reverted '" + iota_path + "'") - svntest.actions.run_and_verify_svn(None, expected_output, [], "revert", + svntest.actions.run_and_verify_svn(expected_output, [], "revert", iota_path) def revert_replaced_with_history_file_1(sbox): @@ -699,7 +696,7 @@ def revert_replaced_with_history_file_1(sbox): mu_path = os.path.join(wc_dir, 'A', 'mu') # Remember the original text of 'mu' - exit_code, text_r1, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, text_r1, 
err = svntest.actions.run_and_verify_svn(None, [], 'cat', mu_path) # delete mu and replace it with a copy of iota svntest.main.run_svn(None, 'rm', mu_path) @@ -714,8 +711,7 @@ def revert_replaced_with_history_file_1(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # update the working copy svntest.main.run_svn(None, 'up', wc_dir) @@ -755,20 +751,19 @@ def revert_replaced_with_history_file_1(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Verify the content of 'mu' - svntest.actions.run_and_verify_svn(None, text_r1, [], 'cat', mu_path) + svntest.actions.run_and_verify_svn(text_r1, [], 'cat', mu_path) # situation: no local modifications, mu has its original content again. # revert 'mu' locally, shouldn't change a thing. - svntest.actions.run_and_verify_svn(None, [], [], "revert", + svntest.actions.run_and_verify_svn([], [], "revert", mu_path) # Verify the content of 'mu' - svntest.actions.run_and_verify_svn(None, text_r1, [], 'cat', mu_path) + svntest.actions.run_and_verify_svn(text_r1, [], 'cat', mu_path) #---------------------------------------------------------------------- # Test for issue #2804. 
@@ -780,9 +775,9 @@ def status_of_missing_dir_after_revert(sbox): wc_dir = sbox.wc_dir A_D_G_path = os.path.join(wc_dir, "A", "D", "G") - svntest.actions.run_and_verify_svn(None, None, [], "rm", A_D_G_path) + svntest.actions.run_and_verify_svn(None, [], "rm", A_D_G_path) expected_output = re.escape("Reverted '" + A_D_G_path + "'") - svntest.actions.run_and_verify_svn(None, expected_output, [], "revert", + svntest.actions.run_and_verify_svn(expected_output, [], "revert", A_D_G_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -796,7 +791,7 @@ def status_of_missing_dir_after_revert(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # When using single-db, we can get back to the virginal state. - svntest.actions.run_and_verify_svn(None, None, [], "revert", + svntest.actions.run_and_verify_svn(None, [], "revert", "-R", A_D_G_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -814,7 +809,7 @@ def status_of_missing_dir_after_revert_replaced_with_history_dir(sbox): # delete A/D/G and commit G_path = os.path.join(wc_dir, "A", "D", "G") - svntest.actions.run_and_verify_svn(None, None, [], "rm", G_path) + svntest.actions.run_and_verify_svn(None, [], "rm", G_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.remove('A/D/G', 'A/D/G/rho', 'A/D/G/pi', 'A/D/G/tau') expected_output = svntest.wc.State(wc_dir, { @@ -822,12 +817,11 @@ def status_of_missing_dir_after_revert_replaced_with_history_dir(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # copy A/D/G from A/B/E and commit E_path = os.path.join(wc_dir, "A", "B", "E") - svntest.actions.run_and_verify_svn(None, None, [], "cp", E_path, G_path) + svntest.actions.run_and_verify_svn(None, [], "cp", E_path, G_path) expected_status.add({ 'A/D/G' : Item(status=' ', wc_rev='3'), 'A/D/G/alpha' : Item(status=' ', wc_rev='3'), @@ -838,8 +832,7 @@ def 
status_of_missing_dir_after_revert_replaced_with_history_dir(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # update the working copy svntest.main.run_svn(None, 'up', wc_dir) @@ -887,10 +880,10 @@ def status_of_missing_dir_after_revert_replaced_with_history_dir(sbox): expected_output = svntest.verify.UnorderedOutput([ "Reverted '%s'\n" % path for path in revert_paths]) - svntest.actions.run_and_verify_svn(None, expected_output, [], "revert", "-R", + svntest.actions.run_and_verify_svn(expected_output, [], "revert", "-R", G_path) - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], "status", wc_dir) svntest.main.safe_rmtree(G_path) @@ -899,7 +892,7 @@ def status_of_missing_dir_after_revert_replaced_with_history_dir(sbox): ["! " + G_path + "\n", "! " + os.path.join(G_path, "alpha") + "\n", "! " + os.path.join(G_path, "beta") + "\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], "status", + svntest.actions.run_and_verify_svn(expected_output, [], "status", wc_dir) # Test for issue #2928. 
@@ -929,8 +922,7 @@ def revert_replaced_with_history_file_2(sbox): expected_status.tweak('A/mu', status=' ', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- @@ -973,7 +965,7 @@ def revert_tree_conflicts_in_updated_files(sbox): expected_disk.remove('A/D/G/tau') # Revert individually in wc - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', G_pi, G_rho, G_tau) svntest.actions.run_and_verify_status(wc_dir, expected_status) svntest.actions.verify_disk(wc_dir, expected_disk) @@ -988,7 +980,7 @@ def revert_tree_conflicts_in_updated_files(sbox): expected_status.wc_dir = wc_dir_2 # Revert recursively in wc 2 - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '-R', G2) svntest.actions.run_and_verify_status(wc_dir_2, expected_status) svntest.actions.verify_disk(wc_dir_2, expected_disk) @@ -1000,7 +992,7 @@ def revert_add_over_not_present_dir(sbox): wc_dir = sbox.wc_dir main.run_svn(None, 'rm', os.path.join(wc_dir, 'A/C')) - main.run_svn(None, 'ci', wc_dir, '-m', 'Deleted dir') + sbox.simple_commit(message='Deleted dir') expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.remove('A/C') @@ -1019,7 +1011,7 @@ def revert_added_tree(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', sbox.ospath('X'), sbox.ospath('X/Y')) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.add({ @@ -1030,7 +1022,7 @@ def revert_added_tree(sbox): # Revert is non-recursive and fails, status is unchanged expected_error = '.*Try \'svn revert --depth infinity\'.*' - svntest.actions.run_and_verify_svn(None, None, expected_error, + 
svntest.actions.run_and_verify_svn(None, expected_error, 'revert', sbox.ospath('X')) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -1041,7 +1033,7 @@ def revert_child_of_copy(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', sbox.ospath('A/B/E'), sbox.ospath('A/B/E2')) @@ -1058,13 +1050,13 @@ def revert_child_of_copy(sbox): # First revert removes text change, child is still copied expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E2/beta')] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', sbox.ospath('A/B/E2/beta')) expected_status.tweak('A/B/E2/beta', status=' ') svntest.actions.run_and_verify_status(wc_dir, expected_status) # Second revert of child does nothing, child is still copied - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', sbox.ospath('A/B/E2/beta')) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -1075,7 +1067,7 @@ def revert_non_recusive_after_delete(sbox): sbox.build(read_only=True) wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], 'rm', sbox.ospath('A/B')) + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.ospath('A/B')) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda', status='D ') @@ -1083,19 +1075,19 @@ def revert_non_recusive_after_delete(sbox): # This appears to work but gets the op-depth wrong expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B')] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', sbox.ospath('A/B')) expected_status.tweak('A/B', status=' ') svntest.actions.run_and_verify_status(wc_dir, expected_status) - 
svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', sbox.ospath('A/B/E')) expected_status.tweak('A/B/E', status='R ') svntest.actions.run_and_verify_status(wc_dir, expected_status) # Since the op-depth was wrong A/B/E erroneously remains deleted expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E')] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', sbox.ospath('A/B/E')) expected_status.tweak('A/B/E', status=' ') svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -1143,7 +1135,7 @@ def revert_permissions_only(sbox): os.chmod(sbox.ospath('A/B/E/alpha'), 0444) # read-only is_readonly(sbox.ospath('A/B/E/alpha')) expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/alpha')] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', sbox.ospath('A/B/E/alpha')) is_writable(sbox.ospath('A/B/E/alpha')) @@ -1151,14 +1143,14 @@ def revert_permissions_only(sbox): os.chmod(sbox.ospath('A/B/E/beta'), 0777) # executable is_executable(sbox.ospath('A/B/E/beta')) expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/beta')] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', sbox.ospath('A/B/E/beta')) is_non_executable(sbox.ospath('A/B/E/beta')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:needs-lock', '1', sbox.ospath('A/B/E/alpha')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:executable', '1', sbox.ospath('A/B/E/beta')) @@ -1171,13 +1163,12 @@ def revert_permissions_only(sbox): expected_status.tweak('A/B/E/beta', wc_rev='2') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + 
expected_status) os.chmod(sbox.ospath('A/B/E/alpha'), 0666) # not read-only is_writable(sbox.ospath('A/B/E/alpha')) expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/alpha')] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', sbox.ospath('A/B/E/alpha')) is_readonly(sbox.ospath('A/B/E/alpha')) @@ -1185,18 +1176,18 @@ def revert_permissions_only(sbox): os.chmod(sbox.ospath('A/B/E/beta'), 0666) # not executable is_non_executable(sbox.ospath('A/B/E/beta')) expected_output = ["Reverted '%s'\n" % sbox.ospath('A/B/E/beta')] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', sbox.ospath('A/B/E/beta')) is_executable(sbox.ospath('A/B/E/beta')) # copied file is always writeable sbox.simple_update() expected_output = ["A %s\n" % sbox.ospath('A/B/E2')] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'copy', + svntest.actions.run_and_verify_svn(expected_output, [], 'copy', sbox.ospath('A/B/E'), sbox.ospath('A/B/E2')) is_writable(sbox.ospath('A/B/E2/alpha')) - svntest.actions.run_and_verify_svn(None, [], [], + svntest.actions.run_and_verify_svn([], [], 'revert', sbox.ospath('A/B/E2/alpha')) is_writable(sbox.ospath('A/B/E2/alpha')) @@ -1208,7 +1199,7 @@ def revert_copy_depth_files(sbox): sbox.build(read_only=True) wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.ospath('A/B/E'), sbox.ospath('A/B/E2')) @@ -1226,7 +1217,7 @@ def revert_copy_depth_files(sbox): 'A/B/E2/alpha', 'A/B/E2/beta']]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '--depth', 'files', sbox.ospath('A/B/E2')) @@ -1241,7 +1232,7 @@ def revert_nested_add_depth_immediates(sbox): sbox.build(read_only=True) wc_dir = sbox.wc_dir - 
svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '--parents', sbox.ospath('A/X/Y')) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1254,7 +1245,7 @@ def revert_nested_add_depth_immediates(sbox): expected_output = svntest.verify.UnorderedOutput([ "Reverted '%s'\n" % sbox.ospath(path) for path in ['A/X', 'A/X/Y']]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', '--depth', 'immediates', sbox.ospath('A/X')) @@ -1272,7 +1263,7 @@ def create_superflous_actual_node(sbox): sbox.simple_update() # Create a NODES row with op-depth>0 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', '-r', '1', sbox.repo_url + '/A/B/E/alpha', sbox.ospath('alpha')) @@ -1284,7 +1275,7 @@ def create_superflous_actual_node(sbox): }) svntest.actions.run_and_verify_status(wc_dir, expected_status) svntest.main.file_append(sbox.ospath('alpha'), 'my text\n') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', '--accept', 'postpone', '^/A/B/E/alpha', sbox.ospath('alpha')) expected_status.tweak('alpha', status='CM', entry_status='A ') @@ -1300,6 +1291,7 @@ def create_superflous_actual_node(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) @Issue(3859) +@SkipUnless(svntest.main.server_has_mergeinfo) def revert_empty_actual(sbox): "revert with superfluous actual node" @@ -1307,8 +1299,7 @@ def revert_empty_actual(sbox): wc_dir = sbox.wc_dir # Non-recursive code path works - svntest.actions.run_and_verify_svn(None, - ["Reverted '%s'\n" % sbox.ospath('alpha')], + svntest.actions.run_and_verify_svn(["Reverted '%s'\n" % sbox.ospath('alpha')], [], 'revert', sbox.ospath('alpha')) @@ -1316,16 +1307,16 @@ def revert_empty_actual(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) @Issue(3859) 
+@SkipUnless(svntest.main.server_has_mergeinfo) def revert_empty_actual_recursive(sbox): - "recusive revert with superfluous actual node" + "recursive revert with superfluous actual node" create_superflous_actual_node(sbox) wc_dir = sbox.wc_dir # Recursive code path fails, the superfluous actual node suppresses the # notification - svntest.actions.run_and_verify_svn(None, - ["Reverted '%s'\n" % sbox.ospath('alpha')], + svntest.actions.run_and_verify_svn(["Reverted '%s'\n" % sbox.ospath('alpha')], [], 'revert', '-R', sbox.ospath('alpha')) @@ -1502,7 +1493,7 @@ def create_no_text_change_conflict(sbox): # Update to create a conflict svntest.main.file_append(sbox.ospath('A/B/E/alpha'), 'my text\n') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r1', '--accept', 'postpone', wc_dir) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1520,8 +1511,7 @@ def revert_no_text_change_conflict(sbox): create_no_text_change_conflict(sbox) wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, - ["Reverted '%s'\n" + svntest.actions.run_and_verify_svn(["Reverted '%s'\n" % sbox.ospath('A/B/E/alpha')], [], 'revert', sbox.ospath('A/B/E/alpha')) @@ -1536,8 +1526,7 @@ def revert_no_text_change_conflict_recursive(sbox): create_no_text_change_conflict(sbox) wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, - ["Reverted '%s'\n" + svntest.actions.run_and_verify_svn(["Reverted '%s'\n" % sbox.ospath('A/B/E/alpha')], [], 'revert', '-R', wc_dir) @@ -1573,7 +1562,7 @@ def revert_with_unversioned_targets(sbox): "Skipped '%s'\n" % sbox.ospath('A/D/H/delta'), "Reverted '%s'\n" % sbox.ospath('A/D/H/psi'), ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'revert', chi_path, delta_path, psi_path) # verify status @@ -1593,7 +1582,7 @@ def revert_with_unversioned_targets(sbox): def revert_nonexistent(sbox): 'svn revert -R 
nonexistent' sbox.build(read_only=True) - svntest.actions.run_and_verify_svn(None, 'Skipped.*nonexistent', [], + svntest.actions.run_and_verify_svn('Skipped.*nonexistent', [], 'revert', '-R', sbox.ospath('nonexistent')) @Issue(4168) @@ -1609,13 +1598,13 @@ def revert_obstructing_wc(sbox): # Checkout wc as depth empty svntest.actions.run_and_verify_checkout(sbox.repo_url, wc_dir, expected_output, expected_disk, - None, None, None, None, + [], '--depth', 'empty') # And create an obstructing working copy as A svntest.actions.run_and_verify_checkout(sbox.repo_url, wc_dir + '/A', expected_output, expected_disk, - None, None, None, None, + [], '--depth', 'empty') # Now try to fetch the entire wc, which will find an obstruction @@ -1629,21 +1618,25 @@ def revert_obstructing_wc(sbox): # A is not versioned but exists }) - # Use expected_status.old_tree() to avoid doing an entries comparion svntest.actions.run_and_verify_update(wc_dir, expected_output, None, - expected_status.old_tree(), - None, None, None, - None, None, None, + expected_status, + [], False, wc_dir, '--set-depth', 'infinity') # Revert should do nothing (no local changes), and report the obstruction - # (reporting the obstruction is nice for debuging, but not really required + # (reporting the obstruction is nice for debugging, but not really required # in this specific case, as the node was not modified) - svntest.actions.run_and_verify_svn(None, "Skipped '.*A' -- .*obstruct.*", [], + svntest.actions.run_and_verify_svn("Skipped '.*A' -- .*obstruct.*", [], 'revert', '-R', wc_dir) +def revert_moved_dir_partial(sbox): + "partial revert moved_dir" + + sbox.build(read_only = True) + sbox.simple_move('A', 'A_') + svntest.actions.run_and_verify_svn(None, [], 'revert', sbox.ospath('A')) ######################################################################## @@ -1686,6 +1679,7 @@ test_list = [ None, revert_with_unversioned_targets, revert_nonexistent, revert_obstructing_wc, + revert_moved_dir_partial, ] if __name__ 
== '__main__': diff --git a/subversion/tests/cmdline/schedule_tests.py b/subversion/tests/cmdline/schedule_tests.py index d27fbf7..6000588 100755 --- a/subversion/tests/cmdline/schedule_tests.py +++ b/subversion/tests/cmdline/schedule_tests.py @@ -184,15 +184,17 @@ def add_executable(sbox): file_ospath = sbox.ospath(fileName) if executable: expected_out = ["*\n"] + expected_err = [] else: expected_out = [] + expected_err = '.*W200017: Property.*not found' # create an empty file open(file_ospath, "w") os.chmod(file_ospath, perm) sbox.simple_add(fileName) - svntest.actions.run_and_verify_svn(None, expected_out, [], + svntest.actions.run_and_verify_svn(expected_out, expected_err, 'propget', "svn:executable", file_ospath) test_cases = [ @@ -276,7 +278,7 @@ def revert_add_files(sbox): epsilon_path = sbox.ospath('A/D/G/epsilon') files = [delta_path, zeta_path, epsilon_path] - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) @@ -296,7 +298,7 @@ def revert_add_directories(sbox): Z_path = sbox.ospath('A/D/H/Z') files = [X_path, Y_path, Z_path] - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) @@ -322,7 +324,7 @@ def revert_nested_adds(sbox): + [os.path.join(Z_path, child) for child in ['R', 'zeta']]) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) @@ -343,7 +345,7 @@ def revert_add_executable(sbox): other_path = sbox.ospath('other_exe') files = [all_path, none_path, user_path, group_path, other_path] - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) 
@@ -364,7 +366,7 @@ def revert_delete_files(sbox): omega_path = sbox.ospath('A/D/H/omega') files = [iota_path, mu_path, omega_path, rho_path] - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) @@ -390,7 +392,7 @@ def revert_delete_dirs(sbox): files = [E_path, F_path, H_path, alpha_path, beta_path, chi_path, omega_path, psi_path] - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'revert', '--recursive', wc_dir) @@ -493,8 +495,7 @@ def delete_missing(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) #---------------------------------------------------------------------- # Regression test for issue #854: @@ -553,7 +554,7 @@ def status_add_deleted_directory(sbox): # Update will *not* remove the entry for A despite it being marked # deleted. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(2), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(2), [], 'up', wc_dir) expected_status.tweak('', 'iota', wc_rev=2) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -567,10 +568,9 @@ def status_add_deleted_directory(sbox): def add_recursive_already_versioned(sbox): "'svn add' should traverse already-versioned dirs" + sbox.build() wc_dir = sbox.wc_dir - svntest.actions.make_repo_and_wc(sbox) - # Create some files, then schedule them for addition delta_path = sbox.ospath('delta') zeta_path = sbox.ospath('A/B/zeta') @@ -616,8 +616,7 @@ def fail_add_directory(sbox): os.makedirs(sbox.wc_dir) os.chdir(sbox.wc_dir) - svntest.actions.run_and_verify_svn('Failed mkdir', - None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'mkdir', 'A') if os.path.exists('A'): raise svntest.Failure('svn mkdir created an unversioned directory') @@ -636,7 +635,7 @@ def delete_non_existent(sbox): wc_dir = sbox.wc_dir os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, None, svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, 'rm', '--force', 'non-existent') @@ -651,7 +650,7 @@ def delete_redelete_fudgery(sbox): B_path = os.path.join(wc_dir, 'A', 'B') # Delete 'A/B' using --keep-local, then remove at the OS level. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', B_path) svntest.main.safe_rmtree(B_path) @@ -669,10 +668,10 @@ def delete_redelete_fudgery(sbox): ### information stored now in the working copy root's one DB. That ### could change the whole flow of this test, possible leading us to ### remove it as altogether irrelevant. 
--cmpilato - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Now try to run - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '--keep-local', B_path) def propset_on_deleted_should_fail(sbox): @@ -681,9 +680,9 @@ def propset_on_deleted_should_fail(sbox): wc_dir = sbox.wc_dir iota = os.path.join(wc_dir, 'iota') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', iota) + svntest.actions.run_and_verify_svn(None, [], 'rm', iota) - svntest.actions.run_and_verify_svn(None, None, "svn: E155023: Can't set propert.*", + svntest.actions.run_and_verify_svn(None, "svn: E155023: Can't set propert.*", 'ps', 'prop', 'val', iota) @Issue(3468) @@ -721,8 +720,7 @@ def replace_dir_delete_child(sbox): svntest.actions.run_and_verify_commit(sbox.wc_dir, expected_output, - expected_status, - None, sbox.wc_dir) + expected_status) ######################################################################## diff --git a/subversion/tests/cmdline/special_tests.py b/subversion/tests/cmdline/special_tests.py index 6340002..2af1529 100755 --- a/subversion/tests/cmdline/special_tests.py +++ b/subversion/tests/cmdline/special_tests.py @@ -86,20 +86,19 @@ def general_symlink(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) ## Now we should update to the previous version, verify that no ## symlink is present, then update back to HEAD and see if the symlink ## is regenerated properly. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', wc_dir) # Is the symlink gone? if os.path.isfile(newfile_path) or os.path.islink(newfile_path): raise svntest.Failure - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '2', wc_dir) # Is the symlink back? 
@@ -118,7 +117,7 @@ def general_symlink(sbox): was_cwd = os.getcwd() os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, [ "M newfile\n" ], [], 'st') + svntest.actions.run_and_verify_svn([ "M newfile\n" ], [], 'st') os.chdir(was_cwd) @@ -133,7 +132,7 @@ def general_symlink(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) @SkipUnless(svntest.main.is_posix_os) @@ -152,7 +151,7 @@ def replace_file_with_symlink(sbox): # Does status show the obstruction? was_cwd = os.getcwd() os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, [ "~ iota\n" ], [], 'st') + svntest.actions.run_and_verify_svn([ "~ iota\n" ], [], 'st') # And does a commit fail? os.chdir(was_cwd) @@ -183,7 +182,7 @@ def import_export_symlink(sbox): # import this symlink into the repository url = sbox.repo_url + "/dirA/dirB/new_link" exit_code, output, errput = svntest.actions.run_and_verify_svn( - 'Import a symlink', None, [], 'import', + None, [], 'import', '-m', 'log msg', new_path, url) regex = "(Committed|Imported) revision [0-9]+." @@ -197,7 +196,7 @@ def import_export_symlink(sbox): os.remove(new_path) # run update and verify that the symlink is put back into place - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Is the symlink back? @@ -213,7 +212,7 @@ def import_export_symlink(sbox): for export_src, dest_dir in [(sbox.wc_dir, 'export-wc'), (sbox.repo_url, 'export-url')]: export_target = sbox.add_wc_path(dest_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'export', export_src, export_target) # is the link at the correct place? 
@@ -250,11 +249,11 @@ def copy_tree_with_symlink(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Copy H to H2 H_path = os.path.join(wc_dir, 'A', 'D', 'H') H2_path = os.path.join(wc_dir, 'A', 'D', 'H2') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', H_path, H2_path) + svntest.actions.run_and_verify_svn(None, [], 'cp', H_path, H2_path) # 'svn status' should show just "A/D/H2 A +". Nothing broken. expected_status.add({ @@ -297,7 +296,7 @@ def replace_symlink_with_file(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now replace the symlink with a normal file and try to commit, we @@ -308,7 +307,7 @@ def replace_symlink_with_file(sbox): # Does status show the obstruction? was_cwd = os.getcwd() os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, [ "~ newfile\n" ], [], 'st') + svntest.actions.run_and_verify_svn([ "~ newfile\n" ], [], 'st') # And does a commit fail? 
os.chdir(was_cwd) @@ -350,10 +349,10 @@ def remove_symlink(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now remove it - svntest.actions.run_and_verify_svn(None, None, [], 'rm', newfile_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', newfile_path) # Commit and verify that it worked expected_output = svntest.wc.State(wc_dir, { @@ -366,7 +365,7 @@ def remove_symlink(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -395,8 +394,7 @@ def merge_symlink_into_file(sbox): 'A/Dprime/gamma' : Item(verb='Deleting'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None, - wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) # Commit a symlink in its place linktarget_path = os.path.join(wc_dir, 'linktarget') @@ -407,8 +405,7 @@ def merge_symlink_into_file(sbox): 'A/Dprime/gamma' : Item(verb='Adding'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None, - wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) # merge the creation of the symlink into the original directory svntest.main.run_svn(None, @@ -430,8 +427,7 @@ def merge_symlink_into_file(sbox): 'A/D/gamma' : Item(verb='Replacing'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None, - wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) @@ -460,8 +456,7 @@ def merge_file_into_symlink(sbox): 'A/Dprime/gamma' : Item(verb='Deleting'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None, - wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) # Commit a symlink in its place linktarget_path = os.path.join(wc_dir, 'linktarget') @@ -472,8 +467,7 @@ 
def merge_file_into_symlink(sbox): 'A/Dprime/gamma' : Item(verb='Adding'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None, - wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) svntest.main.file_write(gamma_path, 'changed file', 'w+') @@ -481,8 +475,7 @@ def merge_file_into_symlink(sbox): 'A/D/gamma' : Item(verb='Sending'), }) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, None, None, - wc_dir) + svntest.actions.run_and_verify_commit(wc_dir, expected_output, None) # ok, now merge the change to the file into the symlink we created, this # gives us a weird error @@ -498,7 +491,8 @@ def checkout_repo_with_symlinks(sbox): svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]), 'special_tests_data', - 'symlink.dump')) + 'symlink.dump'), + create_wc=False) expected_output = svntest.wc.State(sbox.wc_dir, { 'from': Item(status='A '), @@ -537,7 +531,7 @@ def diff_symlink_to_dir(sbox): expected_output = [ "Index: link\n", "===================================================================\n", - "--- link\t(revision 0)\n", + "--- link\t(nonexistent)\n", "+++ link\t(working copy)\n", "@@ -0,0 +1 @@\n", "+link A/D\n", @@ -550,10 +544,10 @@ def diff_symlink_to_dir(sbox): "+*\n", "\\ No newline at end of property\n" ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', '.') # We should get the same output if we the diff the symlink itself. 
- svntest.actions.run_and_verify_svn(None, expected_output, [], 'diff', 'link') + svntest.actions.run_and_verify_svn(expected_output, [], 'diff', 'link') #---------------------------------------------------------------------- # Issue 2692 (part of): Check that the client can check out a repository @@ -564,7 +558,8 @@ def checkout_repo_with_unknown_special_type(sbox): svntest.actions.load_repo(sbox, os.path.join(os.path.dirname(sys.argv[0]), 'special_tests_data', - 'bad-special-type.dump')) + 'bad-special-type.dump'), + create_wc=False) expected_output = svntest.wc.State(sbox.wc_dir, { 'special': Item(status='A '), @@ -595,7 +590,7 @@ def replace_symlink_with_dir(sbox): # Does status show the obstruction? was_cwd = os.getcwd() os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, [ "~ from\n" ], [], 'st') + svntest.actions.run_and_verify_svn([ "~ from\n" ], [], 'st') # The commit shouldn't do anything. # I'd expect a failed commit here, but replacing a file locally with a @@ -604,46 +599,84 @@ def replace_symlink_with_dir(sbox): expected_output = svntest.wc.State(wc_dir, { }) - error_re_string = 'E145001: (Entry|Node).*has.*changed (special|kind)' + error_re_string = '.*E145001: (Entry|Node).*has.*changed (special|kind).*' svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, error_re_string, wc_dir) + None, error_re_string) # test for issue #1808: svn up deletes local symlink that obstructs # versioned file @Issue(1808) -@SkipUnless(svntest.main.is_posix_os) def update_obstructing_symlink(sbox): "symlink obstructs incoming delete" sbox.build() wc_dir = sbox.wc_dir - mu_path = os.path.join(wc_dir, 'A', 'mu') - mu_url = sbox.repo_url + '/A/mu' - iota_path = os.path.join(wc_dir, 'iota') + mu_path = sbox.ospath('A/mu') - # delete A/mu and replace it with a symlink - svntest.main.run_svn(None, 'rm', mu_path) - os.symlink(iota_path, mu_path) + iota_abspath = os.path.abspath(sbox.ospath('iota')) - svntest.main.run_svn(None, 'rm', mu_url, - '-m', 
'log msg') + # delete mu and replace it with an (not-added) symlink + sbox.simple_rm('A/mu') + sbox.simple_symlink(iota_abspath, 'A/mu') - svntest.main.run_svn(None, - 'up', wc_dir) + # delete pi and replace it with an added symlink + sbox.simple_rm('A/D/G/pi') + sbox.simple_add_symlink(iota_abspath, 'A/D/G/pi') - # check that the symlink is still there - target = os.readlink(mu_path) - if target != iota_path: - raise svntest.Failure + if not os.path.exists(mu_path): + raise svntest.Failure("mu should be there") + + # Now remove mu and pi in the repository + svntest.main.run_svn(None, 'rm', '-m', 'log msg', + sbox.repo_url + '/A/mu', + sbox.repo_url + '/A/D/G/pi') + + # We expect tree conflicts + expected_output = svntest.wc.State(wc_dir, { + 'A/mu': Item(status=' ', treeconflict='C'), + 'A/D/G/pi': Item(status=' ', treeconflict='C') + }) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.tweak('A/mu', status='? ', treeconflict='C', + wc_rev=None) + + expected_status.tweak('A/D/G/pi', status='A ',treeconflict='C', + wc_rev='-') + + svntest.actions.run_and_verify_update(wc_dir, + expected_output, None, + expected_status) + + expected_info = [ + { + 'Path': re.escape(sbox.ospath('A/D/G/pi')), + 'Tree conflict': 'local file replace, incoming file delete or move.*' + }, + { + 'Path': re.escape(sbox.ospath('A/mu')), + 'Tree conflict': 'local file delete, incoming file delete or move.*' + } + ] + + svntest.actions.run_and_verify_info(expected_info, + sbox.ospath('A/D/G/pi'), + sbox.ospath('A/mu')) + + # check that the symlink is still there + if not os.path.exists(mu_path): + raise svntest.Failure("mu should be there") + if svntest.main.is_posix_os(): + target = os.readlink(mu_path) + if target != iota_abspath: + raise svntest.Failure("mu no longer points to the same location") def warn_on_reserved_name(sbox): "warn when attempt operation on a reserved name" sbox.build() reserved_path = os.path.join(sbox.wc_dir, 
svntest.main.get_admin_name()) svntest.actions.run_and_verify_svn( - "Locking a file with a reserved name failed to result in an error", None, ".*Skipping argument: E200025: '.+' ends in a reserved name.*", 'lock', reserved_path) @@ -666,8 +699,8 @@ def propvalue_normalized(sbox): # Property value should be SVN_PROP_BOOLEAN_TRUE expected_propval = ['*'] - svntest.actions.run_and_verify_svn(None, expected_propval, [], - 'propget', '--strict', 'svn:special', + svntest.actions.run_and_verify_svn(expected_propval, [], + 'propget', '--no-newline', 'svn:special', iota2_path) # Commit and check again. @@ -679,12 +712,11 @@ def propvalue_normalized(sbox): 'iota2' : Item(status=' ', wc_rev=2), }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, - wc_dir) + expected_status) svntest.main.run_svn(None, 'update', wc_dir) - svntest.actions.run_and_verify_svn(None, expected_propval, [], - 'propget', '--strict', 'svn:special', + svntest.actions.run_and_verify_svn(expected_propval, [], + 'propget', '--no-newline', 'svn:special', iota2_path) @@ -702,7 +734,7 @@ def unrelated_changed_special_status(sbox): os.unlink('psi') os.symlink('omega', 'psi') # omega is versioned! 
svntest.main.run_svn(None, 'changelist', 'chi cl', 'chi') - svntest.actions.run_and_verify_svn(None, None, [], 'commit', + svntest.actions.run_and_verify_svn(None, [], 'commit', '--changelist', 'chi cl', '-m', 'psi changed special status') @@ -728,7 +760,7 @@ def symlink_destination_change(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Modify the symlink to point somewhere else os.remove(newfile_path) @@ -746,7 +778,7 @@ def symlink_destination_change(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Issue 3972, repeat revert produces no output - svntest.actions.run_and_verify_svn(None, [], [], 'revert', '-R', wc_dir) + svntest.actions.run_and_verify_svn([], [], 'revert', '-R', wc_dir) svntest.actions.run_and_verify_status(wc_dir, expected_status) # Now replace the symlink with a normal file and try to commit, we @@ -850,8 +882,7 @@ def symlink_to_wc_svnversion(sbox): symlink_basename = os.path.basename(symlink_path) # Some basic tests - svntest.actions.run_and_verify_svnversion("Unmodified symlink to wc", - symlink_path, sbox.repo_url, + svntest.actions.run_and_verify_svnversion(symlink_path, sbox.repo_url, [ "1\n" ], []) #---------------------------------------------------------------------- @@ -901,8 +932,7 @@ def update_symlink(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) #---------------------------------------------------------------------- @Issue(4091) @@ -1108,7 +1138,7 @@ def cat_added_symlink(sbox): kappa_path = sbox.ospath('kappa') sbox.simple_add_symlink('iota', 'kappa') - svntest.actions.run_and_verify_svn(None, "link iota", [], + svntest.actions.run_and_verify_svn("link iota", [], "cat", kappa_path) #---------------------------------------------------------------------- @@ -1144,8 +1174,7 @@ def incoming_symlink_changes(sbox): 's-in-place' : Item(verb='Sending'), }) 
svntest.actions.run_and_verify_commit(wc_dir, - expected_output, None, None, - wc_dir) + expected_output, None) # r4 svntest.main.run_svnmucc('propdel', 'svn:special', @@ -1176,7 +1205,6 @@ def incoming_symlink_changes(sbox): expected_output, None, expected_status, - None, None, None, None, None, check_props=True) # Update back to r2, to prepare some local changes @@ -1193,8 +1221,7 @@ def incoming_symlink_changes(sbox): expected_output, None, expected_status, - None, None, None, None, None, - True, + [], True, wc_dir, '-r', '2') # Ok, now add a property on all of them to make future symlinkness changes @@ -1218,8 +1245,65 @@ def incoming_symlink_changes(sbox): expected_output, None, expected_status, - None, None, None, None, None, - True) + check_props=True) + +#---------------------------------------------------------------------- +@Issue(4479) +def multiline_special(sbox): + "multiline file with svn:special" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_append('iota', 'A second line.\n') + sbox.simple_commit(); + tmp = sbox.get_tempname() + svntest.main.file_write(tmp, '*', 'w+') + svntest.main.run_svnmucc('propsetf', 'svn:special', tmp, + sbox.repo_url + '/iota', + '-m', 'set svn:special') + + sbox.simple_update(revision=1); + sbox.simple_update(); + + expected_disk = svntest.main.greek_state.copy() + expected_disk.tweak() + expected_disk.tweak('iota', + contents="This is the file 'iota'.\nA second line.\n", + props={'svn:special' : '*'}) + svntest.actions.verify_disk(wc_dir, expected_disk.old_tree(), True) + +#---------------------------------------------------------------------- +@Issue(4482) +@XFail(svntest.main.is_posix_os) +def multiline_symlink_special(sbox): + "multiline link file with svn:special" + + sbox.build() + wc_dir = sbox.wc_dir + + sbox.simple_append('dodgy-link1', 'link foo\n') + sbox.simple_append('dodgy-link2', 'link foo\nbar\n') + svntest.main.run_svnmucc('put', sbox.ospath('dodgy-link1'), 'dodgy-link1', + 'put', 
sbox.ospath('dodgy-link2'), 'dodgy-link2', + 'propset', 'svn:special', 'X', 'dodgy-link1', + 'propset', 'svn:special', 'X', 'dodgy-link2', + '-U', sbox.repo_url, + '-m', 'Create dodgy symlinks') + os.remove(sbox.ospath('dodgy-link1')) + os.remove(sbox.ospath('dodgy-link2')) + + sbox.simple_update(); + + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'dodgy-link1' : Item(status=' ', wc_rev=2), + 'dodgy-link2' : Item(status=' ', wc_rev=2), + }) + # XFAIL: Only content before \n used when creating the link but all + # content used when detecting modifications, so the pristine working + # copy shows up as modified. + svntest.actions.run_and_verify_status(wc_dir, expected_status) ######################################################################## # Run the tests @@ -1252,6 +1336,8 @@ test_list = [ None, externals_as_symlink_targets, cat_added_symlink, incoming_symlink_changes, + multiline_special, + multiline_symlink_special, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/stat_tests.py b/subversion/tests/cmdline/stat_tests.py index 6e706ef..602dc76 100755 --- a/subversion/tests/cmdline/stat_tests.py +++ b/subversion/tests/cmdline/stat_tests.py @@ -66,7 +66,7 @@ def status_unversioned_file_in_current_dir(sbox): svntest.main.file_append('foo', 'a new file') - svntest.actions.run_and_verify_svn(None, [ "? foo\n" ], [], + svntest.actions.run_and_verify_svn([ "? foo\n" ], [], 'stat', 'foo') #---------------------------------------------------------------------- @@ -107,7 +107,7 @@ def status_update_with_nested_adds(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now we go to the backup working copy, still at revision 1. 
# We will run 'svn st -u', and make sure that newdir/newfile is reported @@ -136,7 +136,7 @@ def status_shows_all_in_current_dir(sbox): wc_dir = sbox.wc_dir os.chdir(wc_dir) - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'stat', '-vN') if (len(output) != len(os.listdir("."))): @@ -155,14 +155,14 @@ def status_missing_file(sbox): os.remove('iota') - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'status') for line in output: if not re.match("! +iota", line): raise svntest.Failure # This invocation is for issue #2127. - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'status', '-u', 'iota') found_it = 0 @@ -213,7 +213,7 @@ def status_type_change(sbox): expected_output = [s.replace('/', os.path.sep) for s in expected_output] - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected_output), + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), [], 'status') # Now change the file that is obstructing the versioned dir into an @@ -247,7 +247,7 @@ def status_type_change(sbox): # Fix separator for Windows expected_output = [s.replace('/', os.path.sep) for s in expected_output] - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected_output), + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), [], 'status') # Now change the versioned dir that is obstructing the file into an @@ -255,7 +255,7 @@ def status_type_change(sbox): svntest.main.safe_rmtree('iota') os.mkdir('iota') - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected_output), + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), [], 'status') #---------------------------------------------------------------------- @@ -288,7 +288,7 
@@ def status_type_change_to_symlink(sbox): '~ iota\n', ] - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected_output), + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), [], 'status') # "valid" symlinks @@ -297,7 +297,7 @@ def status_type_change_to_symlink(sbox): os.symlink('A/mu', 'iota') os.symlink('C', 'A/D') - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected_output), + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), [], 'status') #---------------------------------------------------------------------- @@ -318,7 +318,7 @@ def status_with_new_files_pending(sbox): svntest.main.run_svn(None, 'up', '-r', '1') - exit_code, output, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, err = svntest.actions.run_and_verify_svn(None, [], 'status', '-u') # The bug fixed in revision 3686 was a segmentation fault. @@ -353,8 +353,7 @@ def status_for_unignored_file(sbox): ['I newdir\n', 'I newfile\n', ' M .\n']) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], 'status', '--no-ignore', '.') @@ -362,8 +361,7 @@ def status_for_unignored_file(sbox): expected = svntest.verify.UnorderedOutput( ['I newdir\n', 'I newfile\n']) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], 'status', 'newdir', 'newfile') @@ -379,7 +377,7 @@ def status_for_nonexistent_file(sbox): os.chdir(wc_dir) exit_code, output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'status', 'nonexistent-file') + None, [], 'status', 'nonexistent-file') # there should *not* be a status line printed for the nonexistent file for line in output: @@ -403,10 +401,10 @@ def status_nonrecursive_update_different_cwd(sbox): J_url = sbox.repo_url + '/A/C/J' K_path = os.path.join(wc_dir, 'A', 'C', 'K' ) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', 
'-m', 'rev 2', J_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', K_path) os.chdir(wc_dir) @@ -418,8 +416,7 @@ def status_nonrecursive_update_different_cwd(sbox): 'Status against revision: 2\n' ] os.chdir('A') - svntest.actions.run_and_verify_svn(None, - UnorderedOutput(expected_output), + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), [], 'status', '-v', '-N', '-u', 'C') @@ -430,8 +427,7 @@ def status_nonrecursive_update_different_cwd(sbox): 'Status against revision: 2\n'] os.chdir('C') - svntest.actions.run_and_verify_svn(None, - UnorderedOutput(expected_output), + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), [], 'status', '-v', '-N', '-u', '.') @@ -494,7 +490,7 @@ def status_file_needs_update(sbox): # the -v flag, which we don't want, as this bug never appeared when # -v was passed. So we run status by hand: os.chdir(was_cwd) - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'status', '-u', other_wc) @@ -558,7 +554,7 @@ def status_uninvited_parent_directory(sbox): # the bug is present). 
So run status by hand: os.chdir(was_cwd) exit_code, out, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'status', '-u', os.path.join(other_wc, 'newfile')) for line in out: @@ -592,7 +588,7 @@ def status_on_forward_deletion(sbox): # svn: Working copy not locked # svn: directory '' not locked # - svntest.actions.run_and_verify_svn(None, None, [], 'st', '-u', 'wc') + svntest.actions.run_and_verify_svn(None, [], 'st', '-u', 'wc') # Try again another way; the error would look like this: # @@ -607,13 +603,13 @@ def status_on_forward_deletion(sbox): svntest.main.safe_rmtree('wc') svntest.main.run_svn(None, 'co', '-r1', A_url + "@1", 'wc') - svntest.actions.run_and_verify_svn(None, None, [], 'st', '-u', 'wc') + svntest.actions.run_and_verify_svn(None, [], 'st', '-u', 'wc') #---------------------------------------------------------------------- def get_last_changed_date(path): "get the Last Changed Date for path using svn info" - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'info', path) for line in out: if re.match("^Last Changed Date", line): @@ -624,7 +620,7 @@ def get_last_changed_date(path): # Helper for timestamp_behaviour test def get_text_timestamp(path): "get the text-time for path using svn info" - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'info', path) for line in out: if re.match("^Text Last Updated", line): @@ -634,7 +630,7 @@ def get_text_timestamp(path): def no_text_timestamp(path): "ensure no text-time for path using svn info" - exit_code, out, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, out, err = svntest.actions.run_and_verify_svn(None, [], 'info', path) for line in out: if re.match("^Text Last Updated", line): @@ -670,9 +666,9 @@ def text_time_behaviour(wc_dir, wc_path, status_path, expected_status, cmd): # 
revert/cleanup change the text-time even though the text doesn't change if cmd == 'cleanup': - svntest.actions.run_and_verify_svn(None, None, [], cmd, wc_dir) + svntest.actions.run_and_verify_svn(None, [], cmd, wc_dir) else: - svntest.actions.run_and_verify_svn(None, None, [], cmd, wc_path) + svntest.actions.run_and_verify_svn(None, [], cmd, wc_path) svntest.actions.run_and_verify_status(wc_dir, expected_status) text_time = get_text_timestamp(wc_path) if text_time == pre_text_time: @@ -718,7 +714,7 @@ use-commit-times = yes svntest.main.create_config_dir(config_dir, config_contents) other_wc = sbox.add_wc_path('other') - svntest.actions.run_and_verify_svn("checkout failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', sbox.repo_url, other_wc, '--config-dir', config_dir) @@ -739,7 +735,7 @@ use-commit-times = yes expected_status = svntest.actions.get_virginal_state(other_wc, 1) svntest.actions.run_and_verify_update(other_wc, expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, other_wc, '--config-dir', config_dir) iota_text_timestamp = get_text_timestamp(other_iota_path) if (iota_text_timestamp[17] != ':' or @@ -794,7 +790,7 @@ def status_on_unversioned_dotdot(sbox): os.mkdir(new_subsub) os.chdir(new_subsub) - svntest.actions.run_and_verify_svn2(None, None, + svntest.actions.run_and_verify_svn2(None, "svn: warning: W155(010|007):.*'.*'.*not", 0, 'st', '..') @@ -868,7 +864,7 @@ def missing_dir_in_anchor(sbox): wc_dir = sbox.wc_dir foo_path = sbox.ospath('foo') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', foo_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', foo_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.add({ 'foo' : Item(status='A ', wc_rev=0), @@ -894,7 +890,7 @@ def status_in_xml(sbox): # Retrieve last changed date from svn log exit_code, output, error = svntest.actions.run_and_verify_svn( - None, None, [], 'log', file_path, 
'--xml', '-rHEAD') + None, [], 'log', file_path, '--xml', '-rHEAD') info_msg = "<date>" for line in output: @@ -912,7 +908,7 @@ def status_in_xml(sbox): svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-m', 'repo-to-repo copy', sbox.repo_url + '/iota', sbox.repo_url + '/iota2') @@ -926,7 +922,7 @@ def status_in_xml(sbox): svntest.actions.run_and_verify_status_xml(expected_entries, file_path, '-u') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', '-m', 'repo delete', sbox.repo_url + '/A/B/E/alpha') @@ -951,15 +947,15 @@ def status_ignored_dir(sbox): new_dir = sbox.ospath('dir.o') new_dir_url = sbox.repo_url + "/dir.o" - svntest.actions.run_and_verify_svn("Create dir", "\n|Committed revision 2.", [], + svntest.actions.run_and_verify_svn(["Committing transaction...\n", + "Committed revision 2.\n"], [], 'mkdir', new_dir_url, '-m', 'msg') # Make a dir that is ignored by the default ignore patterns. os.mkdir(new_dir) # run_and_verify_status doesn't handle this weird kind of entry. 
- svntest.actions.run_and_verify_svn(None, - ['I * ' + new_dir + "\n", + svntest.actions.run_and_verify_svn(['I * ' + new_dir + "\n", ' * 1 ' + wc_dir + "\n", 'Status against revision: 2\n'], [], "status", "-u", wc_dir) @@ -977,7 +973,7 @@ def status_unversioned_dir_in_wc(sbox): expected_err = "svn: warning: (W155007|W155010): .*'.*(/|\\\\)" + \ os.path.basename(path) + \ "' was not found" - svntest.actions.run_and_verify_svn2(None, [], expected_err, 0, + svntest.actions.run_and_verify_svn2([], expected_err, 0, "status", path) #---------------------------------------------------------------------- @@ -1000,7 +996,7 @@ def status_missing_dir(sbox): expected = [ s.replace('A/D/G', a_d_g).replace('/', os.path.sep) for s in expected ] - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected), [], + svntest.actions.run_and_verify_svn(UnorderedOutput(expected), [], "status", wc_dir) expected = [ @@ -1012,7 +1008,7 @@ def status_missing_dir(sbox): ] # now run status -u, we should be able to do this without crashing - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected), [], + svntest.actions.run_and_verify_svn(UnorderedOutput(expected), [], "status", "-u", wc_dir) # Finally run an explicit status request directly on the missing directory. 
@@ -1025,7 +1021,7 @@ def status_missing_dir(sbox): expected = [ s.replace('A/D/G', a_d_g).replace('/', os.path.sep) for s in expected ] - svntest.actions.run_and_verify_svn(None, UnorderedOutput(expected), [], + svntest.actions.run_and_verify_svn(UnorderedOutput(expected), [], "status", a_d_g) def status_add_plus_conflict(sbox): @@ -1038,42 +1034,42 @@ def status_add_plus_conflict(sbox): branch_url = sbox.repo_url + '/branch' trunk_url = sbox.repo_url + '/trunk' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'rev 2', branch_url, trunk_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', wc_dir) branch_file = sbox.ospath('branch/file') svntest.main.file_write(branch_file, "line 1\nline2\nline3\n", 'wb+') - svntest.actions.run_and_verify_svn(None, None, [], 'add', branch_file) + svntest.actions.run_and_verify_svn(None, [], 'add', branch_file) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', branch_file, '-m', 'rev 3') svntest.main.file_write(branch_file, "line 1\nline3\n", 'wb') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', branch_file, '-m', 'rev 4') svntest.main.file_write(branch_file, "line 1\nline2\n", 'wb') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'commit', branch_file, '-m', 'rev 5') trunk_dir = sbox.ospath('trunk') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', branch_url, '-r', '2:3', trunk_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'merge', branch_url, '-r', '4:5', trunk_dir) @@ -1088,7 +1084,7 @@ def status_add_plus_conflict(sbox): expected_output = svntest.verify.UnorderedOutput(lines) - 
svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'status', wc_dir) #---------------------------------------------------------------------- @@ -1102,8 +1098,7 @@ def inconsistent_eol(sbox): svntest.main.file_write(iota_path, "line 1\nline 2\n", "wb") - svntest.actions.run_and_verify_svn(None, - "property 'svn:eol-style' set on.*iota", + svntest.actions.run_and_verify_svn("property 'svn:eol-style' set on.*iota", [], 'propset', 'svn:eol-style', 'native', sbox.ospath('iota')) @@ -1116,7 +1111,7 @@ def inconsistent_eol(sbox): expected_status.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Make the eol style inconsistent and verify that status says nothing. svntest.main.file_write(iota_path, "line 1\nline 2\r\n", "wb") @@ -1149,8 +1144,7 @@ def status_update_with_incoming_props(sbox): # Commit the working copy svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create expected trees for an update to revision 1. 
expected_output = svntest.wc.State(wc_dir, { @@ -1165,7 +1159,7 @@ def status_update_with_incoming_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, + [], True, '-r', '1', wc_dir) # Can't use run_and_verify_status here because the out-of-date @@ -1175,8 +1169,7 @@ def status_update_with_incoming_props(sbox): " * 1 " + wc_dir + "\n", "Status against revision: 2\n" ]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", wc_dir) @@ -1188,12 +1181,12 @@ def status_update_with_incoming_props(sbox): " * 1 1 jrandom " + wc_dir + "\n", "Status against revision: 2\n" ]) - svntest.actions.run_and_verify_svn(None, expected, [], + svntest.actions.run_and_verify_svn(expected, [], "status", "-uvN", wc_dir) # Retrieve last changed date from svn log - exit_code, output, error = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, output, error = svntest.actions.run_and_verify_svn(None, [], 'log', wc_dir, '--xml', '-r1') @@ -1254,8 +1247,7 @@ def status_update_verbose_with_incoming_props(sbox): # Commit the working copy svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create expected trees for an update to revision 1. 
expected_output = svntest.wc.State(wc_dir, { @@ -1271,7 +1263,7 @@ def status_update_verbose_with_incoming_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, + [], True, '-r', '1', wc_dir) # Can't use run_and_verify_status here because the out-of-date @@ -1301,8 +1293,7 @@ def status_update_verbose_with_incoming_props(sbox): " *" + common + wc_dir + "\n", "Status against revision: 2\n" ]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-uv", wc_dir) @@ -1333,8 +1324,7 @@ def status_nonrecursive_update(sbox): expected_status.tweak('A/D/gamma', wc_rev=2, status=' ') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Create expected trees for an update to revision 1. expected_output = svntest.wc.State(wc_dir, { @@ -1349,15 +1339,14 @@ def status_nonrecursive_update(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, '-r', '1', wc_dir) # Check the remote status of folder A (non-recursively) xout = [" * 1 " + sbox.ospath('A/mu') + "\n", "Status against revision: 2\n" ] - svntest.actions.run_and_verify_svn(None, - xout, + svntest.actions.run_and_verify_svn(xout, [], "status", "-uN", A_path) @@ -1390,8 +1379,7 @@ def change_files_and_commit(wc_dir, files, baserev=1): expected_status.tweak(file, wc_rev=commitrev, status=' ') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) def status_depth_local(sbox): "run 'status --depth=X' with local changes" @@ -1414,8 +1402,7 @@ def status_depth_local(sbox): # depth=empty expected = svntest.verify.UnorderedOutput( [" M %s\n" % A_path]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "--depth=empty", A_path) @@ -1424,8 +1411,7 @@ def status_depth_local(sbox): [" M %s\n" % 
A_path, "M %s\n" % mu_path]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "--depth=files", A_path) @@ -1435,8 +1421,7 @@ def status_depth_local(sbox): " M %s\n" % D_path, "M %s\n" % mu_path]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "--depth=immediates", A_path) @@ -1447,8 +1432,7 @@ def status_depth_local(sbox): "M %s\n" % mu_path, "M %s\n" % gamma_path]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "--depth=infinity", A_path) @@ -1468,7 +1452,7 @@ def status_depth_update(sbox): svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', A_path) svntest.main.run_svn(None, 'propset', '--force', 'svn:test', 'value', D_path) - svntest.main.run_svn(None, 'ci', '-m', 'log message', wc_dir) + sbox.simple_commit(message='log message') # update to r1 svntest.main.run_svn(None, 'up', '-r', '1', wc_dir) @@ -1480,8 +1464,7 @@ def status_depth_update(sbox): [" * 1 %s\n" % A_path, "Status against revision: 3\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=empty", A_path) @@ -1491,8 +1474,7 @@ def status_depth_update(sbox): " * 1 %s\n" % A_path, "Status against revision: 3\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=files", A_path) @@ -1504,8 +1486,7 @@ def status_depth_update(sbox): " * 1 %s\n" % mu_path, "Status against revision: 3\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=immediates", A_path) @@ -1518,8 +1499,7 @@ def status_depth_update(sbox): " * 1 %s\n" % gamma_path, "Status against revision: 3\n"]) - 
svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=infinity", A_path) @@ -1548,8 +1528,7 @@ def status_depth_update_local_modifications(sbox): [" M 1 %s\n" % A_path, "Status against revision: 1\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=empty", A_path) @@ -1557,8 +1536,7 @@ def status_depth_update_local_modifications(sbox): ["M 1 %s\n" % mu_path, "Status against revision: 1\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=empty", mu_path) @@ -1568,8 +1546,7 @@ def status_depth_update_local_modifications(sbox): " M 1 %s\n" % A_path, "Status against revision: 1\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=files", A_path) @@ -1581,8 +1558,7 @@ def status_depth_update_local_modifications(sbox): "M 1 %s\n" % mu_path, "Status against revision: 1\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=immediates", A_path) @@ -1595,8 +1571,7 @@ def status_depth_update_local_modifications(sbox): "M 1 %s\n" % gamma_path, "Status against revision: 1\n"]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "--depth=infinity", A_path) @@ -1613,7 +1588,7 @@ def status_dash_u_deleted_directories(sbox): B_path = os.path.join(A_path, 'B') # delete the B directory - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', B_path) # now run status -u on B and its children @@ -1630,8 +1605,7 @@ def status_dash_u_deleted_directories(sbox): "D 1 %s\n" % os.path.join("B", "E", "beta"), "D 1 %s\n" % os.path.join("B", "F"), "Status against revision: 1\n" 
]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", "B") @@ -1647,8 +1621,7 @@ def status_dash_u_deleted_directories(sbox): "D 1 %s\n" % os.path.join("E", "beta"), "D 1 %s\n" % "F", "Status against revision: 1\n" ]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", ".") @@ -1661,8 +1634,7 @@ def status_dash_u_deleted_directories(sbox): os.chdir(was_cwd) os.chdir(A_path) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u", os.path.join("B", "E")) @@ -1687,8 +1659,7 @@ def status_dash_u_type_change(sbox): xout = ["~ 1 iota\n", "Status against revision: 1\n" ] - svntest.actions.run_and_verify_svn(None, - xout, + svntest.actions.run_and_verify_svn(xout, [], "status", "-u") @@ -1723,8 +1694,7 @@ def status_dash_u_type_change(sbox): [s.replace('/', os.path.sep) for s in output]) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-u") @@ -1747,15 +1717,14 @@ def status_with_tree_conflicts(sbox): # check status of G expected = svntest.verify.UnorderedOutput( ["A + C %s\n" % rho, - " > local file edit, incoming file delete upon update\n", + " > local file edit, incoming file delete or move upon update\n", "D C %s\n" % pi, " > local file delete, incoming file edit upon update\n", "! 
C %s\n" % tau, - " > local file delete, incoming file delete upon update\n", + " > local file delete, incoming file delete or move upon update\n", ] + svntest.main.summary_of_conflicts(tree_conflicts=3)) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", G) @@ -1765,13 +1734,12 @@ def status_with_tree_conflicts(sbox): "D C 2 2 jrandom %s\n" % pi, " > local file delete, incoming file edit upon update\n", "A + C - 1 jrandom %s\n" % rho, - " > local file edit, incoming file delete upon update\n", + " > local file edit, incoming file delete or move upon update\n", "! C %s\n" % tau, - " > local file delete, incoming file delete upon update\n", + " > local file delete, incoming file delete or move upon update\n", ] + svntest.main.summary_of_conflicts(tree_conflicts=3)) - svntest.actions.run_and_verify_svn(None, - expected, + svntest.actions.run_and_verify_svn(expected, [], "status", "-v", G) @@ -1829,7 +1797,7 @@ def status_nested_wc_old_format(sbox): svntest.main.file_append(sbox.ospath('subdir/.svn/format'), '10\n') # format 10 was the Subversion 1.6 format os.chdir(wc_dir) - svntest.actions.run_and_verify_svn(None, [ "? subdir\n" ], [], 'st') + svntest.actions.run_and_verify_svn([ "? 
subdir\n" ], [], 'st') #---------------------------------------------------------------------- # Regression test for issue #3855 "status doesn't show 'K' on a locked @@ -1842,9 +1810,9 @@ def status_locked_deleted(sbox): iota_path = sbox.ospath('iota') sbox.simple_rm('iota') - svntest.actions.run_and_verify_svn(None, None, [], 'lock', + svntest.actions.run_and_verify_svn(None, [], 'lock', os.path.join(sbox.wc_dir, 'iota')) - svntest.actions.run_and_verify_svn(None, ['D K %s\n' % iota_path], [], + svntest.actions.run_and_verify_svn(['D K %s\n' % iota_path], [], 'status', iota_path) @Issue(3774) @@ -1856,7 +1824,7 @@ def wc_wc_copy_timestamp(sbox): time.sleep(1.1) svntest.main.file_append(sbox.ospath('A/D/H/psi'), 'modified\n') - svntest.actions.run_and_verify_svn(None, None, [], 'copy', + svntest.actions.run_and_verify_svn(None, [], 'copy', sbox.ospath('A/D/H'), sbox.ospath('A/D/H2')) @@ -1887,7 +1855,7 @@ def wc_wc_copy_timestamp(sbox): raise svntest.Failure("psi timestamps should be the same") # Cleanup repairs timestamps, so this should be a no-op. - svntest.actions.run_and_verify_svn(None, None, [], 'cleanup', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir) chi_dst_timestamp2 = get_text_timestamp(sbox.ospath('A/D/H2/chi')) if chi_dst_timestamp2 != chi_dst_timestamp1: raise svntest.Failure("chi timestamps should be the same") @@ -1915,7 +1883,7 @@ def wclock_status(sbox): 'A/D/G', 'A/D/H'] ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'status', wc_dir) # Second non-recursive lock @@ -1929,7 +1897,7 @@ def wclock_status(sbox): 'A/D/G', 'A/D/H'] ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'status', wc_dir) @@ -1951,12 +1919,12 @@ def modified_modulo_translation(sbox): # Run status. Expect some output. 
# TODO: decide how such files should show in the output; whether they # always show, or only with some --flag; and adjust this accordingly. - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'status', wc_dir) # Expect the file to be renormalized (to LF) after a revert. sbox.simple_revert('iota') - svntest.actions.run_and_verify_svn(None, [], [], 'status', wc_dir) + svntest.actions.run_and_verify_svn([], [], 'status', wc_dir) def status_not_present(sbox): "no status on not-present and excluded nodes" @@ -1970,7 +1938,7 @@ def status_not_present(sbox): sbox.ospath('A/mu'), sbox.ospath('A/B')) sbox.simple_commit() - svntest.actions.run_and_verify_svn(None, [], [],'status', + svntest.actions.run_and_verify_svn([], [],'status', sbox.ospath('iota'), sbox.ospath('A/B'), sbox.ospath('A/C'), @@ -1987,7 +1955,7 @@ def status_unversioned_dir(sbox): # This should work on UNIX-like systems and Windows systems expected_err = "svn: warning: W1550(07|10): .*'.*(/|\\\\)" + \ "' is not a working copy" - svntest.actions.run_and_verify_svn2(None, [], expected_err, 0, + svntest.actions.run_and_verify_svn2([], expected_err, 0, "status", "/") def status_case_changed(sbox): @@ -2073,7 +2041,7 @@ def move_update_timestamps(sbox): expected_status) time.sleep(1.1) - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', sbox.ospath('A/B/E')) @@ -2121,12 +2089,12 @@ def status_path_handling(sbox): def status_move_missing_direct(sbox): "move information when status is called directly" - + sbox.build() sbox.simple_copy('A', 'Z') sbox.simple_commit('') sbox.simple_update('') - + sbox.simple_move('Z', 'ZZ') sbox.simple_move('A', 'Z') sbox.simple_move('Z/B', 'ZB') @@ -2136,18 +2104,18 @@ def status_move_missing_direct(sbox): # Somehow 'svn status' now shows different output for 'ZB/E' # when called directly and via an 
ancestor, as this handles # multi-layer in a different way - + # Note that the status output may change over different Subversion revisions, # but the status on a node should be identical anyway 'svn status' is called # on it. - + expected_output = [ 'A + %s\n' % sbox.ospath('ZB'), - ' > moved from %s\n' % os.path.join('..', 'Z', 'B'), + ' > moved from %s\n' % os.path.join('..', 'Z', 'B'), 'D + %s\n' % sbox.ospath('ZB/E'), ' > moved to %s\n' % os.path.join('..', 'Z', 'B', 'E'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'status', + svntest.actions.run_and_verify_svn(expected_output, [], 'status', sbox.ospath('ZB'), '--depth', 'immediates') # And calling svn status on just 'ZB/E' should have the same result for this node @@ -2156,12 +2124,12 @@ def status_move_missing_direct(sbox): 'D + %s\n' % sbox.ospath('ZB/E'), ' > moved to %s\n' % os.path.join('..', '..', 'Z', 'B', 'E'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'status', + svntest.actions.run_and_verify_svn(expected_output, [], 'status', sbox.ospath('ZB/E'), '--depth', 'empty') def status_move_missing_direct_base(sbox): "move when status is called directly with base" - + sbox.build() sbox.simple_copy('A', 'Z') sbox.simple_mkdir('Q') @@ -2169,10 +2137,10 @@ def status_move_missing_direct_base(sbox): sbox.simple_mkdir('Q/ZB/E') sbox.simple_commit('') sbox.simple_update('') - + sbox.simple_rm('Q') sbox.simple_mkdir('Q') - + sbox.simple_move('Z', 'ZZ') sbox.simple_move('A', 'Z') sbox.simple_move('Z/B', 'Q/ZB') @@ -2182,21 +2150,21 @@ def status_move_missing_direct_base(sbox): # Somehow 'svn status' now shows different output for 'Q/ZB/E' # when called directly and via an ancestor, as this handles # multi-layer in a different way - + # Note that the status output may change over different Subversion revisions, # but the status on a node should be identical anyway 'svn status' is called # on it. 
- + # This test had a different result as status_move_missing_direct at the time of # writing this test. - + expected_output = [ 'A + %s\n' % sbox.ospath('Q/ZB'), ' > moved from %s\n' % os.path.join('..', '..', 'Z', 'B'), 'D + %s\n' % sbox.ospath('Q/ZB/E'), ' > moved to %s\n' % os.path.join('..', '..', 'Z', 'B', 'E'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'status', + svntest.actions.run_and_verify_svn(expected_output, [], 'status', sbox.ospath('Q/ZB'), '--depth', 'immediates') # And calling svn status on just 'ZB/E' should have the same result for this node, @@ -2205,9 +2173,99 @@ def status_move_missing_direct_base(sbox): 'D + %s\n' % sbox.ospath('Q/ZB/E'), ' > moved to %s\n' % os.path.join('..', '..', '..', 'Z', 'B', 'E'), ] - svntest.actions.run_and_verify_svn(None, expected_output, [], 'status', + svntest.actions.run_and_verify_svn(expected_output, [], 'status', sbox.ospath('Q/ZB/E'), '--depth', 'empty') +def status_missing_conflicts(sbox): + "status missing certain conflicts" + + sbox.build() + wc_dir = sbox.wc_dir + sbox.simple_propset('q', 'r', 'A/B/E/alpha', 'A/B/E/beta') + sbox.simple_commit() + + sbox.simple_move('A/B/E/alpha', 'alpha') + sbox.simple_move('A/B/E/beta', 'beta') + + sbox.simple_rm('A/B/E') + + sbox.simple_update('A/B/E', revision=1) + + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('A/B/E', status='D ', treeconflict='C', wc_rev=1) + expected_status.tweak('A/B/E/alpha', status='D ', treeconflict='C', wc_rev=1, + moved_to='alpha') + expected_status.tweak('A/B/E/beta', status='D ', treeconflict='C', wc_rev=1, + moved_to='beta') + expected_status.add({ + 'alpha' : Item(status='A ', copied='+', moved_from='A/B/E/alpha', wc_rev='-'), + 'beta' : Item(status='A ', copied='+', moved_from='A/B/E/beta', wc_rev='-') + }) + + svntest.actions.run_and_verify_status(wc_dir, expected_status) + expected_info = [ + { + 'Tree conflict': 'local file moved away, incoming file edit upon 
update.*' + }, + { + 'Tree conflict': 'local file moved away, incoming file edit upon update.*' + } + ] + svntest.actions.run_and_verify_info(expected_info, + sbox.ospath('A/B/E/alpha'), + sbox.ospath('A/B/E/beta')) + + svntest.actions.run_and_verify_svn(None, [], + 'resolve', '--accept=mine-conflict', + '--depth=empty', sbox.ospath('A/B/E')) + expected_status.tweak('A/B/E', treeconflict=None) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + # Now replace with directory + sbox.simple_mkdir('A/B/E') + expected_status.tweak('A/B/E', status='R ') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + + svntest.actions.run_and_verify_status(wc_dir, expected_status) + svntest.actions.run_and_verify_info(expected_info, + sbox.ospath('A/B/E/alpha'), + sbox.ospath('A/B/E/beta')) + + #Recreate scenario for file + sbox.simple_rm('A/B/E', 'alpha', 'beta') + svntest.actions.run_and_verify_svn(None, [], + 'revert', '-R', sbox.ospath('A/B/E')) + + sbox.simple_update('A/B/E', revision=2) + + sbox.simple_move('A/B/E/alpha', 'alpha') + sbox.simple_move('A/B/E/beta', 'beta') + + sbox.simple_rm('A/B/E') + sbox.simple_update('A/B/E', revision=1) + svntest.actions.run_and_verify_svn(None, [], + 'resolve', '--accept=mine-conflict', + '--depth=empty', sbox.ospath('A/B/E')) + + sbox.simple_append('A/B/E', 'something') + expected_status.tweak('A/B/E', status='D ') + svntest.actions.run_and_verify_status(wc_dir, expected_status) + sbox.simple_add('A/B/E') + + # In the entries world A/B/E doesn't have children.. + expected_status.tweak('A/B/E', status='R ', entry_kind='file') + + # Tree conflicts still in db + svntest.actions.run_and_verify_info(expected_info, + sbox.ospath('A/B/E/alpha'), + sbox.ospath('A/B/E/beta')) + + # But not in status! 
+ svntest.actions.run_and_verify_status(wc_dir, expected_status) + + + + ######################################################################## # Run the tests @@ -2258,6 +2316,7 @@ test_list = [ None, status_path_handling, status_move_missing_direct, status_move_missing_direct_base, + status_missing_conflicts, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/svnadmin_tests.py b/subversion/tests/cmdline/svnadmin_tests.py index e32681d..470db65 100755 --- a/subversion/tests/cmdline/svnadmin_tests.py +++ b/subversion/tests/cmdline/svnadmin_tests.py @@ -26,11 +26,13 @@ # General modules import os +import logging import re import shutil import sys import threading -import logging +import time +import gzip logger = logging.getLogger() @@ -48,6 +50,7 @@ XFail = svntest.testcase.XFail_deco Issues = svntest.testcase.Issues_deco Issue = svntest.testcase.Issue_deco Wimp = svntest.testcase.Wimp_deco +SkipDumpLoadCrossCheck = svntest.testcase.SkipDumpLoadCrossCheck_deco Item = svntest.wc.StateItem def check_hotcopy_bdb(src, dst): @@ -61,8 +64,7 @@ def check_hotcopy_bdb(src, dst): if origerr or backerr or origout != backout: raise svntest.Failure -def check_hotcopy_fsfs(src, dst): - "Verify that the SRC FSFS repository has been correctly copied to DST." +def check_hotcopy_fsfs_fsx(src, dst): # Walk the source and compare all files to the destination for src_dirpath, src_dirs, src_files in os.walk(src): # Verify that the current directory exists in the destination @@ -72,16 +74,34 @@ def check_hotcopy_fsfs(src, dst): "destination" % dst_dirpath) # Verify that all dirents in the current directory also exist in source for dst_dirent in os.listdir(dst_dirpath): + # Ignore auto-created empty lock files as they may or may not + # be present and are neither required by nor do they harm to + # the destination repository. 
+ if dst_dirent == 'pack-lock': + continue + if dst_dirent == 'write-lock': + continue + + # Ignore auto-created rep-cache.db-journal file + if dst_dirent == 'rep-cache.db-journal': + continue + src_dirent = os.path.join(src_dirpath, dst_dirent) if not os.path.exists(src_dirent): raise svntest.Failure("%s does not exist in hotcopy " "source" % src_dirent) # Compare all files in this directory for src_file in src_files: - # Exclude temporary files - if src_file == 'rev-prop-atomics.shm': + # Ignore auto-created empty lock files as they may or may not + # be present and are neither required by nor do they harm to + # the destination repository. + if src_file == 'pack-lock': continue - if src_file == 'rev-prop-atomics.mutex': + if src_file == 'write-lock': + continue + + # Ignore auto-created rep-cache.db-journal file + if src_file == 'rep-cache.db-journal': continue src_path = os.path.join(src_dirpath, src_file) @@ -90,6 +110,20 @@ def check_hotcopy_fsfs(src, dst): raise svntest.Failure("%s does not exist in hotcopy " "destination" % dst_path) + # Special case for db/uuid: Only the UUID in the first line needs + # to match. Source and target must have the same number of lines + # (due to having the same format). + if src_path == os.path.join(src, 'db', 'uuid'): + lines1 = open(src_path, 'rb').read().split("\n") + lines2 = open(dst_path, 'rb').read().split("\n") + if len(lines1) != len(lines2): + raise svntest.Failure("%s differs in number of lines" + % dst_path) + if lines1[0] != lines2[0]: + raise svntest.Failure("%s contains different uuid: '%s' vs. '%s'" + % (dst_path, lines1[0], lines2[0])) + continue + # Special case for rep-cache: It will always differ in a byte-by-byte # comparison, so compare db tables instead. if src_file == 'rep-cache.db': @@ -106,14 +140,14 @@ def check_hotcopy_fsfs(src, dst): for i in range(len(rows1)): if rows1[i] != rows2[i]: raise svntest.Failure("rep-cache row %i differs: '%s' vs. 
'%s'" - % (row, rows1[i])) + % (i, rows1[i], rows2[i])) continue # Special case for revprop-generation: It will always be zero in # the hotcopy destination (i.e. a fresh cache generation) if src_file == 'revprop-generation': f2 = open(dst_path, 'r') - revprop_gen = int(f2.read().strip()) + revprop_gen = int(f2.read().strip().split()[1]) if revprop_gen != 0: raise svntest.Failure("Hotcopy destination has non-zero " + "revprop generation") @@ -146,6 +180,14 @@ def check_hotcopy_fsfs(src, dst): f1.close() f2.close() +def check_hotcopy_fsfs(src, dst): + "Verify that the SRC FSFS repository has been correctly copied to DST." + check_hotcopy_fsfs_fsx(src, dst) + +def check_hotcopy_fsx(src, dst): + "Verify that the SRC FSX repository has been correctly copied to DST." + check_hotcopy_fsfs_fsx(src, dst) + #---------------------------------------------------------------------- # How we currently test 'svnadmin' -- @@ -189,6 +231,24 @@ def get_txns(repo_dir): return txns +def patch_format(repo_dir, shard_size): + """Rewrite the format of the FSFS or FSX repository REPO_DIR so + that it would use sharding with SHARDS revisions per shard.""" + + format_path = os.path.join(repo_dir, "db", "format") + contents = open(format_path, 'rb').read() + processed_lines = [] + + for line in contents.split("\n"): + if line.startswith("layout "): + processed_lines.append("layout sharded %d" % shard_size) + else: + processed_lines.append(line) + + new_contents = "\n".join(processed_lines) + os.chmod(format_path, 0666) + open(format_path, 'wb').write(new_contents) + def load_and_verify_dumpstream(sbox, expected_stdout, expected_stderr, revs, check_props, dump, *varargs): """Load the array of lines passed in DUMP into the current tests' @@ -225,8 +285,7 @@ def load_and_verify_dumpstream(sbox, expected_stdout, expected_stderr, if revs: # verify revs as wc states for rev in range(len(revs)): - svntest.actions.run_and_verify_svn("Updating to r%s" % (rev+1), - svntest.verify.AnyOutput, [], + 
svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], "update", "-r%s" % (rev+1), sbox.wc_dir) @@ -238,42 +297,140 @@ def load_dumpstream(sbox, dump, *varargs): return load_and_verify_dumpstream(sbox, None, None, None, False, dump, *varargs) -###################################################################### -# Tests - +class FSFS_Index: + """Manages indexes of a rev file in a FSFS format 7 repository. + The interface returns P2L information and allows for item offsets + and lengths to be modified. """ + + def __init__(self, sbox, revision): + self.by_item = { } + self.revision = revision + self.repo_dir = sbox.repo_dir + + self._read() + + def _read(self): + """ Read P2L index using svnfsfs. """ + exit_code, output, errput = svntest.main.run_svnfsfs('dump-index', + '-r' + str(self.revision), + self.repo_dir) + svntest.verify.verify_outputs("Error while dumping index", + [], errput, [], []) + svntest.verify.verify_exit_code(None, exit_code, 0) + + self.by_item.clear() + for line in output: + values = line.split() + if len(values) >= 4 and values[0] != 'Start': + item = long(values[4]) + self.by_item[item] = values + + def _write(self): + """ Rewrite indexes using svnfsfs. """ + by_offset = {} + for values in self.by_item.itervalues(): + by_offset[long(values[0], 16)] = values + + lines = [] + for (offset, values) in sorted(by_offset.items()): + values = by_offset[offset] + line = values[0] + ' ' + values[1] + ' ' + values[2] + ' ' + \ + values[3] + ' ' + values[4] + '\n'; + lines.append(line) + + exit_code, output, errput = svntest.main.run_command_stdin( + svntest.main.svnfsfs_binary, 0, 0, True, lines, + 'load-index', self.repo_dir) + + svntest.verify.verify_outputs("Error while rewriting index", + output, errput, [], []) + svntest.verify.verify_exit_code(None, exit_code, 0) + + def get_item(self, item): + """ Return offset, length and type of ITEM. 
""" + values = self.by_item[item] + + offset = long(values[0], 16) + len = long(values[1], 16) + type = values[2] + + return (offset, len, type) + + def modify_item(self, item, offset, len): + """ Modify offset and length of ITEM. """ + values = self.by_item[item] + + values[0] = '%x' % offset + values[1] = '%x' % len + + self._write() + +def repo_format(sbox): + """ Return the repository format number for SBOX.""" + + format_file = open(os.path.join(sbox.repo_dir, "db", "format")) + format = int(format_file.read()[:1]) + format_file.close() -#---------------------------------------------------------------------- + return format -def test_create(sbox): - "'svnadmin create'" +def set_changed_path_list(sbox, revision, changes): + """ Replace the changed paths list in the revision file REVISION in SBOX + with the text CHANGES.""" + idx = None - repo_dir = sbox.repo_dir - wc_dir = sbox.wc_dir + # read full file + fp = open(fsfs_file(sbox.repo_dir, 'revs', str(revision)), 'r+b') + contents = fp.read() + length = len(contents) - svntest.main.safe_rmtree(repo_dir, 1) - svntest.main.safe_rmtree(wc_dir) - - svntest.main.create_repos(repo_dir) - - svntest.actions.run_and_verify_svn("Creating rev 0 checkout", - ["Checked out revision 0.\n"], [], - "checkout", - sbox.repo_url, wc_dir) + if repo_format(sbox) < 7: + # replace the changed paths list + header = contents[contents.rfind('\n', length - 64, length - 1):] + body_len = long(header.split(' ')[1]) + else: + # read & parse revision file footer + footer_length = ord(contents[length-1]); + footer = contents[length - footer_length - 1:length-1] + l2p_offset = long(footer.split(' ')[0]) + l2p_checksum = footer.split(' ')[1] + p2l_offset = long(footer.split(' ')[2]) + p2l_checksum = footer.split(' ')[3] + + idx = FSFS_Index(sbox, revision) + (offset, item_len, item_type) = idx.get_item(1) + + # split file contents + body_len = offset + indexes = contents[l2p_offset:length - footer_length - 1] + + # construct new footer, include 
indexes as are + file_len = body_len + len(changes) + 1 + p2l_offset += file_len - l2p_offset + + header = str(file_len) + ' ' + l2p_checksum + ' ' \ + + str(p2l_offset) + ' ' + p2l_checksum + header += chr(len(header)) + header = '\n' + indexes + header + + contents = contents[:body_len] + changes + header + + # set new contents + fp.seek(0) + fp.write(contents) + fp.truncate() + fp.close() - svntest.actions.run_and_verify_svn( - "Running status", - [], [], - "status", wc_dir) + if repo_format(sbox) >= 7: + idx.modify_item(1, offset, len(changes) + 1) - svntest.actions.run_and_verify_svn( - "Running verbose status", - [" 0 0 ? %s\n" % wc_dir], [], - "status", "--verbose", wc_dir) +###################################################################### +# Tests - # success +#---------------------------------------------------------------------- # dump stream tests need a dump file @@ -306,7 +463,7 @@ dumpfile_revisions = \ def extra_headers(sbox): "loading of dumpstream with extra headers" - test_create(sbox) + sbox.build(empty=True) dumpfile = clean_dumpfile() @@ -321,7 +478,7 @@ def extra_headers(sbox): def extra_blockcontent(sbox): "load success on oversized Content-length" - test_create(sbox) + sbox.build(empty=True) dumpfile = clean_dumpfile() @@ -339,7 +496,7 @@ def extra_blockcontent(sbox): def inconsistent_headers(sbox): "load failure on undersized Content-length" - test_create(sbox) + sbox.build(empty=True) dumpfile = clean_dumpfile() @@ -355,7 +512,7 @@ def inconsistent_headers(sbox): def empty_date(sbox): "preserve date-less revisions in load" - test_create(sbox) + sbox.build(empty=True) dumpfile = clean_dumpfile() @@ -370,8 +527,8 @@ def empty_date(sbox): '--ignore-uuid') # Verify that the revision still lacks the svn:date property. 
- svntest.actions.run_and_verify_svn(None, [], [], "propget", - "--revprop", "-r1", "svn:date", + svntest.actions.run_and_verify_svn([], '.*(E195011|E200017).*svn:date', + "propget", "--revprop", "-r1", "svn:date", sbox.wc_dir) #---------------------------------------------------------------------- @@ -386,8 +543,7 @@ def dump_copied_dir(sbox): old_C_path = os.path.join(wc_dir, 'A', 'C') new_C_path = os.path.join(wc_dir, 'A', 'B', 'C') svntest.main.run_svn(None, 'cp', old_C_path, new_C_path) - svntest.main.run_svn(None, 'ci', wc_dir, '--quiet', - '-m', 'log msg') + sbox.simple_commit(message='log msg') exit_code, output, errput = svntest.main.run_svnadmin("dump", repo_dir) if svntest.verify.compare_and_display_lines( @@ -410,8 +566,7 @@ def dump_move_dir_modify_child(sbox): Q_path = os.path.join(wc_dir, 'A', 'Q') svntest.main.run_svn(None, 'cp', B_path, Q_path) svntest.main.file_append(os.path.join(Q_path, 'lambda'), 'hello') - svntest.main.run_svn(None, 'ci', wc_dir, '--quiet', - '-m', 'log msg') + sbox.simple_commit(message='log msg') exit_code, output, errput = svntest.main.run_svnadmin("dump", repo_dir) svntest.verify.compare_and_display_lines( "Output of 'svnadmin dump' is unexpected.", @@ -452,15 +607,17 @@ def hotcopy_dot(sbox): os.chdir(backup_dir) svntest.actions.run_and_verify_svnadmin( - None, None, [], + None, [], "hotcopy", os.path.join(cwd, sbox.repo_dir), '.') os.chdir(cwd) if svntest.main.is_fs_type_fsfs(): check_hotcopy_fsfs(sbox.repo_dir, backup_dir) - else: + if svntest.main.is_fs_type_bdb(): check_hotcopy_bdb(sbox.repo_dir, backup_dir) + if svntest.main.is_fs_type_fsx(): + check_hotcopy_fsx(sbox.repo_dir, backup_dir) #---------------------------------------------------------------------- @@ -495,24 +652,29 @@ def hotcopy_format(sbox): #---------------------------------------------------------------------- def setrevprop(sbox): - "'setlog' and 'setrevprop', bypassing hooks'" + "setlog, setrevprop, delrevprop; bypass hooks" sbox.build() # Try a 
simple log property modification. iota_path = os.path.join(sbox.wc_dir, "iota") - exit_code, output, errput = svntest.main.run_svnadmin("setlog", - sbox.repo_dir, - "-r0", - "--bypass-hooks", - iota_path) - if errput: - logger.warn("Error: 'setlog' failed") - raise svntest.Failure + mu_path = sbox.ospath('A/mu') + svntest.actions.run_and_verify_svnadmin([], [], + "setlog", sbox.repo_dir, "-r0", + "--bypass-hooks", + iota_path) + + # Make sure it fails without --bypass-hooks. (We haven't called + # svntest.actions.enable_revprop_changes().) + # + # Note that we attempt to set the log message to a different value than the + # successful call. + svntest.actions.run_and_verify_svnadmin([], svntest.verify.AnyOutput, + "setlog", sbox.repo_dir, "-r0", + mu_path) # Verify that the revprop value matches what we set when retrieved # through the client. - svntest.actions.run_and_verify_svn(None, - [ "This is the file 'iota'.\n", "\n" ], + svntest.actions.run_and_verify_svn([ "This is the file 'iota'.\n", "\n" ], [], "propget", "--revprop", "-r0", "svn:log", sbox.wc_dir) @@ -530,10 +692,18 @@ def setrevprop(sbox): # Verify that the revprop value matches what we set when retrieved # through the client. - svntest.actions.run_and_verify_svn(None, [ "foo\n" ], [], "propget", + svntest.actions.run_and_verify_svn([ "foo\n" ], [], "propget", "--revprop", "-r0", "svn:author", sbox.wc_dir) + # Delete the property. 
+ svntest.actions.run_and_verify_svnadmin([], [], + "delrevprop", "-r0", sbox.repo_dir, + "svn:author") + svntest.actions.run_and_verify_svnlook([], ".*E200017.*svn:author.*", + "propget", "--revprop", "-r0", + sbox.repo_dir, "svn:author") + def verify_windows_paths_in_repos(sbox): "verify a repository containing paths like 'c:hi'" @@ -542,28 +712,38 @@ def verify_windows_paths_in_repos(sbox): repo_url = sbox.repo_url chi_url = sbox.repo_url + '/c:hi' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'log_msg', chi_url) exit_code, output, errput = svntest.main.run_svnadmin("verify", sbox.repo_dir) + if errput: + raise SVNUnexpectedStderr(errput) - # unfortunately, FSFS needs to do more checks than BDB resulting in - # different progress output - if svntest.main.is_fs_type_fsfs(): + # unfortunately, some backends needs to do more checks than other + # resulting in different progress output + if svntest.main.is_fs_log_addressing(): svntest.verify.compare_and_display_lines( "Error while running 'svnadmin verify'.", - 'STDERR', ["* Verifying repository metadata ...\n", + 'STDOUT', ["* Verifying metadata at revision 0 ...\n", + "* Verifying repository metadata ...\n", "* Verified revision 0.\n", "* Verified revision 1.\n", - "* Verified revision 2.\n"], errput) + "* Verified revision 2.\n"], output) + elif svntest.main.fs_has_rep_sharing(): + svntest.verify.compare_and_display_lines( + "Error while running 'svnadmin verify'.", + 'STDOUT', ["* Verifying repository metadata ...\n", + "* Verified revision 0.\n", + "* Verified revision 1.\n", + "* Verified revision 2.\n"], output) else: svntest.verify.compare_and_display_lines( "Error while running 'svnadmin verify'.", - 'STDERR', ["* Verified revision 0.\n", + 'STDOUT', ["* Verified revision 0.\n", "* Verified revision 1.\n", - "* Verified revision 2.\n"], errput) + "* Verified revision 2.\n"], output) 
#---------------------------------------------------------------------- @@ -596,12 +776,14 @@ def verify_incremental_fsfs(sbox): """svnadmin verify detects corruption dump can't""" # setup a repo with a directory 'c:hi' - sbox.build(create_wc = False) + # use physical addressing as this is hard to provoke with logical addressing + sbox.build(create_wc = False, + minor_version = min(svntest.main.options.server_minor_version,8)) repo_url = sbox.repo_url E_url = sbox.repo_url + '/A/B/E' # Create A/B/E/bravo in r2. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'log_msg', E_url + '/bravo') # Corrupt r2's reference to A/C by replacing "dir 7-1.0.r1/1568" with @@ -834,7 +1016,7 @@ def load_with_parent_dir(sbox): "'svnadmin load --parent-dir' reparents mergeinfo" ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2983. ## - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svnadmin_tests_data', @@ -842,20 +1024,18 @@ def load_with_parent_dir(sbox): dumpfile = open(dumpfile_location).read() # Create 'sample' dir in sbox.repo_url, and load the dump stream there. 
- svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 1.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 1.\n'], [], "mkdir", sbox.repo_url + "/sample", "-m", "Create sample dir") load_dumpstream(sbox, dumpfile, '--parent-dir', '/sample') # Verify the svn:mergeinfo properties for '--parent-dir' - svntest.actions.run_and_verify_svn(None, - [sbox.repo_url + + svntest.actions.run_and_verify_svn([sbox.repo_url + "/sample/branch - /sample/trunk:5-7\n"], [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url + '/sample/branch') - svntest.actions.run_and_verify_svn(None, - [sbox.repo_url + + svntest.actions.run_and_verify_svn([sbox.repo_url + "/sample/branch1 - " + "/sample/branch:6-9\n"], [], 'propget', 'svn:mergeinfo', '-R', @@ -864,21 +1044,19 @@ def load_with_parent_dir(sbox): # Create 'sample-2' dir in sbox.repo_url, and load the dump stream again. # This time, don't include a leading slash on the --parent-dir argument. # See issue #3547. - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 11.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 11.\n'], [], "mkdir", sbox.repo_url + "/sample-2", "-m", "Create sample-2 dir") load_dumpstream(sbox, dumpfile, '--parent-dir', 'sample-2') # Verify the svn:mergeinfo properties for '--parent-dir'. - svntest.actions.run_and_verify_svn(None, - [sbox.repo_url + + svntest.actions.run_and_verify_svn([sbox.repo_url + "/sample-2/branch - " + "/sample-2/trunk:15-17\n"], [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url + '/sample-2/branch') - svntest.actions.run_and_verify_svn(None, - [sbox.repo_url + + svntest.actions.run_and_verify_svn([sbox.repo_url + "/sample-2/branch1 - " + "/sample-2/branch:16-19\n"], [], 'propget', 'svn:mergeinfo', '-R', @@ -898,11 +1076,11 @@ def set_uuid(sbox): orig_uuid = output[0].rstrip() # Try setting a new, bogus UUID. 
- svntest.actions.run_and_verify_svnadmin(None, None, '^.*Malformed UUID.*$', + svntest.actions.run_and_verify_svnadmin(None, '^.*Malformed UUID.*$', 'setuuid', sbox.repo_dir, 'abcdef') # Try generating a brand new UUID. - svntest.actions.run_and_verify_svnadmin(None, [], None, + svntest.actions.run_and_verify_svnadmin([], None, 'setuuid', sbox.repo_dir) exit_code, output, errput = svntest.main.run_svnlook('uuid', sbox.repo_dir) if errput: @@ -913,7 +1091,7 @@ def set_uuid(sbox): raise svntest.Failure # Now, try setting the UUID back to the original value. - svntest.actions.run_and_verify_svnadmin(None, [], None, + svntest.actions.run_and_verify_svnadmin([], None, 'setuuid', sbox.repo_dir, orig_uuid) exit_code, output, errput = svntest.main.run_svnlook('uuid', sbox.repo_dir) if errput: @@ -930,7 +1108,7 @@ def reflect_dropped_renumbered_revs(sbox): ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3020. ## - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svndumpfilter_tests_data', @@ -938,7 +1116,8 @@ def reflect_dropped_renumbered_revs(sbox): dumpfile = open(dumpfile_location).read() # Create 'toplevel' dir in sbox.repo_url - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 1.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 1.\n'], [], "mkdir", sbox.repo_url + "/toplevel", "-m", "Create toplevel dir") @@ -954,7 +1133,7 @@ def reflect_dropped_renumbered_revs(sbox): url + "/trunk - /branch1:5-9\n", url + "/toplevel/trunk - /toplevel/branch1:14-18\n", ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -969,10 +1148,10 @@ def fsfs_recover_handle_missing_revs_or_revprops_file(sbox): # Commit up to r3, so we can test various recovery scenarios. 
svntest.main.file_append(os.path.join(sbox.wc_dir, 'iota'), 'newer line\n') - svntest.main.run_svn(None, 'ci', sbox.wc_dir, '--quiet', '-m', 'log msg') + sbox.simple_commit(message='log msg') svntest.main.file_append(os.path.join(sbox.wc_dir, 'iota'), 'newest line\n') - svntest.main.run_svn(None, 'ci', sbox.wc_dir, '--quiet', '-m', 'log msg') + sbox.simple_commit(message='log msg') rev_3 = fsfs_file(sbox.repo_dir, 'revs', '3') rev_was_3 = rev_3 + '.was' @@ -1029,20 +1208,19 @@ def fsfs_recover_handle_missing_revs_or_revprops_file(sbox): ".*Revision 3 has a non-file where its revprops file should be.*"): raise svntest.Failure + # Restore the r3 revprops file, thus repairing the repository. + os.rmdir(revprop_3) + os.rename(revprop_was_3, revprop_3) + #---------------------------------------------------------------------- +@Skip(svntest.main.tests_use_prepacakaged_repository) def create_in_repo_subdir(sbox): "'svnadmin create /path/to/repo/subdir'" + sbox.build(create_wc=False, empty=True) repo_dir = sbox.repo_dir - wc_dir = sbox.wc_dir - - svntest.main.safe_rmtree(repo_dir, 1) - svntest.main.safe_rmtree(wc_dir) - - # This should succeed - svntest.main.create_repos(repo_dir) success = False try: @@ -1070,22 +1248,21 @@ def create_in_repo_subdir(sbox): @SkipUnless(svntest.main.is_fs_type_fsfs) +@SkipDumpLoadCrossCheck() def verify_with_invalid_revprops(sbox): "svnadmin verify detects invalid revprops file" + sbox.build(create_wc=False, empty=True) repo_dir = sbox.repo_dir - svntest.main.safe_rmtree(repo_dir, 1) - - # This should succeed - svntest.main.create_repos(repo_dir) - # Run a test verify exit_code, output, errput = svntest.main.run_svnadmin("verify", sbox.repo_dir) + if errput: + raise SVNUnexpectedStderr(errput) if svntest.verify.verify_outputs( - "Output of 'svnadmin verify' is unexpected.", None, errput, None, + "Output of 'svnadmin verify' is unexpected.", None, output, None, ".*Verified revision 0*"): raise svntest.Failure @@ -1127,7 +1304,7 @@ def 
dont_drop_valid_mergeinfo_during_incremental_loads(sbox): "don't filter mergeinfo revs from incremental dump" # Create an empty repos. - test_create(sbox) + sbox.build(empty=True) # PART 1: Load a full dump to an empty repository. # @@ -1139,7 +1316,7 @@ def dont_drop_valid_mergeinfo_during_incremental_loads(sbox): # | | | | # trunk---r2---r3-----r5---r6-------r8---r9---------------> | | # r1 | | | | | | - # intial | | | |______ | | + # initial | | | |______ | | # import copy | copy | merge merge # | | | merge (r5) (r8) # | | | (r9) | | @@ -1187,7 +1364,7 @@ def dont_drop_valid_mergeinfo_during_incremental_loads(sbox): url + "B2 - /trunk:9\n", url + "B1/B/E - /branches/B2/B/E:11-12\n", "/trunk/B/E:5-6,8-9\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -1216,7 +1393,7 @@ def dont_drop_valid_mergeinfo_during_incremental_loads(sbox): dump_fp.close() # Blow away the current repos and create an empty one in its place. - test_create(sbox) + sbox.build(empty=True) # Load the three incremental dump files in sequence. load_dumpstream(sbox, open(dump_file_r1_10).read(), '--ignore-uuid') @@ -1226,7 +1403,7 @@ def dont_drop_valid_mergeinfo_during_incremental_loads(sbox): # Check the mergeinfo, we use the same expected output as before, # as it (duh!) should be exactly the same as when we loaded the # repos in one shot. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -1236,7 +1413,7 @@ def dont_drop_valid_mergeinfo_during_incremental_loads(sbox): # PART 3: Load a full dump to an non-empty repository. # # Reset our sandbox. 
- test_create(sbox) + sbox.build(empty=True) # Load this skeleton repos into the empty target: # @@ -1280,14 +1457,14 @@ def dont_drop_valid_mergeinfo_during_incremental_loads(sbox): url + "B2 - /Projects/Project-X/trunk:15\n", url + "B1/B/E - /Projects/Project-X/branches/B2/B/E:17-18\n", "/Projects/Project-X/trunk/B/E:11-12,14-15\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) # PART 4: Load a a series of incremental dumps to an non-empty repository. # # Reset our sandbox. - test_create(sbox) + sbox.build(empty=True) # Load this skeleton repos into the empty target: load_dumpstream(sbox, dumpfile_skeleton, '--ignore-uuid') @@ -1303,7 +1480,7 @@ def dont_drop_valid_mergeinfo_during_incremental_loads(sbox): # Check the resulting mergeinfo. We expect the exact same results # as Part 3. # See http://subversion.tigris.org/issues/show_bug.cgi?id=3020#desc16. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -1315,14 +1492,12 @@ def hotcopy_symlink(sbox): ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2591. ## + # Create a repository. + sbox.build(create_wc=False, empty=True) original_repo = sbox.repo_dir hotcopy_repo, hotcopy_url = sbox.add_repo_path('hotcopy') - # Create a repository. - svntest.main.safe_rmtree(original_repo, 1) - svntest.main.create_repos(original_repo) - # Create a file, a dir and a missing path outside the repoitory. svntest.main.safe_rmtree(sbox.wc_dir, 1) os.mkdir(sbox.wc_dir) @@ -1355,7 +1530,7 @@ def hotcopy_symlink(sbox): os.symlink(target_abspath, symlink_path + '_abs') svntest.actions.run_and_verify_svnadmin( - None, None, [], + None, [], "hotcopy", original_repo, hotcopy_repo) # Check if the symlinks were copied correctly. 
@@ -1418,7 +1593,7 @@ text """ - test_create(sbox) + sbox.build(empty=True) # Try to load the dumpstream, expecting a failure (because of mixed EOLs). load_and_verify_dumpstream(sbox, [], svntest.verify.AnyOutput, @@ -1440,7 +1615,11 @@ def verify_non_utf8_paths(sbox): "svnadmin verify with non-UTF-8 paths" dumpfile = clean_dumpfile() - test_create(sbox) + + # Corruption only possible in physically addressed revisions created + # with pre-1.6 servers. + sbox.build(empty=True, + minor_version=min(svntest.main.options.server_minor_version,8)) # Load the dumpstream load_and_verify_dumpstream(sbox, [], [], dumpfile_revisions, False, @@ -1457,15 +1636,15 @@ def verify_non_utf8_paths(sbox): if line == "A\n": # replace 'A' with a latin1 character -- the new path is not valid UTF-8 fp_new.write("\xE6\n") - elif line == "text: 1 279 32 0 d63ecce65d8c428b86f4f8b0920921fe\n": - # fix up the representation checksum - fp_new.write("text: 1 279 32 0 b50b1d5ed64075b5f632f3b8c30cd6b2\n") - elif line == "text: 1 280 32 32 d63ecce65d8c428b86f4f8b0920921fe\n": - # fix up the representation checksum - fp_new.write("text: 1 280 32 32 b50b1d5ed64075b5f632f3b8c30cd6b2\n") + elif line == "text: 1 279 32 32 d63ecce65d8c428b86f4f8b0920921fe\n": + # phys, PLAIN directories: fix up the representation checksum + fp_new.write("text: 1 279 32 32 b50b1d5ed64075b5f632f3b8c30cd6b2\n") elif line == "text: 1 292 44 32 a6be7b4cf075fd39e6a99eb69a31232b\n": - # fix up the representation checksum + # phys, deltified directories: fix up the representation checksum fp_new.write("text: 1 292 44 32 f2e93e73272cac0f18fccf16f224eb93\n") + elif line == "text: 1 6 31 31 90f306aa9bfd72f456072076a2bd94f7\n": + # log addressing: fix up the representation checksum + fp_new.write("text: 1 6 31 31 db2d4a0bad5dff0aea9a288dec02f1fb\n") elif line == "cpath: /A\n": # also fix up the 'created path' field fp_new.write("cpath: /\xE6\n") @@ -1491,8 +1670,8 @@ def verify_non_utf8_paths(sbox): expected_stderr = [ "* Dumped 
revision 0.\n", "WARNING 0x0002: E160005: " - "While validating fspath '?\\230': " - "Path '?\\230' is not in UTF-8" + "While validating fspath '?\\E6': " + "Path '?\\E6' is not in UTF-8" "\n", "* Dumped revision 1.\n", ] @@ -1515,12 +1694,12 @@ def test_lslocks_and_rmlocks(sbox): if exit_code or errput or output: raise svntest.Failure("Error: 'lslocks' failed") - expected_output = UnorderedOutput( - ["'A/B/lambda' locked by user 'jrandom'.\n", - "'iota' locked by user 'jrandom'.\n"]) + expected_output = svntest.verify.UnorderedRegexListOutput( + ["'.*lambda' locked by user 'jrandom'.\n", + "'.*iota' locked by user 'jrandom'.\n"]) # Lock iota and A/B/lambda using svn client - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], "lock", "-m", "Locking files", iota_url, lambda_url) @@ -1582,7 +1761,7 @@ def load_ranges(sbox): "'svnadmin load --revision X:Y'" ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3734. ## - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svnadmin_tests_data', @@ -1593,17 +1772,27 @@ def load_ranges(sbox): # Load our dumpfile, 2 revisions at a time, verifying that we have # the correct youngest revision after each load. load_dumpstream(sbox, dumpdata, '-r0:2') - svntest.actions.run_and_verify_svnlook("Unexpected output", ['2\n'], + svntest.actions.run_and_verify_svnlook(['2\n'], None, 'youngest', sbox.repo_dir) load_dumpstream(sbox, dumpdata, '-r3:4') - svntest.actions.run_and_verify_svnlook("Unexpected output", ['4\n'], + svntest.actions.run_and_verify_svnlook(['4\n'], None, 'youngest', sbox.repo_dir) load_dumpstream(sbox, dumpdata, '-r5:6') - svntest.actions.run_and_verify_svnlook("Unexpected output", ['6\n'], + svntest.actions.run_and_verify_svnlook(['6\n'], None, 'youngest', sbox.repo_dir) # There are ordering differences in the property blocks. 
- expected_dump = UnorderedOutput(dumplines) + if (svntest.main.options.server_minor_version < 6): + temp = [] + + for line in dumplines: + if not "Text-content-sha1:" in line: + temp.append(line) + + expected_dump = UnorderedOutput(temp) + else: + expected_dump = UnorderedOutput(dumplines) + new_dumpdata = svntest.actions.run_and_verify_dump(sbox.repo_dir) svntest.verify.compare_and_display_lines("Dump files", "DUMP", expected_dump, new_dumpdata) @@ -1620,7 +1809,7 @@ def hotcopy_incremental(sbox): for i in [1, 2, 3]: os.chdir(backup_dir) svntest.actions.run_and_verify_svnadmin( - None, None, [], + None, [], "hotcopy", "--incremental", os.path.join(cwd, sbox.repo_dir), '.') os.chdir(cwd) @@ -1632,28 +1821,30 @@ def hotcopy_incremental(sbox): sbox.simple_commit() @SkipUnless(svntest.main.is_fs_type_fsfs) +@SkipUnless(svntest.main.fs_has_pack) def hotcopy_incremental_packed(sbox): "'svnadmin hotcopy --incremental' with packing" + + # Configure two files per shard to trigger packing. sbox.build() + patch_format(sbox.repo_dir, shard_size=2) backup_dir, backup_url = sbox.add_repo_path('backup') os.mkdir(backup_dir) cwd = os.getcwd() - # Configure two files per shard to trigger packing - format_file = open(os.path.join(sbox.repo_dir, 'db', 'format'), 'wb') - format_file.write("6\nlayout sharded 2\n") - format_file.close() - # Pack revisions 0 and 1. - svntest.actions.run_and_verify_svnadmin( - None, ['Packing revisions in shard 0...done.\n'], [], "pack", - os.path.join(cwd, sbox.repo_dir)) + # Pack revisions 0 and 1 if not already packed. + if not (svntest.main.is_fs_type_fsfs and svntest.main.options.fsfs_packing + and svntest.main.options.fsfs_sharding == 2): + svntest.actions.run_and_verify_svnadmin( + ['Packing revisions in shard 0...done.\n'], [], "pack", + os.path.join(cwd, sbox.repo_dir)) # Commit 5 more revs, hotcopy and pack after each commit. 
for i in [1, 2, 3, 4, 5]: os.chdir(backup_dir) svntest.actions.run_and_verify_svnadmin( - None, None, [], + None, [], "hotcopy", "--incremental", os.path.join(cwd, sbox.repo_dir), '.') os.chdir(cwd) @@ -1663,12 +1854,13 @@ def hotcopy_incremental_packed(sbox): if i < 5: sbox.simple_mkdir("newdir-%i" % i) sbox.simple_commit() - if not i % 2: + if (svntest.main.is_fs_type_fsfs and not svntest.main.options.fsfs_packing + and not i % 2): expected_output = ['Packing revisions in shard %d...done.\n' % (i/2)] else: expected_output = [] svntest.actions.run_and_verify_svnadmin( - None, expected_output, [], "pack", os.path.join(cwd, sbox.repo_dir)) + expected_output, [], "pack", os.path.join(cwd, sbox.repo_dir)) def locking(sbox): @@ -1683,7 +1875,7 @@ def locking(sbox): # Test illegal character in comment file. expected_error = ".*svnadmin: E130004:.*" - svntest.actions.run_and_verify_svnadmin(None, None, + svntest.actions.run_and_verify_svnadmin(None, expected_error, "lock", sbox.repo_dir, "iota", "jrandom", @@ -1691,7 +1883,7 @@ def locking(sbox): # Test locking path with --bypass-hooks expected_output = "'iota' locked by user 'jrandom'." - svntest.actions.run_and_verify_svnadmin(None, expected_output, + svntest.actions.run_and_verify_svnadmin(expected_output, None, "lock", sbox.repo_dir, "iota", "jrandom", @@ -1699,13 +1891,13 @@ def locking(sbox): "--bypass-hooks") # Remove lock - svntest.actions.run_and_verify_svnadmin(None, None, + svntest.actions.run_and_verify_svnadmin(None, None, "rmlocks", sbox.repo_dir, "iota") # Test locking path without --bypass-hooks expected_output = "'iota' locked by user 'jrandom'." - svntest.actions.run_and_verify_svnadmin(None, expected_output, + svntest.actions.run_and_verify_svnadmin(expected_output, None, "lock", sbox.repo_dir, "iota", "jrandom", @@ -1713,7 +1905,7 @@ def locking(sbox): # Test locking already locked path. 
expected_error = ".*svnadmin: E160035:.*" - svntest.actions.run_and_verify_svnadmin(None, None, + svntest.actions.run_and_verify_svnadmin(None, expected_error, "lock", sbox.repo_dir, "iota", "jrandom", @@ -1721,7 +1913,7 @@ def locking(sbox): # Test locking non-existent path. expected_error = ".*svnadmin: E160013:.*" - svntest.actions.run_and_verify_svnadmin(None, None, + svntest.actions.run_and_verify_svnadmin(None, expected_error, "lock", sbox.repo_dir, "non-existent", "jrandom", @@ -1730,7 +1922,7 @@ def locking(sbox): # Test locking a path while specifying a lock token. expected_output = "'A/D/G/rho' locked by user 'jrandom'." lock_token = "opaquelocktoken:01234567-89ab-cdef-89ab-cdef01234567" - svntest.actions.run_and_verify_svnadmin(None, expected_output, + svntest.actions.run_and_verify_svnadmin(expected_output, None, "lock", sbox.repo_dir, "A/D/G/rho", "jrandom", @@ -1739,7 +1931,7 @@ def locking(sbox): # Test unlocking a path, but provide the wrong lock token. expected_error = ".*svnadmin: E160040:.*" wrong_lock_token = "opaquelocktoken:12345670-9ab8-defc-9ab8-def01234567c" - svntest.actions.run_and_verify_svnadmin(None, None, + svntest.actions.run_and_verify_svnadmin(None, expected_error, "unlock", sbox.repo_dir, "A/D/G/rho", "jrandom", @@ -1748,7 +1940,7 @@ def locking(sbox): # Test unlocking the path again, but this time provide the correct # lock token. expected_output = "'A/D/G/rho' unlocked." - svntest.actions.run_and_verify_svnadmin(None, expected_output, + svntest.actions.run_and_verify_svnadmin(expected_output, None, "unlock", sbox.repo_dir, "A/D/G/rho", "jrandom", @@ -1763,7 +1955,7 @@ def locking(sbox): # Test locking a path. Don't use --bypass-hooks, though, as we wish # to verify that hook script is really getting executed. 
expected_error = ".*svnadmin: E165001:.*" - svntest.actions.run_and_verify_svnadmin(None, None, + svntest.actions.run_and_verify_svnadmin(None, expected_error, "lock", sbox.repo_dir, "iota", "jrandom", @@ -1785,7 +1977,7 @@ def locking(sbox): # Try to unlock a path while providing the correct lock token but # with a preventative hook in place. expected_error = ".*svnadmin: E165001:.*" - svntest.actions.run_and_verify_svnadmin(None, None, + svntest.actions.run_and_verify_svnadmin(None, expected_error, "unlock", sbox.repo_dir, "iota", "jrandom", @@ -1794,7 +1986,7 @@ def locking(sbox): # Finally, use --bypass-hooks to unlock the path (again using the # correct lock token). expected_output = "'iota' unlocked." - svntest.actions.run_and_verify_svnadmin(None, expected_output, + svntest.actions.run_and_verify_svnadmin(expected_output, None, "unlock", "--bypass-hooks", sbox.repo_dir, @@ -1842,15 +2034,370 @@ def mergeinfo_race(sbox): @Issue(4213) +@Skip(svntest.main.is_fs_type_fsx) def recover_old_empty(sbox): "recover empty --compatible-version=1.3" - svntest.main.safe_rmtree(sbox.repo_dir, 1) - svntest.main.create_repos(sbox.repo_dir, minor_version=3) - svntest.actions.run_and_verify_svnadmin(None, None, [], + sbox.build(create_wc=False, empty=True, minor_version=3) + svntest.actions.run_and_verify_svnadmin(None, [], "recover", sbox.repo_dir) @SkipUnless(svntest.main.is_fs_type_fsfs) +def verify_keep_going(sbox): + "svnadmin verify --keep-going test" + + sbox.build(create_wc = False) + repo_url = sbox.repo_url + B_url = sbox.repo_url + '/B' + C_url = sbox.repo_url + '/C' + + # Create A/B/E/bravo in r2. 
+ svntest.actions.run_and_verify_svn(None, [], + 'mkdir', '-m', 'log_msg', + B_url) + + svntest.actions.run_and_verify_svn(None, [], + 'mkdir', '-m', 'log_msg', + C_url) + + r2 = fsfs_file(sbox.repo_dir, 'revs', '2') + fp = open(r2, 'r+b') + fp.write("""inserting junk to corrupt the rev""") + fp.close() + exit_code, output, errput = svntest.main.run_svnadmin("verify", + "--keep-going", + sbox.repo_dir) + + exp_out = svntest.verify.RegexListOutput([".*Verified revision 0.", + ".*Verified revision 1.", + ".*", + ".*Summary.*", + ".*r2: E160004:.*", + ".*r2: E160004:.*", + ".*r3: E160004:.*", + ".*r3: E160004:.*"]) + + if (svntest.main.fs_has_rep_sharing()): + exp_out.insert(0, ".*Verifying.*metadata.*") + + exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.", + "svnadmin: E160004:.*", + "svnadmin: E160004:.*", + ".*Error verifying revision 3.", + "svnadmin: E160004:.*", + "svnadmin: E160004:.*", + "svnadmin: E205012:.*"], False) + + if (svntest.main.is_fs_log_addressing()): + exp_err.insert(0, ".*Error verifying repository metadata.") + exp_err.insert(1, "svnadmin: E160004:.*") + + if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.", + output, errput, exp_out, exp_err): + raise svntest.Failure + + exit_code, output, errput = svntest.main.run_svnadmin("verify", + sbox.repo_dir) + + if (svntest.main.is_fs_log_addressing()): + exp_out = svntest.verify.RegexListOutput([".*Verifying metadata at revision 0"]) + else: + exp_out = svntest.verify.RegexListOutput([".*Verified revision 0.", + ".*Verified revision 1."]) + if (svntest.main.fs_has_rep_sharing()): + exp_out.insert(0, ".*Verifying repository metadata.*") + + if (svntest.main.is_fs_log_addressing()): + exp_err = svntest.verify.RegexListOutput([ + ".*Error verifying repository metadata.", + "svnadmin: E160004:.*"], False) + else: + exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.", + "svnadmin: E160004:.*", + "svnadmin: E160004:.*"], False) 
+ + if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.", + output, errput, exp_out, exp_err): + raise svntest.Failure + + + exit_code, output, errput = svntest.main.run_svnadmin("verify", + "--quiet", + sbox.repo_dir) + + if (svntest.main.is_fs_log_addressing()): + exp_err = svntest.verify.RegexListOutput([ + ".*Error verifying repository metadata.", + "svnadmin: E160004:.*"], False) + else: + exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.", + "svnadmin: E160004:.*", + "svnadmin: E160004:.*"], False) + + if svntest.verify.verify_outputs("Output of 'svnadmin verify' is unexpected.", + None, errput, None, exp_err): + raise svntest.Failure + + # Don't leave a corrupt repository + svntest.main.safe_rmtree(sbox.repo_dir, True) + + +@SkipUnless(svntest.main.is_fs_type_fsfs) +def verify_keep_going_quiet(sbox): + "svnadmin verify --keep-going --quiet test" + + sbox.build(create_wc = False) + repo_url = sbox.repo_url + B_url = sbox.repo_url + '/B' + C_url = sbox.repo_url + '/C' + + # Create A/B/E/bravo in r2. 
+ svntest.actions.run_and_verify_svn(None, [], + 'mkdir', '-m', 'log_msg', + B_url) + + svntest.actions.run_and_verify_svn(None, [], + 'mkdir', '-m', 'log_msg', + C_url) + + r2 = fsfs_file(sbox.repo_dir, 'revs', '2') + fp = open(r2, 'r+b') + fp.write("""inserting junk to corrupt the rev""") + fp.close() + + exit_code, output, errput = svntest.main.run_svnadmin("verify", + "--keep-going", + "--quiet", + sbox.repo_dir) + + exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.", + "svnadmin: E160004:.*", + "svnadmin: E160004:.*", + ".*Error verifying revision 3.", + "svnadmin: E160004:.*", + "svnadmin: E160004:.*", + "svnadmin: E205012:.*"], False) + + # Insert another expected error from checksum verification + if (svntest.main.is_fs_log_addressing()): + exp_err.insert(0, ".*Error verifying repository metadata.") + exp_err.insert(1, "svnadmin: E160004:.*") + + if svntest.verify.verify_outputs( + "Unexpected error while running 'svnadmin verify'.", + output, errput, None, exp_err): + raise svntest.Failure + + # Don't leave a corrupt repository + svntest.main.safe_rmtree(sbox.repo_dir, True) + + +@SkipUnless(svntest.main.is_fs_type_fsfs) +def verify_invalid_path_changes(sbox): + "detect invalid changed path list entries" + + sbox.build(create_wc = False) + repo_url = sbox.repo_url + + # Create a number of revisions each adding a single path + for r in range(2,20): + svntest.actions.run_and_verify_svn(None, [], + 'mkdir', '-m', 'log_msg', + sbox.repo_url + '/B' + str(r)) + + # modify every other revision to make sure that errors are not simply + # "carried over" but that all corrupts we get detected independently + + # add existing node + set_changed_path_list(sbox, 2, + "_0.0.t1-1 add-dir false false /A\n\n") + + # add into non-existent parent + set_changed_path_list(sbox, 4, + "_0.0.t3-2 add-dir false false /C/X\n\n") + + # del non-existent node + set_changed_path_list(sbox, 6, + "_0.0.t5-2 delete-dir false false /C\n\n") + + # del existent node of 
the wrong kind + # + # THIS WILL NOT BE DETECTED + # since dump mechanism and file don't care about the types of deleted nodes + set_changed_path_list(sbox, 8, + "_0.0.t7-2 delete-file false false /B3\n\n") + + # copy from non-existent node + set_changed_path_list(sbox, 10, + "_0.0.t9-2 add-dir false false /B10\n" + "6 /B8\n") + + # copy from existing node of the wrong kind + set_changed_path_list(sbox, 12, + "_0.0.t11-2 add-file false false /B12\n" + "9 /B8\n") + + # modify non-existent node + set_changed_path_list(sbox, 14, + "_0.0.t13-2 modify-file false false /A/D/H/foo\n\n") + + # modify existent node of the wrong kind + set_changed_path_list(sbox, 16, + "_0.0.t15-2 modify-file false false /B12\n\n") + + # replace non-existent node + set_changed_path_list(sbox, 18, + "_0.0.t17-2 replace-file false false /A/D/H/foo\n\n") + + # find corruptions + exit_code, output, errput = svntest.main.run_svnadmin("verify", + "--keep-going", + sbox.repo_dir) + + exp_out = svntest.verify.RegexListOutput([".*Verified revision 0.", + ".*Verified revision 1.", + ".*Verified revision 3.", + ".*Verified revision 5.", + ".*Verified revision 7.", + ".*Verified revision 8.", + ".*Verified revision 9.", + ".*Verified revision 11.", + ".*Verified revision 13.", + ".*Verified revision 15.", + ".*Verified revision 17.", + ".*Verified revision 19.", + ".*", + ".*Summary.*", + ".*r2: E160020:.*", + ".*r2: E160020:.*", + ".*r4: E160013:.*", + ".*r6: E160013:.*", + ".*r6: E160013:.*", + ".*r10: E160013:.*", + ".*r10: E160013:.*", + ".*r12: E145001:.*", + ".*r12: E145001:.*", + ".*r14: E160013:.*", + ".*r14: E160013:.*", + ".*r16: E145001:.*", + ".*r16: E145001:.*", + ".*r18: E160013:.*", + ".*r18: E160013:.*"]) + if (svntest.main.fs_has_rep_sharing()): + exp_out.insert(0, ".*Verifying.*metadata.*") + if svntest.main.is_fs_log_addressing(): + exp_out.insert(1, ".*Verifying.*metadata.*") + + exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.", + "svnadmin: E160020:.*", + 
"svnadmin: E160020:.*", + ".*Error verifying revision 4.", + "svnadmin: E160013:.*", + ".*Error verifying revision 6.", + "svnadmin: E160013:.*", + "svnadmin: E160013:.*", + ".*Error verifying revision 10.", + "svnadmin: E160013:.*", + "svnadmin: E160013:.*", + ".*Error verifying revision 12.", + "svnadmin: E145001:.*", + "svnadmin: E145001:.*", + ".*Error verifying revision 14.", + "svnadmin: E160013:.*", + "svnadmin: E160013:.*", + ".*Error verifying revision 16.", + "svnadmin: E145001:.*", + "svnadmin: E145001:.*", + ".*Error verifying revision 18.", + "svnadmin: E160013:.*", + "svnadmin: E160013:.*", + "svnadmin: E205012:.*"], False) + + + if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.", + output, errput, exp_out, exp_err): + raise svntest.Failure + + exit_code, output, errput = svntest.main.run_svnadmin("verify", + sbox.repo_dir) + + exp_out = svntest.verify.RegexListOutput([".*Verified revision 0.", + ".*Verified revision 1."]) + exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.", + "svnadmin: E160020:.*", + "svnadmin: E160020:.*"], False) + + if (svntest.main.fs_has_rep_sharing()): + exp_out.insert(0, ".*Verifying.*metadata.*") + if svntest.main.is_fs_log_addressing(): + exp_out.insert(1, ".*Verifying.*metadata.*") + if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.", + output, errput, exp_out, exp_err): + raise svntest.Failure + + + exit_code, output, errput = svntest.main.run_svnadmin("verify", + "--quiet", + sbox.repo_dir) + + exp_out = [] + exp_err = svntest.verify.RegexListOutput([".*Error verifying revision 2.", + "svnadmin: E160020:.*", + "svnadmin: E160020:.*"], False) + + if svntest.verify.verify_outputs("Output of 'svnadmin verify' is unexpected.", + output, errput, exp_out, exp_err): + raise svntest.Failure + + # Don't leave a corrupt repository + svntest.main.safe_rmtree(sbox.repo_dir, True) + + +def verify_denormalized_names(sbox): + "detect 
denormalized names and name collisions" + + sbox.build(create_wc=False, empty=True) + + dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), + 'svnadmin_tests_data', + 'normalization_check.dump') + load_dumpstream(sbox, open(dumpfile_location).read()) + + exit_code, output, errput = svntest.main.run_svnadmin( + "verify", "--check-normalization", sbox.repo_dir) + + expected_output_regex_list = [ + ".*Verified revision 0.", + ".*Verified revision 1.", + ".*Verified revision 2.", + ".*Verified revision 3.", + # A/{Eacute}/{aring}lpha + "WARNING 0x0003: Duplicate representation of path 'A/.*/.*lpha'", + ".*Verified revision 4.", + ".*Verified revision 5.", + # Q/{aring}lpha + "WARNING 0x0004: Duplicate representation of path '/Q/.*lpha'" + # A/{Eacute} + " in svn:mergeinfo property of 'A/.*'", + ".*Verified revision 6.", + ".*Verified revision 7."] + + # The BDB backend doesn't do global metadata verification. + if (svntest.main.fs_has_rep_sharing()): + expected_output_regex_list.insert(0, ".*Verifying repository metadata.*") + + if svntest.main.is_fs_log_addressing(): + expected_output_regex_list.insert(0, ".* Verifying metadata at revision 0.*") + + exp_out = svntest.verify.RegexListOutput(expected_output_regex_list) + exp_err = svntest.verify.ExpectedOutput([]) + + svntest.verify.verify_outputs( + "Unexpected error while running 'svnadmin verify'.", + output, errput, exp_out, exp_err) + + +@SkipUnless(svntest.main.is_fs_type_fsfs) def fsfs_recover_old_non_empty(sbox): "fsfs recover non-empty --compatible-version=1.3" @@ -1859,7 +2406,7 @@ def fsfs_recover_old_non_empty(sbox): # svnadmin: E200002: Serialized hash missing terminator sbox.build(create_wc=False, minor_version=3) - svntest.actions.run_and_verify_svnadmin(None, None, [], "recover", + svntest.actions.run_and_verify_svnadmin(None, [], "recover", sbox.repo_dir) @@ -1873,12 +2420,694 @@ def fsfs_hotcopy_old_non_empty(sbox): sbox.build(create_wc=False, minor_version=3) backup_dir, backup_url = 
sbox.add_repo_path('backup') - svntest.actions.run_and_verify_svnadmin(None, None, [], "hotcopy", + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", sbox.repo_dir, backup_dir) check_hotcopy_fsfs(sbox.repo_dir, backup_dir) +def load_ignore_dates(sbox): + "svnadmin load --ignore-dates" + + # All revisions in the loaded repository should come after this time. + start_time = time.localtime() + time.sleep(1) + + sbox.build(create_wc=False, empty=True) + + dumpfile_skeleton = open(os.path.join(os.path.dirname(sys.argv[0]), + 'svnadmin_tests_data', + 'skeleton_repos.dump')).read() + + load_dumpstream(sbox, dumpfile_skeleton, '--ignore-dates') + svntest.actions.run_and_verify_svnlook(['6\n'], + None, 'youngest', sbox.repo_dir) + for rev in range(1, 6): + exit_code, output, errput = svntest.main.run_svnlook('date', '-r', rev, + sbox.repo_dir) + if errput: + raise SVNUnexpectedStderr(errput) + rev_time = time.strptime(output[0].rstrip()[:19], '%Y-%m-%d %H:%M:%S') + if rev_time < start_time: + raise svntest.Failure("Revision time for r%d older than load start time\n" + " rev_time: %s\n" + " start_time: %s" + % (rev, str(rev_time), str(start_time))) + + +@SkipUnless(svntest.main.is_fs_type_fsfs) +def fsfs_hotcopy_old_with_id_changes(sbox): + "fsfs hotcopy old with node-id and copy-id changes" + + # Around trunk@1573728, running 'svnadmin hotcopy' for the + # --compatible-version=1.3 repository with certain node-id and copy-id + # changes ended with mismatching db/current in source and destination: + # + # source: "2 l 1" destination: "2 k 1", + # "3 l 2" "3 4 2" + # (and so on...) + # + # We test this case by creating a --compatible-version=1.3 repository + # and committing things that result in node-id and copy-id changes. + # After every commit, we hotcopy the repository to a new destination + # and check whether the source of the backup and the backup itself are + # identical. 
We also maintain a separate --incremental backup, which + # is updated and checked after every commit. + sbox.build(create_wc=True, minor_version=3) + + inc_backup_dir, inc_backup_url = sbox.add_repo_path('incremental-backup') + + # r1 = Initial greek tree sandbox. + backup_dir, backup_url = sbox.add_repo_path('backup-after-r1') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + "--incremental", + sbox.repo_dir, inc_backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir) + + # r2 = Add a new property. + sbox.simple_propset('foo', 'bar', 'A/mu') + sbox.simple_commit(message='r2') + + backup_dir, backup_url = sbox.add_repo_path('backup-after-r2') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + "--incremental", + sbox.repo_dir, inc_backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir) + + # r3 = Copy a file. + sbox.simple_copy('A/B/E', 'A/B/E1') + sbox.simple_commit(message='r3') + + backup_dir, backup_url = sbox.add_repo_path('backup-after-r3') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + "--incremental", + sbox.repo_dir, inc_backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir) + + # r4 = Remove an existing file ... 
+ sbox.simple_rm('A/D/gamma') + sbox.simple_commit(message='r4') + + backup_dir, backup_url = sbox.add_repo_path('backup-after-r4') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + "--incremental", + sbox.repo_dir, inc_backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir) + + # r5 = ...and replace it with a new file here. + sbox.simple_add_text("This is the replaced file.\n", 'A/D/gamma') + sbox.simple_commit(message='r5') + + backup_dir, backup_url = sbox.add_repo_path('backup-after-r5') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + "--incremental", + sbox.repo_dir, inc_backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir) + + # r6 = Add an entirely new file. + sbox.simple_add_text('This is an entirely new file.\n', 'A/C/mu1') + sbox.simple_commit(message='r6') + + backup_dir, backup_url = sbox.add_repo_path('backup-after-r6') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + "--incremental", + sbox.repo_dir, inc_backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir) + + # r7 = Change the content of the existing file (this changeset does + # not bump the next-id and copy-id counters in the repository). 
+ sbox.simple_append('A/mu', 'This is change in the existing file.\n') + sbox.simple_commit(message='r7') + + backup_dir, backup_url = sbox.add_repo_path('backup-after-r7') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + "--incremental", + sbox.repo_dir, inc_backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, backup_dir) + check_hotcopy_fsfs(sbox.repo_dir, inc_backup_dir) + + +@SkipUnless(svntest.main.fs_has_pack) +def verify_packed(sbox): + "verify packed with small shards" + + # Configure two files per shard to trigger packing. + sbox.build() + patch_format(sbox.repo_dir, shard_size=2) + + # Play with our greek tree. These changesets fall into two + # separate shards with r2 and r3 being in shard 1 ... + sbox.simple_append('iota', "Line.\n") + sbox.simple_append('A/D/gamma', "Another line.\n") + sbox.simple_commit(message='r2') + sbox.simple_propset('foo', 'bar', 'iota') + sbox.simple_propset('foo', 'baz', 'A/mu') + sbox.simple_commit(message='r3') + + # ...and r4 and r5 being in shard 2. + sbox.simple_rm('A/C') + sbox.simple_copy('A/B/E', 'A/B/E1') + sbox.simple_move('A/mu', 'A/B/mu') + sbox.simple_commit(message='r4') + sbox.simple_propdel('foo', 'A/B/mu') + sbox.simple_commit(message='r5') + + if svntest.main.is_fs_type_fsfs and svntest.main.options.fsfs_packing: + # With --fsfs-packing, everything is already packed and we + # can skip this part. 
+ pass + else: + expected_output = ["Packing revisions in shard 0...done.\n", + "Packing revisions in shard 1...done.\n", + "Packing revisions in shard 2...done.\n"] + svntest.actions.run_and_verify_svnadmin(expected_output, [], + "pack", sbox.repo_dir) + + if svntest.main.is_fs_log_addressing(): + expected_output = ["* Verifying metadata at revision 0 ...\n", + "* Verifying metadata at revision 2 ...\n", + "* Verifying metadata at revision 4 ...\n", + "* Verifying repository metadata ...\n", + "* Verified revision 0.\n", + "* Verified revision 1.\n", + "* Verified revision 2.\n", + "* Verified revision 3.\n", + "* Verified revision 4.\n", + "* Verified revision 5.\n"] + else: + expected_output = ["* Verifying repository metadata ...\n", + "* Verified revision 0.\n", + "* Verified revision 1.\n", + "* Verified revision 2.\n", + "* Verified revision 3.\n", + "* Verified revision 4.\n", + "* Verified revision 5.\n"] + + svntest.actions.run_and_verify_svnadmin(expected_output, [], + "verify", sbox.repo_dir) + +# Test that 'svnadmin freeze' is nestable. (For example, this ensures it +# won't take system-global locks, only repository-scoped ones.) +# +# This could be useful to easily freeze a small number of repositories at once. +# +# ### We don't actually test that freeze takes a write lock anywhere (not even +# ### in C tests.) +def freeze_freeze(sbox): + "svnadmin freeze svnadmin freeze (some-cmd)" + + sbox.build(create_wc=False, read_only=True) + second_repo_dir, _ = sbox.add_repo_path('backup') + svntest.actions.run_and_verify_svnadmin(None, [], "hotcopy", + sbox.repo_dir, second_repo_dir) + + if svntest.main.is_fs_type_fsx() or \ + (svntest.main.is_fs_type_fsfs() and \ + svntest.main.options.server_minor_version < 9): + # FSFS repositories created with --compatible-version=1.8 and less + # erroneously share the filesystem data (locks, shared transaction + # data, ...) between hotcopy source and destination. 
This is fixed + # for new FS formats, but in order to avoid a deadlock for old formats, + # we have to manually assign a new UUID for the hotcopy destination. + # As of trunk@1618024, the same applies to FSX repositories. + svntest.actions.run_and_verify_svnadmin([], None, + 'setuuid', second_repo_dir) + + svntest.actions.run_and_verify_svnadmin(None, [], + 'freeze', '--', sbox.repo_dir, + svntest.main.svnadmin_binary, 'freeze', '--', second_repo_dir, + sys.executable, '-c', 'True') + + arg_file = sbox.get_tempname() + svntest.main.file_write(arg_file, + "%s\n%s\n" % (sbox.repo_dir, second_repo_dir)) + + svntest.actions.run_and_verify_svnadmin(None, [], + 'freeze', '-F', arg_file, '--', + sys.executable, '-c', 'True') + +def verify_metadata_only(sbox): + "verify metadata only" + + sbox.build(create_wc = False) + exit_code, output, errput = svntest.main.run_svnadmin("verify", + sbox.repo_dir, + "--metadata-only") + if errput: + raise SVNUnexpectedStderr(errput) + + # Unfortunately, older formats won't test as thoroughly than newer ones + # resulting in different progress output. BDB will do a full check but + # not produce any output. 
+ if svntest.main.is_fs_log_addressing(): + svntest.verify.compare_and_display_lines( + "Unexpected error while running 'svnadmin verify'.", + 'STDOUT', ["* Verifying metadata at revision 0 ...\n", + "* Verifying repository metadata ...\n"], output) + elif svntest.main.fs_has_rep_sharing() \ + and not svntest.main.is_fs_type_bdb(): + svntest.verify.compare_and_display_lines( + "Unexpected error while running 'svnadmin verify'.", + 'STDOUT', ["* Verifying repository metadata ...\n"], output) + else: + svntest.verify.compare_and_display_lines( + "Unexpected error while running 'svnadmin verify'.", + 'STDOUT', [], output) + + +@Skip(svntest.main.is_fs_type_bdb) +def verify_quickly(sbox): + "verify quickly using metadata" + + sbox.build(create_wc = False) + rev_file = open(fsfs_file(sbox.repo_dir, 'revs', '1'), 'r+b') + + # set new contents + rev_file.seek(8) + rev_file.write('#') + rev_file.close() + + exit_code, output, errput = svntest.main.run_svnadmin("verify", + sbox.repo_dir, + "--metadata-only") + + # unfortunately, some backends needs to do more checks than other + # resulting in different progress output + if svntest.main.is_fs_log_addressing(): + exp_out = svntest.verify.RegexListOutput([]) + exp_err = svntest.verify.RegexListOutput(["svnadmin: E160004:.*"], False) + else: + exp_out = svntest.verify.RegexListOutput([]) + exp_err = svntest.verify.RegexListOutput([]) + + if (svntest.main.fs_has_rep_sharing()): + exp_out.insert(0, ".*Verifying.*metadata.*") + if svntest.verify.verify_outputs("Unexpected error while running 'svnadmin verify'.", + output, errput, exp_out, exp_err): + raise svntest.Failure + + # Don't leave a corrupt repository + svntest.main.safe_rmtree(sbox.repo_dir, True) + + +@SkipUnless(svntest.main.is_fs_type_fsfs) +@SkipUnless(svntest.main.fs_has_pack) +def fsfs_hotcopy_progress(sbox): + "hotcopy progress reporting" + + # Check how 'svnadmin hotcopy' reports progress for non-incremental + # and incremental scenarios. 
The progress output can be affected by + # the --fsfs-packing option, so skip the test if that is the case. + if svntest.main.options.fsfs_packing: + raise svntest.Skip + + # Create an empty repository, configure three files per shard. + sbox.build(create_wc=False, empty=True) + patch_format(sbox.repo_dir, shard_size=3) + + inc_backup_dir, inc_backup_url = sbox.add_repo_path('incremental-backup') + + # Nothing really exciting for the empty repository. + expected_full = [ + "* Copied revision 0.\n" + ] + expected_incremental = [ + "* Copied revision 0.\n", + ] + + backup_dir, backup_url = sbox.add_repo_path('backup-0') + svntest.actions.run_and_verify_svnadmin(expected_full, [], + 'hotcopy', + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(expected_incremental, [], + 'hotcopy', '--incremental', + sbox.repo_dir, inc_backup_dir) + + # Commit three revisions. After this step we have a full shard + # (r0, r1, r2) and the second shard (r3) with a single revision. + for i in range(3): + svntest.actions.run_and_verify_svn(None, [], 'mkdir', + '-m', svntest.main.make_log_msg(), + sbox.repo_url + '/dir-%i' % i) + expected_full = [ + "* Copied revision 0.\n", + "* Copied revision 1.\n", + "* Copied revision 2.\n", + "* Copied revision 3.\n", + ] + expected_incremental = [ + "* Copied revision 1.\n", + "* Copied revision 2.\n", + "* Copied revision 3.\n", + ] + + backup_dir, backup_url = sbox.add_repo_path('backup-1') + svntest.actions.run_and_verify_svnadmin(expected_full, [], + 'hotcopy', + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(expected_incremental, [], + 'hotcopy', '--incremental', + sbox.repo_dir, inc_backup_dir) + + # Pack everything (r3 is still unpacked) and hotcopy again. In this case, + # the --incremental output should track the incoming (r0, r1, r2) pack and + # should not mention r3, because it is already a part of the destination + # and is *not* a part of the incoming pack. 
+ svntest.actions.run_and_verify_svnadmin(None, [], 'pack', + sbox.repo_dir) + expected_full = [ + "* Copied revisions from 0 to 2.\n", + "* Copied revision 3.\n", + ] + expected_incremental = [ + "* Copied revisions from 0 to 2.\n", + ] + + backup_dir, backup_url = sbox.add_repo_path('backup-2') + svntest.actions.run_and_verify_svnadmin(expected_full, [], + 'hotcopy', + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(expected_incremental, [], + 'hotcopy', '--incremental', + sbox.repo_dir, inc_backup_dir) + + # Fill the second shard, pack again, commit several unpacked revisions + # on top of it. Rerun the hotcopy and check the progress output. + for i in range(4, 6): + svntest.actions.run_and_verify_svn(None, [], 'mkdir', + '-m', svntest.main.make_log_msg(), + sbox.repo_url + '/dir-%i' % i) + + svntest.actions.run_and_verify_svnadmin(None, [], 'pack', + sbox.repo_dir) + + for i in range(6, 8): + svntest.actions.run_and_verify_svn(None, [], 'mkdir', + '-m', svntest.main.make_log_msg(), + sbox.repo_url + '/dir-%i' % i) + expected_full = [ + "* Copied revisions from 0 to 2.\n", + "* Copied revisions from 3 to 5.\n", + "* Copied revision 6.\n", + "* Copied revision 7.\n", + ] + expected_incremental = [ + "* Copied revisions from 3 to 5.\n", + "* Copied revision 6.\n", + "* Copied revision 7.\n", + ] + + backup_dir, backup_url = sbox.add_repo_path('backup-3') + svntest.actions.run_and_verify_svnadmin(expected_full, [], + 'hotcopy', + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(expected_incremental, [], + 'hotcopy', '--incremental', + sbox.repo_dir, inc_backup_dir) + + +@SkipUnless(svntest.main.is_fs_type_fsfs) +def fsfs_hotcopy_progress_with_revprop_changes(sbox): + "incremental hotcopy progress with changed revprops" + + # The progress output can be affected by the --fsfs-packing + # option, so skip the test if that is the case. 
+ if svntest.main.options.fsfs_packing: + raise svntest.Skip + + # Create an empty repository, commit several revisions and hotcopy it. + sbox.build(create_wc=False, empty=True) + + for i in range(6): + svntest.actions.run_and_verify_svn(None, [], 'mkdir', + '-m', svntest.main.make_log_msg(), + sbox.repo_url + '/dir-%i' % i) + expected_output = [ + "* Copied revision 0.\n", + "* Copied revision 1.\n", + "* Copied revision 2.\n", + "* Copied revision 3.\n", + "* Copied revision 4.\n", + "* Copied revision 5.\n", + "* Copied revision 6.\n", + ] + + backup_dir, backup_url = sbox.add_repo_path('backup') + svntest.actions.run_and_verify_svnadmin(expected_output, [], + 'hotcopy', + sbox.repo_dir, backup_dir) + + # Amend a few log messages in the source, run the --incremental hotcopy. + # The progress output should only mention the corresponding revisions. + revprop_file = sbox.get_tempname() + svntest.main.file_write(revprop_file, "Modified log message.") + + for i in [1, 3, 6]: + svntest.actions.run_and_verify_svnadmin(None, [], + 'setrevprop', + sbox.repo_dir, '-r', i, + 'svn:log', revprop_file) + expected_output = [ + "* Copied revision 1.\n", + "* Copied revision 3.\n", + "* Copied revision 6.\n", + ] + svntest.actions.run_and_verify_svnadmin(expected_output, [], + 'hotcopy', '--incremental', + sbox.repo_dir, backup_dir) + + +@SkipUnless(svntest.main.is_fs_type_fsfs) +def fsfs_hotcopy_progress_old(sbox): + "hotcopy --compatible-version=1.3 progress" + + sbox.build(create_wc=False, empty=True, minor_version=3) + + inc_backup_dir, inc_backup_url = sbox.add_repo_path('incremental-backup') + + # Nothing really exciting for the empty repository. 
+ expected_full = [ + "* Copied revision 0.\n" + ] + expected_incremental = [ + "* Copied revision 0.\n", + ] + + backup_dir, backup_url = sbox.add_repo_path('backup-0') + svntest.actions.run_and_verify_svnadmin(expected_full, [], + 'hotcopy', + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(expected_incremental, [], + 'hotcopy', '--incremental', + sbox.repo_dir, inc_backup_dir) + + # Commit three revisions, hotcopy and check the progress output. + for i in range(3): + svntest.actions.run_and_verify_svn(None, [], 'mkdir', + '-m', svntest.main.make_log_msg(), + sbox.repo_url + '/dir-%i' % i) + + expected_full = [ + "* Copied revision 0.\n", + "* Copied revision 1.\n", + "* Copied revision 2.\n", + "* Copied revision 3.\n", + ] + expected_incremental = [ + "* Copied revision 1.\n", + "* Copied revision 2.\n", + "* Copied revision 3.\n", + ] + + backup_dir, backup_url = sbox.add_repo_path('backup-1') + svntest.actions.run_and_verify_svnadmin(expected_full, [], + 'hotcopy', + sbox.repo_dir, backup_dir) + svntest.actions.run_and_verify_svnadmin(expected_incremental, [], + 'hotcopy', '--incremental', + sbox.repo_dir, inc_backup_dir) + + +@SkipUnless(svntest.main.fs_has_unique_freeze) +def freeze_same_uuid(sbox): + "freeze multiple repositories with same UUID" + + sbox.build(create_wc=False) + + first_repo_dir, _ = sbox.add_repo_path('first') + second_repo_dir, _ = sbox.add_repo_path('second') + + # Test that 'svnadmin freeze A (svnadmin freeze B)' does not deadlock for + # new FSFS formats, even if 'A' and 'B' share the same UUID. Create two + # repositories by loading the same dump file, ... 
+ svntest.main.create_repos(first_repo_dir) + svntest.main.create_repos(second_repo_dir) + + dump_path = os.path.join(os.path.dirname(sys.argv[0]), + 'svnadmin_tests_data', + 'skeleton_repos.dump') + dump_contents = open(dump_path, 'rb').readlines() + svntest.actions.run_and_verify_load(first_repo_dir, dump_contents) + svntest.actions.run_and_verify_load(second_repo_dir, dump_contents) + + # ...and execute the 'svnadmin freeze -F' command. + arg_file = sbox.get_tempname() + svntest.main.file_write(arg_file, + "%s\n%s\n" % (first_repo_dir, second_repo_dir)) + + svntest.actions.run_and_verify_svnadmin(None, None, + 'freeze', '-F', arg_file, '--', + sys.executable, '-c', 'True') + + +@Skip(svntest.main.is_fs_type_fsx) +def upgrade(sbox): + "upgrade --compatible-version=1.3" + + sbox.build(create_wc=False, minor_version=3) + svntest.actions.run_and_verify_svnadmin(None, [], "upgrade", + sbox.repo_dir) + # Does the repository work after upgrade? + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'mkdir', + '-m', svntest.main.make_log_msg(), + sbox.repo_url + '/dir') + +def load_txdelta(sbox): + "exercising svn_txdelta_target on BDB" + + sbox.build(empty=True) + + # This dumpfile produced a BDB repository that generated checksum + # mismatches on read caused by the improper handling of + # svn_txdelta_target ops. The bug was fixed by r1640832. + + dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), + 'svnadmin_tests_data', + 'load_txdelta.dump.gz') + dumpfile = gzip.open(dumpfile_location).read() + + load_dumpstream(sbox, dumpfile) + + # Verify would fail with a checksum mismatch: + # * Error verifying revision 14. 
+ # svnadmin: E200014: MD5 checksum mismatch on representation 'r': + # expected: 5182e8876ed894dc7fe28f6ff5b2fee6 + # actual: 5121f82875508863ad70daa8244e6947 + + exit_code, output, errput = svntest.main.run_svnadmin("verify", sbox.repo_dir) + if errput: + raise SVNUnexpectedStderr(errput) + if svntest.verify.verify_outputs( + "Output of 'svnadmin verify' is unexpected.", None, output, None, + ".*Verified revision *"): + raise svntest.Failure + +@Issues(4563) +def load_no_svndate_r0(sbox): + "load without svn:date on r0" + + sbox.build(create_wc=False, empty=True) + + # svn:date exists + svntest.actions.run_and_verify_svnlook([' svn:date\n'], [], + 'proplist', '--revprop', '-r0', + sbox.repo_dir) + + dump_old = ["SVN-fs-dump-format-version: 2\n", "\n", + "UUID: bf52886d-358d-4493-a414-944a6e5ad4f5\n", "\n", + "Revision-number: 0\n", + "Prop-content-length: 10\n", + "Content-length: 10\n", "\n", + "PROPS-END\n", "\n"] + svntest.actions.run_and_verify_load(sbox.repo_dir, dump_old) + + # svn:date should have been removed + svntest.actions.run_and_verify_svnlook([], [], + 'proplist', '--revprop', '-r0', + sbox.repo_dir) + +# This is only supported for FSFS +# The port to FSX is still pending, BDB won't support it. +@SkipUnless(svntest.main.is_fs_type_fsfs) +def hotcopy_read_only(sbox): + "'svnadmin hotcopy' a read-only source repository" + sbox.build() + svntest.main.chmod_tree(sbox.repo_dir, 0, 0222) + + backup_dir, backup_url = sbox.add_repo_path('backup') + exit_code, output, errput = svntest.main.run_svnadmin("hotcopy", + sbox.repo_dir, + backup_dir) + + # r/o repos are hard to clean up. Make it writable again. 
+ svntest.main.chmod_tree(sbox.repo_dir, 0222, 0222) + if errput: + logger.warn("Error: hotcopy failed") + raise SVNUnexpectedStderr(errput) + +@XFail(svntest.main.is_fs_type_fsx) +@Issue(4598) +def dump_no_op_change(sbox): + "svnadmin dump with no-op changes" + + sbox.build(create_wc=False, empty=True) + empty_file = sbox.get_tempname() + svntest.main.file_write(empty_file, '') + + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, + '-m', svntest.main.make_log_msg(), + 'put', empty_file, 'bar') + # Commit a no-op change. + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', sbox.repo_url, + '-m', svntest.main.make_log_msg(), + 'put', empty_file, 'bar') + # Dump and load the repository. + _, dump, _ = svntest.actions.run_and_verify_svnadmin(None, [], + 'dump', '-q', + sbox.repo_dir) + sbox2 = sbox.clone_dependent() + sbox2.build(create_wc=False, empty=True) + load_and_verify_dumpstream(sbox2, None, [], None, False, dump) + + # We expect svn log -v to yield identical results for both original and + # reconstructed repositories. This used to fail as described in the + # Issue 4598 (https://issues.apache.org/jira/browse/SVN-4598), at least + # around r1706415. 
+ # + # Test svn log -v for r2: + _, expected, _ = svntest.actions.run_and_verify_svn(None, [], 'log', '-v', + '-r2', sbox.repo_url) + svntest.actions.run_and_verify_svn(expected, [], 'log', '-v', + '-r2', sbox2.repo_url) + # Test svn log -v for /bar: + _, expected, _ = svntest.actions.run_and_verify_svn(None, [], 'log', '-v', + sbox.repo_url + '/bar') + svntest.actions.run_and_verify_svn(expected, [], 'log', '-v', + sbox2.repo_url + '/bar') + ######################################################################## # Run the tests @@ -1916,8 +3145,27 @@ test_list = [ None, locking, mergeinfo_race, recover_old_empty, + verify_keep_going, + verify_keep_going_quiet, + verify_invalid_path_changes, + verify_denormalized_names, fsfs_recover_old_non_empty, fsfs_hotcopy_old_non_empty, + load_ignore_dates, + fsfs_hotcopy_old_with_id_changes, + verify_packed, + freeze_freeze, + verify_metadata_only, + verify_quickly, + fsfs_hotcopy_progress, + fsfs_hotcopy_progress_with_revprop_changes, + fsfs_hotcopy_progress_old, + freeze_same_uuid, + upgrade, + load_txdelta, + load_no_svndate_r0, + hotcopy_read_only, + dump_no_op_change, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/svnadmin_tests_data/load_txdelta.dump.gz b/subversion/tests/cmdline/svnadmin_tests_data/load_txdelta.dump.gz Binary files differnew file mode 100644 index 0000000..900d357 --- /dev/null +++ b/subversion/tests/cmdline/svnadmin_tests_data/load_txdelta.dump.gz diff --git a/subversion/tests/cmdline/svnadmin_tests_data/normalization_check.dump b/subversion/tests/cmdline/svnadmin_tests_data/normalization_check.dump new file mode 100644 index 0000000..32ae006 --- /dev/null +++ b/subversion/tests/cmdline/svnadmin_tests_data/normalization_check.dump @@ -0,0 +1,259 @@ +SVN-fs-dump-format-version: 2 + +UUID: bf695de5-cd61-4024-8cb3-a12d299c7c62 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2013-11-24T02:29:36.942478Z +PROPS-END + +Revision-number: 1 
+Prop-content-length: 126 +Content-length: 126 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 +2013-11-24T02:34:17.982927Z +K 7 +svn:log +V 24 +Denormalized tree import +PROPS-END + +Node-path: A +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/EÌ +Node-kind: dir +Node-action: add +Prop-content-length: 47 +Content-length: 47 + +K 13 +svn:mergeinfo +V 12 +/Q/Ã¥lpha:69 +PROPS-END + + +Node-path: A/EÌ/Ã¥lpha +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 0 +Text-content-md5: d41d8cd98f00b204e9800998ecf8427e +Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709 +Content-length: 10 + +PROPS-END + + +Node-path: A/iÌ‚öta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 0 +Text-content-md5: d41d8cd98f00b204e9800998ecf8427e +Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709 +Content-length: 10 + +PROPS-END + + +Revision-number: 2 +Prop-content-length: 128 +Content-length: 128 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 +2013-11-24T02:34:39.316466Z +K 7 +svn:log +V 26 +Modified denormalized file +PROPS-END + +Node-path: A/iÌ‚öta +Node-kind: file +Node-action: change +Text-content-length: 9 +Text-content-md5: d2508118d0d39e198d1129d87d692d59 +Text-content-sha1: e2fb5f2139d086ded2cb600d5a91a196e76bf020 +Content-length: 9 + +modified + + +Revision-number: 3 +Prop-content-length: 126 +Content-length: 126 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 +2013-11-24T02:35:09.976189Z +K 7 +svn:log +V 24 +Modified normalized file +PROPS-END + +Node-path: A/EÌ/Ã¥lpha +Node-kind: file +Node-action: change +Text-content-length: 9 +Text-content-md5: d2508118d0d39e198d1129d87d692d59 +Text-content-sha1: e2fb5f2139d086ded2cb600d5a91a196e76bf020 +Content-length: 9 + +modified + + +Revision-number: 4 +Prop-content-length: 124 +Content-length: 124 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 
+2013-11-24T02:36:11.674695Z +K 7 +svn:log +V 22 +Created name collision +PROPS-END + +Node-path: A/EÌ/aÌŠlpha +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 0 +Text-content-md5: d41d8cd98f00b204e9800998ecf8427e +Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709 +Content-length: 10 + +PROPS-END + + +Revision-number: 5 +Prop-content-length: 125 +Content-length: 125 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 +2013-11-24T02:36:52.182891Z +K 7 +svn:log +V 23 +Modified colliding file +PROPS-END + +Node-path: A/EÌ/aÌŠlpha +Node-kind: file +Node-action: change +Text-content-length: 9 +Text-content-md5: d2508118d0d39e198d1129d87d692d59 +Text-content-sha1: e2fb5f2139d086ded2cb600d5a91a196e76bf020 +Content-length: 9 + +modified + + +Revision-number: 6 +Prop-content-length: 127 +Content-length: 127 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 +2013-11-24T18:04:43.128158Z +K 7 +svn:log +V 25 +Update mergeinfo on A/EÌ +PROPS-END + +Node-path: A/EÌ +Node-kind: dir +Node-action: change +Prop-content-length: 61 +Content-length: 61 + +K 13 +svn:mergeinfo +V 26 +/Q/aÌŠlpha:71 +/Q/Ã¥lpha:69 +PROPS-END + + +Revision-number: 7 +Prop-content-length: 130 +Content-length: 130 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 +2013-11-24T18:04:51.128158Z +K 7 +svn:log +V 25 +Update mergeinfo on A/EÌ +PROPS-END + +Node-path: A/EÌ +Node-kind: dir +Node-action: change +Prop-content-length: 64 +Content-length: 64 + +K 13 +svn:mergeinfo +V 29 +/Q/aÌŠlpha:71 +/Q/Ã¥lpha:69,71 +PROPS-END + + diff --git a/subversion/tests/cmdline/svnauthz_tests.py b/subversion/tests/cmdline/svnauthz_tests.py index fc93b23..7c1396c 100755 --- a/subversion/tests/cmdline/svnauthz_tests.py +++ b/subversion/tests/cmdline/svnauthz_tests.py @@ -92,20 +92,20 @@ def svnauthz_validate_file_test(sbox): svntest.main.file_write(authz_path, authz_content) # Valid authz file - svntest.actions.run_and_verify_svnauthz("Valid authz file", None, None, + 
svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "validate", authz_path) # Invalid authz file, expect exit code 1, we found the file loaded it # but found an error svntest.main.file_write(authz_path, 'x\n') - svntest.actions.run_and_verify_svnauthz("Invalid authz file", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 1, False, "validate", authz_path) - # Non-existant authz file + # Non-existent authz file # exit code 2, operational error since we can't test the file. os.close(authz_fd) os.remove(authz_path) - svntest.actions.run_and_verify_svnauthz("Non-existant authz file", None, + svntest.actions.run_and_verify_svnauthz(None, None, 2, False, "validate", authz_path) @@ -128,24 +128,23 @@ def svnauthz_validate_repo_test(sbox): expected_status.add({ 'A/authz' : Item(status=' ', wc_rev=2), }) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) # Valid authz url (file stored in repo) authz_url = repo_url + '/A/authz' - svntest.actions.run_and_verify_svnauthz("Valid authz url", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "validate", authz_url) # Invalid authz url (again use the iota file in the repo) # expect exit code 1, we found the file loaded it but found an error iota_url = repo_url + '/iota' - svntest.actions.run_and_verify_svnauthz("Invalid authz url", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 1, False, "validate", iota_url) - # Non-existant authz url + # Non-existent authz url # exit code 2, operational error since we can't test the file. 
- svntest.actions.run_and_verify_svnauthz("Non-existant authz file", None, + svntest.actions.run_and_verify_svnauthz(None, None, 2, False, "validate", repo_url + "/zilch") @@ -174,9 +173,8 @@ def svnauthz_validate_txn_test(sbox): expected_status.add({ 'A/authz' : Item(status=' ', wc_rev=2), }) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) expected_data = ['Exit 0\n'] verify_logfile(logfilepath, expected_data) @@ -184,9 +182,8 @@ def svnauthz_validate_txn_test(sbox): svntest.main.file_append(authz_path, 'x') expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')}) expected_status.tweak('A/authz', status=' ', wc_rev=3) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) expected_data = svntest.verify.RegexOutput(".*?Error parsing authz file: '.*?'", match_all=False) verify_logfile(logfilepath, expected_data, delete_log=False) @@ -201,7 +198,7 @@ def svnauthz_validate_txn_test(sbox): svntest.main.file_append(authz_path, 'x') expected_status.tweak('A/authz', status=' ', wc_rev=4) if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): + expected_status): raise svntest.Failure expected_data = svntest.verify.ExpectedOutput("Exit 2\n", match_all=False) verify_logfile(logfilepath, expected_data) @@ -220,38 +217,35 @@ def svnauthz_accessof_file_test(sbox): # Anonymous access with no path, and no repository should be rw # since it returns the highest level of access granted anywhere. # So /bios being rw for everyone means this will be rw. 
- svntest.actions.run_and_verify_svnauthz("Anonymous access", ["rw\n"], None, + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_path) # Anonymous access on /jokes should be r, no repo so won't match # the slapstick:/jokes section. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path", - ["r\n"], None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof", authz_path, "--path", "/jokes") # Anonymous access on /jokes on slapstick repo should be no - svntest.actions.run_and_verify_svnauthz("Anonymous access on path with repo", - ["no\n"], None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_path, "--path", "/jokes", "--repository", "slapstick") # User access with no path, and no repository should be rw # since it returns the h ighest level of access anywhere. # So /bios being rw for everyone means this will be rw. - svntest.actions.run_and_verify_svnauthz("User access", ["rw\n"], None, + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_path, "--username", "groucho") # User groucho specified on /jokes with no repo, will not match any of the # repo specific sections, so is r since everyone has read access. 
- svntest.actions.run_and_verify_svnauthz("User access on path", ["r\n"], None, + svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho") # User groucho specified on /jokes with the repo comedy will be rw - svntest.actions.run_and_verify_svnauthz("User access on path with repo", - ["rw\n"], None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", "--repository", "comedy") @@ -282,45 +276,42 @@ def svnauthz_accessof_repo_test(sbox): 'A/authz' : Item(status=' ', wc_rev=2), }) if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): + expected_status): raise svntest.Failure # Anonymous access with no path, and no repository should be rw # since it returns the highest level of access granted anywhere. # So /bios being rw for everyone means this will be rw. authz_url = repo_url + "/A/authz" - svntest.actions.run_and_verify_svnauthz("Anonymous access", ["rw\n"], None, + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_url) # Anonymous access on /jokes should be r, no repo so won't match # the slapstick:/jokes section. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path", - ["r\n"], None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof", authz_url, "--path", "/jokes") # Anonymous access on /jokes on slapstick repo should be no - svntest.actions.run_and_verify_svnauthz("Anonymous access on path with repo", - ["no\n"], None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_url, "--path", "/jokes", "--repository", "slapstick") # User access with no path, and no repository should be rw # since it returns the h ighest level of access anywhere. 
# So /bios being rw for everyone means this will be rw. - svntest.actions.run_and_verify_svnauthz("User access", ["rw\n"], None, + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_url, "--username", "groucho") # User groucho specified on /jokes with no repo, will not match any of the # repo specific sections, so is r since everyone has read access. - svntest.actions.run_and_verify_svnauthz("User access on path", ["r\n"], None, + svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho") # User groucho specified on /jokes with the repo comedy will be rw - svntest.actions.run_and_verify_svnauthz("User access on path with repo", - ["rw\n"], None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", "--repository", "comedy") @@ -341,29 +332,26 @@ def svnauthz_accessof_groups_file_test(sbox): # Anonymous access with no path, and no repository should be no # since it returns the highest level of access granted anywhere. - svntest.actions.run_and_verify_svnauthz("Anonymous access", ["no\n"], None, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_path, "--groups-file", groups_path) # User stafford (@musicians) access with no path, and no repository should # be no since it returns the highest level of access granted anywhere. - svntest.actions.run_and_verify_svnauthz("Group 1 access", - ["rw\n"], None, + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_path, "--groups-file", groups_path, "--username", "stafford") # User groucho (@comedians) access with no path, and no repository should # be no since it returns the highest level of access granted anywhere. 
- svntest.actions.run_and_verify_svnauthz("Group 2 access", - ["no\n"], None, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_path, "--groups-file", groups_path, "--username", "groucho") # Anonymous access specified on /jokes with the repo comedy will be no. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path with repo", - ["no\n"], None, 0, False, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_path, "--groups-file", groups_path, "--path", "jokes", @@ -371,8 +359,7 @@ def svnauthz_accessof_groups_file_test(sbox): # User stafford (@musicians) specified on /jokes with the repo comedy # will be no. - svntest.actions.run_and_verify_svnauthz("Group 1 access on path with repo", - ["no\n"], None, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_path, "--groups-file", groups_path, "--path", "jokes", @@ -381,8 +368,7 @@ def svnauthz_accessof_groups_file_test(sbox): # User groucho (@comedians) specified on /jokes with the repo # comedy will be r. - svntest.actions.run_and_verify_svnauthz("Group 2 access on path with repo", - ["r\n"], None, + svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof", authz_path, "--groups-file", groups_path, "--path", "jokes", @@ -423,37 +409,33 @@ def svnauthz_accessof_groups_repo_test(sbox): 'A/groups' : Item(status=' ', wc_rev=2), }) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) # Anonymous access with no path, and no repository should be no # since it returns the highest level of access granted anywhere. 
authz_url = repo_url + "/A/authz" groups_url = repo_url + "/A/groups" - svntest.actions.run_and_verify_svnauthz("Anonymous access", ["no\n"], None, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_url, "--groups-file", groups_url) # User stafford (@musicians) access with no path, and no repository should # be no since it returns the highest level of access granted anywhere. - svntest.actions.run_and_verify_svnauthz("Group 1 access", - ["rw\n"], None, + svntest.actions.run_and_verify_svnauthz(["rw\n"], None, 0, False, "accessof", authz_url, "--groups-file", groups_url, "--username", "stafford") # User groucho (@comedians) access with no path, and no repository should # be no since it returns the highest level of access granted anywhere. - svntest.actions.run_and_verify_svnauthz("Group 2 access", - ["no\n"], None, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_url, "--groups-file", groups_url, "--username", "groucho") # Anonymous access specified on /jokes with the repo comedy will be no. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path with repo", - ["no\n"], None, 0, False, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_url, "--groups-file", groups_url, "--path", "jokes", @@ -461,8 +443,7 @@ def svnauthz_accessof_groups_repo_test(sbox): # User stafford (@musicians) specified on /jokes with the repo comedy # will be no. - svntest.actions.run_and_verify_svnauthz("Group 1 access on path with repo", - ["no\n"], None, + svntest.actions.run_and_verify_svnauthz(["no\n"], None, 0, False, "accessof", authz_url, "--groups-file", groups_url, "--path", "jokes", @@ -471,8 +452,7 @@ def svnauthz_accessof_groups_repo_test(sbox): # User groucho (@comedians) specified on /jokes with the repo # comedy will be r. 
- svntest.actions.run_and_verify_svnauthz("Group 2 access on path with repo", - ["r\n"], None, + svntest.actions.run_and_verify_svnauthz(["r\n"], None, 0, False, "accessof", authz_url, "--groups-file", groups_url, "--path", "jokes", @@ -495,7 +475,7 @@ def svnauthz_accessof_is_file_test(sbox): expected_output = svntest.verify.RegexOutput( ".*'x' is not a valid argument for --is", match_all=False ) - svntest.actions.run_and_verify_svnauthz("--is x fails", None, + svntest.actions.run_and_verify_svnauthz(None, expected_output, 2, False, "accessof", authz_path, "--is", "x") @@ -503,52 +483,46 @@ def svnauthz_accessof_is_file_test(sbox): # since it returns the highest level of access granted anywhere. # So /bios being rw for everyone means this will be rw. # Test --is rw returns 0. - svntest.actions.run_and_verify_svnauthz("Anonymous access --is rw", None, + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_path, "--is", "rw") # Test --is r returns 3. - svntest.actions.run_and_verify_svnauthz("Anonymous access --is r", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--is", "r") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("Anonymous access --is no", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--is", "no") # Anonymous access on /jokes should be r, no repo so won't match # the slapstick:/jokes section. # Test --is r returns 0. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path --is r", - None, None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_path, "--path", "/jokes", "--is", "r") # Test --is rw returns 3. 
- svntest.actions.run_and_verify_svnauthz("Anonymous access on path --is r", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--is", "rw") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path --is r", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--is", "no") # Anonymous access on /jokes on slapstick repo should be no # Test --is no returns 0. - svntest.actions.run_and_verify_svnauthz("Anon access on path w/ repo --is no", - None, None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_path, "--path", "/jokes", "--repository", "slapstick", "--is", "no") # Test --is rw returns 3. - svntest.actions.run_and_verify_svnauthz("Anon access on path w/ repo --is no", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--repository", "slapstick", "--is", "rw") # Test --is r returns 3. - svntest.actions.run_and_verify_svnauthz("Anon access on path w/ repo --is no", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--repository", "slapstick", "--is", "r") @@ -557,17 +531,17 @@ def svnauthz_accessof_is_file_test(sbox): # since it returns the h ighest level of access anywhere. # So /bios being rw for everyone means this will be rw. # Test --is rw returns 0. - svntest.actions.run_and_verify_svnauthz("User access --is rw", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_path, "--username", "groucho", "--is", "rw") # Test --is r returns 3. 
- svntest.actions.run_and_verify_svnauthz("User access --is r", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--username", "groucho", "--is", "r") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("User access --is no", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--username", "groucho", "--is", "no") @@ -575,18 +549,18 @@ def svnauthz_accessof_is_file_test(sbox): # User groucho specified on /jokes with no repo, will not match any of the # repo specific sections, so is r since everyone has read access. # Test --is r returns 0. - svntest.actions.run_and_verify_svnauthz("User access on path --is r", None, + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", "--is", "r") # Test --is rw returns 3. - svntest.actions.run_and_verify_svnauthz("User access on path --is rw", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", "--is", "rw") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("User access on path --is no", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", @@ -594,22 +568,19 @@ def svnauthz_accessof_is_file_test(sbox): # User groucho specified on /jokes with the repo comedy will be rw # Test --is rw returns 0. - svntest.actions.run_and_verify_svnauthz("User access on path w/ repo --is rw", - None, None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", "rw") # Test --is r returns 3. 
- svntest.actions.run_and_verify_svnauthz("User access on path w/ repo --is r", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", "r") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("User access on path w/ repo --is no", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", @@ -622,8 +593,7 @@ def svnauthz_accessof_is_file_test(sbox): ".*Error while parsing config file:", match_all=False ) - svntest.actions.run_and_verify_svnauthz("--is with invalid authz file", - None, expected_out, 1, False, + svntest.actions.run_and_verify_svnauthz(None, expected_out, 1, False, "accessof", authz_path, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", @@ -654,9 +624,8 @@ def svnauthz_accessof_is_repo_test(sbox): expected_status.add({ 'A/authz' : Item(status=' ', wc_rev=2), }) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) # Test an invalid --is option, should get an error message and exit code # of 2. @@ -664,7 +633,7 @@ def svnauthz_accessof_is_repo_test(sbox): expected_output = svntest.verify.RegexOutput( ".*'x' is not a valid argument for --is", match_all=False ) - svntest.actions.run_and_verify_svnauthz("--is x fails", None, + svntest.actions.run_and_verify_svnauthz(None, expected_output, 2, False, "accessof", authz_url, "--is", "x") @@ -672,52 +641,46 @@ def svnauthz_accessof_is_repo_test(sbox): # since it returns the highest level of access granted anywhere. # So /bios being rw for everyone means this will be rw. # Test --is rw returns 0. 
- svntest.actions.run_and_verify_svnauthz("Anonymous access --is rw", None, + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_url, "--is", "rw") # Test --is r returns 3. - svntest.actions.run_and_verify_svnauthz("Anonymous access --is r", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--is", "r") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("Anonymous access --is no", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--is", "no") # Anonymous access on /jokes should be r, no repo so won't match # the slapstick:/jokes section. # Test --is r returns 0. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path --is r", - None, None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_url, "--path", "/jokes", "--is", "r") # Test --is rw returns 3. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path --is r", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--is", "rw") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("Anonymous access on path --is r", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--is", "no") # Anonymous access on /jokes on slapstick repo should be no # Test --is no returns 0. - svntest.actions.run_and_verify_svnauthz("Anon access on path w/ repo --is no", - None, None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_url, "--path", "/jokes", "--repository", "slapstick", "--is", "no") # Test --is rw returns 3. 
- svntest.actions.run_and_verify_svnauthz("Anon access on path w/ repo --is no", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--repository", "slapstick", "--is", "rw") # Test --is r returns 3. - svntest.actions.run_and_verify_svnauthz("Anon access on path w/ repo --is no", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--repository", "slapstick", "--is", "r") @@ -726,17 +689,17 @@ def svnauthz_accessof_is_repo_test(sbox): # since it returns the highest level of access anywhere. # So /bios being rw for everyone means this will be rw. # Test --is rw returns 0. - svntest.actions.run_and_verify_svnauthz("User access --is rw", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_url, "--username", "groucho", "--is", "rw") # Test --is r returns 3. - svntest.actions.run_and_verify_svnauthz("User access --is r", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--username", "groucho", "--is", "r") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("User access --is no", None, None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--username", "groucho", "--is", "no") @@ -744,18 +707,18 @@ def svnauthz_accessof_is_repo_test(sbox): # User groucho specified on /jokes with no repo, will not match any of the # repo specific sections, so is r since everyone has read access. # Test --is r returns 0. - svntest.actions.run_and_verify_svnauthz("User access on path --is r", None, + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", "--is", "r") # Test --is rw returns 3. 
- svntest.actions.run_and_verify_svnauthz("User access on path --is rw", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", "--is", "rw") # Test --is no returns 3. - svntest.actions.run_and_verify_svnauthz("User access on path --is no", None, + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", @@ -763,22 +726,19 @@ def svnauthz_accessof_is_repo_test(sbox): # User groucho specified on /jokes with the repo comedy will be rw # Test --is rw returns 0. - svntest.actions.run_and_verify_svnauthz("User access on path w/ repo --is rw", - None, None, 0, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 0, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", "rw") # Test --is r returns 3. - svntest.actions.run_and_verify_svnauthz("User access on path w/ repo --is r", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", "r") # Test --is no returns 3. 
- svntest.actions.run_and_verify_svnauthz("User access on path w/ repo --is no", - None, None, 3, False, "accessof", + svntest.actions.run_and_verify_svnauthz(None, None, 3, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", @@ -788,17 +748,15 @@ def svnauthz_accessof_is_repo_test(sbox): svntest.main.file_append(authz_path, "x\n") expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')}) expected_status.tweak('A/authz', wc_rev=3) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) # Check that --is returns 1 when the syntax is invalid with a url. expected_out = svntest.verify.RegexOutput( ".*Error while parsing config file:", match_all=False ) - svntest.actions.run_and_verify_svnauthz("--is with invalid authz url", - None, expected_out, 1, False, + svntest.actions.run_and_verify_svnauthz(None, expected_out, 1, False, "accessof", authz_url, "--path", "/jokes", "--username", "groucho", "--repository", "comedy", "--is", @@ -833,9 +791,8 @@ def svnauthz_accessof_txn_test(sbox): expected_status.add({ 'A/authz' : Item(status=' ', wc_rev=2), }) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) expected_data = ['Exit 0\n'] verify_logfile(logfilepath, expected_data) @@ -848,33 +805,30 @@ def svnauthz_accessof_txn_test(sbox): expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')}) expected_status.tweak('A/authz', status=' ', wc_rev=3) svntest.main.file_append(authz_path, "groucho = r\n") - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + 
expected_status) expected_data = svntest.verify.ExpectedOutput('Exit 3\n', match_all=False) verify_logfile(logfilepath, expected_data) - # break the authz file with a non-existant group and check for an exit 1. + # break the authz file with a non-existent group and check for an exit 1. expected_status.tweak('A/authz', status=' ', wc_rev=4) svntest.main.file_append(authz_path, "@friends = rw\n") - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) expected_data = svntest.verify.ExpectedOutput('Exit 1\n', match_all=False) verify_logfile(logfilepath, expected_data) - # break the authz file with a non-existant gropu and check for an exit 2. + # break the authz file with a non-existent gropu and check for an exit 2. expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Deleting')}) expected_status.remove('A/authz') svntest.main.run_svn(None, 'rm', authz_path) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) expected_data = svntest.verify.ExpectedOutput('Exit 2\n', match_all=False) verify_logfile(logfilepath, expected_data) def svnauthz_compat_mode_file_test(sbox): - "test 'svnauthz-validate' compatability mode file" + "test 'svnauthz-validate' compatibility mode file" # Create an authz file @@ -883,30 +837,28 @@ def svnauthz_compat_mode_file_test(sbox): svntest.main.file_write(authz_path, authz_content) # Check a valid file. - svntest.actions.run_and_verify_svnauthz("svnauthz-validate on file", - None, None, 0, True, + svntest.actions.run_and_verify_svnauthz(None, None, 0, True, authz_path) # Check an invalid file. 
svntest.main.file_append(authz_path, "x\n") - svntest.actions.run_and_verify_svnauthz("svnauthz-validate on invalid file", - None, None, 1, True, + svntest.actions.run_and_verify_svnauthz(None, None, 1, True, authz_path) # Remove the file. os.close(authz_fd) os.remove(authz_path) - # Check a non-existant file. + # Check a non-existent file. svntest.actions.run_and_verify_svnauthz( - "svnauthz-validate on non-existant file", None, None, 2, True, + None, None, 2, True, authz_path ) @SkipUnless(svntest.main.is_ra_type_file) def svnauthz_compat_mode_repo_test(sbox): - "test 'svnauthz-validate' compatability mode url" + "test 'svnauthz-validate' compatibility mode url" sbox.build() wc_dir = sbox.wc_dir @@ -925,28 +877,24 @@ def svnauthz_compat_mode_repo_test(sbox): expected_status.add({ 'A/authz' : Item(status=' ', wc_rev=2), }) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure - svntest.actions.run_and_verify_svnauthz("svnauthz-validate on url", - None, None, 0, True, + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) + svntest.actions.run_and_verify_svnauthz(None, None, 0, True, authz_url) # Check an invalid url. svntest.main.file_append(authz_path, "x\n") expected_output = wc.State(wc_dir, {'A/authz' : Item(verb='Sending')}) expected_status.tweak('A/authz', status=' ', wc_rev=3) - if svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir): - raise svntest.Failure - svntest.actions.run_and_verify_svnauthz("svnauthz-validate on invalid file", - None, None, 1, True, + svntest.actions.run_and_verify_commit(wc_dir, expected_output, + expected_status) + svntest.actions.run_and_verify_svnauthz(None, None, 1, True, authz_path) - # Check a non-existant url. + # Check a non-existent url. # Exit code really should be 2 since this is an operational error. 
svntest.actions.run_and_verify_svnauthz( - "svnauthz-validate on non-existant file", None, None, 2, True, + None, None, 2, True, repo_url + "/zilch" ) diff --git a/subversion/tests/cmdline/svndumpfilter_tests.py b/subversion/tests/cmdline/svndumpfilter_tests.py index 93a3244..abd47f6 100755 --- a/subversion/tests/cmdline/svndumpfilter_tests.py +++ b/subversion/tests/cmdline/svndumpfilter_tests.py @@ -34,8 +34,7 @@ import svntest from svntest.verify import SVNExpectedStdout, SVNExpectedStderr # Get some helper routines -from svnadmin_tests import (load_and_verify_dumpstream, load_dumpstream, - test_create) +from svnadmin_tests import load_and_verify_dumpstream, load_dumpstream from svntest.main import run_svn, run_svnadmin # (abbreviation) @@ -60,15 +59,10 @@ def filter_and_return_output(dump, bufsize=0, *varargs): dump = [ dump ] # Does the caller want the stderr? - try: - varargs.index('-q') + if '-q' in varargs or '--quiet' in varargs: expected_errput = None # Stderr with -q or --quiet is a real error! - except: - try: - varargs.index('--quiet') - expected_errput = None - except: - expected_errput = svntest.verify.AnyOutput + else: + expected_errput = svntest.verify.AnyOutput ## TODO: Should we handle exit_code? exit_code, output, errput = svntest.main.run_command_stdin( svntest.main.svndumpfilter_binary, expected_errput, bufsize, True, @@ -92,7 +86,7 @@ def reflect_dropped_renumbered_revs(sbox): ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2982. 
## # Test svndumpfilter with include option - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svndumpfilter_tests_data', 'with_merges.dump') @@ -112,13 +106,13 @@ def reflect_dropped_renumbered_revs(sbox): expected_output = svntest.verify.UnorderedOutput([ url + "/trunk - /branch1:4-5\n", ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) # Test svndumpfilter with exclude option - test_create(sbox) + sbox.build(empty=True) filtered_out, filtered_err = filter_and_return_output( dumpfile, 0, "exclude", "branch1", "--skip-missing-merge-sources", @@ -131,7 +125,7 @@ def reflect_dropped_renumbered_revs(sbox): expected_output = svntest.verify.UnorderedOutput([ url + "/trunk - \n", ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -142,7 +136,7 @@ def svndumpfilter_loses_mergeinfo(sbox): ## See http://subversion.tigris.org/issues/show_bug.cgi?id=3181. ## - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svndumpfilter_tests_data', 'with_merges.dump') @@ -158,7 +152,7 @@ def svndumpfilter_loses_mergeinfo(sbox): expected_output = svntest.verify.UnorderedOutput([ url + "/trunk - /branch1:4-8\n", ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -225,7 +219,7 @@ def dumpfilter_with_targets(sbox): "svndumpfilter --targets blah" ## See http://subversion.tigris.org/issues/show_bug.cgi?id=2697. 
## - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svndumpfilter_tests_data', @@ -248,7 +242,7 @@ def dumpfilter_with_targets(sbox): def dumpfilter_with_patterns(sbox): "svndumpfilter --pattern PATH_PREFIX" - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svndumpfilter_tests_data', @@ -268,7 +262,7 @@ def dumpfilter_with_patterns(sbox): def filter_mergeinfo_revs_outside_of_dump_stream(sbox): "filter mergeinfo revs outside of dump stream" - test_create(sbox) + sbox.build(empty=True) # Load a partial dump into an existing repository. # @@ -283,7 +277,7 @@ def filter_mergeinfo_revs_outside_of_dump_stream(sbox): # | | | | # trunk---r2---r3-----r5---r6-------r8---r9---------------> | | # r1 | | | | | | - # intial | | | |______ | | + # initial | | | |______ | | # import copy | copy | merge merge # | | | merge (r5) (r8) # | | | (r9) | | @@ -357,7 +351,7 @@ def filter_mergeinfo_revs_outside_of_dump_stream(sbox): url + "/B2 - /trunk:4\n", url + "/B1/B/E - /branches/B2/B/E:6-7\n", "/trunk/B/E:3-4\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -371,7 +365,7 @@ def filter_mergeinfo_revs_outside_of_dump_stream(sbox): # Project-Z (Added r5) # docs/ (Added r6) # README (Added r6). 
- test_create(sbox) + sbox.build(empty=True) skeleton_dumpfile = open(os.path.join(os.path.dirname(sys.argv[0]), 'svnadmin_tests_data', 'skeleton_repos.dump')).read() @@ -484,7 +478,7 @@ def filter_mergeinfo_revs_outside_of_dump_stream(sbox): expected_output = svntest.verify.UnorderedOutput([ url + "/B1 - /Projects/Project-X/trunk:9\n", url + "/B1/B/E - /Projects/Project-X/trunk/B/E:8-9\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -500,7 +494,7 @@ def filter_mergeinfo_revs_outside_of_dump_stream(sbox): def dropped_but_not_renumbered_empty_revs(sbox): "mergeinfo maps correctly when dropping revs" - test_create(sbox) + sbox.build(empty=True) # The dump file mergeinfo_included_full.dump represents this repository: # @@ -511,7 +505,7 @@ def dropped_but_not_renumbered_empty_revs(sbox): # | | | | # trunk---r2---r3-----r5---r6-------r8---r9---------------> | | # r1 | | | | | | - # intial | | | |______ | | + # initial | | | |______ | | # import copy | copy | merge merge # | | | merge (r5) (r8) # | | | (r9) | | @@ -590,7 +584,7 @@ def dropped_but_not_renumbered_empty_revs(sbox): expected_output = svntest.verify.UnorderedOutput([ url + "/B1 - /trunk:6,8\n", url + "/B1/B/E - /trunk/B/E:5-8\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -616,7 +610,7 @@ def match_empty_prefix(sbox): raise verify.UnexpectedStderr(filtered_err) # Load the filtered dump into a repo and check the result - test_create(sbox) + sbox.build(empty=True) load_dumpstream(sbox, filtered_output, '--ignore-uuid') svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, @@ -650,7 +644,7 @@ def accepts_deltas(sbox): "accepts deltas in the input" # Accept format v3 (as created by 'svnadmin --deltas' or svnrdump). 
- test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svndumpfilter_tests_data', 'simple_v3.dump') @@ -684,7 +678,7 @@ def dumpfilter_targets_expect_leading_slash_prefixes(sbox): "dumpfilter targets expect leading '/' in prefixes" ## See http://subversion.tigris.org/issues/show_bug.cgi?id=4234. ## - test_create(sbox) + sbox.build(empty=True) dumpfile_location = os.path.join(os.path.dirname(sys.argv[0]), 'svndumpfilter_tests_data', diff --git a/subversion/tests/cmdline/svnfsfs_tests.py b/subversion/tests/cmdline/svnfsfs_tests.py new file mode 100755 index 0000000..fece34b --- /dev/null +++ b/subversion/tests/cmdline/svnfsfs_tests.py @@ -0,0 +1,200 @@ +#!/usr/bin/env python +# +# svnfsfs_tests.py: testing the 'svnfsfs' tool. +# +# Subversion is a tool for revision control. +# See http://subversion.apache.org for more information. +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+###################################################################### + +# General modules +import os +import logging +import re +import shutil +import sys +import threading +import time +import gzip + +logger = logging.getLogger() + +# Our testing module +import svntest +from svntest.verify import SVNExpectedStdout, SVNExpectedStderr +from svntest.verify import SVNUnexpectedStderr +from svntest.verify import UnorderedOutput +from svntest.main import SVN_PROP_MERGEINFO + +# (abbreviation) +Skip = svntest.testcase.Skip_deco +SkipUnless = svntest.testcase.SkipUnless_deco +XFail = svntest.testcase.XFail_deco +Issues = svntest.testcase.Issues_deco +Issue = svntest.testcase.Issue_deco +Wimp = svntest.testcase.Wimp_deco +SkipDumpLoadCrossCheck = svntest.testcase.SkipDumpLoadCrossCheck_deco +Item = svntest.wc.StateItem + +#---------------------------------------------------------------------- + +# How we currently test 'svnfsfs' -- +# +# 'svnadmin create': Create an empty repository, test that the +# root node has a proper created-revision, +# because there was once a bug where it +# didn't. +# +# Note also that "svnadmin create" is tested +# implicitly every time we run a python test +# script. (An empty repository is always +# created and then imported into; if this +# subcommand failed catastrophically, every +# test would fail and we would know instantly.) +# +# 'svnadmin createtxn' +# 'svnadmin rmtxn': See below. +# +# 'svnadmin lstxns': We don't care about the contents of transactions; +# we only care that they exist or not. +# Therefore, we can simply parse transaction headers. +# +# 'svnadmin dump': A couple regression tests that ensure dump doesn't +# error out, and one to check that the --quiet option +# really does what it's meant to do. The actual +# contents of the dump aren't verified at all. 
+# +###################################################################### +# Helper routines + +def patch_format(repo_dir, shard_size): + """Rewrite the format of the FSFS repository REPO_DIR so + that it would use sharding with SHARDS revisions per shard.""" + + format_path = os.path.join(repo_dir, "db", "format") + contents = open(format_path, 'rb').read() + processed_lines = [] + + for line in contents.split("\n"): + if line.startswith("layout "): + processed_lines.append("layout sharded %d" % shard_size) + else: + processed_lines.append(line) + + new_contents = "\n".join(processed_lines) + os.chmod(format_path, 0666) + open(format_path, 'wb').write(new_contents) + +###################################################################### +# Tests + +#---------------------------------------------------------------------- + +@SkipUnless(svntest.main.is_fs_type_fsfs) +@SkipUnless(svntest.main.fs_has_pack) +@SkipUnless(svntest.main.is_fs_log_addressing) +def load_index_sharded(sbox): + "load-index in a packed repo" + + # Configure two files per shard to trigger packing. + sbox.build() + patch_format(sbox.repo_dir, shard_size=2) + + # With --fsfs-packing, everything is already packed and we + # can skip this part. + if not svntest.main.options.fsfs_packing: + expected_output = ["Packing revisions in shard 0...done.\n"] + svntest.actions.run_and_verify_svnadmin(expected_output, [], + "pack", sbox.repo_dir) + + # Read P2L index using svnfsfs. 
+ exit_code, items, errput = \ + svntest.actions.run_and_verify_svnfsfs(None, [], "dump-index", "-r0", + sbox.repo_dir) + + # load-index promises to deal with input that + # + # * uses the same encoding as the dump-index output + # * is not in ascending item offset order + # * ignores lines with the full table header + # * ignores the checksum column and beyond + # * figures out the correct target revision even if the first item + # does not match the first revision in the pack file + # + # So, let's mess with the ITEMS list to call in on these promises. + + # not in ascending order + items.reverse() + + # multiple headers (there is already one now at the bottom) + items.insert(0, " Start Length Type Revision Item Checksum\n") + + # make columns have a variable size + # mess with the checksums + # add a junk column + # keep header lines as are + for i in range(0, len(items)): + if items[i].find("Start") == -1: + columns = items[i].split() + columns[5] = columns[5].replace('f','-').replace('0','9') + columns.append("junk") + items[i] = ' '.join(columns) + "\n" + + # first entry is for rev 1, pack starts at rev 0, though + assert(items[1].split()[3] == "1") + + # Reload the index + exit_code, output, errput = svntest.main.run_command_stdin( + svntest.main.svnfsfs_binary, [], 0, False, items, + "load-index", sbox.repo_dir) + + # Run verify to see whether we broke anything. 
+ expected_output = ["* Verifying metadata at revision 0 ...\n", + "* Verifying repository metadata ...\n", + "* Verified revision 0.\n", + "* Verified revision 1.\n"] + svntest.actions.run_and_verify_svnadmin(expected_output, [], + "verify", sbox.repo_dir) + +@SkipUnless(svntest.main.is_fs_type_fsfs) +def test_stats_on_empty_repo(sbox): + "stats on empty repo shall not crash" + + sbox.build(create_wc=False, empty=True) + + exit_code, output, errput = \ + svntest.actions.run_and_verify_svnfsfs(None, [], 'stats', sbox.repo_dir) + +######################################################################## +# Run the tests + + +# list all tests here, starting with None: +test_list = [ None, + load_index_sharded, + test_stats_on_empty_repo, + ] + +if __name__ == '__main__': + svntest.main.run_tests(test_list) + # NOTREACHED + + +### End of file. diff --git a/subversion/tests/cmdline/svnlook_tests.py b/subversion/tests/cmdline/svnlook_tests.py index f7c620b..d6c49c0 100755 --- a/subversion/tests/cmdline/svnlook_tests.py +++ b/subversion/tests/cmdline/svnlook_tests.py @@ -95,9 +95,7 @@ def test_misc(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # give the repo a new UUID uuid = "01234567-89ab-cdef-89ab-cdef01234567" @@ -209,9 +207,9 @@ def delete_file_in_moved_dir(sbox): # move E to E2 and delete E2/alpha E_path = os.path.join(wc_dir, 'A', 'B', 'E') E2_path = os.path.join(wc_dir, 'A', 'B', 'E2') - svntest.actions.run_and_verify_svn(None, None, [], 'mv', E_path, E2_path) + svntest.actions.run_and_verify_svn(None, [], 'mv', E_path, E2_path) alpha_path = os.path.join(E2_path, 'alpha') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', alpha_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path) # commit expected_output = svntest.wc.State(wc_dir, { @@ -234,9 +232,7 @@ def delete_file_in_moved_dir(sbox): ### in order to get this commit working again. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) exit_code, output, errput = svntest.main.run_svnlook("dirs-changed", repo_dir) @@ -263,16 +259,16 @@ def test_print_property_diffs(sbox): # Add a bogus property to iota iota_path = os.path.join(wc_dir, 'iota') - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bogus_prop', 'bogus_val', iota_path) # commit the change - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', iota_path) # Grab the diff exit_code, expected_output, err = svntest.actions.run_and_verify_svn( - None, None, [], 'diff', '-r', 'PREV', iota_path) + None, [], 'diff', '-r', 'PREV', iota_path) exit_code, output, errput = svntest.main.run_svnlook("diff", repo_dir) if errput: @@ -370,7 +366,7 @@ def changed_copy_info(sbox): E_path = os.path.join(wc_dir, 'A', 'B', 'E') alpha_path = os.path.join(wc_dir, 'A', 'B', 'E', 'alpha') alpha2_path = os.path.join(wc_dir, 'A', 'alpha2') - svntest.actions.run_and_verify_svn(None, None, [], 'cp', alpha_path, + svntest.actions.run_and_verify_svn(None, [], 'cp', alpha_path, alpha2_path) # commit @@ -383,9 +379,7 @@ def changed_copy_info(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) exit_code, output, errput = svntest.main.run_svnlook("changed", repo_dir) if errput: @@ -446,10 +440,10 @@ def limit_history(sbox): "history --limit" sbox.build(create_wc=False) repo_url = sbox.repo_url - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', '-m', 'log msg', repo_url + "/iota", repo_url + "/iota2") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', '-m', 'log msg', repo_url + "/A/mu", repo_url + "/iota") history = 
run_svnlook("history", "--limit=1", sbox.repo_dir) @@ -481,9 +475,7 @@ def diff_ignore_whitespace(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Check the output of 'svnlook diff -x --ignore-space-change' on mu. # It should not print anything. @@ -536,13 +528,11 @@ def diff_ignore_eolstyle(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Grab the diff exit_code, expected_output, err = svntest.actions.run_and_verify_svn( - None, None, [], + None, [], 'diff', '-r', 'PREV', '-x', '--ignore-eol-style', mu_path) @@ -676,9 +666,9 @@ fp.close()""" svntest.main.file_append(rho_path, 'new appended text for rho') # commit, and check the hook's logfile - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) expected_data = [ 'U A/D/G/rho\n', 'U A/mu\n', 'A/\n', 'A/D/G/\n' ] @@ -694,9 +684,9 @@ fp.close()""" svntest.main.create_python_hook_script(pre_commit_hook, hook_instance) - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'bogus_prop', 'bogus_val\n', A_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', wc_dir, '--with-revprop', 'bogus_rev_prop=bogus_rev_val\n') # Now check the logfile @@ -705,16 +695,15 @@ fp.close()""" "Properties on '/A':\n", ' bogus_prop\n', ' svn:log\n', ' svn:author\n', - ' svn:check-locks\n', # internal prop, not really expected ' bogus_rev_prop\n', ' svn:date\n', ' svn:txn-client-compat-version\n', + ' svn:txn-user-agent\n', ] - # ra_dav and ra_svn add the user-agent ephemeral property - if svntest.main.is_ra_type_dav() or svntest.main.is_ra_type_svn(): - 
expected_data.append(' svn:txn-user-agent\n') verify_logfile(logfilepath, svntest.verify.UnorderedOutput(expected_data)) +# From r1293375 until fixed in r1303856, 'svnlook changed' and 'svnlook diff' +# produced no output on a property delete. def property_delete(sbox): "property delete" @@ -726,9 +715,7 @@ def property_delete(sbox): sbox.simple_propdel('foo', 'A/mu') sbox.simple_commit() - # XFail since r1293375, changed and diff produce no output on a - # property delete - svntest.actions.run_and_verify_svnlook(None, ["_U A/mu\n"], [], + svntest.actions.run_and_verify_svnlook(["_U A/mu\n"], [], 'changed', repo_dir) diff --git a/subversion/tests/cmdline/svnmucc_tests.py b/subversion/tests/cmdline/svnmucc_tests.py index 604ee67..b910017 100755 --- a/subversion/tests/cmdline/svnmucc_tests.py +++ b/subversion/tests/cmdline/svnmucc_tests.py @@ -43,7 +43,7 @@ def reject_bogus_mergeinfo(sbox): # At present this tests the server, but if we ever make svnmucc # validate the mergeinfo up front then it will only test the client - svntest.actions.run_and_verify_svnmucc(None, [], expected_error, + svntest.actions.run_and_verify_svnmucc([], expected_error, 'propset', 'svn:mergeinfo', '/B:0', '-m', 'log msg', sbox.repo_url + '/A') @@ -106,7 +106,9 @@ def basic_svnmucc(sbox): sbox.build() empty_file = sbox.ospath('empty') + file = sbox.ospath('file') svntest.main.file_append(empty_file, '') + svntest.main.file_append(file, 'file') # revision 2 test_svnmucc(sbox.repo_url, @@ -301,6 +303,14 @@ def basic_svnmucc(sbox): 'propsetf', 'testprop', empty_file, 'foo/z.c', 'propsetf', 'testprop', empty_file, 'foo/foo') + # revision 21 + test_svnmucc(sbox.repo_url, + ['M /foo/z.c', + ], #--------- + '-m', 'log msg', + 'propset', 'testprop', 'false', 'foo/z.c', + 'put', file, 'foo/z.c') + # Expected missing revision error xtest_svnmucc(sbox.repo_url, ["svnmucc: E200004: 'a' is not a revision" @@ -310,22 +320,21 @@ def basic_svnmucc(sbox): # Expected cannot be younger error 
xtest_svnmucc(sbox.repo_url, - ['svnmucc: E205000: Copy source revision cannot be younger ' + - 'than base revision', + ['svnmucc: E160006: No such revision 42', ], #--------- '-m', 'log msg', 'cp', '42', 'a', 'b') # Expected already exists error xtest_svnmucc(sbox.repo_url, - ["svnmucc: E125002: 'foo' already exists", + ["svnmucc: E160020: Path 'foo' already exists", ], #--------- '-m', 'log msg', 'cp', '17', 'a', 'foo') # Expected copy_src already exists error xtest_svnmucc(sbox.repo_url, - ["svnmucc: E125002: 'a/bar' (from 'foo/bar:17') already exists", + ["svnmucc: E160020: Path 'a/bar' already exists", ], #--------- '-m', 'log msg', 'cp', '17', 'foo', 'a', @@ -333,7 +342,7 @@ def basic_svnmucc(sbox): # Expected not found error xtest_svnmucc(sbox.repo_url, - ["svnmucc: E125002: 'a' not found", + ["svnmucc: E160013: Path 'a' not found in revision 17", ], #--------- '-m', 'log msg', 'cp', '17', 'a', 'b') @@ -341,21 +350,22 @@ def basic_svnmucc(sbox): def propset_root_internal(sbox, target): ## propset on ^/ - svntest.actions.run_and_verify_svnmucc(None, None, [], + svntest.actions.run_and_verify_svnmucc(None, [], '-m', 'log msg', 'propset', 'foo', 'bar', target) - svntest.actions.run_and_verify_svn(None, 'bar', [], - 'propget', '--strict', 'foo', + svntest.actions.run_and_verify_svn('bar', [], + 'propget', '--no-newline', 'foo', target) ## propdel on ^/ - svntest.actions.run_and_verify_svnmucc(None, None, [], + svntest.actions.run_and_verify_svnmucc(None, [], '-m', 'log msg', 'propdel', 'foo', target) - svntest.actions.run_and_verify_svn(None, [], [], - 'propget', '--strict', 'foo', + svntest.actions.run_and_verify_svn([], + '.*W200017: Property.*not found', + 'propget', '--no-newline', 'foo', target) @Issues(3663) @@ -411,6 +421,174 @@ def no_log_msg_non_interactive(sbox): 'mkdir', 'A/subdir') +def nested_replaces(sbox): + "nested replaces" + + sbox.build(create_wc=False) + repo_url = sbox.repo_url + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', 
repo_url, '-m', 'r2: create tree', + 'rm', 'A', + 'rm', 'iota', + 'mkdir', 'A', 'mkdir', 'A/B', 'mkdir', 'A/B/C', + 'mkdir', 'M', 'mkdir', 'M/N', 'mkdir', 'M/N/O', + 'mkdir', 'X', 'mkdir', 'X/Y', 'mkdir', 'X/Y/Z') + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', repo_url, '-m', 'r3: nested replaces', + *(""" +rm A rm M rm X +cp HEAD X/Y/Z A cp HEAD A/B/C M cp HEAD M/N/O X +cp HEAD A/B A/B cp HEAD M/N M/N cp HEAD X/Y X/Y +rm A/B/C rm M/N/O rm X/Y/Z +cp HEAD X A/B/C cp HEAD A M/N/O cp HEAD M X/Y/Z +rm A/B/C/Y + """.split())) + + # ### TODO: need a smarter run_and_verify_log() that verifies copyfrom + expected_output = svntest.verify.UnorderedRegexListOutput(map(re.escape, [ + ' R /A (from /X/Y/Z:2)', + ' A /A/B (from /A/B:2)', + ' R /A/B/C (from /X:2)', + ' R /M (from /A/B/C:2)', + ' A /M/N (from /M/N:2)', + ' R /M/N/O (from /A:2)', + ' R /X (from /M/N/O:2)', + ' A /X/Y (from /X/Y:2)', + ' R /X/Y/Z (from /M:2)', + ' D /A/B/C/Y', + ]) + [ + '^-', '^r3', '^-', '^Changed paths:', + ]) + svntest.actions.run_and_verify_svn(expected_output, [], + 'log', '-qvr3', repo_url) + + +def prohibited_deletes_and_moves(sbox): + "test prohibited delete and move operations" + + # These action sequences were allowed in 1.8.13, but are prohibited in 1.9.x + # and later. Most of them probably indicate an inadvertent user mistake. 
+ # See dev@, 2015-05-11, "Re: Issue 4579 / svnmucc fails to process certain + # deletes", <http://svn.haxx.se/dev/archive-2015-05/0038.shtml> + + sbox.build(read_only = True) + svntest.main.file_write(sbox.ospath('file'), "New contents") + + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't delete node at 'iota'", + ], #--------- + '-m', 'r2: modify and delete /iota', + 'put', sbox.ospath('file'), 'iota', + 'rm', 'iota') + + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't delete node at 'iota'", + ], #--------- + '-m', 'r2: propset and delete /iota', + 'propset', 'prop', 'val', 'iota', + 'rm', 'iota') + + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E160013: Can't delete node at 'iota' as it does " + "not exist", + ], #--------- + '-m', 'r2: delete and delete /iota', + 'rm', 'iota', + 'rm', 'iota') + + # Subversion 1.8.13 used to move /iota without applying the text change. + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't delete node at 'iota'", + ], #--------- + '-m', 'r2: modify and move /iota', + 'put', sbox.ospath('file'), 'iota', + 'mv', 'iota', 'iota2') + + # Subversion 1.8.13 used to move /A without applying the inner remove. 
+ xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't delete node at 'A'", + ], #--------- + '-m', 'r2: delete /A/B and move /A', + 'rm', 'A/B', + 'mv', 'A', 'A1') + +def svnmucc_type_errors(sbox): + "test type errors" + + sbox.build(read_only=True) + + sbox.simple_append('file', 'New contents') + + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E160016: Can't operate on 'B' " + "because 'A' is not a directory"], + '-m', '', + 'put', sbox.ospath('file'), 'A', + 'mkdir', 'A/B', + 'propset', 'iota', 'iota', 'iota') + + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't delete node at 'A'"], + '-m', '', + 'mkdir', 'A/Z', + 'put', sbox.ospath('file'), 'A') + + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E160020: Path 'Z' already exists"], + '-m', '', + 'mkdir', 'A/Z', + 'put', sbox.ospath('file'), 'A/Z') + +def svnmucc_propset_and_put(sbox): + "propset and put" + + sbox.build() + + sbox.simple_append('file', 'New contents') + + # First in the sane order: put, then propset + xtest_svnmucc(sbox.repo_url, + [], + '-m', '', + 'put', sbox.ospath('file'), 't1', + 'propset', 't1', 't1', 't1') + + # And now in an impossible order: propset, then put + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't set properties at not existing 't2'"], + '-m', '', + 'propset', 't2', 't2', 't2', + 'put', sbox.ospath('file'), 't2') + + # And if the target already exists (dir) + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't delete node at 'A'"], + '-m', '', + 'propset', 'A', 'A', 'A', + 'put', sbox.ospath('file'), 'A') + + # And if the target already exists (file) # fixed in r1702467 + xtest_svnmucc(sbox.repo_url, + [], + '-m', '', + 'propset', 'iota', 'iota', 'iota', + 'put', sbox.ospath('file'), 'iota') + + # Put same file twice (non existing) + xtest_svnmucc(sbox.repo_url, + ["svnmucc: E160020: Path 't3' already exists"], + '-m', '', + 'put', sbox.ospath('file'), 't3', + 'put', sbox.ospath('file'), 't3') + + # Put same file twice (existing) + 
xtest_svnmucc(sbox.repo_url, + ["svnmucc: E200009: Can't update file at 't1'"], + '-m', '', + 'put', sbox.ospath('file'), 't1', + 'put', sbox.ospath('file'), 't1') + + ###################################################################### test_list = [ None, @@ -419,6 +597,10 @@ test_list = [ None, propset_root, too_many_log_messages, no_log_msg_non_interactive, + nested_replaces, + prohibited_deletes_and_moves, + svnmucc_type_errors, + svnmucc_propset_and_put, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/svnrdump_tests.py b/subversion/tests/cmdline/svnrdump_tests.py index 334bd15..1b564d7 100755 --- a/subversion/tests/cmdline/svnrdump_tests.py +++ b/subversion/tests/cmdline/svnrdump_tests.py @@ -34,7 +34,6 @@ from svntest.verify import SVNUnexpectedStdout, SVNUnexpectedStderr from svntest.verify import SVNExpectedStderr from svntest.main import write_restrictive_svnserve_conf from svntest.main import server_has_partial_replay -from svnadmin_tests import test_create # (abbreviation) Skip = svntest.testcase.Skip_deco @@ -61,36 +60,27 @@ mismatched_headers_re = re.compile( ###################################################################### # Helper routines -def build_repos(sbox): - """Build an empty sandbox repository""" - - # Cleanup after the last run by removing any left-over repository. - svntest.main.safe_rmtree(sbox.repo_dir) - - # Create an empty repository. - svntest.main.create_repos(sbox.repo_dir) - -def compare_repos_dumps(svnrdump_sbox, svnadmin_dumpfile): - """Compare two dumpfiles, one created from SVNRDUMP_SBOX, and other given - by SVNADMIN_DUMPFILE. The dumpfiles do not need to match linewise, as the - SVNADMIN_DUMPFILE contents will first be loaded into a repository and then +def compare_repos_dumps(sbox, other_dumpfile, + bypass_prop_validation=False): + """Compare two dumpfiles, one created from SBOX, and other given + by OTHER_DUMPFILE. 
The dumpfiles do not need to match linewise, as the + OTHER_DUMPFILE contents will first be loaded into a repository and then re-dumped to do the match, which should generate the same dumpfile as - dumping SVNRDUMP_SBOX.""" - - svnrdump_contents = svntest.actions.run_and_verify_dump( - svnrdump_sbox.repo_dir) + dumping SBOX.""" - svnadmin_sbox = svnrdump_sbox.clone_dependent() - svntest.main.safe_rmtree(svnadmin_sbox.repo_dir) - svntest.main.create_repos(svnadmin_sbox.repo_dir) - svntest.actions.run_and_verify_load(svnadmin_sbox.repo_dir, svnadmin_dumpfile) + sbox_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir) - svnadmin_contents = svntest.actions.run_and_verify_dump( - svnadmin_sbox.repo_dir) + # Load and dump the other dumpfile (using svnadmin) + other_sbox = sbox.clone_dependent() + other_sbox.build(create_wc=False, empty=True) + svntest.actions.run_and_verify_load(other_sbox.repo_dir, other_dumpfile, + bypass_prop_validation) + other_dumpfile = svntest.actions.run_and_verify_dump(other_sbox.repo_dir) + ### This call kind-of assumes EXPECTED is first and ACTUAL is second. 
svntest.verify.compare_dump_files( - "Dump files", "DUMP", svnadmin_contents, svnrdump_contents) + "Dump files", "DUMP", other_dumpfile, sbox_dumpfile) def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None, subdir = None, bypass_prop_validation = False, @@ -103,7 +93,7 @@ def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None, array of optional additional options to pass to 'svnrdump dump'.""" # Create an empty sandbox repository - build_repos(sbox) + sbox.build(create_wc=False, empty=True) # This directory contains all the dump files svnrdump_tests_dir = os.path.join(os.path.dirname(sys.argv[0]), @@ -111,11 +101,10 @@ def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None, # Load the specified dump file into the sbox repository using # svnadmin load - svnadmin_dumpfile = open(os.path.join(svnrdump_tests_dir, + original_dumpfile = open(os.path.join(svnrdump_tests_dir, dumpfile_name), 'rb').readlines() - - svntest.actions.run_and_verify_load(sbox.repo_dir, svnadmin_dumpfile, + svntest.actions.run_and_verify_load(sbox.repo_dir, original_dumpfile, bypass_prop_validation) repo_url = sbox.repo_url @@ -129,28 +118,29 @@ def run_dump_test(sbox, dumpfile_name, expected_dumpfile_name = None, [], 0, *opts) if expected_dumpfile_name: - svnadmin_dumpfile = open(os.path.join(svnrdump_tests_dir, + expected_dumpfile = open(os.path.join(svnrdump_tests_dir, expected_dumpfile_name), 'rb').readlines() # Compare the output from stdout if ignore_base_checksums: - svnadmin_dumpfile = [l for l in svnadmin_dumpfile + expected_dumpfile = [l for l in expected_dumpfile if not l.startswith('Text-delta-base-md5')] svnrdump_dumpfile = [l for l in svnrdump_dumpfile if not l.startswith('Text-delta-base-md5')] - svnadmin_dumpfile = [l for l in svnadmin_dumpfile + expected_dumpfile = [l for l in expected_dumpfile if not mismatched_headers_re.match(l)] svnrdump_dumpfile = [l for l in svnrdump_dumpfile if not mismatched_headers_re.match(l)] - 
svnadmin_dumpfile = svntest.verify.UnorderedOutput(svnadmin_dumpfile) + expected_dumpfile = svntest.verify.UnorderedOutput(expected_dumpfile) svntest.verify.compare_and_display_lines( - "Dump files", "DUMP", svnadmin_dumpfile, svnrdump_dumpfile, + "Dump files", "DUMP", expected_dumpfile, svnrdump_dumpfile, None) else: - compare_repos_dumps(sbox, svnadmin_dumpfile) + # The expected dumpfile is the result of dumping SBOX. + compare_repos_dumps(sbox, svnrdump_dumpfile, bypass_prop_validation) def run_load_test(sbox, dumpfile_name, expected_dumpfile_name = None, expect_deltas = True): @@ -158,7 +148,7 @@ def run_load_test(sbox, dumpfile_name, expected_dumpfile_name = None, dump' and check that the same dumpfile is produced""" # Create an empty sandbox repository - build_repos(sbox) + sbox.build(create_wc=False, empty=True) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) @@ -169,36 +159,37 @@ def run_load_test(sbox, dumpfile_name, expected_dumpfile_name = None, # Load the specified dump file into the sbox repository using # svnrdump load - svnrdump_dumpfile = open(os.path.join(svnrdump_tests_dir, + original_dumpfile = open(os.path.join(svnrdump_tests_dir, dumpfile_name), 'rb').readlines() # Set the UUID of the sbox repository to the UUID specified in the # dumpfile ### RA layer doesn't have a set_uuid functionality - uuid = svnrdump_dumpfile[2].split(' ')[1][:-1] - svntest.actions.run_and_verify_svnadmin2("Setting UUID", None, None, 0, + uuid = original_dumpfile[2].split(' ')[1][:-1] + svntest.actions.run_and_verify_svnadmin2(None, None, 0, 'setuuid', sbox.repo_dir, uuid) - svntest.actions.run_and_verify_svnrdump(svnrdump_dumpfile, + svntest.actions.run_and_verify_svnrdump(original_dumpfile, svntest.verify.AnyOutput, [], 0, 'load', sbox.repo_url) - # Create a dump file using svnadmin dump - svnadmin_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir, + # Re-dump the rdump-loaded repo using svnadmin dump + 
resulted_dumpfile = svntest.actions.run_and_verify_dump(sbox.repo_dir, expect_deltas) if expected_dumpfile_name: - svnrdump_dumpfile = open(os.path.join(svnrdump_tests_dir, + expected_dumpfile = open(os.path.join(svnrdump_tests_dir, expected_dumpfile_name), 'rb').readlines() # Compare the output from stdout svntest.verify.compare_and_display_lines( - "Dump files", "DUMP", svnrdump_dumpfile, svnadmin_dumpfile) + "Dump files", "DUMP", expected_dumpfile, resulted_dumpfile) else: - compare_repos_dumps(sbox, svnrdump_dumpfile) + expected_dumpfile = original_dumpfile + compare_repos_dumps(sbox, expected_dumpfile) ###################################################################### # Tests @@ -410,7 +401,7 @@ def reflect_dropped_renumbered_revs(sbox): "svnrdump renumbers dropped revs in mergeinfo" # Create an empty sandbox repository - build_repos(sbox) + sbox.build(create_wc=False, empty=True) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) @@ -431,7 +422,8 @@ def reflect_dropped_renumbered_revs(sbox): # Create the 'toplevel' directory in repository and then load the same # dumpfile into that subtree. - svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 10.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 10.\n'], [], "mkdir", sbox.repo_url + "/toplevel", "-m", "Create toplevel dir to load into") svntest.actions.run_and_verify_svnrdump(svnrdump_dumpfile, @@ -444,7 +436,7 @@ def reflect_dropped_renumbered_revs(sbox): url + "/trunk - /branch1:4-8\n", url + "/toplevel/trunk - /toplevel/branch1:14-18\n", ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -473,7 +465,7 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): "don't drop mergeinfo revs in incremental svnrdump" # Create an empty repos. 
- test_create(sbox) + sbox.build(empty=True) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) @@ -488,7 +480,7 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): # | | | | # trunk---r2---r3-----r5---r6-------r8---r9---------------> | | # r1 | | | | | | - # intial | | | |______ | | + # initial | | | |______ | | # import copy | copy | merge merge # | | | merge (r5) (r8) # | | | (r9) | | @@ -542,7 +534,7 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): url + "B2 - /trunk:9\n", url + "B1/B/E - /branches/B2/B/E:11-12\n", "/trunk/B/E:5-6,8-9\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -579,7 +571,8 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): dump_fp.close() # Blow away the current repos and create an empty one in its place. - test_create(sbox) + svntest.main.safe_rmtree(sbox.repo_dir, True) # Fix race with bdb in svnserve + sbox.build(empty=True) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) @@ -604,7 +597,7 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): # Check the mergeinfo, we use the same expected output as before, # as it (duh!) should be exactly the same as when we loaded the # repos in one shot. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -614,7 +607,8 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): # PART 3: Load a full dump to an non-empty repository. # # Reset our sandbox. 
- test_create(sbox) + svntest.main.safe_rmtree(sbox.repo_dir, True) # Fix race with bdb in svnserve + sbox.build(empty=True) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) @@ -668,14 +662,15 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): url + "B2 - /Projects/Project-X/trunk:15\n", url + "B1/B/E - /Projects/Project-X/branches/B2/B/E:17-18\n", "/Projects/Project-X/trunk/B/E:11-12,14-15\n"]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) # PART 4: Load a a series of incremental dumps to an non-empty repository. # # Reset our sandbox. - test_create(sbox) + svntest.main.safe_rmtree(sbox.repo_dir, True) # Fix race with bdb in svnserve + sbox.build(empty=True) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) @@ -710,7 +705,7 @@ def dont_drop_valid_mergeinfo_during_incremental_svnrdump_loads(sbox): # Check the resulting mergeinfo. We expect the exact same results # as Part 3. # See http://subversion.tigris.org/issues/show_bug.cgi?id=3020#desc16. - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'propget', 'svn:mergeinfo', '-R', sbox.repo_url) @@ -720,14 +715,15 @@ def svnrdump_load_partial_incremental_dump(sbox): "svnrdump load partial incremental dump" # Create an empty sandbox repository - build_repos(sbox) + sbox.build(create_wc=False, empty=True) # Create the revprop-change hook for this test svntest.actions.enable_revprop_changes(sbox.repo_dir) # Create the 'A' directory in repository and then load the partial # incremental dump into the root of the repository. 
- svntest.actions.run_and_verify_svn(None, ['\n', 'Committed revision 1.\n'], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 1.\n'], [], "mkdir", sbox.repo_url + "/A", "-m", "Create toplevel dir to load into") @@ -769,6 +765,53 @@ def only_trunk_A_range_dump(sbox): #---------------------------------------------------------------------- +@Issue(4490) +def load_prop_change_in_non_deltas_dump(sbox): + "load: prop change in non-deltas dump" + # 'svnrdump load' crashed when processing a node record with a non-delta + # properties block if the node previously had any svn:* properties. + + sbox.build() + sbox.simple_propset('svn:eol-style', 'native', 'iota', 'A/mu', 'A/B/lambda') + sbox.simple_commit() + + # Any prop change on a node that had an svn:* prop triggered the crash, + # so test an svn:* prop deletion and also some other prop changes. + sbox.simple_propdel('svn:eol-style', 'iota') + sbox.simple_propset('svn:eol-style', 'LF', 'A/mu') + sbox.simple_propset('p1', 'v1', 'A/B/lambda') + sbox.simple_commit() + + # Create a non-deltas dump. Use 'svnadmin', as svnrdump doesn't have that + # option. + dump = svntest.actions.run_and_verify_dump(sbox.repo_dir, deltas=False) + + # Try to load that dump. + sbox.build(create_wc=False, empty=True) + svntest.actions.enable_revprop_changes(sbox.repo_dir) + svntest.actions.run_and_verify_svnrdump(dump, + [], [], 0, + '-q', 'load', sbox.repo_url) + +#---------------------------------------------------------------------- + +@Issue(4476) +def dump_mergeinfo_contains_r0(sbox): + "dump: mergeinfo that contains r0" + ### We pass the original dump file name as 'expected_dumpfile_name' because + ### run_dump_test is currently broken when we don't. 
+ run_dump_test(sbox, "mergeinfo-contains-r0.dump", + bypass_prop_validation=True) + +#---------------------------------------------------------------------- + +@XFail() +@Issue(4476) +def load_mergeinfo_contains_r0(sbox): + "load: mergeinfo that contains r0" + run_load_test(sbox, "mergeinfo-contains-r0.dump", + expected_dumpfile_name="mergeinfo-contains-r0.expected.dump") + #---------------------------------------------------------------------- # Regression test for issue 4551 "svnrdump load commits wrong properties, @@ -870,6 +913,30 @@ def load_non_deltas_replace_copy_with_props(sbox): actual = map(str.strip, out) svntest.verify.compare_and_display_lines(None, 'PROPS', expected, actual) +# Regression test for issue #4552 "svnrdump writes duplicate headers for a +# replace-with-copy". 'svnrdump dump' wrote the Node-path and Node-kind +# headers twice for the 'delete' record of a replace-with-copy. +@Issue(4552) +def dump_replace_with_copy(sbox): + "dump replace with copy" + sbox.build() + + # Copy file/dir, replacing something + sbox.simple_rm('A/D/gamma', 'A/C') + sbox.simple_copy('A/mu@1', 'A/D/gamma') + sbox.simple_copy('A/B@1', 'A/C') + sbox.simple_commit() + + # Dump with 'svnrdump' + dumpfile = svntest.actions.run_and_verify_svnrdump( + None, svntest.verify.AnyOutput, [], 0, + 'dump', '--quiet', '--incremental', '-r2', + sbox.repo_url) + + # Check the 'delete' record headers: expect this parse to fail if headers + # are duplicated + svntest.verify.DumpParser(dumpfile).parse() + # Regression test for issue 4551 "svnrdump load commits wrong properties, # or fails, on a non-deltas dumpfile". 
In this test, a node's props are # modified, and the failure mode is that RA-serf would end up deleting @@ -969,8 +1036,12 @@ test_list = [ None, range_dump, only_trunk_range_dump, only_trunk_A_range_dump, + load_prop_change_in_non_deltas_dump, + dump_mergeinfo_contains_r0, + load_mergeinfo_contains_r0, load_non_deltas_copy_with_props, load_non_deltas_replace_copy_with_props, + dump_replace_with_copy, load_non_deltas_with_props, ] diff --git a/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.dump b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.dump Binary files differnew file mode 100644 index 0000000..2b7b50e --- /dev/null +++ b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.dump diff --git a/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.expected.dump b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.expected.dump Binary files differnew file mode 100644 index 0000000..6fdbf1c --- /dev/null +++ b/subversion/tests/cmdline/svnrdump_tests_data/mergeinfo-contains-r0.expected.dump diff --git a/subversion/tests/cmdline/svnserveautocheck.sh b/subversion/tests/cmdline/svnserveautocheck.sh index d537731..7c17784 100755 --- a/subversion/tests/cmdline/svnserveautocheck.sh +++ b/subversion/tests/cmdline/svnserveautocheck.sh @@ -98,6 +98,8 @@ else TIME_CMD="" fi +MAKE=${MAKE:-make} + SVNSERVE_PORT=$(random_port) while netstat -an | grep $SVNSERVE_PORT | grep 'LISTEN'; do SVNSERVE_PORT=$(random_port) @@ -119,7 +121,7 @@ fi BASE_URL=svn://127.0.0.1:$SVNSERVE_PORT if [ $# = 0 ]; then - $TIME_CMD make check "BASE_URL=$BASE_URL" + $TIME_CMD "$MAKE" check "BASE_URL=$BASE_URL" r=$? 
else cd "$ABS_BUILDDIR/subversion/tests/cmdline/" diff --git a/subversion/tests/cmdline/svnsync_authz_tests.py b/subversion/tests/cmdline/svnsync_authz_tests.py index 62e79c6..54dc099 100755 --- a/subversion/tests/cmdline/svnsync_authz_tests.py +++ b/subversion/tests/cmdline/svnsync_authz_tests.py @@ -30,17 +30,18 @@ import sys, os # Test suite-specific modules -import locale, re, urllib +import locale, re # Our testing module import svntest from svntest.verify import SVNUnexpectedStdout, SVNUnexpectedStderr from svntest.verify import SVNExpectedStderr from svntest.main import write_restrictive_svnserve_conf +from svntest.main import write_authz_file from svntest.main import server_has_partial_replay # Shared helpers -from svnsync_tests import build_repos, run_init, run_sync, run_test +from svnsync_tests import run_init, run_sync, run_test # (abbreviation) Skip = svntest.testcase.Skip_deco @@ -56,27 +57,25 @@ Item = svntest.wc.StateItem def basic_authz(sbox): "verify that unreadable content is not synced" - sbox.build("svnsync-basic-authz") + sbox.build(create_wc = False) write_restrictive_svnserve_conf(sbox.repo_dir) dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) run_init(dest_sbox.repo_url, sbox.repo_url) - args = tuple(s.authz_name() for s in [sbox, sbox, dest_sbox]) - svntest.main.file_write(sbox.authz_file, - "[%s:/]\n" - "* = r\n" - "\n" - "[%s:/A/B]\n" - "* = \n" - "\n" - "[%s:/]\n" - "* = rw\n" % args) + src_authz = sbox.authz_name() + dst_authz = dest_sbox.authz_name() + write_authz_file(sbox, None, + prefixed_rules = { + src_authz + ':/': '* = r', + src_authz + ':/A/B': '* =', + dst_authz + ':/': '* = rw', + }) run_sync(dest_sbox.repo_url) @@ -84,13 +83,11 @@ def basic_authz(sbox): iota_url = dest_sbox.repo_url + '/iota' # this file should have been blocked by authz - svntest.actions.run_and_verify_svn(None, - [], 
svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn([], svntest.verify.AnyOutput, 'cat', lambda_url) # this file should have been synced - svntest.actions.run_and_verify_svn(None, - svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'cat', iota_url) @@ -99,7 +96,7 @@ def basic_authz(sbox): def copy_from_unreadable_dir(sbox): "verify that copies from unreadable dirs work" - sbox.build("svnsync-copy-from-unreadable-dir") + sbox.build() B_url = sbox.repo_url + '/A/B' P_url = sbox.repo_url + '/A/P' @@ -107,7 +104,6 @@ def copy_from_unreadable_dir(sbox): # Set a property on the directory we're going to copy, and a file in it, to # confirm that they're transmitted when we later sync the copied directory svntest.actions.run_and_verify_svn(None, - None, [], 'pset', 'foo', @@ -115,7 +111,6 @@ def copy_from_unreadable_dir(sbox): sbox.wc_dir + '/A/B/lambda') svntest.actions.run_and_verify_svn(None, - None, [], 'pset', 'baz', @@ -123,7 +118,6 @@ def copy_from_unreadable_dir(sbox): sbox.wc_dir + '/A/B') svntest.actions.run_and_verify_svn(None, - None, [], 'ci', sbox.wc_dir + '/A/B', @@ -131,7 +125,6 @@ def copy_from_unreadable_dir(sbox): # Now copy that directory so we'll see it in our synced copy svntest.actions.run_and_verify_svn(None, - None, [], 'cp', B_url, @@ -141,21 +134,18 @@ def copy_from_unreadable_dir(sbox): write_restrictive_svnserve_conf(sbox.repo_dir) dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) - args = tuple(s.authz_name() for s in [sbox, sbox, dest_sbox]) - open(sbox.authz_file, 'w').write( - "[%s:/]\n" - "* = r\n" - "\n" - "[%s:/A/B]\n" - "* = \n" - "\n" - "[%s:/]\n" - "* = rw" - % args) + src_authz = sbox.authz_name() + dst_authz = dest_sbox.authz_name() + write_authz_file(sbox, None, + prefixed_rules = { + src_authz + ':/': '* = r', + src_authz + ':/A/B': '* =', + dst_authz + ':/': 
'* = rw', + }) run_init(dest_sbox.repo_url, sbox.repo_url) @@ -187,15 +177,13 @@ def copy_from_unreadable_dir(sbox): expected_out, out[2:11]) - svntest.actions.run_and_verify_svn(None, - ['bar\n'], + svntest.actions.run_and_verify_svn(['bar\n'], [], 'pget', 'foo', dest_sbox.repo_url + '/A/P/lambda') - svntest.actions.run_and_verify_svn(None, - ['zot\n'], + svntest.actions.run_and_verify_svn(['zot\n'], [], 'pget', 'baz', @@ -207,11 +195,10 @@ def copy_from_unreadable_dir(sbox): def copy_with_mod_from_unreadable_dir(sbox): "verify copies with mods from unreadable dirs" - sbox.build("svnsync-copy-with-mod-from-unreadable-dir") + sbox.build() # Make a copy of the B directory. svntest.actions.run_and_verify_svn(None, - None, [], 'cp', sbox.wc_dir + '/A/B', @@ -219,7 +206,6 @@ def copy_with_mod_from_unreadable_dir(sbox): # Set a property inside the copied directory. svntest.actions.run_and_verify_svn(None, - None, [], 'pset', 'foo', @@ -228,7 +214,6 @@ def copy_with_mod_from_unreadable_dir(sbox): # Add a new directory and file inside the copied directory. svntest.actions.run_and_verify_svn(None, - None, [], 'mkdir', sbox.wc_dir + '/A/P/NEW-DIR') @@ -238,14 +223,12 @@ def copy_with_mod_from_unreadable_dir(sbox): # Delete a file inside the copied directory. svntest.actions.run_and_verify_svn(None, - None, [], 'rm', sbox.wc_dir + '/A/P/E/beta') # Commit the copy-with-modification. 
svntest.actions.run_and_verify_svn(None, - None, [], 'ci', sbox.wc_dir, @@ -255,21 +238,18 @@ def copy_with_mod_from_unreadable_dir(sbox): write_restrictive_svnserve_conf(sbox.repo_dir) dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) - args = tuple(s.authz_name() for s in [sbox, sbox, dest_sbox]) - open(sbox.authz_file, 'w').write( - "[%s:/]\n" - "* = r\n" - "\n" - "[%s:/A/B]\n" - "* = \n" - "\n" - "[%s:/]\n" - "* = rw" - % args) + src_authz = sbox.authz_name() + dst_authz = dest_sbox.authz_name() + write_authz_file(sbox, None, + prefixed_rules = { + src_authz + ':/': '* = r', + src_authz + ':/A/B': '* =', + dst_authz + ':/': '* = rw', + }) run_init(dest_sbox.repo_url, sbox.repo_url) @@ -302,8 +282,7 @@ def copy_with_mod_from_unreadable_dir(sbox): expected_out, out[2:12]) - svntest.actions.run_and_verify_svn(None, - ['bar\n'], + svntest.actions.run_and_verify_svn(['bar\n'], [], 'pget', 'foo', @@ -315,11 +294,10 @@ def copy_with_mod_from_unreadable_dir(sbox): def copy_with_mod_from_unreadable_dir_and_copy(sbox): "verify copies with mods from unreadable dirs +copy" - sbox.build("svnsync-copy-with-mod-from-unreadable-dir-and-copy") + sbox.build() # Make a copy of the B directory. svntest.actions.run_and_verify_svn(None, - None, [], 'cp', sbox.wc_dir + '/A/B', @@ -328,7 +306,6 @@ def copy_with_mod_from_unreadable_dir_and_copy(sbox): # Copy a (readable) file into the copied directory. svntest.actions.run_and_verify_svn(None, - None, [], 'cp', sbox.wc_dir + '/A/D/gamma', @@ -337,7 +314,6 @@ def copy_with_mod_from_unreadable_dir_and_copy(sbox): # Commit the copy-with-modification. 
svntest.actions.run_and_verify_svn(None, - None, [], 'ci', sbox.wc_dir, @@ -347,21 +323,18 @@ def copy_with_mod_from_unreadable_dir_and_copy(sbox): write_restrictive_svnserve_conf(sbox.repo_dir) dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) - args = tuple(s.authz_name() for s in [sbox, sbox, dest_sbox]) - open(sbox.authz_file, 'w').write( - "[%s:/]\n" - "* = r\n" - "\n" - "[%s:/A/B]\n" - "* = \n" - "\n" - "[%s:/]\n" - "* = rw" - % args) + src_authz = sbox.authz_name() + dst_authz = dest_sbox.authz_name() + write_authz_file(sbox, None, + prefixed_rules = { + src_authz + ':/': '* = r', + src_authz + ':/A/B': '* =', + dst_authz + ':/': '* = rw', + }) run_init(dest_sbox.repo_url, sbox.repo_url) @@ -396,6 +369,9 @@ def copy_with_mod_from_unreadable_dir_and_copy(sbox): def identity_copy(sbox): "copy UTF-8 svn:* props identically" + + sbox.build(create_wc = False) + orig_lc_all = locale.setlocale(locale.LC_ALL) other_locales = [ "English.1252", "German.1252", "French.1252", "en_US.ISO-8859-1", "en_GB.ISO-8859-1", "de_DE.ISO-8859-1", @@ -420,10 +396,10 @@ def identity_copy(sbox): def specific_deny_authz(sbox): "verify if specifically denied paths dont sync" - sbox.build("specific-deny-authz") + sbox.build() dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) @@ -440,38 +416,31 @@ def specific_deny_authz(sbox): # For mod_dav_svn's parent path setup we need per-repos permissions in # the authz file... 
if sbox.repo_url.startswith('http'): - svntest.main.file_write(sbox.authz_file, - "[specific-deny-authz:/]\n" - "* = r\n" - "\n" - "[specific-deny-authz:/A]\n" - "* = \n" - "\n" - "[specific-deny-authz:/A_COPY/B/lambda]\n" - "* = \n" - "\n" - "[specific-deny-authz-1:/]\n" - "* = rw\n") + src_authz = sbox.authz_name() + dst_authz = dest_sbox.authz_name() + write_authz_file(sbox, None, + prefixed_rules = { + src_authz + ':/': '* = r', + src_authz + ':/A': '* =', + src_authz + ':/A_COPY/B/lambda': '* =', + dst_authz + ':/': '* = rw', + }) # Otherwise we can just go with the permissions needed for the source # repository. else: - svntest.main.file_write(sbox.authz_file, - "[/]\n" - "* = r\n" - "\n" - "[/A]\n" - "* = \n" - "\n" - "[/A_COPY/B/lambda]\n" - "* = \n") + write_authz_file(sbox, None, + prefixed_rules = { + '/': '* = r', + '/A': '* =', + '/A_COPY/B/lambda': '* =', + }) run_sync(dest_sbox.repo_url) lambda_url = dest_sbox.repo_url + '/A_COPY/B/lambda' # this file should have been blocked by authz - svntest.actions.run_and_verify_svn(None, - [], svntest.verify.AnyOutput, + svntest.actions.run_and_verify_svn([], svntest.verify.AnyOutput, 'cat', lambda_url) @@ -481,8 +450,8 @@ def copy_delete_unreadable_child(sbox): "copy, then rm at-src-unreadable child" # Prepare the source: Greek tree (r1), cp+rm (r2). - sbox.build("copy-delete-unreadable-child") - svntest.actions.run_and_verify_svnmucc(None, None, [], + sbox.build(create_wc = False) + svntest.actions.run_and_verify_svnmucc(None, [], '-m', 'r2', '-U', sbox.repo_url, 'cp', 'HEAD', '/', 'branch', @@ -490,27 +459,24 @@ def copy_delete_unreadable_child(sbox): # Create the destination. dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) # Lock down the source. 
- authz = sbox.authz_name() write_restrictive_svnserve_conf(sbox.repo_dir, anon_access='read') - svntest.main.file_write(sbox.authz_file, - "[%s:/]\n" - "* = r\n" - "[%s:/A]\n" - "* = \n" - % (authz, authz)) - - dest_url = svntest.main.file_scheme_prefix \ - + urllib.pathname2url(os.path.abspath(dest_sbox.repo_dir)) + src_authz = sbox.authz_name() + write_authz_file(sbox, None, + prefixed_rules = { + src_authz + ':/': '* = r', + src_authz + ':/A': '* =', + }) + + dest_url = dest_sbox.file_protocol_url() run_init(dest_url, sbox.repo_url) run_sync(dest_url) # sanity check - svntest.actions.run_and_verify_svn(None, - ["iota\n"], [], + svntest.actions.run_and_verify_svn(["iota\n"], [], 'ls', dest_url+'/branch@2') diff --git a/subversion/tests/cmdline/svnsync_tests.py b/subversion/tests/cmdline/svnsync_tests.py index 16594cb..c7821b2 100755 --- a/subversion/tests/cmdline/svnsync_tests.py +++ b/subversion/tests/cmdline/svnsync_tests.py @@ -28,12 +28,13 @@ import sys, os # Test suite-specific modules -import re, urllib +import re # Our testing module import svntest from svntest.verify import SVNUnexpectedStdout, SVNUnexpectedStderr from svntest.verify import SVNExpectedStderr +from svntest.verify import AnyOutput from svntest.main import server_has_partial_replay # (abbreviation) @@ -49,117 +50,57 @@ Item = svntest.wc.StateItem # Helper routines -def build_repos(sbox): - """Avoid the use sbox.build() because we're working with a repos - other than the Greek tree.""" - # Cleanup after the last run by removing any left-over repository. - svntest.main.safe_rmtree(sbox.repo_dir) - - # Create an empty repository. 
- svntest.main.create_repos(sbox.repo_dir) - - -def run_sync(url, source_url=None, expected_error=None, - source_prop_encoding=None): +def run_sync(url, source_url=None, + source_prop_encoding=None, + expected_output=AnyOutput, expected_error=[]): "Synchronize the mirror repository with the master" if source_url is not None: - args = ["synchronize", url, source_url, - "--username", svntest.main.wc_author, - "--password", svntest.main.wc_passwd] + args = ["synchronize", url, source_url] else: # Allow testing of old source-URL-less syntax - args = ["synchronize", url, - "--username", svntest.main.wc_author, - "--password", svntest.main.wc_passwd] + args = ["synchronize", url] if source_prop_encoding: args.append("--source-prop-encoding") args.append(source_prop_encoding) - exit_code, output, errput = svntest.main.run_svnsync(*args) - for index, line in enumerate(errput[:]): - if re.search("warning: W200007", line): - del errput[index] - if errput: - if expected_error is None: - raise SVNUnexpectedStderr(errput) - else: - expected_error = svntest.verify.RegexOutput(expected_error, - match_all=False) - svntest.verify.compare_and_display_lines(None, "STDERR", - expected_error, errput) - elif expected_error is not None: - raise SVNExpectedStderr - if not output and not expected_error: - # should be: ['Committed revision 1.\n', 'Committed revision 2.\n'] - raise SVNUnexpectedStdout("Missing stdout") - -def run_copy_revprops(url, source_url, expected_error=None, - source_prop_encoding=None): + # Normal expected output is of the form: + # ['Transmitting file data .......\n', # optional + # 'Committed revision 1.\n', + # 'Copied properties for revision 1.\n', ...] 
+ svntest.actions.run_and_verify_svnsync(expected_output, expected_error, + *args) + +def run_copy_revprops(url, source_url, + source_prop_encoding=None, + expected_output=AnyOutput, expected_error=[]): "Copy revprops to the mirror repository from the master" - args = ["copy-revprops", url, source_url, - "--username", svntest.main.wc_author, - "--password", svntest.main.wc_passwd] + args = ["copy-revprops", url, source_url] if source_prop_encoding: args.append("--source-prop-encoding") args.append(source_prop_encoding) - exit_code, output, errput = svntest.main.run_svnsync(*args) - for index, line in enumerate(errput[:]): - if re.search("warning: W200007", line): - del errput[index] - if errput: - if expected_error is None: - raise SVNUnexpectedStderr(errput) - else: - expected_error = svntest.verify.RegexOutput(expected_error, - match_all=False) - svntest.verify.compare_and_display_lines(None, "STDERR", - expected_error, errput) - elif expected_error is not None: - raise SVNExpectedStderr - if not output and not expected_error: - # should be: ['Copied properties for revision 1.\n', - # 'Copied properties for revision 2.\n'] - raise SVNUnexpectedStdout("Missing stdout") + # Normal expected output is of the form: + # ['Copied properties for revision 1.\n', ...] 
+ svntest.actions.run_and_verify_svnsync(expected_output, expected_error, + *args) def run_init(dst_url, src_url, source_prop_encoding=None): "Initialize the mirror repository from the master" - args = ["initialize", dst_url, src_url, - "--username", svntest.main.wc_author, - "--password", svntest.main.wc_passwd] + args = ["initialize", dst_url, src_url] if source_prop_encoding: args.append("--source-prop-encoding") args.append(source_prop_encoding) - exit_code, output, errput = svntest.main.run_svnsync(*args) - for index, line in enumerate(errput[:]): - if re.search("warning: W200007", line): - del errput[index] - if errput: - raise SVNUnexpectedStderr(errput) - if output != ['Copied properties for revision 0.\n']: - raise SVNUnexpectedStdout(output) + expected_output = ['Copied properties for revision 0.\n'] + svntest.actions.run_and_verify_svnsync(expected_output, [], *args) -def run_info(url, expected_error=None): +def run_info(url, expected_output=AnyOutput, expected_error=[]): "Print synchronization information of the repository" - exit_code, output, errput = svntest.main.run_svnsync( - "info", url, - "--username", svntest.main.wc_author, - "--password", svntest.main.wc_passwd) - if errput: - if expected_error is None: - raise SVNUnexpectedStderr(errput) - else: - expected_error = svntest.verify.RegexOutput(expected_error, - match_all=False) - svntest.verify.compare_and_display_lines(None, "STDERR", - expected_error, errput) - elif expected_error is not None: - raise SVNExpectedStderr - if not output and not expected_error: - # should be: ['From URL: http://....\n', - # 'From UUID: XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n', - # 'Last Merged Revision: XXX\n'] - raise SVNUnexpectedStdout("Missing stdout") + # Normal expected output is of the form: + # ['From URL: http://....\n', + # 'From UUID: XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n', + # 'Last Merged Revision: XXX\n'] + svntest.actions.run_and_verify_svnsync(expected_output, expected_error, + "info", url) def 
setup_and_sync(sbox, dump_file_contents, subdir=None, @@ -168,7 +109,7 @@ def setup_and_sync(sbox, dump_file_contents, subdir=None, """Create a repository for SBOX, load it with DUMP_FILE_CONTENTS, then create a mirror repository and sync it with SBOX. If is_src_ra_local or is_dest_ra_local is True, then run_init, run_sync, and run_copy_revprops will use the file:// scheme for the source and destination URLs. Return the mirror sandbox.""" # Create the empty master repository. - build_repos(sbox) + sbox.build(create_wc=False, empty=True) # Load the repository from DUMP_FILE_PATH. svntest.actions.run_and_verify_load(sbox.repo_dir, dump_file_contents, @@ -176,11 +117,11 @@ def setup_and_sync(sbox, dump_file_contents, subdir=None, # Create the empty destination repository. dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) # Setup the mirror repository. Feed it the UUID of the source repository. exit_code, output, errput = svntest.main.run_svnlook("uuid", sbox.repo_dir) - svntest.actions.run_and_verify_svnadmin2("Setting UUID", None, None, 0, + svntest.actions.run_and_verify_svnadmin2(None, None, 0, 'setuuid', dest_sbox.repo_dir, output[0][:-1]) @@ -190,16 +131,14 @@ def setup_and_sync(sbox, dump_file_contents, subdir=None, repo_url = sbox.repo_url cwd = os.getcwd() if is_src_ra_local: - repo_url = svntest.main.file_scheme_prefix + \ - urllib.pathname2url(os.path.join(cwd, sbox.repo_dir)) + repo_url = sbox.file_protocol_url() if subdir: repo_url = repo_url + subdir dest_repo_url = dest_sbox.repo_url if is_dest_ra_local: - dest_repo_url = svntest.main.file_scheme_prefix + \ - urllib.pathname2url(os.path.join(cwd, dest_sbox.repo_dir)) + dest_repo_url = dest_sbox.file_protocol_url() run_init(dest_repo_url, repo_url, source_prop_encoding) run_sync(dest_repo_url, repo_url, @@ -221,7 +160,7 @@ def verify_mirror(dest_sbox, exp_dump_file_contents): for prop_name in ("svn:sync-from-url", "svn:sync-from-uuid", 
"svn:sync-last-merged-rev"): svntest.actions.run_and_verify_svn( - None, None, [], "propdel", "--revprop", "-r", "0", + None, [], "propdel", "--revprop", "-r", "0", prop_name, dest_sbox.repo_url) # Create a dump file from the mirror repository. @@ -346,14 +285,13 @@ def detect_meddling(sbox): sbox.build("svnsync-meddling") dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) # Make our own destination checkout (have to do it ourself because # it is not greek). svntest.main.safe_rmtree(dest_sbox.wc_dir) svntest.actions.run_and_verify_svn(None, - None, [], 'co', dest_sbox.repo_url, @@ -365,7 +303,6 @@ def detect_meddling(sbox): run_sync(dest_sbox.repo_url) svntest.actions.run_and_verify_svn(None, - None, [], 'up', dest_sbox.wc_dir) @@ -374,14 +311,14 @@ def detect_meddling(sbox): svntest.main.file_append(os.path.join(dest_sbox.wc_dir, 'A', 'B', 'lambda'), 'new lambda text') svntest.actions.run_and_verify_svn(None, - None, [], 'ci', '-m', 'msg', dest_sbox.wc_dir) + expected_error = r".*Destination HEAD \(2\) is not the last merged revision \(1\).*" run_sync(dest_sbox.repo_url, None, - ".*Destination HEAD \\(2\\) is not the last merged revision \\(1\\).*") + expected_output=[], expected_error=expected_error) def url_encoding(sbox): "test url encoding issues" @@ -427,28 +364,18 @@ def info_synchronized(sbox): src_uuid = output[0].strip() dest_sbox = sbox.clone_dependent() - build_repos(dest_sbox) + dest_sbox.build(create_wc=False, empty=True) svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) run_init(dest_sbox.repo_url, sbox.repo_url) run_sync(dest_sbox.repo_url) - exit_code, output, errput = svntest.main.run_svnsync( - "info", dest_sbox.repo_url, - "--username", svntest.main.wc_author, - "--password", svntest.main.wc_passwd) - if errput: - raise SVNUnexpectedStderr(errput) - expected_out = ['Source URL: %s\n' % sbox.repo_url, 'Source Repository UUID: %s\n' % src_uuid, 'Last Merged Revision: 1\n', ] - - 
svntest.verify.compare_and_display_lines(None, - 'INFO', - expected_out, - output) + svntest.actions.run_and_verify_svnsync(expected_out, [], + "info", dest_sbox.repo_url) def info_not_synchronized(sbox): "test info cmd on an un-synchronized repo" @@ -456,7 +383,7 @@ def info_not_synchronized(sbox): sbox.build("svnsync-info-not-syncd", False) run_info(sbox.repo_url, - ".*Repository '%s' is not initialized.*" % sbox.repo_url) + [], ".*Repository '%s' is not initialized.*" % sbox.repo_url) #---------------------------------------------------------------------- diff --git a/subversion/tests/cmdline/svntest/__init__.py b/subversion/tests/cmdline/svntest/__init__.py index 98a67b6..e090685 100644 --- a/subversion/tests/cmdline/svntest/__init__.py +++ b/subversion/tests/cmdline/svntest/__init__.py @@ -23,11 +23,11 @@ __all__ = [ ] import sys -if sys.hexversion < 0x2050000: - sys.stderr.write('[SKIPPED] at least Python 2.5 is required\n') +if sys.hexversion < 0x2070000: + sys.stderr.write('[SKIPPED] at least Python 2.7 is required\n') # note: exiting is a bit harsh for a library module, but we really do - # require Python 2.5. this package isn't going to work otherwise. + # require Python 2.7. this package isn't going to work otherwise. # we're skipping this test, not failing, so exit with 0 sys.exit(0) diff --git a/subversion/tests/cmdline/svntest/actions.py b/subversion/tests/cmdline/svntest/actions.py index feef017..a25928d 100644 --- a/subversion/tests/cmdline/svntest/actions.py +++ b/subversion/tests/cmdline/svntest/actions.py @@ -76,46 +76,49 @@ def setup_pristine_greek_repository(): # If there's no pristine repos, create one. if not os.path.exists(main.pristine_greek_repos_dir): - main.create_repos(main.pristine_greek_repos_dir) + if main.options.fsfs_version is not None: + main.unpack_greek_repos(main.pristine_greek_repos_dir) + else: + main.create_repos(main.pristine_greek_repos_dir) - # if this is dav, gives us access rights to import the greek tree. 
- if main.is_ra_type_dav(): - authz_file = os.path.join(main.work_dir, "authz") - main.file_write(authz_file, "[/]\n* = rw\n") + # if this is dav, gives us access rights to import the greek tree. + if main.is_ra_type_dav(): + authz_file = os.path.join(main.work_dir, "authz") + main.file_write(authz_file, "[/]\n* = rw\n") - # dump the greek tree to disk. - main.greek_state.write_to_disk(main.greek_dump_dir) + # dump the greek tree to disk. + main.greek_state.write_to_disk(main.greek_dump_dir) - # import the greek tree, using l:foo/p:bar - ### todo: svn should not be prompting for auth info when using - ### repositories with no auth/auth requirements - _, output, _ = main.run_svn(None, 'import', '-m', - 'Log message for revision 1.', - main.greek_dump_dir, - main.pristine_greek_repos_url) + # import the greek tree, using l:foo/p:bar + ### todo: svn should not be prompting for auth info when using + ### repositories with no auth/auth requirements + _, output, _ = main.run_svn(None, 'import', '-m', + 'Log message for revision 1.', + main.greek_dump_dir, + main.pristine_greek_repos_url) - # verify the printed output of 'svn import'. - lastline = output.pop().strip() - match = re.search("(Committed|Imported) revision [0-9]+.", lastline) - if not match: - logger.error("import did not succeed, while creating greek repos.") - logger.error("The final line from 'svn import' was:") - logger.error(lastline) - sys.exit(1) - output_tree = wc.State.from_commit(output) - - expected_output_tree = main.greek_state.copy(main.greek_dump_dir) - expected_output_tree.tweak(verb='Adding', - contents=None) + # verify the printed output of 'svn import'. 
+ lastline = output.pop().strip() + match = re.search("(Committed|Imported) revision [0-9]+.", lastline) + if not match: + logger.error("import did not succeed, while creating greek repos.") + logger.error("The final line from 'svn import' was:") + logger.error(lastline) + sys.exit(1) + output_tree = wc.State.from_commit(output) + + expected_output_tree = main.greek_state.copy(main.greek_dump_dir) + expected_output_tree.tweak(verb='Adding', + contents=None) - try: - expected_output_tree.compare_and_display('output', output_tree) - except tree.SVNTreeUnequal: - verify.display_trees("ERROR: output of import command is unexpected.", - "OUTPUT TREE", - expected_output_tree.old_tree(), - output_tree.old_tree()) - sys.exit(1) + try: + expected_output_tree.compare_and_display('output', output_tree) + except tree.SVNTreeUnequal: + verify.display_trees("ERROR: output of import command is unexpected.", + "OUTPUT TREE", + expected_output_tree.old_tree(), + output_tree.old_tree()) + sys.exit(1) # Finally, disallow any changes to the "pristine" repos. error_msg = "Don't modify the pristine repository" @@ -126,7 +129,7 @@ def setup_pristine_greek_repository(): ###################################################################### -def guarantee_empty_repository(path): +def guarantee_empty_repository(path, minor_version): """Guarantee that a local svn repository exists at PATH, containing nothing.""" @@ -136,7 +139,7 @@ def guarantee_empty_repository(path): # create an empty repository at PATH. main.safe_rmtree(path) - main.create_repos(path) + main.create_repos(path, minor_version) # Used by every test, so that they can run independently of one # another. Every time this routine is called, it recursively copies @@ -153,16 +156,22 @@ def guarantee_greek_repository(path, minor_version): # copy the pristine repository to PATH. 
main.safe_rmtree(path) - if main.copy_repos(main.pristine_greek_repos_dir, path, 1, 1, minor_version): + if (main.options.fsfs_version is not None): + failed = main.unpack_greek_repos(path) + else: + failed = main.copy_repos(main.pristine_greek_repos_dir, + path, 1, 1, minor_version) + if failed: logger.error("copying repository failed.") sys.exit(1) # make the repos world-writeable, for mod_dav_svn's sake. main.chmod_tree(path, 0666, 0666) + # give the repository a unique UUID + run_and_verify_svnadmin([], [], 'setuuid', path) -def run_and_verify_atomic_ra_revprop_change(message, - expected_stdout, +def run_and_verify_atomic_ra_revprop_change(expected_stdout, expected_stderr, expected_exit, url, revision, propname, @@ -192,11 +201,11 @@ def run_and_verify_atomic_ra_revprop_change(message, want_error) verify.verify_outputs("Unexpected output", out, err, expected_stdout, expected_stderr) - verify.verify_exit_code(message, exit_code, expected_exit) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) return exit_code, out, err -def run_and_verify_svnlook(message, expected_stdout, +def run_and_verify_svnlook(expected_stdout, expected_stderr, *varargs): """Like run_and_verify_svnlook2, but the expected exit code is assumed to be 0 if no output is expected on stderr, and 1 otherwise.""" @@ -204,21 +213,21 @@ def run_and_verify_svnlook(message, expected_stdout, expected_exit = 0 if expected_stderr is not None and expected_stderr != []: expected_exit = 1 - return run_and_verify_svnlook2(message, expected_stdout, expected_stderr, + return run_and_verify_svnlook2(expected_stdout, expected_stderr, expected_exit, *varargs) -def run_and_verify_svnlook2(message, expected_stdout, expected_stderr, +def run_and_verify_svnlook2(expected_stdout, expected_stderr, expected_exit, *varargs): """Run svnlook command and check its output and exit code.""" exit_code, out, err = main.run_svnlook(*varargs) verify.verify_outputs("Unexpected output", out, err, 
expected_stdout, expected_stderr) - verify.verify_exit_code(message, exit_code, expected_exit) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) return exit_code, out, err -def run_and_verify_svnadmin(message, expected_stdout, +def run_and_verify_svnadmin(expected_stdout, expected_stderr, *varargs): """Like run_and_verify_svnadmin2, but the expected exit code is assumed to be 0 if no output is expected on stderr, and 1 otherwise.""" @@ -226,21 +235,43 @@ def run_and_verify_svnadmin(message, expected_stdout, expected_exit = 0 if expected_stderr is not None and expected_stderr != []: expected_exit = 1 - return run_and_verify_svnadmin2(message, expected_stdout, expected_stderr, + return run_and_verify_svnadmin2(expected_stdout, expected_stderr, expected_exit, *varargs) -def run_and_verify_svnadmin2(message, expected_stdout, expected_stderr, +def run_and_verify_svnadmin2(expected_stdout, expected_stderr, expected_exit, *varargs): """Run svnadmin command and check its output and exit code.""" exit_code, out, err = main.run_svnadmin(*varargs) verify.verify_outputs("Unexpected output", out, err, expected_stdout, expected_stderr) - verify.verify_exit_code(message, exit_code, expected_exit) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) + return exit_code, out, err + + +def run_and_verify_svnfsfs(expected_stdout, + expected_stderr, *varargs): + """Like run_and_verify_svnfsfs2, but the expected exit code is + assumed to be 0 if no output is expected on stderr, and 1 otherwise.""" + + expected_exit = 0 + if expected_stderr is not None and expected_stderr != []: + expected_exit = 1 + return run_and_verify_svnfsfs2(expected_stdout, expected_stderr, + expected_exit, *varargs) + +def run_and_verify_svnfsfs2(expected_stdout, expected_stderr, + expected_exit, *varargs): + """Run svnfsfs command and check its output and exit code.""" + + exit_code, out, err = main.run_svnfsfs(*varargs) + verify.verify_outputs("Unexpected 
output", out, err, + expected_stdout, expected_stderr) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) return exit_code, out, err -def run_and_verify_svnversion(message, wc_dir, trail_url, +def run_and_verify_svnversion(wc_dir, trail_url, expected_stdout, expected_stderr, *varargs): """like run_and_verify_svnversion2, but the expected exit code is assumed to be 0 if no output is expected on stderr, and 1 otherwise.""" @@ -248,11 +279,11 @@ def run_and_verify_svnversion(message, wc_dir, trail_url, expected_exit = 0 if expected_stderr is not None and expected_stderr != []: expected_exit = 1 - return run_and_verify_svnversion2(message, wc_dir, trail_url, + return run_and_verify_svnversion2(wc_dir, trail_url, expected_stdout, expected_stderr, expected_exit, *varargs) -def run_and_verify_svnversion2(message, wc_dir, trail_url, +def run_and_verify_svnversion2(wc_dir, trail_url, expected_stdout, expected_stderr, expected_exit, *varargs): """Run svnversion command and check its output and exit code.""" @@ -264,10 +295,10 @@ def run_and_verify_svnversion2(message, wc_dir, trail_url, verify.verify_outputs("Unexpected output", out, err, expected_stdout, expected_stderr) - verify.verify_exit_code(message, exit_code, expected_exit) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) return exit_code, out, err -def run_and_verify_svn(message, expected_stdout, expected_stderr, *varargs): +def run_and_verify_svn(expected_stdout, expected_stderr, *varargs): """like run_and_verify_svn2, but the expected exit code is assumed to be 0 if no output is expected on stderr, and 1 otherwise.""" @@ -278,10 +309,10 @@ def run_and_verify_svn(message, expected_stdout, expected_stderr, *varargs): expected_exit = 1 elif expected_stderr != []: expected_exit = 1 - return run_and_verify_svn2(message, expected_stdout, expected_stderr, + return run_and_verify_svn2(expected_stdout, expected_stderr, expected_exit, *varargs) -def 
run_and_verify_svn2(message, expected_stdout, expected_stderr, +def run_and_verify_svn2(expected_stdout, expected_stderr, expected_exit, *varargs): """Invoke main.run_svn() with *VARARGS. Return exit code as int; stdout, stderr as lists of lines (including line terminators). For both @@ -296,7 +327,7 @@ def run_and_verify_svn2(message, expected_stdout, expected_stderr, - If it is already an instance of ExpectedOutput (e.g. UnorderedOutput), leave it alone. - ...and invoke compare_and_display_lines() on MESSAGE, a label based + ...and invoke compare_and_display_lines() on a label based on the name of the stream being compared (e.g. STDOUT), the ExpectedOutput instance, and the actual output. @@ -318,8 +349,9 @@ def run_and_verify_svn2(message, expected_stdout, expected_stderr, want_err = True exit_code, out, err = main.run_svn(want_err, *varargs) - verify.verify_outputs(message, out, err, expected_stdout, expected_stderr) - verify.verify_exit_code(message, exit_code, expected_exit) + verify.verify_outputs("Unexpected output", out, err, + expected_stdout, expected_stderr) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) return exit_code, out, err def run_and_verify_load(repo_dir, dump_file_content, @@ -328,28 +360,22 @@ def run_and_verify_load(repo_dir, dump_file_content, if not isinstance(dump_file_content, list): raise TypeError("dump_file_content argument should have list type") expected_stderr = [] + args = () if bypass_prop_validation: - exit_code, output, errput = main.run_command_stdin( - main.svnadmin_binary, expected_stderr, 0, True, dump_file_content, - 'load', '--force-uuid', '--quiet', '--bypass-prop-validation', repo_dir) - else: - exit_code, output, errput = main.run_command_stdin( - main.svnadmin_binary, expected_stderr, 0, True, dump_file_content, - 'load', '--force-uuid', '--quiet', repo_dir) - - verify.verify_outputs("Unexpected stderr output", None, errput, - None, expected_stderr) + args += 
('--bypass-prop-validation',) + main.run_command_stdin( + main.svnadmin_binary, expected_stderr, 0, True, dump_file_content, + 'load', '--force-uuid', '--quiet', repo_dir, *args) def run_and_verify_dump(repo_dir, deltas=False): "Runs 'svnadmin dump' and reports any errors, returning the dump content." + args = () if deltas: - exit_code, output, errput = main.run_svnadmin('dump', '--deltas', - repo_dir) - else: - exit_code, output, errput = main.run_svnadmin('dump', repo_dir) - verify.verify_outputs("Missing expected output(s)", output, errput, - verify.AnyOutput, verify.AnyOutput) + args += ('--deltas',) + exit_code, output, errput = run_and_verify_svnadmin( + verify.AnyOutput, [], + 'dump', '--quiet', repo_dir, *args) return output @@ -364,6 +390,8 @@ def run_and_verify_svnrdump(dumpfile_content, expected_stdout, if sys.platform == 'win32': err = map(lambda x : x.replace('\r\n', '\n'), err) + # Ignore "consider upgrade" warnings to allow regression tests to pass + # when run against a 1.6 mod_dav_svn. 
for index, line in enumerate(err[:]): if re.search("warning: W200007", line): del err[index] @@ -374,29 +402,57 @@ def run_and_verify_svnrdump(dumpfile_content, expected_stdout, return output -def run_and_verify_svnmucc(message, expected_stdout, expected_stderr, +def run_and_verify_svnmucc(expected_stdout, expected_stderr, *varargs): """Run svnmucc command and check its output""" expected_exit = 0 if expected_stderr is not None and expected_stderr != []: expected_exit = 1 - return run_and_verify_svnmucc2(message, expected_stdout, expected_stderr, + return run_and_verify_svnmucc2(expected_stdout, expected_stderr, expected_exit, *varargs) -def run_and_verify_svnmucc2(message, expected_stdout, expected_stderr, +def run_and_verify_svnmucc2(expected_stdout, expected_stderr, expected_exit, *varargs): """Run svnmucc command and check its output and exit code.""" exit_code, out, err = main.run_svnmucc(*varargs) verify.verify_outputs("Unexpected output", out, err, expected_stdout, expected_stderr) - verify.verify_exit_code(message, exit_code, expected_exit) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) + return exit_code, out, err + + +def run_and_verify_svnsync(expected_stdout, expected_stderr, + *varargs): + """Run svnsync command and check its output""" + + expected_exit = 0 + if expected_stderr is not None and expected_stderr != []: + expected_exit = 1 + return run_and_verify_svnsync2(expected_stdout, expected_stderr, + expected_exit, *varargs) + +def run_and_verify_svnsync2(expected_stdout, expected_stderr, + expected_exit, *varargs): + """Run svnmucc command and check its output and exit code.""" + + exit_code, out, err = main.run_svnsync(*varargs) + + # Ignore "consider upgrade" warnings to allow regression tests to pass + # when run against a 1.6 mod_dav_svn. 
+ for index, line in enumerate(err[:]): + if re.search("warning: W200007", line): + del err[index] + + verify.verify_outputs("Unexpected output", out, err, + expected_stdout, expected_stderr) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) return exit_code, out, err def load_repo(sbox, dumpfile_path = None, dump_str = None, - bypass_prop_validation = False): + bypass_prop_validation = False,create_wc=True): "Loads the dumpfile into sbox" if not dump_str: dump_str = open(dumpfile_path, "rb").read() @@ -409,7 +465,8 @@ def load_repo(sbox, dumpfile_path = None, dump_str = None, # Load the mergetracking dumpfile into the repos, and check it out the repo run_and_verify_load(sbox.repo_dir, dump_str.splitlines(True), bypass_prop_validation) - run_and_verify_svn(None, None, [], "co", sbox.repo_url, sbox.wc_dir) + if create_wc: + run_and_verify_svn(None, [], "co", sbox.repo_url, sbox.wc_dir) return dump_str @@ -421,7 +478,7 @@ def expected_noop_update_output(rev): % (rev), "no-op update") -def run_and_verify_svnauthz(message, expected_stdout, expected_stderr, +def run_and_verify_svnauthz(expected_stdout, expected_stderr, expected_exit, compat_mode, *varargs): """Run svnauthz command and check its output and exit code. 
If COMPAT_MODE is True then run the command in pre-1.8 @@ -434,7 +491,7 @@ def run_and_verify_svnauthz(message, expected_stdout, expected_stderr, verify.verify_outputs("Unexpected output", out, err, expected_stdout, expected_stderr) - verify.verify_exit_code(message, exit_code, expected_exit) + verify.verify_exit_code("Unexpected return code", exit_code, expected_exit) return exit_code, out, err ###################################################################### @@ -446,13 +503,9 @@ def run_and_verify_svnauthz(message, expected_stdout, expected_stderr, # -def run_and_verify_checkout2(do_remove, - URL, wc_dir_name, output_tree, disk_tree, - singleton_handler_a = None, - a_baton = None, - singleton_handler_b = None, - b_baton = None, - *args): +def run_and_verify_checkout(URL, wc_dir_name, output_tree, disk_tree, + expected_stderr=[], + *args, **kw): """Checkout the URL into a new directory WC_DIR_NAME. *ARGS are any extra optional args to the checkout subcommand. @@ -469,17 +522,12 @@ def run_and_verify_checkout2(do_remove, if isinstance(output_tree, wc.State): output_tree = output_tree.old_tree() - # Remove dir if it's already there, unless this is a forced checkout. - # In that case assume we want to test a forced checkout's toleration - # of obstructing paths. - if do_remove: - main.safe_rmtree(wc_dir_name) - # Checkout and make a tree of the output, using l:foo/p:bar ### todo: svn should not be prompting for auth info when using ### repositories with no auth/auth requirements - exit_code, output, errput = main.run_svn(None, 'co', - URL, wc_dir_name, *args) + exit_code, output, errput = run_and_verify_svn(None, expected_stderr, + 'co', URL, wc_dir_name, + *args) actual = tree.build_tree_from_checkout(output) # Verify actual output against expected output. 
@@ -490,32 +538,7 @@ def run_and_verify_checkout2(do_remove, raise if disk_tree: - verify_disk(wc_dir_name, disk_tree, False, - singleton_handler_a, a_baton, - singleton_handler_b, b_baton) - -def run_and_verify_checkout(URL, wc_dir_name, output_tree, disk_tree, - singleton_handler_a = None, - a_baton = None, - singleton_handler_b = None, - b_baton = None, - *args): - """Same as run_and_verify_checkout2(), but without the DO_REMOVE arg. - WC_DIR_NAME is deleted if present unless the '--force' option is passed - in *ARGS.""" - - - # Remove dir if it's already there, unless this is a forced checkout. - # In that case assume we want to test a forced checkout's toleration - # of obstructing paths. - return run_and_verify_checkout2(('--force' not in args), - URL, wc_dir_name, output_tree, disk_tree, - singleton_handler_a, - a_baton, - singleton_handler_b, - b_baton, - *args) - + verify_disk(wc_dir_name, disk_tree, False, **kw) def run_and_verify_export(URL, export_dir_name, output_tree, disk_tree, *args): @@ -561,8 +584,13 @@ def run_and_verify_export(URL, export_dir_name, output_tree, disk_tree, # run_and_verify_log_xml class LogEntry: - def __init__(self, revision, changed_paths=None, revprops=None): + def __init__(self, revision, attributes=None, + changed_paths=None, revprops=None): self.revision = revision + if attributes == None: + self.attributes = {} + else: + self.attributes = attributes if changed_paths == None: self.changed_paths = {} else: @@ -572,6 +600,15 @@ class LogEntry: else: self.revprops = revprops + def assert_log_attrs(self, attributes): + """Assert that attributes is the same as this entry's attributes + Raises svntest.Failure if not. 
+ """ + if self.attributes != attributes: + raise Failure('\n' + '\n'.join(difflib.ndiff( + pprint.pformat(attributes).splitlines(), + pprint.pformat(self.attributes).splitlines()))) + def assert_changed_paths(self, changed_paths): """Assert that changed_paths is the same as this entry's changed_paths Raises svntest.Failure if not. @@ -649,7 +686,7 @@ class LogParser: # element handlers def logentry_start(self, attrs): - self.entries.append(LogEntry(int(attrs['revision']))) + self.entries.append(LogEntry(int(attrs['revision']), attrs)) def author_end(self): self.svn_prop('author') def msg_end(self): @@ -669,16 +706,19 @@ class LogParser: self.entries[-1].changed_paths[self.use_cdata()] = [{'kind': self.kind, 'action': self.action}] -def run_and_verify_log_xml(message=None, expected_paths=None, - expected_revprops=None, expected_stdout=None, - expected_stderr=None, args=[]): +def run_and_verify_log_xml(expected_log_attrs=None, + expected_paths=None, expected_revprops=None, + expected_stdout=None, expected_stderr=None, + args=[]): """Call run_and_verify_svn with log --xml and args (optional) as command - arguments, and pass along message, expected_stdout, and expected_stderr. - - If message is None, pass the svn log command as message. + arguments, and pass along expected_stdout, and expected_stderr. expected_paths checking is not yet implemented. + expected_log_attrs is an optional list of dicts, compared to each revisions's + logentry attributes. The list must be in the same order the log entries + come in. + expected_revprops is an optional list of dicts, compared to each revision's revprops. The list must be in the same order the log entries come in. Any svn:date revprops in the dicts must be '' in order to @@ -687,8 +727,7 @@ def run_and_verify_log_xml(message=None, expected_paths=None, expected_paths and expected_revprops are ignored if expected_stdout or expected_stderr is specified. 
""" - if message == None: - message = ' '.join(args) + message = ' '.join(args) # We'll parse the output unless the caller specifies expected_stderr or # expected_stdout for run_and_verify_svn. @@ -705,7 +744,7 @@ def run_and_verify_log_xml(message=None, expected_paths=None, log_args.append('-v') (exit_code, stdout, stderr) = run_and_verify_svn( - message, expected_stdout, expected_stderr, + expected_stdout, expected_stderr, 'log', '--xml', *log_args) if not parse: return @@ -717,6 +756,8 @@ def run_and_verify_log_xml(message=None, expected_paths=None, entry.assert_revprops(expected_revprops[index]) if expected_paths != None: entry.assert_changed_paths(expected_paths[index]) + if expected_log_attrs != None: + entry.assert_log_attrs(expected_log_attrs[index]) def verify_update(actual_output, @@ -728,11 +769,8 @@ def verify_update(actual_output, elision_output_tree, disk_tree, status_tree, - singleton_handler_a=None, - a_baton=None, - singleton_handler_b=None, - b_baton=None, - check_props=False): + check_props=False, + extra_files=None): """Verify update of WC_DIR_NAME. The subcommand output (found in ACTUAL_OUTPUT, ACTUAL_MERGEINFO_OUTPUT, @@ -795,8 +833,7 @@ def verify_update(actual_output, # Create a tree by scanning the working copy, and verify it if disk_tree: verify_disk(wc_dir_name, disk_tree, check_props, - singleton_handler_a, a_baton, - singleton_handler_b, b_baton) + extra_files=extra_files) # Verify via 'status' command too, if possible. if status_tree: @@ -804,12 +841,22 @@ def verify_update(actual_output, def verify_disk(wc_dir_name, disk_tree, check_props=False, - singleton_handler_a = None, a_baton = None, - singleton_handler_b = None, b_baton = None): + extra_files=None): """Verify WC_DIR_NAME against DISK_TREE. If CHECK_PROPS is set, the comparison will examin props. 
Returns if successful, raises on failure.""" + singleton_handler_a = None + a_baton = None, + singleton_handler_b = None + b_baton = None + done_a = None + + if extra_files: + singleton_handler_a = svntest.tree.detect_conflict_files + done_a = svntest.tree.detect_conflict_files_done + a_baton = extra_files + if isinstance(disk_tree, wc.State): disk_tree = disk_tree.old_tree() @@ -823,18 +870,15 @@ def verify_disk(wc_dir_name, disk_tree, check_props=False, _log_tree_state("ACTUAL DISK TREE:", actual_disk) raise - + if done_a: + done_a(a_baton) def run_and_verify_update(wc_dir_name, output_tree, disk_tree, status_tree, - error_re_string = None, - singleton_handler_a = None, - a_baton = None, - singleton_handler_b = None, - b_baton = None, + expected_stderr=[], check_props = False, - *args): + *args, **kw): """Update WC_DIR_NAME. *ARGS are any extra optional args to the update subcommand. NOTE: If *ARGS is specified at all, explicit @@ -852,38 +896,20 @@ def run_and_verify_update(wc_dir_name, None, the 'svn status' output will be verified against STATUS_TREE. (This is a good way to check that revision numbers were bumped.) - For the DISK_TREE verification, SINGLETON_HANDLER_A and - SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that - function's doc string for more details. - If CHECK_PROPS is set, then disk comparison will examine props. Return if successful, raise on failure.""" # Update and make a tree of the output. 
- if len(args): - exit_code, output, errput = main.run_svn(error_re_string, 'up', *args) - else: - exit_code, output, errput = main.run_svn(error_re_string, - 'up', wc_dir_name, - *args) + if len(args) == 0: + args = (wc_dir_name,) - if error_re_string: - rm = re.compile(error_re_string) - match = None - for line in errput: - match = rm.search(line) - if match: - break - if not match: - raise main.SVNUnmatchedError + exit_code, output, errput = run_and_verify_svn(None, expected_stderr, 'up', *args) actual = wc.State.from_checkout(output) verify_update(actual, None, None, wc_dir_name, output_tree, None, None, disk_tree, status_tree, - singleton_handler_a, a_baton, - singleton_handler_b, b_baton, - check_props) + check_props, **kw) def run_and_parse_info(*args): @@ -967,19 +993,20 @@ def run_and_verify_info(expected_infos, *args): for actual, expected in zip(actual_infos, expected_infos): # compare dicts + path = actual['Path'] for key, value in expected.items(): assert ':' not in key # caller passed impossible expectations? 
if value is None and key in actual: - raise main.SVNLineUnequal("Found unexpected key '%s' with value '%s'" - % (key, actual[key])) + raise main.SVNLineUnequal("On '%s': Found unexpected key '%s'\n Value '%s'" + % (path, key, actual[key])) if value is not None and key not in actual: - raise main.SVNLineUnequal("Expected key '%s' (with value '%s') " - "not found" % (key, value)) + raise main.SVNLineUnequal("On '%s': Expected key '%s' not found\n Expected value '%s'" + % (path, key, value)) if value is not None and not re.match(value, actual[key]): - raise verify.SVNUnexpectedStdout("Values of key '%s' don't match:\n" + raise verify.SVNUnexpectedStdout("On '%s': Values of key '%s' don't match:\n" " Expected: '%s' (regex)\n" " Found: '%s' (string)\n" - % (key, value, actual[key])) + % (path, key, value, actual[key])) except: sys.stderr.write("Bad 'svn info' output:\n" @@ -993,25 +1020,16 @@ def run_and_verify_merge(dir, rev1, rev2, url1, url2, mergeinfo_output_tree, elision_output_tree, disk_tree, status_tree, skip_tree, - error_re_string = None, - singleton_handler_a = None, - a_baton = None, - singleton_handler_b = None, - b_baton = None, + expected_stderr = [], check_props = False, dry_run = True, - *args): + *args, **kw): """Run 'svn merge URL1@REV1 URL2@REV2 DIR' if URL2 is not None (for a three-way merge between URLs and WC). If URL2 is None, run 'svn merge -rREV1:REV2 URL1 DIR'. If both REV1 and REV2 are None, leave off the '-r' argument. - If ERROR_RE_STRING, the merge must exit with error, and the error - message must match regular expression ERROR_RE_STRING. - - Else if ERROR_RE_STRING is None, then: - The subcommand output will be verified against OUTPUT_TREE. Output related to mergeinfo notifications will be verified against MERGEINFO_OUTPUT_TREE if that is not None. 
Output related to mergeinfo @@ -1051,8 +1069,8 @@ def run_and_verify_merge(dir, rev1, rev2, url1, url2, pre_disk = tree.build_tree_from_wc(dir) dry_run_command = merge_command + ('--dry-run',) dry_run_command = dry_run_command + args - exit_code, out_dry, err_dry = main.run_svn(error_re_string, - *dry_run_command) + exit_code, out_dry, err_dry = run_and_verify_svn(None, expected_stderr, + *dry_run_command) post_disk = tree.build_tree_from_wc(dir) try: tree.compare_trees("disk", post_disk, pre_disk) @@ -1065,16 +1083,7 @@ def run_and_verify_merge(dir, rev1, rev2, url1, url2, # Update and make a tree of the output. merge_command = merge_command + args - exit_code, out, err = main.run_svn(error_re_string, *merge_command) - - if error_re_string: - if not error_re_string.startswith(".*"): - error_re_string = ".*(" + error_re_string + ")" - expected_err = verify.RegexOutput(error_re_string, match_all=False) - verify.verify_outputs(None, None, err, None, expected_err) - return - elif err: - raise verify.SVNUnexpectedStderr(err) + exit_code, out, err = run_and_verify_svn(None, expected_stderr, *merge_command) # Split the output into that related to application of the actual diff # and that related to the recording of mergeinfo describing the merge. @@ -1163,9 +1172,7 @@ def run_and_verify_merge(dir, rev1, rev2, url1, url2, verify_update(actual_diff, actual_mergeinfo, actual_elision, dir, output_tree, mergeinfo_output_tree, elision_output_tree, disk_tree, status_tree, - singleton_handler_a, a_baton, - singleton_handler_b, b_baton, - check_props) + check_props, **kw) def run_and_verify_patch(dir, patch_path, @@ -1173,7 +1180,7 @@ def run_and_verify_patch(dir, patch_path, error_re_string=None, check_props=False, dry_run=True, - *args): + *args, **kw): """Run 'svn patch patch_path DIR'. 
If ERROR_RE_STRING, 'svn patch' must exit with error, and the error @@ -1265,7 +1272,7 @@ def run_and_verify_patch(dir, patch_path, verify_update(mytree, None, None, dir, output_tree, None, None, disk_tree, status_tree, - check_props=check_props) + check_props=check_props, **kw) def run_and_verify_mergeinfo(error_re_string = None, @@ -1308,54 +1315,31 @@ def run_and_verify_switch(wc_dir_name, wc_target, switch_url, output_tree, disk_tree, status_tree, - error_re_string = None, - singleton_handler_a = None, - a_baton = None, - singleton_handler_b = None, - b_baton = None, + expected_stderr = [], check_props = False, - *args): + *args, **kw): """Switch WC_TARGET (in working copy dir WC_DIR_NAME) to SWITCH_URL. - If ERROR_RE_STRING, the switch must exit with error, and the error - message must match regular expression ERROR_RE_STRING. - - Else if ERROR_RE_STRING is None, then: - The subcommand output will be verified against OUTPUT_TREE, and the working copy itself will be verified against DISK_TREE. If optional STATUS_TREE is given, then 'svn status' output will be compared. (This is a good way to check that revision numbers were bumped.) - For the DISK_TREE verification, SINGLETON_HANDLER_A and - SINGLETON_HANDLER_B will be passed to tree.compare_trees -- see that - function's doc string for more details. - If CHECK_PROPS is set, then disk comparison will examine props. Return if successful, raise on failure.""" # Update and make a tree of the output. 
- exit_code, output, errput = main.run_svn(error_re_string, 'switch', - switch_url, wc_target, *args) - - if error_re_string: - if not error_re_string.startswith(".*"): - error_re_string = ".*(" + error_re_string + ")" - expected_err = verify.RegexOutput(error_re_string, match_all=False) - verify.verify_outputs(None, None, errput, None, expected_err) - elif errput: - raise verify.SVNUnexpectedStderr(err) - + exit_code, output, errput = run_and_verify_svn(None, expected_stderr, + 'switch', switch_url, + wc_target, *args) actual = wc.State.from_checkout(output) verify_update(actual, None, None, wc_dir_name, output_tree, None, None, disk_tree, status_tree, - singleton_handler_a, a_baton, - singleton_handler_b, b_baton, - check_props) + check_props, **kw) def process_output_for_commit(output, error_re_string): """Helper for run_and_verify_commit(), also used in the factory.""" @@ -1403,7 +1387,7 @@ def process_output_for_commit(output, error_re_string): def run_and_verify_commit(wc_dir_name, output_tree, status_tree, - error_re_string = None, + expected_stderr=[], *args): """Commit and verify results within working copy WC_DIR_NAME, sending ARGS to the commit subcommand. @@ -1413,9 +1397,7 @@ def run_and_verify_commit(wc_dir_name, output_tree, status_tree, be compared. (This is a good way to check that revision numbers were bumped.) - If ERROR_RE_STRING is None, the commit must not exit with error. If - ERROR_RE_STRING is a string, the commit must exit with error, and - the error message must match regular expression ERROR_RE_STRING. + EXPECTED_STDERR is handled as in run_and_verify_svn() Return if successful, raise on failure.""" @@ -1423,21 +1405,15 @@ def run_and_verify_commit(wc_dir_name, output_tree, status_tree, output_tree = output_tree.old_tree() # Commit. 
+ if len(args) == 0: + args = (wc_dir_name,) if '-m' not in args and '-F' not in args: args = list(args) + ['-m', 'log msg'] - exit_code, output, errput = main.run_svn(error_re_string, 'ci', - *args) - - if error_re_string: - if not error_re_string.startswith(".*"): - error_re_string = ".*(" + error_re_string + ")" - expected_err = verify.RegexOutput(error_re_string, match_all=False) - verify.verify_outputs(None, None, errput, None, expected_err) - - # Else not expecting error: + exit_code, output, errput = run_and_verify_svn(None, expected_stderr, + 'ci', *args) # Convert the output into a tree. - output = process_output_for_commit(output, error_re_string) + output = process_output_for_commit(output, expected_stderr) actual = tree.build_tree_from_commit(output) # Verify actual output against expected output. @@ -1457,53 +1433,43 @@ def run_and_verify_commit(wc_dir_name, output_tree, status_tree, # This function always passes '-q' to the status command, which # suppresses the printing of any unversioned or nonexistent items. -def run_and_verify_status(wc_dir_name, status_tree, - singleton_handler_a = None, - a_baton = None, - singleton_handler_b = None, - b_baton = None): +def run_and_verify_status(wc_dir_name, status_tree, no_entries=False): """Run 'status' on WC_DIR_NAME and compare it with the - expected STATUS_TREE. SINGLETON_HANDLER_A and SINGLETON_HANDLER_B will - be passed to tree.compare_trees - see that function's doc string for - more details. + expected STATUS_TREE. Returns on success, raises on failure.""" + if not isinstance(status_tree, wc.State): + raise TypeError('wc.State tree expected') + exit_code, output, errput = main.run_svn(None, 'status', '-v', '-u', '-q', wc_dir_name) - actual_status = svntest.wc.State.from_status(output) + actual_status = svntest.wc.State.from_status(output, wc_dir=wc_dir_name) # Verify actual output against expected output. 
- if isinstance(status_tree, wc.State): - try: - status_tree.compare_and_display('status', actual_status) - except tree.SVNTreeError: - _log_tree_state("ACTUAL STATUS TREE:", actual_status.old_tree(), - wc_dir_name) - raise - else: - actual_status = actual_status.old_tree() - try: - tree.compare_trees("status", actual_status, status_tree, - singleton_handler_a, a_baton, - singleton_handler_b, b_baton) - except tree.SVNTreeError: - verify.display_trees(None, 'STATUS OUTPUT TREE', status_tree, actual_status) - _log_tree_state("ACTUAL STATUS TREE:", actual_status, wc_dir_name) - raise + try: + status_tree.compare_and_display('status', actual_status) + except tree.SVNTreeError: + _log_tree_state("ACTUAL STATUS TREE:", actual_status.old_tree(), + wc_dir_name) + raise + + if no_entries: + return # if we have an output State, and we can/are-allowed to create an # entries-based State, then compare the two. - if isinstance(status_tree, wc.State): - actual_entries = wc.State.from_entries(wc_dir_name) - if actual_entries: - tweaked = status_tree.copy() - tweaked.tweak_for_entries_compare() - try: - tweaked.compare_and_display('entries', actual_entries) - except tree.SVNTreeUnequal: - ### do something more - raise + actual_entries = wc.State.from_entries(wc_dir_name) + if actual_entries: + tweaked = status_tree.copy() + tweaked.tweak_for_entries_compare() + try: + tweaked.compare_and_display('entries', actual_entries) + except tree.SVNTreeUnequal: + ### do something more + _log_tree_state("ACTUAL ENTRY TREE:", actual_entries.old_tree(), + wc_dir_name) + raise # A variant of previous func, but doesn't pass '-q'. This allows us @@ -1513,27 +1479,21 @@ def run_and_verify_unquiet_status(wc_dir_name, status_tree): expected STATUS_TREE. 
Returns on success, raises on failure.""" + if not isinstance(status_tree, wc.State): + raise TypeError('wc.State tree expected') + exit_code, output, errput = main.run_svn(None, 'status', '-v', '-u', wc_dir_name) - actual_status = svntest.wc.State.from_status(output) + actual_status = svntest.wc.State.from_status(output, wc_dir=wc_dir_name) # Verify actual output against expected output. - if isinstance(status_tree, wc.State): - try: - status_tree.compare_and_display('unquiet status', actual_status) - except tree.SVNTreeError: - _log_tree_state("ACTUAL STATUS TREE:", - actual_status.normalize().old_tree(), wc_dir_name) - raise - else: - actual_status = actual_status.old_tree() - try: - tree.compare_trees("UNQUIET STATUS", actual_status, status_tree) - except tree.SVNTreeError: - _log_tree_state("ACTUAL UNQUIET STATUS TREE:", actual_status, - wc_dir_name) - raise + try: + status_tree.compare_and_display('unquiet status', actual_status) + except tree.SVNTreeError: + _log_tree_state("ACTUAL STATUS TREE:", + actual_status.normalize().old_tree(), wc_dir_name) + raise def run_and_verify_status_xml(expected_entries = [], *args): @@ -1542,7 +1502,7 @@ def run_and_verify_status_xml(expected_entries = [], EXPECTED_ENTRIES. 
""" - exit_code, output, errput = run_and_verify_svn(None, None, [], + exit_code, output, errput = run_and_verify_svn(None, [], 'status', '--xml', *args) if len(errput) > 0: @@ -1614,11 +1574,11 @@ def run_and_verify_inherited_prop_xml(path_or_url, if (propname): exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'propget', propname, '--xml', + None, [], 'propget', propname, '--xml', '--show-inherited-props', path_or_url, *args) else: exit_code, output, errput = svntest.actions.run_and_verify_svn( - None, None, [], 'proplist', '-v', '--xml', '--show-inherited-props', + None, [], 'proplist', '-v', '--xml', '--show-inherited-props', path_or_url, *args) if len(errput) > 0: @@ -1691,7 +1651,7 @@ def run_and_verify_diff_summarize_xml(error_re_string = [], EXPECTED_PROPS and EXPECTED_KINDS. Returns on success, raises on failure.""" - exit_code, output, errput = run_and_verify_svn(None, None, error_re_string, + exit_code, output, errput = run_and_verify_svn(None, error_re_string, 'diff', '--summarize', '--xml', *args) @@ -1775,12 +1735,12 @@ def run_and_validate_lock(path, username): comment = "Locking path:%s." % path # lock the path - run_and_verify_svn(None, ".*locked by user", [], 'lock', + run_and_verify_svn(".*locked by user", [], 'lock', '--username', username, '-m', comment, path) # Run info and check that we get the lock fields. 
- exit_code, output, err = run_and_verify_svn(None, None, [], + exit_code, output, err = run_and_verify_svn(None, [], 'info','-R', path) @@ -1818,7 +1778,7 @@ def _run_and_verify_resolve(cmd, expected_paths, *args): expected_paths]), ], match_all=False) - run_and_verify_svn(None, expected_output, [], + run_and_verify_svn(expected_output, [], cmd, *args) def run_and_verify_resolve(expected_paths, *args): @@ -1842,7 +1802,7 @@ def run_and_verify_revert(expected_paths, *args): expected_output = verify.UnorderedOutput([ "Reverted '" + path + "'\n" for path in expected_paths]) - run_and_verify_svn(None, expected_output, [], + run_and_verify_svn(expected_output, [], "revert", *args) @@ -1851,32 +1811,44 @@ def run_and_verify_revert(expected_paths, *args): # This allows a test to *quickly* bootstrap itself. -def make_repo_and_wc(sbox, create_wc = True, read_only = False, - minor_version = None): - """Create a fresh 'Greek Tree' repository and check out a WC from it. +def make_repo_and_wc(sbox, create_wc=True, read_only=False, empty=False, + minor_version=None): + """Create a fresh repository and check out a WC from it. If EMPTY is + True, the repository and WC will be empty and at revision 0, + otherwise they will contain the 'Greek Tree' at revision 1. If READ_ONLY is False, a dedicated repository will be created, at the path - SBOX.repo_dir. If READ_ONLY is True, the pristine repository will be used. + SBOX.repo_dir. If READ_ONLY is True, a shared pristine repository may be + used or a dedicated repository may be created. (Currently we use a shared + pristine 'Greek tree' repo but we create a dedicated empty repo.) In either case, SBOX.repo_url is assumed to point to the repository that will be used. - If create_wc is True, a dedicated working copy will be checked out from + If CREATE_WC is True, a dedicated working copy will be checked out from the repository, at the path SBOX.wc_dir. 
Returns on success, raises on failure.""" - # Create (or copy afresh) a new repos with a greek tree in it. - if not read_only: - guarantee_greek_repository(sbox.repo_dir, minor_version) + # Create or copy or reference the appropriate kind of repository: + # if we want a non-empty, Greek repo, refer to the shared one; else + # if we want an empty repo or a writable Greek repo, create one. + # (We could have a shared empty repo for read-only use, but we don't.) + if empty: + guarantee_empty_repository(sbox.repo_dir, minor_version) + expected_state = svntest.wc.State('', {}) + else: + if not read_only: + guarantee_greek_repository(sbox.repo_dir, minor_version) + expected_state = main.greek_state if create_wc: # Generate the expected output tree. - expected_output = main.greek_state.copy() + expected_output = expected_state.copy() expected_output.wc_dir = sbox.wc_dir expected_output.tweak(status='A ', contents=None) # Generate an expected wc tree. - expected_wc = main.greek_state + expected_wc = expected_state # Do a checkout, and verify the resulting output and disk contents. run_and_verify_checkout(sbox.repo_url, @@ -1916,11 +1888,11 @@ def get_virginal_state(wc_dir, rev): return state # Cheap administrative directory locking -def lock_admin_dir(wc_dir, recursive=False): +def lock_admin_dir(wc_dir, recursive=False, work_queue=False): "Lock a SVN administrative directory" db, root_path, relpath = wc.open_wc_db(wc_dir) - svntest.main.run_wc_lock_tester(recursive, wc_dir) + svntest.main.run_wc_lock_tester(recursive, wc_dir, work_queue) def set_incomplete(wc_dir, revision): "Make wc_dir incomplete at revision" @@ -2101,7 +2073,7 @@ def inject_conflict_into_wc(sbox, state_path, file_path, if expected_status: expected_status.tweak(state_path, wc_rev=merged_rev) run_and_verify_commit(wc_dir, expected_output, expected_status, - None, file_path) + [], file_path) # Backdate the file. 
exit_code, output, errput = main.run_svn(None, "up", "-r", str(prev_rev), @@ -2122,7 +2094,7 @@ def inject_conflict_into_wc(sbox, state_path, file_path, inject_conflict_into_expected_state(state_path, expected_disk, expected_status, conflicting_contents, contents, - merged_rev) + prev_rev, merged_rev) exit_code, output, errput = main.run_svn(None, "up", "-r", str(merged_rev), file_path) if expected_status: @@ -2130,26 +2102,30 @@ def inject_conflict_into_wc(sbox, state_path, file_path, def inject_conflict_into_expected_state(state_path, expected_disk, expected_status, - wc_text, merged_text, merged_rev): + wc_text, merged_text, prev_rev, + merged_rev): """Update the EXPECTED_DISK and EXPECTED_STATUS trees for the conflict at STATE_PATH (ignored if None). WC_TEXT, MERGED_TEXT, and MERGED_REV are used to determine the contents of the conflict (the text parameters should be newline-terminated).""" if expected_disk: conflict_marker = make_conflict_marker_text(wc_text, merged_text, - merged_rev) + prev_rev, merged_rev) existing_text = expected_disk.desc[state_path].contents or "" expected_disk.tweak(state_path, contents=existing_text + conflict_marker) if expected_status: expected_status.tweak(state_path, status='C ') -def make_conflict_marker_text(wc_text, merged_text, merged_rev): +def make_conflict_marker_text(wc_text, merged_text, prev_rev, merged_rev, + old_text=''): """Return the conflict marker text described by WC_TEXT (the current text in the working copy, MERGED_TEXT (the conflicting text merged in), and MERGED_REV (the revision from whence the conflicting text came).""" - return "<<<<<<< .working\n" + wc_text + "=======\n" + \ + return "<<<<<<< .working\n" + wc_text + \ + "||||||| .merge-left.r" + str(prev_rev) + '\n' + \ + old_text + "=======\n" + \ merged_text + ">>>>>>> .merge-right.r" + str(merged_rev) + "\n" @@ -2201,7 +2177,7 @@ def build_greek_tree_conflicts(sbox): expected_status = get_virginal_state(wc_dir, 1) expected_status.tweak('A/D/G/pi', 
wc_rev='2') expected_status.remove('A/D/G/rho', 'A/D/G/tau') - run_and_verify_commit(wc_dir, expected_output, expected_status, None, + run_and_verify_commit(wc_dir, expected_output, expected_status, [], '-m', 'Incoming changes.', wc_dir ) # Update back to the pristine state ("time-warp"). @@ -2213,7 +2189,7 @@ def build_greek_tree_conflicts(sbox): expected_disk = main.greek_state expected_status = get_virginal_state(wc_dir, 1) run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, + expected_status, [], False, '-r', '1', wc_dir) # Make local changes @@ -2224,6 +2200,6 @@ def build_greek_tree_conflicts(sbox): # Update, receiving the incoming changes on top of the local changes, # causing tree conflicts. Don't check for any particular result: that is # the job of other tests. - run_and_verify_svn(None, verify.AnyOutput, [], 'update', wc_dir) + run_and_verify_svn(verify.AnyOutput, [], 'update', wc_dir) diff --git a/subversion/tests/cmdline/svntest/deeptrees.py b/subversion/tests/cmdline/svntest/deeptrees.py index 52517a2..a40729c 100644 --- a/subversion/tests/cmdline/svntest/deeptrees.py +++ b/subversion/tests/cmdline/svntest/deeptrees.py @@ -123,13 +123,13 @@ def deep_trees_leaf_edit(base): main.file_append(F, "More text for file alpha.\n") main.file_append(DF, "More text for file beta.\n") main.file_append(DDF, "More text for file gamma.\n") - run_and_verify_svn(None, verify.AnyOutput, [], + run_and_verify_svn(verify.AnyOutput, [], 'propset', 'prop1', '1', F, DF, DDF) D = j(base, 'D', 'D1') DD = j(base, 'DD', 'D1', 'D2') DDD = j(base, 'DDD', 'D1', 'D2', 'D3') - run_and_verify_svn(None, verify.AnyOutput, [], + run_and_verify_svn(verify.AnyOutput, [], 'propset', 'prop1', '1', D, DD, DDD) D = j(base, 'D', 'D1', 'delta') DD = j(base, 'DD', 'D1', 'D2', 'epsilon') @@ -137,7 +137,7 @@ def deep_trees_leaf_edit(base): main.file_append(D, "This is the file 'delta'.\n") main.file_append(DD, "This is the file 
'epsilon'.\n") main.file_append(DDD, "This is the file 'zeta'.\n") - run_and_verify_svn(None, verify.AnyOutput, [], + run_and_verify_svn(verify.AnyOutput, [], 'add', D, DD, DDD) # deep trees state after a call to deep_trees_leaf_edit @@ -196,11 +196,7 @@ deep_trees_after_leaf_del = wc.State('', { # deep trees state after a call to deep_trees_leaf_del with no commit def deep_trees_after_leaf_del_no_ci(wc_dir): - if svntest.main.wc_is_singledb(wc_dir): - return deep_trees_after_leaf_del - else: - return deep_trees_empty_dirs - + return deep_trees_after_leaf_del def deep_trees_tree_del(base): """Helper function for deep trees test cases. Delete top-level dirs.""" @@ -240,31 +236,9 @@ deep_trees_after_tree_del = wc.State('', { 'DDD' : Item(), }) -# deep trees state without any files -deep_trees_empty_dirs = wc.State('', { - 'F' : Item(), - 'D' : Item(), - 'D/D1' : Item(), - 'DF' : Item(), - 'DF/D1' : Item(), - 'DD' : Item(), - 'DD/D1' : Item(), - 'DD/D1/D2' : Item(), - 'DDF' : Item(), - 'DDF/D1' : Item(), - 'DDF/D1/D2' : Item(), - 'DDD' : Item(), - 'DDD/D1' : Item(), - 'DDD/D1/D2' : Item(), - 'DDD/D1/D2/D3' : Item(), - }) - # deep trees state after a call to deep_trees_tree_del with no commit def deep_trees_after_tree_del_no_ci(wc_dir): - if svntest.main.wc_is_singledb(wc_dir): - return deep_trees_after_tree_del - else: - return deep_trees_empty_dirs + return deep_trees_after_tree_del def deep_trees_tree_del_repos(base): """Helper function for deep trees test cases. 
Delete top-level dirs, @@ -524,8 +498,13 @@ def deep_trees_run_tests_scheme_for_update(sbox, greater_scheme): x_status.copy() x_status.wc_dir = base + if test_case.error_re_string == None: + expected_stderr = [] + else: + expected_stderr = test_case.error_re_string + run_and_verify_update(base, x_out, x_disk, None, - error_re_string = test_case.error_re_string) + expected_stderr = expected_stderr) if x_status: run_and_verify_unquiet_status(base, x_status) @@ -551,8 +530,7 @@ def deep_trees_run_tests_scheme_for_update(sbox, greater_scheme): x_status.wc_dir = base run_and_verify_commit(base, None, x_status, - test_case.commit_block_string, - base) + test_case.commit_block_string) except: logger.warn("ERROR IN: Tests scheme for update: " + "while checking commit-blocking in '%s'", test_case.name) @@ -612,8 +590,13 @@ def deep_trees_skipping_on_update(sbox, test_case, skip_paths, # Account for nodes that were updated by further_action x_status.tweak('', 'D', 'F', 'DD', 'DF', 'DDD', 'DDF', wc_rev=4) + if test_case.error_re_string == None: + expected_stderr = [] + else: + expected_stderr = test_case.error_re_string + run_and_verify_update(base, x_out, x_disk, None, - error_re_string = test_case.error_re_string) + expected_stderr = expected_stderr) run_and_verify_unquiet_status(base, x_status) @@ -771,9 +754,14 @@ def deep_trees_run_tests_scheme_for_switch(sbox, greater_scheme): x_status.copy() x_status.wc_dir = local + if test_case.error_re_string == None: + expected_stderr = [] + else: + expected_stderr = test_case.error_re_string + run_and_verify_switch(local, local, incoming, x_out, x_disk, None, - test_case.error_re_string, None, None, None, - None, False, '--ignore-ancestry') + expected_stderr, False, + '--ignore-ancestry') run_and_verify_unquiet_status(local, x_status) x_info = test_case.expected_info or {} @@ -797,8 +785,7 @@ def deep_trees_run_tests_scheme_for_switch(sbox, greater_scheme): x_status.wc_dir = local run_and_verify_commit(local, None, x_status, - 
test_case.commit_block_string, - local) + test_case.commit_block_string) except: logger.warn("ERROR IN: Tests scheme for switch: " + "while checking commit-blocking in '%s'", test_case.name) @@ -985,10 +972,14 @@ def deep_trees_run_tests_scheme_for_merge(sbox, greater_scheme, if ignore_ancestry: varargs = varargs + ('--ignore-ancestry',) + if test_case.error_re_string == None: + expected_stderr = [] + else: + expected_stderr = test_case.error_re_string + run_and_verify_merge(local, '0', 'HEAD', incoming, None, x_out, None, None, x_disk, None, x_skip, - test_case.error_re_string, - None, None, None, None, + expected_stderr, False, False, *varargs) run_and_verify_unquiet_status(local, x_status) except: @@ -1010,11 +1001,197 @@ def deep_trees_run_tests_scheme_for_merge(sbox, greater_scheme, x_status.wc_dir = local run_and_verify_commit(local, None, x_status, - test_case.commit_block_string, - local) + test_case.commit_block_string) except: logger.warn("ERROR IN: Tests scheme for merge: " + "while checking commit-blocking in '%s'", test_case.name) raise +### Bummer. It would be really nice to have easy access to the URL +### member of our entries files so that switches could be tested by +### examining the modified ancestry. But status doesn't show this +### information. Hopefully in the future the cmdline binary will have +### a subcommand for dumping multi-line detailed information about +### versioned things. Until then, we'll stick with the traditional +### verification methods. 
+### +### gjs says: we have 'svn info' now + +def get_routine_status_state(wc_dir): + """get the routine status list for WC_DIR at the completion of an + initial call to do_routine_switching()""" + + # Construct some paths for convenience + ADH_path = os.path.join(wc_dir, 'A', 'D', 'H') + chi_path = os.path.join(ADH_path, 'chi') + omega_path = os.path.join(ADH_path, 'omega') + psi_path = os.path.join(ADH_path, 'psi') + pi_path = os.path.join(ADH_path, 'pi') + tau_path = os.path.join(ADH_path, 'tau') + rho_path = os.path.join(ADH_path, 'rho') + + # Now generate a state + state = svntest.actions.get_virginal_state(wc_dir, 1) + state.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda') + state.add({ + 'A/B/pi' : Item(status=' ', wc_rev=1), + 'A/B/tau' : Item(status=' ', wc_rev=1), + 'A/B/rho' : Item(status=' ', wc_rev=1), + }) + + return state + +#---------------------------------------------------------------------- + +def get_routine_disk_state(wc_dir): + """get the routine disk list for WC_DIR at the completion of an + initial call to do_routine_switching()""" + + disk = svntest.main.greek_state.copy() + + # iota has the same contents as gamma + disk.tweak('iota', contents=disk.desc['A/D/gamma'].contents) + + # A/B/* no longer exist, but have been replaced by copies of A/D/G/* + disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda') + disk.add({ + 'A/B/pi' : Item("This is the file 'pi'.\n"), + 'A/B/rho' : Item("This is the file 'rho'.\n"), + 'A/B/tau' : Item("This is the file 'tau'.\n"), + }) + + return disk + +#---------------------------------------------------------------------- + +def do_routine_switching(wc_dir, repo_url, verify): + """perform some routine switching of the working copy WC_DIR for + other tests to use. If VERIFY, then do a full verification of the + switching, else don't bother.""" + + ### Switch the file `iota' to `A/D/gamma'. 
+ + # Construct some paths for convenience + iota_path = os.path.join(wc_dir, 'iota') + gamma_url = repo_url + '/A/D/gamma' + + if verify: + # Create expected output tree + expected_output = svntest.wc.State(wc_dir, { + 'iota' : Item(status='U '), + }) + + # Create expected disk tree (iota will have gamma's contents) + expected_disk = svntest.main.greek_state.copy() + expected_disk.tweak('iota', + contents=expected_disk.desc['A/D/gamma'].contents) + + # Create expected status tree + expected_status = svntest.actions.get_virginal_state(wc_dir, 1) + expected_status.tweak('iota', switched='S') + + # Do the switch and check the results in three ways. + svntest.actions.run_and_verify_switch(wc_dir, iota_path, gamma_url, + expected_output, + expected_disk, + expected_status, + [], + False, '--ignore-ancestry') + else: + svntest.main.run_svn(None, 'switch', '--ignore-ancestry', + gamma_url, iota_path) + + ### Switch the directory `A/B' to `A/D/G'. + + # Construct some paths for convenience + AB_path = os.path.join(wc_dir, 'A', 'B') + ADG_url = repo_url + '/A/D/G' + + if verify: + # Create expected output tree + expected_output = svntest.wc.State(wc_dir, { + 'A/B/E' : Item(status='D '), + 'A/B/F' : Item(status='D '), + 'A/B/lambda' : Item(status='D '), + 'A/B/pi' : Item(status='A '), + 'A/B/tau' : Item(status='A '), + 'A/B/rho' : Item(status='A '), + }) + + # Create expected disk tree (iota will have gamma's contents, + # A/B/* will look like A/D/G/*) + expected_disk = get_routine_disk_state(wc_dir) + + # Create expected status + expected_status = get_routine_status_state(wc_dir) + expected_status.tweak('iota', 'A/B', switched='S') + + # Do the switch and check the results in three ways. 
+ svntest.actions.run_and_verify_switch(wc_dir, AB_path, ADG_url, + expected_output, + expected_disk, + expected_status, + [], + False, '--ignore-ancestry') + else: + svntest.main.run_svn(None, 'switch', '--ignore-ancestry', + ADG_url, AB_path) + + +#---------------------------------------------------------------------- + +def commit_routine_switching(wc_dir, verify): + "Commit some stuff in a routinely-switched working copy." + + # Make some local mods + iota_path = os.path.join(wc_dir, 'iota') + Bpi_path = os.path.join(wc_dir, 'A', 'B', 'pi') + Gpi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi') + Z_path = os.path.join(wc_dir, 'A', 'D', 'G', 'Z') + zeta_path = os.path.join(wc_dir, 'A', 'D', 'G', 'Z', 'zeta') + + svntest.main.file_append(iota_path, "apple") + svntest.main.file_append(Bpi_path, "melon") + svntest.main.file_append(Gpi_path, "banana") + os.mkdir(Z_path) + svntest.main.file_append(zeta_path, "This is the file 'zeta'.\n") + svntest.main.run_svn(None, 'add', Z_path) + + # Try to commit. We expect this to fail because, if all the + # switching went as expected, A/B/pi and A/D/G/pi point to the + # same URL. We don't allow this. + svntest.actions.run_and_verify_commit( + wc_dir, None, None, + "svn: E195003: Cannot commit both .* as they refer to the same URL$") + + # Okay, that all taken care of, let's revert the A/D/G/pi path and + # move along. Afterward, we should be okay to commit. (Sorry, + # holsta, that banana has to go...) + svntest.main.run_svn(None, 'revert', Gpi_path) + + # Create expected output tree. + expected_output = svntest.wc.State(wc_dir, { + 'A/D/G/Z' : Item(verb='Adding'), + 'A/D/G/Z/zeta' : Item(verb='Adding'), + 'iota' : Item(verb='Sending'), + 'A/B/pi' : Item(verb='Sending'), + }) + + # Created expected status tree. 
+ expected_status = get_routine_status_state(wc_dir) + expected_status.tweak('iota', 'A/B', switched='S') + expected_status.tweak('iota', 'A/B/pi', wc_rev=2, status=' ') + expected_status.add({ + 'A/D/G/Z' : Item(status=' ', wc_rev=2), + 'A/D/G/Z/zeta' : Item(status=' ', wc_rev=2), + }) + + # Commit should succeed + if verify: + svntest.actions.run_and_verify_commit(wc_dir, + expected_output, + expected_status) + else: + svntest.main.run_svn(None, + 'ci', '-m', 'log msg', wc_dir) diff --git a/subversion/tests/cmdline/svntest/factory.py b/subversion/tests/cmdline/svntest/factory.py index 9a2b17b..32093ea 100644 --- a/subversion/tests/cmdline/svntest/factory.py +++ b/subversion/tests/cmdline/svntest/factory.py @@ -97,7 +97,7 @@ # # YOU ARE CORDIALLY INVITED to add/tweak/change to your needs. # If you want to know what's going on, look at the switch() -# funtion of TestFactory below. +# function of TestFactory below. # # # DETAILS @@ -308,7 +308,7 @@ class TestFactory: # An optimized list kept up-to-date by variable additions self.sorted_vars_by_pathlen = [] - # Wether we ever used the variables 'wc_dir' and 'url' (tiny tweak) + # Whether we ever used the variables 'wc_dir' and 'url' (tiny tweak) self.used_wc_dir = False self.used_abs_wc_dir = False self.used_url = False @@ -500,7 +500,7 @@ class TestFactory: else: py = "expected_stdout = verify.UnorderedOutput(" + pylist + ")\n\n" py += pychdir - py += "actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0" + py += "actions.run_and_verify_svn2(expected_stdout, [], 0" else: # write a test that expects failure pylist = self.strlist2py(err) @@ -509,8 +509,7 @@ class TestFactory: else: py = "expected_stderr = verify.UnorderedOutput(" + pylist + ")\n\n" py += pychdir - py += ("actions.run_and_verify_svn2('OUTPUT', " + - "[], expected_stderr, " + str(code)) + py += ("actions.run_and_verify_svn2([], expected_stderr, " + str(code)) if len(pyargs) > 0: py += ", " + ", ".join(pyargs) @@ -635,13 +634,13 @@ class 
TestFactory: py += pychdir py += ("actions.run_and_verify_update(" + wc.py + ", " + "expected_output, expected_disk, expected_status, " + - "None, None, None, None, None, False") + "[], False") else: # write a test that expects error py = "expected_error = " + self.strlist2py(err) + "\n\n" py += pychdir py += ("actions.run_and_verify_update(" + wc.py + ", None, None, " + - "None, expected_error, None, None, None, None, False") + "None, expected_error") if len(pyargs) > 0: py += ', ' + ', '.join(pyargs) @@ -701,14 +700,14 @@ class TestFactory: py += ("actions.run_and_verify_switch(" + wc.py + ", " + wc_arg.pyarg + ", " + url_arg.pyarg + ", " + "expected_output, expected_disk, expected_status, " + - "None, None, None, None, None, False") + "[], False") else: # write a test that expects error py = "expected_error = " + self.strlist2py(err) + "\n\n" py += pychdir py += ("actions.run_and_verify_switch(" + wc.py + ", " + wc_arg.pyarg + ", " + url_arg.pyarg + ", " + - "None, None, None, expected_error, None, None, None, None, False") + "None, None, None, expected_error, False") if len(pyargs) > 0: py += ', ' + ', '.join(pyargs) @@ -766,7 +765,7 @@ class TestFactory: py += ("actions.run_and_verify_checkout(" + url_arg.pyarg + ", " + wc_arg.pyarg + - ", expected_output, expected_disk, None, None, None, None") + ", expected_output, expected_disk") else: # write a test that expects failure pylist = self.strlist2py(err) @@ -775,8 +774,7 @@ class TestFactory: else: py += "expected_stderr = verify.UnorderedOutput(" + pylist + ")\n\n" py += pychdir - py += ("actions.run_and_verify_svn2('OUTPUT', " + - "[], expected_stderr, " + str(code) + + py += ("actions.run_and_verify_svn2([], expected_stderr, " + str(code) + ", " + url_arg.pyarg + ", " + wc_arg.pyarg) # Append the remaining args @@ -1037,7 +1035,7 @@ class TestFactory: make_py, prev_status = self.get_prev_status(wc) - actual_status = svntest.wc.State.from_status(output) + actual_status = svntest.wc.State.from_status(output, 
wc_dir=wc.realpath) # The tests currently compare SVNTreeNode trees, so let's do that too. prev_status_tree = prev_status.old_tree() diff --git a/subversion/tests/cmdline/svntest/main.py b/subversion/tests/cmdline/svntest/main.py index 0d1ca91..de59c06 100644 --- a/subversion/tests/cmdline/svntest/main.py +++ b/subversion/tests/cmdline/svntest/main.py @@ -36,6 +36,7 @@ import xml import urllib import logging import hashlib +import zipfile from urlparse import urlparse try: @@ -53,7 +54,7 @@ import svntest from svntest import Failure from svntest import Skip -SVN_VER_MINOR = 8 +SVN_VER_MINOR = 9 ###################################################################### # @@ -111,7 +112,7 @@ class SVNRepositoryCreateFailure(Failure): # Windows specifics if sys.platform == 'win32': windows = True - file_scheme_prefix = 'file:' + file_scheme_prefix = 'file:///' _exe = '.exe' _bat = '.bat' os.environ['SVN_DBG_STACKTRACES_TO_STDERR'] = 'y' @@ -131,6 +132,10 @@ else: wc_author = 'jrandom' wc_passwd = 'rayjandom' +# Username and password used by svnrdump in dump/load cross-checks +crosscheck_username = '__dumpster__' +crosscheck_password = '__loadster__' + # Username and password used by the working copies for "second user" # scenarios wc_author2 = 'jconstant' # use the same password as wc_author @@ -141,23 +146,30 @@ stack_trace_regexp = r'(?:.*subversion[\\//].*\.c:[0-9]*,$|.*apr_err=.*)' os.environ['LC_ALL'] = 'C' ###################################################################### -# The locations of the svn, svnadmin and svnlook binaries, relative to -# the only scripts that import this file right now (they live in ../). +# The locations of the svn binaries. # Use --bin to override these defaults. 
-svn_binary = os.path.abspath('../../svn/svn' + _exe) -svnadmin_binary = os.path.abspath('../../svnadmin/svnadmin' + _exe) -svnlook_binary = os.path.abspath('../../svnlook/svnlook' + _exe) -svnrdump_binary = os.path.abspath('../../svnrdump/svnrdump' + _exe) -svnsync_binary = os.path.abspath('../../svnsync/svnsync' + _exe) -svnversion_binary = os.path.abspath('../../svnversion/svnversion' + _exe) -svndumpfilter_binary = os.path.abspath('../../svndumpfilter/svndumpfilter' + \ - _exe) -svnmucc_binary=os.path.abspath('../../svnmucc/svnmucc' + _exe) -entriesdump_binary = os.path.abspath('entries-dump' + _exe) -atomic_ra_revprop_change_binary = os.path.abspath('atomic-ra-revprop-change' + \ - _exe) -wc_lock_tester_binary = os.path.abspath('../libsvn_wc/wc-lock-tester' + _exe) -wc_incomplete_tester_binary = os.path.abspath('../libsvn_wc/wc-incomplete-tester' + _exe) +def P(relpath, + head=os.path.dirname(os.path.dirname(os.path.abspath('.'))) + ): + if sys.platform=='win32': + return os.path.join(head, relpath + '.exe') + else: + return os.path.join(head, relpath) +svn_binary = P('svn/svn') +svnadmin_binary = P('svnadmin/svnadmin') +svnlook_binary = P('svnlook/svnlook') +svnrdump_binary = P('svnrdump/svnrdump') +svnsync_binary = P('svnsync/svnsync') +svnversion_binary = P('svnversion/svnversion') +svndumpfilter_binary = P('svndumpfilter/svndumpfilter') +svnmucc_binary = P('svnmucc/svnmucc') +svnfsfs_binary = P('svnfsfs/svnfsfs') +entriesdump_binary = P('tests/cmdline/entries-dump') +lock_helper_binary = P('tests/cmdline/lock-helper') +atomic_ra_revprop_change_binary = P('tests/cmdline/atomic-ra-revprop-change') +wc_lock_tester_binary = P('tests/libsvn_wc/wc-lock-tester') +wc_incomplete_tester_binary = P('tests/libsvn_wc/wc-incomplete-tester') +del P ###################################################################### # The location of svnauthz binary, relative to the only scripts that @@ -539,6 +551,15 @@ def run_command_stdin(command, error_expected, bufsize=-1, 
binary_mode=False, and not any(map(lambda arg: 'prop_tests-12' in arg, varargs)): raise Failure("Repository diskpath in %s: %r" % (name, lines)) + valgrind_diagnostic = False + # A valgrind diagnostic will raise a failure if the command is + # expected to run without error. When an error is expected any + # subsequent error pattern matching is usually lenient and will not + # detect the diagnostic so make sure a failure is raised here. + if error_expected and stderr_lines: + if any(map(lambda arg: re.match('==[0-9]+==', arg), stderr_lines)): + valgrind_diagnostic = True + stop = time.time() logger.info('<TIME = %.6f>' % (stop - start)) for x in stdout_lines: @@ -546,7 +567,8 @@ def run_command_stdin(command, error_expected, bufsize=-1, binary_mode=False, for x in stderr_lines: logger.info(x.rstrip()) - if (not error_expected) and ((stderr_lines) or (exit_code != 0)): + if (((not error_expected) and ((stderr_lines) or (exit_code != 0))) + or valgrind_diagnostic): for x in stderr_lines: logger.warning(x.rstrip()) if len(varargs) <= 5: @@ -561,7 +583,8 @@ def run_command_stdin(command, error_expected, bufsize=-1, binary_mode=False, stderr_lines def create_config_dir(cfgdir, config_contents=None, server_contents=None, - ssl_cert=None, ssl_url=None, http_proxy=None): + ssl_cert=None, ssl_url=None, http_proxy=None, + exclusive_wc_locks=None): "Create config directories and files" # config file names @@ -582,25 +605,41 @@ password-stores = [miscellany] interactive-conflicts = false """ - + if exclusive_wc_locks: + config_contents += """ +[working-copy] +exclusive-locking = true +""" # define default server file contents if none provided if server_contents is None: http_library_str = "" if options.http_library: http_library_str = "http-library=%s" % (options.http_library) http_proxy_str = "" + http_proxy_username_str = "" + http_proxy_password_str = "" if options.http_proxy: http_proxy_parsed = urlparse("//" + options.http_proxy) http_proxy_str = "http-proxy-host=%s\n" % 
(http_proxy_parsed.hostname) + \ "http-proxy-port=%d" % (http_proxy_parsed.port or 80) + if options.http_proxy_username: + http_proxy_username_str = "http-proxy-username=%s" % \ + (options.http_proxy_username) + if options.http_proxy_password: + http_proxy_password_str = "http-proxy-password=%s" % \ + (options.http_proxy_password) + server_contents = """ # [global] %s %s +%s +%s store-plaintext-passwords=yes store-passwords=yes -""" % (http_library_str, http_proxy_str) +""" % (http_library_str, http_proxy_str, http_proxy_username_str, + http_proxy_password_str) file_write(cfgfile_cfg, config_contents) file_write(cfgfile_srv, server_contents) @@ -661,14 +700,25 @@ def _with_config_dir(args): else: return args + ('--config-dir', default_config_dir) +class svnrdump_crosscheck_authentication: + pass + def _with_auth(args): assert '--password' not in args - args = args + ('--password', wc_passwd, + if svnrdump_crosscheck_authentication in args: + args = filter(lambda x: x is not svnrdump_crosscheck_authentication, args) + auth_username = crosscheck_username + auth_password = crosscheck_password + else: + auth_username = wc_author + auth_password = wc_passwd + + args = args + ('--password', auth_password, '--no-auth-cache' ) if '--username' in args: return args else: - return args + ('--username', wc_author ) + return args + ('--username', auth_username ) # For running subversion and returning the output def run_svn(error_expected, *varargs): @@ -716,7 +766,8 @@ def run_svnrdump(stdin_input, *varargs): def run_svnsync(*varargs): """Run svnsync with VARARGS, returns exit code as int; stdout, stderr as list of lines (including line terminators).""" - return run_command(svnsync_binary, 1, False, *(_with_config_dir(varargs))) + return run_command(svnsync_binary, 1, False, + *(_with_auth(_with_config_dir(varargs)))) def run_svnversion(*varargs): """Run svnversion with VARARGS, returns exit code as int; stdout, stderr @@ -739,6 +790,16 @@ def run_svnauthz_validate(*varargs): 
stderr as list of lines (including line terminators).""" return run_command(svnauthz_validate_binary, 1, False, *varargs) +def run_svnfsfs(*varargs): + """Run svnfsfs with VARARGS, returns exit code as int; stdout, stderr + as list of lines (including line terminators).""" + return run_command(svnfsfs_binary, 1, False, *varargs) + +def run_lock_helper(repo, path, user, seconds): + """Run lock-helper to lock path in repo by username for seconds""" + + return run_command(lock_helper_binary, 1, False, repo, path, user, seconds) + def run_entriesdump(path): """Run the entries-dump helper, returning a dict of Entry objects.""" # use spawn_process rather than run_command to avoid copying all the data @@ -793,9 +854,11 @@ def run_atomic_ra_revprop_change(url, revision, propname, skel, want_error): url, revision, propname, skel, want_error and 1 or 0, default_config_dir) -def run_wc_lock_tester(recursive, path): +def run_wc_lock_tester(recursive, path, work_queue=False): "Run the wc-lock obtainer tool, returning its exit code, stdout and stderr" - if recursive: + if work_queue: + option = "-w" + elif recursive: option = "-r" else: option = "-1" @@ -874,47 +937,10 @@ def file_substitute(path, contents, new_contents): fcontent = open(path, 'r').read().replace(contents, new_contents) open(path, 'w').write(fcontent) -# For creating blank new repositories -def create_repos(path, minor_version = None): - """Create a brand-new SVN repository at PATH. 
If PATH does not yet - exist, create it.""" - - if not os.path.exists(path): - os.makedirs(path) # this creates all the intermediate dirs, if neccessary - - opts = ("--bdb-txn-nosync",) - if not minor_version or minor_version > options.server_minor_version: - minor_version = options.server_minor_version - opts += ("--compatible-version=1.%d" % (minor_version),) - if options.fs_type is not None: - opts += ("--fs-type=" + options.fs_type,) - exit_code, stdout, stderr = run_command(svnadmin_binary, 1, False, "create", - path, *opts) - - # Skip tests if we can't create the repository. - if stderr: - stderr_lines = 0 - not_using_fsfs_backend = (options.fs_type != "fsfs") - backend_deprecation_warning = False - for line in stderr: - stderr_lines += 1 - if line.find('Unknown FS type') != -1: - raise Skip - if not_using_fsfs_backend: - if 0 < line.find('repository back-end is deprecated, consider using'): - backend_deprecation_warning = True - - # Creating BDB repositories will cause svnadmin to print a warning - # which should be ignored. - if (stderr_lines == 1 - and not_using_fsfs_backend - and backend_deprecation_warning): - pass - else: - # If the FS type is known and we noticed more than just the - # BDB-specific warning, assume the repos couldn't be created - # (e.g. due to a missing 'svnadmin' binary). - raise SVNRepositoryCreateFailure("".join(stderr).rstrip()) +# For setting up authz and hooks in existing repos +def _post_create_repos(path, minor_version = None): + """Set default access right configurations for svnserve and mod_dav + as well as hooks etc. in the SVN repository at PATH.""" # Require authentication to write to the repos, for ra_svn testing. file_write(get_svnserve_conf_file_path(path), @@ -927,14 +953,26 @@ def create_repos(path, minor_version = None): # This actually creates TWO [users] sections in the file (one of them is # uncommented in `svnadmin create`'s template), so we exercise the .ini # files reading code's handling of duplicates, too. 
:-) - file_append(os.path.join(path, "conf", "passwd"), - "[users]\njrandom = rayjandom\njconstant = rayjandom\n"); + users = ("[users]\n" + "jrandom = rayjandom\n" + "jconstant = rayjandom\n") + if tests_verify_dump_load_cross_check(): + # Insert a user for the dump/load cross-check. + users += (crosscheck_username + " = " + crosscheck_password + "\n") + file_append(os.path.join(path, "conf", "passwd"), users) if options.fs_type is None or options.fs_type == 'fsfs': # fsfs.conf file if options.config_file is not None and \ (not minor_version or minor_version >= 6): - shutil.copy(options.config_file, get_fsfs_conf_file_path(path)) + config_file = open(options.config_file, 'r') + fsfsconf = open(get_fsfs_conf_file_path(path), 'w') + for line in config_file.readlines(): + fsfsconf.write(line) + if options.memcached_server and line == '[memcached-servers]\n': + fsfsconf.write('key = %s\n' % options.memcached_server) + config_file.close() + fsfsconf.close() # format file if options.fsfs_sharding is not None: @@ -964,7 +1002,7 @@ def create_repos(path, minor_version = None): # post-commit # Note that some tests (currently only commit_tests) create their own # post-commit hooks, which would override this one. :-( - if options.fsfs_packing: + if options.fsfs_packing and minor_version >=6: # some tests chdir. abs_path = os.path.abspath(path) create_python_hook_script(get_post_commit_hook_path(abs_path), @@ -977,6 +1015,73 @@ def create_repos(path, minor_version = None): # make the repos world-writeable, for mod_dav_svn's sake. 
chmod_tree(path, 0666, 0666) +def _unpack_precooked_repos(path, template): + testdir = os.path.dirname(os.path.abspath(os.path.dirname(__file__))) + repozip = os.path.join(os.path.dirname(testdir), "templates", template) + zipfile.ZipFile(repozip, 'r').extractall(path) + +# For creating new, pre-cooked greek repositories +def unpack_greek_repos(path): + template = "greek-fsfs-v%d.zip" % options.fsfs_version + _unpack_precooked_repos(path, template) + _post_create_repos(path, options.server_minor_version) + +# For creating blank new repositories +def create_repos(path, minor_version = None): + """Create a brand-new SVN repository at PATH. If PATH does not yet + exist, create it.""" + + if not os.path.exists(path): + os.makedirs(path) # this creates all the intermediate dirs, if necessary + + if options.fsfs_version is None: + if options.fs_type == "bdb": + opts = ("--bdb-txn-nosync",) + else: + opts = () + if minor_version is None or minor_version > options.server_minor_version: + minor_version = options.server_minor_version + opts += ("--compatible-version=1.%d" % (minor_version),) + if options.fs_type is not None: + opts += ("--fs-type=" + options.fs_type,) + exit_code, stdout, stderr = run_command(svnadmin_binary, 1, False, + "create", path, *opts) + else: + # Copy a pre-cooked FSFS repository + assert options.fs_type == "fsfs" + template = "empty-fsfs-v%d.zip" % options.fsfs_version + _unpack_precooked_repos(path, template) + exit_code, stdout, stderr = run_command(svnadmin_binary, 1, False, + "setuuid", path) + + # Skip tests if we can't create the repository. 
+ if stderr: + stderr_lines = 0 + not_using_fsfs_backend = (options.fs_type != "fsfs") + backend_deprecation_warning = False + for line in stderr: + stderr_lines += 1 + if line.find('Unknown FS type') != -1: + raise Skip + if not_using_fsfs_backend: + if 0 < line.find('repository back-end is deprecated, consider using'): + backend_deprecation_warning = True + + # Creating BDB repositories will cause svnadmin to print a warning + # which should be ignored. + if (stderr_lines == 1 + and not_using_fsfs_backend + and backend_deprecation_warning): + pass + else: + # If the FS type is known and we noticed more than just the + # BDB-specific warning, assume the repos couldn't be created + # (e.g. due to a missing 'svnadmin' binary). + raise SVNRepositoryCreateFailure("".join(stderr).rstrip()) + + # Configure the new repository. + _post_create_repos(path, minor_version) + # For copying a repository def copy_repos(src_path, dst_path, head_revision, ignore_uuid = 1, minor_version = None): @@ -1120,7 +1225,7 @@ def write_restrictive_svnserve_conf_with_groups(repo_dir, # parallel execution at the bottom like so # if __name__ == '__main__': # svntest.main.run_tests(test_list, serial_only = True) -def write_authz_file(sbox, rules, sections=None): +def write_authz_file(sbox, rules, sections=None, prefixed_rules=None): """Write an authz file to SBOX, appropriate for the RA method used, with authorizations rules RULES mapping paths to strings containing the rules. You can add sections SECTIONS (ex. groups, aliases...) with @@ -1128,23 +1233,37 @@ an appropriate list of mappings. """ fp = open(sbox.authz_file, 'w') - # When the sandbox repository is read only it's name will be different from + # When the sandbox repository is read only its name will be different from # the repository name. 
- repo_name = sbox.repo_dir - while repo_name[-1] == '/': - repo_name = repo_name[:-1] - repo_name = os.path.basename(repo_name) + repo_name = os.path.basename(sbox.repo_dir.rstrip('/')) if sbox.repo_url.startswith("http"): - prefix = repo_name + ":" + default_prefix = repo_name + ":" else: - prefix = "" + default_prefix = "" + if sections: for p, r in sections.items(): fp.write("[%s]\n%s\n" % (p, r)) - for p, r in rules.items(): - fp.write("[%s%s]\n%s\n" % (prefix, p, r)) + if not prefixed_rules: + prefixed_rules = dict() + + if rules: + for p, r in rules.items(): + prefixed_rules[default_prefix + p] = r + + for p, r in prefixed_rules.items(): + fp.write("[%s]\n%s\n" % (p, r)) + if tests_verify_dump_load_cross_check(): + # Insert an ACE that lets the dump/load cross-check bypass + # authz restrictions. + fp.write(crosscheck_username + " = rw\n") + + if tests_verify_dump_load_cross_check() and '/' not in prefixed_rules: + # We need a repository-root ACE for the dump/load cross-check + fp.write("[/]\n" + crosscheck_username + " = rw\n") + fp.close() # See the warning about parallel test execution in write_authz_file @@ -1295,6 +1414,12 @@ def make_log_msg(): # Functions which check the test configuration # (useful for conditional XFails) +def tests_use_prepacakaged_repository(): + return options.fsfs_version is not None + +def tests_verify_dump_load_cross_check(): + return options.dump_load_cross_check + def is_ra_type_dav(): return options.test_area_url.startswith('http') @@ -1322,9 +1447,28 @@ def is_fs_type_fsfs(): # This assumes that fsfs is the default fs implementation. 
return options.fs_type == 'fsfs' or options.fs_type is None +def is_fs_type_fsx(): + return options.fs_type == 'fsx' + def is_fs_type_bdb(): return options.fs_type == 'bdb' +def is_fs_log_addressing(): + return is_fs_type_fsx() or \ + (is_fs_type_fsfs() and options.server_minor_version >= 9) + +def fs_has_rep_sharing(): + return is_fs_type_fsx() or \ + (is_fs_type_fsfs() and options.server_minor_version >= 6) + +def fs_has_pack(): + return is_fs_type_fsx() or \ + (is_fs_type_fsfs() and options.server_minor_version >= 6) + +def fs_has_unique_freeze(): + return (is_fs_type_fsfs() and options.server_minor_version >= 9 + or is_fs_type_bdb()) + def is_os_windows(): return os.name == 'nt' @@ -1367,6 +1511,9 @@ def server_enforces_date_syntax(): def server_has_atomic_revprop(): return options.server_minor_version >= 7 +def server_has_reverse_get_file_revs(): + return options.server_minor_version >= 8 + def is_plaintext_password_storage_disabled(): try: predicate = re.compile("^WARNING: Plaintext password storage is enabled!") @@ -1378,6 +1525,30 @@ def is_plaintext_password_storage_disabled(): return False return True + +# https://issues.apache.org/bugzilla/show_bug.cgi?id=56480 +# https://issues.apache.org/bugzilla/show_bug.cgi?id=55397 +__mod_dav_url_quoting_broken_versions = frozenset([ + '2.2.27', + '2.2.26', + '2.2.25', + '2.4.9', + '2.4.8', + '2.4.7', + '2.4.6', + '2.4.5', +]) +def is_mod_dav_url_quoting_broken(): + if is_ra_type_dav(): + return (options.httpd_version in __mod_dav_url_quoting_broken_versions) + return None + +def is_httpd_authz_provider_enabled(): + if is_ra_type_dav(): + v = options.httpd_version.split('.') + return (v[0] == '2' and int(v[1]) >= 3) or int(v[0]) > 2 + return None + ###################################################################### @@ -1412,13 +1583,12 @@ class TestSpawningThread(threading.Thread): args = [] args.append(str(index)) args.append('-c') + args.append('--set-log-level=%s' % logger.getEffectiveLevel()) # add some 
startup arguments from this process if options.fs_type: args.append('--fs-type=' + options.fs_type) if options.test_area_url: args.append('--url=' + options.test_area_url) - if logger.getEffectiveLevel() <= logging.DEBUG: - args.append('-v') if options.cleanup: args.append('--cleanup') if options.enable_sasl: @@ -1435,6 +1605,24 @@ class TestSpawningThread(threading.Thread): args.append('--ssl-cert=' + options.ssl_cert) if options.http_proxy: args.append('--http-proxy=' + options.http_proxy) + if options.http_proxy_username: + args.append('--http-proxy-username=' + options.http_proxy_username) + if options.http_proxy_password: + args.append('--http-proxy-password=' + options.http_proxy_password) + if options.httpd_version: + args.append('--httpd-version=' + options.httpd_version) + if options.exclusive_wc_locks: + args.append('--exclusive-wc-locks') + if options.memcached_server: + args.append('--memcached-server=' + options.memcached_server) + if options.fsfs_sharding: + args.append('--fsfs-sharding=' + str(options.fsfs_sharding)) + if options.fsfs_packing: + args.append('--fsfs-packing') + if options.fsfs_version: + args.append('--fsfs-version=' + str(options.fsfs_version)) + if options.dump_load_cross_check: + args.append('--dump-load-cross-check') result, stdout_lines, stderr_lines = spawn_process(command, 0, False, None, *args) @@ -1694,13 +1882,7 @@ def _internal_run_tests(test_list, testnums, parallel, srcdir, progress_func): return exit_code -def create_default_options(): - """Set the global options to the defaults, as provided by the argument - parser.""" - _parse_options([]) - - -def _create_parser(): +def _create_parser(usage=None): """Return a parser for our test suite.""" def set_log_level(option, opt, value, parser, level=None): if level: @@ -1710,9 +1892,18 @@ def _create_parser(): # called from --set-log-level logger.setLevel(getattr(logging, value, None) or int(value)) - # set up the parser + # Set up the parser. 
+ # If you add new options, consider adding them in + # + # .../build/run_tests.py:main() + # + # and handling them in + # + # .../build/run_tests.py:TestHarness._init_py_tests() + # _default_http_library = 'serf' - usage = 'usage: %prog [options] [<test> ...]' + if usage is None: + usage = 'usage: %prog [options] [<test> ...]' parser = optparse.OptionParser(usage=usage) parser.add_option('-l', '--list', action='store_true', dest='list_tests', help='Print test doc strings instead of running them') @@ -1727,6 +1918,9 @@ def _create_parser(): parser.add_option('-p', '--parallel', action='store_const', const=default_num_threads, dest='parallel', help='Run the tests in parallel') + parser.add_option('--parallel-instances', action='store', + type='int', dest='parallel', + help='Run the given number of tests in parallel') parser.add_option('-c', action='store_true', dest='is_child_process', help='Flag if we are running this python test as a ' + 'child process') @@ -1736,7 +1930,7 @@ def _create_parser(): parser.add_option('--url', action='store', help='Base url to the repos (e.g. 
svn://localhost)') parser.add_option('--fs-type', action='store', - help='Subversion file system type (fsfs or bdb)') + help='Subversion file system type (fsfs, bdb or fsx)') parser.add_option('--cleanup', action='store_true', help='Whether to clean up') parser.add_option('--enable-sasl', action='store_true', @@ -1757,6 +1951,13 @@ def _create_parser(): help="Run 'svnadmin pack' automatically") parser.add_option('--fsfs-sharding', action='store', type='int', help='Default shard size (for fsfs)') + parser.add_option('--fsfs-version', type='int', action='store', + help='FSFS version (fsfs)') + parser.add_option('--dump-load-cross-check', action='store_true', + help="After every test, run a series of dump and load " + + "tests with svnadmin, svnrdump and svndumpfilter " + + " on the testcase repositories to cross-check " + + " dump file compatibility.") parser.add_option('--config-file', action='store', help="Configuration file for tests.") parser.add_option('--set-log-level', action='callback', type='str', @@ -1780,43 +1981,73 @@ def _create_parser(): help='Path to SSL server certificate.') parser.add_option('--http-proxy', action='store', help='Use the HTTP Proxy at hostname:port.') + parser.add_option('--http-proxy-username', action='store', + help='Username for the HTTP Proxy.') + parser.add_option('--http-proxy-password', action='store', + help='Password for the HTTP Proxy.') + parser.add_option('--httpd-version', action='store', + help='Assume HTTPD is this version.') parser.add_option('--tools-bin', action='store', dest='tools_bin', help='Use the svn tools installed in this path') + parser.add_option('--exclusive-wc-locks', action='store_true', + help='Use sqlite exclusive locking for working copies') + parser.add_option('--memcached-server', action='store', + help='Use memcached server at specified URL (FSFS only)') # most of the defaults are None, but some are other values, set them here parser.set_defaults( server_minor_version=SVN_VER_MINOR, 
url=file_scheme_prefix + \ - urllib.pathname2url(os.path.abspath(os.getcwd())), + svntest.wc.svn_uri_quote( + os.path.abspath( + os.getcwd()).replace(os.path.sep, '/')), http_library=_default_http_library) return parser -def _parse_options(arglist=sys.argv[1:]): +def parse_options(arglist=sys.argv[1:], usage=None): """Parse the arguments in arg_list, and set the global options object with the results""" global options - parser = _create_parser() + parser = _create_parser(usage) (options, args) = parser.parse_args(arglist) - # some sanity checking + # Normalize url to have no trailing slash + if options.url: + if options.url[-1:] == '/': + options.test_area_url = options.url[:-1] + else: + options.test_area_url = options.url + + # Some sanity checking if options.fsfs_packing and not options.fsfs_sharding: parser.error("--fsfs-packing requires --fsfs-sharding") - # If you change the below condition then change - # ../../../../build/run_tests.py too. if options.server_minor_version not in range(3, SVN_VER_MINOR+1): parser.error("test harness only supports server minor versions 3-%d" % SVN_VER_MINOR) - if options.url: - if options.url[-1:] == '/': # Normalize url to have no trailing slash - options.test_area_url = options.url[:-1] - else: - options.test_area_url = options.url + # Make sure the server-minor-version matches the fsfs-version parameter. 
+ if options.fsfs_version: + if options.fsfs_version == 6: + if options.server_minor_version \ + and options.server_minor_version != 8 \ + and options.server_minor_version != SVN_VER_MINOR: + parser.error("--fsfs-version=6 requires --server-minor-version=8") + options.server_minor_version = 8 + if options.fsfs_version == 4: + if options.server_minor_version \ + and options.server_minor_version != 7 \ + and options.server_minor_version != SVN_VER_MINOR: + parser.error("--fsfs-version=4 requires --server-minor-version=7") + options.server_minor_version = 7 + pass + # ### Add more tweaks here if and when we support pre-cooked versions + # ### of FSFS repositories. + pass return (parser, args) @@ -1918,6 +2149,7 @@ def execute_tests(test_list, serial_only = False, test_name = None, global svn_binary global svnadmin_binary global svnlook_binary + global svnrdump_binary global svnsync_binary global svndumpfilter_binary global svnversion_binary @@ -1946,7 +2178,7 @@ def execute_tests(test_list, serial_only = False, test_name = None, if not options: # Override which tests to run from the commandline - (parser, args) = _parse_options() + (parser, args) = parse_options() test_selection = args else: parser = _create_parser() @@ -2003,7 +2235,7 @@ def execute_tests(test_list, serial_only = False, test_name = None, # it to a number if possible for testnum in list(range(1, len(test_list))): test_case = TestRunner(test_list[testnum], testnum) - if test_case.get_function_name() == str(arg): + if test_case.get_function_name() == str(arg).rstrip(','): testnums.append(testnum) appended = True break @@ -2016,7 +2248,9 @@ def execute_tests(test_list, serial_only = False, test_name = None, # Calculate pristine_greek_repos_url from test_area_url. 
pristine_greek_repos_url = options.test_area_url + '/' + \ - urllib.pathname2url(pristine_greek_repos_dir) + svntest.wc.svn_uri_quote( + pristine_greek_repos_dir.replace( + os.path.sep, '/')) if options.use_jsvn: if options.svn_bin is None: @@ -2035,6 +2269,7 @@ def execute_tests(test_list, serial_only = False, test_name = None, svn_binary = os.path.join(options.svn_bin, 'svn' + _exe) svnadmin_binary = os.path.join(options.svn_bin, 'svnadmin' + _exe) svnlook_binary = os.path.join(options.svn_bin, 'svnlook' + _exe) + svnrdump_binary = os.path.join(options.svn_bin, 'svnrdump' + _exe) svnsync_binary = os.path.join(options.svn_bin, 'svnsync' + _exe) svndumpfilter_binary = os.path.join(options.svn_bin, 'svndumpfilter' + _exe) @@ -2093,29 +2328,38 @@ def execute_tests(test_list, serial_only = False, test_name = None, # We are simply listing the tests so always exit with success. return 0 - # don't run tests in parallel when the tests don't support it or there - # are only a few tests to run. + # don't run tests in parallel when the tests don't support it or + # there are only a few tests to run. + options_parallel = options.parallel if serial_only or len(testnums) < 2: options.parallel = 0 - if not options.is_child_process: - # Build out the default configuration directory - create_config_dir(default_config_dir, - ssl_cert=options.ssl_cert, - ssl_url=options.test_area_url, - http_proxy=options.http_proxy) - - # Setup the pristine repository - svntest.actions.setup_pristine_greek_repository() - - # Run the tests. 
- exit_code = _internal_run_tests(test_list, testnums, options.parallel, - options.srcdir, progress_func) + try: + if not options.is_child_process: + # Build out the default configuration directory + create_config_dir(default_config_dir, + ssl_cert=options.ssl_cert, + ssl_url=options.test_area_url, + http_proxy=options.http_proxy, + exclusive_wc_locks=options.exclusive_wc_locks) + + # Setup the pristine repository + svntest.actions.setup_pristine_greek_repository() + + # Run the tests. + exit_code = _internal_run_tests(test_list, testnums, options.parallel, + options.srcdir, progress_func) + finally: + options.parallel = options_parallel # Remove all scratchwork: the 'pristine' repository, greek tree, etc. # This ensures that an 'import' will happen the next time we run. if not options.is_child_process and not options.keep_local_tmp: - safe_rmtree(temp_dir, 1) + try: + safe_rmtree(temp_dir, 1) + except: + logger.error("ERROR: cleanup of '%s' directory failed." % temp_dir) + exit_code = 1 # Cleanup after ourselves. svntest.sandbox.cleanup_deferred_test_paths() diff --git a/subversion/tests/cmdline/svntest/mergetrees.py b/subversion/tests/cmdline/svntest/mergetrees.py new file mode 100755 index 0000000..e01592a --- /dev/null +++ b/subversion/tests/cmdline/svntest/mergetrees.py @@ -0,0 +1,506 @@ +#!/usr/bin/env python +# +# mergetrees.py: routines that create merge scenarios +# +# Subversion is a tool for revision control. +# See http://subversion.apache.org for more information. +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +###################################################################### + +# General modules +import shutil, sys, re, os +import time + +# Our testing module +import main, wc, verify, actions, testcase + +from prop_tests import binary_mime_type_on_text_file_warning + +# (abbreviation) +Item = wc.StateItem +Skip = testcase.Skip_deco +SkipUnless = testcase.SkipUnless_deco +XFail = testcase.XFail_deco +Issues = testcase.Issues_deco +Issue = testcase.Issue_deco +Wimp = testcase.Wimp_deco +exp_noop_up_out = actions.expected_noop_update_output + +from svntest.main import SVN_PROP_MERGEINFO + +def expected_merge_output(rev_ranges, additional_lines=[], foreign=False, + elides=False, two_url=False, target=None, + text_conflicts=0, prop_conflicts=0, tree_conflicts=0, + text_resolved=0, prop_resolved=0, tree_resolved=0, + skipped_paths=0): + """Generate an (inefficient) regex representing the expected merge + output and mergeinfo notifications from REV_RANGES and ADDITIONAL_LINES. + + REV_RANGES is a list of revision ranges for which mergeinfo is being + recorded. Each range is of the form [start, end] (where both START and + END are inclusive, unlike in '-rX:Y') or the form [single_rev] (which is + like '-c SINGLE_REV'). If REV_RANGES is None then only the standard + notification for a 3-way merge is expected. + + ADDITIONAL_LINES is a list of strings to match the other lines of output; + these are basically regular expressions except that backslashes will be + escaped herein. 
If ADDITIONAL_LINES is a single string, it is interpreted + the same as a list containing that string. + + If ELIDES is true, add to the regex an expression representing elision + notification. If TWO_URL is true, tweak the regex to expect the + appropriate mergeinfo notification for a 3-way merge. + + TARGET is the local path to the target, as it should appear in + notifications; if None, it is not checked. + + TEXT_CONFLICTS, PROP_CONFLICTS, TREE_CONFLICTS and SKIPPED_PATHS specify + the number of each kind of conflict to expect. + """ + + if rev_ranges is None: + lines = [main.merge_notify_line(None, None, False, foreign)] + else: + lines = [] + for rng in rev_ranges: + start_rev = rng[0] + if len(rng) > 1: + end_rev = rng[1] + else: + end_rev = None + lines += [main.merge_notify_line(start_rev, end_rev, + True, foreign, target)] + lines += [main.mergeinfo_notify_line(start_rev, end_rev, target)] + + if (elides): + lines += ["--- Eliding mergeinfo from .*\n"] + + if (two_url): + lines += ["--- Recording mergeinfo for merge between repository URLs .*\n"] + + # Address "The Backslash Plague" + # + # If ADDITIONAL_LINES are present there are possibly paths in it with + # multiple components and on Windows these components are separated with + # '\'. These need to be escaped properly in the regexp for the match to + # work correctly. See http://aspn.activestate.com/ASPN/docs/ActivePython + # /2.2/howto/regex/regex.html#SECTION000420000000000000000. 
+ if isinstance(additional_lines, str): + additional_lines = [additional_lines] + if sys.platform == 'win32': + additional_lines = [line.replace("\\", "\\\\") for line in additional_lines] + lines += additional_lines + + lines += main.summary_of_conflicts( + text_conflicts, prop_conflicts, tree_conflicts, + text_resolved, prop_resolved, tree_resolved, + skipped_paths, + as_regex=True) + + return "|".join(lines) + +def check_mergeinfo_recursively(root_path, subpaths_mergeinfo): + """Check that the mergeinfo properties on and under ROOT_PATH are those in + SUBPATHS_MERGEINFO, a {path: mergeinfo-prop-val} dictionary.""" + expected = verify.UnorderedOutput( + [path + ' - ' + subpaths_mergeinfo[path] + '\n' + for path in subpaths_mergeinfo]) + actions.run_and_verify_svn(expected, [], + 'propget', '-R', SVN_PROP_MERGEINFO, + root_path) + +###################################################################### +#---------------------------------------------------------------------- +def set_up_dir_replace(sbox): + """Set up the working copy for directory replace tests, creating + directory 'A/B/F/foo' with files 'new file' and 'new file2' within + it (r2), and merging 'foo' onto 'C' (r3), then deleting 'A/B/F/foo' + (r4).""" + + sbox.build() + wc_dir = sbox.wc_dir + + C_path = sbox.ospath('A/C') + F_path = sbox.ospath('A/B/F') + F_url = sbox.repo_url + '/A/B/F' + + foo_path = os.path.join(F_path, 'foo') + new_file = os.path.join(foo_path, "new file") + new_file2 = os.path.join(foo_path, "new file 2") + + # Make directory foo in F, and add some files within it. + actions.run_and_verify_svn(None, [], 'mkdir', foo_path) + main.file_append(new_file, "Initial text in new file.\n") + main.file_append(new_file2, "Initial text in new file 2.\n") + main.run_svn(None, "add", new_file) + main.run_svn(None, "add", new_file2) + + # Commit all the new content, creating r2. 
+ expected_output = wc.State(wc_dir, { + 'A/B/F/foo' : Item(verb='Adding'), + 'A/B/F/foo/new file' : Item(verb='Adding'), + 'A/B/F/foo/new file 2' : Item(verb='Adding'), + }) + expected_status = actions.get_virginal_state(wc_dir, 1) + expected_status.add({ + 'A/B/F/foo' : Item(status=' ', wc_rev=2), + 'A/B/F/foo/new file' : Item(status=' ', wc_rev=2), + 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=2), + }) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + + # Merge foo onto C + expected_output = wc.State(C_path, { + 'foo' : Item(status='A '), + 'foo/new file' : Item(status='A '), + 'foo/new file 2' : Item(status='A '), + }) + expected_mergeinfo_output = wc.State(C_path, { + '' : Item(status=' U'), + }) + expected_elision_output = wc.State(C_path, { + }) + expected_disk = wc.State('', { + '' : Item(props={SVN_PROP_MERGEINFO : '/A/B/F:2'}), + 'foo' : Item(), + 'foo/new file' : Item("Initial text in new file.\n"), + 'foo/new file 2' : Item("Initial text in new file 2.\n"), + }) + expected_status = wc.State(C_path, { + '' : Item(status=' M', wc_rev=1), + 'foo' : Item(status='A ', wc_rev='-', copied='+'), + 'foo/new file' : Item(status=' ', wc_rev='-', copied='+'), + 'foo/new file 2' : Item(status=' ', wc_rev='-', copied='+'), + }) + expected_skip = wc.State(C_path, { }) + actions.run_and_verify_merge(C_path, '1', '2', F_url, None, + expected_output, + expected_mergeinfo_output, + expected_elision_output, + expected_disk, + expected_status, + expected_skip, + check_props=True) + # Commit merge of foo onto C, creating r3. 
+ expected_output = wc.State(wc_dir, { + 'A/C' : Item(verb='Sending'), + 'A/C/foo' : Item(verb='Adding'), + }) + expected_status = actions.get_virginal_state(wc_dir, 1) + expected_status.add({ + 'A/B/F/foo' : Item(status=' ', wc_rev=2), + 'A/C' : Item(status=' ', wc_rev=3), + 'A/B/F/foo/new file' : Item(status=' ', wc_rev=2), + 'A/B/F/foo/new file 2' : Item(status=' ', wc_rev=2), + 'A/C/foo' : Item(status=' ', wc_rev=3), + 'A/C/foo/new file' : Item(status=' ', wc_rev=3), + 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3), + + }) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + + # Delete foo on F, creating r4. + actions.run_and_verify_svn(None, [], 'rm', foo_path) + expected_output = wc.State(wc_dir, { + 'A/B/F/foo' : Item(verb='Deleting'), + }) + expected_status = actions.get_virginal_state(wc_dir, 1) + expected_status.add({ + 'A/C' : Item(status=' ', wc_rev=3), + 'A/C/foo' : Item(status=' ', wc_rev=3), + 'A/C/foo/new file' : Item(status=' ', wc_rev=3), + 'A/C/foo/new file 2' : Item(status=' ', wc_rev=3), + }) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + +#---------------------------------------------------------------------- +def set_up_branch(sbox, branch_only = False, nbr_of_branches = 1): + '''Starting with standard greek tree, copy 'A' NBR_OF_BRANCHES times + to A_COPY, A_COPY_2, A_COPY_3, and so on. 
Then, unless BRANCH_ONLY is + true, make four modifications (setting file contents to "New content") + under A: + r(2 + NBR_OF_BRANCHES) - A/D/H/psi + r(3 + NBR_OF_BRANCHES) - A/D/G/rho + r(4 + NBR_OF_BRANCHES) - A/B/E/beta + r(5 + NBR_OF_BRANCHES) - A/D/H/omega + Return (expected_disk, expected_status).''' + + # With the default parameters, the branching looks like this: + # + # A -1-----3-4-5-6-- + # \ + # A_COPY 2----------- + + wc_dir = sbox.wc_dir + + expected_status = actions.get_virginal_state(wc_dir, 1) + expected_disk = main.greek_state.copy() + + def copy_A(dest_name, rev): + expected = verify.UnorderedOutput( + ["A " + os.path.join(wc_dir, dest_name, "B") + "\n", + "A " + os.path.join(wc_dir, dest_name, "B", "lambda") + "\n", + "A " + os.path.join(wc_dir, dest_name, "B", "E") + "\n", + "A " + os.path.join(wc_dir, dest_name, "B", "E", "alpha") + "\n", + "A " + os.path.join(wc_dir, dest_name, "B", "E", "beta") + "\n", + "A " + os.path.join(wc_dir, dest_name, "B", "F") + "\n", + "A " + os.path.join(wc_dir, dest_name, "mu") + "\n", + "A " + os.path.join(wc_dir, dest_name, "C") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "gamma") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "G") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "G", "pi") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "G", "rho") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "G", "tau") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "H") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "H", "chi") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "H", "omega") + "\n", + "A " + os.path.join(wc_dir, dest_name, "D", "H", "psi") + "\n", + "Checked out revision " + str(rev - 1) + ".\n", + "A " + os.path.join(wc_dir, dest_name) + "\n"]) + expected_status.add({ + dest_name + "/B" : Item(status=' ', wc_rev=rev), + dest_name + "/B/lambda" : Item(status=' ', wc_rev=rev), + dest_name + "/B/E" : 
Item(status=' ', wc_rev=rev), + dest_name + "/B/E/alpha" : Item(status=' ', wc_rev=rev), + dest_name + "/B/E/beta" : Item(status=' ', wc_rev=rev), + dest_name + "/B/F" : Item(status=' ', wc_rev=rev), + dest_name + "/mu" : Item(status=' ', wc_rev=rev), + dest_name + "/C" : Item(status=' ', wc_rev=rev), + dest_name + "/D" : Item(status=' ', wc_rev=rev), + dest_name + "/D/gamma" : Item(status=' ', wc_rev=rev), + dest_name + "/D/G" : Item(status=' ', wc_rev=rev), + dest_name + "/D/G/pi" : Item(status=' ', wc_rev=rev), + dest_name + "/D/G/rho" : Item(status=' ', wc_rev=rev), + dest_name + "/D/G/tau" : Item(status=' ', wc_rev=rev), + dest_name + "/D/H" : Item(status=' ', wc_rev=rev), + dest_name + "/D/H/chi" : Item(status=' ', wc_rev=rev), + dest_name + "/D/H/omega" : Item(status=' ', wc_rev=rev), + dest_name + "/D/H/psi" : Item(status=' ', wc_rev=rev), + dest_name : Item(status=' ', wc_rev=rev)}) + expected_disk.add({ + dest_name : Item(), + dest_name + '/B' : Item(), + dest_name + '/B/lambda' : Item("This is the file 'lambda'.\n"), + dest_name + '/B/E' : Item(), + dest_name + '/B/E/alpha' : Item("This is the file 'alpha'.\n"), + dest_name + '/B/E/beta' : Item("This is the file 'beta'.\n"), + dest_name + '/B/F' : Item(), + dest_name + '/mu' : Item("This is the file 'mu'.\n"), + dest_name + '/C' : Item(), + dest_name + '/D' : Item(), + dest_name + '/D/gamma' : Item("This is the file 'gamma'.\n"), + dest_name + '/D/G' : Item(), + dest_name + '/D/G/pi' : Item("This is the file 'pi'.\n"), + dest_name + '/D/G/rho' : Item("This is the file 'rho'.\n"), + dest_name + '/D/G/tau' : Item("This is the file 'tau'.\n"), + dest_name + '/D/H' : Item(), + dest_name + '/D/H/chi' : Item("This is the file 'chi'.\n"), + dest_name + '/D/H/omega' : Item("This is the file 'omega'.\n"), + dest_name + '/D/H/psi' : Item("This is the file 'psi'.\n"), + }) + + # Make a branch A_COPY to merge into. 
+ actions.run_and_verify_svn(expected, [], 'copy', + sbox.repo_url + "/A", + os.path.join(wc_dir, + dest_name)) + + expected_output = wc.State(wc_dir, {dest_name : Item(verb='Adding')}) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + for i in range(nbr_of_branches): + if i == 0: + copy_A('A_COPY', i + 2) + else: + copy_A('A_COPY_' + str(i + 1), i + 2) + + if branch_only: + return expected_disk, expected_status + + # Make some changes under A which we'll later merge under A_COPY: + + # r(nbr_of_branches + 2) - modify and commit A/D/H/psi + main.file_write(sbox.ospath('A/D/H/psi'), + "New content") + expected_output = wc.State(wc_dir, {'A/D/H/psi' : Item(verb='Sending')}) + expected_status.tweak('A/D/H/psi', wc_rev=nbr_of_branches + 2) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + expected_disk.tweak('A/D/H/psi', contents="New content") + + # r(nbr_of_branches + 3) - modify and commit A/D/G/rho + main.file_write(sbox.ospath('A/D/G/rho'), + "New content") + expected_output = wc.State(wc_dir, {'A/D/G/rho' : Item(verb='Sending')}) + expected_status.tweak('A/D/G/rho', wc_rev=nbr_of_branches + 3) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + expected_disk.tweak('A/D/G/rho', contents="New content") + + # r(nbr_of_branches + 4) - modify and commit A/B/E/beta + main.file_write(sbox.ospath('A/B/E/beta'), + "New content") + expected_output = wc.State(wc_dir, {'A/B/E/beta' : Item(verb='Sending')}) + expected_status.tweak('A/B/E/beta', wc_rev=nbr_of_branches + 4) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + expected_disk.tweak('A/B/E/beta', contents="New content") + + # r(nbr_of_branches + 5) - modify and commit A/D/H/omega + main.file_write(sbox.ospath('A/D/H/omega'), + "New content") + expected_output = wc.State(wc_dir, {'A/D/H/omega' : Item(verb='Sending')}) + expected_status.tweak('A/D/H/omega', wc_rev=nbr_of_branches + 5) + 
actions.run_and_verify_commit(wc_dir, expected_output, expected_status) + expected_disk.tweak('A/D/H/omega', contents="New content") + + return expected_disk, expected_status + +#---------------------------------------------------------------------- +# Helper functions. These take local paths using '/' separators. + +def local_path(path): + "Convert a path from '/' separators to the local style." + return os.sep.join(path.split('/')) + +def svn_mkfile(path): + "Make and add a file with some default content, and keyword expansion." + path = local_path(path) + dirname, filename = os.path.split(path) + main.file_write(path, "This is the file '" + filename + "'.\n" + + "Last changed in '$Revision$'.\n") + actions.run_and_verify_svn(None, [], 'add', path) + actions.run_and_verify_svn(None, [], 'propset', + 'svn:keywords', 'Revision', path) + +def svn_modfile(path): + "Make text and property mods to a WC file." + path = local_path(path) + main.file_append(path, "An extra line.\n") + actions.run_and_verify_svn(None, [], 'propset', + 'newprop', 'v', path) + +def svn_copy(s_rev, path1, path2): + "Copy a WC path locally." + path1 = local_path(path1) + path2 = local_path(path2) + actions.run_and_verify_svn(None, [], 'copy', '--parents', + '-r', s_rev, path1, path2) + +def svn_merge(rev_range, source, target, lines=None, elides=[], + text_conflicts=0, prop_conflicts=0, tree_conflicts=0, + text_resolved=0, prop_resolved=0, tree_resolved=0, + args=[]): + """Merge a single change from path SOURCE to path TARGET and verify the + output and that there is no error. (The changes made are not verified.) + + REV_RANGE is either a number (to cherry-pick that specific change) or a + two-element list [X,Y] to pick the revision range '-r(X-1):Y'. + + LINES is a list of regular expressions to match other lines of output; if + LINES is 'None' then match all normal (non-conflicting) merges. + + ELIDES is a list of paths on which mergeinfo elision should be reported. 
+ + TEXT_CONFLICTS, PROP_CONFLICTS and TREE_CONFLICTS specify the number of + each kind of conflict to expect. + + ARGS are additional arguments passed to svn merge. + """ + + source = local_path(source) + target = local_path(target) + elides = [local_path(p) for p in elides] + if isinstance(rev_range, int): + mi_rev_range = [rev_range] + rev_arg = '-c' + str(rev_range) + else: + mi_rev_range = rev_range + rev_arg = '-r' + str(rev_range[0] - 1) + ':' + str(rev_range[1]) + if lines is None: + lines = ["(A |D |[UG] | [UG]|[UG][UG]) " + target + ".*\n"] + else: + # Expect mergeinfo on the target; caller must supply matches for any + # subtree mergeinfo paths. + lines.append(" [UG] " + target + "\n") + exp_out = expected_merge_output([mi_rev_range], lines, target=target, + elides=elides, + text_conflicts=text_conflicts, + prop_conflicts=prop_conflicts, + tree_conflicts=tree_conflicts, + text_resolved=text_resolved, + prop_resolved=prop_resolved, + tree_resolved=tree_resolved) + actions.run_and_verify_svn(exp_out, [], + 'merge', rev_arg, source, target, *args) + +#---------------------------------------------------------------------- +# Setup helper for issue #4056 and issue #4057 tests. +def noninheritable_mergeinfo_test_set_up(sbox): + '''Starting with standard greek tree, copy 'A' to 'branch' in r2 and + then made a file edit to A/B/lambda in r3. + Return (expected_output, expected_mergeinfo_output, expected_elision_output, + expected_status, expected_disk, expected_skip) for a merge of + r3 from ^/A/B to branch/B.''' + + sbox.build() + wc_dir = sbox.wc_dir + + lambda_path = sbox.ospath('A/B/lambda') + B_branch_path = sbox.ospath('branch/B') + + # r2 - Branch ^/A to ^/branch. + main.run_svn(None, 'copy', sbox.repo_url + '/A', + sbox.repo_url + '/branch', '-m', 'make a branch') + + # r3 - Make an edit to A/B/lambda. 
+ main.file_write(lambda_path, "trunk edit.\n") + main.run_svn(None, 'commit', '-m', 'file edit', wc_dir) + main.run_svn(None, 'up', wc_dir) + + expected_output = wc.State(B_branch_path, { + 'lambda' : Item(status='U '), + }) + expected_mergeinfo_output = wc.State(B_branch_path, { + '' : Item(status=' U'), + 'lambda' : Item(status=' U'), + }) + expected_elision_output = wc.State(B_branch_path, { + 'lambda' : Item(status=' U'), + }) + expected_status = wc.State(B_branch_path, { + '' : Item(status=' M'), + 'lambda' : Item(status='M '), + 'E' : Item(status=' '), + 'E/alpha' : Item(status=' '), + 'E/beta' : Item(status=' '), + 'F' : Item(status=' '), + }) + expected_status.tweak(wc_rev='3') + expected_disk = wc.State('', { + '' : Item(props={SVN_PROP_MERGEINFO : '/A/B:3'}), + 'lambda' : Item("trunk edit.\n"), + 'E' : Item(), + 'E/alpha' : Item("This is the file 'alpha'.\n"), + 'E/beta' : Item("This is the file 'beta'.\n"), + 'F' : Item(), + }) + expected_skip = wc.State(B_branch_path, {}) + + return expected_output, expected_mergeinfo_output, expected_elision_output, \ + expected_status, expected_disk, expected_skip + diff --git a/subversion/tests/cmdline/svntest/objects.py b/subversion/tests/cmdline/svntest/objects.py index 8c8cdd5..169e5e6 100644 --- a/subversion/tests/cmdline/svntest/objects.py +++ b/subversion/tests/cmdline/svntest/objects.py @@ -156,7 +156,7 @@ class SvnRepository: """Run 'svnadmin dump' on the repository.""" exit_code, stdout, stderr = \ - actions.run_and_verify_svnadmin(None, None, None, + actions.run_and_verify_svnadmin(None, None, 'dump', self.repo_absdir) ldumpfile = local_path(output_dir + "/svnadmin.dump") main.file_write(ldumpfile, ''.join(stderr)) @@ -167,7 +167,7 @@ class SvnRepository: make, and each directory is a path relative to the repository root, neither starting nor ending with a slash.""" urls = [self.repo_url + '/' + dir for dir in dirs] - actions.run_and_verify_svn(None, None, [], + actions.run_and_verify_svn(None, [], 
'mkdir', '-m', 'svn_mkdirs()', '--parents', *urls) self.head_rev += 1 @@ -207,7 +207,7 @@ class SvnWC: def svn_mkdir(self, rpath): lpath = local_path(rpath) - actions.run_and_verify_svn(None, None, [], 'mkdir', lpath) + actions.run_and_verify_svn(None, [], 'mkdir', lpath) self.state.add({ rpath : wc.StateItem(status='A ') @@ -216,7 +216,7 @@ class SvnWC: # def propset(self, pname, pvalue, *rpaths): # "Set property 'pname' to value 'pvalue' on each path in 'rpaths'" # local_paths = tuple([local_path(rpath) for rpath in rpaths]) -# actions.run_and_verify_svn(None, None, [], 'propset', pname, pvalue, +# actions.run_and_verify_svn(None, [], 'propset', pname, pvalue, # *local_paths) def svn_set_props(self, rpath, props): @@ -224,10 +224,10 @@ class SvnWC: """ lpath = local_path(rpath) #for prop in path's existing props: - # actions.run_and_verify_svn(None, None, [], 'propdel', + # actions.run_and_verify_svn(None, [], 'propdel', # prop, lpath) for prop in props: - actions.run_and_verify_svn(None, None, [], 'propset', + actions.run_and_verify_svn(None, [], 'propset', prop, props[prop], lpath) self.state.tweak(rpath, props=props) @@ -240,7 +240,7 @@ class SvnWC: content = "This is the file '" + filename + "'.\n" + \ "Last changed in '$Revision$'.\n" main.file_write(lpath, content) - actions.run_and_verify_svn(None, None, [], 'add', lpath) + actions.run_and_verify_svn(None, [], 'add', lpath) self.state.add({ rpath : wc.StateItem(status='A ') @@ -257,7 +257,7 @@ class SvnWC: lpath = local_path(rpath) if content is not None: #main.file_append(lpath, "An extra line.\n") - #actions.run_and_verify_svn(None, None, [], 'propset', + #actions.run_and_verify_svn(None, [], 'propset', # 'newprop', 'v', lpath) main.file_write(lpath, content) self.state.tweak(rpath, content=content) @@ -274,7 +274,7 @@ class SvnWC: args = [lpath1, lpath2] if parents: args += ['--parents'] - actions.run_and_verify_svn(None, None, [], 'copy', *args) + actions.run_and_verify_svn(None, [], 'copy', *args) 
self.state.add({ rpath2: self.state.desc[rpath1] }) @@ -292,7 +292,7 @@ class SvnWC: args += ['-r', rev] if parents: args += ['--parents'] - actions.run_and_verify_svn(None, None, [], 'copy', *args) + actions.run_and_verify_svn(None, [], 'copy', *args) self.state.add({ rpath2: self.state.desc[rpath1] }) @@ -303,12 +303,12 @@ class SvnWC: args = [] if even_if_modified: args += ['--force'] - actions.run_and_verify_svn(None, None, [], 'delete', lpath, *args) + actions.run_and_verify_svn(None, [], 'delete', lpath, *args) def svn_commit(self, rpath='', log=''): "Commit a WC path (recursively). Return the new revision number." lpath = local_path(rpath) - actions.run_and_verify_svn(None, verify.AnyOutput, [], + actions.run_and_verify_svn(verify.AnyOutput, [], 'commit', '-m', log, lpath) actions.run_and_verify_update(lpath, None, None, None) self.repo.head_rev += 1 @@ -332,6 +332,6 @@ class SvnWC: # exp_1 = "--- Merging r.* into '" + target_re + ".*':" # exp_2 = "(A |D |[UG] | [UG]|[UG][UG]) " + target_re + ".*" # exp_out = verify.RegexOutput(exp_1 + "|" + exp_2) -# actions.run_and_verify_svn(None, exp_out, [], +# actions.run_and_verify_svn(exp_out, [], # 'merge', rev_spec, lsource, ltarget) diff --git a/subversion/tests/cmdline/svntest/sandbox.py b/subversion/tests/cmdline/svntest/sandbox.py index ad67ac0..8451bb1 100644 --- a/subversion/tests/cmdline/svntest/sandbox.py +++ b/subversion/tests/cmdline/svntest/sandbox.py @@ -24,14 +24,68 @@ import os import shutil import copy -import urllib import logging +import re import svntest logger = logging.getLogger() +def make_mirror(sbox, source_prop_encoding=None): + """Make a mirror of the repository in SBOX. + """ + # Set up the mirror repository. 
+ dest_sbox = sbox.clone_dependent() + dest_sbox.build(create_wc=False, empty=True) + exit_code, output, errput = svntest.main.run_svnlook("uuid", sbox.repo_dir) + svntest.actions.run_and_verify_svnadmin2(None, None, 0, + 'setuuid', dest_sbox.repo_dir, + output[0][:-1]) + svntest.actions.enable_revprop_changes(dest_sbox.repo_dir) + + repo_url = sbox.repo_url + dest_repo_url = dest_sbox.repo_url + + # Synchronize it. + args = (svntest.main.svnrdump_crosscheck_authentication,) + if source_prop_encoding: + args = args + ("--source-prop-encoding=" + source_prop_encoding,) + svntest.actions.run_and_verify_svnsync(svntest.verify.AnyOutput, [], + "initialize", + dest_repo_url, repo_url, *args) + svntest.actions.run_and_verify_svnsync(None, [], + "synchronize", + dest_repo_url, repo_url, *args) + + return dest_sbox + +def verify_mirror(repo_url, repo_dir, expected_dumpfile): + """Compare the repository content at REPO_URL/REPO_DIR with that in + EXPECTED_DUMPFILE (which is a non-delta dump). + """ + # Remove some SVNSync-specific housekeeping properties from the + # mirror repository in preparation for the comparison dump. + for prop_name in ("svn:sync-from-url", "svn:sync-from-uuid", + "svn:sync-last-merged-rev"): + svntest.actions.run_and_verify_svn( + None, [], "propdel", "--revprop", "-r", "0", + prop_name, repo_url) + # Create a dump file from the mirror repository. + dumpfile_s_n = svntest.actions.run_and_verify_dump(repo_dir) + # Compare the mirror's dumpfile, ignoring any expected differences: + # The original dumpfile in some cases lacks 'Text-content-sha1' headers; + # the mirror dump always has them -- ### Why? 
+ svnsync_headers_always = re.compile("Text-content-sha1: ") + dumpfile_a_n_cmp = [l for l in expected_dumpfile + if not svnsync_headers_always.match(l)] + dumpfile_s_n_cmp = [l for l in dumpfile_s_n + if not svnsync_headers_always.match(l)] + svntest.verify.compare_dump_files(None, None, + dumpfile_a_n_cmp, + dumpfile_s_n_cmp) + + class Sandbox: """Manages a sandbox (one or more repository/working copy pairs) for a test to operate within.""" @@ -46,7 +100,9 @@ class Sandbox: # This flag is set to True by build() and returned by is_built() self._is_built = False - def _set_name(self, name, read_only=False): + self.was_cwd = os.getcwd() + + def _set_name(self, name, read_only=False, empty=False): """A convenience method for renaming a sandbox, useful when working with multiple repositories in the same unit test.""" if not name is None: @@ -54,32 +110,19 @@ class Sandbox: self.read_only = read_only self.wc_dir = os.path.join(svntest.main.general_wc_dir, self.name) self.add_test_path(self.wc_dir) - if not read_only: + if empty or not read_only: # use a local repo self.repo_dir = os.path.join(svntest.main.general_repo_dir, self.name) self.repo_url = (svntest.main.options.test_area_url + '/' - + urllib.pathname2url(self.repo_dir)) + + svntest.wc.svn_uri_quote( + self.repo_dir.replace(os.path.sep, '/'))) self.add_test_path(self.repo_dir) else: self.repo_dir = svntest.main.pristine_greek_repos_dir self.repo_url = svntest.main.pristine_greek_repos_url - ### TODO: Move this into to the build() method - # For dav tests we need a single authz file which must be present, - # so we recreate it each time a sandbox is created with some default - # contents, making sure that an empty file is never present if self.repo_url.startswith("http"): - # this dir doesn't exist out of the box, so we may have to make it - if not os.path.exists(svntest.main.work_dir): - os.makedirs(svntest.main.work_dir) self.authz_file = os.path.join(svntest.main.work_dir, "authz") - tmp_authz_file = 
os.path.join(svntest.main.work_dir, "authz-" + self.name) - open(tmp_authz_file, 'w').write("[/]\n* = rw\n") - shutil.move(tmp_authz_file, self.authz_file) self.groups_file = os.path.join(svntest.main.work_dir, "groups") - - # For svnserve tests we have a per-repository authz file, and it - # doesn't need to be there in order for things to work, so we don't - # have any default contents. elif self.repo_url.startswith("svn"): self.authz_file = os.path.join(self.repo_dir, "conf", "authz") self.groups_file = os.path.join(self.repo_dir, "conf", "groups") @@ -102,15 +145,32 @@ class Sandbox: shutil.copytree(self.wc_dir, clone.wc_dir, symlinks=True) return clone - def build(self, name=None, create_wc=True, read_only=False, + def build(self, name=None, create_wc=True, read_only=False, empty=False, minor_version=None): """Make a 'Greek Tree' repo (or refer to the central one if READ_ONLY), + or make an empty repo if EMPTY is true, and check out a WC from it (unless CREATE_WC is false). Change the sandbox's name to NAME. 
See actions.make_repo_and_wc() for details.""" - self._set_name(name, read_only) - svntest.actions.make_repo_and_wc(self, create_wc, read_only, minor_version) + self._set_name(name, read_only, empty) + self._ensure_authz() + svntest.actions.make_repo_and_wc(self, create_wc, read_only, empty, + minor_version) self._is_built = True + def _ensure_authz(self): + "make sure the repository is accessible" + + if self.repo_url.startswith("http"): + default_authz = "[/]\n* = rw\n" + + if (svntest.main.options.parallel == 0 + and (not os.path.isfile(self.authz_file) + or open(self.authz_file,'r').read() != default_authz)): + + tmp_authz_file = os.path.join(svntest.main.work_dir, "authz-" + self.name) + open(tmp_authz_file, 'w').write(default_authz) + shutil.move(tmp_authz_file, self.authz_file) + def authz_name(self, repo_dir=None): "return this sandbox's name for use in an authz file" repo_dir = repo_dir or self.repo_dir @@ -135,7 +195,8 @@ class Sandbox: path = (os.path.join(svntest.main.general_repo_dir, self.name) + '.' 
+ suffix) url = svntest.main.options.test_area_url + \ - '/' + urllib.pathname2url(path) + '/' + svntest.wc.svn_uri_quote( + path.replace(os.path.sep, '/')) self.add_test_path(path, remove) return path, url @@ -185,7 +246,11 @@ class Sandbox: of this sbox, or relative to OS-style path WC_DIR if supplied.""" if wc_dir is None: wc_dir = self.wc_dir - return os.path.join(wc_dir, svntest.wc.to_ospath(relpath)) + + if relpath == '': + return wc_dir + else: + return os.path.join(wc_dir, svntest.wc.to_ospath(relpath)) def ospaths(self, relpaths, wc_dir=None): """Return a list of RELPATHS but with each path converted to an OS-style @@ -212,6 +277,12 @@ class Sandbox: temporary and 'TEMP' or 'PERM', parts[1]) + def file_protocol_url(self): + """get a file:// url pointing to the repository""" + return svntest.main.file_scheme_prefix + \ + svntest.wc.svn_uri_quote( + os.path.abspath(self.repo_dir).replace(os.path.sep, '/')) + def simple_update(self, target=None, revision='HEAD'): """Update the WC or TARGET. 
TARGET is a relpath relative to the WC.""" @@ -317,15 +388,18 @@ class Sandbox: raise Exception("Unexpected line '" + line + "' in proplist output" + str(out)) return props - def simple_add_symlink(self, dest, target): - """Create a symlink TARGET pointing to DEST and add it to subversion""" + def simple_symlink(self, dest, target): + """Create a symlink TARGET pointing to DEST""" if svntest.main.is_posix_os(): os.symlink(dest, self.ospath(target)) else: svntest.main.file_write(self.ospath(target), "link %s" % dest) + + def simple_add_symlink(self, dest, target, add=True): + """Create a symlink TARGET pointing to DEST and add it to subversion""" + self.simple_symlink(dest, target) self.simple_add(target) - if not svntest.main.is_posix_os(): - # '*' is evaluated on Windows + if not svntest.main.is_posix_os(): # '*' is evaluated on Windows self.simple_propset('svn:special', 'X', target) def simple_add_text(self, text, *targets): @@ -360,7 +434,7 @@ class Sandbox: def simple_append(self, dest, contents, truncate=False): """Append CONTENTS to file DEST, optionally truncating it first. DEST is a relpath relative to the WC.""" - open(self.ospath(dest), truncate and 'w' or 'a').write(contents) + open(self.ospath(dest), truncate and 'wb' or 'ab').write(contents) def simple_lock(self, *targets): """Lock TARGETS in the WC. 
@@ -369,6 +443,129 @@ class Sandbox: targets = self.ospaths(targets) svntest.main.run_svn(False, 'lock', *targets) + def youngest(self): + _, output, _ = svntest.actions.run_and_verify_svnlook( + svntest.verify.AnyOutput, [], + 'youngest', self.repo_dir) + youngest = int(output[0]) + return youngest + + def verify_repo(self): + """ + """ + svnrdump_headers_missing = re.compile( + "Text-content-sha1: .*|Text-copy-source-md5: .*|" + "Text-copy-source-sha1: .*|Text-delta-base-sha1: .*" + ) + svnrdump_headers_always = re.compile( + "Prop-delta: .*" + ) + + dumpfile_a_n = svntest.actions.run_and_verify_dump(self.repo_dir, + deltas=False) + dumpfile_a_d = svntest.actions.run_and_verify_dump(self.repo_dir, + deltas=True) + dumpfile_r_d = svntest.actions.run_and_verify_svnrdump( + None, svntest.verify.AnyOutput, [], 0, 'dump', '-q', self.repo_url, + svntest.main.svnrdump_crosscheck_authentication) + + # Compare the two deltas dumpfiles, ignoring expected differences + dumpfile_a_d_cmp = [l for l in dumpfile_a_d + if not svnrdump_headers_missing.match(l) + and not svnrdump_headers_always.match(l)] + dumpfile_r_d_cmp = [l for l in dumpfile_r_d + if not svnrdump_headers_always.match(l)] + # Ignore differences in number of blank lines between node records, + # as svnrdump puts 3 whereas svnadmin puts 2 after a replace-with-copy. + svntest.verify.compare_dump_files(None, None, + dumpfile_a_d_cmp, + dumpfile_r_d_cmp, + ignore_number_of_blank_lines=True) + + # Try loading the dump files. 
+ # For extra points, load each with the other tool: + # svnadmin dump | svnrdump load + # svnrdump dump | svnadmin load + repo_dir_a_n, repo_url_a_n = self.add_repo_path('load_a_n') + svntest.main.create_repos(repo_dir_a_n) + svntest.actions.enable_revprop_changes(repo_dir_a_n) + svntest.actions.run_and_verify_svnrdump( + dumpfile_a_n, svntest.verify.AnyOutput, [], 0, 'load', repo_url_a_n, + svntest.main.svnrdump_crosscheck_authentication) + + repo_dir_a_d, repo_url_a_d = self.add_repo_path('load_a_d') + svntest.main.create_repos(repo_dir_a_d) + svntest.actions.enable_revprop_changes(repo_dir_a_d) + svntest.actions.run_and_verify_svnrdump( + dumpfile_a_d, svntest.verify.AnyOutput, [], 0, 'load', repo_url_a_d, + svntest.main.svnrdump_crosscheck_authentication) + + repo_dir_r_d, repo_url_r_d = self.add_repo_path('load_r_d') + svntest.main.create_repos(repo_dir_r_d) + svntest.actions.run_and_verify_load(repo_dir_r_d, dumpfile_r_d) + + # Dump the loaded repositories in the same way; expect exact equality + reloaded_dumpfile_a_n = svntest.actions.run_and_verify_dump(repo_dir_a_n) + reloaded_dumpfile_a_d = svntest.actions.run_and_verify_dump(repo_dir_a_d) + reloaded_dumpfile_r_d = svntest.actions.run_and_verify_dump(repo_dir_r_d) + svntest.verify.compare_dump_files(None, None, + reloaded_dumpfile_a_n, + reloaded_dumpfile_a_d, + ignore_uuid=True) + svntest.verify.compare_dump_files(None, None, + reloaded_dumpfile_a_d, + reloaded_dumpfile_r_d, + ignore_uuid=True) + + # Run each dump through svndumpfilter and check for no further change. + for dumpfile in [dumpfile_a_n, + dumpfile_a_d, + dumpfile_r_d + ]: + ### No buffer size seems to work for update_tests-2. So skip that test? + ### (Its dumpfile size is ~360 KB non-delta, ~180 KB delta.) 
+ if len(''.join(dumpfile)) > 100000: + continue + + exit_code, dumpfile2, errput = svntest.main.run_command_stdin( + svntest.main.svndumpfilter_binary, None, -1, True, + dumpfile, '--quiet', 'include', '/') + assert not exit_code and not errput + # Ignore empty prop sections in the input file during comparison, as + # svndumpfilter strips them. + # Ignore differences in number of blank lines between node records, + # as svndumpfilter puts 3 instead of 2 after an add or delete record. + svntest.verify.compare_dump_files(None, None, dumpfile, dumpfile2, + expect_content_length_always=True, + ignore_empty_prop_sections=True, + ignore_number_of_blank_lines=True) + + # Run the repository through 'svnsync' and check that this does not + # change the repository content. (Don't bother if it's already been + # created by svnsync.) + if "svn:sync-from-url\n" not in dumpfile_a_n: + dest_sbox = make_mirror(self) + verify_mirror(dest_sbox.repo_url, dest_sbox.repo_dir, dumpfile_a_n) + + def verify(self, skip_cross_check=False): + """Do additional testing that should hold for any sandbox, such as + verifying that the repository can be dumped. 
+ """ + if (not skip_cross_check + and svntest.main.tests_verify_dump_load_cross_check()): + if self.is_built() and not self.read_only: + # verify that we can in fact dump the repo + # (except for the few tests that deliberately corrupt the repo) + os.chdir(self.was_cwd) + if os.path.exists(self.repo_dir): + logger.info("VERIFY: running dump/load cross-check") + self.verify_repo() + else: + logger.info("VERIFY: WARNING: skipping dump/load cross-check:" + " is-built=%s, read-only=%s" + % (self.is_built() and "true" or "false", + self.read_only and "true" or "false")) + pass def is_url(target): return (target.startswith('^/') diff --git a/subversion/tests/cmdline/svntest/testcase.py b/subversion/tests/cmdline/svntest/testcase.py index 9243c7b..7643f1e 100644 --- a/subversion/tests/cmdline/svntest/testcase.py +++ b/subversion/tests/cmdline/svntest/testcase.py @@ -28,7 +28,8 @@ import os, types, sys import svntest # if somebody does a "from testcase import *", they only get these names -__all__ = ['_XFail', '_Wimp', '_Skip', '_SkipUnless'] +__all__ = ['_XFail', '_Wimp', '_Skip', '_SkipUnless', + '_SkipDumpLoadCrossCheck'] RESULT_OK = 'ok' RESULT_FAIL = 'fail' @@ -135,7 +136,7 @@ class FunctionTestCase(TestCase): is derived from the file name in which FUNC was defined) """ - def __init__(self, func, issues=None): + def __init__(self, func, issues=None, skip_cross_check=False): # it better be a function that accepts an sbox parameter and has a # docstring on it. 
assert isinstance(func, types.FunctionType) @@ -161,6 +162,7 @@ class FunctionTestCase(TestCase): TestCase.__init__(self, doc=doc, issues=issues) self.func = func + self.skip_cross_check = skip_cross_check def get_function_name(self): return self.func.func_name @@ -173,7 +175,9 @@ class FunctionTestCase(TestCase): return os.path.splitext(os.path.basename(filename))[0] def run(self, sandbox): - return self.func(sandbox) + result = self.func(sandbox) + sandbox.verify(skip_cross_check = self.skip_cross_check) + return result class _XFail(TestCase): @@ -261,11 +265,22 @@ class _SkipUnless(_Skip): _Skip.__init__(self, test_case, lambda c=cond_func: not c()) -def create_test_case(func, issues=None): +class _SkipDumpLoadCrossCheck(TestCase): + """A test that will skip the post-test dump/load cross-check.""" + + def __init__(self, test_case, cond_func=lambda: True, wip=None, + issues=None): + TestCase.__init__(self, + create_test_case(test_case, skip_cross_check=True), + cond_func, wip=wip, issues=issues) + + +def create_test_case(func, issues=None, skip_cross_check=False): if isinstance(func, TestCase): return func else: - return FunctionTestCase(func, issues=issues) + return FunctionTestCase(func, issues=issues, + skip_cross_check=skip_cross_check) # Various decorators to make declaring tests as such simpler @@ -322,5 +337,15 @@ def Issues_deco(*issues): return _second +def SkipDumpLoadCrossCheck_deco(cond_func = lambda: True): + def _second(func): + if isinstance(func, TestCase): + return _SkipDumpLoadCrossCheck(func, cond_func, issues=func.issues) + else: + return _SkipDumpLoadCrossCheck(func, cond_func) + + return _second + + # Create a singular alias, for linguistic correctness Issue_deco = Issues_deco diff --git a/subversion/tests/cmdline/svntest/tree.py b/subversion/tests/cmdline/svntest/tree.py index b8dd03e..676c96a 100644 --- a/subversion/tests/cmdline/svntest/tree.py +++ b/subversion/tests/cmdline/svntest/tree.py @@ -610,6 +610,11 @@ def 
detect_conflict_files(node, extra_files): logger.warn(str(node)) raise SVNTreeUnequal(msg) +def detect_conflict_files_done(extra_files): + """Done handler for detect_conflict_files""" + if len(extra_files): + raise SVNTreeError("Not all extra reject files have been accounted for") + ########################################################################### ########################################################################### # EXPORTED ROUTINES ARE BELOW diff --git a/subversion/tests/cmdline/svntest/verify.py b/subversion/tests/cmdline/svntest/verify.py index 1c0ae2e..e0da27f 100644 --- a/subversion/tests/cmdline/svntest/verify.py +++ b/subversion/tests/cmdline/svntest/verify.py @@ -212,6 +212,9 @@ class RegexOutput(ExpectedOutput): def display_differences(self, message, label, actual): display_lines(message, self.expected, actual, label + ' (regexp)', label) + def insert(self, index, line): + self.expected.insert(index, line) + self.expected_re = re.compile(self.expected) class RegexListOutput(ExpectedOutput): """Matches an ordered list of regular expressions. @@ -227,7 +230,7 @@ class RegexListOutput(ExpectedOutput): def __init__(self, expected, match_all=True): "EXPECTED is a list of regular expression strings." - assert isinstance(expected, list) and expected != [] + assert isinstance(expected, list) ExpectedOutput.__init__(self, expected, match_all) self.expected_res = [re.compile(e) for e in expected] @@ -251,6 +254,10 @@ class RegexListOutput(ExpectedOutput): def display_differences(self, message, label, actual): display_lines(message, self.expected, actual, label + ' (regexp)', label) + def insert(self, index, line): + self.expected.insert(index, line) + self.expected_res = [re.compile(e) for e in self.expected] + class UnorderedOutput(ExpectedOutput): """Matches an unordered list of lines. 
@@ -467,8 +474,10 @@ class DumpParser: if not m: if required: raise SVNDumpParseError("expected '%s' at line %d\n%s" + "\nPrevious lines:\n%s" % (regex, self.current, - self.lines[self.current])) + self.lines[self.current], + ''.join(self.lines[max(0,self.current - 10):self.current]))) else: return None self.current += 1 @@ -484,6 +493,26 @@ class DumpParser: self.current += 1 return True + def parse_header(self, header): + regex = '([^:]*): (.*)$' + m = re.match(regex, self.lines[self.current]) + if not m: + raise SVNDumpParseError("expected a header at line %d, but found:\n%s" + % (self.current, self.lines[self.current])) + self.current += 1 + return m.groups() + + def parse_headers(self): + headers = [] + while self.lines[self.current] != '\n': + key, val = self.parse_header(self) + headers.append((key, val)) + return headers + + + def parse_boolean(self, header, required): + return self.parse_line(header + ': (false|true)$', required) + def parse_format(self): return self.parse_line('SVN-fs-dump-format-version: ([0-9]+)$') @@ -493,6 +522,9 @@ class DumpParser: def parse_revision(self): return self.parse_line('Revision-number: ([0-9]+)$') + def parse_prop_delta(self): + return self.parse_line('Prop-delta: (false|true)$', required=False) + def parse_prop_length(self, required=True): return self.parse_line('Prop-content-length: ([0-9]+)$', required) @@ -500,10 +532,7 @@ class DumpParser: return self.parse_line('Content-length: ([0-9]+)$', required) def parse_path(self): - path = self.parse_line('Node-path: (.+)$', required=False) - if not path and self.lines[self.current] == 'Node-path: \n': - self.current += 1 - path = '' + path = self.parse_line('Node-path: (.*)$', required=False) return path def parse_kind(self): @@ -534,17 +563,55 @@ class DumpParser: def parse_text_sha1(self): return self.parse_line('Text-content-sha1: ([0-9a-z]+)$', required=False) + def parse_text_delta(self): + return self.parse_line('Text-delta: (false|true)$', required=False) + + def 
parse_text_delta_base_md5(self): + return self.parse_line('Text-delta-base-md5: ([0-9a-f]+)$', required=False) + + def parse_text_delta_base_sha1(self): + return self.parse_line('Text-delta-base-sha1: ([0-9a-f]+)$', required=False) + def parse_text_length(self): return self.parse_line('Text-content-length: ([0-9]+)$', required=False) - # One day we may need to parse individual property name/values into a map def get_props(self): props = [] while not re.match('PROPS-END$', self.lines[self.current]): props.append(self.lines[self.current]) self.current += 1 self.current += 1 - return props + + # Split into key/value pairs to do an unordered comparison. + # This parses the serialized hash under the assumption that it is valid. + prophash = {} + curprop = [0] + while curprop[0] < len(props): + def read_key_or_value(curprop): + # klen / vlen + klen = int(props[curprop[0]].split()[1]) + curprop[0] += 1 + + # key / value + key = '' + while len(key) != klen + 1: + key += props[curprop[0]] + curprop[0] += 1 + key = key[:-1] + + return key + + if props[curprop[0]].startswith('K'): + key = read_key_or_value(curprop) + value = read_key_or_value(curprop) + elif props[curprop[0]].startswith('D'): + key = read_key_or_value(curprop) + value = None + else: + raise + prophash[key] = value + + return prophash def get_content(self, length): content = '' @@ -560,17 +627,43 @@ class DumpParser: def parse_one_node(self): node = {} + + # optional 'kind' and required 'action' must be next node['kind'] = self.parse_kind() action = self.parse_action() - node['copyfrom_rev'] = self.parse_copyfrom_rev() - node['copyfrom_path'] = self.parse_copyfrom_path() - node['copy_md5'] = self.parse_copy_md5() - node['copy_sha1'] = self.parse_copy_sha1() - node['prop_length'] = self.parse_prop_length(required=False) - node['text_length'] = self.parse_text_length() - node['text_md5'] = self.parse_text_md5() - node['text_sha1'] = self.parse_text_sha1() - node['content_length'] = 
self.parse_content_length(required=False) + + # read any remaining headers + headers_list = self.parse_headers() + headers = dict(headers_list) + + # Content-length must be last, if present + if 'Content-length' in headers and headers_list[-1][0] != 'Content-length': + raise SVNDumpParseError("'Content-length' header is not last, " + "in header block ending at line %d" + % (self.current,)) + + # parse the remaining optional headers and store in specific keys in NODE + for key, header, regex in [ + ('copyfrom_rev', 'Node-copyfrom-rev', '([0-9]+)$'), + ('copyfrom_path', 'Node-copyfrom-path', '(.*)$'), + ('copy_md5', 'Text-copy-source-md5', '([0-9a-z]+)$'), + ('copy_sha1', 'Text-copy-source-sha1','([0-9a-z]+)$'), + ('prop_length', 'Prop-content-length', '([0-9]+)$'), + ('text_length', 'Text-content-length', '([0-9]+)$'), + ('text_md5', 'Text-content-md5', '([0-9a-z]+)$'), + ('text_sha1', 'Text-content-sha1', '([0-9a-z]+)$'), + ('content_length', 'Content-length', '([0-9]+)$'), + ]: + if not header in headers: + node[key] = None + continue + m = re.match(regex, headers[header]) + if not m: + raise SVNDumpParseError("expected '%s' at line %d\n%s" + % (regex, self.current, + self.lines[self.current])) + node[key] = m.group(1) + self.parse_blank() if node['prop_length']: node['props'] = self.get_props() @@ -592,7 +685,7 @@ class DumpParser: if self.current >= len(self.lines): break path = self.parse_path() - if not path and not path is '': + if path is None: break if not nodes.get(path): nodes[path] = {} @@ -630,7 +723,11 @@ class DumpParser: self.parse_all_revisions() return self.parsed -def compare_dump_files(message, label, expected, actual): +def compare_dump_files(message, label, expected, actual, + ignore_uuid=False, + expect_content_length_always=False, + ignore_empty_prop_sections=False, + ignore_number_of_blank_lines=False): """Parse two dump files EXPECTED and ACTUAL, both of which are lists of lines as returned by run_and_verify_dump, and check that the same 
revisions, nodes, properties, etc. are present in both dumps. @@ -639,7 +736,219 @@ def compare_dump_files(message, label, expected, actual): parsed_expected = DumpParser(expected).parse() parsed_actual = DumpParser(actual).parse() + if ignore_uuid: + parsed_expected['uuid'] = '<ignored>' + parsed_actual['uuid'] = '<ignored>' + + for parsed in [parsed_expected, parsed_actual]: + for rev_name, rev_record in parsed.items(): + #print "Found %s" % (rev_name,) + if 'nodes' in rev_record: + #print "Found %s.%s" % (rev_name, 'nodes') + for path_name, path_record in rev_record['nodes'].items(): + #print "Found %s.%s.%s" % (rev_name, 'nodes', path_name) + for action_name, action_record in path_record.items(): + #print "Found %s.%s.%s.%s" % (rev_name, 'nodes', path_name, action_name) + + if expect_content_length_always: + if action_record.get('content_length') == None: + #print 'Adding: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'content_length=0') + action_record['content_length'] = '0' + if ignore_empty_prop_sections: + if action_record.get('prop_length') == '10': + #print 'Removing: %s.%s.%s.%s.%s' % (rev_name, 'nodes', path_name, action_name, 'prop_length') + action_record['prop_length'] = None + del action_record['props'] + old_content_length = int(action_record['content_length']) + action_record['content_length'] = str(old_content_length - 10) + if ignore_number_of_blank_lines: + action_record['blanks'] = 0 + if parsed_expected != parsed_actual: - raise svntest.Failure('\n' + '\n'.join(ndiff( + print 'DIFF of raw dumpfiles (including expected differences)' + print ''.join(ndiff(expected, actual)) + raise svntest.Failure('DIFF of parsed dumpfiles (ignoring expected differences)\n' + + '\n'.join(ndiff( pprint.pformat(parsed_expected).splitlines(), pprint.pformat(parsed_actual).splitlines()))) + +########################################################################################## +## diff verifications +def is_absolute_url(target): + return 
(target.startswith('file://') + or target.startswith('http://') + or target.startswith('https://') + or target.startswith('svn://') + or target.startswith('svn+ssh://')) + +def make_diff_header(path, old_tag, new_tag, src_label=None, dst_label=None): + """Generate the expected diff header for file PATH, with its old and new + versions described in parentheses by OLD_TAG and NEW_TAG. SRC_LABEL and + DST_LABEL are paths or urls that are added to the diff labels if we're + diffing against the repository or diffing two arbitrary paths. + Return the header as an array of newline-terminated strings.""" + if src_label: + src_label = src_label.replace('\\', '/') + if not is_absolute_url(src_label): + src_label = '.../' + src_label + src_label = '\t(' + src_label + ')' + else: + src_label = '' + if dst_label: + dst_label = dst_label.replace('\\', '/') + if not is_absolute_url(dst_label): + dst_label = '.../' + dst_label + dst_label = '\t(' + dst_label + ')' + else: + dst_label = '' + path_as_shown = path.replace('\\', '/') + return [ + "Index: " + path_as_shown + "\n", + "===================================================================\n", + "--- " + path_as_shown + src_label + "\t(" + old_tag + ")\n", + "+++ " + path_as_shown + dst_label + "\t(" + new_tag + ")\n", + ] + +def make_no_diff_deleted_header(path, old_tag, new_tag): + """Generate the expected diff header for a deleted file PATH when in + 'no-diff-deleted' mode. (In that mode, no further details appear after the + header.) 
Return the header as an array of newline-terminated strings.""" + path_as_shown = path.replace('\\', '/') + return [ + "Index: " + path_as_shown + " (deleted)\n", + "===================================================================\n", + ] + +def make_git_diff_header(target_path, repos_relpath, + old_tag, new_tag, add=False, src_label=None, + dst_label=None, delete=False, text_changes=True, + cp=False, mv=False, copyfrom_path=None, + copyfrom_rev=None): + """ Generate the expected 'git diff' header for file TARGET_PATH. + REPOS_RELPATH is the location of the path relative to the repository root. + The old and new versions ("revision X", or "working copy") must be + specified in OLD_TAG and NEW_TAG. + SRC_LABEL and DST_LABEL are paths or urls that are added to the diff + labels if we're diffing against the repository. ADD, DELETE, CP and MV + denotes the operations performed on the file. COPYFROM_PATH is the source + of a copy or move. Return the header as an array of newline-terminated + strings.""" + + path_as_shown = target_path.replace('\\', '/') + if src_label: + src_label = src_label.replace('\\', '/') + src_label = '\t(.../' + src_label + ')' + else: + src_label = '' + if dst_label: + dst_label = dst_label.replace('\\', '/') + dst_label = '\t(.../' + dst_label + ')' + else: + dst_label = '' + + output = [ + "Index: " + path_as_shown + "\n", + "===================================================================\n" + ] + if add: + output.extend([ + "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n", + "new file mode 10644\n", + ]) + if text_changes: + output.extend([ + "--- /dev/null\t(" + old_tag + ")\n", + "+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n" + ]) + elif delete: + output.extend([ + "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n", + "deleted file mode 10644\n", + ]) + if text_changes: + output.extend([ + "--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n", + "+++ /dev/null\t(" + new_tag + ")\n" 
+ ]) + elif cp: + if copyfrom_rev: + copyfrom_rev = '@' + copyfrom_rev + else: + copyfrom_rev = '' + output.extend([ + "diff --git a/" + copyfrom_path + " b/" + repos_relpath + "\n", + "copy from " + copyfrom_path + copyfrom_rev + "\n", + "copy to " + repos_relpath + "\n", + ]) + if text_changes: + output.extend([ + "--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n", + "+++ b/" + repos_relpath + "\t(" + new_tag + ")\n" + ]) + elif mv: + output.extend([ + "diff --git a/" + copyfrom_path + " b/" + path_as_shown + "\n", + "rename from " + copyfrom_path + "\n", + "rename to " + repos_relpath + "\n", + ]) + if text_changes: + output.extend([ + "--- a/" + copyfrom_path + src_label + "\t(" + old_tag + ")\n", + "+++ b/" + repos_relpath + "\t(" + new_tag + ")\n" + ]) + else: + output.extend([ + "diff --git a/" + repos_relpath + " b/" + repos_relpath + "\n", + "--- a/" + repos_relpath + src_label + "\t(" + old_tag + ")\n", + "+++ b/" + repos_relpath + dst_label + "\t(" + new_tag + ")\n", + ]) + return output + +def make_diff_prop_header(path): + """Return a property diff sub-header, as a list of newline-terminated + strings.""" + return [ + "\n", + "Property changes on: " + path.replace('\\', '/') + "\n", + "___________________________________________________________________\n" + ] + +def make_diff_prop_val(plus_minus, pval): + "Return diff for prop value PVAL, with leading PLUS_MINUS (+ or -)." + if len(pval) > 0 and pval[-1] != '\n': + return [plus_minus + pval + "\n","\\ No newline at end of property\n"] + return [plus_minus + pval] + +def make_diff_prop_deleted(pname, pval): + """Return a property diff for deletion of property PNAME, old value PVAL. + PVAL is a single string with no embedded newlines. 
Return the result + as a list of newline-terminated strings.""" + return [ + "Deleted: " + pname + "\n", + "## -1 +0,0 ##\n" + ] + make_diff_prop_val("-", pval) + +def make_diff_prop_added(pname, pval): + """Return a property diff for addition of property PNAME, new value PVAL. + PVAL is a single string with no embedded newlines. Return the result + as a list of newline-terminated strings.""" + return [ + "Added: " + pname + "\n", + "## -0,0 +1 ##\n", + ] + make_diff_prop_val("+", pval) + +def make_diff_prop_modified(pname, pval1, pval2): + """Return a property diff for modification of property PNAME, old value + PVAL1, new value PVAL2. + + PVAL is a single string with no embedded newlines. A newline at the + end is significant: without it, we add an extra line saying '\ No + newline at end of property'. + + Return the result as a list of newline-terminated strings. + """ + return [ + "Modified: " + pname + "\n", + "## -1 +1 ##\n", + ] + make_diff_prop_val("-", pval1) + make_diff_prop_val("+", pval2) + diff --git a/subversion/tests/cmdline/svntest/wc.py b/subversion/tests/cmdline/svntest/wc.py index 171c18d..ab98b00 100644 --- a/subversion/tests/cmdline/svntest/wc.py +++ b/subversion/tests/cmdline/svntest/wc.py @@ -141,12 +141,14 @@ class State: self.desc.update(more_desc) - def add_state(self, parent, state): + def add_state(self, parent, state, strict=False): "Import state items from a State object, reparent the items to PARENT." 
assert isinstance(state, State) for path, item in state.desc.items(): - if path == '': + if strict: + path = parent + path + elif path == '': path = parent else: path = parent + '/' + path @@ -358,6 +360,11 @@ class State: for p, i in self.desc.copy().items(): if p.startswith(path + '/'): del self.desc[p] + elif item.entry_kind == 'file': + # A file has no descendants in svn_wc_entry_t + for p, i in self.desc.copy().items(): + if p.startswith(path + '/'): + del self.desc[p] else: # when reading the entry structures, we don't examine for text or # property mods, so clear those flags. we also do not examine the @@ -434,7 +441,7 @@ class State: return not self.__eq__(other) @classmethod - def from_status(cls, lines): + def from_status(cls, lines, wc_dir=None): """Create a State object from 'svn status' output.""" def not_space(value): @@ -442,6 +449,17 @@ class State: return value return None + def parse_move(path, wc_dir): + if path.startswith('../'): + # ../ style paths are relative from the status root + return to_relpath(os.path.normpath(repos_join(wc_dir, path))) + else: + # Other paths are just relative from cwd + return to_relpath(path) + + if not wc_dir: + wc_dir = '' + desc = { } last = None for line in lines: @@ -455,15 +473,15 @@ class State: if ex_match: if ex_match.group('moved_from'): - path = ex_match.group('moved_from') - last.tweak(moved_from = to_relpath(path)) + path = to_relpath(ex_match.group('moved_from')) + last.tweak(moved_from = parse_move(path, wc_dir)) elif ex_match.group('moved_to'): - path = ex_match.group('moved_to') - last.tweak(moved_to = to_relpath(path)) + path = to_relpath(ex_match.group('moved_to')) + last.tweak(moved_to = parse_move(path, wc_dir)) elif ex_match.group('swapped_with'): - path = ex_match.group('swapped_with') - last.tweak(moved_to = to_relpath(path)) - last.tweak(moved_from = to_relpath(path)) + path = to_relpath(ex_match.group('swapped_with')) + last.tweak(moved_to = parse_move(path, wc_dir)) + last.tweak(moved_from 
= parse_move(path, wc_dir)) # Parse TC description? @@ -590,6 +608,9 @@ class State: if line.startswith('DBG:') or line.startswith('Transmitting'): continue + if line.startswith('Committing transaction'): + continue + match = _re_parse_commit_ext.search(line) if match: desc[to_relpath(match.group(4))] = StateItem(verb=match.group(1)) @@ -666,8 +687,6 @@ class State: }) desc = { } - dot_svn = svntest.main.get_admin_name() - dump_data = svntest.main.run_entriesdump_tree(base) if not dump_data: @@ -675,6 +694,16 @@ class State: # ### Improve! return cls('', desc) + dirent_join = repos_join + if len(base) == 2 and base[1:]==':' and sys.platform=='win32': + # We have a win32 drive relative path... Auch. Fix joining + def drive_join(a, b): + if len(a) == 2: + return a+b + else: + return repos_join(a,b) + dirent_join = drive_join + for parent, entries in sorted(dump_data.items()): parent_url = entries[''].url @@ -696,11 +725,11 @@ class State: # that we can't put the status as "! " because that gets tweaked # out of our expected tree. item = StateItem(status=' ', wc_rev='?') - desc[repos_join(parent, name)] = item + desc[dirent_join(parent, name)] = item continue item = StateItem.from_entry(entry) if name: - desc[repos_join(parent, name)] = item + desc[dirent_join(parent, name)] = item implied_url = repos_join(parent_url, svn_uri_quote(name)) else: item._url = entry.url # attach URL to directory StateItems @@ -731,7 +760,7 @@ class StateItem: """ def __init__(self, contents=None, props=None, - status=None, verb=None, wc_rev=None, + status=None, verb=None, wc_rev=None, entry_kind=None, entry_rev=None, entry_status=None, entry_copied=None, locked=None, copied=None, switched=None, writelocked=None, treeconflict=None, moved_from=None, moved_to=None, @@ -758,6 +787,9 @@ class StateItem: self.prev_verb = prev_verb # The base revision number of the node in the WC, as a string. 
self.wc_rev = wc_rev + # If 'file' specifies that the node is a file, and as such has no svn_wc_entry_t + # descendants + self.entry_kind = None # These will be set when we expect the wc_rev/status to differ from those # found in the entries code. self.entry_rev = entry_rev @@ -948,6 +980,20 @@ def svn_uri_quote(url): # ------------ +def python_sqlite_can_read_wc(): + """Check if the Python builtin is capable enough to peek into wc.db""" + + try: + db = svntest.sqlite3.connect('') + + c = db.cursor() + c.execute('select sqlite_version()') + ver = tuple(map(int, c.fetchall()[0][0].split('.'))) + + return ver >= (3, 6, 18) # Currently enough (1.7-1.9) + except: + return False + def open_wc_db(local_path): """Open the SQLite DB for the WC path LOCAL_PATH. Return (DB object, WC root path, WC relpath of LOCAL_PATH).""" diff --git a/subversion/tests/cmdline/svnversion_tests.py b/subversion/tests/cmdline/svnversion_tests.py index ccc8e78..2ed6e46 100755 --- a/subversion/tests/cmdline/svnversion_tests.py +++ b/subversion/tests/cmdline/svnversion_tests.py @@ -50,47 +50,41 @@ def svnversion_test(sbox): repo_url = sbox.repo_url # Unmodified - svntest.actions.run_and_verify_svnversion("Unmodified working copy", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1\n" ], []) # Unmodified, whole wc switched - svntest.actions.run_and_verify_svnversion("Unmodified switched working copy", - wc_dir, "some/other/url", + svntest.actions.run_and_verify_svnversion(wc_dir, "some/other/url", [ "1S\n" ], []) mu_path = os.path.join(wc_dir, 'A', 'mu') svntest.main.file_append(mu_path, 'appended mu text') # Modified file - svntest.actions.run_and_verify_svnversion("Modified file", - mu_path, repo_url + '/A/mu', + svntest.actions.run_and_verify_svnversion(mu_path, repo_url + '/A/mu', [ "1M\n" ], []) # Text modified - svntest.actions.run_and_verify_svnversion("Modified text", wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ 
"1M\n" ], []) expected_output = wc.State(wc_dir, {'A/mu' : Item(verb='Sending')}) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) if svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir): + expected_output, expected_status): raise svntest.Failure # Unmodified, mixed - svntest.actions.run_and_verify_svnversion("Unmodified mixed working copy", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1:2\n" ], []) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'blue', 'azul', os.path.join(wc_dir, 'A', 'mu')) # Prop modified, mixed - svntest.actions.run_and_verify_svnversion("Property modified mixed wc", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1:2M\n" ], []) iota_path = os.path.join(wc_dir, 'iota') @@ -108,45 +102,39 @@ def svnversion_test(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry'): raise svntest.Failure # Prop modified, mixed, part wc switched - svntest.actions.run_and_verify_svnversion("Prop-mod mixed partly switched", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1:2MS\n" ], []) # Plain (exported) directory that is a direct subdir of a versioned dir Q_path = os.path.join(wc_dir, 'Q') os.mkdir(Q_path) - svntest.actions.run_and_verify_svnversion("Exported subdirectory", - Q_path, repo_url, + svntest.actions.run_and_verify_svnversion(Q_path, repo_url, [ "Unversioned directory\n" ], []) # Plain (exported) directory that is not a direct subdir of a versioned dir R_path = os.path.join(Q_path, 'Q') os.mkdir(R_path) - svntest.actions.run_and_verify_svnversion("Exported directory", - R_path, repo_url, + svntest.actions.run_and_verify_svnversion(R_path, repo_url, [ "Unversioned directory\n" ], []) # Switched file - 
svntest.actions.run_and_verify_svnversion("Switched file", - iota_path, repo_url + '/iota', + svntest.actions.run_and_verify_svnversion(iota_path, repo_url + '/iota', [ "2S\n" ], []) # Unversioned file kappa_path = os.path.join(wc_dir, 'kappa') svntest.main.file_write(kappa_path, "This is the file 'kappa'.") - svntest.actions.run_and_verify_svnversion("Unversioned file", - kappa_path, repo_url, + svntest.actions.run_and_verify_svnversion(kappa_path, repo_url, [ "Unversioned file\n" ], []) # Nonexistent file or directory X_path = os.path.join(wc_dir, 'Q', 'X') - svntest.actions.run_and_verify_svnversion("Nonexistent file or directory", - X_path, repo_url, + svntest.actions.run_and_verify_svnversion(X_path, repo_url, None, [ "'%s' doesn't exist\n" % os.path.abspath(X_path) ]) @@ -163,11 +151,10 @@ def svnversion_test(sbox): }) svntest.actions.run_and_verify_checkout(repo_url + "/A", A_A_path, expected_output, expected_disk, - None, None, None, None, - "--depth=files") + [], "--depth=files") # Partial (sparse) checkout - svntest.actions.run_and_verify_svnversion("Sparse checkout", A_A_path, + svntest.actions.run_and_verify_svnversion(A_A_path, repo_url, [ "2SP\n" ], []) @@ -188,7 +175,7 @@ ext-file -r 1 %s/A/D/H/omega """ % (repo_url, repo_url) (fd, tmp_f) = tempfile.mkstemp(dir=wc_dir) svntest.main.file_append(tmp_f, externals_desc) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'pset', '-F', tmp_f, 'svn:externals', C_path) os.close(fd) @@ -200,11 +187,10 @@ ext-file -r 1 %s/A/D/H/omega expected_status.tweak('A/C', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Update to get it on disk - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) ext_dir_path = os.path.join(C_path, 'ext-dir') ext_file_path = os.path.join(C_path, 'ext-file') expected_infos = [ @@ -213,8 
+199,7 @@ ext-file -r 1 %s/A/D/H/omega ] svntest.actions.run_and_verify_info(expected_infos, ext_dir_path, ext_file_path) - svntest.actions.run_and_verify_svnversion("working copy with svn:externals", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "2\n" ], []) #---------------------------------------------------------------------- @@ -231,24 +216,21 @@ def svnversion_with_excluded_subtrees(sbox): D_path = os.path.join(wc_dir, "A", "D") psi_path = os.path.join(wc_dir, "A", "D", "H", "psi") - svntest.actions.run_and_verify_svnversion("working copy with excluded dir", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1\n" ], []) # Exclude a directory and check that svnversion detects it. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'exclude', B_path) - svntest.actions.run_and_verify_svnversion("working copy with excluded dir", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1P\n" ], []) # Exclude a file and check that svnversion detects it. Target the # svnversion command on a subtree that does not contain the excluded # directory to assure we a detecting the switched file. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '--set-depth', 'exclude', psi_path) - svntest.actions.run_and_verify_svnversion("working copy with excluded file", - D_path, repo_url + '/A/D', + svntest.actions.run_and_verify_svnversion(D_path, repo_url + '/A/D', [ "1P\n" ], []) def svnversion_with_structural_changes(sbox): @@ -261,22 +243,20 @@ def svnversion_with_structural_changes(sbox): iota_path = os.path.join(wc_dir, 'iota') iota_copy_path = os.path.join(wc_dir, 'iota_copy') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', iota_path, iota_copy_path) - svntest.actions.run_and_verify_svnversion("Copied file", - iota_copy_path, repo_url + + svntest.actions.run_and_verify_svnversion(iota_copy_path, repo_url + '/iota_copy', [ "Uncommitted local addition, " "copy or move\n" ], []) C_path = os.path.join(wc_dir, 'A', 'C') C_copy_path = os.path.join(wc_dir, 'C_copy') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', C_path, C_copy_path) - svntest.actions.run_and_verify_svnversion("Copied dir", - C_copy_path, repo_url + + svntest.actions.run_and_verify_svnversion(C_copy_path, repo_url + '/C_copy', [ "Uncommitted local addition, " "copy or move\n" ], @@ -285,13 +265,12 @@ def svnversion_with_structural_changes(sbox): # Test deletion sbox.simple_rm('iota') - svntest.actions.run_and_verify_svnversion("Deleted file", - sbox.ospath('iota'), + svntest.actions.run_and_verify_svnversion(sbox.ospath('iota'), repo_url + '/iota', ["1M\n"], [], ) - svntest.actions.run_and_verify_svnversion("Deleted file", wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1:2M\n" ], []) def committed_revisions(sbox): @@ -303,7 +282,7 @@ def committed_revisions(sbox): sbox.simple_copy('iota', 'iota2') sbox.simple_commit() sbox.simple_update() - svntest.actions.run_and_verify_svnversion("Committed 
revisions", wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1:2\n" ], [], "--committed") @@ -313,8 +292,7 @@ def non_reposroot_wc(sbox): wc_dir = sbox.add_wc_path('wc2') repo_url = sbox.repo_url + "/A/B" svntest.main.run_svn(None, 'checkout', repo_url, wc_dir) - svntest.actions.run_and_verify_svnversion("Non-repos-root wc dir", - wc_dir, repo_url, + svntest.actions.run_and_verify_svnversion(wc_dir, repo_url, [ "1\n" ], []) @Issue(3858) @@ -332,25 +310,24 @@ def child_switched(sbox): ### Target is repos root and WC root. # No switches. - svntest.actions.run_and_verify_svnversion(None, wc_dir, None, + svntest.actions.run_and_verify_svnversion(wc_dir, None, [ "2\n" ], []) # Switch A/B to a sibling. sbox.simple_switch(repo_url + '/A/D', 'A/B') # This should detect the switch at A/B. - svntest.actions.run_and_verify_svnversion(None, wc_dir, None, + svntest.actions.run_and_verify_svnversion(wc_dir, None, [ "2S\n" ], []) ### Target is neither repos root nor WC root. # But A/B/G and its children are not switched by itself. - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'A/B/G'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A/B/G'), None, [ "2\n" ], []) # And A/B isn't switched when you look at it directly. - svntest.actions.run_and_verify_svnversion(None, os.path.join(wc_dir, 'A/B'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A/B'), None, [ "2\n" ], []) # Switch branch/D to ^/A, then switch branch/D/G back to ^/branch/D/G so @@ -359,37 +336,31 @@ def child_switched(sbox): sbox.simple_switch(repo_url + '/branch/D/G', 'branch/D/G') # This should detect the switch at branch/D and branch/D/G. 
- svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'branch'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'branch'), None, [ "2S\n" ], []) # Directly targeting the switched branch/D should still detect the switch # at branch/D/G even though the latter isn't switched against the root of # the working copy. - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'branch', + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'branch', 'D'), None, [ "2S\n" ], []) # Switch A/B to ^/. sbox.simple_switch(repo_url, 'A/B') - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir), None, [ "2S\n" ], []) - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'A'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A'), None, [ "2S\n" ], []) ### Target is repos root but not WC root. - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'A', 'B'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A', 'B'), None, [ "2\n" ], []) # Switch A/B/A/D/G to ^/A/D/H. sbox.simple_switch(repo_url + '/A/D/H', 'A/B/A/D/G') - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'A', 'B'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'A', 'B'), None, [ "2S\n" ], []) ### Target is not repos root but is WC root. @@ -398,17 +369,14 @@ def child_switched(sbox): # ^A/D/G. sbox.simple_switch(repo_url + '/branch', '.') sbox.simple_switch(repo_url + '/A/D/G', 'D/G') - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir,), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir,), None, [ "2S\n" ], []) ### Target is neither repos root nor WC root. 
- svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'D'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'D'), None, [ "2S\n" ], []) - svntest.actions.run_and_verify_svnversion(None, - os.path.join(wc_dir, 'D', 'H'), + svntest.actions.run_and_verify_svnversion(os.path.join(wc_dir, 'D', 'H'), None, [ "2\n" ], []) ######################################################################## diff --git a/subversion/tests/cmdline/switch_tests.py b/subversion/tests/cmdline/switch_tests.py index 3f1e7ab..8712723 100755 --- a/subversion/tests/cmdline/switch_tests.py +++ b/subversion/tests/cmdline/switch_tests.py @@ -41,199 +41,8 @@ Wimp = svntest.testcase.Wimp_deco Item = svntest.wc.StateItem from svntest.main import SVN_PROP_MERGEINFO, server_has_mergeinfo -from externals_tests import change_external - - -### Bummer. It would be really nice to have easy access to the URL -### member of our entries files so that switches could be testing by -### examining the modified ancestry. But status doesn't show this -### information. Hopefully in the future the cmdline binary will have -### a subcommand for dumping multi-line detailed information about -### versioned things. Until then, we'll stick with the traditional -### verification methods. 
-### -### gjs says: we have 'svn info' now - -def get_routine_status_state(wc_dir): - """get the routine status list for WC_DIR at the completion of an - initial call to do_routine_switching()""" - - # Construct some paths for convenience - ADH_path = os.path.join(wc_dir, 'A', 'D', 'H') - chi_path = os.path.join(ADH_path, 'chi') - omega_path = os.path.join(ADH_path, 'omega') - psi_path = os.path.join(ADH_path, 'psi') - pi_path = os.path.join(ADH_path, 'pi') - tau_path = os.path.join(ADH_path, 'tau') - rho_path = os.path.join(ADH_path, 'rho') - - # Now generate a state - state = svntest.actions.get_virginal_state(wc_dir, 1) - state.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda') - state.add({ - 'A/B/pi' : Item(status=' ', wc_rev=1), - 'A/B/tau' : Item(status=' ', wc_rev=1), - 'A/B/rho' : Item(status=' ', wc_rev=1), - }) - - return state - -#---------------------------------------------------------------------- - -def get_routine_disk_state(wc_dir): - """get the routine disk list for WC_DIR at the completion of an - initial call to do_routine_switching()""" - - disk = svntest.main.greek_state.copy() - - # iota has the same contents as gamma - disk.tweak('iota', contents=disk.desc['A/D/gamma'].contents) - - # A/B/* no longer exist, but have been replaced by copies of A/D/G/* - disk.remove('A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda') - disk.add({ - 'A/B/pi' : Item("This is the file 'pi'.\n"), - 'A/B/rho' : Item("This is the file 'rho'.\n"), - 'A/B/tau' : Item("This is the file 'tau'.\n"), - }) - - return disk - -#---------------------------------------------------------------------- - -def do_routine_switching(wc_dir, repo_url, verify): - """perform some routine switching of the working copy WC_DIR for - other tests to use. If VERIFY, then do a full verification of the - switching, else don't bother.""" - - ### Switch the file `iota' to `A/D/gamma'. 
- - # Construct some paths for convenience - iota_path = os.path.join(wc_dir, 'iota') - gamma_url = repo_url + '/A/D/gamma' - - if verify: - # Create expected output tree - expected_output = svntest.wc.State(wc_dir, { - 'iota' : Item(status='U '), - }) - - # Create expected disk tree (iota will have gamma's contents) - expected_disk = svntest.main.greek_state.copy() - expected_disk.tweak('iota', - contents=expected_disk.desc['A/D/gamma'].contents) - - # Create expected status tree - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - expected_status.tweak('iota', switched='S') - - # Do the switch and check the results in three ways. - svntest.actions.run_and_verify_switch(wc_dir, iota_path, gamma_url, - expected_output, - expected_disk, - expected_status, - None, None, None, None, None, - False, '--ignore-ancestry') - else: - svntest.main.run_svn(None, 'switch', '--ignore-ancestry', - gamma_url, iota_path) - - ### Switch the directory `A/B' to `A/D/G'. - - # Construct some paths for convenience - AB_path = os.path.join(wc_dir, 'A', 'B') - ADG_url = repo_url + '/A/D/G' - - if verify: - # Create expected output tree - expected_output = svntest.wc.State(wc_dir, { - 'A/B/E' : Item(status='D '), - 'A/B/F' : Item(status='D '), - 'A/B/lambda' : Item(status='D '), - 'A/B/pi' : Item(status='A '), - 'A/B/tau' : Item(status='A '), - 'A/B/rho' : Item(status='A '), - }) - - # Create expected disk tree (iota will have gamma's contents, - # A/B/* will look like A/D/G/*) - expected_disk = get_routine_disk_state(wc_dir) - - # Create expected status - expected_status = get_routine_status_state(wc_dir) - expected_status.tweak('iota', 'A/B', switched='S') - - # Do the switch and check the results in three ways. 
- svntest.actions.run_and_verify_switch(wc_dir, AB_path, ADG_url, - expected_output, - expected_disk, - expected_status, - None, None, None, None, None, - False, '--ignore-ancestry') - else: - svntest.main.run_svn(None, 'switch', '--ignore-ancestry', - ADG_url, AB_path) - - -#---------------------------------------------------------------------- - -def commit_routine_switching(wc_dir, verify): - "Commit some stuff in a routinely-switched working copy." - - # Make some local mods - iota_path = os.path.join(wc_dir, 'iota') - Bpi_path = os.path.join(wc_dir, 'A', 'B', 'pi') - Gpi_path = os.path.join(wc_dir, 'A', 'D', 'G', 'pi') - Z_path = os.path.join(wc_dir, 'A', 'D', 'G', 'Z') - zeta_path = os.path.join(wc_dir, 'A', 'D', 'G', 'Z', 'zeta') - - svntest.main.file_append(iota_path, "apple") - svntest.main.file_append(Bpi_path, "melon") - svntest.main.file_append(Gpi_path, "banana") - os.mkdir(Z_path) - svntest.main.file_append(zeta_path, "This is the file 'zeta'.\n") - svntest.main.run_svn(None, 'add', Z_path) - - # Try to commit. We expect this to fail because, if all the - # switching went as expected, A/B/pi and A/D/G/pi point to the - # same URL. We don't allow this. - svntest.actions.run_and_verify_commit( - wc_dir, None, None, - "svn: E195003: Cannot commit both .* as they refer to the same URL$", - wc_dir) - - # Okay, that all taken care of, let's revert the A/D/G/pi path and - # move along. Afterward, we should be okay to commit. (Sorry, - # holsta, that banana has to go...) - svntest.main.run_svn(None, 'revert', Gpi_path) - - # Create expected output tree. - expected_output = svntest.wc.State(wc_dir, { - 'A/D/G/Z' : Item(verb='Adding'), - 'A/D/G/Z/zeta' : Item(verb='Adding'), - 'iota' : Item(verb='Sending'), - 'A/B/pi' : Item(verb='Sending'), - }) - - # Created expected status tree. 
- expected_status = get_routine_status_state(wc_dir) - expected_status.tweak('iota', 'A/B', switched='S') - expected_status.tweak('iota', 'A/B/pi', wc_rev=2, status=' ') - expected_status.add({ - 'A/D/G/Z' : Item(status=' ', wc_rev=2), - 'A/D/G/Z/zeta' : Item(status=' ', wc_rev=2), - }) - - # Commit should succeed - if verify: - svntest.actions.run_and_verify_commit(wc_dir, - expected_output, - expected_status, - None, wc_dir) - else: - svntest.main.run_svn(None, - 'ci', '-m', 'log msg', wc_dir) - +from svntest.deeptrees import do_routine_switching, commit_routine_switching, \ + get_routine_disk_state, get_routine_status_state ###################################################################### # Tests @@ -380,8 +189,7 @@ def full_rev_update(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '1', wc_dir) #---------------------------------------------------------------------- @@ -438,8 +246,7 @@ def update_switched_things(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 0, + [], False, B_path, iota_path) @@ -496,8 +303,7 @@ def rev_update_switched_things(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '1', B_path, iota_path) @@ -542,12 +348,12 @@ def delete_subdir(sbox): A2_url = sbox.repo_url + '/A2' A2_B_F_url = sbox.repo_url + '/A2/B/F' - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'cp', '-m', 'make copy', A_url, A2_url) - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 3.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 3.\n'], [], 'rm', '-m', 'delete subdir', A2_B_F_url) expected_output = svntest.wc.State(wc_dir, { @@ -565,8 +371,8 @@ def delete_subdir(sbox): expected_output, 
expected_disk, expected_status, - None, None, None, None, None, - False, '--ignore-ancestry') + [], False, + '--ignore-ancestry') #---------------------------------------------------------------------- # Issue 1532: Switch a file to a dir: can't switch it back to the file @@ -581,7 +387,7 @@ def file_dir_file(sbox): file_url = sbox.repo_url + '/iota' dir_url = sbox.repo_url + '/A/C' - svntest.actions.run_and_verify_svn(None, None, [], 'switch', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--ignore-ancestry', dir_url, file_path) if not os.path.isdir(file_path): raise svntest.Failure @@ -591,7 +397,7 @@ def file_dir_file(sbox): # In this specific case the switch editor is designed to be rooted on the node # itself instead of its ancestor. If you would use sbox.ospath('A') for # file_path the switch works both ways. - svntest.actions.run_and_verify_svn(None, None, [], 'switch', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--ignore-ancestry', file_url, file_path) if not os.path.isfile(file_path): raise svntest.Failure @@ -633,10 +439,10 @@ def nonrecursive_switching(sbox): 'up', wc1_dir) svntest.main.file_append(wc1_new_file, "This is the file 'newfile'.\n") svntest.main.run_svn(None, 'add', wc1_new_file) - svntest.main.run_svn(None, 'ci', '-m', '', wc1_dir) + sbox.simple_commit() # Try to switch "wc2" to the branch (non-recursively) - svntest.actions.run_and_verify_svn(None, None, [], 'switch', '-N', + svntest.actions.run_and_verify_svn(None, [], 'switch', '-N', '--ignore-ancestry', version1_url, wc2_dir) # Check the URLs of the (not switched) directories. @@ -672,8 +478,8 @@ def failed_anchor_is_target(sbox): # Make a directory 'G/psi' in the repository. 
G_url = sbox.repo_url + '/A/D/G' G_psi_url = G_url + '/psi' - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'mkdir', '-m', 'log msg', G_psi_url) # Modify the file 'H/psi' locally. @@ -682,7 +488,7 @@ def failed_anchor_is_target(sbox): svntest.main.file_append(psi_path, "more text") # This switch raises a tree conflict on 'psi', because of the local mods. - svntest.actions.run_and_verify_svn(None, svntest.verify.AnyOutput, [], + svntest.actions.run_and_verify_svn(svntest.verify.AnyOutput, [], 'switch', '--ignore-ancestry', G_url, H_path) @@ -733,8 +539,8 @@ def bad_intermediate_urls(sbox): # First, make an extra subdirectory in C to match one in the root, plus # another one inside of that. - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'mkdir', '-m', 'log msg', url_A_C_A, url_A_C_A_Z) @@ -780,8 +586,9 @@ def bad_intermediate_urls(sbox): }) actions.run_and_verify_switch(wc_dir, wc_dir, url_A_C, expected_output, - expected_disk, expected_status, None, None, - None, None, None, False, '--ignore-ancestry') + expected_disk, expected_status, + [], False, + '--ignore-ancestry') # However, the URL for wc/A should now reflect ^/A/C/A, not something else. 
expected_infos = [ @@ -805,7 +612,7 @@ def bad_intermediate_urls(sbox): expected_status.tweak('A/Z', treeconflict=None) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) @@ -833,18 +640,18 @@ def obstructed_switch(sbox): url_A_B_Esave = url + '/A/B/Esave' # svn cp -m msgcopy url/A/B/E url/A/B/Esave - expected_stdout = verify.UnorderedOutput([ - '\n', + expected_stdout = [ + 'Committing transaction...\n', 'Committed revision 2.\n', - ]) + ] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'cp', '-m', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'cp', '-m', 'msgcopy', url_A_B_E, url_A_B_Esave) # svn rm A/B/E/alpha expected_stdout = ['D ' + A_B_E_alpha + '\n'] - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'rm', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'rm', A_B_E_alpha) # svn commit @@ -855,8 +662,7 @@ def obstructed_switch(sbox): expected_status = actions.get_virginal_state(wc_dir, 1) expected_status.remove('A/B/E/alpha') - actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir) + actions.run_and_verify_commit(wc_dir, expected_output, expected_status) # echo "hello" >> A/B/E/alpha main.file_append(A_B_E_alpha, 'hello') @@ -877,8 +683,8 @@ def obstructed_switch(sbox): actions.run_and_verify_switch(wc_dir, A_B_E, url_A_B_Esave, expected_output, expected_disk, - expected_status, None, None, None, None, - None, False, '--ignore-ancestry') + expected_status, + [], False, '--ignore-ancestry') # svn status expected_status.add({ @@ -893,7 +699,7 @@ def obstructed_switch(sbox): expected_stdout = verify.RegexOutput( ".*local file unversioned, incoming file add upon switch", match_all=False) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'info', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'info', A_B_E_alpha) @@ -934,11 +740,11 @@ def 
commit_mods_below_switch(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') D_path = sbox.ospath('A/D') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'x', 'x', C_path, D_path) expected_status.tweak('A/C', 'A/D', status=' M') @@ -954,7 +760,7 @@ def commit_mods_below_switch(sbox): # with "'A/C/E' is missing or not locked" svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, C_path, D_path) + [], C_path, D_path) #---------------------------------------------------------------------- # Issue 2306. @@ -973,15 +779,15 @@ def refresh_read_only_attribute(sbox): # Create a branch. url = sbox.repo_url + '/A' branch_url = sbox.repo_url + '/A-branch' - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'cp', '-m', 'svn:needs-lock not set', url, branch_url) # Set the svn:needs-lock property on a file from the "trunk". A_path = sbox.ospath('A') mu_path = os.path.join(A_path, 'mu') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:needs-lock', '1', mu_path) # Commit the propset of svn:needs-lock. @@ -992,7 +798,7 @@ def refresh_read_only_attribute(sbox): expected_status.tweak('A/mu', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, mu_path) + [], mu_path) # The file on which svn:needs-lock was set is now expected to be read-only. 
if os.access(mu_path, os.W_OK): @@ -1013,7 +819,7 @@ def refresh_read_only_attribute(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') # The file with we set svn:needs-lock on should now be writable, but @@ -1042,8 +848,8 @@ def switch_change_repos_root(sbox): # Test 1: A switch that changes to a non-existing repo shouldn't work. expected_err = ".*Unable to open repository.*|.*Could not open.*|"\ - ".*No repository found.*" - svntest.actions.run_and_verify_svn(None, None, + ".*Could not find.*|.*No repository found.*" + svntest.actions.run_and_verify_svn(None, expected_err, 'switch', '--ignore-ancestry', other_A_url, A_wc_dir) @@ -1053,7 +859,7 @@ def switch_change_repos_root(sbox): other_A_url = other_repo_url + "/A" svntest.main.create_repos(other_repo_dir) - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, ".*UUID.*", 'switch', '--ignore-ancestry', other_A_url, A_wc_dir) @@ -1136,8 +942,7 @@ def forced_switch(sbox): svntest.actions.run_and_verify_switch(sbox.wc_dir, F_path, AD_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, [], False, '--force', '--ignore-ancestry') #---------------------------------------------------------------------- @@ -1211,8 +1016,8 @@ def forced_switch_failures(sbox): main.file_write(A_C_H, "The file 'H'\n") # Test three cases where forced switch should cause a tree conflict - # 1) A forced switch that tries to add a file when an unversioned - # directory of the same name already exists. (Currently fails) + # 1) A forced switch that tries to add a directory when an unversioned + # file of the same name already exists. 
(Currently fails) # svn switch --force url/A/D A/C expected_output = svntest.wc.State(wc_dir, { 'A/C/G' : Item(status='A '), @@ -1252,12 +1057,12 @@ def forced_switch_failures(sbox): expected_status.tweak('A/C', switched='S') actions.run_and_verify_switch(wc_dir, A_C, url_A_D, expected_output, - expected_disk, expected_status, None, None, - None, None, None, False, '--force', + expected_disk, expected_status, [], False, + '--force', '--ignore-ancestry') - # 2) A forced switch that tries to add a dir when a file of the same + # 2) A forced switch that tries to add a file when a dir of the same # name already exists. (Tree conflict) # svn switch --force url/A/D/G A/B/F expected_output = svntest.wc.State(wc_dir, { @@ -1279,16 +1084,16 @@ def forced_switch_failures(sbox): expected_status.tweak('A/B/F', switched='S') actions.run_and_verify_switch(wc_dir, A_B_F, url_A_D_G, expected_output, - expected_disk, expected_status, None, None, - None, None, None, False, '--force', + expected_disk, expected_status, [], False, + '--force', '--ignore-ancestry') # svn info A/B/F/pi expected_stdout = verify.ExpectedOutput( - 'Tree conflict: local file unversioned, incoming file add upon switch\n', + 'Tree conflict: local dir unversioned, incoming file add upon switch\n', match_all=False) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'info', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'info', A_B_F_pi) @@ -1297,11 +1102,11 @@ def forced_switch_failures(sbox): # Make dir A/D/H/I in repos. # svn mkdir -m "Log message" url/A/D/H/I expected_stdout = verify.UnorderedOutput([ - '\n', + 'Committing transaction...\n', 'Committed revision 2.\n', ]) - actions.run_and_verify_svn2('OUTPUT', expected_stdout, [], 0, 'mkdir', + actions.run_and_verify_svn2(expected_stdout, [], 0, 'mkdir', '-m', 'Log message', url_A_D_H_I) # Make A/D/G/I and co A/D/H/I into it. 
@@ -1316,7 +1121,6 @@ def forced_switch_failures(sbox): }) exit_code, so, se = svntest.actions.run_and_verify_svn( - "Unexpected error during co", ['Checked out revision 2.\n'], [], "co", url_A_D_H_I, A_D_G_I) @@ -1334,9 +1138,8 @@ def forced_switch_failures(sbox): }) actions.run_and_verify_switch(wc_dir, A_D_G, url_A_D_H, expected_output, - None, None, None, - None, None, None, None, - False, '--force', '--ignore-ancestry') + None, None, [], False, + '--force', '--ignore-ancestry') # Delete all three obstructions and finish the update. # rm -rf A/D/G/I @@ -1395,7 +1198,7 @@ def forced_switch_failures(sbox): svntest.main.run_svn(None, 'revert', '-R', sbox.ospath('A/C/H')) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) def switch_with_obstructing_local_adds(sbox): @@ -1428,7 +1231,7 @@ def switch_with_obstructing_local_adds(sbox): "This is the unversioned file 'upsilon'.\n") # Add the above obstructions. - svntest.actions.run_and_verify_svn("Add error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', G_path, I_path, gamma_copy_path) @@ -1474,9 +1277,6 @@ def switch_with_obstructing_local_adds(sbox): 'A/B/F/I' : Item(status='A ', wc_rev='-', entry_rev='0'), }) - # "Extra" files that we expect to result from the conflicts. - extra_files = ['pi\.r0', 'pi\.r1', 'pi\.mine'] - # Do the switch and check the results in three ways. 
F_path = sbox.ospath('A/B/F') D_url = sbox.repo_url + '/A/D' @@ -1485,9 +1285,7 @@ def switch_with_obstructing_local_adds(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files, None, None, False, + [], False, '--ignore-ancestry') #---------------------------------------------------------------------- @@ -1502,15 +1300,15 @@ def switch_scheduled_add(sbox): nodo_path = sbox.ospath('nodo') svntest.main.file_append(file_path, "") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', file_path) - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, "svn: E200007: Cannot switch '.*file' " + "because it is not in the repository yet", 'switch', '--ignore-ancestry', switch_url, file_path) - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, "svn: E155010: The node '.*nodo' was not", 'switch', '--ignore-ancestry', switch_url, nodo_path) @@ -1546,7 +1344,7 @@ def mergeinfo_switch_elision(sbox): "Checked out revision 1.\n", "A " + B_COPY_1_path + "\n", ]) - svntest.actions.run_and_verify_svn(None, expected_stdout, [], 'copy', + svntest.actions.run_and_verify_svn(expected_stdout, [], 'copy', sbox.repo_url + "/A/B", B_COPY_1_path) expected_stdout = verify.UnorderedOutput([ @@ -1558,7 +1356,7 @@ def mergeinfo_switch_elision(sbox): "Checked out revision 1.\n", "A " + B_COPY_2_path + "\n", ]) - svntest.actions.run_and_verify_svn(None, expected_stdout, [], 'copy', + svntest.actions.run_and_verify_svn(expected_stdout, [], 'copy', sbox.repo_url + "/A/B", B_COPY_2_path) expected_output = svntest.wc.State(wc_dir, { @@ -1583,9 +1381,7 @@ def mergeinfo_switch_elision(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Make some changes under A/B @@ -1595,7 +1391,7 @@ def mergeinfo_switch_elision(sbox): {'A/B/E/beta' : 
Item(verb='Sending')}) expected_status.tweak('A/B/E/beta', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r4 - modify and commit A/B/E/alpha svntest.main.file_write(alpha_path, "New content") @@ -1603,7 +1399,7 @@ def mergeinfo_switch_elision(sbox): {'A/B/E/alpha' : Item(verb='Sending')}) expected_status.tweak('A/B/E/alpha', wc_rev=4) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Merge r2:4 into A/B_COPY_1 expected_output = svntest.wc.State(B_COPY_1_path, { @@ -1640,8 +1436,7 @@ def mergeinfo_switch_elision(sbox): expected_merge_disk, expected_merge_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # r5 - Commit the merge into A/B_COPY_1/E expected_output = svntest.wc.State( @@ -1655,7 +1450,7 @@ def mergeinfo_switch_elision(sbox): expected_status.tweak('A/B_COPY_1/E/beta', wc_rev=5) expected_status.tweak('A/B_COPY_1/lambda', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Merge r2:4 into A/B_COPY_2/E expected_output = svntest.wc.State(E_COPY_2_path, { @@ -1686,8 +1481,7 @@ def mergeinfo_switch_elision(sbox): expected_merge_disk, expected_merge_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # Switch A/B_COPY_2 to URL of A/B_COPY_1. 
The local mergeinfo for r1,3-4 # on A/B_COPY_2/E is identical to the mergeinfo added to A/B_COPY_2 as a @@ -1741,7 +1535,7 @@ def mergeinfo_switch_elision(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, True, + [], True, '--ignore-ancestry') # Now check a switch which reverses and earlier switch and leaves @@ -1763,11 +1557,10 @@ def mergeinfo_switch_elision(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, True, + [], True, '--ignore-ancestry') - svntest.actions.run_and_verify_svn(None, - ["property '" + SVN_PROP_MERGEINFO + + svntest.actions.run_and_verify_svn(["property '" + SVN_PROP_MERGEINFO + "' set on '" + lambda_path + "'" + "\n"], [], 'ps', SVN_PROP_MERGEINFO, '/A/B/lambda:3-4', lambda_path) @@ -1785,7 +1578,7 @@ def mergeinfo_switch_elision(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, True, + [], True, '--ignore-ancestry') #---------------------------------------------------------------------- @@ -1815,8 +1608,8 @@ def switch_with_depth(sbox): svntest.actions.run_and_verify_switch(wc_dir, AB_path, AD_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '--depth', 'empty', '--ignore-ancestry') # Set up expected results for reverting 'switch --depth=empty' @@ -1827,8 +1620,8 @@ def switch_with_depth(sbox): svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '--depth', 'empty', '--ignore-ancestry') # Set up expected results of 'switch --depth=files' @@ -1854,8 +1647,8 @@ def switch_with_depth(sbox): svntest.actions.run_and_verify_switch(wc_dir, AB_path, AD_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '--depth', 'files', '--ignore-ancestry') # Set up 
expected results for reverting 'switch --depth=files' @@ -1869,8 +1662,8 @@ def switch_with_depth(sbox): svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '--depth', 'files', '--ignore-ancestry') # Putting the depth=immediates stuff in a subroutine, because we're @@ -1907,8 +1700,8 @@ def switch_with_depth(sbox): svntest.actions.run_and_verify_switch(wc_dir, AB_path, AD_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '--depth', 'immediates', '--ignore-ancestry') @@ -1932,8 +1725,8 @@ def switch_with_depth(sbox): svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '--ignore-ancestry') # Okay, repeat 'switch --depth=immediates'. (Afterwards we'll @@ -1958,8 +1751,8 @@ def switch_with_depth(sbox): svntest.actions.run_and_verify_switch(wc_dir, AB_path, AB_url, expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '--depth', 'infinity', '--ignore-ancestry') @@ -1976,24 +1769,18 @@ def switch_to_dir_with_peg_rev(sbox): X_path = sbox.ospath('X') Y_path = sbox.ospath('Y') svntest.main.run_svn(None, 'mkdir', X_path, Y_path) - svntest.main.run_svn(None, 'ci', - '-m', 'log message', - wc_dir) + sbox.simple_commit(message='log message') # change tau in rev. 3 ADG_path = sbox.ospath('A/D/G') tau_path = os.path.join(ADG_path, 'tau') svntest.main.file_append(tau_path, "new line\n") - svntest.main.run_svn(None, 'ci', - '-m', 'log message', - wc_dir) + sbox.simple_commit(message='log message') # delete A/D/G in rev. 
4 svntest.main.run_svn(None, 'up', wc_dir) svntest.main.run_svn(None, 'rm', ADG_path) - svntest.main.run_svn(None, 'ci', - '-m', 'log message', - wc_dir) + sbox.simple_commit(message='log message') # Test 1: switch X to A/D/G@2 ADG_url = repo_url + '/A/D/G' @@ -2025,8 +1812,8 @@ def switch_to_dir_with_peg_rev(sbox): svntest.actions.run_and_verify_switch(wc_dir, X_path, ADG_url + '@3', expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, + [], False, '-r', '2', '--ignore-ancestry') def switch_urls_with_spaces(sbox): @@ -2047,7 +1834,7 @@ def switch_urls_with_spaces(sbox): svntest.main.file_write(bbb_path, "This is the file 'bar baz bal'.\n") svntest.main.run_svn(None, 'add', tpm_path, bbb_path) - svntest.main.run_svn(None, 'ci', '-m', 'log message', wc_dir) + sbox.simple_commit(message='log message') # Test 1: switch directory 'A B C' to url 'X Y Z' XYZ_url = repo_url + '/X Y Z' @@ -2072,7 +1859,7 @@ def switch_urls_with_spaces(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') # Test 2: switch file 'bar baz bal' to 'tau pau mau' @@ -2099,7 +1886,7 @@ def switch_urls_with_spaces(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') def switch_to_dir_with_peg_rev2(sbox): @@ -2112,23 +1899,19 @@ def switch_to_dir_with_peg_rev2(sbox): # prepare dir X in rev. 2 X_path = sbox.ospath('X') svntest.main.run_svn(None, 'mkdir', X_path) - svntest.main.run_svn(None, 'ci', - '-m', 'log message', - wc_dir) + sbox.simple_commit(message='log message') # make a change in ADG in rev. 
3 tau_path = sbox.ospath('A/D/G/tau') svntest.main.file_append(tau_path, "extra line\n") - svntest.main.run_svn(None, 'ci', '-m', 'log message', wc_dir) + sbox.simple_commit(message='log message') # Rename ADG to ADY in rev 4 svntest.main.run_svn(None, 'up', wc_dir) ADG_path = sbox.ospath('A/D/G') ADY_path = sbox.ospath('A/D/Y') svntest.main.run_svn(None, 'mv', ADG_path, ADY_path) - svntest.main.run_svn(None, 'ci', - '-m', 'log message', - wc_dir) + sbox.simple_commit(message='log message') # Test switch X to rev 2 of A/D/Y@HEAD ADY_url = sbox.repo_url + '/A/D/Y' @@ -2166,8 +1949,7 @@ def switch_to_dir_with_peg_rev2(sbox): svntest.actions.run_and_verify_switch(wc_dir, X_path, ADY_url + '@HEAD', expected_output, expected_disk, - expected_status, None, - None, None, None, None, False, + expected_status, [], False, '-r', '2', '--ignore-ancestry') def switch_to_root(sbox): @@ -2219,7 +2001,7 @@ def switch_to_root(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') #---------------------------------------------------------------------- @@ -2239,19 +2021,19 @@ def tolerate_local_mods(sbox): A_url = sbox.repo_url + '/A' A2_url = sbox.repo_url + '/A2' - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'cp', '-m', 'make copy', A_url, A2_url) os.mkdir(L_path) svntest.main.run_svn(None, 'add', L_path) - svntest.main.run_svn(None, 'ci', '-m', 'Commit added folder', wc_dir) + sbox.simple_commit(message='Commit added folder') # locally modified unversioned file svntest.main.file_write(LM_path, 'Locally modified file.\n', 'w+') expected_output = svntest.wc.State(wc_dir, { - 'A/L' : Item(status='D '), + 'A/L' : Item(status=' ', treeconflict='C'), }) expected_disk = svntest.main.greek_state.copy() @@ -2263,13 +2045,16 @@ def tolerate_local_mods(sbox): expected_status = 
svntest.actions.get_virginal_state(wc_dir, 3) expected_status.tweak('', 'iota', wc_rev=1) expected_status.tweak('A', switched='S') + expected_status.add({ + 'A/L' : Item(status='A ', copied='+', treeconflict='C', wc_rev='-') + }) # Used to fail with locally modified or unversioned files svntest.actions.run_and_verify_switch(wc_dir, A_path, A2_url, expected_output, expected_disk, expected_status, - None, None, None, None, None, + [], False, '--ignore-ancestry') #---------------------------------------------------------------------- @@ -2291,8 +2076,6 @@ disk_after_leaf_edit = svntest.deeptrees.deep_trees_after_leaf_edit disk_after_leaf_del = svntest.deeptrees.deep_trees_after_leaf_del disk_after_tree_del = svntest.deeptrees.deep_trees_after_tree_del -disk_empty_dirs = svntest.deeptrees.deep_trees_empty_dirs - deep_trees_conflict_output = svntest.deeptrees.deep_trees_conflict_output deep_trees_conflict_output_skipped = \ svntest.deeptrees.deep_trees_conflict_output_skipped @@ -2327,10 +2110,14 @@ def tree_conflicts_on_switch_1_1(sbox): 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='U') }) - expected_disk = disk_empty_dirs.copy() - expected_disk.remove('D/D1', 'DF/D1', 'DD/D1', 'DD/D1/D2', - 'DDF/D1', 'DDF/D1/D2', - 'DDD/D1', 'DDD/D1/D2', 'DDD/D1/D2/D3') + expected_disk = svntest.wc.State('', { + 'F' : Item(), + 'D' : Item(), + 'DF' : Item(), + 'DD' : Item(), + 'DDF' : Item(), + 'DDD' : Item(), + }) # The files delta, epsilon, and zeta are incoming additions, but since # they are all within locally deleted trees they should also be schedule @@ -2413,7 +2200,14 @@ def tree_conflicts_on_switch_1_2(sbox): 'DF/D1/beta' : Item(status=' ', treeconflict='D'), }) - expected_disk = disk_empty_dirs.copy() + expected_disk = svntest.wc.State('', { + 'F' : Item(), + 'D' : Item(), + 'DF' : Item(), + 'DD' : Item(), + 'DDF' : Item(), + 'DDD' : Item(), + }) expected_status = deep_trees_status_local_tree_del.copy() @@ -2436,18 +2230,11 @@ def tree_conflicts_on_switch_1_2(sbox): 
'DDD/D1/D2/D3', 'DDF/D1/D2/gamma', 'DF/D1/beta') - ### Why does the deep trees state not include files? - expected_disk.remove('D/D1', - 'DD/D1/D2', - 'DDD/D1/D2/D3') - expected_disk.remove('DF/D1', 'DD/D1', - 'DDF/D1', 'DDF/D1/D2', - 'DDD/D1', 'DDD/D1/D2') expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file delete, incoming file delete upon switch' + '^local file delete, incoming file delete or move upon switch' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, @@ -2465,7 +2252,7 @@ def tree_conflicts_on_switch_1_2(sbox): }, 'D/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir delete, incoming dir delete or move upon switch' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, @@ -2530,37 +2317,37 @@ def tree_conflicts_on_switch_2_1(sbox): expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file edit, incoming file delete upon switch' + '^local file edit, incoming file delete or move upon switch' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, 'DF/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DF/D1@2' + ' Source right: .none.*(/DF/D1@3)?$', }, 'DDF/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DDF/D1@2' + ' Source right: .none.*(/DDF/D1@3)?$', }, 'D/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, 'DD/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DD/D1@2' + ' Source right: .none.*(/DD/D1@3)?$', }, 'DDD/D1' 
: { 'Tree conflict' : - '^local dir edit, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DDD/D1@2' + ' Source right: .none.*(/DDD/D1@3)?$', }, @@ -2591,7 +2378,14 @@ def tree_conflicts_on_switch_2_2(sbox): ### when dirs_same_p() is implemented) expected_output = deep_trees_conflict_output - expected_disk = disk_empty_dirs.copy() + expected_disk = svntest.wc.State('', { + 'DDF/D1/D2' : Item(), + 'F' : Item(), + 'D' : Item(), + 'DF/D1' : Item(), + 'DD/D1' : Item(), + 'DDD/D1/D2' : Item(), + }) expected_status = svntest.deeptrees.deep_trees_virginal_state.copy() expected_status.add({'' : Item(), @@ -2602,64 +2396,51 @@ def tree_conflicts_on_switch_2_2(sbox): # Expect the incoming tree deletes and the local leaf deletes to mean # that all deleted paths are *really* gone, not simply scheduled for # deletion. - expected_status.tweak('F/alpha', - 'D/D1', - 'DD/D1', - 'DF/D1', - 'DDD/D1', - 'DDF/D1', + expected_status.tweak('DD/D1', 'DF/D1', 'DDF/D1', 'DDD/D1', + status='A ', copied='+', treeconflict='C', + wc_rev='-') + expected_status.tweak('DDF/D1/D2', 'DDD/D1/D2', + copied='+', wc_rev='-') + expected_status.tweak('DD/D1/D2', 'DF/D1/beta', 'DDD/D1/D2/D3', + 'DDF/D1/D2/gamma', + status='D ', copied='+', wc_rev='-') + expected_status.tweak('F/alpha', 'D/D1', status='! ', treeconflict='C', wc_rev=None) - # Remove from expected status and disk everything below the deleted paths. 
- expected_status.remove('DD/D1/D2', - 'DF/D1/beta', - 'DDD/D1/D2', - 'DDD/D1/D2/D3', - 'DDF/D1/D2', - 'DDF/D1/D2/gamma',) - expected_disk.remove('D/D1', - 'DD/D1', - 'DD/D1/D2', - 'DF/D1', - 'DDD/D1', - 'DDD/D1/D2', - 'DDD/D1/D2/D3', - 'DDF/D1', - 'DDF/D1/D2',) expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file delete, incoming file delete upon switch' + '^local file delete, incoming file delete or move upon switch' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, 'DF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DF/D1@2' + ' Source right: .none.*(/DF/D1@3)?$', }, 'DDF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DDF/D1@2' + ' Source right: .none.*(/DDF/D1@3)?$', }, 'D/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir delete, incoming dir delete or move upon switch' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, 'DD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DD/D1@2' + ' Source right: .none.*(/DD/D1@3)?$', }, 'DDD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir edit, incoming dir delete or move upon switch' + ' Source left: .dir.*/DDD/D1@2' + ' Source right: .none.*(/DDD/D1@3)?$', }, @@ -2683,7 +2464,14 @@ def tree_conflicts_on_switch_3(sbox): expected_output = deep_trees_conflict_output - expected_disk = disk_empty_dirs.copy() + expected_disk = svntest.wc.State('', { + 'F' : Item(), + 'D' : Item(), + 'DF' : Item(), + 'DD' : Item(), + 'DDF' : Item(), + 'DDD' : Item(), + }) expected_status = deep_trees_status_local_tree_del.copy() 
expected_status.tweak('', switched='S') @@ -2705,50 +2493,41 @@ def tree_conflicts_on_switch_3(sbox): 'DDD/D1/D2/D3', 'DDF/D1/D2', 'DDF/D1/D2/gamma',) - expected_disk.remove('D/D1', - 'DD/D1', - 'DD/D1/D2', - 'DF/D1', - 'DDD/D1', - 'DDD/D1/D2', - 'DDD/D1/D2/D3', - 'DDF/D1', - 'DDF/D1/D2',) expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file delete, incoming file delete upon switch' + '^local file delete, incoming file delete or move upon switch' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, 'DF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir delete, incoming dir delete or move upon switch' + ' Source left: .dir.*/DF/D1@2' + ' Source right: .none.*(/DF/D1@3)?$', }, 'DDF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir delete, incoming dir delete or move upon switch' + ' Source left: .dir.*/DDF/D1@2' + ' Source right: .none.*(/DDF/D1@3)?$', }, 'D/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir delete, incoming dir delete or move upon switch' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, 'DD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir delete, incoming dir delete or move upon switch' + ' Source left: .dir.*/DD/D1@2' + ' Source right: .none.*(/DD/D1@3)?$', }, 'DDD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon switch' + '^local dir delete, incoming dir delete or move upon switch' + ' Source left: .dir.*/DDD/D1@2' + ' Source right: .none.*(/DDD/D1@3)?$', }, @@ -2779,7 +2558,7 @@ def copy_with_switched_subdir(sbox): svntest.actions.run_and_verify_status(wc_dir, state) # Switch A/D/G - svntest.actions.run_and_verify_svn(None, None, [], 'switch', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--ignore-ancestry', E_url, G) state.tweak('A/D/G', switched='S') @@ -2791,7 +2570,7 
@@ def copy_with_switched_subdir(sbox): svntest.actions.run_and_verify_status(wc_dir, state) # And now copy A/D and everything below it to R - svntest.actions.run_and_verify_svn(None, None, [], 'cp', D, R) + svntest.actions.run_and_verify_svn(None, [], 'cp', D, R) state.add({ 'R' : Item(status='A ', copied='+', wc_rev='-'), @@ -2807,22 +2586,21 @@ def copy_with_switched_subdir(sbox): svntest.actions.run_and_verify_status(wc_dir, state) - svntest.main.run_svn(None, 'ci', '-m', 'Commit added folder', wc_dir) + sbox.simple_commit(message='Commit added folder') # Additional test, it should commit to R/G/alpha. svntest.main.run_svn(None, 'up', wc_dir) svntest.main.file_append(sbox.ospath('R/G/alpha'), "apple") - svntest.main.run_svn(None, 'ci', '-m', 'Commit changed file', wc_dir) + sbox.simple_commit(message='Commit changed file') # Checkout working copy to verify result svntest.main.safe_rmtree(wc_dir, 1) - svntest.actions.run_and_verify_svn(None, - None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc_dir) # Switch A/D/G again to recreate state - svntest.actions.run_and_verify_svn(None, None, [], 'switch', + svntest.actions.run_and_verify_svn(None, [], 'switch', '--ignore-ancestry', E_url, G) # Clear the statuses @@ -2846,15 +2624,15 @@ def up_to_old_rev_with_subtree_switched_to_root(sbox): # Starting with a vanilla greek tree, create a branch of A, switch # that branch to the root of the repository, then update the WC to # r1. - svntest.actions.run_and_verify_svn(None, None, [], 'copy', A_path, + svntest.actions.run_and_verify_svn(None, [], 'copy', A_path, branch_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', wc_dir, + svntest.actions.run_and_verify_svn(None, [], 'ci', wc_dir, '-m', 'Create a branch') - svntest.actions.run_and_verify_svn(None, None, [], 'sw', sbox.repo_url, + svntest.actions.run_and_verify_svn(None, [], 'sw', sbox.repo_url, branch_path, '--ignore-ancestry') # Now update the WC to r1. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r1', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r1', wc_dir) def different_node_kind(sbox): "switch to a different node kind" @@ -2877,9 +2655,9 @@ def different_node_kind(sbox): expected_status.add_state(rel_path, pristine_status.subtree(rel_url)) svntest.actions.run_and_verify_switch(sbox.wc_dir, full_path, full_url, None, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '--ignore-ancestry') - svntest.actions.run_and_verify_svn(None, None, [], 'info', full_path) + svntest.actions.run_and_verify_svn(None, [], 'info', full_path) if not os.path.isdir(full_path): raise svntest.Failure @@ -2893,9 +2671,9 @@ def different_node_kind(sbox): expected_status.tweak(rel_path, switched='S') svntest.actions.run_and_verify_switch(sbox.wc_dir, full_path, full_url, None, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '--ignore-ancestry') - svntest.actions.run_and_verify_svn(None, None, [], 'info', full_path) + svntest.actions.run_and_verify_svn(None, [], 'info', full_path) if not os.path.isfile(full_path): raise svntest.Failure @@ -2915,12 +2693,12 @@ def switch_to_spaces(sbox): repo_url = sbox.repo_url # Paths are normalized in the command processing, so %20 is equivalent to ' ' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', repo_url + '/A', repo_url + '/A%20with space', '-m', '') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', repo_url + '/A%20with space', repo_url + '/A with%20more spaces', '-m', '') @@ -2958,17 +2736,145 @@ def switch_across_replacement(sbox): }) svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None, None, - None, None, None, None, None, False, + [], False, '-r1') svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None, None, - None, None, None, None, None, 
False, + [], False, '-r2') svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A'), '^/A', expected_output, None, None, - None, None, None, None, None, False, + [], False, '-r1') +@Issue(1975) +def switch_keywords(sbox): + "switch and svn:keywords" + sbox.build() + gamma_path = sbox.ospath('A/D/gamma') + psi_path = sbox.ospath('A/D/H/psi') + + sbox.simple_propset('svn:keywords', 'URL', 'A/D/gamma') + svntest.main.file_write(gamma_path, "$URL$\n") + sbox.simple_propset('svn:keywords', 'URL', 'A/D/H/psi') + svntest.main.file_write(psi_path, "$URL$\n") + sbox.simple_commit() + + expected_disk = svntest.main.greek_state.copy() + expected_disk.tweak('A/D/gamma', + contents="$URL: %s/A/D/gamma $\n" % sbox.repo_url) + expected_disk.tweak('A/D/H/psi', + contents="$URL: %s/A/D/H/psi $\n" % sbox.repo_url) + + svntest.actions.run_and_verify_update(sbox.wc_dir, + None, expected_disk, None) + sbox.simple_copy('A', 'A_copy') + sbox.simple_commit() + sbox.simple_update() + + # Next, we're going to switch A to A_copy, and expect keywords + # in the switched files gamma and psi to be updated accordingly. 
+ + expected_disk.add({ + 'A_copy/D/H/chi' : Item(contents="This is the file 'chi'.\n"), + 'A_copy/D/H/psi' : Item(contents="$URL: %s/A_copy/D/H/psi $\n" + % sbox.repo_url), + 'A_copy/D/H/omega' : Item(contents="This is the file 'omega'.\n"), + 'A_copy/D/G/pi' : Item(contents="This is the file 'pi'.\n"), + 'A_copy/D/G/tau' : Item(contents="This is the file 'tau'.\n"), + 'A_copy/D/G/rho' : Item(contents="This is the file 'rho'.\n"), + 'A_copy/D/gamma' : Item(contents="$URL: %s/A_copy/D/gamma $\n" + % sbox.repo_url), + 'A_copy/B/F' : Item(), + 'A_copy/B/E/alpha' : Item(contents="This is the file 'alpha'.\n"), + 'A_copy/B/E/beta' : Item(contents="This is the file 'beta'.\n"), + 'A_copy/B/lambda' : Item(contents="This is the file 'lambda'.\n"), + 'A_copy/mu' : Item(contents="This is the file 'mu'.\n"), + 'A_copy/C' : Item(), + }) + + # update expected URL for switched gamma + expected_disk.tweak('A/D/gamma', + contents="$URL: %s/A_copy/D/gamma $\n" % sbox.repo_url) + + # leave gamma unmodified, locally modify psi + svntest.main.file_write(psi_path, "$URL$\nnew line\n") + # update expected URL for switched psi + expected_disk.tweak('A/D/H/psi', + contents="$URL: %s/A_copy/D/H/psi $\nnew line\n" + % sbox.repo_url) + + expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 3) + expected_status.add({ + 'A_copy' : Item(status=' ', wc_rev='3'), + 'A_copy/mu' : Item(status=' ', wc_rev='3'), + 'A_copy/D' : Item(status=' ', wc_rev='3'), + 'A_copy/D/H' : Item(status=' ', wc_rev='3'), + 'A_copy/D/H/psi' : Item(status=' ', wc_rev='3'), + 'A_copy/D/H/chi' : Item(status=' ', wc_rev='3'), + 'A_copy/D/H/omega' : Item(status=' ', wc_rev='3'), + 'A_copy/D/gamma' : Item(status=' ', wc_rev='3'), + 'A_copy/D/G' : Item(status=' ', wc_rev='3'), + 'A_copy/D/G/rho' : Item(status=' ', wc_rev='3'), + 'A_copy/D/G/tau' : Item(status=' ', wc_rev='3'), + 'A_copy/D/G/pi' : Item(status=' ', wc_rev='3'), + 'A_copy/B' : Item(status=' ', wc_rev='3'), + 'A_copy/B/E' : Item(status=' ', 
wc_rev='3'), + 'A_copy/B/E/alpha' : Item(status=' ', wc_rev='3'), + 'A_copy/B/E/beta' : Item(status=' ', wc_rev='3'), + 'A_copy/B/F' : Item(status=' ', wc_rev='3'), + 'A_copy/B/lambda' : Item(status=' ', wc_rev='3'), + 'A_copy/C' : Item(status=' ', wc_rev='3'), + }) + expected_status.tweak('A', switched='S') + expected_status.tweak('A/D/H/psi', status='M ') + + # both gamma and psi should have update URLs after the switch + svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath('A'), '^/A_copy', + None, expected_disk, expected_status) + +@Issue(4524) +def switch_moves(sbox): + "switch moves on wc checkpoint" + + sbox.build() + + sbox.simple_move('A/B', 'B') + sbox.simple_rm('A') + + branch_url = sbox.repo_url + '/branch' + + svntest.actions.run_and_verify_svn(None, [], + 'cp', sbox.wc_dir, branch_url, + '-m', '') + + expected_disk = svntest.wc.State('', { + 'B/E/alpha' : Item(contents="This is the file 'alpha'.\n"), + 'B/E/beta' : Item(contents="This is the file 'beta'.\n"), + 'B/lambda' : Item(contents="This is the file 'lambda'.\n"), + 'B/F' : Item(), + 'iota' : Item(contents="This is the file 'iota'.\n"), + }) + + expected_status = svntest.wc.State(sbox.wc_dir, { + '' : Item(status=' ', wc_rev='2'), + 'B' : Item(status='R ', copied='+', treeconflict='C', wc_rev='-'), + 'B/lambda' : Item(status=' ', copied='+', wc_rev='-'), + 'B/F' : Item(status=' ', copied='+', wc_rev='-'), + 'B/E' : Item(status=' ', copied='+', wc_rev='-'), + 'B/E/beta' : Item(status=' ', copied='+', wc_rev='-'), + 'B/E/alpha' : Item(status=' ', copied='+', wc_rev='-'), + 'A' : Item(status='! ', treeconflict='C'), + 'iota' : Item(status=' ', wc_rev='2'), + }) + + # In Subversion 1.8 this scenario causes an Sqlite row not found error. + # It would be nice if we could handle the tree conflict more intelligent, as + # the working copy matches the incomming change. 
+ svntest.actions.run_and_verify_switch(sbox.wc_dir, sbox.ospath(''), branch_url, + None, expected_disk, expected_status) + + ######################################################################## # Run the tests @@ -3011,6 +2917,8 @@ test_list = [ None, different_node_kind, switch_to_spaces, switch_across_replacement, + switch_keywords, + switch_moves, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/trans_tests.py b/subversion/tests/cmdline/trans_tests.py index 3dd4fd4..8aa2597 100755 --- a/subversion/tests/cmdline/trans_tests.py +++ b/subversion/tests/cmdline/trans_tests.py @@ -191,7 +191,7 @@ def setup_working_copy(wc_dir, value_len): # Set the property keyword for PATH. Turn on all possible keywords. ### todo: Later, take list of keywords to set. def keywords_on(path): - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', "svn:keywords", "Author Rev Date URL Id Header", path) @@ -199,7 +199,7 @@ def keywords_on(path): # Delete property NAME from versioned PATH in the working copy. ### todo: Later, take list of keywords to remove from the propval? def keywords_off(path): - svntest.actions.run_and_verify_svn(None, None, [], 'propdel', + svntest.actions.run_and_verify_svn(None, [], 'propdel', "svn:keywords", path) @@ -298,7 +298,7 @@ def keywords_from_birth(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - None, None, wc_dir) + None) # Make sure the unexpanded URL keyword got expanded correctly. fp = open(url_unexp_path, 'r') @@ -449,9 +449,6 @@ def keywords_from_birth(sbox): # This is a slight rewrite of his test, to use the run_and_verify_* API. # This is for issue #631. 
-def do_nothing(x, y): - return 0 - @Issue(631) def update_modified_with_translation(sbox): "update modified file with eol-style 'native'" @@ -462,7 +459,7 @@ def update_modified_with_translation(sbox): # Replace contents of rho and set eol translation to 'native' rho_path = os.path.join(wc_dir, 'A', 'D', 'G', 'rho') svntest.main.file_write(rho_path, "1\n2\n3\n4\n5\n6\n7\n8\n9\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'native', rho_path) @@ -479,7 +476,7 @@ def update_modified_with_translation(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, rho_path) + [], rho_path) # Change rho again svntest.main.file_write(rho_path, "1\n2\n3\n4\n4.5\n5\n6\n7\n8\n9\n") @@ -491,7 +488,7 @@ def update_modified_with_translation(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, expected_status, - None, rho_path) + [], rho_path) # Locally modify rho again. svntest.main.file_write(rho_path, "1\n2\n3\n4\n4.5\n5\n6\n7\n8\n9\n10\n") @@ -515,6 +512,17 @@ def update_modified_with_translation(sbox): "8", "9", "10", + "||||||| .r3", + "1", + "2", + "3", + "4", + "4.5", + "5", + "6", + "7", + "8", + "9", "=======", "This is the file 'rho'.", ">>>>>>> .r1", @@ -522,13 +530,14 @@ def update_modified_with_translation(sbox): # Updating back to revision 1 should not error; the merge should # work, with eol-translation turned on. 
+ extra_files = ['rho.r1', 'rho.r3', 'rho.mine'] svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - do_nothing, None, - None, None, - 0, '-r', '1', wc_dir) + None, + [], False, + '-r', '1', wc_dir, + extra_files=extra_files) #---------------------------------------------------------------------- @@ -559,23 +568,24 @@ def eol_change_is_text_mod(sbox): f.close() # commit the file - svntest.actions.run_and_verify_svn(None, None, [], 'add', foo_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'add', foo_path) + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', foo_path) if svntest.main.windows: - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'LF', foo_path) else: - svntest.actions.run_and_verify_svn(None, None, [], 'propset', + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'CRLF', foo_path) # check 1: did new contents get transmitted? expected_output = ["Sending " + foo_path + "\n", - "Transmitting file data .\n", + "Transmitting file data .done\n", + "Committing transaction...\n", "Committed revision 3.\n"] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'ci', '-m', 'log msg', foo_path) # check 2: do the files have the right contents now? @@ -605,20 +615,20 @@ def keyword_expanded_on_checkout(sbox): # The bug didn't occur if there were multiple files in the # directory, so setup an empty directory. 
Z_path = os.path.join(wc_dir, 'Z') - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', Z_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', Z_path) # Add the file that has the keyword to be expanded url_path = os.path.join(Z_path, 'url') svntest.main.file_append(url_path, "$URL$") - svntest.actions.run_and_verify_svn(None, None, [], 'add', url_path) + svntest.actions.run_and_verify_svn(None, [], 'add', url_path) keywords_on(url_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', wc_dir) other_wc_dir = sbox.add_wc_path('other') other_url_path = os.path.join(other_wc_dir, 'Z', 'url') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, other_wc_dir) @@ -645,7 +655,7 @@ def cat_keyword_expansion(sbox): # Set up A/mu to do $Rev$ keyword expansion svntest.main.file_append(mu_path , "$Rev$\n$Author$") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:keywords', 'Rev Author', mu_path) @@ -655,38 +665,35 @@ def cat_keyword_expansion(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Change the author to value which will get truncated on expansion full_author = "x" * 400 key_author = "x" * 244 svntest.actions.enable_revprop_changes(sbox.repo_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', '--revprop', '-r2', 'svn:author', full_author, sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, [ full_author ], [], + svntest.actions.run_and_verify_svn([ full_author ], [], 'propget', '--revprop', '-r2', - 'svn:author', '--strict', + 'svn:author', '--no-newline', sbox.wc_dir) 
# Make another commit so that the last changed revision for A/mu is # not HEAD. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar', lambda_path) expected_output = wc.State(wc_dir, { 'A/B/lambda' : Item(verb='Sending'), }) expected_status.tweak('A/B/lambda', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # At one stage the keywords were expanded to values for the requested # revision, not to those committed revision - svntest.actions.run_and_verify_svn(None, - [ "This is the file 'mu'.\n", + svntest.actions.run_and_verify_svn([ "This is the file 'mu'.\n", "$Rev: 2 $\n", "$Author: " + key_author + " $"], [], 'cat', '-r', 'HEAD', mu_path) @@ -702,8 +709,8 @@ def copy_propset_commit(sbox): mu2_path = os.path.join(wc_dir, 'A', 'mu2') # Copy and propset - svntest.actions.run_and_verify_svn(None, None, [], 'copy', mu_path, mu2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', mu_path, mu2_path) + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:eol-style', 'native', mu2_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -718,8 +725,7 @@ def copy_propset_commit(sbox): }) expected_status.tweak('A/mu2', status=' ', wc_rev=2, copied=None) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) #---------------------------------------------------------------------- # Create a greek tree, commit a keyword into one file, @@ -741,11 +747,10 @@ def propset_commit_checkout_nocrash(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Set property to do 
keyword expansion on A/mu, commit. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:keywords', 'Rev', mu_path) expected_output = wc.State(wc_dir, { 'A/mu' : Item(verb='Sending'), @@ -753,14 +758,13 @@ def propset_commit_checkout_nocrash(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, - expected_output, expected_status, - None, wc_dir) + expected_output, expected_status) # Check out into another wc dir other_wc_dir = sbox.add_wc_path('other') mu_other_path = os.path.join(other_wc_dir, 'A', 'mu') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, other_wc_dir) @@ -783,14 +787,14 @@ def propset_revert_noerror(sbox): # Set the Rev keyword for the mu file # could use the keywords_on()/keywords_off() functions to # set/del all svn:keywords - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'svn:keywords', 'Rev', mu_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.tweak('A/mu', status=' M') svntest.actions.run_and_verify_status(wc_dir, expected_status) # Revert the propset - svntest.actions.run_and_verify_svn(None, None, [], 'revert', mu_path) + svntest.actions.run_and_verify_svn(None, [], 'revert', mu_path) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) svntest.actions.run_and_verify_status(wc_dir, expected_status) @@ -822,9 +826,7 @@ def props_only_file_update(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Create r3 that drops svn:keywords @@ -837,9 +839,7 @@ def props_only_file_update(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Now, go 
back to r2. iota should have the Author keyword expanded. expected_disk = svntest.main.greek_state.copy() @@ -849,9 +849,7 @@ def props_only_file_update(sbox): svntest.actions.run_and_verify_update(wc_dir, None, None, expected_status, - None, - None, None, None, None, - False, + [], False, wc_dir, '-r', '2') if open(iota_path).read() != ''.join(content_expanded): @@ -864,11 +862,7 @@ def props_only_file_update(sbox): expected_status = svntest.actions.get_virginal_state(wc_dir, 3) svntest.actions.run_and_verify_update(wc_dir, - None, expected_disk, expected_status, - None, - None, None, None, None, - False, - wc_dir) + None, expected_disk, expected_status) if open(iota_path).read() != ''.join(content): raise svntest.Failure("$Author$ is not contracted in 'iota'") @@ -905,11 +899,11 @@ def autoprops_inconsistent_eol(sbox): sbox.simple_add_text(text, 'add.c') sbox.simple_add_text(text, 'add-force.c') - svntest.actions.run_and_verify_svn(None, None, '.*inconsistent newlines.*', + svntest.actions.run_and_verify_svn(None, '.*inconsistent newlines.*', 'ps', 'svn:eol-style', 'native', sbox.ospath('add.c')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ps', 'svn:eol-style', 'native', '--force', sbox.ospath('add.c')) @@ -931,7 +925,7 @@ def autoprops_inconsistent_eol(sbox): expected_output = ['A %s\n' % sbox.ospath('auto.c')] # Fails with svn: E200009: File '.*auto.c' has inconsistent newlines - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'add', sbox.ospath('auto.c')) @XFail() @@ -950,7 +944,7 @@ def autoprops_inconsistent_mime(sbox): expected_output = ['A %s\n' % sbox.ospath('c.iota.c')] # Fails with svn: E200009: File '.*c.iota.c' has binary mime type property - svntest.actions.run_and_verify_svn(None, expected_output, + svntest.actions.run_and_verify_svn(expected_output, [], 'add', sbox.ospath('c.iota.c')) diff --git 
a/subversion/tests/cmdline/tree_conflict_tests.py b/subversion/tests/cmdline/tree_conflict_tests.py index c9e5b1a..acf7c90 100755 --- a/subversion/tests/cmdline/tree_conflict_tests.py +++ b/subversion/tests/cmdline/tree_conflict_tests.py @@ -25,7 +25,7 @@ ###################################################################### # General modules -import sys, re, os, traceback +import sys, re, os, stat, traceback # Our testing module import svntest @@ -350,7 +350,7 @@ def set_up_repos(wc_dir, br_dir, scenarios): main.run_svn(None, 'mkdir', '--parents', P) for modaction in init_mods: modify(modaction, incoming_paths(wc_dir, P)) - run_and_verify_svn(None, AnyOutput, [], + run_and_verify_svn(AnyOutput, [], 'commit', '-m', 'Initial set-up.', wc_dir) # Capture the revision number init_rev = 2 ### hard-coded @@ -363,7 +363,7 @@ def set_up_repos(wc_dir, br_dir, scenarios): modify(modaction, incoming_paths(wc_dir, P)) # commit all the modifications - run_and_verify_svn(None, AnyOutput, [], + run_and_verify_svn(AnyOutput, [], 'commit', '-m', 'Action.', wc_dir) # Capture the revision number changed_rev = 3 ### hard-coded @@ -411,7 +411,7 @@ def ensure_tree_conflict(sbox, operation, else: # switch/merge # Make, and work in, a "branch2" that is a copy of "branch1". target_br = "branch2" - run_and_verify_svn(None, AnyOutput, [], + run_and_verify_svn(AnyOutput, [], 'copy', '-r', str(source_left_rev), url_of(source_br), url_of(target_br), '-m', 'Create target branch.') @@ -435,7 +435,7 @@ def ensure_tree_conflict(sbox, operation, for modaction in loc_action: modify(modaction, localmod_paths(".", target_path), is_init=False) if commit_local_mods: - run_and_verify_svn(None, AnyOutput, [], + run_and_verify_svn(AnyOutput, [], 'commit', target_path, '-m', 'Mods in target branch.') head_rev += 1 @@ -444,7 +444,7 @@ def ensure_tree_conflict(sbox, operation, # For switch/merge, there is no such precondition. 
if operation == 'update': logger.debug("--- Trying to commit (expecting 'out-of-date' error)") - run_and_verify_commit(".", None, None, "Commit failed", + run_and_verify_commit(".", None, None, ".*Commit failed.*", target_path) if modaction.startswith('f'): @@ -462,15 +462,15 @@ def ensure_tree_conflict(sbox, operation, # Do the main action if operation == 'update': logger.debug("--- Updating") - run_and_verify_svn(None, expected_stdout, [], + run_and_verify_svn(expected_stdout, [], 'update', target_path) elif operation == 'switch': logger.debug("--- Switching") - run_and_verify_svn(None, expected_stdout, [], + run_and_verify_svn(expected_stdout, [], 'switch', source_url, target_path) elif operation == 'merge': logger.debug("--- Merging") - run_and_verify_svn(None, expected_stdout, [], + run_and_verify_svn(expected_stdout, [], 'merge', '--allow-mixed-revisions', '-r', str(source_left_rev) + ':' + str(source_right_rev), @@ -513,7 +513,7 @@ def ensure_tree_conflict(sbox, operation, "^ > .* upon " + operation] + svntest.main.summary_of_conflicts(tree_conflicts=1)) ]) - run_and_verify_svn(None, expected_stdout, [], + run_and_verify_svn(expected_stdout, [], 'status', victim_path) logger.debug("--- Resolving the conflict") @@ -523,14 +523,14 @@ def ensure_tree_conflict(sbox, operation, run_and_verify_resolved([victim_path]) logger.debug("--- Checking that 'status' does not report a conflict") - exitcode, stdout, stderr = run_and_verify_svn(None, None, [], + exitcode, stdout, stderr = run_and_verify_svn(None, [], 'status', victim_path) for line in stdout: if line[6] == 'C': # and line.endswith(victim_path + '\n'): raise svntest.Failure("unexpected status C") # on victim_path # logger.debug("--- Committing (should now succeed)") - # run_and_verify_svn(None, None, [], + # run_and_verify_svn(None, [], # 'commit', '-m', '', target_path) # target_start_rev += 1 @@ -542,7 +542,7 @@ def ensure_tree_conflict(sbox, operation, main.run_svn(None, 'revert', '-R', wc_dir) 
main.safe_rmtree(wc_dir) if operation != 'update': - run_and_verify_svn(None, AnyOutput, [], + run_and_verify_svn(AnyOutput, [], 'delete', url_of(target_br), '-m', 'Delete target branch.') head_rev += 1 @@ -654,7 +654,6 @@ def up_sw_dir_del_onto_del(sbox): # Adding branch1\dC\D # # Committed revision 4. -@XFail(svntest.main.is_ra_type_dav) @Issue(3314) def up_sw_dir_add_onto_add(sbox): "up/sw dir: add onto add" @@ -777,11 +776,9 @@ def force_del_tc_inside(sbox): main.run_svn(None, 'update', '-r2', wc_dir) # Set a meaningless prop on each dir and file - run_and_verify_svn(None, - ["property 'propname' set on '" + dir + "'\n"], + run_and_verify_svn(["property 'propname' set on '" + dir + "'\n"], [], 'ps', 'propname', 'propval', dir) - run_and_verify_svn(None, - ["property 'propname' set on '" + file + "'\n"], + run_and_verify_svn(["property 'propname' set on '" + file + "'\n"], [], 'ps', 'propname', 'propval', file) # Update WC to HEAD, tree conflicts result dir and file @@ -805,12 +802,10 @@ def force_del_tc_inside(sbox): }) run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, - wc_dir) + check_props=True) # Delete A/C with --force, in effect disarming the tree-conflicts. 
- run_and_verify_svn(None, - verify.UnorderedOutput(['D ' + C + '\n', + run_and_verify_svn(verify.UnorderedOutput(['D ' + C + '\n', 'D ' + dir + '\n', 'D ' + file + '\n']), [], 'delete', C, '--force') @@ -828,8 +823,7 @@ def force_del_tc_inside(sbox): expected_status.remove('A/C') run_and_verify_commit(wc_dir, - expected_output, expected_status, None, - wc_dir) + expected_output, expected_status) #---------------------------------------------------------------------- @@ -865,11 +859,9 @@ def force_del_tc_is_target(sbox): main.run_svn(None, 'update', '-r2', wc_dir) # Set a meaningless prop on each dir and file - run_and_verify_svn(None, - ["property 'propname' set on '" + dir + "'\n"], + run_and_verify_svn(["property 'propname' set on '" + dir + "'\n"], [], 'ps', 'propname', 'propval', dir) - run_and_verify_svn(None, - ["property 'propname' set on '" + file + "'\n"], + run_and_verify_svn(["property 'propname' set on '" + file + "'\n"], [], 'ps', 'propname', 'propval', file) # Update WC to HEAD, tree conflicts result dir and file @@ -893,12 +885,10 @@ def force_del_tc_is_target(sbox): }) run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, - wc_dir) + check_props=True) # Delete nodes with --force, in effect disarming the tree-conflicts. 
- run_and_verify_svn(None, - ['D ' + dir + '\n', + run_and_verify_svn(['D ' + dir + '\n', 'D ' + file + '\n'], [], 'delete', dir, file, '--force') @@ -911,8 +901,7 @@ def force_del_tc_is_target(sbox): expected_output = wc.State(wc_dir, {}) run_and_verify_commit(wc_dir, - expected_output, expected_status, None, - wc_dir) + expected_output, expected_status) #---------------------------------------------------------------------- @@ -945,8 +934,7 @@ def query_absent_tree_conflicted_dir(sbox): main.run_svn(None, 'update', '-r2', wc_dir) # Set a meaningless prop on A/C/C - run_and_verify_svn(None, - ["property 'propname' set on '" + C_C_path + "'\n"], + run_and_verify_svn(["property 'propname' set on '" + C_C_path + "'\n"], [], 'ps', 'propname', 'propval', C_C_path) # Update WC to HEAD, a tree conflict results on A/C/C because of the @@ -964,12 +952,10 @@ def query_absent_tree_conflicted_dir(sbox): treeconflict='C')}) run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, - wc_dir) + check_props=True) # Delete A/C with --keep-local. 
- run_and_verify_svn(None, - verify.UnorderedOutput(['D ' + C_C_path + '\n', + run_and_verify_svn(verify.UnorderedOutput(['D ' + C_C_path + '\n', 'D ' + C_path + '\n']), [], 'delete', C_path, '--keep-local') @@ -996,7 +982,7 @@ def query_absent_tree_conflicted_dir(sbox): run_and_verify_status(C_C_path, expected_output) # using info: - run_and_verify_svn(None, None, ".*W155010.*The node.*was not found.*", + run_and_verify_svn(None, ".*W155010.*The node.*was not found.*", 'info', C_C_path) #---------------------------------------------------------------------- @@ -1008,7 +994,7 @@ def up_add_onto_add_revert(sbox): sbox.build() wc_dir = sbox.wc_dir wc2_dir = sbox.add_wc_path('wc2') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc2_dir) file1 = os.path.join(wc_dir, 'newfile') @@ -1023,7 +1009,7 @@ def up_add_onto_add_revert(sbox): main.run_svn(None, 'cp', os.path.join(wc_dir, 'A/C'), dir1) main.run_svn(None, 'cp', os.path.join(wc2_dir, 'A/C'), dir2) - main.run_svn(None, 'ci', wc_dir, '-m', 'Added file') + sbox.simple_commit(message='Added file') expected_disk = main.greek_state.copy() expected_disk.add({ @@ -1039,8 +1025,7 @@ def up_add_onto_add_revert(sbox): run_and_verify_update(wc2_dir, None, expected_disk, expected_status, - None, None, None, None, None, 1, - wc2_dir) + check_props=True) # Currently (r927086), this removes dir2 and file2 in a way that # they don't reappear after update. @@ -1057,8 +1042,7 @@ def up_add_onto_add_revert(sbox): # the repository run_and_verify_update(wc2_dir, None, expected_disk, expected_status, - None, None, None, None, None, 1, - wc2_dir) + check_props=True) #---------------------------------------------------------------------- @@ -1080,12 +1064,12 @@ def lock_update_only(sbox): file_path_b = os.path.join(wc_b, fname) # Lock a file as wc_author, and schedule the file for deletion. 
- svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', file_path) svntest.main.run_svn(None, 'delete', file_path) # In our other working copy, steal that lock. - svntest.actions.run_and_verify_svn(None, ".*locked by user", [], 'lock', + svntest.actions.run_and_verify_svn(".*locked by user", [], 'lock', '-m', '', '--force', file_path) # Now update the first working copy. It should appear as a no-op. @@ -1095,8 +1079,7 @@ def lock_update_only(sbox): expected_status.tweak('iota', status='D ', writelocked='K') run_and_verify_update(wc_dir, None, expected_disk, expected_status, - None, None, None, None, None, 1, - wc_dir) + check_props=True) #---------------------------------------------------------------------- @@ -1171,92 +1154,95 @@ def actual_only_node_behaviour(sbox): # add expected_stdout = None expected_stderr = ".*foo.*is an existing item in conflict.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "add", foo_path) # add (with an existing obstruction of foo) svntest.main.file_write(foo_path, "This is an obstruction of foo.\n") expected_stdout = None expected_stderr = ".*foo.*is an existing item in conflict.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "add", foo_path) os.remove(foo_path) # remove obstruction # blame (praise, annotate, ann) expected_stdout = None expected_stderr = ".*foo.*not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "blame", foo_path) # cat expected_stdout = None expected_stderr = ".*foo.*not under version control.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "cat", foo_path) # cat -rBASE expected_stdout = None expected_stderr = ".*foo.*not under version 
control.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "cat", "-r", "BASE", foo_path) # changelist (cl) expected_stdout = None expected_stderr = ".*svn: warning: W155010: The node '.*foo' was not found." - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "changelist", "my_changelist", foo_path) # checkout (co) ### this does not error out -- needs review expected_stdout = None expected_stderr = [] - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "checkout", A_copy_url, foo_path) ### for now, ignore the fact that checkout succeeds and remove the nested ### working copy so we can test more commands - shutil.rmtree(foo_path) + def onerror(function, path, execinfo): + os.chmod(path, stat.S_IREAD | stat.S_IWRITE) + os.remove(path) + shutil.rmtree(foo_path, onerror=onerror) # cleanup expected_stdout = None expected_stderr = ".*foo.*is not a working copy directory" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "cleanup", foo_path) # commit (ci) expected_stdout = None expected_stderr = ".*foo.*remains in conflict.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "commit", foo_path) # copy (cp) expected_stdout = None expected_stderr = ".*foo.*does not exist.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "copy", foo_path, foo_path + ".copy") # delete (del, remove, rm) expected_stdout = None expected_stderr = ".*foo.*is not under version control.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "delete", foo_path) # diff (di) expected_stdout = None - expected_stderr = ".*foo.*is not under version 
control.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + expected_stderr = ".*E155.*foo.*was not found.*" + run_and_verify_svn(expected_stdout, expected_stderr, "diff", foo_path) # export expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "export", foo_path, sbox.get_tempname()) # import expected_stdout = None expected_stderr = ".*(foo.*does not exist|Can't stat.*foo).*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "import", '-m', svntest.main.make_log_msg(), foo_path, sbox.repo_url + '/foo_imported') # info expected_info = { - 'Tree conflict': 'local file missing, incoming file edit upon merge.*', + 'Tree conflict': 'local missing or deleted or moved away, incoming file edit upon merge.*', 'Name': 'foo', 'Schedule': 'normal', 'Node Kind': 'none', @@ -1267,42 +1253,42 @@ def actual_only_node_behaviour(sbox): # list (ls) expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "list", foo_path) # lock expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "lock", foo_path) # log expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "log", foo_path) # merge # note: this is intentionally a no-op merge that does not record mergeinfo expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "merge", '--ignore-ancestry', '-c', '4', A_copy_url + '/mu', foo_path) 
# mergeinfo expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "mergeinfo", A_copy_url + '/foo', foo_path) # mkdir expected_stdout = None expected_stderr = ".*foo.*is an existing item in conflict.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "mkdir", foo_path) # move (mv, rename, ren) expected_stdout = None expected_stderr = ".*foo.*does not exist.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "move", foo_path, foo_path + ".moved") # patch expected_stdout = None @@ -1319,43 +1305,43 @@ def actual_only_node_behaviour(sbox): for line in patch_data: f.write(line) f.close() - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "patch", patch_path, sbox.ospath("A/foo")) # propdel (pdel, pd) expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "propdel", "svn:eol-style", foo_path) # propget (pget, pg) expected_stdout = None expected_stderr = ".*foo.*is not under version control.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "propget", "svn:eol-style", foo_path) # proplist (plist, pl) expected_stdout = None expected_stderr = ".*foo.*is not under version control.*" - svntest.actions.run_and_verify_svn(None, expected_stdout, expected_stderr, + svntest.actions.run_and_verify_svn(expected_stdout, expected_stderr, "proplist", foo_path) # propset (pset, ps) expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "propset", 
"svn:eol-style", "native", foo_path) # relocate expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "relocate", A_copy_url + "/foo", foo_path) # resolve expected_stdout = "Resolved conflicted state of.*foo.*" expected_stderr = [] - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "resolve", "--accept", "working", foo_path) # revert the entire working copy and repeat the merge so we can test @@ -1367,7 +1353,7 @@ def actual_only_node_behaviour(sbox): # revert expected_stdout = "Reverted.*foo.*" expected_stderr = [] - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "revert", foo_path) # revert the entire working copy and repeat the merge so we can test @@ -1379,7 +1365,7 @@ def actual_only_node_behaviour(sbox): # revert expected_stdout = "Reverted.*foo.*" expected_stderr = [] - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "revert", "-R", foo_path) # revert the entire working copy and repeat the merge so we can test @@ -1397,13 +1383,13 @@ def actual_only_node_behaviour(sbox): # switch (sw) expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "switch", A_copy_url + "/foo", foo_path) # unlock expected_stdout = None expected_stderr = ".*foo.*was not found.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "unlock", foo_path) # update (up) @@ -1413,13 +1399,13 @@ def actual_only_node_behaviour(sbox): expected_stdout = [ "Updating '" + foo_path + "':\n", "At revision 4.\n"] expected_stderr = [] - run_and_verify_svn(None, expected_stdout, expected_stderr, 
+ run_and_verify_svn(expected_stdout, expected_stderr, "update", foo_path) # upgrade expected_stdout = None expected_stderr = ".*Can't upgrade.*foo.*" - run_and_verify_svn(None, expected_stdout, expected_stderr, + run_and_verify_svn(expected_stdout, expected_stderr, "upgrade", foo_path) #---------------------------------------------------------------------- @@ -1443,18 +1429,80 @@ def update_dir_with_not_present(sbox): sbox.simple_rm('A/B') # We can't commit this without updating (ra_svn produces its own error) - run_and_verify_svn(None, None, + run_and_verify_svn(None, "svn: (E155011|E160028|E170004): (Dir|Item).*B.*out of date", 'ci', '-m', '', wc_dir) # So we run update - run_and_verify_svn(None, None, [], + run_and_verify_svn(None, [], 'up', wc_dir) # And now we can commit - run_and_verify_svn(None, None, [], + run_and_verify_svn(None, [], 'ci', '-m', '', wc_dir) +def update_delete_mixed_rev(sbox): + "update that deletes mixed-rev" + + sbox.build() + wc_dir = sbox.wc_dir + sbox.simple_move('A/B/E/alpha', 'A/B/E/alpha2') + sbox.simple_commit() + sbox.simple_update() + sbox.simple_rm('A/B') + sbox.simple_commit() + sbox.simple_update(revision=1) + sbox.simple_update(target='A/B/E', revision=2) + sbox.simple_mkdir('A/B/E2') + + # Update raises a tree conflict on A/B due to local mod A/B/E2 + expected_output = wc.State(wc_dir, { + 'A/B' : Item(status=' ', treeconflict='C'), + }) + expected_disk = main.greek_state.copy() + expected_disk.add({ + 'A/B/E2' : Item(), + 'A/B/E/alpha2' : Item(contents='This is the file \'alpha\'.\n'), + }) + expected_disk.remove('A/B/E/alpha') + expected_status = get_virginal_state(wc_dir, 3) + expected_status.remove('A/B/E/alpha') + expected_status.add({ + 'A/B/E2' : Item(status='A ', wc_rev='-'), + 'A/B/E/alpha2' : Item(status=' ', copied='+', wc_rev='-'), + }) + expected_status.tweak('A/B', + status='A ', copied='+', treeconflict='C', wc_rev='-') + expected_status.tweak('A/B/F', 'A/B/E', 'A/B/E/beta', 'A/B/lambda', + copied='+', 
wc_rev='-') + + # The entries world doesn't see a changed revision as another add + # while the WC-NG world does... + expected_status.tweak('A/B/E', status='A ', entry_status=' ') + run_and_verify_update(wc_dir, + expected_output, expected_disk, expected_status, + check_props=True) + + # Resolving to working state should give a mixed-revision copy that + # gets committed as multiple copies + run_and_verify_resolved([sbox.ospath('A/B')], sbox.ospath('A/B')) + expected_output = wc.State(wc_dir, { + 'A/B' : Item(verb='Adding'), + 'A/B/E' : Item(verb='Replacing'), + 'A/B/E2' : Item(verb='Adding'), + }) + expected_status.tweak('A/B', 'A/B/E', 'A/B/E2', 'A/B/F', 'A/B/E/alpha2', + 'A/B/E/beta', 'A/B/lambda', + status=' ', wc_rev=4, copied=None, treeconflict=None) + run_and_verify_commit(wc_dir, + expected_output, expected_status) + + expected_info = { + 'Name': 'alpha2', + 'Node Kind': 'file', + } + run_and_verify_info([expected_info], sbox.repo_url + '/A/B/E/alpha2') + ####################################################################### # Run the tests @@ -1485,6 +1533,7 @@ test_list = [ None, at_directory_external, actual_only_node_behaviour, update_dir_with_not_present, + update_delete_mixed_rev, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/update_tests.py b/subversion/tests/cmdline/update_tests.py index 8a6c31c..4bcab46 100755 --- a/subversion/tests/cmdline/update_tests.py +++ b/subversion/tests/cmdline/update_tests.py @@ -34,8 +34,8 @@ logger = logging.getLogger() # Our testing module import svntest from svntest import wc, actions, verify, deeptrees -from merge_tests import expected_merge_output -from merge_tests import set_up_branch +from svntest.mergetrees import expected_merge_output +from svntest.mergetrees import set_up_branch # (abbreviation) Skip = svntest.testcase.Skip_deco @@ -55,50 +55,6 @@ from svntest.main import SVN_PROP_MERGEINFO, server_has_mergeinfo # Each test must return on success or raise on failure. 
-#---------------------------------------------------------------------- - -# Helper for update_binary_file() test -- a custom singleton handler. -def detect_extra_files(node, extra_files): - """NODE has been discovered as an extra file on disk. Verify that - it matches one of the regular expressions in the EXTRA_FILES list of - lists, and that its contents matches the second part of the list - item. If it matches, remove the match from the list. If it doesn't - match, raise an exception.""" - - # Baton is of the form: - # - # [ [wc_dir, pattern, contents], - # [wc_dir, pattern, contents], ... ] - - for fdata in extra_files: - wc_dir = fdata[0] - pattern = fdata[1] - contents = None - if len(fdata) > 2: - contents = fdata[2] - match_obj = re.match(pattern, node.name) - if match_obj: - if contents is None: - return - else: - # Strip the root_node_name from node path - # (svntest.tree.root_node_name, currently `__SVN_ROOT_NODE'), - # since it doesn't really exist. Also strip the trailing "slash". - real_path = node.path - if real_path.startswith(svntest.tree.root_node_name): - real_path = real_path[len(svntest.tree.root_node_name) + - len(os.sep) :] - real_path = os.path.join(wc_dir, real_path) - - real_contents = open(real_path).read() - if real_contents == contents: - extra_files.pop(extra_files.index(fdata)) # delete pattern from list - return - - logger.warn("Found unexpected object: %s", node.name) - raise svntest.tree.SVNTreeUnequal - - def update_binary_file(sbox): "update a locally-modified binary file" @@ -127,7 +83,7 @@ def update_binary_file(sbox): # Commit the new binary file, creating revision 2. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Make a backup copy of the working copy. wc_backup = sbox.add_wc_path('backup') @@ -151,7 +107,7 @@ def update_binary_file(sbox): # Commit original working copy again, creating revision 3. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now start working in the backup working copy: @@ -178,13 +134,7 @@ def update_binary_file(sbox): 'A/theta' : Item(status='C ', wc_rev=3), }) - # Extra 'singleton' files we expect to exist after the update. - # In the case, the locally-modified binary file should be backed up - # to an .orig file. - # This is a list of lists, of the form [ WC_DIR, - # [pattern, contents], ...] - extra_files = [[wc_backup, 'theta.*\.r2', theta_contents], - [wc_backup, 'theta.*\.r3', theta_contents_r3]] + extra_files = ['theta.r2', 'theta.r3'] # Do the update and check the results in three ways. Pass our # custom singleton handler to verify the .orig file; this handler @@ -194,15 +144,8 @@ def update_binary_file(sbox): expected_output, expected_disk, expected_status, - None, - detect_extra_files, extra_files, - None, None, 1) - - # verify that the extra_files list is now empty. - if len(extra_files) != 0: - logger.warn("Not all extra reject files have been accounted for:") - logger.warn(extra_files) - raise svntest.Failure + [], True, + extra_files=extra_files) #---------------------------------------------------------------------- @@ -249,7 +192,7 @@ def update_binary_file_2(sbox): # Commit the new binary filea, creating revision 2. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Make some mods to the binary files. svntest.main.file_append(theta_path, "foobar") @@ -272,7 +215,7 @@ def update_binary_file_2(sbox): # Commit original working copy again, creating revision 3. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create expected output tree for an update to rev 2. 
expected_output = svntest.wc.State(wc_dir, { @@ -303,8 +246,7 @@ def update_binary_file_2(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '2', wc_dir) @@ -340,7 +282,7 @@ def update_binary_file_3(sbox): # Commit the new binary file, creating revision 2. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Make some mods to the binary files. svntest.main.file_append(theta_path, "foobar") @@ -359,7 +301,7 @@ def update_binary_file_3(sbox): # Commit modified working copy, creating revision 3. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now we locally modify the file back to the old version. svntest.main.file_write(theta_path, theta_contents, 'wb') @@ -389,8 +331,7 @@ def update_binary_file_3(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '2', wc_dir) #---------------------------------------------------------------------- @@ -443,7 +384,7 @@ def update_missing(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, mu_path, rho_path, E_path, H_path) @@ -531,8 +472,7 @@ def update_to_rev_zero(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - None, None, None, None, 0, + None, [], False, '-r', '0', wc_dir) #---------------------------------------------------------------------- @@ -581,7 +521,7 @@ def receive_overlapping_same_change(sbox): # Commit the change, creating revision 2. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Expected output tree for update of other_wc. expected_output = svntest.wc.State(other_wc, { @@ -644,7 +584,7 @@ def update_to_resolve_text_conflicts(sbox): # Commit. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create expected output tree for an update of the wc_backup. expected_output = svntest.wc.State(wc_backup, { @@ -658,6 +598,7 @@ def update_to_resolve_text_conflicts(sbox): contents="\n".join(["This is the file 'mu'.", "<<<<<<< .mine", "Conflicting appended text for mu", + "||||||| .r1", "=======", "Original appended text for mu", ">>>>>>> .r2", @@ -666,6 +607,7 @@ def update_to_resolve_text_conflicts(sbox): contents="\n".join(["This is the file 'rho'.", "<<<<<<< .mine", "Conflicting appended text for rho", + "||||||| .r1", "=======", "Original appended text for rho", ">>>>>>> .r2", @@ -687,15 +629,7 @@ def update_to_resolve_text_conflicts(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files) - - - # verify that the extra_files list is now empty. - if len(extra_files) != 0: - logger.warn("didn't get expected extra files") - raise svntest.Failure + extra_files=extra_files) # remove the conflicting files to clear text conflict but not props conflict os.remove(mu_path_backup) @@ -726,24 +660,24 @@ def update_delete_modified_files(sbox): # Delete a file alpha_path = sbox.ospath('A/B/E/alpha') - svntest.actions.run_and_verify_svn("Deleting alpha failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path) # Delete a directory containing files G_path = sbox.ospath('A/D/G') - svntest.actions.run_and_verify_svn("Deleting G failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', G_path) # Commit - svntest.actions.run_and_verify_svn("Committing deletes failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'log msg', wc_dir) ### Update before backdating to avoid obstructed update error for G - svntest.actions.run_and_verify_svn("Updating after commit failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # 
Backdate to restore deleted items - svntest.actions.run_and_verify_svn("Backdating failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', wc_dir) # Modify the file to be deleted, and a file in the directory to be deleted @@ -807,7 +741,7 @@ def update_after_add_rm_deleted(sbox): # Delete a file and directory from WC alpha_path = sbox.ospath('A/B/E/alpha') F_path = sbox.ospath('A/B/F') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', alpha_path, F_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', alpha_path, F_path) # Commit deletion expected_output = svntest.wc.State(wc_dir, { @@ -820,13 +754,13 @@ def update_after_add_rm_deleted(sbox): expected_status.remove('A/B/F') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # alpha and F are now in state "deleted", next we add a new ones svntest.main.file_append(alpha_path, "new alpha") - svntest.actions.run_and_verify_svn(None, None, [], 'add', alpha_path) + svntest.actions.run_and_verify_svn(None, [], 'add', alpha_path) - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', F_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', F_path) # New alpha and F should be in add state A expected_status.add({ @@ -838,7 +772,7 @@ def update_after_add_rm_deleted(sbox): # Forced removal of new alpha and F must restore "deleted" state - svntest.actions.run_and_verify_svn(None, None, [], 'rm', '--force', + svntest.actions.run_and_verify_svn(None, [], 'rm', '--force', alpha_path, F_path) if os.path.exists(alpha_path) or os.path.exists(F_path): raise svntest.Failure @@ -850,7 +784,7 @@ def update_after_add_rm_deleted(sbox): # Although parent dir is already at rev 1, the "deleted" state will cause # alpha and F to be restored in the WC when updated to rev 1 - svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r', '1', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', wc_dir) 
expected_status.add({ 'A/B/E/alpha' : Item(status=' ', wc_rev=1), @@ -874,7 +808,7 @@ def obstructed_update_alters_wc_props(sbox): # Create a new dir in the repo in prep for creating an obstruction. #print "Adding dir to repo" - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'prep for obstruction', sbox.repo_url + '/A/foo') @@ -905,7 +839,7 @@ def obstructed_update_alters_wc_props(sbox): }) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # Remove the file which caused the obstruction. @@ -949,7 +883,7 @@ def update_replace_dir(sbox): # Delete a directory F_path = sbox.ospath('A/B/F') - svntest.actions.run_and_verify_svn(None, None, [], 'rm', F_path) + svntest.actions.run_and_verify_svn(None, [], 'rm', F_path) # Commit deletion expected_output = svntest.wc.State(wc_dir, { @@ -960,10 +894,10 @@ def update_replace_dir(sbox): expected_status.remove('A/B/F') svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Add replacement directory - svntest.actions.run_and_verify_svn(None, None, [], 'mkdir', F_path) + svntest.actions.run_and_verify_svn(None, [], 'mkdir', F_path) # Commit addition expected_output = svntest.wc.State(wc_dir, { @@ -974,7 +908,7 @@ def update_replace_dir(sbox): expected_status.tweak('A/B/F', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update to HEAD expected_output = svntest.wc.State(wc_dir, { @@ -998,7 +932,7 @@ def update_replace_dir(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, '-r', '1', wc_dir) expected_status = svntest.actions.get_virginal_state(wc_dir, 1) @@ -1028,14 +962,14 @@ def update_single_file(sbox): expected_status.tweak('A/mu', wc_rev=2) 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # At one stage 'svn up file' failed with a parent lock error was_cwd = os.getcwd() os.chdir(sbox.ospath('A')) ### Can't get run_and_verify_update to work having done the chdir. - svntest.actions.run_and_verify_svn("update failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r', '1', 'mu') os.chdir(was_cwd) @@ -1071,7 +1005,7 @@ def prop_update_on_scheduled_delete(sbox): # Commit the change, creating revision 2. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) svntest.main.run_svn(None, 'rm', other_iota_path) @@ -1109,10 +1043,10 @@ def update_receive_illegal_name(sbox): + '/A/D/G/' + svntest.main.get_admin_name()) # Ha! The client doesn't allow us to mkdir a '.svn' but it does # allow us to copy to a '.svn' so ... - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mkdir', '-m', 'log msg', legal_url) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', '-m', 'log msg', legal_url, illegal_url) @@ -1184,8 +1118,8 @@ def update_deleted_missing_dir(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - 0, "-r", "2", E_path, H_path) + [], False, + "-r", "2", E_path, H_path) # Update back to the old revision again svntest.main.run_svn(None, @@ -1205,8 +1139,8 @@ def update_deleted_missing_dir(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, - 0, "-r", "2", wc_dir) + [], False, + "-r", "2", wc_dir) #---------------------------------------------------------------------- @@ -1231,12 +1165,11 @@ def another_hudson_problem(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Delete directory G from the repository - 
svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 3.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 3.\n'], [], 'rm', '-m', 'log msg', sbox.repo_url + '/A/D/G') @@ -1259,8 +1192,7 @@ def another_hudson_problem(sbox): # Sigh, I can't get run_and_verify_update to work (but not because # of issue 919 as far as I can tell) expected_output = svntest.verify.UnorderedOutput(expected_output) - svntest.actions.run_and_verify_svn(None, - expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'up', G_path) # Both G and gamma should be 'deleted', update should produce no output @@ -1299,13 +1231,12 @@ def update_deleted_targets(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, wc_dir) + expected_status) # Explicit update must not remove the 'deleted=true' entries - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(2), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(2), [], 'update', gamma_path) - svntest.actions.run_and_verify_svn(None, exp_noop_up_out(2), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(2), [], 'update', F_path) # Update to r1 to restore items, since the parent directory is already @@ -1323,7 +1254,7 @@ def update_deleted_targets(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, '-r', '1', wc_dir) @@ -1337,8 +1268,8 @@ def new_dir_with_spaces(sbox): wc_dir = sbox.wc_dir # Create a new directory ("spacey dir") directly in repository - svntest.actions.run_and_verify_svn(None, - ['\n', 'Committed revision 2.\n'], [], + svntest.actions.run_and_verify_svn(['Committing transaction...\n', + 'Committed revision 2.\n'], [], 'mkdir', '-m', 'log msg', sbox.repo_url + '/A/spacey%20dir') @@ -1387,8 +1318,7 @@ def non_recursive_update(sbox): expected_status.tweak('A/mu', 'A/D/G/rho', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, 
- expected_status, - None, wc_dir) + expected_status) # Update back to revision 1 expected_output = svntest.wc.State(wc_dir, { @@ -1402,7 +1332,7 @@ def non_recursive_update(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, '-r', '1', wc_dir) # Non-recursive update of A should change A/mu but not A/D/G/rho @@ -1418,7 +1348,7 @@ def non_recursive_update(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, '-N', A_path) #---------------------------------------------------------------------- @@ -1433,9 +1363,9 @@ def checkout_empty_dir(sbox): C_url = sbox.repo_url + '/A/C' svntest.main.safe_rmtree(wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', C_url, wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'checkout', C_url, wc_dir) - svntest.actions.run_and_verify_svn(None, [], [], 'status', wc_dir) + svntest.actions.run_and_verify_svn([], [], 'status', wc_dir) #---------------------------------------------------------------------- @@ -1463,8 +1393,8 @@ def update_to_deletion(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - None, None, None, None, 0, + None, + [], False, '-r', '0', iota_path) # Update the wc root, so iota comes back. @@ -1477,9 +1407,7 @@ def update_to_deletion(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - None, None, None, None, 0, - wc_dir) + None) #---------------------------------------------------------------------- @@ -1494,17 +1422,17 @@ def update_deletion_inside_out(sbox): child_path = os.path.join(parent_path, 'E') # Could be a file, doesn't matter # Delete the parent directory. 
- svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', parent_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', '', wc_dir) # Update back to r1. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', '-r', '1', wc_dir) # Update just the child to r2. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'update', '-r', '2', child_path) # Now try a normal update. @@ -1535,7 +1463,7 @@ def update_schedule_add_dir(sbox): # Delete directory A/D/G in the repository via immediate commit G_path = sbox.ospath('A/D/G') G_url = sbox.repo_url + '/A/D/G' - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', G_url, '-m', 'rev 2') # Update the wc to HEAD (r2) @@ -1557,7 +1485,7 @@ def update_schedule_add_dir(sbox): # Do a URL->wc copy, creating a new schedule-add A/D/G. # (Standard procedure when trying to resurrect the directory.) D_path = sbox.ospath('A/D') - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', G_url + '@1', D_path) # status should now show the dir scheduled for addition-with-history @@ -1571,7 +1499,7 @@ def update_schedule_add_dir(sbox): svntest.actions.run_and_verify_status(wc_dir, expected_status) # Now update with the schedule-add dir as the target. - svntest.actions.run_and_verify_svn(None, None, [], 'up', G_path) + svntest.actions.run_and_verify_svn(None, [], 'up', G_path) # The update should be a no-op, and the schedule-add directory # should still exist! 'svn status' shouldn't change at all. 
@@ -1602,8 +1530,8 @@ def update_to_future_add(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - None, None, None, None, 0, + None, + [], False, '-r', '0', wc_dir) # Update iota to the current HEAD. @@ -1620,8 +1548,8 @@ def update_to_future_add(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - None, None, None, None, 0, + None, + [], False, iota_path) # Now try updating the directory into the future @@ -1654,8 +1582,8 @@ def update_to_future_add(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - None, None, None, None, 0, + None, + [], False, A_path) #---------------------------------------------------------------------- @@ -1688,7 +1616,7 @@ def update_xml_unsafe_dir(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # chdir into the funky path, and update from there. os.chdir(test_path) @@ -1731,7 +1659,7 @@ def conflict_markers_matching_eol(sbox): # Checkout a second working copy wc_backup = sbox.add_wc_path('backup') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc_backup) # set starting revision @@ -1766,7 +1694,7 @@ def conflict_markers_matching_eol(sbox): # Commit the original change and note the 'base' revision number svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) cur_rev = cur_rev + 1 base_rev = cur_rev @@ -1798,6 +1726,7 @@ def conflict_markers_matching_eol(sbox): 'A/mu' : Item(contents= "This is the file 'mu'." 
+ eolchar + "<<<<<<< .mine" + eolchar + "Conflicting appended text for mu" + eolchar + + "||||||| .r" + str(cur_rev - 1) + eolchar + "=======" + eolchar + "Original appended text for mu" + eolchar + ">>>>>>> .r" + str(cur_rev) + eolchar), @@ -1831,10 +1760,7 @@ def conflict_markers_matching_eol(sbox): svntest.actions.run_and_verify_update(wc_backup, expected_backup_output, expected_backup_disk, - expected_backup_status, - None, - None, - None) + expected_backup_status) # cleanup for next run svntest.main.run_svn(None, 'revert', '-R', wc_backup) @@ -1864,7 +1790,7 @@ def update_eolstyle_handling(sbox): # Checkout a second working copy wc_backup = sbox.add_wc_path('backup') - svntest.actions.run_and_verify_svn(None, None, [], 'checkout', + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc_backup) path_backup = os.path.join(wc_backup, 'A', 'mu') @@ -1891,8 +1817,7 @@ def update_eolstyle_handling(sbox): svntest.actions.run_and_verify_update(wc_backup, expected_backup_output, expected_backup_disk, - expected_backup_status, - None, None, None) + expected_backup_status) # Test 2: now change the eol-style property to another value and commit, # update the still changed mu in the second working copy; there should be @@ -1917,8 +1842,7 @@ def update_eolstyle_handling(sbox): svntest.actions.run_and_verify_update(wc_backup, expected_backup_output, expected_backup_disk, - expected_backup_status, - None, None, None) + expected_backup_status) # Test 3: now delete the eol-style property and commit, update the still # changed mu in the second working copy; there should be no conflict! @@ -1942,8 +1866,7 @@ def update_eolstyle_handling(sbox): svntest.actions.run_and_verify_update(wc_backup, expected_backup_output, expected_backup_disk, - expected_backup_status, - None, None, None) + expected_backup_status) # Bug in which "update" put a bogus revision number on a schedule-add file, # causing the wrong version of it to be committed. 
@@ -1961,32 +1884,32 @@ def update_copy_of_old_rev(sbox): url2 = sbox.repo_url + '/A2/mu' # Remember the original text of the file - exit_code, text_r1, err = svntest.actions.run_and_verify_svn(None, None, [], + exit_code, text_r1, err = svntest.actions.run_and_verify_svn(None, [], 'cat', '-r1', url) # Commit a different version of the file svntest.main.file_write(file, "Second revision of 'mu'\n") - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', '', wc_dir) # Copy an old revision of its directory into a new path in the WC - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', '-r1', dir, dir2) # Update. (Should do nothing, but added a bogus "revision" in "entries".) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) # Commit, and check that it says it's committing the right thing exp_out = ['Adding ' + dir2 + '\n', - '\n', + 'Committing transaction...\n', 'Committed revision 3.\n'] - svntest.actions.run_and_verify_svn(None, exp_out, [], + svntest.actions.run_and_verify_svn(exp_out, [], 'ci', '-m', '', wc_dir) # Verify the committed file's content - svntest.actions.run_and_verify_svn(None, text_r1, [], + svntest.actions.run_and_verify_svn(text_r1, [], 'cat', url2) #---------------------------------------------------------------------- @@ -2049,7 +1972,7 @@ def forced_update(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Make a local mod to mu that will merge cleanly. 
backup_mu_path = os.path.join(wc_backup, 'A', 'mu') @@ -2125,7 +2048,7 @@ def forced_update(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, wc_backup, '--force') #---------------------------------------------------------------------- @@ -2164,7 +2087,7 @@ def forced_update_failures(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create an unversioned dir A/B/F/nu that will obstruct the file of the # same name coming from the repository. Create an unversioned file A/C/I @@ -2200,8 +2123,9 @@ def forced_update_failures(sbox): expected_status.tweak('A/B/F', wc_rev='2') actions.run_and_verify_update(wc_backup, expected_output, - expected_disk, expected_status, None, None, None, None, None, False, - '--force', backup_A_B_F) + expected_disk, expected_status, + [], False, + '--force', backup_A_B_F) # A forced update that tries to add a directory when an unversioned file @@ -2243,8 +2167,9 @@ def forced_update_failures(sbox): expected_status.tweak('A/C', 'A/B/F', wc_rev='2') actions.run_and_verify_update(wc_dir_backup, expected_output, - expected_disk, expected_status, None, None, None, None, None, False, - '--force', backup_A_C) + expected_disk, expected_status, + [], False, + '--force', backup_A_C) # rm -rf wc_dir_backup/A/C/I wc_dir_backup/A/B/F/nu os.remove(backup_A_C_I) @@ -2263,8 +2188,7 @@ def forced_update_failures(sbox): expected_status.tweak('A/C/I', 'A/B/F/nu', treeconflict=None) actions.run_and_verify_update(wc_dir_backup, expected_output, - expected_disk, expected_status, None, None, None, None, None, False, - wc_dir_backup) + expected_disk, expected_status) # svn up -r1 wc_dir_backup/A/C expected_output = svntest.wc.State(wc_dir_backup, { @@ -2277,16 +2201,17 @@ def forced_update_failures(sbox): expected_status.tweak('A/C', wc_rev='1') actions.run_and_verify_update(wc_dir_backup, expected_output, - expected_disk, 
expected_status, None, None, None, None, None, False, - '-r1', backup_A_C) + expected_disk, expected_status, + [], False, + '-r1', backup_A_C) # svn co url/A/C/I wc_dir_backup/A/C/I expected_output = svntest.wc.State(wc_dir_backup, {}) expected_disk = svntest.wc.State(wc_dir, {}) - actions.run_and_verify_checkout2(False, url_A_C_I, backup_A_C_I, - expected_output, expected_disk, None, None, None, None) + actions.run_and_verify_checkout(url_A_C_I, backup_A_C_I, + expected_output, expected_disk) # svn up --force wc_dir_backup/A/C expected_output = svntest.wc.State(wc_dir_backup, { @@ -2294,7 +2219,8 @@ def forced_update_failures(sbox): }) actions.run_and_verify_update(wc_dir_backup, expected_output, None, None, - None, None, None, None, None, False, '--force', backup_A_C) + [], False, + '--force', backup_A_C) #---------------------------------------------------------------------- @@ -2346,7 +2272,7 @@ def update_wc_on_windows_drive(sbox): was_cwd = os.getcwd() try: - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'checkout', sbox.repo_url, wc_dir) @@ -2370,7 +2296,7 @@ def update_wc_on_windows_drive(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, + expected_status, [], wc_dir, zeta_path) # Non recursive commit @@ -2392,7 +2318,7 @@ def update_wc_on_windows_drive(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, + expected_status, [], '-N', wc_dir, dir1_path, file1_path) @@ -2414,7 +2340,7 @@ def update_wc_on_windows_drive(sbox): expected_output, expected_disk, expected_status, - None, None, None, None, None, 0, + [], False, '-r', '1', wc_dir) os.chdir(was_cwd) @@ -2443,11 +2369,14 @@ def update_wc_on_windows_drive(sbox): expected_disk.tweak('A/mu', contents = expected_disk.desc['A/mu'].contents + '\nAppended text for mu') - # Use .old_tree() for status to avoid the entries validation + # Create expected status with 
'H:iota' style paths + expected_status_relative = svntest.wc.State('', {}) + expected_status_relative.add_state(wc_dir, expected_status, strict=True) + svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status.old_tree()) + expected_status_relative) finally: os.chdir(was_cwd) @@ -2498,15 +2427,10 @@ def update_wc_with_replaced_file(sbox): expected_disk = svntest.main.greek_state.copy() expected_disk.tweak('iota', contents="") - conflict_files = [] - svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, - None, - svntest.tree.detect_conflict_files, - conflict_files) + expected_status) # Make us a working copy with a 'replace-with-history' file. svntest.main.run_svn(None, 'revert', iota_path) @@ -2523,8 +2447,7 @@ def update_wc_with_replaced_file(sbox): expected_output, expected_disk, expected_status, - None, - None, None, None, None, 0, + [], False, wc_dir, '-r1') svntest.main.run_svn(None, 'rm', iota_path) @@ -2549,15 +2472,10 @@ def update_wc_with_replaced_file(sbox): expected_disk = svntest.main.greek_state.copy() expected_disk.tweak('iota', contents="This is the file 'mu'.\n") - conflict_files = [ ] - svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, - None, - svntest.tree.detect_conflict_files, - conflict_files) + expected_status) #---------------------------------------------------------------------- def update_with_obstructing_additions(sbox): @@ -2670,7 +2588,7 @@ def update_with_obstructing_additions(sbox): # Commit. svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create various paths scheduled for addition which will obstruct # the adds coming from the repos. 
@@ -2737,6 +2655,7 @@ def update_with_obstructing_additions(sbox): 'A/D/H/I/J' : Item(props={'propname1' : 'propval-WC'}), 'A/D/H/I/J/eta' : Item("\n".join(["<<<<<<< .mine", "This is WC file 'eta'", + "||||||| .r0", "=======", "This is REPOS file 'eta'", ">>>>>>> .r2", @@ -2746,6 +2665,7 @@ def update_with_obstructing_additions(sbox): 'A/D/H/I/L' : Item(), 'A/D/kappa' : Item("\n".join(["<<<<<<< .mine", "This is WC file 'kappa'", + "||||||| .r0", "=======", "This is REPOS file 'kappa'", ">>>>>>> .r2", @@ -2753,6 +2673,7 @@ def update_with_obstructing_additions(sbox): props={'propname1' : 'propval-WC'}), 'A/D/epsilon' : Item("\n".join(["<<<<<<< .mine", "This is WC file 'epsilon'", + "||||||| .r0", "=======", "This is REPOS file 'epsilon'", ">>>>>>> .r2", @@ -2791,10 +2712,8 @@ def update_with_obstructing_additions(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files, None, None, 1, - wc_backup) + [], True, + extra_files=extra_files) # Some obstructions are still not permitted: # @@ -2804,7 +2723,7 @@ def update_with_obstructing_additions(sbox): # URL to URL copy of A/D/G to A/M. G_URL = sbox.repo_url + '/A/D/G' M_URL = sbox.repo_url + '/A/M' - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', G_URL, M_URL, '-m', '') # WC to WC copy of A/D/H to A/M, M now scheduled for addition with @@ -2813,13 +2732,13 @@ def update_with_obstructing_additions(sbox): A_path = sbox.ospath('A') M_path = sbox.ospath('A/M') - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', H_path, M_path) # URL to URL copy of A/D/H/omega to omicron. 
omega_URL = sbox.repo_url + '/A/D/H/omega' omicron_URL = sbox.repo_url + '/omicron' - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', omega_URL, omicron_URL, '-m', '') @@ -2828,7 +2747,7 @@ def update_with_obstructing_additions(sbox): chi_path = sbox.ospath('A/D/H/chi') omicron_path = sbox.ospath('omicron') - svntest.actions.run_and_verify_svn("Copy error:", None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', chi_path, omicron_path) @@ -2903,7 +2822,7 @@ def update_with_obstructing_additions(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, A_path) # Resolve the tree conflict. @@ -2922,7 +2841,7 @@ def update_with_obstructing_additions(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, wc_dir, '-N') # Resolve the tree conflict. @@ -2935,7 +2854,7 @@ def update_with_obstructing_additions(sbox): # Again, --force shouldn't matter. svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, omicron_path, '-N', '--force') # Test for issue #2022: Update shouldn't touch conflicted files. @@ -2971,7 +2890,7 @@ def update_conflicted(sbox): expected_status.tweak('iota', 'A/mu', 'A/B/lambda', 'A/D', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Do another change to each path that we will need later. # Also, change a file below A/D in the path. @@ -2990,7 +2909,7 @@ def update_conflicted(sbox): 'A/D/G/pi': Item(verb='Sending')}) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Go back to revision 1. 
expected_output = svntest.wc.State(wc_dir, { @@ -3009,9 +2928,7 @@ def update_conflicted(sbox): expected_output, expected_disk, expected_status, - None, - None, None, - None, None, 1, + [], True, '-r1', wc_dir) # Create modifications conflicting with rev 2. @@ -3033,6 +2950,7 @@ def update_conflicted(sbox): contents="\n".join(["This is the file 'iota'.", "<<<<<<< .mine", "Conflicting appended text for iota", + "||||||| .r1", "=======", "Original appended text for iota", ">>>>>>> .r2", @@ -3041,6 +2959,7 @@ def update_conflicted(sbox): contents="\n".join(["This is the file 'mu'.", "<<<<<<< .mine", "Conflicting appended text for mu", + "||||||| .r1", "=======", "Original appended text for mu", ">>>>>>> .r2", @@ -3053,19 +2972,18 @@ def update_conflicted(sbox): expected_status.tweak('A/B/lambda', 'A/D', status=' C') expected_status.tweak('A/mu', status='CC') - extra_files = [ [wc_dir, 'iota.*\.(r1|r2|mine)'], - [wc_dir, 'mu.*\.(r1|r2|mine|prej)'], - [wc_dir, 'lambda.*\.prej'], - [wc_dir, 'dir_conflicts.prej']] + extra_files = [ 'iota.r1', 'iota.r2', 'iota.mine', + 'mu.r1', 'mu.r2', 'mu.mine', 'mu.prej', + 'lambda.prej', + 'dir_conflicts.prej'] svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, - detect_extra_files, extra_files, - None, None, 1, - '-r2', wc_dir) + [], True, + '-r2', wc_dir, + extra_files=extra_files+[]) # Now, update to HEAD, which should skip all the conflicted files, but # still update the pi file. 
@@ -3093,9 +3011,8 @@ def update_conflicted(sbox): expected_output, expected_disk, expected_status, - None, - detect_extra_files, extra_files, - None, None, 1) + [], True, + extra_files=extra_files) #---------------------------------------------------------------------- @SkipUnless(server_has_mergeinfo) @@ -3128,7 +3045,7 @@ def mergeinfo_update_elision(sbox): "Checked out revision 1.\n", "A " + B_COPY_path + "\n", ]) - svntest.actions.run_and_verify_svn(None, expected_stdout, [], 'copy', + svntest.actions.run_and_verify_svn(expected_stdout, [], 'copy', sbox.repo_url + "/A/B", B_COPY_path) expected_output = wc.State(wc_dir, {'A/B_COPY' : Item(verb='Adding')}) @@ -3144,9 +3061,7 @@ def mergeinfo_update_elision(sbox): svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, - None, - wc_dir) + expected_status) # Make some changes under A/B @@ -3158,7 +3073,7 @@ def mergeinfo_update_elision(sbox): expected_status.tweak('A/B/E/beta', wc_rev=3) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r4 - modify and commit A/B/lambda svntest.main.file_write(lambda_path, "New content") @@ -3168,7 +3083,7 @@ def mergeinfo_update_elision(sbox): expected_status.tweak('A/B/lambda', wc_rev=4) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # r5 - modify and commit A/B/E/alpha svntest.main.file_write(alpha_path, "New content") @@ -3178,7 +3093,7 @@ def mergeinfo_update_elision(sbox): expected_status.tweak('A/B/E/alpha', wc_rev=5) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Merge r2:5 into A/B_COPY expected_output = wc.State(B_COPY_path, { @@ -3222,8 +3137,7 @@ def mergeinfo_update_elision(sbox): expected_merge_disk, expected_merge_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # r6 - Commit the merge expected_output = 
wc.State(wc_dir, @@ -3238,7 +3152,7 @@ def mergeinfo_update_elision(sbox): expected_status.tweak('A/B_COPY/lambda', wc_rev=6) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update WC back to r5, A/COPY_B is at it's pre-merge state again expected_output = wc.State(wc_dir, @@ -3266,8 +3180,7 @@ def mergeinfo_update_elision(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '5', wc_dir) # Merge r2:5 to A/B_COPY/E/alpha @@ -3278,8 +3191,7 @@ def mergeinfo_update_elision(sbox): # run_and_verify_merge doesn't support merging to a file WCPATH # so use run_and_verify_svn. - svntest.actions.run_and_verify_svn(None, - expected_merge_output([[3,5]], + svntest.actions.run_and_verify_svn(expected_merge_output([[3,5]], ['U ' + alpha_COPY_path + '\n', ' U ' + alpha_COPY_path + '\n']), [], 'merge', '-r2:5', @@ -3294,7 +3206,7 @@ def mergeinfo_update_elision(sbox): svntest.actions.run_and_verify_status(alpha_COPY_path, expected_alpha_status) - svntest.actions.run_and_verify_svn(None, ["/A/B/E/alpha:3-5\n"], [], + svntest.actions.run_and_verify_svn(["/A/B/E/alpha:3-5\n"], [], 'propget', SVN_PROP_MERGEINFO, alpha_COPY_path) @@ -3321,8 +3233,7 @@ def mergeinfo_update_elision(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + [], True) # Now test that an updated target's mergeinfo can itself elide. # r7 - modify and commit A/B/E/alpha @@ -3333,10 +3244,10 @@ def mergeinfo_update_elision(sbox): expected_status.tweak('A/B/E/alpha', 'A/B_COPY/E/alpha', status=' ', wc_rev=7) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update A to get all paths to the same working revision. 
- svntest.actions.run_and_verify_svn(None, exp_noop_up_out(7), [], + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'up', wc_dir) # Merge r6:7 into A/B_COPY/E @@ -3375,12 +3286,10 @@ def mergeinfo_update_elision(sbox): expected_merge_disk, expected_merge_status, expected_skip, - None, None, None, None, - None, 1) + check_props=True) # r8 - Commit the merge - svntest.actions.run_and_verify_svn(None, - exp_noop_up_out(7), + svntest.actions.run_and_verify_svn(exp_noop_up_out(7), [], 'update', wc_dir) expected_output = wc.State(wc_dir, @@ -3391,7 +3300,7 @@ def mergeinfo_update_elision(sbox): expected_status.tweak('A/B_COPY/E', 'A/B_COPY/E/alpha', wc_rev=8) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Update A/COPY_B/E back to r7 expected_output = wc.State(wc_dir, { @@ -3410,8 +3319,7 @@ def mergeinfo_update_elision(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '7', E_COPY_path) # Merge r6:7 to A/B_COPY @@ -3456,8 +3364,7 @@ def mergeinfo_update_elision(sbox): expected_merge_disk, expected_merge_status, expected_skip, - None, None, None, None, - None, 1,alpha_COPY_path) + [], True, True) # Update just A/B_COPY/E. 
The mergeinfo (r3-5,7) reset on # A/B_COPY/E by the udpate is identical to the local info on @@ -3483,8 +3390,8 @@ def mergeinfo_update_elision(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, E_COPY_path) + [], True, + E_COPY_path) #---------------------------------------------------------------------- @@ -3518,11 +3425,11 @@ def update_copied_from_replaced_and_changed(sbox): fn3_path = sbox.ospath(fn3_relpath) # Move fn2 to fn1 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', fn2_path, fn1_path) # Move fn3 to fn2 - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'mv', fn3_path, fn2_path) # Commit that change, creating r2. @@ -3540,7 +3447,7 @@ def update_copied_from_replaced_and_changed(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Modify fn2. fn2_final_contents = "I have new contents for the middle file." @@ -3559,7 +3466,7 @@ def update_copied_from_replaced_and_changed(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Go back to r1. expected_output = svntest.wc.State(wc_dir, { @@ -3575,8 +3482,8 @@ def update_copied_from_replaced_and_changed(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - None, None, - None, None, None, None, 0, + None, + [], False, '-r', '1', wc_dir) # And back up to 3 again. 
@@ -3600,9 +3507,7 @@ def update_copied_from_replaced_and_changed(sbox): svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, - None, None, None, None, 0, - wc_dir) + expected_status) #---------------------------------------------------------------------- # Regression test: ra_neon assumes that you never delete a property on @@ -3616,7 +3521,7 @@ def update_copied_and_deleted_prop(sbox): iota2_path = sbox.ospath('iota2') # Add a property on iota - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propset', 'foo', 'bar', iota_path) # Commit that change, creating r2. expected_output = svntest.wc.State(wc_dir, { @@ -3627,12 +3532,12 @@ def update_copied_and_deleted_prop(sbox): expected_status_mixed.tweak('iota', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status_mixed, None, wc_dir) + expected_status_mixed) # Copy iota to iota2 and delete the property on it. - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'copy', iota_path, iota2_path) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'propdel', 'foo', iota2_path) # Commit that change, creating r3. @@ -3645,7 +3550,7 @@ def update_copied_and_deleted_prop(sbox): }) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status_mixed, None, wc_dir) + expected_status_mixed) # Update the whole wc, verifying disk as well. expected_output = svntest.wc.State(wc_dir, { }) @@ -3679,8 +3584,7 @@ def update_copied_and_deleted_prop(sbox): expected_output, expected_disk_r2, expected_status_r2, - None, None, None, None, None, - True, + [], True, "-r2", wc_dir) # And finally, back to r3, getting an add-with-history-and-property-deleted @@ -3795,7 +3699,7 @@ def update_accept_conflicts(sbox): # Commit. 
svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Now we'll update each of our 5 files in wc_backup; each one will get # conflicts, and we'll handle each with a different --accept option. @@ -3806,16 +3710,14 @@ def update_accept_conflicts(sbox): # iota: no accept option # Just leave the conflicts alone, since run_and_verify_svn already uses # the --non-interactive option. - svntest.actions.run_and_verify_svn(None, - update_output_with_conflicts( + svntest.actions.run_and_verify_svn(update_output_with_conflicts( 2, iota_path_backup), [], 'update', iota_path_backup) # lambda: --accept=postpone # Just leave the conflicts alone. - svntest.actions.run_and_verify_svn(None, - update_output_with_conflicts( + svntest.actions.run_and_verify_svn(update_output_with_conflicts( 2, lambda_path_backup), [], 'update', '--accept=postpone', @@ -3823,8 +3725,7 @@ def update_accept_conflicts(sbox): # mu: --accept=base # Accept the pre-update base file. - svntest.actions.run_and_verify_svn(None, - update_output_with_conflicts_resolved( + svntest.actions.run_and_verify_svn(update_output_with_conflicts_resolved( 2, mu_path_backup), [], 'update', '--accept=base', @@ -3832,8 +3733,7 @@ def update_accept_conflicts(sbox): # alpha: --accept=mine # Accept the user's working file. - svntest.actions.run_and_verify_svn(None, - update_output_with_conflicts_resolved( + svntest.actions.run_and_verify_svn(update_output_with_conflicts_resolved( 2, alpha_path_backup), [], 'update', '--accept=mine-full', @@ -3841,8 +3741,7 @@ def update_accept_conflicts(sbox): # beta: --accept=theirs # Accept their file. - svntest.actions.run_and_verify_svn(None, - update_output_with_conflicts_resolved( + svntest.actions.run_and_verify_svn(update_output_with_conflicts_resolved( 2, beta_path_backup), [], 'update', '--accept=theirs-full', @@ -3852,8 +3751,7 @@ def update_accept_conflicts(sbox): # Run editor and accept the edited file. 
The merge tool will leave # conflicts in place, so expect a message on stderr, but expect # svn to exit with an exit code of 0. - svntest.actions.run_and_verify_svn2(None, - update_output_with_conflicts_resolved( + svntest.actions.run_and_verify_svn2(update_output_with_conflicts_resolved( 2, pi_path_backup), "system(.*) returned.*", 0, 'update', '--accept=edit', @@ -3862,8 +3760,7 @@ def update_accept_conflicts(sbox): # rho: --accept=launch # Run the external merge tool, it should leave conflict markers in place. - svntest.actions.run_and_verify_svn(None, - update_output_with_conflicts( + svntest.actions.run_and_verify_svn(update_output_with_conflicts( 2, rho_path_backup), [], 'update', '--accept=launch', @@ -3876,12 +3773,14 @@ def update_accept_conflicts(sbox): expected_disk.tweak('iota', contents=("This is the file 'iota'.\n" '<<<<<<< .mine\n' 'My appended text for iota\n' + '||||||| .r1\n' '=======\n' 'Their appended text for iota\n' '>>>>>>> .r2\n')) expected_disk.tweak('A/B/lambda', contents=("This is the file 'lambda'.\n" '<<<<<<< .mine\n' 'My appended text for lambda\n' + '||||||| .r1\n' '=======\n' 'Their appended text for lambda\n' '>>>>>>> .r2\n')) @@ -3893,6 +3792,7 @@ def update_accept_conflicts(sbox): expected_disk.tweak('A/D/G/pi', contents=("This is the file 'pi'.\n" '<<<<<<< .mine\n' 'My appended text for pi\n' + '||||||| .r1\n' '=======\n' 'Their appended text for pi\n' '>>>>>>> .r2\n' @@ -3900,6 +3800,7 @@ def update_accept_conflicts(sbox): expected_disk.tweak('A/D/G/rho', contents=("This is the file 'rho'.\n" '<<<<<<< .mine\n' 'My appended text for rho\n' + '||||||| .r1\n' '=======\n' 'Their appended text for rho\n' '>>>>>>> .r2\n' @@ -3931,112 +3832,7 @@ def update_accept_conflicts(sbox): expected_output, expected_disk, expected_status, - None, - svntest.tree.detect_conflict_files, - extra_files) - -# Test for a wc corruption race condition (possibly introduced in -# r863416) which is easy to trigger if interactive conflict resolution -# dies 
in the middle of prompting. Specifically, we run an update -# with interactive-conflicts on but close stdin immediately, so the -# prompt errors out; then the dir_baton pool cleanup handlers in the -# WC update editor flush and run incomplete logs and lead to WC -# corruption, detectable by another update command. - -# FIXME: With issue #4280 fixed and this test using --force-interactive, -# the test driver can no longer redirect terminal input to cause -# an EOF. Consequently, skip this test so that it does not hang -# the test suite. -@Skip() -def eof_in_interactive_conflict_resolver(sbox): - "eof in interactive resolution can't break wc" - - sbox.build() - wc_dir = sbox.wc_dir - - # Set up a custom config directory which *doesn't* turn off - # interactive resolution - config_contents = '''\ -[auth] -password-stores = - -[miscellany] -interactive-conflicts = true -''' - tmp_dir = os.path.abspath(svntest.main.temp_dir) - config_dir = os.path.join(tmp_dir, 'interactive-conflicts-config') - svntest.main.create_config_dir(config_dir, config_contents) - - iota_path = sbox.ospath('iota') - - # Modify iota and commit for r2. - svntest.main.file_append(iota_path, "Appended text in r2.\n") - expected_output = svntest.wc.State(wc_dir, { - 'iota': Item(verb="Sending"), - }) - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - expected_status.tweak('iota', wc_rev=2) - svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) - - # Go back to revision 1. - expected_output = svntest.wc.State(wc_dir, { - 'iota' : Item(status='U '), - }) - - expected_disk = svntest.main.greek_state.copy() - - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - - svntest.actions.run_and_verify_update(wc_dir, - expected_output, - expected_disk, - expected_status, - None, - None, None, - None, None, 1, - '-r1', wc_dir) - - # Modify iota differently and try to update *with the interactive - # resolver*. 
### The parser won't go so well with the output - svntest.main.file_append(iota_path, "Local mods to r1 text.\n") - svntest.actions.run_and_verify_update( - wc_dir, None, None, None, - "End of file while reading from terminal", - None, None, None, None, 1, - wc_dir, '--force-interactive', '--config-dir', config_dir) - - # Now update -r1 again. Hopefully we don't get a checksum error! - expected_output = svntest.wc.State(wc_dir, { - 'iota': Item(verb="Skipped"), - }) - - # The interactive callback aborts, so the file remains in conflict. - expected_disk.tweak('iota', contents="This is the file 'iota'.\n" - "<<<<<<< .mine\n" - "Local mods to r1 text.\n" - "=======\n" - "Appended text in r2.\n" - ">>>>>>> .r2\n"), - expected_disk.add({ - 'iota.r1' : Item(contents="This is the file 'iota'.\n"), - 'iota.r2' : Item(contents="This is the file 'iota'.\n" - "Appended text in r2.\n"), - 'iota.mine' : Item(contents="This is the file 'iota'.\n" - "Local mods to r1 text.\n"), - }) - - expected_status = svntest.actions.get_virginal_state(wc_dir, 1) - expected_status.tweak('iota', status='C ', wc_rev=2) - - svntest.actions.run_and_verify_update(wc_dir, - expected_output, - expected_disk, - expected_status, - None, - None, None, - None, None, 1, - '-r1', wc_dir) + extra_files=extra_files) #---------------------------------------------------------------------- @@ -4055,11 +3851,11 @@ def update_uuid_changed(sbox): uuid_before = svntest.actions.get_wc_uuid(wc_dir) # Change repository's uuid. - svntest.actions.run_and_verify_svnadmin(None, None, [], + svntest.actions.run_and_verify_svnadmin(None, [], 'setuuid', repo_dir) # 'update' detected the new uuid... - svntest.actions.run_and_verify_svn(None, None, '.*UUID.*', + svntest.actions.run_and_verify_svn(None, '.*UUID.*', 'update', wc_dir) # ...and didn't overwrite the old uuid. 
@@ -4094,7 +3890,7 @@ def restarted_update_should_delete_dir_prop(sbox): expected_status.tweak('A', wc_rev=2) svntest.actions.run_and_verify_commit(wc_dir, expected_output, - expected_status, None, wc_dir) + expected_status) # Create a second working copy. ### Does this hack still work with wc-ng? @@ -4121,7 +3917,7 @@ def restarted_update_should_delete_dir_prop(sbox): }) svntest.actions.run_and_verify_commit(other_wc, expected_output, - expected_status, None, other_wc) + expected_status) # Back in the first working copy, create an obstructing path and # update. The update will flag a tree conflict. @@ -4146,7 +3942,7 @@ def restarted_update_should_delete_dir_prop(sbox): }) actions.run_and_verify_update(wc_dir, expected_output, expected_disk, - expected_status, None, None, None, None, None, False, wc_dir) + expected_status) # Now, delete the obstructing path and rerun the update. os.unlink(zeta_path) @@ -4189,8 +3985,6 @@ disk_after_leaf_edit = svntest.deeptrees.deep_trees_after_leaf_edit disk_after_leaf_del = svntest.deeptrees.deep_trees_after_leaf_del disk_after_tree_del = svntest.deeptrees.deep_trees_after_tree_del -disk_empty_dirs = svntest.deeptrees.deep_trees_empty_dirs - deep_trees_conflict_output = svntest.deeptrees.deep_trees_conflict_output deep_trees_conflict_output_skipped = \ svntest.deeptrees.deep_trees_conflict_output_skipped @@ -4223,11 +4017,14 @@ def tree_conflicts_on_update_1_1(sbox): 'DF/D1/beta' : Item(status=' ', treeconflict='U'), }) - expected_disk = disk_empty_dirs.copy() - expected_disk.remove('D/D1', 'DF/D1', 'DD/D1', 'DD/D1/D2', - 'DDF/D1', 'DDF/D1/D2', - 'DDD/D1', 'DDD/D1/D2', 'DDD/D1/D2/D3') - + expected_disk = svntest.wc.State('', { + 'F' : Item(), + 'D' : Item(), + 'DF' : Item(), + 'DD' : Item(), + 'DDF' : Item(), + 'DDD' : Item(), + }) # The files delta, epsilon, and zeta are incoming additions, but since # they are all within locally deleted trees they should also be schedule # for deletion. 
@@ -4307,7 +4104,14 @@ def tree_conflicts_on_update_1_2(sbox): 'DDF/D1/D2/gamma' : Item(status=' ', treeconflict='D'), }) - expected_disk = disk_empty_dirs.copy() + expected_disk = svntest.wc.State('', { + 'F' : Item(), + 'D' : Item(), + 'DF' : Item(), + 'DD' : Item(), + 'DDF' : Item(), + 'DDD' : Item(), + }) expected_status = deep_trees_status_local_tree_del.copy() @@ -4330,18 +4134,10 @@ def tree_conflicts_on_update_1_2(sbox): 'DDF/D1/D2/gamma', 'DF/D1/beta') - ### Why does the deep trees state not include files? - expected_disk.remove('D/D1', - 'DD/D1/D2', - 'DDD/D1/D2/D3', - 'DF/D1', 'DD/D1', - 'DDF/D1', 'DDF/D1/D2', - 'DDD/D1', 'DDD/D1/D2') - expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file delete, incoming file delete upon update' + '^local file delete, incoming file delete or move upon update' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, @@ -4359,7 +4155,7 @@ def tree_conflicts_on_update_1_2(sbox): }, 'D/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir delete, incoming dir delete or move upon update' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, @@ -4424,37 +4220,37 @@ def tree_conflicts_on_update_2_1(sbox): expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file edit, incoming file delete upon update' + '^local file edit, incoming file delete or move upon update' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, 'DF/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/DF/D1@2' + ' Source right: .none.*(/DF/D1@3)?$', }, 'DDF/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/DDF/D1@2' + ' Source right: .none.*(/DDF/D1@3)?$', }, 'D/D1' : { 'Tree conflict' : - '^local dir edit, 
incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, 'DD/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/DD/D1@2' + ' Source right: .none.*(/DD/D1@3)?$', }, 'DDD/D1' : { 'Tree conflict' : - '^local dir edit, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/DDD/D1@2' + ' Source right: .none.*(/DDD/D1@3)?$', }, @@ -4486,7 +4282,14 @@ def tree_conflicts_on_update_2_2(sbox): ### when dirs_same_p() is implemented) expected_output = deep_trees_conflict_output - expected_disk = disk_empty_dirs.copy() + expected_disk = svntest.wc.State('', { + 'DDF/D1/D2' : Item(), + 'F' : Item(), + 'D' : Item(), + 'DF/D1' : Item(), + 'DD/D1' : Item(), + 'DDD/D1/D2' : Item(), + }) expected_status = svntest.deeptrees.deep_trees_virginal_state.copy() expected_status.add({'' : Item()}) @@ -4504,65 +4307,51 @@ def tree_conflicts_on_update_2_2(sbox): # Expect the incoming tree deletes and the local leaf deletes to mean # that all deleted paths are *really* gone, not simply scheduled for # deletion. - expected_status.tweak('F/alpha', - 'D/D1', - 'DD/D1', - 'DF/D1', - 'DDD/D1', - 'DDF/D1', - status='! ', wc_rev=None) - # Remove from expected status and disk everything below the deleted paths. 
- expected_status.remove('DD/D1/D2', - 'DF/D1/beta', - 'DDD/D1/D2', - 'DDD/D1/D2/D3', - 'DDF/D1/D2', - 'DDF/D1/D2/gamma',) - - expected_disk.remove('D/D1', - 'DD/D1', - 'DD/D1/D2', - 'DF/D1', - 'DDD/D1', - 'DDD/D1/D2', - 'DDD/D1/D2/D3', - 'DDF/D1', - 'DDF/D1/D2',) + expected_status.tweak('DD/D1', 'DF/D1', 'DDF/D1', 'DDD/D1', + status='A ', copied='+', treeconflict='C', + wc_rev='-') + expected_status.tweak('DDF/D1/D2', 'DDD/D1/D2', + copied='+', wc_rev='-') + expected_status.tweak('DD/D1/D2', 'DF/D1/beta', 'DDD/D1/D2/D3', + 'DDF/D1/D2/gamma', + status='D ', copied='+', wc_rev='-') + expected_status.tweak('F/alpha', 'D/D1', + status='! ', treeconflict='C', wc_rev=None) expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file delete, incoming file delete upon update' + '^local file delete, incoming file delete or move upon update' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, 'DF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/DF/D1@2' + ' Source right: .none.*(/DF/D1@3)?$', }, 'DDF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/DDF/D1@2' + ' Source right: .none.*(/DDF/D1@3)?$', }, 'D/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir delete, incoming dir delete or move upon update' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, 'DD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: .dir.*/DD/D1@2' + ' Source right: .none.*(/DD/D1@3)?$', }, 'DDD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir edit, incoming dir delete or move upon update' + ' Source left: 
.dir.*/DDD/D1@2' + ' Source right: .none.*(/DDD/D1@3)?$', }, @@ -4659,8 +4448,14 @@ def tree_conflicts_on_update_3(sbox): expected_output = deep_trees_conflict_output - expected_disk = disk_empty_dirs.copy() - + expected_disk = svntest.wc.State('', { + 'F' : Item(), + 'D' : Item(), + 'DF' : Item(), + 'DD' : Item(), + 'DDF' : Item(), + 'DDD' : Item(), + }) expected_status = deep_trees_status_local_tree_del.copy() # Expect the incoming tree deletes and the local tree deletes to mean @@ -4681,50 +4476,40 @@ def tree_conflicts_on_update_3(sbox): 'DDF/D1/D2', 'DDF/D1/D2/gamma',) - expected_disk.remove('D/D1', - 'DD/D1', - 'DD/D1/D2', - 'DF/D1', - 'DDD/D1', - 'DDD/D1/D2', - 'DDD/D1/D2/D3', - 'DDF/D1', - 'DDF/D1/D2',) - expected_info = { 'F/alpha' : { 'Tree conflict' : - '^local file delete, incoming file delete upon update' + '^local file delete, incoming file delete or move upon update' + ' Source left: .file.*/F/alpha@2' + ' Source right: .none.*(/F/alpha@3)?$', }, 'DF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir delete, incoming dir delete or move upon update' + ' Source left: .dir.*/DF/D1@2' + ' Source right: .none.*(/DF/D1@3)?$', }, 'DDF/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir delete, incoming dir delete or move upon update' + ' Source left: .dir.*/DDF/D1@2' + ' Source right: .none.*(/DDF/D1@3)?$', }, 'D/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir delete, incoming dir delete or move upon update' + ' Source left: .dir.*/D/D1@2' + ' Source right: .none.*(/D/D1@3)?$', }, 'DD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir delete, incoming dir delete or move upon update' + ' Source left: .dir.*/DD/D1@2' + ' Source right: .none.*(/DD/D1@3)?$', }, 'DDD/D1' : { 'Tree conflict' : - '^local dir delete, incoming dir delete upon update' + '^local dir delete, incoming dir delete 
or move upon update' + ' Source left: .dir.*/DDD/D1@2' + ' Source right: .none.*(/DDD/D1@3)?$', }, @@ -4773,38 +4558,38 @@ def tree_conflict_uc1_update_deleted_tree(sbox): def modify_dir(dir): """Make some set of local modifications to an existing tree: A prop change, add a child, delete a child, change a child.""" - run_and_verify_svn(None, AnyOutput, [], 'propset', 'p', 'v', dir) + run_and_verify_svn(AnyOutput, [], 'propset', 'p', 'v', dir) path = os.path.join(dir, 'new_file') svntest.main.file_write(path, "This is the file 'new_file'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', path) + svntest.actions.run_and_verify_svn(None, [], 'add', path) path = os.path.join(dir, 'C', 'N') os.mkdir(path) path2 = os.path.join(dir, 'C', 'N', 'nu') svntest.main.file_write(path2, "This is the file 'nu'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', path) + svntest.actions.run_and_verify_svn(None, [], 'add', path) path = os.path.join(dir, 'B', 'lambda') - svntest.actions.run_and_verify_svn(None, None, [], 'delete', path) + svntest.actions.run_and_verify_svn(None, [], 'delete', path) path = os.path.join(dir, 'B', 'E', 'alpha') svntest.main.file_append(path, "An extra line.\n") # Prep for both scenarios modify_dir(A) - run_and_verify_svn(None, AnyOutput, [], 'ci', A, '-m', 'modify_dir') - run_and_verify_svn(None, AnyOutput, [], 'up', wc_dir) + run_and_verify_svn(AnyOutput, [], 'ci', A, '-m', 'modify_dir') + run_and_verify_svn(AnyOutput, [], 'up', wc_dir) # Existing scenario wc2 = sbox.add_wc_path('wc2') A2 = os.path.join(wc2, 'A') svntest.actions.duplicate_dir(sbox.wc_dir, wc2) - run_and_verify_svn(None, AnyOutput, [], 'delete', A2) + run_and_verify_svn(AnyOutput, [], 'delete', A2) # New scenario (starts at the revision before the committed mods) - run_and_verify_svn(None, AnyOutput, [], 'up', A, '-r1') - run_and_verify_svn(None, AnyOutput, [], 'delete', A) + run_and_verify_svn(AnyOutput, [], 'up', A, '-r1') + run_and_verify_svn(AnyOutput, [], 
'delete', A) expected_output = None expected_disk = None @@ -4861,7 +4646,7 @@ def tree_conflict_uc1_update_deleted_tree(sbox): }) run_and_verify_commit(wc_dir, expected_output, expected_status, - None, wc_dir, '-m', 'commit resolved tree') + [], wc_dir, '-m', 'commit resolved tree') # Issue #3334: a delete-onto-modified tree conflict should leave the node @@ -4905,21 +4690,21 @@ def tree_conflict_uc2_schedule_re_add(sbox): def modify_dir(dir): """Make some set of local modifications to an existing tree: A prop change, add a child, delete a child, change a child.""" - run_and_verify_svn(None, AnyOutput, [], + run_and_verify_svn(AnyOutput, [], 'propset', 'p', 'v', dir) path = os.path.join(dir, 'new_file') svntest.main.file_write(path, "This is the file 'new_file'.\n") - svntest.actions.run_and_verify_svn(None, None, [], 'add', path) + svntest.actions.run_and_verify_svn(None, [], 'add', path) path = os.path.join(dir, 'B', 'lambda') - svntest.actions.run_and_verify_svn(None, None, [], 'delete', path) + svntest.actions.run_and_verify_svn(None, [], 'delete', path) path = os.path.join(dir, 'B', 'E', 'alpha') svntest.main.file_append(path, "An extra line.\n") # Prepare the repos so that a later 'update' has an incoming deletion: # Delete the dir in the repos, making r2 - run_and_verify_svn(None, AnyOutput, [], + run_and_verify_svn(AnyOutput, [], '-m', '', 'delete', dir_url) # Existing scenario @@ -4927,8 +4712,8 @@ def tree_conflict_uc2_schedule_re_add(sbox): wc2 = sbox.add_wc_path('wc2') dir2 = os.path.join(wc2, dir) svntest.actions.duplicate_dir(sbox.wc_dir, wc2) - run_and_verify_svn(None, AnyOutput, [], 'up', wc2) - run_and_verify_svn(None, AnyOutput, [], 'copy', dir_url + '@1', dir2) + run_and_verify_svn(AnyOutput, [], 'up', wc2) + run_and_verify_svn(AnyOutput, [], 'copy', dir_url + '@1', dir2) modify_dir(dir2) # New scenario @@ -5030,8 +4815,7 @@ def set_deep_depth_on_target_with_shallow_children(sbox): expected_output, expected_disk, expected_status, - None, None, 
None, - None, None, 1, + [], True, '--set-depth', 'empty', B_path) @@ -5062,8 +4846,7 @@ def set_deep_depth_on_target_with_shallow_children(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '--set-depth', 'immediates', D_path) @@ -5091,8 +4874,7 @@ def set_deep_depth_on_target_with_shallow_children(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '--set-depth', 'infinity', A_path) @@ -5106,13 +4888,13 @@ def update_wc_of_dir_to_rev_not_containing_this_dir(sbox): # Create working copy of 'A' directory A_url = sbox.repo_url + "/A" other_wc_dir = sbox.add_wc_path("other") - svntest.actions.run_and_verify_svn(None, None, [], "co", A_url, other_wc_dir) + svntest.actions.run_and_verify_svn(None, [], "co", A_url, other_wc_dir) # Delete 'A' directory from repository - svntest.actions.run_and_verify_svn(None, None, [], "rm", A_url, "-m", "") + svntest.actions.run_and_verify_svn(None, [], "rm", A_url, "-m", "") # Try to update working copy of 'A' directory - svntest.actions.run_and_verify_svn(None, None, + svntest.actions.run_and_verify_svn(None, "svn: E160005: Target path '/A' does not exist", "up", other_wc_dir) @@ -5136,8 +4918,7 @@ def update_empty_hides_entries(sbox): None, expected_disk_empty, expected_status_empty, - None, None, None, - None, None, 1, + [], True, '-r', '0', wc_dir) @@ -5146,8 +4927,7 @@ def update_empty_hides_entries(sbox): None, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, wc_dir) # Update to revision 0 - Removes all files from WC @@ -5155,8 +4935,7 @@ def update_empty_hides_entries(sbox): None, expected_disk_empty, expected_status_empty, - None, None, None, - None, None, 1, + [], True, '-r', '0', wc_dir) @@ -5165,8 +4944,7 @@ def update_empty_hides_entries(sbox): None, expected_disk_empty, expected_status_empty, - None, None, None, - None, None, 1, + [], True, '--depth', 'empty', wc_dir) @@ -5179,13 
+4957,12 @@ def update_empty_hides_entries(sbox): None, expected_disk, expected_status, - None, None, None, - None, None, 1, - wc_dir) + check_props=True) #---------------------------------------------------------------------- # Test for issue #3573 'local non-inheritable mergeinfo changes not # properly merged with updated mergeinfo' +@SkipUnless(server_has_mergeinfo) def mergeinfo_updates_merge_with_local_mods(sbox): "local mergeinfo changes are merged with updates" @@ -5203,29 +4980,29 @@ def mergeinfo_updates_merge_with_local_mods(sbox): ### No, we are not checking the merge output for these simple ### merges. This is already covered *TO DEATH* in merge_tests.py. ### - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c3', '--depth', 'empty', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r3 from A to A_COPY at depth empty', wc_dir) # Merge -c5 from A to A_COPY (at default --depth infinity), commit as r8. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c5', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, None, [], 'ci', '-m', + svntest.actions.run_and_verify_svn(None, [], 'ci', '-m', 'Merge r5 from A to A_COPY', wc_dir) # Update WC to r7, repeat merge of -c3 from A to A_COPY but this # time do it at --depth infinity. Confirm that the mergeinfo # on A_COPY is no longer inheritable. 
- svntest.actions.run_and_verify_svn(None, None, [], 'up', '-r7', wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'up', '-r7', wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'merge', '-c3', '--depth', 'infinity', sbox.repo_url + '/A', A_COPY_path) - svntest.actions.run_and_verify_svn(None, [A_COPY_path + " - /A:3\n"], [], + svntest.actions.run_and_verify_svn([A_COPY_path + " - /A:3\n"], [], 'pg', SVN_PROP_MERGEINFO, '-R', A_COPY_path) @@ -5234,8 +5011,8 @@ def mergeinfo_updates_merge_with_local_mods(sbox): # brought down by the update (/A:3* --> /A:3*,5) leaving us with /A:3,5. ### This was failing because of issue #3573. The local mergeinfo change ### is reverted, leaving '/A:3*,5' on A_COPY. - svntest.actions.run_and_verify_svn(None, None, [], 'up', wc_dir) - svntest.actions.run_and_verify_svn(None, [A_COPY_path + " - /A:3,5\n"], [], + svntest.actions.run_and_verify_svn(None, [], 'up', wc_dir) + svntest.actions.run_and_verify_svn([A_COPY_path + " - /A:3,5\n"], [], 'pg', SVN_PROP_MERGEINFO, '-R', A_COPY_path) @@ -5260,7 +5037,7 @@ def update_with_excluded_subdir(sbox): expected_status.remove('A/D/G', 'A/D/G/pi', 'A/D/G/rho', 'A/D/G/tau') svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, False, + [], False, '--set-depth=exclude', G) # Commit a new revision so there is something to update to. @@ -5320,7 +5097,7 @@ def update_nonexistent_child_of_copy(sbox): 'nonexistent' : Item(verb='Skipped'), }) svntest.actions.run_and_verify_update(os.path.join('A2', 'nonexistent'), - expected_output, None, None, None) + expected_output, None, None) # Try updating a deleted path in the copied dir. 
svntest.main.run_svn(None, 'delete', os.path.join('A2', 'mu')) @@ -5329,7 +5106,7 @@ def update_nonexistent_child_of_copy(sbox): 'mu' : Item(verb='Skipped'), }) svntest.actions.run_and_verify_update(os.path.join('A2', 'mu'), - expected_output, None, None, None) + expected_output, None, None) if os.path.exists('A2/mu'): raise svntest.Failure("A2/mu improperly revived") @@ -5396,9 +5173,8 @@ def skip_access_denied(sbox): expected_output, None, expected_status, - None, - None, None, - None, None, None, wc_dir, '-r', '1') + [], False, + wc_dir, '-r', '1') f.close() @@ -5413,19 +5189,19 @@ def update_to_HEAD_plus_1(sbox): # revision".) svntest.actions.run_and_verify_update(wc_dir, None, None, None, - "E160006.*No such.*revision", - None, None, - None, None, None, wc_dir, '-r', '2') + ".*E160006.*No such.*revision.*", + False, + wc_dir, '-r', '2') other_wc = sbox.add_wc_path('other') other_url = sbox.repo_url + '/A' - svntest.actions.run_and_verify_svn("subtree checkout", None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', other_url, other_wc) svntest.actions.run_and_verify_update(other_wc, None, None, None, - "E160006.*No such.*revision", - None, None, - None, None, None, other_wc, '-r', '2') + ".*E160006.*No such.*revision.*", + False, + other_wc, '-r', '2') def update_moved_dir_leaf_del(sbox): "update locally moved dir with leaf del" @@ -5465,12 +5241,11 @@ def update_moved_dir_leaf_del(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) # Now resolve the conflict, using --accept=mine-conflict applying # the update to A/B/E2 - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', sbox.ospath('A/B/E')) @@ -5520,13 +5295,12 @@ def update_moved_dir_edited_leaf_del(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) # Now resolve the conflict, using 
--accept=mine-conflict. # This should apply the update to A/B/E2, and flag a tree # conflict on A/B/E2/alpha (incoming delete vs. local edit) - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', sbox.ospath('A/B/E')) @@ -5579,12 +5353,11 @@ def update_moved_dir_file_add(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) # Now resolve the conflict, using --accept=mine-conflict. # This should apply the update to A/B/E2, adding A/B/E2/foo. - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', sbox.ospath('A/B/E')) @@ -5640,9 +5413,8 @@ def update_moved_dir_dir_add(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) - svntest.actions.run_and_verify_svn("resolve failed", None, [], + check_props=True) + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--recursive', '--accept=mine-conflict', wc_dir) @@ -5697,13 +5469,12 @@ def update_moved_dir_file_move(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) # The incoming change is a delete as we don't yet track server-side # moves. Resolving the tree-conflict as "mine-conflict" applies the # delete to the move destination. 
- svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', sbox.ospath('A/B/E')) @@ -5764,10 +5535,9 @@ def update_move_text_mod(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--recursive', '--accept=mine-conflict', @@ -5830,10 +5600,9 @@ def update_nested_move_text_mod(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--recursive', '--accept=mine-conflict', @@ -5866,8 +5635,7 @@ def update_with_parents_and_exclude(sbox): expected_output, None, expected_status, - None, None, None, - None, None, False, + [], False, '--set-depth', 'exclude', sbox.ospath('A')) @@ -5897,8 +5665,7 @@ def update_with_parents_and_exclude(sbox): expected_output, None, expected_status, - None, None, None, - None, None, False, + [], False, '--parents', sbox.ospath('A/B')) @@ -5982,8 +5749,7 @@ def update_edit_delete_obstruction(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '2', wc_dir) # Cleanup obstructions @@ -5993,7 +5759,7 @@ def update_edit_delete_obstruction(sbox): os.rmdir(sbox.ospath('A/mu')) # Revert to remove working nodes and tree conflicts - svntest.actions.run_and_verify_svn('Reverting', None, [], + svntest.actions.run_and_verify_svn(None, [], 'revert', '-R', sbox.ospath('A/B'), sbox.ospath('A/mu'), @@ -6046,8 +5812,7 @@ def update_edit_delete_obstruction(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '3', wc_dir) def update_deleted(sbox): @@ -6069,8 +5834,7 @@ def update_deleted(sbox): 
expected_output, None, None, - None, None, None, - None, None, 1, + [], True, sbox.ospath('A/B')) @Issue(3144,3630) @@ -6115,13 +5879,12 @@ def break_moved_dir_edited_leaf_del(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) # Now resolve the conflict, using --accept=working # This should break the move of A/B/E to A/B/E2, leaving A/B/E2 # as a copy. The deletion of A/B/E is not reverted. - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--recursive', '--accept=working', wc_dir) expected_status.tweak('A/B/E', treeconflict=None, moved_to=None) @@ -6178,13 +5941,12 @@ def break_moved_replaced_dir(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) # Now resolve the conflict, using --accept=working # This should break the move of A/B/E to A/B/E2, leaving A/B/E2 # as a copy. A/B/E is not reverted. 
- svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--recursive', '--accept=working', wc_dir) expected_status.tweak('A/B/E2', moved_from=None) @@ -6200,17 +5962,17 @@ def update_removes_switched(sbox): wc_dir = sbox.wc_dir repo_url = sbox.repo_url - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'cp', repo_url + '/A', repo_url + '/AA', '-m', 'Q') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'co', repo_url + '/A', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'switch', repo_url + '/AA/B', wc_dir + '/B') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', repo_url + '/AA/B', '-m', 'Q') expected_output = svntest.wc.State(wc_dir, { @@ -6358,8 +6120,7 @@ def incomplete_overcomplete(sbox): expected_output, r5_disk, expected_status, - None, None, None, None, None, - True) + check_props=True) # And now we mark the directory incomplete, as if the update had failed # half-way through an update to r3 @@ -6389,8 +6150,7 @@ def incomplete_overcomplete(sbox): expected_output, r3_disk, r3_status, - None, None, None, None, None, - True, + [], True, wc_dir, '-r', 3) @Issue(4300) @@ -6456,8 +6216,7 @@ def update_swapped_depth_dirs(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1) + check_props=True) def move_update_props(sbox): "move-update with property mods" @@ -6516,12 +6275,11 @@ def move_update_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '2', wc_dir) # Resolve conflict moving changes to destination without conflict - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', 
sbox.ospath('A/B')) @@ -6543,12 +6301,11 @@ def move_update_props(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, 1, + [], True, '-r', '3', wc_dir) # Resolve conflict moving changes and raising property conflicts - svntest.actions.run_and_verify_svn("resolve failed", None, [], + svntest.actions.run_and_verify_svn(None, [], 'resolve', '--accept=mine-conflict', sbox.ospath('A/B')) @@ -6563,11 +6320,10 @@ def move_update_props(sbox): 'propertyB' : 'value3'}) extra_files = ['dir_conflicts.prej', 'beta.prej'] svntest.actions.verify_disk(wc_dir, expected_disk, True, - svntest.tree.detect_conflict_files, extra_files) + extra_files=extra_files) @Issues(3288) @SkipUnless(svntest.main.is_os_windows) -@XFail(svntest.main.is_ra_type_dav) def windows_update_backslash(sbox): "test filename with backslashes inside" @@ -6575,17 +6331,52 @@ def windows_update_backslash(sbox): wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svnmucc(None, None, [], - '-U', sbox.repo_url, + mucc_url = sbox.repo_url + + if mucc_url.startswith('http'): + # Apache Httpd doesn't allow creating paths with '\\' in them on Windows + # AH00026: found %2f (encoded '/') in URI (decoded='/svn-test-work/repositories/authz_tests-30/!svn/ver/2/A/completely\\unusable\\dir'), returning 404 + # + # Let's use file:// to work around. + mucc_url = 'file:///' + os.path.abspath(sbox.repo_dir).replace('\\', '/') + + svntest.actions.run_and_verify_svnmucc(None, [], + '-U', mucc_url, '-m', '', 'mkdir', 'A/completely\\unusable\\dir') # No error and a proper skip + recording in the working copy would also - # be a good result. This just verifies current behavior. - - expected_error = 'svn: E155000: .* is not valid.*' - svntest.actions.run_and_verify_svn(wc_dir, None, expected_error, 'up', - wc_dir) + # be a good result. 
This just verifies current behavior: + # + # - Error via file://, svn:// or http:// with SVNPathAuthz short_circuit + # + # - No error via http:// with SVNPathAuthz on + # (The reason is that Apache Httpd doesn't allow paths with '\\' in + # them on Windows, and a subrequest-based access check returns 404. + # This makes mod_dav_svn report the path as server excluded (aka + # absent), which doesn't produce output when updating.) + # + # Since https://issues.apache.org/jira/browse/SVN-3288 is about a crash, + # we're fine with either result -- that is, if `svn update' finished + # without an error, we expect specific stdout and proper wc state. + # If it failed, we expect to get the following error: + # + # svn: E155000: 'completely\unusable\dir' is not valid as filename + # in directory [...] + # + exit_code, output, errput = svntest.main.run_svn(1, 'up', wc_dir) + if exit_code == 0: + verify.verify_outputs("Unexpected output", output, errput, [ + "Updating '%s':\n" % wc_dir, + "At revision 2.\n" + ], []) + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + elif exit_code == 1: + verify.verify_outputs("Unexpected output", output, errput, + None, 'svn: E155000: .* is not valid.*') + else: + raise verify.SVNUnexpectedExitCode(exit_code) def update_moved_away(sbox): "update subtree of moved away" @@ -6665,8 +6456,7 @@ def update_moved_away(sbox): expected_output, expected_disk, expected_status, - None, None, None, - None, None, None, + [], False, sbox.ospath('A/B/E')) @Issues(4323) @@ -6676,7 +6466,7 @@ def bump_below_tree_conflict(sbox): sbox.build() wc_dir = sbox.wc_dir - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'rm', sbox.repo_url + '/A/B', '-m', '') @@ -6707,8 +6497,7 @@ def bump_below_tree_conflict(sbox): expected_output, None, expected_status, - None, None, None, - None, None, None, + [], False, '-r', '2', wc_dir) # A is tree 
conflicted, so an update of A/D should be a skip/no-op. @@ -6719,8 +6508,7 @@ def bump_below_tree_conflict(sbox): expected_output, None, expected_status, - None, None, None, - None, None, None, + [], False, sbox.ospath('A/D')) # A is tree conflicted, so an update of A/D/G should be a skip/no-op. @@ -6731,8 +6519,7 @@ def bump_below_tree_conflict(sbox): expected_output, None, expected_status, - None, None, None, - None, None, None, + [], False, sbox.ospath('A/D/G')) @Issues(4111) @@ -6743,21 +6530,19 @@ def update_child_below_add(sbox): wc_dir = sbox.wc_dir sbox.simple_update('A/B', 0) + e_path = sbox.ospath('A/B/E') - # Update skips A/B/E because A/B has a not-present BASE node. - expected_output = svntest.wc.State(wc_dir, { - 'A/B/E' : Item(verb='Skipped'), - }) + # Update skips and errors on A/B/E because A/B has a not-present BASE node. + expected_output = ["Skipped '"+e_path+"'\n"] + expected_err = "svn: E155007: " expected_status = svntest.actions.get_virginal_state(wc_dir, 1) expected_status.remove('A/B', 'A/B/E', 'A/B/E/alpha', 'A/B/E/beta', 'A/B/F', 'A/B/lambda') - svntest.actions.run_and_verify_update(wc_dir, - expected_output, - None, - expected_status, - None, None, None, - None, None, None, - sbox.ospath('A/B/E')) + svntest.actions.run_and_verify_svn(expected_output, + expected_err, + 'update', e_path) + svntest.actions.run_and_verify_status(wc_dir, expected_status) + # Add working nodes over A/B sbox.simple_mkdir('A/B') @@ -6769,15 +6554,241 @@ def update_child_below_add(sbox): 'A/B/E' : Item(status='A ', wc_rev='-'), 'A/B/E/alpha' : Item(status='A ', wc_rev='-'), }) + expected_output = svntest.wc.State(wc_dir, { + 'A/B/E' : Item(verb='Skipped'), + }) # Update should still skip A/B/E svntest.actions.run_and_verify_update(wc_dir, expected_output, None, expected_status, - None, None, None, - None, None, None, + [], False, sbox.ospath('A/B/E')) +def update_conflict_details(sbox): + "update conflict details" + + sbox.build() + wc_dir = sbox.wc_dir + + 
sbox.simple_append('A/B/E/new', 'new\n') + sbox.simple_add('A/B/E/new') + sbox.simple_append('A/B/E/alpha', '\nextra\nlines\n') + sbox.simple_rm('A/B/E/beta', 'A/B/F') + sbox.simple_propset('key', 'VAL', 'A/B/E', 'A/B') + sbox.simple_mkdir('A/B/E/new-dir1') + sbox.simple_mkdir('A/B/E/new-dir2') + sbox.simple_mkdir('A/B/E/new-dir3') + sbox.simple_rm('A/B/lambda') + sbox.simple_mkdir('A/B/lambda') + sbox.simple_commit() + + sbox.simple_update('', 1) + + sbox.simple_propset('key', 'vAl', 'A/B') + sbox.simple_move('A/B/E/beta', 'beta') + sbox.simple_propset('a', 'b', 'A/B/F', 'A/B/lambda') + sbox.simple_append('A/B/E/alpha', 'other\nnew\nlines') + sbox.simple_mkdir('A/B/E/new') + sbox.simple_mkdir('A/B/E/new-dir1') + sbox.simple_append('A/B/E/new-dir2', 'something') + sbox.simple_append('A/B/E/new-dir3', 'something') + sbox.simple_add('A/B/E/new-dir3') + + expected_status = svntest.actions.get_virginal_state(wc_dir, 2) + expected_status.add({ + 'A/B/E/new' : Item(status='R ', treeconflict='C', wc_rev='2'), + 'A/B/E/new-dir2' : Item(status='D ', treeconflict='C', wc_rev='2'), + 'A/B/E/new-dir3' : Item(status='R ', treeconflict='C', wc_rev='2'), + 'A/B/E/new-dir1' : Item(status=' ', wc_rev='2'), + 'A/C' : Item(status=' ', wc_rev='2'), + 'iota' : Item(status=' ', wc_rev='2'), + 'beta' : Item(status='A ', copied='+', wc_rev='-') + }) + expected_status.tweak('A/B', status=' C', wc_rev='2') + expected_status.tweak('A/B/E/alpha', status='C ', wc_rev='2') + expected_status.tweak('A/B/E/beta', status='! 
', treeconflict='C', wc_rev=None) + expected_status.tweak('A/B/F', status='A ', copied='+', treeconflict='C', wc_rev='-') + expected_status.tweak('A/B/lambda', status='RM', copied='+', treeconflict='C', wc_rev='-') + expected_status.tweak('A/mu', status=' ', wc_rev='2') + expected_output = svntest.wc.State(wc_dir, { + 'A/B' : Item(status=' C'), + 'A/B/E' : Item(status=' U'), + 'A/B/E/new' : Item(status=' ', treeconflict='C'), + 'A/B/E/beta' : Item(status=' ', treeconflict='C'), + 'A/B/E/alpha' : Item(status='C '), + 'A/B/E/new-dir2' : Item(status=' ', treeconflict='C'), + 'A/B/E/new-dir3' : Item(status=' ', treeconflict='C'), + 'A/B/E/new-dir1' : Item(status='E '), + 'A/B/F' : Item(status=' ', treeconflict='C'), + # ### 2 tree conflict reports; one for delete; one for add... + 'A/B/lambda' : Item(status=' ', treeconflict='A', + prev_status=' ', prev_treeconflict='C'), + }) + svntest.actions.run_and_verify_update(wc_dir, expected_output, + None, expected_status) + + # Update can't pass source as none at a specific URL@revision, + # because it doesn't know... the working copy could be mixed + # revision or may have excluded parts... 
+ expected_info = [ + { + "Path" : re.escape(sbox.ospath('A/B')), + + "Conflict Properties File" : + re.escape(sbox.ospath('A/B/dir_conflicts.prej')) + '.*', + "Conflict Details": re.escape( + 'incoming dir edit upon update' + + ' Source left: (dir) ^/A/B@1' + + ' Source right: (dir) ^/A/B@2') + }, + { + "Path" : re.escape(sbox.ospath('A/B/E')), + }, + { + "Path" : re.escape(sbox.ospath('A/B/E/alpha')), + "Conflict Previous Base File" : '.*alpha.*', + "Conflict Previous Working File" : '.*alpha.*', + "Conflict Current Base File": '.*alpha.*', + "Conflict Details": re.escape( + 'incoming file edit upon update' + + ' Source left: (file) ^/A/B/E/alpha@1' + + ' Source right: (file) ^/A/B/E/alpha@2') + }, + { + "Path" : re.escape(sbox.ospath('A/B/E/beta')), + "Tree conflict": re.escape( + 'local file moved away, incoming file delete or move upon update' + + ' Source left: (file) ^/A/B/E/beta@1' + + ' Source right: (none) ^/A/B/E/beta@2') + }, + { + "Path" : re.escape(sbox.ospath('A/B/E/new')), + "Tree conflict": re.escape( + 'local dir add, incoming file add upon update' + + ' Source left: (none)' + + ' Source right: (file) ^/A/B/E/new@2') + }, + { + "Path" : re.escape(sbox.ospath('A/B/E/new-dir1')), + # No tree conflict. 
Existing directory taken over + }, + { + "Path" : re.escape(sbox.ospath('A/B/E/new-dir2')), + "Tree conflict": re.escape( + 'local file unversioned, incoming dir add upon update' + + ' Source left: (none)' + + ' Source right: (dir) ^/A/B/E/new-dir2@2') + }, + { + "Path" : re.escape(sbox.ospath('A/B/E/new-dir3')), + "Tree conflict": re.escape( + 'local file add, incoming dir add upon update' + + ' Source left: (none)' + + ' Source right: (dir) ^/A/B/E/new-dir3@2') + }, + { + "Path" : re.escape(sbox.ospath('A/B/F')), + "Tree conflict": re.escape( + 'local dir edit, incoming dir delete or move upon update' + + ' Source left: (dir) ^/A/B/F@1' + + ' Source right: (none) ^/A/B/F@2') + }, + { + "Path" : re.escape(sbox.ospath('A/B/lambda')), + "Tree conflict": re.escape( + 'local file edit, incoming replace with dir upon update' + + ' Source left: (file) ^/A/B/lambda@1' + + ' Source right: (dir) ^/A/B/lambda@2') + }, + ] + + svntest.actions.run_and_verify_info(expected_info, sbox.ospath('A/B'), + '--depth', 'infinity') + +def update_add_conflicted_deep(sbox): + "deep add conflicted" + + sbox.build() + repo_url = sbox.repo_url + + svntest.actions.run_and_verify_svnmucc( + None, [], '-U', repo_url, '-m', '', + 'mkdir', 'A/z', + 'mkdir', 'A/z/z', + 'mkdir', 'A/z/z/z') + + svntest.actions.run_and_verify_svnmucc( + None, [], '-U', repo_url, '-m', '', + 'rm', 'A/z', + 'mkdir', 'A/z', + 'mkdir', 'A/z/z', + 'mkdir', 'A/z/z/z') + + sbox.simple_append('A/z', 'A/z') + sbox.simple_add('A/z') + sbox.simple_update('A', 2) + # This final update used to segfault using 1.9.0 and 1.9.1 + sbox.simple_update('A/z/z', 3) + +def missing_tmp_update(sbox): + "missing tmp update caused segfault" + + sbox.build(read_only = True) + wc_dir = sbox.wc_dir + svntest.actions.run_and_verify_update(wc_dir, None, None, None, [], False, + wc_dir, '--set-depth', 'empty') + + os.rmdir(sbox.ospath(svntest.main.get_admin_name() + '/tmp')) + + svntest.actions.run_and_verify_svn(None, '.*Unable to create.*', + 
'up', wc_dir, '--set-depth', 'infinity') + + svntest.actions.run_and_verify_svn(None, [], 'cleanup', wc_dir) + + svntest.actions.run_and_verify_update(wc_dir, None, None, None, [], False, + wc_dir, '--set-depth', 'infinity') + +def update_delete_switched(sbox): + "update delete switched" + + sbox.build(read_only = True) + wc_dir = sbox.wc_dir + + svntest.actions.run_and_verify_switch(wc_dir, sbox.ospath('A/B/E'), + sbox.repo_url + '/A/D/G', + None, None, None, [], False, + '--ignore-ancestry') + + # Introduce some change somewhere... + sbox.simple_propset('A', 'A', 'A') + + expected_status = svntest.wc.State(wc_dir, { + '' : Item(status=' ', wc_rev='1'), + 'A' : Item(status='A ', copied='+', treeconflict='C', wc_rev='-'), + 'A/B' : Item(status=' ', copied='+', wc_rev='-'), + 'A/B/E' : Item(status='A ', copied='+', wc_rev='-'), + 'A/B/E/rho' : Item(status=' ', copied='+', wc_rev='-'), + 'A/B/E/pi' : Item(status=' ', copied='+', wc_rev='-'), + 'A/B/E/tau' : Item(status=' ', copied='+', wc_rev='-'), + 'A/B/lambda' : Item(status=' ', copied='+', wc_rev='-'), + 'A/B/F' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/G' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/G/pi' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/G/tau' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/G/rho' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/gamma' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/H' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/H/omega' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/H/psi' : Item(status=' ', copied='+', wc_rev='-'), + 'A/D/H/chi' : Item(status=' ', copied='+', wc_rev='-'), + 'A/mu' : Item(status=' ', copied='+', wc_rev='-'), + 'A/C' : Item(status=' ', copied='+', wc_rev='-'), + 'iota' : Item(status=' ', wc_rev='1'), + }) + svntest.actions.run_and_verify_update(wc_dir, None, None, expected_status, + [], False, sbox.ospath('A'), '-r', 0) 
####################################################################### # Run the tests @@ -6823,7 +6834,6 @@ test_list = [ None, update_copied_from_replaced_and_changed, update_copied_and_deleted_prop, update_accept_conflicts, - eof_in_interactive_conflict_resolver, update_uuid_changed, restarted_update_should_delete_dir_prop, tree_conflicts_on_update_1_1, @@ -6865,6 +6875,10 @@ test_list = [ None, update_moved_away, bump_below_tree_conflict, update_child_below_add, + update_conflict_details, + update_add_conflicted_deep, + missing_tmp_update, + update_delete_switched, ] if __name__ == '__main__': diff --git a/subversion/tests/cmdline/upgrade_tests.py b/subversion/tests/cmdline/upgrade_tests.py index 839206e..57aabae 100755 --- a/subversion/tests/cmdline/upgrade_tests.py +++ b/subversion/tests/cmdline/upgrade_tests.py @@ -109,8 +109,7 @@ def check_format(sbox, expected_format): raise svntest.Failure("found format '%d'; expected '%d'; in wc '%s'" % (found_format, expected_format, root)) - if svntest.main.wc_is_singledb(sbox.wc_dir): - dirs[:] = [] + dirs[:] = [] if dot_svn in dirs: dirs.remove(dot_svn) @@ -258,7 +257,7 @@ def basic_upgrade(sbox): replace_sbox_with_tarfile(sbox, 'basic_upgrade.tar.bz2') # Attempt to use the working copy, this should give an error - svntest.actions.run_and_verify_svn(None, None, wc_is_too_old_regex, + svntest.actions.run_and_verify_svn(None, wc_is_too_old_regex, 'info', sbox.wc_dir) # Upgrade on something anywhere within a versioned subdir gives a @@ -267,24 +266,24 @@ def basic_upgrade(sbox): # Both cases use the same error code. not_wc = ".*(E155007|E155019).*%s'.*not a working copy.*" os.mkdir(sbox.ospath('X')) - svntest.actions.run_and_verify_svn(None, None, not_wc % 'X', + svntest.actions.run_and_verify_svn(None, not_wc % 'X', 'upgrade', sbox.ospath('X')) # Upgrade on a non-existent subdir within an old WC gives a # 'not a working copy' error. 
- svntest.actions.run_and_verify_svn(None, None, not_wc % 'Y', + svntest.actions.run_and_verify_svn(None, not_wc % 'Y', 'upgrade', sbox.ospath('Y')) # Upgrade on a versioned file within an old WC gives a # 'not a working copy' error. - svntest.actions.run_and_verify_svn(None, None, not_wc % 'mu', + svntest.actions.run_and_verify_svn(None, not_wc % 'mu', 'upgrade', sbox.ospath('A/mu')) # Upgrade on a versioned dir within an old WC gives a # 'not a working copy' error. - svntest.actions.run_and_verify_svn(None, None, not_wc % 'A', + svntest.actions.run_and_verify_svn(None, not_wc % 'A', 'upgrade', sbox.ospath('A')) # Now upgrade the working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) # Actually check the format number of the upgraded working copy @@ -304,10 +303,10 @@ def upgrade_with_externals(sbox): # Attempt to use the working copy, this should give an error expected_stderr = wc_is_too_old_regex - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'info', sbox.wc_dir) # Now upgrade the working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) # Actually check the format number of the upgraded working copy @@ -320,12 +319,12 @@ def upgrade_1_5_body(sbox, subcommand): # Attempt to use the working copy, this should give an error expected_stderr = wc_is_too_old_regex - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, subcommand, sbox.wc_dir) # Now upgrade the working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) # Check the format of the working copy @@ -359,7 +358,7 @@ def logs_left_1_5(sbox): # Try to upgrade, this should give an error expected_stderr = (".*Cannot upgrade with 
existing logs; .*") - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'upgrade', sbox.wc_dir) @@ -367,7 +366,7 @@ def upgrade_wcprops(sbox): "test upgrading a working copy with wcprops" replace_sbox_with_tarfile(sbox, 'upgrade_wcprops.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) # Make sure that .svn/all-wcprops has disappeared @@ -437,15 +436,15 @@ def basic_upgrade_1_0(sbox): # Attempt to use the working copy, this should give an error expected_stderr = wc_is_too_old_regex - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'info', sbox.wc_dir) # Now upgrade the working copy - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) # And the separate working copy below COPIED or check_format() fails - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', os.path.join(sbox.wc_dir, 'COPIED', 'G')) @@ -521,12 +520,12 @@ def basic_upgrade_1_0(sbox): def do_x3_upgrade(sbox, expected_error=[]): # Attempt to use the working copy, this should give an error expected_stderr = wc_is_too_old_regex - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'info', sbox.wc_dir) # Now upgrade the working copy - svntest.actions.run_and_verify_svn(None, None, expected_error, + svntest.actions.run_and_verify_svn(None, expected_error, 'upgrade', sbox.wc_dir) if expected_error != []: @@ -600,7 +599,7 @@ def do_x3_upgrade(sbox, expected_error=[]): 'A/G_new/rho' : {'svn:eol-style': 'native'} }) - svntest.actions.run_and_verify_svn(None, 'Reverted.*', [], + svntest.actions.run_and_verify_svn('Reverted.*', [], 'revert', '-R', sbox.wc_dir) expected_status = 
svntest.wc.State(sbox.wc_dir, @@ -665,7 +664,7 @@ def missing_dirs(sbox): # touch wc/A/D wc/A/B_new/F replace_sbox_with_tarfile(sbox, 'missing-dirs.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -694,7 +693,7 @@ def missing_dirs2(sbox): os.remove(sbox.ospath('A/B_new/F')) os.mkdir(sbox.ospath('A/D')) os.mkdir(sbox.ospath('A/B_new/F')) - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -721,7 +720,7 @@ def delete_and_keep_local(sbox): replace_sbox_with_tarfile(sbox, 'wc-delete.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, @@ -752,7 +751,7 @@ def dirs_only_upgrade(sbox): expected_output = ["Upgraded '%s'\n" % (sbox.ospath('').rstrip(os.path.sep)), "Upgraded '%s'\n" % (sbox.ospath('A'))] - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -823,7 +822,7 @@ def delete_in_copy_upgrade(sbox): wc_dir = sbox.wc_dir replace_sbox_with_tarfile(sbox, 'delete-in-copy.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1) @@ -837,7 +836,7 @@ def delete_in_copy_upgrade(sbox): }) run_and_verify_status_no_server(sbox.wc_dir, expected_status) - svntest.actions.run_and_verify_svn(None, 'Reverted.*', [], 'revert', '-R', + svntest.actions.run_and_verify_svn('Reverted.*', [], 'revert', '-R', sbox.ospath('A/B-copied/E')) expected_status.tweak('A/B-copied/E', @@ -856,7 +855,7 @@ def replaced_files(sbox): 
wc_dir = sbox.wc_dir replace_sbox_with_tarfile(sbox, 'replaced-files.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) # A is a checked-out dir containing A/f and A/g, then @@ -894,7 +893,7 @@ def replaced_files(sbox): [sbox.ospath('B/f'), '395dfb603d8a4e0348d0b082803f2b7426c76eb9'], [sbox.ospath('B/g'), None]]) - svntest.actions.run_and_verify_svn(None, 'Reverted.*', [], 'revert', + svntest.actions.run_and_verify_svn('Reverted.*', [], 'revert', sbox.ospath('A/f'), sbox.ospath('B/f'), sbox.ospath('A/g'), sbox.ospath('B/g')) @@ -916,7 +915,7 @@ def upgrade_with_scheduled_change(sbox): replace_sbox_with_tarfile(sbox, 'upgrade_with_scheduled_change.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1) expected_status.add({ @@ -930,21 +929,21 @@ def tree_replace1(sbox): replace_sbox_with_tarfile(sbox, 'tree-replace1.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { '' : Item(status=' M', wc_rev=17), 'B' : Item(status='R ', copied='+', wc_rev='-'), - 'B/f' : Item(status='R ', copied='+', wc_rev='-'), + 'B/f' : Item(status=' ', copied='+', wc_rev='-'), 'B/g' : Item(status='D ', wc_rev=17), - 'B/h' : Item(status='A ', copied='+', wc_rev='-'), - 'B/C' : Item(status='R ', copied='+', wc_rev='-'), - 'B/C/f' : Item(status='R ', copied='+', wc_rev='-'), + 'B/h' : Item(status=' ', copied='+', wc_rev='-'), + 'B/C' : Item(status=' ', copied='+', wc_rev='-'), + 'B/C/f' : Item(status=' ', copied='+', wc_rev='-'), 'B/D' : Item(status='D ', wc_rev=17), 'B/D/f' : Item(status='D ', wc_rev=17), - 'B/E' : Item(status='A ', copied='+', wc_rev='-'), - 'B/E/f' : Item(status='A ', copied='+', wc_rev='-'), + 
'B/E' : Item(status=' ', copied='+', wc_rev='-'), + 'B/E/f' : Item(status=' ', copied='+', wc_rev='-'), }) run_and_verify_status_no_server(sbox.wc_dir, expected_status) @@ -954,7 +953,7 @@ def tree_replace2(sbox): replace_sbox_with_tarfile(sbox, 'tree-replace2.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -962,11 +961,11 @@ def tree_replace2(sbox): 'B' : Item(status='R ', copied='+', wc_rev='-'), 'B/f' : Item(status='D ', wc_rev=12), 'B/D' : Item(status='D ', wc_rev=12), - 'B/g' : Item(status='A ', copied='+', wc_rev='-'), - 'B/E' : Item(status='A ', copied='+', wc_rev='-'), + 'B/g' : Item(status=' ', copied='+', wc_rev='-'), + 'B/E' : Item(status=' ', copied='+', wc_rev='-'), 'C' : Item(status='R ', copied='+', wc_rev='-'), - 'C/f' : Item(status='A ', copied='+', wc_rev='-'), - 'C/D' : Item(status='A ', copied='+', wc_rev='-'), + 'C/f' : Item(status=' ', copied='+', wc_rev='-'), + 'C/D' : Item(status=' ', copied='+', wc_rev='-'), 'C/g' : Item(status='D ', wc_rev=12), 'C/E' : Item(status='D ', wc_rev=12), }) @@ -988,7 +987,7 @@ def upgrade_from_format_28(sbox): assert not os.path.exists(new_pristine_path) # Upgrade the WC - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) assert not os.path.exists(old_pristine_path) assert os.path.exists(new_pristine_path) @@ -999,7 +998,7 @@ def depth_exclude(sbox): replace_sbox_with_tarfile(sbox, 'depth_exclude.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -1015,7 +1014,7 @@ def depth_exclude_2(sbox): replace_sbox_with_tarfile(sbox, 'depth_exclude_2.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 
'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -1030,7 +1029,7 @@ def add_add_del_del_tc(sbox): replace_sbox_with_tarfile(sbox, 'add_add_del_del_tc.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -1048,7 +1047,7 @@ def add_add_x2(sbox): replace_sbox_with_tarfile(sbox, 'add_add_x2.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -1078,7 +1077,7 @@ def upgrade_with_missing_subdir(sbox): # Attempt to use the working copy, this should give an error expected_stderr = wc_is_too_old_regex - svntest.actions.run_and_verify_svn(None, None, expected_stderr, + svntest.actions.run_and_verify_svn(None, expected_stderr, 'info', sbox.wc_dir) # Now remove a subdirectory @@ -1094,7 +1093,7 @@ def upgrade_with_missing_subdir(sbox): "Upgraded '%s'\n" % sbox.ospath('A/D/G'), "Upgraded '%s'\n" % sbox.ospath('A/D/H'), ]) - svntest.actions.run_and_verify_svn(None, expected_output, [], + svntest.actions.run_and_verify_svn(expected_output, [], 'upgrade', sbox.wc_dir) # And now perform an update. 
(This used to fail with an assertion) @@ -1122,7 +1121,7 @@ def upgrade_locked(sbox): replace_sbox_with_tarfile(sbox, 'upgrade_locked.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) expected_status = svntest.wc.State(sbox.wc_dir, { @@ -1145,21 +1144,21 @@ def upgrade_file_externals(sbox): '07146bbd-0b64-4aaf-ab70-cd76a0df2d41') expected_output = svntest.verify.RegexOutput('r2 committed.*') - svntest.actions.run_and_verify_svnmucc(None, expected_output, [], + svntest.actions.run_and_verify_svnmucc(expected_output, [], '-m', 'r2', 'propset', 'svn:externals', '^/A/B/E EX\n^/A/mu muX', sbox.repo_url + '/A/B/F') expected_output = svntest.verify.RegexOutput('r3 committed.*') - svntest.actions.run_and_verify_svnmucc(None, expected_output, [], + svntest.actions.run_and_verify_svnmucc(expected_output, [], '-m', 'r3', 'propset', 'svn:externals', '^/A/B/F FX\n^/A/B/lambda lambdaX', sbox.repo_url + '/A/C') expected_output = svntest.verify.RegexOutput('r4 committed.*') - svntest.actions.run_and_verify_svnmucc(None, expected_output, [], + svntest.actions.run_and_verify_svnmucc(expected_output, [], '-m', 'r4', 'propset', 'pname1', 'pvalue1', sbox.repo_url + '/A/mu', @@ -1168,8 +1167,8 @@ def upgrade_file_externals(sbox): 'propset', 'pname3', 'pvalue3', sbox.repo_url + '/A/B/E/alpha') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'relocate', 'file:///tmp/repo', sbox.repo_url, sbox.wc_dir) @@ -1214,10 +1213,10 @@ def upgrade_missing_replaced(sbox): sbox.build(create_wc=False) replace_sbox_with_tarfile(sbox, 'upgrade_missing_replaced.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 
'upgrade', sbox.wc_dir) svntest.main.run_svnadmin('setuuid', sbox.repo_dir, 'd7130b12-92f6-45c9-9217-b9f0472c3fab') - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', 'file:///tmp/repo', sbox.repo_url, sbox.wc_dir) @@ -1240,7 +1239,7 @@ def upgrade_missing_replaced(sbox): svntest.actions.run_and_verify_update(sbox.wc_dir, expected_output, None, expected_status) - svntest.actions.run_and_verify_svn(None, 'Reverted.*', [], 'revert', '-R', + svntest.actions.run_and_verify_svn('Reverted.*', [], 'revert', '-R', sbox.wc_dir) expected_status = svntest.actions.get_virginal_state(sbox.wc_dir, 1) # And verify that the state is now valid in both the entries an status world. @@ -1253,10 +1252,10 @@ def upgrade_not_present_replaced(sbox): sbox.build(create_wc=False) replace_sbox_with_tarfile(sbox, 'upgrade_not_present_replaced.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) svntest.main.run_svnadmin('setuuid', sbox.repo_dir, 'd7130b12-92f6-45c9-9217-b9f0472c3fab') - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'relocate', 'file:///tmp/repo', sbox.repo_url, sbox.wc_dir) @@ -1279,15 +1278,15 @@ def upgrade_from_1_7_conflict(sbox): # The working copy contains a text conflict, and upgrading such # a working copy used to cause a pointless 'upgrade required' error. 
- svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) def do_iprops_upgrade(nonrootfile, rootfile, sbox): wc_dir = sbox.wc_dir replace_sbox_with_tarfile(sbox, nonrootfile) - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'relocate', 'file:///tmp/repo', sbox.repo_url, wc_dir) expected_output = [] @@ -1334,8 +1333,8 @@ def do_iprops_upgrade(nonrootfile, rootfile, sbox): # Now try with a repository root working copy replace_sbox_with_tarfile(sbox, rootfile) - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) - svntest.actions.run_and_verify_svn(None, None, [], 'relocate', + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'relocate', 'file:///tmp/repo', sbox.repo_url, wc_dir) # Unswitched inherited props available after upgrade @@ -1418,7 +1417,7 @@ def changelist_upgrade_1_6(sbox): svntest.main.run_svnadmin('setuuid', sbox.repo_dir, 'aa4c97bd-2e1a-4e55-a1e5-3db22cff2673') replace_sbox_with_tarfile(sbox, 'changelist_upgrade_1_6.tar.bz2') - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) exit_code, output, errput = svntest.main.run_svn(None, 'info', sbox.wc_dir, '--depth', 'infinity', @@ -1437,8 +1436,9 @@ def upgrade_1_7_dir_external(sbox): # This fails for 'make check EXCLUSIVE_WC_LOCKS=1' giving an error: # svn: warning: W200033: sqlite[S5]: database is locked - svntest.actions.run_and_verify_svn(None, None, [], 'upgrade', sbox.wc_dir) + svntest.actions.run_and_verify_svn(None, [], 'upgrade', sbox.wc_dir) +@SkipUnless(svntest.wc.python_sqlite_can_read_wc) def auto_analyze(sbox): 
"""automatic SQLite ANALYZE""" @@ -1480,6 +1480,90 @@ def auto_analyze(sbox): if val != [(1,)]: raise svntest.Failure("analyze failed") +def upgrade_1_0_with_externals(sbox): + "test upgrading 1.0.0 working copy with externals" + + sbox.build(create_wc = False) + replace_sbox_with_tarfile(sbox, 'upgrade_1_0_with_externals.tar.bz2') + + url = sbox.repo_url + + # This is non-canonical by the rules of svn_uri_canonicalize, it gets + # written into the entries file and upgrade has to canonicalize. + non_canonical_url = url[:-1] + '%%%02x' % ord(url[-1]) + xml_entries_relocate(sbox.wc_dir, 'file:///1.0.0/repos', non_canonical_url) + + externals_propval = 'exdir_G ' + sbox.repo_url + '/A/D/G' + '\n' + adm_name = svntest.main.get_admin_name() + dir_props_file = os.path.join(sbox.wc_dir, adm_name, 'dir-props') + svntest.main.file_write(dir_props_file, + ('K 13\n' + 'svn:externals\n' + 'V %d\n' % len(externals_propval)) + + externals_propval + '\nEND\n', 'wb') + + # Attempt to use the working copy, this should give an error + expected_stderr = wc_is_too_old_regex + svntest.actions.run_and_verify_svn(None, expected_stderr, + 'info', sbox.wc_dir) + + + # Now upgrade the working copy + svntest.actions.run_and_verify_svn(None, [], + 'upgrade', sbox.wc_dir) + # And the separate working copy below COPIED or check_format() fails + svntest.actions.run_and_verify_svn(None, [], + 'upgrade', + os.path.join(sbox.wc_dir, 'COPIED', 'G')) + + # Actually check the format number of the upgraded working copy + check_format(sbox, get_current_format()) + + # Now check the contents of the working copy + # #### This working copy is not just a basic tree, + # fix with the right data once we get here + expected_status = svntest.wc.State(sbox.wc_dir, + { + '' : Item(status=' M', wc_rev=7), + 'B' : Item(status=' ', wc_rev='7'), + 'B/mu' : Item(status=' ', wc_rev='7'), + 'B/D' : Item(status=' ', wc_rev='7'), + 'B/D/H' : Item(status=' ', wc_rev='7'), + 'B/D/H/psi' : Item(status=' ', wc_rev='7'), + 
'B/D/H/omega' : Item(status=' ', wc_rev='7'), + 'B/D/H/zeta' : Item(status='MM', wc_rev='7'), + 'B/D/H/chi' : Item(status=' ', wc_rev='7'), + 'B/D/gamma' : Item(status=' ', wc_rev='9'), + 'B/D/G' : Item(status=' ', wc_rev='7'), + 'B/D/G/tau' : Item(status=' ', wc_rev='7'), + 'B/D/G/rho' : Item(status=' ', wc_rev='7'), + 'B/D/G/pi' : Item(status=' ', wc_rev='7'), + 'B/B' : Item(status=' ', wc_rev='7'), + 'B/B/lambda' : Item(status=' ', wc_rev='7'), + 'MKDIR' : Item(status='A ', wc_rev='0'), + 'MKDIR/MKDIR' : Item(status='A ', wc_rev='0'), + 'A' : Item(status=' ', wc_rev='7'), + 'A/B' : Item(status=' ', wc_rev='7'), + 'A/B/lambda' : Item(status=' ', wc_rev='7'), + 'A/D' : Item(status=' ', wc_rev='7'), + 'A/D/G' : Item(status=' ', wc_rev='7'), + 'A/D/G/rho' : Item(status=' ', wc_rev='7'), + 'A/D/G/pi' : Item(status=' ', wc_rev='7'), + 'A/D/G/tau' : Item(status=' ', wc_rev='7'), + 'A/D/H' : Item(status=' ', wc_rev='7'), + 'A/D/H/psi' : Item(status=' ', wc_rev='7'), + 'A/D/H/omega' : Item(status=' ', wc_rev='7'), + 'A/D/H/zeta' : Item(status=' ', wc_rev='7'), + 'A/D/H/chi' : Item(status=' ', wc_rev='7'), + 'A/D/gamma' : Item(status=' ', wc_rev='7'), + 'A/mu' : Item(status=' ', wc_rev='7'), + 'iota' : Item(status=' ', wc_rev='7'), + 'COPIED' : Item(status=' ', wc_rev='10'), + 'DELETED' : Item(status='D ', wc_rev='10'), + 'exdir_G' : Item(status='X '), + }) + run_and_verify_status_no_server(sbox.wc_dir, expected_status) + ######################################################################## # Run the tests @@ -1537,6 +1621,7 @@ test_list = [ None, changelist_upgrade_1_6, upgrade_1_7_dir_external, auto_analyze, + upgrade_1_0_with_externals, ] diff --git a/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0_with_externals.tar.bz2 b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0_with_externals.tar.bz2 Binary files differnew file mode 100644 index 0000000..c3d8da5 --- /dev/null +++ 
b/subversion/tests/cmdline/upgrade_tests_data/upgrade_1_0_with_externals.tar.bz2 diff --git a/subversion/tests/cmdline/wc_tests.py b/subversion/tests/cmdline/wc_tests.py index 75f6743..185acad 100755 --- a/subversion/tests/cmdline/wc_tests.py +++ b/subversion/tests/cmdline/wc_tests.py @@ -42,6 +42,7 @@ Issues = svntest.testcase.Issues_deco Issue = svntest.testcase.Issue_deco Wimp = svntest.testcase.Wimp_deco Item = wc.StateItem +UnorderedOutput = svntest.verify.UnorderedOutput ###################################################################### # Tests @@ -131,7 +132,7 @@ def status_with_inaccessible_wc_db(sbox): sbox.build(read_only = True) os.chmod(sbox.ospath(".svn/wc.db"), 0) svntest.actions.run_and_verify_svn( - "Status when wc.db is not accessible", None, + None, r"[^ ]+ E155016: The working copy database at '.*' is corrupt", "st", sbox.wc_dir) @@ -143,7 +144,7 @@ def status_with_corrupt_wc_db(sbox): with open(sbox.ospath(".svn/wc.db"), 'wb') as fd: fd.write('\0' * 17) svntest.actions.run_and_verify_svn( - "Status when wc.db is corrupt", None, + None, r"[^ ]+ E155016: The working copy database at '.*' is corrupt", "st", sbox.wc_dir) @@ -154,7 +155,7 @@ def status_with_zero_length_wc_db(sbox): sbox.build(read_only = True) os.close(os.open(sbox.ospath(".svn/wc.db"), os.O_RDWR | os.O_TRUNC)) svntest.actions.run_and_verify_svn( - "Status when wc.db has zero length", None, + None, r"[^ ]+ E200030:", # SVN_ERR_SQLITE_ERROR "st", sbox.wc_dir) @@ -165,7 +166,7 @@ def status_without_wc_db(sbox): sbox.build(read_only = True) os.remove(sbox.ospath(".svn/wc.db")) svntest.actions.run_and_verify_svn( - "Status when wc.db is missing", None, + None, r"[^ ]+ E155016: The working copy database at '.*' is missing", "st", sbox.wc_dir) @@ -178,7 +179,7 @@ def status_without_wc_db_and_entries(sbox): os.remove(sbox.ospath(".svn/wc.db")) os.remove(sbox.ospath(".svn/entries")) svntest.actions.run_and_verify_svn2( - "Status when wc.db and entries are missing", None, + None, r"[^ ]+ 
warning: W155007: '.*' is not a working copy", 0, "st", sbox.wc_dir) @@ -191,7 +192,7 @@ def status_with_missing_wc_db_and_maybe_valid_entries(sbox): fd.write('something\n') os.remove(sbox.ospath(".svn/wc.db")) svntest.actions.run_and_verify_svn( - "Status when wc.db is missing and .svn/entries might be valid", None, + None, r"[^ ]+ E155036:", # SVN_ERR_WC_UPGRADE_REQUIRED "st", sbox.wc_dir) @@ -202,7 +203,7 @@ def cleanup_below_wc_root(sbox): sbox.build(read_only = True) svntest.actions.lock_admin_dir(sbox.ospath(""), True) - svntest.actions.run_and_verify_svn("Cleanup below wc root", None, [], + svntest.actions.run_and_verify_svn(None, [], "cleanup", sbox.ospath("A")) @SkipUnless(svntest.main.is_posix_os) @@ -221,8 +222,144 @@ def update_through_unversioned_symlink(sbox): # Subversion 1.8.0 crashes when updating a working copy through a symlink svntest.actions.run_and_verify_update(wc_dir, expected_output, expected_disk, expected_status, - None, None, None, None, None, 1, - symlink) + [], True, symlink) + +@Issue(3549) +def cleanup_unversioned_items(sbox): + """cleanup --remove-unversioned / --remove-ignored""" + + sbox.build(read_only = True) + wc_dir = sbox.wc_dir + + # create some unversioned items + os.mkdir(sbox.ospath('dir1')) + os.mkdir(sbox.ospath('dir2')) + contents = "This is an unversioned file\n." + svntest.main.file_write(sbox.ospath('dir1/dir1_child1'), contents) + svntest.main.file_write(sbox.ospath('dir2/dir2_child1'), contents) + os.mkdir(sbox.ospath('dir2/foo_child2')) + svntest.main.file_write(sbox.ospath('file_foo'), contents), + os.mkdir(sbox.ospath('dir_foo')) + svntest.main.file_write(sbox.ospath('dir_foo/foo_child1'), contents) + os.mkdir(sbox.ospath('dir_foo/foo_child2')) + # a file that matches a default ignore pattern + svntest.main.file_write(sbox.ospath('foo.o'), contents) + + # ignore some of the unversioned items + sbox.simple_propset('svn:ignore', '*_foo', '.') + + os.chdir(wc_dir) + + expected_output = [ + ' M .\n', + '? 
dir1\n', + '? dir2\n', + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], 'status') + expected_output += [ + 'I dir_foo\n', + 'I file_foo\n', + 'I foo.o\n', + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], 'status', '--no-ignore') + + expected_output = [ + 'D dir1\n', + 'D dir2\n', + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], 'cleanup', '--remove-unversioned') + expected_output = [ + ' M .\n', + 'I dir_foo\n', + 'I file_foo\n', + 'I foo.o\n', + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], 'status', '--no-ignore') + + # remove ignored items, with an empty global-ignores list + expected_output = [ + 'D dir_foo\n', + 'D file_foo\n', + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], 'cleanup', '--remove-ignored', + '--config-option', + 'config:miscellany:global-ignores=') + + # the file matching global-ignores should still be present + expected_output = [ + ' M .\n', + 'I foo.o\n', + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], 'status', '--no-ignore') + + # un-ignore the file matching global ignores, making it unversioned, + # and remove it with --remove-unversioned + expected_output = [ + 'D foo.o\n', + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], 'cleanup', '--remove-unversioned', + '--config-option', + 'config:miscellany:global-ignores=') + expected_output = [ + ' M .\n', + ] + svntest.actions.run_and_verify_svn(expected_output, + [], 'status', '--no-ignore') + +def cleanup_unversioned_items_in_locked_wc(sbox): + """cleanup unversioned items in locked WC should fail""" + + sbox.build(read_only = True) + + contents = "This is an unversioned file\n." 
+ svntest.main.file_write(sbox.ospath('unversioned_file'), contents) + + svntest.actions.lock_admin_dir(sbox.ospath(""), True) + for option in ['--remove-unversioned', '--remove-ignored']: + svntest.actions.run_and_verify_svn(None, + "svn: E155004: Working copy locked;.*", + "cleanup", option, + sbox.ospath("")) + +def cleanup_dir_external(sbox): + """cleanup --include-externals""" + + sbox.build(read_only = True) + + # configure a directory external + sbox.simple_propset("svn:externals", "^/A A_ext", ".") + sbox.simple_update() + + svntest.actions.lock_admin_dir(sbox.ospath("A_ext"), True) + svntest.actions.run_and_verify_svn(["Performing cleanup on external " + + "item at '%s'.\n" % sbox.ospath("A_ext")], + [], "cleanup", '--include-externals', + sbox.ospath("")) + +@Issue(4390) +def checkout_within_locked_wc(sbox): + """checkout within a locked working copy""" + + sbox.build(read_only = True) + + # lock working copy and create outstanding work queue items + svntest.actions.lock_admin_dir(sbox.ospath(""), True, True) + expected_output = [ + "A %s\n" % sbox.ospath("nested-wc/alpha"), + "A %s\n" % sbox.ospath("nested-wc/beta"), + "Checked out revision 1.\n" + ] + svntest.actions.run_and_verify_svn(UnorderedOutput(expected_output), + [], "checkout", sbox.repo_url + '/A/B/E', + sbox.ospath("nested-wc")) + ######################################################################## # Run the tests @@ -243,6 +380,10 @@ test_list = [ None, status_with_missing_wc_db_and_maybe_valid_entries, cleanup_below_wc_root, update_through_unversioned_symlink, + cleanup_unversioned_items, + cleanup_unversioned_items_in_locked_wc, + cleanup_dir_external, + checkout_within_locked_wc, ] if __name__ == '__main__': diff --git a/subversion/tests/diacritical.txt b/subversion/tests/diacritical.txt new file mode 100644 index 0000000..8e6fb0e --- /dev/null +++ b/subversion/tests/diacritical.txt @@ -0,0 +1,41 @@ +-*- coding: utf-8 -*- + +This is the source of the test data used by the normalized 
unicode +string comparison tests. + + +Whole word: Ṩůḇṽá¸È‘šḯá»á¹‹ + +Individual letters: + +char name NFC UCS-4 NFC UTF-8 NFD UCS-4 NFD UTF-8 + +Ṩ S with dot above and below \u1E68 \xe1\xb9\xa8 S\u0323\u0307 S\xcc\xa3\xcc\x87 +ů u with ring \u016F \xc5\xaf u\u030A u\xcc\x8a +ḇ b with macron below \u1E07 \xe1\xb8\x87 b\u0331 b\xcc\xb1 +á¹½ v with tilde \u1E7D \xe1\xb9\xbd v\u0303 v\xcc\x83 +Ḡe with breve and cedilla \u1E1D \xe1\xb8\x9d e\u0327\u0306 e\xcc\xa7\xcc\x86 +È‘ r with double grave \u0211 \xc8\x91 r\u030F r\xcc\x8f +Å¡ s with caron \u0161 \xc5\xa1 s\u030C s\xcc\x8c +ḯ i with diaeresis and acute \u1E2F \xe1\xb8\xaf i\u0308\u0301 i\xcc\x88\xcc\x81 +á» o with grave and hook \u1EDD \xe1\xbb\x9d o\u031B\u0300 o\xcc\x9b\xcc\x80 +ṋ n with circumflex below \u1E4B \xe1\xb9\x8b n\u032D n\xcc\xad + +Combining diacriticals: + +char name UCS-4 UTF-8 + + ̇ dot \u0307 \xcc\x87 + Ì£ dot below \u0323 \xcc\xa3 + ÌŠ ring \u030A \xcc\x8a + ̱ macron below \u0331 \xcc\xb1 + ̃ tilde \u0303 \xcc\x83 + ̆ breve \u0306 \xcc\x86 + ̧ cedilla \u0327 \xcc\xa7 + Ì double grave \u030F \xcc\x8f + ÌŒ caron \u030C \xcc\x8c + ̈ diaeresis \u0308 \xcc\x88 + Ì acute \u0301 \xcc\x81 + Ì€ grave \u0300 \xcc\x80 + Ì› horn \u031B \xcc\x9b + Ì circumflex below \u032D \xcc\xad diff --git a/subversion/tests/libsvn_client/client-test.c b/subversion/tests/libsvn_client/client-test.c index 9fad3bb..4e2a6d8 100644 --- a/subversion/tests/libsvn_client/client-test.c +++ b/subversion/tests/libsvn_client/client-test.c @@ -31,9 +31,12 @@ #include "../../libsvn_client/client.h" #include "svn_pools.h" #include "svn_client.h" +#include "private/svn_client_mtcc.h" #include "svn_repos.h" #include "svn_subst.h" #include "private/svn_wc_private.h" +#include "svn_props.h" +#include "svn_hash.h" #include "../svn_test.h" #include "../svn_test_fs.h" @@ -57,7 +60,8 @@ create_greek_repos(const char **repos_url, svn_fs_root_t *txn_root; /* Create a filesytem and repository. 
*/ - SVN_ERR(svn_test__create_repos(&repos, name, opts, pool)); + SVN_ERR(svn_test__create_repos( + &repos, svn_test_data_path(name, pool), opts, pool)); /* Prepare and commit a txn containing the Greek tree. */ SVN_ERR(svn_fs_begin_txn2(&txn, svn_repos_fs(repos), 0 /* rev */, @@ -67,7 +71,8 @@ create_greek_repos(const char **repos_url, SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &committed_rev, txn, pool)); SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(committed_rev)); - SVN_ERR(svn_uri_get_file_url_from_dirent(repos_url, name, pool)); + SVN_ERR(svn_uri_get_file_url_from_dirent( + repos_url, svn_test_data_path(name, pool), pool)); return SVN_NO_ERROR; } @@ -331,7 +336,6 @@ test_patch(const svn_test_opts_t *opts, { const char *repos_url; const char *wc_path; - const char *cwd; svn_opt_revision_t rev; svn_opt_revision_t peg_rev; svn_client_ctx_t *ctx; @@ -370,12 +374,11 @@ test_patch(const svn_test_opts_t *opts, SVN_ERR(create_greek_repos(&repos_url, "test-patch-repos", opts, pool)); /* Check out the HEAD revision */ - SVN_ERR(svn_dirent_get_absolute(&cwd, "", pool)); /* Put wc inside an unversioned directory. Checking out a 1.7 wc directly inside a 1.6 wc doesn't work reliably, an intervening unversioned directory prevents the problems. */ - wc_path = svn_dirent_join(cwd, "test-patch", pool); + wc_path = svn_test_data_path("test-patch", pool); SVN_ERR(svn_io_make_dir_recursively(wc_path, pool)); svn_test_add_dir_cleanup(wc_path); @@ -389,8 +392,9 @@ test_patch(const svn_test_opts_t *opts, TRUE, FALSE, ctx, pool)); /* Create the patch file. 
*/ - patch_file_path = svn_dirent_join_many(pool, cwd, - "test-patch", "test-patch.diff", NULL); + patch_file_path = svn_dirent_join_many( + pool, svn_test_data_path("test-patch", pool), + "test-patch.diff", SVN_VA_NULL); SVN_ERR(svn_io_file_open(&patch_file, patch_file_path, (APR_READ | APR_WRITE | APR_CREATE | APR_TRUNCATE), APR_OS_DEFAULT, pool)); @@ -400,7 +404,7 @@ test_patch(const svn_test_opts_t *opts, SVN_ERR(svn_io_file_write(patch_file, unidiff_patch[i], &len, pool)); SVN_TEST_ASSERT(len == strlen(unidiff_patch[i])); } - SVN_ERR(svn_io_file_flush_to_disk(patch_file, pool)); + SVN_ERR(svn_io_file_flush(patch_file, pool)); /* Apply the patch. */ pcb.patched_tempfiles = apr_hash_make(pool); @@ -445,7 +449,7 @@ test_wc_add_scenarios(const svn_test_opts_t *opts, SVN_ERR(create_greek_repos(&repos_url, "test-wc-add-repos", opts, pool)); committed_rev = 1; - SVN_ERR(svn_dirent_get_absolute(&wc_path, "test-wc-add", pool)); + wc_path = svn_test_data_path("test-wc-add", pool); /* Remove old test data from the previous run */ SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool)); @@ -598,7 +602,7 @@ test_16k_add(const svn_test_opts_t *opts, svn_opt_revision_t rev; svn_client_ctx_t *ctx; const char *repos_url; - const char *cwd, *wc_path; + const char *wc_path; svn_opt_revision_t peg_rev; apr_array_header_t *targets; apr_pool_t *iterpool = svn_pool_create(pool); @@ -608,12 +612,11 @@ test_16k_add(const svn_test_opts_t *opts, SVN_ERR(create_greek_repos(&repos_url, "test-16k-repos", opts, pool)); /* Check out the HEAD revision */ - SVN_ERR(svn_dirent_get_absolute(&cwd, "", pool)); /* Put wc inside an unversioned directory. Checking out a 1.7 wc directly inside a 1.6 wc doesn't work reliably, an intervening unversioned directory prevents the problems. 
*/ - wc_path = svn_dirent_join(cwd, "test-16k", pool); + wc_path = svn_test_data_path("test-16k", pool); SVN_ERR(svn_io_make_dir_recursively(wc_path, pool)); svn_test_add_dir_cleanup(wc_path); @@ -735,7 +738,7 @@ test_foreign_repos_copy(const svn_test_opts_t *opts, SVN_ERR(create_greek_repos(&repos_url, "foreign-copy1", opts, pool)); SVN_ERR(create_greek_repos(&repos2_url, "foreign-copy2", opts, pool)); - SVN_ERR(svn_dirent_get_absolute(&wc_path, "test-wc-add", pool)); + wc_path = svn_test_data_path("test-foreign-repos-copy", pool); wc_path = svn_dirent_join(wc_path, "foreign-wc", pool); @@ -769,22 +772,671 @@ test_foreign_repos_copy(const svn_test_opts_t *opts, return SVN_NO_ERROR; } +static svn_error_t * +test_suggest_mergesources(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + const char *repos_url; + svn_client_ctx_t *ctx; + svn_client__mtcc_t *mtcc; + apr_array_header_t *results; + svn_opt_revision_t peg_rev; + svn_opt_revision_t head_rev; + const char *wc_path; + + peg_rev.kind = svn_opt_revision_unspecified; + + /* Create a filesytem and repository containing the Greek tree. 
*/ + SVN_ERR(create_greek_repos(&repos_url, "mergesources", opts, pool)); + + SVN_ERR(svn_client_create_context(&ctx, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, -1, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_copy("A", 1, "AA", mtcc, pool)); + SVN_ERR(svn_client__mtcc_commit(NULL, NULL, NULL, mtcc, pool)); + + SVN_ERR(svn_client_suggest_merge_sources( + &results, + svn_path_url_add_component2(repos_url, "AA", pool), + &peg_rev, ctx, pool)); + SVN_TEST_ASSERT(results != NULL); + SVN_TEST_ASSERT(results->nelts >= 1); + SVN_TEST_STRING_ASSERT(APR_ARRAY_IDX(results, 0, const char *), + svn_path_url_add_component2(repos_url, "A", pool)); + + /* And now test the same thing with a minimal working copy */ + wc_path = svn_test_data_path("mergesources-wc", pool); + svn_test_add_dir_cleanup(wc_path); + SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool)); + + head_rev.kind = svn_opt_revision_head; + SVN_ERR(svn_client_checkout3(NULL, + svn_path_url_add_component2(repos_url, "AA", pool), + wc_path, + &head_rev, &head_rev, svn_depth_empty, + FALSE, FALSE, ctx, pool)); + + + SVN_ERR(svn_client_suggest_merge_sources(&results, + wc_path, + &peg_rev, ctx, pool)); + SVN_TEST_ASSERT(results != NULL); + SVN_TEST_ASSERT(results->nelts >= 1); + SVN_TEST_STRING_ASSERT(APR_ARRAY_IDX(results, 0, const char *), + svn_path_url_add_component2(repos_url, "A", pool)); + + return SVN_NO_ERROR; +} + + +static char +status_to_char(enum svn_wc_status_kind status) +{ + + switch (status) + { + case svn_wc_status_none: return '.'; + case svn_wc_status_unversioned: return '?'; + case svn_wc_status_normal: return '-'; + case svn_wc_status_added: return 'A'; + case svn_wc_status_missing: return '!'; + case svn_wc_status_incomplete: return ':'; + case svn_wc_status_deleted: return 'D'; + case svn_wc_status_replaced: return 'R'; + case svn_wc_status_modified: return 'M'; + case svn_wc_status_merged: return 'G'; + case svn_wc_status_conflicted: return 'C'; + case 
svn_wc_status_obstructed: return '~'; + case svn_wc_status_ignored: return 'I'; + case svn_wc_status_external: return 'X'; + default: return '*'; + } +} + +static int +compare_status_paths(const void *a, const void *b) +{ + const svn_client_status_t *const *const sta = a; + const svn_client_status_t *const *const stb = b; + return svn_path_compare_paths((*sta)->local_abspath, (*stb)->local_abspath); +} + +static svn_error_t * +remote_only_status_receiver(void *baton, const char *path, + const svn_client_status_t *status, + apr_pool_t *scratch_pool) +{ + apr_array_header_t *results = baton; + APR_ARRAY_PUSH(results, const svn_client_status_t *) = + svn_client_status_dup(status, results->pool); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_remote_only_status(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + static const struct remote_only_status_result + { + const char *relpath; + svn_revnum_t revision; + enum svn_wc_status_kind node_status; + enum svn_wc_status_kind text_status; + enum svn_wc_status_kind prop_status; + svn_revnum_t ood_changed_rev; + enum svn_wc_status_kind repos_node_status; + enum svn_wc_status_kind repos_text_status; + enum svn_wc_status_kind repos_prop_status; + } expected[] = { + { ".", + +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none, + +2, svn_wc_status_modified, svn_wc_status_modified, svn_wc_status_none }, + { "B", + +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none, + +2, svn_wc_status_none, svn_wc_status_none, svn_wc_status_none }, + { "C", + +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none, + +2, svn_wc_status_deleted, svn_wc_status_none, svn_wc_status_none }, + { "D", + +1, svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none, + +2, svn_wc_status_none, svn_wc_status_none, svn_wc_status_none }, + { "epsilon", + -1, svn_wc_status_none, svn_wc_status_none, svn_wc_status_none, + +2, svn_wc_status_added, svn_wc_status_modified, svn_wc_status_none }, + { "mu", + +1, 
svn_wc_status_normal, svn_wc_status_normal, svn_wc_status_none, + +2, svn_wc_status_modified, svn_wc_status_normal, svn_wc_status_none }, + + { NULL } + }; + + const char *repos_url; + const char *wc_path; + const char *local_path; + apr_file_t *local_file; + svn_client_ctx_t *ctx; + svn_client__mtcc_t *mtcc; + svn_opt_revision_t rev; + svn_revnum_t result_rev; + svn_string_t *contents = svn_string_create("modified\n", pool); + svn_stream_t *contentstream = svn_stream_from_string(contents, pool); + const struct remote_only_status_result *ex; + svn_stream_mark_t *start; + apr_array_header_t *targets; + apr_array_header_t *results; + int i; + + SVN_ERR(svn_stream_mark(contentstream, &start, pool)); + + /* Create a filesytem and repository containing the Greek tree. */ + SVN_ERR(create_greek_repos(&repos_url, "test-remote-only-status", opts, pool)); + + SVN_ERR(svn_client_create_context(&ctx, pool)); + + /* Make some modifications in the repository, creating revision 2. */ + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, -1, ctx, pool, pool)); + SVN_ERR(svn_stream_seek(contentstream, start)); + SVN_ERR(svn_client__mtcc_add_add_file("A/epsilon", contentstream, NULL, + mtcc, pool)); + SVN_ERR(svn_stream_seek(contentstream, start)); + SVN_ERR(svn_client__mtcc_add_update_file("A/mu", + contentstream, NULL, NULL, NULL, + mtcc, pool)); + SVN_ERR(svn_stream_seek(contentstream, start)); + SVN_ERR(svn_client__mtcc_add_add_file("A/D/epsilon", contentstream, NULL, + mtcc, pool)); + SVN_ERR(svn_stream_seek(contentstream, start)); + SVN_ERR(svn_client__mtcc_add_update_file("A/B/lambda", + contentstream, NULL, NULL, NULL, + mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_delete("A/C", mtcc, pool)); + SVN_ERR(svn_client__mtcc_commit(NULL, NULL, NULL, mtcc, pool)); + + /* Check out a sparse root @r1 of the repository */ + wc_path = svn_test_data_path("test-remote-only-status-wc", pool); + /*svn_test_add_dir_cleanup(wc_path);*/ + SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, 
pool)); + + rev.kind = svn_opt_revision_number; + rev.value.number = 1; + SVN_ERR(svn_client_checkout3(NULL, + apr_pstrcat(pool, repos_url, "/A", SVN_VA_NULL), + wc_path, &rev, &rev, svn_depth_immediates, + FALSE, FALSE, ctx, pool)); + + /* Add a local file; this is a double-check to make sure that + remote-only status ignores local changes. */ + local_path = svn_dirent_join(wc_path, "zeta", pool); + SVN_ERR(svn_io_file_create_empty(local_path, pool)); + SVN_ERR(svn_client_add5(local_path, svn_depth_unknown, + FALSE, FALSE, FALSE, FALSE, + ctx, pool)); + + /* Replace a local dir */ + local_path = svn_dirent_join(wc_path, "B", pool); + targets = apr_array_make(pool, 1, sizeof(const char*)); + APR_ARRAY_PUSH(targets, const char*) = local_path; + SVN_ERR(svn_client_delete4(targets, FALSE, FALSE, NULL, NULL, NULL, + ctx, pool)); + SVN_ERR(svn_client_mkdir4(targets, FALSE, NULL, NULL, NULL, + ctx, pool)); + + /* Modify a local dir's props */ + local_path = svn_dirent_join(wc_path, "D", pool); + targets = apr_array_make(pool, 1, sizeof(const char*)); + APR_ARRAY_PUSH(targets, const char*) = local_path; + SVN_ERR(svn_client_propset_local("prop", contents, targets, + svn_depth_empty, FALSE, NULL, + ctx, pool)); + + /* Modify a local file's contents */ + local_path = svn_dirent_join(wc_path, "mu", pool); + SVN_ERR(svn_io_file_open(&local_file, local_path, + APR_FOPEN_WRITE | APR_FOPEN_TRUNCATE, + 0, pool)); + SVN_ERR(svn_io_file_write_full(local_file, + contents->data, contents->len, + NULL, pool)); + SVN_ERR(svn_io_file_close(local_file, pool)); + + /* Run the remote-only status. 
*/ + results = apr_array_make(pool, 3, sizeof(const svn_client_status_t *)); + rev.kind = svn_opt_revision_head; + SVN_ERR(svn_client_status6( + &result_rev, ctx, wc_path, &rev, svn_depth_unknown, + TRUE, TRUE, FALSE, FALSE, FALSE, FALSE, NULL, + remote_only_status_receiver, results, pool)); + + SVN_TEST_ASSERT(result_rev == 2); + + /* Compare the number of results with the expected results */ + for (i = 0, ex = expected; ex->relpath; ++ex, ++i) + ; + SVN_TEST_ASSERT(results->nelts == i); + + if (opts->verbose) + qsort(results->elts, results->nelts, results->elt_size, + compare_status_paths); + + for (i = 0; i < results->nelts; ++i) + { + const svn_client_status_t *st = + APR_ARRAY_IDX(results, i, const svn_client_status_t *); + + const char *relpath = + svn_dirent_skip_ancestor(wc_path, st->local_abspath); + if (!relpath) + relpath = st->local_abspath; + if (!*relpath) + relpath = "."; + + for (ex = expected; ex->relpath; ++ex) + { + if (0 == strcmp(relpath, ex->relpath)) + break; + } + SVN_TEST_ASSERT(ex->relpath != NULL); + + if (opts->verbose) + printf("%c%c%c %2ld %c%c%c %2ld %s\n", + status_to_char(st->node_status), + status_to_char(st->text_status), + status_to_char(st->prop_status), + (long)st->revision, + status_to_char(st->repos_node_status), + status_to_char(st->repos_text_status), + status_to_char(st->repos_prop_status), + (long)st->ood_changed_rev, + relpath); + + SVN_TEST_ASSERT(st->revision == ex->revision); + SVN_TEST_ASSERT(st->ood_changed_rev == ex->ood_changed_rev); + SVN_TEST_ASSERT(st->node_status == ex->node_status); + SVN_TEST_ASSERT(st->repos_node_status == ex->repos_node_status); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_copy_pin_externals(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_opt_revision_t rev; + svn_opt_revision_t peg_rev; + const char *repos_url; + const char *A_url; + const char *A_copy_url; + const char *wc_path; + svn_client_ctx_t *ctx; + const svn_string_t *propval; + apr_hash_t 
*externals_to_pin; + apr_array_header_t *external_items; + apr_array_header_t *copy_sources; + svn_wc_external_item2_t items[6]; + svn_client_copy_source_t copy_source; + apr_hash_t *props; + apr_array_header_t *pinned_externals_descs; + apr_array_header_t *pinned_externals; + int i; + int num_tested_externals; + svn_stringbuf_t *externals_test_prop; + struct pin_externals_test_data { + const char *src_external_desc; + const char *expected_dst_external_desc; + } pin_externals_test_data[] = { + { "^/A/D/gamma B/gamma", "^/A/D/gamma@2 B/gamma" }, + { "-r1 ^/A/D/G C/exdir_G", "-r1 ^/A/D/G C/exdir_G" }, + { "^/A/D/H@1 C/exdir_H", "^/A/D/H@1 C/exdir_H" }, + { "^/A/D/H C/exdir_H2", "^/A/D/H@2 C/exdir_H2" }, + { "-r1 ^/A/B D/z/y/z/blah", "-r1 ^/A/B@2 D/z/y/z/blah" } , + { "-r1 ^/A/D@2 exdir_D", "-r1 ^/A/D@2 exdir_D" }, + /* Dated revision should retain their date string exactly. */ + { "-r{1970-01-01T00:00} ^/A/C 70s", "-r{1970-01-01T00:00} ^/A/C@2 70s"}, + { "-r{2004-02-23} ^/svn 1.0", "-r{2004-02-23} ^/svn 1.0"}, + { NULL }, + }; + + /* Create a filesytem and repository containing the Greek tree. 
*/ + SVN_ERR(create_greek_repos(&repos_url, "pin-externals", opts, pool)); + + wc_path = svn_test_data_path("pin-externals-working-copy", pool); + + /* Remove old test data from the previous run */ + SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool)); + + SVN_ERR(svn_io_make_dir_recursively(wc_path, pool)); + svn_test_add_dir_cleanup(wc_path); + + rev.kind = svn_opt_revision_head; + peg_rev.kind = svn_opt_revision_unspecified; + SVN_ERR(svn_client_create_context(&ctx, pool)); + + /* Configure some externals on ^/A */ + i = 0; + externals_test_prop = svn_stringbuf_create_empty(pool); + while (pin_externals_test_data[i].src_external_desc) + { + svn_stringbuf_appendcstr(externals_test_prop, + pin_externals_test_data[i].src_external_desc); + svn_stringbuf_appendbyte(externals_test_prop, '\n'); + i++; + } + propval = svn_string_create_from_buf(externals_test_prop, pool); + A_url = apr_pstrcat(pool, repos_url, "/A", SVN_VA_NULL); + SVN_ERR(svn_client_propset_remote(SVN_PROP_EXTERNALS, propval, + A_url, TRUE, 1, NULL, + NULL, NULL, ctx, pool)); + + /* Set up parameters for pinning some externals. */ + externals_to_pin = apr_hash_make(pool); + + items[0].url = "^/A/D/gamma"; + items[0].target_dir = "B/gamma"; + items[1].url = "^/A/B"; + items[1].target_dir = "D/z/y/z/blah"; + items[2].url = "^/A/D/H"; + items[2].target_dir = "C/exdir_H2"; + items[3].url= "^/A/D"; + items[3].target_dir= "exdir_D"; + items[4].url = "^/A/C"; + items[4].target_dir = "70s"; + /* Also add an entry which doesn't match any actual definition. */ + items[5].url = "^/this/does/not/exist"; + items[5].target_dir = "in/test/data"; + + external_items = apr_array_make(pool, 2, sizeof(svn_wc_external_item2_t *)); + for (i = 0; i < sizeof(items) / sizeof(items[0]); i++) + APR_ARRAY_PUSH(external_items, svn_wc_external_item2_t *) = &items[i]; + svn_hash_sets(externals_to_pin, A_url, external_items); + + /* Copy ^/A to ^/A_copy, pinning two non-pinned externals. 
*/ + copy_source.path = A_url; + copy_source.revision = &rev; + copy_source.peg_revision = &peg_rev; + copy_sources = apr_array_make(pool, 1, sizeof(svn_client_copy_source_t *)); + APR_ARRAY_PUSH(copy_sources, svn_client_copy_source_t *) = &copy_source; + A_copy_url = apr_pstrcat(pool, repos_url, "/A_copy", SVN_VA_NULL); + SVN_ERR(svn_client_copy7(copy_sources, A_copy_url, FALSE, FALSE, + FALSE, FALSE, TRUE, externals_to_pin, + NULL, NULL, NULL, ctx, pool)); + + /* Verify that externals were pinned as expected. */ + SVN_ERR(svn_client_propget5(&props, NULL, SVN_PROP_EXTERNALS, + A_copy_url, &peg_rev, &rev, NULL, + svn_depth_empty, NULL, ctx, pool, pool)); + propval = svn_hash_gets(props, A_copy_url); + SVN_TEST_ASSERT(propval); + + /* Test the unparsed representation of copied externals descriptions. */ + pinned_externals_descs = svn_cstring_split(propval->data, "\n", FALSE, pool); + for (i = 0; i < pinned_externals_descs->nelts; i++) + { + const char *externals_desc; + const char *expected_desc; + + externals_desc = APR_ARRAY_IDX(pinned_externals_descs, i, const char *); + expected_desc = pin_externals_test_data[i].expected_dst_external_desc; + SVN_TEST_STRING_ASSERT(externals_desc, expected_desc); + } + /* Ensure all test cases were tested. */ + SVN_TEST_ASSERT(i == (sizeof(pin_externals_test_data) / + sizeof(pin_externals_test_data[0]) - 1)); + + SVN_ERR(svn_wc_parse_externals_description3(&pinned_externals, A_copy_url, + propval->data, TRUE, pool)); + + /* For completeness, test the parsed representation, too */ + num_tested_externals = 0; + for (i = 0; i < pinned_externals->nelts; i++) + { + svn_wc_external_item2_t *item; + + item = APR_ARRAY_IDX(pinned_externals, i, svn_wc_external_item2_t *); + if (strcmp(item->url, "^/A/D/gamma") == 0) + { + SVN_TEST_STRING_ASSERT(item->target_dir, "B/gamma"); + /* Pinned to r2. 
*/ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->revision.value.number == 2); + SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->peg_revision.value.number == 2); + num_tested_externals++; + } + else if (strcmp(item->url, "^/A/D/G") == 0) + { + SVN_TEST_STRING_ASSERT(item->target_dir, "C/exdir_G"); + /* Not pinned. */ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->revision.value.number == 1); + SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_head); + num_tested_externals++; + } + else if (strcmp(item->url, "^/A/D/H") == 0) + { + if (strcmp(item->target_dir, "C/exdir_H") == 0) + { + /* Was already pinned to r1. */ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->revision.value.number == 1); + SVN_TEST_ASSERT(item->peg_revision.kind == + svn_opt_revision_number); + SVN_TEST_ASSERT(item->peg_revision.value.number == 1); + num_tested_externals++; + } + else if (strcmp(item->target_dir, "C/exdir_H2") == 0) + { + /* Pinned to r2. */ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->revision.value.number == 2); + SVN_TEST_ASSERT(item->peg_revision.kind == + svn_opt_revision_number); + SVN_TEST_ASSERT(item->peg_revision.value.number == 2); + num_tested_externals++; + } + else + SVN_TEST_ASSERT(FALSE); /* unknown external */ + } + else if (strcmp(item->url, "^/A/B") == 0) + { + SVN_TEST_STRING_ASSERT(item->target_dir, "D/z/y/z/blah"); + /* Pinned to r2. */ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->revision.value.number == 1); + SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->peg_revision.value.number == 2); + num_tested_externals++; + } + else if (strcmp(item->url, "^/A/D") == 0) + { + SVN_TEST_STRING_ASSERT(item->target_dir, "exdir_D"); + /* Pinned to r2. 
*/ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->revision.value.number == 1); + SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->peg_revision.value.number == 2); + num_tested_externals++; + } + else if (strcmp(item->url, "^/A/C") == 0) + { + SVN_TEST_STRING_ASSERT(item->target_dir, "70s"); + /* Pinned to r2. */ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_date); + /* Don't bother testing the exact date value here. */ + SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_number); + SVN_TEST_ASSERT(item->peg_revision.value.number == 2); + num_tested_externals++; + } + else if (strcmp(item->url, "^/svn") == 0) + { + SVN_TEST_STRING_ASSERT(item->target_dir, "1.0"); + /* Was and not in externals_to_pin, operative revision was a date. */ + SVN_TEST_ASSERT(item->revision.kind == svn_opt_revision_date); + /* Don't bother testing the exact date value here. */ + SVN_TEST_ASSERT(item->peg_revision.kind == svn_opt_revision_head); + num_tested_externals++; + } + else + SVN_TEST_ASSERT(FALSE); /* unknown URL */ + } + + /* Ensure all test cases were tested. 
*/ + SVN_TEST_ASSERT(num_tested_externals == (sizeof(pin_externals_test_data) / + sizeof(pin_externals_test_data[0]) + - 1)); + + return SVN_NO_ERROR; +} + +/* issue #4560 */ +static svn_error_t * +test_copy_pin_externals_select_subtree(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_opt_revision_t rev; + svn_opt_revision_t peg_rev; + const char *repos_url; + const char *A_copy_url; + const char *B_url; + const char *wc_path; + svn_client_ctx_t *ctx; + apr_hash_t *externals_to_pin; + apr_array_header_t *external_items; + apr_array_header_t *copy_sources; + svn_wc_external_item2_t item; + svn_client_copy_source_t copy_source; + apr_hash_t *props; + int i; + struct test_data { + const char *subtree_relpath; + const char *src_external_desc; + const char *expected_dst_external_desc; + } test_data[] = { + /* Note: these externals definitions contain extra whitespace on + purpose, to test that the pinning logic doesn't make + whitespace-only changes to values that aren't pinned. */ + + /* External on A/B will be pinned. */ + { "B", "^/A/D/gamma gamma-ext", "^/A/D/gamma@3 gamma-ext" }, + + /* External on A/D won't be pinned. */ + { "D", "^/A/B/F F-ext", "^/A/B/F F-ext" } , + + { NULL }, + }; + + /* Create a filesytem and repository containing the Greek tree. */ + SVN_ERR(create_greek_repos(&repos_url, "pin-externals-select-subtree", + opts, pool)); + + wc_path = svn_test_data_path("pin-externals-select-subtree-wc", pool); + + /* Remove old test data from the previous run */ + SVN_ERR(svn_io_remove_dir2(wc_path, TRUE, NULL, NULL, pool)); + + SVN_ERR(svn_io_make_dir_recursively(wc_path, pool)); + svn_test_add_dir_cleanup(wc_path); + + rev.kind = svn_opt_revision_head; + peg_rev.kind = svn_opt_revision_unspecified; + SVN_ERR(svn_client_create_context(&ctx, pool)); + + /* Configure externals. 
*/ + i = 0; + while (test_data[i].subtree_relpath) + { + const char *subtree_relpath; + const char *url; + const svn_string_t *propval; + + subtree_relpath = test_data[i].subtree_relpath; + propval = svn_string_create(test_data[i].src_external_desc, pool); + + url = apr_pstrcat(pool, repos_url, "/A/", subtree_relpath, SVN_VA_NULL); + SVN_ERR(svn_client_propset_remote(SVN_PROP_EXTERNALS, propval, + url, TRUE, 1, NULL, + NULL, NULL, ctx, pool)); + i++; + } + + /* Set up parameters for pinning externals on A/B. */ + externals_to_pin = apr_hash_make(pool); + + item.url = "^/A/D/gamma"; + item.target_dir = "gamma-ext"; + + external_items = apr_array_make(pool, 2, sizeof(svn_wc_external_item2_t *)); + APR_ARRAY_PUSH(external_items, svn_wc_external_item2_t *) = &item; + B_url = apr_pstrcat(pool, repos_url, "/A/B", SVN_VA_NULL); + svn_hash_sets(externals_to_pin, B_url, external_items); + + /* Copy ^/A to ^/A_copy, pinning externals on ^/A/B. */ + copy_source.path = apr_pstrcat(pool, repos_url, "/A", SVN_VA_NULL); + copy_source.revision = &rev; + copy_source.peg_revision = &peg_rev; + copy_sources = apr_array_make(pool, 1, sizeof(svn_client_copy_source_t *)); + APR_ARRAY_PUSH(copy_sources, svn_client_copy_source_t *) = &copy_source; + A_copy_url = apr_pstrcat(pool, repos_url, "/A_copy", SVN_VA_NULL); + SVN_ERR(svn_client_copy7(copy_sources, A_copy_url, FALSE, FALSE, + FALSE, FALSE, TRUE, externals_to_pin, + NULL, NULL, NULL, ctx, pool)); + + /* Verify that externals were pinned as expected. 
*/ + i = 0; + while (test_data[i].subtree_relpath) + { + const char *subtree_relpath; + const char *url; + const svn_string_t *propval; + svn_stringbuf_t *externals_desc; + const char *expected_desc; + + subtree_relpath = test_data[i].subtree_relpath; + url = apr_pstrcat(pool, A_copy_url, "/", subtree_relpath, SVN_VA_NULL); + + SVN_ERR(svn_client_propget5(&props, NULL, SVN_PROP_EXTERNALS, + url, &peg_rev, &rev, NULL, + svn_depth_empty, NULL, ctx, pool, pool)); + propval = svn_hash_gets(props, url); + SVN_TEST_ASSERT(propval); + externals_desc = svn_stringbuf_create(propval->data, pool); + svn_stringbuf_strip_whitespace(externals_desc); + expected_desc = test_data[i].expected_dst_external_desc; + SVN_TEST_STRING_ASSERT(externals_desc->data, expected_desc); + + i++; + } + + return SVN_NO_ERROR; +} + /* ========================================================================== */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 3; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_elide_mergeinfo_catalog, "test svn_client__elide_mergeinfo_catalog"), SVN_TEST_PASS2(test_args_to_target_array, "test svn_client_args_to_target_array"), - SVN_TEST_OPTS_PASS(test_patch, "test svn_client_patch"), SVN_TEST_OPTS_PASS(test_wc_add_scenarios, "test svn_wc_add3 scenarios"), + SVN_TEST_OPTS_PASS(test_foreign_repos_copy, "test foreign repository copy"), + SVN_TEST_OPTS_PASS(test_patch, "test svn_client_patch"), SVN_TEST_OPTS_PASS(test_copy_crash, "test a crash in svn_client_copy5"), #ifdef TEST16K_ADD SVN_TEST_OPTS_PASS(test_16k_add, "test adding 16k files"), #endif SVN_TEST_OPTS_PASS(test_youngest_common_ancestor, "test youngest_common_ancestor"), - SVN_TEST_OPTS_PASS(test_foreign_repos_copy, "test foreign repository copy"), + SVN_TEST_OPTS_PASS(test_suggest_mergesources, + "test svn_client_suggest_merge_sources"), + SVN_TEST_OPTS_PASS(test_remote_only_status, + "test svn_client_status6 with ignore_local_mods"), + 
SVN_TEST_OPTS_PASS(test_copy_pin_externals, + "test svn_client_copy7 with externals_to_pin"), + SVN_TEST_OPTS_PASS(test_copy_pin_externals_select_subtree, + "pin externals on selected subtrees only"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_client/mtcc-test.c b/subversion/tests/libsvn_client/mtcc-test.c new file mode 100644 index 0000000..e11738e --- /dev/null +++ b/subversion/tests/libsvn_client/mtcc-test.c @@ -0,0 +1,817 @@ +/* + * Regression tests for mtcc code in the libsvn_client library. + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + */ + +#include "svn_pools.h" +#include "svn_props.h" +#include "svn_client.h" +#include "private/svn_client_mtcc.h" + +#include "../svn_test.h" +#include "../svn_test_fs.h" + +/* Baton for verify_commit_callback*/ +struct verify_commit_baton +{ + const svn_commit_info_t *commit_info; + apr_pool_t *result_pool; +}; + +/* Commit result collector for verify_mtcc_commit */ +static svn_error_t * +verify_commit_callback(const svn_commit_info_t *commit_info, + void *baton, + apr_pool_t *pool) +{ + struct verify_commit_baton *vcb = baton; + + vcb->commit_info = svn_commit_info_dup(commit_info, vcb->result_pool); + return SVN_NO_ERROR; +} + +/* Create a stream from a c string */ +static svn_stream_t * +cstr_stream(const char *data, apr_pool_t *result_pool) +{ + return svn_stream_from_string(svn_string_create(data, result_pool), + result_pool); +} + +static svn_error_t * +verify_mtcc_commit(svn_client__mtcc_t *mtcc, + svn_revnum_t expected_rev, + apr_pool_t *pool) +{ + struct verify_commit_baton vcb; + vcb.commit_info = NULL; + vcb.result_pool = pool; + + SVN_ERR(svn_client__mtcc_commit(NULL, verify_commit_callback, &vcb, mtcc, pool)); + + SVN_TEST_ASSERT(vcb.commit_info != NULL); + SVN_TEST_ASSERT(vcb.commit_info->revision == expected_rev); + + return SVN_NO_ERROR; +} + + +/* Constructs a greek tree as revision 1 in the repository at repos_url */ +static svn_error_t * +make_greek_tree(const char *repos_url, + apr_pool_t *scratch_pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + apr_pool_t *subpool; + int i; + + subpool = svn_pool_create(scratch_pool); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, subpool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, subpool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, subpool, subpool)); + + for (i = 0; svn_test__greek_tree_nodes[i].path; i++) + { + if (svn_test__greek_tree_nodes[i].contents) + { + 
SVN_ERR(svn_client__mtcc_add_add_file( + svn_test__greek_tree_nodes[i].path, + cstr_stream( + svn_test__greek_tree_nodes[i].contents, + subpool), + NULL /* src_checksum */, + mtcc, subpool)); + } + else + { + SVN_ERR(svn_client__mtcc_add_mkdir( + svn_test__greek_tree_nodes[i].path, + mtcc, subpool)); + } + } + + SVN_ERR(verify_mtcc_commit(mtcc, 1, subpool)); + + svn_pool_clear(subpool); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_mkdir(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char *repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-mkdir", + opts, pool, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, pool, pool)); + + SVN_ERR(svn_client__mtcc_add_mkdir("branches", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("trunk", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("branches/1.x", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("tags", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("tags/1.0", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("tags/1.1", mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 1, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_mkgreek(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char *repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-mkgreek", + opts, pool, pool)); + + SVN_ERR(make_greek_tree(repos_url, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool)); + + SVN_ERR(svn_client__mtcc_add_copy("A", 1, "greek_A", mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 2, pool)); + + return SVN_NO_ERROR; +} + +static 
svn_error_t * +test_swap(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char *repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-swap", + opts, pool, pool)); + + SVN_ERR(make_greek_tree(repos_url, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool)); + + SVN_ERR(svn_client__mtcc_add_move("A/B", "B", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_move("A/D", "A/B", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_copy("A/B", 1, "A/D", mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 2, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_propset(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char *repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-propset", + opts, pool, pool)); + + SVN_ERR(make_greek_tree(repos_url, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool)); + + SVN_ERR(svn_client__mtcc_add_propset("iota", "key", + svn_string_create("val", pool), FALSE, + mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_propset("A", "A-key", + svn_string_create("val-A", pool), FALSE, + mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_propset("A/B", "B-key", + svn_string_create("val-B", pool), FALSE, + mtcc, pool)); + + /* The repository ignores propdeletes of properties that aren't there, + so this just works */ + SVN_ERR(svn_client__mtcc_add_propset("A/D", "D-key", NULL, FALSE, + mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 2, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 2, ctx, pool, pool)); + SVN_TEST_ASSERT_ERROR( + svn_client__mtcc_add_propset("A", 
SVN_PROP_MIME_TYPE, + svn_string_create("text/plain", pool), + FALSE, mtcc, pool), + SVN_ERR_ILLEGAL_TARGET); + + SVN_TEST_ASSERT_ERROR( + svn_client__mtcc_add_propset("iota", SVN_PROP_IGNORE, + svn_string_create("iota", pool), + FALSE, mtcc, pool), + SVN_ERR_ILLEGAL_TARGET); + + SVN_ERR(svn_client__mtcc_add_propset("iota", SVN_PROP_EOL_STYLE, + svn_string_create("LF", pool), + FALSE, mtcc, pool)); + + SVN_ERR(svn_client__mtcc_add_add_file("ok", cstr_stream("line\nline\n", pool), + NULL, mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_add_file("bad", cstr_stream("line\nno\r\n", pool), + NULL, mtcc, pool)); + + SVN_ERR(svn_client__mtcc_add_propset("ok", SVN_PROP_EOL_STYLE, + svn_string_create("LF", pool), + FALSE, mtcc, pool)); + + SVN_TEST_ASSERT_ERROR( + svn_client__mtcc_add_propset("bad", SVN_PROP_EOL_STYLE, + svn_string_create("LF", pool), + FALSE, mtcc, pool), + SVN_ERR_ILLEGAL_TARGET); + + SVN_ERR(verify_mtcc_commit(mtcc, 3, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_update_files(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char *repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-update-files", + opts, pool, pool)); + SVN_ERR(make_greek_tree(repos_url, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool)); + + /* Update iota with knowledge of the old data */ + SVN_ERR(svn_client__mtcc_add_update_file(svn_test__greek_tree_nodes[0].path, + cstr_stream("new-iota", pool), + NULL, + cstr_stream( + svn_test__greek_tree_nodes[0] + .contents, + pool), + NULL, + mtcc, pool)); + + SVN_ERR(svn_client__mtcc_add_update_file("A/mu", + cstr_stream("new-MU", pool), + NULL, + NULL, NULL, + mtcc, pool)); + + /* Set a property on the same node */ + SVN_ERR(svn_client__mtcc_add_propset("A/mu", "mu-key", + 
svn_string_create("mu-A", pool), FALSE, + mtcc, pool)); + /* And some other node */ + SVN_ERR(svn_client__mtcc_add_propset("A/B", "B-key", + svn_string_create("val-B", pool), FALSE, + mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 2, pool)); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_overwrite(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char *repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-overwrite", + opts, pool, pool)); + + SVN_ERR(make_greek_tree(repos_url, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool)); + + SVN_ERR(svn_client__mtcc_add_copy("A", 1, "AA", mtcc, pool)); + + SVN_TEST_ASSERT_ERROR(svn_client__mtcc_add_mkdir("AA/B", mtcc, pool), + SVN_ERR_FS_ALREADY_EXISTS); + + SVN_TEST_ASSERT_ERROR(svn_client__mtcc_add_mkdir("AA/D/H/chi", mtcc, pool), + SVN_ERR_FS_ALREADY_EXISTS); + + SVN_ERR(svn_client__mtcc_add_mkdir("AA/BB", mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 2, pool)); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_anchoring(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char *repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-anchoring", + opts, pool, pool)); + + SVN_ERR(make_greek_tree(repos_url, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + /* Update a file as root operation */ + SVN_ERR(svn_client__mtcc_create(&mtcc, + svn_path_url_add_component2(repos_url, "iota", + pool), + 1, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_update_file("", + cstr_stream("new-iota", pool), + NULL, NULL, NULL, + mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_propset("", "key", + svn_string_create("value", 
pool), + FALSE, mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 2, pool)); + + /* Add a directory as root operation */ + SVN_ERR(svn_client__mtcc_create(&mtcc, + svn_path_url_add_component2(repos_url, "BB", + pool), + 2, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("", mtcc, pool)); + SVN_ERR(verify_mtcc_commit(mtcc, 3, pool)); + + /* Add a file as root operation */ + SVN_ERR(svn_client__mtcc_create(&mtcc, + svn_path_url_add_component2(repos_url, "new", + pool), + 3, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_add_file("", cstr_stream("new", pool), NULL, + mtcc, pool)); + SVN_ERR(verify_mtcc_commit(mtcc, 4, pool)); + + /* Delete as root operation */ + SVN_ERR(svn_client__mtcc_create(&mtcc, + svn_path_url_add_component2(repos_url, "new", + pool), + 4, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_delete("", mtcc, pool)); + SVN_ERR(verify_mtcc_commit(mtcc, 5, pool)); + + /* Propset file as root operation */ + SVN_ERR(svn_client__mtcc_create(&mtcc, + svn_path_url_add_component2(repos_url, "A/mu", + pool), + 5, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_propset("", "key", + svn_string_create("val", pool), + FALSE, mtcc, pool)); + SVN_ERR(verify_mtcc_commit(mtcc, 6, pool)); + + /* Propset dir as root operation */ + SVN_ERR(svn_client__mtcc_create(&mtcc, + svn_path_url_add_component2(repos_url, "A", + pool), + 6, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_propset("", "key", + svn_string_create("val", pool), + FALSE, mtcc, pool)); + SVN_ERR(verify_mtcc_commit(mtcc, 7, pool)); + + /* Propset reposroot as root operation */ + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 7, ctx, pool, pool)); + SVN_ERR(svn_client__mtcc_add_propset("", "key", + svn_string_create("val", pool), + FALSE, mtcc, pool)); + SVN_ERR(verify_mtcc_commit(mtcc, 8, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_replace_tree(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + const char 
*repos_url; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-replace_tree", + opts, pool, pool)); + + SVN_ERR(make_greek_tree(repos_url, pool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, pool, pool)); + + SVN_ERR(svn_client__mtcc_add_delete("A", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_delete("iota", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("A", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("A/B", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("A/B/C", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("M", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("M/N", mtcc, pool)); + SVN_ERR(svn_client__mtcc_add_mkdir("M/N/O", mtcc, pool)); + + SVN_ERR(verify_mtcc_commit(mtcc, 2, pool)); + + return SVN_NO_ERROR; +} + +/* Baton for handle_rev */ +struct handle_rev_baton +{ + svn_revnum_t last; + svn_boolean_t up; + svn_boolean_t first; + + /* Per revision handler */ + svn_txdelta_window_handler_t inner_handler; + void *inner_baton; + + /* Swapped between revisions to reconstruct data */ + svn_stringbuf_t *cur; + svn_stringbuf_t *prev; + + /* Pool for some test stuff */ + apr_pool_t *pool; +}; + +/* Implement svn_txdelta_window_handler_t */ +static svn_error_t * +handle_rev_delta(svn_txdelta_window_t *window, + void * baton) +{ + struct handle_rev_baton *hrb = baton; + + SVN_ERR(hrb->inner_handler(window, hrb->inner_baton)); + + if (!window) + { + int expected_rev; + const char *expected; + + /* Some revisions don't update the revision body */ + switch (hrb->last) + { + case 5: + expected_rev = 4; + break; + case 7: /* Not reported */ + case 8: + expected_rev = 6; + break; + default: + expected_rev = (int)hrb->last; + } + + expected = apr_psprintf(hrb->pool, "revision-%d", expected_rev); + + SVN_TEST_STRING_ASSERT(hrb->cur->data, expected); + } + + return SVN_NO_ERROR; +} + +/* Helper for 
test_file_revs_both_ways */ +static svn_error_t * +handle_rev(void *baton, + const char *path, + svn_revnum_t rev, + apr_hash_t *rev_props, + svn_boolean_t result_of_merge, + svn_txdelta_window_handler_t *delta_handler, + void **delta_baton, + apr_array_header_t *prop_diffs, + apr_pool_t *pool) +{ + struct handle_rev_baton *hrb = baton; + svn_revnum_t expected_rev = hrb->up ? (hrb->last + 1) : (hrb->last - 1); + + if (expected_rev == 7) + expected_rev = hrb->up ? 8 : 6; + + SVN_TEST_ASSERT(rev == expected_rev); + SVN_TEST_ASSERT(apr_hash_count(rev_props) >= 3); + SVN_TEST_STRING_ASSERT(path, (rev < 5) ? "/iota" : "/mu"); + + if (!hrb->first + && (rev == (hrb->up ? 5 : 4) || rev == (hrb->up ? 8 : 6))) + SVN_TEST_ASSERT(delta_handler == NULL); + else + SVN_TEST_ASSERT(delta_handler != NULL); + + if (delta_handler) + { + svn_stringbuf_t *tmp; + + *delta_handler = handle_rev_delta; + *delta_baton = hrb; + + /* Swap string buffers, to use previous as original */ + tmp = hrb->prev; + hrb->prev = hrb->cur; + hrb->cur = tmp; + + svn_stringbuf_setempty(hrb->cur); + + svn_txdelta_apply(svn_stream_from_stringbuf(hrb->prev, pool), + svn_stream_from_stringbuf(hrb->cur, pool), + NULL, NULL, pool, + &hrb->inner_handler, + &hrb->inner_baton); + } + + hrb->last = rev; + hrb->first = FALSE; + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_file_revs_both_ways(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + apr_pool_t *subpool = svn_pool_create(pool); + const char *repos_url; + svn_ra_session_t *ra; + struct handle_rev_baton hrb; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-file-revs", + opts, pool, subpool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_add_file("iota", + cstr_stream("revision-1", subpool), + 
NULL /* src_checksum */, + mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 1, subpool)); + svn_pool_clear(subpool); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 1, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_update_file("iota", + cstr_stream("revision-2", subpool), + NULL /* src_checksum */, NULL, NULL, + mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 2, subpool)); + svn_pool_clear(subpool); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 2, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_update_file("iota", + cstr_stream("revision-3", subpool), + NULL /* src_checksum */, NULL, NULL, + mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 3, subpool)); + svn_pool_clear(subpool); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 3, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_update_file("iota", + cstr_stream("revision-4", subpool), + NULL /* src_checksum */, NULL, NULL, + mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 4, subpool)); + svn_pool_clear(subpool); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 4, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_move("iota", "mu", mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 5, subpool)); + svn_pool_clear(subpool); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 5, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_update_file("mu", + cstr_stream("revision-6", subpool), + NULL /* src_checksum */, NULL, NULL, + mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 6, subpool)); + svn_pool_clear(subpool); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 6, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_delete("mu", mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 7, subpool)); + svn_pool_clear(subpool); + + SVN_ERR(svn_client_open_ra_session2(&ra, repos_url, NULL, ctx, pool, subpool)); + + hrb.prev = svn_stringbuf_create("", pool); + hrb.cur = svn_stringbuf_create("", pool); + hrb.pool = pool; + + 
svn_pool_clear(subpool); + hrb.up = FALSE; + hrb.last = 5; + hrb.first = TRUE; + svn_stringbuf_setempty(hrb.prev); + svn_stringbuf_setempty(hrb.cur); + SVN_ERR(svn_ra_get_file_revs2(ra, "iota", 4, 1, FALSE, + handle_rev, &hrb, + subpool)); + SVN_TEST_ASSERT(hrb.last == 1); + + svn_pool_clear(subpool); + hrb.up = TRUE; + hrb.last = 0; + hrb.first = TRUE; + svn_stringbuf_setempty(hrb.prev); + svn_stringbuf_setempty(hrb.cur); + SVN_ERR(svn_ra_get_file_revs2(ra, "iota", 1, 4, FALSE, + handle_rev, &hrb, + subpool)); + SVN_TEST_ASSERT(hrb.last == 4); + + svn_pool_clear(subpool); + hrb.up = FALSE; + hrb.last = 7; + hrb.first = TRUE; + svn_stringbuf_setempty(hrb.prev); + svn_stringbuf_setempty(hrb.cur); + SVN_ERR(svn_ra_get_file_revs2(ra, "mu", 6, 1, FALSE, + handle_rev, &hrb, + subpool)); + SVN_TEST_ASSERT(hrb.last == 1); + + svn_pool_clear(subpool); + hrb.up = TRUE; + hrb.last = 0; + hrb.first = TRUE; + svn_stringbuf_setempty(hrb.prev); + svn_stringbuf_setempty(hrb.cur); + SVN_ERR(svn_ra_get_file_revs2(ra, "mu", 1, 6, FALSE, + handle_rev, &hrb, + subpool)); + SVN_TEST_ASSERT(hrb.last == 6); + + /* Ressurect mu */ + svn_pool_clear(subpool); + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 7, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_copy("mu", 6, "mu", mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 8, subpool)); + + svn_pool_clear(subpool); + hrb.up = TRUE; + hrb.last = 0; + hrb.first = TRUE; + svn_stringbuf_setempty(hrb.prev); + svn_stringbuf_setempty(hrb.cur); + SVN_ERR(svn_ra_get_file_revs2(ra, "mu", 1, SVN_INVALID_REVNUM, FALSE, + handle_rev, &hrb, + subpool)); + SVN_TEST_ASSERT(hrb.last == 8); + + svn_pool_clear(subpool); + hrb.up = FALSE; + hrb.last = 9; + hrb.first = TRUE; + svn_stringbuf_setempty(hrb.prev); + svn_stringbuf_setempty(hrb.cur); + SVN_ERR(svn_ra_get_file_revs2(ra, "mu", SVN_INVALID_REVNUM, 1, FALSE, + handle_rev, &hrb, + subpool)); + SVN_TEST_ASSERT(hrb.last == 1); + + return SVN_NO_ERROR; +} + +static svn_error_t * 
+test_iprops_path_format(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_client__mtcc_t *mtcc; + svn_client_ctx_t *ctx; + apr_pool_t *subpool = svn_pool_create(pool); + const char *repos_url; + svn_ra_session_t *ra; + + SVN_ERR(svn_test__create_repos2(NULL, &repos_url, NULL, "mtcc-iprops-paths", + opts, pool, subpool)); + + SVN_ERR(svn_client_create_context2(&ctx, NULL, pool)); + SVN_ERR(svn_test__init_auth_baton(&ctx->auth_baton, pool)); + + SVN_ERR(svn_client__mtcc_create(&mtcc, repos_url, 0, ctx, subpool, subpool)); + SVN_ERR(svn_client__mtcc_add_mkdir("A", mtcc, subpool)); + SVN_ERR(svn_client__mtcc_add_mkdir("A/B", mtcc, subpool)); + SVN_ERR(svn_client__mtcc_add_mkdir("A/B/C", mtcc, subpool)); + SVN_ERR(svn_client__mtcc_add_mkdir("A/B/C/D", mtcc, subpool)); + SVN_ERR(svn_client__mtcc_add_propset("", "on-root", + svn_string_create("ROOT", subpool), + FALSE, mtcc, subpool)); + SVN_ERR(svn_client__mtcc_add_propset("A/B", "on-B", + svn_string_create("BBBB", subpool), + FALSE, mtcc, subpool)); + SVN_ERR(svn_client__mtcc_add_propset("A/B/C", "Z", + svn_string_create("Z", subpool), + FALSE, mtcc, subpool)); + SVN_ERR(verify_mtcc_commit(mtcc, 1, subpool)); + svn_pool_clear(subpool); + + { + apr_array_header_t *iprops; + svn_prop_inherited_item_t *ip; + + SVN_ERR(svn_client_open_ra_session2(&ra, repos_url, NULL, ctx, + pool, subpool)); + + SVN_ERR(svn_ra_get_inherited_props(ra, &iprops, "A/B/C/D", 1, + subpool, subpool)); + + SVN_TEST_ASSERT(iprops != NULL); + SVN_TEST_INT_ASSERT(iprops->nelts, 3); + + ip = APR_ARRAY_IDX(iprops, 0, svn_prop_inherited_item_t *); + SVN_TEST_STRING_ASSERT(ip->path_or_url, ""); + + ip = APR_ARRAY_IDX(iprops, 1, svn_prop_inherited_item_t *); + SVN_TEST_STRING_ASSERT(ip->path_or_url, "A/B"); + + ip = APR_ARRAY_IDX(iprops, 2, svn_prop_inherited_item_t *); + SVN_TEST_STRING_ASSERT(ip->path_or_url, "A/B/C"); + } + + return SVN_NO_ERROR; +} + +/* ========================================================================== */ + + +static 
int max_threads = 3; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_OPTS_PASS(test_mkdir, + "test mtcc mkdir"), + SVN_TEST_OPTS_PASS(test_mkgreek, + "test making greek tree"), + SVN_TEST_OPTS_PASS(test_swap, + "swapping some trees"), + SVN_TEST_OPTS_PASS(test_propset, + "test propset and propdel"), + SVN_TEST_OPTS_PASS(test_update_files, + "test update files"), + SVN_TEST_OPTS_PASS(test_overwrite, + "test overwrite"), + SVN_TEST_OPTS_PASS(test_anchoring, + "test mtcc anchoring for root operations"), + SVN_TEST_OPTS_PASS(test_replace_tree, + "test mtcc replace tree"), + SVN_TEST_OPTS_PASS(test_file_revs_both_ways, + "test ra_get_file_revs2 both ways"), + SVN_TEST_OPTS_PASS(test_iprops_path_format, + "test iprops url format"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_delta/random-test.c b/subversion/tests/libsvn_delta/random-test.c index 209ab1d..600989a 100644 --- a/subversion/tests/libsvn_delta/random-test.c +++ b/subversion/tests/libsvn_delta/random-test.c @@ -109,12 +109,9 @@ open_tempfile(const char *name_template, apr_pool_t *pool) { apr_status_t apr_err; apr_file_t *fp = NULL; - char *templ; - - if (!name_template) - templ = apr_pstrdup(pool, "tempfile_XXXXXX"); - else - templ = apr_pstrdup(pool, name_template); + char *templ = (char *)apr_pstrdup( + pool, svn_test_data_path( + name_template ? name_template : "tempfile_XXXXXX", pool)); apr_err = apr_file_mktemp(&fp, templ, 0, pool); assert(apr_err == 0); @@ -283,9 +280,10 @@ copy_tempfile(apr_file_t *fp, apr_pool_t *pool) -/* Implements svn_test_driver_t. */ +/* (Note: *LAST_SEED is an output parameter.) */ static svn_error_t * -random_test(apr_pool_t *pool) +do_random_test(apr_pool_t *pool, + apr_uint32_t *last_seed) { apr_uint32_t seed, maxlen; apr_size_t bytes_range; @@ -300,7 +298,7 @@ random_test(apr_pool_t *pool) for (i = 0; i < iterations; i++) { /* Generate source and target for the delta and its application. 
*/ - apr_uint32_t subseed_base = svn_test_rand(&seed); + apr_uint32_t subseed_base = svn_test_rand((*last_seed = seed, &seed)); apr_file_t *source = generate_random_file(maxlen, subseed_base, &seed, random_bytes, bytes_range, dump_files, pool); @@ -360,6 +358,17 @@ random_test(apr_pool_t *pool) return SVN_NO_ERROR; } +/* Implements svn_test_driver_t. */ +static svn_error_t * +random_test(apr_pool_t *pool) +{ + apr_uint32_t seed; + svn_error_t *err = do_random_test(pool, &seed); + if (err) + fprintf(stderr, "SEED: %lu\n", (unsigned long)seed); + return err; +} + /* (Note: *LAST_SEED is an output parameter.) */ @@ -500,6 +509,8 @@ random_combine_test(apr_pool_t *pool) { apr_uint32_t seed; svn_error_t *err = do_random_combine_test(pool, &seed); + if (err) + fprintf(stderr, "SEED: %lu\n", (unsigned long)seed); return err; } @@ -513,7 +524,9 @@ random_combine_test(apr_pool_t *pool) /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(random_test, @@ -526,3 +539,5 @@ struct svn_test_descriptor_t test_funcs[] = #endif SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_delta/range-index-test.h b/subversion/tests/libsvn_delta/range-index-test.h index 4301201..f330a6a 100644 --- a/subversion/tests/libsvn_delta/range-index-test.h +++ b/subversion/tests/libsvn_delta/range-index-test.h @@ -27,7 +27,7 @@ #include "../../libsvn_delta/compose_delta.c" static range_index_node_t *prev_node, *prev_prev_node; -static apr_off_t +static apr_size_t walk_range_index(range_index_node_t *node, const char **msg) { apr_off_t ret; @@ -70,19 +70,18 @@ print_node_data(range_index_node_t *node, const char *msg, apr_off_t ndx) { if (-node->target_offset == ndx) { - printf(" * Node: [%3"APR_OFF_T_FMT - ",%3"APR_OFF_T_FMT - ") = %-5"APR_OFF_T_FMT"%s\n", + printf(" * Node: [%3"APR_SIZE_T_FMT + ",%3"APR_SIZE_T_FMT + ") = %-5"APR_SIZE_T_FMT"%s\n", 
node->offset, node->limit, -node->target_offset, msg); } else { - printf(" Node: [%3"APR_OFF_T_FMT - ",%3"APR_OFF_T_FMT - ") = %"APR_OFF_T_FMT"\n", + printf(" Node: [%3"APR_SIZE_T_FMT + ",%3"APR_SIZE_T_FMT + ") = %"APR_SIZE_T_FMT"\n", node->offset, node->limit, - (node->target_offset < 0 - ? -node->target_offset : node->target_offset)); + node->target_offset); } } @@ -154,13 +153,13 @@ random_range_index_test(apr_pool_t *pool) ndx = create_range_index(pool); for (i = 1; i <= iterations; ++i) { - apr_off_t offset = svn_test_rand(&seed) % 47; - apr_off_t limit = offset + svn_test_rand(&seed) % 16 + 1; + apr_size_t offset = svn_test_rand(&seed) % 47; + apr_size_t limit = offset + svn_test_rand(&seed) % 16 + 1; range_list_node_t *list, *r; - apr_off_t ret; + apr_size_t ret; const char *msg2; - printf("%3d: Inserting [%3"APR_OFF_T_FMT",%3"APR_OFF_T_FMT") ...", + printf("%3d: Inserting [%3"APR_SIZE_T_FMT",%3"APR_SIZE_T_FMT") ...", i, offset, limit); splay_range_index(offset, ndx); list = build_range_list(offset, limit, ndx); @@ -170,7 +169,7 @@ random_range_index_test(apr_pool_t *pool) if (ret == 0) { for (r = list; r; r = r->next) - printf(" %s[%3"APR_OFF_T_FMT",%3"APR_OFF_T_FMT")", + printf(" %s[%3"APR_SIZE_T_FMT",%3"APR_SIZE_T_FMT")", (r->kind == range_from_source ? (++src_cp, "S") : (++tgt_cp, "T")), r->offset, r->limit); diff --git a/subversion/tests/libsvn_delta/window-test.c b/subversion/tests/libsvn_delta/window-test.c index 384f0df..28f4609 100644 --- a/subversion/tests/libsvn_delta/window-test.c +++ b/subversion/tests/libsvn_delta/window-test.c @@ -100,10 +100,14 @@ stream_window_test(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(stream_window_test, "txdelta stream and windows test"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_diff/diff-diff3-test.c b/subversion/tests/libsvn_diff/diff-diff3-test.c index 1839034..96b6da2 100644 --- a/subversion/tests/libsvn_diff/diff-diff3-test.c +++ b/subversion/tests/libsvn_diff/diff-diff3-test.c @@ -137,9 +137,9 @@ make_file(const char *filename, "merge-FILENAME1-FILENAME2-FILENAME3". The conflict style STYLE is used. */ static svn_error_t * -three_way_merge(const char *filename1, - const char *filename2, - const char *filename3, +three_way_merge(const char *base_filename1, + const char *base_filename2, + const char *base_filename3, const char *contents1, const char *contents2, const char *contents3, @@ -152,8 +152,12 @@ three_way_merge(const char *filename1, apr_file_t *output; svn_stream_t *ostream; svn_stringbuf_t *actual; - char *merge_name = apr_psprintf(pool, "merge-%s-%s-%s", - filename1, filename2, filename3); + char *merge_name = apr_psprintf( + pool, "merge-%s-%s-%s", base_filename1, base_filename2, base_filename3); + + const char *filename1 = svn_test_data_path(base_filename1, pool); + const char *filename2 = svn_test_data_path(base_filename2, pool); + const char *filename3 = svn_test_data_path(base_filename3, pool); /* We have an EXPECTED string we can match, because we don't support any other combinations (yet) than the ones above. 
*/ @@ -169,13 +173,15 @@ three_way_merge(const char *filename1, actual = svn_stringbuf_create_empty(pool); ostream = svn_stream_from_stringbuf(actual, pool); - SVN_ERR(svn_diff_mem_string_output_merge2 + SVN_ERR(svn_diff_mem_string_output_merge3 (ostream, diff, original, modified, latest, - apr_psprintf(pool, "||||||| %s", filename1), - apr_psprintf(pool, "<<<<<<< %s", filename2), - apr_psprintf(pool, ">>>>>>> %s", filename3), + apr_psprintf(pool, "||||||| %s", base_filename1), + apr_psprintf(pool, "<<<<<<< %s", base_filename2), + apr_psprintf(pool, ">>>>>>> %s", base_filename3), NULL, /* separator */ - style, pool)); + style, + NULL, NULL, /* cancel */ + pool)); SVN_ERR(svn_stream_close(ostream)); if (strcmp(actual->data, expected) != 0) @@ -195,17 +201,22 @@ three_way_merge(const char *filename1, APR_OS_DEFAULT, pool)); ostream = svn_stream_from_aprfile2(output, FALSE, pool); - SVN_ERR(svn_diff_file_output_merge2(ostream, diff, - filename1, filename2, filename3, - NULL, NULL, NULL, NULL, - style, - pool)); + SVN_ERR(svn_diff_file_output_merge3( + ostream, diff, + filename1, filename2, filename3, + apr_psprintf(pool, "||||||| %s", base_filename1), + apr_psprintf(pool, "<<<<<<< %s", base_filename2), + apr_psprintf(pool, ">>>>>>> %s", base_filename3), + NULL, /* separator */ + style, + NULL, NULL, /* cancel */ + pool)); SVN_ERR(svn_stream_close(ostream)); SVN_ERR(svn_stringbuf_from_file2(&actual, merge_name, pool)); if (strcmp(actual->data, expected)) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "failed merging diff '%s' to '%s' into '%s'", - filename1, filename2, filename3); + base_filename1, base_filename2, base_filename3); SVN_ERR(svn_io_remove_file2(filename1, TRUE, pool)); if (strcmp(filename1, filename2)) @@ -227,8 +238,8 @@ three_way_merge(const char *filename1, preserved otherwise. If the diff fails the diff output will be in a file called "diff-FILENAME1-FILENAME2". 
*/ static svn_error_t * -two_way_diff(const char *filename1, - const char *filename2, +two_way_diff(const char *base_filename1, + const char *base_filename2, const char *contents1, const char *contents2, const char *expected, @@ -239,7 +250,13 @@ two_way_diff(const char *filename1, apr_file_t *output; svn_stream_t *ostream; svn_stringbuf_t *actual; - char *diff_name = apr_psprintf(pool, "diff-%s-%s", filename1, filename2); + char *diff_name = (char *)apr_pstrdup( + pool, svn_test_data_path( + apr_psprintf(pool, "diff-%s-%s", base_filename1, base_filename2), + pool)); + + const char *filename1 = svn_test_data_path(base_filename1, pool); + const char *filename2 = svn_test_data_path(base_filename2, pool); /* Some of the tests have lots of lines, although not much data as the lines are short, and the in-memory diffs allocate a lot of @@ -261,7 +278,7 @@ two_way_diff(const char *filename1, ostream = svn_stream_from_stringbuf(actual, pool); SVN_ERR(svn_diff_mem_string_output_unified(ostream, diff, - filename1, filename2, + base_filename1, base_filename2, SVN_APR_LOCALE_CHARSET, original, modified, subpool)); svn_pool_clear(subpool); @@ -286,7 +303,7 @@ two_way_diff(const char *filename1, ostream = svn_stream_from_aprfile2(output, FALSE, pool); SVN_ERR(svn_diff_file_output_unified2(ostream, diff, filename1, filename2, - filename1, filename2, + base_filename1, base_filename2, SVN_APR_LOCALE_CHARSET, pool)); SVN_ERR(svn_stream_close(ostream)); @@ -312,16 +329,16 @@ two_way_diff(const char *filename1, return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "failed comparing '%s' and '%s'" " (memory and file results are different)", - filename1, filename2); + base_filename1, base_filename2); } /* May as well do the trivial merges while we are here */ - SVN_ERR(three_way_merge(filename1, filename2, filename1, + SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename1, contents1, contents2, contents1, contents2, NULL, svn_diff_conflict_display_modified_latest, 
subpool)); svn_pool_clear(subpool); - SVN_ERR(three_way_merge(filename2, filename1, filename2, + SVN_ERR(three_way_merge(base_filename2, base_filename1, base_filename2, contents2, contents1, contents2, contents1, NULL, svn_diff_conflict_display_modified_latest, subpool)); @@ -2141,14 +2158,20 @@ test_diff4(apr_pool_t *pool) " /* line plus-four of context */\n" " /* line plus-five of context */\n" "}\n"); - SVN_ERR(make_file("B2", B2.data, pool)); - SVN_ERR(make_file("T1", T1.data, pool)); - SVN_ERR(make_file("T2", T2.data, pool)); - SVN_ERR(make_file("T3", T3.data, pool)); + + const char *B2_path = svn_test_data_path("B2", pool); + const char *T1_path = svn_test_data_path("T1", pool); + const char *T2_path = svn_test_data_path("T2", pool); + const char *T3_path = svn_test_data_path("T3", pool); + + SVN_ERR(make_file(B2_path, B2.data, pool)); + SVN_ERR(make_file(T1_path, T1.data, pool)); + SVN_ERR(make_file(T2_path, T2.data, pool)); + SVN_ERR(make_file(T3_path, T3.data, pool)); /* Usage: tools/diff/diff4 <mine> <older> <yours> <ancestor> */ /* tools/diff/diff4 B2 T2 T3 T1 > B2new */ - SVN_ERR(svn_diff_file_diff4(&diff, "T2", "B2", "T3", "T1", pool)); + SVN_ERR(svn_diff_file_diff4(&diff, T2_path, B2_path, T3_path, T1_path, pool)); /* Sanity. */ SVN_TEST_ASSERT(! 
svn_diff_contains_conflicts(diff)); @@ -2161,7 +2184,7 @@ test_diff4(apr_pool_t *pool) svn_stringbuf_create_ensure(417, pool), /* 417 == wc -c < B2new */ pool); SVN_ERR(svn_diff_file_output_merge(actual, diff, - "T2", "B2", "T3", + T2_path, B2_path, T3_path, NULL, NULL, NULL, NULL, FALSE, FALSE, @@ -2179,12 +2202,16 @@ random_trivial_merge(apr_pool_t *pool) int i; apr_pool_t *subpool = svn_pool_create(pool); + const char *base_filename1 = "trivial1"; + const char *base_filename2 = "trivial2"; + + const char *filename1 = svn_test_data_path(base_filename1, pool); + const char *filename2 = svn_test_data_path(base_filename2, pool); + seed_val(); for (i = 0; i < 5; ++i) { - const char *filename1 = "trivial1"; - const char *filename2 = "trivial2"; int min_lines = 1000; int max_lines = 1100; int var_lines = 50; @@ -2201,12 +2228,12 @@ random_trivial_merge(apr_pool_t *pool) SVN_ERR(svn_stringbuf_from_file2(&contents1, filename1, subpool)); SVN_ERR(svn_stringbuf_from_file2(&contents2, filename2, subpool)); - SVN_ERR(three_way_merge(filename1, filename2, filename1, + SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename1, contents1->data, contents2->data, contents1->data, contents2->data, NULL, svn_diff_conflict_display_modified_latest, subpool)); - SVN_ERR(three_way_merge(filename2, filename1, filename2, + SVN_ERR(three_way_merge(base_filename2, base_filename1, base_filename2, contents2->data, contents1->data, contents2->data, contents1->data, NULL, svn_diff_conflict_display_modified_latest, @@ -2231,14 +2258,20 @@ random_three_way_merge(apr_pool_t *pool) int i; apr_pool_t *subpool = svn_pool_create(pool); + const char *base_filename1 = "original"; + const char *base_filename2 = "modified1"; + const char *base_filename3 = "modified2"; + const char *base_filename4 = "combined"; + + const char *filename1 = svn_test_data_path(base_filename1, pool); + const char *filename2 = svn_test_data_path(base_filename2, pool); + const char *filename3 = 
svn_test_data_path(base_filename3, pool); + const char *filename4 = svn_test_data_path(base_filename4, pool); + seed_val(); for (i = 0; i < 20; ++i) { - const char *filename1 = "original"; - const char *filename2 = "modified1"; - const char *filename3 = "modified2"; - const char *filename4 = "combined"; svn_stringbuf_t *original, *modified1, *modified2, *combined; /* Pick NUM_LINES large enough so that the 'strip identical suffix' code gets triggered with reasonable probability. (Currently it ignores @@ -2272,12 +2305,12 @@ random_three_way_merge(apr_pool_t *pool) SVN_ERR(svn_stringbuf_from_file2(&modified2, filename3, pool)); SVN_ERR(svn_stringbuf_from_file2(&combined, filename4, pool)); - SVN_ERR(three_way_merge(filename1, filename2, filename3, + SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename3, original->data, modified1->data, modified2->data, combined->data, NULL, svn_diff_conflict_display_modified_latest, subpool)); - SVN_ERR(three_way_merge(filename1, filename3, filename2, + SVN_ERR(three_way_merge(base_filename1, base_filename3, base_filename2, original->data, modified2->data, modified1->data, combined->data, NULL, svn_diff_conflict_display_modified_latest, @@ -2303,14 +2336,20 @@ merge_with_part_already_present(apr_pool_t *pool) int i; apr_pool_t *subpool = svn_pool_create(pool); + const char *base_filename1 = "pap-original"; + const char *base_filename2 = "pap-modified1"; + const char *base_filename3 = "pap-modified2"; + const char *base_filename4 = "pap-combined"; + + const char *filename1 = svn_test_data_path(base_filename1, pool); + const char *filename2 = svn_test_data_path(base_filename2, pool); + const char *filename3 = svn_test_data_path(base_filename3, pool); + const char *filename4 = svn_test_data_path(base_filename4, pool); + seed_val(); for (i = 0; i < 20; ++i) { - const char *filename1 = "pap-original"; - const char *filename2 = "pap-modified1"; - const char *filename3 = "pap-modified2"; - const char *filename4 = 
"pap-combined"; svn_stringbuf_t *original, *modified1, *modified2, *combined; int num_lines = 200, num_src = 20, num_dst = 20; svn_boolean_t *lines = apr_pcalloc(subpool, sizeof(*lines) * num_lines); @@ -2345,12 +2384,12 @@ merge_with_part_already_present(apr_pool_t *pool) SVN_ERR(svn_stringbuf_from_file2(&modified2, filename3, pool)); SVN_ERR(svn_stringbuf_from_file2(&combined, filename4, pool)); - SVN_ERR(three_way_merge(filename1, filename2, filename3, + SVN_ERR(three_way_merge(base_filename1, base_filename2, base_filename3, original->data, modified1->data, modified2->data, combined->data, NULL, svn_diff_conflict_display_modified_latest, subpool)); - SVN_ERR(three_way_merge(filename1, filename3, filename2, + SVN_ERR(three_way_merge(base_filename1, base_filename3, base_filename2, original->data, modified2->data, modified1->data, combined->data, NULL, svn_diff_conflict_display_modified_latest, @@ -2915,7 +2954,10 @@ two_way_issue_3362_v2(apr_pool_t *pool) /* ========================================================================== */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(dump_core, @@ -2947,10 +2989,12 @@ struct svn_test_descriptor_t test_funcs[] = SVN_TEST_PASS2(test_identical_suffix, "identical suffix starts at the boundary of a chunk"), SVN_TEST_PASS2(test_token_compare, - "compare tokes at the chunk boundary"), + "compare tokens at the chunk boundary"), SVN_TEST_PASS2(two_way_issue_3362_v1, "2-way issue #3362 test v1"), SVN_TEST_PASS2(two_way_issue_3362_v2, "2-way issue #3362 test v2"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_diff/parse-diff-test.c b/subversion/tests/libsvn_diff/parse-diff-test.c index 9a44369..d116f04 100644 --- a/subversion/tests/libsvn_diff/parse-diff-test.c +++ b/subversion/tests/libsvn_diff/parse-diff-test.c @@ -25,6 +25,8 @@ #include "../svn_test.h" #include "svn_diff.h" +#include 
"svn_hash.h" +#include "svn_mergeinfo.h" #include "svn_pools.h" #include "svn_utf.h" @@ -253,6 +255,27 @@ static const char *unidiff_lacking_trailing_eol = " This is the file 'gamma'." NL "+some more bytes to 'gamma'"; /* Don't add NL after this line */ +static const char *unidiff_with_mergeinfo = + "Index: A/C" NL + "===================================================================" NL + "--- A/C\t(revision 2)" NL + "+++ A/C\t(working copy)" NL + "Modified: svn:ignore" NL + "## -7,6 +7,7 ##" NL + " configure" NL + " libtool" NL + " .gdb_history" NL + "+.swig_checked" NL + " *.orig" NL + " *.rej" NL + " TAGS" NL + "Modified: svn:mergeinfo" NL + "## -0,1 +0,3 ##" NL + " Reverse-merged /subversion/branches/1.6.x-r935631:r952683-955333" NL + " /subversion/branches/nfc-nfd-aware-client:r870276,870376 をマージã—ã¾ã—ãŸ"NL + " Fusionné /subversion/branches/1.7.x-r1507044:r1507300-1511568" NL + " Merged /subversion/branches/1.8.x-openssl-dirs:r1535139" NL; +/* The above diff intentionally contains i18n versions of some lines. */ /* Create a PATCH_FILE containing the contents of DIFF. 
*/ static svn_error_t * @@ -644,7 +667,7 @@ test_parse_property_diff(apr_pool_t *pool) prop_patch = apr_hash_get(patch->prop_patches, "prop_add", APR_HASH_KEY_STRING); - SVN_TEST_ASSERT(!strcmp("prop_add", prop_patch->name)); + SVN_TEST_STRING_ASSERT(prop_patch->name, "prop_add"); SVN_TEST_ASSERT(prop_patch->operation == svn_diff_op_added); hunks = prop_patch->hunks; @@ -957,9 +980,92 @@ test_parse_unidiff_lacking_trailing_eol(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_parse_unidiff_with_mergeinfo(apr_pool_t *pool) +{ + svn_patch_file_t *patch_file; + svn_boolean_t reverse; + svn_boolean_t ignore_whitespace; + int i; + apr_pool_t *iterpool; + + reverse = FALSE; + ignore_whitespace = FALSE; + iterpool = svn_pool_create(pool); + for (i = 0; i < 2; i++) + { + svn_patch_t *patch; + svn_mergeinfo_t mergeinfo; + svn_mergeinfo_t reverse_mergeinfo; + svn_rangelist_t *rangelist; + svn_merge_range_t *range; + + svn_pool_clear(iterpool); + + SVN_ERR(create_patch_file(&patch_file, unidiff_with_mergeinfo, + pool)); + + SVN_ERR(svn_diff_parse_next_patch(&patch, patch_file, reverse, + ignore_whitespace, iterpool, + iterpool)); + SVN_TEST_ASSERT(patch); + SVN_TEST_STRING_ASSERT(patch->old_filename, "A/C"); + SVN_TEST_STRING_ASSERT(patch->new_filename, "A/C"); + + /* svn:ignore */ + SVN_TEST_ASSERT(apr_hash_count(patch->prop_patches) == 1); + + SVN_TEST_ASSERT(patch->mergeinfo); + SVN_TEST_ASSERT(patch->reverse_mergeinfo); + + if (reverse) + { + mergeinfo = patch->reverse_mergeinfo; + reverse_mergeinfo = patch->mergeinfo; + } + else + { + mergeinfo = patch->mergeinfo; + reverse_mergeinfo = patch->reverse_mergeinfo; + } + + rangelist = svn_hash_gets(reverse_mergeinfo, + "/subversion/branches/1.6.x-r935631"); + SVN_TEST_ASSERT(rangelist); + SVN_TEST_ASSERT(rangelist->nelts == 1); + range = APR_ARRAY_IDX(rangelist, 0, svn_merge_range_t *); + SVN_TEST_ASSERT(range->start == 952682); + SVN_TEST_ASSERT(range->end == 955333); + + rangelist = 
svn_hash_gets(mergeinfo, + "/subversion/branches/nfc-nfd-aware-client"); + SVN_TEST_ASSERT(rangelist); + SVN_TEST_ASSERT(rangelist->nelts == 2); + range = APR_ARRAY_IDX(rangelist, 0, svn_merge_range_t *); + SVN_TEST_ASSERT(range->end == 870276); + range = APR_ARRAY_IDX(rangelist, 1, svn_merge_range_t *); + SVN_TEST_ASSERT(range->end == 870376); + + rangelist = svn_hash_gets(mergeinfo, + "/subversion/branches/1.8.x-openssl-dirs"); + SVN_TEST_ASSERT(rangelist); + SVN_TEST_ASSERT(rangelist->nelts == 1); + range = APR_ARRAY_IDX(rangelist, 0, svn_merge_range_t *); + SVN_TEST_ASSERT(range->end == 1535139); + + reverse = !reverse; + SVN_ERR(svn_diff_close_patch_file(patch_file, pool)); + } + svn_pool_destroy(iterpool); + return SVN_NO_ERROR; +} + /* ========================================================================== */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_parse_unidiff, @@ -980,5 +1086,9 @@ struct svn_test_descriptor_t test_funcs[] = "test git diffs with spaces in paths"), SVN_TEST_PASS2(test_parse_unidiff_lacking_trailing_eol, "test parsing unidiffs lacking trailing eol"), + SVN_TEST_PASS2(test_parse_unidiff_with_mergeinfo, + "test parsing unidiffs with mergeinfo"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs/fs-test.c b/subversion/tests/libsvn_fs/fs-test.c index 104a692..fcab5e4 100644 --- a/subversion/tests/libsvn_fs/fs-test.c +++ b/subversion/tests/libsvn_fs/fs-test.c @@ -23,6 +23,7 @@ #include <stdlib.h> #include <string.h> #include <apr_pools.h> +#include <apr_thread_proc.h> #include <assert.h> #include "../svn_test.h" @@ -37,11 +38,16 @@ #include "svn_props.h" #include "svn_version.h" +#include "svn_private_config.h" +#include "private/svn_fs_util.h" #include "private/svn_fs_private.h" +#include "private/svn_fspath.h" +#include "private/svn_sqlite.h" #include "../svn_test_fs.h" #include 
"../../libsvn_delta/delta.h" +#include "../../libsvn_fs/fs-loader.h" #define SET_STR(ps, s) ((ps)->data = (s), (ps)->len = strlen(s)) @@ -203,24 +209,35 @@ reopen_trivial_transaction(const svn_test_opts_t *opts, { svn_fs_t *fs; svn_fs_txn_t *txn; + svn_fs_root_t *root; const char *txn_name; apr_pool_t *subpool = svn_pool_create(pool); SVN_ERR(svn_test__create_fs(&fs, "test-repo-reopen-trivial-txn", opts, pool)); - /* Begin a new transaction that is based on revision 0. */ + /* Create a first transaction - we don't want that one to reopen. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool)); + + /* Begin a second transaction that is based on revision 0. */ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool)); /* Don't use the subpool, txn_name must persist beyond the current txn */ SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool)); + /* Create a third transaction - we don't want that one to reopen. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool)); + /* Close the transaction. */ svn_pool_clear(subpool); /* Reopen the transaction by name */ SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, subpool)); + /* Does it have the same name? */ + SVN_ERR(svn_fs_txn_root(&root, txn, subpool)); + SVN_TEST_STRING_ASSERT(svn_fs_txn_root_name(root, subpool), txn_name); + /* Close the transaction ... again. */ svn_pool_destroy(subpool); @@ -1019,7 +1036,7 @@ static svn_error_t * check_entry_present(svn_fs_root_t *root, const char *path, const char *name, apr_pool_t *pool) { - svn_boolean_t present; + svn_boolean_t present = FALSE; SVN_ERR(check_entry(root, path, name, &present, pool)); if (! present) @@ -1036,7 +1053,7 @@ static svn_error_t * check_entry_absent(svn_fs_root_t *root, const char *path, const char *name, apr_pool_t *pool) { - svn_boolean_t present; + svn_boolean_t present = TRUE; SVN_ERR(check_entry(root, path, name, &present, pool)); if (present) @@ -1122,6 +1139,8 @@ basic_commit(const svn_test_opts_t *opts, /* Create the greek tree. 
*/ SVN_ERR(svn_test__create_greek_tree(txn_root, pool)); + SVN_TEST_ASSERT(svn_fs_is_txn_root(txn_root)); + SVN_TEST_ASSERT(!svn_fs_is_revision_root(txn_root)); /* Commit it. */ SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, pool)); @@ -1135,6 +1154,8 @@ basic_commit(const svn_test_opts_t *opts, /* Get root of the revision */ SVN_ERR(svn_fs_revision_root(&revision_root, fs, after_rev, pool)); + SVN_TEST_ASSERT(!svn_fs_is_txn_root(revision_root)); + SVN_TEST_ASSERT(svn_fs_is_revision_root(revision_root)); /* Check the tree. */ SVN_ERR(svn_test__check_greek_tree(revision_root, pool)); @@ -1576,6 +1597,10 @@ merging_commit(const svn_test_opts_t *opts, SVN_ERR(svn_fs_make_file(txn_root, "theta", pool)); SVN_ERR(svn_test__set_file_contents (txn_root, "theta", "This is another file 'theta'.\n", pool)); + + /* TXN must actually be based upon revisions[4] (instead of HEAD). */ + SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == revisions[4]); + SVN_ERR(test_commit_txn(&failed_rev, txn, "/theta", pool)); SVN_ERR(svn_fs_abort_txn(txn, pool)); @@ -1599,6 +1624,9 @@ merging_commit(const svn_test_opts_t *opts, SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); SVN_ERR(svn_fs_delete(txn_root, "A/D/H", pool)); + /* TXN must actually be based upon revisions[1] (instead of HEAD). */ + SVN_TEST_ASSERT(svn_fs_txn_base_revision(txn) == revisions[1]); + /* We used to create the revision like this before fixing issue #2751 -- Directory prop mods reverted in overlapping commits scenario. @@ -2075,7 +2103,7 @@ copy_test(const svn_test_opts_t *opts, svn_revnum_t after_rev; /* Prepare a filesystem. */ - SVN_ERR(svn_test__create_fs(&fs, "test-repo-copy-test", + SVN_ERR(svn_test__create_fs(&fs, "test-repo-copy", opts, pool)); /* In first txn, create and commit the greek tree. 
*/ @@ -3756,6 +3784,17 @@ small_file_integrity(const svn_test_opts_t *opts, static svn_error_t * +almostmedium_file_integrity(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + apr_uint32_t seed = (apr_uint32_t) apr_time_now(); + + return file_integrity_helper(SVN_DELTA_WINDOW_SIZE - 1, &seed, opts, + "test-repo-almostmedium-file-integrity", pool); +} + + +static svn_error_t * medium_file_integrity(const svn_test_opts_t *opts, apr_pool_t *pool) { @@ -4156,6 +4195,12 @@ check_related(const svn_test_opts_t *opts, { "E", 7 }, { "E", 8 }, { "F", 9 }, { "F", 10 } }; + /* Latest revision that touched the respective path. */ + struct path_rev_t latest_changes[6] = { + { "A", 4 }, { "B", 6 }, { "C", 6 }, + { "D", 7 }, { "E", 8 }, { "F", 10 } + }; + int related_matrix[16][16] = { /* A1 ... F10 across the top here*/ { 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0 }, /* A1 */ @@ -4185,14 +4230,16 @@ check_related(const svn_test_opts_t *opts, struct path_rev_t pr2 = path_revs[j]; const svn_fs_id_t *id1, *id2; int related = 0; + svn_fs_node_relation_t relation; + svn_fs_root_t *rev_root1, *rev_root2; /* Get the ID for the first path/revision combination. */ - SVN_ERR(svn_fs_revision_root(&rev_root, fs, pr1.rev, subpool)); - SVN_ERR(svn_fs_node_id(&id1, rev_root, pr1.path, subpool)); + SVN_ERR(svn_fs_revision_root(&rev_root1, fs, pr1.rev, subpool)); + SVN_ERR(svn_fs_node_id(&id1, rev_root1, pr1.path, subpool)); /* Get the ID for the second path/revision combination. */ - SVN_ERR(svn_fs_revision_root(&rev_root, fs, pr2.rev, subpool)); - SVN_ERR(svn_fs_node_id(&id2, rev_root, pr2.path, subpool)); + SVN_ERR(svn_fs_revision_root(&rev_root2, fs, pr2.rev, subpool)); + SVN_ERR(svn_fs_node_id(&id2, rev_root2, pr2.path, subpool)); /* <exciting> Now, run the relationship check! </exciting> */ related = svn_fs_check_related(id1, id2) ? 1 : 0; @@ -4215,9 +4262,263 @@ check_related(const svn_test_opts_t *opts, pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev); } + /* Asking directly, i.e. 
without involving the noderev IDs as + * an intermediate, should yield the same results. */ + SVN_ERR(svn_fs_node_relation(&relation, rev_root1, pr1.path, + rev_root2, pr2.path, subpool)); + if (i == j) + { + /* Identical note. */ + if (!related || relation != svn_fs_node_unchanged) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s:%d' to be the same as '%s:%d';" + " it was not", + pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev); + } + } + else if (related && relation != svn_fs_node_common_ancestor) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s:%d' to have a common ancestor with '%s:%d';" + " it had not", + pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev); + } + else if (!related && relation != svn_fs_node_unrelated) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s:%d' to not be related to '%s:%d'; it was", + pr1.path, (int)pr1.rev, pr2.path, (int)pr2.rev); + } + + svn_pool_clear(subpool); + } /* for ... */ + } /* for ... */ + + /* Verify that the noderevs stay the same after their last change. */ + for (i = 0; i < 6; ++i) + { + const char *path = latest_changes[i].path; + svn_revnum_t latest = latest_changes[i].rev; + svn_fs_root_t *latest_root; + svn_revnum_t rev; + svn_fs_node_relation_t relation; + + /* FS root of the latest change. */ + svn_pool_clear(subpool); + SVN_ERR(svn_fs_revision_root(&latest_root, fs, latest, subpool)); + + /* All future revisions. */ + for (rev = latest + 1; rev <= 10; ++rev) + { + /* Query their noderev relationship to the latest change. 
*/ + SVN_ERR(svn_fs_revision_root(&rev_root, fs, rev, subpool)); + SVN_ERR(svn_fs_node_relation(&relation, latest_root, path, + rev_root, path, subpool)); + + /* They shall use the same noderevs */ + if (relation != svn_fs_node_unchanged) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s:%d' to be the same as '%s:%d';" + " it was not", + path, (int)latest, path, (int)rev); + } + } /* for ... */ + } /* for ... */ + } + + /* Destroy the subpool. */ + svn_pool_destroy(subpool); + + return SVN_NO_ERROR; +} + + +static svn_error_t * +check_txn_related(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + apr_pool_t *subpool = svn_pool_create(pool); + svn_fs_t *fs; + svn_fs_txn_t *txn[3]; + svn_fs_root_t *root[3]; + svn_revnum_t youngest_rev = 0; + + /* Create a filesystem and repository. */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-check-related", + opts, pool)); + + /*** Step I: Build up some state in our repository through a series + of commits */ + + /* This is the node graph we are testing. It contains one revision (r1) + and two transactions, T1 and T2 - yet uncommitted. + + A is a file that exists in r1 (A-0) and gets modified in both txns. + C is a copy of A1 made in both txns. + B is a new node created in both txns + D is a file that exists in r1 (D-0) and never gets modified. 
+ + +--A-0--+ D-0 + | | + +-----+ +-----+ + | | | | + B-1 C-T A-1 A-2 C-1 B-2 + */ + /* Revision 1 */ + SVN_ERR(svn_fs_begin_txn(&txn[0], fs, youngest_rev, subpool)); + SVN_ERR(svn_fs_txn_root(&root[0], txn[0], subpool)); + SVN_ERR(svn_fs_make_file(root[0], "A", subpool)); + SVN_ERR(svn_test__set_file_contents(root[0], "A", "1", subpool)); + SVN_ERR(svn_fs_make_file(root[0], "D", subpool)); + SVN_ERR(svn_fs_commit_txn(NULL, &youngest_rev, txn[0], subpool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); + svn_pool_clear(subpool); + SVN_ERR(svn_fs_revision_root(&root[0], fs, youngest_rev, pool)); + + /* Transaction 1 */ + SVN_ERR(svn_fs_begin_txn(&txn[1], fs, youngest_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root[1], txn[1], pool)); + SVN_ERR(svn_test__set_file_contents(root[1], "A", "2", pool)); + SVN_ERR(svn_fs_copy(root[0], "A", root[1], "C", pool)); + SVN_ERR(svn_fs_make_file(root[1], "B", pool)); + + /* Transaction 2 */ + SVN_ERR(svn_fs_begin_txn(&txn[2], fs, youngest_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root[2], txn[2], pool)); + SVN_ERR(svn_test__set_file_contents(root[2], "A", "2", pool)); + SVN_ERR(svn_fs_copy(root[0], "A", root[2], "C", pool)); + SVN_ERR(svn_fs_make_file(root[2], "B", pool)); + + /*** Step II: Exhaustively verify relationship between all nodes in + existence. */ + { + int i, j; + + struct path_rev_t + { + const char *path; + int root; + }; + + /* Our 16 existing files/revisions. */ + struct path_rev_t path_revs[8] = { + { "A", 0 }, { "A", 1 }, { "A", 2 }, + { "B", 1 }, { "B", 2 }, + { "C", 1 }, { "C", 2 }, + { "D", 0 } + }; + + int related_matrix[8][8] = { + /* A-0 ... 
D-0 across the top here*/ + { 1, 1, 1, 0, 0, 1, 1, 0 }, /* A-0 */ + { 1, 1, 1, 0, 0, 1, 1, 0 }, /* A-1 */ + { 1, 1, 1, 0, 0, 1, 1, 0 }, /* A-2 */ + { 0, 0, 0, 1, 0, 0, 0, 0 }, /* C-1 */ + { 0, 0, 0, 0, 1, 0, 0, 0 }, /* C-2 */ + { 1, 1, 1, 0, 0, 1, 1, 0 }, /* B-1 */ + { 1, 1, 1, 0, 0, 1, 1, 0 }, /* B-2 */ + { 0, 0, 0, 0, 0, 0, 0, 1 } /* D-0 */ + }; + + /* Here's the fun part. Running the tests. */ + for (i = 0; i < 8; i++) + { + for (j = 0; j < 8; j++) + { + struct path_rev_t pr1 = path_revs[i]; + struct path_rev_t pr2 = path_revs[j]; + const svn_fs_id_t *id1, *id2; + int related = 0; + svn_fs_node_relation_t relation; + svn_pool_clear(subpool); + + /* Get the ID for the first path/revision combination. */ + SVN_ERR(svn_fs_node_id(&id1, root[pr1.root], pr1.path, subpool)); + + /* Get the ID for the second path/revision combination. */ + SVN_ERR(svn_fs_node_id(&id2, root[pr2.root], pr2.path, subpool)); + + /* <exciting> Now, run the relationship check! </exciting> */ + related = svn_fs_check_related(id1, id2) ? 1 : 0; + if (related == related_matrix[i][j]) + { + /* xlnt! */ + } + else if ((! related) && related_matrix[i][j]) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s-%d' to be related to '%s-%d'; it was not", + pr1.path, pr1.root, pr2.path, pr2.root); + } + else if (related && (! related_matrix[i][j])) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s-%d' to not be related to '%s-%d'; it was", + pr1.path, pr1.root, pr2.path, pr2.root); + } + + /* Asking directly, i.e. without involving the noderev IDs as + * an intermediate, should yield the same results. */ + SVN_ERR(svn_fs_node_relation(&relation, root[pr1.root], pr1.path, + root[pr2.root], pr2.path, subpool)); + if (i == j) + { + /* Identical note. 
*/ + if (!related || relation != svn_fs_node_unchanged) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s-%d' to be the same as '%s-%d';" + " it was not", + pr1.path, pr1.root, pr2.path, pr2.root); + } + } + else if (related && relation != svn_fs_node_common_ancestor) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s-%d' to have a common ancestor with '%s-%d';" + " it had not", + pr1.path, pr1.root, pr2.path, pr2.root); + } + else if (!related && relation != svn_fs_node_unrelated) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected '%s-%d' to not be related to '%s-%d'; it was", + pr1.path, pr1.root, pr2.path, pr2.root); + } } /* for ... */ } /* for ... */ + + /* Verify that the noderevs stay the same after their last change. + There is only D that is not changed. */ + for (i = 1; i <= 2; ++i) + { + svn_fs_node_relation_t relation; + svn_pool_clear(subpool); + + /* Query their noderev relationship to the latest change. */ + SVN_ERR(svn_fs_node_relation(&relation, root[i], "D", + root[0], "D", subpool)); + + /* They shall use the same noderevs */ + if (relation != svn_fs_node_unchanged) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "expected 'D-%d' to be the same as 'D-0'; it was not", i); + } + } /* for ... */ } /* Destroy the subpool. */ @@ -4238,7 +4539,7 @@ branch_test(const svn_test_opts_t *opts, svn_revnum_t youngest_rev = 0; /* Create a filesystem and repository. */ - SVN_ERR(svn_test__create_fs(&fs, "test-repo-branch-test", + SVN_ERR(svn_test__create_fs(&fs, "test-repo-branch", opts, pool)); /*** Revision 1: Create the greek tree in revision. ***/ @@ -4305,22 +4606,49 @@ branch_test(const svn_test_opts_t *opts, } +/* Verify that file FILENAME under ROOT has the same contents checksum + * as CONTENTS when comparing the checksums of the given TYPE. + * Use POOL for temporary allocations. 
*/ +static svn_error_t * +verify_file_checksum(svn_stringbuf_t *contents, + svn_fs_root_t *root, + const char *filename, + svn_checksum_kind_t type, + apr_pool_t *pool) +{ + svn_checksum_t *expected_checksum, *actual_checksum; + + /* Write a file, compare the repository's idea of its checksum + against our idea of its checksum. They should be the same. */ + SVN_ERR(svn_checksum(&expected_checksum, type, contents->data, + contents->len, pool)); + SVN_ERR(svn_fs_file_checksum(&actual_checksum, type, root, filename, TRUE, + pool)); + if (!svn_checksum_match(expected_checksum, actual_checksum)) + return svn_error_createf + (SVN_ERR_FS_GENERAL, NULL, + "verify-checksum: checksum mismatch:\n" + " expected: %s\n" + " actual: %s\n", + svn_checksum_to_cstring(expected_checksum, pool), + svn_checksum_to_cstring(actual_checksum, pool)); + + return SVN_NO_ERROR; +} + static svn_error_t * verify_checksum(const svn_test_opts_t *opts, apr_pool_t *pool) { svn_fs_t *fs; svn_fs_txn_t *txn; - svn_fs_root_t *txn_root; + svn_fs_root_t *txn_root, *rev_root; svn_stringbuf_t *str; - svn_checksum_t *expected_checksum, *actual_checksum; + svn_revnum_t rev; /* Write a file, compare the repository's idea of its checksum against our idea of its checksum. They should be the same. 
*/ - str = svn_stringbuf_create("My text editor charges me rent.", pool); - SVN_ERR(svn_checksum(&expected_checksum, svn_checksum_md5, str->data, - str->len, pool)); SVN_ERR(svn_test__create_fs(&fs, "test-repo-verify-checksum", opts, pool)); @@ -4328,17 +4656,20 @@ verify_checksum(const svn_test_opts_t *opts, SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); SVN_ERR(svn_fs_make_file(txn_root, "fact", pool)); SVN_ERR(svn_test__set_file_contents(txn_root, "fact", str->data, pool)); - SVN_ERR(svn_fs_file_checksum(&actual_checksum, svn_checksum_md5, txn_root, - "fact", TRUE, pool)); - if (!svn_checksum_match(expected_checksum, actual_checksum)) - return svn_error_createf - (SVN_ERR_FS_GENERAL, NULL, - "verify-checksum: checksum mismatch:\n" - " expected: %s\n" - " actual: %s\n", - svn_checksum_to_cstring(expected_checksum, pool), - svn_checksum_to_cstring(actual_checksum, pool)); + /* Do it for the txn. */ + SVN_ERR(verify_file_checksum(str, txn_root, "fact", svn_checksum_md5, + pool)); + SVN_ERR(verify_file_checksum(str, txn_root, "fact", svn_checksum_sha1, + pool)); + + /* Do it again - this time for the revision. */ + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + SVN_ERR(svn_fs_revision_root(&rev_root, fs, rev, pool)); + SVN_ERR(verify_file_checksum(str, rev_root, "fact", svn_checksum_md5, + pool)); + SVN_ERR(verify_file_checksum(str, rev_root, "fact", svn_checksum_sha1, + pool)); return SVN_NO_ERROR; } @@ -4568,6 +4899,7 @@ unordered_txn_dirprops(const svn_test_opts_t *opts, svn_fs_root_t *txn_root, *txn_root2; svn_string_t pval; svn_revnum_t new_rev, not_rev; + svn_boolean_t is_bdb = strcmp(opts->fs_type, "bdb") == 0; /* This is a regression test for issue #2751. */ @@ -4624,10 +4956,21 @@ unordered_txn_dirprops(const svn_test_opts_t *opts, /* Commit the first one first. */ SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool)); - /* Then commit the second -- but expect an conflict because the - directory wasn't up-to-date, which is required for propchanges. 
*/ - SVN_ERR(test_commit_txn(¬_rev, txn2, "/A/B", pool)); - SVN_ERR(svn_fs_abort_txn(txn2, pool)); + /* Some backends are clever then others. */ + if (is_bdb) + { + /* Then commit the second -- but expect an conflict because the + directory wasn't up-to-date, which is required for propchanges. */ + SVN_ERR(test_commit_txn(¬_rev, txn2, "/A/B", pool)); + SVN_ERR(svn_fs_abort_txn(txn2, pool)); + } + else + { + /* Then commit the second -- there will be no conflict despite the + directory being out-of-data because the properties as well as the + directory structure (list of nodes) was up-to-date. */ + SVN_ERR(test_commit_txn(¬_rev, txn2, NULL, pool)); + } return SVN_NO_ERROR; } @@ -4936,7 +5279,7 @@ delete_fs(const svn_test_opts_t *opts, return SVN_NO_ERROR; } -/* Issue 4340, "fs layer should reject filenames with trailing \n" */ +/* Issue 4340, "filenames containing \n corrupt FSFS repositories" */ static svn_error_t * filename_trailing_newline(const svn_test_opts_t *opts, apr_pool_t *pool) @@ -4947,15 +5290,11 @@ filename_trailing_newline(const svn_test_opts_t *opts, svn_fs_root_t *txn_root, *root; svn_revnum_t youngest_rev = 0; svn_error_t *err; - svn_boolean_t legacy_backend; - static const char contents[] = "foo\003bar"; - - /* The FS API wants \n to be permitted, but FSFS never implemented that, - * so for FSFS we expect errors rather than successes in some of the commits. - * Use a blacklist approach so that new FSes default to implementing the API - * as originally defined. */ - legacy_backend = (!strcmp(opts->fs_type, SVN_FS_TYPE_FSFS)); + /* The FS API wants \n to be permitted, but FSFS never implemented that. + * Moreover, formats like svn:mergeinfo and svn:externals don't support + * it either. So, we can't have newlines in file names in any FS. 
+ */ SVN_ERR(svn_test__create_fs(&fs, "test-repo-filename-trailing-newline", opts, pool)); @@ -4967,73 +5306,1874 @@ filename_trailing_newline(const svn_test_opts_t *opts, SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); svn_pool_clear(subpool); - /* Attempt to copy /foo to "/bar\n". This should fail on FSFS. */ + /* Attempt to copy /foo to "/bar\n". This should fail. */ SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, subpool)); SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); SVN_ERR(svn_fs_revision_root(&root, fs, youngest_rev, subpool)); err = svn_fs_copy(root, "/foo", txn_root, "/bar\n", subpool); - if (!legacy_backend) - SVN_TEST_ASSERT(err == SVN_NO_ERROR); - else - SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); - /* Attempt to create a file /foo/baz\n. This should fail on FSFS. */ + /* Attempt to create a file /foo/baz\n. This should fail. */ err = svn_fs_make_file(txn_root, "/foo/baz\n", subpool); - if (!legacy_backend) - SVN_TEST_ASSERT(err == SVN_NO_ERROR); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); + + /* Attempt to create a directory /foo/bang\n. This should fail. */ + err = svn_fs_make_dir(txn_root, "/foo/bang\n", subpool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_fs_info_format(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + int fs_format; + svn_version_t *supports_version; + svn_version_t v1_5_0 = {1, 5, 0, ""}; + svn_version_t v1_9_0 = {1, 9, 0, ""}; + svn_test_opts_t opts2; + svn_boolean_t is_fsx = strcmp(opts->fs_type, "fsx") == 0; + + opts2 = *opts; + opts2.server_minor_version = is_fsx ? 
9 : 5; + + SVN_ERR(svn_test__create_fs(&fs, "test-fs-format-info", &opts2, pool)); + SVN_ERR(svn_fs_info_format(&fs_format, &supports_version, fs, pool, pool)); + + if (is_fsx) + { + SVN_TEST_ASSERT(fs_format == 1); + SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_9_0)); + } else - SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); - + { + /* happens to be the same for FSFS and BDB */ + SVN_TEST_ASSERT(fs_format == 3); + SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_5_0)); + } + + return SVN_NO_ERROR; +} - /* Create another file, with contents. */ - if (!legacy_backend) +/* Sleeps until apr_time_now() value changes. */ +static void sleep_for_timestamps(void) +{ + apr_time_t start = apr_time_now(); + + while (start == apr_time_now()) { - SVN_ERR(svn_fs_make_file(txn_root, "/bar\n/baz\n", subpool)); - SVN_ERR(svn_test__set_file_contents(txn_root, "bar\n/baz\n", - contents, pool)); + apr_sleep(APR_USEC_PER_SEC / 1000); } +} + +static svn_error_t * +commit_timestamp(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + svn_string_t *date = svn_string_create("Yesterday", pool); + svn_revnum_t rev = 0; + apr_hash_t *proplist; + svn_string_t *svn_date; + svn_string_t *txn_svn_date; + + SVN_ERR(svn_test__create_fs(&fs, "test-fs-commit-timestamp", + opts, pool)); + + /* Commit with a specified svn:date. */ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, SVN_FS_TXN_CLIENT_DATE, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/foo", pool)); + SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, date, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool)); + svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE, + APR_HASH_KEY_STRING); + SVN_TEST_ASSERT(svn_date && !strcmp(svn_date->data, date->data)); + + /* Commit that overwrites the specified svn:date. 
*/ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/bar", pool)); + SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, date, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool)); + svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE, + APR_HASH_KEY_STRING); + SVN_TEST_ASSERT(svn_date && strcmp(svn_date->data, date->data)); + + /* Commit with a missing svn:date. */ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, SVN_FS_TXN_CLIENT_DATE, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/zag", pool)); + SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, NULL, pool)); + SVN_ERR(svn_fs_txn_prop(&svn_date, txn, SVN_PROP_REVISION_DATE, pool)); + SVN_TEST_ASSERT(!svn_date); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool)); + svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE, + APR_HASH_KEY_STRING); + SVN_TEST_ASSERT(!svn_date); + + /* Commit that overwites a missing svn:date. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/zig", pool)); + SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_DATE, NULL, pool)); + SVN_ERR(svn_fs_txn_prop(&svn_date, txn, SVN_PROP_REVISION_DATE, pool)); + SVN_TEST_ASSERT(!svn_date); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool)); + svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE, + APR_HASH_KEY_STRING); + SVN_TEST_ASSERT(svn_date); + + /* Commit that doesn't do anything special about svn:date. 
*/ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/zig/foo", pool)); + SVN_ERR(svn_fs_txn_prop(&txn_svn_date, txn, SVN_PROP_REVISION_DATE, pool)); + SVN_TEST_ASSERT(txn_svn_date); + sleep_for_timestamps(); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool)); + svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE, + APR_HASH_KEY_STRING); + SVN_TEST_ASSERT(svn_date); + SVN_TEST_ASSERT(!svn_string_compare(svn_date, txn_svn_date)); + + /* Commit that instructs the backend to use a specific svn:date, but + * doesn't provide one. This used to fail with BDB prior to r1663697. */ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, rev, SVN_FS_TXN_CLIENT_DATE, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/zig/bar", pool)); + SVN_ERR(svn_fs_txn_prop(&txn_svn_date, txn, SVN_PROP_REVISION_DATE, pool)); + SVN_TEST_ASSERT(txn_svn_date); + sleep_for_timestamps(); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + SVN_ERR(svn_fs_revision_proplist(&proplist, fs, rev, pool)); + svn_date = apr_hash_get(proplist, SVN_PROP_REVISION_DATE, + APR_HASH_KEY_STRING); + SVN_TEST_ASSERT(svn_date); + SVN_TEST_ASSERT(!svn_string_compare(svn_date, txn_svn_date)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_compat_version(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_version_t *compatible_version; + apr_hash_t *config = apr_hash_make(pool); + + svn_version_t vcurrent = {SVN_VER_MAJOR, SVN_VER_MINOR, 0, ""}; + svn_version_t v1_2_0 = {1, 2, 0, ""}; + svn_version_t v1_3_0 = {1, 3, 0, ""}; + svn_version_t v1_5_0 = {1, 5, 0, ""}; + + /* no version specified -> default to the current one */ + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &vcurrent)); + + /* test specific compat option */ + 
svn_hash_sets(config, SVN_FS_CONFIG_PRE_1_6_COMPATIBLE, "1"); + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_5_0)); + + /* test precedence amongst compat options */ + svn_hash_sets(config, SVN_FS_CONFIG_PRE_1_8_COMPATIBLE, "1"); + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_5_0)); + + svn_hash_sets(config, SVN_FS_CONFIG_PRE_1_4_COMPATIBLE, "1"); + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_3_0)); + + /* precedence should work with the generic option as well */ + svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.4.17-??"); + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_3_0)); + + svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.2.3-no!"); + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_2_0)); + + /* test generic option alone */ + config = apr_hash_make(pool); + svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.2.3-no!"); + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &v1_2_0)); + + /* out of range values should be caped by the current tool version */ + svn_hash_sets(config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "2.3.4-x"); + SVN_ERR(svn_fs__compatible_version(&compatible_version, config, pool)); + SVN_TEST_ASSERT(svn_ver_equal(compatible_version, &vcurrent)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +dir_prop_merge(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_revnum_t head_rev; + svn_fs_root_t *root; + svn_fs_txn_t *txn, *mid_txn, *top_txn, *sub_txn, *c_txn; + svn_boolean_t is_bdb = strcmp(opts->fs_type, "bdb") == 0; 
+ + /* Create test repository. */ + SVN_ERR(svn_test__create_fs(&fs, "test-fs-dir_prop-merge", opts, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); - if (!legacy_backend) + /* Create and verify the greek tree. */ + SVN_ERR(svn_test__create_greek_tree(root, pool)); + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + /* Start concurrent transactions */ + + /* 1st: modify a mid-level directory */ + SVN_ERR(svn_fs_begin_txn2(&mid_txn, fs, head_rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, mid_txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "A/D", "test-prop", + svn_string_create("val1", pool), pool)); + svn_fs_close_root(root); + + /* 2st: modify a top-level directory */ + SVN_ERR(svn_fs_begin_txn2(&top_txn, fs, head_rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, top_txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "A", "test-prop", + svn_string_create("val2", pool), pool)); + svn_fs_close_root(root); + + SVN_ERR(svn_fs_begin_txn2(&sub_txn, fs, head_rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, sub_txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "A/D/G", "test-prop", + svn_string_create("val3", pool), pool)); + svn_fs_close_root(root); + + /* 3rd: modify a conflicting change to the mid-level directory */ + SVN_ERR(svn_fs_begin_txn2(&c_txn, fs, head_rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, c_txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "A/D", "test-prop", + svn_string_create("valX", pool), pool)); + svn_fs_close_root(root); + + /* Prop changes to the same node should conflict */ + SVN_ERR(test_commit_txn(&head_rev, mid_txn, NULL, pool)); + SVN_ERR(test_commit_txn(&head_rev, c_txn, "/A/D", pool)); + SVN_ERR(svn_fs_abort_txn(c_txn, pool)); + + /* Changes in a sub-tree should not conflict with prop changes to some + parent directory but some backends are clever then others. 
*/ + if (is_bdb) { - svn_revnum_t after_rev; - static svn_test__tree_entry_t expected_entries[] = { - { "foo", NULL }, - { "bar\n", NULL }, - { "foo/baz\n", "" }, - { "bar\n/baz\n", contents }, - { NULL, NULL } - }; - const char *expected_changed_paths[] = { - "/bar\n", - "/foo/baz\n", - "/bar\n/baz\n", - NULL - }; - apr_hash_t *expected_changes = apr_hash_make(pool); - int i; + SVN_ERR(test_commit_txn(&head_rev, top_txn, "/A", pool)); + SVN_ERR(svn_fs_abort_txn(top_txn, pool)); + } + else + { + SVN_ERR(test_commit_txn(&head_rev, top_txn, NULL, pool)); + } + + /* The inverted case is not that trivial to handle. Hence, conflict. + Depending on the checking order, the reported conflict path differs. */ + SVN_ERR(test_commit_txn(&head_rev, sub_txn, is_bdb ? "/A/D" : "/A", pool)); + SVN_ERR(svn_fs_abort_txn(sub_txn, pool)); + + return SVN_NO_ERROR; +} + +#if APR_HAS_THREADS +struct reopen_modify_baton_t { + const char *fs_path; + const char *txn_name; + apr_pool_t *pool; + svn_error_t *err; +}; + +static void * APR_THREAD_FUNC +reopen_modify_child(apr_thread_t *tid, void *data) +{ + struct reopen_modify_baton_t *baton = data; + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *root; + + baton->err = svn_fs_open(&fs, baton->fs_path, NULL, baton->pool); + if (!baton->err) + baton->err = svn_fs_open_txn(&txn, fs, baton->txn_name, baton->pool); + if (!baton->err) + baton->err = svn_fs_txn_root(&root, txn, baton->pool); + if (!baton->err) + baton->err = svn_fs_change_node_prop(root, "A", "name", + svn_string_create("value", + baton->pool), + baton->pool); + svn_pool_destroy(baton->pool); + apr_thread_exit(tid, 0); + return NULL; +} +#endif + +static svn_error_t * +reopen_modify(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ +#if APR_HAS_THREADS + svn_fs_t *fs; + svn_revnum_t head_rev = 0; + svn_fs_root_t *root; + svn_fs_txn_t *txn; + const char *fs_path, *txn_name; + svn_string_t *value; + struct reopen_modify_baton_t baton; + apr_status_t status, child_status; + 
apr_threadattr_t *tattr; + apr_thread_t *tid; + + /* Create test repository with greek tree. */ + fs_path = "test-reopen-modify"; + SVN_ERR(svn_test__create_fs(&fs, fs_path, opts, pool)); + SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_test__create_greek_tree(root, pool)); + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + /* Create txn with changes. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_dir(root, "X", pool)); + + /* In another thread: reopen fs and txn, and add more changes. This + works in BDB and FSX but in FSFS the txn_dir_cache becomes + out-of-date and the thread's changes don't reach the revision. */ + baton.fs_path = fs_path; + baton.txn_name = txn_name; + baton.pool = svn_pool_create(pool); + status = apr_threadattr_create(&tattr, pool); + if (status) + return svn_error_wrap_apr(status, _("Can't create threadattr")); + status = apr_thread_create(&tid, tattr, reopen_modify_child, &baton, pool); + if (status) + return svn_error_wrap_apr(status, _("Can't create thread")); + status = apr_thread_join(&child_status, tid); + if (status) + return svn_error_wrap_apr(status, _("Can't join thread")); + if (baton.err) + return svn_error_trace(baton.err); + + /* Commit */ + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + /* Check for change made by thread. 
*/ + SVN_ERR(svn_fs_revision_root(&root, fs, head_rev, pool)); + SVN_ERR(svn_fs_node_prop(&value, root, "A", "name", pool)); + SVN_TEST_ASSERT(value && !strcmp(value->data, "value")); + + return SVN_NO_ERROR; +#else + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, "no thread support"); +#endif +} + +static svn_error_t * +upgrade_while_committing(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_revnum_t head_rev = 0; + svn_fs_root_t *root; + svn_fs_txn_t *txn1, *txn2; + const char *fs_path; + apr_hash_t *fs_config = apr_hash_make(pool); + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + if (opts->server_minor_version && (opts->server_minor_version < 6)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.6 SVN doesn't support FSFS packing"); + + /* Create test repository with greek tree. */ + fs_path = "test-upgrade-while-committing"; + + svn_hash_sets(fs_config, SVN_FS_CONFIG_COMPATIBLE_VERSION, "1.7"); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_SHARD_SIZE, "2"); + SVN_ERR(svn_test__create_fs2(&fs, fs_path, opts, fs_config, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn1, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn1, pool)); + SVN_ERR(svn_test__create_greek_tree(root, pool)); + SVN_ERR(test_commit_txn(&head_rev, txn1, NULL, pool)); + + /* Create txn with changes. */ + SVN_ERR(svn_fs_begin_txn(&txn1, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn1, pool)); + SVN_ERR(svn_fs_make_dir(root, "/foo", pool)); + + /* Upgrade filesystem, but keep existing svn_fs_t object. */ + SVN_ERR(svn_fs_upgrade(fs_path, pool)); + + /* Creating a new txn for the old svn_fs_t should not fail. */ + SVN_ERR(svn_fs_begin_txn(&txn2, fs, head_rev, pool)); + + /* Committing the already existing txn should not fail. 
*/ + SVN_ERR(test_commit_txn(&head_rev, txn1, NULL, pool)); + + /* Verify filesystem content. */ + SVN_ERR(svn_fs_verify(fs_path, NULL, 0, SVN_INVALID_REVNUM, NULL, NULL, + NULL, NULL, pool)); + + return SVN_NO_ERROR; +} + +/* Utility method for test_paths_changed. Verify that REV in FS changes + * exactly one path and that that change is a property change. Expect + * the MERGEINFO_MOD flag of the change to have the given value. + */ +static svn_error_t * +verify_root_prop_change(svn_fs_t *fs, + svn_revnum_t rev, + svn_tristate_t mergeinfo_mod, + apr_pool_t *pool) +{ + svn_fs_path_change2_t *change; + svn_fs_root_t *root; + apr_hash_t *changes; + + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + SVN_ERR(svn_fs_paths_changed2(&changes, root, pool)); + SVN_TEST_ASSERT(apr_hash_count(changes) == 1); + change = svn_hash_gets(changes, "/"); + + SVN_TEST_ASSERT(change->node_rev_id); + SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_modify); + SVN_TEST_ASSERT( change->node_kind == svn_node_dir + || change->node_kind == svn_node_unknown); + SVN_TEST_ASSERT(change->text_mod == FALSE); + SVN_TEST_ASSERT(change->prop_mod == TRUE); + + if (change->copyfrom_known) + { + SVN_TEST_ASSERT(change->copyfrom_rev == SVN_INVALID_REVNUM); + SVN_TEST_ASSERT(change->copyfrom_path == NULL); + } + + SVN_TEST_ASSERT(change->mergeinfo_mod == mergeinfo_mod); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_paths_changed(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_revnum_t head_rev = 0; + svn_fs_root_t *root; + svn_fs_txn_t *txn; + const char *fs_path; + apr_hash_t *changes; + svn_boolean_t has_mergeinfo_mod = FALSE; + apr_hash_index_t *hi; + int i; + + /* The "mergeinfo_mod flag will say "unknown" until recently. */ + if ( strcmp(opts->fs_type, "bdb") != 0 + && (!opts->server_minor_version || (opts->server_minor_version >= 9))) + has_mergeinfo_mod = TRUE; + + /* Create test repository with greek tree. 
*/ + fs_path = "test-paths-changed"; + + SVN_ERR(svn_test__create_fs2(&fs, fs_path, opts, NULL, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_test__create_greek_tree(root, pool)); + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + /* Create txns with various prop changes. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "/", "propname", + svn_string_create("propval", pool), pool)); + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "/", "svn:mergeinfo", + svn_string_create("/: 1\n", pool), pool)); + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + /* Verify changed path lists. */ + + /* Greek tree creation rev. */ + SVN_ERR(svn_fs_revision_root(&root, fs, head_rev - 2, pool)); + SVN_ERR(svn_fs_paths_changed2(&changes, root, pool)); + + /* Reports all paths? */ + for (i = 0; svn_test__greek_tree_nodes[i].path; ++i) + { + const char *path + = svn_fspath__canonicalize(svn_test__greek_tree_nodes[i].path, pool); + + SVN_TEST_ASSERT(svn_hash_gets(changes, path)); + } + + SVN_TEST_ASSERT(apr_hash_count(changes) == i); + + /* Verify per-path info. 
*/ + for (hi = apr_hash_first(pool, changes); hi; hi = apr_hash_next(hi)) + { + svn_fs_path_change2_t *change = apr_hash_this_val(hi); + + SVN_TEST_ASSERT(change->node_rev_id); + SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_add); + SVN_TEST_ASSERT( change->node_kind == svn_node_file + || change->node_kind == svn_node_dir + || change->node_kind == svn_node_unknown); + + if (change->node_kind != svn_node_unknown) + SVN_TEST_ASSERT(change->text_mod == ( change->node_kind + == svn_node_file)); + + SVN_TEST_ASSERT(change->prop_mod == FALSE); + + if (change->copyfrom_known) + { + SVN_TEST_ASSERT(change->copyfrom_rev == SVN_INVALID_REVNUM); + SVN_TEST_ASSERT(change->copyfrom_path == NULL); + } + + if (has_mergeinfo_mod) + SVN_TEST_ASSERT(change->mergeinfo_mod == svn_tristate_false); + else + SVN_TEST_ASSERT(change->mergeinfo_mod == svn_tristate_unknown); + } + + /* Propset rev. */ + SVN_ERR(verify_root_prop_change(fs, head_rev - 1, + has_mergeinfo_mod ? svn_tristate_false + : svn_tristate_unknown, + pool)); + + /* Mergeinfo set rev. */ + SVN_ERR(verify_root_prop_change(fs, head_rev, + has_mergeinfo_mod ? svn_tristate_true + : svn_tristate_unknown, + pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_delete_replaced_paths_changed(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_revnum_t head_rev = 0; + svn_fs_root_t *root; + svn_fs_txn_t *txn; + const char *fs_path; + apr_hash_t *changes; + svn_fs_path_change2_t *change; + const svn_fs_id_t *file_id; + + /* Create test repository with greek tree. */ + fs_path = "test-delete-replace-paths-changed"; + + SVN_ERR(svn_test__create_fs2(&fs, fs_path, opts, NULL, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_test__create_greek_tree(root, pool)); + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + /* Create that replaces a file with a folder and then deletes that + * replacement. 
Start with the deletion. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, head_rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_delete(root, "/iota", pool)); + + /* The change list should now report a deleted file. */ + SVN_ERR(svn_fs_paths_changed2(&changes, root, pool)); + change = svn_hash_gets(changes, "/iota"); + file_id = change->node_rev_id; + SVN_TEST_ASSERT( change->node_kind == svn_node_file + || change->node_kind == svn_node_unknown); + SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_delete); + + /* Add a replacement. */ + SVN_ERR(svn_fs_make_dir(root, "/iota", pool)); + + /* The change list now reports a replacement by a directory. */ + SVN_ERR(svn_fs_paths_changed2(&changes, root, pool)); + change = svn_hash_gets(changes, "/iota"); + SVN_TEST_ASSERT( change->node_kind == svn_node_dir + || change->node_kind == svn_node_unknown); + SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_replace); + SVN_TEST_ASSERT(svn_fs_compare_ids(change->node_rev_id, file_id) != 0); + + /* Delete the replacement again. */ + SVN_ERR(svn_fs_delete(root, "/iota", pool)); + + /* The change list should now be reported as a deleted file again. */ + SVN_ERR(svn_fs_paths_changed2(&changes, root, pool)); + change = svn_hash_gets(changes, "/iota"); + SVN_TEST_ASSERT( change->node_kind == svn_node_file + || change->node_kind == svn_node_unknown); + SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_delete); + SVN_TEST_ASSERT(svn_fs_compare_ids(change->node_rev_id, file_id) == 0); + + /* Finally, commit the change. */ + SVN_ERR(test_commit_txn(&head_rev, txn, NULL, pool)); + + /* The committed revision should still report the same change. 
*/ + SVN_ERR(svn_fs_revision_root(&root, fs, head_rev, pool)); + SVN_ERR(svn_fs_paths_changed2(&changes, root, pool)); + change = svn_hash_gets(changes, "/iota"); + SVN_TEST_ASSERT( change->node_kind == svn_node_file + || change->node_kind == svn_node_unknown); + SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_delete); + + return SVN_NO_ERROR; +} + +/* Get rid of transaction NAME in FS. This function deals with backend- + * specific behavior as permitted by the API. */ +static svn_error_t * +cleanup_txn(svn_fs_t *fs, + const char *name, + apr_pool_t *scratch_pool) +{ + /* Get rid of the txns one at a time. */ + svn_error_t *err = svn_fs_purge_txn(fs, name, scratch_pool); + + /* Some backends (BDB) don't support purging transactions that have never + * seen an abort or commit attempt. Simply abort those txns. */ + if (err && err->apr_err == SVN_ERR_FS_TRANSACTION_NOT_DEAD) + { + svn_fs_txn_t *txn; + svn_error_clear(err); + err = SVN_NO_ERROR; + + SVN_ERR(svn_fs_open_txn(&txn, fs, name, scratch_pool)); + SVN_ERR(svn_fs_abort_txn(txn, scratch_pool)); + + /* Should be gone now ... */ + SVN_TEST_ASSERT_ERROR(svn_fs_open_txn(&txn, fs, name, scratch_pool), + SVN_ERR_FS_NO_SUCH_TRANSACTION); + } + + return svn_error_trace(err); +} + +/* Make sure we get txn lists correctly. */ +static svn_error_t * +purge_txn_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + const char *name1, *name2; + apr_array_header_t *txn_list; + apr_pool_t *subpool = svn_pool_create(pool); + + SVN_ERR(svn_test__create_fs(&fs, "test-repo-purge-txn", + opts, pool)); - SVN_ERR(svn_fs_commit_txn(NULL, &after_rev, txn, subpool)); - SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev)); + /* Begin a new transaction, get its name (in the top pool), close it. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool)); + SVN_ERR(svn_fs_txn_name(&name1, txn, pool)); + + /* Begin *another* transaction, get its name (in the top pool), close it. 
*/ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool)); + SVN_ERR(svn_fs_txn_name(&name2, txn, pool)); + svn_pool_clear(subpool); + + /* Get rid of the txns one at a time. */ + SVN_ERR(cleanup_txn(fs, name1, pool)); + + /* There should be exactly one left. */ + SVN_ERR(svn_fs_list_transactions(&txn_list, fs, pool)); + + /* Check the list. It should have *exactly* one entry. */ + SVN_TEST_ASSERT( txn_list->nelts == 1 + && !strcmp(name2, APR_ARRAY_IDX(txn_list, 0, const char *))); + + /* Get rid of the other txn as well. */ + SVN_ERR(cleanup_txn(fs, name2, pool)); + + /* There should be exactly one left. */ + SVN_ERR(svn_fs_list_transactions(&txn_list, fs, pool)); + + /* Check the list. It should have no entries. */ + SVN_TEST_ASSERT(txn_list->nelts == 0); + + return SVN_NO_ERROR; +} + +static svn_error_t * +compare_contents(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root, *root1, *root2; + const char *original = "original contents"; + svn_revnum_t rev; + int i; + apr_pool_t *iterpool = svn_pool_create(pool); + + /* Two similar but different texts that yield the same MD5 digest. 
*/ + const char *evil_text1 + = "\xd1\x31\xdd\x02\xc5\xe6\xee\xc4\x69\x3d\x9a\x06\x98\xaf\xf9\x5c" + "\x2f\xca\xb5\x87\x12\x46\x7e\xab\x40\x04\x58\x3e\xb8\xfb\x7f\x89" + "\x55\xad\x34\x06\x09\xf4\xb3\x02\x83\xe4\x88\x83\x25\x71\x41\x5a" + "\x08\x51\x25\xe8\xf7\xcd\xc9\x9f\xd9\x1d\xbd\xf2\x80\x37\x3c\x5b" + "\xd8\x82\x3e\x31\x56\x34\x8f\x5b\xae\x6d\xac\xd4\x36\xc9\x19\xc6" + "\xdd\x53\xe2\xb4\x87\xda\x03\xfd\x02\x39\x63\x06\xd2\x48\xcd\xa0" + "\xe9\x9f\x33\x42\x0f\x57\x7e\xe8\xce\x54\xb6\x70\x80\xa8\x0d\x1e" + "\xc6\x98\x21\xbc\xb6\xa8\x83\x93\x96\xf9\x65\x2b\x6f\xf7\x2a\x70"; + const char *evil_text2 + = "\xd1\x31\xdd\x02\xc5\xe6\xee\xc4\x69\x3d\x9a\x06\x98\xaf\xf9\x5c" + "\x2f\xca\xb5\x07\x12\x46\x7e\xab\x40\x04\x58\x3e\xb8\xfb\x7f\x89" + "\x55\xad\x34\x06\x09\xf4\xb3\x02\x83\xe4\x88\x83\x25\xf1\x41\x5a" + "\x08\x51\x25\xe8\xf7\xcd\xc9\x9f\xd9\x1d\xbd\x72\x80\x37\x3c\x5b" + "\xd8\x82\x3e\x31\x56\x34\x8f\x5b\xae\x6d\xac\xd4\x36\xc9\x19\xc6" + "\xdd\x53\xe2\x34\x87\xda\x03\xfd\x02\x39\x63\x06\xd2\x48\xcd\xa0" + "\xe9\x9f\x33\x42\x0f\x57\x7e\xe8\xce\x54\xb6\x70\x80\x28\x0d\x1e" + "\xc6\x98\x21\xbc\xb6\xa8\x83\x93\x96\xf9\x65\xab\x6f\xf7\x2a\x70"; + svn_checksum_t *checksum1, *checksum2; + + /* (path, rev) pairs to compare plus the expected API return values */ + struct + { + svn_revnum_t rev1; + const char *path1; + svn_revnum_t rev2; + const char *path2; + + svn_boolean_t different; /* result of svn_fs_*_different */ + svn_tristate_t changed; /* result of svn_fs_*_changed */ + } to_compare[] = + { + /* same representation */ + { 1, "foo", 2, "foo", FALSE, svn_tristate_false }, + { 1, "foo", 2, "bar", FALSE, svn_tristate_false }, + { 2, "foo", 2, "bar", FALSE, svn_tristate_false }, + + /* different content but MD5 check is not reliable */ + { 3, "foo", 3, "bar", TRUE, svn_tristate_true }, + + /* different contents */ + { 1, "foo", 3, "bar", TRUE, svn_tristate_true }, + { 1, "foo", 3, "foo", TRUE, svn_tristate_true }, + { 3, "foo", 4, "bar", TRUE, svn_tristate_true }, 
+ { 3, "foo", 4, "bar", TRUE, svn_tristate_true }, + { 2, "bar", 3, "bar", TRUE, svn_tristate_true }, + { 3, "bar", 4, "bar", TRUE, svn_tristate_true }, + + /* variations on the same theme: same content, possibly different rep */ + { 4, "foo", 4, "bar", FALSE, svn_tristate_unknown }, + { 1, "foo", 4, "bar", FALSE, svn_tristate_unknown }, + { 2, "foo", 4, "bar", FALSE, svn_tristate_unknown }, + { 1, "foo", 4, "foo", FALSE, svn_tristate_unknown }, + { 2, "foo", 4, "foo", FALSE, svn_tristate_unknown }, + { 2, "bar", 4, "bar", FALSE, svn_tristate_unknown }, + + /* EOL */ + { 0 }, + }; + + /* Same same, but different. + * Just checking that we actually have an MD5 collision. */ + SVN_ERR(svn_checksum(&checksum1, svn_checksum_md5, evil_text1, + strlen(evil_text1), pool)); + SVN_ERR(svn_checksum(&checksum2, svn_checksum_md5, evil_text2, + strlen(evil_text2), pool)); + SVN_TEST_ASSERT(svn_checksum_match(checksum1, checksum1)); + SVN_TEST_ASSERT(strcmp(evil_text1, evil_text2)); + + /* Now, build up our test repo. */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-compare-contents", + opts, pool)); + + /* Rev 1: create a file. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_fs_make_file(txn_root, "foo", iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "foo", original, iterpool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, "foo", "prop", + svn_string_create(original, iterpool), + iterpool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool)); + SVN_TEST_ASSERT(rev == 1); + svn_pool_clear(iterpool); + + /* Rev 2: copy that file. 
*/ + SVN_ERR(svn_fs_revision_root(&root1, fs, rev, iterpool)); + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_fs_copy(root1, "foo", txn_root, "bar", iterpool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool)); + SVN_TEST_ASSERT(rev == 2); + svn_pool_clear(iterpool); + + /* Rev 3: modify both files. + * The new contents differs for both files but has the same length and MD5. + */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "foo", evil_text1, iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "bar", evil_text2, iterpool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, "foo", "prop", + svn_string_create(evil_text1, iterpool), + iterpool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, "bar", "prop", + svn_string_create(evil_text2, iterpool), + iterpool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool)); + SVN_TEST_ASSERT(rev == 3); + svn_pool_clear(iterpool); + + /* Rev 4: revert both file contents. + */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "foo", original, iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "bar", original, iterpool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, "foo", "prop", + svn_string_create(original, iterpool), + iterpool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, "bar", "prop", + svn_string_create(original, iterpool), + iterpool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool)); + SVN_TEST_ASSERT(rev == 4); + svn_pool_clear(iterpool); + + /* Perform all comparisons listed in TO_COMPARE. 
*/ + for (i = 0; to_compare[i].rev1 > 0; ++i) + { + svn_boolean_t text_different; + svn_boolean_t text_changed; + svn_boolean_t props_different; + svn_boolean_t props_changed; + + svn_pool_clear(iterpool); + SVN_ERR(svn_fs_revision_root(&root1, fs, to_compare[i].rev1, iterpool)); + SVN_ERR(svn_fs_revision_root(&root2, fs, to_compare[i].rev2, iterpool)); + + /* Compare node texts. */ + SVN_ERR(svn_fs_contents_different(&text_different, + root1, to_compare[i].path1, + root2, to_compare[i].path2, + iterpool)); + SVN_ERR(svn_fs_contents_changed(&text_changed, + root1, to_compare[i].path1, + root2, to_compare[i].path2, + iterpool)); + + /* Compare properties. */ + SVN_ERR(svn_fs_props_different(&props_different, + root1, to_compare[i].path1, + root2, to_compare[i].path2, + iterpool)); + SVN_ERR(svn_fs_props_changed(&props_changed, + root1, to_compare[i].path1, + root2, to_compare[i].path2, + iterpool)); + + /* Check results. */ + SVN_TEST_ASSERT(text_different == to_compare[i].different); + SVN_TEST_ASSERT(props_different == to_compare[i].different); + + switch (to_compare[i].changed) + { + case svn_tristate_true: + SVN_TEST_ASSERT(text_changed); + SVN_TEST_ASSERT(props_changed); + break; + + case svn_tristate_false: + SVN_TEST_ASSERT(!text_changed); + SVN_TEST_ASSERT(!props_changed); + break; + + default: + break; + } + } + + svn_pool_destroy(iterpool); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_path_change_create(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_root_t *root; + const svn_fs_id_t *id; + svn_fs_path_change2_t *change; + + /* Build an empty test repo ... */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-path-change-create", + opts, pool)); + + /* ... just to give us a valid ID. */ + SVN_ERR(svn_fs_revision_root(&root, fs, 0, pool)); + SVN_ERR(svn_fs_node_id(&id, root, "", pool)); + + /* Do what we came here for. 
*/ + change = svn_fs_path_change2_create(id, svn_fs_path_change_replace, pool); + + SVN_TEST_ASSERT(change); + SVN_TEST_ASSERT(change->node_rev_id == id); + SVN_TEST_ASSERT(change->change_kind == svn_fs_path_change_replace); + + /* All other fields should be "empty" / "unused". */ + SVN_TEST_ASSERT(change->node_kind == svn_node_none); + + SVN_TEST_ASSERT(change->text_mod == FALSE); + SVN_TEST_ASSERT(change->prop_mod == FALSE); + SVN_TEST_ASSERT(change->mergeinfo_mod == svn_tristate_unknown); + + SVN_TEST_ASSERT(change->copyfrom_known == FALSE); + SVN_TEST_ASSERT(change->copyfrom_rev == SVN_INVALID_REVNUM); + SVN_TEST_ASSERT(change->copyfrom_path == NULL); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_node_created_info(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root, *root; + svn_revnum_t rev; + int i; + apr_pool_t *iterpool = svn_pool_create(pool); + + /* Test vectors. */ + struct + { + svn_revnum_t rev; + const char *path; + svn_revnum_t crev; + const char *cpath; + } to_check[] = + { + /* New noderev only upon modification. */ + { 1, "A/B/E/beta", 1, "/A/B/E/beta" }, + { 2, "A/B/E/beta", 1, "/A/B/E/beta" }, + { 3, "A/B/E/beta", 3, "/A/B/E/beta" }, + { 4, "A/B/E/beta", 3, "/A/B/E/beta" }, + + /* Lazily copied node. */ + { 2, "Z/B/E/beta", 1, "/A/B/E/beta" }, + { 3, "Z/B/E/beta", 1, "/A/B/E/beta" }, + { 4, "Z/B/E/beta", 4, "/Z/B/E/beta" }, + + /* Bubble-up upon sub-tree change. */ + { 2, "Z", 2, "/Z" }, + { 3, "Z", 2, "/Z" }, + { 4, "Z", 4, "/Z" }, + + { 0 } + }; + + /* Start with a new repo and the greek tree in rev 1. 
*/ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-node-created-path", + opts, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_test__create_greek_tree(txn_root, iterpool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool)); + svn_pool_clear(iterpool); + + /* r2: copy a subtree */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_fs_revision_root(&root, fs, rev, iterpool)); + SVN_ERR(svn_fs_copy(root, "A", txn_root, "Z", iterpool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool)); + svn_pool_clear(iterpool); + + /* r3: touch node in copy source */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "A/B/E/beta", "new", iterpool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool)); + svn_pool_clear(iterpool); + + /* r4: touch same relative node in copy target */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "Z/B/E/beta", "new", iterpool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool)); + svn_pool_clear(iterpool); + + /* Now ask for some 'node created' info. */ + for (i = 0; to_check[i].rev > 0; ++i) + { + svn_revnum_t crev; + const char *cpath; + + svn_pool_clear(iterpool); + + /* Get created path and rev. */ + SVN_ERR(svn_fs_revision_root(&root, fs, to_check[i].rev, iterpool)); + SVN_ERR(svn_fs_node_created_path(&cpath, root, to_check[i].path, + iterpool)); + SVN_ERR(svn_fs_node_created_rev(&crev, root, to_check[i].path, + iterpool)); + + /* Compare the results with our expectations. 
*/ + SVN_TEST_STRING_ASSERT(cpath, to_check[i].cpath); + + if (crev != to_check[i].crev) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "created rev mismatch for %s@%ld:\n" + " expected '%ld'\n" + " found '%ld", + to_check[i].path, + to_check[i].rev, + to_check[i].crev, + crev); + } + + svn_pool_destroy(iterpool); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_print_modules(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + const char *expected, *module_name; + svn_stringbuf_t *modules = svn_stringbuf_create_empty(pool); + + /* Name of the providing module */ + if (strcmp(opts->fs_type, "fsx") == 0) + module_name = "fs_x"; + else if (strcmp(opts->fs_type, "fsfs") == 0) + module_name = "fs_fs"; + else if (strcmp(opts->fs_type, "bdb") == 0) + module_name = "fs_base"; + else + return svn_error_createf(SVN_ERR_TEST_SKIPPED, NULL, + "don't know the module name for %s", + opts->fs_type); + + SVN_ERR(svn_fs_print_modules(modules, pool)); + + /* The requested FS type must be listed amongst the available modules. */ + expected = apr_psprintf(pool, "* %s : ", module_name); + SVN_TEST_ASSERT(strstr(modules->data, expected)); + + return SVN_NO_ERROR; +} + +/* Baton to be used with process_file_contents. */ +typedef struct process_file_contents_baton_t +{ + const char *contents; + svn_boolean_t processed; +} process_file_contents_baton_t; + +/* Implements svn_fs_process_contents_func_t. + * We flag the BATON as "processed" and compare the CONTENTS we've got to + * what we expect through the BATON. + */ +static svn_error_t * +process_file_contents(const unsigned char *contents, + apr_size_t len, + void *baton, + apr_pool_t *scratch_pool) +{ + process_file_contents_baton_t *b = baton; + + SVN_TEST_ASSERT(strlen(b->contents) == len); + SVN_TEST_ASSERT(memcmp(b->contents, contents, len) == 0); + b->processed = TRUE; - /* Validate the DAG. 
*/ - SVN_ERR(svn_fs_revision_root(&root, fs, after_rev, pool)); - SVN_ERR(svn_test__validate_tree(root, expected_entries, 4, pool)); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_zero_copy_processsing(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root, *root; + svn_revnum_t rev; + const struct svn_test__tree_entry_t *node; + apr_pool_t *iterpool = svn_pool_create(pool); + + /* Start with a new repo and the greek tree in rev 1. */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-zero-copy-processing", + opts, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_test__create_greek_tree(txn_root, iterpool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, iterpool)); + svn_pool_clear(iterpool); + + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + + /* Prime the full-text cache by reading all file contents. */ + for (node = svn_test__greek_tree_nodes; node->path; node++) + if (node->contents) + { + svn_stream_t *stream; + svn_pool_clear(iterpool); + + SVN_ERR(svn_fs_file_contents(&stream, root, node->path, iterpool)); + SVN_ERR(svn_stream_copy3(stream, svn_stream_buffered(iterpool), + NULL, NULL, iterpool)); + } + + /* Now, try to get the data directly from cache + * (if the backend has caches). 
*/ + for (node = svn_test__greek_tree_nodes; node->path; node++) + if (node->contents) + { + svn_boolean_t success; + + process_file_contents_baton_t baton; + baton.contents = node->contents; + baton.processed = FALSE; + + svn_pool_clear(iterpool); + + SVN_ERR(svn_fs_try_process_file_contents(&success, root, node->path, + process_file_contents, &baton, + iterpool)); + SVN_TEST_ASSERT(success == baton.processed); + } + + svn_pool_destroy(iterpool); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_dir_optimal_order(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root, *root; + svn_revnum_t rev; + apr_hash_t *unordered; + apr_array_header_t *ordered; + int i; + apr_hash_index_t *hi; + + /* Create a new repo and the greek tree in rev 1. */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-dir-optimal-order", + opts, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_test__create_greek_tree(txn_root, pool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, pool)); + + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + + /* Call the API function we are interested in. */ + SVN_ERR(svn_fs_dir_entries(&unordered, root, "A", pool)); + SVN_ERR(svn_fs_dir_optimal_order(&ordered, root, unordered, pool, pool)); + + /* Verify that all entries are returned. */ + SVN_TEST_ASSERT(ordered->nelts == apr_hash_count(unordered)); + for (hi = apr_hash_first(pool, unordered); hi; hi = apr_hash_next(hi)) + { + svn_boolean_t found = FALSE; + const char *name = apr_hash_this_key(hi); + + /* Compare hash -> array because the array might contain the same + * entry more than once. Since that can't happen in the hash, doing + * it in this direction ensures ORDERED won't contain duplicates. 
+ */ + for (i = 0; !found && i < ordered->nelts; ++i) + { + svn_fs_dirent_t *item = APR_ARRAY_IDX(ordered, i, svn_fs_dirent_t*); + if (strcmp(item->name, name) == 0) + { + found = TRUE; + SVN_TEST_ASSERT(item == apr_hash_this_val(hi)); + } + } - /* Validate changed-paths, where the problem originally occurred. */ - for (i = 0; expected_changed_paths[i]; i++) - svn_hash_sets(expected_changes, expected_changed_paths[i], - "undefined value"); - SVN_ERR(svn_test__validate_changes(root, expected_changes, pool)); + SVN_TEST_ASSERT(found); } return SVN_NO_ERROR; } +static svn_error_t * +test_config_files(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + apr_array_header_t *files; + int i; + const char *repo_name = "test-repo-config-files"; + + /* Create a empty and get its config files. */ + SVN_ERR(svn_test__create_fs(&fs, repo_name, opts, pool)); + SVN_ERR(svn_fs_info_config_files(&files, fs, pool, pool)); + + /* All files should exist and be below the repo. */ + for (i = 0; i < files->nelts; ++i) + { + svn_node_kind_t kind; + const char *path = APR_ARRAY_IDX(files, i, const char*); + + SVN_ERR(svn_io_check_path(path, &kind, pool)); + + SVN_TEST_ASSERT(kind == svn_node_file); + SVN_TEST_ASSERT(svn_dirent_is_ancestor(repo_name, path)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_delta_file_stream(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root, *root1, *root2; + svn_revnum_t rev; + + const char *old_content = "some content"; + const char *new_content = "some more content"; + svn_txdelta_window_handler_t delta_handler; + void *delta_baton; + svn_txdelta_stream_t *delta_stream; + svn_stringbuf_t *source = svn_stringbuf_create_empty(pool); + svn_stringbuf_t *dest = svn_stringbuf_create_empty(pool); + + /* Create a new repo. */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-delta-file-stream", + opts, pool)); + + /* Revision 1: create a file. 
*/ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_file(txn_root, "foo", pool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "foo", old_content, pool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, pool)); + + /* Revision 2: create a file. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "foo", new_content, pool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, pool)); + + SVN_ERR(svn_fs_revision_root(&root1, fs, 1, pool)); + SVN_ERR(svn_fs_revision_root(&root2, fs, 2, pool)); + + /* Test 1: Get delta against empty target. */ + SVN_ERR(svn_fs_get_file_delta_stream(&delta_stream, + NULL, NULL, root1, "foo", pool)); + + svn_stringbuf_setempty(source); + svn_stringbuf_setempty(dest); + + svn_txdelta_apply(svn_stream_from_stringbuf(source, pool), + svn_stream_from_stringbuf(dest, pool), + NULL, NULL, pool, &delta_handler, &delta_baton); + SVN_ERR(svn_txdelta_send_txstream(delta_stream, + delta_handler, + delta_baton, + pool)); + SVN_TEST_STRING_ASSERT(old_content, dest->data); + + /* Test 2: Get delta against previous version. */ + SVN_ERR(svn_fs_get_file_delta_stream(&delta_stream, + root1, "foo", root2, "foo", pool)); + + svn_stringbuf_set(source, old_content); + svn_stringbuf_setempty(dest); + + svn_txdelta_apply(svn_stream_from_stringbuf(source, pool), + svn_stream_from_stringbuf(dest, pool), + NULL, NULL, pool, &delta_handler, &delta_baton); + SVN_ERR(svn_txdelta_send_txstream(delta_stream, + delta_handler, + delta_baton, + pool)); + SVN_TEST_STRING_ASSERT(new_content, dest->data); + + /* Test 3: Get reverse delta. 
*/ + SVN_ERR(svn_fs_get_file_delta_stream(&delta_stream, + root2, "foo", root1, "foo", pool)); + + svn_stringbuf_set(source, new_content); + svn_stringbuf_setempty(dest); + + svn_txdelta_apply(svn_stream_from_stringbuf(source, pool), + svn_stream_from_stringbuf(dest, pool), + NULL, NULL, pool, &delta_handler, &delta_baton); + SVN_ERR(svn_txdelta_send_txstream(delta_stream, + delta_handler, + delta_baton, + pool)); + SVN_TEST_STRING_ASSERT(old_content, dest->data); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_fs_merge(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root, *root0, *root1; + svn_revnum_t rev; + + /* Very basic test for svn_fs_merge because all the other interesting + * cases get tested implicitly with concurrent txn / commit tests. + * This API is just a thin layer around the internal merge function + * and we simply check that the plumbing between them works. + */ + + /* Create a new repo. */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-fs-merge", + opts, pool)); + SVN_ERR(svn_fs_revision_root(&root0, fs, 0, pool)); + + /* Revision 1: create a file. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_file(txn_root, "foo", pool)); + SVN_ERR(test_commit_txn(&rev, txn, NULL, pool)); + SVN_ERR(svn_fs_revision_root(&root1, fs, rev, pool)); + + /* Merge-able txn: create another file. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_file(txn_root, "bar", pool)); + + SVN_ERR(svn_fs_merge(NULL, root1, "/", txn_root, "/", root0, "/", pool)); + + /* Not merge-able: create the same file file. 
*/ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_file(txn_root, "foo", pool)); + + SVN_TEST_ASSERT_ERROR(svn_fs_merge(NULL, root1, "/", txn_root, "/", root0, + "/", pool), SVN_ERR_FS_CONFLICT); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_fsfs_config_opts(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + apr_hash_t *fs_config; + svn_fs_t *fs; + const svn_fs_info_placeholder_t *fs_info; + const svn_fs_fsfs_info_t *fsfs_info; + + /* Bail (with SKIP) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + /* Remove the test directory from previous runs. */ + SVN_ERR(svn_io_remove_dir2("test-fsfs-config-opts", TRUE, NULL, NULL, pool)); + + /* Create the test directory and add it to the test cleanup list. */ + SVN_ERR(svn_io_dir_make("test-fsfs-config-opts", APR_OS_DEFAULT, pool)); + svn_test_add_dir_cleanup("test-fsfs-config-opts"); + + /* Create an FSFS filesystem with default config.*/ + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FS_TYPE, SVN_FS_TYPE_FSFS); + SVN_ERR(svn_fs_create(&fs, "test-fsfs-config-opts/default", fs_config, pool)); + + /* Re-open FS to test the data on disk. */ + SVN_ERR(svn_fs_open2(&fs, "test-fsfs-config-opts/default", NULL, pool, pool)); + + SVN_ERR(svn_fs_info(&fs_info, fs, pool, pool)); + SVN_TEST_STRING_ASSERT(fs_info->fs_type, SVN_FS_TYPE_FSFS); + fsfs_info = (const void *) fs_info; + + /* Check FSFS specific info. Don't check the SHARD_SIZE, because it depends + * on a compile-time constant and may be overridden. */ + SVN_TEST_ASSERT(fsfs_info->log_addressing); + SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev == 0); + + /* Create an FSFS filesystem with custom settings: disabled log-addressing + * and custom shard size (123). 
*/ + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FS_TYPE, SVN_FS_TYPE_FSFS); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_LOG_ADDRESSING, "false"); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_SHARD_SIZE, "123"); + SVN_ERR(svn_fs_create(&fs, "test-fsfs-config-opts/custom", fs_config, pool)); + + /* Re-open FS to test the data on disk. */ + SVN_ERR(svn_fs_open2(&fs, "test-fsfs-config-opts/custom", NULL, pool, pool)); + + SVN_ERR(svn_fs_info(&fs_info, fs, pool, pool)); + SVN_TEST_STRING_ASSERT(fs_info->fs_type, SVN_FS_TYPE_FSFS); + fsfs_info = (const void *) fs_info; + + /* Check FSFS specific info, including the SHARD_SIZE. */ + SVN_TEST_ASSERT(fsfs_info->log_addressing == FALSE); + SVN_TEST_ASSERT(fsfs_info->shard_size == 123); + SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev == 0); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_txn_pool_lifetime(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + /* Technically, the FS API makes no assumption on the lifetime of logically + * dependent objects. In particular, a txn root object may get destroyed + * after the FS object that it has been built upon. Actual data access is + * implied to be invalid without a valid svn_fs_t. + * + * This test verifies that at least the destruction order of those two + * objects is arbitrary. + */ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + + /* We will allocate FS in FS_POOL. Using a separate allocator makes + * sure that we actually free the memory when destroying the pool. + */ + apr_allocator_t *fs_allocator = svn_pool_create_allocator(FALSE); + apr_pool_t *fs_pool = apr_allocator_owner_get(fs_allocator); + + /* Create a new repo. */ + SVN_ERR(svn_test__create_fs(&fs, "test-repo-pool-lifetime", + opts, fs_pool)); + + /* Create a TXN_ROOT referencing FS. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + + /* Destroy FS. 
Depending on the actual allocator implementation, + * these memory pages becomes inaccessible. */ + svn_pool_destroy(fs_pool); + + /* Unclean implementations will try to access FS and may segfault here. */ + svn_fs_close_root(txn_root); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_modify_txn_being_written(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + /* FSFS has a limitation (and check) that only one file can be + * modified in TXN at time: see r861812 and svn_fs_apply_text() docstring. + * This is regression test for this behavior. */ + svn_fs_t *fs; + svn_fs_txn_t *txn; + const char *txn_name; + svn_fs_root_t *txn_root; + svn_stream_t *foo_contents; + svn_stream_t *bar_contents; + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, SVN_FS_TYPE_FSFS) != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + /* Create a new repo. */ + SVN_ERR(svn_test__create_fs(&fs, "test-modify-txn-being-written", + opts, pool)); + + /* Create a TXN_ROOT referencing FS. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + + /* Make file /foo and open for writing.*/ + SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool)); + SVN_ERR(svn_fs_apply_text(&foo_contents, txn_root, "/foo", NULL, pool)); + + /* Attempt to modify another file '/bar' -- FSFS doesn't allow this. */ + SVN_ERR(svn_fs_make_file(txn_root, "/bar", pool)); + SVN_TEST_ASSERT_ERROR( + svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool), + SVN_ERR_FS_REP_BEING_WRITTEN); + + /* *Reopen TXN. */ + SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + + /* Check that file '/bar' still cannot be modified */ + SVN_TEST_ASSERT_ERROR( + svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool), + SVN_ERR_FS_REP_BEING_WRITTEN); + + /* Close file '/foo'. 
*/ + SVN_ERR(svn_stream_close(foo_contents)); + + /* Now file '/bar' can be modified. */ + SVN_ERR(svn_fs_apply_text(&bar_contents, txn_root, "/bar", NULL, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_prop_and_text_rep_sharing_collision(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + /* Regression test for issue 4554: Wrong file length with PLAIN + * representations in FSFS. */ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + svn_fs_root_t *rev_root; + svn_revnum_t new_rev; + svn_filesize_t length; + const char *testdir = "test-prop-and-text-rep-sharing-collision"; + + /* Create a new repo. */ + SVN_ERR(svn_test__create_fs(&fs, testdir, opts, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + /* Set node property for the root. */ + SVN_ERR(svn_fs_change_node_prop(txn_root, "/", "prop", + svn_string_create("value", pool), + pool)); + + /* Commit revision r1. */ + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, 1, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + + /* Create file with same contents as property representation. */ + SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "/foo", + "K 4\n" + "prop\n" + "V 5\n" + "value\n" + "END\n", pool)); + + /* Commit revision r2. */ + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool)); + + /* Check that FS reports correct length for the file (23). 
*/ + SVN_ERR(svn_fs_revision_root(&rev_root, fs, 2, pool)); + SVN_ERR(svn_fs_file_length(&length, rev_root, "/foo", pool)); + + SVN_TEST_ASSERT(length == 23); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_internal_txn_props(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_string_t *val; + svn_prop_t prop; + svn_prop_t internal_prop; + apr_array_header_t *props; + apr_hash_t *proplist; + svn_error_t *err; + + SVN_ERR(svn_test__create_fs(&fs, "test-repo-internal-txn-props", + opts, pool)); + SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, + SVN_FS_TXN_CHECK_LOCKS | + SVN_FS_TXN_CHECK_OOD | + SVN_FS_TXN_CLIENT_DATE, pool)); + + /* Ensure that we cannot read internal transaction properties. */ + SVN_ERR(svn_fs_txn_prop(&val, txn, SVN_FS__PROP_TXN_CHECK_LOCKS, pool)); + SVN_TEST_ASSERT(!val); + SVN_ERR(svn_fs_txn_prop(&val, txn, SVN_FS__PROP_TXN_CHECK_OOD, pool)); + SVN_TEST_ASSERT(!val); + SVN_ERR(svn_fs_txn_prop(&val, txn, SVN_FS__PROP_TXN_CLIENT_DATE, pool)); + SVN_TEST_ASSERT(!val); + + SVN_ERR(svn_fs_txn_proplist(&proplist, txn, pool)); + SVN_TEST_ASSERT(apr_hash_count(proplist) == 1); + val = svn_hash_gets(proplist, SVN_PROP_REVISION_DATE); + SVN_TEST_ASSERT(val); + + /* We also cannot change or discard them. 
*/ + val = svn_string_create("Ooops!", pool); + + err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_LOCKS, val, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS); + err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_LOCKS, NULL, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS); + err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_OOD, val, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS); + err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CHECK_OOD, NULL, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS); + err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CLIENT_DATE, val, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS); + err = svn_fs_change_txn_prop(txn, SVN_FS__PROP_TXN_CLIENT_DATE, NULL, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS); + + prop.name = "foo"; + prop.value = svn_string_create("bar", pool); + internal_prop.name = SVN_FS__PROP_TXN_CHECK_LOCKS; + internal_prop.value = svn_string_create("Ooops!", pool); + + props = apr_array_make(pool, 2, sizeof(svn_prop_t)); + APR_ARRAY_PUSH(props, svn_prop_t) = prop; + APR_ARRAY_PUSH(props, svn_prop_t) = internal_prop; + + err = svn_fs_change_txn_props(txn, props, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_INCORRECT_PARAMS); + + return SVN_NO_ERROR; +} + +/* A freeze function that expects an 'svn_error_t *' baton, and returns it. */ +/* This function implements svn_fs_freeze_func_t. 
*/ +static svn_error_t * +freeze_func(void *baton, apr_pool_t *pool) +{ + return baton; +} + +static svn_error_t * +freeze_and_commit(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + svn_revnum_t new_rev = 0; + apr_pool_t *subpool = svn_pool_create(pool); + + if (!strcmp(opts->fs_type, "bdb")) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will not test BDB repositories"); + + SVN_ERR(svn_test__create_fs(&fs, "test-freeze-and-commit", opts, subpool)); + + /* This test used to FAIL with an SQLite error since svn_fs_freeze() + * wouldn't unlock rep-cache.db. Therefore, part of the role of creating + * the Greek tree is to create a rep-cache.db, in order to test that + * svn_fs_freeze() unlocks it. */ + + /* r1: Commit the Greek tree. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + SVN_ERR(svn_test__create_greek_tree(txn_root, subpool)); + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, subpool)); + + /* Freeze and unfreeze. */ + SVN_ERR(svn_fs_freeze(fs, freeze_func, SVN_NO_ERROR, pool)); + + /* Freeze again, but have freeze_func fail. */ + { + svn_error_t *err = svn_error_create(APR_EGENERAL, NULL, NULL); + SVN_TEST_ASSERT_ERROR(svn_fs_freeze(fs, freeze_func, err, pool), + err->apr_err); + } + + /* Make some commit using same FS instance. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, "/", "temperature", + svn_string_create("310.05", pool), + pool)); + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool)); + + /* Re-open FS and make another commit. 
*/ + SVN_ERR(svn_fs_open(&fs, "test-freeze-and-commit", NULL, subpool)); + SVN_ERR(svn_fs_begin_txn(&txn, fs, new_rev, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, "/", "temperature", + svn_string_create("451", pool), + pool)); + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +commit_with_locked_rep_cache(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + svn_revnum_t new_rev; + svn_sqlite__db_t *sdb; + svn_error_t *err; + const char *fs_path; + const char *statements[] = { "SELECT MAX(revision) FROM rep_cache", NULL }; + + if (strcmp(opts->fs_type, SVN_FS_TYPE_FSFS) != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + if (opts->server_minor_version && (opts->server_minor_version < 6)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.6 SVN doesn't support FSFS rep-sharing"); + + fs_path = "test-repo-commit-with-locked-rep-cache"; + SVN_ERR(svn_test__create_fs(&fs, fs_path, opts, pool)); + + /* r1: Add a file. */ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_file(txn_root, "/foo", pool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "/foo", "a", pool)); + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, pool)); + SVN_TEST_INT_ASSERT(new_rev, 1); + + /* Begin a new transaction based on r1. */ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, 1, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "/foo", "b", pool)); + + /* Obtain a shared lock on the rep-cache.db by starting a new read + * transaction. 
*/ + SVN_ERR(svn_sqlite__open(&sdb, + svn_dirent_join(fs_path, "rep-cache.db", pool), + svn_sqlite__mode_readonly, statements, 0, NULL, + 0, pool, pool)); + SVN_ERR(svn_sqlite__begin_transaction(sdb)); + SVN_ERR(svn_sqlite__exec_statements(sdb, 0)); + + /* Attempt to commit fs transaction. This should result in a commit + * post-processing error due to us still holding the shared lock on the + * rep-cache.db. */ + err = svn_fs_commit_txn(NULL, &new_rev, txn, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_SQLITE_BUSY); + SVN_TEST_INT_ASSERT(new_rev, 2); + + /* Release the shared lock. */ + SVN_ERR(svn_sqlite__finish_transaction(sdb, SVN_NO_ERROR)); + SVN_ERR(svn_sqlite__close(sdb)); + + /* Try an operation that reads from rep-cache.db. + * + * XFAIL: Around r1740802, this call was producing an error due to the + * svn_fs_t keeping an unusable db connection (and associated file + * locks) within it. + */ + SVN_ERR(svn_fs_verify(fs_path, NULL, 0, SVN_INVALID_REVNUM, NULL, NULL, + NULL, NULL, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_rep_sharing_strict_content_check(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + svn_revnum_t new_rev; + const char *fs_path, *fs_path2; + apr_pool_t *subpool = svn_pool_create(pool); + svn_error_t *err; + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, SVN_FS_TYPE_BDB) == 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "BDB repositories don't support rep-sharing"); + + /* Create 2 repos with same structure & size but different contents */ + fs_path = "test-rep-sharing-strict-content-check1"; + fs_path2 = "test-rep-sharing-strict-content-check2"; + + SVN_ERR(svn_test__create_fs(&fs, fs_path, opts, subpool)); + + SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, 0, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + SVN_ERR(svn_fs_make_file(txn_root, "/foo", subpool)); + 
SVN_ERR(svn_test__set_file_contents(txn_root, "foo", "quite bad", subpool)); + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, subpool)); + SVN_TEST_INT_ASSERT(new_rev, 1); + + SVN_ERR(svn_test__create_fs(&fs, fs_path2, opts, subpool)); + + SVN_ERR(svn_fs_begin_txn2(&txn, fs, 0, 0, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + SVN_ERR(svn_fs_make_file(txn_root, "foo", subpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "foo", "very good", subpool)); + SVN_ERR(test_commit_txn(&new_rev, txn, NULL, subpool)); + SVN_TEST_INT_ASSERT(new_rev, 1); + + /* Close both repositories. */ + svn_pool_clear(subpool); + + /* Doctor the first repo such that it uses the wrong rep-cache. */ + SVN_ERR(svn_io_copy_file(svn_relpath_join(fs_path2, "rep-cache.db", pool), + svn_relpath_join(fs_path, "rep-cache.db", pool), + FALSE, pool)); + + /* Changing the file contents such that rep-sharing would kick in if + the file contents was not properly compared. */ + SVN_ERR(svn_fs_open2(&fs, fs_path, NULL, subpool, subpool)); + + SVN_ERR(svn_fs_begin_txn2(&txn, fs, 1, 0, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + err = svn_test__set_file_contents(txn_root, "foo", "very good", subpool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_GENERAL); + + svn_pool_destroy(subpool); + + return SVN_NO_ERROR; +} + /* ------------------------------------------------------------------------ */ /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 8; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(trivial_transaction, @@ -5048,6 +7188,12 @@ struct svn_test_descriptor_t test_funcs[] = "check that transaction names are not reused"), SVN_TEST_OPTS_PASS(write_and_read_file, "write and read a file's contents"), + SVN_TEST_OPTS_PASS(almostmedium_file_integrity, + "create and modify almostmedium file"), + SVN_TEST_OPTS_PASS(medium_file_integrity, + "create and modify medium file"), + SVN_TEST_OPTS_PASS(large_file_integrity, + "create and modify large file"), SVN_TEST_OPTS_PASS(create_mini_tree_transaction, "test basic file and subdirectory creation"), SVN_TEST_OPTS_PASS(create_greek_tree_transaction, @@ -5079,10 +7225,6 @@ struct svn_test_descriptor_t test_funcs[] = "check old revisions"), SVN_TEST_OPTS_PASS(check_all_revisions, "after each commit, check all revisions"), - SVN_TEST_OPTS_PASS(medium_file_integrity, - "create and modify medium file"), - SVN_TEST_OPTS_PASS(large_file_integrity, - "create and modify large file"), SVN_TEST_OPTS_PASS(check_root_revision, "ensure accurate storage of root node"), SVN_TEST_OPTS_PASS(test_node_created_rev, @@ -5111,5 +7253,63 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_fs_delete_fs"), SVN_TEST_OPTS_PASS(filename_trailing_newline, "filenames with trailing \\n might be rejected"), + SVN_TEST_OPTS_PASS(test_fs_info_format, + "test svn_fs_info_format"), + SVN_TEST_OPTS_PASS(commit_timestamp, + "commit timestamp"), + SVN_TEST_OPTS_PASS(test_compat_version, + "test svn_fs__compatible_version"), + SVN_TEST_OPTS_PASS(dir_prop_merge, + "test merge directory properties"), + SVN_TEST_OPTS_XFAIL_OTOH(reopen_modify, + "test reopen and modify txn", + SVN_TEST_PASS_IF_FS_TYPE_IS_NOT("fsfs")), + SVN_TEST_OPTS_PASS(upgrade_while_committing, + "upgrade while committing"), + SVN_TEST_OPTS_PASS(test_paths_changed, + "test svn_fs_paths_changed"), + 
SVN_TEST_OPTS_PASS(test_delete_replaced_paths_changed, + "test deletion after replace in changed paths list"), + SVN_TEST_OPTS_PASS(purge_txn_test, + "test purging transactions"), + SVN_TEST_OPTS_PASS(compare_contents, + "compare contents of different nodes"), + SVN_TEST_OPTS_PASS(test_path_change_create, + "test svn_fs_path_change2_create"), + SVN_TEST_OPTS_PASS(test_node_created_info, + "test FS 'node created' info"), + SVN_TEST_OPTS_PASS(test_print_modules, + "test FS module listing"), + SVN_TEST_OPTS_PASS(test_zero_copy_processsing, + "test zero copy file processing"), + SVN_TEST_OPTS_PASS(test_dir_optimal_order, + "test svn_fs_dir_optimal_order"), + SVN_TEST_OPTS_PASS(test_config_files, + "get configuration files"), + SVN_TEST_OPTS_PASS(test_delta_file_stream, + "get a delta stream on a file"), + SVN_TEST_OPTS_PASS(test_fs_merge, + "get merging txns with newer revisions"), + SVN_TEST_OPTS_PASS(test_fsfs_config_opts, + "test creating FSFS repository with different opts"), + SVN_TEST_OPTS_PASS(test_txn_pool_lifetime, + "test pool lifetime dependencies with txn roots"), + SVN_TEST_OPTS_PASS(test_modify_txn_being_written, + "test modify txn being written in FSFS"), + SVN_TEST_OPTS_PASS(test_prop_and_text_rep_sharing_collision, + "test property and text rep-sharing collision"), + SVN_TEST_OPTS_PASS(test_internal_txn_props, + "test setting and getting internal txn props"), + SVN_TEST_OPTS_PASS(check_txn_related, + "test svn_fs_check_related for transactions"), + SVN_TEST_OPTS_PASS(freeze_and_commit, + "freeze and commit"), + SVN_TEST_OPTS_PASS(commit_with_locked_rep_cache, + "test commit with locked rep-cache"), + SVN_TEST_OPTS_XFAIL_OTOH(test_rep_sharing_strict_content_check, + "test rep-sharing on content rather than SHA1", + SVN_TEST_PASS_IF_FS_TYPE_IS(SVN_FS_TYPE_FSFS)), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs/locks-test.c b/subversion/tests/libsvn_fs/locks-test.c index ee275de..0c86eb2 100644 --- 
a/subversion/tests/libsvn_fs/locks-test.c +++ b/subversion/tests/libsvn_fs/locks-test.c @@ -28,6 +28,7 @@ #include "svn_error.h" #include "svn_fs.h" +#include "svn_hash.h" #include "../svn_test_fs.h" @@ -52,9 +53,19 @@ get_locks_callback(void *baton, struct get_locks_baton_t *b = baton; apr_pool_t *hash_pool = apr_hash_pool_get(b->locks); svn_string_t *lock_path = svn_string_create(lock->path, hash_pool); - apr_hash_set(b->locks, lock_path->data, lock_path->len, - svn_lock_dup(lock, hash_pool)); - return SVN_NO_ERROR; + + if (!apr_hash_get(b->locks, lock_path->data, lock_path->len)) + { + apr_hash_set(b->locks, lock_path->data, lock_path->len, + svn_lock_dup(lock, hash_pool)); + return SVN_NO_ERROR; + } + else + { + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "Lock for path '%s' is being reported twice.", + lock->path); + } } /* A factory function. */ @@ -608,9 +619,9 @@ lock_expiration(const svn_test_opts_t *opts, SVN_ERR(svn_fs_create_access(&access, "bubba", pool)); SVN_ERR(svn_fs_set_access(fs, access)); - /* Lock /A/D/G/rho, with an expiration 3 seconds from now. */ + /* Lock /A/D/G/rho, with an expiration 2 seconds from now. */ SVN_ERR(svn_fs_lock(&mylock, fs, "/A/D/G/rho", NULL, "", 0, - apr_time_now() + apr_time_from_sec(3), + apr_time_now() + apr_time_from_sec(2), SVN_INVALID_REVNUM, FALSE, pool)); /* Become nobody. */ @@ -640,9 +651,9 @@ lock_expiration(const svn_test_opts_t *opts, num_expected_paths, pool)); } - /* Sleep 5 seconds, so the lock auto-expires. Anonymous commit + /* Sleep 2 seconds, so the lock auto-expires. Anonymous commit should then succeed. */ - apr_sleep(apr_time_from_sec(5)); + apr_sleep(apr_time_from_sec(3)); /* Verify that the lock auto-expired even in the recursive case. 
*/ { @@ -786,15 +797,430 @@ lock_out_of_date(const svn_test_opts_t *opts, return SVN_NO_ERROR; } +struct lock_result_t { + const svn_lock_t *lock; + svn_error_t *fs_err; +}; + +static svn_error_t * +expect_lock(const char *path, + apr_hash_t *results, + svn_fs_t *fs, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + SVN_TEST_ASSERT(result && result->lock && !result->fs_err); + SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool)); + SVN_TEST_ASSERT(lock); + return SVN_NO_ERROR; +} + +static svn_error_t * +expect_error(const char *path, + apr_hash_t *results, + svn_fs_t *fs, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + SVN_TEST_ASSERT(result && !result->lock && result->fs_err); + svn_error_clear(result->fs_err); + SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool)); + SVN_TEST_ASSERT(!lock); + return SVN_NO_ERROR; +} + +static svn_error_t * +expect_unlock(const char *path, + apr_hash_t *results, + svn_fs_t *fs, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + SVN_TEST_ASSERT(result && !result->fs_err); + SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool)); + SVN_TEST_ASSERT(!lock); + return SVN_NO_ERROR; +} + +static svn_error_t * +expect_unlock_error(const char *path, + apr_hash_t *results, + svn_fs_t *fs, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + SVN_TEST_ASSERT(result && result->fs_err); + svn_error_clear(result->fs_err); + SVN_ERR(svn_fs_get_lock(&lock, fs, path, scratch_pool)); + SVN_TEST_ASSERT(lock); + return SVN_NO_ERROR; +} + +struct lock_many_baton_t { + apr_hash_t *results; + apr_pool_t *pool; + int count; +}; + +/* Implements svn_fs_lock_callback_t. 
*/ +static svn_error_t * +lock_many_cb(void *lock_baton, + const char *path, + const svn_lock_t *lock, + svn_error_t *fs_err, + apr_pool_t *pool) +{ + struct lock_many_baton_t *b = lock_baton; + struct lock_result_t *result = apr_palloc(b->pool, + sizeof(struct lock_result_t)); + + result->lock = lock; + result->fs_err = svn_error_dup(fs_err); + svn_hash_sets(b->results, apr_pstrdup(b->pool, path), result); + + if (b->count) + if (!--(b->count)) + return svn_error_create(SVN_ERR_FS_GENERAL, NULL, "lock_many_cb"); + + return SVN_NO_ERROR; +} + +static svn_error_t * +lock_multiple_paths(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *root, *txn_root; + const char *conflict; + svn_revnum_t newrev; + svn_fs_access_t *access; + svn_fs_lock_target_t *target; + struct lock_many_baton_t baton; + apr_hash_t *lock_paths, *unlock_paths; + apr_hash_index_t *hi; + + SVN_ERR(create_greek_fs(&fs, &newrev, "test-lock-multiple-paths", + opts, pool)); + SVN_ERR(svn_fs_create_access(&access, "bubba", pool)); + SVN_ERR(svn_fs_set_access(fs, access)); + SVN_ERR(svn_fs_revision_root(&root, fs, newrev, pool)); + SVN_ERR(svn_fs_begin_txn2(&txn, fs, newrev, SVN_FS_TXN_CHECK_LOCKS, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/A/BB", pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "/A/BBB", pool)); + SVN_ERR(svn_fs_copy(root, "/A/mu", txn_root, "/A/BB/mu", pool)); + SVN_ERR(svn_fs_copy(root, "/A/mu", txn_root, "/A/BBB/mu", pool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool)); + + baton.results = apr_hash_make(pool); + baton.pool = pool; + baton.count = 0; + lock_paths = apr_hash_make(pool); + unlock_paths = apr_hash_make(pool); + target = svn_fs_lock_target_create(NULL, newrev, pool); + + svn_hash_sets(lock_paths, "/A/B/E/alpha", target); + svn_hash_sets(lock_paths, "/A/B/E/beta", target); + svn_hash_sets(lock_paths, "/A/B/E/zulu", target); + svn_hash_sets(lock_paths, 
"/A/BB/mu", target); + svn_hash_sets(lock_paths, "/A/BBB/mu", target); + svn_hash_sets(lock_paths, "/A/D/G/pi", target); + svn_hash_sets(lock_paths, "/A/D/G/rho", target); + svn_hash_sets(lock_paths, "/A/mu", target); + svn_hash_sets(lock_paths, "/X/zulu", target); + + /* Lock some paths. */ + apr_hash_clear(baton.results); + SVN_ERR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0, + lock_many_cb, &baton, + pool, pool)); + + SVN_ERR(expect_lock("/A/B/E/alpha", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/B/E/beta", baton.results, fs, pool)); + SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/BB/mu", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/BBB/mu", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/D/G/pi", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/D/G/rho", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/mu", baton.results, fs, pool)); + SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool)); + + /* Unlock without force and wrong tokens. */ + for (hi = apr_hash_first(pool, lock_paths); hi; hi = apr_hash_next(hi)) + svn_hash_sets(unlock_paths, apr_hash_this_key(hi), "wrong-token"); + apr_hash_clear(baton.results); + SVN_ERR(svn_fs_unlock_many(fs, unlock_paths, FALSE, lock_many_cb, &baton, + pool, pool)); + + SVN_ERR(expect_unlock_error("/A/B/E/alpha", baton.results, fs, pool)); + SVN_ERR(expect_unlock_error("/A/B/E/beta", baton.results, fs, pool)); + SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool)); + SVN_ERR(expect_unlock_error("/A/BB/mu", baton.results, fs, pool)); + SVN_ERR(expect_unlock_error("/A/BBB/mu", baton.results, fs, pool)); + SVN_ERR(expect_unlock_error("/A/D/G/pi", baton.results, fs, pool)); + SVN_ERR(expect_unlock_error("/A/D/G/rho", baton.results, fs, pool)); + SVN_ERR(expect_unlock_error("/A/mu", baton.results, fs, pool)); + SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool)); + + /* Force unlock. 
*/ + for (hi = apr_hash_first(pool, lock_paths); hi; hi = apr_hash_next(hi)) + svn_hash_sets(unlock_paths, apr_hash_this_key(hi), ""); + apr_hash_clear(baton.results); + SVN_ERR(svn_fs_unlock_many(fs, unlock_paths, TRUE, lock_many_cb, &baton, + pool, pool)); + + SVN_ERR(expect_unlock("/A/B/E/alpha", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/B/E/beta", baton.results, fs, pool)); + SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/BB/mu", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/BBB/mu", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/D/G/pi", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/D/G/rho", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/mu", baton.results, fs, pool)); + SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool)); + + /* Lock again. */ + apr_hash_clear(baton.results); + SVN_ERR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0, + lock_many_cb, &baton, + pool, pool)); + + SVN_ERR(expect_lock("/A/B/E/alpha", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/B/E/beta", baton.results, fs, pool)); + SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/BB/mu", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/BBB/mu", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/D/G/pi", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/D/G/rho", baton.results, fs, pool)); + SVN_ERR(expect_lock("/A/mu", baton.results, fs, pool)); + SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool)); + + /* Unlock without force. */ + for (hi = apr_hash_first(pool, baton.results); hi; hi = apr_hash_next(hi)) + { + struct lock_result_t *result = apr_hash_this_val(hi); + svn_hash_sets(unlock_paths, apr_hash_this_key(hi), + result->lock ? 
result->lock->token : "non-existent-token"); + } + apr_hash_clear(baton.results); + SVN_ERR(svn_fs_unlock_many(fs, unlock_paths, FALSE, lock_many_cb, &baton, + pool, pool)); + + SVN_ERR(expect_unlock("/A/B/E/alpha", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/B/E/beta", baton.results, fs, pool)); + SVN_ERR(expect_error("/A/B/E/zulu", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/BB/mu", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/BBB/mu", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/D/G/pi", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/D/G/rho", baton.results, fs, pool)); + SVN_ERR(expect_unlock("/A/mu", baton.results, fs, pool)); + SVN_ERR(expect_error("/X/zulu", baton.results, fs, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +lock_cb_error(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_revnum_t newrev; + svn_fs_access_t *access; + svn_fs_lock_target_t *target; + struct lock_many_baton_t baton; + apr_hash_t *lock_paths, *unlock_paths; + svn_lock_t *lock; + + SVN_ERR(create_greek_fs(&fs, &newrev, "test-lock-cb-error", opts, pool)); + SVN_ERR(svn_fs_create_access(&access, "bubba", pool)); + SVN_ERR(svn_fs_set_access(fs, access)); + + baton.results = apr_hash_make(pool); + baton.pool = pool; + baton.count = 1; + lock_paths = apr_hash_make(pool); + unlock_paths = apr_hash_make(pool); + target = svn_fs_lock_target_create(NULL, newrev, pool); + + svn_hash_sets(lock_paths, "/A/B/E/alpha", target); + svn_hash_sets(lock_paths, "/A/B/E/beta", target); + + apr_hash_clear(baton.results); + SVN_TEST_ASSERT_ERROR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0, + lock_many_cb, &baton, + pool, pool), + SVN_ERR_FS_GENERAL); + + SVN_TEST_ASSERT(apr_hash_count(baton.results) == 1); + SVN_TEST_ASSERT(svn_hash_gets(baton.results, "/A/B/E/alpha") + || svn_hash_gets(baton.results, "/A/B/E/beta")); + SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/alpha", pool)); + SVN_TEST_ASSERT(lock); + 
svn_hash_sets(unlock_paths, "/A/B/E/alpha", lock->token); + SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/beta", pool)); + SVN_TEST_ASSERT(lock); + svn_hash_sets(unlock_paths, "/A/B/E/beta", lock->token); + + baton.count = 1; + apr_hash_clear(baton.results); + SVN_TEST_ASSERT_ERROR(svn_fs_unlock_many(fs, unlock_paths, FALSE, + lock_many_cb, &baton, + pool, pool), + SVN_ERR_FS_GENERAL); + + SVN_TEST_ASSERT(apr_hash_count(baton.results) == 1); + SVN_TEST_ASSERT(svn_hash_gets(baton.results, "/A/B/E/alpha") + || svn_hash_gets(baton.results, "/A/B/E/beta")); + + SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/alpha", pool)); + SVN_TEST_ASSERT(!lock); + SVN_ERR(svn_fs_get_lock(&lock, fs, "/A/B/E/beta", pool)); + SVN_TEST_ASSERT(!lock); + return SVN_NO_ERROR; +} + +static svn_error_t * +obtain_write_lock_failure(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_revnum_t newrev; + svn_fs_access_t *access; + svn_fs_lock_target_t *target; + struct lock_many_baton_t baton; + apr_hash_t *lock_paths, *unlock_paths; + + /* The test makes sense only for FSFS. */ + if (strcmp(opts->fs_type, SVN_FS_TYPE_FSFS) != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + SVN_ERR(create_greek_fs(&fs, &newrev, "test-obtain-write-lock-failure", + opts, pool)); + SVN_ERR(svn_fs_create_access(&access, "bubba", pool)); + SVN_ERR(svn_fs_set_access(fs, access)); + + /* Make a read only 'write-lock' file. This prevents any write operations + from being executed. */ + SVN_ERR(svn_io_set_file_read_only("test-obtain-write-lock-failure/write-lock", + FALSE, pool)); + + baton.results = apr_hash_make(pool); + baton.pool = pool; + baton.count = 0; + + /* Trying to lock some paths. We don't really care about error; the test + shouldn't crash. 
*/ + target = svn_fs_lock_target_create(NULL, newrev, pool); + lock_paths = apr_hash_make(pool); + svn_hash_sets(lock_paths, "/iota", target); + svn_hash_sets(lock_paths, "/A/mu", target); + + apr_hash_clear(baton.results); + SVN_TEST_ASSERT_ANY_ERROR(svn_fs_lock_many(fs, lock_paths, "comment", 0, 0, 0, + lock_many_cb, &baton, pool, pool)); + + /* Trying to unlock some paths. We don't really care about error; the test + shouldn't crash. */ + unlock_paths = apr_hash_make(pool); + svn_hash_sets(unlock_paths, "/iota", ""); + svn_hash_sets(unlock_paths, "/A/mu", ""); + + apr_hash_clear(baton.results); + SVN_TEST_ASSERT_ANY_ERROR(svn_fs_unlock_many(fs, unlock_paths, TRUE, + lock_many_cb, &baton, pool, + pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +parent_and_child_lock(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_access_t *access; + svn_fs_txn_t *txn; + svn_fs_root_t *root; + const char *conflict; + svn_revnum_t newrev; + svn_lock_t *lock; + struct get_locks_baton_t *get_locks_baton; + apr_size_t num_expected_paths; + + SVN_ERR(svn_test__create_fs(&fs, "test-parent-and-child-lock", opts, pool)); + SVN_ERR(svn_fs_create_access(&access, "bubba", pool)); + SVN_ERR(svn_fs_set_access(fs, access)); + + /* Make a file '/A'. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_file(root, "/A", pool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool)); + + /* Obtain a lock on '/A'. */ + SVN_ERR(svn_fs_lock(&lock, fs, "/A", NULL, NULL, FALSE, 0, newrev, FALSE, + pool)); + + /* Add a lock token to FS access context. */ + SVN_ERR(svn_fs_access_add_lock_token(access, lock->token)); + + /* Make some weird change: replace file '/A' by a directory with a + child. Issue 2507 means that the result is that the directory /A + remains locked. 
*/ + SVN_ERR(svn_fs_begin_txn(&txn, fs, newrev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_delete(root, "/A", pool)); + SVN_ERR(svn_fs_make_dir(root, "/A", pool)); + SVN_ERR(svn_fs_make_file(root, "/A/b", pool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &newrev, txn, pool)); + + /* Obtain a lock on '/A/b'. Issue 2507 means that the lock index + for / refers to both /A and /A/b, and that the lock index for /A + refers to /A/b. */ + SVN_ERR(svn_fs_lock(&lock, fs, "/A/b", NULL, NULL, FALSE, 0, newrev, FALSE, + pool)); + + /* Verify the locked paths. The lock for /A/b should not be reported + twice even though issue 2507 means we access the index for / and + the index for /A both of which refer to /A/b. */ + { + static const char *expected_paths[] = { + "/A", + "/A/b", + }; + num_expected_paths = sizeof(expected_paths) / sizeof(const char *); + get_locks_baton = make_get_locks_baton(pool); + SVN_ERR(svn_fs_get_locks(fs, "/", get_locks_callback, + get_locks_baton, pool)); + SVN_ERR(verify_matching_lock_paths(get_locks_baton, expected_paths, + num_expected_paths, pool)); + } + + return SVN_NO_ERROR; +} /* ------------------------------------------------------------------------ */ /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 2; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, + SVN_TEST_OPTS_PASS(lock_expiration, + "test that locks can expire"), SVN_TEST_OPTS_PASS(lock_only, "lock only"), SVN_TEST_OPTS_PASS(lookup_lock_by_path, @@ -811,11 +1237,19 @@ struct svn_test_descriptor_t test_funcs[] = "test that locking is enforced in final commit step"), SVN_TEST_OPTS_PASS(lock_dir_propchange, "dir propchange can be committed with locked child"), - SVN_TEST_OPTS_PASS(lock_expiration, - "test that locks can expire"), SVN_TEST_OPTS_PASS(lock_break_steal_refresh, "breaking, stealing, refreshing a lock"), SVN_TEST_OPTS_PASS(lock_out_of_date, "check out-of-dateness before locking"), + SVN_TEST_OPTS_PASS(lock_multiple_paths, + "lock multiple paths"), + SVN_TEST_OPTS_PASS(lock_cb_error, + "lock callback error"), + SVN_TEST_OPTS_PASS(obtain_write_lock_failure, + "lock/unlock when 'write-lock' couldn't be obtained"), + SVN_TEST_OPTS_PASS(parent_and_child_lock, + "lock parent and its child"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs_base/changes-test.c b/subversion/tests/libsvn_fs_base/changes-test.c index b93410e..a637203 100644 --- a/subversion/tests/libsvn_fs_base/changes-test.c +++ b/subversion/tests/libsvn_fs_base/changes-test.c @@ -193,7 +193,7 @@ changes_fetch_raw(const svn_test_opts_t *opts, struct changes_args args; /* Create a new fs and repos */ - SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-changes-fetch", opts, + SVN_ERR(svn_test__create_bdb_fs(&fs, "test-repo-changes-fetch-raw", opts, pool)); /* First, verify that we can request changes for an arbitrary key @@ -903,7 +903,9 @@ changes_bad_sequences(const svn_test_opts_t *opts, /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(changes_add, @@ -920,3 +922,5 @@ struct svn_test_descriptor_t test_funcs[] = "verify that bad change sequences raise errors"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs_base/fs-base-test.c b/subversion/tests/libsvn_fs_base/fs-base-test.c index d16deae..20d4d63 100644 --- a/subversion/tests/libsvn_fs_base/fs-base-test.c +++ b/subversion/tests/libsvn_fs_base/fs-base-test.c @@ -124,7 +124,7 @@ static svn_error_t * check_entry_present(svn_fs_root_t *root, const char *path, const char *name, apr_pool_t *pool) { - svn_boolean_t present; + svn_boolean_t present = FALSE; SVN_ERR(check_entry(root, path, name, &present, pool)); if (! present) @@ -141,7 +141,7 @@ static svn_error_t * check_entry_absent(svn_fs_root_t *root, const char *path, const char *name, apr_pool_t *pool) { - svn_boolean_t present; + svn_boolean_t present = TRUE; SVN_ERR(check_entry(root, path, name, &present, pool)); if (present) @@ -213,7 +213,7 @@ check_id(svn_fs_t *fs, const svn_fs_id_t *id, svn_boolean_t *present, static svn_error_t * check_id_present(svn_fs_t *fs, const svn_fs_id_t *id, apr_pool_t *pool) { - svn_boolean_t present; + svn_boolean_t present = FALSE; SVN_ERR(check_id(fs, id, &present, pool)); if (! present) @@ -233,7 +233,7 @@ check_id_present(svn_fs_t *fs, const svn_fs_id_t *id, apr_pool_t *pool) static svn_error_t * check_id_absent(svn_fs_t *fs, const svn_fs_id_t *id, apr_pool_t *pool) { - svn_boolean_t present; + svn_boolean_t present = TRUE; SVN_ERR(check_id(fs, id, &present, pool)); if (present) @@ -1519,7 +1519,9 @@ key_test(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 2; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(create_berkeley_filesystem, @@ -1546,3 +1548,5 @@ struct svn_test_descriptor_t test_funcs[] = "testing sequential alphanumeric key generation"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs_base/strings-reps-test.c b/subversion/tests/libsvn_fs_base/strings-reps-test.c index 4512eef..34e829c 100644 --- a/subversion/tests/libsvn_fs_base/strings-reps-test.c +++ b/subversion/tests/libsvn_fs_base/strings-reps-test.c @@ -612,7 +612,7 @@ write_null_string(const svn_test_opts_t *opts, /* Create a new fs and repos */ SVN_ERR(svn_test__create_bdb_fs - (&fs, "test-repo-test-strings", opts, + (&fs, "test-repo-write-null-string", opts, pool)); args.fs = fs; @@ -732,7 +732,9 @@ copy_string(const svn_test_opts_t *opts, /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 3; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(write_new_rep, @@ -753,3 +755,5 @@ struct svn_test_descriptor_t test_funcs[] = "create and copy a string"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c b/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c new file mode 100644 index 0000000..818c1e0 --- /dev/null +++ b/subversion/tests/libsvn_fs_fs/fs-fs-fuzzy-test.c @@ -0,0 +1,393 @@ +/* fs-fs-fuzzy-test.c --- fuzzing tests for the FSFS filesystem + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include <stdlib.h> +#include <string.h> +#include <apr_pools.h> + +#include "../svn_test.h" +#include "../../libsvn_fs_fs/fs.h" +#include "../../libsvn_fs_fs/fs_fs.h" +#include "../../libsvn_fs_fs/rev_file.h" + +#include "svn_hash.h" +#include "svn_pools.h" +#include "svn_props.h" +#include "svn_fs.h" +#include "private/svn_string_private.h" +#include "private/svn_string_private.h" + +#include "../svn_test_fs.h" + + + +/*** Helper Functions ***/ + +/* We won't log or malfunction() upon errors. */ +static void +dont_filter_warnings(void *baton, svn_error_t *err) +{ + return; +} + + +/*** Test core code ***/ + +/* Verify that a modification of any single byte in REVISION of FS at + * REPO_NAME using MODIFIER with BATON will be detected. */ +static svn_error_t * +fuzzing_1_byte_1_rev(const char *repo_name, + svn_fs_t *fs, + svn_revnum_t revision, + unsigned char (* modifier)(unsigned char c, void *baton), + void *baton, + apr_pool_t *pool) +{ + svn_repos_t *repos; + apr_hash_t *fs_config; + svn_fs_fs__revision_file_t *rev_file; + apr_off_t filesize = 0, offset; + apr_off_t i; + unsigned char footer_len; + + apr_pool_t *iterpool = svn_pool_create(pool); + + /* Open the revision file for modification. 
*/ + SVN_ERR(svn_fs_fs__open_pack_or_rev_file_writable(&rev_file, fs, revision, + pool, iterpool)); + SVN_ERR(svn_fs_fs__auto_read_footer(rev_file)); + SVN_ERR(svn_io_file_seek(rev_file->file, APR_END, &filesize, iterpool)); + + offset = filesize - 1; + SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &offset, iterpool)); + SVN_ERR(svn_io_file_getc((char *)&footer_len, rev_file->file, iterpool)); + + /* We want all the caching we can get. More importantly, we want to + change the cache namespace before each test iteration. */ + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_DELTAS, "1"); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_FULLTEXTS, "1"); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_REVPROPS, "2"); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_BLOCK_READ, "0"); + + /* Manipulate all bytes one at a time. */ + for (i = 0; i < filesize; ++i) + { + svn_error_t *err = SVN_NO_ERROR; + + /* Read byte */ + unsigned char c_old, c_new; + SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &i, iterpool)); + SVN_ERR(svn_io_file_getc((char *)&c_old, rev_file->file, iterpool)); + + /* What to replace it with. Skip if there is no change. */ + c_new = modifier(c_old, baton); + if (c_new == c_old) + continue; + + /* Modify / corrupt the data. */ + SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &i, iterpool)); + SVN_ERR(svn_io_file_putc((char)c_new, rev_file->file, iterpool)); + SVN_ERR(svn_io_file_flush(rev_file->file, iterpool)); + + /* Make sure we use a different namespace for the caches during + this iteration. */ + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS, + svn_uuid_generate(iterpool)); + SVN_ERR(svn_repos_open3(&repos, repo_name, fs_config, iterpool, iterpool)); + svn_fs_set_warning_func(svn_repos_fs(repos), dont_filter_warnings, NULL); + + /* This shall detect the corruption and return an error. 
*/ + err = svn_repos_verify_fs3(repos, revision, revision, FALSE, FALSE, + NULL, NULL, NULL, NULL, NULL, NULL, + iterpool); + + /* Case-only changes in checksum digests are not an error. + * We allow upper case chars to be used in MD5 checksums in all other + * places, thus restricting them here would be inconsistent. */ + if ( i >= filesize - footer_len /* Within footer */ + && c_old >= 'a' && c_old <= 'f' /* 'a' to 'f', only appear + in checksum digests */ + && c_new == c_old - 'a' + 'A') /* respective upper case */ + { + if (err) + { + /* Let us know where we were too strict ... */ + printf("Detected case change in checksum digest at offset 0x%" + APR_UINT64_T_HEX_FMT " (%" APR_OFF_T_FMT ") in r%ld: " + "%c -> %c\n", (apr_uint64_t)i, i, revision, c_old, c_new); + + SVN_ERR(err); + } + } + else if (!err) + { + /* Let us know where we miss changes ... */ + printf("Undetected mod at offset 0x%"APR_UINT64_T_HEX_FMT + " (%"APR_OFF_T_FMT") in r%ld: 0x%02x -> 0x%02x\n", + (apr_uint64_t)i, i, revision, c_old, c_new); + + SVN_TEST_ASSERT(err); + } + + svn_error_clear(err); + + /* Undo the corruption. */ + SVN_ERR(svn_io_file_seek(rev_file->file, APR_SET, &i, iterpool)); + SVN_ERR(svn_io_file_putc((char)c_old, rev_file->file, iterpool)); + + svn_pool_clear(iterpool); + } + + svn_pool_destroy(iterpool); + + return SVN_NO_ERROR; +} + +/* Create a greek repo with OPTS at REPO_NAME. Verify that a modification + * of any single byte using MODIFIER with BATON will be detected. 
*/ +static svn_error_t * +fuzzing_1_byte_test(const svn_test_opts_t *opts, + const char *repo_name, + unsigned char (* modifier)(unsigned char c, void *baton), + void *baton, + apr_pool_t *pool) +{ + svn_repos_t *repos; + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + svn_revnum_t rev; + svn_revnum_t i; + + apr_pool_t *iterpool = svn_pool_create(pool); + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + /* Create a filesystem */ + SVN_ERR(svn_test__create_repos(&repos, repo_name, opts, pool)); + fs = svn_repos_fs(repos); + + /* Revision 1 (one and only revision): the Greek tree */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_test__create_greek_tree(txn_root, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(rev)); + + for (i = 0; i <= rev; ++i) + { + svn_pool_clear(iterpool); + SVN_ERR(fuzzing_1_byte_1_rev(repo_name, fs, i, modifier, baton, + iterpool)); + } + + svn_pool_destroy(iterpool); + + return SVN_NO_ERROR; +} + +/* Modifier function to be used with fuzzing_set_byte_test. + * We return the fixed char value given as *BATON. */ +static unsigned char +set_byte(unsigned char c, void *baton) +{ + return *(const unsigned char *)baton; +} + +/* Run the fuzzing test setting any byte in the repo to all values MIN to + * MAX-1. 
*/ +static svn_error_t * +fuzzing_set_byte_test(const svn_test_opts_t *opts, + int min, + int max, + apr_pool_t *pool) +{ + apr_pool_t *iterpool = svn_pool_create(pool); + unsigned i = 0; + for (i = min; i < max; ++i) + { + unsigned char c = i; + const char *repo_name; + svn_pool_clear(iterpool); + + repo_name = apr_psprintf(iterpool, "test-repo-fuzzing_set_byte_%d_%d", + min, max); + SVN_ERR(fuzzing_1_byte_test(opts, repo_name, set_byte, &c, iterpool)); + } + + svn_pool_destroy(iterpool); + return SVN_NO_ERROR; +} + + + +/*** Tests ***/ + +/* ------------------------------------------------------------------------ */ + +static unsigned char +invert_byte(unsigned char c, void *baton) +{ + return ~c; +} + +static svn_error_t * +fuzzing_invert_byte_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_invert_byte", + invert_byte, NULL, pool)); + + return SVN_NO_ERROR; +} + +/* ------------------------------------------------------------------------ */ + +static unsigned char +increment_byte(unsigned char c, void *baton) +{ + return c + 1; +} + +static svn_error_t * +fuzzing_increment_byte_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_increment_byte", + increment_byte, NULL, pool)); + + return SVN_NO_ERROR; +} + +/* ------------------------------------------------------------------------ */ + +static unsigned char +decrement_byte(unsigned char c, void *baton) +{ + return c - 1; +} + +static svn_error_t * +fuzzing_decrement_byte_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_decrement_byte", + decrement_byte, NULL, pool)); + + return SVN_NO_ERROR; +} + +/* ------------------------------------------------------------------------ */ + +static unsigned char +null_byte(unsigned char c, void *baton) +{ + return 0; +} + +static svn_error_t * +fuzzing_null_byte_test(const svn_test_opts_t 
*opts, + apr_pool_t *pool) +{ + SVN_ERR(fuzzing_1_byte_test(opts, "test-repo-fuzzing_null_byte", + null_byte, NULL, pool)); + + return SVN_NO_ERROR; +} + +/* ------------------------------------------------------------------------ */ + +/* Generator macro: define a test function covering byte values N to M-1 */ +#define FUZZING_SET_BYTE_TEST_N(N,M)\ + static svn_error_t * \ + fuzzing_set_byte_test_ ##N(const svn_test_opts_t *opts, \ + apr_pool_t *pool) \ + { \ + return svn_error_trace(fuzzing_set_byte_test(opts, N, M, pool)); \ + } + +/* Add the test function declared above to the test_funcs array. */ +#define TEST_FUZZING_SET_BYTE_TEST_N(N,M)\ + SVN_TEST_OPTS_PASS(fuzzing_set_byte_test_ ##N, \ + "set any byte to any value between " #N " and " #M) + +/* Declare tests that will cover all possible byte values. */ +FUZZING_SET_BYTE_TEST_N(0,16) +FUZZING_SET_BYTE_TEST_N(16,32) +FUZZING_SET_BYTE_TEST_N(32,48) +FUZZING_SET_BYTE_TEST_N(48,64) +FUZZING_SET_BYTE_TEST_N(64,80) +FUZZING_SET_BYTE_TEST_N(80,96) +FUZZING_SET_BYTE_TEST_N(96,112) +FUZZING_SET_BYTE_TEST_N(112,128) +FUZZING_SET_BYTE_TEST_N(128,144) +FUZZING_SET_BYTE_TEST_N(144,160) +FUZZING_SET_BYTE_TEST_N(160,176) +FUZZING_SET_BYTE_TEST_N(176,192) +FUZZING_SET_BYTE_TEST_N(192,208) +FUZZING_SET_BYTE_TEST_N(208,224) +FUZZING_SET_BYTE_TEST_N(224,240) +FUZZING_SET_BYTE_TEST_N(240,256) + + +/* The test table. */ + +/* Allow for any number of tests to run in parallel. */ +static int max_threads = 0; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_OPTS_PASS(fuzzing_invert_byte_test, + "fuzzing: invert any byte"), + SVN_TEST_OPTS_PASS(fuzzing_increment_byte_test, + "fuzzing: increment any byte"), + SVN_TEST_OPTS_PASS(fuzzing_decrement_byte_test, + "fuzzing: decrement any byte"), + SVN_TEST_OPTS_PASS(fuzzing_null_byte_test, + "fuzzing: set any byte to 0"), + + /* Register generated tests. 
*/ + TEST_FUZZING_SET_BYTE_TEST_N(0,16), + TEST_FUZZING_SET_BYTE_TEST_N(16,32), + TEST_FUZZING_SET_BYTE_TEST_N(32,48), + TEST_FUZZING_SET_BYTE_TEST_N(48,64), + TEST_FUZZING_SET_BYTE_TEST_N(64,80), + TEST_FUZZING_SET_BYTE_TEST_N(80,96), + TEST_FUZZING_SET_BYTE_TEST_N(96,112), + TEST_FUZZING_SET_BYTE_TEST_N(112,128), + TEST_FUZZING_SET_BYTE_TEST_N(128,144), + TEST_FUZZING_SET_BYTE_TEST_N(144,160), + TEST_FUZZING_SET_BYTE_TEST_N(160,176), + TEST_FUZZING_SET_BYTE_TEST_N(176,192), + TEST_FUZZING_SET_BYTE_TEST_N(192,208), + TEST_FUZZING_SET_BYTE_TEST_N(208,224), + TEST_FUZZING_SET_BYTE_TEST_N(224,240), + TEST_FUZZING_SET_BYTE_TEST_N(240,256), + + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c b/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c new file mode 100644 index 0000000..aa07469 --- /dev/null +++ b/subversion/tests/libsvn_fs_fs/fs-fs-pack-test.c @@ -0,0 +1,1947 @@ +/* fs-fs-pack-test.c --- tests for the FSFS filesystem + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + */ + +#include <stdlib.h> +#include <string.h> +#include <apr_pools.h> + +#include "../svn_test.h" +#include "../../libsvn_fs/fs-loader.h" +#include "../../libsvn_fs_fs/fs.h" +#include "../../libsvn_fs_fs/fs_fs.h" +#include "../../libsvn_fs_fs/low_level.h" +#include "../../libsvn_fs_fs/pack.h" +#include "../../libsvn_fs_fs/util.h" + +#include "svn_hash.h" +#include "svn_pools.h" +#include "svn_props.h" +#include "svn_fs.h" +#include "private/svn_string_private.h" + +#include "../svn_test_fs.h" + + + +/*** Helper Functions ***/ + +static void +ignore_fs_warnings(void *baton, svn_error_t *err) +{ +#ifdef SVN_DEBUG + SVN_DBG(("Ignoring FS warning %s\n", + svn_error_symbolic_name(err ? err->apr_err : 0))); +#endif + return; +} + +/* Return the expected contents of "iota" in revision REV. */ +static const char * +get_rev_contents(svn_revnum_t rev, apr_pool_t *pool) +{ + /* Toss in a bunch of magic numbers for spice. */ + apr_int64_t num = ((rev * 1234353 + 4358) * 4583 + ((rev % 4) << 1)) / 42; + return apr_psprintf(pool, "%" APR_INT64_T_FMT "\n", num); +} + +struct pack_notify_baton +{ + apr_int64_t expected_shard; + svn_fs_pack_notify_action_t expected_action; +}; + +static svn_error_t * +pack_notify(void *baton, + apr_int64_t shard, + svn_fs_pack_notify_action_t action, + apr_pool_t *pool) +{ + struct pack_notify_baton *pnb = baton; + + SVN_TEST_ASSERT(shard == pnb->expected_shard); + SVN_TEST_ASSERT(action == pnb->expected_action); + + /* Update expectations. 
*/ + switch (action) + { + case svn_fs_pack_notify_start: + pnb->expected_action = svn_fs_pack_notify_end; + break; + + case svn_fs_pack_notify_end: + pnb->expected_action = svn_fs_pack_notify_start; + pnb->expected_shard++; + break; + + default: + return svn_error_create(SVN_ERR_TEST_FAILED, NULL, + "Unknown notification action when packing"); + } + + return SVN_NO_ERROR; +} + +#define R1_LOG_MSG "Let's serf" + +/* Create a filesystem in DIR. Set the shard size to SHARD_SIZE and create + NUM_REVS number of revisions (in addition to r0). Use POOL for + allocations. After this function successfully completes, the filesystem's + youngest revision number will be NUM_REVS. */ +static svn_error_t * +create_non_packed_filesystem(const char *dir, + const svn_test_opts_t *opts, + svn_revnum_t num_revs, + int shard_size, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + const char *conflict; + svn_revnum_t after_rev; + apr_pool_t *subpool = svn_pool_create(pool); + apr_pool_t *iterpool; + apr_hash_t *fs_config; + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + if (opts->server_minor_version && (opts->server_minor_version < 6)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.6 SVN doesn't support FSFS packing"); + + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_SHARD_SIZE, + apr_itoa(pool, shard_size)); + + /* Create a filesystem. 
*/ + SVN_ERR(svn_test__create_fs2(&fs, dir, opts, fs_config, subpool)); + + /* Revision 1: the Greek tree */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + SVN_ERR(svn_test__create_greek_tree(txn_root, subpool)); + SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_LOG, + svn_string_create(R1_LOG_MSG, pool), + pool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev)); + + /* Revisions 2 thru NUM_REVS-1: content tweaks to "iota". */ + iterpool = svn_pool_create(subpool); + while (after_rev < num_revs) + { + svn_pool_clear(iterpool); + SVN_ERR(svn_fs_begin_txn(&txn, fs, after_rev, iterpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, iterpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "iota", + get_rev_contents(after_rev + 1, + iterpool), + iterpool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, iterpool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev)); + } + svn_pool_destroy(iterpool); + svn_pool_destroy(subpool); + + /* Done */ + return SVN_NO_ERROR; +} + +/* Create a packed filesystem in DIR. Set the shard size to + SHARD_SIZE and create NUM_REVS number of revisions (in addition to + r0). Use POOL for allocations. After this function successfully + completes, the filesystem's youngest revision number will be the + same as NUM_REVS. */ +static svn_error_t * +create_packed_filesystem(const char *dir, + const svn_test_opts_t *opts, + svn_revnum_t num_revs, + int shard_size, + apr_pool_t *pool) +{ + struct pack_notify_baton pnb; + + /* Create the repo and fill it. 
*/ + SVN_ERR(create_non_packed_filesystem(dir, opts, num_revs, shard_size, + pool)); + + /* Now pack the FS */ + pnb.expected_shard = 0; + pnb.expected_action = svn_fs_pack_notify_start; + return svn_fs_pack(dir, pack_notify, &pnb, NULL, NULL, pool); +} + +/* Create a packed FSFS filesystem for revprop tests at REPO_NAME with + * MAX_REV revisions and the given SHARD_SIZE and OPTS. Return it in *FS. + * Use POOL for allocations. + */ +static svn_error_t * +prepare_revprop_repo(svn_fs_t **fs, + const char *repo_name, + svn_revnum_t max_rev, + int shard_size, + const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + const char *conflict; + svn_revnum_t after_rev; + apr_pool_t *subpool; + + /* Create the packed FS and open it. */ + SVN_ERR(create_packed_filesystem(repo_name, opts, max_rev, shard_size, pool)); + SVN_ERR(svn_fs_open2(fs, repo_name, NULL, pool, pool)); + + subpool = svn_pool_create(pool); + /* Do a commit to trigger packing. */ + SVN_ERR(svn_fs_begin_txn(&txn, *fs, max_rev, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "iota", "new-iota", subpool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev)); + svn_pool_destroy(subpool); + + /* Pack the repository. */ + SVN_ERR(svn_fs_pack(repo_name, NULL, NULL, NULL, NULL, pool)); + + return SVN_NO_ERROR; +} + +/* For revision REV, return a short log message allocated in POOL. + */ +static svn_string_t * +default_log(svn_revnum_t rev, apr_pool_t *pool) +{ + return svn_string_createf(pool, "Default message for rev %ld", rev); +} + +/* For revision REV, return a long log message allocated in POOL. 
+ */ +static svn_string_t * +large_log(svn_revnum_t rev, apr_size_t length, apr_pool_t *pool) +{ + svn_stringbuf_t *temp = svn_stringbuf_create_ensure(100000, pool); + int i, count = (int)(length - 50) / 6; + + svn_stringbuf_appendcstr(temp, "A "); + for (i = 0; i < count; ++i) + svn_stringbuf_appendcstr(temp, "very, "); + + svn_stringbuf_appendcstr(temp, + apr_psprintf(pool, "very long message for rev %ld, indeed", rev)); + + return svn_stringbuf__morph_into_string(temp); +} + +/* For revision REV, return a long log message allocated in POOL. + */ +static svn_string_t * +huge_log(svn_revnum_t rev, apr_pool_t *pool) +{ + return large_log(rev, 90000, pool); +} + + +/*** Tests ***/ + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-fsfs-pack" +#define SHARD_SIZE 7 +#define MAX_REV 53 +static svn_error_t * +pack_filesystem(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + int i; + svn_node_kind_t kind; + const char *path; + char buf[80]; + apr_file_t *file; + apr_size_t len; + + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, + pool)); + + /* Check to see that the pack files exist, and that the rev directories + don't. */ + for (i = 0; i < (MAX_REV + 1) / SHARD_SIZE; i++) + { + path = svn_dirent_join_many(pool, REPO_NAME, "revs", + apr_psprintf(pool, "%d.pack", i / SHARD_SIZE), + "pack", SVN_VA_NULL); + + /* These files should exist. 
*/ + SVN_ERR(svn_io_check_path(path, &kind, pool)); + if (kind != svn_node_file) + return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, + "Expected pack file '%s' not found", path); + + if (opts->server_minor_version && (opts->server_minor_version < 9)) + { + path = svn_dirent_join_many(pool, REPO_NAME, "revs", + apr_psprintf(pool, "%d.pack", i / SHARD_SIZE), + "manifest", SVN_VA_NULL); + SVN_ERR(svn_io_check_path(path, &kind, pool)); + if (kind != svn_node_file) + return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, + "Expected manifest file '%s' not found", + path); + } + + /* This directory should not exist. */ + path = svn_dirent_join_many(pool, REPO_NAME, "revs", + apr_psprintf(pool, "%d", i / SHARD_SIZE), + SVN_VA_NULL); + SVN_ERR(svn_io_check_path(path, &kind, pool)); + if (kind != svn_node_none) + return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, + "Unexpected directory '%s' found", path); + } + + /* Ensure the min-unpacked-rev jives with the above operations. */ + SVN_ERR(svn_io_file_open(&file, + svn_dirent_join(REPO_NAME, PATH_MIN_UNPACKED_REV, + pool), + APR_READ | APR_BUFFERED, APR_OS_DEFAULT, pool)); + len = sizeof(buf); + SVN_ERR(svn_io_read_length_line(file, buf, &len, pool)); + SVN_ERR(svn_io_file_close(file, pool)); + if (SVN_STR_TO_REV(buf) != (MAX_REV / SHARD_SIZE) * SHARD_SIZE) + return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, + "Bad '%s' contents", PATH_MIN_UNPACKED_REV); + + /* Finally, make sure the final revision directory does exist. 
*/ + path = svn_dirent_join_many(pool, REPO_NAME, "revs", + apr_psprintf(pool, "%d", (i / SHARD_SIZE) + 1), + SVN_VA_NULL); + SVN_ERR(svn_io_check_path(path, &kind, pool)); + if (kind != svn_node_none) + return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, + "Expected directory '%s' not found", path); + + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef SHARD_SIZE +#undef MAX_REV + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-fsfs-pack-even" +#define SHARD_SIZE 4 +#define MAX_REV 11 +static svn_error_t * +pack_even_filesystem(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_node_kind_t kind; + const char *path; + + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, + pool)); + + path = svn_dirent_join_many(pool, REPO_NAME, "revs", "2.pack", SVN_VA_NULL); + SVN_ERR(svn_io_check_path(path, &kind, pool)); + if (kind != svn_node_dir) + return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, + "Packing did not complete as expected"); + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef SHARD_SIZE +#undef MAX_REV + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-read-packed-fs" +#define SHARD_SIZE 5 +#define MAX_REV 11 +static svn_error_t * +read_packed_fs(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_stream_t *rstream; + svn_stringbuf_t *rstring; + svn_revnum_t i; + + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); + + for (i = 1; i < (MAX_REV + 1); i++) + { + svn_fs_root_t *rev_root; + svn_stringbuf_t *sb; + + SVN_ERR(svn_fs_revision_root(&rev_root, fs, i, pool)); + SVN_ERR(svn_fs_file_contents(&rstream, rev_root, "iota", pool)); + SVN_ERR(svn_test__stream_to_string(&rstring, rstream, pool)); + + if (i == 1) + sb = svn_stringbuf_create("This is the file 'iota'.\n", pool); + else + sb = 
svn_stringbuf_create(get_rev_contents(i, pool), pool); + + if (! svn_stringbuf_compare(rstring, sb)) + return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, + "Bad data in revision %ld.", i); + } + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef SHARD_SIZE +#undef MAX_REV + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-commit-packed-fs" +#define SHARD_SIZE 5 +#define MAX_REV 10 +static svn_error_t * +commit_packed_fs(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + const char *conflict; + svn_revnum_t after_rev; + + /* Create the packed FS and open it. */ + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, 5, pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); + + /* Now do a commit. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "iota", + "How much better is it to get wisdom than gold! and to get " + "understanding rather to be chosen than silver!", pool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, pool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev)); + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-get-set-revprop-packed-fs" +#define SHARD_SIZE 4 +#define MAX_REV 10 +static svn_error_t * +get_set_revprop_packed_fs(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_string_t *prop_value; + + /* Create the packed FS and open it. */ + SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, + pool)); + + /* Try to get revprop for revision 0 + * (non-packed due to special handling). 
*/ + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 0, SVN_PROP_REVISION_AUTHOR, + pool)); + + /* Try to change revprop for revision 0 + * (non-packed due to special handling). */ + SVN_ERR(svn_fs_change_rev_prop(fs, 0, SVN_PROP_REVISION_AUTHOR, + svn_string_create("tweaked-author", pool), + pool)); + + /* verify */ + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 0, SVN_PROP_REVISION_AUTHOR, + pool)); + SVN_TEST_STRING_ASSERT(prop_value->data, "tweaked-author"); + + /* Try to get packed revprop for revision 5. */ + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 5, SVN_PROP_REVISION_AUTHOR, + pool)); + + /* Try to change packed revprop for revision 5. */ + SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_AUTHOR, + svn_string_create("tweaked-author2", pool), + pool)); + + /* verify */ + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, 5, SVN_PROP_REVISION_AUTHOR, + pool)); + SVN_TEST_STRING_ASSERT(prop_value->data, "tweaked-author2"); + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-get-set-large-revprop-packed-fs" +#define SHARD_SIZE 4 +#define MAX_REV 11 +static svn_error_t * +get_set_large_revprop_packed_fs(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_string_t *prop_value; + svn_revnum_t rev; + + /* Create the packed FS and open it. */ + SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, + pool)); + + /* Set commit messages to different, large values that fill the pack + * files but do not exceed the pack size limit. 
*/ + for (rev = 0; rev <= MAX_REV; ++rev) + SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG, + large_log(rev, 1000, pool), + pool)); + + /* verify */ + for (rev = 0; rev <= MAX_REV; ++rev) + { + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, + SVN_PROP_REVISION_LOG, pool)); + SVN_TEST_STRING_ASSERT(prop_value->data, + large_log(rev, 1000, pool)->data); + } + + /* Put a larger revprop into the last, some middle and the first revision + * of a pack. This should cause the packs to split in the middle. */ + SVN_ERR(svn_fs_change_rev_prop(fs, 3, SVN_PROP_REVISION_LOG, + /* rev 0 is not packed */ + large_log(3, 2400, pool), + pool)); + SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG, + large_log(5, 1500, pool), + pool)); + SVN_ERR(svn_fs_change_rev_prop(fs, 8, SVN_PROP_REVISION_LOG, + large_log(8, 1500, pool), + pool)); + + /* verify */ + for (rev = 0; rev <= MAX_REV; ++rev) + { + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, + SVN_PROP_REVISION_LOG, pool)); + + if (rev == 3) + SVN_TEST_STRING_ASSERT(prop_value->data, + large_log(rev, 2400, pool)->data); + else if (rev == 5 || rev == 8) + SVN_TEST_STRING_ASSERT(prop_value->data, + large_log(rev, 1500, pool)->data); + else + SVN_TEST_STRING_ASSERT(prop_value->data, + large_log(rev, 1000, pool)->data); + } + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-get-set-huge-revprop-packed-fs" +#define SHARD_SIZE 4 +#define MAX_REV 10 +static svn_error_t * +get_set_huge_revprop_packed_fs(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_string_t *prop_value; + svn_revnum_t rev; + + /* Create the packed FS and open it. 
*/ + SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, + pool)); + + /* Set commit messages to different values */ + for (rev = 0; rev <= MAX_REV; ++rev) + SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG, + default_log(rev, pool), + pool)); + + /* verify */ + for (rev = 0; rev <= MAX_REV; ++rev) + { + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, + SVN_PROP_REVISION_LOG, pool)); + SVN_TEST_STRING_ASSERT(prop_value->data, default_log(rev, pool)->data); + } + + /* Put a huge revprop into the last, some middle and the first revision + * of a pack. They will cause the pack files to split accordingly. */ + SVN_ERR(svn_fs_change_rev_prop(fs, 3, SVN_PROP_REVISION_LOG, + huge_log(3, pool), + pool)); + SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG, + huge_log(5, pool), + pool)); + SVN_ERR(svn_fs_change_rev_prop(fs, 8, SVN_PROP_REVISION_LOG, + huge_log(8, pool), + pool)); + + /* verify */ + for (rev = 0; rev <= MAX_REV; ++rev) + { + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, + SVN_PROP_REVISION_LOG, pool)); + + if (rev == 3 || rev == 5 || rev == 8) + SVN_TEST_STRING_ASSERT(prop_value->data, + huge_log(rev, pool)->data); + else + SVN_TEST_STRING_ASSERT(prop_value->data, + default_log(rev, pool)->data); + } + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ +/* Regression test for issue #3571 (fsfs 'svnadmin recover' expects + youngest revprop to be outside revprops.db). 
*/ +#define REPO_NAME "test-repo-recover-fully-packed" +#define SHARD_SIZE 4 +#define MAX_REV 7 +static svn_error_t * +recover_fully_packed(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + apr_pool_t *subpool; + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + const char *conflict; + svn_revnum_t after_rev; + svn_error_t *err; + + /* Create a packed FS for which every revision will live in a pack + digest file, and then recover it. */ + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); + SVN_ERR(svn_fs_recover(REPO_NAME, NULL, NULL, pool)); + + /* Add another revision, re-pack, re-recover. */ + subpool = svn_pool_create(pool); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool)); + SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "A/mu", "new-mu", subpool)); + SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev)); + svn_pool_destroy(subpool); + SVN_ERR(svn_fs_pack(REPO_NAME, NULL, NULL, NULL, NULL, pool)); + SVN_ERR(svn_fs_recover(REPO_NAME, NULL, NULL, pool)); + + /* Now, delete the youngest revprop file, and recover again. This + time we want to see an error! */ + SVN_ERR(svn_io_remove_file2( + svn_dirent_join_many(pool, REPO_NAME, PATH_REVPROPS_DIR, + apr_psprintf(pool, "%ld/%ld", + after_rev / SHARD_SIZE, + after_rev), + SVN_VA_NULL), + FALSE, pool)); + err = svn_fs_recover(REPO_NAME, NULL, NULL, pool); + if (! 
err) + return svn_error_create(SVN_ERR_TEST_FAILED, NULL, + "Expected SVN_ERR_FS_CORRUPT error; got none"); + if (err->apr_err != SVN_ERR_FS_CORRUPT) + return svn_error_create(SVN_ERR_TEST_FAILED, err, + "Expected SVN_ERR_FS_CORRUPT error; got:"); + svn_error_clear(err); + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ +/* Regression test for issue #4320 (fsfs file-hinting fails when reading a rep + from the transaction that is committing rev = SHARD_SIZE). */ +#define REPO_NAME "test-repo-file-hint-at-shard-boundary" +#define SHARD_SIZE 4 +#define MAX_REV (SHARD_SIZE - 1) +static svn_error_t * +file_hint_at_shard_boundary(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + apr_pool_t *subpool; + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + const char *file_contents; + svn_stringbuf_t *retrieved_contents; + svn_error_t *err = SVN_NO_ERROR; + + /* Create a packed FS and MAX_REV revisions */ + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); + + /* Reopen the filesystem */ + subpool = svn_pool_create(pool); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool)); + + /* Revision = SHARD_SIZE */ + file_contents = get_rev_contents(SHARD_SIZE, subpool); + SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, subpool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "iota", file_contents, + subpool)); + + /* Retrieve the file. */ + SVN_ERR(svn_test__get_file_contents(txn_root, "iota", &retrieved_contents, + subpool)); + if (strcmp(retrieved_contents->data, file_contents)) + { + err = svn_error_create(SVN_ERR_TEST_FAILED, err, + "Retrieved incorrect contents from iota."); + } + + /* Close the repo. 
*/ + svn_pool_destroy(subpool); + + return err; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-fsfs-info" +#define SHARD_SIZE 3 +#define MAX_REV 5 +static svn_error_t * +test_info(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + const svn_fs_fsfs_info_t *fsfs_info; + const svn_fs_info_placeholder_t *info; + + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, + pool)); + + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); + SVN_ERR(svn_fs_info(&info, fs, pool, pool)); + info = svn_fs_info_dup(info, pool, pool); + + SVN_TEST_STRING_ASSERT(opts->fs_type, info->fs_type); + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return SVN_NO_ERROR; + + fsfs_info = (const void *)info; + if (opts->server_minor_version && (opts->server_minor_version < 6)) + { + SVN_TEST_ASSERT(fsfs_info->shard_size == 0); + SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev == 0); + } + else + { + SVN_TEST_ASSERT(fsfs_info->shard_size == SHARD_SIZE); + SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev + == (MAX_REV + 1) / SHARD_SIZE * SHARD_SIZE); + } + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef SHARD_SIZE +#undef MAX_REV + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-fsfs-pack-shard-size-one" +#define SHARD_SIZE 1 +#define MAX_REV 4 +static svn_error_t * +pack_shard_size_one(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_string_t *propval; + svn_fs_t *fs; + + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, + pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); + /* whitebox: revprop packing special-cases r0, which causes + (start_rev==1, end_rev==0) in pack_revprops_shard(). So test that. 
*/ + SVN_ERR(svn_fs_revision_prop(&propval, fs, 1, SVN_PROP_REVISION_LOG, pool)); + SVN_TEST_STRING_ASSERT(propval->data, R1_LOG_MSG); + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef SHARD_SIZE +#undef MAX_REV +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-get_set_multiple_huge_revprops_packed_fs" +#define SHARD_SIZE 4 +#define MAX_REV 9 +static svn_error_t * +get_set_multiple_huge_revprops_packed_fs(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_string_t *prop_value; + svn_revnum_t rev; + + /* Create the packed FS and open it. */ + SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, + pool)); + + /* Set commit messages to different values */ + for (rev = 0; rev <= MAX_REV; ++rev) + SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG, + default_log(rev, pool), + pool)); + + /* verify */ + for (rev = 0; rev <= MAX_REV; ++rev) + { + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, + SVN_PROP_REVISION_LOG, pool)); + SVN_TEST_STRING_ASSERT(prop_value->data, default_log(rev, pool)->data); + } + + /* Put a huge revprop into revision 1 and 2. 
*/ + SVN_ERR(svn_fs_change_rev_prop(fs, 1, SVN_PROP_REVISION_LOG, + huge_log(1, pool), + pool)); + SVN_ERR(svn_fs_change_rev_prop(fs, 2, SVN_PROP_REVISION_LOG, + huge_log(2, pool), + pool)); + SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG, + huge_log(5, pool), + pool)); + SVN_ERR(svn_fs_change_rev_prop(fs, 6, SVN_PROP_REVISION_LOG, + huge_log(6, pool), + pool)); + + /* verify */ + for (rev = 0; rev <= MAX_REV; ++rev) + { + SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, + SVN_PROP_REVISION_LOG, pool)); + + if (rev == 1 || rev == 2 || rev == 5 || rev == 6) + SVN_TEST_STRING_ASSERT(prop_value->data, + huge_log(rev, pool)->data); + else + SVN_TEST_STRING_ASSERT(prop_value->data, + default_log(rev, pool)->data); + } + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ +#define SHARD_SIZE 4 +static svn_error_t * +upgrade_txns_to_log_addressing(const svn_test_opts_t *opts, + const char *repo_name, + svn_revnum_t max_rev, + svn_boolean_t upgrade_before_txns, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_revnum_t rev; + apr_array_header_t *txns; + apr_array_header_t *txn_names; + int i, k; + svn_test_opts_t temp_opts; + svn_fs_root_t *root; + apr_pool_t *iterpool = svn_pool_create(pool); + + static const char * const paths[SHARD_SIZE][2] + = { + { "A/mu", "A/B/lambda" }, + { "A/B/E/alpha", "A/D/H/psi" }, + { "A/D/gamma", "A/B/E/beta" }, + { "A/D/G/pi", "A/D/G/rho" } + }; + + /* Bail (with success) on known-untestable scenarios */ + if ((strcmp(opts->fs_type, "fsfs") != 0) + || (opts->server_minor_version && (opts->server_minor_version < 9))) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.9 SVN doesn't support log addressing"); + + /* Create the packed FS in phys addressing format and open it. 
*/ + temp_opts = *opts; + temp_opts.server_minor_version = 8; + SVN_ERR(prepare_revprop_repo(&fs, repo_name, max_rev, SHARD_SIZE, + &temp_opts, pool)); + + if (upgrade_before_txns) + { + /* upgrade to final repo format (using log addressing) and re-open */ + SVN_ERR(svn_fs_upgrade2(repo_name, NULL, NULL, NULL, NULL, pool)); + SVN_ERR(svn_fs_open2(&fs, repo_name, svn_fs_config(fs, pool), pool, + pool)); + } + + /* Create 4 concurrent transactions */ + txns = apr_array_make(pool, SHARD_SIZE, sizeof(svn_fs_txn_t *)); + txn_names = apr_array_make(pool, SHARD_SIZE, sizeof(const char *)); + for (i = 0; i < SHARD_SIZE; ++i) + { + svn_fs_txn_t *txn; + const char *txn_name; + + SVN_ERR(svn_fs_begin_txn(&txn, fs, max_rev, pool)); + APR_ARRAY_PUSH(txns, svn_fs_txn_t *) = txn; + + SVN_ERR(svn_fs_txn_name(&txn_name, txn, pool)); + APR_ARRAY_PUSH(txn_names, const char *) = txn_name; + } + + /* Let all txns touch at least 2 files. + * Thus, the addressing data of at least one representation in the txn + * will differ between addressing modes. 
*/ + for (i = 0; i < SHARD_SIZE; ++i) + { + svn_fs_txn_t *txn = APR_ARRAY_IDX(txns, i, svn_fs_txn_t *); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + + for (k = 0; k < 2; ++k) + { + svn_stream_t *stream; + const char *file_path = paths[i][k]; + svn_pool_clear(iterpool); + + SVN_ERR(svn_fs_apply_text(&stream, root, file_path, NULL, iterpool)); + SVN_ERR(svn_stream_printf(stream, iterpool, + "This is file %s in txn %d", + file_path, i)); + SVN_ERR(svn_stream_close(stream)); + } + } + + if (!upgrade_before_txns) + { + /* upgrade to final repo format (using log addressing) and re-open */ + SVN_ERR(svn_fs_upgrade2(repo_name, NULL, NULL, NULL, NULL, pool)); + SVN_ERR(svn_fs_open2(&fs, repo_name, svn_fs_config(fs, pool), pool, + pool)); + } + + /* Commit all transactions + * (in reverse order to make things more interesting) */ + for (i = SHARD_SIZE - 1; i >= 0; --i) + { + svn_fs_txn_t *txn; + const char *txn_name = APR_ARRAY_IDX(txn_names, i, const char *); + svn_pool_clear(iterpool); + + SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, iterpool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool)); + } + + /* Further changes to fill the shard */ + + SVN_ERR(svn_fs_youngest_rev(&rev, fs, pool)); + SVN_TEST_ASSERT(rev == SHARD_SIZE + max_rev + 1); + + while ((rev + 1) % SHARD_SIZE) + { + svn_fs_txn_t *txn; + if (rev % SHARD_SIZE == 0) + break; + + svn_pool_clear(iterpool); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, iterpool)); + SVN_ERR(svn_fs_txn_root(&root, txn, iterpool)); + SVN_ERR(svn_test__set_file_contents(root, "iota", + get_rev_contents(rev + 1, iterpool), + iterpool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, iterpool)); + } + + /* Make sure to close all file handles etc. 
from the last iteration */ + + svn_pool_clear(iterpool); + + /* Pack repo to verify that old and new shard get packed according to + their respective addressing mode */ + + SVN_ERR(svn_fs_pack(repo_name, NULL, NULL, NULL, NULL, pool)); + + /* verify that our changes got in */ + + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + for (i = 0; i < SHARD_SIZE; ++i) + { + for (k = 0; k < 2; ++k) + { + svn_stream_t *stream; + const char *file_path = paths[i][k]; + svn_string_t *string; + const char *expected; + + svn_pool_clear(iterpool); + + SVN_ERR(svn_fs_file_contents(&stream, root, file_path, iterpool)); + SVN_ERR(svn_string_from_stream(&string, stream, iterpool, iterpool)); + + expected = apr_psprintf(pool,"This is file %s in txn %d", + file_path, i); + SVN_TEST_STRING_ASSERT(string->data, expected); + } + } + + /* verify that the indexes are consistent, we calculated the correct + low-level checksums etc. */ + SVN_ERR(svn_fs_verify(repo_name, NULL, + SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, + NULL, NULL, NULL, NULL, pool)); + for (; rev >= 0; --rev) + { + svn_pool_clear(iterpool); + SVN_ERR(svn_fs_revision_root(&root, fs, rev, iterpool)); + SVN_ERR(svn_fs_verify_root(root, iterpool)); + } + + return SVN_NO_ERROR; +} +#undef SHARD_SIZE + +#define REPO_NAME "test-repo-upgrade_new_txns_to_log_addressing" +#define MAX_REV 8 +static svn_error_t * +upgrade_new_txns_to_log_addressing(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + SVN_ERR(upgrade_txns_to_log_addressing(opts, REPO_NAME, MAX_REV, TRUE, + pool)); + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV + +/* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-upgrade_old_txns_to_log_addressing" +#define MAX_REV 8 +static svn_error_t * +upgrade_old_txns_to_log_addressing(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + SVN_ERR(upgrade_txns_to_log_addressing(opts, REPO_NAME, MAX_REV, FALSE, + pool)); + + return SVN_NO_ERROR; +} + +#undef 
REPO_NAME +#undef MAX_REV + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-metadata_checksumming" +static svn_error_t * +metadata_checksumming(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + const char *repo_path, *r0_path; + apr_hash_t *fs_config = apr_hash_make(pool); + svn_stringbuf_t *r0; + svn_fs_root_t *root; + apr_hash_t *dir; + + /* Skip this test unless we are FSFS f7+ */ + if ((strcmp(opts->fs_type, "fsfs") != 0) + || (opts->server_minor_version && (opts->server_minor_version < 9))) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.9 SVN doesn't checksum metadata"); + + /* Create the file system to fiddle with. */ + SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool)); + repo_path = svn_fs_path(fs, pool); + + /* Manipulate the data on disk. + * (change id from '0.0.*' to '1.0.*') */ + r0_path = svn_dirent_join_many(pool, repo_path, "revs", "0", "0", + SVN_VA_NULL); + SVN_ERR(svn_stringbuf_from_file2(&r0, r0_path, pool)); + r0->data[21] = '1'; + SVN_ERR(svn_io_remove_file2(r0_path, FALSE, pool)); + SVN_ERR(svn_io_file_create_bytes(r0_path, r0->data, r0->len, pool)); + + /* Reading the corrupted data on the normal code path triggers no error. + * Use a separate namespace to avoid simply reading data from cache. */ + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS, + svn_uuid_generate(pool)); + SVN_ERR(svn_fs_open2(&fs, repo_path, fs_config, pool, pool)); + SVN_ERR(svn_fs_revision_root(&root, fs, 0, pool)); + SVN_ERR(svn_fs_dir_entries(&dir, root, "/", pool)); + + /* The block-read code path uses the P2L index information and compares + * low-level checksums. Again, separate cache namespace. 
*/ + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS, + svn_uuid_generate(pool)); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_BLOCK_READ, "1"); + SVN_ERR(svn_fs_open2(&fs, repo_path, fs_config, pool, pool)); + SVN_ERR(svn_fs_revision_root(&root, fs, 0, pool)); + SVN_TEST_ASSERT_ERROR(svn_fs_dir_entries(&dir, root, "/", pool), + SVN_ERR_CHECKSUM_MISMATCH); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-revprop_caching_on_off" +static svn_error_t * +revprop_caching_on_off(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs1; + svn_fs_t *fs2; + apr_hash_t *fs_config; + svn_string_t *value; + const svn_string_t *another_value_for_avoiding_warnings_from_a_broken_api; + const svn_string_t *new_value = svn_string_create("new", pool); + + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + /* Open two filesystem objects, enable revision property caching + * in one of them. */ + SVN_ERR(svn_test__create_fs(&fs1, REPO_NAME, opts, pool)); + + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_REVPROPS, "1"); + + SVN_ERR(svn_fs_open2(&fs2, svn_fs_path(fs1, pool), fs_config, pool, pool)); + + /* With inefficient named atomics, the filesystem will output a warning + and disable the revprop caching, but we still would like to test + these cases. Ignore the warning(s). 
*/ + svn_fs_set_warning_func(fs2, ignore_fs_warnings, NULL); + + SVN_ERR(svn_fs_revision_prop(&value, fs2, 0, "svn:date", pool)); + another_value_for_avoiding_warnings_from_a_broken_api = value; + SVN_ERR(svn_fs_change_rev_prop2( + fs1, 0, "svn:date", + &another_value_for_avoiding_warnings_from_a_broken_api, + new_value, pool)); + + /* Expect the change to be visible through both objects.*/ + SVN_ERR(svn_fs_revision_prop(&value, fs1, 0, "svn:date", pool)); + SVN_TEST_STRING_ASSERT(value->data, "new"); + + SVN_ERR(svn_fs_revision_prop(&value, fs2, 0, "svn:date", pool)); + SVN_TEST_STRING_ASSERT(value->data, "new"); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +static svn_error_t * +id_parser_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + #define LONG_MAX_STR #LONG_MAX + + /* Verify the revision number parser (e.g. first element of a txn ID) */ + svn_fs_fs__id_part_t id_part; + SVN_ERR(svn_fs_fs__id_txn_parse(&id_part, "0-0")); + +#if LONG_MAX == 2147483647L + SVN_ERR(svn_fs_fs__id_txn_parse(&id_part, "2147483647-0")); + + /* Trigger all sorts of overflow conditions. 
*/ + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "2147483648-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "21474836470-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "21474836479-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967295-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967296-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967304-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4294967305-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "42949672950-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "42949672959-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + + /* 0x120000000 = 4831838208. + * 483183820 < 10*483183820 mod 2^32 = 536870904 */ + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "4831838208-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); +#else + SVN_ERR(svn_fs_fs__id_txn_parse(&id_part, "9223372036854775807-0")); + + /* Trigger all sorts of overflow conditions. 
*/ + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "9223372036854775808-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "92233720368547758070-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "92233720368547758079-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "18446744073709551615-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "18446744073709551616-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "18446744073709551624-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "18446744073709551625-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "184467440737095516150-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "184467440737095516159-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + + /* 0x12000000000000000 = 20752587082923245568. 
+ * 2075258708292324556 < 10*2075258708292324556 mod 2^32 = 2305843009213693944 */ + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, + "20752587082923245568-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); +#endif + + /* Invalid characters */ + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "2e4-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + SVN_TEST_ASSERT_ERROR(svn_fs_fs__id_txn_parse(&id_part, "2-4-0"), + SVN_ERR_FS_MALFORMED_TXN_ID); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-plain_0_length" + +static svn_error_t * +receive_index(const svn_fs_fs__p2l_entry_t *entry, + void *baton, + apr_pool_t *scratch_pool) +{ + apr_array_header_t *entries = baton; + APR_ARRAY_PUSH(entries, svn_fs_fs__p2l_entry_t *) + = apr_pmemdup(entries->pool, entry, sizeof(*entry)); + + return SVN_NO_ERROR; +} + +static apr_size_t +stringbuf_find(svn_stringbuf_t *rev_contents, + const char *substring) +{ + apr_size_t i; + apr_size_t len = strlen(substring); + + for (i = 0; i < rev_contents->len - len + 1; ++i) + if (!memcmp(rev_contents->data + i, substring, len)) + return i; + + return APR_SIZE_MAX; +} + +static svn_error_t * +plain_0_length(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + fs_fs_data_t *ffd; + svn_fs_txn_t *txn; + svn_fs_root_t *root; + svn_revnum_t rev; + const char *rev_path; + svn_stringbuf_t *rev_contents; + apr_hash_t *fs_config; + svn_filesize_t file_length; + apr_size_t offset; + + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + /* Create a repo that does not deltify properties and does not share reps + on its own - makes it easier to do that later by hand. 
*/ + SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool)); + ffd = fs->fsap_data; + ffd->deltify_properties = FALSE; + ffd->rep_sharing_allowed = FALSE; + + /* Create one file node with matching contents and property reps. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_file(root, "foo", pool)); + SVN_ERR(svn_test__set_file_contents(root, "foo", "END\n", pool)); + SVN_ERR(svn_fs_change_node_prop(root, "foo", "x", NULL, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Redirect text rep to props rep. */ + rev_path = svn_fs_fs__path_rev_absolute(fs, rev, pool); + SVN_ERR(svn_stringbuf_from_file2(&rev_contents, rev_path, pool)); + + offset = stringbuf_find(rev_contents, "id: "); + if (offset != APR_SIZE_MAX) + { + node_revision_t *noderev; + svn_stringbuf_t *noderev_str; + + /* Read the noderev. */ + svn_stream_t *stream = svn_stream_from_stringbuf(rev_contents, pool); + SVN_ERR(svn_stream_skip(stream, offset)); + SVN_ERR(svn_fs_fs__read_noderev(&noderev, stream, pool, pool)); + SVN_ERR(svn_stream_close(stream)); + + /* Tweak the DATA_REP. */ + noderev->data_rep->revision = noderev->prop_rep->revision; + noderev->data_rep->item_index = noderev->prop_rep->item_index; + noderev->data_rep->size = noderev->prop_rep->size; + noderev->data_rep->expanded_size = 0; + + /* Serialize it back. */ + noderev_str = svn_stringbuf_create_empty(pool); + stream = svn_stream_from_stringbuf(noderev_str, pool); + SVN_ERR(svn_fs_fs__write_noderev(stream, noderev, ffd->format, + svn_fs_fs__fs_supports_mergeinfo(fs), + pool)); + SVN_ERR(svn_stream_close(stream)); + + /* Patch the revision contents */ + memcpy(rev_contents->data + offset, noderev_str->data, noderev_str->len); + } + + SVN_ERR(svn_io_write_atomic(rev_path, rev_contents->data, + rev_contents->len, NULL, pool)); + + if (svn_fs_fs__use_log_addressing(fs)) + { + /* Refresh index data (checksums). 
*/ + apr_array_header_t *entries = apr_array_make(pool, 4, sizeof(void *)); + SVN_ERR(svn_fs_fs__dump_index(fs, rev, receive_index, entries, + NULL, NULL, pool)); + SVN_ERR(svn_fs_fs__load_index(fs, rev, entries, pool)); + } + + /* Create an independent FS instances with separate caches etc. */ + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS, + svn_uuid_generate(pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, fs_config, pool, pool)); + + /* Now, check that we get the correct file length. */ + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + SVN_ERR(svn_fs_file_length(&file_length, root, "foo", pool)); + + SVN_TEST_ASSERT(file_length == 4); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-rep_sharing_effectiveness" + +static int +count_substring(svn_stringbuf_t *string, + const char *needle) +{ + int count = 0; + apr_size_t len = strlen(needle); + apr_size_t pos; + + for (pos = 0; pos + len <= string->len; ++pos) + if (memcmp(string->data + pos, needle, len) == 0) + ++count; + + return count; +} + +static svn_error_t * +count_representations(int *count, + svn_fs_t *fs, + svn_revnum_t revision, + apr_pool_t *pool) +{ + svn_stringbuf_t *rev_contents; + const char *rev_path = svn_fs_fs__path_rev_absolute(fs, revision, pool); + SVN_ERR(svn_stringbuf_from_file2(&rev_contents, rev_path, pool)); + + *count = count_substring(rev_contents, "PLAIN") + + count_substring(rev_contents, "DELTA"); + + return SVN_NO_ERROR; +} + +/* Repeat string S many times to make it big enough for deltification etc. + to kick in. 
*/ +static const char* +multiply_string(const char *s, + apr_pool_t *pool) +{ + svn_stringbuf_t *temp = svn_stringbuf_create(s, pool); + + int i; + for (i = 0; i < 7; ++i) + svn_stringbuf_insert(temp, temp->len, temp->data, temp->len); + + return temp->data; +} + +static svn_error_t * +rep_sharing_effectiveness(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + fs_fs_data_t *ffd; + svn_fs_txn_t *txn; + svn_fs_root_t *root; + svn_revnum_t rev; + const char *hello_str = multiply_string("Hello, ", pool); + const char *world_str = multiply_string("World!", pool); + const char *goodbye_str = multiply_string("Goodbye!", pool); + + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + /* Create a repo that and explicitly enable rep sharing. */ + SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool)); + + ffd = fs->fsap_data; + if (ffd->format < SVN_FS_FS__MIN_REP_SHARING_FORMAT) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + ffd->rep_sharing_allowed = TRUE; + + /* Revision 1: create 2 files with different content. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_file(root, "foo", pool)); + SVN_ERR(svn_test__set_file_contents(root, "foo", hello_str, pool)); + SVN_ERR(svn_fs_make_file(root, "bar", pool)); + SVN_ERR(svn_test__set_file_contents(root, "bar", world_str, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Revision 2: modify a file to match another file's r1 content and + add another with the same content. + (classic rep-sharing). 
*/ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_test__set_file_contents(root, "foo", world_str, pool)); + SVN_ERR(svn_fs_make_file(root, "baz", pool)); + SVN_ERR(svn_test__set_file_contents(root, "baz", hello_str, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Revision 3: modify all files to some new, identical content and add + another with the same content. + (in-revision rep-sharing). */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_test__set_file_contents(root, "foo", goodbye_str, pool)); + SVN_ERR(svn_test__set_file_contents(root, "bar", goodbye_str, pool)); + SVN_ERR(svn_test__set_file_contents(root, "baz", goodbye_str, pool)); + SVN_ERR(svn_fs_make_file(root, "qux", pool)); + SVN_ERR(svn_test__set_file_contents(root, "qux", goodbye_str, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Verify revision contents. */ + { + const struct { + svn_revnum_t revision; + const char *file; + const char *contents; + } expected[] = { + { 1, "foo", "Hello, " }, + { 1, "bar", "World!" }, + { 2, "foo", "World!" }, + { 2, "bar", "World!" }, + { 2, "baz", "Hello, " }, + { 3, "foo", "Goodbye!" }, + { 3, "bar", "Goodbye!" }, + { 3, "baz", "Goodbye!" }, + { 3, "qux", "Goodbye!" }, + { SVN_INVALID_REVNUM, NULL, NULL } + }; + + int i; + apr_pool_t *iterpool = svn_pool_create(pool); + for (i = 0; SVN_IS_VALID_REVNUM(expected[i].revision); ++i) + { + svn_stringbuf_t *str; + + SVN_ERR(svn_fs_revision_root(&root, fs, expected[i].revision, + iterpool)); + SVN_ERR(svn_test__get_file_contents(root, expected[i].file, &str, + iterpool)); + + SVN_TEST_STRING_ASSERT(str->data, + multiply_string(expected[i].contents, + iterpool)); + } + + svn_pool_destroy(iterpool); + } + + /* Verify that rep sharing eliminated most reps. */ + { + /* Number of expected representations (including the root directory). 
*/ + const int expected[] = { 1, 3, 1, 2 } ; + + svn_revnum_t i; + apr_pool_t *iterpool = svn_pool_create(pool); + for (i = 0; i <= rev; ++i) + { + int count; + SVN_ERR(count_representations(&count, fs, i, iterpool)); + SVN_TEST_ASSERT(count == expected[i]); + } + + svn_pool_destroy(iterpool); + } + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-delta_chain_with_plain" + +static svn_error_t * +delta_chain_with_plain(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + fs_fs_data_t *ffd; + svn_fs_txn_t *txn; + svn_fs_root_t *root; + svn_revnum_t rev; + svn_stringbuf_t *prop_value, *contents, *contents2, *hash_rep; + int i; + apr_hash_t *fs_config, *props; + + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + /* Reproducing issue #4577 without the r1676667 fix is much harder in 1.9+ + * than it was in 1.8. The reason is that 1.9+ won't deltify small reps + * nor against small reps. So, we must construct relatively large PLAIN + * and DELTA reps. + * + * The idea is to construct a PLAIN prop rep, make a file share that as + * its text rep, grow the file considerably (to make the PLAIN rep later + * read beyond EOF) and then replace it entirely with another longish + * contents. + */ + + /* Create a repo that and explicitly enable rep sharing. */ + SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool)); + + ffd = fs->fsap_data; + if (ffd->format < SVN_FS_FS__MIN_REP_SHARING_FORMAT) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + ffd->rep_sharing_allowed = TRUE; + + /* Make sure all props are stored as PLAIN reps. */ + ffd->deltify_properties = FALSE; + + /* Construct various content strings. + * Note that props need to be shorter than the file contents. 
*/ + prop_value = svn_stringbuf_create("prop", pool); + for (i = 0; i < 10; ++i) + svn_stringbuf_appendstr(prop_value, prop_value); + + contents = svn_stringbuf_create("Some text.", pool); + for (i = 0; i < 10; ++i) + svn_stringbuf_appendstr(contents, contents); + + contents2 = svn_stringbuf_create("Totally new!", pool); + for (i = 0; i < 10; ++i) + svn_stringbuf_appendstr(contents2, contents2); + + /* Revision 1: create a property rep. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "/", "p", + svn_string_create(prop_value->data, pool), + pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Revision 2: create a file that shares the text rep with the PLAIN + * property rep from r1. */ + props = apr_hash_make(pool); + svn_hash_sets(props, "p", svn_string_create(prop_value->data, pool)); + + hash_rep = svn_stringbuf_create_empty(pool); + svn_hash_write2(props, svn_stream_from_stringbuf(hash_rep, pool), "END", + pool); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_file(root, "foo", pool)); + SVN_ERR(svn_test__set_file_contents(root, "foo", hash_rep->data, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Revision 3: modify the file contents to a long-ish full text + * (~10kByte, longer than the r1 revision file). */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_test__set_file_contents(root, "foo", contents->data, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Revision 4: replace file contents to something disjoint from r3. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_test__set_file_contents(root, "foo", contents2->data, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Getting foo@4 must work. 
To make sure we actually read from disk, + * use a new FS instance with disjoint caches. */ + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS, + svn_uuid_generate(pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, fs_config, pool, pool)); + + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + SVN_ERR(svn_test__get_file_contents(root, "foo", &contents, pool)); + SVN_TEST_STRING_ASSERT(contents->data, contents2->data); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-compare_0_length_rep" + +static svn_error_t * +compare_0_length_rep(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *root; + svn_revnum_t rev; + int i, k; + apr_hash_t *fs_config; + + /* Test expectations. */ +#define no_rep_file "no-rep" +#define empty_plain_file "empty-plain" +#define plain_file "plain" +#define empty_delta_file "empty-delta" +#define delta_file "delta" + + enum { COUNT = 5 }; + const char *file_names[COUNT] = { no_rep_file, + empty_delta_file, + plain_file, + empty_delta_file, + delta_file }; + + int equal[COUNT][COUNT] = { { 1, 1, 0, 1, 0 }, + { 1, 1, 0, 1, 0 }, + { 0, 0, 1, 0, 1 }, + { 1, 1, 0, 1, 0 }, + { 0, 0, 1, 0, 1 } }; + + /* Well, this club is FSFS only ... */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + /* We want to check that whether NULL reps, empty PLAIN reps and empty + * DELTA reps are all considered equal, yet different from non-empty reps. + * + * Because we can't create empty PLAIN reps with recent formats anymore, + * some format selection & upgrade gymnastics is needed. */ + + /* Create a format 1 repository. + * This one does not support DELTA reps, so all is PLAIN. 
*/ + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_PRE_1_4_COMPATIBLE, "x"); + SVN_ERR(svn_test__create_fs2(&fs, REPO_NAME, opts, fs_config, pool)); + + /* Revision 1, create 3 files: + * One with no rep, one with an empty rep and a non-empty one. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_file(root, no_rep_file, pool)); + SVN_ERR(svn_fs_make_file(root, empty_plain_file, pool)); + SVN_ERR(svn_test__set_file_contents(root, empty_plain_file, "", pool)); + SVN_ERR(svn_fs_make_file(root, plain_file, pool)); + SVN_ERR(svn_test__set_file_contents(root, plain_file, "x", pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Upgrade the file system format. */ + SVN_ERR(svn_fs_upgrade2(REPO_NAME, NULL, NULL, NULL, NULL, pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); + + /* Revision 2, create two more files: + * a file with an empty DELTA rep and a non-empty one. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, rev, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_file(root, empty_delta_file, pool)); + SVN_ERR(svn_test__set_file_contents(root, empty_delta_file, "", pool)); + SVN_ERR(svn_fs_make_file(root, delta_file, pool)); + SVN_ERR(svn_test__set_file_contents(root, delta_file, "x", pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Now compare. */ + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + for (i = 0; i < COUNT; ++i) + for (k = 0; k < COUNT; ++k) + { + svn_boolean_t different; + SVN_ERR(svn_fs_contents_different(&different, root, file_names[i], + root, file_names[k], pool)); + SVN_TEST_ASSERT(different != equal[i][k]); + } + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ +/* Verify that the format 7 pack logic works even if we can't fit all index + metadata into memory. 
*/ +#define REPO_NAME "test-repo-pack-with-limited-memory" +#define SHARD_SIZE 4 +#define MAX_REV (2 * SHARD_SIZE - 1) +static svn_error_t * +pack_with_limited_memory(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + apr_size_t max_mem; + apr_pool_t *iterpool = svn_pool_create(pool); + + /* Bail (with success) on known-untestable scenarios */ + if (opts->server_minor_version && (opts->server_minor_version < 9)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.9 SVN doesn't support reordering packs"); + + /* Run with an increasing memory allowance such that we cover all + splitting scenarios. */ + for (max_mem = 350; max_mem < 8000; max_mem += max_mem / 2) + { + const char *dir; + svn_fs_t *fs; + + svn_pool_clear(iterpool); + + /* Create a filesystem. */ + dir = apr_psprintf(iterpool, "%s-%d", REPO_NAME, (int)max_mem); + SVN_ERR(create_non_packed_filesystem(dir, opts, MAX_REV, SHARD_SIZE, + iterpool)); + + /* Pack it with a narrow memory budget. */ + SVN_ERR(svn_fs_open2(&fs, dir, NULL, iterpool, iterpool)); + SVN_ERR(svn_fs_fs__pack(fs, max_mem, NULL, NULL, NULL, NULL, + iterpool)); + + /* To be sure: Verify that we didn't break the repo. */ + SVN_ERR(svn_fs_verify(dir, NULL, 0, MAX_REV, NULL, NULL, NULL, NULL, + iterpool)); + } + + svn_pool_destroy(iterpool); + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef MAX_REV +#undef SHARD_SIZE + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-large_delta_against_plain" + +static svn_error_t * +large_delta_against_plain(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_fs_t *fs; + fs_fs_data_t *ffd; + svn_fs_txn_t *txn; + svn_fs_root_t *root; + svn_revnum_t rev; + svn_stringbuf_t *prop_value; + svn_string_t *prop_read; + int i; + apr_hash_t *fs_config; + + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + + /* Create a repo that and explicitly enable rep sharing. 
*/ + SVN_ERR(svn_test__create_fs(&fs, REPO_NAME, opts, pool)); + ffd = fs->fsap_data; + + /* Make sure all props are stored as PLAIN reps. */ + ffd->deltify_properties = FALSE; + + /* Construct a property larger than 2 txdelta windows. */ + prop_value = svn_stringbuf_create("prop", pool); + while (prop_value->len <= 2 * 102400) + svn_stringbuf_appendstr(prop_value, prop_value); + + /* Revision 1: create a property rep. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "/", "p", + svn_string_create(prop_value->data, pool), + pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Now, store them as DELTA reps. */ + ffd->deltify_properties = TRUE; + + /* Construct a property larger than 2 txdelta windows, distinct from the + * previous one but with a matching "tail". */ + prop_value = svn_stringbuf_create("blob", pool); + while (prop_value->len <= 2 * 102400) + svn_stringbuf_appendstr(prop_value, prop_value); + for (i = 0; i < 100; ++i) + svn_stringbuf_appendcstr(prop_value, "prop"); + + /* Revision 2: modify the property. */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 1, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(root, "/", "p", + svn_string_create(prop_value->data, pool), + pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* Reconstructing the property deltified must work. To make sure we + * actually read from disk, use a new FS instance with disjoint caches. */ + fs_config = apr_hash_make(pool); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FSFS_CACHE_NS, + svn_uuid_generate(pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, fs_config, pool, pool)); + + SVN_ERR(svn_fs_revision_root(&root, fs, rev, pool)); + SVN_ERR(svn_fs_node_prop(&prop_read, root, "/", "p", pool)); + SVN_TEST_STRING_ASSERT(prop_read->data, prop_value->data); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + + + +/* The test table. 
*/ + +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_OPTS_PASS(pack_filesystem, + "pack a FSFS filesystem"), + SVN_TEST_OPTS_PASS(pack_even_filesystem, + "pack FSFS where revs % shard = 0"), + SVN_TEST_OPTS_PASS(read_packed_fs, + "read from a packed FSFS filesystem"), + SVN_TEST_OPTS_PASS(commit_packed_fs, + "commit to a packed FSFS filesystem"), + SVN_TEST_OPTS_PASS(get_set_revprop_packed_fs, + "get/set revprop while packing FSFS filesystem"), + SVN_TEST_OPTS_PASS(get_set_large_revprop_packed_fs, + "get/set large packed revprops in FSFS"), + SVN_TEST_OPTS_PASS(get_set_huge_revprop_packed_fs, + "get/set huge packed revprops in FSFS"), + SVN_TEST_OPTS_PASS(recover_fully_packed, + "recover a fully packed filesystem"), + SVN_TEST_OPTS_PASS(file_hint_at_shard_boundary, + "test file hint at shard boundary"), + SVN_TEST_OPTS_PASS(test_info, + "test svn_fs_info"), + SVN_TEST_OPTS_PASS(pack_shard_size_one, + "test packing with shard size = 1"), + SVN_TEST_OPTS_PASS(get_set_multiple_huge_revprops_packed_fs, + "set multiple huge revprops in packed FSFS"), + SVN_TEST_OPTS_PASS(upgrade_new_txns_to_log_addressing, + "upgrade txns to log addressing in shared FSFS"), + SVN_TEST_OPTS_PASS(upgrade_old_txns_to_log_addressing, + "upgrade txns started before svnadmin upgrade"), + SVN_TEST_OPTS_PASS(metadata_checksumming, + "metadata checksums being checked"), + SVN_TEST_OPTS_PASS(revprop_caching_on_off, + "change revprops with enabled and disabled caching"), + SVN_TEST_OPTS_PASS(id_parser_test, + "id parser test"), + SVN_TEST_OPTS_PASS(plain_0_length, + "file with 0 expanded-length, issue #4554"), + SVN_TEST_OPTS_PASS(rep_sharing_effectiveness, + "rep-sharing effectiveness"), + SVN_TEST_OPTS_PASS(delta_chain_with_plain, + "delta chains starting with PLAIN, issue #4577"), + SVN_TEST_OPTS_PASS(compare_0_length_rep, + "compare empty PLAIN and non-existent reps"), + SVN_TEST_OPTS_PASS(pack_with_limited_memory, + "pack with 
limited memory for metadata"), + SVN_TEST_OPTS_PASS(large_delta_against_plain, + "large deltas against PLAIN, issue #4658"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c b/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c new file mode 100644 index 0000000..a1447ee --- /dev/null +++ b/subversion/tests/libsvn_fs_fs/fs-fs-private-test.c @@ -0,0 +1,434 @@ +/* fs-fs-private-test.c --- tests FSFS's private API + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include <stdlib.h> +#include <string.h> + +#include "../svn_test.h" + +#include "svn_hash.h" +#include "svn_pools.h" +#include "svn_props.h" +#include "svn_fs.h" + +#include "private/svn_string_private.h" +#include "private/svn_fs_fs_private.h" +#include "private/svn_subr_private.h" + +#include "../../libsvn_fs_fs/index.h" + +#include "../svn_test_fs.h" + + + +/* Utility functions */ + +/* Create a repo under REPO_NAME using OPTS. Allocate the repository in + * RESULT_POOL and return it in *REPOS. 
Set *REV to the revision containing + * the Greek tree addition. Use SCRATCH_POOL for temporary allocations. + */ +static svn_error_t * +create_greek_repo(svn_repos_t **repos, + svn_revnum_t *rev, + const svn_test_opts_t *opts, + const char *repo_name, + apr_pool_t *result_pool, + apr_pool_t *scratch_pool) +{ + svn_fs_t *fs; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + + /* Create a filesystem */ + SVN_ERR(svn_test__create_repos(repos, repo_name, opts, result_pool)); + fs = svn_repos_fs(*repos); + + /* Add the Greek tree */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, scratch_pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, scratch_pool)); + SVN_ERR(svn_test__create_greek_tree(txn_root, scratch_pool)); + SVN_ERR(svn_fs_commit_txn(NULL, rev, txn, scratch_pool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(*rev)); + + return SVN_NO_ERROR; +} + + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-get-repo-stats-test" + +static svn_error_t * +verify_representation_stats(const svn_fs_fs__representation_stats_t *stats, + apr_uint64_t expected_count) +{ + /* Small items, no packing (but inefficiency due to packing attempt). */ + SVN_TEST_ASSERT(stats->total.count == expected_count); + SVN_TEST_ASSERT( stats->total.packed_size >= 10 * expected_count + && stats->total.packed_size <= 1000 * expected_count); + SVN_TEST_ASSERT( stats->total.packed_size >= stats->total.expanded_size + && stats->total.packed_size <= 2 * stats->total.expanded_size); + SVN_TEST_ASSERT( stats->total.overhead_size >= 5 * expected_count + && stats->total.overhead_size <= 100 * expected_count); + + /* Rep sharing has no effect on the Greek tree. 
*/ + SVN_TEST_ASSERT(stats->total.count == stats->uniques.count); + SVN_TEST_ASSERT(stats->total.packed_size == stats->uniques.packed_size); + SVN_TEST_ASSERT(stats->total.expanded_size == stats->uniques.expanded_size); + SVN_TEST_ASSERT(stats->total.overhead_size == stats->uniques.overhead_size); + + SVN_TEST_ASSERT(stats->shared.count == 0); + SVN_TEST_ASSERT(stats->shared.packed_size == 0); + SVN_TEST_ASSERT(stats->shared.expanded_size == 0); + SVN_TEST_ASSERT(stats->shared.overhead_size == 0); + + /* No rep sharing. */ + SVN_TEST_ASSERT(stats->references == stats->total.count); + SVN_TEST_ASSERT(stats->expanded_size == stats->total.expanded_size); + + return SVN_NO_ERROR; +} + +static svn_error_t * +verify_node_stats(const svn_fs_fs__node_stats_t *node_stats, + apr_uint64_t expected_count) +{ + SVN_TEST_ASSERT(node_stats->count == expected_count); + SVN_TEST_ASSERT( node_stats->size > 100 * node_stats->count + && node_stats->size < 1000 * node_stats->count); + + return SVN_NO_ERROR; +} + +static svn_error_t * +verify_large_change(const svn_fs_fs__large_change_info_t *change, + svn_revnum_t revision) +{ + if (change->revision == SVN_INVALID_REVNUM) + { + /* Unused entry due to the Greek tree being small. */ + SVN_TEST_ASSERT(change->path->len == 0); + SVN_TEST_ASSERT(change->size == 0); + } + else if (strcmp(change->path->data, "/") == 0) + { + /* The root folder nodes are always there, i.e. aren't in the + * Greek tree "do add" list. 
*/ + SVN_TEST_ASSERT( SVN_IS_VALID_REVNUM(change->revision) + && change->revision <= revision); + } + else + { + const struct svn_test__tree_entry_t *node; + for (node = svn_test__greek_tree_nodes; node->path; node++) + if (strcmp(node->path, change->path->data + 1) == 0) + { + SVN_TEST_ASSERT(change->revision == revision); + + /* When checking content sizes, keep in mind the optional + * SVNDIFF overhead.*/ + if (node->contents) + SVN_TEST_ASSERT( change->size >= strlen(node->contents) + && change->size <= 12 + strlen(node->contents)); + + return SVN_NO_ERROR; + } + + SVN_TEST_ASSERT(!"Change is part of Greek tree"); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +verify_histogram(const svn_fs_fs__histogram_t *histogram) +{ + apr_uint64_t sum_count = 0; + apr_uint64_t sum_size = 0; + + int i; + for (i = 0; i < 64; ++i) + { + svn_fs_fs__histogram_line_t line = histogram->lines[i]; + + if (i > 10 || i < 1) + SVN_TEST_ASSERT(line.sum == 0 && line.count == 0); + else + SVN_TEST_ASSERT( line.sum >= (line.count << (i-1)) + && line.sum <= (line.count << i)); + + sum_count += line.count; + sum_size += line.sum; + } + + SVN_TEST_ASSERT(histogram->total.count == sum_count); + SVN_TEST_ASSERT(histogram->total.sum == sum_size); + + return SVN_NO_ERROR; +} + +static svn_error_t * +get_repo_stats(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_repos_t *repos; + svn_revnum_t rev; + apr_size_t i; + svn_fs_fs__stats_t *stats; + svn_fs_fs__extension_info_t *extension_info; + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + /* Create a filesystem */ + SVN_ERR(create_greek_repo(&repos, &rev, opts, REPO_NAME, pool, pool)); + + /* Gather statistics info on that repo. */ + SVN_ERR(svn_fs_fs__get_stats(&stats, svn_repos_fs(repos), NULL, NULL, + NULL, NULL, pool, pool)); + + /* Check that the stats make sense. 
*/ + SVN_TEST_ASSERT(stats->total_size > 1000 && stats->total_size < 10000); + SVN_TEST_ASSERT(stats->revision_count == 2); + SVN_TEST_ASSERT(stats->change_count == 20); + SVN_TEST_ASSERT(stats->change_len > 500 && stats->change_len < 2000); + + /* Check representation stats. */ + SVN_ERR(verify_representation_stats(&stats->total_rep_stats, 20)); + SVN_ERR(verify_representation_stats(&stats->file_rep_stats, 12)); + SVN_ERR(verify_representation_stats(&stats->dir_rep_stats, 8)); + SVN_ERR(verify_representation_stats(&stats->file_prop_rep_stats, 0)); + SVN_ERR(verify_representation_stats(&stats->dir_prop_rep_stats, 0)); + + /* Check node stats against rep stats. */ + SVN_ERR(verify_node_stats(&stats->total_node_stats, 22)); + SVN_ERR(verify_node_stats(&stats->file_node_stats, 12)); + SVN_ERR(verify_node_stats(&stats->dir_node_stats, 10)); + + /* Check largest changes. */ + SVN_TEST_ASSERT(stats->largest_changes->count == 64); + SVN_TEST_ASSERT(stats->largest_changes->min_size == 0); + + for (i = 0; i < stats->largest_changes->count; ++i) + SVN_ERR(verify_large_change(stats->largest_changes->changes[i], rev)); + + /* Check histograms. 
*/ + SVN_ERR(verify_histogram(&stats->rep_size_histogram)); + SVN_ERR(verify_histogram(&stats->node_size_histogram)); + SVN_ERR(verify_histogram(&stats->added_rep_size_histogram)); + SVN_ERR(verify_histogram(&stats->added_node_size_histogram)); + SVN_ERR(verify_histogram(&stats->unused_rep_histogram)); + SVN_ERR(verify_histogram(&stats->file_histogram)); + SVN_ERR(verify_histogram(&stats->file_rep_histogram)); + SVN_ERR(verify_histogram(&stats->file_prop_histogram)); + SVN_ERR(verify_histogram(&stats->file_prop_rep_histogram)); + SVN_ERR(verify_histogram(&stats->dir_histogram)); + SVN_ERR(verify_histogram(&stats->dir_rep_histogram)); + SVN_ERR(verify_histogram(&stats->dir_prop_histogram)); + SVN_ERR(verify_histogram(&stats->dir_prop_rep_histogram)); + + /* No file in the Greek tree has an externsion */ + SVN_TEST_ASSERT(apr_hash_count(stats->by_extension) == 1); + extension_info = svn_hash_gets(stats->by_extension, "(none)"); + SVN_TEST_ASSERT(extension_info); + + SVN_ERR(verify_histogram(&extension_info->rep_histogram)); + SVN_ERR(verify_histogram(&extension_info->node_histogram)); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +#define REPO_NAME "test-repo-dump-index-test" + +typedef struct dump_baton_t +{ + /* Number of callback invocations so far */ + int invocations; + + /* Rev file location we expect to be reported next */ + apr_off_t offset; + + /* All items must be from this revision. */ + svn_revnum_t revision; + + /* Track the item numbers we have already seen. */ + svn_bit_array__t *numbers_seen; +} dump_baton_t; + +static svn_error_t * +dump_index_entry(const svn_fs_fs__p2l_entry_t *entry, + void *baton_p, + apr_pool_t *scratch_pool) +{ + dump_baton_t *baton = baton_p; + + /* Count invocations. */ + baton->invocations++; + + /* We expect a report of contiguous non-empty items. 
*/ + SVN_TEST_ASSERT(entry->offset == baton->offset); + SVN_TEST_ASSERT(entry->size > 0 && entry->size < 1000); + baton->offset += entry->size; + + /* Type must be valid. */ + SVN_TEST_ASSERT( entry->type > SVN_FS_FS__ITEM_TYPE_UNUSED + && entry->type <= SVN_FS_FS__ITEM_TYPE_CHANGES); + + /* We expect all items to be from the specified revision. */ + SVN_TEST_ASSERT(entry->item.revision == baton->revision); + + /* Item numnber must be plausibly small and unique. */ + SVN_TEST_ASSERT(entry->item.number < 100); + SVN_TEST_ASSERT(!svn_bit_array__get(baton->numbers_seen, + (apr_size_t)entry->item.number)); + svn_bit_array__set(baton->numbers_seen, (apr_size_t)entry->item.number, 1); + + return SVN_NO_ERROR; +} + +static svn_error_t * +dump_index(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_repos_t *repos; + svn_revnum_t rev; + dump_baton_t baton; + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + if (opts->server_minor_version && (opts->server_minor_version < 9)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.9 SVN doesn't have FSFS indexes"); + + /* Create a filesystem */ + SVN_ERR(create_greek_repo(&repos, &rev, opts, REPO_NAME, pool, pool)); + + /* Read the index data for REV from that repo. */ + baton.invocations = 0; + baton.offset = 0; + baton.revision = rev; + baton.numbers_seen = svn_bit_array__create(100, pool); + SVN_ERR(svn_fs_fs__dump_index(svn_repos_fs(repos), rev, dump_index_entry, + &baton, NULL, NULL, pool)); + + /* Check that we've got all data (20 noderevs + 20 reps + 1 changes list). 
*/ + SVN_TEST_ASSERT(baton.invocations == 41); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + +/* ------------------------------------------------------------------------ */ + +static svn_error_t * +receive_index(const svn_fs_fs__p2l_entry_t *entry, + void *baton, + apr_pool_t *scratch_pool) +{ + apr_array_header_t *entries = baton; + APR_ARRAY_PUSH(entries, svn_fs_fs__p2l_entry_t *) + = apr_pmemdup(entries->pool, entry, sizeof(*entry)); + + return SVN_NO_ERROR; +} + +#define REPO_NAME "test-repo-load-index-test" + +static svn_error_t * +load_index(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_repos_t *repos; + svn_revnum_t rev; + apr_array_header_t *entries = apr_array_make(pool, 41, sizeof(void *)); + apr_array_header_t *alt_entries = apr_array_make(pool, 1, sizeof(void *)); + svn_fs_fs__p2l_entry_t entry; + + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsfs") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSFS repositories only"); + + if (opts->server_minor_version && (opts->server_minor_version < 9)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.9 SVN doesn't have FSFS indexes"); + + /* Create a filesystem */ + SVN_ERR(create_greek_repo(&repos, &rev, opts, REPO_NAME, pool, pool)); + + /* Read the original index contents for REV in ENTRIES. */ + SVN_ERR(svn_fs_fs__dump_index(svn_repos_fs(repos), rev, receive_index, + entries, NULL, NULL, pool)); + + /* Replace it with an empty index. + * Note that the API requires at least one entry. Give it a dummy. 
*/ + entry.offset = 0; + entry.size = 0; + entry.type = SVN_FS_FS__ITEM_TYPE_UNUSED; + entry.item.number = SVN_FS_FS__ITEM_INDEX_UNUSED; + entry.item.revision = SVN_INVALID_REVNUM; + APR_ARRAY_PUSH(alt_entries, svn_fs_fs__p2l_entry_t *) = &entry; + + SVN_ERR(svn_fs_fs__load_index(svn_repos_fs(repos), rev, alt_entries, pool)); + SVN_TEST_ASSERT_ERROR(svn_repos_verify_fs3(repos, rev, rev, FALSE, FALSE, + NULL, NULL, NULL, NULL, NULL, + NULL, pool), + SVN_ERR_FS_INDEX_CORRUPTION); + + /* Restore the original index. */ + SVN_ERR(svn_fs_fs__load_index(svn_repos_fs(repos), rev, entries, pool)); + SVN_ERR(svn_repos_verify_fs3(repos, rev, rev, FALSE, FALSE, NULL, NULL, + NULL, NULL, NULL, NULL, pool)); + + return SVN_NO_ERROR; +} + +#undef REPO_NAME + + + +/* The test table. */ + +static int max_threads = 0; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_OPTS_PASS(get_repo_stats, + "get statistics on a FSFS filesystem"), + SVN_TEST_OPTS_PASS(dump_index, + "dump the P2L index"), + SVN_TEST_OPTS_PASS(load_index, + "load the P2L index"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_fs_fs/fs-pack-test.c b/subversion/tests/libsvn_fs_x/fs-x-pack-test.c index f0d7650..f85a357 100644 --- a/subversion/tests/libsvn_fs_fs/fs-pack-test.c +++ b/subversion/tests/libsvn_fs_x/fs-x-pack-test.c @@ -1,4 +1,4 @@ -/* fs-pack-test.c --- tests for the filesystem +/* fs-x-pack-test.c --- tests for the FSX filesystem * * ==================================================================== * Licensed to the Apache Software Foundation (ASF) under one @@ -25,7 +25,8 @@ #include <apr_pools.h> #include "../svn_test.h" -#include "../../libsvn_fs_fs/fs.h" +#include "../../libsvn_fs_x/fs.h" +#include "../../libsvn_fs_x/reps.h" #include "svn_pools.h" #include "svn_props.h" @@ -38,16 +39,6 @@ /*** Helper Functions ***/ -static void -ignore_fs_warnings(void *baton, svn_error_t *err) -{ -#ifdef SVN_DEBUG - SVN_DBG(("Ignoring FS warning 
%s\n", - svn_error_symbolic_name(err ? err->apr_err : 0))); -#endif - return; -} - /* Write the format number and maximum number of files per directory to a new format file in PATH, overwriting a previously existing file. Use POOL for temporary allocation. @@ -62,36 +53,15 @@ write_format(const char *path, const char *contents; path = svn_dirent_join(path, "format", pool); + SVN_TEST_ASSERT(max_files_per_dir > 0); - if (format >= SVN_FS_FS__MIN_LAYOUT_FORMAT_OPTION_FORMAT) - { - if (max_files_per_dir) - contents = apr_psprintf(pool, - "%d\n" - "layout sharded %d\n", - format, max_files_per_dir); - else - contents = apr_psprintf(pool, - "%d\n" - "layout linear", - format); - } - else - { - contents = apr_psprintf(pool, "%d\n", format); - } + contents = apr_psprintf(pool, + "%d\n" + "layout sharded %d\n", + format, max_files_per_dir); - { - const char *path_tmp; - - SVN_ERR(svn_io_write_unique(&path_tmp, - svn_dirent_dirname(path, pool), - contents, strlen(contents), - svn_io_file_del_none, pool)); - - /* rename the temp file as the real destination */ - SVN_ERR(svn_io_file_rename(path_tmp, path, pool)); - } + SVN_ERR(svn_io_write_atomic(path, contents, strlen(contents), + NULL /* copy perms */, pool)); /* And set the perms to make it read only */ return svn_io_set_file_read_only(path, FALSE, pool); @@ -143,6 +113,8 @@ pack_notify(void *baton, return SVN_NO_ERROR; } +#define R1_LOG_MSG "Let's serf" + /* Create a packed filesystem in DIR. Set the shard size to SHARD_SIZE and create NUM_REVS number of revisions (in addition to r0). Use POOL for allocations. 
After this function successfully @@ -165,6 +137,15 @@ create_packed_filesystem(const char *dir, apr_pool_t *iterpool; int version; + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsx") != 0) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "this will test FSX repositories only"); + + if (opts->server_minor_version && (opts->server_minor_version < 9)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "pre-1.9 SVN doesn't support FSX"); + /* Create a filesystem, then close it */ SVN_ERR(svn_test__create_fs(&fs, dir, opts, subpool)); svn_pool_destroy(subpool); @@ -178,12 +159,15 @@ create_packed_filesystem(const char *dir, SVN_ERR(write_format(dir, version, shard_size, subpool)); /* Reopen the filesystem */ - SVN_ERR(svn_fs_open(&fs, dir, NULL, subpool)); + SVN_ERR(svn_fs_open2(&fs, dir, NULL, subpool, subpool)); /* Revision 1: the Greek tree */ SVN_ERR(svn_fs_begin_txn(&txn, fs, 0, subpool)); SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); SVN_ERR(svn_test__create_greek_tree(txn_root, subpool)); + SVN_ERR(svn_fs_change_txn_prop(txn, SVN_PROP_REVISION_LOG, + svn_string_create(R1_LOG_MSG, pool), + pool)); SVN_ERR(svn_fs_commit_txn(&conflict, &after_rev, txn, subpool)); SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(after_rev)); @@ -230,7 +214,7 @@ prepare_revprop_repo(svn_fs_t **fs, /* Create the packed FS and open it. */ SVN_ERR(create_packed_filesystem(repo_name, opts, max_rev, shard_size, pool)); - SVN_ERR(svn_fs_open(fs, repo_name, NULL, pool)); + SVN_ERR(svn_fs_open2(fs, repo_name, NULL, pool, pool)); subpool = svn_pool_create(pool); /* Do a commit to trigger packing. 
*/ @@ -285,7 +269,7 @@ huge_log(svn_revnum_t rev, apr_pool_t *pool) /*** Tests ***/ /* ------------------------------------------------------------------------ */ -#define REPO_NAME "test-repo-fsfs-pack" +#define REPO_NAME "test-repo-fsx-pack" #define SHARD_SIZE 7 #define MAX_REV 53 static svn_error_t * @@ -299,11 +283,6 @@ pack_filesystem(const svn_test_opts_t *opts, apr_file_t *file; apr_size_t len; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 6))) - return SVN_NO_ERROR; - SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); @@ -313,27 +292,18 @@ pack_filesystem(const svn_test_opts_t *opts, { path = svn_dirent_join_many(pool, REPO_NAME, "revs", apr_psprintf(pool, "%d.pack", i / SHARD_SIZE), - "pack", NULL); + "pack", SVN_VA_NULL); - /* These files should exist. */ + /* This file should exist. */ SVN_ERR(svn_io_check_path(path, &kind, pool)); if (kind != svn_node_file) return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, "Expected pack file '%s' not found", path); - path = svn_dirent_join_many(pool, REPO_NAME, "revs", - apr_psprintf(pool, "%d.pack", i / SHARD_SIZE), - "manifest", NULL); - SVN_ERR(svn_io_check_path(path, &kind, pool)); - if (kind != svn_node_file) - return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, - "Expected manifest file '%s' not found", - path); - /* This directory should not exist. */ path = svn_dirent_join_many(pool, REPO_NAME, "revs", apr_psprintf(pool, "%d", i / SHARD_SIZE), - NULL); + SVN_VA_NULL); SVN_ERR(svn_io_check_path(path, &kind, pool)); if (kind != svn_node_none) return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, @@ -355,7 +325,7 @@ pack_filesystem(const svn_test_opts_t *opts, /* Finally, make sure the final revision directory does exist. 
*/ path = svn_dirent_join_many(pool, REPO_NAME, "revs", apr_psprintf(pool, "%d", (i / SHARD_SIZE) + 1), - NULL); + SVN_VA_NULL); SVN_ERR(svn_io_check_path(path, &kind, pool)); if (kind != svn_node_none) return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, @@ -369,7 +339,7 @@ pack_filesystem(const svn_test_opts_t *opts, #undef MAX_REV /* ------------------------------------------------------------------------ */ -#define REPO_NAME "test-repo-fsfs-pack-even" +#define REPO_NAME "test-repo-fsx-pack-even" #define SHARD_SIZE 4 #define MAX_REV 11 static svn_error_t * @@ -379,15 +349,10 @@ pack_even_filesystem(const svn_test_opts_t *opts, svn_node_kind_t kind; const char *path; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 6))) - return SVN_NO_ERROR; - SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); - path = svn_dirent_join_many(pool, REPO_NAME, "revs", "2.pack", NULL); + path = svn_dirent_join_many(pool, REPO_NAME, "revs", "2.pack", SVN_VA_NULL); SVN_ERR(svn_io_check_path(path, &kind, pool)); if (kind != svn_node_dir) return svn_error_createf(SVN_ERR_FS_GENERAL, NULL, @@ -412,13 +377,8 @@ read_packed_fs(const svn_test_opts_t *opts, svn_stringbuf_t *rstring; svn_revnum_t i; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 6))) - return SVN_NO_ERROR; - SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); - SVN_ERR(svn_fs_open(&fs, REPO_NAME, NULL, pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); for (i = 1; i < (MAX_REV + 1); i++) { @@ -459,14 +419,9 @@ commit_packed_fs(const svn_test_opts_t *opts, const char *conflict; svn_revnum_t after_rev; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || 
(opts->server_minor_version && (opts->server_minor_version < 6))) - return SVN_NO_ERROR; - /* Create the packed FS and open it. */ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, 5, pool)); - SVN_ERR(svn_fs_open(&fs, REPO_NAME, NULL, pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); /* Now do a commit. */ SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, pool)); @@ -494,11 +449,6 @@ get_set_revprop_packed_fs(const svn_test_opts_t *opts, svn_fs_t *fs; svn_string_t *prop_value; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 7))) - return SVN_NO_ERROR; - /* Create the packed FS and open it. */ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, pool)); @@ -551,11 +501,6 @@ get_set_large_revprop_packed_fs(const svn_test_opts_t *opts, svn_string_t *prop_value; svn_revnum_t rev; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 7))) - return SVN_NO_ERROR; - /* Create the packed FS and open it. */ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, pool)); @@ -624,11 +569,6 @@ get_set_huge_revprop_packed_fs(const svn_test_opts_t *opts, svn_string_t *prop_value; svn_revnum_t rev; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 7))) - return SVN_NO_ERROR; - /* Create the packed FS and open it. 
*/ SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, pool)); @@ -697,11 +637,6 @@ recover_fully_packed(const svn_test_opts_t *opts, svn_revnum_t after_rev; svn_error_t *err; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 7))) - return SVN_NO_ERROR; - /* Create a packed FS for which every revision will live in a pack digest file, and then recover it. */ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); @@ -709,7 +644,7 @@ recover_fully_packed(const svn_test_opts_t *opts, /* Add another revision, re-pack, re-recover. */ subpool = svn_pool_create(pool); - SVN_ERR(svn_fs_open(&fs, REPO_NAME, NULL, subpool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool)); SVN_ERR(svn_fs_begin_txn(&txn, fs, MAX_REV, subpool)); SVN_ERR(svn_fs_txn_root(&txn_root, txn, subpool)); SVN_ERR(svn_test__set_file_contents(txn_root, "A/mu", "new-mu", subpool)); @@ -726,7 +661,7 @@ recover_fully_packed(const svn_test_opts_t *opts, apr_psprintf(pool, "%ld/%ld", after_rev / SHARD_SIZE, after_rev), - NULL), + SVN_VA_NULL), FALSE, pool)); err = svn_fs_recover(REPO_NAME, NULL, NULL, pool); if (! 
err) @@ -760,17 +695,12 @@ file_hint_at_shard_boundary(const svn_test_opts_t *opts, svn_stringbuf_t *retrieved_contents; svn_error_t *err = SVN_NO_ERROR; - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 8))) - return SVN_NO_ERROR; - /* Create a packed FS and MAX_REV revisions */ SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); /* Reopen the filesystem */ subpool = svn_pool_create(pool); - SVN_ERR(svn_fs_open(&fs, REPO_NAME, NULL, subpool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, subpool, subpool)); /* Revision = SHARD_SIZE */ file_contents = get_rev_contents(SHARD_SIZE, subpool); @@ -798,153 +728,155 @@ file_hint_at_shard_boundary(const svn_test_opts_t *opts, #undef SHARD_SIZE /* ------------------------------------------------------------------------ */ -#define REPO_NAME "get_set_multiple_huge_revprops_packed_fs" -#define SHARD_SIZE 4 -#define MAX_REV 9 +#define REPO_NAME "test-repo-fsx-info" +#define SHARD_SIZE 3 +#define MAX_REV 5 static svn_error_t * -get_set_multiple_huge_revprops_packed_fs(const svn_test_opts_t *opts, - apr_pool_t *pool) +test_info(const svn_test_opts_t *opts, + apr_pool_t *pool) { svn_fs_t *fs; - svn_string_t *prop_value; - svn_revnum_t rev; - - /* Bail (with success) on known-untestable scenarios */ - if ((strcmp(opts->fs_type, "fsfs") != 0) - || (opts->server_minor_version && (opts->server_minor_version < 7))) - return SVN_NO_ERROR; - - /* Create the packed FS and open it. 
*/ - SVN_ERR(prepare_revprop_repo(&fs, REPO_NAME, MAX_REV, SHARD_SIZE, opts, - pool)); + const svn_fs_fsfs_info_t *fsfs_info; + const svn_fs_info_placeholder_t *info; - /* Set commit messages to different values */ - for (rev = 0; rev <= MAX_REV; ++rev) - SVN_ERR(svn_fs_change_rev_prop(fs, rev, SVN_PROP_REVISION_LOG, - default_log(rev, pool), + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, pool)); - /* verify */ - for (rev = 0; rev <= MAX_REV; ++rev) - { - SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, - SVN_PROP_REVISION_LOG, pool)); - SVN_TEST_STRING_ASSERT(prop_value->data, default_log(rev, pool)->data); - } + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); + SVN_ERR(svn_fs_info(&info, fs, pool, pool)); + info = svn_fs_info_dup(info, pool, pool); - /* Put a huge revprop into revision 1 and 2. */ - SVN_ERR(svn_fs_change_rev_prop(fs, 1, SVN_PROP_REVISION_LOG, - huge_log(1, pool), - pool)); - SVN_ERR(svn_fs_change_rev_prop(fs, 2, SVN_PROP_REVISION_LOG, - huge_log(2, pool), - pool)); - SVN_ERR(svn_fs_change_rev_prop(fs, 5, SVN_PROP_REVISION_LOG, - huge_log(5, pool), - pool)); - SVN_ERR(svn_fs_change_rev_prop(fs, 6, SVN_PROP_REVISION_LOG, - huge_log(6, pool), - pool)); + SVN_TEST_STRING_ASSERT(opts->fs_type, info->fs_type); - /* verify */ - for (rev = 0; rev <= MAX_REV; ++rev) - { - SVN_ERR(svn_fs_revision_prop(&prop_value, fs, rev, - SVN_PROP_REVISION_LOG, pool)); + /* Bail (with success) on known-untestable scenarios */ + if (strcmp(opts->fs_type, "fsx") != 0) + return SVN_NO_ERROR; - if (rev == 1 || rev == 2 || rev == 5 || rev == 6) - SVN_TEST_STRING_ASSERT(prop_value->data, - huge_log(rev, pool)->data); - else - SVN_TEST_STRING_ASSERT(prop_value->data, - default_log(rev, pool)->data); - } + fsfs_info = (const void *)info; + SVN_TEST_ASSERT(fsfs_info->shard_size == SHARD_SIZE); + SVN_TEST_ASSERT(fsfs_info->min_unpacked_rev + == (MAX_REV + 1) / SHARD_SIZE * SHARD_SIZE); return SVN_NO_ERROR; } #undef REPO_NAME -#undef MAX_REV 
#undef SHARD_SIZE +#undef MAX_REV /* ------------------------------------------------------------------------ */ - -#define REPO_NAME "revprop_caching_on_off" +#define REPO_NAME "test-repo-fsx-rev-container" +#define SHARD_SIZE 3 +#define MAX_REV 5 static svn_error_t * -revprop_caching_on_off(const svn_test_opts_t *opts, - apr_pool_t *pool) +test_reps(const svn_test_opts_t *opts, + apr_pool_t *pool) { - svn_fs_t *fs1; - svn_fs_t *fs2; - apr_hash_t *fs_config; - svn_string_t *value; - const svn_string_t *another_value_for_avoiding_warnings_from_a_broken_api; - const svn_string_t *new_value = svn_string_create("new", pool); + svn_fs_t *fs = NULL; + svn_fs_x__reps_builder_t *builder; + svn_fs_x__reps_t *container; + svn_stringbuf_t *serialized; + svn_stream_t *stream; + svn_stringbuf_t *contents = svn_stringbuf_create_ensure(10000, pool); + int i; - if (strcmp(opts->fs_type, "fsfs") != 0) - return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); + for (i = 0; i < 10000; ++i) + { + int v, s = 0; + for (v = i; v > 0; v /= 10) + s += v % 10; - /* Open two filesystem objects, enable revision property caching - * in one of them. */ - SVN_ERR(svn_test__create_fs(&fs1, REPO_NAME, opts, pool)); + svn_stringbuf_appendbyte(contents, (char)(s + ' ')); + } - fs_config = apr_hash_make(pool); - apr_hash_set(fs_config, SVN_FS_CONFIG_FSFS_CACHE_REVPROPS, - APR_HASH_KEY_STRING, "1"); + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, + pool)); - SVN_ERR(svn_fs_open(&fs2, svn_fs_path(fs1, pool), fs_config, pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); - /* With inefficient named atomics, the filesystem will output a warning - and disable the revprop caching, but we still would like to test - these cases. Ignore the warning(s). 
*/ - svn_fs_set_warning_func(fs2, ignore_fs_warnings, NULL); + builder = svn_fs_x__reps_builder_create(fs, pool); + for (i = 10000; i > 10; --i) + { + apr_size_t idx; + svn_string_t string; + string.data = contents->data; + string.len = i; - SVN_ERR(svn_fs_revision_prop(&value, fs2, 0, "svn:date", pool)); - another_value_for_avoiding_warnings_from_a_broken_api = value; - SVN_ERR(svn_fs_change_rev_prop2( - fs1, 0, "svn:date", - &another_value_for_avoiding_warnings_from_a_broken_api, - new_value, pool)); + SVN_ERR(svn_fs_x__reps_add(&idx, builder, &string)); + } - /* Expect the change to be visible through both objects.*/ - SVN_ERR(svn_fs_revision_prop(&value, fs1, 0, "svn:date", pool)); - SVN_TEST_STRING_ASSERT(value->data, "new"); + serialized = svn_stringbuf_create_empty(pool); + stream = svn_stream_from_stringbuf(serialized, pool); + SVN_ERR(svn_fs_x__write_reps_container(stream, builder, pool)); - SVN_ERR(svn_fs_revision_prop(&value, fs2, 0, "svn:date", pool)); - SVN_TEST_STRING_ASSERT(value->data, "new"); + SVN_ERR(svn_stream_reset(stream)); + SVN_ERR(svn_fs_x__read_reps_container(&container, stream, pool, pool)); + SVN_ERR(svn_stream_close(stream)); return SVN_NO_ERROR; } #undef REPO_NAME +#undef SHARD_SIZE +#undef MAX_REV /* ------------------------------------------------------------------------ */ +#define REPO_NAME "test-repo-fsx-pack-shard-size-one" +#define SHARD_SIZE 1 +#define MAX_REV 4 +static svn_error_t * +pack_shard_size_one(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_string_t *propval; + svn_fs_t *fs; + + SVN_ERR(create_packed_filesystem(REPO_NAME, opts, MAX_REV, SHARD_SIZE, + pool)); + SVN_ERR(svn_fs_open2(&fs, REPO_NAME, NULL, pool, pool)); + /* whitebox: revprop packing special-cases r0, which causes + (start_rev==1, end_rev==0) in pack_revprops_shard(). So test that. 
*/ + SVN_ERR(svn_fs_revision_prop(&propval, fs, 1, SVN_PROP_REVISION_LOG, pool)); + SVN_TEST_STRING_ASSERT(propval->data, R1_LOG_MSG); + + return SVN_NO_ERROR; +} +#undef REPO_NAME +#undef SHARD_SIZE +#undef MAX_REV +/* ------------------------------------------------------------------------ */ /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(pack_filesystem, - "pack a FSFS filesystem"), + "pack a FSX filesystem"), SVN_TEST_OPTS_PASS(pack_even_filesystem, - "pack FSFS where revs % shard = 0"), + "pack FSX where revs % shard = 0"), SVN_TEST_OPTS_PASS(read_packed_fs, - "read from a packed FSFS filesystem"), + "read from a packed FSX filesystem"), SVN_TEST_OPTS_PASS(commit_packed_fs, - "commit to a packed FSFS filesystem"), + "commit to a packed FSX filesystem"), SVN_TEST_OPTS_PASS(get_set_revprop_packed_fs, - "get/set revprop while packing FSFS filesystem"), + "get/set revprop while packing FSX filesystem"), SVN_TEST_OPTS_PASS(get_set_large_revprop_packed_fs, - "get/set large packed revprops in FSFS"), + "get/set large packed revprops in FSX"), SVN_TEST_OPTS_PASS(get_set_huge_revprop_packed_fs, - "get/set huge packed revprops in FSFS"), + "get/set huge packed revprops in FSX"), SVN_TEST_OPTS_PASS(recover_fully_packed, "recover a fully packed filesystem"), SVN_TEST_OPTS_PASS(file_hint_at_shard_boundary, "test file hint at shard boundary"), - SVN_TEST_OPTS_PASS(get_set_multiple_huge_revprops_packed_fs, - "set multiple huge revprops in packed FSFS"), - SVN_TEST_OPTS_PASS(revprop_caching_on_off, - "change revprops with enabled and disabled caching"), + SVN_TEST_OPTS_PASS(test_info, + "test svn_fs_info"), + SVN_TEST_OPTS_PASS(test_reps, + "test representations container"), + SVN_TEST_OPTS_PASS(pack_shard_size_one, + "test packing with shard size = 1"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git 
a/subversion/tests/libsvn_fs_x/string-table-test.c b/subversion/tests/libsvn_fs_x/string-table-test.c new file mode 100644 index 0000000..2633bfa --- /dev/null +++ b/subversion/tests/libsvn_fs_x/string-table-test.c @@ -0,0 +1,318 @@ +/* string-table-test.c --- tests for string tables + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include "../svn_test.h" +#include "../../libsvn_fs_x/string_table.h" +#include "svn_pools.h" +#include "svn_sorts.h" + +/* Some tests use this list of strings as is. They are all "short strings" + * in the terminology of string tables. We use them also as an input to + * generate strings of arbitrary length. 
+ */ +enum { STRING_COUNT = 12 }; +static const char *basic_strings[STRING_COUNT] = + { + "some string", + "this is another string", + "this is a duplicate", + "some longer string", + "this is a very long string", + "and here is another", + "this is a duplicate", + "/some/path/to/a/dir", + "/some/path/to/a/file", + "/some/other/dir", + "/some/other/file", + "" + }; + +/* Generate a string of exactly LEN chars (plus terminating NUL). KEY is + * an arbitrary integer that will be transformed into a character sequence + * using entries of BASIC_STRINGS. The result will be allocated in POOL. + */ +static svn_stringbuf_t * +generate_string(apr_uint64_t key, apr_size_t len, apr_pool_t *pool) +{ + svn_stringbuf_t *result = svn_stringbuf_create_ensure(len, pool); + apr_uint64_t temp = key; + apr_uint64_t run = 0; + + while (len) + { + apr_size_t idx; + apr_size_t add_len; + + if (temp == 0) + { + temp = key; + run++; + } + + idx = (temp + run) % STRING_COUNT; + temp /= STRING_COUNT; + + add_len = strlen(basic_strings[idx]); + add_len = MIN(len, add_len); + + svn_stringbuf_appendbytes(result, basic_strings[idx], add_len); + len -= add_len; + } + + return result; +} + +static svn_error_t * +store_and_load_table(string_table_t **table, apr_pool_t *pool) +{ + svn_stringbuf_t *stream_buffer = svn_stringbuf_create_empty(pool); + svn_stream_t *stream; + + stream = svn_stream_from_stringbuf(stream_buffer, pool); + SVN_ERR(svn_fs_x__write_string_table(stream, *table, pool)); + SVN_ERR(svn_stream_close(stream)); + + *table = NULL; + + stream = svn_stream_from_stringbuf(stream_buffer, pool); + SVN_ERR(svn_fs_x__read_string_table(table, stream, pool, pool)); + SVN_ERR(svn_stream_close(stream)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +create_empty_table_body(svn_boolean_t do_load_store, + apr_pool_t *pool) +{ + string_table_builder_t *builder + = svn_fs_x__string_table_builder_create(pool); + string_table_t *table + = svn_fs_x__string_table_create(builder, pool); + + 
SVN_TEST_STRING_ASSERT(svn_fs_x__string_table_get(table, 0, NULL, pool), ""); + + if (do_load_store) + SVN_ERR(store_and_load_table(&table, pool)); + + SVN_TEST_STRING_ASSERT(svn_fs_x__string_table_get(table, 0, NULL, pool), ""); + + return SVN_NO_ERROR; +} + +static svn_error_t * +short_string_table_body(svn_boolean_t do_load_store, + apr_pool_t *pool) +{ + apr_size_t indexes[STRING_COUNT] = { 0 }; + + string_table_builder_t *builder; + string_table_t *table; + int i; + + builder = svn_fs_x__string_table_builder_create(pool); + for (i = 0; i < STRING_COUNT; ++i) + indexes[i] = svn_fs_x__string_table_builder_add(builder, basic_strings[i], 0); + + table = svn_fs_x__string_table_create(builder, pool); + if (do_load_store) + SVN_ERR(store_and_load_table(&table, pool)); + + SVN_TEST_ASSERT(indexes[2] == indexes[6]); + for (i = 0; i < STRING_COUNT; ++i) + { + apr_size_t len; + const char *string + = svn_fs_x__string_table_get(table, indexes[i], &len, pool); + + SVN_TEST_STRING_ASSERT(string, basic_strings[i]); + SVN_TEST_ASSERT(len == strlen(string)); + SVN_TEST_ASSERT(len == strlen(basic_strings[i])); + } + + SVN_TEST_STRING_ASSERT(svn_fs_x__string_table_get(table, STRING_COUNT, + NULL, pool), ""); + + return SVN_NO_ERROR; +} + +static svn_error_t * +large_string_table_body(svn_boolean_t do_load_store, + apr_pool_t *pool) +{ + enum { COUNT = 10 }; + + svn_stringbuf_t *strings[COUNT] = { 0 }; + apr_size_t indexes[COUNT] = { 0 }; + + string_table_builder_t *builder; + string_table_t *table; + int i; + + builder = svn_fs_x__string_table_builder_create(pool); + for (i = 0; i < COUNT; ++i) + { + strings[i] = generate_string(APR_UINT64_C(0x1234567876543210) * (i + 1), + 73000 + 1000 * i, pool); + indexes[i] = svn_fs_x__string_table_builder_add(builder, + strings[i]->data, + strings[i]->len); + } + + table = svn_fs_x__string_table_create(builder, pool); + if (do_load_store) + SVN_ERR(store_and_load_table(&table, pool)); + + for (i = 0; i < COUNT; ++i) + { + apr_size_t len; + 
const char *string + = svn_fs_x__string_table_get(table, indexes[i], &len, pool); + + SVN_TEST_STRING_ASSERT(string, strings[i]->data); + SVN_TEST_ASSERT(len == strlen(string)); + SVN_TEST_ASSERT(len == strings[i]->len); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +many_strings_table_body(svn_boolean_t do_load_store, + apr_pool_t *pool) +{ + /* cause multiple sub-tables (6 to be exact) to be created */ + enum { COUNT = 100 }; + + svn_stringbuf_t *strings[COUNT] = { 0 }; + apr_size_t indexes[COUNT] = { 0 }; + + string_table_builder_t *builder; + string_table_t *table; + int i; + + builder = svn_fs_x__string_table_builder_create(pool); + for (i = 0; i < COUNT; ++i) + { + strings[i] = generate_string(APR_UINT64_C(0x1234567876543210) * (i + 1), + (i * i) % 23000, pool); + indexes[i] = svn_fs_x__string_table_builder_add(builder, + strings[i]->data, + strings[i]->len); + } + + table = svn_fs_x__string_table_create(builder, pool); + if (do_load_store) + SVN_ERR(store_and_load_table(&table, pool)); + + for (i = 0; i < COUNT; ++i) + { + apr_size_t len; + const char *string + = svn_fs_x__string_table_get(table, indexes[i], &len, pool); + + SVN_TEST_STRING_ASSERT(string, strings[i]->data); + SVN_TEST_ASSERT(len == strlen(string)); + SVN_TEST_ASSERT(len == strings[i]->len); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +create_empty_table(apr_pool_t *pool) +{ + return svn_error_trace(create_empty_table_body(FALSE, pool)); +} + +static svn_error_t * +short_string_table(apr_pool_t *pool) +{ + return svn_error_trace(short_string_table_body(FALSE, pool)); +} + +static svn_error_t * +large_string_table(apr_pool_t *pool) +{ + return svn_error_trace(large_string_table_body(FALSE, pool)); +} + +static svn_error_t * +many_strings_table(apr_pool_t *pool) +{ + return svn_error_trace(many_strings_table_body(FALSE, pool)); +} + +static svn_error_t * +store_load_short_string_table(apr_pool_t *pool) +{ + return svn_error_trace(short_string_table_body(TRUE, pool)); +} + 
+static svn_error_t * +store_load_large_string_table(apr_pool_t *pool) +{ + return svn_error_trace(large_string_table_body(TRUE, pool)); +} + +static svn_error_t * +store_load_empty_table(apr_pool_t *pool) +{ + return svn_error_trace(create_empty_table_body(TRUE, pool)); +} + +static svn_error_t * +store_load_many_strings_table(apr_pool_t *pool) +{ + return svn_error_trace(many_strings_table_body(TRUE, pool)); +} + + +/* ------------------------------------------------------------------------ */ + +/* The test table. */ + +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(create_empty_table, + "create an empty string table"), + SVN_TEST_PASS2(short_string_table, + "string table with short strings only"), + SVN_TEST_PASS2(large_string_table, + "string table with large strings only"), + SVN_TEST_PASS2(many_strings_table, + "string table with many strings"), + SVN_TEST_PASS2(store_load_empty_table, + "store and load an empty string table"), + SVN_TEST_PASS2(store_load_short_string_table, + "store and load table with short strings only"), + SVN_TEST_PASS2(store_load_large_string_table, + "store and load table with large strings only"), + SVN_TEST_PASS2(store_load_many_strings_table, + "store and load string table with many strings"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_ra/ra-test.c b/subversion/tests/libsvn_ra/ra-test.c index 584da3d..83ac59f 100644 --- a/subversion/tests/libsvn_ra/ra-test.c +++ b/subversion/tests/libsvn_ra/ra-test.c @@ -25,7 +25,8 @@ #include <apr_general.h> #include <apr_pools.h> - +#include <apr_file_io.h> +#include <assert.h> #define SVN_DEPRECATED #include "svn_error.h" @@ -33,6 +34,9 @@ #include "svn_ra.h" #include "svn_time.h" #include "svn_pools.h" +#include "svn_cmdline.h" +#include "svn_dirent_uri.h" +#include "svn_hash.h" #include "../svn_test.h" #include "../svn_test_fs.h" @@ -44,23 +48,22 @@ static svn_error_t * 
-make_and_open_local_repos(svn_ra_session_t **session, - const char *repos_name, - const svn_test_opts_t *opts, - apr_pool_t *pool) +make_and_open_repos(svn_ra_session_t **session, + const char *repos_name, + const svn_test_opts_t *opts, + apr_pool_t *pool) { - svn_repos_t *repos; const char *url; svn_ra_callbacks2_t *cbtable; SVN_ERR(svn_ra_create_callbacks(&cbtable, pool)); + SVN_ERR(svn_test__init_auth_baton(&cbtable->auth_baton, pool)); - SVN_ERR(svn_test__create_repos(&repos, repos_name, opts, pool)); + SVN_ERR(svn_test__create_repos2(NULL, &url, NULL, repos_name, opts, + pool, pool)); SVN_ERR(svn_ra_initialize(pool)); - SVN_ERR(svn_uri_get_file_url_from_dirent(&url, repos_name, pool)); - - SVN_ERR(svn_ra_open3(session, url, NULL, cbtable, NULL, NULL, pool)); + SVN_ERR(svn_ra_open4(session, NULL, url, NULL, cbtable, NULL, NULL, pool)); return SVN_NO_ERROR; } @@ -90,6 +93,174 @@ commit_changes(svn_ra_session_t *session, return SVN_NO_ERROR; } +static svn_error_t * +commit_tree(svn_ra_session_t *session, + apr_pool_t *pool) +{ + apr_hash_t *revprop_table = apr_hash_make(pool); + const svn_delta_editor_t *editor; + void *edit_baton; + const char *repos_root_url; + void *root_baton, *A_baton, *B_baton, *file_baton; + + SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &edit_baton, + revprop_table, + NULL, NULL, NULL, TRUE, pool)); + SVN_ERR(svn_ra_get_repos_root(session, &repos_root_url, pool)); + + SVN_ERR(editor->open_root(edit_baton, SVN_INVALID_REVNUM, + pool, &root_baton)); + SVN_ERR(editor->add_directory("A", root_baton, NULL, SVN_INVALID_REVNUM, + pool, &A_baton)); + SVN_ERR(editor->add_directory("A/B", A_baton, NULL, SVN_INVALID_REVNUM, + pool, &B_baton)); + SVN_ERR(editor->add_file("A/B/f", B_baton, NULL, SVN_INVALID_REVNUM, + pool, &file_baton)); + SVN_ERR(editor->close_file(file_baton, NULL, pool)); + SVN_ERR(editor->add_file("A/B/g", B_baton, NULL, SVN_INVALID_REVNUM, + pool, &file_baton)); + SVN_ERR(editor->close_file(file_baton, NULL, pool)); + 
SVN_ERR(editor->close_directory(B_baton, pool)); + SVN_ERR(editor->add_directory("A/BB", A_baton, NULL, SVN_INVALID_REVNUM, + pool, &B_baton)); + SVN_ERR(editor->add_file("A/BB/f", B_baton, NULL, SVN_INVALID_REVNUM, + pool, &file_baton)); + SVN_ERR(editor->close_file(file_baton, NULL, pool)); + SVN_ERR(editor->add_file("A/BB/g", B_baton, NULL, SVN_INVALID_REVNUM, + pool, &file_baton)); + SVN_ERR(editor->close_file(file_baton, NULL, pool)); + SVN_ERR(editor->close_directory(B_baton, pool)); + SVN_ERR(editor->close_directory(A_baton, pool)); + SVN_ERR(editor->close_edit(edit_baton, pool)); + return SVN_NO_ERROR; +} + +/* Baton for opening tunnels */ +typedef struct tunnel_baton_t +{ + int magic; /* TUNNEL_MAGIC */ + int open_count; + svn_boolean_t last_check; +} tunnel_baton_t; + +#define TUNNEL_MAGIC 0xF00DF00F + +/* Baton for closing a specific tunnel */ +typedef struct close_baton_t +{ + int magic; + tunnel_baton_t *tb; + apr_proc_t *proc; +} close_baton_t; + +#define CLOSE_MAGIC 0x1BADBAD1 + +static svn_boolean_t +check_tunnel(void *tunnel_baton, const char *tunnel_name) +{ + tunnel_baton_t *b = tunnel_baton; + + if (b->magic != TUNNEL_MAGIC) + abort(); + + b->last_check = (0 == strcmp(tunnel_name, "test")); + return b->last_check; +} + +static void +close_tunnel(void *tunnel_context, void *tunnel_baton); + +static svn_error_t * +open_tunnel(svn_stream_t **request, svn_stream_t **response, + svn_ra_close_tunnel_func_t *close_func, void **close_baton, + void *tunnel_baton, + const char *tunnel_name, const char *user, + const char *hostname, int port, + svn_cancel_func_t cancel_func, void *cancel_baton, + apr_pool_t *pool) +{ + svn_node_kind_t kind; + apr_proc_t *proc; + apr_procattr_t *attr; + apr_status_t status; + const char *args[] = { "svnserve", "-t", "-r", ".", NULL }; + const char *svnserve; + tunnel_baton_t *b = tunnel_baton; + close_baton_t *cb; + + SVN_TEST_ASSERT(b->magic == TUNNEL_MAGIC); + + SVN_ERR(svn_dirent_get_absolute(&svnserve, 
"../../svnserve/svnserve", pool)); +#ifdef WIN32 + svnserve = apr_pstrcat(pool, svnserve, ".exe", SVN_VA_NULL); +#endif + SVN_ERR(svn_io_check_path(svnserve, &kind, pool)); + if (kind != svn_node_file) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "Could not find svnserve at %s", + svn_dirent_local_style(svnserve, pool)); + + status = apr_procattr_create(&attr, pool); + if (status == APR_SUCCESS) + status = apr_procattr_io_set(attr, 1, 1, 0); + if (status == APR_SUCCESS) + status = apr_procattr_cmdtype_set(attr, APR_PROGRAM); + proc = apr_palloc(pool, sizeof(*proc)); + if (status == APR_SUCCESS) + status = apr_proc_create(proc, + svn_dirent_local_style(svnserve, pool), + args, NULL, attr, pool); + if (status != APR_SUCCESS) + return svn_error_wrap_apr(status, "Could not run svnserve"); + apr_pool_note_subprocess(pool, proc, APR_KILL_NEVER); + + /* APR pipe objects inherit by default. But we don't want the + * tunnel agent's pipes held open by future child processes + * (such as other ra_svn sessions), so turn that off. 
*/ + apr_file_inherit_unset(proc->in); + apr_file_inherit_unset(proc->out); + + cb = apr_pcalloc(pool, sizeof(*cb)); + cb->magic = CLOSE_MAGIC; + cb->tb = b; + cb->proc = proc; + + *request = svn_stream_from_aprfile2(proc->in, FALSE, pool); + *response = svn_stream_from_aprfile2(proc->out, FALSE, pool); + *close_func = close_tunnel; + *close_baton = cb; + ++b->open_count; + return SVN_NO_ERROR; +} + +static void +close_tunnel(void *tunnel_context, void *tunnel_baton) +{ + close_baton_t *b = tunnel_context; + + if (b->magic != CLOSE_MAGIC) + abort(); + if (--b->tb->open_count == 0) + { + apr_status_t child_exit_status; + int child_exit_code; + apr_exit_why_e child_exit_why; + + SVN_TEST_ASSERT_NO_RETURN(0 == apr_file_close(b->proc->in)); + SVN_TEST_ASSERT_NO_RETURN(0 == apr_file_close(b->proc->out)); + + child_exit_status = + apr_proc_wait(b->proc, &child_exit_code, &child_exit_why, APR_WAIT); + + SVN_TEST_ASSERT_NO_RETURN(child_exit_status == APR_CHILD_DONE); + SVN_TEST_ASSERT_NO_RETURN(child_exit_code == 0); + SVN_TEST_ASSERT_NO_RETURN(child_exit_why == APR_PROC_EXIT); + } +} + + + + /*-------------------------------------------------------------------*/ /** The tests **/ @@ -130,9 +301,9 @@ location_segments_test(const svn_test_opts_t *opts, b.segments = segments; b.pool = pool; - SVN_ERR(make_and_open_local_repos(&session, - "test-repo-locsegs", opts, - pool)); + SVN_ERR(make_and_open_repos(&session, + "test-repo-locsegs", opts, + pool)); /* ### This currently tests only a small subset of what's possible. */ SVN_ERR(commit_changes(session, pool)); @@ -153,6 +324,294 @@ location_segments_test(const svn_test_opts_t *opts, } +/* Test ra_svn tunnel callbacks. 
*/ + +static svn_error_t * +check_tunnel_callback_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + tunnel_baton_t *b = apr_pcalloc(pool, sizeof(*b)); + svn_ra_callbacks2_t *cbtable; + svn_ra_session_t *session; + + b->magic = TUNNEL_MAGIC; + + SVN_ERR(svn_ra_create_callbacks(&cbtable, pool)); + cbtable->check_tunnel_func = check_tunnel; + cbtable->open_tunnel_func = open_tunnel; + cbtable->tunnel_baton = b; + SVN_ERR(svn_cmdline_create_auth_baton(&cbtable->auth_baton, + TRUE /* non_interactive */, + "jrandom", "rayjandom", + NULL, + TRUE /* no_auth_cache */, + FALSE /* trust_server_cert */, + NULL, NULL, NULL, pool)); + + b->last_check = TRUE; + SVN_TEST_ASSERT_ERROR(svn_ra_open4(&session, NULL, + "svn+foo://localhost/no-repo", + NULL, cbtable, NULL, NULL, pool), + SVN_ERR_RA_CANNOT_CREATE_SESSION); + SVN_TEST_ASSERT(!b->last_check); + return SVN_NO_ERROR; +} + +static svn_error_t * +tunnel_callback_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + tunnel_baton_t *b = apr_pcalloc(pool, sizeof(*b)); + apr_pool_t *scratch_pool = svn_pool_create(pool); + const char *url; + svn_ra_callbacks2_t *cbtable; + svn_ra_session_t *session; + const char tunnel_repos_name[] = "test-repo-tunnel"; + + b->magic = TUNNEL_MAGIC; + + SVN_ERR(svn_test__create_repos(NULL, tunnel_repos_name, opts, scratch_pool)); + + /* Immediately close the repository to avoid race condition with svnserve + (and then the cleanup code) with BDB when our pool is cleared. 
*/ + svn_pool_clear(scratch_pool); + + url = apr_pstrcat(pool, "svn+test://localhost/", tunnel_repos_name, + SVN_VA_NULL); + SVN_ERR(svn_ra_create_callbacks(&cbtable, pool)); + cbtable->check_tunnel_func = check_tunnel; + cbtable->open_tunnel_func = open_tunnel; + cbtable->tunnel_baton = b; + SVN_ERR(svn_cmdline_create_auth_baton(&cbtable->auth_baton, + TRUE /* non_interactive */, + "jrandom", "rayjandom", + NULL, + TRUE /* no_auth_cache */, + FALSE /* trust_server_cert */, + NULL, NULL, NULL, pool)); + + b->last_check = FALSE; + SVN_ERR(svn_ra_open4(&session, NULL, url, NULL, cbtable, NULL, NULL, + scratch_pool)); + SVN_TEST_ASSERT(b->last_check); + SVN_TEST_ASSERT(b->open_count > 0); + svn_pool_destroy(scratch_pool); + SVN_TEST_ASSERT(b->open_count == 0); + return SVN_NO_ERROR; +} + +struct lock_result_t { + svn_lock_t *lock; + svn_error_t *err; +}; + +struct lock_baton_t { + apr_hash_t *results; + apr_pool_t *pool; +}; + +/* Implements svn_ra_lock_callback_t. */ +static svn_error_t * +lock_cb(void *baton, + const char *path, + svn_boolean_t do_lock, + const svn_lock_t *lock, + svn_error_t *ra_err, + apr_pool_t *pool) +{ + struct lock_baton_t *b = baton; + struct lock_result_t *result = apr_palloc(b->pool, + sizeof(struct lock_result_t)); + + if (lock) + { + result->lock = apr_palloc(b->pool, sizeof(svn_lock_t)); + *result->lock = *lock; + result->lock->path = apr_pstrdup(b->pool, lock->path); + result->lock->token = apr_pstrdup(b->pool, lock->token); + result->lock->owner = apr_pstrdup(b->pool, lock->owner); + result->lock->comment = apr_pstrdup(b->pool, lock->comment); + } + else + result->lock = NULL; + result->err = ra_err; + + svn_hash_sets(b->results, apr_pstrdup(b->pool, path), result); + + return SVN_NO_ERROR; +} + +static svn_error_t * +expect_lock(const char *path, + apr_hash_t *results, + svn_ra_session_t *session, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + 
SVN_TEST_ASSERT(result && result->lock && !result->err); + SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool)); + SVN_TEST_ASSERT(lock); + return SVN_NO_ERROR; +} + +static svn_error_t * +expect_error(const char *path, + apr_hash_t *results, + svn_ra_session_t *session, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + SVN_TEST_ASSERT(result && result->err); + SVN_TEST_ASSERT(!result->lock); + /* RA layers shouldn't report SVN_ERR_FS_NOT_FOUND */ + SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool)); + + SVN_TEST_ASSERT(!lock); + return SVN_NO_ERROR; +} + +static svn_error_t * +expect_unlock(const char *path, + apr_hash_t *results, + svn_ra_session_t *session, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + SVN_TEST_ASSERT(result && !result->err); + SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool)); + SVN_TEST_ASSERT(!lock); + return SVN_NO_ERROR; +} + +static svn_error_t * +expect_unlock_error(const char *path, + apr_hash_t *results, + svn_ra_session_t *session, + apr_pool_t *scratch_pool) +{ + svn_lock_t *lock; + struct lock_result_t *result = svn_hash_gets(results, path); + + SVN_TEST_ASSERT(result && result->err); + SVN_ERR(svn_ra_get_lock(session, &lock, path, scratch_pool)); + SVN_TEST_ASSERT(lock); + return SVN_NO_ERROR; +} + +/* Test svn_ra_lock(). 
*/ +static svn_error_t * +lock_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_ra_session_t *session; + apr_hash_t *lock_targets = apr_hash_make(pool); + apr_hash_t *unlock_targets = apr_hash_make(pool); + svn_revnum_t rev = 1; + struct lock_result_t *result; + struct lock_baton_t baton; + apr_hash_index_t *hi; + + SVN_ERR(make_and_open_repos(&session, "test-repo-lock", opts, pool)); + SVN_ERR(commit_tree(session, pool)); + + baton.results = apr_hash_make(pool); + baton.pool = pool; + + svn_hash_sets(lock_targets, "A/B/f", &rev); + svn_hash_sets(lock_targets, "A/B/g", &rev); + svn_hash_sets(lock_targets, "A/B/z", &rev); + svn_hash_sets(lock_targets, "A/BB/f", &rev); + svn_hash_sets(lock_targets, "X/z", &rev); + + /* Lock some paths. */ + SVN_ERR(svn_ra_lock(session, lock_targets, "foo", FALSE, lock_cb, &baton, + pool)); + + SVN_ERR(expect_lock("A/B/f", baton.results, session, pool)); + SVN_ERR(expect_lock("A/B/g", baton.results, session, pool)); + SVN_ERR(expect_error("A/B/z", baton.results, session, pool)); + SVN_ERR(expect_lock("A/BB/f", baton.results, session, pool)); + SVN_ERR(expect_error("X/z", baton.results, session, pool)); + + /* Unlock without force and wrong lock tokens */ + for (hi = apr_hash_first(pool, lock_targets); hi; hi = apr_hash_next(hi)) + svn_hash_sets(unlock_targets, apr_hash_this_key(hi), "wrong-token"); + apr_hash_clear(baton.results); + SVN_ERR(svn_ra_unlock(session, unlock_targets, FALSE, lock_cb, &baton, pool)); + + SVN_ERR(expect_unlock_error("A/B/f", baton.results, session, pool)); + SVN_ERR(expect_unlock_error("A/B/g", baton.results, session, pool)); + SVN_ERR(expect_error("A/B/z", baton.results, session, pool)); + SVN_ERR(expect_unlock_error("A/BB/f", baton.results, session, pool)); + SVN_ERR(expect_error("X/z", baton.results, session, pool)); + + /* Force unlock */ + for (hi = apr_hash_first(pool, lock_targets); hi; hi = apr_hash_next(hi)) + svn_hash_sets(unlock_targets, apr_hash_this_key(hi), ""); + 
apr_hash_clear(baton.results); + SVN_ERR(svn_ra_unlock(session, unlock_targets, TRUE, lock_cb, &baton, pool)); + + SVN_ERR(expect_unlock("A/B/f", baton.results, session, pool)); + SVN_ERR(expect_unlock("A/B/g", baton.results, session, pool)); + SVN_ERR(expect_error("A/B/z", baton.results, session, pool)); + SVN_ERR(expect_unlock("A/BB/f", baton.results, session, pool)); + SVN_ERR(expect_error("X/z", baton.results, session, pool)); + + /* Lock again. */ + apr_hash_clear(baton.results); + SVN_ERR(svn_ra_lock(session, lock_targets, "foo", FALSE, lock_cb, &baton, + pool)); + + SVN_ERR(expect_lock("A/B/f", baton.results, session, pool)); + SVN_ERR(expect_lock("A/B/g", baton.results, session, pool)); + SVN_ERR(expect_error("A/B/z", baton.results, session, pool)); + SVN_ERR(expect_lock("A/BB/f", baton.results, session, pool)); + SVN_ERR(expect_error("X/z", baton.results, session, pool)); + + for (hi = apr_hash_first(pool, baton.results); hi; hi = apr_hash_next(hi)) + { + result = apr_hash_this_val(hi); + svn_hash_sets(unlock_targets, apr_hash_this_key(hi), + result->lock ? result->lock->token : "non-existent-token"); + } + apr_hash_clear(baton.results); + SVN_ERR(svn_ra_unlock(session, unlock_targets, FALSE, lock_cb, &baton, pool)); + + SVN_ERR(expect_unlock("A/B/f", baton.results, session, pool)); + SVN_ERR(expect_unlock("A/B/g", baton.results, session, pool)); + SVN_ERR(expect_error("A/B/z", baton.results, session, pool)); + SVN_ERR(expect_unlock("A/BB/f", baton.results, session, pool)); + SVN_ERR(expect_error("X/z", baton.results, session, pool)); + + return SVN_NO_ERROR; +} + +/* Test svn_ra_get_dir2(). 
*/ +static svn_error_t * +get_dir_test(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_ra_session_t *session; + apr_hash_t *dirents; + + SVN_ERR(make_and_open_repos(&session, "test-get-dir", opts, pool)); + SVN_ERR(commit_tree(session, pool)); + + /* This call used to block on ra-svn for 1.8.0...r1656713 */ + SVN_TEST_ASSERT_ERROR(svn_ra_get_dir2(session, &dirents, NULL, NULL, + "non/existing/relpath", 1, + SVN_DIRENT_KIND, pool), + SVN_ERR_FS_NOT_FOUND); + + return SVN_NO_ERROR; +} + /* Implements svn_commit_callback2_t for commit_callback_failure() */ static svn_error_t * commit_callback_with_failure(const svn_commit_info_t *info, @@ -162,11 +621,11 @@ commit_callback_with_failure(const svn_commit_info_t *info, apr_time_t timetemp; SVN_TEST_ASSERT(info != NULL); - SVN_TEST_STRING_ASSERT(info->author, ""); /* No auth baton supplied. */ + SVN_TEST_STRING_ASSERT(info->author, "jrandom"); SVN_TEST_STRING_ASSERT(info->post_commit_err, NULL); SVN_ERR(svn_time_from_cstring(&timetemp, info->date, scratch_pool)); - SVN_TEST_ASSERT(info->date != 0); + SVN_TEST_ASSERT(timetemp != 0); SVN_TEST_ASSERT(info->repos_root != NULL); SVN_TEST_ASSERT(info->revision == 1); @@ -181,13 +640,13 @@ commit_callback_failure(const svn_test_opts_t *opts, const svn_delta_editor_t *editor; void *edit_baton; void *root_baton; - SVN_ERR(make_and_open_local_repos(&ra_session, "commit_cb_failure", opts, pool)); + SVN_ERR(make_and_open_repos(&ra_session, "commit_cb_failure", opts, pool)); SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton, apr_hash_make(pool), commit_callback_with_failure, NULL, NULL, FALSE, pool)); - SVN_ERR(editor->open_root(edit_baton, 1, pool, &root_baton)); + SVN_ERR(editor->open_root(edit_baton, 0, pool, &root_baton)); SVN_ERR(editor->change_dir_prop(root_baton, "A", svn_string_create("B", pool), pool)); SVN_ERR(editor->close_directory(root_baton, pool)); @@ -200,14 +659,215 @@ commit_callback_failure(const svn_test_opts_t *opts, return SVN_NO_ERROR; 
} +static svn_error_t * +base_revision_above_youngest(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_ra_session_t *ra_session; + const svn_delta_editor_t *editor; + void *edit_baton; + void *root_baton; + svn_error_t *err; + SVN_ERR(make_and_open_repos(&ra_session, "base_revision_above_youngest", + opts, pool)); + + SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton, + apr_hash_make(pool), NULL, + NULL, NULL, FALSE, pool)); + + /* r1 doesn't exist, but we say we want to apply changes against this + revision to see how the ra layers behave. + + Some will see an error directly on open_root, others in a later + state. */ + + /* ra-local and http pre-v2 will see the error here */ + err = editor->open_root(edit_baton, 1, pool, &root_baton); + + if (!err) + err = editor->change_dir_prop(root_baton, "A", + svn_string_create("B", pool), pool); + + /* http v2 will notice it here (PROPPATCH) */ + if (!err) + err = editor->close_directory(root_baton, pool); + + /* ra svn only notes it at some later point. 
Typically here */ + if (!err) + err = editor->close_edit(edit_baton, pool); + + SVN_TEST_ASSERT_ERROR(err, + SVN_ERR_FS_NO_SUCH_REVISION); + + SVN_ERR(editor->abort_edit(edit_baton, pool)); + return SVN_NO_ERROR; +} + + +static svn_error_t * +ra_list_has_props(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_ra_session_t *ra_session; + const svn_delta_editor_t *editor; + apr_pool_t *iterpool = svn_pool_create(pool); + int i; + void *edit_baton; + const char *trunk_url; + + SVN_ERR(make_and_open_repos(&ra_session, "ra_list_has_props", + opts, pool)); + + SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton, + apr_hash_make(pool), NULL, + NULL, NULL, FALSE, iterpool)); + + /* Create initial layout*/ + { + void *root_baton; + void *dir_baton; + + SVN_ERR(editor->open_root(edit_baton, 0, pool, &root_baton)); + SVN_ERR(editor->add_directory("trunk", root_baton, NULL, SVN_INVALID_REVNUM, + iterpool, &dir_baton)); + SVN_ERR(editor->close_directory(dir_baton, iterpool)); + SVN_ERR(editor->add_directory("tags", root_baton, NULL, SVN_INVALID_REVNUM, + iterpool, &dir_baton)); + SVN_ERR(editor->close_directory(dir_baton, iterpool)); + SVN_ERR(editor->close_directory(root_baton, iterpool)); + SVN_ERR(editor->close_edit(edit_baton, iterpool)); + } + + SVN_ERR(svn_ra_get_repos_root2(ra_session, &trunk_url, pool)); + trunk_url = svn_path_url_add_component2(trunk_url, "trunk", pool); + + /* Create a few tags. 
Using a value like 8000 will take too long for a normal + testrun, but produces more realistic problems */ + for (i = 0; i < 50; i++) + { + void *root_baton; + void *tags_baton; + void *dir_baton; + + svn_pool_clear(iterpool); + + SVN_ERR(svn_ra_get_commit_editor3(ra_session, &editor, &edit_baton, + apr_hash_make(pool), NULL, + NULL, NULL, FALSE, iterpool)); + + SVN_ERR(editor->open_root(edit_baton, i+1, pool, &root_baton)); + SVN_ERR(editor->open_directory("tags", root_baton, i+1, iterpool, + &tags_baton)); + SVN_ERR(editor->add_directory(apr_psprintf(iterpool, "tags/T%05d", i+1), + tags_baton, trunk_url, 1, iterpool, + &dir_baton)); + + SVN_ERR(editor->close_directory(dir_baton, iterpool)); + SVN_ERR(editor->close_directory(tags_baton, iterpool)); + SVN_ERR(editor->close_directory(root_baton, iterpool)); + SVN_ERR(editor->close_edit(edit_baton, iterpool)); + } + + { + apr_hash_t *dirents; + svn_revnum_t fetched_rev; + apr_hash_t *props; + + SVN_ERR(svn_ra_get_dir2(ra_session, &dirents, &fetched_rev, &props, + "tags", SVN_INVALID_REVNUM, + SVN_DIRENT_ALL, pool)); + } + + return SVN_NO_ERROR; +} + +/* Test ra_svn tunnel editor handling, including polling. */ + +static svn_error_t * +tunnel_run_checkout(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + tunnel_baton_t *b = apr_pcalloc(pool, sizeof(*b)); + apr_pool_t *scratch_pool = svn_pool_create(pool); + const char *url; + svn_ra_callbacks2_t *cbtable; + svn_ra_session_t *session; + const char tunnel_repos_name[] = "test-run_checkout"; + const svn_ra_reporter3_t *reporter; + void *report_baton; + + b->magic = TUNNEL_MAGIC; + + SVN_ERR(svn_test__create_repos(NULL, tunnel_repos_name, opts, scratch_pool)); + + /* Immediately close the repository to avoid race condition with svnserve + (and then the cleanup code) with BDB when our pool is cleared. 
*/ + svn_pool_clear(scratch_pool); + + url = apr_pstrcat(pool, "svn+test://localhost/", tunnel_repos_name, + SVN_VA_NULL); + SVN_ERR(svn_ra_create_callbacks(&cbtable, pool)); + cbtable->check_tunnel_func = check_tunnel; + cbtable->open_tunnel_func = open_tunnel; + cbtable->tunnel_baton = b; + SVN_ERR(svn_cmdline_create_auth_baton2(&cbtable->auth_baton, + TRUE /* non_interactive */, + "jrandom", "rayjandom", + NULL, + TRUE /* no_auth_cache */, + FALSE /* trust_server_cert */, + FALSE, FALSE, FALSE, FALSE, + NULL, NULL, NULL, pool)); + + b->last_check = FALSE; + + SVN_ERR(svn_ra_open4(&session, NULL, url, NULL, cbtable, NULL, NULL, + scratch_pool)); + + SVN_ERR(commit_changes(session, pool)); + + SVN_ERR(svn_ra_do_update3(session, + &reporter, &report_baton, + 1, "", + svn_depth_infinity, FALSE, FALSE, + svn_delta_default_editor(pool), NULL, + pool, pool)); + + SVN_ERR(reporter->set_path(report_baton, "", 0, svn_depth_infinity, FALSE, + NULL, pool)); + + SVN_ERR(reporter->finish_report(report_baton, pool)); + + return SVN_NO_ERROR; +} + /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 2; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(location_segments_test, "test svn_ra_get_location_segments"), + SVN_TEST_OPTS_PASS(check_tunnel_callback_test, + "test ra_svn tunnel callback check"), + SVN_TEST_OPTS_PASS(tunnel_callback_test, + "test ra_svn tunnel creation callbacks"), + SVN_TEST_OPTS_PASS(lock_test, + "lock multiple paths"), + SVN_TEST_OPTS_PASS(get_dir_test, + "test ra_get_dir2"), SVN_TEST_OPTS_PASS(commit_callback_failure, "commit callback failure"), + SVN_TEST_OPTS_PASS(base_revision_above_youngest, + "base revision newer than youngest"), + SVN_TEST_OPTS_PASS(ra_list_has_props, + "check list has_props performance"), + SVN_TEST_OPTS_PASS(tunnel_run_checkout, + "verify checkout over a tunnel"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_ra_local/ra-local-test.c b/subversion/tests/libsvn_ra_local/ra-local-test.c index fc13ecc..98695bc 100644 --- a/subversion/tests/libsvn_ra_local/ra-local-test.c +++ b/subversion/tests/libsvn_ra_local/ra-local-test.c @@ -220,7 +220,7 @@ check_split_url(const char *repos_path, SVN_ERR(svn_uri_get_file_url_from_dirent(&root_url, repos_path, pool)); if (in_repos_path) - url = apr_pstrcat(pool, root_url, in_repos_path, (char *)NULL); + url = apr_pstrcat(pool, root_url, in_repos_path, SVN_VA_NULL); else url = root_url; @@ -279,7 +279,9 @@ split_url_test(const svn_test_opts_t *opts, #define HAS_UNC_HOST 0 #endif -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(open_ra_session, @@ -296,3 +298,5 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_ra_local__split_URL correctness"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_repos/dir-delta-editor.h b/subversion/tests/libsvn_repos/dir-delta-editor.h index 5e7e280..5c498f8 100644 --- 
a/subversion/tests/libsvn_repos/dir-delta-editor.h +++ b/subversion/tests/libsvn_repos/dir-delta-editor.h @@ -40,7 +40,18 @@ extern "C" { #endif /* __cplusplus */ - + +/* Get an editor that will edit an FS transaction via the libsvn_fs API. + * + * Set *EDITOR and *EDIT_BATON to a new editor that edits the subtree at + * PATH of the existing, open transaction TXN_ROOT in filesystem FS. + * + * Note: Related but more complex functions in the regular API include + * svn_fs__editor_create_for() and svn_repos_get_commit_editor*(). + * + * Note: The only connection with dir-deltas is that a test for dir-deltas + * was the first user of this editor. + */ svn_error_t * dir_delta_get_editor(const svn_delta_editor_t **editor, void **edit_baton, diff --git a/subversion/tests/libsvn_repos/dump-load-test.c b/subversion/tests/libsvn_repos/dump-load-test.c new file mode 100644 index 0000000..3b6118d --- /dev/null +++ b/subversion/tests/libsvn_repos/dump-load-test.c @@ -0,0 +1,289 @@ +/* dump-load-test.c --- tests for dumping and loading repositories + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + */ + +#include <stdlib.h> +#include <string.h> +#include <apr_pools.h> + +#include "svn_pools.h" +#include "svn_error.h" +#include "svn_fs.h" +#include "svn_repos.h" +#include "private/svn_repos_private.h" + +#include "../svn_test.h" +#include "../svn_test_fs.h" + + + +/* Test dumping in the presence of the property PROP_NAME:PROP_VAL. + * Return the dumped data in *DUMP_DATA_P (if DUMP_DATA_P is not null). + * REPOS is an empty repository. + * See svn_repos_dump_fs3() for START_REV, END_REV, NOTIFY_FUNC, NOTIFY_BATON. + */ +static svn_error_t * +test_dump_bad_props(svn_stringbuf_t **dump_data_p, + svn_repos_t *repos, + const char *prop_name, + const svn_string_t *prop_val, + svn_revnum_t start_rev, + svn_revnum_t end_rev, + svn_repos_notify_func_t notify_func, + void *notify_baton, + apr_pool_t *pool) +{ + const char *test_path = "/bar"; + svn_fs_t *fs = svn_repos_fs(repos); + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + svn_revnum_t youngest_rev = 0; + svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool); + svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool); + const char *expected_str; + + /* Revision 1: Any commit will do, here */ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, test_path , pool)); + SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); + + /* Revision 2: Add the bad property */ + SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_change_node_prop(txn_root, test_path , prop_name, prop_val, + pool)); + SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); + SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); + + /* Test that a dump completes without error. 
*/ + SVN_ERR(svn_repos_dump_fs3(repos, stream, start_rev, end_rev, + FALSE, FALSE, + notify_func, notify_baton, + NULL, NULL, + pool)); + svn_stream_close(stream); + + /* Check that the property appears in the dump data */ + expected_str = apr_psprintf(pool, "K %d\n%s\n" + "V %d\n%s\n" + "PROPS-END\n", + (int)strlen(prop_name), prop_name, + (int)prop_val->len, prop_val->data); + SVN_TEST_ASSERT(strstr(dump_data->data, expected_str)); + + if (dump_data_p) + *dump_data_p = dump_data; + return SVN_NO_ERROR; +} + +/* Test loading in the presence of the property PROP_NAME:PROP_VAL. + * Load data from DUMP_DATA. + * REPOS is an empty repository. + */ +static svn_error_t * +test_load_bad_props(svn_stringbuf_t *dump_data, + svn_repos_t *repos, + const char *prop_name, + const svn_string_t *prop_val, + const char *parent_fspath, + svn_boolean_t validate_props, + svn_repos_notify_func_t notify_func, + void *notify_baton, + apr_pool_t *pool) +{ + const char *test_path = apr_psprintf(pool, "%s%s", + parent_fspath ? parent_fspath : "", + "/bar"); + svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool); + svn_fs_t *fs; + svn_fs_root_t *rev_root; + svn_revnum_t youngest_rev; + svn_string_t *loaded_prop_val; + + SVN_ERR(svn_repos_load_fs5(repos, stream, + SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, + svn_repos_load_uuid_default, + parent_fspath, + FALSE, FALSE, /*use_*_commit_hook*/ + validate_props, + FALSE /*ignore_dates*/, + notify_func, notify_baton, + NULL, NULL, /*cancellation*/ + pool)); + svn_stream_close(stream); + + /* Check the loaded property */ + fs = svn_repos_fs(repos); + SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool)); + SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, pool)); + SVN_ERR(svn_fs_node_prop(&loaded_prop_val, + rev_root, test_path, prop_name, pool)); + SVN_TEST_ASSERT(svn_string_compare(loaded_prop_val, prop_val)); + return SVN_NO_ERROR; +} + +/* Notification receiver for test_dump_r0_mergeinfo(). 
This does not + need to do anything, it just needs to exist. + */ +static void +dump_r0_mergeinfo_notifier(void *baton, + const svn_repos_notify_t *notify, + apr_pool_t *scratch_pool) +{ +} + +/* Regression test for the 'dump' part of issue #4476 "Mergeinfo + containing r0 makes svnsync and svnadmin dump fail". */ +static svn_error_t * +test_dump_r0_mergeinfo(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + const char *prop_name = "svn:mergeinfo"; + const svn_string_t *bad_mergeinfo = svn_string_create("/foo:0", pool); + svn_repos_t *repos; + + SVN_ERR(svn_test__create_repos(&repos, "test-repo-dump-r0-mergeinfo", + opts, pool)); + /* In order to exercise the + functionality under test -- that is, in order for the dump to try to + parse the mergeinfo it is dumping -- the dump must start from a + revision greater than 1 and must take a notification callback. */ + SVN_ERR(test_dump_bad_props(NULL, repos, + prop_name, bad_mergeinfo, + 2, SVN_INVALID_REVNUM, + dump_r0_mergeinfo_notifier, NULL, + pool)); + + return SVN_NO_ERROR; +} + +static void +load_r0_mergeinfo_notifier(void *baton, + const svn_repos_notify_t *notify, + apr_pool_t *scratch_pool) +{ + svn_boolean_t *had_mergeinfo_warning = baton; + + if (notify->action == svn_repos_notify_warning) + { + if (notify->warning == svn_repos_notify_warning_invalid_mergeinfo) + { + *had_mergeinfo_warning = TRUE; + } + } +} + +/* Regression test for the 'load' part of issue #4476 "Mergeinfo + * containing r0 makes svnsync and svnadmin dump fail". + * + * Bad mergeinfo should not prevent loading a backup, at least when we do not + * require mergeinfo revision numbers or paths to be adjusted during loading. 
+ */ +static svn_error_t * +test_load_r0_mergeinfo(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + const char *prop_name = "svn:mergeinfo"; + const svn_string_t *prop_val = svn_string_create("/foo:0", pool); + svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool); + + /* Produce a dump file containing bad mergeinfo */ + { + svn_repos_t *repos; + + SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-1", + opts, pool)); + SVN_ERR(test_dump_bad_props(&dump_data, repos, + prop_name, prop_val, + SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, + NULL, NULL, pool)); + } + + /* Test loading without validating properties: should warn and succeed */ + { + svn_repos_t *repos; + svn_boolean_t had_mergeinfo_warning = FALSE; + + SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-2", + opts, pool)); + + /* Without changing revision numbers or paths */ + SVN_ERR(test_load_bad_props(dump_data, repos, + prop_name, prop_val, + NULL /*parent_dir*/, FALSE /*validate_props*/, + load_r0_mergeinfo_notifier, &had_mergeinfo_warning, + pool)); + SVN_TEST_ASSERT(had_mergeinfo_warning); + + /* With changing revision numbers and/or paths (by loading the same data + again, on top of existing revisions, into subdirectory 'bar') */ + had_mergeinfo_warning = FALSE; + SVN_ERR(test_load_bad_props(dump_data, repos, + prop_name, prop_val, + "/bar", FALSE /*validate_props*/, + load_r0_mergeinfo_notifier, &had_mergeinfo_warning, + pool)); + SVN_TEST_ASSERT(had_mergeinfo_warning); + } + + /* Test loading with validating properties: should return an error */ + { + svn_repos_t *repos; + + SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-3", + opts, pool)); + + /* Without changing revision numbers or paths */ + SVN_TEST_ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos, + prop_name, prop_val, + NULL /*parent_dir*/, TRUE /*validate_props*/, + NULL, NULL, + pool)); + + /* With changing revision numbers and/or paths (by loading the same data + again, on top of existing 
revisions, into subdirectory 'bar') */ + SVN_TEST_ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos, + prop_name, prop_val, + "/bar", TRUE /*validate_props*/, + NULL, NULL, + pool)); + } + + return SVN_NO_ERROR; +} + +/* The test table. */ + +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_OPTS_PASS(test_dump_r0_mergeinfo, + "test dumping with r0 mergeinfo"), + SVN_TEST_OPTS_PASS(test_load_r0_mergeinfo, + "test loading with r0 mergeinfo"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_repos/repos-test.c b/subversion/tests/libsvn_repos/repos-test.c index 87265f0..8208958 100644 --- a/subversion/tests/libsvn_repos/repos-test.c +++ b/subversion/tests/libsvn_repos/repos-test.c @@ -36,13 +36,15 @@ #include "svn_config.h" #include "svn_props.h" #include "svn_version.h" +#include "private/svn_repos_private.h" + +/* be able to look into svn_config_t */ +#include "../../libsvn_subr/config_impl.h" #include "../svn_test_fs.h" #include "dir-delta-editor.h" -#include "private/svn_repos_private.h" - /* Used to terminate lines in large multi-line string literals. */ #define NL APR_EOL_STR @@ -486,19 +488,19 @@ print_chrevs(const apr_array_header_t *revs_got, outstr = apr_pstrcat(pool, outstr, apr_psprintf(pool, "%ld ", rev), - (char *)NULL); + SVN_VA_NULL); } } - outstr = apr_pstrcat(pool, outstr, "} Expected: { ", (char *)NULL); + outstr = apr_pstrcat(pool, outstr, "} Expected: { ", SVN_VA_NULL); for (i = 0; i < num_revs_expected; i++) { outstr = apr_pstrcat(pool, outstr, apr_psprintf(pool, "%ld ", revs_expected[i]), - (char *)NULL); + SVN_VA_NULL); } - return apr_pstrcat(pool, outstr, "}", (char *)NULL); + return apr_pstrcat(pool, outstr, "}", SVN_VA_NULL); } @@ -1425,8 +1427,8 @@ in_repo_authz(const svn_test_opts_t *opts, * Create an authz file and put it in the repository. * Verify it can be read with an relative URL. * Verify it can be read with an absolute URL. 
- * Verify non-existant path does not error out when must_exist is FALSE. - * Verify non-existant path does error out when must_exist is TRUE. + * Verify non-existent path does not error out when must_exist is FALSE. + * Verify non-existent path does error out when must_exist is TRUE. * Verify that an http:// URL produces an error. * Verify that an svn:// URL produces an error. */ @@ -1456,18 +1458,18 @@ in_repo_authz(const svn_test_opts_t *opts, repos_root = svn_repos_path(repos, pool); SVN_ERR(svn_uri_get_file_url_from_dirent(&repos_url, repos_root, pool)); - authz_url = apr_pstrcat(pool, repos_url, "/authz", (char *)NULL); - noent_authz_url = apr_pstrcat(pool, repos_url, "/A/authz", (char *)NULL); + authz_url = svn_path_url_add_component2(repos_url, "authz", pool); + noent_authz_url = svn_path_url_add_component2(repos_url, "A/authz", pool); /* absolute file URL. */ SVN_ERR(svn_repos_authz_read2(&authz_cfg, authz_url, NULL, TRUE, pool)); SVN_ERR(authz_check_access(authz_cfg, test_set, pool)); - /* Non-existant path in the repo with must_exist set to FALSE */ + /* Non-existent path in the repo with must_exist set to FALSE */ SVN_ERR(svn_repos_authz_read2(&authz_cfg, noent_authz_url, NULL, FALSE, pool)); - /* Non-existant path in the repo with must_exist set to TRUE */ + /* Non-existent path in the repo with must_exist set to TRUE */ err = svn_repos_authz_read2(&authz_cfg, noent_authz_url, NULL, TRUE, pool); if (!err || err->apr_err != SVN_ERR_ILLEGAL_TARGET) return svn_error_createf(SVN_ERR_TEST_FAILED, err, @@ -1595,15 +1597,14 @@ in_repo_groups_authz(const svn_test_opts_t *opts, /* Calculate URLs */ repos_root = svn_repos_path(repos, pool); SVN_ERR(svn_uri_get_file_url_from_dirent(&repos_url, repos_root, pool)); - authz_url = apr_pstrcat(pool, repos_url, "/authz", (char *)NULL); - empty_authz_url = apr_pstrcat(pool, repos_url, "/empty-authz", (char *)NULL); - noent_authz_url = apr_pstrcat(pool, repos_url, "/A/authz", (char *)NULL); - groups_url = 
apr_pstrcat(pool, repos_url, "/groups", (char *)NULL); - noent_groups_url = apr_pstrcat(pool, repos_url, "/A/groups", (char *)NULL); + authz_url = svn_path_url_add_component2(repos_url, "authz", pool); + empty_authz_url = svn_path_url_add_component2(repos_url, "empty-authz", pool); + noent_authz_url = svn_path_url_add_component2(repos_url, "A/authz", pool); + groups_url = svn_path_url_add_component2(repos_url, "groups", pool); + noent_groups_url = svn_path_url_add_component2(repos_url, "A/groups", pool); /* absolute file URLs. */ - groups_url = apr_pstrcat(pool, repos_url, "/groups", (char *)NULL); SVN_ERR(svn_repos_authz_read2(&authz_cfg, authz_url, groups_url, TRUE, pool)); SVN_ERR(authz_check_access(authz_cfg, test_set, pool)); @@ -1656,7 +1657,7 @@ in_repo_groups_authz(const svn_test_opts_t *opts, /* Helper for the groups_authz test. Set *AUTHZ_P to a representation of - AUTHZ_CONTENTS in conjuction with GROUPS_CONTENTS, using POOL for + AUTHZ_CONTENTS in conjunction with GROUPS_CONTENTS, using POOL for temporary allocation. If DISK is TRUE then write the contents to temporary files and use svn_repos_authz_read2() to get the data if FALSE write the data to a buffered stream and use svn_repos_authz_parse(). */ @@ -1771,7 +1772,7 @@ groups_authz(const svn_test_opts_t *opts, * 2. Verify that access rights written in the global groups file are * discarded and affect nothing in authorization terms. * 3. Verify that local groups in the authz file are prohibited in - * conjuction with global groups (and that a configuration error is + * conjunction with global groups (and that a configuration error is * reported in this scenario). * 4. Ensure that group cycles in the global groups file are reported. * @@ -1827,7 +1828,7 @@ groups_authz(const svn_test_opts_t *opts, SVN_ERR(authz_check_access(authz_cfg, test_set2, pool)); - /* Local groups cannot be used in conjuction with global groups. */ + /* Local groups cannot be used in conjunction with global groups. 
*/ groups_contents = "[groups]" NL "slaves = maximus" NL @@ -2460,7 +2461,8 @@ node_location_segments(const svn_test_opts_t *opts, /* Bail (with success) on known-untestable scenarios */ if ((strcmp(opts->fs_type, "bdb") == 0) && (opts->server_minor_version == 4)) - return SVN_NO_ERROR; + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "not supported for BDB in SVN 1.4"); /* Create the repository. */ SVN_ERR(svn_test__create_repos(&repos, "test-repo-node-location-segments", @@ -2886,7 +2888,7 @@ log_receiver(void *baton, svn_log_entry_t *log_entry, apr_pool_t *pool) { - int *count = baton; + svn_revnum_t *count = baton; (*count)++; return SVN_NO_ERROR; } @@ -2944,13 +2946,18 @@ get_logs(const svn_test_opts_t *opts, svn_revnum_t end_arg = end ? end : SVN_INVALID_REVNUM; svn_revnum_t eff_start = start ? start : youngest_rev; svn_revnum_t eff_end = end ? end : youngest_rev; - int limit, max_logs = + int limit; + svn_revnum_t max_logs = MAX(eff_start, eff_end) + 1 - MIN(eff_start, eff_end); - int num_logs; + svn_revnum_t num_logs; + /* this may look like it can get in an infinite loop if max_logs + * ended up being larger than the size limit can represent. It + * can't because a negative limit will end up failing to match + * the existing number of logs. */ for (limit = 0; limit <= max_logs; limit++) { - int num_expected = limit ? 
limit : max_logs; svn_pool_clear(subpool); num_logs = 0; @@ -2961,9 +2968,9 @@ get_logs(const svn_test_opts_t *opts, if (num_logs != num_expected) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "Log with start=%ld,end=%ld,limit=%d " - "returned %d entries (expected %d)", + "returned %ld entries (expected %ld)", start_arg, end_arg, limit, - num_logs, max_logs); + num_logs, num_expected); } } } @@ -3059,6 +3066,11 @@ test_get_file_revs(const svn_test_opts_t *opts, apr_hash_set(ht_reverse_results, &trunk_results[i].rev, sizeof(svn_revnum_t), &trunk_results[i]); + /* Check for feature support */ + if (opts->server_minor_version && (opts->server_minor_version < 5)) + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "not supported in pre-1.5 SVN"); + /* Create the repository and verify blame results. */ SVN_ERR(svn_test__create_blame_repository(&repos, "test-repo-get-filerevs", opts, subpool)); @@ -3167,7 +3179,26 @@ test_delete_repos(const svn_test_opts_t *opts, return SVN_NO_ERROR; } -/* Related to issue 4340, "fs layer should reject filenames with trailing \n" */ +/* Prepare a commit for the filename_with_control_chars() tests */ +static svn_error_t * +fwcc_prepare(const svn_delta_editor_t **editor_p, + void **edit_baton_p, + void **root_baton, + svn_repos_t *repos, + apr_pool_t *scratch_pool) +{ + /* Checks for control characters are implemented in the commit editor, + * not in the FS API. 
*/ + SVN_ERR(svn_repos_get_commit_editor4(editor_p, edit_baton_p, repos, + NULL, "file://test", "/", + "plato", "test commit", + dummy_commit_cb, NULL, NULL, NULL, + scratch_pool)); + SVN_ERR((*editor_p)->open_root(*edit_baton_p, 1, scratch_pool, root_baton)); + return SVN_NO_ERROR; +} + +/* Related to issue 4340, "filenames containing \n corrupt FSFS repositories" */ static svn_error_t * filename_with_control_chars(const svn_test_opts_t *opts, apr_pool_t *pool) @@ -3206,17 +3237,6 @@ filename_with_control_chars(const svn_test_opts_t *opts, SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); svn_pool_clear(subpool); - /* Checks for control characters are implemented in the commit editor, - * not in the FS API. */ - SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool)); - SVN_ERR(svn_repos_get_commit_editor4(&editor, &edit_baton, repos, - txn, "file://test", "/", - "plato", "test commit", - dummy_commit_cb, NULL, NULL, NULL, - pool)); - - SVN_ERR(editor->open_root(edit_baton, 1, pool, &root_baton)); - /* Attempt to copy /foo to a bad path P. This should fail. 
*/ i = 0; do @@ -3225,8 +3245,13 @@ filename_with_control_chars(const svn_test_opts_t *opts, if (p == NULL) break; svn_pool_clear(subpool); + + SVN_ERR(fwcc_prepare(&editor, &edit_baton, &root_baton, repos, subpool)); err = editor->add_directory(p, root_baton, "/foo", 1, subpool, &out_baton); + if (!err) + err = editor->close_edit(edit_baton, subpool); + svn_error_clear(editor->abort_edit(edit_baton, subpool)); SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); } while (p); @@ -3238,8 +3263,13 @@ filename_with_control_chars(const svn_test_opts_t *opts, if (p == NULL) break; svn_pool_clear(subpool); + + SVN_ERR(fwcc_prepare(&editor, &edit_baton, &root_baton, repos, subpool)); err = editor->add_file(p, root_baton, NULL, SVN_INVALID_REVNUM, subpool, &out_baton); + if (!err) + err = editor->close_edit(edit_baton, subpool); + svn_error_clear(editor->abort_edit(edit_baton, subpool)); SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); } while (p); @@ -3252,281 +3282,576 @@ filename_with_control_chars(const svn_test_opts_t *opts, if (p == NULL) break; svn_pool_clear(subpool); + + SVN_ERR(fwcc_prepare(&editor, &edit_baton, &root_baton, repos, subpool)); err = editor->add_directory(p, root_baton, NULL, SVN_INVALID_REVNUM, subpool, &out_baton); + if (!err) + err = editor->close_edit(edit_baton, subpool); + svn_error_clear(editor->abort_edit(edit_baton, subpool)); SVN_TEST_ASSERT_ERROR(err, SVN_ERR_FS_PATH_SYNTAX); } while (p); - SVN_ERR(editor->abort_edit(edit_baton, subpool)); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_repos_info(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_repos_t *repos; + svn_test_opts_t opts2; + apr_hash_t *capabilities; + svn_version_t *supports_version; + svn_version_t v1_0_0 = {1, 0, 0, ""}; + svn_version_t v1_4_0 = {1, 4, 0, ""}; + int repos_format; + svn_boolean_t is_fsx = strcmp(opts->fs_type, "fsx") == 0; + + opts2 = *opts; + + /* for repo types that have been around before 1.4 */ + if (!is_fsx) + { + 
opts2.server_minor_version = 3; + SVN_ERR(svn_test__create_repos(&repos, "test-repo-info-3", + &opts2, pool)); + SVN_ERR(svn_repos_capabilities(&capabilities, repos, pool, pool)); + SVN_TEST_ASSERT(apr_hash_count(capabilities) == 0); + SVN_ERR(svn_repos_info_format(&repos_format, &supports_version, repos, + pool, pool)); + SVN_TEST_ASSERT(repos_format == 3); + SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_0_0)); + } + + opts2.server_minor_version = 9; + SVN_ERR(svn_test__create_repos(&repos, "test-repo-info-9", + &opts2, pool)); + SVN_ERR(svn_repos_capabilities(&capabilities, repos, pool, pool)); + SVN_TEST_ASSERT(apr_hash_count(capabilities) == 1); + SVN_TEST_ASSERT(svn_hash_gets(capabilities, SVN_REPOS_CAPABILITY_MERGEINFO)); + SVN_ERR(svn_repos_info_format(&repos_format, &supports_version, repos, + pool, pool)); + SVN_TEST_ASSERT(repos_format == 5); + SVN_TEST_ASSERT(svn_ver_equal(supports_version, &v1_4_0)); return SVN_NO_ERROR; } - -/* Notification receiver for test_dump_bad_mergeinfo(). This does not - need to do anything, it just needs to exist. - */ -static void -dump_r0_mergeinfo_notifier(void *baton, - const svn_repos_notify_t *notify, - apr_pool_t *scratch_pool) +static svn_error_t * +test_config_pool(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + const char *repo_name = "test-repo-config-pool"; + svn_repos_t *repos; + svn_stringbuf_t *cfg_buffer1, *cfg_buffer2; + svn_config_t *cfg; + apr_hash_t *sections1, *sections2; + int i; + svn_fs_txn_t *txn; + svn_fs_root_t *root, *rev_root; + svn_revnum_t rev; + const char *repo_root_url; + const char *srcdir; + svn_error_t *err; + + svn_repos__config_pool_t *config_pool; + apr_pool_t *config_pool_pool; + apr_pool_t *subpool = svn_pool_create(pool); + + const char *wrk_dir = svn_test_data_path("config_pool", pool); + + SVN_ERR(svn_io_make_dir_recursively(wrk_dir, pool)); + + /* read all config info through a single config pool and we want to be + able to control its lifetime. 
The latter requires a separate pool. */ + config_pool_pool = svn_pool_create(pool); + SVN_ERR(svn_repos__config_pool_create(&config_pool, TRUE, + config_pool_pool)); + + /* have two different configurations */ + SVN_ERR(svn_test_get_srcdir(&srcdir, opts, pool)); + SVN_ERR(svn_stringbuf_from_file2( + &cfg_buffer1, + svn_dirent_join(srcdir, + "../libsvn_subr/config-test.cfg", + pool), + pool)); + cfg_buffer2 = svn_stringbuf_dup(cfg_buffer1, pool); + svn_stringbuf_appendcstr(cfg_buffer2, "\n[more]\nU=\"X\"\n"); + + /* write them to 2x2 files */ + SVN_ERR(svn_io_write_atomic(svn_dirent_join(wrk_dir, + "config-pool-test1.cfg", + pool), + cfg_buffer1->data, cfg_buffer1->len, NULL, + pool)); + SVN_ERR(svn_io_write_atomic(svn_dirent_join(wrk_dir, + "config-pool-test2.cfg", + pool), + cfg_buffer1->data, cfg_buffer1->len, NULL, + pool)); + SVN_ERR(svn_io_write_atomic(svn_dirent_join(wrk_dir, + "config-pool-test3.cfg", + pool), + cfg_buffer2->data, cfg_buffer2->len, NULL, + pool)); + SVN_ERR(svn_io_write_atomic(svn_dirent_join(wrk_dir, + "config-pool-test4.cfg", + pool), + cfg_buffer2->data, cfg_buffer2->len, NULL, + pool)); + + /* requesting a config over and over again should return the same + (even though it is not being referenced) */ + sections1 = NULL; + for (i = 0; i < 4; ++i) + { + SVN_ERR(svn_repos__config_pool_get( + &cfg, NULL, config_pool, + svn_dirent_join(wrk_dir, + "config-pool-test1.cfg", + pool), + TRUE, TRUE, NULL, subpool)); + + if (sections1 == NULL) + sections1 = cfg->sections; + else + SVN_TEST_ASSERT(cfg->sections == sections1); + + svn_pool_clear(subpool); + } + + /* requesting the same config from another file should return the same + (even though it is not being referenced) */ + for (i = 0; i < 4; ++i) + { + SVN_ERR(svn_repos__config_pool_get( + &cfg, NULL, config_pool, + svn_dirent_join(wrk_dir, + "config-pool-test2.cfg", + pool), + TRUE, TRUE, NULL, subpool)); + + SVN_TEST_ASSERT(cfg->sections == sections1); + + svn_pool_clear(subpool); + } + + /* 
reading a different configuration should return a different pointer */ + sections2 = NULL; + for (i = 0; i < 2; ++i) + { + SVN_ERR(svn_repos__config_pool_get( + &cfg, NULL, config_pool, + svn_dirent_join(wrk_dir, + "config-pool-test3.cfg", + pool), + TRUE, TRUE, NULL, subpool)); + + if (sections2 == NULL) + sections2 = cfg->sections; + else + SVN_TEST_ASSERT(cfg->sections == sections2); + + SVN_TEST_ASSERT(sections1 != sections2); + svn_pool_clear(subpool); + } + + /* create an in-repo config */ + SVN_ERR(svn_dirent_get_absolute(&repo_root_url, repo_name, pool)); + SVN_ERR(svn_uri_get_file_url_from_dirent(&repo_root_url, repo_root_url, + pool)); + + SVN_ERR(svn_test__create_repos(&repos, repo_name, opts, pool)); + SVN_ERR(svn_fs_begin_txn2(&txn, svn_repos_fs(repos), 0, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_dir(root, "dir", pool)); + SVN_ERR(svn_fs_make_file(root, "dir/config", pool)); + SVN_ERR(svn_test__set_file_contents(root, "dir/config", + cfg_buffer1->data, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* reading the config from the repo should still give cfg1 */ + SVN_ERR(svn_repos__config_pool_get(&cfg, NULL, config_pool, + svn_path_url_add_component2( + repo_root_url, + "dir/config", pool), + TRUE, TRUE, NULL, subpool)); + SVN_TEST_ASSERT(cfg->sections == sections1); + svn_pool_clear(subpool); + + /* create another in-repo config */ + SVN_ERR(svn_fs_begin_txn2(&txn, svn_repos_fs(repos), rev, 0, pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_revision_root(&rev_root, svn_repos_fs(repos), rev, pool)); + SVN_ERR(svn_fs_copy(rev_root, "dir", root, "another-dir", pool)); + SVN_ERR(svn_test__set_file_contents(root, "dir/config", + cfg_buffer2->data, pool)); + SVN_ERR(svn_fs_commit_txn(NULL, &rev, txn, pool)); + + /* reading the config from the repo should give cfg2 now */ + SVN_ERR(svn_repos__config_pool_get(&cfg, NULL, config_pool, + svn_path_url_add_component2( + repo_root_url, + 
"dir/config", pool), + TRUE, TRUE, NULL, subpool)); + SVN_TEST_ASSERT(cfg->sections == sections2); + svn_pool_clear(subpool); + + /* reading the copied config should still give cfg1 */ + SVN_ERR(svn_repos__config_pool_get(&cfg, NULL, config_pool, + svn_path_url_add_component2( + repo_root_url, + "another-dir/config", + pool), + TRUE, TRUE, NULL, subpool)); + SVN_TEST_ASSERT(cfg->sections == sections1); + svn_pool_clear(subpool); + + /* once again: repeated reads. This triggers a different code path. */ + SVN_ERR(svn_repos__config_pool_get(&cfg, NULL, config_pool, + svn_path_url_add_component2( + repo_root_url, + "dir/config", pool), + TRUE, TRUE, NULL, subpool)); + SVN_TEST_ASSERT(cfg->sections == sections2); + SVN_ERR(svn_repos__config_pool_get(&cfg, NULL, config_pool, + svn_path_url_add_component2( + repo_root_url, + "another-dir/config", + pool), + TRUE, TRUE, NULL, subpool)); + SVN_TEST_ASSERT(cfg->sections == sections1); + svn_pool_clear(subpool); + + /* access paths that don't exist */ + SVN_TEST_ASSERT_ERROR(svn_repos__config_pool_get(&cfg, NULL, config_pool, + svn_path_url_add_component2(repo_root_url, "X", + pool), + TRUE, TRUE, NULL, subpool), + SVN_ERR_ILLEGAL_TARGET); + err = svn_repos__config_pool_get(&cfg, NULL, config_pool, "X.cfg", + TRUE, TRUE, NULL, subpool); + SVN_TEST_ASSERT(err && APR_STATUS_IS_ENOENT(err->apr_err)); + svn_error_clear(err); + svn_pool_clear(subpool); + + return SVN_NO_ERROR; +} + + +static svn_error_t * +test_repos_fs_type(const svn_test_opts_t *opts, + apr_pool_t *pool) { + svn_repos_t *repos; + + /* Create test repository. */ + SVN_ERR(svn_test__create_repos(&repos, "test-repo-repos_fs_type", + opts, pool)); + + SVN_TEST_STRING_ASSERT(svn_repos_fs_type(repos, pool), opts->fs_type); + + /* Re-open repository and verify fs-type again. 
*/ + SVN_ERR(svn_repos_open3(&repos, svn_repos_path(repos, pool), NULL, + pool, pool)); + + SVN_TEST_STRING_ASSERT(svn_repos_fs_type(repos, pool), opts->fs_type); + + return SVN_NO_ERROR; } -/* Regression test for part the 'dump' part of issue #4476 "Mergeinfo - containing r0 makes svnsync and svnadmin dump fail". */ static svn_error_t * -test_dump_r0_mergeinfo(const svn_test_opts_t *opts, - apr_pool_t *pool) +deprecated_access_context_api(const svn_test_opts_t *opts, + apr_pool_t *pool) { svn_repos_t *repos; - svn_fs_t *fs; + svn_fs_access_t *access; svn_fs_txn_t *txn; - svn_fs_root_t *txn_root; - svn_revnum_t youngest_rev = 0; - const svn_string_t *bad_mergeinfo = svn_string_create("/foo:0", pool); + svn_fs_root_t *root; + const char *conflict; + svn_revnum_t new_rev; + const char *hook; - SVN_ERR(svn_test__create_repos(&repos, "test-repo-dump-r0-mergeinfo", + /* Create test repository. */ + SVN_ERR(svn_test__create_repos(&repos, + "test-repo-deprecated-access-context-api", opts, pool)); - fs = svn_repos_fs(repos); - /* Revision 1: Any commit will do, here */ - SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool)); - SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); - SVN_ERR(svn_fs_make_dir(txn_root, "/bar", pool)); - SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); - SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); + /* Set an empty pre-commit hook. 
*/ +#ifdef WIN32 + hook = apr_pstrcat(pool, svn_repos_pre_commit_hook(repos, pool), ".bat", + SVN_VA_NULL); + SVN_ERR(svn_io_file_create(hook, + "exit 0" APR_EOL_STR, + pool)); +#else + hook = svn_repos_pre_commit_hook(repos, pool); + SVN_ERR(svn_io_file_create(hook, + "#!/bin/sh" APR_EOL_STR "exit 0" APR_EOL_STR, + pool)); + SVN_ERR(svn_io_set_file_executable(hook, TRUE, FALSE, pool)); +#endif - /* Revision 2: Add bad mergeinfo */ - SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool)); - SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); - SVN_ERR(svn_fs_change_node_prop(txn_root, "/bar", "svn:mergeinfo", bad_mergeinfo, pool)); - SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); - SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); + /* Set some access context using svn_fs_access_add_lock_token(). */ + SVN_ERR(svn_fs_create_access(&access, "jrandom", pool)); + SVN_ERR(svn_fs_access_add_lock_token(access, "opaquelocktoken:abc")); + SVN_ERR(svn_fs_set_access(svn_repos_fs(repos), access)); - /* Test that a dump completes without error. In order to exercise the - functionality under test -- that is, in order for the dump to try to - parse the mergeinfo it is dumping -- the dump must start from a - revision greater than 1 and must take a notification callback. */ - { - svn_stringbuf_t *stringbuf = svn_stringbuf_create_empty(pool); - svn_stream_t *stream = svn_stream_from_stringbuf(stringbuf, pool); - - SVN_ERR(svn_repos_dump_fs3(repos, stream, 2, SVN_INVALID_REVNUM, - FALSE, FALSE, - dump_r0_mergeinfo_notifier, NULL, - NULL, NULL, - pool)); - } + /* Commit a new revision. 
*/ + SVN_ERR(svn_repos_fs_begin_txn_for_commit2(&txn, repos, 0, + apr_hash_make(pool), pool)); + SVN_ERR(svn_fs_txn_root(&root, txn, pool)); + SVN_ERR(svn_fs_make_dir(root, "/whatever", pool)); + SVN_ERR(svn_repos_fs_commit_txn(&conflict, repos, &new_rev, txn, pool)); + + SVN_TEST_STRING_ASSERT(conflict, NULL); + SVN_TEST_ASSERT(new_rev == 1); return SVN_NO_ERROR; } - -/* Test dumping in the presence of the property PROP_NAME:PROP_VAL. - * Return the dumped data in *DUMP_DATA_P (if DUMP_DATA_P is not null). - * REPOS is an empty repository. - * See svn_repos_dump_fs3() for START_REV, END_REV, NOTIFY_FUNC, NOTIFY_BATON. - */ static svn_error_t * -test_dump_bad_props(svn_stringbuf_t **dump_data_p, - svn_repos_t *repos, - const char *prop_name, - const svn_string_t *prop_val, - svn_revnum_t start_rev, - svn_revnum_t end_rev, - svn_repos_notify_func_t notify_func, - void *notify_baton, - apr_pool_t *pool) +mkdir_delete_copy(svn_repos_t *repos, + const char *src, + const char *dst, + apr_pool_t *pool) { - const char *test_path = "/bar"; svn_fs_t *fs = svn_repos_fs(repos); + svn_revnum_t youngest_rev; svn_fs_txn_t *txn; - svn_fs_root_t *txn_root; - svn_revnum_t youngest_rev = 0; - svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool); - svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool); - const char *expected_str; + svn_fs_root_t *txn_root, *rev_root; - /* Revision 1: Any commit will do, here */ - SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool)); + SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool)); SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); - SVN_ERR(svn_fs_make_dir(txn_root, test_path , pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "A/T", pool)); + SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); + + SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_delete(txn_root, "A/T", 
pool)); SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); - SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); - /* Revision 2: Add the bad property */ - SVN_ERR(svn_fs_begin_txn2(&txn, fs, youngest_rev, 0, pool)); + SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool)); SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); - SVN_ERR(svn_fs_change_node_prop(txn_root, test_path , prop_name, prop_val, - pool)); + SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev - 1, pool)); + SVN_ERR(svn_fs_copy(rev_root, src, txn_root, dst, pool)); SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); - SVN_TEST_ASSERT(SVN_IS_VALID_REVNUM(youngest_rev)); - /* Test that a dump completes without error. */ - SVN_ERR(svn_repos_dump_fs3(repos, stream, start_rev, end_rev, - FALSE, FALSE, - notify_func, notify_baton, - NULL, NULL, - pool)); - svn_stream_close(stream); - - /* Check that the property appears in the dump data */ - expected_str = apr_psprintf(pool, "K %d\n%s\n" - "V %d\n%s\n" - "PROPS-END\n", - (int)strlen(prop_name), prop_name, - (int)prop_val->len, prop_val->data); - SVN_TEST_ASSERT(strstr(dump_data->data, expected_str)); - - if (dump_data_p) - *dump_data_p = dump_data; return SVN_NO_ERROR; } -/* Test loading in the presence of the property PROP_NAME:PROP_VAL. - * Load data from DUMP_DATA. - * REPOS is an empty repository. - */ +struct authz_read_baton_t { + apr_hash_t *paths; + apr_pool_t *pool; + const char *deny; +}; + static svn_error_t * -test_load_bad_props(svn_stringbuf_t *dump_data, - svn_repos_t *repos, - const char *prop_name, - const svn_string_t *prop_val, - const char *parent_fspath, - svn_boolean_t validate_props, - svn_repos_notify_func_t notify_func, - void *notify_baton, - apr_pool_t *pool) +authz_read_func(svn_boolean_t *allowed, + svn_fs_root_t *root, + const char *path, + void *baton, + apr_pool_t *pool) { - const char *test_path = apr_psprintf(pool, "%s%s", - parent_fspath ? 
parent_fspath : "", - "/bar"); - svn_stream_t *stream = svn_stream_from_stringbuf(dump_data, pool); - svn_fs_t *fs; - svn_fs_root_t *rev_root; - svn_revnum_t youngest_rev; - svn_string_t *loaded_prop_val; - - SVN_ERR(svn_repos_load_fs4(repos, stream, - SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, - svn_repos_load_uuid_default, - parent_fspath, - FALSE, FALSE, /*use_*_commit_hook*/ - validate_props, - notify_func, notify_baton, - NULL, NULL, /*cancellation*/ - pool)); - svn_stream_close(stream); + struct authz_read_baton_t *b = baton; + + if (b->deny && !strcmp(b->deny, path)) + *allowed = FALSE; + else + *allowed = TRUE; + + svn_hash_sets(b->paths, apr_pstrdup(b->pool, path), (void*)1); - /* Check the loaded property */ - fs = svn_repos_fs(repos); - SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool)); - SVN_ERR(svn_fs_revision_root(&rev_root, fs, youngest_rev, pool)); - SVN_ERR(svn_fs_node_prop(&loaded_prop_val, - rev_root, test_path, prop_name, pool)); - SVN_TEST_ASSERT(svn_string_compare(loaded_prop_val, prop_val)); return SVN_NO_ERROR; } -static void -load_r0_mergeinfo_notifier(void *baton, - const svn_repos_notify_t *notify, - apr_pool_t *scratch_pool) +static svn_error_t * +verify_locations(apr_hash_t *actual, + apr_hash_t *expected, + apr_hash_t *checked, + apr_pool_t *pool) { - svn_boolean_t *had_mergeinfo_warning = baton; + apr_hash_index_t *hi; - if (notify->action == svn_repos_notify_warning) + for (hi = apr_hash_first(pool, expected); hi; hi = apr_hash_next(hi)) { - if (notify->warning == svn_repos__notify_warning_invalid_mergeinfo) - { - *had_mergeinfo_warning = TRUE; - } + const svn_revnum_t *rev = apr_hash_this_key(hi); + const char *path = apr_hash_get(actual, rev, sizeof(svn_revnum_t)); + + if (!path) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "expected %s for %d found (null)", + (char*)apr_hash_this_val(hi), (int)*rev); + else if (strcmp(path, apr_hash_this_val(hi))) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "expected %s for 
%d found %s", + (char*)apr_hash_this_val(hi), (int)*rev, path); + } + + for (hi = apr_hash_first(pool, actual); hi; hi = apr_hash_next(hi)) + { + const svn_revnum_t *rev = apr_hash_this_key(hi); + const char *path = apr_hash_get(expected, rev, sizeof(svn_revnum_t)); + + if (!path) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "found %s for %d expected (null)", + (char*)apr_hash_this_val(hi), (int)*rev); + else if (strcmp(path, apr_hash_this_val(hi))) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "found %s for %d expected %s", + (char*)apr_hash_this_val(hi), (int)*rev, path); + + if (!svn_hash_gets(checked, path)) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "did not check %s", path); + } + + return SVN_NO_ERROR; +} + +static void +set_expected(apr_hash_t *expected, + svn_revnum_t rev, + const char *path, + apr_pool_t *pool) +{ + svn_revnum_t *rp = apr_palloc(pool, sizeof(svn_revnum_t)); + *rp = rev; + apr_hash_set(expected, rp, sizeof(svn_revnum_t), path); } -/* Regression test for the 'load' part of issue #4476 "Mergeinfo - * containing r0 makes svnsync and svnadmin dump fail". - * - * Bad mergeinfo should not prevent loading a backup, at least when we do not - * require mergeinfo revision numbers or paths to be adjusted during loading. 
- */ static svn_error_t * -test_load_r0_mergeinfo(const svn_test_opts_t *opts, - apr_pool_t *pool) +trace_node_locations_authz(const svn_test_opts_t *opts, + apr_pool_t *pool) { - const char *prop_name = "svn:mergeinfo"; - const svn_string_t *prop_val = svn_string_create("/foo:0", pool); - svn_stringbuf_t *dump_data = svn_stringbuf_create_empty(pool); + svn_repos_t *repos; + svn_fs_t *fs; + svn_revnum_t youngest_rev = 0; + svn_fs_txn_t *txn; + svn_fs_root_t *txn_root; + struct authz_read_baton_t arb; + apr_array_header_t *revs = apr_array_make(pool, 10, sizeof(svn_revnum_t)); + apr_hash_t *locations; + apr_hash_t *expected = apr_hash_make(pool); + int i; - /* Produce a dump file containing bad mergeinfo */ - { - svn_repos_t *repos; + /* Create test repository. */ + SVN_ERR(svn_test__create_repos(&repos, "test-repo-trace-node-locations-authz", + opts, pool)); + fs = svn_repos_fs(repos); - SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-1", - opts, pool)); - SVN_ERR(test_dump_bad_props(&dump_data, repos, - prop_name, prop_val, - SVN_INVALID_REVNUM, SVN_INVALID_REVNUM, - NULL, NULL, pool)); - } + /* r1 create A */ + SVN_ERR(svn_fs_begin_txn(&txn, fs, youngest_rev, pool)); + SVN_ERR(svn_fs_txn_root(&txn_root, txn, pool)); + SVN_ERR(svn_fs_make_dir(txn_root, "A", pool)); + SVN_ERR(svn_fs_make_file(txn_root, "A/f", pool)); + SVN_ERR(svn_test__set_file_contents(txn_root, "A/f", "foobar", pool)); + SVN_ERR(svn_repos_fs_commit_txn(NULL, repos, &youngest_rev, txn, pool)); - /* Test loading without validating properties: should warn and succeed */ - { - svn_repos_t *repos; - svn_boolean_t had_mergeinfo_warning = FALSE; - - SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-2", - opts, pool)); - - /* Without changing revision numbers or paths */ - SVN_ERR(test_load_bad_props(dump_data, repos, - prop_name, prop_val, - NULL /*parent_dir*/, FALSE /*validate_props*/, - load_r0_mergeinfo_notifier, &had_mergeinfo_warning, - pool)); - 
SVN_TEST_ASSERT(had_mergeinfo_warning); - - /* With changing revision numbers and/or paths (by loading the same data - again, on top of existing revisions, into subdirectory 'bar') */ - had_mergeinfo_warning = FALSE; - SVN_ERR(test_load_bad_props(dump_data, repos, - prop_name, prop_val, - "/bar", FALSE /*validate_props*/, - load_r0_mergeinfo_notifier, &had_mergeinfo_warning, - pool)); - SVN_TEST_ASSERT(had_mergeinfo_warning); - } + /* r4 copy A to B */ + SVN_ERR(mkdir_delete_copy(repos, "A", "B", pool)); - /* Test loading with validating properties: should return an error */ - { - svn_repos_t *repos; + /* r7 copy B to C */ + SVN_ERR(mkdir_delete_copy(repos, "B", "C", pool)); - SVN_ERR(svn_test__create_repos(&repos, "test-repo-load-r0-mi-3", - opts, pool)); - - /* Without changing revision numbers or paths */ - SVN_TEST__ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos, - prop_name, prop_val, - NULL /*parent_dir*/, TRUE /*validate_props*/, - NULL, NULL, - pool)); - - /* With changing revision numbers and/or paths (by loading the same data - again, on top of existing revisions, into subdirectory 'bar') */ - SVN_TEST__ASSERT_ANY_ERROR(test_load_bad_props(dump_data, repos, - prop_name, prop_val, - "/bar", TRUE /*validate_props*/, - NULL, NULL, - pool)); - } + /* r10 copy C to D */ + SVN_ERR(mkdir_delete_copy(repos, "C", "D", pool)); + + SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool)); + SVN_ERR_ASSERT(youngest_rev == 10); + + arb.paths = apr_hash_make(pool); + arb.pool = pool; + arb.deny = NULL; + + apr_array_clear(revs); + for (i = 0; i <= youngest_rev; ++i) + APR_ARRAY_PUSH(revs, svn_revnum_t) = i; + set_expected(expected, 10, "/D/f", pool); + set_expected(expected, 8, "/C/f", pool); + set_expected(expected, 7, "/C/f", pool); + set_expected(expected, 5, "/B/f", pool); + set_expected(expected, 4, "/B/f", pool); + set_expected(expected, 2, "/A/f", pool); + set_expected(expected, 1, "/A/f", pool); + apr_hash_clear(arb.paths); + 
SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); + + apr_array_clear(revs); + for (i = 1; i <= youngest_rev; ++i) + APR_ARRAY_PUSH(revs, svn_revnum_t) = i; + apr_hash_clear(arb.paths); + SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); + + apr_array_clear(revs); + for (i = 2; i <= youngest_rev; ++i) + APR_ARRAY_PUSH(revs, svn_revnum_t) = i; + set_expected(expected, 1, NULL, pool); + apr_hash_clear(arb.paths); + SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); + + apr_array_clear(revs); + for (i = 3; i <= youngest_rev; ++i) + APR_ARRAY_PUSH(revs, svn_revnum_t) = i; + set_expected(expected, 2, NULL, pool); + apr_hash_clear(arb.paths); + SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); + + apr_array_clear(revs); + for (i = 6; i <= youngest_rev; ++i) + APR_ARRAY_PUSH(revs, svn_revnum_t) = i; + set_expected(expected, 5, NULL, pool); + set_expected(expected, 4, NULL, pool); + apr_hash_clear(arb.paths); + SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); + + arb.deny = "/B/f"; + apr_array_clear(revs); + for (i = 0; i <= youngest_rev; ++i) + APR_ARRAY_PUSH(revs, svn_revnum_t) = i; + apr_hash_clear(arb.paths); + SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); + + apr_array_clear(revs); + for (i = 6; i <= youngest_rev; ++i) + APR_ARRAY_PUSH(revs, 
svn_revnum_t) = i; + apr_hash_clear(arb.paths); + SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); + + APR_ARRAY_PUSH(revs, svn_revnum_t) = 0; + apr_hash_clear(arb.paths); + SVN_ERR(svn_repos_trace_node_locations(fs, &locations, "D/f", 10, revs, + authz_read_func, &arb, pool)); + SVN_ERR(verify_locations(locations, expected, arb.paths, pool)); return SVN_NO_ERROR; } /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(dir_deltas, @@ -3569,9 +3894,17 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_repos_delete"), SVN_TEST_OPTS_PASS(filename_with_control_chars, "test filenames with control characters"), - SVN_TEST_OPTS_PASS(test_dump_r0_mergeinfo, - "test dumping with r0 mergeinfo"), - SVN_TEST_OPTS_PASS(test_load_r0_mergeinfo, - "test loading with r0 mergeinfo"), + SVN_TEST_OPTS_PASS(test_repos_info, + "test svn_repos_info_*"), + SVN_TEST_OPTS_PASS(test_config_pool, + "test svn_repos__config_pool_*"), + SVN_TEST_OPTS_PASS(test_repos_fs_type, + "test test_repos_fs_type"), + SVN_TEST_OPTS_PASS(deprecated_access_context_api, + "test deprecated access context api"), + SVN_TEST_OPTS_PASS(trace_node_locations_authz, + "authz for svn_repos_trace_node_locations"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/auth-test.c b/subversion/tests/libsvn_subr/auth-test.c index 9d7d866..bbe7933 100644 --- a/subversion/tests/libsvn_subr/auth-test.c +++ b/subversion/tests/libsvn_subr/auth-test.c @@ -63,13 +63,14 @@ test_platform_specific_auth_providers(apr_pool_t *pool) number_of_providers += 2; #endif #if defined(WIN32) && !defined(__MINGW32__) - number_of_providers += 2; + number_of_providers += 4; #endif if (providers->nelts != number_of_providers) return svn_error_createf 
(SVN_ERR_TEST_FAILED, NULL, "svn_auth_get_platform_specific_client_providers should return " \ - "an array of %d providers", number_of_providers); + "an array of %d providers, but returned %d providers", + number_of_providers, providers->nelts); /* Test Keychain auth providers */ #ifdef SVN_HAVE_KEYCHAIN_SERVICES @@ -219,8 +220,8 @@ cleanup_callback(svn_boolean_t *delete_cred, { svn_auth_baton_t *b = walk_baton; - SVN_TEST_ASSERT(strcmp(cred_kind, SVN_AUTH_CRED_SIMPLE) == 0); - SVN_TEST_ASSERT(strcmp(realmstring, "<http://my.host> My realm") == 0); + SVN_TEST_STRING_ASSERT(cred_kind, SVN_AUTH_CRED_SIMPLE); + SVN_TEST_STRING_ASSERT(realmstring, "<http://my.host> My realm"); SVN_ERR(svn_auth_forget_credentials(b, cred_kind, realmstring, scratch_pool)); @@ -271,7 +272,7 @@ test_auth_clear(apr_pool_t *pool) pool)); creds = credentials; - SVN_TEST_ASSERT(strcmp(creds->username, "jrandom") == 0); + SVN_TEST_STRING_ASSERT(creds->username, "jrandom"); SVN_TEST_ASSERT(creds->may_save); /* And tell that they are ok and can be saved */ @@ -291,7 +292,7 @@ test_auth_clear(apr_pool_t *pool) SVN_TEST_ASSERT(credentials); creds = credentials; - SVN_TEST_ASSERT(strcmp(creds->username, "jrandom") == 0); + SVN_TEST_STRING_ASSERT(creds->username, "jrandom"); SVN_TEST_ASSERT(creds->may_save); /* Use our walker function to delete credentials (and forget them @@ -314,18 +315,16 @@ test_auth_clear(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_platform_specific_auth_providers, "test retrieving platform-specific auth providers"), -#ifndef SVN_DISABLE_PLAINTEXT_PASSWORD_STORAGE SVN_TEST_PASS2(test_auth_clear, "test svn_auth_clear()"), -#else - SVN_TEST_WIMP(test_auth_clear, - "test svn_auth_clear()", - "Needs testing with SVN_DISABLE_PLAINTEXT_PASSWORD_STORAGE"), -#endif SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/bit-array-test.c b/subversion/tests/libsvn_subr/bit-array-test.c new file mode 100644 index 0000000..e6fe528 --- /dev/null +++ b/subversion/tests/libsvn_subr/bit-array-test.c @@ -0,0 +1,140 @@ +/* + * bit-array-test.c: a collection of svn_bit_array__* tests + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +/* ==================================================================== + To add tests, look toward the bottom of this file. 
+ +*/ + + + +#include <stdio.h> +#include <string.h> +#include <apr_pools.h> + +#include "../svn_test.h" + +#include "svn_error.h" +#include "svn_string.h" /* This includes <apr_*.h> */ +#include "private/svn_subr_private.h" + +static svn_error_t * +test_zero_defaults(apr_pool_t *pool) +{ + svn_bit_array__t *array = svn_bit_array__create(0, pool); + + /* Test (default) allocation boundaries */ + SVN_TEST_ASSERT(svn_bit_array__get(array, 0x7ffff) == 0); + SVN_TEST_ASSERT(svn_bit_array__get(array, 0x80000) == 0); + + /* Test address boundaries */ + SVN_TEST_ASSERT(svn_bit_array__get(array, 0) == 0); + SVN_TEST_ASSERT(svn_bit_array__get(array, APR_SIZE_MAX) == 0); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_get_set(apr_pool_t *pool) +{ + svn_bit_array__t *array = svn_bit_array__create(0, pool); + apr_size_t i, min = 0x7ff00, max = 0x7ff00 + 1025; + + /* All values default to 0. */ + for (i = min; i < max; ++i) + SVN_TEST_ASSERT(svn_bit_array__get(array, i) == 0); + + /* Create a pattern, setting every other bit. Array will also auto-grow. */ + for (i = min; i < max; ++i) + if (i % 2) + svn_bit_array__set(array, i, 1); + + /* Verify pattern */ + for (i = min; i < max; ++i) + SVN_TEST_ASSERT(svn_bit_array__get(array, i) == i % 2); + + /* Zero the zeros in the pattern -> should be no change. */ + for (i = min; i < max; ++i) + if (i % 2 == 0) + svn_bit_array__set(array, i, 0); + + /* Verify pattern */ + for (i = min; i < max; ++i) + SVN_TEST_ASSERT(svn_bit_array__get(array, i) == i % 2); + + /* Write an inverted pattern while verifying the old one. 
*/ + for (i = min; i < max; ++i) + { + SVN_TEST_ASSERT(svn_bit_array__get(array, i) == i % 2); + svn_bit_array__set(array, i, 1 - (i % 2)); + } + + /* Verify pattern */ + for (i = min; i < max; ++i) + SVN_TEST_ASSERT(svn_bit_array__get(array, i) == 1 - (i % 2)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_sparse(apr_pool_t *pool) +{ + svn_bit_array__t *array = svn_bit_array__create(0, pool); + apr_size_t i, k, min = 0x7ff00, max = 0x7ff00 + 1025, SCALE = 0x10000000; + + /* All values default to 0. */ + for (i = 0; i < 15; ++i) + for (k = i * SCALE + min; k < i * SCALE + max; ++k) + SVN_TEST_ASSERT(svn_bit_array__get(array, k) == 0); + + /* Create a pattern, setting every other bit. Array will also auto-grow. */ + for (i = 0; i < 15; ++i) + for (k = i * SCALE + min; k < i * SCALE + max; ++k) + if (k % 2) + svn_bit_array__set(array, k, 1); + + /* Verify pattern */ + for (i = 0; i < 15; ++i) + for (k = i * SCALE + min; k < i * SCALE + max; ++k) + SVN_TEST_ASSERT(svn_bit_array__get(array, k) == k % 2); + + return SVN_NO_ERROR; +} + +/* An array of all test functions */ + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(test_zero_defaults, + "check entries to default to zero"), + SVN_TEST_PASS2(test_get_set, + "get / set entries"), + SVN_TEST_PASS2(test_sparse, + "get / set sparse entries"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/cache-test.c b/subversion/tests/libsvn_subr/cache-test.c index 9e7a7b3..1616441 100644 --- a/subversion/tests/libsvn_subr/cache-test.c +++ b/subversion/tests/libsvn_subr/cache-test.c @@ -157,7 +157,8 @@ test_memcache_basic(const svn_test_opts_t *opts, { SVN_ERR(svn_config_read3(&config, opts->config_file, TRUE, FALSE, FALSE, pool)); - SVN_ERR(svn_cache__make_memcache_from_config(&memcache, config, pool)); + SVN_ERR(svn_cache__make_memcache_from_config(&memcache, config, + pool, pool)); } if (! 
memcache) @@ -193,8 +194,9 @@ test_membuffer_cache_basic(apr_pool_t *pool) deserialize_revnum, APR_HASH_KEY_STRING, "cache:", + SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY, FALSE, - pool)); + pool, pool)); return basic_cache_test(cache, FALSE, pool); } @@ -220,6 +222,16 @@ raise_error_partial_getter_func(void **out, return svn_error_create(APR_EGENERAL, NULL, NULL); } +/* Implements svn_cache__partial_setter_func_t */ +static svn_error_t * +raise_error_partial_setter_func(void **data, + apr_size_t *data_len, + void *baton, + apr_pool_t *result_pool) +{ + return svn_error_create(APR_EGENERAL, NULL, NULL); +} + static svn_error_t * test_membuffer_serializer_error_handling(apr_pool_t *pool) { @@ -239,8 +251,9 @@ test_membuffer_serializer_error_handling(apr_pool_t *pool) raise_error_deserialize_func, APR_HASH_KEY_STRING, "cache:", + SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY, FALSE, - pool)); + pool, pool)); SVN_ERR(svn_cache__set(cache, "twenty", &twenty, pool)); @@ -258,6 +271,30 @@ test_membuffer_serializer_error_handling(apr_pool_t *pool) NULL, pool), APR_EGENERAL); + /* Create a new cache. */ + SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0, + TRUE, TRUE, pool)); + SVN_ERR(svn_cache__create_membuffer_cache(&cache, + membuffer, + serialize_revnum, + deserialize_revnum, + APR_HASH_KEY_STRING, + "cache:", + SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY, + FALSE, + pool, pool)); + + /* Store one entry in cache. */ + SVN_ERR(svn_cache__set(cache, "twenty", &twenty, pool)); + + /* Test setting data in cache using partial setter that + always raises an error. 
*/ + SVN_TEST_ASSERT_ERROR( + svn_cache__set_partial(cache, "twenty", + raise_error_partial_setter_func, + NULL, pool), + APR_EGENERAL); + return SVN_NO_ERROR; } @@ -286,7 +323,8 @@ test_memcache_long_key(const svn_test_opts_t *opts, { SVN_ERR(svn_config_read3(&config, opts->config_file, TRUE, FALSE, FALSE, pool)); - SVN_ERR(svn_cache__make_memcache_from_config(&memcache, config, pool)); + SVN_ERR(svn_cache__make_memcache_from_config(&memcache, config, + pool, pool)); } if (! memcache) @@ -316,10 +354,80 @@ test_memcache_long_key(const svn_test_opts_t *opts, return SVN_NO_ERROR; } +static svn_error_t * +test_membuffer_cache_clearing(apr_pool_t *pool) +{ + svn_cache__t *cache; + svn_membuffer_t *membuffer; + svn_boolean_t found; + svn_revnum_t *value; + svn_revnum_t valueA = 12345; + svn_revnum_t valueB = 67890; + + /* Create a simple cache for strings, keyed by strings. */ + SVN_ERR(svn_cache__membuffer_cache_create(&membuffer, 10*1024, 1, 0, + TRUE, TRUE, pool)); + SVN_ERR(svn_cache__create_membuffer_cache(&cache, + membuffer, + serialize_revnum, + deserialize_revnum, + APR_HASH_KEY_STRING, + "cache:", + SVN_CACHE__MEMBUFFER_DEFAULT_PRIORITY, + FALSE, + pool, pool)); + + /* Initially, the cache is empty. */ + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key A", pool)); + SVN_TEST_ASSERT(!found); + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key B", pool)); + SVN_TEST_ASSERT(!found); + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key C", pool)); + SVN_TEST_ASSERT(!found); + + /* Add entries. */ + SVN_ERR(svn_cache__set(cache, "key A", &valueA, pool)); + SVN_ERR(svn_cache__set(cache, "key B", &valueB, pool)); + + /* Added entries should be cached (too small to get evicted already). 
*/ + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key A", pool)); + SVN_TEST_ASSERT(found); + SVN_TEST_ASSERT(*value == valueA); + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key B", pool)); + SVN_TEST_ASSERT(found); + SVN_TEST_ASSERT(*value == valueB); + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key C", pool)); + SVN_TEST_ASSERT(!found); + + /* Clear the cache. */ + SVN_ERR(svn_cache__membuffer_clear(membuffer)); + + /* The cache is empty again. */ + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key A", pool)); + SVN_TEST_ASSERT(!found); + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key B", pool)); + SVN_TEST_ASSERT(!found); + SVN_ERR(svn_cache__get((void **) &value, &found, cache, "key C", pool)); + SVN_TEST_ASSERT(!found); + + /* But still functional: */ + SVN_ERR(svn_cache__set(cache, "key B", &valueB, pool)); + SVN_ERR(svn_cache__has_key(&found, cache, "key A", pool)); + SVN_TEST_ASSERT(!found); + SVN_ERR(svn_cache__has_key(&found, cache, "key B", pool)); + SVN_TEST_ASSERT(found); + SVN_ERR(svn_cache__has_key(&found, cache, "key C", pool)); + SVN_TEST_ASSERT(!found); + + return SVN_NO_ERROR; +} + /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_inprocess_cache_basic, @@ -332,5 +440,9 @@ struct svn_test_descriptor_t test_funcs[] = "basic membuffer svn_cache test"), SVN_TEST_PASS2(test_membuffer_serializer_error_handling, "test for error handling in membuffer svn_cache"), + SVN_TEST_PASS2(test_membuffer_cache_clearing, + "test clearing a membuffer svn_cache"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/checksum-test.c b/subversion/tests/libsvn_subr/checksum-test.c index c5e2409..4c6d993 100644 --- a/subversion/tests/libsvn_subr/checksum-test.c +++ b/subversion/tests/libsvn_subr/checksum-test.c @@ -27,38 +27,47 @@ #include "svn_error.h" #include "svn_io.h" -#include "private/svn_pseudo_md5.h" #include "../svn_test.h" +/* Verify that DIGEST of checksum type KIND can be parsed and + * converted back to a string matching DIGEST. NAME will be used + * to identify the type of checksum in error messages. 
+ */ static svn_error_t * -test_checksum_parse(apr_pool_t *pool) +checksum_parse_kind(const char *digest, + svn_checksum_kind_t kind, + const char *name, + apr_pool_t *pool) { - const char *md5_digest = "8518b76f7a45fe4de2d0955085b41f98"; - const char *sha1_digest = "74d82379bcc6771454377db03b912c2b62704139"; const char *checksum_display; svn_checksum_t *checksum; - SVN_ERR(svn_checksum_parse_hex(&checksum, svn_checksum_md5, md5_digest, pool)); + SVN_ERR(svn_checksum_parse_hex(&checksum, kind, digest, pool)); checksum_display = svn_checksum_to_cstring_display(checksum, pool); - if (strcmp(checksum_display, md5_digest) != 0) + if (strcmp(checksum_display, digest) != 0) return svn_error_createf (SVN_ERR_CHECKSUM_MISMATCH, NULL, - "verify-checksum: md5 checksum mismatch:\n" + "verify-checksum: %s checksum mismatch:\n" " expected: %s\n" - " actual: %s\n", md5_digest, checksum_display); + " actual: %s\n", name, digest, checksum_display); - SVN_ERR(svn_checksum_parse_hex(&checksum, svn_checksum_sha1, sha1_digest, - pool)); - checksum_display = svn_checksum_to_cstring_display(checksum, pool); + return SVN_NO_ERROR; +} - if (strcmp(checksum_display, sha1_digest) != 0) - return svn_error_createf - (SVN_ERR_CHECKSUM_MISMATCH, NULL, - "verify-checksum: sha1 checksum mismatch:\n" - " expected: %s\n" - " actual: %s\n", sha1_digest, checksum_display); +static svn_error_t * +test_checksum_parse(apr_pool_t *pool) +{ + SVN_ERR(checksum_parse_kind("8518b76f7a45fe4de2d0955085b41f98", + svn_checksum_md5, "md5", pool)); + SVN_ERR(checksum_parse_kind("74d82379bcc6771454377db03b912c2b62704139", + svn_checksum_sha1, "sha1", pool)); + SVN_ERR(checksum_parse_kind("deadbeef", + svn_checksum_fnv1a_32, "fnv-1a", pool)); + SVN_ERR(checksum_parse_kind("cafeaffe", + svn_checksum_fnv1a_32x4, + "modified fnv-1a", pool)); return SVN_NO_ERROR; } @@ -66,94 +75,97 @@ test_checksum_parse(apr_pool_t *pool) static svn_error_t * test_checksum_empty(apr_pool_t *pool) { - svn_checksum_t *checksum; - char 
data = '\0'; - - checksum = svn_checksum_empty_checksum(svn_checksum_md5, pool); - SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum)); - - checksum = svn_checksum_empty_checksum(svn_checksum_sha1, pool); - SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum)); + svn_checksum_kind_t kind; + for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind) + { + svn_checksum_t *checksum; + char data = '\0'; - SVN_ERR(svn_checksum(&checksum, svn_checksum_md5, &data, 0, pool)); - SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum)); + checksum = svn_checksum_empty_checksum(kind, pool); + SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum)); - SVN_ERR(svn_checksum(&checksum, svn_checksum_sha1, &data, 0, pool)); - SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum)); + SVN_ERR(svn_checksum(&checksum, kind, &data, 0, pool)); + SVN_TEST_ASSERT(svn_checksum_is_empty_checksum(checksum)); + } return SVN_NO_ERROR; } +/* Verify that "zero" checksums work properly for the given checksum KIND. 
+ */ static svn_error_t * -test_pseudo_md5(apr_pool_t *pool) +zero_match_kind(svn_checksum_kind_t kind, apr_pool_t *pool) { - apr_uint32_t input[16] = { 0 }; - apr_uint32_t digest_15[4] = { 0 }; - apr_uint32_t digest_31[4] = { 0 }; - apr_uint32_t digest_63[4] = { 0 }; - svn_checksum_t *checksum; + svn_checksum_t *zero; + svn_checksum_t *A; + svn_checksum_t *B; - /* input is all 0s but the hash shall be different - (due to different input sizes)*/ - svn__pseudo_md5_15(digest_15, input); - svn__pseudo_md5_31(digest_31, input); - svn__pseudo_md5_63(digest_63, input); + zero = svn_checksum_create(kind, pool); + SVN_ERR(svn_checksum_clear(zero)); + SVN_ERR(svn_checksum(&A, kind, "A", 1, pool)); + SVN_ERR(svn_checksum(&B, kind, "B", 1, pool)); - SVN_TEST_ASSERT(memcmp(digest_15, digest_31, sizeof(digest_15))); - SVN_TEST_ASSERT(memcmp(digest_15, digest_63, sizeof(digest_15))); - SVN_TEST_ASSERT(memcmp(digest_31, digest_63, sizeof(digest_15))); + /* Different non-zero don't match. */ + SVN_TEST_ASSERT(!svn_checksum_match(A, B)); - /* the checksums shall also be different from "proper" MD5 */ - SVN_ERR(svn_checksum(&checksum, svn_checksum_md5, input, 15, pool)); - SVN_TEST_ASSERT(memcmp(digest_15, checksum->digest, sizeof(digest_15))); + /* Zero matches anything of the same kind. 
*/ + SVN_TEST_ASSERT(svn_checksum_match(A, zero)); + SVN_TEST_ASSERT(svn_checksum_match(zero, B)); - SVN_ERR(svn_checksum(&checksum, svn_checksum_md5, input, 31, pool)); - SVN_TEST_ASSERT(memcmp(digest_31, checksum->digest, sizeof(digest_15))); + return SVN_NO_ERROR; +} - SVN_ERR(svn_checksum(&checksum, svn_checksum_md5, input, 63, pool)); - SVN_TEST_ASSERT(memcmp(digest_63, checksum->digest, sizeof(digest_15))); +static svn_error_t * +zero_match(apr_pool_t *pool) +{ + svn_checksum_kind_t kind; + for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind) + SVN_ERR(zero_match_kind(kind, pool)); return SVN_NO_ERROR; } static svn_error_t * -zero_match(apr_pool_t *pool) +zero_cross_match(apr_pool_t *pool) { - svn_checksum_t *zero_md5; - svn_checksum_t *zero_sha1; - svn_checksum_t *A_md5; - svn_checksum_t *B_md5; - svn_checksum_t *A_sha1; - svn_checksum_t *B_sha1; + svn_checksum_kind_t i_kind; + svn_checksum_kind_t k_kind; + for (i_kind = svn_checksum_md5; + i_kind <= svn_checksum_fnv1a_32x4; + ++i_kind) + { + svn_checksum_t *i_zero; + svn_checksum_t *i_A; - zero_md5 = svn_checksum_create(svn_checksum_md5, pool); - SVN_ERR(svn_checksum_clear(zero_md5)); - SVN_ERR(svn_checksum(&A_md5, svn_checksum_md5, "A", 1, pool)); - SVN_ERR(svn_checksum(&B_md5, svn_checksum_md5, "B", 1, pool)); + i_zero = svn_checksum_create(i_kind, pool); + SVN_ERR(svn_checksum_clear(i_zero)); + SVN_ERR(svn_checksum(&i_A, i_kind, "A", 1, pool)); - zero_sha1 = svn_checksum_create(svn_checksum_sha1, pool); - SVN_ERR(svn_checksum_clear(zero_sha1)); - SVN_ERR(svn_checksum(&A_sha1, svn_checksum_sha1, "A", 1, pool)); - SVN_ERR(svn_checksum(&B_sha1, svn_checksum_sha1, "B", 1, pool)); + for (k_kind = svn_checksum_md5; + k_kind <= svn_checksum_fnv1a_32x4; + ++k_kind) + { + svn_checksum_t *k_zero; + svn_checksum_t *k_A; + if (i_kind == k_kind) + continue; - /* Different non-zero don't match. 
*/ - SVN_TEST_ASSERT(!svn_checksum_match(A_md5, B_md5)); - SVN_TEST_ASSERT(!svn_checksum_match(A_sha1, B_sha1)); - SVN_TEST_ASSERT(!svn_checksum_match(A_md5, A_sha1)); - SVN_TEST_ASSERT(!svn_checksum_match(A_md5, B_sha1)); + k_zero = svn_checksum_create(k_kind, pool); + SVN_ERR(svn_checksum_clear(k_zero)); + SVN_ERR(svn_checksum(&k_A, k_kind, "A", 1, pool)); - /* Zero matches anything of the same kind. */ - SVN_TEST_ASSERT(svn_checksum_match(A_md5, zero_md5)); - SVN_TEST_ASSERT(svn_checksum_match(zero_md5, B_md5)); - SVN_TEST_ASSERT(svn_checksum_match(A_sha1, zero_sha1)); - SVN_TEST_ASSERT(svn_checksum_match(zero_sha1, B_sha1)); + /* Different non-zero don't match. */ + SVN_TEST_ASSERT(!svn_checksum_match(i_A, k_A)); + + /* Zero doesn't match anything of a different kind... */ + SVN_TEST_ASSERT(!svn_checksum_match(i_zero, k_A)); + SVN_TEST_ASSERT(!svn_checksum_match(i_A, k_zero)); - /* Zero doesn't match anything of a different kind... */ - SVN_TEST_ASSERT(!svn_checksum_match(zero_md5, A_sha1)); - SVN_TEST_ASSERT(!svn_checksum_match(zero_sha1, A_md5)); - /* ...even another zero. */ - SVN_TEST_ASSERT(!svn_checksum_match(zero_md5, zero_sha1)); + /* ...even another zero. 
*/ + SVN_TEST_ASSERT(!svn_checksum_match(i_zero, k_zero)); + } + } return SVN_NO_ERROR; } @@ -163,12 +175,14 @@ zlib_expansion_test(const svn_test_opts_t *opts, apr_pool_t *pool) { const char *data_path; + const char *srcdir; svn_stringbuf_t *deflated; Byte dst_buffer[256 * 1024]; Byte *src_buffer; - apr_size_t sz; + uInt sz; - data_path = svn_dirent_join(opts->srcdir, "zlib.deflated", pool); + SVN_ERR(svn_test_get_srcdir(&srcdir, opts, pool)); + data_path = svn_dirent_join(srcdir, "zlib.deflated", pool); SVN_ERR(svn_stringbuf_from_file2(&deflated, data_path, pool)); src_buffer = (Byte*)deflated->data; @@ -177,6 +191,7 @@ zlib_expansion_test(const svn_test_opts_t *opts, for (sz = 1; sz < 256; sz++) { z_stream stream; + uLong crc = crc32(0, Z_NULL, 0); memset(&stream, 0, sizeof(stream)); inflateInit2(&stream, -15 /* DEFLATE_WINDOW_SIZE */); @@ -193,12 +208,15 @@ zlib_expansion_test(const svn_test_opts_t *opts, { return svn_error_createf( SVN_ERR_TEST_FAILED, NULL, - "Failure decompressing with blocksize %d", (int)sz); + "Failure decompressing with blocksize %u", sz); } + crc = crc32(crc, dst_buffer, sizeof(dst_buffer) - stream.avail_out); + stream.avail_out = sizeof(dst_buffer); + stream.next_out = dst_buffer; stream.avail_in += sz; } while (stream.next_in + stream.avail_in < src_buffer + deflated->len); - stream.avail_in = (src_buffer + deflated->len) - stream.next_in; + stream.avail_in = (uInt) (deflated->len - stream.total_in); { int zr = inflate(&stream, Z_NO_FLUSH); @@ -207,8 +225,9 @@ zlib_expansion_test(const svn_test_opts_t *opts, { return svn_error_createf( SVN_ERR_TEST_FAILED, NULL, - "Final flush failed with blocksize %d", (int)sz); + "Final flush failed with blocksize %u", sz); } + crc = crc32(crc, dst_buffer, sizeof(dst_buffer) - stream.avail_out); zr = inflateEnd(&stream); @@ -216,42 +235,65 @@ zlib_expansion_test(const svn_test_opts_t *opts, { return svn_error_createf( SVN_ERR_TEST_FAILED, NULL, - "End of stream handling failed with blocksize %d", - 
(int)sz); + "End of stream handling failed with blocksize %u", + sz); } } - { - apr_uint32_t crc = crc32(0, dst_buffer, stream.total_out); - - if (stream.total_out != 242014 || crc != 0x8f03d934) - { - return svn_error_createf( - SVN_ERR_TEST_FAILED, NULL, - "Decompressed data doesn't match expected size or crc with " - "blocksize %d: Found crc32=0x%08x, size=%d.\n" - "Verify your ZLib installation, as this should never happen", - (int)sz, (unsigned)crc, (int)stream.total_out); - } - } + if (stream.total_out != 242014 || crc != 0x8f03d934) + { + return svn_error_createf( + SVN_ERR_TEST_FAILED, NULL, + "Decompressed data doesn't match expected size or crc with " + "blocksize %u: Found crc32=0x%08lx, size=%lu.\n" + "Verify your ZLib installation, as this should never happen", + sz, crc, stream.total_out); + } } return SVN_NO_ERROR; } +static svn_error_t * +test_serialization(apr_pool_t *pool) +{ + svn_checksum_kind_t kind; + for (kind = svn_checksum_md5; kind <= svn_checksum_fnv1a_32x4; ++kind) + { + const svn_checksum_t *parsed_checksum; + svn_checksum_t *checksum = svn_checksum_empty_checksum(kind, pool); + const char *serialized = svn_checksum_serialize(checksum, pool, pool); + + SVN_ERR(svn_checksum_deserialize(&parsed_checksum, serialized, pool, + pool)); + + SVN_TEST_ASSERT(parsed_checksum->kind == kind); + SVN_TEST_ASSERT(svn_checksum_match(checksum, parsed_checksum)); + } + + return SVN_NO_ERROR; +} + /* An array of all test functions */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_checksum_parse, "checksum parse"), SVN_TEST_PASS2(test_checksum_empty, "checksum emptiness"), - SVN_TEST_PASS2(test_pseudo_md5, - "pseudo-md5 compatibility"), SVN_TEST_PASS2(zero_match, "zero checksum matching"), SVN_TEST_OPTS_PASS(zlib_expansion_test, "zlib expansion test (zlib regression)"), + SVN_TEST_PASS2(zero_cross_match, + "zero checksum cross-type 
matching"), + SVN_TEST_PASS2(test_serialization, + "checksum (de-)serialization"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/compat-test.c b/subversion/tests/libsvn_subr/compat-test.c index 9ff8099..8414847 100644 --- a/subversion/tests/libsvn_subr/compat-test.c +++ b/subversion/tests/libsvn_subr/compat-test.c @@ -209,7 +209,10 @@ test_version_at_least(apr_pool_t *pool) } /* An array of all test functions */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_version_compatibility, @@ -220,3 +223,5 @@ struct svn_test_descriptor_t test_funcs[] = "svn_version__at_least"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/config-test.c b/subversion/tests/libsvn_subr/config-test.c index 8938457..919893e 100644 --- a/subversion/tests/libsvn_subr/config-test.c +++ b/subversion/tests/libsvn_subr/config-test.c @@ -33,48 +33,14 @@ #include <apr_getopt.h> #include <apr_pools.h> +#include "svn_dirent_uri.h" #include "svn_error.h" #include "svn_config.h" +#include "private/svn_subr_private.h" #include "../svn_test.h" -/* Initialize parameters for the tests. */ -extern int test_argc; -extern const char **test_argv; - -static const apr_getopt_option_t opt_def[] = - { - {"srcdir", 'S', 1, "the source directory for VPATH test runs"}, - {0, 0, 0, 0} - }; -static const char *srcdir = NULL; - -static svn_error_t *init_params(apr_pool_t *pool) -{ - apr_getopt_t *opt; - int optch; - const char *opt_arg; - apr_status_t status; - - apr_getopt_init(&opt, pool, test_argc, test_argv); - while (!(status = apr_getopt_long(opt, opt_def, &optch, &opt_arg))) - { - switch (optch) - { - case 'S': - srcdir = opt_arg; - break; - } - } - - if (!srcdir) - return svn_error_create(SVN_ERR_TEST_FAILED, 0, - "missing required parameter '--srcdir'"); - - return SVN_NO_ERROR; -} - /* A quick way to create error messages. 
*/ static svn_error_t * fail(apr_pool_t *pool, const char *fmt, ...) @@ -89,6 +55,18 @@ fail(apr_pool_t *pool, const char *fmt, ...) return svn_error_create(SVN_ERR_TEST_FAILED, SVN_NO_ERROR, msg); } +static svn_error_t * +get_config_file_path(const char **cfg_file, + const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + const char *srcdir; + + SVN_ERR(svn_test_get_srcdir(&srcdir, opts, pool)); + *cfg_file = svn_dirent_join(srcdir, "config-test.cfg", pool); + + return SVN_NO_ERROR; +} static const char *config_keys[] = { "foo", "a", "b", "c", "d", "e", "f", "g", "h", "i", NULL }; @@ -99,16 +77,14 @@ static const char *config_values[] = { "bar", "Aa", "100", "bar", "Aa 100", NULL }; static svn_error_t * -test_text_retrieval(apr_pool_t *pool) +test_text_retrieval(const svn_test_opts_t *opts, + apr_pool_t *pool) { svn_config_t *cfg; int i; const char *cfg_file; - if (!srcdir) - SVN_ERR(init_params(pool)); - - cfg_file = apr_pstrcat(pool, srcdir, "/", "config-test.cfg", (char *)NULL); + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, FALSE, FALSE, pool)); /* Test values retrieved from our ConfigParser instance against @@ -150,16 +126,14 @@ static const char *false_keys[] = {"false1", "false2", "false3", "false4", NULL}; static svn_error_t * -test_boolean_retrieval(apr_pool_t *pool) +test_boolean_retrieval(const svn_test_opts_t *opts, + apr_pool_t *pool) { svn_config_t *cfg; int i; const char *cfg_file; - if (!srcdir) - SVN_ERR(init_params(pool)); - - cfg_file = apr_pstrcat(pool, srcdir, "/", "config-test.cfg", (char *)NULL); + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, FALSE, FALSE, pool)); for (i = 0; true_keys[i] != NULL; i++) @@ -211,15 +185,13 @@ test_boolean_retrieval(apr_pool_t *pool) } static svn_error_t * -test_has_section_case_insensitive(apr_pool_t *pool) +test_has_section_case_insensitive(const svn_test_opts_t *opts, + apr_pool_t *pool) { 
svn_config_t *cfg; const char *cfg_file; - if (!srcdir) - SVN_ERR(init_params(pool)); - - cfg_file = apr_pstrcat(pool, srcdir, "/", "config-test.cfg", (char *)NULL); + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, FALSE, FALSE, pool)); if (! svn_config_has_section(cfg, "section1")) @@ -241,15 +213,13 @@ test_has_section_case_insensitive(apr_pool_t *pool) } static svn_error_t * -test_has_section_case_sensitive(apr_pool_t *pool) +test_has_section_case_sensitive(const svn_test_opts_t *opts, + apr_pool_t *pool) { svn_config_t *cfg; const char *cfg_file; - if (!srcdir) - SVN_ERR(init_params(pool)); - - cfg_file = apr_pstrcat(pool, srcdir, "/", "config-test.cfg", (char *)NULL); + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, FALSE, pool)); if (! svn_config_has_section(cfg, "section1")) @@ -271,7 +241,8 @@ test_has_section_case_sensitive(apr_pool_t *pool) } static svn_error_t * -test_has_option_case_sensitive(apr_pool_t *pool) +test_has_option_case_sensitive(const svn_test_opts_t *opts, + apr_pool_t *pool) { svn_config_t *cfg; const char *cfg_file; @@ -289,10 +260,7 @@ test_has_option_case_sensitive(apr_pool_t *pool) }; static const int test_data_size = sizeof(test_data)/sizeof(*test_data); - if (!srcdir) - SVN_ERR(init_params(pool)); - - cfg_file = apr_pstrcat(pool, srcdir, "/", "config-test.cfg", (char *)NULL); + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, TRUE, pool)); for (i = 0; i < test_data_size; ++i) @@ -313,16 +281,14 @@ test_has_option_case_sensitive(apr_pool_t *pool) } static svn_error_t * -test_stream_interface(apr_pool_t *pool) +test_stream_interface(const svn_test_opts_t *opts, + apr_pool_t *pool) { svn_config_t *cfg; const char *cfg_file; svn_stream_t *stream; - if (!srcdir) - SVN_ERR(init_params(pool)); - - cfg_file = apr_pstrcat(pool, srcdir, "/", "config-test.cfg", (char 
*)NULL); + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); SVN_ERR(svn_stream_open_readonly(&stream, cfg_file, pool, pool)); SVN_ERR(svn_config_parse(&cfg, stream, TRUE, TRUE, pool)); @@ -353,16 +319,42 @@ test_ignore_bom(apr_pool_t *pool) } static svn_error_t * +test_read_only_mode(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_config_t *cfg; + svn_config_t *cfg2; + const char *cfg_file; + + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); + SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, FALSE, pool)); + + /* setting CFG to r/o mode shall toggle the r/o mode and expand values */ + + SVN_TEST_ASSERT(!svn_config__is_read_only(cfg)); + SVN_TEST_ASSERT(!svn_config__is_expanded(cfg, "section1", "i")); + + svn_config__set_read_only(cfg, pool); + + SVN_TEST_ASSERT(svn_config__is_read_only(cfg)); + SVN_TEST_ASSERT(svn_config__is_expanded(cfg, "section1", "i")); + + /* copies should be r/w with values */ + + SVN_ERR(svn_config_dup(&cfg2, cfg, pool)); + SVN_TEST_ASSERT(!svn_config__is_read_only(cfg2)); + + return SVN_NO_ERROR; +} + +static svn_error_t * test_expand(const svn_test_opts_t *opts, apr_pool_t *pool) { svn_config_t *cfg; const char *cfg_file, *val; - if (!srcdir) - SVN_ERR(init_params(pool)); - - cfg_file = apr_pstrcat(pool, srcdir, "/", "config-test.cfg", (char *)NULL); + SVN_ERR(get_config_file_path(&cfg_file, opts, pool)); SVN_ERR(svn_config_read3(&cfg, cfg_file, TRUE, TRUE, FALSE, pool)); /* Get expanded "g" which requires expanding "c". */ @@ -371,13 +363,34 @@ test_expand(const svn_test_opts_t *opts, /* Get expanded "c". */ svn_config_get(cfg, &val, "section1", "c", NULL); - /* With pool debugging enabled this ensures that the expanded value + /* With pool debugging enabled this ensures that the expanded value of "c" was not created in a temporary pool when expanding "g". 
*/ SVN_TEST_STRING_ASSERT(val, "bar"); return SVN_NO_ERROR; } +static svn_error_t * +test_invalid_bom(apr_pool_t *pool) +{ + svn_config_t *cfg; + svn_error_t *err; + svn_string_t *cfg_string; + svn_stream_t *stream; + + cfg_string = svn_string_create("\xEF", pool); + stream = svn_stream_from_string(cfg_string, pool); + err = svn_config_parse(&cfg, stream, TRUE, TRUE, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_MALFORMED_FILE); + + cfg_string = svn_string_create("\xEF\xBB", pool); + stream = svn_stream_from_string(cfg_string, pool); + err = svn_config_parse(&cfg, stream, TRUE, TRUE, pool); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_MALFORMED_FILE); + + return SVN_NO_ERROR; +} + /* ==================================================================== If you add a new test to this file, update this array. @@ -386,23 +399,33 @@ test_expand(const svn_test_opts_t *opts, */ /* An array of all test functions */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, - SVN_TEST_PASS2(test_text_retrieval, - "test svn_config"), - SVN_TEST_PASS2(test_boolean_retrieval, - "test svn_config boolean conversion"), - SVN_TEST_PASS2(test_has_section_case_insensitive, - "test svn_config_has_section (case insensitive)"), - SVN_TEST_PASS2(test_has_section_case_sensitive, - "test svn_config_has_section (case sensitive)"), - SVN_TEST_PASS2(test_has_option_case_sensitive, - "test case-sensitive option name lookup"), - SVN_TEST_PASS2(test_stream_interface, - "test svn_config_parse"), - SVN_TEST_PASS2(test_ignore_bom, "test parsing config file with BOM"), + SVN_TEST_OPTS_PASS(test_text_retrieval, + "test svn_config"), + SVN_TEST_OPTS_PASS(test_boolean_retrieval, + "test svn_config boolean conversion"), + SVN_TEST_OPTS_PASS(test_has_section_case_insensitive, + "test svn_config_has_section (case insensitive)"), + SVN_TEST_OPTS_PASS(test_has_section_case_sensitive, + "test svn_config_has_section (case 
sensitive)"), + SVN_TEST_OPTS_PASS(test_has_option_case_sensitive, + "test case-sensitive option name lookup"), + SVN_TEST_OPTS_PASS(test_stream_interface, + "test svn_config_parse"), + SVN_TEST_PASS2(test_ignore_bom, + "test parsing config file with BOM"), + SVN_TEST_OPTS_PASS(test_read_only_mode, + "test r/o mode"), SVN_TEST_OPTS_PASS(test_expand, "test variable expansion"), + SVN_TEST_PASS2(test_invalid_bom, + "test parsing config file with invalid BOM"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/crypto-test.c b/subversion/tests/libsvn_subr/crypto-test.c index 0c52804..91fd6c7 100644 --- a/subversion/tests/libsvn_subr/crypto-test.c +++ b/subversion/tests/libsvn_subr/crypto-test.c @@ -177,7 +177,9 @@ test_passphrase_check(apr_pool_t *pool) /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = -1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_encrypt_decrypt_password, @@ -186,3 +188,5 @@ struct svn_test_descriptor_t test_funcs[] = "password checktext generation/validation"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/dirent_uri-test.c b/subversion/tests/libsvn_subr/dirent_uri-test.c index 992d288..61d9e24 100644 --- a/subversion/tests/libsvn_subr/dirent_uri-test.c +++ b/subversion/tests/libsvn_subr/dirent_uri-test.c @@ -270,7 +270,7 @@ test_dirent_join(apr_pool_t *pool) "\"%s\". 
expected \"%s\"", base, comp, result, expect); - result = svn_dirent_join_many(pool, base, comp, NULL); + result = svn_dirent_join_many(pool, base, comp, SVN_VA_NULL); if (strcmp(result, expect)) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "svn_dirent_join_many(\"%s\", \"%s\") returned " @@ -286,72 +286,72 @@ test_dirent_join(apr_pool_t *pool) "expected \"%s\"", \ result, expect); - TEST_MANY((pool, "abc", NULL), "abc"); - TEST_MANY((pool, "/abc", NULL), "/abc"); - TEST_MANY((pool, "/", NULL), "/"); - - TEST_MANY((pool, "abc", "def", "ghi", NULL), "abc/def/ghi"); - TEST_MANY((pool, "abc", "/def", "ghi", NULL), "/def/ghi"); - TEST_MANY((pool, "/abc", "def", "ghi", NULL), "/abc/def/ghi"); - TEST_MANY((pool, "abc", "def", "/ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "def", "/ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "/def", "/ghi", NULL), "/ghi"); - - TEST_MANY((pool, SVN_EMPTY_PATH, "def", "ghi", NULL), "def/ghi"); - TEST_MANY((pool, "abc", SVN_EMPTY_PATH, "ghi", NULL), "abc/ghi"); - TEST_MANY((pool, "abc", "def", SVN_EMPTY_PATH, NULL), "abc/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "def", SVN_EMPTY_PATH, NULL), "def"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "ghi", NULL), "ghi"); - TEST_MANY((pool, "abc", SVN_EMPTY_PATH, SVN_EMPTY_PATH, NULL), "abc"); - TEST_MANY((pool, SVN_EMPTY_PATH, "def", "/ghi", NULL), "/ghi"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/ghi", NULL), "/ghi"); - - TEST_MANY((pool, "/", "def", "ghi", NULL), "/def/ghi"); - TEST_MANY((pool, "abc", "/", "ghi", NULL), "/ghi"); - TEST_MANY((pool, "abc", "def", "/", NULL), "/"); - TEST_MANY((pool, "/", "/", "ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "/", "/", NULL), "/"); - TEST_MANY((pool, "/", SVN_EMPTY_PATH, "ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "def", SVN_EMPTY_PATH, NULL), "/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "/", "ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, NULL), "/"); - TEST_MANY((pool, SVN_EMPTY_PATH, "/", 
SVN_EMPTY_PATH, NULL), "/"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/", NULL), "/"); + TEST_MANY((pool, "abc", SVN_VA_NULL), "abc"); + TEST_MANY((pool, "/abc", SVN_VA_NULL), "/abc"); + TEST_MANY((pool, "/", SVN_VA_NULL), "/"); + + TEST_MANY((pool, "abc", "def", "ghi", SVN_VA_NULL), "abc/def/ghi"); + TEST_MANY((pool, "abc", "/def", "ghi", SVN_VA_NULL), "/def/ghi"); + TEST_MANY((pool, "/abc", "def", "ghi", SVN_VA_NULL), "/abc/def/ghi"); + TEST_MANY((pool, "abc", "def", "/ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "def", "/ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "/def", "/ghi", SVN_VA_NULL), "/ghi"); + + TEST_MANY((pool, SVN_EMPTY_PATH, "def", "ghi", SVN_VA_NULL), "def/ghi"); + TEST_MANY((pool, "abc", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "abc/ghi"); + TEST_MANY((pool, "abc", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "abc/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "def", SVN_EMPTY_PATH, SVN_VA_NULL), "def"); + TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "ghi"); + TEST_MANY((pool, "abc", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "abc"); + TEST_MANY((pool, SVN_EMPTY_PATH, "def", "/ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/ghi", SVN_VA_NULL), "/ghi"); + + TEST_MANY((pool, "/", "def", "ghi", SVN_VA_NULL), "/def/ghi"); + TEST_MANY((pool, "abc", "/", "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "abc", "def", "/", SVN_VA_NULL), "/"); + TEST_MANY((pool, "/", "/", "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "/", "/", SVN_VA_NULL), "/"); + TEST_MANY((pool, "/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "/", "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "/"); + TEST_MANY((pool, SVN_EMPTY_PATH, "/", SVN_EMPTY_PATH, SVN_VA_NULL), "/"); + TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/", SVN_VA_NULL), "/"); 
#ifdef SVN_USE_DOS_PATHS - TEST_MANY((pool, "X:/", "def", "ghi", NULL), "X:/def/ghi"); - TEST_MANY((pool, "abc", "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "abc", "def", "X:/", NULL), "X:/"); - TEST_MANY((pool, "X:/", "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:/", "X:/", "/", NULL), "/"); - TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:/", "def", SVN_EMPTY_PATH, NULL), "X:/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, NULL), "X:/"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", SVN_EMPTY_PATH, NULL), "X:/"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "X:/", NULL), "X:/"); - - TEST_MANY((pool, "X:", "def", "ghi", NULL), "X:def/ghi"); - TEST_MANY((pool, "X:", "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:", "X:/", "/", NULL), "/"); - TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", NULL), "X:ghi"); - TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, NULL), "X:def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", NULL), "X:ghi"); - TEST_MANY((pool, "//srv/shr", "def", "ghi", NULL), "//srv/shr/def/ghi"); - TEST_MANY((pool, "//srv/shr/fld", "def", "ghi", NULL), "//srv/shr/fld/def/ghi"); - TEST_MANY((pool, "//srv/shr/fld", "def", "//srv/shr", NULL), "//srv/shr"); - TEST_MANY((pool, "//srv/s r/fld", "def", "//srv/s r", NULL), "//srv/s r"); - TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "ghi", NULL), "//srv/shr/fld/def/ghi"); - TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "//srv/shr", NULL), "//srv/shr"); - - TEST_MANY((pool, "abcd", "/dir", "A:", "file", NULL), "A:file"); - TEST_MANY((pool, "abcd", "A:", "/dir", "file", NULL), "A:/dir/file"); + TEST_MANY((pool, "X:/", "def", "ghi", SVN_VA_NULL), "X:/def/ghi"); + TEST_MANY((pool, "abc", "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "abc", "def", "X:/", SVN_VA_NULL), "X:/"); + TEST_MANY((pool, "X:/", "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); 
+ TEST_MANY((pool, "X:/", "X:/", "/", SVN_VA_NULL), "/"); + TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "X:/"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/"); + TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "X:/", SVN_VA_NULL), "X:/"); + + TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:def/ghi"); + TEST_MANY((pool, "X:", "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:", "X:/", "/", SVN_VA_NULL), "/"); + TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:ghi"); + TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:ghi"); + TEST_MANY((pool, "//srv/shr", "def", "ghi", SVN_VA_NULL), "//srv/shr/def/ghi"); + TEST_MANY((pool, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL), "//srv/shr/fld/def/ghi"); + TEST_MANY((pool, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL), "//srv/shr"); + TEST_MANY((pool, "//srv/s r/fld", "def", "//srv/s r", SVN_VA_NULL), "//srv/s r"); + TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL), "//srv/shr/fld/def/ghi"); + TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL), "//srv/shr"); + + TEST_MANY((pool, "abcd", "/dir", "A:", "file", SVN_VA_NULL), "A:file"); + TEST_MANY((pool, "abcd", "A:", "/dir", "file", SVN_VA_NULL), "A:/dir/file"); #else /* !SVN_USE_DOS_PATHS */ - TEST_MANY((pool, "X:", "def", "ghi", NULL), "X:/def/ghi"); - TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, NULL), "X:/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", NULL), "X:/ghi"); + TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:/def/ghi"); + 
TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:/ghi"); #endif /* SVN_USE_DOS_PATHS */ /* ### probably need quite a few more tests... */ @@ -809,6 +809,9 @@ static const testcase_canonicalize_t uri_canonical_tests[] = { "http://hst/foo/../bar","http://hst/foo/../bar" }, { "http://hst/", "http://hst" }, { "http:///", "http://" }, + { "http:///example.com/", "http:///example.com" }, + { "http:////example.com/", "http:///example.com" }, + { "http://///////example.com/", "http:///example.com" }, { "https://", "https://" }, { "file:///", "file://" }, { "file://", "file://" }, @@ -1109,6 +1112,7 @@ test_relpath_is_canonical(apr_pool_t *pool) static const testcase_is_canonical_t tests[] = { { "", TRUE }, { ".", FALSE }, + { "..", TRUE }, { "/", FALSE }, { "/.", FALSE }, { "./", FALSE }, @@ -1937,13 +1941,13 @@ test_dirent_get_absolute(apr_pool_t *pool) expect_abs = expect; if (*expect == '%') - expect_abs = apr_pstrcat(pool, curdir, expect + 1, (char *)NULL); + expect_abs = apr_pstrcat(pool, curdir, expect + 1, SVN_VA_NULL); #ifdef SVN_USE_DOS_PATHS if (*expect == '@') - expect_abs = apr_pstrcat(pool, curdironc, expect + 1, NULL); + expect_abs = apr_pstrcat(pool, curdironc, expect + 1, SVN_VA_NULL); if (*expect == '$') - expect_abs = apr_pstrcat(pool, curdrive, expect + 1, NULL); + expect_abs = apr_pstrcat(pool, curdrive, expect + 1, SVN_VA_NULL); /* Remove double '/' when CWD was the root dir (E.g. 
C:/) */ expect_abs = svn_dirent_canonicalize(expect_abs, pool); @@ -1987,8 +1991,8 @@ test_dirent_get_absolute_from_lc_drive(apr_pool_t *pool) for (hi = apr_hash_first(pool, dirents); hi; hi = apr_hash_next(hi)) { - const char *dir = svn__apr_hash_index_key(hi); - svn_io_dirent2_t *de = svn__apr_hash_index_val(hi); + const char *dir = apr_hash_this_key(hi); + svn_io_dirent2_t *de = apr_hash_this_val(hi); if (de->kind == svn_node_dir && strcmp(dir, current_dir_on_C)) @@ -2857,7 +2861,9 @@ test_rule3(apr_pool_t *pool) /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_dirent_is_root, @@ -2958,3 +2964,5 @@ struct svn_test_descriptor_t test_funcs[] = "test match with RFC 6125 s. 6.4.3 Rule 3"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/error-code-test.c b/subversion/tests/libsvn_subr/error-code-test.c index e996616..8dada36 100644 --- a/subversion/tests/libsvn_subr/error-code-test.c +++ b/subversion/tests/libsvn_subr/error-code-test.c @@ -74,10 +74,14 @@ check_error_codes_unique(apr_pool_t *pool) /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(check_error_codes_unique, "check that error codes are unique"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/error-test.c b/subversion/tests/libsvn_subr/error-test.c index 18dacde..ea3291b 100644 --- a/subversion/tests/libsvn_subr/error-test.c +++ b/subversion/tests/libsvn_subr/error-test.c @@ -205,8 +205,12 @@ test_error_symbolic_name(apr_pool_t *pool) { SVN_ERR_WC_NOT_WORKING_COPY, "SVN_ERR_WC_NOT_WORKING_COPY" }, /* Test an implementation detail. 
*/ { SVN_ERR_BAD_CATEGORY_START, "SVN_ERR_BAD_CONTAINING_POOL" }, +#ifdef SVN_DEBUG + { ENOENT, "ENOENT" }, + { APR_ENOPOOL, "APR_ENOPOOL" }, +#endif /* Test non-errors. */ - { 1, NULL }, + { -1, NULL }, { SVN_ERR_WC_CATEGORY_START - 1, NULL }, /* Whitebox-test exceptional cases. */ { SVN_WARNING, "SVN_WARNING" }, @@ -225,7 +229,9 @@ test_error_symbolic_name(apr_pool_t *pool) /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_error_root_cause, @@ -236,3 +242,5 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_error_symbolic_name"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/hashdump-test.c b/subversion/tests/libsvn_subr/hashdump-test.c index a80b1c1..57a8535 100644 --- a/subversion/tests/libsvn_subr/hashdump-test.c +++ b/subversion/tests/libsvn_subr/hashdump-test.c @@ -38,9 +38,9 @@ /* Our own global variables */ -apr_hash_t *proplist, *new_proplist; +static apr_hash_t *proplist, *new_proplist; -const char *review = +static const char *review = "A forthright entrance, yet coquettish on the tongue, its deceptively\n" "fruity exterior hides the warm mahagony undercurrent that is the\n" "hallmark of Chateau Fraisant-Pitre. 
Connoisseurs of the region will\n" @@ -177,7 +177,10 @@ test3(apr_pool_t *pool) */ /* An array of all test functions */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test1, @@ -188,3 +191,5 @@ struct svn_test_descriptor_t test_funcs[] = "write hash out, read back in, compare"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/io-test.c b/subversion/tests/libsvn_subr/io-test.c index 82e8630..e9de6fb 100644 --- a/subversion/tests/libsvn_subr/io-test.c +++ b/subversion/tests/libsvn_subr/io-test.c @@ -26,10 +26,14 @@ #include <stdio.h> #include <apr.h> +#include <apr_version.h> #include "svn_pools.h" #include "svn_string.h" +#include "svn_io.h" #include "private/svn_skel.h" +#include "private/svn_dep_compat.h" +#include "private/svn_io_private.h" #include "../svn_test.h" #include "../svn_test_fs.h" @@ -37,7 +41,7 @@ /* Helpers to create the test data directory. */ -#define TEST_DIR "io-test-temp" +#define TEST_DIR_PREFIX "io-test-temp" /* The definition for the test data files. 
*/ struct test_file_definition_t @@ -64,7 +68,7 @@ struct test_file_definition_t char* created_path; }; -struct test_file_definition_t test_file_definitions[] = +static struct test_file_definition_t test_file_definitions_template[] = { {"empty", "", 0}, {"single_a", "a", 1}, @@ -119,6 +123,7 @@ struct test_file_definition_t test_file_definitions[] = static svn_error_t * create_test_file(struct test_file_definition_t* definition, + const char *testname, apr_pool_t *pool, apr_pool_t *scratch_pool) { @@ -127,6 +132,7 @@ create_test_file(struct test_file_definition_t* definition, apr_off_t midpos = definition->size / 2; svn_error_t *err = NULL; int i; + const char *test_dir = apr_pstrcat(pool, TEST_DIR_PREFIX, testname, NULL); if (definition->size < 5) SVN_ERR_ASSERT(strlen(definition->data) >= (apr_size_t)definition->size); @@ -134,9 +140,9 @@ create_test_file(struct test_file_definition_t* definition, SVN_ERR_ASSERT(strlen(definition->data) >= 5); - definition->created_path = svn_dirent_join(TEST_DIR, - definition->name, - pool); + definition->created_path = svn_dirent_join(test_dir, + definition->name, + pool); SVN_ERR(svn_io_file_open(&file_h, definition->created_path, @@ -174,37 +180,47 @@ create_test_file(struct test_file_definition_t* definition, /* Function to prepare the whole set of on-disk files to be compared. */ static svn_error_t * -create_comparison_candidates(apr_pool_t *scratch_pool) +create_comparison_candidates(struct test_file_definition_t **definitions, + const char *testname, + apr_pool_t *pool) { svn_node_kind_t kind; - apr_pool_t *iterpool = svn_pool_create(scratch_pool); + apr_pool_t *iterpool = svn_pool_create(pool); struct test_file_definition_t *candidate; svn_error_t *err = SVN_NO_ERROR; + apr_size_t count = 0; + const char *test_dir = apr_pstrcat(pool, TEST_DIR_PREFIX, + testname, NULL); /* If there's already a directory named io-test-temp, delete it. 
Doing things this way means that repositories stick around after a failure for postmortem analysis, but also that tests can be re-run without cleaning out the repositories created by prior runs. */ - SVN_ERR(svn_io_check_path(TEST_DIR, &kind, scratch_pool)); + SVN_ERR(svn_io_check_path(test_dir, &kind, pool)); if (kind == svn_node_dir) - SVN_ERR(svn_io_remove_dir2(TEST_DIR, TRUE, NULL, NULL, scratch_pool)); + SVN_ERR(svn_io_remove_dir2(test_dir, TRUE, NULL, NULL, pool)); else if (kind != svn_node_none) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "There is already a file named '%s'", - TEST_DIR); + test_dir); - SVN_ERR(svn_io_dir_make(TEST_DIR, APR_OS_DEFAULT, scratch_pool)); + SVN_ERR(svn_io_dir_make(test_dir, APR_OS_DEFAULT, pool)); - svn_test_add_dir_cleanup(TEST_DIR); + svn_test_add_dir_cleanup(test_dir); - for (candidate = test_file_definitions; + for (candidate = test_file_definitions_template; candidate->name != NULL; candidate += 1) + count++; + + *definitions = apr_pmemdup(pool, test_file_definitions_template, + (count + 1) * sizeof(**definitions)); + for (candidate = *definitions; candidate->name != NULL; candidate += 1) { svn_pool_clear(iterpool); - err = create_test_file(candidate, scratch_pool, iterpool); + err = create_test_file(candidate, testname, pool, iterpool); if (err) break; } @@ -227,8 +243,11 @@ test_two_file_size_comparison(apr_pool_t *scratch_pool) svn_error_t *err = SVN_NO_ERROR; svn_error_t *cmp_err; apr_pool_t *iterpool = svn_pool_create(scratch_pool); + struct test_file_definition_t *test_file_definitions; - SVN_ERR(create_comparison_candidates(scratch_pool)); + SVN_ERR(create_comparison_candidates(&test_file_definitions, + "test_two_file_size_comparison", + scratch_pool)); for (outer = test_file_definitions; outer->name != NULL; outer += 1) { @@ -278,8 +297,11 @@ test_two_file_content_comparison(apr_pool_t *scratch_pool) svn_error_t *err = SVN_NO_ERROR; svn_error_t *cmp_err; apr_pool_t *iterpool = svn_pool_create(scratch_pool); 
+ struct test_file_definition_t *test_file_definitions; - SVN_ERR(create_comparison_candidates(scratch_pool)); + SVN_ERR(create_comparison_candidates(&test_file_definitions, + "test_two_file_content_comparison", + scratch_pool)); for (outer = test_file_definitions; outer->name != NULL; outer += 1) { @@ -331,8 +353,11 @@ test_three_file_size_comparison(apr_pool_t *scratch_pool) svn_error_t *err = SVN_NO_ERROR; svn_error_t *cmp_err; apr_pool_t *iterpool = svn_pool_create(scratch_pool); + struct test_file_definition_t *test_file_definitions; - SVN_ERR(create_comparison_candidates(scratch_pool)); + SVN_ERR(create_comparison_candidates(&test_file_definitions, + "test_three_file_size_comparison", + scratch_pool)); for (outer = test_file_definitions; outer->name != NULL; outer += 1) { @@ -411,8 +436,11 @@ test_three_file_content_comparison(apr_pool_t *scratch_pool) svn_error_t *err = SVN_NO_ERROR; svn_error_t *cmp_err; apr_pool_t *iterpool = svn_pool_create(scratch_pool); + struct test_file_definition_t *test_file_definitions; - SVN_ERR(create_comparison_candidates(scratch_pool)); + SVN_ERR(create_comparison_candidates(&test_file_definitions, + "test_three_file_content_comparison", + scratch_pool)); for (outer = test_file_definitions; outer->name != NULL; outer += 1) { @@ -507,10 +535,304 @@ read_length_line_shouldnt_loop(apr_pool_t *pool) return SVN_NO_ERROR; } +/* Move the read pointer in FILE to absolute position OFFSET and align + * the read buffer to multiples of BLOCK_SIZE. BUFFERED is set only if + * FILE actually uses a read buffer. Use POOL for allocations. + */ +static svn_error_t * +aligned_seek(apr_file_t *file, + apr_size_t block_size, + apr_size_t offset, + svn_boolean_t buffered, + apr_pool_t *pool) +{ + apr_off_t block_start; + apr_off_t current; + + SVN_ERR(svn_io_file_aligned_seek(file, (apr_off_t)block_size, + &block_start, (apr_off_t)offset, pool)); + + /* block start shall be aligned to multiples of block_size. 
+ If it isn't, it must be aligned to APR's default block size(pre-1.3 APR) + */ + if (buffered) + { + SVN_TEST_ASSERT(block_start % block_size == 0); + SVN_TEST_ASSERT(offset - block_start < block_size); + } + + /* we must be at the desired offset */ + current = 0; + SVN_ERR(svn_io_file_seek(file, APR_CUR, ¤t, pool)); + SVN_TEST_ASSERT(current == (apr_off_t)offset); + + return SVN_NO_ERROR; +} + +/* Move the read pointer in FILE to absolute position OFFSET, align the + * read buffer to multiples of BLOCK_SIZE and read one byte from that + * position. Verify that it matches the CONTENTS for that offset. + * BUFFERED is set only if FILE actually uses a read buffer. + * Use POOL for allocations. + */ +static svn_error_t * +aligned_read_at(apr_file_t *file, + svn_stringbuf_t *contents, + apr_size_t block_size, + apr_size_t offset, + svn_boolean_t buffered, + apr_pool_t *pool) +{ + char c; + SVN_ERR(aligned_seek(file, block_size, offset, buffered, pool)); + + /* the data we read must match whatever we wrote there */ + SVN_ERR(svn_io_file_getc(&c, file, pool)); + SVN_TEST_ASSERT(c == contents->data[offset]); + + return SVN_NO_ERROR; +} + +/* Verify that aligned seek with the given BLOCK_SIZE works for FILE. + * CONTENTS is the data expected from FILE. BUFFERED is set only if FILE + * actually uses a read buffer. Use POOL for allocations. 
+ */ +static svn_error_t * +aligned_read(apr_file_t *file, + svn_stringbuf_t *contents, + apr_size_t block_size, + svn_boolean_t buffered, + apr_pool_t *pool) +{ + apr_size_t i; + apr_size_t offset = 0; + const apr_size_t prime = 78427; + + /* "random" access to different offsets */ + for (i = 0, offset = prime; i < 10; ++i, offset += prime) + SVN_ERR(aligned_read_at(file, contents, block_size, + offset % contents->len, buffered, pool)); + + /* we can seek to EOF */ + SVN_ERR(aligned_seek(file, contents->len, block_size, buffered, pool)); + + /* reversed order access to all bytes */ + for (i = contents->len; i > 0; --i) + SVN_ERR(aligned_read_at(file, contents, block_size, i - 1, buffered, + pool)); + + /* forward order access to all bytes */ + for (i = 0; i < contents->len; ++i) + SVN_ERR(aligned_read_at(file, contents, block_size, i, buffered, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +aligned_seek_test(apr_pool_t *pool) +{ + apr_size_t i; + const char *tmp_dir; + const char *tmp_file; + apr_file_t *f; + svn_stringbuf_t *contents; + const apr_size_t file_size = 100000; + + /* create a temp folder & schedule it for automatic cleanup */ + + SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "aligned_seek_tmp", pool)); + SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool)); + svn_test_add_dir_cleanup(tmp_dir); + + /* create a temp file with know contents */ + + contents = svn_stringbuf_create_ensure(file_size, pool); + for (i = 0; i < file_size; ++i) + svn_stringbuf_appendbyte(contents, (char)rand()); + + SVN_ERR(svn_io_write_unique(&tmp_file, tmp_dir, contents->data, + contents->len, + svn_io_file_del_on_pool_cleanup, pool)); + + /* now, access read data with varying alignment sizes */ + SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ | APR_BUFFERED, + APR_OS_DEFAULT, pool)); + SVN_ERR(aligned_read(f, contents, 0x1000, TRUE, pool)); /* APR default */ + SVN_ERR(aligned_read(f, contents, 0x8000, 
TRUE, pool)); /* "unusual" 32K */ + SVN_ERR(aligned_read(f, contents, 0x10000, TRUE, pool)); /* FSX default */ + SVN_ERR(aligned_read(f, contents, 0x100000, TRUE, pool)); /* larger than file */ + SVN_ERR(aligned_read(f, contents, 10001, TRUE, pool)); /* odd, larger than + APR default */ + SVN_ERR(aligned_read(f, contents, 1003, TRUE, pool)); /* odd, smaller than + APR default */ + SVN_ERR(svn_io_file_close(f, pool)); + + /* now, try read data with buffering disabled. + That are a special case because APR reports a buffer size of 0. */ + SVN_ERR(svn_io_file_open(&f, tmp_file, APR_READ, APR_OS_DEFAULT, pool)); + SVN_ERR(aligned_read(f, contents, 0x1000, FALSE, pool)); + SVN_ERR(aligned_read(f, contents, 0x8000, FALSE, pool)); + SVN_ERR(aligned_read(f, contents, 0x10000, FALSE, pool)); + SVN_ERR(aligned_read(f, contents, 0x100000, FALSE, pool)); + SVN_ERR(aligned_read(f, contents, 10001, FALSE, pool)); + SVN_ERR(aligned_read(f, contents, 1003, FALSE, pool)); + SVN_ERR(svn_io_file_close(f, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +ignore_enoent(apr_pool_t *pool) +{ + const char *tmp_dir, *path; + const svn_io_dirent2_t *dirent_p; + apr_file_t *file; + + /* Create an empty directory. */ + SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "ignore_enoent", pool)); + SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool)); + svn_test_add_dir_cleanup(tmp_dir); + + /* Path does not exist. 
*/ + path = svn_dirent_join(tmp_dir, "not-present", pool); + SVN_ERR(svn_io_remove_dir2(path, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_remove_file2(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_read_only(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_read_write(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_executable(path, TRUE, TRUE, pool)); + SVN_ERR(svn_io_set_file_executable(path, FALSE, TRUE, pool)); + SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, TRUE, TRUE, pool, pool)); + SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, FALSE, TRUE, pool, pool)); + + /* Neither path nor parent exists. */ + path = svn_dirent_join(path, "not-present", pool); + SVN_ERR(svn_io_remove_dir2(path, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_remove_file2(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_read_only(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_read_write(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_executable(path, TRUE, TRUE, pool)); + SVN_ERR(svn_io_set_file_executable(path, FALSE, TRUE, pool)); + SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, TRUE, TRUE, pool, pool)); + SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, FALSE, TRUE, pool, pool)); + + /* File does exist. */ + path = svn_dirent_join(tmp_dir, "present", pool); + SVN_ERR(svn_io_file_open(&file, path, + APR_WRITE | APR_CREATE | APR_TRUNCATE, + APR_OS_DEFAULT, + pool)); + SVN_ERR(svn_io_file_close(file, pool)); + + /* Path does not exist as child of file. 
*/ + path = svn_dirent_join(path, "not-present", pool); + SVN_ERR(svn_io_remove_dir2(path, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_remove_file2(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_read_only(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_read_write(path, TRUE, pool)); + SVN_ERR(svn_io_set_file_executable(path, TRUE, TRUE, pool)); + SVN_ERR(svn_io_set_file_executable(path, FALSE, TRUE, pool)); + SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, TRUE, TRUE, pool, pool)); + SVN_ERR(svn_io_stat_dirent2(&dirent_p, path, FALSE, TRUE, pool, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_install_stream_to_longpath(apr_pool_t *pool) +{ + const char *tmp_dir; + const char *final_abspath; + const char *deep_dir; + svn_stream_t *stream; + svn_stringbuf_t *actual_content; + int i; + + /* Create an empty directory. */ + SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "test_install_stream_to_longpath", + pool)); + SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool)); + svn_test_add_dir_cleanup(tmp_dir); + + deep_dir = tmp_dir; + + /* Generate very long path (> 260 symbols) */ + for (i = 0; i < 26; i++) + { + deep_dir = svn_dirent_join(deep_dir, "1234567890", pool); + SVN_ERR(svn_io_make_dir_recursively(deep_dir, pool)); + } + + final_abspath = svn_dirent_join(deep_dir, "stream1", pool); + SVN_ERR(svn_stream__create_for_install(&stream, deep_dir, pool, pool)); + SVN_ERR(svn_stream_puts(stream, "stream1 content")); + SVN_ERR(svn_stream_close(stream)); + SVN_ERR(svn_stream__install_stream(stream, + final_abspath, + TRUE, + pool)); + + SVN_ERR(svn_stringbuf_from_file2(&actual_content, + final_abspath, + pool)); + + SVN_TEST_STRING_ASSERT(actual_content->data, "stream1 content"); + + return SVN_NO_ERROR; +} +static svn_error_t * +test_apr_trunc_workaround(apr_pool_t *pool) +{ + const char *tmp_dir; + const char *tmp_file; + apr_file_t *f; + apr_size_t len; + apr_off_t offset; + char dummy; + + /* create 
a temp folder & schedule it for automatic cleanup */ + SVN_ERR(svn_dirent_get_absolute(&tmp_dir, "test_apr_trunc_workaround", + pool)); + SVN_ERR(svn_io_remove_dir2(tmp_dir, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_make_dir_recursively(tmp_dir, pool)); + svn_test_add_dir_cleanup(tmp_dir); + + /* create an r/w file */ + tmp_file = svn_dirent_join(tmp_dir, "file", pool); + SVN_ERR(svn_io_file_open(&f, tmp_file, + APR_READ | APR_WRITE | APR_BUFFERED | APR_CREATE | + APR_TRUNCATE, + APR_OS_DEFAULT, pool)); + + /* write some content and put it internally into read mode */ + len = 10; + SVN_ERR(svn_io_file_write(f, "0123456789", &len, pool)); + + offset = 0; + SVN_ERR(svn_io_file_seek(f, APR_SET, &offset, pool)); + SVN_ERR(svn_io_file_getc(&dummy, f, pool)); + + /* clear the file and write some new content */ + SVN_ERR(svn_io_file_trunc(f, 0, pool)); + len = 3; + SVN_ERR(svn_io_file_write(f, "abc", &len, pool)); + + /* we should now be positioned at the end of the new content */ + offset = 0; + SVN_ERR(svn_io_file_seek(f, APR_CUR, &offset, pool)); + SVN_TEST_ASSERT(offset == (int)len); + + return SVN_NO_ERROR; +} + /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 3; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_two_file_size_comparison, @@ -523,5 +845,15 @@ struct svn_test_descriptor_t test_funcs[] = "three file content comparison"), SVN_TEST_PASS2(read_length_line_shouldnt_loop, "svn_io_read_length_line() shouldn't loop"), + SVN_TEST_PASS2(aligned_seek_test, + "test aligned seek"), + SVN_TEST_PASS2(ignore_enoent, + "test ignore-enoent"), + SVN_TEST_PASS2(test_install_stream_to_longpath, + "test svn_stream__install_stream to long path"), + SVN_TEST_PASS2(test_apr_trunc_workaround, + "test workaround for APR in svn_io_file_trunc"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/mergeinfo-test.c b/subversion/tests/libsvn_subr/mergeinfo-test.c index ecfcd02..5f4d37e 100644 --- a/subversion/tests/libsvn_subr/mergeinfo-test.c +++ b/subversion/tests/libsvn_subr/mergeinfo-test.c @@ -104,7 +104,7 @@ verify_mergeinfo_parse(const char *input, /* Were we expecting any more ranges? */ if (j < MAX_NBR_RANGES - 1 - && !expected_ranges[j].end == 0) + && expected_ranges[j].end != 0) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "svn_mergeinfo_parse (%s) failed to " "produce the expected number of ranges", @@ -114,11 +114,7 @@ verify_mergeinfo_parse(const char *input, } -/* Some of our own global variables (for simplicity), which map paths - -> merge ranges. */ -static apr_hash_t *info1, *info2; - -#define NBR_MERGEINFO_VALS 24 +#define NBR_MERGEINFO_VALS 25 /* Valid mergeinfo values. */ static const char * const mergeinfo_vals[NBR_MERGEINFO_VALS] = @@ -152,7 +148,8 @@ static const char * const mergeinfo_vals[NBR_MERGEINFO_VALS] = "/A/:7-8", "/A///:7-8", "/A/.:7-8", - "/A/./B:7-8" + "/A/./B:7-8", + ":7-8", }; /* Paths corresponding to mergeinfo_vals. 
*/ static const char * const mergeinfo_paths[NBR_MERGEINFO_VALS] = @@ -185,7 +182,8 @@ static const char * const mergeinfo_paths[NBR_MERGEINFO_VALS] = "/A", "/A", "/A", - "/A/B" + "/A/B", + "/", }; /* First ranges from the paths identified by mergeinfo_paths. */ static svn_merge_range_t mergeinfo_ranges[NBR_MERGEINFO_VALS][MAX_NBR_RANGES] = @@ -216,6 +214,7 @@ static svn_merge_range_t mergeinfo_ranges[NBR_MERGEINFO_VALS][MAX_NBR_RANGES] = { {6, 8, TRUE} }, { {6, 8, TRUE} }, { {6, 8, TRUE} }, + { {6, 8, TRUE} }, }; static svn_error_t * @@ -268,6 +267,7 @@ test_parse_combine_rangeinfo(apr_pool_t *pool) { apr_array_header_t *result; svn_merge_range_t *resultrange; + apr_hash_t *info1; SVN_ERR(svn_mergeinfo_parse(&info1, single_mergeinfo, pool)); @@ -301,7 +301,7 @@ test_parse_combine_rangeinfo(apr_pool_t *pool) } -#define NBR_BROKEN_MERGEINFO_VALS 27 +#define NBR_BROKEN_MERGEINFO_VALS 26 /* Invalid mergeinfo values. */ static const char * const broken_mergeinfo_vals[NBR_BROKEN_MERGEINFO_VALS] = { @@ -333,8 +333,6 @@ static const char * const broken_mergeinfo_vals[NBR_BROKEN_MERGEINFO_VALS] = "/trunk:", "/trunk:2-9\n/branch:", "::", - /* No path */ - ":1-3", /* Invalid revisions */ "trunk:a-3", "branch:3-four", @@ -346,6 +344,7 @@ test_parse_broken_mergeinfo(apr_pool_t *pool) { int i; svn_error_t *err; + apr_hash_t *info1; /* Trigger some error(s) with mal-formed input. 
*/ for (i = 0; i < NBR_BROKEN_MERGEINFO_VALS; i++) @@ -565,6 +564,7 @@ test_mergeinfo_intersect(apr_pool_t *pool) { {0, 1, TRUE}, {2, 4, TRUE}, {11, 12, TRUE} }; svn_rangelist_t *rangelist; apr_hash_t *intersection; + apr_hash_t *info1, *info2; SVN_ERR(svn_mergeinfo_parse(&info1, "/trunk: 1-6,12-16\n/foo: 31", pool)); SVN_ERR(svn_mergeinfo_parse(&info2, "/trunk: 1,3-4,7,9,11-12", pool)); @@ -701,6 +701,7 @@ test_merge_mergeinfo(apr_pool_t *pool) { int j; svn_string_t *info2_starting, *info2_ending; + apr_hash_t *info1, *info2; SVN_ERR(svn_mergeinfo_parse(&info1, mergeinfo[i].mergeinfo1, pool)); SVN_ERR(svn_mergeinfo_parse(&info2, mergeinfo[i].mergeinfo2, pool)); @@ -1109,6 +1110,7 @@ test_rangelist_to_string(apr_pool_t *pool) svn_rangelist_t *result; svn_string_t *output; svn_string_t *expected = svn_string_create("3,5,7-11,13-14", pool); + apr_hash_t *info1; SVN_ERR(svn_mergeinfo_parse(&info1, mergeinfo1, pool)); @@ -1129,6 +1131,7 @@ test_mergeinfo_to_string(apr_pool_t *pool) { svn_string_t *output; svn_string_t *expected; + apr_hash_t *info1, *info2; expected = svn_string_create("/fred:8-10\n/trunk:3,5,7-11,13-14", pool); SVN_ERR(svn_mergeinfo_parse(&info1, mergeinfo1, pool)); @@ -1670,7 +1673,9 @@ test_remove_prefix_from_catalog(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_parse_single_line_mergeinfo, @@ -1711,3 +1716,5 @@ struct svn_test_descriptor_t test_funcs[] = "removal of prefix paths from catalog keys"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/named_atomic-test-common.h b/subversion/tests/libsvn_subr/named_atomic-test-common.h deleted file mode 100644 index 2ada4ee..0000000 --- a/subversion/tests/libsvn_subr/named_atomic-test-common.h +++ /dev/null @@ -1,245 +0,0 @@ -/* - * named_atomic-test-common.h: shared function implementations for - * named_atomic-test - * - * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. 
- * ==================================================================== - */ - - - -#include "../svn_test.h" -#include "svn_pools.h" -#include "private/svn_named_atomic.h" - -/* Some constants that we will use in our tests */ - -/* All our atomics start with that name */ -#define ATOMIC_NAME "MyTestAtomic" - -/* Factor used to create non-trivial 64 bit numbers */ -#define HUGE_VALUE 1234567890123456ll - -/* to separate this code from any production environment */ -const char *name_namespace = NULL; -const char *name_namespace1 = NULL; -const char *name_namespace2 = NULL; - -/* data structure containing all information we need to check for - * a) passing some deadline - * b) reaching the maximum iteration number - */ -typedef struct watchdog_t -{ - apr_time_t deadline; - svn_named_atomic__t *atomic_counter; - int iterations; - int call_count; /* don't call apr_time_now() too often '*/ -} watchdog_t; - -/* init the WATCHDOG data structure for checking ATOMIC_COUNTER to reach - * ITERATIONS and for the system time to pass a deadline MAX_DURATION - * microsecs in the future. - */ -static void -init_watchdog(watchdog_t *watchdog, - svn_named_atomic__t *atomic_counter, - int iterations, - apr_time_t max_duration) -{ - watchdog->deadline = apr_time_now() + max_duration; - watchdog->atomic_counter = atomic_counter; - watchdog->iterations = iterations; - watchdog->call_count = 0; -} - -/* test for watchdog conditions */ -static svn_error_t * -check_watchdog(watchdog_t *watchdog, svn_boolean_t *done) -{ - apr_int64_t counter = 0; - - /* check for normal end of loop. - * We are a watchdog, so don't check for errors. 
*/ - *done = FALSE; - svn_error_clear(svn_named_atomic__read(&counter, - watchdog->atomic_counter)); - if (counter >= watchdog->iterations) - { - *done = TRUE; - return SVN_NO_ERROR; - } - - /* Check the system time and indicate when deadline has passed */ - if (++watchdog->call_count > 100) - { - watchdog->call_count = 100; - if (apr_time_now() > watchdog->deadline) - return svn_error_createf(SVN_ERR_TEST_FAILED, - 0, - "Deadline has passed at iteration %d/%d", - (int)counter, watchdog->iterations); - } - - /* no problem so far */ - return SVN_NO_ERROR; -} - -/* "pipeline" test: initialization code executed by the worker with ID 0. - * Pushes COUNT tokens into ATOMIC_OUT and checks for ATOMIC_COUNTER not to - * exceed ITERATIONS (early termination). - */ -static svn_error_t * -test_pipeline_prepare(svn_named_atomic__t *atomic_out, - int count, - watchdog_t *watchdog) -{ - apr_int64_t value = 0; - int i; - svn_boolean_t done = FALSE; - - /* Initialize values in thread 0, pass them along in other threads */ - - for (i = 1; i <= count; ++i) - do - { - /* Generate new token (once the old one has been removed)*/ - SVN_ERR(svn_named_atomic__cmpxchg(&value, - i, - 0, - atomic_out)); - SVN_ERR(check_watchdog(watchdog, &done)); - if (done) return SVN_NO_ERROR; - } - while (value != 0); - - return SVN_NO_ERROR; -} - -/* "pipeline" test: the main loop. Each one of the COUNT workers receives - * data in its ATOMIC_IN and passes it on to ATOMIC_OUT until ATOMIC_COUNTER - * exceeds ITERATIONS. - */ -static svn_error_t * -test_pipeline_loop(svn_named_atomic__t *atomic_in, - svn_named_atomic__t *atomic_out, - svn_named_atomic__t *atomic_counter, - int count, - int iterations, - watchdog_t *watchdog) -{ - apr_int64_t value = 0, old_value, last_value = 0; - apr_int64_t counter; - svn_boolean_t done = FALSE; - - /* Pass the tokens along */ - - do - { - /* Wait for and consume incoming token. 
*/ - do - { - SVN_ERR(svn_named_atomic__write(&value, 0, atomic_in)); - SVN_ERR(check_watchdog(watchdog, &done)); - if (done) return SVN_NO_ERROR; - } - while (value == 0); - - /* All tokes must come in in the same order */ - SVN_TEST_ASSERT((last_value % count) == (value - 1)); - last_value = value; - - /* Wait for the target atomic to become vacant and write the token */ - do - { - SVN_ERR(svn_named_atomic__cmpxchg(&old_value, - value, - 0, - atomic_out)); - SVN_ERR(check_watchdog(watchdog, &done)); - if (done) return SVN_NO_ERROR; - } - while (old_value != 0); - - /* Count the number of operations */ - SVN_ERR(svn_named_atomic__add(&counter, 1, atomic_counter)); - } - while (counter < iterations); - - /* done */ - - return SVN_NO_ERROR; -} - -/* "pipeline" test: worker with ID 0 initializes the data; all workers - * (COUNT in total) have one input and one output bucket that form a ring - * spanning all workers. Each worker passes the value along ITERATIONS times. - */ -static svn_error_t * -test_pipeline(int id, int count, int iterations, apr_pool_t *pool) -{ - svn_atomic_namespace__t *ns; - svn_named_atomic__t *atomic_in; - svn_named_atomic__t *atomic_out; - svn_named_atomic__t *atomic_counter; - svn_error_t *err = SVN_NO_ERROR; - watchdog_t watchdog; - - /* get the two I/O atomics for this thread */ - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace, pool)); - SVN_ERR(svn_named_atomic__get(&atomic_in, - ns, - apr_pstrcat(pool, - ATOMIC_NAME, - apr_itoa(pool, - id), - NULL), - FALSE)); - SVN_ERR(svn_named_atomic__get(&atomic_out, - ns, - apr_pstrcat(pool, - ATOMIC_NAME, - apr_itoa(pool, - (id + 1) % count), - NULL), - FALSE)); - - /* our iteration counter */ - SVN_ERR(svn_named_atomic__get(&atomic_counter, ns, "counter", FALSE)); - - /* safeguard our execution time. 
Limit it to 20s */ - init_watchdog(&watchdog, atomic_counter, iterations, 20000000); - - /* fill pipeline */ - if (id == 0) - err = test_pipeline_prepare(atomic_out, count, &watchdog); - - /* Pass the tokens along */ - if (!err) - err = test_pipeline_loop(atomic_in, atomic_out, atomic_counter, - count, iterations, &watchdog); - - /* if we experienced an error, cause everybody to exit */ - if (err) - svn_error_clear(svn_named_atomic__write(NULL, iterations, atomic_counter)); - - /* done */ - - return err; -} diff --git a/subversion/tests/libsvn_subr/named_atomic-test.c b/subversion/tests/libsvn_subr/named_atomic-test.c deleted file mode 100644 index 05604d2..0000000 --- a/subversion/tests/libsvn_subr/named_atomic-test.c +++ /dev/null @@ -1,761 +0,0 @@ -/* - * named_atomic-test.c: a collection of svn_named_atomic__t tests - * - * ==================================================================== - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - * ==================================================================== - */ - -/* ==================================================================== - To add tests, look toward the bottom of this file. 
-*/ - - -#include <stdio.h> -#include <apr_file_io.h> - -#include "svn_io.h" - -/* shared test implementation */ -#include "named_atomic-test-common.h" - -/* Name of the worker process executable */ -#define TEST_PROC "named_atomic-proc-test" - -/* number of hardware threads (logical cores) that we may use. - * Will be set to at least 2 - even on unicore machines. */ -static int hw_thread_count = 0; - -/* number of iterations that we should perform on concurrency tests - * (will be calibrated to about 1s runtime)*/ -static int suggested_iterations = 0; - -/* If possible, translate PROC to a global path and set DIRECTORY to - * the current directory. - */ -static svn_error_t * -adjust_proc_path(const char **proc, const char **directory, apr_pool_t *pool) -{ -#ifdef WIN32 - /* Under Windows, the test will not be in the current directory - * and neither will be PROC. Therefore, determine its full path */ - char path [MAX_PATH] = { 0 }; - GetModuleFileNameA(NULL, path, sizeof(path)); - *(strrchr(path, '\\') + 1) = 0; - *proc = apr_pstrcat(pool, path, *proc, ".exe", NULL); - - /* And we need to set the working dir to our working dir to make - * our sub-processes find all DLLs. */ - GetCurrentDirectoryA(sizeof(path), path); - *directory = apr_pstrdup(pool, path); -#endif - - return SVN_NO_ERROR; -} - -/* Returns true if PROC can be found and executed. 
- */ -static svn_boolean_t -proc_found(const char *proc, apr_pool_t *pool) -{ - static svn_tristate_t result = svn_tristate_unknown; - - if (result == svn_tristate_unknown) - { - svn_error_t *error = SVN_NO_ERROR; - const char * directory = NULL; - - /* all processes and their I/O data */ - apr_proc_t process; - const char * args[2]; - - args[0] = proc; - args[1] = NULL; - svn_error_clear(adjust_proc_path(&args[0], &directory, pool)); - - /* try to start the process */ - error = svn_io_start_cmd3(&process, - directory, /* working directory */ - args[0], - args, - NULL, /* environment */ - FALSE, /* no handle inheritance */ - FALSE, /* no STDIN pipe */ - NULL, - FALSE, /* no STDOUT pipe */ - NULL, - FALSE, /* no STDERR pipe */ - NULL, - pool); - if (!error) - error = svn_io_wait_for_cmd(&process, proc, NULL, NULL, pool); - - result = error ? svn_tristate_false : svn_tristate_true; - svn_error_clear(error); - } - - return result == svn_tristate_true; -} - -/* Remove temporary files from disk. - */ -static apr_status_t -cleanup_test_shm(void *arg) -{ - apr_pool_t *pool = arg; - - svn_error_clear(svn_atomic_namespace__cleanup(name_namespace, pool)); - svn_error_clear(svn_atomic_namespace__cleanup(name_namespace1, pool)); - svn_error_clear(svn_atomic_namespace__cleanup(name_namespace2, pool)); - - return 0; -} - -/* Bring shared memory to a defined state. This is very useful in case of - * lingering problems from previous tests or test runs. 
- */ -static svn_error_t * -init_test_shm(apr_pool_t *pool) -{ - svn_atomic_namespace__t *ns; - svn_named_atomic__t *atomic; - apr_pool_t *scratch = svn_pool_create(pool); - - if (name_namespace == NULL) - { - apr_pool_t *global_pool = svn_pool_create(NULL); - SVN_ERR(svn_io_open_unique_file3(NULL, - &name_namespace, - NULL, - svn_io_file_del_on_pool_cleanup, - global_pool, - pool)); - SVN_ERR(svn_io_open_unique_file3(NULL, - &name_namespace1, - NULL, - svn_io_file_del_on_pool_cleanup, - global_pool, - pool)); - SVN_ERR(svn_io_open_unique_file3(NULL, - &name_namespace2, - NULL, - svn_io_file_del_on_pool_cleanup, - global_pool, - pool)); - } - - /* skip tests if the current user does not have the required privileges */ - if (!svn_named_atomic__is_supported()) - return svn_error_wrap_apr(SVN_ERR_TEST_SKIPPED, - "user has insufficient privileges"); - - /* destroy temp files after usage */ - - apr_pool_cleanup_register(pool, pool, - cleanup_test_shm, apr_pool_cleanup_null); - - /* get the two I/O atomics for this thread */ - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace, scratch)); - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME, TRUE)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME "1", TRUE)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME "2", TRUE)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - - svn_pool_clear(scratch); - - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace1, scratch)); - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME, TRUE)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - svn_pool_clear(scratch); - - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace2, scratch)); - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME, TRUE)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - svn_pool_clear(scratch); - - /* done */ - - return SVN_NO_ERROR; -} - -/* Prepare 
the shared memory for a run with COUNT workers. - */ -static svn_error_t * -init_concurrency_test_shm(apr_pool_t *pool, int count) -{ - svn_atomic_namespace__t *ns; - svn_named_atomic__t *atomic; - int i; - - /* get the two I/O atomics for this thread */ - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace, pool)); - - /* reset the I/O atomics for all threads */ - for (i = 0; i < count; ++i) - { - SVN_ERR(svn_named_atomic__get(&atomic, - ns, - apr_pstrcat(pool, - ATOMIC_NAME, - apr_itoa(pool, i), - NULL), - TRUE)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - } - - SVN_ERR(svn_named_atomic__get(&atomic, ns, "counter", TRUE)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - - return SVN_NO_ERROR; -} - -#if APR_HAS_THREADS - -/* our thread function type - */ -typedef svn_error_t *(*thread_func_t)(int, int, int, apr_pool_t *); - -/* Per-thread input and output data. - */ -struct thread_baton -{ - int thread_count; - int thread_no; - int iterations; - svn_error_t *result; - thread_func_t func; -}; - -/* APR thread function implementation: A wrapper around baton->func that - * handles the svn_error_t return value. - */ -static void * -APR_THREAD_FUNC test_thread(apr_thread_t *thread, void *baton) -{ - struct thread_baton *params = baton; - apr_pool_t *pool = svn_pool_create_ex(NULL, NULL); - - params->result = (*params->func)(params->thread_no, - params->thread_count, - params->iterations, - pool); - svn_pool_destroy(pool); - apr_thread_exit(thread, APR_SUCCESS); - - return NULL; -} - -/* Runs FUNC in COUNT concurrent threads ITERATION times and combines the - * results. 
- */ -static svn_error_t * -run_threads(apr_pool_t *pool, int count, int iterations, thread_func_t func) -{ - apr_status_t status; - int i; - svn_error_t *error = SVN_NO_ERROR; - - /* all threads and their I/O data */ - apr_thread_t **threads = apr_palloc(pool, count * sizeof(*threads)); - struct thread_baton *batons = apr_palloc(pool, count * sizeof(*batons)); - - /* start threads */ - for (i = 0; i < count; ++i) - { - batons[i].thread_count = count; - batons[i].thread_no = i; - batons[i].iterations = iterations; - batons[i].func = func; - - status = apr_thread_create(&threads[i], - NULL, - test_thread, - &batons[i], - pool); - if (status != APR_SUCCESS) - SVN_ERR(svn_error_wrap_apr(status, "could not create a thread")); - } - - /* Wait for threads to finish and return result. */ - for (i = 0; i < count; ++i) - { - apr_status_t retval; - status = apr_thread_join(&retval, threads[i]); - if (status != APR_SUCCESS) - SVN_ERR(svn_error_wrap_apr(status, "waiting for thread's end failed")); - - if (batons[i].result) - error = svn_error_compose_create (error, svn_error_quick_wrap - (batons[i].result, apr_psprintf(pool, "Thread %d failed", i))); - } - - return error; -} -#endif - -/* Runs PROC in COUNT concurrent worker processes and check the results. 
- */ -static svn_error_t * -run_procs(apr_pool_t *pool, const char *proc, int count, int iterations) -{ - int i, k; - svn_error_t *error = SVN_NO_ERROR; - const char * directory = NULL; - - /* all processes and their I/O data */ - apr_proc_t *process = apr_palloc(pool, count * sizeof(*process)); - apr_file_t *common_stdout = NULL; - apr_file_open_stdout(&common_stdout, pool); - - SVN_ERR(adjust_proc_path(&proc, &directory, pool)); - - /* start sub-processes */ - for (i = 0; i < count; ++i) - { - const char * args[6]; - - args[0] = proc; - args[1] = apr_itoa(pool, i); - args[2] = apr_itoa(pool, count); - args[3] = apr_itoa(pool, iterations); - args[4] = name_namespace; - args[5] = NULL; - - error = svn_io_start_cmd3(&process[i], - directory, /* working directory */ - args[0], - args, - NULL, /* environment */ - FALSE, /* no handle inheritance */ - FALSE, /* no STDIN pipe */ - NULL, - FALSE, /* consolidate into 1 STDOUT */ - common_stdout, - FALSE, /* no STDERR pipe */ - NULL, - pool); - if (error) - { - /* dump program name and parameters */ - for (k = 0; k < sizeof(args) / sizeof(args[0]); ++k) - if (args[k]) - printf(k == 0 ? "%s\n" : " %s\n", args[k]); - - if (directory) - printf("working folder %s:\n", directory); - - return error; - } - } - - /* Wait for sub-processes to finish and return result. */ - for (i = 0; i < count; ++i) - { - const char *cmd = apr_psprintf(pool, - "named_atomic-test-proc %d %d %d", - i, count, iterations); - error = svn_error_compose_create(error, - svn_io_wait_for_cmd(&process[i], - cmd, NULL, NULL, - pool)); - } - - return error; -} - -/* Set SUGGESTED_ITERATIONS to a value that COUNT workers will take - * about 1 second to execute. 
- */ -static svn_error_t * -calibrate_iterations(apr_pool_t *pool, int count) -{ - apr_time_t start; - int calib_iterations; - double taken = 0.0; - - /* increase iterations until we pass the 100ms mark */ - - for (calib_iterations = 10; taken < 100000.0; calib_iterations *= 2) - { - apr_pool_t *scratch = svn_pool_create(pool); - SVN_ERR(init_concurrency_test_shm(scratch, count)); - - start = apr_time_now(); - SVN_ERR(run_procs(pool, TEST_PROC, count, calib_iterations)); - - taken = (double)(apr_time_now() - start); - svn_pool_destroy(scratch); - } - - /* scale that to 1s */ - - suggested_iterations = (int)(1000000.0 / taken * calib_iterations); - - return SVN_NO_ERROR; -} - -/* Find out how far the system will scale, i.e. how many workers can be - * run concurrently without experiencing significant slowdowns. - * Sets HW_THREAD_COUNT to a value of 2 .. 32 (limit the system impact in - * case our heuristics fail) and determines the number of iterations. - * Can be called multiple times but will skip the calculations after the - * first successful run. - */ -static svn_error_t * -calibrate_concurrency(apr_pool_t *pool) -{ - if (hw_thread_count == 0) - { - /* these parameters should be ok even on very slow machines */ - hw_thread_count = 2; - suggested_iterations = 100; - - /* if we've got a proper machine and OS setup, let's prepare for - * some real testing */ - if (svn_named_atomic__is_efficient() && proc_found(TEST_PROC, pool)) - { - SVN_ERR(calibrate_iterations(pool, 2)); - for (; hw_thread_count < 32; hw_thread_count *= 2) - { - int saved_suggestion = suggested_iterations; - - /* run with an additional core to spare - * (even low CPU usage might cause heavy context switching) */ - SVN_ERR(calibrate_iterations(pool, hw_thread_count * 2 + 1)); - if (suggested_iterations < 100000) - { - /* Machines with only a small number of cores are prone - * to inconsistent performance due context switching. - * Reduce the number of iterations on those machines. 
*/ - suggested_iterations = hw_thread_count > 2 - ? saved_suggestion - : saved_suggestion / 2; - break; - } - } - } - - printf("using %d cores for %d iterations\n", hw_thread_count, - suggested_iterations); - } - - return SVN_NO_ERROR; -} - -/* The individual tests */ - -static svn_error_t * -test_basics(apr_pool_t *pool) -{ - svn_atomic_namespace__t *ns; - svn_named_atomic__t *atomic; - apr_int64_t value; - - SVN_ERR(init_test_shm(pool)); - - /* Use a separate namespace for our tests isolate them from production */ - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace, pool)); - - /* Test a non-existing atomic */ - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME "x", FALSE)); - SVN_TEST_ASSERT(atomic == NULL); - - /* Now, we auto-create it */ - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME, TRUE)); - SVN_TEST_ASSERT(atomic != NULL); - - /* The default value should be 0 */ - SVN_TEST_ASSERT_ERROR(svn_named_atomic__read(&value, NULL), - SVN_ERR_BAD_ATOMIC); - value = 1; - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 0); - - /* Write should return the previous value. 
*/ - SVN_TEST_ASSERT_ERROR(svn_named_atomic__write(&value, 0, NULL), - SVN_ERR_BAD_ATOMIC); - value = 1; - SVN_ERR(svn_named_atomic__write(&value, 21, atomic)); - SVN_TEST_ASSERT(value == 0); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 21); - - SVN_ERR(svn_named_atomic__write(&value, 42, atomic)); - SVN_TEST_ASSERT(value == 21); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 42); - - SVN_ERR(svn_named_atomic__write(NULL, 17, atomic)); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 17); - - /* Adding & subtracting values */ - SVN_TEST_ASSERT_ERROR(svn_named_atomic__add(&value, 0, NULL), - SVN_ERR_BAD_ATOMIC); - SVN_ERR(svn_named_atomic__add(&value, 25, atomic)); - SVN_TEST_ASSERT(value == 42); - SVN_ERR(svn_named_atomic__add(NULL, 47, atomic)); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 89); - - SVN_ERR(svn_named_atomic__add(&value, -25, atomic)); - SVN_TEST_ASSERT(value == 64); - SVN_ERR(svn_named_atomic__add(NULL, -22, atomic)); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 42); - - /* Compare-and-exchange */ - SVN_TEST_ASSERT_ERROR(svn_named_atomic__cmpxchg(&value, 0, 0, NULL), - SVN_ERR_BAD_ATOMIC); - value = 1; - SVN_ERR(svn_named_atomic__cmpxchg(&value, 99, 41, atomic)); - SVN_TEST_ASSERT(value == 42); - - value = 1; - SVN_ERR(svn_named_atomic__cmpxchg(&value, 98, 42, atomic)); - SVN_TEST_ASSERT(value == 42); - SVN_ERR(svn_named_atomic__cmpxchg(&value, 67, 98, atomic)); - SVN_TEST_ASSERT(value == 98); - - SVN_ERR(svn_named_atomic__cmpxchg(NULL, 42, 67, atomic)); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 42); - - return SVN_NO_ERROR; -} - -static svn_error_t * -test_bignums(apr_pool_t *pool) -{ - svn_atomic_namespace__t *ns; - svn_named_atomic__t *atomic; - apr_int64_t value; - - SVN_ERR(init_test_shm(pool)); - - /* Use a separate namespace for our tests isolate 
them from production */ - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace, pool)); - - /* Auto-create our atomic variable */ - SVN_ERR(svn_named_atomic__get(&atomic, ns, ATOMIC_NAME, TRUE)); - SVN_TEST_ASSERT(atomic != NULL); - - /* Write should return the previous value. */ - - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic)); - value = 1; - SVN_ERR(svn_named_atomic__write(&value, 21 * HUGE_VALUE, atomic)); - SVN_TEST_ASSERT(value == 0 * HUGE_VALUE); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 21 * HUGE_VALUE); - - SVN_ERR(svn_named_atomic__write(&value, 17 * HUGE_VALUE, atomic)); - SVN_TEST_ASSERT(value == 21 * HUGE_VALUE); - - /* Adding & subtracting values */ - SVN_ERR(svn_named_atomic__add(&value, 25 * HUGE_VALUE, atomic)); - SVN_TEST_ASSERT(value == 42 * HUGE_VALUE); - SVN_ERR(svn_named_atomic__add(&value, -25 * HUGE_VALUE, atomic)); - SVN_TEST_ASSERT(value == 17 * HUGE_VALUE); - - /* Compare-and-exchange */ - value = 1; - SVN_ERR(svn_named_atomic__cmpxchg(&value, 99 * HUGE_VALUE, 41 * HUGE_VALUE, atomic)); - SVN_TEST_ASSERT(value == 17 * HUGE_VALUE); - - value = 1; - SVN_ERR(svn_named_atomic__cmpxchg(&value, 98 * HUGE_VALUE, 17 * HUGE_VALUE, atomic)); - SVN_TEST_ASSERT(value == 17 * HUGE_VALUE); - SVN_ERR(svn_named_atomic__read(&value, atomic)); - SVN_TEST_ASSERT(value == 98 * HUGE_VALUE); - - return SVN_NO_ERROR; -} - -static svn_error_t * -test_multiple_atomics(apr_pool_t *pool) -{ - svn_atomic_namespace__t *ns; - svn_named_atomic__t *atomic1; - svn_named_atomic__t *atomic2; - svn_named_atomic__t *atomic1_alias; - svn_named_atomic__t *atomic2_alias; - apr_int64_t value1; - apr_int64_t value2; - - SVN_ERR(init_test_shm(pool)); - - /* Use a separate namespace for our tests isolate them from production */ - SVN_ERR(svn_atomic_namespace__create(&ns, name_namespace, pool)); - - /* Create two atomics */ - SVN_ERR(svn_named_atomic__get(&atomic1, ns, ATOMIC_NAME "1", TRUE)); - SVN_ERR(svn_named_atomic__get(&atomic2, ns, 
ATOMIC_NAME "2", TRUE)); - SVN_TEST_ASSERT(atomic1 != NULL); - SVN_TEST_ASSERT(atomic2 != NULL); - SVN_TEST_ASSERT(atomic1 != atomic2); - - /* Get aliases to those */ - SVN_ERR(svn_named_atomic__get(&atomic1_alias, ns, ATOMIC_NAME "1", TRUE)); - SVN_ERR(svn_named_atomic__get(&atomic2_alias, ns, ATOMIC_NAME "2", TRUE)); - SVN_TEST_ASSERT(atomic1 == atomic1_alias); - SVN_TEST_ASSERT(atomic2 == atomic2_alias); - - /* The atomics shall not overlap, i.e. changes to one do not affect the other */ - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic1)); - SVN_ERR(svn_named_atomic__write(NULL, 0, atomic2)); - SVN_ERR(svn_named_atomic__write(&value1, 21 * HUGE_VALUE, atomic1)); - SVN_ERR(svn_named_atomic__write(&value2, 42 * HUGE_VALUE, atomic2)); - SVN_TEST_ASSERT(value1 == 0); - SVN_TEST_ASSERT(value2 == 0); - - SVN_ERR(svn_named_atomic__read(&value1, atomic1)); - SVN_ERR(svn_named_atomic__read(&value2, atomic2)); - SVN_TEST_ASSERT(value1 == 21 * HUGE_VALUE); - SVN_TEST_ASSERT(value2 == 42 * HUGE_VALUE); - - SVN_ERR(svn_named_atomic__add(&value1, 25 * HUGE_VALUE, atomic1)); - SVN_ERR(svn_named_atomic__add(&value2, -25 * HUGE_VALUE, atomic2)); - SVN_TEST_ASSERT(value1 == 46 * HUGE_VALUE); - SVN_TEST_ASSERT(value2 == 17 * HUGE_VALUE); - - value1 = 1; - value2 = 1; - SVN_ERR(svn_named_atomic__cmpxchg(&value1, 4 * HUGE_VALUE, 46 * HUGE_VALUE, atomic1)); - SVN_ERR(svn_named_atomic__cmpxchg(&value2, 98 * HUGE_VALUE, 17 * HUGE_VALUE, atomic2)); - SVN_TEST_ASSERT(value1 == 46 * HUGE_VALUE); - SVN_TEST_ASSERT(value2 == 17 * HUGE_VALUE); - - SVN_ERR(svn_named_atomic__read(&value1, atomic1)); - SVN_ERR(svn_named_atomic__read(&value2, atomic2)); - SVN_TEST_ASSERT(value1 == 4 * HUGE_VALUE); - SVN_TEST_ASSERT(value2 == 98 * HUGE_VALUE); - - return SVN_NO_ERROR; -} - -static svn_error_t * -test_namespaces(apr_pool_t *pool) -{ - svn_atomic_namespace__t *test_namespace1; - svn_atomic_namespace__t *test_namespace1_alias; - svn_atomic_namespace__t *test_namespace2; - svn_atomic_namespace__t 
*test_namespace2_alias; - svn_named_atomic__t *atomic1; - svn_named_atomic__t *atomic2; - svn_named_atomic__t *atomic1_alias; - svn_named_atomic__t *atomic2_alias; - apr_int64_t value; - - SVN_ERR(init_test_shm(pool)); - - /* Use a separate namespace for our tests isolate them from production */ - SVN_ERR(svn_atomic_namespace__create(&test_namespace1, name_namespace1, pool)); - SVN_ERR(svn_atomic_namespace__create(&test_namespace1_alias, name_namespace1, pool)); - SVN_ERR(svn_atomic_namespace__create(&test_namespace2, name_namespace2, pool)); - SVN_ERR(svn_atomic_namespace__create(&test_namespace2_alias, name_namespace2, pool)); - - /* Create two atomics with the same name in different namespaces */ - SVN_ERR(svn_named_atomic__get(&atomic1, test_namespace1, ATOMIC_NAME, TRUE)); - SVN_ERR(svn_named_atomic__get(&atomic1_alias, test_namespace1_alias, ATOMIC_NAME, FALSE)); - SVN_ERR(svn_named_atomic__get(&atomic2, test_namespace2, ATOMIC_NAME, TRUE)); - SVN_ERR(svn_named_atomic__get(&atomic2_alias, test_namespace2_alias, ATOMIC_NAME, FALSE)); - SVN_TEST_ASSERT(atomic1 != atomic1_alias); - SVN_TEST_ASSERT(atomic1_alias != NULL); - SVN_TEST_ASSERT(atomic2 != atomic2_alias); - SVN_TEST_ASSERT(atomic2_alias != NULL); - - /* Write data to our atomics */ - SVN_ERR(svn_named_atomic__write(NULL, 21 * HUGE_VALUE, atomic1)); - SVN_ERR(svn_named_atomic__write(NULL, 42 * HUGE_VALUE, atomic2)); - - /* Now check who sees which value */ - SVN_ERR(svn_named_atomic__read(&value, atomic1)); - SVN_TEST_ASSERT(value == 21 * HUGE_VALUE); - SVN_ERR(svn_named_atomic__read(&value, atomic2)); - SVN_TEST_ASSERT(value == 42 * HUGE_VALUE); - - SVN_ERR(svn_named_atomic__read(&value, atomic1_alias)); - SVN_TEST_ASSERT(value == 21 * HUGE_VALUE); - SVN_ERR(svn_named_atomic__read(&value, atomic2_alias)); - SVN_TEST_ASSERT(value == 42 * HUGE_VALUE); - - return SVN_NO_ERROR; -} - -static svn_error_t * -test_multithreaded(apr_pool_t *pool) -{ -#if APR_HAS_THREADS - SVN_ERR(init_test_shm(pool)); - - 
SVN_ERR(calibrate_concurrency(pool)); - - SVN_ERR(init_concurrency_test_shm(pool, hw_thread_count)); - SVN_ERR(run_threads(pool, hw_thread_count, suggested_iterations, test_pipeline)); - - return SVN_NO_ERROR; -#else - return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, NULL); -#endif -} - -static svn_error_t * -test_multiprocess(apr_pool_t *pool) -{ - if (!proc_found(TEST_PROC, pool)) - return svn_error_wrap_apr(SVN_ERR_TEST_SKIPPED, - "executable '%s' not found", TEST_PROC); - - SVN_ERR(init_test_shm(pool)); - - SVN_ERR(calibrate_concurrency(pool)); - - SVN_ERR(init_concurrency_test_shm(pool, hw_thread_count)); - SVN_ERR(run_procs(pool, TEST_PROC, hw_thread_count, suggested_iterations)); - - return SVN_NO_ERROR; -} - -/* - ==================================================================== - If you add a new test to this file, update this array. - - (These globals are required by our included main()) -*/ - -/* An array of all test functions */ -struct svn_test_descriptor_t test_funcs[] = - { - SVN_TEST_NULL, - SVN_TEST_PASS2(test_basics, - "basic r/w access to a single atomic"), - SVN_TEST_PASS2(test_bignums, - "atomics must be 64 bits"), - SVN_TEST_PASS2(test_multiple_atomics, - "basic r/w access to multiple atomics"), - SVN_TEST_PASS2(test_namespaces, - "use different namespaces"), - SVN_TEST_PASS2(test_multithreaded, - "multithreaded access to atomics"), - SVN_TEST_PASS2(test_multiprocess, - "multi-process access to atomics"), - SVN_TEST_NULL - }; diff --git a/subversion/tests/libsvn_subr/opt-test.c b/subversion/tests/libsvn_subr/opt-test.c index a7c570a..c336d81 100644 --- a/subversion/tests/libsvn_subr/opt-test.c +++ b/subversion/tests/libsvn_subr/opt-test.c @@ -193,7 +193,9 @@ test_svn_opt_args_to_target_array2(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_parse_peg_rev, @@ -202,3 +204,5 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_opt_args_to_target_array2"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/packed-data-test.c b/subversion/tests/libsvn_subr/packed-data-test.c new file mode 100644 index 0000000..d5d6a20 --- /dev/null +++ b/subversion/tests/libsvn_subr/packed-data-test.c @@ -0,0 +1,577 @@ +/* + * packed-data-test.c: a collection of svn_packed__* tests + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +/* ==================================================================== + To add tests, look toward the bottom of this file. 
+ +*/ + + + +#include <stdio.h> +#include <string.h> +#include <apr_pools.h> + +#include "../svn_test.h" + +#include "svn_error.h" +#include "svn_string.h" /* This includes <apr_*.h> */ +#include "private/svn_packed_data.h" + +/* Take the WRITE_ROOT, serialize its contents, parse it again into a new + * data root and return it in *READ_ROOT. Allocate it in POOL. + */ +static svn_error_t* +get_read_root(svn_packed__data_root_t **read_root, + svn_packed__data_root_t *write_root, + apr_pool_t *pool) +{ + svn_stringbuf_t *stream_buffer = svn_stringbuf_create_empty(pool); + svn_stream_t *stream; + + stream = svn_stream_from_stringbuf(stream_buffer, pool); + SVN_ERR(svn_packed__data_write(stream, write_root, pool)); + SVN_ERR(svn_stream_close(stream)); + + stream = svn_stream_from_stringbuf(stream_buffer, pool); + SVN_ERR(svn_packed__data_read(read_root, stream, pool, pool)); + SVN_ERR(svn_stream_close(stream)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_empty_container(apr_pool_t *pool) +{ + /* create an empty, readable container */ + svn_packed__data_root_t *root = svn_packed__data_create_root(pool); + SVN_ERR(get_read_root(&root, root, pool)); + + /* there should be no sub-streams */ + SVN_TEST_ASSERT(svn_packed__first_int_stream(root) == NULL); + SVN_TEST_ASSERT(svn_packed__first_byte_stream(root) == NULL); + + return SVN_NO_ERROR; +} + +/* Check that COUNT numbers from VALUES can be written as uints to a + * packed data stream and can be read from that stream again. Deltify + * data in the stream if DIFF is set. Use POOL for allocations. 
+ */ +static svn_error_t * +verify_uint_stream(const apr_uint64_t *values, + apr_size_t count, + svn_boolean_t diff, + apr_pool_t *pool) +{ + svn_packed__data_root_t *root = svn_packed__data_create_root(pool); + svn_packed__int_stream_t *stream + = svn_packed__create_int_stream(root, diff, FALSE); + + apr_size_t i; + for (i = 0; i < count; ++i) + svn_packed__add_uint(stream, values[i]); + + SVN_ERR(get_read_root(&root, root, pool)); + + /* the container should contain exactly one int stream */ + stream = svn_packed__first_int_stream(root); + SVN_TEST_ASSERT(stream); + SVN_TEST_ASSERT(!svn_packed__next_int_stream(stream)); + SVN_TEST_ASSERT(!svn_packed__first_byte_stream(root)); + + /* the stream shall contain exactly the items we put into it */ + SVN_TEST_ASSERT(svn_packed__int_count(stream) == count); + for (i = 0; i < count; ++i) + SVN_TEST_ASSERT(svn_packed__get_uint(stream) == values[i]); + + /* reading beyond eos should return 0 values */ + SVN_TEST_ASSERT(svn_packed__get_uint(stream) == 0); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_uint_stream(apr_pool_t *pool) +{ + enum { COUNT = 8 }; + const apr_uint64_t values[COUNT] = + { + APR_UINT64_MAX, + 0, + APR_UINT64_MAX, + APR_UINT64_C(0x8000000000000000), + 0, + APR_UINT64_C(0x7fffffffffffffff), + APR_UINT64_C(0x1234567890abcdef), + APR_UINT64_C(0x0fedcba987654321), + }; + + SVN_ERR(verify_uint_stream(values, COUNT, FALSE, pool)); + SVN_ERR(verify_uint_stream(values, COUNT, TRUE, pool)); + + return SVN_NO_ERROR; +} + +/* Check that COUNT numbers from VALUES can be written as signed ints to a + * packed data stream and can be read from that stream again. Deltify + * data in the stream if DIFF is set. Use POOL for allocations. 
+ */ +static svn_error_t * +verify_int_stream(const apr_int64_t *values, + apr_size_t count, + svn_boolean_t diff, + apr_pool_t *pool) +{ + svn_packed__data_root_t *root = svn_packed__data_create_root(pool); + svn_packed__int_stream_t *stream + = svn_packed__create_int_stream(root, diff, TRUE); + + apr_size_t i; + for (i = 0; i < count; ++i) + svn_packed__add_int(stream, values[i]); + + SVN_ERR(get_read_root(&root, root, pool)); + + /* the container should contain exactly one int stream */ + stream = svn_packed__first_int_stream(root); + SVN_TEST_ASSERT(stream); + SVN_TEST_ASSERT(!svn_packed__next_int_stream(stream)); + SVN_TEST_ASSERT(!svn_packed__first_byte_stream(root)); + + /* the stream shall contain exactly the items we put into it */ + SVN_TEST_ASSERT(svn_packed__int_count(stream) == count); + for (i = 0; i < count; ++i) + SVN_TEST_ASSERT(svn_packed__get_int(stream) == values[i]); + + /* reading beyond eos should return 0 values */ + SVN_TEST_ASSERT(svn_packed__get_int(stream) == 0); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_int_stream(apr_pool_t *pool) +{ + enum { COUNT = 7 }; + const apr_int64_t values[COUNT] = + { + APR_INT64_MAX, /* extreme value */ + APR_INT64_MIN, /* other extreme, creating maximum delta to predecessor */ + 0, /* delta to predecessor > APR_INT64_MAX */ + APR_INT64_MAX, /* max value, again */ + -APR_INT64_MAX, /* _almost_ min value, almost max delta */ + APR_INT64_C(0x1234567890abcdef), /* some arbitrary value */ + -APR_INT64_C(0x0fedcba987654321), /* arbitrary value, different sign */ + }; + + SVN_ERR(verify_int_stream(values, COUNT, FALSE, pool)); + SVN_ERR(verify_int_stream(values, COUNT, TRUE, pool)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_byte_stream(apr_pool_t *pool) +{ + enum { COUNT = 6 }; + const svn_string_t values[COUNT] = + { + { "", 0 }, + { "\0", 1 }, + { "\0", 1 }, + { "some text", 9 }, + { "", 0 }, + { "some more", 9 } + }; + + svn_packed__data_root_t *root = 
svn_packed__data_create_root(pool); + svn_packed__byte_stream_t *stream + = svn_packed__create_bytes_stream(root); + + apr_size_t i; + for (i = 0; i < COUNT; ++i) + svn_packed__add_bytes(stream, values[i].data, values[i].len); + + SVN_ERR(get_read_root(&root, root, pool)); + + /* the container should contain exactly one byte stream */ + stream = svn_packed__first_byte_stream(root); + SVN_TEST_ASSERT(stream); + SVN_TEST_ASSERT(!svn_packed__next_byte_stream(stream)); + + /* the stream shall contain exactly the items we put into it */ + SVN_TEST_ASSERT(svn_packed__byte_count(stream) == 20); + for (i = 0; i < COUNT; ++i) + { + svn_string_t string; + string.data = svn_packed__get_bytes(stream, &string.len); + + SVN_TEST_ASSERT(string.len == values[i].len); + SVN_TEST_ASSERT(!memcmp(string.data, values[i].data, string.len)); + } + + /* reading beyond eos should return 0 values */ + SVN_TEST_ASSERT(svn_packed__byte_count(stream) == 0); + + return SVN_NO_ERROR; +} + +/* Some simple structure that we use as sub-structure to BASE_RECORD_T. + * Have it contain numbers and strings. + */ +typedef struct sub_record_t +{ + int sub_counter; + svn_string_t text; +} sub_record_t; + +/* signed / unsigned, 64 bits and shorter, diff-able and not, multiple + * strings, multiple sub-records. 
*/ +typedef struct base_record_t +{ + int counter; + svn_string_t description; + apr_uint64_t large_unsigned1; + apr_uint64_t large_unsigned2; + const sub_record_t *left_subs; + apr_int64_t large_signed1; + apr_int64_t large_signed2; + unsigned prime; + const sub_record_t *right_subs; + svn_string_t binary; +} base_record_t; + +/* our test data */ +enum {SUB_RECORD_COUNT = 7}; +enum {BASE_RECORD_COUNT = 4}; + +static const sub_record_t sub_records[SUB_RECORD_COUNT] = +{ + { 6, { "this is quite a longish piece of text", 37} }, + { 5, { "x", 1} }, + { 4, { "not empty", 9} }, + { 3, { "another bit of text", 19} }, + { 2, { "", 0} }, + { 1, { "first sub-record", 16} }, + { 0 } +}; + +static const base_record_t test_data[BASE_RECORD_COUNT] = +{ + { 1, { "maximum", 7}, + APR_UINT64_MAX, APR_UINT64_MAX, sub_records, + APR_INT64_MAX, APR_INT64_MAX, 9967, sub_records + 1, + { "\0\1\2\3\4\5\6\7\x8\x9\xa", 11} }, + + { 2, { "minimum", 7}, + 0, 0, sub_records + 6, + APR_INT64_MIN, APR_INT64_MIN, 6029, sub_records + 5, + { "X\0\0Y", 4} }, + + { 3, { "mean", 4}, + APR_UINT64_C(0x8000000000000000), APR_UINT64_C(0x8000000000000000), + sub_records + 2, + 0, 0, 653, sub_records + 3, + { "\xff\0\1\2\3\4\5\6\7\x8\x9\xa", 12} }, + + { 4, { "random", 6}, + APR_UINT64_C(0x1234567890abcdef), APR_UINT64_C(0xfedcba987654321), + sub_records + 4, + APR_INT64_C(0x1234567890abcd), APR_INT64_C(-0xedcba987654321), 7309, + sub_records + 1, + { "\x80\x7f\0\1\6", 5} } +}; + +/* Serialize RECORDS into INT_STREAM and TEXT_STREAM. Stop when the + * current record's SUB_COUNTER is 0. 
+ */ +static unsigned +pack_subs(svn_packed__int_stream_t *int_stream, + svn_packed__byte_stream_t *text_stream, + const sub_record_t *records) +{ + unsigned count; + for (count = 0; records[count].sub_counter; ++count) + { + svn_packed__add_int(int_stream, records[count].sub_counter); + svn_packed__add_bytes(text_stream, + records[count].text.data, + records[count].text.len); + } + + return count; +} + +/* Serialize COUNT records starting from DATA into a packed data container + * allocated in POOL and return the container root. + */ +static svn_packed__data_root_t * +pack(const base_record_t *data, + apr_size_t count, + apr_pool_t *pool) +{ + apr_size_t i; + svn_packed__data_root_t *root = svn_packed__data_create_root(pool); + svn_packed__int_stream_t *base_stream + = svn_packed__create_int_stream(root, FALSE, FALSE); + svn_packed__int_stream_t *sub_count_stream + = svn_packed__create_int_stream(root, TRUE, FALSE); + + svn_packed__int_stream_t *left_sub_stream + = svn_packed__create_int_stream(root, FALSE, TRUE); + svn_packed__int_stream_t *right_sub_stream + = svn_packed__create_int_stream(root, FALSE, TRUE); + + svn_packed__byte_stream_t *base_description_stream + = svn_packed__create_bytes_stream(root); + svn_packed__byte_stream_t *base_binary_stream + = svn_packed__create_bytes_stream(root); + svn_packed__byte_stream_t *sub_text_stream + = svn_packed__create_bytes_stream(root); + + svn_packed__create_int_substream(base_stream, TRUE, TRUE); /* counter */ + svn_packed__create_int_substream(base_stream, TRUE, FALSE); /* large_unsigned1 */ + svn_packed__create_int_substream(base_stream, FALSE, FALSE); /* large_unsigned2 */ + svn_packed__create_int_substream(base_stream, TRUE, TRUE); /* large_signed1 */ + svn_packed__create_int_substream(base_stream, FALSE, TRUE); /* large_signed2 */ + svn_packed__create_int_substream(base_stream, TRUE, FALSE); /* prime */ + + for (i = 0; i < count; ++i) + { + svn_packed__add_int(base_stream, data[i].counter); + 
svn_packed__add_bytes(base_description_stream, + data[i].description.data, + data[i].description.len); + svn_packed__add_uint(base_stream, data[i].large_unsigned1); + svn_packed__add_uint(base_stream, data[i].large_unsigned2); + svn_packed__add_uint(sub_count_stream, + pack_subs(left_sub_stream, sub_text_stream, + data[i].left_subs)); + + svn_packed__add_int(base_stream, data[i].large_signed1); + svn_packed__add_int(base_stream, data[i].large_signed2); + svn_packed__add_uint(base_stream, data[i].prime); + svn_packed__add_uint(sub_count_stream, + pack_subs(right_sub_stream, sub_text_stream, + data[i].right_subs)); + + svn_packed__add_bytes(base_binary_stream, + data[i].binary.data, + data[i].binary.len); + } + + return root; +} + +/* Deserialize COUNT records from INT_STREAM and TEXT_STREAM and return + * the result allocated in POOL. + */ +static sub_record_t * +unpack_subs(svn_packed__int_stream_t *int_stream, + svn_packed__byte_stream_t *text_stream, + apr_size_t count, + apr_pool_t *pool) +{ + sub_record_t *records = apr_pcalloc(pool, (count + 1) * sizeof(*records)); + + apr_size_t i; + for (i = 0; i < count; ++i) + { + records[i].sub_counter = (int) svn_packed__get_int(int_stream); + records[i].text.data = svn_packed__get_bytes(text_stream, + &records[i].text.len); + } + + return records; +} + +/* Deserialize all records from the packed data container ROOT, allocate + * them in POOL and return them. Set *COUNT to the number of records read. 
+ */ +static base_record_t * +unpack(apr_size_t *count, + svn_packed__data_root_t *root, + apr_pool_t *pool) +{ + svn_packed__int_stream_t *base_stream + = svn_packed__first_int_stream(root); + svn_packed__int_stream_t *sub_count_stream + = svn_packed__next_int_stream(base_stream); + svn_packed__byte_stream_t *base_description_stream + = svn_packed__first_byte_stream(root); + svn_packed__byte_stream_t *base_binary_stream + = svn_packed__next_byte_stream(base_description_stream); + svn_packed__byte_stream_t *sub_text_stream + = svn_packed__next_byte_stream(base_binary_stream); + + svn_packed__int_stream_t *left_sub_stream + = svn_packed__next_int_stream(sub_count_stream); + svn_packed__int_stream_t *right_sub_stream + = svn_packed__next_int_stream(left_sub_stream); + + apr_size_t i; + base_record_t *data; + *count = svn_packed__int_count(sub_count_stream) / 2; + data = apr_pcalloc(pool, *count * sizeof(*data)); + + for (i = 0; i < *count; ++i) + { + data[i].counter = (int) svn_packed__get_int(base_stream); + data[i].description.data + = svn_packed__get_bytes(base_description_stream, + &data[i].description.len); + data[i].large_unsigned1 = svn_packed__get_uint(base_stream); + data[i].large_unsigned2 = svn_packed__get_uint(base_stream); + data[i].left_subs = unpack_subs(left_sub_stream, sub_text_stream, + (apr_size_t)svn_packed__get_uint(sub_count_stream), + pool); + + data[i].large_signed1 = svn_packed__get_int(base_stream); + data[i].large_signed2 = svn_packed__get_int(base_stream); + data[i].prime = (unsigned) svn_packed__get_uint(base_stream); + data[i].right_subs = unpack_subs(right_sub_stream, sub_text_stream, + (apr_size_t)svn_packed__get_uint(sub_count_stream), + pool); + + data[i].binary.data + = svn_packed__get_bytes(base_binary_stream, + &data[i].binary.len); + } + + return data; +} + +/* Assert that LHS and RHS contain the same binary data (i.e. don't test + * for a terminating NUL). 
+ */ +static svn_error_t * +compare_binary(const svn_string_t *lhs, + const svn_string_t *rhs) +{ + SVN_TEST_ASSERT(lhs->len == rhs->len); + SVN_TEST_ASSERT(!memcmp(lhs->data, rhs->data, rhs->len)); + + return SVN_NO_ERROR; +} + +/* Assert that LHS and RHS contain the same number of records with the + * same contents. + */ +static svn_error_t * +compare_subs(const sub_record_t *lhs, + const sub_record_t *rhs) +{ + for (; lhs->sub_counter; ++lhs, ++rhs) + { + SVN_TEST_ASSERT(lhs->sub_counter == rhs->sub_counter); + SVN_ERR(compare_binary(&lhs->text, &rhs->text)); + } + + SVN_TEST_ASSERT(lhs->sub_counter == rhs->sub_counter); + return SVN_NO_ERROR; +} + +/* Assert that the first COUNT records in LHS and RHS have the same contents. + */ +static svn_error_t * +compare(const base_record_t *lhs, + const base_record_t *rhs, + apr_size_t count) +{ + apr_size_t i; + for (i = 0; i < count; ++i) + { + SVN_TEST_ASSERT(lhs[i].counter == rhs[i].counter); + SVN_ERR(compare_binary(&lhs[i].description, &rhs[i].description)); + SVN_TEST_ASSERT(lhs[i].large_unsigned1 == rhs[i].large_unsigned1); + SVN_TEST_ASSERT(lhs[i].large_unsigned2 == rhs[i].large_unsigned2); + SVN_ERR(compare_subs(lhs[i].left_subs, rhs[i].left_subs)); + SVN_TEST_ASSERT(lhs[i].counter == rhs[i].counter); + SVN_TEST_ASSERT(lhs[i].large_signed1 == rhs[i].large_signed1); + SVN_TEST_ASSERT(lhs[i].large_signed2 == rhs[i].large_signed2); + SVN_TEST_ASSERT(lhs[i].prime == rhs[i].prime); + SVN_ERR(compare_subs(lhs[i].right_subs, rhs[i].right_subs)); + SVN_ERR(compare_binary(&lhs[i].binary, &rhs[i].binary)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_empty_structure(apr_pool_t *pool) +{ + base_record_t *unpacked; + apr_size_t count; + + /* create an empty, readable container */ + svn_packed__data_root_t *root = pack(test_data, 0, pool); + + SVN_ERR(get_read_root(&root, root, pool)); + unpacked = unpack(&count, root, pool); + SVN_TEST_ASSERT(count == 0); + SVN_ERR(compare(unpacked, test_data, count)); + + 
return SVN_NO_ERROR; +} + +static svn_error_t * +test_full_structure(apr_pool_t *pool) +{ + base_record_t *unpacked; + apr_size_t count; + + /* create an empty, readable container */ + svn_packed__data_root_t *root = pack(test_data, BASE_RECORD_COUNT, pool); + + SVN_ERR(get_read_root(&root, root, pool)); + unpacked = unpack(&count, root, pool); + SVN_TEST_ASSERT(count == BASE_RECORD_COUNT); + SVN_ERR(compare(unpacked, test_data, count)); + + return SVN_NO_ERROR; +} + +/* An array of all test functions */ + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(test_empty_container, + "test empty container"), + SVN_TEST_PASS2(test_uint_stream, + "test a single uint stream"), + SVN_TEST_PASS2(test_int_stream, + "test a single int stream"), + SVN_TEST_PASS2(test_byte_stream, + "test a single bytes stream"), + SVN_TEST_PASS2(test_empty_structure, + "test empty, nested structure"), + SVN_TEST_PASS2(test_full_structure, + "test nested structure"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/path-test.c b/subversion/tests/libsvn_subr/path-test.c index ec35176..6f0a996 100644 --- a/subversion/tests/libsvn_subr/path-test.c +++ b/subversion/tests/libsvn_subr/path-test.c @@ -315,9 +315,9 @@ test_uri_decode(apr_pool_t *pool) const char *path; const char *result; } tests[] = { - { "http://c.r.a/s%\0008me", + { "http://c.r.a/s%\0" "8me", "http://c.r.a/s%"}, - { "http://c.r.a/s%6\000me", + { "http://c.r.a/s%6\0" "me", "http://c.r.a/s%6" }, { "http://c.r.a/s%68me", "http://c.r.a/shme" }, @@ -489,7 +489,7 @@ test_path_join(apr_pool_t *pool) if (svn_path_is_url(base)) continue; - result = svn_path_join_many(pool, base, comp, NULL); + result = svn_path_join_many(pool, base, comp, SVN_VA_NULL); if (strcmp(result, expect)) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "svn_path_join_many(\"%s\", \"%s\") returned " @@ -505,74 +505,74 @@ test_path_join(apr_pool_t *pool) "expected 
\"%s\"", \ result, expect); - TEST_MANY((pool, "abc", NULL), "abc"); - TEST_MANY((pool, "/abc", NULL), "/abc"); - TEST_MANY((pool, "/", NULL), "/"); - - TEST_MANY((pool, "abc", "def", "ghi", NULL), "abc/def/ghi"); - TEST_MANY((pool, "abc", "/def", "ghi", NULL), "/def/ghi"); - TEST_MANY((pool, "/abc", "def", "ghi", NULL), "/abc/def/ghi"); - TEST_MANY((pool, "abc", "def", "/ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "def", "/ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "/def", "/ghi", NULL), "/ghi"); - - TEST_MANY((pool, SVN_EMPTY_PATH, "def", "ghi", NULL), "def/ghi"); - TEST_MANY((pool, "abc", SVN_EMPTY_PATH, "ghi", NULL), "abc/ghi"); - TEST_MANY((pool, "abc", "def", SVN_EMPTY_PATH, NULL), "abc/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "def", SVN_EMPTY_PATH, NULL), "def"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "ghi", NULL), "ghi"); - TEST_MANY((pool, "abc", SVN_EMPTY_PATH, SVN_EMPTY_PATH, NULL), "abc"); - TEST_MANY((pool, SVN_EMPTY_PATH, "def", "/ghi", NULL), "/ghi"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/ghi", NULL), "/ghi"); - - TEST_MANY((pool, "/", "def", "ghi", NULL), "/def/ghi"); - TEST_MANY((pool, "abc", "/", "ghi", NULL), "/ghi"); - TEST_MANY((pool, "abc", "def", "/", NULL), "/"); - TEST_MANY((pool, "/", "/", "ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "/", "/", NULL), "/"); - TEST_MANY((pool, "/", SVN_EMPTY_PATH, "ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", "def", SVN_EMPTY_PATH, NULL), "/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "/", "ghi", NULL), "/ghi"); - TEST_MANY((pool, "/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, NULL), "/"); - TEST_MANY((pool, SVN_EMPTY_PATH, "/", SVN_EMPTY_PATH, NULL), "/"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/", NULL), "/"); + TEST_MANY((pool, "abc", SVN_VA_NULL), "abc"); + TEST_MANY((pool, "/abc", SVN_VA_NULL), "/abc"); + TEST_MANY((pool, "/", SVN_VA_NULL), "/"); + + TEST_MANY((pool, "abc", "def", "ghi", SVN_VA_NULL), "abc/def/ghi"); + TEST_MANY((pool, "abc", "/def", "ghi", 
SVN_VA_NULL), "/def/ghi"); + TEST_MANY((pool, "/abc", "def", "ghi", SVN_VA_NULL), "/abc/def/ghi"); + TEST_MANY((pool, "abc", "def", "/ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "def", "/ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "/def", "/ghi", SVN_VA_NULL), "/ghi"); + + TEST_MANY((pool, SVN_EMPTY_PATH, "def", "ghi", SVN_VA_NULL), "def/ghi"); + TEST_MANY((pool, "abc", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "abc/ghi"); + TEST_MANY((pool, "abc", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "abc/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "def", SVN_EMPTY_PATH, SVN_VA_NULL), "def"); + TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "ghi"); + TEST_MANY((pool, "abc", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "abc"); + TEST_MANY((pool, SVN_EMPTY_PATH, "def", "/ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/ghi", SVN_VA_NULL), "/ghi"); + + TEST_MANY((pool, "/", "def", "ghi", SVN_VA_NULL), "/def/ghi"); + TEST_MANY((pool, "abc", "/", "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "abc", "def", "/", SVN_VA_NULL), "/"); + TEST_MANY((pool, "/", "/", "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "/", "/", SVN_VA_NULL), "/"); + TEST_MANY((pool, "/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "/", "ghi", SVN_VA_NULL), "/ghi"); + TEST_MANY((pool, "/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "/"); + TEST_MANY((pool, SVN_EMPTY_PATH, "/", SVN_EMPTY_PATH, SVN_VA_NULL), "/"); + TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "/", SVN_VA_NULL), "/"); #ifdef SVN_USE_DOS_PATHS /* These will fail, see issue #2028 - TEST_MANY((pool, "X:", "def", "ghi", NULL), "X:def/ghi"); - TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", NULL), "X:ghi"); - TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, NULL), "X:def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", NULL), "X:ghi"); - TEST_MANY((pool, "X:/", "def", 
"ghi", NULL), "X:/def/ghi"); - TEST_MANY((pool, "abc", "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "abc", "def", "X:/", NULL), "X:/"); - TEST_MANY((pool, "X:/", "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:/", "X:/", "/", NULL), "/"); - TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:/", "def", SVN_EMPTY_PATH, NULL), "X:/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, NULL), "X:/"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", SVN_EMPTY_PATH, NULL), "X:/"); - TEST_MANY((pool, SVN_EMPTY_PATH, SVN_EMPTY_PATH, "X:/", NULL), "X:/"); - TEST_MANY((pool, "X:", "X:/", "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:", "X:/", "/", NULL), "/"); - - TEST_MANY((pool, "//srv/shr", "def", "ghi", NULL), "//srv/shr/def/ghi"); - TEST_MANY((pool, "//srv", "shr", "def", "ghi", NULL), "//srv/shr/def/ghi"); - TEST_MANY((pool, "//srv/shr/fld", "def", "ghi", NULL), + TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:def/ghi"); + TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:ghi"); + TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:ghi"); + TEST_MANY((pool, "X:/", "def", "ghi", SVN_VA_NULL), "X:/def/ghi"); + TEST_MANY((pool, "abc", "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "abc", "def", "X:/", SVN_VA_NULL), "X:/"); + TEST_MANY((pool, "X:/", "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:/", "X:/", "/", SVN_VA_NULL), "/"); + TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:/", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:/", SVN_EMPTY_PATH, SVN_EMPTY_PATH, SVN_VA_NULL), "X:/"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:/", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/"); + TEST_MANY((pool, 
SVN_EMPTY_PATH, SVN_EMPTY_PATH, "X:/", SVN_VA_NULL), "X:/"); + TEST_MANY((pool, "X:", "X:/", "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:", "X:/", "/", SVN_VA_NULL), "/"); + + TEST_MANY((pool, "//srv/shr", "def", "ghi", SVN_VA_NULL), "//srv/shr/def/ghi"); + TEST_MANY((pool, "//srv", "shr", "def", "ghi", SVN_VA_NULL), "//srv/shr/def/ghi"); + TEST_MANY((pool, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL), "//srv/shr/fld/def/ghi"); - TEST_MANY((pool, "//srv/shr/fld", "def", "//srv/shr", NULL), "//srv/shr"); - TEST_MANY((pool, "//srv", "shr", "//srv/shr", NULL), "//srv/shr"); - TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "ghi", NULL), + TEST_MANY((pool, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL), "//srv/shr"); + TEST_MANY((pool, "//srv", "shr", "//srv/shr", SVN_VA_NULL), "//srv/shr"); + TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "ghi", SVN_VA_NULL), "//srv/shr/fld/def/ghi"); - TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "//srv/shr", NULL), + TEST_MANY((pool, SVN_EMPTY_PATH, "//srv/shr/fld", "def", "//srv/shr", SVN_VA_NULL), "//srv/shr"); */ #else /* WIN32 or Cygwin */ - TEST_MANY((pool, "X:", "def", "ghi", NULL), "X:/def/ghi"); - TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", NULL), "X:/ghi"); - TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, NULL), "X:/def"); - TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", NULL), "X:/ghi"); + TEST_MANY((pool, "X:", "def", "ghi", SVN_VA_NULL), "X:/def/ghi"); + TEST_MANY((pool, "X:", SVN_EMPTY_PATH, "ghi", SVN_VA_NULL), "X:/ghi"); + TEST_MANY((pool, "X:", "def", SVN_EMPTY_PATH, SVN_VA_NULL), "X:/def"); + TEST_MANY((pool, SVN_EMPTY_PATH, "X:", "ghi", SVN_VA_NULL), "X:/ghi"); #endif /* non-WIN32 */ /* ### probably need quite a few more tests... 
*/ @@ -1210,6 +1210,7 @@ test_path_splitext(apr_pool_t *pool) { "yep.still/no-ext", "yep.still/no-ext", "" }, { "folder.with/period.log", "folder.with/period.", "log" }, { "period.", "period.", "" }, + { "dir/period.", "dir/period.", "" }, { "file.ends-with/period.", "file.ends-with/period.", "" }, { "two-periods..txt", "two-periods..", "txt" }, { ".dot-file", ".dot-file", "" }, @@ -1527,7 +1528,7 @@ condense_targets_tests_helper(const char* title, /* Verify the common part with the expected (prefix with cwd). */ if (*exp_common == '%') - exp_common_abs = apr_pstrcat(pool, curdir, exp_common + 1, (char *)NULL); + exp_common_abs = apr_pstrcat(pool, curdir, exp_common + 1, SVN_VA_NULL); if (strcmp(common_path, exp_common_abs) != 0) { @@ -1544,7 +1545,7 @@ condense_targets_tests_helper(const char* title, { const char * target = APR_ARRAY_IDX(condensed_targets, i, const char*); if (token && (*token == '%')) - token = apr_pstrcat(pool, curdir, token + 1, (char *)NULL); + token = apr_pstrcat(pool, curdir, token + 1, SVN_VA_NULL); if (! token || (target && (strcmp(target, token) != 0))) { @@ -1700,7 +1701,9 @@ test_path_resolve_repos_relative_url(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_path_is_child, @@ -1759,3 +1762,5 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_path_resolve_repos_relative_url"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/prefix-string-test.c b/subversion/tests/libsvn_subr/prefix-string-test.c new file mode 100644 index 0000000..e420cff --- /dev/null +++ b/subversion/tests/libsvn_subr/prefix-string-test.c @@ -0,0 +1,154 @@ +/* + * prefix-string-test.c: a collection of svn_prefix_string__* tests + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +/* ==================================================================== + To add tests, look toward the bottom of this file. 
+ +*/ + + + +#include <stdio.h> +#include <string.h> +#include <apr_pools.h> + +#include "../svn_test.h" + +#include "svn_error.h" +#include "svn_string.h" /* This includes <apr_*.h> */ +#include "private/svn_string_private.h" + +static svn_error_t * +test_empty_string(apr_pool_t *pool) +{ + svn_prefix_tree__t *tree = svn_prefix_tree__create(pool); + svn_prefix_string__t *empty = svn_prefix_string__create(tree, ""); + + /* same instance for all strings of the same value */ + SVN_TEST_ASSERT(empty == svn_prefix_string__create(tree, "")); + + /* does it actually have the right contents? */ + SVN_TEST_ASSERT(svn_prefix_string__expand(empty, pool)->len == 0); + SVN_TEST_STRING_ASSERT(svn_prefix_string__expand(empty, pool)->data, ""); + + /* strings shall be equal to themselves */ + SVN_TEST_ASSERT(0 == svn_prefix_string__compare(empty, empty)); + + return SVN_NO_ERROR; +} + +enum {TEST_CASE_COUNT = 9}; + +static const char *test_cases[TEST_CASE_COUNT] = +{ + "a longish string of sorts, longer than 7 anyway", + "some other string", + "more stuff on root", + "some shorter string", + "some short string", + "some short str", + "some short str2", + "a longish string of sorts, longer than ?! 
anyway", + "a" +}; + +static svn_error_t * +test_string_creation(apr_pool_t *pool) +{ + svn_prefix_tree__t *tree = svn_prefix_tree__create(pool); + svn_prefix_string__t *strings[TEST_CASE_COUNT]; + int i; + + /* create strings and remember their initial references */ + for (i = 0; i < TEST_CASE_COUNT; ++i) + strings[i] = svn_prefix_string__create(tree, test_cases[i]); + + /* doing this again must yield the same pointers */ + for (i = 0; i < TEST_CASE_COUNT; ++i) + SVN_TEST_ASSERT(strings[i] + == svn_prefix_string__create(tree, test_cases[i])); + + /* converting them back to strings must be the initial values */ + for (i = 0; i < TEST_CASE_COUNT; ++i) + { + svn_string_t *expanded = svn_prefix_string__expand(strings[i], pool); + + SVN_TEST_ASSERT(expanded->len == strlen(test_cases[i])); + SVN_TEST_STRING_ASSERT(expanded->data, test_cases[i]); + + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_string_comparison(apr_pool_t *pool) +{ + svn_prefix_tree__t *tree = svn_prefix_tree__create(pool); + svn_prefix_string__t *strings[TEST_CASE_COUNT]; + int i, k; + + /* create strings */ + for (i = 0; i < TEST_CASE_COUNT; ++i) + strings[i] = svn_prefix_string__create(tree, test_cases[i]); + + /* comparing them with themselves */ + for (i = 0; i < TEST_CASE_COUNT; ++i) + SVN_TEST_ASSERT(! 
svn_prefix_string__compare(strings[i], strings[i])); + + /* compare with all other strings */ + for (i = 0; i < TEST_CASE_COUNT; ++i) + { + svn_string_t *lhs = svn_prefix_string__expand(strings[i], pool); + for (k = 0; k < TEST_CASE_COUNT; ++k) + { + svn_string_t *rhs = svn_prefix_string__expand(strings[k], pool); + int expected_diff = strcmp(lhs->data, rhs->data); + int actual_diff = svn_prefix_string__compare(strings[i], strings[k]); + + SVN_TEST_ASSERT((actual_diff < 0) == (expected_diff < 0)); + SVN_TEST_ASSERT((actual_diff > 0) == (expected_diff > 0)); + SVN_TEST_ASSERT(!actual_diff == !expected_diff); + } + } + + return SVN_NO_ERROR; +} + +/* An array of all test functions */ + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(test_empty_string, + "check empty strings"), + SVN_TEST_PASS2(test_string_creation, + "create many strings"), + SVN_TEST_PASS2(test_string_comparison, + "compare strings"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/priority-queue-test.c b/subversion/tests/libsvn_subr/priority-queue-test.c new file mode 100644 index 0000000..bd2d991 --- /dev/null +++ b/subversion/tests/libsvn_subr/priority-queue-test.c @@ -0,0 +1,240 @@ +/* + * priority-queue-test.c: a collection of svn_priority_queue__* tests + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +/* ==================================================================== + To add tests, look toward the bottom of this file. + +*/ + + + +#include <stdio.h> +#include <string.h> +#include <apr_pools.h> + +#include "../svn_test.h" + +#include "svn_error.h" +#include "private/svn_sorts_private.h" + +/* priority queue test: + * items in the queue are simple integers, in ascending order */ + +/* number of items to put into the queue */ +enum {NUMBER_COUNT = 11}; + +/* the actual values in the order we add them to the queue */ +static const int numbers[NUMBER_COUNT] + = { 8395, 0, -1, 3885, 1, -435, 99993, 10, 0, 1, 8395 }; + +/* test_update will modify in-queue data and expects the queue to return + the values in the following order: */ +static const int expected_modified[NUMBER_COUNT] + = { -431, 0, 1, 3, 5, 10, 16, 3889, 8395, 8403, 99997 }; + +/* standard compare function for integers */ +static int +compare_func(const void *lhs, const void *rhs) +{ + return *(const int *)lhs - *(const int *)rhs; +} + +/* Check that QUEUE is empty and the usual operations still work */ +static svn_error_t * +verify_empty_queue(svn_priority_queue__t *queue) +{ + /* it's an empty queue */ + SVN_TEST_ASSERT(svn_priority_queue__size(queue) == 0); + SVN_TEST_ASSERT(svn_priority_queue__peek(queue) == NULL); + + /* these should be no-ops */ + svn_priority_queue__update(queue); + svn_priority_queue__pop(queue); + + return SVN_NO_ERROR; +} + +/* check that the tip of QUEUE equals EXPECTED and 
remove the first element */ +static svn_error_t * +extract_expected(svn_priority_queue__t *queue, int expected) +{ + int value = *(int *)svn_priority_queue__peek(queue); + SVN_TEST_ASSERT(value == expected); + svn_priority_queue__pop(queue); + + return SVN_NO_ERROR; +} + +/* Verify that QUEUE returns all elements in the proper order. + Also check that data can be added & removed without disturbing the order. + */ +static svn_error_t * +verify_queue_order(svn_priority_queue__t *queue) +{ + int sorted[NUMBER_COUNT]; + int i; + + /* reference order */ + memcpy(sorted, numbers, sizeof(numbers)); + qsort(sorted, NUMBER_COUNT, sizeof(sorted[0]), compare_func); + + /* verify that the queue returns the data in the same order */ + for (i = 0; i < NUMBER_COUNT; ++i) + { + int item = *(int *)svn_priority_queue__peek(queue); + int to_insert; + + /* is this the value we expected? */ + SVN_TEST_ASSERT(item == sorted[i]); + + /* add two items at the tip of the queue */ + to_insert = item - 1; + svn_priority_queue__push(queue, &to_insert); + svn_priority_queue__push(queue, &item); + + /* check queue length */ + SVN_TEST_ASSERT(svn_priority_queue__size(queue) == NUMBER_COUNT-i+2); + + /* now, lets extract all 3 of them */ + SVN_ERR(extract_expected(queue, item-1)); + SVN_ERR(extract_expected(queue, item)); + SVN_ERR(extract_expected(queue, item)); + + /* check queue length */ + SVN_TEST_ASSERT(svn_priority_queue__size(queue) == NUMBER_COUNT-i-1); + } + + /* the queue should now be empty */ + verify_empty_queue(queue); + + return SVN_NO_ERROR; +} + +/* return a queue allocated in POOL containing all items of NUMBERS */ +static svn_priority_queue__t * +create_standard_queue(apr_pool_t *pool) +{ + apr_array_header_t *elements + = apr_array_make(pool, 11, sizeof(numbers[0])); + + /* build queue */ + int i; + for (i = 0; i < NUMBER_COUNT; ++i) + APR_ARRAY_PUSH(elements, int) = numbers[i]; + + return svn_priority_queue__create(elements, compare_func); +} + + +static svn_error_t * 
+test_empty_queue(apr_pool_t *pool) +{ + apr_array_header_t *elements + = apr_array_make(pool, 0, sizeof(int)); + svn_priority_queue__t *queue + = svn_priority_queue__create(elements, compare_func); + + verify_empty_queue(queue); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_sort_queue(apr_pool_t *pool) +{ + svn_priority_queue__t *queue = create_standard_queue(pool); + + /* data should come out of the queue in sorted order */ + SVN_ERR(verify_queue_order(queue)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_push(apr_pool_t *pool) +{ + apr_array_header_t *elements + = apr_array_make(pool, 3, sizeof(int)); + svn_priority_queue__t *queue + = svn_priority_queue__create(elements, compare_func); + + /* build queue */ + int i; + for (i = 0; i < NUMBER_COUNT; ++i) + svn_priority_queue__push(queue, &numbers[i]); + + /* data should come out of the queue in sorted order */ + SVN_ERR(verify_queue_order(queue)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_update(apr_pool_t *pool) +{ + svn_priority_queue__t *queue = create_standard_queue(pool); + + /* modify all items in the queue */ + int i; + for (i = 0; i < NUMBER_COUNT; ++i) + { + int *tip = svn_priority_queue__peek(queue); + *tip += 4; + svn_priority_queue__update(queue); + + /* extract and verify tip */ + SVN_TEST_ASSERT(*(int *)svn_priority_queue__peek(queue) + == expected_modified[i]); + svn_priority_queue__pop(queue); + + /* this should be a no-op now */ + svn_priority_queue__update(queue); + + SVN_TEST_ASSERT(svn_priority_queue__size(queue) == NUMBER_COUNT-i-1); + } + + /* the queue should now be empty */ + verify_empty_queue(queue); + + return SVN_NO_ERROR; +} + +/* An array of all test functions */ + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(test_empty_queue, + "test empty queue"), + SVN_TEST_PASS2(test_sort_queue, + "data returned by a priority queue shall be ordered"), + SVN_TEST_PASS2(test_push, + 
"priority queues can be built up incrementally"), + SVN_TEST_PASS2(test_update, + "updating the head of the queue"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/revision-test.c b/subversion/tests/libsvn_subr/revision-test.c index 7e5d752..53ca8da 100644 --- a/subversion/tests/libsvn_subr/revision-test.c +++ b/subversion/tests/libsvn_subr/revision-test.c @@ -34,6 +34,12 @@ test_revnum_parse(apr_pool_t *pool) "", "abc", "-456", + "2147483648", + "4294967295", + "4300000000", + "00000000001", + "21474836470", + "999999999999999999999999", NULL }; @@ -41,6 +47,8 @@ test_revnum_parse(apr_pool_t *pool) "0", "12345", "12345ABC", + "0000000001", + "2147483647x", NULL }; @@ -115,10 +123,14 @@ test_revnum_parse(apr_pool_t *pool) /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_revnum_parse, "test svn_revnum_parse"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/root-pools-test.c b/subversion/tests/libsvn_subr/root-pools-test.c new file mode 100644 index 0000000..8116418 --- /dev/null +++ b/subversion/tests/libsvn_subr/root-pools-test.c @@ -0,0 +1,137 @@ +/* + * root-pools-test.c -- test the svn_root_pools__* API + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include <apr_pools.h> +#include <apr_thread_proc.h> +#include <apr_thread_cond.h> + +#include "private/svn_atomic.h" +#include "private/svn_subr_private.h" + +#include "../svn_test.h" + +/* do a few allocations of various sizes from POOL */ +static void +do_some_allocations(apr_pool_t *pool) +{ + int i; + apr_size_t fib = 1, fib1 = 0, fib2 = 0; + for (i = 0; i < 25; ++i) /* fib(25) = 75025 */ + { + apr_pcalloc(pool, fib1); + fib2 = fib1; + fib1 = fib; + fib += fib2; + } +} + +/* allocate, use and recycle a pool from POOLs a few times */ +static void +use_root_pool(svn_root_pools__t *pools) +{ + int i; + for (i = 0; i < 1000; ++i) + { + apr_pool_t *pool = svn_root_pools__acquire_pool(pools); + do_some_allocations(pool); + svn_root_pools__release_pool(pool, pools); + } +} + +#if APR_HAS_THREADS +static void * +APR_THREAD_FUNC thread_func(apr_thread_t *tid, void *data) +{ + /* give all threads a good chance to get started by the scheduler */ + apr_thread_yield(); + + use_root_pool(data); + apr_thread_exit(tid, APR_SUCCESS); + + return NULL; +} +#endif + +static svn_error_t * +test_root_pool(apr_pool_t *pool) +{ + svn_root_pools__t *pools; + SVN_ERR(svn_root_pools__create(&pools)); + use_root_pool(pools); + + return SVN_NO_ERROR; +} + +#define APR_ERR(expr) \ + do { \ + apr_status_t status = (expr); \ + if (status) \ + return svn_error_wrap_apr(status, NULL); \ + } while (0) + +static svn_error_t * +test_root_pool_concurrency(apr_pool_t *pool) +{ +#if APR_HAS_THREADS + /* 
The svn_root_pools__t container is supposed to be thread-safe. + Do some multi-threaded access and hope that there are no segfaults. + */ + enum { THREAD_COUNT = 10 }; + svn_root_pools__t *pools; + apr_thread_t *threads[THREAD_COUNT]; + int i; + + SVN_ERR(svn_root_pools__create(&pools)); + + for (i = 0; i < THREAD_COUNT; ++i) + APR_ERR(apr_thread_create(&threads[i], NULL, thread_func, pools, pool)); + + /* wait for the threads to finish */ + for (i = 0; i < THREAD_COUNT; ++i) + { + apr_status_t retval; + APR_ERR(apr_thread_join(&retval, threads[i])); + APR_ERR(retval); + } +#endif + + return SVN_NO_ERROR; +} + + +/* The test table. */ + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(test_root_pool, + "test root pool recycling"), + SVN_TEST_SKIP2(test_root_pool_concurrency, + ! APR_HAS_THREADS, + "test concurrent root pool recycling"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/skel-test.c b/subversion/tests/libsvn_subr/skel-test.c index 49fe1a3..9839e6a 100644 --- a/subversion/tests/libsvn_subr/skel-test.c +++ b/subversion/tests/libsvn_subr/skel-test.c @@ -59,7 +59,7 @@ get_empty_string(apr_pool_t *pool) { svn_pool_clear(pool); - return svn_stringbuf_ncreate(0, 0, pool); + return svn_stringbuf_create_empty(pool); } /* Parse a skeleton from a Subversion string. */ @@ -886,7 +886,9 @@ unparse_list(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(parse_implicit_length, @@ -903,3 +905,5 @@ struct svn_test_descriptor_t test_funcs[] = "unparse lists"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/spillbuf-test.c b/subversion/tests/libsvn_subr/spillbuf-test.c index c928dc3..16021b1 100644 --- a/subversion/tests/libsvn_subr/spillbuf-test.c +++ b/subversion/tests/libsvn_subr/spillbuf-test.c @@ -57,10 +57,8 @@ check_read(svn_spillbuf_t *buf, static svn_error_t * -test_spillbuf_basic(apr_pool_t *pool) +test_spillbuf__basic(apr_pool_t *pool, apr_size_t len, svn_spillbuf_t *buf) { - apr_size_t len = strlen(basic_data); /* Don't include basic_data's NUL */ - svn_spillbuf_t *buf = svn_spillbuf__create(len, 10 * len, pool); int i; const char *readptr; apr_size_t readlen; @@ -87,6 +85,22 @@ test_spillbuf_basic(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_spillbuf_basic(apr_pool_t *pool) +{ + apr_size_t len = strlen(basic_data); /* Don't include basic_data's NUL */ + svn_spillbuf_t *buf = svn_spillbuf__create(len, 10 * len, pool); + return test_spillbuf__basic(pool, len, buf); +} + +static svn_error_t * +test_spillbuf_basic_spill_all(apr_pool_t *pool) +{ + apr_size_t len = strlen(basic_data); /* Don't include basic_data's NUL */ + svn_spillbuf_t *buf = + svn_spillbuf__create_extended(len, 10 * len, TRUE, TRUE, NULL, pool); + return test_spillbuf__basic(pool, len, buf); +} static svn_error_t * read_callback(svn_boolean_t *stop, @@ -107,12 +121,8 @@ read_callback(svn_boolean_t *stop, static svn_error_t * -test_spillbuf_callback(apr_pool_t *pool) +test_spillbuf__callback(apr_pool_t *pool, svn_spillbuf_t *buf) { - svn_spillbuf_t *buf = svn_spillbuf__create( - sizeof(basic_data) /* blocksize */, - 10 * sizeof(basic_data) /* maxsize */, - pool); int i; int counter; svn_boolean_t exhausted; @@ -133,15 +143,31 @@ 
test_spillbuf_callback(apr_pool_t *pool) return SVN_NO_ERROR; } - static svn_error_t * -test_spillbuf_file(apr_pool_t *pool) +test_spillbuf_callback(apr_pool_t *pool) { - apr_size_t altsize = sizeof(basic_data) + 2; svn_spillbuf_t *buf = svn_spillbuf__create( - altsize /* blocksize */, - 2 * sizeof(basic_data) /* maxsize */, + sizeof(basic_data) /* blocksize */, + 10 * sizeof(basic_data) /* maxsize */, pool); + return test_spillbuf__callback(pool, buf); +} + +static svn_error_t * +test_spillbuf_callback_spill_all(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create_extended( + sizeof(basic_data) /* blocksize */, + 10 * sizeof(basic_data) /* maxsize */, + TRUE /* delte on close */, + TRUE /* spill all data */, + NULL, pool); + return test_spillbuf__callback(pool, buf); +} + +static svn_error_t * +test_spillbuf__file(apr_pool_t *pool, apr_size_t altsize, svn_spillbuf_t *buf) +{ int i; const char *readptr; apr_size_t readlen; @@ -203,14 +229,33 @@ test_spillbuf_file(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_spillbuf_file(apr_pool_t *pool) +{ + apr_size_t altsize = sizeof(basic_data) + 2; + svn_spillbuf_t *buf = svn_spillbuf__create( + altsize /* blocksize */, + 2 * sizeof(basic_data) /* maxsize */, + pool); + return test_spillbuf__file(pool, altsize, buf); +} static svn_error_t * -test_spillbuf_interleaving(apr_pool_t *pool) +test_spillbuf_file_spill_all(apr_pool_t *pool) { - svn_spillbuf_t *buf = svn_spillbuf__create(8 /* blocksize */, - 15 /* maxsize */, - pool); + apr_size_t altsize = sizeof(basic_data) + 2; + svn_spillbuf_t *buf = svn_spillbuf__create_extended( + altsize /* blocksize */, + 2 * sizeof(basic_data) /* maxsize */, + TRUE /* delte on close */, + TRUE /* spill all data */, + NULL, pool); + return test_spillbuf__file(pool, altsize, buf); +} +static svn_error_t * +test_spillbuf__interleaving(apr_pool_t *pool, svn_spillbuf_t* buf) +{ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool)); 
SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool)); /* now: two blocks: 8 and 4 bytes */ @@ -238,18 +283,36 @@ test_spillbuf_interleaving(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_spillbuf_interleaving(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create(8 /* blocksize */, + 15 /* maxsize */, + pool); + return test_spillbuf__interleaving(pool, buf); +} + +static svn_error_t * +test_spillbuf_interleaving_spill_all(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create_extended( + 8 /* blocksize */, + 15 /* maxsize */, + TRUE /* delte on close */, + TRUE /* spill all data */, + NULL, pool); + return test_spillbuf__interleaving(pool, buf); +} static svn_error_t * test_spillbuf_reader(apr_pool_t *pool) { - svn_spillbuf_reader_t *sbr; + svn_spillbuf_reader_t *sbr = svn_spillbuf__reader_create(4 /* blocksize */, + 100 /* maxsize */, + pool); apr_size_t amt; char buf[10]; - sbr = svn_spillbuf__reader_create(4 /* blocksize */, - 100 /* maxsize */, - pool); - SVN_ERR(svn_spillbuf__reader_write(sbr, "abcdef", 6, pool)); /* Get a buffer from the underlying reader, and grab a couple bytes. 
*/ @@ -270,13 +333,13 @@ test_spillbuf_reader(apr_pool_t *pool) return SVN_NO_ERROR; } - static svn_error_t * test_spillbuf_stream(apr_pool_t *pool) { - svn_stream_t *stream = svn_stream__from_spillbuf(8 /* blocksize */, - 15 /* maxsize */, - pool); + svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */, + 100 /* maxsize */, + pool); + svn_stream_t *stream = svn_stream__from_spillbuf(buf, pool); char readbuf[256]; apr_size_t readlen; apr_size_t writelen; @@ -287,7 +350,7 @@ test_spillbuf_stream(apr_pool_t *pool) /* now: two blocks: 8 and 4 bytes */ readlen = 8; - SVN_ERR(svn_stream_read(stream, readbuf, &readlen)); + SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen)); SVN_TEST_ASSERT(readlen == 8 && memcmp(readbuf, "abcdefgh", 8) == 0); /* now: one block: 4 bytes */ @@ -295,7 +358,7 @@ test_spillbuf_stream(apr_pool_t *pool) SVN_ERR(svn_stream_write(stream, "mnopqr", &writelen)); /* now: two blocks: 8 and 2 bytes */ - SVN_ERR(svn_stream_read(stream, readbuf, &readlen)); + SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen)); SVN_TEST_ASSERT(readlen == 8 && memcmp(readbuf, "ijklmnop", 8) == 0); /* now: one block: 2 bytes */ @@ -305,28 +368,23 @@ test_spillbuf_stream(apr_pool_t *pool) SVN_ERR(svn_stream_write(stream, "GHIJKL", &writelen)); /* now: two blocks: 8 and 6 bytes, and 6 bytes spilled to a file */ - SVN_ERR(svn_stream_read(stream, readbuf, &readlen)); + SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen)); SVN_TEST_ASSERT(readlen == 8 && memcmp(readbuf, "qrstuvwx", 8) == 0); readlen = 6; - SVN_ERR(svn_stream_read(stream, readbuf, &readlen)); + SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen)); SVN_TEST_ASSERT(readlen == 6 && memcmp(readbuf, "ABCDEF", 6) == 0); - SVN_ERR(svn_stream_read(stream, readbuf, &readlen)); + SVN_ERR(svn_stream_read_full(stream, readbuf, &readlen)); SVN_TEST_ASSERT(readlen == 6 && memcmp(readbuf, "GHIJKL", 6) == 0); return SVN_NO_ERROR; } - static svn_error_t * -test_spillbuf_rwfile(apr_pool_t *pool) 
+test_spillbuf__rwfile(apr_pool_t *pool, svn_spillbuf_t *buf) { - svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */, - 10 /* maxsize */, - pool); - SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool)); SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool)); SVN_ERR(svn_spillbuf__write(buf, "mnopqr", 6, pool)); @@ -360,14 +418,30 @@ test_spillbuf_rwfile(apr_pool_t *pool) return SVN_NO_ERROR; } - static svn_error_t * -test_spillbuf_eof(apr_pool_t *pool) +test_spillbuf_rwfile(apr_pool_t *pool) { svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */, 10 /* maxsize */, pool); + return test_spillbuf__rwfile(pool, buf); +} + +static svn_error_t * +test_spillbuf_rwfile_spill_all(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create_extended( + 4 /* blocksize */, + 10 /* maxsize */, + TRUE /* delete on close */, + TRUE /* spill all data */, + NULL, pool); + return test_spillbuf__rwfile(pool, buf); +} +static svn_error_t * +test_spillbuf__eof(apr_pool_t *pool, svn_spillbuf_t *buf) +{ SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool)); SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool)); /* now: two blocks: 4 and 2 bytes, and 6 bytes in spill file. 
*/ @@ -415,19 +489,108 @@ test_spillbuf_eof(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_spillbuf_eof(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */, + 10 /* maxsize */, + pool); + return test_spillbuf__eof(pool, buf); +} + +static svn_error_t * +test_spillbuf_eof_spill_all(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create_extended( + 4 /* blocksize */, + 10 /* maxsize */, + TRUE /* delete on close */, + TRUE /* spill all data */, + NULL, pool); + return test_spillbuf__eof(pool, buf); +} + +static svn_error_t * +test_spillbuf__file_attrs(apr_pool_t *pool, svn_boolean_t spill_all, + svn_spillbuf_t *buf) +{ + apr_finfo_t finfo; + + SVN_ERR(svn_spillbuf__write(buf, "abcdef", 6, pool)); + SVN_ERR(svn_spillbuf__write(buf, "ghijkl", 6, pool)); + SVN_ERR(svn_spillbuf__write(buf, "mnopqr", 6, pool)); + + /* Check that the spillbuf size is what we expect it to be */ + SVN_TEST_ASSERT(svn_spillbuf__get_size(buf) == 18); + + /* Check file existence */ + SVN_TEST_ASSERT(svn_spillbuf__get_filename(buf) != NULL); + SVN_TEST_ASSERT(svn_spillbuf__get_file(buf) != NULL); + + /* The size of the file must match expectations */ + SVN_ERR(svn_io_file_info_get(&finfo, APR_FINFO_SIZE, + svn_spillbuf__get_file(buf), pool)); + if (spill_all) + SVN_TEST_ASSERT(finfo.size == svn_spillbuf__get_size(buf)); + else + SVN_TEST_ASSERT(finfo.size == (svn_spillbuf__get_size(buf) + - svn_spillbuf__get_memory_size(buf))); + return SVN_NO_ERROR; +} + +static svn_error_t * +test_spillbuf_file_attrs(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create(4 /* blocksize */, + 10 /* maxsize */, + pool); + return test_spillbuf__file_attrs(pool, FALSE, buf); +} + +static svn_error_t * +test_spillbuf_file_attrs_spill_all(apr_pool_t *pool) +{ + svn_spillbuf_t *buf = svn_spillbuf__create_extended( + 4 /* blocksize */, + 10 /* maxsize */, + TRUE /* delete on close */, + TRUE /* spill all data */, + NULL, pool); + return 
test_spillbuf__file_attrs(pool, TRUE, buf); +} /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_spillbuf_basic, "basic spill buffer test"), + SVN_TEST_PASS2(test_spillbuf_basic_spill_all, + "basic spill buffer test (spill-all-data)"), SVN_TEST_PASS2(test_spillbuf_callback, "spill buffer read callback"), + SVN_TEST_PASS2(test_spillbuf_callback_spill_all, + "spill buffer read callback (spill-all-data)"), SVN_TEST_PASS2(test_spillbuf_file, "spill buffer file test"), + SVN_TEST_PASS2(test_spillbuf_file_spill_all, + "spill buffer file test (spill-all-data)"), SVN_TEST_PASS2(test_spillbuf_interleaving, "interleaving reads and writes"), + SVN_TEST_PASS2(test_spillbuf_interleaving_spill_all, + "interleaving reads and writes (spill-all-data)"), SVN_TEST_PASS2(test_spillbuf_reader, "spill buffer reader test"), SVN_TEST_PASS2(test_spillbuf_stream, "spill buffer stream test"), SVN_TEST_PASS2(test_spillbuf_rwfile, "read/write spill file"), + SVN_TEST_PASS2(test_spillbuf_rwfile_spill_all, + "read/write spill file (spill-all-data)"), SVN_TEST_PASS2(test_spillbuf_eof, "validate reaching EOF of spill file"), + SVN_TEST_PASS2(test_spillbuf_eof_spill_all, + "validate reaching EOF (spill-all-data)"), + SVN_TEST_PASS2(test_spillbuf_file_attrs, "check spill file properties"), + SVN_TEST_PASS2(test_spillbuf_file_attrs_spill_all, + "check spill file properties (spill-all-data)"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/sqlite-test.c b/subversion/tests/libsvn_subr/sqlite-test.c new file mode 100644 index 0000000..f44aa8d --- /dev/null +++ b/subversion/tests/libsvn_subr/sqlite-test.c @@ -0,0 +1,186 @@ +/* + * sqlite-test.c -- test the SQLite wrapper functions + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor 
license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include "private/svn_sqlite.h" +#include "../svn_test.h" + +static svn_error_t * +open_db(svn_sqlite__db_t **sdb, + const char **db_abspath_p, + const char *db_name, + const char *const *statements, + apr_int32_t timeout, + apr_pool_t *pool) +{ + const char *db_dir, *db_abspath; + + SVN_ERR(svn_dirent_get_absolute(&db_dir, "sqlite-test-tmp", pool)); + SVN_ERR(svn_io_remove_dir2(db_dir, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_make_dir_recursively(db_dir, pool)); + svn_test_add_dir_cleanup(db_dir); + + db_abspath = svn_dirent_join(db_dir, db_name, pool); + + SVN_ERR(svn_sqlite__open(sdb, db_abspath, svn_sqlite__mode_rwcreate, + statements, 0, NULL, timeout, pool, pool)); + + if (db_abspath_p) + *db_abspath_p = db_abspath; + return SVN_NO_ERROR; +} + +static svn_error_t * +error_second(svn_sqlite__context_t *sctx, + int argc, + svn_sqlite__value_t *values[], + void *baton) +{ + static int i = 0; + + if (++i == 2) + svn_sqlite__result_error(sctx, "fake error", 0); + else + svn_sqlite__result_int64(sctx, 1); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_sqlite_reset(apr_pool_t *pool) +{ + svn_sqlite__db_t *sdb; + svn_sqlite__stmt_t *stmt; + svn_boolean_t have_row; 
+ const char *value; + + static const char *const statements[] = { + "CREATE TABLE reset (" + " one TEXT NOT NULL PRIMARY KEY," + " two TEXT" + ");" + "INSERT INTO reset(one, two) VALUES ('foo', 'bar');" + "INSERT INTO reset(one, two) VALUES ('zig', 'zag')", + + "SELECT one FROM reset WHERE two IS NOT NULL AND error_second(one) " + "ORDER BY one", + + NULL + }; + + SVN_ERR(open_db(&sdb, NULL, "reset", statements, 0, pool)); + SVN_ERR(svn_sqlite__create_scalar_function(sdb, "error_second", + 1, FALSE /* deterministic */, + error_second, NULL)); + SVN_ERR(svn_sqlite__exec_statements(sdb, 0)); + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 1)); + + /* First step is OK. */ + SVN_ERR(svn_sqlite__step(&have_row, stmt)); + SVN_TEST_ASSERT(have_row); + value = svn_sqlite__column_text(stmt, 0, NULL); + SVN_TEST_ASSERT(value && !strcmp(value, "foo")); + + /* Second step fails. */ + SVN_TEST_ASSERT_ERROR(svn_sqlite__step(&have_row, stmt), + SVN_ERR_SQLITE_ERROR); + + /* The svn_sqlite__step wrapper calls svn_sqlite__reset when step + fails so the reset call here is a no-op. The first step can be + repeated. */ + SVN_ERR(svn_sqlite__reset(stmt)); + SVN_ERR(svn_sqlite__step(&have_row, stmt)); + SVN_TEST_ASSERT(have_row); + value = svn_sqlite__column_text(stmt, 0, NULL); + SVN_TEST_ASSERT(value && !strcmp(value, "foo")); + SVN_ERR(svn_sqlite__reset(stmt)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_sqlite_txn_commit_busy(apr_pool_t *pool) +{ + svn_sqlite__db_t *sdb1; + svn_sqlite__db_t *sdb2; + const char *db_abspath; + svn_error_t *err; + + static const char *const statements[] = { + "CREATE TABLE test (one TEXT NOT NULL PRIMARY KEY)", + + "INSERT INTO test(one) VALUES ('foo')", + + "SELECT one from test", + + NULL + }; + + /* Open two db connections. + + Use a small busy_timeout of 250ms, since we're about to receive an + SVN_ERR_SQLITE_BUSY error, and retrying for the default 10 seconds + would be a waste of time. 
*/ + SVN_ERR(open_db(&sdb1, &db_abspath, "txn_commit_busy", + statements, 250, pool)); + SVN_ERR(svn_sqlite__open(&sdb2, db_abspath, svn_sqlite__mode_readwrite, + statements, 0, NULL, 250, pool, pool)); + SVN_ERR(svn_sqlite__exec_statements(sdb1, 0)); + + /* Begin two deferred transactions. */ + SVN_ERR(svn_sqlite__begin_transaction(sdb1)); + SVN_ERR(svn_sqlite__exec_statements(sdb1, 1 /* INSERT */)); + SVN_ERR(svn_sqlite__begin_transaction(sdb2)); + SVN_ERR(svn_sqlite__exec_statements(sdb2, 2 /* SELECT */)); + + /* Try to COMMIT the first write transaction; this should fail due to + the concurrent read transaction that holds a shared lock on the db. */ + err = svn_sqlite__finish_transaction(sdb1, SVN_NO_ERROR); + SVN_TEST_ASSERT_ERROR(err, SVN_ERR_SQLITE_BUSY); + + /* We failed to COMMIT the first transaction, but COMMIT-ting the + second transaction through a different db connection should succeed. + Upgrade it to a write transaction by executing the INSERT statement, + and then commit. */ + SVN_ERR(svn_sqlite__exec_statements(sdb2, 1 /* INSERT */)); + SVN_ERR(svn_sqlite__finish_transaction(sdb2, SVN_NO_ERROR)); + + SVN_ERR(svn_sqlite__close(sdb2)); + SVN_ERR(svn_sqlite__close(sdb1)); + + return SVN_NO_ERROR; +} + + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(test_sqlite_reset, + "sqlite reset"), + SVN_TEST_PASS2(test_sqlite_txn_commit_busy, + "sqlite busy on transaction commit"), + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/stream-test.c b/subversion/tests/libsvn_subr/stream-test.c index c8dba13..aaa9bf1 100644 --- a/subversion/tests/libsvn_subr/stream-test.c +++ b/subversion/tests/libsvn_subr/stream-test.c @@ -73,7 +73,7 @@ test_stream_from_string(apr_pool_t *pool) while (len == TEST_BUF_SIZE) { /* Read a chunk ... */ - SVN_ERR(svn_stream_read(stream, buffer, &len)); + SVN_ERR(svn_stream_read_full(stream, buffer, &len)); /* ... 
and append the chunk to the stringbuf. */ svn_stringbuf_appendbytes(outbuf, buffer, len); @@ -206,7 +206,7 @@ test_stream_compressed(apr_pool_t *pool) while (len >= TEST_BUF_SIZE) { len = TEST_BUF_SIZE; - SVN_ERR(svn_stream_read(stream, buf, &len)); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); if (len > 0) svn_stringbuf_appendbytes(inbuf, buf, len); } @@ -332,17 +332,17 @@ test_stream_seek_stringbuf(apr_pool_t *pool) stringbuf = svn_stringbuf_create("OneTwo", pool); stream = svn_stream_from_stringbuf(stringbuf, pool); len = 3; - SVN_ERR(svn_stream_read(stream, buf, &len)); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); buf[3] = '\0'; SVN_TEST_STRING_ASSERT(buf, "One"); SVN_ERR(svn_stream_mark(stream, &mark, pool)); len = 3; - SVN_ERR(svn_stream_read(stream, buf, &len)); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); buf[3] = '\0'; SVN_TEST_STRING_ASSERT(buf, "Two"); SVN_ERR(svn_stream_seek(stream, mark)); len = 3; - SVN_ERR(svn_stream_read(stream, buf, &len)); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); buf[3] = '\0'; SVN_TEST_STRING_ASSERT(buf, "Two"); @@ -351,7 +351,7 @@ test_stream_seek_stringbuf(apr_pool_t *pool) SVN_ERR(svn_stream_skip(stream, 2)); /* The remaining line should be empty */ len = 3; - SVN_ERR(svn_stream_read(stream, buf, &len)); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); buf[len] = '\0'; SVN_TEST_ASSERT(len == 1); SVN_TEST_STRING_ASSERT(buf, "o"); @@ -381,7 +381,7 @@ test_stream_seek_translated(apr_pool_t *pool) FALSE, keywords, TRUE, pool); /* Seek from outside of keyword to inside of keyword. 
*/ len = 25; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 25); buf[25] = '\0'; SVN_TEST_STRING_ASSERT(buf, "One$MyKeyword: my keyword"); @@ -389,7 +389,7 @@ test_stream_seek_translated(apr_pool_t *pool) SVN_ERR(svn_stream_reset(translated_stream)); SVN_ERR(svn_stream_seek(translated_stream, mark)); len = 4; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 4); buf[4] = '\0'; SVN_TEST_STRING_ASSERT(buf, " was"); @@ -397,7 +397,7 @@ test_stream_seek_translated(apr_pool_t *pool) SVN_ERR(svn_stream_seek(translated_stream, mark)); SVN_ERR(svn_stream_skip(translated_stream, 2)); len = 2; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 2); buf[len] = '\0'; SVN_TEST_STRING_ASSERT(buf, "as"); @@ -405,13 +405,13 @@ test_stream_seek_translated(apr_pool_t *pool) /* Seek from inside of keyword to inside of keyword. 
*/ SVN_ERR(svn_stream_mark(translated_stream, &mark, pool)); len = 9; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 9); buf[9] = '\0'; SVN_TEST_STRING_ASSERT(buf, " expanded"); SVN_ERR(svn_stream_seek(translated_stream, mark)); len = 9; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 9); buf[9] = '\0'; SVN_TEST_STRING_ASSERT(buf, " expanded"); @@ -419,7 +419,7 @@ test_stream_seek_translated(apr_pool_t *pool) SVN_ERR(svn_stream_seek(translated_stream, mark)); SVN_ERR(svn_stream_skip(translated_stream, 6)); len = 3; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 3); buf[len] = '\0'; SVN_TEST_STRING_ASSERT(buf, "ded"); @@ -427,13 +427,13 @@ test_stream_seek_translated(apr_pool_t *pool) /* Seek from inside of keyword to outside of keyword. 
*/ SVN_ERR(svn_stream_mark(translated_stream, &mark, pool)); len = 4; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 4); buf[4] = '\0'; SVN_TEST_STRING_ASSERT(buf, " $Tw"); SVN_ERR(svn_stream_seek(translated_stream, mark)); len = 4; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 4); buf[4] = '\0'; SVN_TEST_STRING_ASSERT(buf, " $Tw"); @@ -441,7 +441,7 @@ test_stream_seek_translated(apr_pool_t *pool) SVN_ERR(svn_stream_seek(translated_stream, mark)); SVN_ERR(svn_stream_skip(translated_stream, 2)); len = 2; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 2); buf[len] = '\0'; SVN_TEST_STRING_ASSERT(buf, "Tw"); @@ -449,13 +449,13 @@ test_stream_seek_translated(apr_pool_t *pool) /* Seek from outside of keyword to outside of keyword. 
*/ SVN_ERR(svn_stream_mark(translated_stream, &mark, pool)); len = 1; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 1); buf[1] = '\0'; SVN_TEST_STRING_ASSERT(buf, "o"); SVN_ERR(svn_stream_seek(translated_stream, mark)); len = 1; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 1); buf[1] = '\0'; SVN_TEST_STRING_ASSERT(buf, "o"); @@ -463,7 +463,7 @@ test_stream_seek_translated(apr_pool_t *pool) SVN_ERR(svn_stream_seek(translated_stream, mark)); SVN_ERR(svn_stream_skip(translated_stream, 2)); len = 1; - SVN_ERR(svn_stream_read(translated_stream, buf, &len)); + SVN_ERR(svn_stream_read_full(translated_stream, buf, &len)); SVN_TEST_ASSERT(len == 0); buf[len] = '\0'; SVN_TEST_STRING_ASSERT(buf, ""); @@ -524,7 +524,7 @@ test_stream_compressed_empty_file(apr_pool_t *pool) pool, pool)); stream = svn_stream_compressed(empty_file_stream, pool); len = sizeof(buf); - SVN_ERR(svn_stream_read(stream, buf, &len)); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); if (len > 0) return svn_error_create(SVN_ERR_TEST_FAILED, NULL, "Got unexpected result."); @@ -727,9 +727,87 @@ test_stream_base64_2(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_stringbuf_from_stream(apr_pool_t *pool) +{ + const char *test_cases[] = + { + "", + "x", + "this string is longer than the default 64 minimum block size used" + "by the function under test", + NULL + }; + + const char **test_case; + for (test_case = test_cases; *test_case; ++test_case) + { + svn_stringbuf_t *result1, *result2, *result3, *result4; + svn_stringbuf_t *original = svn_stringbuf_create(*test_case, pool); + + svn_stream_t *stream1 = svn_stream_from_stringbuf(original, pool); + svn_stream_t *stream2 = svn_stream_from_stringbuf(original, pool); + + SVN_ERR(svn_stringbuf_from_stream(&result1, stream1, 0, pool)); + 
SVN_ERR(svn_stringbuf_from_stream(&result2, stream1, 0, pool)); + SVN_ERR(svn_stringbuf_from_stream(&result3, stream2, original->len, + pool)); + SVN_ERR(svn_stringbuf_from_stream(&result4, stream2, original->len, + pool)); + + /* C-string contents must match */ + SVN_TEST_STRING_ASSERT(result1->data, original->data); + SVN_TEST_STRING_ASSERT(result2->data, ""); + SVN_TEST_STRING_ASSERT(result3->data, original->data); + SVN_TEST_STRING_ASSERT(result4->data, ""); + + /* assumed length must match */ + SVN_TEST_ASSERT(result1->len == original->len); + SVN_TEST_ASSERT(result2->len == 0); + SVN_TEST_ASSERT(result3->len == original->len); + SVN_TEST_ASSERT(result4->len == 0); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +empty_read_full_fn(void *baton, char *buffer, apr_size_t *len) +{ + *len = 0; + return SVN_NO_ERROR; +} + +static svn_error_t * +test_stream_compressed_read_full(apr_pool_t *pool) +{ + svn_stream_t *stream, *empty_stream; + char buf[1]; + apr_size_t len; + + /* Reading an empty stream with read_full only support should not error. */ + empty_stream = svn_stream_create(NULL, pool); + + /* Create stream with only full read support. */ + svn_stream_set_read2(empty_stream, NULL, empty_read_full_fn); + + stream = svn_stream_compressed(empty_stream, pool); + len = sizeof(buf); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); + if (len > 0) + return svn_error_create(SVN_ERR_TEST_FAILED, NULL, + "Got unexpected result."); + + SVN_ERR(svn_stream_close(stream)); + + return SVN_NO_ERROR; +} + /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_stream_from_string, @@ -752,5 +830,11 @@ struct svn_test_descriptor_t test_funcs[] = "test base64 encoding/decoding streams"), SVN_TEST_PASS2(test_stream_base64_2, "base64 decoding allocation problem"), + SVN_TEST_PASS2(test_stringbuf_from_stream, + "test svn_stringbuf_from_stream"), + SVN_TEST_PASS2(test_stream_compressed_read_full, + "test compression for streams without partial read"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/string-test.c b/subversion/tests/libsvn_subr/string-test.c index 735db18..ab0cc44 100644 --- a/subversion/tests/libsvn_subr/string-test.c +++ b/subversion/tests/libsvn_subr/string-test.c @@ -38,6 +38,7 @@ #include "svn_io.h" #include "svn_error.h" +#include "svn_sorts.h" /* MIN / MAX */ #include "svn_string.h" /* This includes <apr_*.h> */ #include "private/svn_string_private.h" @@ -58,9 +59,8 @@ fail(apr_pool_t *pool, const char *fmt, ...) /* Some of our own global variables, for simplicity. Yes, simplicity. */ -svn_stringbuf_t *a = NULL, *b = NULL, *c = NULL; -const char *phrase_1 = "hello, "; -const char *phrase_2 = "a longish phrase of sorts, longer than 16 anyway"; +static const char *phrase_1 = "hello, "; +static const char *phrase_2 = "a longish phrase of sorts, longer than 16 anyway"; @@ -68,7 +68,7 @@ const char *phrase_2 = "a longish phrase of sorts, longer than 16 anyway"; static svn_error_t * test1(apr_pool_t *pool) { - a = svn_stringbuf_create(phrase_1, pool); + svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool); /* Test that length, data, and null-termination are correct. 
*/ if ((a->len == strlen(phrase_1)) && ((strcmp(a->data, phrase_1)) == 0)) @@ -81,7 +81,7 @@ test1(apr_pool_t *pool) static svn_error_t * test2(apr_pool_t *pool) { - b = svn_stringbuf_ncreate(phrase_2, 16, pool); + svn_stringbuf_t *b = svn_stringbuf_ncreate(phrase_2, 16, pool); /* Test that length, data, and null-termination are correct. */ if ((b->len == 16) && ((strncmp(b->data, phrase_2, 16)) == 0)) @@ -97,8 +97,8 @@ test3(apr_pool_t *pool) char *tmp; size_t old_len; - a = svn_stringbuf_create(phrase_1, pool); - b = svn_stringbuf_ncreate(phrase_2, 16, pool); + svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool); + svn_stringbuf_t *b = svn_stringbuf_ncreate(phrase_2, 16, pool); tmp = apr_palloc(pool, (a->len + b->len + 1)); strcpy(tmp, a->data); @@ -117,7 +117,7 @@ test3(apr_pool_t *pool) static svn_error_t * test4(apr_pool_t *pool) { - a = svn_stringbuf_create(phrase_1, pool); + svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool); svn_stringbuf_appendcstr(a, "new bytes to append"); /* Test that length, data, and null-termination are correct. */ @@ -132,7 +132,7 @@ test4(apr_pool_t *pool) static svn_error_t * test5(apr_pool_t *pool) { - a = svn_stringbuf_create(phrase_1, pool); + svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool); svn_stringbuf_appendbytes(a, "new bytes to append", 9); /* Test that length, data, and null-termination are correct. */ @@ -147,9 +147,9 @@ test5(apr_pool_t *pool) static svn_error_t * test6(apr_pool_t *pool) { - a = svn_stringbuf_create(phrase_1, pool); - b = svn_stringbuf_create(phrase_2, pool); - c = svn_stringbuf_dup(a, pool); + svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool); + svn_stringbuf_t *b = svn_stringbuf_create(phrase_2, pool); + svn_stringbuf_t *c = svn_stringbuf_dup(a, pool); /* Test that length, data, and null-termination are correct. */ if ((svn_stringbuf_compare(a, c)) && (! 
svn_stringbuf_compare(b, c))) @@ -165,7 +165,7 @@ test7(apr_pool_t *pool) char *tmp; size_t tmp_len; - c = svn_stringbuf_create(phrase_2, pool); + svn_stringbuf_t *c = svn_stringbuf_create(phrase_2, pool); tmp_len = c->len; tmp = apr_palloc(pool, c->len + 1); @@ -185,7 +185,7 @@ test7(apr_pool_t *pool) static svn_error_t * test8(apr_pool_t *pool) { - c = svn_stringbuf_create(phrase_2, pool); + svn_stringbuf_t *c = svn_stringbuf_create(phrase_2, pool); svn_stringbuf_setempty(c); @@ -199,7 +199,7 @@ test8(apr_pool_t *pool) static svn_error_t * test9(apr_pool_t *pool) { - a = svn_stringbuf_create(phrase_1, pool); + svn_stringbuf_t *a = svn_stringbuf_create(phrase_1, pool); svn_stringbuf_fillchar(a, '#'); @@ -379,7 +379,7 @@ test_find_char_backward(const char* data, { apr_size_t i; - a = svn_stringbuf_create(data, pool); + svn_stringbuf_t *a = svn_stringbuf_create(data, pool); i = svn_stringbuf_find_char_backward(a, ch); if (i == pos) @@ -391,7 +391,7 @@ test_find_char_backward(const char* data, static svn_error_t * test13(apr_pool_t *pool) { - a = svn_stringbuf_create("test, test", pool); + svn_stringbuf_t *a = svn_stringbuf_create("test, test", pool); return test_find_char_backward(a->data, a->len, ',', 4, pool); } @@ -399,7 +399,7 @@ test13(apr_pool_t *pool) static svn_error_t * test14(apr_pool_t *pool) { - a = svn_stringbuf_create(",test test", pool); + svn_stringbuf_t *a = svn_stringbuf_create(",test test", pool); return test_find_char_backward(a->data, a->len, ',', 0, pool); } @@ -407,7 +407,7 @@ test14(apr_pool_t *pool) static svn_error_t * test15(apr_pool_t *pool) { - a = svn_stringbuf_create("testing,", pool); + svn_stringbuf_t *a = svn_stringbuf_create("testing,", pool); return test_find_char_backward(a->data, a->len, @@ -419,7 +419,7 @@ test15(apr_pool_t *pool) static svn_error_t * test16(apr_pool_t *pool) { - a = svn_stringbuf_create_empty(pool); + svn_stringbuf_t *a = svn_stringbuf_create_empty(pool); return test_find_char_backward(a->data, a->len, ',', 0, 
pool); } @@ -427,7 +427,7 @@ test16(apr_pool_t *pool) static svn_error_t * test17(apr_pool_t *pool) { - a = svn_stringbuf_create("test test test", pool); + svn_stringbuf_t *a = svn_stringbuf_create("test test test", pool); return test_find_char_backward(a->data, a->len, @@ -443,7 +443,7 @@ test_first_non_whitespace(const char *str, { apr_size_t i; - a = svn_stringbuf_create(str, pool); + svn_stringbuf_t *a = svn_stringbuf_create(str, pool); i = svn_stringbuf_first_non_whitespace(a); @@ -474,8 +474,8 @@ test20(apr_pool_t *pool) static svn_error_t * test21(apr_pool_t *pool) { - a = svn_stringbuf_create(" \ttest\t\t \t ", pool); - b = svn_stringbuf_create("test", pool); + svn_stringbuf_t *a = svn_stringbuf_create(" \ttest\t\t \t ", pool); + svn_stringbuf_t *b = svn_stringbuf_create("test", pool); svn_stringbuf_strip_whitespace(a); @@ -490,8 +490,8 @@ test_stringbuf_unequal(const char* str1, const char* str2, apr_pool_t *pool) { - a = svn_stringbuf_create(str1, pool); - b = svn_stringbuf_create(str2, pool); + svn_stringbuf_t *a = svn_stringbuf_create(str1, pool); + svn_stringbuf_t *b = svn_stringbuf_create(str2, pool); if (svn_stringbuf_compare(a, b)) return fail(pool, "test failed"); @@ -521,23 +521,58 @@ test24(apr_pool_t *pool) SVN_TEST_ASSERT(length == 1); SVN_TEST_STRING_ASSERT(buffer, "0"); - length = svn__i64toa(buffer, 0x8000000000000000ll); + length = svn__i64toa(buffer, APR_INT64_MIN); SVN_TEST_ASSERT(length == 20); SVN_TEST_STRING_ASSERT(buffer, "-9223372036854775808"); - length = svn__i64toa(buffer, 0x7fffffffffffffffll); + length = svn__i64toa(buffer, APR_INT64_MAX); SVN_TEST_ASSERT(length == 19); SVN_TEST_STRING_ASSERT(buffer, "9223372036854775807"); - length = svn__ui64toa(buffer, 0ull); + length = svn__ui64toa(buffer, 0u); SVN_TEST_ASSERT(length == 1); SVN_TEST_STRING_ASSERT(buffer, "0"); - length = svn__ui64toa(buffer, 0xffffffffffffffffull); + length = svn__ui64toa(buffer, APR_UINT64_MAX); SVN_TEST_ASSERT(length == 20); SVN_TEST_STRING_ASSERT(buffer, 
"18446744073709551615"); - return test_stringbuf_unequal("abc", "abb", pool); + return SVN_NO_ERROR; +} + +static svn_error_t * +sub_test_base36(apr_uint64_t value, const char *base36) +{ + char buffer[SVN_INT64_BUFFER_SIZE]; + apr_size_t length; + apr_size_t expected_length = strlen(base36); + const char *end = buffer; + apr_uint64_t result; + + length = svn__ui64tobase36(buffer, value); + SVN_TEST_ASSERT(length == expected_length); + SVN_TEST_STRING_ASSERT(buffer, base36); + + result = svn__base36toui64(&end, buffer); + SVN_TEST_ASSERT(end - buffer == length); + SVN_TEST_ASSERT(result == value); + + result = svn__base36toui64(NULL, buffer); + SVN_TEST_ASSERT(result == value); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_base36(apr_pool_t *pool) +{ + SVN_ERR(sub_test_base36(0, "0")); + SVN_ERR(sub_test_base36(APR_UINT64_C(1234567890), "kf12oi")); + SVN_ERR(sub_test_base36(APR_UINT64_C(0x7fffffffffffffff), "1y2p0ij32e8e7")); + SVN_ERR(sub_test_base36(APR_UINT64_C(0x8000000000000000), "1y2p0ij32e8e8")); + SVN_ERR(sub_test_base36(APR_UINT64_MAX, "3w5e11264sgsf")); + + return SVN_NO_ERROR; } static svn_error_t * @@ -554,7 +589,7 @@ expect_stringbuf_equal(const svn_stringbuf_t* str1, static svn_error_t * test_stringbuf_insert(apr_pool_t *pool) { - a = svn_stringbuf_create("st , ", pool); + svn_stringbuf_t *a = svn_stringbuf_create("st , ", pool); svn_stringbuf_insert(a, 0, "teflon", 2); SVN_TEST_STRING_ASSERT(a->data, "test , "); @@ -587,7 +622,7 @@ test_stringbuf_insert(apr_pool_t *pool) static svn_error_t * test_stringbuf_remove(apr_pool_t *pool) { - a = svn_stringbuf_create("test hello, world!", pool); + svn_stringbuf_t *a = svn_stringbuf_create("test hello, world!", pool); svn_stringbuf_remove(a, 0, 2); SVN_TEST_STRING_ASSERT(a->data, "st hello, world!"); @@ -599,13 +634,21 @@ test_stringbuf_remove(apr_pool_t *pool) SVN_TEST_STRING_ASSERT(a->data, "stell"); svn_stringbuf_remove(a, 1200, 393); - return expect_stringbuf_equal(a, "stell", pool); + 
SVN_ERR(expect_stringbuf_equal(a, "stell", pool)); + + svn_stringbuf_remove(a, APR_SIZE_MAX, 2); + SVN_ERR(expect_stringbuf_equal(a, "stell", pool)); + + svn_stringbuf_remove(a, 1, APR_SIZE_MAX); + SVN_ERR(expect_stringbuf_equal(a, "s", pool)); + + return SVN_NO_ERROR; } static svn_error_t * test_stringbuf_replace(apr_pool_t *pool) { - a = svn_stringbuf_create("odd with some world?", pool); + svn_stringbuf_t *a = svn_stringbuf_create("odd with some world?", pool); svn_stringbuf_replace(a, 0, 3, "tester", 4); SVN_TEST_STRING_ASSERT(a->data, "test with some world?"); @@ -637,6 +680,12 @@ test_stringbuf_replace(apr_pool_t *pool) svn_stringbuf_ncreate("test hello\0-\0world!\0-\0!", 23, pool))); + svn_stringbuf_replace(a, 1, APR_SIZE_MAX, "x", 1); + SVN_ERR(expect_stringbuf_equal(a, "tx", pool)); + + svn_stringbuf_replace(a, APR_SIZE_MAX, APR_SIZE_MAX, "y", 1); + SVN_ERR(expect_stringbuf_equal(a, "txy", pool)); + return SVN_NO_ERROR; } @@ -648,13 +697,14 @@ test_string_similarity(apr_pool_t *pool) const char *stra; const char *strb; apr_size_t lcs; - int score; + unsigned int score; } tests[] = { -#define SCORE(lcs, len) ((2000 * (lcs) + (len)/2) / (len)) +#define SCORE(lcs, len) \ + ((2 * SVN_STRING__SIM_RANGE_MAX * (lcs) + (len)/2) / (len)) /* Equality */ - {"", "", 0, 1000}, + {"", "", 0, SVN_STRING__SIM_RANGE_MAX}, {"quoth", "quoth", 5, SCORE(5, 5+5)}, /* Deletion at start */ @@ -708,17 +758,20 @@ test_string_similarity(apr_pool_t *pool) for (t = tests; t->stra; ++t) { apr_size_t lcs; - const unsigned int score = + const apr_size_t score = svn_cstring__similarity(t->stra, t->strb, &buffer, &lcs); /* fprintf(stderr, - "lcs %s ~ %s score %.3f (%"APR_SIZE_T_FMT - ") expected %.3f (%"APR_SIZE_T_FMT"))\n", - t->stra, t->strb, score/1000.0, lcs, t->score/1000.0, t->lcs); + "lcs %s ~ %s score %.6f (%"APR_SIZE_T_FMT + ") expected %.6f (%"APR_SIZE_T_FMT"))\n", + t->stra, t->strb, score/1.0/SVN_STRING__SIM_RANGE_MAX, + lcs, t->score/1.0/SVN_STRING__SIM_RANGE_MAX, t->lcs); */ 
if (score != t->score) - return fail(pool, "%s ~ %s score %.3f <> expected %.3f", - t->stra, t->strb, score/1000.0, t->score/1000.0); + return fail(pool, "%s ~ %s score %.6f <> expected %.6f", + t->stra, t->strb, + score/1.0/SVN_STRING__SIM_RANGE_MAX, + t->score/1.0/SVN_STRING__SIM_RANGE_MAX); if (lcs != t->lcs) return fail(pool, @@ -731,7 +784,8 @@ test_string_similarity(apr_pool_t *pool) { const svn_string_t foo = {"svn:foo", 4}; const svn_string_t bar = {"svn:bar", 4}; - if (1000 != svn_string__similarity(&foo, &bar, &buffer, NULL)) + if (SVN_STRING__SIM_RANGE_MAX + != svn_string__similarity(&foo, &bar, &buffer, NULL)) return fail(pool, "'%s'[:4] ~ '%s'[:4] found different", foo.data, bar.data); } @@ -739,6 +793,106 @@ test_string_similarity(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_string_matching(apr_pool_t *pool) +{ + const struct test_data_t + { + const char *a; + const char *b; + apr_size_t match_len; + apr_size_t rmatch_len; + } + tests[] = + { + /* edge cases */ + {"", "", 0, 0}, + {"", "x", 0, 0}, + {"x", "", 0, 0}, + {"x", "x", 1, 1}, + {"", "1234567890abcdef", 0, 0}, + {"1234567890abcdef", "", 0, 0}, + {"1234567890abcdef", "1234567890abcdef", 16, 16}, + + /* left-side matches */ + {"x", "y", 0, 0}, + {"ax", "ay", 1, 0}, + {"ax", "a", 1, 0}, + {"a", "ay", 1, 0}, + {"1234567890abcdef", "1234567890abcdeg", 15, 0}, + {"1234567890abcdef_", "1234567890abcdefg", 16, 0}, + {"12345678_0abcdef", "1234567890abcdeg", 8, 0}, + {"1234567890abcdef", "12345678", 8, 0}, + {"12345678", "1234567890abcdef", 8, 0}, + {"12345678_0ab", "1234567890abcdef", 8, 0}, + + /* right-side matches */ + {"xa", "ya", 0, 1}, + {"xa", "a", 0, 1}, + {"a", "ya", 0, 1}, + {"_234567890abcdef", "1234567890abcdef", 0, 15}, + {"_1234567890abcdef", "x1234567890abcdef", 0, 16}, + {"1234567_90abcdef", "_1234567890abcdef", 0, 8}, + {"1234567890abcdef", "90abcdef", 0, 8}, + {"90abcdef", "1234567890abcdef", 0, 8}, + {"8_0abcdef", "7890abcdef", 0, 7}, + + /* two-side matches 
*/ + {"bxa", "bya", 1, 1}, + {"bxa", "ba", 1, 1}, + {"ba", "bya", 1, 1}, + {"1234567_90abcdef", "1234567890abcdef", 7, 8}, + {"12345678_90abcdef", "1234567890abcdef", 8, 8}, + {"12345678_0abcdef", "1234567890abcdef", 8, 7}, + {"123456_abcdef", "1234sdffdssdf567890abcdef", 4, 6}, + {"1234567890abcdef", "12345678ef", 8, 2}, + {"x_234567890abcdef", "x1234567890abcdef", 1, 15}, + {"1234567890abcdefx", "1234567890abcdex", 15, 1}, + + /* list terminator */ + {NULL} + }; + + const struct test_data_t *test; + for (test = tests; test->a != NULL; ++test) + { + apr_size_t a_len = strlen(test->a); + apr_size_t b_len = strlen(test->b); + apr_size_t max_match = MIN(a_len, b_len); + apr_size_t match_len + = svn_cstring__match_length(test->a, test->b, max_match); + apr_size_t rmatch_len + = svn_cstring__reverse_match_length(test->a + a_len, test->b + b_len, + max_match); + + SVN_TEST_ASSERT(match_len == test->match_len); + SVN_TEST_ASSERT(rmatch_len == test->rmatch_len); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_string_skip_prefix(apr_pool_t *pool) +{ + SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", "12345"), + ""); + SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", "123"), + "45"); + SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", ""), + "12345"); + SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("12345", "23"), + NULL); + SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("1", "12"), + NULL); + SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("", ""), + ""); + SVN_TEST_STRING_ASSERT(svn_cstring_skip_prefix("", "12"), + NULL); + + return SVN_NO_ERROR; +} + /* ==================================================================== If you add a new test to this file, update this array. 
@@ -747,7 +901,10 @@ test_string_similarity(apr_pool_t *pool) */ /* An array of all test functions */ -struct svn_test_descriptor_t test_funcs[] = + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test1, @@ -783,7 +940,7 @@ struct svn_test_descriptor_t test_funcs[] = SVN_TEST_PASS2(test16, "find_char_backward; len = 0 case"), SVN_TEST_PASS2(test17, - "find_char_backward; no occurence case"), + "find_char_backward; no occurrence case"), SVN_TEST_PASS2(test18, "check whitespace removal; common case"), SVN_TEST_PASS2(test19, @@ -798,6 +955,8 @@ struct svn_test_descriptor_t test_funcs[] = "compare stringbufs; same length, different content"), SVN_TEST_PASS2(test24, "verify i64toa"), + SVN_TEST_PASS2(test_base36, + "verify base36 conversion"), SVN_TEST_PASS2(test_stringbuf_insert, "check inserting into svn_stringbuf_t"), SVN_TEST_PASS2(test_stringbuf_remove, @@ -806,5 +965,11 @@ struct svn_test_descriptor_t test_funcs[] = "check replacement in svn_stringbuf_t"), SVN_TEST_PASS2(test_string_similarity, "test string similarity scores"), + SVN_TEST_PASS2(test_string_matching, + "test string matching"), + SVN_TEST_PASS2(test_string_skip_prefix, + "test svn_cstring_skip_prefix()"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/subst_translate-test.c b/subversion/tests/libsvn_subr/subst_translate-test.c index 1e555f2..0c4ee96 100644 --- a/subversion/tests/libsvn_subr/subst_translate-test.c +++ b/subversion/tests/libsvn_subr/subst_translate-test.c @@ -115,7 +115,7 @@ test_svn_subst_translate_string2_null_encoding_helper(apr_pool_t *pool) svn_string_t *new_value = NULL; svn_boolean_t translated_to_utf8 = FALSE; svn_boolean_t translated_line_endings = TRUE; - /* 'Æ', which is 0xc6 in both ISO-8859-1 and Windows-1252 */ + /* The 'AE' ligature, which is 0xc6 in both ISO-8859-1 and Windows-1252 */ svn_string_t *source_string = svn_string_create("\xc6", pool); 
SVN_ERR(svn_subst_translate_string2(&new_value, &translated_to_utf8, @@ -397,7 +397,7 @@ test_svn_subst_long_keywords(apr_pool_t *pool) "01234567890123456789012345678901234567890123456789" "012345678901234567890123456789012345678901234567"; - /* The longest keyword that can be expanded: the value is empty. */ + /* The longest keyword that can be expanded: the value is empty. */ const char keyword_z[] = "Q" "01234567890123456789012345678901234567890123456789" @@ -500,7 +500,9 @@ test_svn_subst_long_keywords(apr_pool_t *pool) return SVN_NO_ERROR; } -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_svn_subst_translate_string2, @@ -519,3 +521,6 @@ struct svn_test_descriptor_t test_funcs[] = "test long keywords (issue 4350)"), SVN_TEST_NULL }; + +SVN_TEST_MAIN + diff --git a/subversion/tests/libsvn_subr/time-test.c b/subversion/tests/libsvn_subr/time-test.c index 82e56b1..51fbe67 100644 --- a/subversion/tests/libsvn_subr/time-test.c +++ b/subversion/tests/libsvn_subr/time-test.c @@ -29,10 +29,10 @@ #include "../svn_test.h" /* All these variables should refer to the same point in time. */ -apr_time_t test_timestamp = APR_TIME_C(1021316450966679); -const char *test_timestring = +static apr_time_t test_timestamp = APR_TIME_C(1021316450966679); +static const char *test_timestring = "2002-05-13T19:00:50.966679Z"; -const char *test_old_timestring = +static const char *test_old_timestring = "Mon 13 May 2002 22:00:50.966679 (day 133, dst 1, gmt_off 010800)"; @@ -339,7 +339,9 @@ test_parse_date(apr_pool_t *pool) /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_time_to_cstring, @@ -354,3 +356,5 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_parse_date"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/translate-test.c b/subversion/tests/libsvn_subr/translate-test.c index 9300503..2436bc7 100644 --- a/subversion/tests/libsvn_subr/translate-test.c +++ b/subversion/tests/libsvn_subr/translate-test.c @@ -48,7 +48,7 @@ /*** Helpers ***/ /* (Almost) all the tests share the same test data. */ -const char *lines[] = +static const char *lines[] = { "Line 1: fairly boring subst test data... blah blah", "Line 2: fairly boring subst test data... blah blah.", @@ -223,15 +223,12 @@ random_eol_marker(void) static svn_error_t * create_file(const char *fname, const char *eol_str, apr_pool_t *pool) { - apr_status_t apr_err; apr_file_t *f; apr_size_t i, j; - apr_err = apr_file_open(&f, fname, + SVN_ERR(svn_io_file_open(&f, fname, (APR_WRITE | APR_CREATE | APR_EXCL | APR_BINARY), - APR_OS_DEFAULT, pool); - if (apr_err) - return svn_error_create(apr_err, NULL, fname); + APR_OS_DEFAULT, pool)); for (i = 0; i < (sizeof(lines) / sizeof(*lines)); i++) { @@ -243,45 +240,13 @@ create_file(const char *fname, const char *eol_str, apr_pool_t *pool) fprintf() doing a newline conversion? */ for (j = 0; this_eol_str[j]; j++) { - apr_err = apr_file_putc(this_eol_str[j], f); - if (apr_err) - return svn_error_create(apr_err, NULL, fname); + SVN_ERR(svn_io_file_putc(this_eol_str[j], f, pool)); } } - apr_err = apr_file_close(f); - if (apr_err) - return svn_error_create(apr_err, NULL, fname); - - return SVN_NO_ERROR; -} - - -/* If FNAME is a regular file, remove it; if it doesn't exist at all, - return success. Otherwise, return error. 
*/ -static svn_error_t * -remove_file(const char *fname, apr_pool_t *pool) -{ - apr_status_t apr_err; - apr_finfo_t finfo; - - if (apr_stat(&finfo, fname, APR_FINFO_TYPE, pool) == APR_SUCCESS) - { - if (finfo.filetype == APR_REG) - { - apr_err = apr_file_remove(fname, pool); - if (apr_err) - return svn_error_create(apr_err, NULL, fname); - } - else - return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, - "non-file '%s' is in the way", fname); - } - - return SVN_NO_ERROR; + return svn_error_trace(svn_io_file_close(f, pool)); } - /* Set up, run, and verify the results of a substitution. * * Create a file TEST_NAME.src using global `lines' as the initial @@ -325,14 +290,14 @@ substitute_and_verify(const char *test_name, apr_size_t idx = 0; apr_size_t i; const char *expect[(sizeof(lines) / sizeof(*lines))]; - const char *src_fname = apr_pstrcat(pool, test_name, ".src", (char *)NULL); - const char *dst_fname = apr_pstrcat(pool, test_name, ".dst", (char *)NULL); + const char *src_fname = apr_pstrcat(pool, test_name, ".src", SVN_VA_NULL); + const char *dst_fname = apr_pstrcat(pool, test_name, ".dst", SVN_VA_NULL); svn_string_t *val; apr_pool_t *subpool = svn_pool_create(pool); /** Clean up from previous tests, set up src data, and convert. 
**/ - SVN_ERR(remove_file(src_fname, pool)); - SVN_ERR(remove_file(dst_fname, pool)); + SVN_ERR(svn_io_remove_file2(src_fname, TRUE, pool)); + SVN_ERR(svn_io_remove_file2(dst_fname, TRUE, pool)); SVN_ERR(create_file(src_fname, src_eol, pool)); if (rev) @@ -395,7 +360,7 @@ substitute_and_verify(const char *test_name, else { svn_error_clear(err); - SVN_ERR(remove_file(src_fname, pool)); + SVN_ERR(svn_io_remove_file2(src_fname, FALSE, pool)); return SVN_NO_ERROR; } @@ -419,27 +384,27 @@ substitute_and_verify(const char *test_name, "Valid $LastChangedRevision: ", rev, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[5 - 1] = apr_pstrcat(pool, "Line 5: ", "Valid $Rev: ", rev, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[26 - 1] = apr_pstrcat(pool, "Line 26: ", "Emptily expanded keyword $Rev: ", rev," $.", - (char *)NULL); + SVN_VA_NULL); expect[29 - 1] = apr_pstrcat(pool, "Line 29: ", "Valid $LastChangedRevision: ", rev, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[30 - 1] = apr_pstrcat(pool, "Line 30: ", "Valid $Rev: ", rev, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); } else /* unexpand */ { @@ -462,31 +427,31 @@ substitute_and_verify(const char *test_name, "Valid $LastChangedDate: ", date, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[13 - 1] = apr_pstrcat(pool, "Line 13: ", "Valid $Date: ", date, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[33 - 1] = apr_pstrcat(pool, "Line 33: ", "Valid $LastChangedDate: ", date, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[34 - 1] = apr_pstrcat(pool, "Line 34: ", "Valid $Date: ", date, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[51 - 1] = apr_pstrcat(pool, "Line 51: ", "same, but with embedded keyword ", "$$$$$$$$Date: ", date, " $$$$$$$$$$.", - (char *)NULL); + SVN_VA_NULL); expect[52 - 1] = apr_pstrcat(pool, "Line 52: ", "same, with expanded, empty keyword ", "$$$$$$Date: ", 
date, " $$$$$$.", - (char *)NULL); + SVN_VA_NULL); } else /* unexpand */ { @@ -511,46 +476,46 @@ substitute_and_verify(const char *test_name, "Valid $LastChangedBy: ", author, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[9 - 1] = apr_pstrcat(pool, "Line 9: ", "Valid $Author: ", author, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[37 - 1] = apr_pstrcat(pool, "Line 37: ", "Valid $LastChangedBy: ", author, - " $, started expanded.", (char *)NULL); + " $, started expanded.", SVN_VA_NULL); expect[38 - 1] = apr_pstrcat(pool, "Line 38: ", "Valid $Author: ", author, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[46 - 1] = apr_pstrcat(pool, "Line 46: ", "Empty $Author: ", author, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[71 - 1] = - apr_pstrcat(pool, ".$veR$Author: ", author, " $", (char *)NULL); + apr_pstrcat(pool, ".$veR$Author: ", author, " $", SVN_VA_NULL); expect[74 - 1] = apr_pstrcat(pool, "Line 74: ", "Valid $Author: ", author, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[79 - 1] = apr_pstrcat(pool, "Line 79: ", "Valid $Author: ", author, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[80 - 1] = apr_pstrcat(pool, "Line 80: ", "Valid $Author: ", author, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[81 - 1] = apr_pstrcat(pool, "Line 81: ", "Valid $Author: ", author, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[82 - 1] = apr_pstrcat(pool, "Line 82: ", "Valid $Author: ", author, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); } else /* unexpand */ { @@ -581,23 +546,23 @@ substitute_and_verify(const char *test_name, expect[16 - 1] = apr_pstrcat(pool, "Line 16: ", "Valid $HeadURL: ", url, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[17 - 1] = apr_pstrcat(pool, "Line 17: ", "Valid $URL: ", url, " $, started unexpanded.", - (char *)NULL); + SVN_VA_NULL); expect[41 - 1] = 
apr_pstrcat(pool, "Line 41: ", "Valid $HeadURL: ", url, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[42 - 1] = apr_pstrcat(pool, "Line 42: ", "Valid $URL: ", url, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); expect[75 - 1] = apr_pstrcat(pool, "Line 75: ", "Valid $URL: ", url, " $, started expanded.", - (char *)NULL); + SVN_VA_NULL); } else /* unexpand */ { @@ -622,14 +587,14 @@ substitute_and_verify(const char *test_name, "Two keywords back to back: " "$Author: ", author, " $" "$Rev: ", rev, " $.", - (char *)NULL); + SVN_VA_NULL); expect[49 - 1] = apr_pstrcat(pool, "Line 49: ", "One keyword, one not, back to back: " "$Author: ", author, " $Rev$.", - (char *)NULL); + SVN_VA_NULL); expect[70 - 1] = - apr_pstrcat(pool, "$Author: ", author, " $Rev$.", (char *)NULL); + apr_pstrcat(pool, "$Author: ", author, " $Rev$.", SVN_VA_NULL); } /* Else Lines 48, 49, and 70 remain unchanged. */ } @@ -641,14 +606,14 @@ substitute_and_verify(const char *test_name, apr_pstrcat(pool, "Line 48: ", "Two keywords back to back: " "$Author$$Rev: ", rev, " $.", - (char *)NULL); + SVN_VA_NULL); expect[49 - 1] = apr_pstrcat(pool, "Line 49: ", "One keyword, one not, back to back: " "$Author$Rev: ", rev, " $.", - (char *)NULL); + SVN_VA_NULL); expect[70 - 1] = - apr_pstrcat(pool, "$Author$Rev: ", rev, " $.", (char *)NULL); + apr_pstrcat(pool, "$Author$Rev: ", rev, " $.", SVN_VA_NULL); } /* Else Lines 48, 49, and 70 remain unchanged. 
*/ } @@ -660,14 +625,14 @@ substitute_and_verify(const char *test_name, apr_pstrcat(pool, "Line 48: ", "Two keywords back to back: " "$Author: ", author, " $$Rev$.", - (char *)NULL); + SVN_VA_NULL); expect[49 - 1] = apr_pstrcat(pool, "Line 49: ", "One keyword, one not, back to back: " "$Author: ", author, " $Rev$.", - (char *)NULL); + SVN_VA_NULL); expect[70 - 1] = - apr_pstrcat(pool, "$Author: ", author, " $Rev$.", (char *)NULL); + apr_pstrcat(pool, "$Author: ", author, " $Rev$.", SVN_VA_NULL); } /* Else Lines 48, 49, and 70 remain unchanged. */ } @@ -684,14 +649,14 @@ substitute_and_verify(const char *test_name, "keyword in a keyword: $Author: ", author, " $Date$ $", - (char *)NULL); + SVN_VA_NULL); } else /* unexpand */ { expect[24 - 1] = apr_pstrcat(pool, "Line 24: ", "keyword in a keyword: $Author$Date$ $", - (char *)NULL); + SVN_VA_NULL); } } else if (date && (! author)) @@ -703,7 +668,7 @@ substitute_and_verify(const char *test_name, "keyword in a keyword: $Author: $Date: ", date, " $ $", - (char *)NULL); + SVN_VA_NULL); } /* Else Line 24 remains unchanged. */ } @@ -716,14 +681,14 @@ substitute_and_verify(const char *test_name, "keyword in a keyword: $Author: ", author, " $Date$ $", - (char *)NULL); + SVN_VA_NULL); } else /* unexpand */ { expect[24 - 1] = apr_pstrcat(pool, "Line 24: ", "keyword in a keyword: $Author$Date$ $", - (char *)NULL); + SVN_VA_NULL); } } /* Else neither author nor date, so Line 24 remains unchanged. */ @@ -769,8 +734,8 @@ substitute_and_verify(const char *test_name, } /* Clean up this test, since successful. 
*/ - SVN_ERR(remove_file(src_fname, pool)); - SVN_ERR(remove_file(dst_fname, pool)); + SVN_ERR(svn_io_remove_file2(src_fname, FALSE, pool)); + SVN_ERR(svn_io_remove_file2(dst_fname, FALSE, pool)); return SVN_NO_ERROR; } @@ -862,7 +827,7 @@ static svn_error_t * mixed_to_lf(apr_pool_t *pool) { return substitute_and_verify - ("cr_to_lf", NULL, "\n", 1, NULL, NULL, NULL, NULL, 1, pool); + ("mixed_to_lf", NULL, "\n", 1, NULL, NULL, NULL, NULL, 1, pool); } @@ -1096,10 +1061,10 @@ static svn_error_t * unexpand_author(apr_pool_t *pool) { SVN_ERR(substitute_and_verify - ("author", "\n", NULL, 0, NULL, NULL, "jrandom", NULL, 0, pool)); + ("unexpand_author", "\n", NULL, 0, NULL, NULL, "jrandom", NULL, 0, pool)); SVN_ERR(substitute_and_verify - ("author", "\r\n", NULL, 0, NULL, NULL, "jrandom", NULL, 0, pool)); + ("unexpand_author", "\r\n", NULL, 0, NULL, NULL, "jrandom", NULL, 0, pool)); return SVN_NO_ERROR; } @@ -1109,11 +1074,11 @@ static svn_error_t * unexpand_date(apr_pool_t *pool) { SVN_ERR(substitute_and_verify - ("date", "\n", NULL, 0, + ("unexpand_date", "\n", NULL, 0, NULL, "Wed Jan 9 07:49:05 2002", NULL, NULL, 0, pool)); SVN_ERR(substitute_and_verify - ("date", "\r\n", NULL, 0, + ("unexpand_date", "\r\n", NULL, 0, NULL, "Wed Jan 9 07:49:05 2002", NULL, NULL, 0, pool)); return SVN_NO_ERROR; @@ -1124,11 +1089,11 @@ static svn_error_t * unexpand_author_date(apr_pool_t *pool) { SVN_ERR(substitute_and_verify - ("author_date", "\n", NULL, 0, + ("unexpand_author_date", "\n", NULL, 0, NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 0, pool)); SVN_ERR(substitute_and_verify - ("author_date", "\r\n", NULL, 0, + ("unexpand_author_date", "\r\n", NULL, 0, NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 0, pool)); return SVN_NO_ERROR; @@ -1139,11 +1104,11 @@ static svn_error_t * unexpand_author_rev(apr_pool_t *pool) { SVN_ERR(substitute_and_verify - ("author_rev", "\n", NULL, 0, + ("unexpand_author_rev", "\n", NULL, 0, "1729", NULL, "jrandom", NULL, 0, pool)); 
SVN_ERR(substitute_and_verify - ("author_rev", "\r\n", NULL, 0, + ("unexpand_author_rev", "\r\n", NULL, 0, "1729", NULL, "jrandom", NULL, 0, pool)); return SVN_NO_ERROR; @@ -1154,11 +1119,11 @@ static svn_error_t * unexpand_rev(apr_pool_t *pool) { SVN_ERR(substitute_and_verify - ("rev", "\n", NULL, 0, + ("unexpand_rev", "\n", NULL, 0, "1729", NULL, NULL, NULL, 0, pool)); SVN_ERR(substitute_and_verify - ("rev", "\r\n", NULL, 0, + ("unexpand_rev", "\r\n", NULL, 0, "1729", NULL, NULL, NULL, 0, pool)); return SVN_NO_ERROR; @@ -1169,11 +1134,11 @@ static svn_error_t * unexpand_rev_url(apr_pool_t *pool) { SVN_ERR(substitute_and_verify - ("rev_url", "\n", NULL, 0, + ("unexpand_rev_url", "\n", NULL, 0, "1729", NULL, NULL, "http://subversion.tigris.org", 0, pool)); SVN_ERR(substitute_and_verify - ("rev_url", "\r\n", NULL, 0, + ("unexpand_rev_url", "\r\n", NULL, 0, "1729", NULL, NULL, "http://subversion.tigris.org", 0, pool)); return SVN_NO_ERROR; @@ -1184,7 +1149,7 @@ static svn_error_t * unexpand_author_date_rev_url(apr_pool_t *pool) { SVN_ERR(substitute_and_verify - ("author_date_rev_url", "\n", NULL, 0, + ("unexpand_author_date_rev_url", "\n", NULL, 0, "1729", "Wed Jan 9 07:49:05 2002", "jrandom", @@ -1192,7 +1157,7 @@ unexpand_author_date_rev_url(apr_pool_t *pool) 1, pool)); SVN_ERR(substitute_and_verify - ("author_date_rev_url", "\r\n", NULL, 0, + ("unexpand_author_date_rev_url", "\r\n", NULL, 0, "1729", "Wed Jan 9 07:49:05 2002", "jrandom", @@ -1210,7 +1175,7 @@ static svn_error_t * lf_to_crlf_unexpand_author(apr_pool_t *pool) { return substitute_and_verify - ("lf_to_crlf_author", "\n", "\r\n", 0, + ("lf_to_crlf_unexpand_author", "\n", "\r\n", 0, NULL, NULL, "jrandom", NULL, 0, pool); } @@ -1219,7 +1184,7 @@ static svn_error_t * mixed_to_lf_unexpand_author_date(apr_pool_t *pool) { return substitute_and_verify - ("mixed_to_lf_author_date", NULL, "\n", 1, + ("mixed_to_lf_unexpand_author_date", NULL, "\n", 1, NULL, "Wed Jan 9 07:49:05 2002", "jrandom", NULL, 0, pool); } 
@@ -1228,7 +1193,7 @@ static svn_error_t * crlf_to_cr_unexpand_author_rev(apr_pool_t *pool) { return substitute_and_verify - ("crlf_to_cr_author_rev", "\r\n", "\r", 0, + ("crlf_to_cr_unexpand_author_rev", "\r\n", "\r", 0, "1729", NULL, "jrandom", NULL, 0, pool); } @@ -1237,7 +1202,7 @@ static svn_error_t * cr_to_crlf_unexpand_rev(apr_pool_t *pool) { return substitute_and_verify - ("cr_to_crlf_rev", "\r", "\r\n", 0, + ("cr_to_crlf_unexpand_rev", "\r", "\r\n", 0, "1729", NULL, NULL, NULL, 0, pool); } @@ -1246,7 +1211,7 @@ static svn_error_t * cr_to_crlf_unexpand_rev_url(apr_pool_t *pool) { return substitute_and_verify - ("cr_to_crlf_rev_url", "\r", "\r\n", 0, + ("cr_to_crlf_unexpand_rev_url", "\r", "\r\n", 0, "1729", NULL, NULL, "http://subversion.tigris.org", 0, pool); } @@ -1255,7 +1220,7 @@ static svn_error_t * mixed_to_crlf_unexpand_author_date_rev_url(apr_pool_t *pool) { return substitute_and_verify - ("mixed_to_crlf_author_date_rev_url", NULL, "\r\n", 1, + ("mixed_to_crlf_unexpand_author_date_rev_url", NULL, "\r\n", 1, "1729", "Wed Jan 9 07:49:05 2002", "jrandom", @@ -1268,7 +1233,9 @@ mixed_to_crlf_unexpand_author_date_rev_url(apr_pool_t *pool) /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 7; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, /* The no-op conversion. */ @@ -1362,3 +1329,5 @@ struct svn_test_descriptor_t test_funcs[] = "mixed_to_crlf; unexpand author, date, rev, url"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/utf-test.c b/subversion/tests/libsvn_subr/utf-test.c index 2028e14..dd81ccd 100644 --- a/subversion/tests/libsvn_subr/utf-test.c +++ b/subversion/tests/libsvn_subr/utf-test.c @@ -25,6 +25,7 @@ #include "svn_utf.h" #include "svn_pools.h" +#include "private/svn_string_private.h" #include "private/svn_utf_private.h" /* Random number seed. Yes, it's global, just pretend you can't see it. 
*/ @@ -226,7 +227,7 @@ test_utf_cstring_to_utf8_ex2(apr_pool_t *pool) const char *expected_result; const char *from_page; } tests[] = { - {"ascii text\n", "ascii text\n", "unexistant-page"}, + {"ascii text\n", "ascii text\n", "unexistent-page"}, {"Edelwei\xdf", "Edelwei\xc3\x9f", "ISO-8859-1"} }; @@ -266,7 +267,7 @@ test_utf_cstring_from_utf8_ex2(apr_pool_t *pool) const char *expected_result; const char *to_page; } tests[] = { - {"ascii text\n", "ascii text\n", "unexistant-page"}, + {"ascii text\n", "ascii text\n", "unexistent-page"}, {"Edelwei\xc3\x9f", "Edelwei\xdf", "ISO-8859-1"} }; @@ -294,10 +295,540 @@ test_utf_cstring_from_utf8_ex2(apr_pool_t *pool) return SVN_NO_ERROR; } +/* Test normalization-independent UTF-8 string comparison */ +static svn_error_t * +test_utf_collated_compare(apr_pool_t *pool) +{ + /* Normalized: NFC */ + static const char nfc[] = + "\xe1\xb9\xa8" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "\xe1\xb8\x87" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "\xe1\xb8\x9d" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "\xc5\xa1" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "\xe1\xbb\x9d" /* o with grave and hook */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + /* Normalized: NFD */ + static const char nfd[] = + "S\xcc\xa3\xcc\x87" /* S with dot above and below */ + "u\xcc\x8a" /* u with ring */ + "b\xcc\xb1" /* b with macron below */ + "v\xcc\x83" /* v with tilde */ + "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */ + "r\xcc\x8f" /* r with double grave */ + "s\xcc\x8c" /* s with caron */ + "i\xcc\x88\xcc\x81" /* i with diaeresis and acute */ + "o\xcc\x9b\xcc\x80" /* o with grave and hook */ + "n\xcc\xad"; /* n with circumflex below */ + + /* Mixed, denormalized */ + static const char mixup[] = + "S\xcc\x87\xcc\xa3" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "b\xcc\xb1" /* b with macron below */ + "\xe1\xb9\xbd" /* v with 
tilde */ + "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "s\xcc\x8c" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "o\xcc\x80\xcc\x9b" /* o with grave and hook */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + static const char longer[] = + "\xe1\xb9\xa8" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "\xe1\xb8\x87" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "\xe1\xb8\x9d" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "\xc5\xa1" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "\xe1\xbb\x9d" /* o with grave and hook */ + "\xe1\xb9\x8b" /* n with circumflex below */ + "X"; + + static const char shorter[] = + "\xe1\xb9\xa8" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "\xe1\xb8\x87" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "\xe1\xb8\x9d" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "\xc5\xa1" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "\xe1\xbb\x9d"; /* o with grave and hook */ + + static const char lowcase[] = + "s\xcc\x87\xcc\xa3" /* s with dot above and below */ + "\xc5\xaf" /* u with ring */ + "b\xcc\xb1" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "s\xcc\x8c" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "o\xcc\x80\xcc\x9b" /* o with grave and hook */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + static const struct utfcmp_test_t { + const char *stra; + char op; + const char *strb; + const char *taga; + const char *tagb; + } utfcmp_tests[] = { + /* Empty key */ + {"", '=', "", "empty", "empty"}, + {"", '<', "a", "empty", "nonempty"}, + {"a", '>', "", "nonempty", "empty"}, + + /* Deterministic ordering */ + {"a", '<', "b", "a", "b"}, + {"b", '<', "c", "b", "c"}, + 
{"a", '<', "c", "a", "c"}, + + /* Normalized equality */ + {nfc, '=', nfd, "nfc", "nfd"}, + {nfd, '=', nfc, "nfd", "nfc"}, + {nfc, '=', mixup, "nfc", "mixup"}, + {nfd, '=', mixup, "nfd", "mixup"}, + {mixup, '=', nfd, "mixup", "nfd"}, + {mixup, '=', nfc, "mixup", "nfc"}, + + /* Key length */ + {nfc, '<', longer, "nfc", "longer"}, + {longer, '>', nfc, "longer", "nfc"}, + {nfd, '>', shorter, "nfd", "shorter"}, + {shorter, '<', nfd, "shorter", "nfd"}, + {mixup, '<', lowcase, "mixup", "lowcase"}, + {lowcase, '>', mixup, "lowcase", "mixup"}, + + {NULL, 0, NULL, NULL, NULL} + }; + + + const struct utfcmp_test_t *ut; + svn_membuf_t bufa, bufb; + svn_membuf__create(&bufa, 0, pool); + svn_membuf__create(&bufb, 0, pool); + + srand(111); + for (ut = utfcmp_tests; ut->stra; ++ut) + { + const svn_boolean_t implicit_size = (rand() % 17) & 1; + const apr_size_t lena = (implicit_size + ? SVN_UTF__UNKNOWN_LENGTH : strlen(ut->stra)); + const apr_size_t lenb = (implicit_size + ? SVN_UTF__UNKNOWN_LENGTH : strlen(ut->strb)); + int result; + + SVN_ERR(svn_utf__normcmp(&result, + ut->stra, lena, ut->strb, lenb, + &bufa, &bufb)); + + /* UCS-4 debugging dump of the decomposed strings + { + const apr_int32_t *const ucsbufa = bufa.data; + const apr_int32_t *const ucsbufb = bufb.data; + apr_size_t i; + + printf("(%c)%7s %c %s\n", ut->op, + ut->taga, (!result ? '=' : (result < 0 ? '<' : '>')), ut->tagb); + + for (i = 0; i < bufa.size || i < bufb.size; ++i) + { + if (i < bufa.size && i < bufb.size) + printf(" U+%04X U+%04X\n", ucsbufa[i], ucsbufb[i]); + else if (i < bufa.size) + printf(" U+%04X\n", ucsbufa[i]); + else + printf(" U+%04X\n", ucsbufb[i]); + } + } + */ + + if (('=' == ut->op && 0 != result) + || ('<' == ut->op && 0 <= result) + || ('>' == ut->op && 0 >= result)) + { + return svn_error_createf + (SVN_ERR_TEST_FAILED, NULL, + "Ut->Op '%s' %c '%s' but '%s' %c '%s'", + ut->taga, ut->op, ut->tagb, + ut->taga, (!result ? '=' : (result < 0 ? 
'<' : '>')), ut->tagb); + } + } + + return SVN_NO_ERROR; +} + + + +static svn_error_t * +test_utf_pattern_match(apr_pool_t *pool) +{ + static const struct glob_test_t { + svn_boolean_t sql_like; + svn_boolean_t matches; + const char *pattern; + const char *string; + const char *escape; + } glob_tests[] = { +#define LIKE_MATCH TRUE, TRUE +#define LIKE_FAIL TRUE, FALSE +#define GLOB_MATCH FALSE, TRUE +#define GLOB_FAIL FALSE, FALSE + + {LIKE_FAIL, "", "test", NULL}, + {GLOB_FAIL, "", "test", NULL}, + {LIKE_FAIL, "", "%", NULL}, + {GLOB_FAIL, "", "*", NULL}, + {LIKE_FAIL, "test", "%", NULL}, + {GLOB_FAIL, "test", "*", NULL}, + {LIKE_MATCH, "test", "test", NULL}, + {GLOB_MATCH, "test", "test", NULL}, + {LIKE_MATCH, "t\xe1\xb8\x9dst", "te\xcc\xa7\xcc\x86st", NULL}, + {GLOB_MATCH, "te\xcc\xa7\xcc\x86st", "t\xe1\xb8\x9dst", NULL}, + + {LIKE_FAIL, "test", "test", "\xe1\xb8\x9d"}, /* escape char not ascii */ + {LIKE_FAIL, "test", "test", ""}, /* empty escape string */ + + {LIKE_MATCH, "te#st", "test", "#"}, + {LIKE_FAIL, "te#st", "test", NULL}, + {GLOB_MATCH, "te\\st", "test", NULL}, + {LIKE_MATCH, "te##st", "te#st", "#"}, + {LIKE_FAIL, "te##st", "te#st", NULL}, + {GLOB_MATCH, "te\\\\st", "te\\st", NULL}, + {GLOB_FAIL, "te\\\\st", "te\\st", "\\"}, /* escape char with glob */ + {LIKE_FAIL, "te#%t", "te%t", NULL}, + {LIKE_MATCH, "te#%t", "te%t", "#"}, + {GLOB_MATCH, "te\\*t", "te*t", NULL}, + {LIKE_FAIL, "te#%t", "test", NULL}, + {GLOB_FAIL, "te\\*t", "test", NULL}, + {LIKE_FAIL, "te#_t", "te_t", NULL}, + {LIKE_MATCH, "te#_t", "te_t", "#"}, + {GLOB_MATCH, "te\\?t", "te?t", NULL}, + {LIKE_FAIL, "te#_t", "test", NULL}, + {LIKE_FAIL, "te#_t", "test", "#"}, + {GLOB_FAIL, "te\\?t", "test", NULL}, + + {LIKE_MATCH, "_est", "test", NULL}, + {GLOB_MATCH, "?est", "test", NULL}, + {LIKE_MATCH, "te_t", "test", NULL}, + {GLOB_MATCH, "te?t", "test", NULL}, + {LIKE_MATCH, "tes_", "test", NULL}, + {GLOB_MATCH, "tes?", "test", NULL}, + {LIKE_FAIL, "test_", "test", NULL}, + {GLOB_FAIL, 
"test?", "test", NULL}, + + {LIKE_MATCH, "[s%n]", "[subversion]", NULL}, + {GLOB_FAIL, "[s*n]", "[subversion]", NULL}, + {LIKE_MATCH, "#[s%n]", "[subversion]", "#"}, + {GLOB_MATCH, "\\[s*n]", "[subversion]", NULL}, + + {GLOB_MATCH, ".[\\-\\t]", ".t", NULL}, + {GLOB_MATCH, "test*?*[a-z]*", "testgoop", NULL}, + {GLOB_MATCH, "te[^x]t", "test", NULL}, + {GLOB_MATCH, "te[^abc]t", "test", NULL}, + {GLOB_MATCH, "te[^x]t", "test", NULL}, + {GLOB_MATCH, "te[!x]t", "test", NULL}, + {GLOB_FAIL, "te[^x]t", "text", NULL}, + {GLOB_FAIL, "te[^\\x]t", "text", NULL}, + {GLOB_FAIL, "te[^x\\", "text", NULL}, + {GLOB_FAIL, "te[/]t", "text", NULL}, + {GLOB_MATCH, "te[r-t]t", "test", NULL}, + {GLOB_MATCH, "te[r-Tz]t", "tezt", NULL}, + {GLOB_FAIL, "te[R-T]t", "tent", NULL}, +/* {GLOB_MATCH, "tes[]t]", "test", NULL}, */ + {GLOB_MATCH, "tes[t-]", "test", NULL}, + {GLOB_MATCH, "tes[t-]]", "test]", NULL}, + {GLOB_FAIL, "tes[t-]]", "test", NULL}, + {GLOB_FAIL, "tes[u-]", "test", NULL}, + {GLOB_FAIL, "tes[t-]", "tes[t-]", NULL}, + {GLOB_MATCH, "test[/-/]", "test/", NULL}, + {GLOB_MATCH, "test[\\/-/]", "test/", NULL}, + {GLOB_MATCH, "test[/-\\/]", "test/", NULL}, + +#undef LIKE_MATCH +#undef LIKE_FAIL +#undef GLOB_MATCH +#undef GLOB_FAIL + + {FALSE, FALSE, NULL, NULL, NULL} + }; + + const struct glob_test_t *gt; + svn_membuf_t bufa, bufb, bufc; + svn_membuf__create(&bufa, 0, pool); + svn_membuf__create(&bufb, 0, pool); + svn_membuf__create(&bufc, 0, pool); + + srand(79); + for (gt = glob_tests; gt->pattern; ++gt) + { + const svn_boolean_t implicit_size = (rand() % 13) & 1; + const apr_size_t lenptn = (implicit_size + ? SVN_UTF__UNKNOWN_LENGTH + : strlen(gt->pattern)); + const apr_size_t lenstr = (implicit_size + ? SVN_UTF__UNKNOWN_LENGTH + : strlen(gt->string)); + const apr_size_t lenesc = (implicit_size + ? SVN_UTF__UNKNOWN_LENGTH + : (gt->escape ? 
strlen(gt->escape) : 0)); + svn_boolean_t match; + svn_error_t *err; + + + err = svn_utf__glob(&match, + gt->pattern, lenptn, + gt->string, lenstr, + gt->escape, lenesc, + gt->sql_like, &bufa, &bufb, &bufc); + + if (!gt->sql_like && gt->escape && !err) + return svn_error_create + (SVN_ERR_TEST_FAILED, err, "Failed to detect GLOB ESCAPE"); + + if ((err && gt->matches) + || (!err && !match != !gt->matches)) + { + if (gt->sql_like) + return svn_error_createf + (SVN_ERR_TEST_FAILED, err, + "Wrong result: %s'%s' LIKE '%s'%s%s%s%s", + (gt->matches ? "NOT " : ""), gt->string, gt->pattern, + (gt->escape ? " ESCAPE " : ""), (gt->escape ? "'" : ""), + (gt->escape ? gt->escape : ""), (gt->escape ? "'" : "")); + else + return svn_error_createf + (SVN_ERR_TEST_FAILED, err, "Wrong result: %s%s GLOB %s", + (gt->matches ? "NOT " : ""), gt->string, gt->pattern); + } + + if (err) + svn_error_clear(err); + } + + return SVN_NO_ERROR; +} + + +static svn_error_t * +test_utf_fuzzy_escape(apr_pool_t *pool) +{ + + /* Accented latin, mixed normalization */ + static const char mixup[] = + "S\xcc\x87\xcc\xa3" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "b\xcc\xb1" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "s\xcc\x8c" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "o\xcc\x80\xcc\x9b" /* o with grave and hook */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + /* As above, but latin lowercase 'o' replaced with Greek 'omicron' */ + static const char greekish[] = + "S\xcc\x87\xcc\xa3" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "b\xcc\xb1" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "s\xcc\x8c" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "\xce\xbf\xcc\x80\xcc\x9b" /* omicron 
with grave and hook */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + /* More interesting invalid characters. */ + static const char invalid[] = + "Not Unicode: \xef\xb7\x91;" /* U+FDD1 */ + "Out of range: \xf4\x90\x80\x81;" /* U+110001 */ + "Not UTF-8: \xe6;" + "Null byte: \0;"; + + const char *fuzzy; + + fuzzy = svn_utf__fuzzy_escape(mixup, strlen(mixup), pool); + SVN_TEST_ASSERT(0 == strcmp(fuzzy, "Subversion")); + + fuzzy = svn_utf__fuzzy_escape(greekish, strlen(greekish), pool); + SVN_TEST_ASSERT(0 == strcmp(fuzzy, "Subversi{U+03BF}n")); + + fuzzy = svn_utf__fuzzy_escape(invalid, sizeof(invalid) - 1, pool); + /*fprintf(stderr, "%s\n", fuzzy);*/ + SVN_TEST_ASSERT(0 == strcmp(fuzzy, + "Not Unicode: {U?FDD1};" + "Out of range: ?\\F4?\\90?\\80?\\81;" + "Not UTF-8: ?\\E6;" + "Null byte: \\0;")); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_utf_is_normalized(apr_pool_t *pool) +{ + /* Normalized: NFC */ + static const char nfc[] = + "\xe1\xb9\xa8" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "\xe1\xb8\x87" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "\xe1\xb8\x9d" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "\xc5\xa1" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "\xe1\xbb\x9d" /* o with grave and hook */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + /* Normalized: NFD */ + static const char nfd[] = + "S\xcc\xa3\xcc\x87" /* S with dot above and below */ + "u\xcc\x8a" /* u with ring */ + "b\xcc\xb1" /* b with macron below */ + "v\xcc\x83" /* v with tilde */ + "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */ + "r\xcc\x8f" /* r with double grave */ + "s\xcc\x8c" /* s with caron */ + "i\xcc\x88\xcc\x81" /* i with diaeresis and acute */ + "o\xcc\x9b\xcc\x80" /* o with grave and hook */ + "n\xcc\xad"; /* n with circumflex below */ + + /* Mixed, denormalized */ + static const char mixup[] = + "S\xcc\x87\xcc\xa3" /* S with dot above and below */ + 
"\xc5\xaf" /* u with ring */ + "b\xcc\xb1" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "e\xcc\xa7\xcc\x86" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "s\xcc\x8c" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "o\xcc\x80\xcc\x9b" /* o with grave and hook */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + /* Invalid UTF-8 */ + static const char invalid[] = + "\xe1\xb9\xa8" /* S with dot above and below */ + "\xc5\xaf" /* u with ring */ + "\xe1\xb8\x87" /* b with macron below */ + "\xe1\xb9\xbd" /* v with tilde */ + "\xe1\xb8\x9d" /* e with breve and cedilla */ + "\xc8\x91" /* r with double grave */ + "\xc5\xa1" /* s with caron */ + "\xe1\xb8\xaf" /* i with diaeresis and acute */ + "\xe6" /* Invalid byte */ + "\xe1\xb9\x8b"; /* n with circumflex below */ + + SVN_ERR_ASSERT(svn_utf__is_normalized(nfc, pool)); + SVN_ERR_ASSERT(!svn_utf__is_normalized(nfd, pool)); + SVN_ERR_ASSERT(!svn_utf__is_normalized(mixup, pool)); + SVN_ERR_ASSERT(!svn_utf__is_normalized(invalid, pool)); + + return SVN_NO_ERROR; +} + + +static svn_error_t * +test_utf_conversions(apr_pool_t *pool) +{ + static const struct cvt_test_t + { + svn_boolean_t sixteenbit; + svn_boolean_t bigendian; + const char *source; + const char *result; + } tests[] = { + +#define UTF_32_LE FALSE, FALSE +#define UTF_32_BE FALSE, TRUE +#define UTF_16_LE TRUE, FALSE +#define UTF_16_BE TRUE, TRUE + + /* Normal character conversion */ + { UTF_32_LE, "t\0\0\0" "e\0\0\0" "s\0\0\0" "t\0\0\0" "\0\0\0\0", "test" }, + { UTF_32_BE, "\0\0\0t" "\0\0\0e" "\0\0\0s" "\0\0\0t" "\0\0\0\0", "test" }, + { UTF_16_LE, "t\0" "e\0" "s\0" "t\0" "\0\0", "test" }, + { UTF_16_BE, "\0t" "\0e" "\0s" "\0t" "\0\0", "test" }, + + /* Valid surrogate pairs */ + { UTF_16_LE, "\x00\xD8" "\x00\xDC" "\0\0", "\xf0\x90\x80\x80" }, /* U+010000 */ + { UTF_16_LE, "\x34\xD8" "\x1E\xDD" "\0\0", "\xf0\x9d\x84\x9e" }, /* U+01D11E */ + { UTF_16_LE, "\xFF\xDB" "\xFD\xDF" "\0\0", 
"\xf4\x8f\xbf\xbd" }, /* U+10FFFD */ + + { UTF_16_BE, "\xD8\x00" "\xDC\x00" "\0\0", "\xf0\x90\x80\x80" }, /* U+010000 */ + { UTF_16_BE, "\xD8\x34" "\xDD\x1E" "\0\0", "\xf0\x9d\x84\x9e" }, /* U+01D11E */ + { UTF_16_BE, "\xDB\xFF" "\xDF\xFD" "\0\0", "\xf4\x8f\xbf\xbd" }, /* U+10FFFD */ + + /* Swapped, single and trailing surrogate pairs */ + { UTF_16_LE, "*\0" "\x00\xDC" "\x00\xD8" "*\0\0\0", "*\xed\xb0\x80" "\xed\xa0\x80*" }, + { UTF_16_LE, "*\0" "\x1E\xDD" "*\0\0\0", "*\xed\xb4\x9e*" }, + { UTF_16_LE, "*\0" "\xFF\xDB" "*\0\0\0", "*\xed\xaf\xbf*" }, + { UTF_16_LE, "\x1E\xDD" "\0\0", "\xed\xb4\x9e" }, + { UTF_16_LE, "\xFF\xDB" "\0\0", "\xed\xaf\xbf" }, + + { UTF_16_BE, "\0*" "\xDC\x00" "\xD8\x00" "\0*\0\0", "*\xed\xb0\x80" "\xed\xa0\x80*" }, + { UTF_16_BE, "\0*" "\xDD\x1E" "\0*\0\0", "*\xed\xb4\x9e*" }, + { UTF_16_BE, "\0*" "\xDB\xFF" "\0*\0\0", "*\xed\xaf\xbf*" }, + { UTF_16_BE, "\xDD\x1E" "\0\0", "\xed\xb4\x9e" }, + { UTF_16_BE, "\xDB\xFF" "\0\0", "\xed\xaf\xbf" }, + +#undef UTF_32_LE +#undef UTF_32_BE +#undef UTF_16_LE +#undef UTF_16_BE + + { 0 } + }; + + const struct cvt_test_t *tc; + const svn_string_t *result; + int i; + + for (i = 1, tc = tests; tc->source; ++tc, ++i) + { + if (tc->sixteenbit) + SVN_ERR(svn_utf__utf16_to_utf8(&result, (const void*)tc->source, + SVN_UTF__UNKNOWN_LENGTH, + tc->bigendian, pool, pool)); + else + SVN_ERR(svn_utf__utf32_to_utf8(&result, (const void*)tc->source, + SVN_UTF__UNKNOWN_LENGTH, + tc->bigendian, pool, pool)); + SVN_ERR_ASSERT(0 == strcmp(result->data, tc->result)); + } + + /* Test counted strings with NUL characters */ + SVN_ERR(svn_utf__utf16_to_utf8( + &result, (void*)("x\0" "\0\0" "y\0" "*\0"), 3, + FALSE, pool, pool)); + SVN_ERR_ASSERT(0 == memcmp(result->data, "x\0y", 3)); + + SVN_ERR(svn_utf__utf32_to_utf8( + &result, + (void*)("\0\0\0x" "\0\0\0\0" "\0\0\0y" "\0\0\0*"), 3, + TRUE, pool, pool)); + SVN_ERR_ASSERT(0 == memcmp(result->data, "x\0y", 3)); + + return SVN_NO_ERROR; +} + + /* The test table. 
*/ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(utf_validate, @@ -308,5 +839,17 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_utf_cstring_to_utf8_ex2"), SVN_TEST_PASS2(test_utf_cstring_from_utf8_ex2, "test svn_utf_cstring_from_utf8_ex2"), + SVN_TEST_PASS2(test_utf_collated_compare, + "test svn_utf__normcmp"), + SVN_TEST_PASS2(test_utf_pattern_match, + "test svn_utf__glob"), + SVN_TEST_PASS2(test_utf_fuzzy_escape, + "test svn_utf__fuzzy_escape"), + SVN_TEST_PASS2(test_utf_is_normalized, + "test svn_utf__is_normalized"), + SVN_TEST_PASS2(test_utf_conversions, + "test svn_utf__utf{16,32}_to_utf8"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_subr/x509-test.c b/subversion/tests/libsvn_subr/x509-test.c new file mode 100644 index 0000000..a3806b8 --- /dev/null +++ b/subversion/tests/libsvn_subr/x509-test.c @@ -0,0 +1,848 @@ +/* + * x509-test.c -- test the x509 parser functions + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + */ + +#include <string.h> +#include "svn_x509.h" +#include "svn_base64.h" +#include "svn_time.h" +#include "svn_pools.h" +#include "svn_string.h" + +#include "../svn_test.h" + +struct x509_test { + const char *base64_cert; /* Base64 encoded DER X.509 cert */ + const char *subject; /* Subject Distinguished Name */ + const char *subject_oids; /* Space separated list of oids in Subject */ + const char *issuer; /* Issuer Distinguished Name */ + const char *issuer_oids; /* Space separated list of oids in Issuer */ + + /* These timesamps are in the format that svn_time_to_cstring() produces. + * This is not the same string as the parser returns since it returns + * the ressult of svn_time_to_human_cstring(), which is in the local + * timezone. So we can't store exactly what the parser will output. */ + const char *valid_from; + const char *valid_to; + const char *hostnames; + const char *sha1_digest; +}; + +static struct x509_test cert_tests[] = { + /* contains extensions and uses a sha256 algorithm */ + { "MIIEtzCCA5+gAwIBAgIQWGBOrapkezd+BWVsAtmtmTANBgkqhkiG9w0BAQsFADA8" + "MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMVGhhd3RlLCBJbmMuMRYwFAYDVQQDEw1U" + "aGF3dGUgU1NMIENBMB4XDTE0MDQxMTAwMDAwMFoXDTE2MDQwNzIzNTk1OVowgYsx" + "CzAJBgNVBAYTAlVTMREwDwYDVQQIEwhNYXJ5bGFuZDEUMBIGA1UEBxQLRm9yZXN0" + "IEhpbGwxIzAhBgNVBAoUGkFwYWNoZSBTb2Z0d2FyZSBGb3VuZGF0aW9uMRcwFQYD" + "VQQLFA5JbmZyYXN0cnVjdHVyZTEVMBMGA1UEAxQMKi5hcGFjaGUub3JnMIIBIjAN" + "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Tq4mH+stRoxe4xth8tUCgLt+P4L" + "D/JWZz4a2IecaaAk57vIlTxEyP16fUShUfxVJnD0KV11zv2qaEUXNaA6hKd4H/oB" + "u2OyGev+quRM+aFCjWqASkXt7fLGsIkHAwP3XwBVBpARbcXJeCjCBxqaYrQqS8LT" + "wfPUD9eYncGlQ+ixb3Bosy7TmkWKeLsRdS90cAO/rdgQ8OI7kLT/1tr5GpF9RmXo" + "RnVqMP+U0zGd/BNNSneg7emb7TxLzxeMKZ7QbF4MZi8RRN11spvx8/f92CiYrGGu" + "y67VdOGPaomYc+VZ2syLwduHGK40ADrEK3+MQpsRFB0dM08j9bhpr5A44wIDAQAB" + "o4IBYzCCAV8wFwYDVR0RBBAwDoIMKi5hcGFjaGUub3JnMAkGA1UdEwQCMAAwQgYD" + 
"VR0gBDswOTA3BgpghkgBhvhFAQc2MCkwJwYIKwYBBQUHAgEWG2h0dHBzOi8vd3d3" + "LnRoYXd0ZS5jb20vY3BzLzAOBgNVHQ8BAf8EBAMCBaAwHwYDVR0jBBgwFoAUp6KD" + "uzRFQD381TBPErk+oQGf9tswOgYDVR0fBDMwMTAvoC2gK4YpaHR0cDovL3N2ci1v" + "di1jcmwudGhhd3RlLmNvbS9UaGF3dGVPVi5jcmwwHQYDVR0lBBYwFAYIKwYBBQUH" + "AwEGCCsGAQUFBwMCMGkGCCsGAQUFBwEBBF0wWzAiBggrBgEFBQcwAYYWaHR0cDov" + "L29jc3AudGhhd3RlLmNvbTA1BggrBgEFBQcwAoYpaHR0cDovL3N2ci1vdi1haWEu" + "dGhhd3RlLmNvbS9UaGF3dGVPVi5jZXIwDQYJKoZIhvcNAQELBQADggEBAF52BLvl" + "x5or9/aO7+cPhxuPxwiNRgbvHdCakD7n8vzjNyct9fKp6/XxB6GQiTZ0nZPJOyIu" + "Pi1QDLKOXvaPeLKDBilL/+mrn/ev3s/aRQSrUsieKDoQnqtmlxEHc/T3+Ni/RZob" + "PD4GzPuNKpK3BIc0fk/95T8R1DjBSQ5/clvkzOKtcl3VffAwnHiE9TZx9js7kZwO" + "b9nOKX8DFao3EpQcS7qn63Ibzbq5A6ry8ZNRQSIJK/xlCAWoyUd1uxnqGFnus8wb" + "9RVZJQe8YvyytBjgbE3QjnfPOxoEJA3twupnPmH+OCTM6V3TZqpRZj/sZ5rtIQ++" + "hI5FdJWUWVSgnSw=", + "C=US, ST=Maryland, L=Forest Hill, O=Apache Software Foundation, " + "OU=Infrastructure, CN=*.apache.org", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.11 2.5.4.3", + "C=US, O=Thawte, Inc., CN=Thawte SSL CA", + "2.5.4.6 2.5.4.10 2.5.4.3", + "2014-04-11T00:00:00.000000Z", + "2016-04-07T23:59:59.000000Z", + "*.apache.org", + "151d8ad1e1bac21466bc2836ba80b5fcf872f37c" }, + /* the expiration is after 2049 so the expiration is in the + * generalized format, while the start date is still in the UTC + * format. Note this is actually a CA cert but that really doesn't + * matter here. 
*/ + { "MIIDtzCCAp+gAwIBAgIJAJKX85dqh3RvMA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNV" + "BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX" + "aWRnaXRzIFB0eSBMdGQwIBcNMTQwNjI3MTczMTUxWhgPMjExNDA2MDMxNzMxNTFa" + "MEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJ" + "bnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw" + "ggEKAoIBAQDaa4gwNBB6vgWrlOIEMdzvD06zmmiocEt6UnTHtmAcfrBuDnKrBwEh" + "f5JxneL16XIuKwK6n/4omBtem/PPjjpOLM9PMQuoO0cpQ0UGFnfpmko6PSQoqRHl" + "qTbDGv4usn7qdZV+FKz/B9CMonRSzWHMz5YPmqfob6BqaaJY/qJEzHJA24bm4jPH" + "IsaVCInEGpqAUpejwBzNujfbLibBNrVX7K846zk+tnsNR90kP5h3IRP3SdWVywKC" + "AMN2izzhmaDhuPzaTBobovr+ySJShmX6gdB5PpWkm6rcBl6RJ+tM0ZBSJjQvkYp4" + "seV+rcXFgpJP/aQL3vhDON32tjWh3A2JAgMBAAGjgacwgaQwHQYDVR0OBBYEFF+N" + "7TyDI8THpAbx1pfzFFtl5z4iMHUGA1UdIwRuMGyAFF+N7TyDI8THpAbx1pfzFFtl" + "5z4ioUmkRzBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8G" + "A1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkggkAkpfzl2qHdG8wDAYDVR0T" + "BAUwAwEB/zANBgkqhkiG9w0BAQUFAAOCAQEAo4t9fYe2I+XIQn8i/KI9UFEE9fue" + "w6rQMnf9yyd8nwL+IcV84hvyNrq0+7SptUBMq3rsEf5UIBIBI4Oa614mJ/Kt976O" + "S7Sa1IPH7j+zb/jqH/xGskEVi25dZz7psFCmi7Hm9dnVz9YKa2yLW6R2KZcTVxCx" + "SSdDRlD7SonsYeq2fGrAo7Y9xfZsiJ2ZbJ18kHs2coMWuhgSrN9jrML6mb5B+k22" + "/rgsCJgFsBDPBYR3ju0Ahqg7v6kwg9O2PJzyb4ljsw8oI0sCwHTZW5I5FMq2D9g6" + "hj80N2fhS9QWoLyeKoMTNB2Do6VaNrLrCJiscZWrsnM1f+XBqV8hMuHX8A==", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2014-06-27T17:31:51.000000Z", + "2114-06-03T17:31:51.000000Z", + NULL, + "db3a959e145acc2741f9eeecbeabce53cc5b7362" }, + /* The subject (except for country code) is UTF-8 encoded. 
+ * created with openssl using utf8-yes and string_mask=utf8only */ + { "MIIDrTCCApWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTET" + "MBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQ" + "dHkgTHRkMB4XDTE0MDcwMjE4MzYxMFoXDTE1MDcwMjE4MzYxMFowcjELMAkGA1UE" + "BhMCR1IxFTATBgNVBAgMDM6Rz4TPhM65zrrOrjETMBEGA1UEBwwKzpHOuM6uzr3O" + "sTEdMBsGA1UECgwUz4DOsc+BzqzOtM61zrnOs868zrExGDAWBgNVBAMMD3d3dy5l" + "eGFtcGxlLmNvbTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMVPuQPz" + "INjsiXl+GeiXMzXV1Bfm8vzbQnMLAFY/ZKKK4gpy58xcNrmur//Fd38naTM/DetO" + "PEoDa+vQ48CnUWCDT3CKUA3BnrjtR3/EITC7XRcfk5lyk0IZr9RZB1WedQxK1n5E" + "Ecz8EBrm9+1442Nmg/y1F8d/2F2CjKB+PgfOP1WWaIQcsjLsftXec+kGjc34kwbS" + "9D9H+bRrPVcOzBZOqC+K0K7MMOxKA5mMi4b/Nlep76gTaUyonclRIADanAyaK5WG" + "0IkEI/nxufaP3AcPksCbroWLTkPKIe97Yj6mnzNhK9TA9w5RgdBrjNyfrwUaYiYR" + "FxVJN0VrHWSsRnECAwEAAaN7MHkwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYd" + "T3BlblNTTCBHZW5lcmF0ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFNOobRTPfoWP" + "EGgXVkHfwrqz7PVzMB8GA1UdIwQYMBaAFIV8JZkZ88X7MTQSsJ6/qF3KboHKMA0G" + "CSqGSIb3DQEBBQUAA4IBAQAam6vJUv6kcWWrEAfdnwwRmmJ4X1Jey3Sp48G35MOE" + "KkHtwqbtL+QU1VA2X98bEYobqZinM3e3zrlbpgbe1xoJ00MnT9CgQObXr+cum/Ql" + "PwWXB5fK3BrNwqRMRGc9w27FevyFeybdKhc47jEKMOANrB/aziNHaq9gBtU/HZdy" + "rm9TEaOHMy6vNrdpOZKpwXPxYqsQxMLpen9D64t/3P6hsV5FMQTaxSFhszidG44t" + "xaU4O0BOq4x//THCWguMxzO5RxW/V8wI/rkpvhAH1wljHTusnsAZea4PpstZ7+W7" + "43GME1DwjYdUK9HhqRNrDkiJLox4Tmegw9A7m4XLt4zu", + "C=GR, ST=\xce\x91\xcf\x84\xcf\x84\xce\xb9\xce\xba\xce\xae, " + "L=\xce\x91\xce\xb8\xce\xae\xce\xbd\xce\xb1, " + "O=\xcf\x80\xce\xb1\xcf\x81\xce\xac\xce\xb4\xce\xb5\xce\xb9\xce\xb3" + "\xce\xbc\xce\xb1, CN=www.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2014-07-02T18:36:10.000000Z", + "2015-07-02T18:36:10.000000Z", + "www.example.com", + "b3b9789d8a53868f418619565f6b56af0033bdd3" }, + /* The issuer and subject (except for the country code) is + * 
UnversalString encoded. Created with a hacked version of openssl + * using utf8=yes and string_mask=MASK:256. In order for that to + * output UniversalString encoded data you need to change the + * DIRSTRING_TYPE in crypto/asn1/asn1.h to be defined as + * B_ASN1_DIRECTORYSTRING so that UnviersalString is available to be + * used in the DirectoryStrings. OpenSSL by default avoids + * this type (for the reasonable reason that it's wasteful and + * UTF-8 can encoded everything it can in the most efficient way). + * OU uses the mathematical monospace digits 0-9 to test characters + * outside of the range of the Basic Multilingual Plane */ + { "MIIEnzCCA4egAwIBAgIBATANBgkqhkiG9w0BAQUFADCBqzELMAkGA1UEBhMCQVUx" + "MTAvBgNVBAgcKAAAAFMAAABvAAAAbQAAAGUAAAAtAAAAUwAAAHQAAABhAAAAdAAA" + "AGUxaTBnBgNVBAocYAAAAEkAAABuAAAAdAAAAGUAAAByAAAAbgAAAGUAAAB0AAAA" + "IAAAAFcAAABpAAAAZAAAAGcAAABpAAAAdAAAAHMAAAAgAAAAUAAAAHQAAAB5AAAA" + "IAAAAEwAAAB0AAAAZDAeFw0xNDA3MjIyMjM3MzBaFw0xNTA3MjIyMjM3MzBaMIH8" + "MQswCQYDVQQGEwJHUjEhMB8GA1UECBwYAAADkQAAA8QAAAPEAAADuQAAA7oAAAOu" + "MR0wGwYDVQQHHBQAAAORAAADuAAAA64AAAO9AAADsTExMC8GA1UEChwoAAADwAAA" + "A7EAAAPBAAADrAAAA7QAAAO1AAADuQAAA7MAAAO8AAADsTExMC8GA1UECxwoAAHX" + "9gAB1/cAAdf4AAHX+QAB1/oAAdf7AAHX/AAB1/0AAdf+AAHX/zFFMEMGA1UEAxw8" + "AAAAdwAAAHcAAAB3AAAALgAAAGUAAAB4AAAAYQAAAG0AAABwAAAAbAAAAGUAAAAu" + "AAAAYwAAAG8AAABtMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAuYUb" + "aNt22rsR5Qc/2zsenSvrlbvv1CwwRPNxcWTKdLl4lJEUy5YCnQXIq3qTi+eAFetQ" + "MwUOZem6kgNdwmGvCz3lrLwOobd1D5mG9agzKLVUVj72csbNNFzHr8z/7oaHvYYs" + "eYxW3oRm6vDYtHw5spXrxTzRIAnG6foxXFYAtDDHQpdjsofxqXO67aUmmGvE5ffX" + "gD3dvTvjejzcjjVsLQP/HG4MQOqeIyvyyHg1E3dyOrG+3qR6RN1ZveROdvU38Udm" + "s0KSGVX2lDLsUTQSKg5L8CLWDHqgGQWjLZQRgRiKZId/f9ubaJdLN6KfAQ3UvYAP" + "bKL5/k2GpsPDE21X0QIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQf" + "Fh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUccHhM6C7" + "nGMpclkG7YLIRuFueYQwHwYDVR0jBBgwFoAUz0X1b2Ok9MVVzxqxX6MgtTwSKmYw" + 
"DQYJKoZIhvcNAQEFBQADggEBAEpqEa08JkPG+XBlLemnoJsnoaRuQnLZvSCoAwIt" + "fugTE8686EigTZyYVFQ+GaI+EqVeiMjpAEhS3IMbhx5VIr61S3Nta2BG9OPjr4Xf" + "01oUeh4egL93CpIGNwu6M1SrQv2UVAKTwahxNmNuvx6Ojx5P2tne+KJtRUiwM3dE" + "of78/0NJD27OwjW0ruZAifF5CAR7mhy3NOMARpE2kqZk5695OF+QCahe00Y/9ulz" + "sCjgjpCUYv87OTbBGC5XGRd/ZopTRqtBVxpEHX/fux5/wqxBawrCuQsVw1Kfw0Ur" + "30aYWLsOsRwhiQkukjQfcMra1AHLujWaAHuLIDls1ozc8xo=", + "C=GR, ST=\xce\x91\xcf\x84\xcf\x84\xce\xb9\xce\xba\xce\xae, " + "L=\xce\x91\xce\xb8\xce\xae\xce\xbd\xce\xb1, " + "O=\xcf\x80\xce\xb1\xcf\x81\xce\xac\xce\xb4\xce\xb5\xce\xb9\xce\xb3" + "\xce\xbc\xce\xb1, " + "OU=\xf0\x9d\x9f\xb6\xf0\x9d\x9f\xb7\xf0\x9d\x9f\xb8\xf0\x9d\x9f\xb9" + "\xf0\x9d\x9f\xba\xf0\x9d\x9f\xbb\xf0\x9d\x9f\xbc\xf0\x9d\x9f\xbd" + "\xf0\x9d\x9f\xbe\xf0\x9d\x9f\xbf, " + "CN=www.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.11 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2014-07-22T22:37:30.000000Z", + "2015-07-22T22:37:30.000000Z", + "www.example.com", + "cfa15310189cf89f1dadc9c989db46f287fff7a7" + }, + /* The issuer and subject (except for the country code) is BMPString + * encoded. Created with openssl using utf8-yes and string_mask=MASK:2048. 
+ */ + { "MIID3zCCAsegAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJBVTEd" + "MBsGA1UECB4UAFMAbwBtAGUALQBTAHQAYQB0AGUxOTA3BgNVBAoeMABJAG4AdABl" + "AHIAbgBlAHQAIABXAGkAZABnAGkAdABzACAAUAB0AHkAIABMAHQAZDAeFw0xNDA3" + "MjIyMzAyMDlaFw0xNTA3MjIyMzAyMDlaMIGBMQswCQYDVQQGEwJHUjEVMBMGA1UE" + "CB4MA5EDxAPEA7kDugOuMRMwEQYDVQQHHgoDkQO4A64DvQOxMR0wGwYDVQQKHhQD" + "wAOxA8EDrAO0A7UDuQOzA7wDsTEnMCUGA1UEAx4eAHcAdwB3AC4AZQB4AGEAbQBw" + "AGwAZQAuAGMAbwBtMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqzof" + "mf9YANAl2I5AcUjfAAJhqc2BL6z6k0J9bWyDL7DZf6AJtD5stRjs8cgiSGfJt9Cg" + "YQ0Cvnwz9ztNVXLliMmiJ4V0HzG80GI6SBK0PoCVbddUV/PN7REgPNjTwMYlys5w" + "Yt/GR8OJJV+eb02rpAfVigDlh7CFjY/uKMs2ThPi+yQb2V6qxLk3ZKIHh5IbKQjt" + "zIX/W1t+hiBjojnuOmhAoEefZ583k7amR5GBZO4GS5Qfj+4kjL5xiwB3bjTC8pnV" + "Iv4+mN2F6xKW/9IOWZtdySDADaU2ioyuMDzzjp5N5Nt0ZGhrEG2cDC3CatZaV4U7" + "9yBbi6kzlo3fCbCOlQIDAQABo3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQf" + "Fh1PcGVuU1NMIEdlbmVyYXRlZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUNvwKR1v/" + "R0FQU1WnzqT3brNxaQQwHwYDVR0jBBgwFoAUSM/JbJVWuYFp+awSOEXZcKn1ddQw" + "DQYJKoZIhvcNAQEFBQADggEBABna/SiYMBJvbnI+lj7j8ddSFihaFheqtouxOB2d" + "tiVz5mcc5KsAFlkrxt7YcYB7SEc+K28nqGb3bfbZ18JayRBY3JS/h4WGu4eL5XkX" + "rceWUy60zF7DHs6p8E8HZVF1CdCC/LXr2BAdYTc/y1f37bLKVFF4mMJMP4b8/nSL" + "z8+oOO9CxaEjzRoCawf2+jaajXTSTDXBgIx1t6bJMAS6S6RKPaCketyAmpsOZVBS" + "VtBVfVIOB2zFqs6iqkXtdiOXWlZ0DBQRX0G1VD5G80RlZXs0yEfufCwLUl/TyOhM" + "WisUSEOzd4RlbsBj30JQkVG9+jXb2KChPkiMpg0tFi8HU3s=", + "C=GR, ST=\xce\x91\xcf\x84\xcf\x84\xce\xb9\xce\xba\xce\xae, " + "L=\xce\x91\xce\xb8\xce\xae\xce\xbd\xce\xb1, " + "O=\xcf\x80\xce\xb1\xcf\x81\xce\xac\xce\xb4\xce\xb5\xce\xb9\xce\xb3" + "\xce\xbc\xce\xb1, CN=www.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2014-07-22T23:02:09.000000Z", + "2015-07-22T23:02:09.000000Z", + "www.example.com", + "6e2cd969350979d3741b9abb66c71159a94ff971" + }, + /* The issuer and subject (except for the country code) is 
T61String + * (aka TeletexString) encoded. Created with openssl using utf8=yes + * and string_mask=MASK:4. Note that the example chosen specifically + * includes the Norwegian OE (slashed O) to highlight that this is + * being treated as ISO-8859-1 despite what the X.509 says. + * See the following for the horrible details on + * this encoding: https://www.cs.auckland.ac.nz/~pgut001/pubs/x509guide.txt + */ + { "MIIDnTCCAoWgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTET" + "MBEGA1UECBQKU29tZS1TdGF0ZTEhMB8GA1UEChQYSW50ZXJuZXQgV2lkZ2l0cyBQ" + "dHkgTHRkMB4XDTE0MDcyMjIzNDQxOFoXDTE1MDcyMjIzNDQxOFowYjELMAkGA1UE" + "BhMCTk8xGDAWBgNVBAgUD034cmUgb2cgUm9tc2RhbDEQMA4GA1UEBxQHxWxlc3Vu" + "ZDENMAsGA1UEChQEZPhtZTEYMBYGA1UEAxQPd3d3LmV4YW1wbGUuY29tMIIBIjAN" + "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAz8uD5f2KRXvB//mKOpCXM3h/MOjK" + "xUgC4TIHi3BmnYR0IDElMPJrC263/eU0hKycyegyMjXkwIN5eEx4/Nl///RrzJBQ" + "+uXKfEJ4hTJ5x1uUYxhmtq4djZFxfjFH5yobT/LRDkEw9b/+NiRb30P+WrxhrAKW" + "7GRsE2pIdPdbM2IB5v/wORB4TK0kLYkmeEPWNJd63SmX4BEC6dRAaMxLIXKn75r5" + "GhMHKbUdt2Yy+5s0JlN9hMWqhnavCmGquzl7y/1E1OOUIm0jhL0sJn6wVTc+UO+Q" + "7u/w0xf38J8SU7lW6zbcQyYaSIQCMikgpprUSXdQZZUZGmHS7Gis39SiLwIDAQAB" + "o3sweTAJBgNVHRMEAjAAMCwGCWCGSAGG+EIBDQQfFh1PcGVuU1NMIEdlbmVyYXRl" + "ZCBDZXJ0aWZpY2F0ZTAdBgNVHQ4EFgQUQa2QLy+4QUH8hKNdR2LcvDKYImcwHwYD" + "VR0jBBgwFoAUpX6YP04yWqNiziUM7h0KgrRHMF4wDQYJKoZIhvcNAQEFBQADggEB" + "AElYUTQp5MOQk+ykIV0MHTw9OsEvLc1ZDmChls5WKYAu6KWgBbcjcTlkTpDlydrO" + "6JFxvCCg0K13dYOI3K/O9icGRauIrxrJOTtaIMryj7F51C52TOVPzkjL05eZTh+q" + "MmP3KI3uYSpXI6D6RI6hOKIRnFiUOQuXW3I8Z7s03KScBc9PSsVrMBLBz/Vpklaf" + "Tv/3jVBVIZwCW67SnFQ+vqEzaM4Ns2TBodlVqB1w0enPpow8bNnUwElLQJx3GXnl" + "z0JTpA6AwIRCF8n+VJgNN218fo2t2vvDDW/cZ+XMXzGNVhAqQ1F8B36esxy3P8+o" + "Bcwx241dxeGSYFHerqrTJIU=", + "C=NO, ST=M\xc3\xb8re og Romsdal, L=\xc3\x85lesund, O=d\xc3\xb8me, " + "CN=www.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + 
"2014-07-22T23:44:18.000000Z", + "2015-07-22T23:44:18.000000Z", + "www.example.com", + "787d1577ae77b79649d8f99cf4ed58a332dc48da" + }, + /* Certificate with several Subject Alt Name dNSNames. Note that + * the CommonName is not duplicated in the Subject Alt Name to + * test that the Common Name is excluded when Subject Alt Name + * exists. */ + { "MIIEMTCCAxmgAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJBVTET" + "MBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQ" + "dHkgTHRkMRwwGgYDVQQDExNJbnRlcm5ldCBXaWRnaXRzIENBMB4XDTE0MDcyNTE3" + "NDEwNFoXDTE1MDcyNTE3NDEwNFowdDELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldh" + "c2hpbmd0b24xEzARBgNVBAcTCk5vcnRoIEJlbmQxITAfBgNVBAoTGEludGVybmV0" + "IFdpZGdpdHMgUHR5IEx0ZDEYMBYGA1UEAxMPd3d3LmV4YW1wbGUuY29tMIIBIjAN" + "BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAxlryoK6hMhGI/UlHi7v1m+Z3tCvg" + "ZG1twDFNvBACpFVbJtC/v+fiy1eG7ooZ1PsdCINQ1iXLh1igevlw/4w6iTDpeSZg" + "OCPYqK6ejnS0bKtSB4TuP8yiQtqwaVz4yPP88lXuQJDRJzgaAR0VAhooLgEpl1z1" + "n9wQO15AW5swzpKcEOi4n6Zmf1t7oxOt9awAOhkL1FfFwkpbiK9yQv3TPVo+xzbx" + "BJxwx55RY8Dpiu0kuiTYWsd02pocb0uIqd7a5B4y05PhJseqwyX0Mw57HBBnbru1" + "lCetP4PkoM2gf7Uoj9e61nmM1mustKTIPvh7tZHWW3UW9JxAFG+6FkKDewIDAQAB" + "o4HeMIHbMAkGA1UdEwQCMAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJh" + "dGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBQ4A9k8VwI0wv7u5rB4+1D9cuHiqTAf" + "BgNVHSMEGDAWgBS6O+MdRDDrD715AXdrnuNZ7wDSyjALBgNVHQ8EBAMCBeAwUwYD" + "VR0RBEwwSoINKi5leGFtcGxlLmNvbYIRKi5mb28uZXhhbXBsZS5jb22CESouYmFy" + "LmV4YW1wbGUuY29tghN6aWctemFnLmV4YW1wbGUuY29tMA0GCSqGSIb3DQEBBQUA" + "A4IBAQAf4IrSOL741IUkyFQrDdof39Cp87VdNEo4Bl8fUSuCjqZONxJfiAFx7GcB" + "Cd7h7Toe6CYCeQLHSEXQ1S1eWYLIq0ZoP3Q/huJdoH7yskDyC5Faexph0obKM5hj" + "+EYGW2W/UYBzEZai+eePBovARDlupiMaTJGvtdU/AcgMhXCoGNK6egesXoiNgfFh" + "h+lXUNWUWm2gZlKwRJff8tkR7bIG7MGzyL6Rqav2/tQdbFVXN5AFPdYPFLf0Vo5m" + "eGYM87TILfSo7n7Kh0aZovwcuF/vPUWRJl3B1HaPt9k6DhcFyAji0SJyZWyM4v88" + "GSq5Dk8dnTdL2otToll+r4IqFLlp", + "C=US, ST=Washington, L=North Bend, O=Internet Widgits Pty Ltd, " + "CN=www.example.com", + 
"2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA", + "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3", + "2014-07-25T17:41:04.000000Z", + "2015-07-25T17:41:04.000000Z", + "*.example.com, *.foo.example.com, *.bar.example.com, zig-zag.example.com", + "9c365d27b7b6cc438576a8e465685ea7a4f61129" + }, + /* This is a CA cert that has a Common Name that doesn't look like + * a hostname. Make sure that the hostnames field remains blank for it. */ + { "MIIEEjCCAvqgAwIBAgIJAKJarRWbvbCjMA0GCSqGSIb3DQEBBQUAMGMxCzAJBgNV" + "BAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBX" + "aWRnaXRzIFB0eSBMdGQxHDAaBgNVBAMTE0ludGVybmV0IFdpZGdpdHMgQ0EwHhcN" + "MTQwNzI1MTc0MTAzWhcNMjQwNzIyMTc0MTAzWjBjMQswCQYDVQQGEwJBVTETMBEG" + "A1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkg" + "THRkMRwwGgYDVQQDExNJbnRlcm5ldCBXaWRnaXRzIENBMIIBIjANBgkqhkiG9w0B" + "AQEFAAOCAQ8AMIIBCgKCAQEAv0f0TAiE13WHaFv8j6M9uuniO40+Aj8cuhZtJ1GC" + "GI/mW56wq2BJrP6N4+jyxYbZ/13S3ypPu+N087Nc/4xaPtUD/eKqMlU+o8gHM/Lf" + "BEs2dUuBsvkNM0KoC04NPNTOYDnfHOrzx8iHhqlDedwmP8FeQn3rNS8k4qDyJpG3" + "Ay8ICz5mB07Cy6NISohTxMtatfW5yKmhnhiS92X42QAEgI1pGB7jJl1g3u+KY1Bf" + "/10kcramYSYIM1uB7XHQjZI4bhEhQwuIWePMOSCOykdmbemM3ijF9f531Olq+0Nz" + "t7lA1b/aW4PGGJsZ6uIIjKMaX4npP+HHUaNGVssgTnTehQIDAQABo4HIMIHFMB0G" + "A1UdDgQWBBS6O+MdRDDrD715AXdrnuNZ7wDSyjCBlQYDVR0jBIGNMIGKgBS6O+Md" + "RDDrD715AXdrnuNZ7wDSyqFnpGUwYzELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNv" + "bWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDEcMBoG" + "A1UEAxMTSW50ZXJuZXQgV2lkZ2l0cyBDQYIJAKJarRWbvbCjMAwGA1UdEwQFMAMB" + "Af8wDQYJKoZIhvcNAQEFBQADggEBAI442H8CpePFvOtdvcosu2N8juJrzACuayDI" + "Ze32EtHFN611azduqkWBgMJ3Fv74o0A7u5Gl8A7RZnfBTMX7cvpfHvWefau0xqgm" + "Mn8CcTUGel0qudCCMe+kPppmkgNaZFvawSqcAA/u2yni2yx8BakYYDZzyfmEf9dm" + "hZi5SmxFFba5UhNKOye0GKctT13s/7EgfFNyVhZA7hWU26Xm88QnGnN/qxJdpq+e" + "+Glctn9tyke4b1VZ2Yr+R4OktrId44ZQcRD44+88v5ThP8DQsvkXcjREMFAIPkvG" + 
"CEDOIem4l9KFfnsHn8/4KvoBRkmCkGaSwOwUdUG+jIjBpY/82kM=", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA", + "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA", + "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3", + "2014-07-25T17:41:03.000000Z", + "2024-07-22T17:41:03.000000Z", + NULL, + "b9decce236aa1da07b2bf088160bffe1469b9a4a" + }, + /* Cert with a IP SAN entry. Make sure we properly skip them. */ + { "MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJBVTET" + "MBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQ" + "dHkgTHRkMRwwGgYDVQQDExNJbnRlcm5ldCBXaWRnaXRzIENBMB4XDTE0MDcyNTE4" + "NDMyOFoXDTE1MDcyNTE4NDMyOFowczELMAkGA1UEBhMCVVMxEzARBgNVBAgTCldh" + "c2hpbmd0b24xEzARBgNVBAcTCk5vcnRoIEJlbmQxITAfBgNVBAoTGEludGVybmV0" + "IFdpZGdpdHMgUHR5IEx0ZDEXMBUGA1UEAxMOaXAuZXhhbXBsZS5jb20wggEiMA0G" + "CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDXKkSxg89tu5/n+lIC8ajj1T9vsO5B" + "nRH5Sne7UPc6pGMTNFi1MOVjdDWkmuCUzoI+HKLDc69/4V5RU12N1QNgsgcOzCSo" + "qgxa+dQk2s1shz1zhyaHkpdeMZU3/p9D4v+nRGAdYifwl/VOTwjWWucNzHDBwvb6" + "+Wm4pXE94Y5p8fY/lZi7VgtxdoPdSHGkIAps8psZGPjqKpLEjnLMp1n0v9cZhBF6" + "OoMUZpQuwcjT8vMQppgIWhZFLiH2jn7FTYWZyB0Dh9nMd097NQA87VtVfNc+g0oY" + "qLe3YldJgvVfyeSLhnyv68fBfGcTj310pNrGeE/m4tyxupiUT8BitfxPAgMBAAGj" + "geQwgeEwCQYDVR0TBAIwADAsBglghkgBhvhCAQ0EHxYdT3BlblNTTCBHZW5lcmF0" + "ZWQgQ2VydGlmaWNhdGUwHQYDVR0OBBYEFI09JZlhKV44Z+I5d58V/ZDqQ7yZMB8G" + "A1UdIwQYMBaAFDjQVnIU9pQI1nM8jjmxYiicMTdGMAsGA1UdDwQEAwIF4DBZBgNV" + "HREEUjBQgg0qLmV4YW1wbGUuY29tghEqLmZvby5leGFtcGxlLmNvbYcEfwAAAYIR" + "Ki5iYXIuZXhhbXBsZS5jb22CE3ppZy16YWcuZXhhbXBsZS5jb20wDQYJKoZIhvcN" + "AQEFBQADggEBAEK+XIGwavf+5Ht44ifHrGog0CDr4ESg7wFjzk+BJwYDtIPp9b8A" + "EG8qbfmOS+2trG3zc74baf2rmrfn0YGZ/GV826NMTaf7YU1/tJQTo+RX9g3aHg6f" + "pUBfIyAV8ELq84sgwd1PIgleVgIiDrz+a0UZ05Z5S+GbR2pwNH6+fO0O5E9clt2a" + "Cute1UMBqAMGKiFaP8HD6SUFTdTKZNxHtQzYmmuvoC1nzVatMFdkTuQgSQ/uNlzg" + "+yUFoufMZhs3gPx9PfXGOQ7f3nKE+WCK4KNGv+OILYsk4zUjMznfAwBRs9PyITN2" + 
"BKe64WsF6ZxTq3zLVGy5I8LpbtlvSmAaBp4=", + "C=US, ST=Washington, L=North Bend, O=Internet Widgits Pty Ltd, " + "CN=ip.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd, CN=Internet Widgits CA", + "2.5.4.6 2.5.4.8 2.5.4.10 2.5.4.3", + "2014-07-25T18:43:28.000000Z", + "2015-07-25T18:43:28.000000Z", + "*.example.com, *.foo.example.com, *.bar.example.com, zig-zag.example.com", + "3525fb617c232fdc738d736c1cbd5d97b19b51e4" + }, + /* Cert with the signature algorithm OID set to sha1WithRSA instead of + * sha1WithRSAEncryption. Both have the same meaning but the sha1WithRSA + * doesn't seem to be used anymore and is shorter */ + { "MIIDgDCCAmygAwIBAgIBATAJBgUrDgMCHQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYD" + "VQQIFApTb21lLVN0YXRlMSEwHwYDVQQKFBhJbnRlcm5ldCBXaWRnaXRzIFB0eSBM" + "dGQwHhcNMTQwODE4MDk1OTQ1WhcNMTUwODE4MDk1OTQ1WjBNMQswCQYDVQQGEwJV" + "SzEQMA4GA1UECBQHRW5nbGFuZDESMBAGA1UEBxQJU2hlZmZpZWxkMRgwFgYDVQQD" + "FA93d3cuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIB" + "AQCkvtieKg33RSzhn5JMDPPRlDS8Q16CN96A4lLI9YrJCy33z46PrbR2mq2hOz5l" + "MdgbAaRF0MUGhcKv4msJ0bsWhkybaSBAVgnoC7ObQWPNF7ppMzUjeDAlUBXNfheR" + "ZcgcgGWqUkoB1uUMhvmVuPrzvxn+WCwyoP6zQCviYLsR8AygGQgdhV6c9wJ/x9HS" + "MRUvUOeo7SCmx9GK5Hc11QV2K3rwKXABeAxXNzbyQe7hFfQYCI2SB5s3bEnhIvg7" + "BG0BQmoprHjXWBftc0+msKQTFw7+jZ21NsfwGoPonuVsCOJjJ51jp2oKqk3b1GGc" + "DEmmMQ0JtqfHO5a7JACBaHbTAgMBAAGjezB5MAkGA1UdEwQCMAAwLAYJYIZIAYb4" + "QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRlMB0GA1UdDgQWBBSo" + "jICtcIgZL6OCCB5BJ5PGf1UIyTAfBgNVHSMEGDAWgBT5KQMLMylrXSQvhMtONHZc" + "22Jm9TAJBgUrDgMCHQUAA4IBAQCvCJ4i2kRzSRhnlDxd0UbQtytVIJFFJlfREPTM" + "j8+VqqtCVyPSX8T5NU+HCiEmhVrTlm/W0i8ygJXr8izyIMGRqbyhn2M9b8hAY6Jl" + "0edztu/FV/YHsJbPznWkXWpMMaXDEX4wI329f5odccIbB5VSaaoAdKZ6Ne4nf6oV" + "95KRFWkXoYjm24TnpALsNnK1Kjjed6h5ApB+IANOpXYFbGcsfbuKhWbFd2nd6t5U" + "NpUcv4H9Tgdl6KgrfsbQtAeouWCgoiNzrul8FOaQTdJLZfCsjuE+IkGpM+DX8PiF" + "5M41EqkSKia8sChFIln+lkRY41OWP9uQ1VXCfdRIzOnXWh9U", + "C=UK, 
ST=England, L=Sheffield, CN=www.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2014-08-18T09:59:45.000000Z", + "2015-08-18T09:59:45.000000Z", + "www.example.com", + "0e0869961d508b13bb22aa8da675b2e9951c0e70" + }, + /* X.509 v1 certificate, we used to crash on these prior to r1619861. */ + { "MIIDDTCCAfUCAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV" + "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0" + "ZDAeFw0xNTAxMTkyMjEyNDhaFw0xNjAxMTkyMjEyNDhaMFQxCzAJBgNVBAYTAlVT" + "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMRswGQYD" + "VQQDExJ4NTA5djEuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw" + "ggEKAoIBAQDniW3DmGGtA0MoYqE9H55/RmjtTJD2WVmM/STEsw+RW74UGsZ62qfi" + "ADedl4ukZYKlk3TwJrGEwDBKOMWHuzCYVxhclyHkHwX7QqamvZRgaOonEu82KHuE" + "dZo4FhOWDC9D0yS4RFbfqvSu/JG19FYsnRQn1RPFYji6jG9TRwavplVBiMhR68kc" + "8HTW1Wu7uJ5SV0UtTicFes8MGek3+zWceGt+Egwd2UlIYXwTPzB5m7UPuufEdvFL" + "ED3pusVatohFzjCbYsuJIR5ppYd49uTxPWGvRidJ2C8GbDf9PCgDduS0Gz91Txnw" + "h+WiVYCQ6SxAJWp/xeZWE71k88N0vJEzAgMBAAEwDQYJKoZIhvcNAQEFBQADggEB" + "ABoBaObsHnIrkd3RvvGb5q7fnEfiT1DXsufS3ypf4Z8IST/z+NeaUaiRN1oLcvDz" + "qC7ygTYZ2BZoEw3ReCGqQWT4iYET+lH8DM+U5val3gVlSWqx1jj/wiV1OAxQsakM" + "BnmNs/MDshiv54irvSlqnxEp2o/BU/vMrN656C5DJkZpYoMpIWxdFnd+bzNzuN1k" + "pJfTjzWlGckKfdblNPOfdtccTqtQ5d4mWtYNJ8DfL5rRRwCuzXvZtbVHKxqkXaXr" + "CYUfFUobapgPfvvMc1QcDY+2nvhC2ij+HAPIHgZPuzJsjZRC1zwg074cfgjZbgbm" + "R0HVF486p3vS8HFv4lndRZA=", + "C=US, ST=Washington, L=North Bend, CN=x509v1.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2015-01-19T22:12:48.000000Z", + "2016-01-19T22:12:48.000000Z", + "x509v1.example.com", + "5730dd65a7f77fdf0dfd90e5a53119f38854af29" + }, + /* X.509 v1 certificate with an X.509 v3 Subject Alternative Name + * extension. Although these are ill-formed per RFC 5280 s. 
4.1, we + * suspect that they could exist in the real world. Make sure we do + * not error out, and that we pick up SAN (b.example.com) from the + * extension. */ + { "MIIDLzCCAhcCAQ8wDQYJKoZIhvcNAQEFBQAwKzEpMCcGA1UEAwwgSW50ZXJuZXQg" + "V2lkZ2l0cyBJbnRlcm1lZGlhdGUgQ0EwHhcNMTUwMTI5MDAzMzU1WhcNMTYwMTI5" + "MDAzMzU1WjByMQswCQYDVQQGEwJVUzETMBEGA1UECAwKV2FzaGluZ3RvbjETMBEG" + "A1UEBwwKTm9ydGggQmVuZDEhMB8GA1UECgwYSW50ZXJuZXQgV2lkZ2l0cyBQdHkg" + "THRkMRYwFAYDVQQDDA1hLmV4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOC" + "AQ8AMIIBCgKCAQEAs0hj2xPRQZpecqk0Ih1l4juAuQZeSgv3yD/VtSq/9sTBH6iA" + "4XjJQcHROYxYaK0QS/qlCjpl+Q3mOaVIu+59TLy3T2YVgqMYmgB453ntuJPkdF1C" + "fJ2j19YAQZHHdOFaP1G+auBwjmHns3+MkG4s7EPuJP7TBCcSFlOmz5D4GUui3NVG" + "LBYUog1ZhF4oe/7d4jc2Cn8uypNT/Hc1ViIlCT4rFoAirv9Uob+4zjQ3Z18I1Ql1" + "t8oszVCj3kKDboEty2RduwPLx/2ztWYBCvFhd49JGdi/nzMi+j2d5HCI3V8W06pN" + "mvrVU4G0ImVRa8wpmQCSm2Tp0s42FAVHWw8yMwIDAQABoxwwGjAYBgNVHREEETAP" + "gg1iLmV4YW1wbGUuY29tMA0GCSqGSIb3DQEBBQUAA4IBAQDI/n0NYakuRP/485/A" + "dan71qBy3sljjOreq71IfBdtq+GEjCL1B0TD0V338LXki9NicCLeD/MWfceDjV0u" + "AjPTxaZEn/NWqXo0mpNC535Y6G46mIHYDGC8JyvCJjaXF+GVstNt6lXzZp2Yn3Si" + "K57uVb+zz5zAGSO982I2HACZPnF/oAtp7bwxzwvBsLqSLw3hh0ATVPp6ktE+WMoI" + "X75CVcDmU0zjXqzKiFPKeTVjQG6YxgvplMaag/iNngkgEhX4PIrxdIEsHf8l9ogC" + "dz51MFxetsC4D2KRq8IblF9i+9r3hlv+Dbf9ovYe9Hu0usloSinImoWOw42iWWmP" + "vT4l", + "C=US, ST=Washington, L=North Bend, O=Internet Widgits Pty Ltd, " + "CN=a.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.10 2.5.4.3", + "CN=Internet Widgits Intermediate CA", + "2.5.4.3", + "2015-01-29T00:33:55.000000Z", + "2016-01-29T00:33:55.000000Z", + "b.example.com", + "47fa5c76fee6e21e37def6da3746bba84a5a09bf" + }, + /* X.509 certificate with multiple Relative Distinguished Names + * Borrowed form the Chromium test suite see thier bug here + * https://code.google.com/p/chromium/issues/detail?id=101009 + */ + { "MIICsDCCAhmgAwIBAgIJAO9sL1fZ/VoPMA0GCSqGSIb3DQEBBQUAMHExbzAJBgNV" + 
"BAYTAlVTMA8GA1UECgwIQ2hyb21pdW0wFgYKCZImiZPyLGQBGRYIQ2hyb21pdW0w" + "GgYDVQQDDBNNdWx0aXZhbHVlIFJETiBUZXN0MB0GA1UECwwWQ2hyb21pdW0gbmV0" + "X3VuaXR0ZXN0czAeFw0xMTEyMDIwMzQ3MzlaFw0xMjAxMDEwMzQ3MzlaMHExbzAJ" + "BgNVBAYTAlVTMA8GA1UECgwIQ2hyb21pdW0wFgYKCZImiZPyLGQBGRYIQ2hyb21p" + "dW0wGgYDVQQDDBNNdWx0aXZhbHVlIFJETiBUZXN0MB0GA1UECwwWQ2hyb21pdW0g" + "bmV0X3VuaXR0ZXN0czCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAnSMQ7YeC" + "sOuk+0n128F7TfDtG/X48sG10oTe65SC8N6LBLfo7YYiQZlWVHEzjsFpaiv0dx4k" + "cIFbVghXAky/r5qgM1XiAGuzzFw7R27cBTC9DPlRwHArP3CiEKO3iz8i+qu9x0il" + "/9N70LcSSAu/kGLxikDbHRoM9d2SKhy2LGsCAwEAAaNQME4wHQYDVR0OBBYEFI1e" + "cfoqc7qfjmMyHF2rh9CrR6u3MB8GA1UdIwQYMBaAFI1ecfoqc7qfjmMyHF2rh9Cr" + "R6u3MAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEAGKwN01A47nxVHOkw" + "wFdbT8t9FFkY3pIg5meoqO3aATNaSEzkZoUljWtWgWfzr+n4ElwZBxeYv9cPurVk" + "a+wXygzWzsOzCUMKBI/aS8ijRervyvh6LpGojPGn1HttnXNLmhy+BLECs7cq6f0Z" + "hvImrEWhD5uZGlOxaZk+bFEjQHA=", + "C=US, O=Chromium, 0.9.2342.19200300.100.1.25=Chromium, " + "CN=Multivalue RDN Test, OU=Chromium net_unittests", + "2.5.4.6 2.5.4.10 0.9.2342.19200300.100.1.25 2.5.4.3 2.5.4.11", + "C=US, O=Chromium, 0.9.2342.19200300.100.1.25=Chromium, " + "CN=Multivalue RDN Test, OU=Chromium net_unittests", + "2.5.4.6 2.5.4.10 0.9.2342.19200300.100.1.25 2.5.4.3 2.5.4.11", + "2011-12-02T03:47:39.000000Z", + "2012-01-01T03:47:39.000000Z", + NULL, + "99302ca2824f585a117bb41302a388daa0519765" + }, + /* certificate with subject that includes an attribute that has an + * object id that has leading zeros. This isn't technically legal + * but a simplistic parser might parser it the same as an object + * id that doesn't have a leading zero. In this case the object id + * with a leading zero could parse to the same object id as the + * Common Name. Make sure we don't treat it as such. 
*/ + { "MIIDDjCCAfYCAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV" + "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0" + "ZDAeFw0xNTAxMjcwNzQ5MDhaFw0xNjAxMjcwNzQ5MDhaMFUxCzAJBgNVBAYTAlVT" + "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMRwwGgYE" + "VQSAAxMSbm90YWNuLmV4YW1wbGUuY29tMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8A" + "MIIBCgKCAQEAvXCJv0gr9d3GNYiukPrbse0FdXmuBx2mPf665WyZVHk9JiPnDcb2" + "ng8gHLgJe8izou6I0vN2iJgy91rUPvX9zA3qVhml+cboVY2jHCPWo/v5PQsXAgLV" + "5gVjp2POn3N0O1xcS1yNe249LkP0Di3kAMp5gkzdprm3fD3JDW1Q+ocQylnbjzG0" + "FtNQSUJLITvPXjR7ny46Fci2mv8scHOvlEXTK5/2RoBaoK2jWQimqGfFj1sr1vqZ" + "Wcb6NAdZso64Xg1V6CWX8zymlA7gAhTQWveq+ovUWcXpmR8aj9pYNuy0aZW3BANz" + "N6L0G7OZiVUvvzpfnn0V3Z/sR/iQs7q3nQIDAQABMA0GCSqGSIb3DQEBBQUAA4IB" + "AQACZwruCiesCRkT08AtHl0WQnQui58e9/7En+iqxNQO6+fx84SfWGcUFYZtvzdO" + "KkHNTs06km+471OjLSDcotRkdqO1JxQCkNxbrPat7T6FrO9n2JFivx6eijRqK/jB" + "cBYW92dK4BfXU4+FyeB2OIpyPjuqLU2j7S5p7qNU50i/1J7Qt669nXeaPINIfZdW" + "sDjjWkFR1VOgXS/zeu/GOxlQFmmcde+X/qkFI+L352VX7Ktf95j4ms4vG2yZgNfe" + "jbNb9a7LMcqlop/PlX5WBGv8GGKUNZO0LvukFYOULf1oL8VQsN0x/gRHGC7m9kVM" + "3hojWZDXAY4mYqdBCRX7/gkt", + "C=US, ST=Washington, L=North Bend, 2.5.4.03=notacn.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.03", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2015-01-27T07:49:08.000000Z", + "2016-01-27T07:49:08.000000Z", + NULL, + "6f24b834ba00fb4ef863df63b8fbeddab25e4838" + }, + /* certificate with subject that includes an attribute that has an + * object id that has an overflow such that it calculates to + * the same object id as the Common Name (2.5.4.3). OpenSSL + * with its bignum support shows this as 2.5.4.2361183241434822606851. + * It would be wrong to display this as a Common Name to the user. 
*/ + { "MIIDGTCCAgECAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV" + "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0" + "ZDAeFw0xNTAxMjcwODMxNDNaFw0xNjAxMjcwODMxNDNaMGAxCzAJBgNVBAYTAlVT" + "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMScwJQYN" + "VQSCgICAgICAgICAAxMUb3ZlcmZsb3cuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3" + "DQEBAQUAA4IBDwAwggEKAoIBAQDHL1e8zSPyRND3tI42Vqca2FoCiWn881Czv2ct" + "tGFwyjUM8R1yHXEP+doS9KN9L29xRWZRxyCQ18S+QbjNQCh6Ay22qnkBu0uPdVB6" + "iIVKiW9RzU8dZSFMnveUZYLloG12kK++ooJGIstTJwkI8Naw1X1D29gZaY9oSKAc" + "Gs5c92po61RoetB744dUfUbAXi8eEd4ShdsdnCoswpEI4WTLdYLZ/cH/sU1a5Djm" + "cAfEBzZSOseEQSG7Fa/HvHyW+jDNnKG2r73M45TDcXAunSFcAYl1ioBaRwwdcTbK" + "SMGORThIX5UwpJDZI5sTVmTTRuCjbMxXXki/g9fTYD6mlaavAgMBAAEwDQYJKoZI" + "hvcNAQEFBQADggEBABvZSzFniMK4lqJcubzzk410NqZQEDBxdNZTNGrQYIDV8fDU" + "LLoQ2/2Y6kOQbx8r3RNcaJ6JtJeVqAq05It9oR5lMJFA2r0YMl4eB2V6o35+eaKY" + "FXrJzwx0rki2mX+iKsgRbJTv6mFb4I7vny404WKHNgYIfB8Z5jgbwWgrXH9M6BMb" + "FL9gZHMmU+6uqvCPYeIIZaAjT4J4E9322gpcumI9KGVApmbQhi5lC1hBh+eUprG7" + "4Brl9GeCLSTnTTf4GHIpqaUsKMtJ1sN/KJGwEB7Z4aszr80P5/sjHXOyqJ78tx46" + "pwH7/Fx0pM7nZjJVGvcxGBBOMeKy/o2QUVvEYPU=", + "C=US, ST=Washington, L=North Bend, \?\?=overflow.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 \?\?", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2015-01-27T08:31:43.000000Z", + "2016-01-27T08:31:43.000000Z", + NULL, + "c1f063daf23e402fe58bab1a3fa2ba05c1106158" + }, + /* certificate with multiple common names, make sure this behaves + * the same way as serf. 
*/ + { "MIIDJjCCAg4CAQEwDQYJKoZIhvcNAQEFBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV" + "BAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0" + "ZDAeFw0xNTAxMjExNzUwMDZaFw0xNjAxMjExNzUwMDZaMG0xCzAJBgNVBAYTAlVT" + "MRMwEQYDVQQIEwpXYXNoaW5ndG9uMRMwEQYDVQQHEwpOb3J0aCBCZW5kMRkwFwYD" + "VQQDExBnb29kLmV4YW1wbGUuY29tMRkwFwYDVQQDExBldmlsLmV4YW1wbGUuY29t" + "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5pfrXkiiDGCWSYhMQNHJ" + "gNBLEBNcFzsGpW8i6rMKVephwG7p4VqIvc0pSsmpD9IYuIxxq/2E2cziaTWyqCBp" + "hKKipqt8eMcu6u45LduHGiCcnN7rHORbQZTdvwzTmiVN1eI1oCVejB4zgHNkHUko" + "DyaALCHGRz8l7Qq6hSbiOnhH1qlscIIEsgQEyDlMZpbsWVTQKPxluhtgqVEn7wPN" + "qScrf2evq050NuNYYFzCmuqOGKq2gKbD/BlUqCNmEM2JPg/bdcAQxFCf0HcvDiS9" + "e29suMKWZAzJkbzrWhlDMG1Xt5c7dd82PcGwnL//Q7muE57luCw38Gp2vQQ3/Uki" + "vQIDAQABMA0GCSqGSIb3DQEBBQUAA4IBAQBry9wfxYia/dCSKvDXOBKUgWFQtI8j" + "7vYHuouTvIb5m6b62kiUdtuaVKi3jnUbHUFohOi/6o+HIwbXSgz5CbiLjgUvONBU" + "BLekaguIYX9tTmg+vhWchcmVMHufj6HdQkzWtyojSQD9GjHGInNDG102KlN1cdL8" + "jGTrru4vnef+xA24EvYPdcS2+H2yYH0THL3JPKo1GtO4NCEGWQbS6Ygwcy+BQpbU" + "TBIWhlbleuCalB8qhWyijcHeszT7mFR0CarEaSLeZj6FaQpZB636iHuELmxcgiFw" + "j3r3QZyAMEGvPPBPKYSTgmol31pX9LYvuFGA9ADQ2in/n9WdMfYzFzOn", + "C=US, ST=Washington, L=North Bend, " + "CN=good.example.com, CN=evil.example.com", + "2.5.4.6 2.5.4.8 2.5.4.7 2.5.4.3 2.5.4.3", + "C=AU, ST=Some-State, O=Internet Widgits Pty Ltd", + "2.5.4.6 2.5.4.8 2.5.4.10", + "2015-01-21T17:50:06.000000Z", + "2016-01-21T17:50:06.000000Z", + "good.example.com", + "9693f17e59205f41ca2e14450d151b945651b2d7" + }, + { NULL } +}; + +static svn_error_t * +compare_dates(const char *expected, + apr_time_t actual, + const char *type, + const char *subject, + apr_pool_t *pool) +{ + apr_time_t expected_tm; + + if (!actual) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "No %s for cert '%s'", type, subject); + + SVN_ERR(svn_time_from_cstring(&expected_tm, expected, pool)); + if (!expected_tm) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "Problem converting expected %s 
'%s' to text " + "output for cert '%s'", type, expected, + subject); + + if (expected_tm != actual) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "The %s didn't match expected '%s'," + " got '%s' for cert '%s'", + type, expected, + svn_time_to_cstring(actual, pool), + subject); + + return SVN_NO_ERROR; +} + +static svn_error_t * +compare_hostnames(const char *expected, + const apr_array_header_t *actual, + const char *subject, + apr_pool_t *pool) +{ + + int i; + svn_stringbuf_t *buf; + + if (!actual) + { + if (expected) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "The hostnames didn't match expected '%s'," + " got NULL for cert '%s'", + expected, subject); + return SVN_NO_ERROR; + } + + buf = svn_stringbuf_create_empty(pool); + for (i = 0; i < actual->nelts; ++i) + { + const char *hostname = APR_ARRAY_IDX(actual, i, const char*); + if (i > 0) + svn_stringbuf_appendbytes(buf, ", ", 2); + svn_stringbuf_appendbytes(buf, hostname, strlen(hostname)); + } + + if (strcmp(expected, buf->data)) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "The hostnames didn't match expected '%s'," + " got '%s' for cert '%s'", + expected, buf->data, subject); + return SVN_NO_ERROR; +} + +static svn_error_t * +compare_oids(const char *expected, + const apr_array_header_t *actual, + const char *subject, + apr_pool_t *pool) +{ + int i; + svn_stringbuf_t *buf; + + if (!actual) + { + if (expected) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "The oids didn't match expected '%s'," + " got NULL for cert '%s'", + expected, subject); + return SVN_NO_ERROR; + } + + buf = svn_stringbuf_create_empty(pool); + for (i = 0; i < actual->nelts; ++i) + { + apr_size_t len; + const svn_x509_name_attr_t *attr = APR_ARRAY_IDX(actual, i, const svn_x509_name_attr_t *); + const void *oid = svn_x509_name_attr_get_oid(attr, &len); + const char *oid_string = svn_x509_oid_to_string(oid, len, pool, pool); + if (i > 0) + svn_stringbuf_appendbyte(buf, ' '); + if (oid_string) + 
svn_stringbuf_appendcstr(buf, oid_string); + else + svn_stringbuf_appendcstr(buf, "??"); + } + + if (strcmp(expected, buf->data)) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "The oids didn't match expected '%s'," + " got '%s' for cert '%s'", + expected, buf->data, subject); + return SVN_NO_ERROR; + +} + + +static svn_error_t * +compare_results(struct x509_test *xt, + svn_x509_certinfo_t *certinfo, + apr_pool_t *pool) +{ + const char *v; + + v = svn_x509_certinfo_get_subject(certinfo, pool); + if (!v) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "No subject for cert '%s'", xt->subject); + if (strcmp(v, xt->subject)) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "Subject didn't match for cert '%s', " + "expected '%s', got '%s'", xt->subject, + xt->subject, v); + + SVN_ERR(compare_oids(xt->subject_oids, svn_x509_certinfo_get_subject_attrs(certinfo), + xt->subject, pool)); + + v = svn_x509_certinfo_get_issuer(certinfo, pool); + if (!v) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "No issuer for cert '%s'", xt->subject); + if (strcmp(v, xt->issuer)) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "Issuer didn't match for cert '%s', " + "expected '%s', got '%s'", xt->subject, + xt->issuer, v); + + SVN_ERR(compare_oids(xt->issuer_oids, svn_x509_certinfo_get_issuer_attrs(certinfo), + xt->subject, pool)); + + SVN_ERR(compare_dates(xt->valid_from, + svn_x509_certinfo_get_valid_from(certinfo), + "valid-from", + xt->subject, + pool)); + + SVN_ERR(compare_dates(xt->valid_to, + svn_x509_certinfo_get_valid_to(certinfo), + "valid-to", + xt->subject, + pool)); + + SVN_ERR(compare_hostnames(xt->hostnames, + svn_x509_certinfo_get_hostnames(certinfo), + xt->subject, + pool)); + + v = svn_checksum_to_cstring_display( + svn_x509_certinfo_get_digest(certinfo), pool); + if (!v) + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "No SHA1 digest for cert '%s'", xt->subject); + if (strcmp(v, xt->sha1_digest)) + return 
svn_error_createf(SVN_ERR_TEST_FAILED, NULL, + "SHA1 digest didn't match for cert '%s', " + "expected '%s', got '%s'", xt->subject, + xt->sha1_digest, v); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_x509_parse_cert(apr_pool_t *pool) +{ + struct x509_test *xt; + apr_pool_t *iterpool = svn_pool_create(pool); + + for (xt = cert_tests; xt->base64_cert; xt++) + { + const svn_string_t *der_cert; + svn_x509_certinfo_t *certinfo; + + svn_pool_clear(iterpool); + + /* Convert header-less PEM to DER by undoing base64 encoding. */ + der_cert = svn_base64_decode_string(svn_string_create(xt->base64_cert, + pool), + iterpool); + + SVN_ERR(svn_x509_parse_cert(&certinfo, der_cert->data, der_cert->len, + iterpool, iterpool)); + + SVN_ERR(compare_results(xt, certinfo, iterpool)); + } + + return SVN_NO_ERROR; +} + +#if 0 +static struct x509_test broken_cert_tests[] = { + { NULL } +}; + +static svn_error_t * +test_x509_parse_cert_broken(apr_pool_t *pool) +{ + struct x509_test *xt; + apr_pool_t *iterpool = svn_pool_create(pool); + + for (xt = broken_cert_tests; xt->base64_cert; xt++) + { + const svn_string_t *der_cert; + svn_x509_certinfo_t *certinfo; + + svn_pool_clear(iterpool); + + /* Convert header-less PEM to DER by undoing base64 encoding. */ + der_cert = svn_base64_decode_string(svn_string_create(xt->base64_cert, + pool), + iterpool); + + SVN_ERR(svn_x509_parse_cert(&certinfo, der_cert->data, der_cert->len, + iterpool, iterpool)); + + SVN_ERR(compare_results(xt, certinfo, iterpool)); + } + + return SVN_NO_ERROR; +} +#endif + +/* The test table. 
*/ + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = + { + SVN_TEST_NULL, + SVN_TEST_PASS2(test_x509_parse_cert, + "test svn_x509_parse_cert"), +/* SVN_TEST_XFAIL2(test_x509_parse_cert_broken, + "test broken certs"), */ + SVN_TEST_NULL + }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_wc/conflict-data-test.c b/subversion/tests/libsvn_wc/conflict-data-test.c index 97d22ea..7d89825 100644 --- a/subversion/tests/libsvn_wc/conflict-data-test.c +++ b/subversion/tests/libsvn_wc/conflict-data-test.c @@ -134,7 +134,7 @@ compare_file_content(const char *file_abspath, * conflict, or are both NULL. Return an error if not. * * Compare the property values found in files named by - * ACTUAL->base_abspath, ACTUAL->my_abspath, ACTUAL->merged_abspath + * ACTUAL->base_abspath, ACTUAL->my_abspath, ACTUAL->merged_file * with EXPECTED_BASE_VAL, EXPECTED_MY_VAL, EXPECTED_THEIR_VAL * respectively, ignoring the corresponding fields in EXPECTED. */ static svn_error_t * @@ -236,8 +236,8 @@ test_deserialize_tree_conflict(apr_pool_t *pool) SVN_ERR(svn_wc__deserialize_conflict(&conflict, skel, "", pool, pool)); if ((conflict->node_kind != exp_conflict->node_kind) || - (conflict->action != exp_conflict->action) || - (conflict->reason != exp_conflict->reason) || + (conflict->action != exp_conflict->action) || + (conflict->reason != exp_conflict->reason) || (conflict->operation != exp_conflict->operation) || (strcmp(conflict->local_abspath, exp_conflict->local_abspath) != 0)) return fail(pool, "Unexpected tree conflict"); @@ -289,10 +289,12 @@ test_read_write_tree_conflicts(const svn_test_opts_t *opts, SVN_ERR(svn_test__sandbox_create(&sbox, "read_write_tree_conflicts", opts, pool)); parent_abspath = svn_dirent_join(sbox.wc_abspath, "A", pool); - SVN_ERR(svn_wc__db_op_add_directory(sbox.wc_ctx->db, parent_abspath, - NULL /*props*/, NULL, pool)); child1_abspath = svn_dirent_join(parent_abspath, "foo", pool); child2_abspath = 
svn_dirent_join(parent_abspath, "bar", pool); + SVN_ERR(sbox_wc_mkdir(&sbox, "A")); + SVN_ERR(sbox_wc_mkdir(&sbox, "A/bar")); + SVN_ERR(sbox_file_write(&sbox, "A/foo", "")); + SVN_ERR(sbox_wc_add(&sbox, "A/foo")); conflict1 = tree_conflict_create(child1_abspath, svn_node_file, svn_wc_operation_merge, @@ -606,22 +608,22 @@ test_serialize_tree_conflict(const svn_test_opts_t *opts, SVN_TEST_ASSERT(complete); /* Everything available */ { - svn_wc_conflict_reason_t local_change; - svn_wc_conflict_action_t incoming_change; + svn_wc_conflict_reason_t reason; + svn_wc_conflict_action_t action; const char *moved_away_op_root_abspath; - SVN_ERR(svn_wc__conflict_read_tree_conflict(&local_change, - &incoming_change, + SVN_ERR(svn_wc__conflict_read_tree_conflict(&reason, + &action, &moved_away_op_root_abspath, sbox.wc_ctx->db, sbox.wc_abspath, conflict_skel, pool, pool)); - SVN_TEST_ASSERT(local_change == svn_wc_conflict_reason_moved_away); - SVN_TEST_ASSERT(incoming_change == svn_wc_conflict_action_delete); - SVN_TEST_ASSERT(!strcmp(moved_away_op_root_abspath, - sbox_wc_path(&sbox, "A/B"))); + SVN_TEST_ASSERT(reason == svn_wc_conflict_reason_moved_away); + SVN_TEST_ASSERT(action == svn_wc_conflict_action_delete); + SVN_TEST_STRING_ASSERT(moved_away_op_root_abspath, + sbox_wc_path(&sbox, "A/B")); } return SVN_NO_ERROR; @@ -807,9 +809,152 @@ test_prop_conflicts(const svn_test_opts_t *opts, return SVN_NO_ERROR; } +static svn_error_t * +test_prop_conflict_resolving(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_test__sandbox_t b; + svn_skel_t *conflict; + const char *A_abspath; + const char *marker_abspath; + apr_hash_t *conflicted_props; + apr_hash_t *props; + const char *value; + + SVN_ERR(svn_test__sandbox_create(&b, "test_prop_resolving", opts, pool)); + SVN_ERR(sbox_wc_mkdir(&b, "A")); + + SVN_ERR(sbox_wc_propset(&b, "prop-1", "r1", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-2", "r1", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-3", "r1", "A")); + 
SVN_ERR(sbox_wc_propset(&b, "prop-4", "r1", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-5", "r1", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-6", "r1", "A")); + + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "prop-1", "r2", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-2", "r2", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-3", "r2", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-4", NULL, "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-5", NULL, "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-7", "r2", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-8", "r2", "A")); + SVN_ERR(sbox_wc_commit(&b, "")); + + SVN_ERR(sbox_wc_propset(&b, "prop-1", "mod", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-2", "mod", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-3", "mod", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-4", "mod", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-5", "mod", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-6", "mod", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-7", "mod", "A")); + SVN_ERR(sbox_wc_propset(&b, "prop-8", "mod", "A")); + + SVN_ERR(sbox_wc_update(&b, "", 1)); + + A_abspath = sbox_wc_path(&b, "A"); + SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL, + b.wc_ctx->db, A_abspath, + pool, pool)); + + /* We have tree conflicts... */ + SVN_TEST_ASSERT(conflict != NULL); + + SVN_ERR(svn_wc__conflict_read_prop_conflict(&marker_abspath, + NULL, NULL, NULL, + &conflicted_props, + b.wc_ctx->db, A_abspath, + conflict, + pool, pool)); + + SVN_TEST_ASSERT(conflicted_props != NULL); + /* All properties but r6 are conflicted */ + SVN_TEST_ASSERT(apr_hash_count(conflicted_props) == 7); + SVN_TEST_ASSERT(! 
svn_hash_gets(conflicted_props, "prop-6")); + + /* Let's resolve a few conflicts */ + SVN_ERR(sbox_wc_resolve_prop(&b, "A", "prop-1", + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve_prop(&b, "A", "prop-2", + svn_wc_conflict_choose_theirs_conflict)); + SVN_ERR(sbox_wc_resolve_prop(&b, "A", "prop-3", + svn_wc_conflict_choose_merged)); + + SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL, + b.wc_ctx->db, A_abspath, + pool, pool)); + + /* We have tree conflicts... */ + SVN_TEST_ASSERT(conflict != NULL); + + SVN_ERR(svn_wc__conflict_read_prop_conflict(&marker_abspath, + NULL, NULL, NULL, + &conflicted_props, + b.wc_ctx->db, A_abspath, + conflict, + pool, pool)); + + SVN_TEST_ASSERT(conflicted_props != NULL); + SVN_TEST_ASSERT(apr_hash_count(conflicted_props) == 4); + + SVN_ERR(svn_wc__db_read_props(&props, b.wc_ctx->db, A_abspath, + pool, pool)); + + value = svn_prop_get_value(props, "prop-1"); + SVN_TEST_STRING_ASSERT(value, "mod"); + value = svn_prop_get_value(props, "prop-2"); + SVN_TEST_STRING_ASSERT(value, "r1"); + value = svn_prop_get_value(props, "prop-3"); + SVN_TEST_STRING_ASSERT(value, "mod"); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_binary_file_conflict(const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + svn_test__sandbox_t sbox; + const apr_array_header_t *conflicts; + svn_wc_conflict_description2_t *desc; + + SVN_ERR(svn_test__sandbox_create(&sbox, "test_binary_file_conflict", opts, pool)); + + /* Create and add a binary file. */ + SVN_ERR(sbox_file_write(&sbox, "binary-file", "\xff\xff")); + SVN_ERR(sbox_wc_add(&sbox, "binary-file")); + SVN_ERR(sbox_wc_propset(&sbox, SVN_PROP_MIME_TYPE, + "application/octet-stream", "binary-file")); + SVN_ERR(sbox_wc_commit(&sbox, "binary-file")); /* r1 */ + + /* Make a change to the binary file. 
*/ + SVN_ERR(sbox_file_write(&sbox, "binary-file", "\xfc\xfc\xfc\xfc\xfc\xfc")); + SVN_ERR(sbox_wc_commit(&sbox, "binary-file")); /* r2 */ + + /* Update back to r1, make a conflicting change to binary file. */ + SVN_ERR(sbox_wc_update(&sbox, "binary-file", 1)); + SVN_ERR(sbox_file_write(&sbox, "binary-file", "\xfd\xfd\xfd\xfd")); + + /* Update to HEAD and ensure the conflict is marked as binary. */ + SVN_ERR(sbox_wc_update(&sbox, "binary-file", 2)); + SVN_ERR(svn_wc__read_conflicts(&conflicts, NULL, sbox.wc_ctx->db, + sbox_wc_path(&sbox, "binary-file"), + FALSE /* create_tempfiles */, + FALSE /* only_tree_conflict */, + pool, pool)); + SVN_TEST_ASSERT(conflicts->nelts == 1); + desc = APR_ARRAY_IDX(conflicts, 0, svn_wc_conflict_description2_t *); + SVN_TEST_ASSERT(desc->is_binary); + + return SVN_NO_ERROR; +} + + /* The test table. */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_deserialize_tree_conflict, @@ -826,6 +971,11 @@ struct svn_test_descriptor_t test_funcs[] = "read and write a tree conflict"), SVN_TEST_OPTS_PASS(test_prop_conflicts, "test prop conflicts"), + SVN_TEST_OPTS_PASS(test_prop_conflict_resolving, + "test property conflict resolving"), + SVN_TEST_OPTS_PASS(test_binary_file_conflict, + "test binary file conflict"), SVN_TEST_NULL }; +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_wc/db-test.c b/subversion/tests/libsvn_wc/db-test.c index 45e9c4d..76ec893 100644 --- a/subversion/tests/libsvn_wc/db-test.c +++ b/subversion/tests/libsvn_wc/db-test.c @@ -88,248 +88,205 @@ static const char * const TESTING_DATA = ( "insert into wcroot values (1, null); " "insert into pristine values ('$sha1$" SHA1_1 "', NULL, 15, 1, '$md5 $" MD5_1 "'); " +); - /* ### The file_externals column in NODES is temporary, and will be - ### removed. However, to keep the tests passing, we need to add it - ### to the following insert statements. 
*Be sure to remove it*. */ +#define NOT_MOVED FALSE, NULL +#define NO_COPY_FROM 0, NULL, SVN_INVALID_REVNUM +static const svn_test__nodes_data_t nodes_init_data[] = { /* load the base nodes into the nodes table */ - "insert into nodes values (" - " 1, '', 0, null, 1, '', 1, 'normal'," - " null, null, 'dir', '()', 'infinity', null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'A', 0, '', 1, 'A', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 10, null, null, null, null);" - "insert into nodes values (" - " 1, 'B', 0, '', 1, 'B', null, 'excluded'," - " null, null, 'symlink', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'C', 0, '', 1, 'C', null, 'server-excluded'," - " null, null, 'unknown', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'D', 0, '', 1, 'D', null, 'not-present'," - " null, null, 'unknown', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'E', 0, '', 1, 'E', null, 'incomplete'," - " null, null, 'unknown', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'F', 0, '', 1, 'F', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'G', 0, '', 2, 'G-alt', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'H', 0, '', 1, 'H', 1, 'normal'," - " null, null, 'symlink', '()', null, null, 'H-target', 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'I', 
0, '', 1, 'I', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J', 0, '', 1, 'J', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e', 0, 'J', 1, 'J/J-e', 1, 'normal'," - " null, 'other/place', 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-a', 0, 'J/J-e', 1, 'J/J-e/J-e-a', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b', 0, 'J/J-e', 1, 'J/J-e/J-e-b', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b/Jeba', 0, 'J/J-e/J-e-b', 1, 'J/J-e/J-e-b/Jeba', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f', 0, 'J', 1, 'J/J-f', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f/J-f-a', 0, 'J/J-f', 1, 'J/J-f/J-f-a', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K', 0, '', 1, 'K', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K/K-a', 0, 'K', 1, 'K/K-a', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 
15, null, null, null, null);" - "insert into nodes values (" - " 1, 'K/K-b', 0, 'K', 1, 'K/K-b', 1, 'normal'," - " null, 'moved/away', 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "" - /* Load data into NODES table; - ### op_depths have not been calculated by me yet; - the value 1 is just 'good enough' to make the nodes WORKING nodes. */ - "insert into nodes values (" - " 1, 'I', 1, '', 2, 'some/dir', 2, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " null, null, null, null, null);" - - /* I'm not sure what the working J is supposed to represent. It - replaces the base J, but is it a copy or not? It has no - copyfrom, but nodes like J/J-e appear to be deleted which - implies they are children of a copied J. */ - "insert into nodes values (" - " 1, 'J', 1, '', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-a', 1, 'J', null, null, null, 'normal'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-b', 2, 'J', 2, 'some/dir', 2, 'normal'," - " null, null, 'dir', '()', 'infinity', null, null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-b/J-b-a', 3, 'J/J-b', 2, 'another/dir', 2, 'normal'," - " null, null, 'dir', '()', 'infinity', null, null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-b/J-b-b', 2, 'J/J-b', null, null, 2, 'normal'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-c', 1, 'J', null, null, null, 'normal'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, 
null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-c/J-c-a', 1, 'J/J-c', null, null, null, 'normal'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-c', 2, 'J', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-c/J-c-a', 2, 'J/J-c', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-d', 2, 'J', 2, 'moved/file', 2, 'normal'," - " 1, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " 10, null, null, null, null);" - "insert into nodes values (" - " 1, 'moved/file', 0, 'moved', 2, 'moved/file', 2, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " 10, null, null, null, null);" - "insert into nodes values (" - " 1, 'moved/file', 2, 'moved', 2, 'moved/file', 2, 'base-deleted'," - " null, 'J/J-d', 'file', '()', null, null, null, null, null, null," - " 10, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e', 1, 'J', null, null, null, 'normal'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-a', 1, 'J/J-e', null, null, null, 'normal'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b', 1, 'J/J-e', null, null, null, 'normal'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e', 2, 'J', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " 
null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-a', 2, 'J/J-e', null, null, null, 'base-deleted'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b', 2, 'J/J-e', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b/Jeba', 1, 'J/J-e/J-e-b', null, null, null, 'base-deleted'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f', 1, 'J', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f/J-f-a', 1, 'J/J-f', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K', 1, '', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K/K-a', 1, 'K', null, null, null, 'base-deleted'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K/K-b', 1, 'K', null, null, null, 'base-deleted'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'L', 1, '', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'L/L-a', 1, 'L', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, 
null, null);" - "insert into nodes values (" - " 1, 'L/L-a/L-a-a', 1, 'L/L-a', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'L/L-a', 2, 'L', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'L/L-a/L-a-a', 2, 'L/L-a', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'other/place', 2, 'other', null, null, null, 'normal'," - " 1, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'other/place/J-e-a', 2, 'other/place', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'other/place/J-e-b', 2, 'other/place', null, null, null, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'other/place/J-e-b/Jeba', 0, 'other/place/J-e-b', null, null, null, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert into actual_node values (" - " 1, 'I', '', null, null, null, null, null, 'changelist', null, " - " null, null, null, null, null);" - "insert into actual_node values (" - " 1, 'F', '', null, null, null, null, null, null, null, " - " '" F_TC_DATA "', null, null, null, null);" - "insert into actual_node values (" - " 1, 'G', '', null, null, null, null, null, null, null, " - " '" G_TC_DATA "', null, null, null, null);" - ); + { 0, "", "normal", 1, "", 1, NOT_MOVED, + 
svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + { 0, "A", "normal", 1, "A", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1, + FALSE, NULL, 10, 10 }, + + { 0, "B", "excluded", 1, "B", SVN_INVALID_REVNUM, NOT_MOVED, + svn_node_symlink}, + + { 0, "C", "server-excluded", 1, "C", 0, NOT_MOVED, + svn_node_unknown}, + + { 0, "D", "not-present", 1, "D", 0, NOT_MOVED, + svn_node_unknown}, + + { 0, "E", "incomplete", 1, "E", 1, NOT_MOVED, + svn_node_unknown}, + + { 0, "F", "normal", 1, "F", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2}, + + { 0, "G", "normal", 2, "G-alt", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 0, "H", "normal", 1, "H", 1, NOT_MOVED, + svn_node_symlink, "()", NULL, NULL, "H-target", 1, TIME_1a, AUTHOR_1 }, + + { 0, "I", "normal", 1, "I", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J", "normal", 1, "J", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J/J-e", "normal", 1, "J/J-e", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J/J-e/J-e-a", "normal", 1, "J/J-e/J-e-a", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J/J-e/J-e-b", "normal", 1, "J/J-e/J-e-b", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J/J-e/J-e-b/Jeba", "normal", 1, "J/J-e/J-e-b/Jeba", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J/J-f", "normal", 1, "J/J-f", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J/J-f/J-f-a", "normal", 1, "J/J-f/J-f-a", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "J", "normal", 1, "J", 1, NOT_MOVED, + svn_node_dir, "()", 
"infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "K", "normal", 1, "K", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "K/K-a", "normal", 2, "K/K-a", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2, + FALSE, NULL, 15, 14}, + + { 0, "K/K-b", "normal", 2, "K/K-b", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2, + FALSE, NULL, 15, 14}, + + /* Load data into the working layers of NODES */ + + { 1, "I", "normal", 2, "some/dir", 2, NOT_MOVED, + svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + /* J was originally a local addition, but its descendants are replaced, + so let's turn J in a copy */ + { 1, "J", "normal", 2, "q", 2, NOT_MOVED, + svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-a", "normal", 2, "q/J-a", 2, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-b", "normal", 2, "q/J-b", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 3, "J/J-b/J-b-a", "normal", 2, "another/dir", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-b/J-b-b", "normal", 2, "q/J-b/J-b-b", 2, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-c", "normal", 2, "q/J-c", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-c/J-c-a", "normal", 2, "q/J-c/J-c-a", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "J/J-c", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 2, "J/J-c/J-c-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 2, "J/J-d", "normal", 2, "moved/file", 2, TRUE, NULL, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 0, "moved", "normal", 2, "moved", 2, 
NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1 }, + + { 0, "moved/file", "normal", 2, "moved/file", 2, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "moved/file", "base-deleted", NO_COPY_FROM, FALSE, "J/J-d", + svn_node_file}, + + { 1, "J/J-e", "normal", 2, "q/J-e", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-e/J-e-a", "normal", 2, "q/J-e/J-e-a", 2, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-e/J-e-b", "normal", 2, "q/J-e/J-e-b", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "J/J-e", "base-deleted", NO_COPY_FROM, FALSE, "other/place", + svn_node_dir}, + + { 2, "J/J-e/J-e-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + { 2, "J/J-e/J-e-b", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "J/J-e/J-e-b/Jeba", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + { 1, "J/J-f", "normal", 2, "q/J-f", 2, NOT_MOVED, + svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "J/J-f/J-f-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "K", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "K/K-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + { 1, "K/K-b", "base-deleted", NO_COPY_FROM, FALSE, "moved/away", + svn_node_file}, + + { 1, "L", "normal", 2, "from", 2, NOT_MOVED, + svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "L/L-a", "normal", 2, "from/L-a", 2, NOT_MOVED, + svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 1, "L/L-a/L-a-a", "normal", 2, "from/L-a/L-a-a", 2, NOT_MOVED, + svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "L/L-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 2, "L/L-a/L-a-a", 
"base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 0, "other", "normal", 2, "other", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "other/place", "normal", 2, "q/J-e", 2, TRUE, NULL, + svn_node_dir, "()", "immediates", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "other/place/J-e-a", "normal", 2, "q/J-e/J-e-a", 2, TRUE, NULL, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "other/place/J-e-b", "normal", 2, "q/J-e/J-e-b", 2, TRUE, NULL, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2 }, + + { 2, "other/place/J-e-b/Jeba", "normal", 2, "q/J-e/J-e-b/Jeba", 2, TRUE, NULL, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1 }, + + /*** NEW ****/ + { 2, "moved/away", "normal", 2, "K/K-b", 1, TRUE, NULL, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2, + FALSE, NULL, 15, 14}, + { 0 } +}; + +static const svn_test__actual_data_t actual_init_data[] = { + { "A", NULL, "changelist", NULL }, + { "F", NULL, NULL, F_TC_DATA }, + { "G", NULL, NULL, F_TC_DATA }, + + { 0 } +}; static svn_error_t * create_open(svn_wc__db_t **db, @@ -338,10 +295,16 @@ create_open(svn_wc__db_t **db, apr_pool_t *pool) { SVN_ERR(svn_dirent_get_absolute(local_abspath, - svn_dirent_join("fake-wc", subdir, pool), + svn_dirent_join( + svn_test_data_path("db-test", pool), + subdir, pool), pool)); + + SVN_ERR(svn_io_remove_dir2(*local_abspath, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_wc__db_open(db, NULL, FALSE, TRUE, pool, pool)); - SVN_ERR(svn_test__create_fake_wc(*local_abspath, TESTING_DATA, pool, pool)); + SVN_ERR(svn_test__create_fake_wc(*local_abspath, TESTING_DATA, + nodes_init_data, actual_init_data, pool)); svn_test_add_dir_cleanup(*local_abspath); @@ -361,18 +324,17 @@ set_prop(apr_hash_t *props, const char *name, const char *value, } -static svn_boolean_t +static svn_error_t * validate_abspath(const char *wcroot_abspath, const char 
*expected_relpath, const char *actual_abspath, apr_pool_t *scratch_pool) { - if (actual_abspath == NULL) - return FALSE; - return strcmp(svn_dirent_join(wcroot_abspath, + SVN_TEST_STRING_ASSERT(actual_abspath, + svn_dirent_join(wcroot_abspath, expected_relpath, - scratch_pool), - actual_abspath) == 0; + scratch_pool)); + return SVN_NO_ERROR; } @@ -660,7 +622,7 @@ test_inserting_nodes(apr_pool_t *pool) props, 1, TIME_1a, AUTHOR_1, children, svn_depth_infinity, - NULL, NULL, FALSE, NULL, NULL, NULL, + NULL, FALSE, NULL, NULL, NULL, NULL, pool)); /* Replace an incomplete node with a file node. */ @@ -771,11 +733,17 @@ test_children(apr_pool_t *pool) SVN_ERR(svn_wc__db_base_get_children(&children, db, local_abspath, pool, pool)); - SVN_TEST_ASSERT(children->nelts == 11); + SVN_TEST_ASSERT(children->nelts == 13); for (i = children->nelts; i--; ) { const char *name = APR_ARRAY_IDX(children, i, const char *); + if (strcmp(name, "moved") == 0 + || strcmp(name, "other") == 0) + { + continue; + } + SVN_TEST_ASSERT(strlen(name) == 1); /* ### check the actual values */ } @@ -783,11 +751,17 @@ test_children(apr_pool_t *pool) SVN_ERR(svn_wc__db_read_children(&children, db, local_abspath, pool, pool)); - SVN_TEST_ASSERT(children->nelts == 12); + SVN_TEST_ASSERT(children->nelts == 14); for (i = children->nelts; i--; ) { const char *name = APR_ARRAY_IDX(children, i, const char *); + if (strcmp(name, "moved") == 0 + || strcmp(name, "other") == 0) + { + continue; + } + SVN_TEST_ASSERT(strlen(name) == 1); /* ### check the actual values */ } @@ -858,7 +832,7 @@ test_working_info(apr_pool_t *pool) SVN_TEST_ASSERT(checksum == NULL); SVN_TEST_ASSERT(recorded_size == SVN_INVALID_FILESIZE); SVN_TEST_ASSERT(target == NULL); - SVN_TEST_STRING_ASSERT(changelist, "changelist"); + SVN_TEST_STRING_ASSERT(changelist, NULL); SVN_TEST_STRING_ASSERT(original_repos_relpath, "some/dir"); SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO); SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO); @@ -876,8 
+850,31 @@ test_working_info(apr_pool_t *pool) /* ### we need a hojillion more tests in here. I just want to get this ### round checked in, so I'm skipping more tests at this point. */ - - + SVN_ERR(svn_wc__db_read_info( + &status, &kind, &revision, + &repos_relpath, &repos_root_url, &repos_uuid, + &changed_rev, &changed_date, &changed_author, + &depth, &checksum, &target, &original_repos_relpath, + &original_root_url, &original_uuid, &original_revnum, + &lock, &recorded_size, &recorded_time, &changelist, + &conflicted, &op_root, &had_props, &props_mod, + &have_base, &have_more_work, &have_work, + db, svn_dirent_join(local_abspath, "A", pool), + pool, pool)); + SVN_TEST_ASSERT(status == svn_wc__db_status_normal); + SVN_TEST_ASSERT(kind == svn_node_file); + SVN_TEST_STRING_ASSERT(changelist, "changelist"); + SVN_TEST_ASSERT(revision == 1); + SVN_TEST_STRING_ASSERT(repos_relpath, "A"); + SVN_TEST_STRING_ASSERT(repos_root_url, "http://example.com/one"); + SVN_TEST_STRING_ASSERT(repos_uuid, "uuid1"); + SVN_TEST_ASSERT(changed_rev == 1); + SVN_TEST_ASSERT(changed_date == TIME_1a); + SVN_TEST_STRING_ASSERT(changed_author, AUTHOR_1); + SVN_TEST_ASSERT(depth == svn_depth_unknown); + SVN_TEST_ASSERT(checksum != NULL); + SVN_TEST_ASSERT(recorded_size == 10); + SVN_TEST_ASSERT(target == NULL); return SVN_NO_ERROR; } @@ -901,9 +898,16 @@ test_pdh(apr_pool_t *pool) NULL, NULL, pool)); + SVN_ERR(svn_wc__db_base_add_directory( + db, svn_dirent_join(local_abspath, "sub2", pool), + local_abspath, "sub2", ROOT_ONE, UUID_ONE, 1, + apr_hash_make(pool), 1, 1, "me", NULL, + svn_depth_infinity, NULL, FALSE, NULL, NULL, + NULL, NULL, pool)); + SVN_ERR(svn_wc__db_base_add_excluded_node( - db, svn_dirent_join(local_abspath, "sub/A", pool), - "sub/A", ROOT_ONE, UUID_ONE, 1, + db, svn_dirent_join(local_abspath, "sub2/A", pool), + "sub2/A", ROOT_ONE, UUID_ONE, 1, svn_node_file, svn_wc__db_status_server_excluded, NULL, NULL, pool)); @@ -941,17 +945,17 @@ test_scan_addition(apr_pool_t *pool) 
&original_revision, db, svn_dirent_join(local_abspath, "J", pool), pool, pool)); - SVN_TEST_ASSERT(status == svn_wc__db_status_added); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", op_root_abspath, pool)); + SVN_TEST_ASSERT(status == svn_wc__db_status_copied); + SVN_ERR(validate_abspath(local_abspath, "J", op_root_abspath, pool)); SVN_TEST_STRING_ASSERT(repos_relpath, "J"); SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE); SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE); - SVN_TEST_ASSERT(original_repos_relpath == NULL); - SVN_TEST_ASSERT(original_root_url == NULL); - SVN_TEST_ASSERT(original_uuid == NULL); - SVN_TEST_ASSERT(original_revision == SVN_INVALID_REVNUM); + SVN_TEST_STRING_ASSERT(original_repos_relpath, "q"); + SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO); + SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO); + SVN_TEST_ASSERT(original_revision == 2); - /* Simple addition of a file (affects how scan-up is started). */ + /* Simple copy (affects how scan-up is started). 
*/ SVN_ERR(svn_wc__db_scan_addition( &status, &op_root_abspath, &repos_relpath, &repos_root_url, &repos_uuid, @@ -959,15 +963,15 @@ test_scan_addition(apr_pool_t *pool) &original_revision, db, svn_dirent_join(local_abspath, "J/J-a", pool), pool, pool)); - SVN_TEST_ASSERT(status == svn_wc__db_status_added); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", op_root_abspath, pool)); + SVN_TEST_ASSERT(status == svn_wc__db_status_copied); + SVN_ERR(validate_abspath(local_abspath, "J", op_root_abspath, pool)); SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-a"); SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE); SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE); - SVN_TEST_ASSERT(original_repos_relpath == NULL); - SVN_TEST_ASSERT(original_root_url == NULL); - SVN_TEST_ASSERT(original_uuid == NULL); - SVN_TEST_ASSERT(original_revision == SVN_INVALID_REVNUM); + SVN_TEST_STRING_ASSERT(original_repos_relpath, "q"); + SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO); + SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO); + SVN_TEST_ASSERT(original_revision == 2); /* Node was moved here. 
*/ SVN_ERR(svn_wc__db_scan_addition( @@ -985,15 +989,15 @@ test_scan_addition(apr_pool_t *pool) db, svn_dirent_join(local_abspath, "J/J-d", pool), pool, pool)); SVN_TEST_ASSERT(status == svn_wc__db_status_moved_here); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-d", + SVN_ERR(validate_abspath(local_abspath, "J/J-d", op_root_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "moved/file", + SVN_ERR(validate_abspath(local_abspath, "moved/file", moved_from_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-d", + SVN_ERR(validate_abspath(local_abspath, "J/J-d", move_op_root_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "moved/file", + SVN_ERR(validate_abspath(local_abspath, "moved/file", move_op_root_src, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "moved/file", + SVN_ERR(validate_abspath(local_abspath, "moved/file", delete_op_root_abspath, pool)); SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-d"); SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE); @@ -1012,12 +1016,12 @@ test_scan_addition(apr_pool_t *pool) db, svn_dirent_join(local_abspath, "J/J-b", pool), pool, pool)); SVN_TEST_ASSERT(status == svn_wc__db_status_copied); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-b", + SVN_ERR(validate_abspath(local_abspath, "J", op_root_abspath, pool)); SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-b"); SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE); SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE); - SVN_TEST_STRING_ASSERT(original_repos_relpath, "some/dir"); + SVN_TEST_STRING_ASSERT(original_repos_relpath, "q"); SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO); SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO); SVN_TEST_ASSERT(original_revision == 2); @@ -1031,7 +1035,7 @@ test_scan_addition(apr_pool_t *pool) db, svn_dirent_join(local_abspath, "J/J-b/J-b-a", pool), pool, pool)); SVN_TEST_ASSERT(status == svn_wc__db_status_copied); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-b/J-b-a", 
+ SVN_ERR(validate_abspath(local_abspath, "J/J-b/J-b-a", op_root_abspath, pool)); SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-b/J-b-a"); SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE); @@ -1050,12 +1054,12 @@ test_scan_addition(apr_pool_t *pool) db, svn_dirent_join(local_abspath, "J/J-b/J-b-b", pool), pool, pool)); SVN_TEST_ASSERT(status == svn_wc__db_status_copied); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-b", + SVN_ERR(validate_abspath(local_abspath, "J", op_root_abspath, pool)); SVN_TEST_STRING_ASSERT(repos_relpath, "J/J-b/J-b-b"); SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE); SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE); - SVN_TEST_STRING_ASSERT(original_repos_relpath, "some/dir"); + SVN_TEST_STRING_ASSERT(original_repos_relpath, "q"); SVN_TEST_STRING_ASSERT(original_root_url, ROOT_TWO); SVN_TEST_STRING_ASSERT(original_uuid, UUID_TWO); SVN_TEST_ASSERT(original_revision == 2); @@ -1084,13 +1088,13 @@ test_scan_deletion(apr_pool_t *pool) ©_op_root_abspath, db, svn_dirent_join(local_abspath, "J/J-e", pool), pool, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", + SVN_ERR(validate_abspath(local_abspath, "J", base_del_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "other/place", + SVN_ERR(validate_abspath(local_abspath, "other/place", moved_to_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-e", + SVN_ERR(validate_abspath(local_abspath, "J/J-e", work_del_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "other/place", + SVN_ERR(validate_abspath(local_abspath, "other/place", copy_op_root_abspath, pool)); /* Node was moved elsewhere (child of operation root). 
*/ @@ -1101,13 +1105,13 @@ test_scan_deletion(apr_pool_t *pool) ©_op_root_abspath, db, svn_dirent_join(local_abspath, "J/J-e/J-e-a", pool), pool, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", + SVN_ERR(validate_abspath(local_abspath, "J", base_del_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "other/place/J-e-a", + SVN_ERR(validate_abspath(local_abspath, "other/place/J-e-a", moved_to_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-e", + SVN_ERR(validate_abspath(local_abspath, "J/J-e", work_del_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "other/place", + SVN_ERR(validate_abspath(local_abspath, "other/place", copy_op_root_abspath, pool)); /* Root of delete. Parent is a WORKING node. */ @@ -1119,10 +1123,10 @@ test_scan_deletion(apr_pool_t *pool) db, svn_dirent_join(local_abspath, "J/J-c", pool), pool, pool)); /* Implicit delete of "J" (via replacement). */ - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", + SVN_ERR(validate_abspath(local_abspath, "J", base_del_abspath, pool)); SVN_TEST_ASSERT(moved_to_abspath == NULL); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-c", + SVN_ERR(validate_abspath(local_abspath, "J/J-c", work_del_abspath, pool)); /* Child of a deleted root. */ @@ -1134,10 +1138,10 @@ test_scan_deletion(apr_pool_t *pool) db, svn_dirent_join(local_abspath, "J/J-c/J-c-a", pool), pool, pool)); /* Implicit delete of "J" (via replacement). */ - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", + SVN_ERR(validate_abspath(local_abspath, "J", base_del_abspath, pool)); SVN_TEST_ASSERT(moved_to_abspath == NULL); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J/J-c", + SVN_ERR(validate_abspath(local_abspath, "J/J-c", work_del_abspath, pool)); /* Base-deleted tree extending past deleted WORKING subtree. */ @@ -1151,11 +1155,11 @@ test_scan_deletion(apr_pool_t *pool) /* ### I don't understand this. 
"J/J-e/J-e-b/Jeba" is a deleted base node that is not overlayed by the replacement rooted at "J". Why does base_del_abspath refer to "J-e"? */ - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", + SVN_ERR(validate_abspath(local_abspath, "J", base_del_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "other/place/J-e-b/Jeba", + SVN_ERR(validate_abspath(local_abspath, "other/place/J-e-b/Jeba", moved_to_abspath, pool)); - SVN_TEST_ASSERT(work_del_abspath == NULL); + SVN_TEST_STRING_ASSERT(work_del_abspath, NULL); /* Base-deleted tree extending past added WORKING tree. */ SVN_ERR(svn_wc__db_scan_deletion( @@ -1166,10 +1170,10 @@ test_scan_deletion(apr_pool_t *pool) db, svn_dirent_join(local_abspath, "J/J-f/J-f-a", pool), pool, pool)); /* Implicit delete of "J" (via replacement). */ - SVN_TEST_ASSERT(validate_abspath(local_abspath, "J", + SVN_ERR(validate_abspath(local_abspath, "J", base_del_abspath, pool)); - SVN_TEST_ASSERT(moved_to_abspath == NULL); - SVN_TEST_ASSERT(work_del_abspath == NULL); + SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL); + SVN_TEST_STRING_ASSERT(work_del_abspath, NULL); /* Root of delete. Parent is a BASE node. */ SVN_ERR(svn_wc__db_scan_deletion( @@ -1179,10 +1183,10 @@ test_scan_deletion(apr_pool_t *pool) NULL, db, svn_dirent_join(local_abspath, "K", pool), pool, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "K", + SVN_ERR(validate_abspath(local_abspath, "K", base_del_abspath, pool)); - SVN_TEST_ASSERT(moved_to_abspath == NULL); - SVN_TEST_ASSERT(work_del_abspath == NULL); + SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL); + SVN_TEST_STRING_ASSERT(work_del_abspath, NULL); /* Base-deleted tree. Start below root. 
*/ SVN_ERR(svn_wc__db_scan_deletion( @@ -1192,10 +1196,10 @@ test_scan_deletion(apr_pool_t *pool) NULL, db, svn_dirent_join(local_abspath, "K/K-a", pool), pool, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "K", + SVN_ERR(validate_abspath(local_abspath, "K", base_del_abspath, pool)); - SVN_TEST_ASSERT(moved_to_abspath == NULL); - SVN_TEST_ASSERT(work_del_abspath == NULL); + SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL); + SVN_TEST_STRING_ASSERT(work_del_abspath, NULL); /* Base-deleted tree via move. */ SVN_ERR(svn_wc__db_scan_deletion( @@ -1205,13 +1209,13 @@ test_scan_deletion(apr_pool_t *pool) ©_op_root_abspath, db, svn_dirent_join(local_abspath, "K/K-b", pool), pool, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "K", + SVN_ERR(validate_abspath(local_abspath, "K", base_del_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "moved/away", + SVN_ERR(validate_abspath(local_abspath, "moved/away", moved_to_abspath, pool)); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "moved/away", + SVN_ERR(validate_abspath(local_abspath, "moved/away", copy_op_root_abspath, pool)); - SVN_TEST_ASSERT(work_del_abspath == NULL); + SVN_TEST_STRING_ASSERT(work_del_abspath, NULL); /* Subtree deletion of added tree. Start at child. */ SVN_ERR(svn_wc__db_scan_deletion( @@ -1221,9 +1225,9 @@ test_scan_deletion(apr_pool_t *pool) NULL, db, svn_dirent_join(local_abspath, "L/L-a/L-a-a", pool), pool, pool)); - SVN_TEST_ASSERT(base_del_abspath == NULL); - SVN_TEST_ASSERT(moved_to_abspath == NULL); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "L/L-a", + SVN_TEST_STRING_ASSERT(base_del_abspath, NULL); + SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL); + SVN_ERR(validate_abspath(local_abspath, "L/L-a", work_del_abspath, pool)); /* Subtree deletion of added tree. Start at root. 
*/ @@ -1234,9 +1238,9 @@ test_scan_deletion(apr_pool_t *pool) NULL, db, svn_dirent_join(local_abspath, "L/L-a", pool), pool, pool)); - SVN_TEST_ASSERT(base_del_abspath == NULL); - SVN_TEST_ASSERT(moved_to_abspath == NULL); - SVN_TEST_ASSERT(validate_abspath(local_abspath, "L/L-a", + SVN_TEST_STRING_ASSERT(base_del_abspath, NULL); + SVN_TEST_STRING_ASSERT(moved_to_abspath, NULL); + SVN_ERR(validate_abspath(local_abspath, "L/L-a", work_del_abspath, pool)); return SVN_NO_ERROR; @@ -1269,7 +1273,7 @@ test_global_relocate(apr_pool_t *pool) SVN_TEST_STRING_ASSERT(repos_root_url, ROOT_ONE); SVN_TEST_STRING_ASSERT(repos_uuid, UUID_ONE); - /* Test relocating to a repos not existant in the db */ + /* Test relocating to a repos not existent in the db */ SVN_ERR(svn_wc__db_global_relocate(db, local_abspath, ROOT_THREE, pool)); SVN_ERR(svn_wc__db_read_info(NULL, NULL, NULL, &repos_relpath, &repos_root_url, &repos_uuid, @@ -1521,7 +1525,9 @@ test_externals_store(apr_pool_t *pool) return SVN_NO_ERROR; } -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 2; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_getting_info, @@ -1546,3 +1552,5 @@ struct svn_test_descriptor_t test_funcs[] = "externals store"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_wc/entries-compat.c b/subversion/tests/libsvn_wc/entries-compat.c index 34ad425..3470c06 100644 --- a/subversion/tests/libsvn_wc/entries-compat.c +++ b/subversion/tests/libsvn_wc/entries-compat.c @@ -87,199 +87,186 @@ static const char * const TESTING_DATA = ( "insert into wcroot values (1, null); " "insert into pristine values ('$sha1$" SHA1_1 "', NULL, 15, 1, '$md5 $" MD5_1 "'); " + ); - /* ### The file_externals column in NODES is temporary, and will be - ### removed. However, to keep the tests passing, we need to add it - ### to the following insert statements. *Be sure to remove it*. 
*/ +#define NOT_MOVED FALSE, NULL +#define NO_COPY_FROM 0, NULL, SVN_INVALID_REVNUM +static const svn_test__nodes_data_t nodes[] = +{ /* load the base nodes into the nodes table */ - "insert into nodes values (" - " 1, '', 0, null, 1, '', 1, 'normal'," - " null, null, 'dir', '()', 'infinity', null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'A', 0, '', 1, 'A', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 10, null, null, null, null);" - "insert into nodes values (" - " 1, 'B', 0, '', 1, 'B', null, 'excluded'," - " null, null, 'symlink', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'C', 0, '', 1, 'C', null, 'server-excluded'," - " null, null, 'unknown', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'D', 0, '', 1, 'D', null, 'not-present'," - " null, null, 'unknown', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'E', 0, '', 1, 'E', null, 'incomplete'," - " null, null, 'unknown', null, null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'F', 0, '', 1, 'F', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'G', 0, '', 2, 'G-alt', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'H', 0, '', 1, 'H', 1, 'normal'," - " null, null, 'symlink', '()', null, null, 'H-target', 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'I', 0, '', 1, 'I', 1, 'normal'," - " 
null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J', 0, '', 1, 'J', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e', 0, 'J', 1, 'J/J-e', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-a', 0, 'J/J-e', 1, 'J/J-e/J-e-a', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b', 0, 'J/J-e', 1, 'J/J-e/J-e-b', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b/Jeba', 0, 'J/J-e/J-e-b', 1, 'J/J-e/J-e-b/Jeba', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f', 0, 'J', 1, 'J/J-f', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f/J-f-a', 0, 'J/J-f', 1, 'J/J-f/J-f-a', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K', 0, '', 1, 'K', 1, 'normal'," - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K/K-a', 0, 'K', 1, 'K/K-a', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "insert 
into nodes values (" - " 1, 'K/K-b', 0, 'K', 1, 'K/K-b', 1, 'normal'," - " null, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 1, " TIME_1s ", '" AUTHOR_1 "'," - " 15, null, null, null, null);" - "" + { 0, "", "normal", 1, "", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "A", "normal", 1, "A", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "B", "excluded", 1, "B", 1, NOT_MOVED, + svn_node_symlink}, + + { 0, "C", "server-excluded",1, "C", 0, NOT_MOVED, + svn_node_unknown}, + + { 0, "D", "not-present", 1, "D", 0, NOT_MOVED, + svn_node_unknown}, + + { 0, "E", "incomplete", 1, "E", SVN_INVALID_REVNUM, NOT_MOVED, + svn_node_unknown}, + + { 0, "F", "normal", 1, "G-alt", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "G", "normal", 1, "G-alt", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "H", "normal", 1, "I", 1, NOT_MOVED, + svn_node_symlink, NULL, NULL, NULL, "H-target", 1, TIME_1a, AUTHOR_1}, + + { 0, "I", "normal", 1, "I", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J", "normal", 1, "J", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J/J-c", "normal", 1, "J/J-c", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J/J-c/J-c-a", "not-present", 1, "J/J-c/J-c-a", 1, NOT_MOVED, + svn_node_dir}, + + { 0, "J/J-e", "normal", 1, "J/J-e", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J/J-e/J-e-a", "normal", 1, "J/J-e/J-e-a", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J/J-e/J-e-b", "normal", 1, "J/J-e/J-e-b", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J/J-e/J-e-b/Jeba", "normal", 1, 
"J/J-e/J-e-b/Jeba", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J/J-f", "normal", 1, "J/J-f", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "J/J-f/J-f-a", "normal", 1, "J/J-f/J-f-a", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "K", "normal", 1, "K", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "K/K-a", "normal", 1, "K/K-a", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "K/K-b", "normal", 1, "K/K-b", 1, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "L", "normal", 1, "switched", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "L/L-a", "normal", 1, "switched/L-a", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 0, "L/L-a/L-a-a", "normal", 1, "switched/L-a/L-a-a", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + /* Load data into NODES table; ### op_depths have not been calculated by me yet; the value 1 is just 'good enough' to make the nodes WORKING nodes. 
*/ - "insert into nodes values (" - " 1, 'I', 1, '', 2, 'some/dir', 2, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J', 1, '', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-a', 1, 'J', null, null, null, 'normal'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-b', 1, 'J', 2, 'some/dir', 2, 'normal'," - " null, null, 'dir', '()', 'infinity', null, null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-b/J-b-a', 1, 'J/J-b', 2, 'another/dir', 2, 'normal'," - " null, null, 'dir', '()', 'infinity', null, null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-b/J-b-b', 1, 'J/J-b', null, null, null, 'normal'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-c', 1, 'J', null, null, null, 'not-present'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-c/J-c-a', 1, 'J/J-c', null, null, null, 'not-present'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-d', 1, 'J', 2, 'moved/file', 2, 'normal'," - " 1, null, 'file', '()', null, '$sha1$" SHA1_1 "', null, 2, " TIME_2s ", '" AUTHOR_2 "'," - " 10, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e', 1, 'J', null, null, null, 'not-present'," - " null, 'other/place', 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, 
null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-a', 1, 'J/J-e', null, null, null, 'not-present'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b', 1, 'J/J-e', null, null, null, 'not-present'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-e/J-e-b/Jeba', 1, 'J/J-e/J-e-b', null, null, null, 'base-deleted'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f', 1, 'J', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'J/J-f/J-f-a', 1, 'J/J-f', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K', 1, '', null, null, null, 'base-deleted'," - " null, null, 'dir', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K/K-a', 1, 'K', null, null, null, 'base-deleted'," - " null, null, 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'K/K-b', 1, 'K', null, null, null, 'base-deleted'," - " null, 'moved/away', 'file', '()', null, null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'L', 1, '', null, null, null, 'normal'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'L/L-a', 1, 'L', null, null, null, 'not-present'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - 
"insert into nodes values (" - " 1, 'L/L-a/L-a-a', 1, 'L/L-a', null, null, null, 'not-present'," - " null, null, 'dir', '()', 'immediates', null, null, null, null, null," - " null, null, null, null, null);" - "insert into actual_node values (" - " 1, 'I', '', null, null, null, null, null, 'changelist', null, " - " null, null, null, null, null);" - "insert into actual_node values (" - " 1, 'F', '', null, null, null, null, null, null, null, " - " '" F_TC_DATA "', null, null, null, null);" - "insert into actual_node values (" - " 1, 'G', '', null, null, null, null, null, null, null, " - " '" G_TC_DATA "', null, null, null, null);" - " " - "insert into nodes values (" - " 1, 'M', 0, '', 1, 'M', 1, 'normal', " - " null, null, 'dir', '()', null, null, null, 1, " TIME_1s ", '" AUTHOR_1 "', " - " null, null, null, null, null);" - "insert into nodes values (" - " 1, 'M/M-a', 0, 'M', 1, 'M/M-a', 1, 'not-present', " - " null, null, 'file', '()', null, null, null, 1, null, null, " - " null, null, null, null, null);" - ); + + { 1, "I", "normal", 2, "some/file", 2, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2}, + + { 1, "J", "normal", NO_COPY_FROM, NOT_MOVED, + svn_node_dir, NULL, "immediates"}, + + { 2, "J/J-a", "normal", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + { 2, "J/J-b", "normal", 2, "some/dir", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2}, + + { 3, "J/J-b/J-b-a", "normal", 2, "another/dir", 2, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 2, TIME_2a, AUTHOR_2}, + + { 3, "J/J-b/J-b-b", "normal", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + /* This triggers a validation warning: bad delete */ + { 1, "J/J-c", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "J/J-d", "normal", 2, "moved/file", 2, NOT_MOVED, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 2, TIME_2a, AUTHOR_2}, + + { 1, "J/J-e", "base-deleted", NO_COPY_FROM, FALSE, "other/place", + 
svn_node_dir}, + + { 1, "J/J-e/J-e-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + { 1, "J/J-e/J-e-b", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "J/J-e/J-e-b/Jeba", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + { 1, "J/J-f", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 2, "J/J-f", "normal", NO_COPY_FROM, NOT_MOVED, + svn_node_dir, NULL, "immediates"}, + + { 1, "J/J-f/J-f-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "K", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "K/K-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_file}, + + { 1, "K/K-b", "base-deleted", NO_COPY_FROM, FALSE, "moved/away", + svn_node_file}, + + { 1, "L", "normal", NO_COPY_FROM, NOT_MOVED, + svn_node_dir, NULL, "immediates"}, + + { 1, "L/L-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "L/L-a/L-a-a", "base-deleted", NO_COPY_FROM, NOT_MOVED, + svn_node_dir}, + + { 1, "M", "normal", 1, "M", 1, NOT_MOVED, + svn_node_dir, "()", "infinity", NULL, NULL, 1, TIME_1a, AUTHOR_1}, + + { 1, "M/M-a", "not-present", 1, "M/M-a", 1, NOT_MOVED, + svn_node_file}, + + /**** Move target of K/K-b ****/ + { 1, "moved", "normal", NO_COPY_FROM, NOT_MOVED, + svn_node_dir, NULL, "infinity" }, + { 2, "moved/away", "normal", 1, "??", 1, TRUE, NULL, + svn_node_file, "()", NULL, "$sha1$" SHA1_1, NULL, 1, TIME_1a, AUTHOR_1}, + + /**** Move target of J/J-e ****/ + { 1, "other", "normal", NO_COPY_FROM, NOT_MOVED, + svn_node_dir, NULL, "empty"}, + + { 2, "other/place", "normal", 1, "??", 1, TRUE, NULL, + svn_node_dir, "()", "infinity"}, + + { 0 }, +}; + +static const svn_test__actual_data_t actuals[] = +{ + { "I", NULL, "changelist", NULL }, + { "F", NULL, NULL, NULL /* TC-DATA */ }, + { "G", NULL, NULL, NULL /* TC-DATA */ }, + { 0 }, +}; static const char * const M_TESTING_DATA = ( @@ -310,10 +297,8 @@ create_fake_wc(const char *subdir, apr_pool_t *pool) 
SVN_ERR(svn_io_remove_dir2(root, TRUE, NULL, NULL, pool)); SVN_ERR(svn_dirent_get_absolute(&wc_abspath, root, pool)); - SVN_ERR(svn_test__create_fake_wc(wc_abspath, TESTING_DATA, pool, pool)); - - wc_abspath = svn_dirent_join(wc_abspath, "M", pool); - SVN_ERR(svn_test__create_fake_wc(wc_abspath, M_TESTING_DATA, pool, pool)); + SVN_ERR(svn_test__create_fake_wc(wc_abspath, TESTING_DATA, nodes, actuals, + pool)); return SVN_NO_ERROR; } @@ -368,8 +353,8 @@ test_entries_alloc(apr_pool_t *pool) SVN_ERR(svn_wc_entries_read(&entries, adm_access, TRUE /* show_hidden */, pool)); - /* The wcroot has 12 BASE children + 1 WORKING child + "this dir". */ - SVN_TEST_ASSERT(apr_hash_count(entries) == 14); + /* The wcroot has 12 BASE children + 3 WORKING child + "this dir". */ + SVN_TEST_ASSERT(apr_hash_count(entries) == 16); /* The "D" entry in the entries hash should be what we get from the svn_wc_entry() entrypoint. */ @@ -377,7 +362,7 @@ test_entries_alloc(apr_pool_t *pool) "fake-wc", WC_NAME, "D", - NULL); + SVN_VA_NULL); SVN_ERR(svn_wc_entry(&entry, local_relpath, adm_access, TRUE, pool)); SVN_TEST_ASSERT(entry == apr_hash_get(entries, "D", APR_HASH_KEY_STRING)); @@ -400,6 +385,7 @@ test_stubs(apr_pool_t *pool) const svn_wc_entry_t *stub_entry; const svn_wc_entry_t *entry; const svn_wc_entry_t *test_entry; + const char *M_dir; apr_hash_t *entries; #undef WC_NAME @@ -407,13 +393,16 @@ test_stubs(apr_pool_t *pool) SVN_ERR(create_open(&db, &local_abspath, WC_NAME, pool)); + M_dir = svn_dirent_join(local_abspath, "M", pool); + SVN_ERR(svn_test__create_fake_wc(M_dir, M_TESTING_DATA, NULL, NULL, pool)); + /* The "M" entry is a subdir. Let's ensure we can reach its stub, and the actual contents. */ local_relpath = svn_dirent_join_many(pool, "fake-wc", WC_NAME, "M", - NULL); + SVN_VA_NULL); SVN_ERR(svn_wc_adm_open3(&adm_access, NULL /* associated */, @@ -428,6 +417,8 @@ test_stubs(apr_pool_t *pool) subdir baton with ADM_ACCESS. 
*/ SVN_ERR(svn_wc_entry(&stub_entry, local_relpath, adm_access, TRUE, pool)); SVN_TEST_STRING_ASSERT(stub_entry->name, "M"); + /* Schedule add in parent-wc. Schedule normal in obstructing working copy */ + SVN_TEST_ASSERT(stub_entry->schedule == svn_wc_schedule_add); SVN_ERR(svn_wc_adm_open3(&subdir_access, adm_access, @@ -441,6 +432,7 @@ test_stubs(apr_pool_t *pool) /* Ensure we get the real entry. */ SVN_ERR(svn_wc_entry(&entry, local_relpath, subdir_access, TRUE, pool)); SVN_TEST_STRING_ASSERT(entry->name, ""); + SVN_TEST_ASSERT(entry->schedule == svn_wc_schedule_normal); /* Ensure that we get the SAME entry, even using the parent baton. */ SVN_ERR(svn_wc_entry(&test_entry, local_relpath, adm_access, TRUE, pool)); @@ -638,7 +630,9 @@ test_access_baton_like_locking(apr_pool_t *pool) } -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = -1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_entries_alloc, @@ -649,3 +643,5 @@ struct svn_test_descriptor_t test_funcs[] = "access baton like locks must work with wc-ng"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_wc/op-depth-test.c b/subversion/tests/libsvn_wc/op-depth-test.c index 39afcf4..c0ec24b 100644 --- a/subversion/tests/libsvn_wc/op-depth-test.c +++ b/subversion/tests/libsvn_wc/op-depth-test.c @@ -28,6 +28,7 @@ #include <apr_general.h> #include "svn_types.h" +#include "svn_hash.h" #include "svn_io.h" #include "svn_dirent_uri.h" #include "svn_pools.h" @@ -39,9 +40,10 @@ #include "utils.h" -#include "private/svn_wc_private.h" -#include "private/svn_sqlite.h" #include "private/svn_dep_compat.h" +#include "private/svn_sorts_private.h" +#include "private/svn_sqlite.h" +#include "private/svn_wc_private.h" #include "../../libsvn_wc/wc.h" #include "../../libsvn_wc/wc_db.h" #include "../../libsvn_wc/workqueue.h" @@ -51,17 +53,9 @@ #include "../svn_test.h" -#ifdef _MSC_VER -#pragma warning(disable: 4221) /* nonstandard extension used 
*/ -#endif +#include "wc-test-queries.h" -/* This macro is not available in 1.8.x, but let's just use it here */ -#ifndef SVN_VA_NULL -struct svn_null_pointer_constant_stdarg_sentinel_t; - -/** Null pointer constant used as a sentinel in variable argument lists. */ -#define SVN_VA_NULL ((struct svn_null_pointer_constant_stdarg_sentinel_t*)0) -#endif +WC_TEST_QUERIES_SQL_DECLARE_STATEMENTS(op_depth_statements); /* Compare strings, like strcmp but either or both may be NULL which * compares equal to NULL and not equal to any non-NULL string. */ @@ -83,13 +77,13 @@ strcmp_null(const char *s1, const char *s2) static svn_error_t * open_wc_db(svn_sqlite__db_t **sdb, const char *wc_root_abspath, - const char *const *my_statements, apr_pool_t *result_pool, apr_pool_t *scratch_pool) { SVN_ERR(svn_wc__db_util_open_db(sdb, wc_root_abspath, "wc.db", svn_sqlite__mode_readwrite, - FALSE /* exclusive */, my_statements, + FALSE /* exclusive */, 0 /* timeout */, + op_depth_statements, result_pool, scratch_pool)); return SVN_NO_ERROR; } @@ -111,11 +105,30 @@ typedef struct nodes_row_t { const char *props; /* comma-separated list of prop names */ } nodes_row_t; +/* Tree conflict details */ +typedef struct tree_conflict_info +{ + svn_wc_conflict_action_t action; + svn_wc_conflict_reason_t reason; + const char *delete_path; + svn_boolean_t conflicted_fb; /* fallback for reason, action and path 0 */ +} tree_conflict_info; + +/* What conflicts are on a path. */ +typedef struct conflict_info_t { + const char *local_relpath; + svn_boolean_t text_conflicted; + svn_boolean_t prop_conflicted; + + tree_conflict_info tc; +} conflict_info_t; + /* Macro for filling in the REPO_* fields of a non-base NODES_ROW_T * that has no copy-from info. 
*/ #define NO_COPY_FROM SVN_INVALID_REVNUM, NULL, FALSE #define MOVED_HERE FALSE, NULL, TRUE #define NOT_MOVED FALSE, NULL, FALSE +#define FILE_EXTERNAL TRUE /* Return a comma-separated list of the prop names in PROPS, in lexically * ascending order, or NULL if PROPS is empty or NULL. (Here, we don't @@ -145,28 +158,36 @@ props_hash_to_text(apr_hash_t *props, apr_pool_t *pool) return str->len ? str->data : NULL; } -/* Return a human-readable string representing ROW. */ +/* Return a human-readable string representing ROW. With a tiny bit of editting + this can be used to create expected results */ static const char * print_row(const nodes_row_t *row, apr_pool_t *result_pool) { + const char *relpath_str, *presence_str; const char *file_external_str, *moved_here_str, *moved_to_str, *props; if (row == NULL) return "(null)"; + relpath_str = apr_psprintf(result_pool, "\"%s\",", row->local_relpath); + presence_str = apr_psprintf(result_pool, "\"%s\",", row->presence); if (row->moved_to) - moved_to_str = apr_psprintf(result_pool, ", moved-to %s", row->moved_to); + moved_to_str = apr_psprintf(result_pool, ", \"%s\"", row->moved_to); else moved_to_str = ""; - if (row->moved_here) - moved_here_str = ", moved-here"; + if (row->moved_here && !row->file_external && !row->moved_to) + moved_here_str = ", MOVED_HERE"; + else if (row->moved_to) + moved_here_str = ", TRUE"; else moved_here_str = ""; if (row->file_external) - file_external_str = ", file-external"; + file_external_str = ", FILE_EXTERNAL"; + else if (row->moved_to || row->props) + file_external_str = ", FALSE"; else file_external_str = ""; @@ -176,19 +197,17 @@ print_row(const nodes_row_t *row, props = ""; if (row->repo_revnum == SVN_INVALID_REVNUM) - return apr_psprintf(result_pool, "%d, \"%s\", \"%s\"%s%s%s%s", - row->op_depth, row->local_relpath, row->presence, - moved_here_str, moved_to_str, - file_external_str, props); + return apr_psprintf(result_pool, "%d, %-20s%-15s NO_COPY_FROM%s%s%s%s", + row->op_depth, 
relpath_str, presence_str, + file_external_str, moved_here_str, moved_to_str, + props); else - return apr_psprintf(result_pool, "%d, \"%s\", \"%s\", %s ^/%s@%d%s%s%s%s", - row->op_depth, row->local_relpath, row->presence, - row->op_depth == 0 ? "base" : "copyfrom", - row->repo_relpath, (int)row->repo_revnum, - moved_here_str, moved_to_str, - file_external_str, props); + return apr_psprintf(result_pool, "%d, %-20s%-15s %d, \"%s\"%s%s%s%s", + row->op_depth, relpath_str, presence_str, + (int)row->repo_revnum, row->repo_relpath, + file_external_str, moved_here_str, moved_to_str, + props); } - /* A baton to pass through svn_hash_diff() to compare_nodes_rows(). */ typedef struct comparison_baton_t { apr_hash_t *expected_hash; /* Maps "OP_DEPTH PATH" to nodes_row_t. */ @@ -259,20 +278,9 @@ check_db_rows(svn_test__sandbox_t *b, const char *root_path, const nodes_row_t *expected_rows) { - const char *base_relpath = root_path; svn_sqlite__db_t *sdb; int i; svn_sqlite__stmt_t *stmt; - static const char *const statements[] = { - "SELECT op_depth, nodes.presence, nodes.local_relpath, revision," - " repos_path, file_external, def_local_relpath, moved_to, moved_here," - " properties" - " FROM nodes " - " LEFT OUTER JOIN externals" - " ON nodes.local_relpath = externals.local_relpath" - " WHERE nodes.local_relpath = ?1 OR nodes.local_relpath LIKE ?2", - NULL }; -#define STMT_SELECT_NODES_INFO 0 svn_boolean_t have_row; apr_hash_t *found_hash = apr_hash_make(b->pool); @@ -285,12 +293,10 @@ check_db_rows(svn_test__sandbox_t *b, comparison_baton.errors = NULL; /* Fill ACTUAL_HASH with data from the WC DB. */ - SVN_ERR(open_wc_db(&sdb, b->wc_abspath, statements, b->pool, b->pool)); + SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool)); SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_SELECT_NODES_INFO)); - SVN_ERR(svn_sqlite__bindf(stmt, "ss", base_relpath, - (base_relpath[0] - ? 
apr_psprintf(b->pool, "%s/%%", base_relpath) - : "_%"))); + SVN_ERR(svn_sqlite__bindf(stmt, "is", (apr_int64_t)1 /* wc_id */, + root_path)); SVN_ERR(svn_sqlite__step(&have_row, stmt)); while (have_row) { @@ -304,16 +310,17 @@ check_db_rows(svn_test__sandbox_t *b, row->repo_revnum = svn_sqlite__column_revnum(stmt, 3); row->repo_relpath = svn_sqlite__column_text(stmt, 4, b->pool); row->file_external = !svn_sqlite__column_is_null(stmt, 5); - if (row->file_external && svn_sqlite__column_is_null(stmt, 6)) - comparison_baton.errors - = svn_error_createf(SVN_ERR_TEST_FAILED, comparison_baton.errors, - "incomplete {%s}", print_row(row, b->pool)); row->moved_to = svn_sqlite__column_text(stmt, 7, b->pool); row->moved_here = svn_sqlite__column_boolean(stmt, 8); SVN_ERR(svn_sqlite__column_properties(&props_hash, stmt, 9, b->pool, b->pool)); row->props = props_hash_to_text(props_hash, b->pool); + if (row->file_external && svn_sqlite__column_is_null(stmt, 6)) + comparison_baton.errors + = svn_error_createf(SVN_ERR_TEST_FAILED, comparison_baton.errors, + "incomplete {%s}", print_row(row, b->pool)); + key = apr_psprintf(b->pool, "%d %s", row->op_depth, row->local_relpath); apr_hash_set(found_hash, key, APR_HASH_KEY_STRING, row); @@ -338,6 +345,257 @@ check_db_rows(svn_test__sandbox_t *b, return comparison_baton.errors; } +#define EDIT_EDIT_TC {svn_wc_conflict_reason_edited, \ + svn_wc_conflict_action_edit, \ + NULL, TRUE} +#define NO_TC { 0 } +static const char * +print_conflict(const conflict_info_t *row, + apr_pool_t *result_pool) +{ + const char *tc_text; + + if (!row->tc.reason && !row->tc.action && !row->tc.delete_path) + { + if (row->tc.conflicted_fb) + tc_text = "EDIT_EDIT_TC"; + else + tc_text = "NO_TC"; + } + else + { + const char *action; + const char *reason; + const char *path; + +#define CASE_ENUM_STRVAL(x, y) case y: x = #y; break + switch(row->tc.action) + { + CASE_ENUM_STRVAL(action, svn_wc_conflict_action_edit); + CASE_ENUM_STRVAL(action, 
svn_wc_conflict_action_add); + CASE_ENUM_STRVAL(action, svn_wc_conflict_action_delete); + CASE_ENUM_STRVAL(action, svn_wc_conflict_action_replace); + default: + SVN_ERR_MALFUNCTION_NO_RETURN(); + } + switch(row->tc.reason) + { + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_edited); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_obstructed); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_deleted); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_missing); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_unversioned); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_added); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_replaced); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_moved_away); + CASE_ENUM_STRVAL(reason, svn_wc_conflict_reason_moved_here); + default: + SVN_ERR_MALFUNCTION_NO_RETURN(); + } + + if (row->tc.delete_path) + path = apr_psprintf(result_pool, ", \"%s\"", row->tc.delete_path); + else + path = ""; + + tc_text = apr_psprintf(result_pool, "{%s, %s%s}", action, + reason, path); + } + + return apr_psprintf(result_pool, "\"%s\", %s, %s, %s", + row->local_relpath, + row->text_conflicted ? "TRUE" : "FALSE", + row->prop_conflicted ? "TRUE" : "FALSE", + tc_text); +} + +static svn_boolean_t +tree_conflicts_match(const tree_conflict_info *expected, + const tree_conflict_info *actual) +{ + if (expected->action != actual->action) + return FALSE; + else if (expected->reason != actual->reason) + return FALSE; + else if (strcmp_null(expected->delete_path, actual->delete_path) != 0) + return FALSE; + else if (expected->conflicted_fb != actual->conflicted_fb) + return FALSE; + + return TRUE; +} + +static svn_error_t * +compare_conflict_info(const void *key, apr_ssize_t klen, + enum svn_hash_diff_key_status status, + void *baton) +{ + comparison_baton_t *b = baton; + conflict_info_t *expected = apr_hash_get(b->expected_hash, key, klen); + conflict_info_t *found = apr_hash_get(b->found_hash, key, klen); + + if (! 
expected) + { + b->errors = svn_error_createf( + SVN_ERR_TEST_FAILED, b->errors, + "found {%s}", + print_conflict(found, b->scratch_pool)); + } + else if (! found) + { + b->errors = svn_error_createf( + SVN_ERR_TEST_FAILED, b->errors, + "expected {%s}", + print_conflict(expected, b->scratch_pool)); + } + else if (expected->text_conflicted != found->text_conflicted + || expected->prop_conflicted != found->prop_conflicted + || !tree_conflicts_match(&expected->tc, &found->tc)) + { + b->errors = svn_error_createf( + SVN_ERR_TEST_FAILED, b->errors, + "expected {%s}; found {%s}", + print_conflict(expected, b->scratch_pool), + print_conflict(found, b->scratch_pool)); + } + + /* Don't terminate the comparison: accumulate all differences. */ + return SVN_NO_ERROR; +} + +static svn_error_t * +check_db_conflicts(svn_test__sandbox_t *b, + const char *root_path, + const conflict_info_t *expected_conflicts) +{ + svn_sqlite__db_t *sdb; + int i; + svn_sqlite__stmt_t *stmt; + + svn_boolean_t have_row; + apr_hash_t *found_hash = apr_hash_make(b->pool); + apr_hash_t *expected_hash = apr_hash_make(b->pool); + apr_pool_t *iterpool = svn_pool_create(b->pool); + apr_hash_index_t *hi; + comparison_baton_t comparison_baton; + + comparison_baton.expected_hash = expected_hash; + comparison_baton.found_hash = found_hash; + comparison_baton.scratch_pool = b->pool; + comparison_baton.errors = NULL; + + /* Fill ACTUAL_HASH with data from the WC DB. 
*/ + SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool)); + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_SELECT_ACTUAL_INFO)); + SVN_ERR(svn_sqlite__bindf(stmt, "is", (apr_int64_t)1 /* wc_id */, + root_path)); + SVN_ERR(svn_sqlite__step(&have_row, stmt)); + while (have_row) + { + conflict_info_t *row = apr_pcalloc(b->pool, sizeof(*row)); + + row->local_relpath = svn_sqlite__column_text(stmt, 0, b->pool); + + svn_hash_sets(found_hash, row->local_relpath, row); + + SVN_ERR(svn_sqlite__step(&have_row, stmt)); + } + SVN_ERR(svn_sqlite__reset(stmt)); + SVN_ERR(svn_sqlite__close(sdb)); + + for (hi = apr_hash_first(b->pool, found_hash); hi; hi = apr_hash_next(hi)) + { + svn_skel_t *conflict; + conflict_info_t *info = apr_hash_this_val(hi); + const char *local_abspath; + svn_boolean_t tree_conflicted; + + svn_pool_clear(iterpool); + + local_abspath = svn_dirent_join(b->wc_abspath, info->local_relpath, + iterpool); + + SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL, + b->wc_ctx->db, local_abspath, + iterpool, iterpool)); + + SVN_TEST_ASSERT(conflict != NULL); + + SVN_ERR(svn_wc__conflict_read_info(NULL, NULL, + &info->text_conflicted, + &info->prop_conflicted, + &tree_conflicted, + b->wc_ctx->db, local_abspath, + conflict, + iterpool, iterpool)); + + if (tree_conflicted) + { + const char *move_src_abspath; + SVN_ERR(svn_wc__conflict_read_tree_conflict(&info->tc.reason, + &info->tc.action, + &move_src_abspath, + b->wc_ctx->db, + local_abspath, + conflict, + b->pool, iterpool)); + + if (move_src_abspath) + info->tc.delete_path = + svn_dirent_skip_ancestor(b->wc_abspath, move_src_abspath); + + if (!info->tc.reason + && !info->tc.action + && !info->tc.delete_path) + { + info->tc.conflicted_fb = TRUE; + } + } + } + + /* Fill EXPECTED_HASH with data from EXPECTED_ROWS. 
*/ + if (expected_conflicts) + for (i = 0; expected_conflicts[i].local_relpath != NULL; i++) + { + const conflict_info_t *row = &expected_conflicts[i]; + + svn_hash_sets(expected_hash, row->local_relpath, row); + } + + /* Compare EXPECTED_HASH with ACTUAL_HASH and return any errors. */ + SVN_ERR(svn_hash_diff(expected_hash, found_hash, + compare_conflict_info, &comparison_baton, b->pool)); + return comparison_baton.errors; +} + +static svn_error_t * +verify_db_callback(void *baton, + const char *wc_abspath, + const char *local_relpath, + int op_depth, + int id, + const char *msg, + apr_pool_t *scratch_pool) +{ + if (op_depth >= 0) + return svn_error_createf(SVN_ERR_WC_CORRUPT, NULL, + "Verify: %s: %s (%d): SV%04d %s", + wc_abspath, local_relpath, op_depth, id, msg); + else + return svn_error_createf(SVN_ERR_WC_CORRUPT, NULL, + "DB-VRFY: %s: %s: SV%04d %s", + wc_abspath, local_relpath, id, msg); +} + +static svn_error_t * +verify_db(svn_test__sandbox_t *b) +{ + SVN_ERR(svn_wc__db_verify_db_full(b->wc_ctx->db, b->wc_abspath, + verify_db_callback, NULL, b->pool)); + + return SVN_NO_ERROR; +} + /* ---------------------------------------------------------------------- */ /* The test functions */ @@ -374,7 +632,7 @@ wc_wc_copies(svn_test__sandbox_t *b) /* Create the various kinds of source node which will be copied */ - sbox_file_write(b, source_added_file, "New file"); + SVN_ERR(sbox_file_write(b, source_added_file, "New file")); SVN_ERR(sbox_wc_add(b, source_added_file)); SVN_ERR(sbox_wc_mkdir(b, source_added_dir)); SVN_ERR(sbox_wc_mkdir(b, source_added_dir2)); @@ -578,7 +836,7 @@ repo_wc_copies(svn_test__sandbox_t *b) } /* Perform each copy. 
*/ - SVN_ERR(svn_client_create_context(&ctx, b->pool)); + SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool)); for (subtest = subtests; subtest->from_path; subtest++) { svn_opt_revision_t rev = { svn_opt_revision_number, { 1 } }; @@ -647,7 +905,7 @@ test_deletes(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(svn_test__sandbox_create(&b, "deletes", opts, pool)); SVN_ERR(sbox_add_and_commit_greek_tree(&b)); - sbox_file_write(&b, "A/B/E/new-file", "New file"); + SVN_ERR(sbox_file_write(&b, "A/B/E/new-file", "New file")); SVN_ERR(sbox_wc_add(&b, "A/B/E/new-file")); { nodes_row_t rows[] = { @@ -709,7 +967,7 @@ test_adds(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_add_and_commit_greek_tree(&b)); /* add file */ - sbox_file_write(&b, "new-file", "New file"); + SVN_ERR(sbox_file_write(&b, "new-file", "New file")); SVN_ERR(sbox_wc_add(&b, "new-file")); { nodes_row_t rows[] = { @@ -731,7 +989,7 @@ test_adds(const svn_test_opts_t *opts, apr_pool_t *pool) /* replace file */ SVN_ERR(sbox_wc_delete(&b, "iota")); - sbox_file_write(&b, "iota", "New iota file"); + SVN_ERR(sbox_file_write(&b, "iota", "New iota file")); SVN_ERR(sbox_wc_add(&b, "iota")); { nodes_row_t rows[] = { @@ -766,12 +1024,12 @@ test_adds_change_kind(const svn_test_opts_t *opts, apr_pool_t *pool) { svn_test__sandbox_t b; - SVN_ERR(svn_test__sandbox_create(&b, "adds", opts, pool)); + SVN_ERR(svn_test__sandbox_create(&b, "test_adds_change_kind", opts, pool)); SVN_ERR(sbox_add_and_commit_greek_tree(&b)); /* replace dir with file */ SVN_ERR(sbox_wc_delete(&b, "A/B/E")); - sbox_file_write(&b, "A/B/E", "New E file"); + SVN_ERR(sbox_file_write(&b, "A/B/E", "New E file")); SVN_ERR(sbox_wc_add(&b, "A/B/E")); { nodes_row_t rows[] = { @@ -1024,47 +1282,53 @@ insert_dirs(svn_test__sandbox_t *b, { svn_sqlite__db_t *sdb; svn_sqlite__stmt_t *stmt; - static const char * const statements[] = { - "DELETE FROM nodes;", - "INSERT INTO nodes (local_relpath, op_depth, presence, repos_path," - " revision, 
wc_id, repos_id, kind, depth)" - " VALUES (?1, ?2, ?3, ?4, ?5, 1, 1, 'dir', 'infinity');", - "INSERT INTO nodes (local_relpath, op_depth, presence, repos_path," - " revision, parent_relpath, wc_id, repos_id, kind, depth)" - " VALUES (?1, ?2, ?3, ?4, ?5, ?6, 1, 1, 'dir', 'infinity');", - NULL, - }; - SVN_ERR(open_wc_db(&sdb, b->wc_abspath, statements, b->pool, b->pool)); + SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool)); - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 0)); + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_DELETE_NODES)); SVN_ERR(svn_sqlite__step_done(stmt)); while(nodes->local_relpath) { - if (nodes->local_relpath[0]) + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_INSERT_NODE)); + SVN_ERR(svn_sqlite__bindf(stmt, "sdssrs", + nodes->local_relpath, + nodes->op_depth, + nodes->presence, + nodes->repo_relpath, + nodes->repo_revnum, + nodes->local_relpath[0] + ? svn_relpath_dirname(nodes->local_relpath, + b->pool) + : NULL)); + + if (nodes->moved_to) + SVN_ERR(svn_sqlite__bind_text(stmt, 7, nodes->moved_to)); + if (nodes->moved_here) + SVN_ERR(svn_sqlite__bind_int(stmt, 8, 1)); + if (nodes->props) { - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 2)); - SVN_ERR(svn_sqlite__bindf(stmt, "sdssrs", - nodes->local_relpath, - nodes->op_depth, - nodes->presence, - nodes->repo_relpath, - nodes->repo_revnum, - svn_relpath_dirname(nodes->local_relpath, - b->pool))); + int i; + apr_hash_t *props = apr_hash_make(b->pool); + apr_array_header_t *names = svn_cstring_split(nodes->props, ",", + TRUE, b->pool); + + for (i = 0; i < names->nelts; i++) + { + const char *name = APR_ARRAY_IDX(names, i, const char *); + svn_hash_sets(props, name, svn_string_create(name, b->pool)); + } + + SVN_ERR(svn_sqlite__bind_properties(stmt, 9, props, b->pool)); } - else + else if (nodes->repo_relpath + && strcmp(nodes->presence, "normal") == 0) { - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 1)); - SVN_ERR(svn_sqlite__bindf(stmt, "sdssr", - nodes->local_relpath, - 
nodes->op_depth, - nodes->presence, - nodes->repo_relpath, - nodes->repo_revnum)); + SVN_ERR(svn_sqlite__bind_text(stmt, 9, "()")); } + /* File externals? */ + SVN_ERR(svn_sqlite__step_done(stmt)); ++nodes; } @@ -1102,7 +1366,7 @@ base_dir_insert_remove(svn_test__sandbox_t *b, "not-even-a-uuid", revision, apr_hash_make(b->pool), revision, 0, NULL, NULL, svn_depth_infinity, - NULL, NULL, FALSE, NULL, NULL, NULL, + NULL, FALSE, NULL, NULL, NULL, NULL, b->pool)); after = apr_palloc(b->pool, sizeof(*after) * (apr_size_t)(num_before + num_added + 1)); @@ -1115,9 +1379,7 @@ base_dir_insert_remove(svn_test__sandbox_t *b, SVN_ERR(check_db_rows(b, "", after)); SVN_ERR(svn_wc__db_base_remove(b->wc_ctx->db, dir_abspath, - FALSE /* keep_as_Working */, - FALSE /* queue_deletes */, - FALSE /* remove_locks */, + FALSE, FALSE, FALSE, SVN_INVALID_REVNUM, NULL, NULL, b->pool)); SVN_ERR(svn_wc__wq_run(b->wc_ctx->db, dir_abspath, @@ -1427,29 +1689,11 @@ test_base_dir_insert_remove(const svn_test_opts_t *opts, apr_pool_t *pool) } static svn_error_t * -temp_op_make_copy(svn_test__sandbox_t *b, - const char *local_relpath, - nodes_row_t *before, - nodes_row_t *after) -{ - const char *dir_abspath = svn_path_join(b->wc_abspath, local_relpath, - b->pool); - - SVN_ERR(insert_dirs(b, before)); - - SVN_ERR(svn_wc__db_op_make_copy(b->wc_ctx->db, dir_abspath, NULL, NULL, b->pool)); - - SVN_ERR(check_db_rows(b, "", after)); - - return SVN_NO_ERROR; -} - -static svn_error_t * -test_temp_op_make_copy(const svn_test_opts_t *opts, apr_pool_t *pool) +test_db_make_copy(const svn_test_opts_t *opts, apr_pool_t *pool) { svn_test__sandbox_t b; - SVN_ERR(svn_test__sandbox_create(&b, "temp_op_make_copy", opts, pool)); + SVN_ERR(svn_test__sandbox_create(&b, "make_copy", opts, pool)); { /* / norm - @@ -1480,7 +1724,7 @@ test_temp_op_make_copy(const svn_test_opts_t *opts, apr_pool_t *pool) { 2, "A/F", "normal", 1, "S2" }, { 2, "A/F/G", "normal", 1, "S2/G" }, { 2, "A/F/H", "not-present", 1, "S2/H" }, - { 2, 
"A/F/E", "base-deleted", 2, "A/F/E" }, + { 2, "A/F/E", "base-deleted", NO_COPY_FROM }, { 0 } }; /* / norm - @@ -1518,14 +1762,18 @@ test_temp_op_make_copy(const svn_test_opts_t *opts, apr_pool_t *pool) { 2, "A/B", "normal", NO_COPY_FROM }, { 2, "A/B/C", "base-deleted", NO_COPY_FROM }, { 2, "A/F", "normal", 1, "S2" }, - { 2, "A/F/E", "base-deleted", 2, "A/F/E" }, + { 2, "A/F/E", "base-deleted", NO_COPY_FROM }, { 2, "A/F/G", "normal", 1, "S2/G" }, { 2, "A/F/H", "not-present", 1, "S2/H" }, { 3, "A/B/C", "normal", NO_COPY_FROM }, { 0 } }; - SVN_ERR(temp_op_make_copy(&b, "A", before, after)); + SVN_ERR(insert_dirs(&b, before)); + SVN_ERR(svn_wc__db_op_make_copy(b.wc_ctx->db, sbox_wc_path(&b, "A"), + NULL, NULL, pool)); + + SVN_ERR(check_db_rows(&b, "", after)); } return SVN_NO_ERROR; @@ -1821,46 +2069,32 @@ insert_actual(svn_test__sandbox_t *b, { svn_sqlite__db_t *sdb; svn_sqlite__stmt_t *stmt; - static const char * const statements[] = { - "DELETE FROM actual_node;", - "INSERT INTO actual_node (local_relpath, changelist, wc_id)" - " VALUES (?1, ?2, 1)", - "INSERT INTO actual_node (local_relpath, parent_relpath, changelist, wc_id)" - " VALUES (?1, ?2, ?3, 1)", - "UPDATE nodes SET kind = 'file' WHERE wc_id = 1 and local_relpath = ?1", - NULL, - }; if (!actual) return SVN_NO_ERROR; - SVN_ERR(open_wc_db(&sdb, b->wc_abspath, statements, b->pool, b->pool)); + SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool)); - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 0)); + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_DELETE_ACTUAL)); SVN_ERR(svn_sqlite__step_done(stmt)); while(actual->local_relpath) { - if (actual->local_relpath[0]) - { - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 2)); - SVN_ERR(svn_sqlite__bindf(stmt, "sss", - actual->local_relpath, - svn_relpath_dirname(actual->local_relpath, - b->pool), - actual->changelist)); - } - else - { - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 1)); - SVN_ERR(svn_sqlite__bindf(stmt, "ss", - actual->local_relpath, - 
actual->changelist)); - } + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_INSERT_ACTUAL)); + SVN_ERR(svn_sqlite__bindf(stmt, "sss", + actual->local_relpath, + actual->local_relpath[0] + ? svn_relpath_dirname(actual->local_relpath, + b->pool) + : NULL, + actual->changelist)); SVN_ERR(svn_sqlite__step_done(stmt)); if (actual->changelist) { - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 3)); + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, + STMT_ENSURE_EMPTY_PRISTINE)); + SVN_ERR(svn_sqlite__step_done(stmt)); + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_NODES_SET_FILE)); SVN_ERR(svn_sqlite__bindf(stmt, "s", actual->local_relpath)); SVN_ERR(svn_sqlite__step_done(stmt)); } @@ -1876,10 +2110,6 @@ check_db_actual(svn_test__sandbox_t* b, actual_row_t *rows) { svn_sqlite__db_t *sdb; svn_sqlite__stmt_t *stmt; - static const char * const statements[] = { - "SELECT local_relpath FROM actual_node WHERE wc_id = 1;", - NULL, - }; svn_boolean_t have_row; apr_hash_t *path_hash = apr_hash_make(b->pool); @@ -1893,15 +2123,15 @@ check_db_actual(svn_test__sandbox_t* b, actual_row_t *rows) ++rows; } - SVN_ERR(open_wc_db(&sdb, b->wc_abspath, statements, b->pool, b->pool)); + SVN_ERR(open_wc_db(&sdb, b->wc_abspath, b->pool, b->pool)); - SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, 0)); + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, STMT_SELECT_ALL_ACTUAL)); SVN_ERR(svn_sqlite__step(&have_row, stmt)); while (have_row) { const char *local_relpath = svn_sqlite__column_text(stmt, 0, b->pool); if (!apr_hash_get(path_hash, local_relpath, APR_HASH_KEY_STRING)) - return svn_error_createf(SVN_ERR_TEST_FAILED, svn_sqlite__close(sdb), + return svn_error_createf(SVN_ERR_TEST_FAILED, svn_sqlite__reset(stmt), "actual '%s' unexpected", local_relpath); apr_hash_set(path_hash, local_relpath, APR_HASH_KEY_STRING, NULL); SVN_ERR(svn_sqlite__step(&have_row, stmt)); @@ -1910,8 +2140,8 @@ check_db_actual(svn_test__sandbox_t* b, actual_row_t *rows) if (apr_hash_count(path_hash)) { const char 
*local_relpath - = svn__apr_hash_index_key(apr_hash_first(b->pool, path_hash)); - return svn_error_createf(SVN_ERR_TEST_FAILED, svn_sqlite__close(sdb), + = apr_hash_this_key(apr_hash_first(b->pool, path_hash)); + return svn_error_createf(SVN_ERR_TEST_FAILED, svn_sqlite__reset(stmt), "actual '%s' expected", local_relpath); } @@ -1943,7 +2173,7 @@ revert(svn_test__sandbox_t *b, SVN_ERR(insert_actual(b, before_actual)); SVN_ERR(check_db_rows(b, "", before_nodes)); SVN_ERR(check_db_actual(b, before_actual)); - err = svn_wc__db_op_revert(b->wc_ctx->db, local_abspath, depth, + err = svn_wc__db_op_revert(b->wc_ctx->db, local_abspath, depth, FALSE, b->pool, b->pool); if (err) { @@ -2511,7 +2741,7 @@ check_hash_keys(apr_hash_t *hash, for (hi = apr_hash_first(scratch_pool, hash); hi; hi = apr_hash_next(hi)) { - const char *name = svn__apr_hash_index_key(hi); + const char *name = apr_hash_this_key(hi); err = svn_error_compose_create( err, svn_error_createf(SVN_ERR_TEST_FAILED, NULL, _("Found, not expected: '%s'"), name)); @@ -2613,8 +2843,8 @@ test_children_of_replaced_dir(const svn_test_opts_t *opts, apr_pool_t *pool) &children_array, b.wc_ctx->db, A_abspath, pool, pool)); SVN_ERR(CHECK_ARRAY(children_array, working_children_inc_hidden, pool)); - SVN_ERR(svn_wc__node_get_children(&children_array, b.wc_ctx, A_abspath, - TRUE /* show_hidden */, pool, pool)); + SVN_ERR(svn_wc__db_read_children(&children_array, b.wc_ctx->db, A_abspath, + pool, pool)); SVN_ERR(CHECK_ARRAY(children_array, all_children_inc_hidden, pool)); /* I am not testing svn_wc__node_get_children(show_hidden=FALSE) because @@ -2623,17 +2853,14 @@ test_children_of_replaced_dir(const svn_test_opts_t *opts, apr_pool_t *pool) * a 'hidden' child of the working dir (so should be excluded). 
*/ SVN_ERR(svn_wc__node_get_children_of_working_node( - &children_array, b.wc_ctx, A_abspath, TRUE /* show_hidden */, - pool, pool)); - SVN_ERR(CHECK_ARRAY(children_array, working_children_inc_hidden, pool)); - - SVN_ERR(svn_wc__node_get_children_of_working_node( - &children_array, b.wc_ctx, A_abspath, FALSE /* show_hidden */, + &children_array, b.wc_ctx, A_abspath, pool, pool)); SVN_ERR(CHECK_ARRAY(children_array, working_children_exc_hidden, pool)); SVN_ERR(svn_wc__db_read_children_info(&children_hash, &conflicts_hash, - b.wc_ctx->db, A_abspath, pool, pool)); + b.wc_ctx->db, A_abspath, + FALSE /* base_tree_only */, + pool, pool)); SVN_ERR(CHECK_HASH(children_hash, all_children_inc_hidden, pool)); /* We don't yet have a svn_wc__db_read_children_info2() to test. */ @@ -2970,7 +3197,7 @@ test_shadowed_update(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(svn_test__sandbox_create(&b, "shadowed_update", opts, pool)); /* Set up the base state as revision 1. */ - sbox_file_write(&b, "iota", "This is iota"); + SVN_ERR(sbox_file_write(&b, "iota", "This is iota")); SVN_ERR(sbox_wc_add(&b, "iota")); SVN_ERR(sbox_wc_commit(&b, "")); @@ -2985,7 +3212,7 @@ test_shadowed_update(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_commit(&b, "")); /* And change something in r3 */ - sbox_file_write(&b, "iota", "This is a new iota"); + SVN_ERR(sbox_file_write(&b, "iota", "This is a new iota")); SVN_ERR(sbox_wc_commit(&b, "")); /* And delete C & M */ @@ -3306,12 +3533,12 @@ commit_file_external(const svn_test_opts_t *opts, apr_pool_t *pool) svn_test__sandbox_t b; SVN_ERR(svn_test__sandbox_create(&b, "commit_file_external", opts, pool)); - sbox_file_write(&b, "f", "this is f\n"); + SVN_ERR(sbox_file_write(&b, "f", "this is f\n")); SVN_ERR(sbox_wc_add(&b, "f")); SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", "")); SVN_ERR(sbox_wc_commit(&b, "")); SVN_ERR(sbox_wc_update(&b, "", 1)); - sbox_file_write(&b, "g", "this is f\nmodified via g\n"); + 
SVN_ERR(sbox_file_write(&b, "g", "this is f\nmodified via g\n")); SVN_ERR(sbox_wc_commit(&b, "")); SVN_ERR(sbox_wc_update(&b, "", 2)); @@ -3334,7 +3561,7 @@ revert_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool) svn_test__sandbox_t b; SVN_ERR(svn_test__sandbox_create(&b, "revert_file_externals", opts, pool)); - sbox_file_write(&b, "f", "this is f\n"); + SVN_ERR(sbox_file_write(&b, "f", "this is f\n")); SVN_ERR(sbox_wc_add(&b, "f")); SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", "")); SVN_ERR(sbox_wc_commit(&b, "")); @@ -3356,11 +3583,13 @@ revert_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_update(&b, "", 1)); { nodes_row_t rows[] = { - { 0, "", "normal", 1, "" }, - { 0, "f", "normal", 1, "f" }, - { 1, "A", "normal", NO_COPY_FROM }, - { 0, "h", "normal", 1, "f", TRUE }, - { 0, "A/g", "normal", 1, "f", TRUE }, + { 0, "", "normal", 1, "" }, + { 0, "f", "normal", 1, "f" }, + { 1, "A", "normal", NO_COPY_FROM }, + { 0, "h", "normal", 1, "f", TRUE }, + { 0, "A/g", "normal", 1, "f", TRUE }, + + { 0, "g", "not-present", 0, "g"}, { 0 } }; SVN_ERR(check_db_rows(&b, "", rows)); @@ -3369,10 +3598,12 @@ revert_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity)); { nodes_row_t rows[] = { - { 0, "", "normal", 1, "" }, - { 0, "f", "normal", 1, "f" }, - { 0, "h", "normal", 1, "f", TRUE }, - { 0, "A/g", "normal", 1, "f", TRUE }, + { 0, "", "normal", 1, "" }, + { 0, "f", "normal", 1, "f" }, + { 0, "h", "normal", 1, "f", TRUE }, + { 0, "A/g", "normal", 1, "f", TRUE }, + + { 0, "g", "not-present", 0, "g"}, { 0 } }; SVN_ERR(check_db_rows(&b, "", rows)); @@ -3381,9 +3612,11 @@ revert_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_update(&b, "", 1)); { nodes_row_t rows[] = { - { 0, "", "normal", 1, "" }, - { 0, "f", "normal", 1, "f" }, - { 0, "g", "normal", 1, "f", TRUE }, + { 0, "", "normal", 1, "" }, + { 0, "f", "normal", 1, "f" }, + { 0, 
"g", "normal", 1, "f", TRUE }, + + { 0, "h", "not-present", 0, "h"}, { 0 } }; SVN_ERR(check_db_rows(&b, "", rows)); @@ -3398,7 +3631,7 @@ copy_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool) svn_test__sandbox_t b; SVN_ERR(svn_test__sandbox_create(&b, "copy_file_externals", opts, pool)); - sbox_file_write(&b, "f", "this is f\n"); + SVN_ERR(sbox_file_write(&b, "f", "this is f\n")); SVN_ERR(sbox_wc_add(&b, "f")); SVN_ERR(sbox_wc_mkdir(&b, "A")); SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f g", "A")); @@ -3560,6 +3793,8 @@ incomplete_switch(const svn_test_opts_t *opts, apr_pool_t *pool) }; SVN_ERR(insert_dirs(&b, before)); + SVN_ERR(svn_io_remove_dir2(sbox_wc_path(&b, "A/B/C/D"), FALSE, + NULL, NULL, pool)); SVN_ERR(check_db_rows(&b, "", before)); SVN_ERR(sbox_wc_update(&b, "", 4)); SVN_ERR(check_db_rows(&b, "", after_update)); @@ -4430,27 +4665,27 @@ move_update(const svn_test_opts_t *opts, apr_pool_t *pool) /* r1: Create files 'f', 'h' */ SVN_ERR(sbox_wc_mkdir(&b, "A")); SVN_ERR(sbox_wc_mkdir(&b, "A/B")); - sbox_file_write(&b, "A/B/f", "r1 content\n"); - sbox_file_write(&b, "A/B/h", "r1 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\n")); + SVN_ERR(sbox_file_write(&b, "A/B/h", "r1 content\n")); SVN_ERR(sbox_wc_add(&b, "A/B/f")); SVN_ERR(sbox_wc_add(&b, "A/B/h")); SVN_ERR(sbox_wc_commit(&b, "")); /* r2: Modify 'f' */ - sbox_file_write(&b, "A/B/f", "r1 content\nr2 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\nr2 content\n")); SVN_ERR(sbox_wc_commit(&b, "")); /* r3: Delete 'h', add 'g' */ - sbox_file_write(&b, "A/B/g", "r3 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/g", "r3 content\n")); SVN_ERR(sbox_wc_add(&b, "A/B/g")); SVN_ERR(sbox_wc_delete(&b, "A/B/h")); SVN_ERR(sbox_wc_commit(&b, "")); /* r4: Add a new subtree 'X' */ SVN_ERR(sbox_wc_mkdir(&b, "X")); - sbox_file_write(&b, "X/f", "r4 content\n"); - sbox_file_write(&b, "X/g", "r4 content\n"); - sbox_file_write(&b, "X/h", "r4 content\n"); + 
SVN_ERR(sbox_file_write(&b, "X/f", "r4 content\n")); + SVN_ERR(sbox_file_write(&b, "X/g", "r4 content\n")); + SVN_ERR(sbox_file_write(&b, "X/h", "r4 content\n")); SVN_ERR(sbox_wc_add(&b, "X/f")); SVN_ERR(sbox_wc_add(&b, "X/g")); SVN_ERR(sbox_wc_add(&b, "X/h")); @@ -5031,8 +5266,10 @@ mixed_rev_move(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_mkdir(&b, "A")); SVN_ERR(sbox_wc_commit(&b, "")); SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + SVN_ERR(sbox_wc_mkdir(&b, "A/D")); SVN_ERR(sbox_wc_commit(&b, "")); SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); + SVN_ERR(sbox_wc_mkdir(&b, "A/D/E")); SVN_ERR(sbox_wc_commit(&b, "")); { @@ -5041,6 +5278,8 @@ mixed_rev_move(const svn_test_opts_t *opts, apr_pool_t *pool) {0, "A", "normal", 1, "A"}, {0, "A/B", "normal", 2, "A/B"}, {0, "A/B/C", "normal", 3, "A/B/C"}, + {0, "A/D", "normal", 2, "A/D"}, + {0, "A/D/E", "normal", 3, "A/D/E"}, {0} }; SVN_ERR(check_db_rows(&b, "", nodes)); @@ -5060,20 +5299,30 @@ mixed_rev_move(const svn_test_opts_t *opts, apr_pool_t *pool) {0, "A", "normal", 1, "A"}, {0, "A/B", "normal", 2, "A/B"}, {0, "A/B/C", "normal", 3, "A/B/C"}, + {0, "A/D", "normal", 2, "A/D"}, + {0, "A/D/E", "normal", 3, "A/D/E"}, {1, "A", "base-deleted", NO_COPY_FROM, "X"}, {1, "A/B", "base-deleted", NO_COPY_FROM}, {1, "A/B/C", "base-deleted", NO_COPY_FROM}, + {1, "A/D", "base-deleted", NO_COPY_FROM}, + {1, "A/D/E", "base-deleted", NO_COPY_FROM}, {1, "X", "normal", 1, "A", MOVED_HERE}, {1, "X/B", "not-present", 2, "A/B"}, + {1, "X/D", "not-present", 2, "A/D"}, {2, "X/B", "normal", 2, "A/B"}, {2, "X/B/C", "not-present", 3, "A/B/C"}, + {2, "X/D", "normal", 2, "A/D"}, + {2, "X/D/E", "not-present", 3, "A/D/E"}, {3, "X/B/C", "normal", 3, "A/B/C"}, + {3, "X/D/E", "normal", 3, "A/D/E"}, + {0} }; SVN_ERR(check_db_rows(&b, "", nodes)); } /* ### These values PASS but I'm not sure they are correct. 
*/ + /* A/B/C doesn't exist as X/B/C at op depth 1, but is reported */ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db, sbox_wc_path(&b, "A/B/C"), pool, pool)); SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B/C")); @@ -5099,32 +5348,109 @@ mixed_rev_move(const svn_test_opts_t *opts, apr_pool_t *pool) {0, "A", "normal", 1, "A"}, {0, "A/B", "normal", 2, "A/B"}, {0, "A/B/C", "normal", 3, "A/B/C"}, + {0, "A/D", "normal", 2, "A/D"}, + {0, "A/D/E", "normal", 3, "A/D/E"}, {1, "A", "base-deleted", NO_COPY_FROM, "X"}, {1, "A/B", "base-deleted", NO_COPY_FROM}, {1, "A/B/C", "base-deleted", NO_COPY_FROM}, + {1, "A/D", "base-deleted", NO_COPY_FROM}, + {1, "A/D/E", "base-deleted", NO_COPY_FROM}, {1, "X", "normal", 1, "A", MOVED_HERE}, {1, "X/B", "not-present", 2, "A/B"}, + {1, "X/D", "not-present", 2, "A/D"}, + {2, "X/D", "normal", 2, "A/D"}, + {2, "X/D/E", "not-present", 3, "A/D/E"}, {2, "X/Y", "normal", 2, "A/B"}, {2, "X/Y/C", "not-present", NO_COPY_FROM}, {3, "X/Y/C", "normal", 3, "A/B/C"}, + {3, "X/D/E", "normal", 3, "A/D/E"}, + {0} }; SVN_ERR(check_db_rows(&b, "", nodes)); } + /* A/B/C still doesn't exist as X/B/C at op depth 1 */ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db, sbox_wc_path(&b, "A/B/C"), pool, pool)); - SVN_TEST_ASSERT(moved_tos->nelts == 0); + SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B/C")); + SVN_TEST_ASSERT(moved_tos->nelts == 1); + /* A/B doesn't exist exist as X/B and the move to Y can't be tracked in + the current scheme */ SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db, sbox_wc_path(&b, "A/B"), pool, pool)); - SVN_TEST_ASSERT(moved_tos->nelts == 0); + SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B")); + SVN_TEST_ASSERT(moved_tos->nelts == 1); + + SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db, + sbox_wc_path(&b, "A"), pool, pool)); + SVN_ERR(check_moved_to(moved_tos, 0, 1, "X")); + SVN_TEST_ASSERT(moved_tos->nelts == 1); + + + SVN_ERR(sbox_wc_mkdir(&b, "Z")); + SVN_ERR(sbox_wc_commit(&b, "Z")); /* r4 */ 
+ + SVN_ERR(sbox_wc_update(&b, "", 4)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 4, ""}, + {0, "A", "normal", 4, "A"}, + {0, "A/B", "normal", 4, "A/B"}, + {0, "A/B/C", "normal", 4, "A/B/C"}, + {0, "A/D", "normal", 4, "A/D"}, + {0, "A/D/E", "normal", 4, "A/D/E"}, + {1, "A", "base-deleted", NO_COPY_FROM, "X"}, + {1, "A/B", "base-deleted", NO_COPY_FROM}, + {1, "A/B/C", "base-deleted", NO_COPY_FROM}, + {1, "A/D", "base-deleted", NO_COPY_FROM}, + {1, "A/D/E", "base-deleted", NO_COPY_FROM}, + /* X is expanded on update. The not-present nodes are now here */ + {1, "X", "normal", 4, "A", MOVED_HERE}, + {1, "X/B", "normal", 4, "A/B", MOVED_HERE}, + {1, "X/B/C", "normal", 4, "A/B/C", MOVED_HERE}, + {1, "X/D", "normal", 4, "A/D", MOVED_HERE}, + {1, "X/D/E", "normal", 4, "A/D/E", MOVED_HERE}, + {2, "X/D", "normal", 2, "A/D"}, + {2, "X/D/E", "not-present", 3, "A/D/E"}, + {2, "X/Y", "normal", 2, "A/B"}, + {2, "X/Y/C", "not-present", NO_COPY_FROM}, + {3, "X/D/E", "normal", 3, "A/D/E"}, + {3, "X/Y/C", "normal", 3, "A/B/C"}, + + {0, "Z", "normal", 4, "Z"}, + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db, + sbox_wc_path(&b, "A/B/C"), pool, pool)); + SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B/C")); + SVN_TEST_ASSERT(moved_tos->nelts == 1); + + SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db, + sbox_wc_path(&b, "A/B"), pool, pool)); + SVN_ERR(check_moved_to(moved_tos, 0, 1, "X/B")); + SVN_TEST_ASSERT(moved_tos->nelts == 1); SVN_ERR(svn_wc__db_follow_moved_to(&moved_tos, b.wc_ctx->db, sbox_wc_path(&b, "A"), pool, pool)); SVN_ERR(check_moved_to(moved_tos, 0, 1, "X")); SVN_TEST_ASSERT(moved_tos->nelts == 1); + { + conflict_info_t conflicts[] = { + { "X/D", FALSE, FALSE, {0 /* ### Needs fixing */} }, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + return SVN_NO_ERROR; } @@ -5140,8 +5466,8 @@ update_prop_mod_into_moved(const svn_test_opts_t *opts, apr_pool_t *pool) /* r1: 
Create files 'f', 'h' */ SVN_ERR(sbox_wc_mkdir(&b, "A")); SVN_ERR(sbox_wc_mkdir(&b, "A/B")); - sbox_file_write(&b, "A/B/f", "r1 content\n"); - sbox_file_write(&b, "A/B/h", "r1 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\n")); + SVN_ERR(sbox_file_write(&b, "A/B/h", "r1 content\n")); SVN_ERR(sbox_wc_add(&b, "A/B/f")); SVN_ERR(sbox_wc_add(&b, "A/B/h")); SVN_ERR(sbox_wc_propset(&b, "pd", "f1", "A/B/f")); @@ -5152,14 +5478,14 @@ update_prop_mod_into_moved(const svn_test_opts_t *opts, apr_pool_t *pool) /* r2: Modify 'f'. Delete prop 'pd', modify prop 'pm', add prop 'pa', * leave prop 'pn' unchanged. */ - sbox_file_write(&b, "A/B/f", "r1 content\nr2 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/f", "r1 content\nr2 content\n")); SVN_ERR(sbox_wc_propset(&b, "pd", NULL, "A/B/f")); SVN_ERR(sbox_wc_propset(&b, "pm", "f2", "A/B/f")); SVN_ERR(sbox_wc_propset(&b, "pa", "f2", "A/B/f")); SVN_ERR(sbox_wc_commit(&b, "")); /* r3: Delete 'h', add 'g' */ - sbox_file_write(&b, "A/B/g", "r3 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/g", "r3 content\n")); SVN_ERR(sbox_wc_add(&b, "A/B/g")); SVN_ERR(sbox_wc_propset(&b, "p", "g3", "A/B/g")); SVN_ERR(sbox_wc_delete(&b, "A/B/h")); @@ -5222,9 +5548,22 @@ update_prop_mod_into_moved(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(check_db_rows(&b, "", nodes)); } + { + conflict_info_t conflicts[] = { + { "A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + /* Resolve should update the move. 
*/ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, svn_wc_conflict_choose_mine_conflict)); + + SVN_ERR(check_db_conflicts(&b, "", NULL)); + { nodes_row_t nodes[] = { {0, "", "normal", 2, ""}, @@ -5259,12 +5598,12 @@ nested_move_update(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_mkdir(&b, "A")); SVN_ERR(sbox_wc_mkdir(&b, "A/B")); SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); - sbox_file_write(&b, "A/B/C/f", "r1 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\n")); SVN_ERR(sbox_wc_add(&b, "A/B/C/f")); SVN_ERR(sbox_wc_commit(&b, "")); /* r2: Modify 'f' */ - sbox_file_write(&b, "A/B/C/f", "r1 content\nr2 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\nr2 content\n")); SVN_ERR(sbox_wc_commit(&b, "")); /* r3: Create 'X' */ @@ -5301,12 +5640,37 @@ nested_move_update(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_update(&b, "", 2)); + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/B", "normal", 2, "A/B"}, + {0, "A/B/C", "normal", 2, "A/B/C"}, + {0, "A/B/C/f", "normal", 2, "A/B/C/f"}, + {1, "A", "base-deleted", NO_COPY_FROM, "A2"}, + {1, "A/B", "base-deleted", NO_COPY_FROM}, + {1, "A/B/C", "base-deleted", NO_COPY_FROM}, + {1, "A/B/C/f", "base-deleted", NO_COPY_FROM}, + {1, "A2", "normal", 1, "A", MOVED_HERE}, + {1, "A2/B", "normal", 1, "A/B", MOVED_HERE}, + {1, "A2/B/C", "normal", 1, "A/B/C", MOVED_HERE}, + {1, "A2/B/C/f", "normal", 1, "A/B/C/f", MOVED_HERE}, + {3, "A2/B/C", "base-deleted", NO_COPY_FROM, "A2/B/C2"}, + {3, "A2/B/C/f", "base-deleted", NO_COPY_FROM}, + {3, "A2/B/C2", "normal", 1, "A/B/C", MOVED_HERE}, + {3, "A2/B/C2/f", "normal", 1, "A/B/C/f", MOVED_HERE}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + /* Following the A->A2 move should raise a tree-conflict on A2/B/C, resolving that may require an explicit resolve. 
*/ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, svn_wc_conflict_choose_mine_conflict)); SVN_ERR(sbox_wc_resolve(&b, "A2/B/C", svn_depth_empty, svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(check_db_conflicts(&b, "", NULL /* no conflicts */)); { nodes_row_t nodes[] = { {0, "", "normal", 2, ""}, @@ -5333,6 +5697,8 @@ nested_move_update(const svn_test_opts_t *opts, apr_pool_t *pool) /* Update A to r3 brings no changes but updates the revisions. */ SVN_ERR(sbox_wc_update(&b, "A", 3)); + SVN_ERR(check_db_conflicts(&b, "", NULL /* no conflicts */)); + { nodes_row_t nodes[] = { {0, "", "normal", 2, ""}, @@ -5371,7 +5737,7 @@ nested_move_commit(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_mkdir(&b, "A")); SVN_ERR(sbox_wc_mkdir(&b, "A/B")); SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); - sbox_file_write(&b, "A/B/C/f", "r1 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\n")); SVN_ERR(sbox_wc_add(&b, "A/B/C/f")); SVN_ERR(sbox_wc_commit(&b, "")); SVN_ERR(sbox_wc_update(&b, "", 1)); @@ -5615,10 +5981,12 @@ check_tree_conflict_repos_path(svn_test__sandbox_t *b, const apr_array_header_t *locations; svn_boolean_t text_conflicted, prop_conflicted, tree_conflicted; - SVN_ERR(svn_wc__db_read_conflict(&conflict, b->wc_ctx->db, - sbox_wc_path(b, wc_path), + SVN_ERR(svn_wc__db_read_conflict(&conflict, NULL, NULL, + b->wc_ctx->db, sbox_wc_path(b, wc_path), b->pool, b->pool)); + SVN_TEST_ASSERT(conflict != NULL); + SVN_ERR(svn_wc__conflict_read_info(&operation, &locations, &text_conflicted, &prop_conflicted, &tree_conflicted, @@ -5632,7 +6000,9 @@ check_tree_conflict_repos_path(svn_test__sandbox_t *b, svn_wc_conflict_version_t *version = APR_ARRAY_IDX(locations, 0, svn_wc_conflict_version_t *); - SVN_ERR_ASSERT(!strcmp(version->path_in_repos, repos_path1)); + SVN_TEST_ASSERT(version != NULL); + + SVN_TEST_STRING_ASSERT(version->path_in_repos, repos_path1); } if (repos_path2) @@ -5640,7 +6010,9 @@ check_tree_conflict_repos_path(svn_test__sandbox_t *b, 
svn_wc_conflict_version_t *version = APR_ARRAY_IDX(locations, 1, svn_wc_conflict_version_t *); - SVN_ERR_ASSERT(!strcmp(version->path_in_repos, repos_path2)); + SVN_TEST_ASSERT(version != NULL); + + SVN_TEST_STRING_ASSERT(version->path_in_repos, repos_path2); } return SVN_NO_ERROR; @@ -5666,7 +6038,7 @@ move_update_conflicts(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_update(&b, "", 1)); SVN_ERR(sbox_wc_move(&b, "A", "A2")); SVN_ERR(sbox_wc_move(&b, "A2/B/C", "A2/B/C2")); - sbox_file_write(&b, "A2/B/F", "obstruction\n"); + SVN_ERR(sbox_file_write(&b, "A2/B/F", "obstruction\n")); { nodes_row_t nodes[] = { @@ -5744,7 +6116,7 @@ move_update_delete_mods(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_mkdir(&b, "A/B")); SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); SVN_ERR(sbox_wc_mkdir(&b, "A/B/D")); - sbox_file_write(&b, "A/B/C/f", "r1 content\n"); + SVN_ERR(sbox_file_write(&b, "A/B/C/f", "r1 content\n")); SVN_ERR(sbox_wc_add(&b, "A/B/C/f")); SVN_ERR(sbox_wc_commit(&b, "")); SVN_ERR(sbox_wc_delete(&b, "A/B/C")); @@ -5753,7 +6125,7 @@ move_update_delete_mods(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_update(&b, "", 1)); SVN_ERR(sbox_wc_move(&b, "A/B", "B2")); - sbox_file_write(&b, "B2/C/f", "modified content\n"); + SVN_ERR(sbox_file_write(&b, "B2/C/f", "modified content\n")); SVN_ERR(sbox_wc_delete(&b, "B2/D")); { nodes_row_t nodes[] = { @@ -5791,7 +6163,16 @@ move_update_delete_mods(const svn_test_opts_t *opts, apr_pool_t *pool) {2, "B2/C/f", "normal", 1, "A/B/C/f"}, {0} }; + conflict_info_t conflicts[] = { + {"B2/C", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_edited}}, + {"B2/D", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_deleted}}, + { 0 } + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); } SVN_ERR(check_tree_conflict_repos_path(&b, "B2/C", "A/B/C", "A/B/C")); @@ -5949,6 +6330,34 @@ move_in_delete(const svn_test_opts_t 
*opts, apr_pool_t *pool) } SVN_ERR(sbox_wc_update(&b, "", 3)); + { + nodes_row_t nodes[] = { + {0, "", "normal", 3, ""}, + {0, "A", "normal", 3, "A"}, + {0, "A/B", "normal", 3, "A/B"}, + {0, "A/B/C", "normal", 3, "A/B/C"}, + {0, "A/B/C/D", "normal", 3, "A/B/C/D"}, + {0, "A/B/C/D/E", "normal", 3, "A/B/C/D/E"}, + + {1, "C2", "normal", 2, "A/B/C", MOVED_HERE}, + {1, "C2/D", "normal", 2, "A/B/C/D", MOVED_HERE}, + + {2, "A/B", "base-deleted", NO_COPY_FROM}, + {2, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"}, + {2, "A/B/C/D", "base-deleted", NO_COPY_FROM}, + {2, "A/B/C/D/E", "base-deleted", NO_COPY_FROM}, + + {0} + }; + conflict_info_t conflicts[] = { + {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_deleted}}, + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } SVN_ERR(sbox_wc_revert(&b, "A/B", svn_depth_empty)); { nodes_row_t nodes[] = { @@ -5965,7 +6374,14 @@ move_in_delete(const svn_test_opts_t *opts, apr_pool_t *pool) {1, "C2/D", "normal", 2, "A/B/C/D", MOVED_HERE}, {0} }; + conflict_info_t conflicts[] = { + {"A/B/C", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A/B/C"}}, + {0} + }; SVN_ERR(check_db_rows(&b, "", nodes)); + /* Where did this conflict come from? */ + SVN_ERR(check_db_conflicts(&b, "", conflicts)); } /* Revert should have left a tree-conflict (or broken the move). */ @@ -5988,6 +6404,7 @@ move_in_delete(const svn_test_opts_t *opts, apr_pool_t *pool) {0} }; SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); } return SVN_NO_ERROR; @@ -6563,6 +6980,31 @@ commit_moved_descendant(const svn_test_opts_t *opts, apr_pool_t *pool) shadowed, like in this case. 
The commit processing doesn't support this yet though*/ + { + nodes_row_t nodes[] = { + {0, "", "normal", 0, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/A", "normal", 2, "A/A"}, + {0, "A/A/A", "normal", 2, "A/A/A"}, + {0, "A/A/A/A", "normal", 2, "A/A/A/A"}, + {0, "A/A/A/A/A", "normal", 2, "A/A/A/A/A"}, + {0, "A/A/A/A/A/A", "normal", 2, "A/A/A/A/A/A"}, + {0, "A_copied", "normal", 2, "A_copied"}, + {0, "A_copied/A", "normal", 2, "A_copied/A"}, + {0, "A_copied/A/A", "normal", 2, "A_copied/A/A"}, + {0, "A_copied/A/A/A", "normal", 2, "A_copied/A/A/A"}, + {0, "A_copied/A/A/A/A", "normal", 2, "A_copied/A/A/A/A"}, + {0, "A_copied/A/A/A/A/A","normal", 2, "A_copied/A/A/A/A/A"}, + {0, "AAA_moved", "normal", 2, "AAA_moved"}, + {0, "AAA_moved/A", "normal", 2, "AAA_moved/A"}, + {0, "AAA_moved/A/A", "normal", 2, "AAA_moved/A/A"}, + {0, "AAA_moved/A/A/A", "normal", 2, "AAA_moved/A/A/A"}, + + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + return SVN_NO_ERROR; } @@ -6585,10 +7027,63 @@ commit_moved_away_descendant(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_delete(&b, "A/A")); SVN_ERR(sbox_wc_copy(&b, "A_copied/A", "A/A")); + { + nodes_row_t nodes[] = { + {0, "", "normal", 0, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/A", "normal", 1, "A/A"}, + {0, "A/A/A", "normal", 1, "A/A/A"}, + {0, "A/A/A/A", "normal", 1, "A/A/A/A"}, + {0, "A/A/A/A/A", "normal", 1, "A/A/A/A/A"}, + {0, "A/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"}, + {1, "A_copied", "normal", 1, "A"}, + {1, "A_copied/A", "normal", 1, "A/A"}, + {1, "A_copied/A/A", "normal", 1, "A/A/A"}, + {1, "A_copied/A/A/A", "normal", 1, "A/A/A/A"}, + {1, "A_copied/A/A/A/A", "normal", 1, "A/A/A/A/A"}, + {1, "A_copied/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"}, + {1, "AAA_moved", "normal", 1, "A/A/A", MOVED_HERE}, + {1, "AAA_moved/A", "normal", 1, "A/A/A/A", MOVED_HERE}, + {1, "AAA_moved/A/A", "normal", 1, "A/A/A/A/A", MOVED_HERE}, + {1, "AAA_moved/A/A/A", "normal", 1, "A/A/A/A/A/A", MOVED_HERE}, + {2, "A/A", "normal", 1, 
"A/A"}, + {2, "A/A/A", "normal", 1, "A/A/A", FALSE, "AAA_moved"}, + {2, "A/A/A/A", "normal", 1, "A/A/A/A"}, + {2, "A/A/A/A/A", "normal", 1, "A/A/A/A/A"}, + {2, "A/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + /* And now I want to make sure that I can't commit A, without also committing AAA_moved, as that would break the move*/ SVN_ERR(sbox_wc_commit(&b, "A")); + { + nodes_row_t nodes[] = { + {0, "", "normal", 0, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/A", "normal", 2, "A/A"}, + {0, "A/A/A", "normal", 2, "A/A/A"}, + {0, "A/A/A/A", "normal", 2, "A/A/A/A"}, + {0, "A/A/A/A/A", "normal", 2, "A/A/A/A/A"}, + {0, "A/A/A/A/A/A", "normal", 2, "A/A/A/A/A/A"}, + {1, "A_copied", "normal", 1, "A"}, + {1, "A_copied/A", "normal", 1, "A/A"}, + {1, "A_copied/A/A", "normal", 1, "A/A/A"}, + {1, "A_copied/A/A/A", "normal", 1, "A/A/A/A"}, + {1, "A_copied/A/A/A/A", "normal", 1, "A/A/A/A/A"}, + {1, "A_copied/A/A/A/A/A", "normal", 1, "A/A/A/A/A/A"}, + {1, "AAA_moved", "normal", 1, "A/A/A"}, + {1, "AAA_moved/A", "normal", 1, "A/A/A/A"}, + {1, "AAA_moved/A/A", "normal", 1, "A/A/A/A/A"}, + {1, "AAA_moved/A/A/A", "normal", 1, "A/A/A/A/A/A"}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + return svn_error_create(SVN_ERR_TEST_FAILED, NULL, "The commit should have failed"); @@ -6608,7 +7103,7 @@ finite_move_update_bump(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); SVN_ERR(sbox_wc_mkdir(&b, "P")); SVN_ERR(sbox_wc_mkdir(&b, "P/Q")); - sbox_file_write(&b, "P/Q/f", "r1 content\n"); + SVN_ERR(sbox_file_write(&b, "P/Q/f", "r1 content\n")); SVN_ERR(sbox_wc_add(&b, "P/Q/f")); SVN_ERR(sbox_wc_commit(&b, "")); SVN_ERR(sbox_wc_mkdir(&b, "X")); @@ -6645,10 +7140,27 @@ finite_move_update_bump(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_move(&b, "P/Q", "Q2")); SVN_ERR(sbox_wc_update_depth(&b, "A/B", 2, svn_depth_files, FALSE)); SVN_ERR(sbox_wc_update_depth(&b, "P/Q", 2, svn_depth_files, 
FALSE)); - SVN_ERR(check_tree_conflict_repos_path(&b, "A/B", NULL, NULL)); + { + conflict_info_t conflicts[] = { + {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A/B"}}, + {"P/Q", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "P/Q"}}, + {0} + }; + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + + SVN_ERR(check_tree_conflict_repos_path(&b, "A/B", "A/B", "A/B")); + SVN_ERR(check_tree_conflict_repos_path(&b, "P/Q", "P/Q", "P/Q")); err = sbox_wc_resolve(&b, "A/B", svn_depth_empty, svn_wc_conflict_choose_mine_conflict); SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE); + + /* sbox_wc_resolve() obtains a lock on the target path, so now it + will apply the change on the target */ + SVN_ERR(sbox_wc_resolve(&b, "P/Q", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); { nodes_row_t nodes[] = { {0, "", "normal", 1, ""}, @@ -6677,10 +7189,23 @@ finite_move_update_bump(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_move(&b, "P", "P2")); SVN_ERR(sbox_wc_update_depth(&b, "A/B", 2, svn_depth_immediates, FALSE)); SVN_ERR(sbox_wc_update_depth(&b, "P", 2, svn_depth_immediates, FALSE)); - SVN_ERR(check_tree_conflict_repos_path(&b, "P", NULL, NULL)); + { + conflict_info_t conflicts[] = { + {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A/B"}}, + {"P", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "P"}}, + {0} + }; + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(check_tree_conflict_repos_path(&b, "P", "P", "P")); + SVN_ERR(check_tree_conflict_repos_path(&b, "A/B", "A/B", "A/B")); err = sbox_wc_resolve(&b, "P", svn_depth_empty, svn_wc_conflict_choose_mine_conflict); SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE); + SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); { nodes_row_t nodes[] = { {0, "", "normal", 1, 
""}, @@ -6711,10 +7236,24 @@ finite_move_update_bump(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_move(&b, "P/Q", "Q2")); SVN_ERR(sbox_wc_update_depth(&b, "A/B/C", 2, svn_depth_empty, FALSE)); SVN_ERR(sbox_wc_update_depth(&b, "P/Q", 2, svn_depth_empty, FALSE)); - SVN_ERR(check_tree_conflict_repos_path(&b, "P/Q", NULL, NULL)); + { + conflict_info_t conflicts[] = { + {"A/B/C", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A/B/C"}}, + {"P/Q", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "P/Q"}}, + + {0} + }; + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(check_tree_conflict_repos_path(&b, "A/B/C", "A/B/C", "A/B/C")); + SVN_ERR(check_tree_conflict_repos_path(&b, "P/Q", "P/Q", "P/Q")); err = sbox_wc_resolve(&b, "P/Q", svn_depth_empty, svn_wc_conflict_choose_mine_conflict); SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CONFLICT_RESOLVER_FAILURE); + SVN_ERR(sbox_wc_resolve(&b, "A/B/C", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); { nodes_row_t nodes[] = { {0, "", "normal", 1, ""}, @@ -6767,11 +7306,23 @@ move_away_delete_update(const svn_test_opts_t *opts, apr_pool_t *pool) {0, "", "normal", 2, ""}, {0, "A", "normal", 2, "A"}, {0, "P", "normal", 2, "P"}, - {1, "C2", "normal", 1, "A/B/C"}, + {1, "C2", "normal", 1, "A/B/C", MOVED_HERE}, {1, "Q2", "normal", 1, "P/Q"}, + + {2, "A/B", "normal", 1, "A/B"}, + {2, "A/B/C", "normal", 1, "A/B/C"}, + {3, "A/B/C", "base-deleted", NO_COPY_FROM, "C2"}, + {0} + }; + conflict_info_t conflicts[] = { + {"A/B", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_edited}}, + {"P/Q", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_moved_away, "P/Q"}}, {0} }; SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); } return SVN_NO_ERROR; @@ -7092,7 +7643,7 @@ movedto_opdepth(const svn_test_opts_t *opts, apr_pool_t *pool) { svn_test__sandbox_t b; - 
SVN_ERR(svn_test__sandbox_create(&b, "moved_to_op_depth", + SVN_ERR(svn_test__sandbox_create(&b, "movedto_opdepth", opts, pool)); SVN_ERR(sbox_wc_mkdir(&b, "A")); @@ -7731,6 +8282,42 @@ move_depth_expand(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_update_depth(&b, "", 1, svn_depth_infinity, TRUE)); + /* And now verify that there are no not-present nodes left and a + consistent working copy */ + { + nodes_row_t nodes[] = { + {0, "", "normal", 1, "" }, + + {0, "A", "normal", 1, "A" }, + {0, "A/A", "normal", 1, "A/A" }, + {0, "A/A/A", "normal", 1, "A/A/A" }, + {0, "A/A/A/A", "normal", 1, "A/A/A/A" }, + {0, "A/B", "normal", 1, "A/B" }, + {0, "A/B/A", "normal", 1, "A/B/A" }, + {0, "A/B/A/A", "normal", 1, "A/B/A/A" }, + + {1, "A", "base-deleted", NO_COPY_FROM, "C" }, + {1, "A/A", "base-deleted", NO_COPY_FROM }, + {1, "A/A/A", "base-deleted", NO_COPY_FROM }, + {1, "A/B", "base-deleted", NO_COPY_FROM }, + {1, "A/B/A", "base-deleted", NO_COPY_FROM }, + {1, "A/B/A/A", "base-deleted", NO_COPY_FROM }, + {1, "A/A/A/A", "base-deleted", NO_COPY_FROM }, + + {1, "C", "normal", 1, "A", MOVED_HERE }, + {1, "C/A", "normal", 1, "A/A", MOVED_HERE }, + {1, "C/B", "not-present", 0, "A/B", MOVED_HERE}, + + {2, "C/B", "normal", 1, "A/A" }, + + {3, "C/A/A", "normal", NO_COPY_FROM }, + {3, "C/B/A", "normal", NO_COPY_FROM }, + + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + /* This used to cause a segfault. 
Then it asserted in a different place */ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, svn_wc_conflict_choose_mine_conflict)); @@ -7762,10 +8349,6 @@ move_depth_expand(const svn_test_opts_t *opts, apr_pool_t *pool) {1, "C/A", "normal", 1, "A/A", MOVED_HERE }, {1, "C/A/A", "normal", 1, "A/A/A", MOVED_HERE }, {1, "C/A/A/A", "normal", 1, "A/A/A/A", MOVED_HERE }, - - {3, "C/A/A", "normal", NO_COPY_FROM }, - {3, "C/A/A/A", "base-deleted", NO_COPY_FROM }, - {1, "C/B", "normal", 1, "A/B", MOVED_HERE }, {1, "C/B/A", "normal", 1, "A/B/A", MOVED_HERE }, {1, "C/B/A/A", "normal", 1, "A/B/A/A", MOVED_HERE }, @@ -7774,6 +8357,8 @@ move_depth_expand(const svn_test_opts_t *opts, apr_pool_t *pool) {2, "C/B/A", "base-deleted", NO_COPY_FROM }, {2, "C/B/A/A", "base-deleted", NO_COPY_FROM }, + {3, "C/A/A", "normal", NO_COPY_FROM }, + {3, "C/A/A/A", "base-deleted", NO_COPY_FROM }, {3, "C/B/A", "normal", NO_COPY_FROM }, {0} @@ -7878,7 +8463,15 @@ move_retract(const svn_test_opts_t *opts, apr_pool_t *pool) {0} }; + conflict_info_t conflicts[] = { + {"A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A/A"}}, + {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + { 0 }, + }; SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); } @@ -7909,9 +8502,21 @@ move_retract(const svn_test_opts_t *opts, apr_pool_t *pool) /* Still conflicted */ {1, "D", "normal", 1, "A/B/A/D", MOVED_HERE }, + {4, "A/B/A/C", "normal", 1, "A/A/A/C"}, + + + {0} + }; + conflict_info_t conflicts[] = { + {"A/B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"A/B/A/C", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_edited}}, {0} }; + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); } /* ### TODO: Resolve via which specific target? 
*/ @@ -7921,7 +8526,7 @@ move_retract(const svn_test_opts_t *opts, apr_pool_t *pool) { nodes_row_t nodes[] = { - {1, "D", "normal", 2, "A/B/A/D", MOVED_HERE }, + {1, "D", "normal", 1, "A/B/A/D", MOVED_HERE }, {0} }; @@ -7941,7 +8546,7 @@ move_delete_file_externals(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_mkdir(&b, "A")); SVN_ERR(sbox_wc_mkdir(&b, "A/B")); - sbox_file_write(&b, "f", "New file"); + SVN_ERR(sbox_file_write(&b, "f", "New file")); SVN_ERR(sbox_wc_add(&b, "f")); SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f B/P/g", "A")); SVN_ERR(sbox_wc_propset(&b, "svn:externals", "^/f Q/g\n^/f g", "A/B")); @@ -8158,6 +8763,128 @@ update_with_tree_conflict(const svn_test_opts_t *opts, apr_pool_t *pool) } static svn_error_t * +move_update_parent_replace(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "move_update_parent_replace", opts, + pool)); + + SVN_ERR(sbox_wc_mkdir(&b, "A")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_delete(&b, "A/B")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_move(&b, "A/B/C", "A/C")); + + /* Update breaks the move and leaves a conflict. 
*/ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/B", "normal", 2, "A/B"}, + + {2, "A/C", "normal", 1, "A/B/C", MOVED_HERE}, + + {2, "A/B", "normal", 1, "A/B"}, + {2, "A/B/C", "normal", 1, "A/B/C", FALSE}, + + {3, "A/B/C", "base-deleted", NO_COPY_FROM, "A/C"}, + + {0} + }; + conflict_info_t conflicts[] = { + {"A/B", FALSE, FALSE, {svn_wc_conflict_action_replace, + svn_wc_conflict_reason_edited}}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + + SVN_ERR(sbox_wc_resolve(&b, "A/B", svn_depth_infinity, + svn_wc_conflict_choose_merged)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/B", "normal", 2, "A/B"}, + {2, "A/C", "normal", 1, "A/B/C", MOVED_HERE}, + {2, "A/B", "normal", 1, "A/B"}, + {2, "A/B/C", "normal", 1, "A/B/C", FALSE}, + {3, "A/B/C", "base-deleted", NO_COPY_FROM, "A/C"}, + + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +copy_mixed_rev_mods(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "copy_mixed_rev_mods", opts, + pool)); + + SVN_ERR(sbox_wc_mkdir(&b, "A")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_update(&b, "A/B", 2)); + SVN_ERR(sbox_wc_delete(&b, "A/B")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 1, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/B", "normal", 2, "A/B"}, + {0, "A/B/C", "normal", 2, "A/B/C"}, + {2, "A/B", "normal", NO_COPY_FROM}, + {2, "A/B/C", "base-deleted", NO_COPY_FROM}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(sbox_wc_copy(&b, "A", 
"X")); + { + nodes_row_t nodes[] = { + {1, "X", "normal", 1, "A"}, + {1, "X/B", "not-present", 2, "A/B"}, + {2, "X/B", "normal", NO_COPY_FROM}, + {0} + }; + SVN_ERR(check_db_rows(&b, "X", nodes)); + } + + SVN_ERR(sbox_wc_commit(&b, "X")); + { + nodes_row_t nodes[] = { + {0, "X", "normal", 3, "X"}, + {0, "X/B", "normal", 3, "X/B"}, + {0} + }; + SVN_ERR(check_db_rows(&b, "X", nodes)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * move_child_to_parent_revert(const svn_test_opts_t *opts, apr_pool_t *pool) { svn_test__sandbox_t b; @@ -8493,61 +9220,6 @@ move_revert_intermediate(const svn_test_opts_t *opts, apr_pool_t *pool) } static svn_error_t * -copy_mixed_rev_mods(const svn_test_opts_t *opts, apr_pool_t *pool) -{ - svn_test__sandbox_t b; - - SVN_ERR(svn_test__sandbox_create(&b, "copy_mixed_rev_mods", opts, - pool)); - - SVN_ERR(sbox_wc_mkdir(&b, "A")); - SVN_ERR(sbox_wc_mkdir(&b, "A/B")); - SVN_ERR(sbox_wc_commit(&b, "")); - SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); - SVN_ERR(sbox_wc_commit(&b, "")); - SVN_ERR(sbox_wc_update(&b, "", 1)); - SVN_ERR(sbox_wc_update(&b, "A/B", 2)); - SVN_ERR(sbox_wc_delete(&b, "A/B")); - SVN_ERR(sbox_wc_mkdir(&b, "A/B")); - - { - nodes_row_t nodes[] = { - {0, "", "normal", 1, ""}, - {0, "A", "normal", 1, "A"}, - {0, "A/B", "normal", 2, "A/B"}, - {0, "A/B/C", "normal", 2, "A/B/C"}, - {2, "A/B", "normal", NO_COPY_FROM}, - {2, "A/B/C", "base-deleted", NO_COPY_FROM}, - {0} - }; - SVN_ERR(check_db_rows(&b, "", nodes)); - } - - SVN_ERR(sbox_wc_copy(&b, "A", "X")); - { - nodes_row_t nodes[] = { - {1, "X", "normal", 1, "A"}, - {1, "X/B", "not-present", 2, "A/B"}, - {2, "X/B", "normal", NO_COPY_FROM}, - {0} - }; - SVN_ERR(check_db_rows(&b, "X", nodes)); - } - - SVN_ERR(sbox_wc_commit(&b, "X")); - { - nodes_row_t nodes[] = { - {0, "X", "normal", 3, "X"}, - {0, "X/B", "normal", 3, "X/B"}, - {0} - }; - SVN_ERR(check_db_rows(&b, "X", nodes)); - } - - return SVN_NO_ERROR; -} - -static svn_error_t * move_replace_ancestor_with_child(const 
svn_test_opts_t *opts, apr_pool_t *pool) { svn_test__sandbox_t b; @@ -8650,15 +9322,15 @@ move_twice_within_delete(const svn_test_opts_t *opts, apr_pool_t *pool) nodes_row_t nodes[] = { { 0, "", "normal", 1, "" }, - + { 0, "A", "normal", 1, "A" }, { 0, "A/A", "normal", 1, "A/A" }, { 0, "A/A/A", "normal", 1, "A/A/A" }, - + { 1, "A", "base-deleted", NO_COPY_FROM, "B/A" }, { 1, "A/A", "base-deleted", NO_COPY_FROM }, { 1, "A/A/A", "base-deleted", NO_COPY_FROM }, - + { 1, "AA", "normal", 1, "A/A/A", MOVED_HERE }, { 1, "B", "normal", NO_COPY_FROM }, @@ -8703,73 +9375,1160 @@ move_twice_within_delete(const svn_test_opts_t *opts, apr_pool_t *pool) return SVN_NO_ERROR; } +/* Helper function for 4 move4 tests */ static svn_error_t * -repo_wc_copy(const svn_test_opts_t *opts, apr_pool_t *pool) +init_move4(svn_test__sandbox_t *sandbox, + const char *test_name, + const svn_test_opts_t *opts, + svn_boolean_t move_away, + apr_pool_t *pool) +{ + SVN_ERR(svn_test__sandbox_create(sandbox, test_name, opts, pool)); + + SVN_ERR(sbox_wc_mkdir(sandbox, "A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "A/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "A/A/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "A/A/A/A")); + + SVN_ERR(sbox_wc_mkdir(sandbox, "B")); + SVN_ERR(sbox_wc_mkdir(sandbox, "B/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "B/A/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "B/A/A/A")); + + SVN_ERR(sbox_wc_mkdir(sandbox, "C")); + SVN_ERR(sbox_wc_mkdir(sandbox, "C/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "C/A/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "C/A/A/A")); + + SVN_ERR(sbox_wc_mkdir(sandbox, "D")); + SVN_ERR(sbox_wc_mkdir(sandbox, "D/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "D/A/A")); + SVN_ERR(sbox_wc_mkdir(sandbox, "D/A/A/A")); + + SVN_ERR(sbox_wc_commit(sandbox, "")); /* r1 */ + + if (strstr(test_name, "_edit_")) + { + SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "A/A/A")); + SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "B/A/A")); + SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "C/A/A")); + 
SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "D/A/A")); + SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "A/A/A/A")); + SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "B/A/A/A")); + SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "C/A/A/A")); + SVN_ERR(sbox_wc_propset(sandbox, "key", "value", "D/A/A/A")); + } + else if (strstr(test_name, "_delete_")) + { + SVN_ERR(sbox_wc_delete(sandbox, "A/A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "B/A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "C/A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "D/A/A/A")); + } + else if (strstr(test_name, "_add_")) + { + SVN_ERR(sbox_wc_mkdir(sandbox, "A/A/A/NEW")); + SVN_ERR(sbox_wc_mkdir(sandbox, "B/A/A/NEW")); + SVN_ERR(sbox_wc_mkdir(sandbox, "C/A/A/NEW")); + SVN_ERR(sbox_wc_mkdir(sandbox, "D/A/A/NEW")); + } + else if (strstr(test_name, "_replace_")) + { + SVN_ERR(sbox_wc_delete(sandbox, "A/A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "B/A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "C/A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "D/A/A/A")); + SVN_ERR(sbox_file_write(sandbox, "A/A/A/A", "A")); + SVN_ERR(sbox_file_write(sandbox, "B/A/A/A", "A")); + SVN_ERR(sbox_file_write(sandbox, "C/A/A/A", "A")); + SVN_ERR(sbox_file_write(sandbox, "D/A/A/A", "A")); + SVN_ERR(sbox_wc_add(sandbox, "A/A/A/A")); + SVN_ERR(sbox_wc_add(sandbox, "B/A/A/A")); + SVN_ERR(sbox_wc_add(sandbox, "C/A/A/A")); + SVN_ERR(sbox_wc_add(sandbox, "D/A/A/A")); + } + else if (strstr(test_name, "_delself_")) + { + SVN_ERR(sbox_wc_delete(sandbox, "A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "B/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "C/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "D/A/A")); + } + else if (strstr(test_name, "_replaceself_")) + { + SVN_ERR(sbox_wc_delete(sandbox, "A/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "B/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "C/A/A")); + SVN_ERR(sbox_wc_delete(sandbox, "D/A/A")); + SVN_ERR(sbox_file_write(sandbox, "A/A/A", "A")); + SVN_ERR(sbox_file_write(sandbox, "B/A/A", "A")); + 
SVN_ERR(sbox_file_write(sandbox, "C/A/A", "A")); + SVN_ERR(sbox_file_write(sandbox, "D/A/A", "A")); + SVN_ERR(sbox_wc_add(sandbox, "A/A/A")); + SVN_ERR(sbox_wc_add(sandbox, "B/A/A")); + SVN_ERR(sbox_wc_add(sandbox, "C/A/A")); + SVN_ERR(sbox_wc_add(sandbox, "D/A/A")); + } + + SVN_ERR(sbox_wc_commit(sandbox, "")); + SVN_ERR(sbox_wc_update(sandbox, "", 1)); + + SVN_ERR(sbox_wc_move(sandbox, "A/A/A", "AAA_1")); + + if (move_away) + SVN_ERR(sbox_wc_move(sandbox, "A", "A_moved")); + else + SVN_ERR(sbox_wc_delete(sandbox, "A")); + + SVN_ERR(sbox_wc_move(sandbox, "B", "A")); + + SVN_ERR(sbox_wc_move(sandbox, "A/A/A", "AAA_2")); + + if (move_away) + SVN_ERR(sbox_wc_move(sandbox, "A/A", "BA_moved")); + else + SVN_ERR(sbox_wc_delete(sandbox, "A/A")); + + SVN_ERR(sbox_wc_move(sandbox, "C/A", "A/A")); + + SVN_ERR(sbox_wc_move(sandbox, "A/A/A", "AAA_3")); + + SVN_ERR(sbox_wc_move(sandbox, "D/A/A", "A/A/A")); + + return SVN_NO_ERROR; +} + +static svn_error_t * +del4_update_edit_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) { svn_test__sandbox_t b; - const char *repos_dir; - const char *new_repos_dir; - const char *new_repos_url; - SVN_ERR(svn_test__sandbox_create(&b, "repo_wc_copy", - opts, pool)); - SVN_ERR(sbox_add_and_commit_greek_tree(&b)); + SVN_ERR(init_move4(&b, "del4_update_edit_AAA", opts, FALSE, pool)); - SVN_ERR(sbox_wc_copy_url(&b, - svn_path_url_add_component2(b.repos_url, "A/B", - pool), - -1, "AA")); + { + nodes_row_t nodes[] = { + + { 0, "A/A/A", "normal", 1, "A/A/A" }, + { 1, "A/A/A", "normal", 1, "B/A/A", FALSE, "AAA_1", TRUE }, + { 2, "A/A/A", "normal", 1, "C/A/A", FALSE, "AAA_2", TRUE }, + { 3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3", TRUE }, + + { 0, "A/A/A/A", "normal", 1, "A/A/A/A" }, + { 1, "A/A/A/A", "normal", 1, "B/A/A/A", FALSE, NULL, TRUE }, + { 2, "A/A/A/A", "normal", 1, "C/A/A/A", FALSE, NULL, TRUE }, + { 3, "A/A/A/A", "normal", 1, "D/A/A/A", FALSE, NULL, TRUE }, + + { 0 }, + }; + + SVN_ERR(check_db_rows(&b, "A/A/A", nodes)); + } + + /* 
Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/A", "normal", 2, "A/A"}, + {0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key"}, + {0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key"}, + {0, "B", "normal", 2, "B"}, + {0, "B/A", "normal", 2, "B/A"}, + {0, "B/A/A", "normal", 2, "B/A/A", NOT_MOVED, "key"}, + {0, "B/A/A/A", "normal", 2, "B/A/A/A", NOT_MOVED, "key"}, + {0, "C", "normal", 2, "C"}, + {0, "C/A", "normal", 2, "C/A"}, + {0, "C/A/A", "normal", 2, "C/A/A", NOT_MOVED, "key"}, + {0, "C/A/A/A", "normal", 2, "C/A/A/A", NOT_MOVED, "key"}, + {0, "D", "normal", 2, "D"}, + {0, "D/A", "normal", 2, "D/A"}, + {0, "D/A/A", "normal", 2, "D/A/A", NOT_MOVED, "key"}, + {0, "D/A/A/A", "normal", 2, "D/A/A/A", NOT_MOVED, "key"}, + + {1, "A", "normal", 2, "B", MOVED_HERE}, + {1, "A/A", "normal", 2, "B/A", MOVED_HERE}, + {1, "A/A/A", "normal", 2, "B/A/A", FALSE, "AAA_1", TRUE, "key"}, + {1, "A/A/A/A", "normal", 2, "B/A/A/A", FALSE, NULL, TRUE, "key"}, + {1, "AAA_1", "normal", 2, "A/A/A", MOVED_HERE, "key"}, + {1, "AAA_1/A", "normal", 2, "A/A/A/A", MOVED_HERE, "key"}, + {1, "AAA_2", "normal", 2, "B/A/A", MOVED_HERE, "key"}, + {1, "AAA_2/A", "normal", 2, "B/A/A/A", MOVED_HERE, "key"}, + {1, "AAA_3", "normal", 2, "C/A/A", MOVED_HERE, "key"}, + {1, "AAA_3/A", "normal", 2, "C/A/A/A", MOVED_HERE, "key"}, + {1, "B", "base-deleted", NO_COPY_FROM, "A"}, + {1, "B/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A/A", "base-deleted", NO_COPY_FROM}, + + {2, "A/A", "normal", 2, "C/A", MOVED_HERE}, + {2, "A/A/A", "normal", 2, "C/A/A", FALSE, "AAA_2", TRUE, "key"}, + {2, "A/A/A/A", "normal", 2, "C/A/A/A", FALSE, NULL, TRUE, "key"}, + {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"}, + {2, "C/A/A", "base-deleted", NO_COPY_FROM}, + 
{2, "C/A/A/A", "base-deleted", NO_COPY_FROM}, + + {3, "A/A/A", "normal", 2, "D/A/A", FALSE, "AAA_3", TRUE, "key"}, + {3, "A/A/A/A", "normal", 2, "D/A/A/A", FALSE, NULL, TRUE, "key"}, + {3, "D/A/A", "base-deleted", NO_COPY_FROM, "A/A/A"}, + {3, "D/A/A/A", "base-deleted", NO_COPY_FROM}, + + { 0 }, + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); + } + + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 1, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/A", "normal", 1, "A/A"}, + {0, "A/A/A", "normal", 1, "A/A/A", NOT_MOVED}, + {0, "A/A/A/A", "normal", 1, "A/A/A/A", NOT_MOVED}, + {0, "B", "normal", 1, "B"}, + {0, "B/A", "normal", 1, "B/A"}, + {0, "B/A/A", "normal", 1, "B/A/A", NOT_MOVED}, + {0, "B/A/A/A", "normal", 1, "B/A/A/A", NOT_MOVED}, + {0, "C", "normal", 1, "C"}, + {0, "C/A", "normal", 1, "C/A"}, + {0, "C/A/A", "normal", 1, "C/A/A", NOT_MOVED}, + {0, "C/A/A/A", "normal", 1, "C/A/A/A", NOT_MOVED}, + {0, "D", "normal", 1, "D"}, + {0, "D/A", "normal", 1, "D/A"}, + {0, "D/A/A", "normal", 1, "D/A/A", NOT_MOVED}, + {0, "D/A/A/A", "normal", 1, "D/A/A/A", NOT_MOVED}, + + {1, "A", "normal", 1, "B", MOVED_HERE}, + {1, "A/A", "normal", 1, "B/A", MOVED_HERE}, + {1, "A/A/A", "normal", 1, "B/A/A", FALSE, "AAA_1", TRUE}, + {1, "A/A/A/A", "normal", 1, "B/A/A/A", FALSE, NULL, TRUE}, + {1, "AAA_1", "normal", 1, "A/A/A", MOVED_HERE}, + {1, "AAA_1/A", "normal", 1, "A/A/A/A", MOVED_HERE}, + {1, "AAA_2", "normal", 1, "B/A/A", MOVED_HERE}, + {1, "AAA_2/A", "normal", 1, "B/A/A/A", MOVED_HERE}, + {1, "AAA_3", "normal", 1, "C/A/A", MOVED_HERE}, + {1, "AAA_3/A", "normal", 1, "C/A/A/A", MOVED_HERE}, + {1, "B", "base-deleted", NO_COPY_FROM, "A"}, + {1, "B/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A/A", "base-deleted", NO_COPY_FROM}, + + 
{2, "A/A", "normal", 1, "C/A", MOVED_HERE}, + {2, "A/A/A", "normal", 1, "C/A/A", FALSE, "AAA_2", TRUE}, + {2, "A/A/A/A", "normal", 1, "C/A/A/A", FALSE, NULL, TRUE}, + {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"}, + {2, "C/A/A", "base-deleted", NO_COPY_FROM}, + {2, "C/A/A/A", "base-deleted", NO_COPY_FROM}, + + {3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3", TRUE}, + {3, "A/A/A/A", "normal", 1, "D/A/A/A", FALSE, NULL, TRUE}, + {3, "D/A/A", "base-deleted", NO_COPY_FROM, "A/A/A"}, + {3, "D/A/A/A", "base-deleted", NO_COPY_FROM}, + + { 0 }, + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); + } + + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0}, + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + /* This breaks the move A/A/A -> AAA_1 */ + SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, svn_wc_conflict_choose_merged)); + /* This breaks the move B -> A */ + SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty, svn_wc_conflict_choose_merged)); + /* This breaks the move C/A/A -> A/A */ + SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty, svn_wc_conflict_choose_merged)); + /* This breaks the move from D/A/A -> A/A/A */ + SVN_ERR(sbox_wc_resolve(&b, "D/A/A", svn_depth_empty, svn_wc_conflict_choose_merged)); { nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/A", "normal", 2, "A/A"}, + {0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key"}, + {0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key"}, + {0, "B", "normal", 2, 
"B"}, + {0, "B/A", "normal", 2, "B/A"}, + {0, "B/A/A", "normal", 2, "B/A/A", NOT_MOVED, "key"}, + {0, "B/A/A/A", "normal", 2, "B/A/A/A", NOT_MOVED, "key"}, + {0, "C", "normal", 2, "C"}, + {0, "C/A", "normal", 2, "C/A"}, + {0, "C/A/A", "normal", 2, "C/A/A", NOT_MOVED, "key"}, + {0, "C/A/A/A", "normal", 2, "C/A/A/A", NOT_MOVED, "key"}, + {0, "D", "normal", 2, "D"}, + {0, "D/A", "normal", 2, "D/A"}, + {0, "D/A/A", "normal", 2, "D/A/A", NOT_MOVED, "key"}, + {0, "D/A/A/A", "normal", 2, "D/A/A/A", NOT_MOVED, "key"}, + {1, "A", "normal", 1, "B"}, + {1, "A/A", "normal", 1, "B/A"}, + {1, "A/A/A", "normal", 1, "B/A/A", FALSE}, + {1, "A/A/A/A", "normal", 1, "B/A/A/A"}, + {1, "AAA_1", "normal", 1, "A/A/A"}, + {1, "AAA_1/A", "normal", 1, "A/A/A/A"}, + {1, "AAA_2", "normal", 1, "B/A/A", MOVED_HERE}, + {1, "AAA_2/A", "normal", 1, "B/A/A/A", MOVED_HERE}, + {1, "AAA_3", "normal", 1, "C/A/A", MOVED_HERE}, + {1, "AAA_3/A", "normal", 1, "C/A/A/A", MOVED_HERE}, + {1, "B", "base-deleted", NO_COPY_FROM}, + {1, "B/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A/A", "base-deleted", NO_COPY_FROM}, + {2, "A/A", "normal", 1, "C/A"}, + {2, "A/A/A", "normal", 1, "C/A/A", FALSE, "AAA_2"}, + {2, "A/A/A/A", "normal", 1, "C/A/A/A"}, + {2, "C/A", "base-deleted", NO_COPY_FROM}, + {2, "C/A/A", "base-deleted", NO_COPY_FROM}, + {2, "C/A/A/A", "base-deleted", NO_COPY_FROM}, + {3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3"}, + {3, "A/A/A/A", "normal", 1, "D/A/A/A"}, + {3, "D/A/A", "base-deleted", NO_COPY_FROM}, + {3, "D/A/A/A", "base-deleted", NO_COPY_FROM}, - {1, "AA/lambda", "normal", 1, "A/B/lambda"}, - {1, "AA", "normal", 1, "A/B"}, - {1, "AA/E/beta", "normal", 1, "A/B/E/beta"}, - {1, "AA/E/alpha", "normal", 1, "A/B/E/alpha"}, - {1, "AA/F", "normal", 1, "A/B/F"}, - {1, "AA/E", "normal", 1, "A/B/E"}, + { 0 }, + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + + return SVN_NO_ERROR; +} + +static svn_error_t * +del4_update_delete_AAA(const 
svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "del4_update_delete_AAA", opts, FALSE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + 
SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +del4_update_add_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "del4_update_add_AAA", opts, FALSE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", 
FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +del4_update_replace_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "del4_update_replace_AAA", opts, FALSE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + 
conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +del4_update_delself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "del4_update_delself_AAA", opts, FALSE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + + + /* Resolve a few conflicts manually */ + SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/A", "normal", 2, "A/A"}, + {0, "B", "normal", 2, "B"}, + {0, "B/A", "normal", 2, "B/A"}, + {0, "C", "normal", 2, "C"}, + {0, "C/A", "normal", 2, "C/A"}, + {0, "D", "normal", 2, "D"}, + 
{0, "D/A", "normal", 2, "D/A"}, + {1, "A", "normal", 2, "B", MOVED_HERE}, + {1, "A/A", "normal", 2, "B/A", MOVED_HERE}, + {1, "AAA_1", "normal", 1, "A/A/A"}, + {1, "AAA_1/A", "normal", 1, "A/A/A/A"}, + {1, "AAA_2", "normal", 1, "B/A/A"}, + {1, "AAA_2/A", "normal", 1, "B/A/A/A"}, + {1, "AAA_3", "normal", 1, "C/A/A"}, + {1, "AAA_3/A", "normal", 1, "C/A/A/A"}, + {1, "B", "base-deleted", NO_COPY_FROM, "A"}, + {1, "B/A", "base-deleted", NO_COPY_FROM}, + {2, "A/A", "normal", 2, "C/A", MOVED_HERE}, + {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"}, + {3, "A/A/A", "normal", 1, "D/A/A"}, + {3, "A/A/A/A", "normal", 1, "D/A/A/A"}, { 0 }, }; - SVN_ERR(check_db_rows(&b, "AA", nodes)); + + SVN_ERR(check_db_rows(&b, "", nodes)); } + { + conflict_info_t conflicts[] = { + /* Not resolved yet */ + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, - SVN_ERR(svn_uri_get_dirent_from_file_url(&repos_dir, b.repos_url, - pool)); - new_repos_dir = apr_pstrcat(pool, repos_dir, "-2", SVN_VA_NULL); - new_repos_url = apr_pstrcat(pool, b.repos_url, "-2", SVN_VA_NULL); + /* New */ + {"A/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_deleted}}, + {"A/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_moved_away, "A/A/A"}}, - svn_test_add_dir_cleanup(new_repos_dir); + {0} + }; - SVN_ERR(svn_io_remove_dir2(new_repos_dir, TRUE, NULL, NULL, pool)); - SVN_ERR(svn_io_copy_dir_recursively(repos_dir, - svn_dirent_dirname(new_repos_dir, pool), - svn_dirent_basename(new_repos_dir, pool), - FALSE, NULL, NULL, pool)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } - SVN_ERR(sbox_wc_relocate(&b, new_repos_url)); + /* These can only be resolved to merged, as the merge is already broken + (because the move source is gone): incoming delete on moved_away */ + SVN_ERR(sbox_wc_resolve(&b, "D/A/A", svn_depth_empty, + svn_wc_conflict_choose_merged)); + SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty, 
+ svn_wc_conflict_choose_merged)); - /* This produced an invalid copy in Subversion <= 1.8.8. - Status would show all descendants as incomplete */ - SVN_ERR(sbox_wc_copy_url(&b, - svn_path_url_add_component2(b.repos_url, "A/B", - pool), - -1, "BB")); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + { + conflict_info_t conflicts[] = { + {"A/A", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_deleted}}, + {"A/A/A", FALSE, FALSE, { svn_wc_conflict_action_add, + svn_wc_conflict_reason_added}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty, + svn_wc_conflict_choose_merged)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, + svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +del4_update_replaceself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "del4_update_replaceself_AAA", opts, FALSE, pool)); + + /* Update and resolve 
via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + + +static svn_error_t * +move4_update_edit_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "move4_update_edit_AAA", opts, TRUE, pool)); + + { + nodes_row_t nodes[] = { + + { 0, "A/A/A", "normal", 1, "A/A/A" }, + { 1, "A/A/A", "normal", 1, "B/A/A", FALSE, NULL /*"AAA_1"*/, TRUE }, + { 2, "A/A/A", "normal", 1, "C/A/A", FALSE, NULL /*"AAA_2"*/, TRUE }, + { 3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3", TRUE }, + + { 0, "A/A/A/A", "normal", 1, "A/A/A/A" }, + { 1, "A/A/A/A", "normal", 1, "B/A/A/A", FALSE, NULL, TRUE }, + { 2, "A/A/A/A", "normal", 1, "C/A/A/A", FALSE, NULL, TRUE }, + { 3, "A/A/A/A", "normal", 1, "D/A/A/A", FALSE, NULL, TRUE }, + + + { 0 }, + }; + + SVN_ERR(check_db_rows(&b, "A/A/A", nodes)); + } + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + + { 0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key" }, + { 1, "A/A/A", "normal", 2, "B/A/A", FALSE, NULL /*"AAA_1"*/, TRUE, "key" }, + { 2, "A/A/A", "normal", 2, "C/A/A", FALSE, NULL /*"AAA_2"*/, TRUE, "key" }, + { 3, "A/A/A", "normal", 2, "D/A/A", FALSE, "AAA_3", TRUE, "key" }, + + { 0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key" }, + { 1, "A/A/A/A", "normal", 2, "B/A/A/A", FALSE, NULL, TRUE, "key" }, + { 2, "A/A/A/A", "normal", 2, "C/A/A/A", FALSE, NULL, TRUE, "key" }, + 
{ 3, "A/A/A/A", "normal", 2, "D/A/A/A", FALSE, NULL, TRUE, "key" }, + + { 0 }, + }; + SVN_ERR(check_db_rows(&b, "A/A/A", nodes)); + } + + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + { + nodes_row_t nodes[] = { + + { 0, "A/A/A", "normal", 2, "A/A/A", NOT_MOVED, "key" }, + { 1, "A/A/A", "normal", 1, "B/A/A" }, + { 2, "A/A/A", "normal", 1, "C/A/A" }, + { 3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3"}, + + { 0, "A/A/A/A", "normal", 2, "A/A/A/A", NOT_MOVED, "key" }, + { 1, "A/A/A/A", "normal", 1, "B/A/A/A" }, + { 2, "A/A/A/A", "normal", 1, "C/A/A/A" }, + { 3, "A/A/A/A", "normal", 1, "D/A/A/A" }, + + { 0 }, + }; + + SVN_ERR(check_db_rows(&b, "A/A/A", nodes)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +move4_update_delete_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "move4_update_delete_AAA", opts, TRUE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +move4_update_add_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "move4_update_add_AAA", opts, TRUE, pool)); + + /* Update and resolve via mine strategy */ + 
SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +move4_update_replace_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "move4_update_replace_AAA", opts, TRUE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +move4_update_delself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "move4_update_delself_AAA", opts, TRUE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + + { + nodes_row_t nodes[] = { + + {1, "A_moved", "normal", 1, "A", MOVED_HERE}, + {1, "A_moved/A", "normal", 1, "A/A", MOVED_HERE}, + {1, "A_moved/A/A", "normal", 1, "A/A/A", MOVED_HERE}, + {3, "A_moved/A/A", "base-deleted", NO_COPY_FROM, "AAA_1"}, + {1, "A_moved/A/A/A", "normal", 1, "A/A/A/A", MOVED_HERE}, + {3, "A_moved/A/A/A", "base-deleted", NO_COPY_FROM}, + + { 0 }, + }; + SVN_ERR(check_db_rows(&b, "A_moved", nodes)); + } + + /* Resolve a few conflicts manually */ 
+ SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/A", "normal", 2, "A/A"}, + {0, "B", "normal", 2, "B"}, + {0, "B/A", "normal", 2, "B/A"}, + {0, "C", "normal", 2, "C"}, + {0, "C/A", "normal", 2, "C/A"}, + {0, "D", "normal", 2, "D"}, + {0, "D/A", "normal", 2, "D/A"}, + {1, "A", "normal", 1, "B", FALSE, "A_moved", TRUE}, + {1, "A/A", "normal", 1, "B/A", MOVED_HERE}, + {1, "A/A/A", "normal", 1, "B/A/A", MOVED_HERE}, + {1, "A/A/A/A", "normal", 1, "B/A/A/A", MOVED_HERE}, + {1, "AAA_1", "normal", 1, "A/A/A"}, + {1, "AAA_1/A", "normal", 1, "A/A/A/A"}, + {1, "AAA_2", "normal", 1, "B/A/A", MOVED_HERE}, + {1, "AAA_2/A", "normal", 1, "B/A/A/A", MOVED_HERE}, + {1, "AAA_3", "normal", 1, "C/A/A", MOVED_HERE}, + {1, "AAA_3/A", "normal", 1, "C/A/A/A", MOVED_HERE}, + {1, "A_moved", "normal", 2, "A", MOVED_HERE}, + {1, "A_moved/A", "normal", 2, "A/A", MOVED_HERE}, + {1, "B", "base-deleted", NO_COPY_FROM, "A"}, + {1, "B/A", "base-deleted", NO_COPY_FROM}, + {1, "BA_moved", "normal", 1, "B/A", MOVED_HERE}, + {1, "BA_moved/A", "normal", 1, "B/A/A", MOVED_HERE}, + {1, "BA_moved/A/A", "normal", 1, "B/A/A/A", MOVED_HERE}, + {2, "A/A", "normal", 1, "C/A", FALSE, "BA_moved", TRUE}, + {2, "A/A/A", "normal", 1, "C/A/A", MOVED_HERE}, + {2, "A/A/A/A", "normal", 1, "C/A/A/A", MOVED_HERE}, + {2, "BA_moved/A", "base-deleted", NO_COPY_FROM, "AAA_2"}, + {2, "BA_moved/A/A", "base-deleted", NO_COPY_FROM}, + {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"}, + {3, "A/A/A", "normal", 1, "D/A/A", FALSE, "AAA_3"}, + {3, "A/A/A/A", "normal", 1, "D/A/A/A"}, + + { 0 }, + }; + conflict_info_t conflicts[] = { + {"A_moved/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_moved_away, "A_moved/A/A"}}, + {"B", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, {svn_wc_conflict_action_edit, 
+ svn_wc_conflict_reason_moved_away, "C/A"}}, + {"D/A/A", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_moved_away, "D/A/A"}}, + + { 0 }, + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + + SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + + + /* ### These can currently only be resolved to merged ???? */ + SVN_ERR(sbox_wc_resolve(&b, "D/A/A", svn_depth_empty, + svn_wc_conflict_choose_merged)); + SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty, + svn_wc_conflict_choose_merged)); + SVN_ERR(sbox_wc_resolve(&b, "A_moved/A/A", svn_depth_empty, + svn_wc_conflict_choose_merged)); + SVN_ERR(sbox_wc_resolve(&b, "A/A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "BA_moved/A", svn_depth_empty, + svn_wc_conflict_choose_merged)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + { + nodes_row_t nodes[] = { + {0, "", "normal", 1, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/A", "normal", 1, "A/A"}, + {0, "A/A/A", "normal", 1, "A/A/A"}, + {0, "A/A/A/A", "normal", 1, "A/A/A/A"}, + {0, "B", "normal", 1, "B"}, + {0, "B/A", "normal", 1, "B/A"}, + {0, "B/A/A", "normal", 1, "B/A/A"}, + {0, "B/A/A/A", "normal", 1, "B/A/A/A"}, + {0, "C", "normal", 1, "C"}, + {0, "C/A", "normal", 1, "C/A"}, + {0, "C/A/A", "normal", 1, "C/A/A"}, + {0, "C/A/A/A", "normal", 1, "C/A/A/A"}, + {0, "D", "normal", 1, "D"}, + {0, "D/A", "normal", 1, "D/A"}, + {0, "D/A/A", "normal", 1, "D/A/A"}, + {0, "D/A/A/A", "normal", 1, "D/A/A/A"}, + {1, "A", "normal", 2, "B", FALSE, "A_moved", TRUE}, + {1, "A/A", "normal", 2, "B/A", MOVED_HERE}, + {1, "A/A/A", "base-deleted", NO_COPY_FROM}, + {1, "A/A/A/A", "base-deleted", NO_COPY_FROM}, + {1, "A_moved", "normal", 2, "A", MOVED_HERE}, + {1, 
"A_moved/A", "normal", 2, "A/A", MOVED_HERE}, + {1, "AAA_1", "normal", 1, "A/A/A"}, + {1, "AAA_1/A", "normal", 1, "A/A/A/A"}, + {1, "AAA_2", "normal", 1, "B/A/A"}, + {1, "AAA_2/A", "normal", 1, "B/A/A/A"}, + {1, "AAA_3", "normal", 1, "C/A/A"}, + {1, "AAA_3/A", "normal", 1, "C/A/A/A"}, + {1, "B", "base-deleted", NO_COPY_FROM, "A"}, + {1, "B/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A", "base-deleted", NO_COPY_FROM}, + {1, "B/A/A/A", "base-deleted", NO_COPY_FROM}, + {1, "BA_moved", "normal", 2, "B/A", MOVED_HERE}, + {2, "A/A", "normal", 2, "C/A", FALSE, "BA_moved", TRUE}, + {2, "C/A", "base-deleted", NO_COPY_FROM, "A/A"}, + {2, "C/A/A", "base-deleted", NO_COPY_FROM}, + {2, "C/A/A/A", "base-deleted", NO_COPY_FROM}, + {3, "A/A/A", "normal", 1, "D/A/A"}, + {3, "A/A/A/A", "normal", 1, "D/A/A/A"}, + + { 0 }, + }; + + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_replaced}}, + {"B", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B"}}, + {"C/A", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "C/A"}}, + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + + SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "B", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "C/A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + { + conflict_info_t conflicts[] = { + {"A/A", FALSE, FALSE, { svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A/A"}}, + {"A/A/A", FALSE, FALSE, { svn_wc_conflict_action_add, + svn_wc_conflict_reason_added}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + SVN_ERR(sbox_wc_resolve(&b, "A/A/A", svn_depth_empty, + svn_wc_conflict_choose_merged)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, 
svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +move4_update_replaceself_AAA(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(init_move4(&b, "move4_update_replaceself_AAA", opts, TRUE, pool)); + + /* Update and resolve via mine strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Go back to start position */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_mine_conflict)); + /* Update and resolve via their strategy */ + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, svn_wc_conflict_choose_merged)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +simple_move_bump(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "simple_move_bump", opts, pool)); + + SVN_ERR(sbox_wc_mkdir(&b, "A")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + + SVN_ERR(sbox_wc_propset(&b, "old_A", "val", "A")); + SVN_ERR(sbox_wc_propset(&b, "old_B", "val", "A/B")); + + SVN_ERR(sbox_wc_commit(&b, "")); + + SVN_ERR(sbox_wc_propset(&b, "new_A", "val", "A")); + SVN_ERR(sbox_wc_propset(&b, "new_B", "val", "A/B")); + + SVN_ERR(sbox_wc_commit(&b, "")); + + SVN_ERR(sbox_wc_update(&b, "", 1)); + + SVN_ERR(sbox_wc_move(&b, "A/B", "A/B_mv")); + SVN_ERR(sbox_wc_move(&b, "A", "A_mv")); { nodes_row_t nodes[] = { - {1, "BB/lambda", "normal", 1, "A/B/lambda"}, - {1, "BB", "normal", 1, "A/B"}, - {1, "BB/E/beta", "normal", 1, "A/B/E/beta"}, - {1, "BB/E/alpha", "normal", 1, "A/B/E/alpha"}, - {1, "BB/F", "normal", 1, "A/B/F"}, - {1, "BB/E", "normal", 1, "A/B/E"}, + { 0, "", "normal", 1, ""}, + { 0, "A", 
"normal", 1, "A", NOT_MOVED, "old_A"}, + { 0, "A/B", "normal", 1, "A/B", NOT_MOVED, "old_B"}, + + { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"}, + { 1, "A/B", "base-deleted", NO_COPY_FROM}, + + { 1, "A_mv", "normal", 1, "A", MOVED_HERE, "old_A" }, + { 1, "A_mv/B", "normal", 1, "A/B", MOVED_HERE, "old_B" }, + + { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" }, + { 2, "A_mv/B_mv", "normal", 1, "A/B", FALSE, NULL, TRUE, "old_B" }, { 0 }, }; - SVN_ERR(check_db_rows(&b, "BB", nodes)); + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(sbox_wc_update(&b, "", 2)); + + /* Expect the A tree to be updated */ + { + nodes_row_t nodes[] = { + + { 0, "", "normal", 2, ""}, + { 0, "A", "normal", 2, "A", NOT_MOVED, "new_A,old_A"}, + { 0, "A/B", "normal", 2, "A/B", NOT_MOVED, "new_B,old_B"}, + + { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"}, + { 1, "A/B", "base-deleted", NO_COPY_FROM}, + + { 1, "A_mv", "normal", 1, "A", MOVED_HERE, "old_A" }, + { 1, "A_mv/B", "normal", 1, "A/B", MOVED_HERE, "old_B" }, + + { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" }, + { 2, "A_mv/B_mv", "normal", 1, "A/B", FALSE, NULL, TRUE, "old_B" }, + + { 0 }, + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + + { 0, "", "normal", 2, ""}, + { 0, "A", "normal", 2, "A", NOT_MOVED, "new_A,old_A"}, + { 0, "A/B", "normal", 2, "A/B", NOT_MOVED, "new_B,old_B"}, + + { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"}, + { 1, "A/B", "base-deleted", NO_COPY_FROM}, + + { 1, "A_mv", "normal", 2, "A", MOVED_HERE, "new_A,old_A" }, + { 1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE, "new_B,old_B" }, + + { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" }, + { 2, "A_mv/B_mv", "normal", 1, "A/B", FALSE, NULL, TRUE, "old_B" }, + + { 0 }, + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(sbox_wc_resolve(&b, "A_mv/B", svn_depth_empty, + 
svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + + { 0, "", "normal", 2, ""}, + { 0, "A", "normal", 2, "A", NOT_MOVED, "new_A,old_A"}, + { 0, "A/B", "normal", 2, "A/B", NOT_MOVED, "new_B,old_B"}, + + { 1, "A", "base-deleted", NO_COPY_FROM, "A_mv"}, + { 1, "A/B", "base-deleted", NO_COPY_FROM}, + + { 1, "A_mv", "normal", 2, "A", MOVED_HERE, "new_A,old_A" }, + { 1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE, "new_B,old_B" }, + + { 2, "A_mv/B", "base-deleted", NO_COPY_FROM, "A_mv/B_mv" }, + { 2, "A_mv/B_mv", "normal", 2, "A/B", FALSE, NULL, TRUE, "new_B,old_B" }, + + { 0 }, + }; + SVN_ERR(check_db_rows(&b, "", nodes)); } return SVN_NO_ERROR; @@ -8826,8 +10585,35 @@ movedhere_extract_retract(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_mkdir(&b, "Z/E2")); SVN_ERR(sbox_wc_update(&b, "", 2)); + { + conflict_info_t conflicts[] = { + {"A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "A"}}, + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, svn_wc_conflict_choose_mine_conflict)); + { + conflict_info_t conflicts[] = { + {"Z/B1", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_deleted}}, + {"Z/B2", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "Z/B2"}}, + {"Z/C1", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_deleted}}, + {"Z/C2", FALSE, FALSE, {svn_wc_conflict_action_delete, + svn_wc_conflict_reason_moved_away, "Z/C2"}}, + {"Z/E2", FALSE, FALSE, {svn_wc_conflict_action_add, + svn_wc_conflict_reason_added}}, + + {0} + }; + + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } SVN_ERR(sbox_wc_resolve(&b, "Z/B1", svn_depth_empty, svn_wc_conflict_choose_mine_conflict)); SVN_ERR(sbox_wc_resolve(&b, "Z/B2", svn_depth_empty, @@ -8838,8 +10624,10 @@ movedhere_extract_retract(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(sbox_wc_resolve(&b, "Z/C2", 
svn_depth_empty, svn_wc_conflict_choose_merged)); - SVN_ERR(sbox_wc_resolve(&b, "", svn_depth_infinity, - svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(sbox_wc_resolve(&b, "Z/E2", svn_depth_empty, + svn_wc_conflict_choose_merged)); + + SVN_ERR(check_db_conflicts(&b, "", NULL)); { nodes_row_t nodes[] = { @@ -8894,6 +10682,168 @@ movedhere_extract_retract(const svn_test_opts_t *opts, apr_pool_t *pool) { 0 }, }; SVN_ERR(check_db_rows(&b, "", nodes)); + + SVN_ERR(check_db_conflicts(&b, "", NULL)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +repo_wc_copy(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + const char *new_repos_dir; + const char *new_repos_url; + + SVN_ERR(svn_test__sandbox_create(&b, "repo_wc_copy", + opts, pool)); + SVN_ERR(sbox_add_and_commit_greek_tree(&b)); + + SVN_ERR(sbox_wc_copy_url(&b, + svn_path_url_add_component2(b.repos_url, "A/B", + pool), + -1, "AA")); + + { + nodes_row_t nodes[] = { + + {1, "AA/lambda", "normal", 1, "A/B/lambda"}, + {1, "AA", "normal", 1, "A/B"}, + {1, "AA/E/beta", "normal", 1, "A/B/E/beta"}, + {1, "AA/E/alpha", "normal", 1, "A/B/E/alpha"}, + {1, "AA/F", "normal", 1, "A/B/F"}, + {1, "AA/E", "normal", 1, "A/B/E"}, + + { 0 }, + }; + SVN_ERR(check_db_rows(&b, "AA", nodes)); + } + + new_repos_dir = apr_pstrcat(pool, b.repos_dir, "-2", SVN_VA_NULL); + new_repos_url = apr_pstrcat(pool, b.repos_url, "-2", SVN_VA_NULL); + + svn_test_add_dir_cleanup(new_repos_dir); + + SVN_ERR(svn_io_remove_dir2(new_repos_dir, TRUE, NULL, NULL, pool)); + SVN_ERR(svn_io_copy_dir_recursively(b.repos_dir, + svn_dirent_dirname(new_repos_dir, pool), + svn_dirent_basename(new_repos_dir, pool), + FALSE, NULL, NULL, pool)); + + SVN_ERR(sbox_wc_relocate(&b, new_repos_url)); + + /* This produced an invalid copy in Subversion <= 1.8.8. 
+ Status would show all descendants as incomplete */ + SVN_ERR(sbox_wc_copy_url(&b, + svn_path_url_add_component2(b.repos_url, "A/B", + pool), + -1, "BB")); + + { + nodes_row_t nodes[] = { + + {1, "BB/lambda", "normal", 1, "A/B/lambda"}, + {1, "BB", "normal", 1, "A/B"}, + {1, "BB/E/beta", "normal", 1, "A/B/E/beta"}, + {1, "BB/E/alpha", "normal", 1, "A/B/E/alpha"}, + {1, "BB/F", "normal", 1, "A/B/F"}, + {1, "BB/E", "normal", 1, "A/B/E"}, + + { 0 }, + }; + SVN_ERR(check_db_rows(&b, "BB", nodes)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +break_move_in_delete(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "break_move_in_delete", opts, pool)); + + SVN_ERR(sbox_wc_mkdir(&b, "A")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + SVN_ERR(sbox_wc_mkdir(&b, "X")); + SVN_ERR(sbox_wc_mkdir(&b, "X/Y")); + SVN_ERR(sbox_wc_mkdir(&b, "X/Y/Z")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "key", "value", "X/Y/Z")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_update(&b, "", 1)); + + SVN_ERR(sbox_wc_move(&b, "X/Y/Z", "A/Z")); + SVN_ERR(sbox_wc_delete(&b, "X")); + { + nodes_row_t nodes[] = { + {0, "", "normal", 1, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/B", "normal", 1, "A/B"}, + {0, "X", "normal", 1, "X"}, + {0, "X/Y", "normal", 1, "X/Y"}, + {0, "X/Y/Z", "normal", 1, "X/Y/Z"}, + {1, "X", "base-deleted", NO_COPY_FROM}, + {1, "X/Y", "base-deleted", NO_COPY_FROM}, + {1, "X/Y/Z", "base-deleted", NO_COPY_FROM, "A/Z"}, + {2, "A/Z", "normal", 1, "X/Y/Z", MOVED_HERE}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(sbox_wc_update(&b, "", 2)); + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/B", "normal", 2, "A/B"}, + {0, "X", "normal", 2, "X"}, + {0, "X/Y", "normal", 2, "X/Y"}, + {0, "X/Y/Z", "normal", 2, "X/Y/Z"}, + {1, "X", "base-deleted", NO_COPY_FROM}, + {1, "X/Y", "base-deleted", NO_COPY_FROM}, + {1, "X/Y/Z", 
"base-deleted", NO_COPY_FROM, "A/Z"}, + {2, "A/Z", "normal", 1, "X/Y/Z", MOVED_HERE}, + {0} + }; + conflict_info_t conflicts1[] = { + {"X", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_deleted}}, + {0} + }; + conflict_info_t conflicts2[] = { + {"X/Y/Z", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "X"}}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts1)); + SVN_ERR(sbox_wc_resolve(&b, "X", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts2)); + } + + SVN_ERR(sbox_wc_resolved(&b, "X/Y/Z")); + { + nodes_row_t nodes[] = { + {0, "", "normal", 2, ""}, + {0, "A", "normal", 2, "A"}, + {0, "A/B", "normal", 2, "A/B"}, + {0, "X", "normal", 2, "X"}, + {0, "X/Y", "normal", 2, "X/Y"}, + {0, "X/Y/Z", "normal", 2, "X/Y/Z"}, + {1, "X", "base-deleted", NO_COPY_FROM}, + {1, "X/Y", "base-deleted", NO_COPY_FROM}, + {1, "X/Y/Z", "base-deleted", NO_COPY_FROM}, + {2, "A/Z", "normal", 1, "X/Y/Z"}, + {0} + }; + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); } return SVN_NO_ERROR; @@ -9021,10 +10971,911 @@ nested_move_delete(const svn_test_opts_t *opts, apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +move_within_mixed_move(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "move_within_mixed_move", opts, pool)); + + SVN_ERR(sbox_add_and_commit_greek_tree(&b)); + + SVN_ERR(sbox_wc_delete(&b, "iota")); + SVN_ERR(sbox_wc_commit(&b, "")); + + /* Make A mixed revision */ + SVN_ERR(sbox_wc_update(&b, "A/B/E", 2)); + + /* Single rev moves.. 
ok */ + SVN_ERR(sbox_wc_move(&b, "A/D", "A/D_mv")); + SVN_ERR(sbox_wc_move(&b, "A/C", "C_mv")); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 0, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/B", "normal", 1, "A/B"}, + {0, "A/B/E", "normal", 2, "A/B/E"}, + {0, "A/B/E/alpha", "normal", 2, "A/B/E/alpha"}, + {0, "A/B/E/beta", "normal", 2, "A/B/E/beta"}, + {0, "A/B/F", "normal", 1, "A/B/F"}, + {0, "A/B/lambda", "normal", 1, "A/B/lambda"}, + {0, "A/C", "normal", 1, "A/C"}, + {0, "A/D", "normal", 1, "A/D"}, + {0, "A/D/G", "normal", 1, "A/D/G"}, + {0, "A/D/G/pi", "normal", 1, "A/D/G/pi"}, + {0, "A/D/G/rho", "normal", 1, "A/D/G/rho"}, + {0, "A/D/G/tau", "normal", 1, "A/D/G/tau"}, + {0, "A/D/gamma", "normal", 1, "A/D/gamma"}, + {0, "A/D/H", "normal", 1, "A/D/H"}, + {0, "A/D/H/chi", "normal", 1, "A/D/H/chi"}, + {0, "A/D/H/omega", "normal", 1, "A/D/H/omega"}, + {0, "A/D/H/psi", "normal", 1, "A/D/H/psi"}, + {0, "A/mu", "normal", 1, "A/mu"}, + {0, "iota", "not-present", 2, "iota"}, + {1, "C_mv", "normal", 1, "A/C", MOVED_HERE}, + {2, "A/C", "base-deleted", NO_COPY_FROM, "C_mv"}, + {2, "A/D", "base-deleted", NO_COPY_FROM, "A/D_mv"}, + {2, "A/D/G", "base-deleted", NO_COPY_FROM}, + {2, "A/D/G/pi", "base-deleted", NO_COPY_FROM}, + {2, "A/D/G/rho", "base-deleted", NO_COPY_FROM}, + {2, "A/D/G/tau", "base-deleted", NO_COPY_FROM}, + {2, "A/D/gamma", "base-deleted", NO_COPY_FROM}, + {2, "A/D/H", "base-deleted", NO_COPY_FROM}, + {2, "A/D/H/chi", "base-deleted", NO_COPY_FROM}, + {2, "A/D/H/omega", "base-deleted", NO_COPY_FROM}, + {2, "A/D/H/psi", "base-deleted", NO_COPY_FROM}, + {2, "A/D_mv", "normal", 1, "A/D", MOVED_HERE}, + {2, "A/D_mv/G", "normal", 1, "A/D/G", MOVED_HERE}, + {2, "A/D_mv/G/pi", "normal", 1, "A/D/G/pi", MOVED_HERE}, + {2, "A/D_mv/G/rho", "normal", 1, "A/D/G/rho", MOVED_HERE}, + {2, "A/D_mv/G/tau", "normal", 1, "A/D/G/tau", MOVED_HERE}, + {2, "A/D_mv/gamma", "normal", 1, "A/D/gamma", MOVED_HERE}, + {2, "A/D_mv/H", "normal", 1, "A/D/H", MOVED_HERE}, + {2, 
"A/D_mv/H/chi", "normal", 1, "A/D/H/chi", MOVED_HERE}, + {2, "A/D_mv/H/omega", "normal", 1, "A/D/H/omega", MOVED_HERE}, + {2, "A/D_mv/H/psi", "normal", 1, "A/D/H/psi", MOVED_HERE}, + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + /* Mixed rev move... breaks recordings "A/D" -> "A/D_mv" */ + SVN_ERR(sbox_wc_move(&b, "A", "A_mv")); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 0, ""}, + {0, "A", "normal", 1, "A"}, + {0, "A/B", "normal", 1, "A/B"}, + {0, "A/B/E", "normal", 2, "A/B/E"}, + {0, "A/B/E/alpha", "normal", 2, "A/B/E/alpha"}, + {0, "A/B/E/beta", "normal", 2, "A/B/E/beta"}, + {0, "A/B/F", "normal", 1, "A/B/F"}, + {0, "A/B/lambda", "normal", 1, "A/B/lambda"}, + {0, "A/C", "normal", 1, "A/C"}, + {0, "A/D", "normal", 1, "A/D"}, + {0, "A/D/G", "normal", 1, "A/D/G"}, + {0, "A/D/G/pi", "normal", 1, "A/D/G/pi"}, + {0, "A/D/G/rho", "normal", 1, "A/D/G/rho"}, + {0, "A/D/G/tau", "normal", 1, "A/D/G/tau"}, + {0, "A/D/gamma", "normal", 1, "A/D/gamma"}, + {0, "A/D/H", "normal", 1, "A/D/H"}, + {0, "A/D/H/chi", "normal", 1, "A/D/H/chi"}, + {0, "A/D/H/omega", "normal", 1, "A/D/H/omega"}, + {0, "A/D/H/psi", "normal", 1, "A/D/H/psi"}, + {0, "A/mu", "normal", 1, "A/mu"}, + {0, "iota", "not-present", 2, "iota"}, + {1, "A", "base-deleted", NO_COPY_FROM }, + {1, "A/B", "base-deleted", NO_COPY_FROM }, + {1, "A/B/E", "base-deleted", NO_COPY_FROM }, + {1, "A/B/E/alpha", "base-deleted", NO_COPY_FROM }, + {1, "A/B/E/beta", "base-deleted", NO_COPY_FROM }, + {1, "A/B/F", "base-deleted", NO_COPY_FROM }, + {1, "A/B/lambda", "base-deleted", NO_COPY_FROM }, + {1, "A/C", "base-deleted", NO_COPY_FROM, "C_mv"}, + {1, "A/D", "base-deleted", NO_COPY_FROM, "A/D_mv" }, + {1, "A/D/G", "base-deleted", NO_COPY_FROM }, + {1, "A/D/G/pi", "base-deleted", NO_COPY_FROM }, + {1, "A/D/G/rho", "base-deleted", NO_COPY_FROM }, + {1, "A/D/G/tau", "base-deleted", NO_COPY_FROM }, + {1, "A/D/gamma", "base-deleted", NO_COPY_FROM }, + {1, "A/D/H", "base-deleted", NO_COPY_FROM }, + {1, "A/D/H/chi", 
"base-deleted", NO_COPY_FROM }, + {1, "A/D/H/omega", "base-deleted", NO_COPY_FROM }, + {1, "A/D/H/psi", "base-deleted", NO_COPY_FROM }, + {1, "A/mu", "base-deleted", NO_COPY_FROM }, + {1, "A_mv", "normal", 1, "A"}, + {1, "A_mv/B", "normal", 1, "A/B"}, + {1, "A_mv/B/E", "not-present", 2, "A/B/E"}, + {1, "A_mv/B/F", "normal", 1, "A/B/F"}, + {1, "A_mv/B/lambda", "normal", 1, "A/B/lambda"}, + {1, "A_mv/C", "normal", 1, "A/C"}, + {1, "A_mv/D", "normal", 1, "A/D"}, + {1, "A_mv/D/G", "normal", 1, "A/D/G"}, + {1, "A_mv/D/G/pi", "normal", 1, "A/D/G/pi"}, + {1, "A_mv/D/G/rho", "normal", 1, "A/D/G/rho"}, + {1, "A_mv/D/G/tau", "normal", 1, "A/D/G/tau"}, + {1, "A_mv/D/gamma", "normal", 1, "A/D/gamma"}, + {1, "A_mv/D/H", "normal", 1, "A/D/H"}, + {1, "A_mv/D/H/chi", "normal", 1, "A/D/H/chi"}, + {1, "A_mv/D/H/omega", "normal", 1, "A/D/H/omega"}, + {1, "A_mv/D/H/psi", "normal", 1, "A/D/H/psi"}, + {1, "A_mv/mu", "normal", 1, "A/mu"}, + {1, "C_mv", "normal", 1, "A/C", MOVED_HERE}, + {2, "A_mv/C", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/G", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/G/pi", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/G/rho", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/G/tau", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/gamma", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/H", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/H/chi", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/H/omega", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D/H/psi", "base-deleted", NO_COPY_FROM }, + {2, "A_mv/D_mv", "normal", 1, "A/D", MOVED_HERE}, + {2, "A_mv/D_mv/G", "normal", 1, "A/D/G", MOVED_HERE}, + {2, "A_mv/D_mv/G/pi", "normal", 1, "A/D/G/pi", MOVED_HERE}, + {2, "A_mv/D_mv/G/rho", "normal", 1, "A/D/G/rho", MOVED_HERE}, + {2, "A_mv/D_mv/G/tau", "normal", 1, "A/D/G/tau", MOVED_HERE}, + {2, "A_mv/D_mv/gamma", "normal", 1, "A/D/gamma", MOVED_HERE}, + {2, "A_mv/D_mv/H", "normal", 1, "A/D/H", MOVED_HERE}, + {2, "A_mv/D_mv/H/chi", 
"normal", 1, "A/D/H/chi", MOVED_HERE}, + {2, "A_mv/D_mv/H/omega","normal", 1, "A/D/H/omega", MOVED_HERE}, + {2, "A_mv/D_mv/H/psi", "normal", 1, "A/D/H/psi", MOVED_HERE}, + {3, "A_mv/B/E", "normal", 2, "A/B/E"}, + {3, "A_mv/B/E/alpha", "normal", 2, "A/B/E/alpha"}, + {3, "A_mv/B/E/beta", "normal", 2, "A/B/E/beta"}, + + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +move_edit_obstruction(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "move_edit_obstruction", opts, pool)); + + SVN_ERR(sbox_add_and_commit_greek_tree(&b)); + + SVN_ERR(sbox_file_write(&b, "A/B/E/alpha", "Update alpha")); + SVN_ERR(sbox_wc_propset(&b, "a", "b", "A/B/F")); + SVN_ERR(sbox_wc_commit(&b, "")); /* r2 */ + + SVN_ERR(sbox_wc_update(&b, "", 1)); + + /* Simple move */ + SVN_ERR(sbox_wc_move(&b, "A", "A_mv")); + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + {1, "A_mv", "normal", 2, "A", MOVED_HERE}, + {1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE}, + {1, "A_mv/B/E", "normal", 2, "A/B/E", MOVED_HERE}, + {1, "A_mv/B/E/alpha", "normal", 2, "A/B/E/alpha", MOVED_HERE}, + {1, "A_mv/B/E/beta", "normal", 2, "A/B/E/beta", MOVED_HERE}, + {1, "A_mv/B/F", "normal", 2, "A/B/F", MOVED_HERE, "a"}, + {1, "A_mv/B/lambda", "normal", 2, "A/B/lambda", MOVED_HERE}, + {1, "A_mv/C", "normal", 2, "A/C", MOVED_HERE}, + {1, "A_mv/D", "normal", 2, "A/D", MOVED_HERE}, + {1, "A_mv/D/G", "normal", 2, "A/D/G", MOVED_HERE}, + {1, "A_mv/D/G/pi", "normal", 2, "A/D/G/pi", MOVED_HERE}, + {1, "A_mv/D/G/rho", "normal", 2, "A/D/G/rho", MOVED_HERE}, + {1, "A_mv/D/G/tau", "normal", 2, "A/D/G/tau", MOVED_HERE}, + {1, "A_mv/D/gamma", "normal", 2, "A/D/gamma", MOVED_HERE}, + {1, "A_mv/D/H", "normal", 2, "A/D/H", MOVED_HERE}, + {1, "A_mv/D/H/chi", "normal", 2, "A/D/H/chi", MOVED_HERE}, + {1, 
"A_mv/D/H/omega", "normal", 2, "A/D/H/omega", MOVED_HERE}, + {1, "A_mv/D/H/psi", "normal", 2, "A/D/H/psi", MOVED_HERE}, + {1, "A_mv/mu", "normal", 2, "A/mu", MOVED_HERE}, + {0} + }; + + SVN_ERR(check_db_rows(&b, "A_mv", nodes)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); + } + + /* Now do the same thing with local obstructions on the edited nodes */ + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_revert(&b, "", svn_depth_infinity)); + SVN_ERR(sbox_wc_move(&b, "A", "A_mv")); + + SVN_ERR(svn_io_remove_file2(sbox_wc_path(&b, "A_mv/B/E/alpha"), FALSE, pool)); + SVN_ERR(svn_io_dir_make(sbox_wc_path(&b, "A_mv/B/E/alpha"), APR_OS_DEFAULT, + pool)); + SVN_ERR(svn_io_dir_remove_nonrecursive(sbox_wc_path(&b, "A_mv/B/F"), pool)); + SVN_ERR(sbox_file_write(&b, "A_mv/B/F", "F file")); + + SVN_ERR(sbox_wc_update(&b, "", 2)); + SVN_ERR(sbox_wc_resolve(&b, "A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + + { + nodes_row_t nodes[] = { + {1, "A_mv", "normal", 2, "A", MOVED_HERE}, + {1, "A_mv/B", "normal", 2, "A/B", MOVED_HERE}, + {1, "A_mv/B/E", "normal", 2, "A/B/E", MOVED_HERE}, + {1, "A_mv/B/E/alpha", "normal", 2, "A/B/E/alpha", MOVED_HERE}, + {1, "A_mv/B/E/beta", "normal", 2, "A/B/E/beta", MOVED_HERE}, + {1, "A_mv/B/F", "normal", 2, "A/B/F", MOVED_HERE, "a"}, + {1, "A_mv/B/lambda", "normal", 2, "A/B/lambda", MOVED_HERE}, + {1, "A_mv/C", "normal", 2, "A/C", MOVED_HERE}, + {1, "A_mv/D", "normal", 2, "A/D", MOVED_HERE}, + {1, "A_mv/D/G", "normal", 2, "A/D/G", MOVED_HERE}, + {1, "A_mv/D/G/pi", "normal", 2, "A/D/G/pi", MOVED_HERE}, + {1, "A_mv/D/G/rho", "normal", 2, "A/D/G/rho", MOVED_HERE}, + {1, "A_mv/D/G/tau", "normal", 2, "A/D/G/tau", MOVED_HERE}, + {1, "A_mv/D/gamma", "normal", 2, "A/D/gamma", MOVED_HERE}, + {1, "A_mv/D/H", "normal", 2, "A/D/H", MOVED_HERE}, + {1, "A_mv/D/H/chi", "normal", 2, "A/D/H/chi", MOVED_HERE}, + {1, "A_mv/D/H/omega", "normal", 2, "A/D/H/omega", MOVED_HERE}, + {1, "A_mv/D/H/psi", "normal", 2, "A/D/H/psi", MOVED_HERE}, + {1, 
"A_mv/mu", "normal", 2, "A/mu", MOVED_HERE}, + {0} + }; + conflict_info_t conflicts[] = { + {"A_mv/B/E/alpha", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_obstructed}}, + {"A_mv/B/F", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_obstructed}}, + {0} + }; + + SVN_ERR(check_db_rows(&b, "A_mv", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +move_deep_bump(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "move_deep_bump", opts, pool)); + + SVN_ERR(sbox_wc_mkdir(&b, "B")); + SVN_ERR(sbox_wc_mkdir(&b, "B/B")); + SVN_ERR(sbox_wc_mkdir(&b, "B/B/A")); + SVN_ERR(sbox_wc_mkdir(&b, "B/B/A/A")); + SVN_ERR(sbox_wc_mkdir(&b, "B/B/A/A/A")); + SVN_ERR(sbox_wc_mkdir(&b, "C")); + SVN_ERR(sbox_wc_mkdir(&b, "C/C")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_mkdir(&b, "Z")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_mkdir(&b, "B/B/A/A/A/A")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_update(&b, "", 1)); + + SVN_ERR(sbox_wc_move(&b, "B/B/A", "B/B/B")); + SVN_ERR(sbox_wc_move(&b, "B/B/B/A", "C/C/A")); + + /* This can't bump C/C/A as that is outside the lock range + so we expect a tree conflict. 
+ + This used to cause a node not found during bumping + because B/B/B/A doesn't have a BASE node */ + SVN_ERR(sbox_wc_update(&b, "B/B", 2)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 1, ""}, + {0, "B", "normal", 1, "B"}, + {0, "B/B", "normal", 2, "B/B"}, + {0, "B/B/A", "normal", 2, "B/B/A"}, + {0, "B/B/A/A", "normal", 2, "B/B/A/A"}, + {0, "B/B/A/A/A", "normal", 2, "B/B/A/A/A"}, + {0, "C", "normal", 1, "C"}, + {0, "C/C", "normal", 1, "C/C"}, + {3, "B/B/A", "base-deleted", NO_COPY_FROM, "B/B/B"}, + {3, "B/B/A/A", "base-deleted", NO_COPY_FROM}, + {3, "B/B/A/A/A", "base-deleted", NO_COPY_FROM}, + {3, "B/B/B", "normal", 2, "B/B/A", MOVED_HERE}, + {3, "B/B/B/A", "normal", 2, "B/B/A/A", MOVED_HERE}, + {3, "B/B/B/A/A", "normal", 2, "B/B/A/A/A", MOVED_HERE}, + {3, "C/C/A", "normal", 1, "B/B/A/A", MOVED_HERE}, + {3, "C/C/A/A", "normal", 1, "B/B/A/A/A", MOVED_HERE}, + {4, "B/B/B/A", "base-deleted", NO_COPY_FROM, "C/C/A"}, + {4, "B/B/B/A/A", "base-deleted", NO_COPY_FROM}, + {0} + }; + conflict_info_t conflicts[] = { + {"B/B/B/A", FALSE, FALSE, {svn_wc_conflict_action_edit, + svn_wc_conflict_reason_moved_away, "B/B/B/A"}}, + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + SVN_ERR(check_db_conflicts(&b, "", conflicts)); + } + + SVN_ERR(sbox_wc_resolve(&b, "B/B/B/A", svn_depth_empty, + svn_wc_conflict_choose_mine_conflict)); + SVN_ERR(check_db_conflicts(&b, "", NULL)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +make_copy_mixed(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "make_copy_mixed", opts, pool)); + + SVN_ERR(sbox_wc_mkdir(&b, "A")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/E")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/F")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/H")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/I")); + SVN_ERR(sbox_wc_mkdir(&b, 
"A/B/G/J")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/K")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/L")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/M")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/O")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/P")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/Q")); + SVN_ERR(sbox_wc_mkdir(&b, "A/R")); + SVN_ERR(sbox_wc_mkdir(&b, "A/R/S")); + SVN_ERR(sbox_wc_mkdir(&b, "A/R/S/T")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_propset(&b, "k", "r2", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "k", "r3", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "k", "r4", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "k", "r5", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + + SVN_ERR(sbox_wc_update(&b, "", 5)); + SVN_ERR(sbox_wc_update(&b, "A", 4)); + SVN_ERR(sbox_wc_update(&b, "A/B", 3)); + SVN_ERR(sbox_wc_update(&b, "A/B/C", 2)); + SVN_ERR(sbox_wc_update(&b, "A/B/K", 1)); + SVN_ERR(sbox_wc_update(&b, "A/N/O", 3)); + + SVN_ERR(sbox_wc_delete(&b, "A/B/C/F")); + SVN_ERR(sbox_wc_delete(&b, "A/B/G/J")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/J")); + + SVN_ERR(sbox_wc_update(&b, "A/N/P", 1)); + SVN_ERR(sbox_wc_update(&b, "A/N/Q", 1)); + SVN_ERR(sbox_wc_delete(&b, "A/N/P")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/P")); + SVN_ERR(sbox_wc_move(&b, "A/N/Q", "Q")); + SVN_ERR(sbox_wc_move(&b, "A/B/G/H", "H")); + + /* And something that can't be represented */ + SVN_ERR(sbox_wc_update(&b, "A/B/C/E", 1)); + SVN_ERR(sbox_wc_move(&b, "A/B/C/E", "E")); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 5, "", NOT_MOVED, "k"}, + {0, "A", "normal", 4, "A"}, + {0, "A/B", "normal", 3, "A/B"}, + {0, "A/B/C", "normal", 2, "A/B/C"}, + {0, "A/B/C/D", "normal", 2, "A/B/C/D"}, + {0, "A/B/C/E", "normal", 1, "A/B/C/E"}, + {0, "A/B/C/F", "normal", 2, "A/B/C/F"}, + {0, "A/B/G", "normal", 3, "A/B/G"}, + {0, "A/B/G/H", "normal", 3, "A/B/G/H"}, + {0, "A/B/G/I", "normal", 3, 
"A/B/G/I"}, + {0, "A/B/G/J", "normal", 3, "A/B/G/J"}, + {0, "A/B/K", "normal", 1, "A/B/K"}, + {0, "A/B/K/L", "normal", 1, "A/B/K/L"}, + {0, "A/B/K/M", "normal", 1, "A/B/K/M"}, + {0, "A/N", "normal", 4, "A/N"}, + {0, "A/N/O", "normal", 3, "A/N/O"}, + {0, "A/N/P", "normal", 1, "A/N/P"}, + {0, "A/N/Q", "normal", 1, "A/N/Q"}, + {0, "A/R", "normal", 4, "A/R"}, + {0, "A/R/S", "normal", 4, "A/R/S"}, + {0, "A/R/S/T", "normal", 4, "A/R/S/T"}, + {1, "E", "normal", 1, "A/B/C/E", MOVED_HERE}, + {1, "H", "normal", 3, "A/B/G/H", MOVED_HERE}, + {1, "Q", "normal", 1, "A/N/Q", MOVED_HERE}, + {3, "A/N/P", "normal", NO_COPY_FROM}, + {3, "A/N/Q", "base-deleted", NO_COPY_FROM, "Q"}, + {4, "A/B/C/E", "base-deleted", NO_COPY_FROM, "E"}, + {4, "A/B/C/F", "base-deleted", NO_COPY_FROM}, + {4, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"}, + {4, "A/B/G/J", "normal", NO_COPY_FROM}, + + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(svn_wc__db_op_make_copy(b.wc_ctx->db, sbox_wc_path(&b, "A"), + NULL, NULL, pool)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 5, "", NOT_MOVED, "k"}, + {0, "A", "normal", 4, "A"}, + {0, "A/B", "normal", 3, "A/B"}, + {0, "A/B/C", "normal", 2, "A/B/C"}, + {0, "A/B/C/D", "normal", 2, "A/B/C/D"}, + {0, "A/B/C/E", "normal", 1, "A/B/C/E"}, + {0, "A/B/C/F", "normal", 2, "A/B/C/F"}, + {0, "A/B/G", "normal", 3, "A/B/G"}, + {0, "A/B/G/H", "normal", 3, "A/B/G/H"}, + {0, "A/B/G/I", "normal", 3, "A/B/G/I"}, + {0, "A/B/G/J", "normal", 3, "A/B/G/J"}, + {0, "A/B/K", "normal", 1, "A/B/K"}, + {0, "A/B/K/L", "normal", 1, "A/B/K/L"}, + {0, "A/B/K/M", "normal", 1, "A/B/K/M"}, + {0, "A/N", "normal", 4, "A/N"}, + {0, "A/N/O", "normal", 3, "A/N/O"}, + {0, "A/N/P", "normal", 1, "A/N/P"}, + {0, "A/N/Q", "normal", 1, "A/N/Q"}, + {0, "A/R", "normal", 4, "A/R"}, + {0, "A/R/S", "normal", 4, "A/R/S"}, + {0, "A/R/S/T", "normal", 4, "A/R/S/T"}, + {1, "A", "normal", 4, "A"}, + {1, "A/B", "not-present", 3, "A/B"}, + {1, "A/B/C", "base-deleted", NO_COPY_FROM}, + {1, 
"A/B/C/D", "base-deleted", NO_COPY_FROM}, + {1, "A/B/C/E", "base-deleted", NO_COPY_FROM, "E"}, + {1, "A/B/C/F", "base-deleted", NO_COPY_FROM}, + {1, "A/B/G", "base-deleted", NO_COPY_FROM}, + {1, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"}, + {1, "A/B/G/I", "base-deleted", NO_COPY_FROM}, + {1, "A/B/G/J", "base-deleted", NO_COPY_FROM}, + {1, "A/B/K", "base-deleted", NO_COPY_FROM}, + {1, "A/B/K/L", "base-deleted", NO_COPY_FROM}, + {1, "A/B/K/M", "base-deleted", NO_COPY_FROM}, + {1, "A/N", "normal", 4, "A/N"}, + {1, "A/N/O", "not-present", 3, "A/N/O"}, + {1, "A/N/P", "not-present", 1, "A/N/P"}, + {1, "A/N/Q", "not-present", 1, "A/N/Q", FALSE, "Q"}, + {1, "A/R", "normal", 4, "A/R"}, + {1, "A/R/S", "normal", 4, "A/R/S"}, + {1, "A/R/S/T", "normal", 4, "A/R/S/T"}, + {1, "E", "normal", 1, "A/B/C/E", MOVED_HERE}, + {1, "H", "normal", 3, "A/B/G/H", MOVED_HERE}, + {1, "Q", "normal", 1, "A/N/Q", MOVED_HERE}, + {2, "A/B", "normal", 3, "A/B"}, + {2, "A/B/C", "not-present", 2, "A/B/C"}, + {2, "A/B/G", "normal", 3, "A/B/G"}, + {2, "A/B/G/H", "normal", 3, "A/B/G/H"}, + {2, "A/B/G/I", "normal", 3, "A/B/G/I"}, + {2, "A/B/G/J", "normal", 3, "A/B/G/J"}, + {2, "A/B/K", "not-present", 1, "A/B/K"}, + {3, "A/B/C", "normal", 2, "A/B/C"}, + {3, "A/B/C/D", "normal", 2, "A/B/C/D"}, + {3, "A/B/C/E", "not-present", 1, "A/B/C/E"}, + {3, "A/B/C/F", "normal", 2, "A/B/C/F"}, + {3, "A/B/K", "normal", 1, "A/B/K"}, + {3, "A/B/K/L", "normal", 1, "A/B/K/L"}, + {3, "A/B/K/M", "normal", 1, "A/B/K/M"}, + {3, "A/N/O", "normal", 3, "A/N/O"}, + {3, "A/N/P", "normal", NO_COPY_FROM}, + {4, "A/B/C/F", "base-deleted", NO_COPY_FROM}, + {4, "A/B/G/H", "base-deleted", NO_COPY_FROM}, + {4, "A/B/G/J", "normal", NO_COPY_FROM}, + + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(verify_db(&b)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +make_copy_and_delete_mixed(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, 
"make_copy_and_del_mixed", opts, pool)); + + SVN_ERR(sbox_wc_mkdir(&b, "A")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/D")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/E")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/C/F")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/H")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/I")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/J")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/K")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/L")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/K/M")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/O")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/P")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/Q")); + SVN_ERR(sbox_wc_mkdir(&b, "A/R")); + SVN_ERR(sbox_wc_mkdir(&b, "A/R/S")); + SVN_ERR(sbox_wc_mkdir(&b, "A/R/S/T")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_update(&b, "", 1)); + SVN_ERR(sbox_wc_propset(&b, "k", "r2", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "k", "r3", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "k", "r4", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + SVN_ERR(sbox_wc_propset(&b, "k", "r5", "")); + SVN_ERR(sbox_wc_commit(&b, "")); + + SVN_ERR(sbox_wc_update(&b, "", 5)); + SVN_ERR(sbox_wc_update(&b, "A", 4)); + SVN_ERR(sbox_wc_update(&b, "A/B", 3)); + SVN_ERR(sbox_wc_update(&b, "A/B/C", 2)); + SVN_ERR(sbox_wc_update(&b, "A/B/K", 1)); + SVN_ERR(sbox_wc_update(&b, "A/N/O", 3)); + + SVN_ERR(sbox_wc_delete(&b, "A/B/C/F")); + SVN_ERR(sbox_wc_delete(&b, "A/B/G/J")); + SVN_ERR(sbox_wc_mkdir(&b, "A/B/G/J")); + + SVN_ERR(sbox_wc_update(&b, "A/N/P", 1)); + SVN_ERR(sbox_wc_update(&b, "A/N/Q", 1)); + SVN_ERR(sbox_wc_delete(&b, "A/N/P")); + SVN_ERR(sbox_wc_mkdir(&b, "A/N/P")); + SVN_ERR(sbox_wc_move(&b, "A/N/Q", "Q")); + SVN_ERR(sbox_wc_move(&b, "A/B/G/H", "H")); + + /* And something that can't be represented */ + SVN_ERR(sbox_wc_update(&b, "A/B/C/E", 1)); + SVN_ERR(sbox_wc_move(&b, "A/B/C/E", 
"E")); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 5, "", NOT_MOVED, "k"}, + {0, "A", "normal", 4, "A"}, + {0, "A/B", "normal", 3, "A/B"}, + {0, "A/B/C", "normal", 2, "A/B/C"}, + {0, "A/B/C/D", "normal", 2, "A/B/C/D"}, + {0, "A/B/C/E", "normal", 1, "A/B/C/E"}, + {0, "A/B/C/F", "normal", 2, "A/B/C/F"}, + {0, "A/B/G", "normal", 3, "A/B/G"}, + {0, "A/B/G/H", "normal", 3, "A/B/G/H"}, + {0, "A/B/G/I", "normal", 3, "A/B/G/I"}, + {0, "A/B/G/J", "normal", 3, "A/B/G/J"}, + {0, "A/B/K", "normal", 1, "A/B/K"}, + {0, "A/B/K/L", "normal", 1, "A/B/K/L"}, + {0, "A/B/K/M", "normal", 1, "A/B/K/M"}, + {0, "A/N", "normal", 4, "A/N"}, + {0, "A/N/O", "normal", 3, "A/N/O"}, + {0, "A/N/P", "normal", 1, "A/N/P"}, + {0, "A/N/Q", "normal", 1, "A/N/Q"}, + {0, "A/R", "normal", 4, "A/R"}, + {0, "A/R/S", "normal", 4, "A/R/S"}, + {0, "A/R/S/T", "normal", 4, "A/R/S/T"}, + {1, "E", "normal", 1, "A/B/C/E", MOVED_HERE}, + {1, "H", "normal", 3, "A/B/G/H", MOVED_HERE}, + {1, "Q", "normal", 1, "A/N/Q", MOVED_HERE}, + {3, "A/N/P", "normal", NO_COPY_FROM}, + {3, "A/N/Q", "base-deleted", NO_COPY_FROM, "Q"}, + {4, "A/B/C/E", "base-deleted", NO_COPY_FROM, "E"}, + {4, "A/B/C/F", "base-deleted", NO_COPY_FROM}, + {4, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"}, + {4, "A/B/G/J", "normal", NO_COPY_FROM}, + + {0} + }; + + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(svn_wc__db_base_remove(b.wc_ctx->db, sbox_wc_path(&b, "A"), + TRUE, TRUE, FALSE, 99, + NULL, NULL, pool)); + + { + nodes_row_t nodes[] = { + {0, "", "normal", 5, "", NOT_MOVED, "k"}, + {0, "A", "not-present", 99, "A"}, + {1, "A", "normal", 4, "A"}, + {1, "A/B", "not-present", 3, "A/B"}, + {1, "A/N", "normal", 4, "A/N"}, + {1, "A/N/O", "not-present", 3, "A/N/O"}, + {1, "A/N/P", "not-present", 1, "A/N/P"}, + {1, "A/N/Q", "not-present", 1, "A/N/Q", FALSE}, + {1, "A/R", "normal", 4, "A/R"}, + {1, "A/R/S", "normal", 4, "A/R/S"}, + {1, "A/R/S/T", "normal", 4, "A/R/S/T"}, + {1, "E", "normal", 1, "A/B/C/E"}, + {1, "H", "normal", 3, 
"A/B/G/H", MOVED_HERE}, + {1, "Q", "normal", 1, "A/N/Q"}, + {2, "A/B", "normal", 3, "A/B"}, + {2, "A/B/C", "not-present", 2, "A/B/C"}, + {2, "A/B/G", "normal", 3, "A/B/G"}, + {2, "A/B/G/H", "normal", 3, "A/B/G/H"}, + {2, "A/B/G/I", "normal", 3, "A/B/G/I"}, + {2, "A/B/G/J", "normal", 3, "A/B/G/J"}, + {3, "A/B/C", "normal", 2, "A/B/C"}, + {3, "A/B/C/D", "normal", 2, "A/B/C/D"}, + {3, "A/B/C/E", "not-present", 1, "A/B/C/E"}, + {3, "A/B/C/F", "normal", 2, "A/B/C/F"}, + {2, "A/B/K", "not-present", 1, "A/B/K"}, + {3, "A/B/K", "normal", 1, "A/B/K"}, + {3, "A/B/K/L", "normal", 1, "A/B/K/L"}, + {3, "A/B/K/M", "normal", 1, "A/B/K/M"}, + {3, "A/N/O", "normal", 3, "A/N/O"}, + {3, "A/N/P", "normal", NO_COPY_FROM}, + {4, "A/B/C/F", "base-deleted", NO_COPY_FROM}, + {4, "A/B/G/H", "base-deleted", NO_COPY_FROM, "H"}, + {4, "A/B/G/J", "normal", NO_COPY_FROM}, + + {0} + }; + + /* This currently fails because Q and E are still marked as moved, + while there is nothing to be moved. */ + SVN_ERR(check_db_rows(&b, "", nodes)); + } + + SVN_ERR(verify_db(&b)); + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_global_commit(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "global_commit", opts, pool)); + + { + nodes_row_t before[] = { + { 0, "", "normal", 2, "" }, + { 0, "A", "normal", 2, "A" }, + { 0, "A/B", "normal", 2, "A/B" }, + { 0, "A/B/C", "normal", 2, "A/B/C" }, + { 0, "A/B/D", "normal", 2, "A/B/D" }, + { 0, "A/B/D/E", "normal", 2, "A/B/D/E" }, + { 0, "A/F", "normal", 2, "A/F" }, + { 0, "A/F/G", "normal", 2, "A/F/G" }, + { 0, "A/F/H", "normal", 2, "A/F/H" }, + { 0, "A/F/E", "normal", 2, "A/F/E" }, + { 0, "A/X", "normal", 2, "A/X" }, + { 0, "A/X/Y", "incomplete", 2, "A/X/Y" }, + { 1, "C", "normal", 2, "A/B/C", MOVED_HERE}, + { 1, "E", "normal", 2, "A/B/D/E", MOVED_HERE}, + { 2, "A/B", "normal", 3, "some", MOVED_HERE }, + { 2, "A/B/C", "base-deleted", NO_COPY_FROM, "C" }, + { 2, "A/B/D", "normal", 3, "some/D", 
MOVED_HERE}, + { 2, "A/B/D/E", "not-present", 3, "some/D/E", FALSE, "E", TRUE}, + { 3, "A/B/C", "normal", NO_COPY_FROM}, + { 2, "A/F", "normal", 1, "S2" }, + { 2, "A/F/G", "normal", 1, "S2/G" }, + { 2, "A/F/H", "not-present", 1, "S2/H" }, + { 2, "A/F/E", "base-deleted", NO_COPY_FROM }, + { 1, "some", "normal", 3, "some", FALSE, "A/B"}, + { 0 } + }; + SVN_ERR(insert_dirs(&b, before)); + SVN_ERR(check_db_rows(&b, "", before)); /* Check move insertion logic */ + SVN_ERR(verify_db(&b)); + } + + /* This should break the moves */ + SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db, + sbox_wc_path(&b, "A/B"), + 5, 5, 700, "me", NULL, NULL, + FALSE, FALSE, NULL, pool)); + { + nodes_row_t after[] = { + { 0, "", "normal", 2, "" }, + { 0, "A", "normal", 2, "A" }, + { 0, "A/B", "normal", 5, "A/B" }, + { 0, "A/B/D", "normal", 5, "A/B/D"}, + { 0, "A/B/D/E", "not-present", 5, "A/B/D/E"}, + { 0, "A/F", "normal", 2, "A/F" }, + { 0, "A/F/G", "normal", 2, "A/F/G" }, + { 0, "A/F/H", "normal", 2, "A/F/H" }, + { 0, "A/F/E", "normal", 2, "A/F/E" }, + { 0, "A/X", "normal", 2, "A/X" }, + { 0, "A/X/Y", "incomplete", 2, "A/X/Y" }, + { 1, "C", "normal", 2, "A/B/C"}, + { 1, "E", "normal", 2, "A/B/D/E"}, + { 1, "some", "normal", 3, "some"}, + { 3, "A/B/C", "normal", NO_COPY_FROM}, + { 2, "A/F", "normal", 1, "S2" }, + { 2, "A/F/G", "normal", 1, "S2/G" }, + { 2, "A/F/H", "not-present", 1, "S2/H" }, + { 2, "A/F/E", "base-deleted", NO_COPY_FROM }, + { 0 } + }; + + SVN_ERR(check_db_rows(&b, "", after)); + SVN_ERR(verify_db(&b)); + } + + SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db, + sbox_wc_path(&b, "A/F"), + 6, 6, 800, "me", NULL, NULL, + FALSE, FALSE, NULL, pool)); + + { + nodes_row_t after[] = { + { 0, "", "normal", 2, "" }, + { 0, "A", "normal", 2, "A" }, + { 0, "A/B", "normal", 5, "A/B" }, + { 0, "A/B/D", "normal", 5, "A/B/D"}, + { 0, "A/B/D/E", "not-present", 5, "A/B/D/E"}, + { 0, "A/F", "normal", 6, "A/F" }, + { 0, "A/F/G", "normal", 6, "A/F/G" }, + { 0, "A/F/H", "not-present", 6, "A/F/H" }, + 
{ 0, "A/X", "normal", 2, "A/X" }, + { 0, "A/X/Y", "incomplete", 2, "A/X/Y" }, + { 1, "C", "normal", 2, "A/B/C"}, + { 1, "E", "normal", 2, "A/B/D/E"}, + { 1, "some", "normal", 3, "some"}, + { 3, "A/B/C", "normal", NO_COPY_FROM }, + { 0 } + }; + + SVN_ERR(check_db_rows(&b, "", after)); + SVN_ERR(verify_db(&b)); + } + + SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db, + sbox_wc_path(&b, "A/B/C"), + 7, 7, 900, "me", NULL, NULL, + FALSE, FALSE, NULL, pool)); + + { + nodes_row_t after[] = { + { 0, "", "normal", 2, "" }, + { 0, "A", "normal", 2, "A" }, + { 0, "A/B", "normal", 5, "A/B" }, + { 0, "A/B/C", "normal", 7, "A/B/C"}, + { 0, "A/B/D", "normal", 5, "A/B/D"}, + { 0, "A/B/D/E", "not-present", 5, "A/B/D/E"}, + { 0, "A/F", "normal", 6, "A/F" }, + { 0, "A/F/G", "normal", 6, "A/F/G" }, + { 0, "A/F/H", "not-present", 6, "A/F/H" }, + { 0, "A/X", "normal", 2, "A/X" }, + { 0, "A/X/Y", "incomplete", 2, "A/X/Y" }, + { 1, "some", "normal", 3, "some"}, + { 1, "E", "normal", 2, "A/B/D/E"}, + { 1, "C", "normal", 2, "A/B/C"}, + { 0 } + }; + + SVN_ERR(check_db_rows(&b, "", after)); + SVN_ERR(verify_db(&b)); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_global_commit_switched(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + + SVN_ERR(svn_test__sandbox_create(&b, "global_commit_switched", opts, pool)); + { + nodes_row_t before[] = { + { 0, "", "normal", 2, "" }, + { 0, "A", "normal", 2, "A" }, + /* A/B is switched... The libsvn_client layer tries to prevent this, + because it has such an unexpected behavior. 
*/ + { 0, "A/B", "normal", 2, "N/B" }, + { 0, "A/B/C", "normal", 2, "N/B/C" }, + { 0, "A/B/C/D", "normal", 2, "N/B/C/D" }, + { 0, "A/B/C/E", "normal", 2, "N/B/C/E" }, + { 2, "A/B", "normal", 3, "Z/B" }, + { 2, "A/B/C", "normal", 3, "Z/B/C" }, + { 2, "A/B/C/D", "normal", 3, "Z/B/C/D" }, + { 2, "A/B/C/E", "base-deleted", NO_COPY_FROM }, + /* not-present nodes have an 'uninteresting path', + which doesn't have to be as implied by ancestor at same depth */ + { 2, "A/B/C/F", "not-present", 3, "ZZ-Z-Z_ZZ_Z_Z" }, + { 2, "A/B/C/G", "normal", 3, "Z/B/C/G" }, + { 2, "A/B/C/G/H", "normal", 3, "Z/B/C/G/H" }, + + { 3, "A/B/C", "normal", 4, "Q/C" }, + { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM }, + { 3, "A/B/C/G", "normal", 4, "Q/C/G" }, + { 3, "A/B/C/G/H", "base-deleted", NO_COPY_FROM }, + + { 4, "A/B/C/F", "normal", NO_COPY_FROM }, + { 5, "A/B/C/G/H", "normal", NO_COPY_FROM }, + { 0 } + }; + SVN_ERR(insert_dirs(&b, before)); + SVN_ERR(verify_db(&b)); + } + + SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db, + sbox_wc_path(&b, "A/B"), + 7, 7, 12, "me", NULL, NULL, + FALSE, FALSE, NULL, pool)); + + { + nodes_row_t after[] = { + { 0, "", "normal", 2, "" }, + { 0, "A", "normal", 2, "A" }, + /* The commit is applied as A/B, because the path is calculated from A, + and not the shadowed node at A/B. 
(Fixed in r1663991) */ + { 0, "A/B", "normal", 7, "A/B" }, + { 0, "A/B/C", "normal", 7, "A/B/C" }, + { 0, "A/B/C/D", "normal", 7, "A/B/C/D" }, + /* Even calculated path of not-present is fixed */ + { 0, "A/B/C/F", "not-present", 7, "A/B/C/F" }, + { 0, "A/B/C/G", "normal", 7, "A/B/C/G" }, + { 0, "A/B/C/G/H", "normal", 7, "A/B/C/G/H" }, + + /* The higher layers are unaffected */ + { 3, "A/B/C", "normal", 4, "Q/C" }, + { 3, "A/B/C/D", "base-deleted", NO_COPY_FROM }, + { 3, "A/B/C/G", "normal", 4, "Q/C/G" }, + { 3, "A/B/C/G/H", "base-deleted", NO_COPY_FROM }, + + { 4, "A/B/C/F", "normal", NO_COPY_FROM }, + { 5, "A/B/C/G/H", "normal", NO_COPY_FROM }, + { 0 } + }; + SVN_ERR(verify_db(&b)); + SVN_ERR(check_db_rows(&b, "", after)); + } + + SVN_ERR(svn_wc__db_global_commit(b.wc_ctx->db, + sbox_wc_path(&b, "A/B/C"), + 8, 8, 12, "me", NULL, NULL, + FALSE, FALSE, NULL, pool)); + + { + nodes_row_t after[] = { + { 0, "", "normal", 2, "" }, + { 0, "A", "normal", 2, "A" }, + { 0, "A/B", "normal", 7, "A/B" }, + /* Base deleted and not-present are now gone */ + { 0, "A/B/C", "normal", 8, "A/B/C" }, + { 0, "A/B/C/G", "normal", 8, "A/B/C/G" }, + + { 4, "A/B/C/F", "normal", NO_COPY_FROM }, + { 5, "A/B/C/G/H", "normal", NO_COPY_FROM }, + { 0 } + }; + SVN_ERR(verify_db(&b)); + SVN_ERR(check_db_rows(&b, "", after)); + } + + return SVN_NO_ERROR; +} + /* ---------------------------------------------------------------------- */ /* The list of test functions */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 4; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(test_wc_wc_copies, @@ -9047,8 +11898,8 @@ struct svn_test_descriptor_t test_funcs[] = "test_adds_change_kind"), SVN_TEST_OPTS_PASS(test_base_dir_insert_remove, "test_base_dir_insert_remove"), - SVN_TEST_OPTS_PASS(test_temp_op_make_copy, - "test_temp_op_make_copy"), + SVN_TEST_OPTS_PASS(test_db_make_copy, + "test_db_make_copy"), SVN_TEST_OPTS_PASS(test_wc_move, 
"test_wc_move"), SVN_TEST_OPTS_PASS(test_mixed_rev_copy, @@ -9168,12 +12019,16 @@ struct svn_test_descriptor_t test_funcs[] = "move_parent_into_child (issue 4333)"), SVN_TEST_OPTS_PASS(move_depth_expand, "move depth expansion"), - SVN_TEST_OPTS_PASS(move_retract, + SVN_TEST_OPTS_XFAIL(move_retract, "move retract (issue 4336)"), SVN_TEST_OPTS_PASS(move_delete_file_externals, "move/delete file externals (issue 4293)"), SVN_TEST_OPTS_PASS(update_with_tree_conflict, "update with tree conflict (issue 4347)"), + SVN_TEST_OPTS_PASS(move_update_parent_replace, + "move update with replaced parent (issue 4388)"), + SVN_TEST_OPTS_PASS(copy_mixed_rev_mods, + "copy mixed-rev with mods"), SVN_TEST_OPTS_PASS(move_child_to_parent_revert, "move child to parent and revert (issue 4436)"), SVN_TEST_OPTS_PASS(move_delete_intermediate, @@ -9184,13 +12039,55 @@ struct svn_test_descriptor_t test_funcs[] = "move replace ancestor with child"), SVN_TEST_OPTS_PASS(move_twice_within_delete, "move twice and then delete"), - SVN_TEST_OPTS_PASS(repo_wc_copy, - "repo_wc_copy"), - SVN_TEST_OPTS_PASS(copy_mixed_rev_mods, - "copy mixed-rev with mods"), + SVN_TEST_OPTS_PASS(del4_update_edit_AAA, + "del4: edit AAA"), + SVN_TEST_OPTS_XFAIL(del4_update_delete_AAA, + "del4: delete AAA"), + SVN_TEST_OPTS_XFAIL(del4_update_add_AAA, + "del4: add AAA"), + SVN_TEST_OPTS_XFAIL(del4_update_replace_AAA, + "del4: replace AAA"), + SVN_TEST_OPTS_PASS(del4_update_delself_AAA, + "del4: delete self AAA"), + SVN_TEST_OPTS_XFAIL(del4_update_replaceself_AAA, + "del4: replace self AAA"), + SVN_TEST_OPTS_PASS(move4_update_edit_AAA, + "move4: edit AAA"), + SVN_TEST_OPTS_XFAIL(move4_update_delete_AAA, + "move4: delete AAA"), + SVN_TEST_OPTS_XFAIL(move4_update_add_AAA, + "move4: add AAA"), + SVN_TEST_OPTS_XFAIL(move4_update_replace_AAA, + "move4: replace AAA"), + SVN_TEST_OPTS_PASS(move4_update_delself_AAA, + "move4: delete self AAA"), + SVN_TEST_OPTS_XFAIL(move4_update_replaceself_AAA, + "move4: replace self AAA"), + 
SVN_TEST_OPTS_PASS(simple_move_bump, + "simple move bump"), SVN_TEST_OPTS_PASS(movedhere_extract_retract, "movedhere extract retract"), + SVN_TEST_OPTS_PASS(repo_wc_copy, + "repo_wc_copy"), + SVN_TEST_OPTS_PASS(break_move_in_delete, + "break move in delete (issue 4491)"), SVN_TEST_OPTS_PASS(nested_move_delete, "nested move delete"), + SVN_TEST_OPTS_XFAIL(move_within_mixed_move, + "move within mixed move"), + SVN_TEST_OPTS_PASS(move_edit_obstruction, + "move edit obstruction"), + SVN_TEST_OPTS_PASS(move_deep_bump, + "move deep bump"), + SVN_TEST_OPTS_PASS(make_copy_mixed, + "make a copy of a mixed revision tree"), + SVN_TEST_OPTS_PASS(make_copy_and_delete_mixed, + "make a copy of a mixed revision tree and del"), + SVN_TEST_OPTS_PASS(test_global_commit, + "test global commit"), + SVN_TEST_OPTS_PASS(test_global_commit_switched, + "test global commit switched"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_wc/pristine-store-test.c b/subversion/tests/libsvn_wc/pristine-store-test.c index 6df96fc..d9ed077 100644 --- a/subversion/tests/libsvn_wc/pristine-store-test.c +++ b/subversion/tests/libsvn_wc/pristine-store-test.c @@ -70,40 +70,6 @@ create_repos_and_wc(const char **wc_abspath, return SVN_NO_ERROR; } - -/* Write the string DATA into a new unique-named file in the directory - * DIR_ABSPATH. Set *FILE_ABSPATH to its absolute path and *CHECKSUM_SHA1 - * and *CHECKSUM_MD5 to its SHA-1 and MD-5 checksums. - * - * CHECKSUM_SHA1 and/or CHECKSUM_MD5 may be null if not required. 
*/ -static svn_error_t * -write_and_checksum_temp_file(const char **file_abspath, - svn_checksum_t **sha1_checksum, - svn_checksum_t **md5_checksum, - const char *data, - const char *dir_abspath, - apr_pool_t *pool) -{ - apr_file_t *file; - - SVN_ERR(svn_io_open_unique_file3(&file, file_abspath, - dir_abspath, svn_io_file_del_none, - pool, pool)); - - SVN_ERR(svn_io_file_write_full(file, data, strlen(data), NULL, pool)); - SVN_ERR(svn_io_file_close(file, pool)); - - if (sha1_checksum) - SVN_ERR(svn_io_file_checksum2(sha1_checksum, *file_abspath, - svn_checksum_sha1, pool)); - if (md5_checksum) - SVN_ERR(svn_io_file_checksum2(md5_checksum, *file_abspath, - svn_checksum_md5, pool)); - - return SVN_NO_ERROR; -} - - /* Exercise the pristine text API with a simple write and read. */ static svn_error_t * pristine_write_read(const svn_test_opts_t *opts, @@ -112,7 +78,9 @@ pristine_write_read(const svn_test_opts_t *opts, svn_wc__db_t *db; const char *wc_abspath; - const char *pristine_tmp_abspath; + svn_wc__db_install_data_t *install_data; + svn_stream_t *pristine_stream; + apr_size_t sz; const char data[] = "Blah"; svn_string_t *data_string = svn_string_create(data, pool); @@ -123,15 +91,15 @@ pristine_write_read(const svn_test_opts_t *opts, /* Write DATA into a new temporary pristine file, set PRISTINE_TMP_ABSPATH * to its path and set DATA_SHA1 and DATA_MD5 to its checksums. */ - { - const char *pristine_tmp_dir; + SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream, + &install_data, + &data_sha1, &data_md5, + db, wc_abspath, + pool, pool)); - SVN_ERR(svn_wc__db_pristine_get_tempdir(&pristine_tmp_dir, db, - wc_abspath, pool, pool)); - SVN_ERR(write_and_checksum_temp_file(&pristine_tmp_abspath, - &data_sha1, &data_md5, - data, pristine_tmp_dir, pool)); - } + sz = strlen(data); + SVN_ERR(svn_stream_write(pristine_stream, data, &sz)); + SVN_ERR(svn_stream_close(pristine_stream)); /* Ensure it's not already in the store. 
*/ { @@ -143,7 +111,7 @@ pristine_write_read(const svn_test_opts_t *opts, } /* Install the new pristine file, referenced by its checksum. */ - SVN_ERR(svn_wc__db_pristine_install(db, pristine_tmp_abspath, + SVN_ERR(svn_wc__db_pristine_install(install_data, data_sha1, data_md5, pool)); /* Ensure it is now found in the store. */ @@ -209,8 +177,10 @@ pristine_delete_while_open(const svn_test_opts_t *opts, { svn_wc__db_t *db; const char *wc_abspath; - const char *pristine_tmp_dir; + svn_wc__db_install_data_t *install_data; + svn_stream_t *pristine_stream; svn_stream_t *contents; + apr_size_t sz; const char data[] = "Blah"; svn_checksum_t *data_sha1, *data_md5; @@ -218,17 +188,17 @@ pristine_delete_while_open(const svn_test_opts_t *opts, SVN_ERR(create_repos_and_wc(&wc_abspath, &db, "pristine_delete_while_open", opts, pool)); - SVN_ERR(svn_wc__db_pristine_get_tempdir(&pristine_tmp_dir, db, - wc_abspath, pool, pool)); + SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream, + &install_data, + &data_sha1, &data_md5, + db, wc_abspath, + pool, pool)); - /* Install a pristine text. 
*/ - { - const char *path; - - SVN_ERR(write_and_checksum_temp_file(&path, &data_sha1, &data_md5, - data, pristine_tmp_dir, pool)); - SVN_ERR(svn_wc__db_pristine_install(db, path, data_sha1, data_md5, pool)); - } + sz = strlen(data); + SVN_ERR(svn_stream_write(pristine_stream, data, &sz)); + SVN_ERR(svn_stream_close(pristine_stream)); + SVN_ERR(svn_wc__db_pristine_install(install_data, + data_sha1, data_md5, pool)); /* Open it for reading */ SVN_ERR(svn_wc__db_pristine_read(&contents, NULL, db, wc_abspath, data_sha1, @@ -242,7 +212,7 @@ pristine_delete_while_open(const svn_test_opts_t *opts, char buffer[4]; apr_size_t len = 4; - SVN_ERR(svn_stream_read(contents, buffer, &len)); + SVN_ERR(svn_stream_read_full(contents, buffer, &len)); SVN_TEST_ASSERT(len == 4); SVN_TEST_ASSERT(memcmp(buffer, data, len) == 0); } @@ -276,7 +246,6 @@ reject_mismatching_text(const svn_test_opts_t *opts, #ifdef SVN_DEBUG /* The pristine store only checks this in debug mode. */ svn_wc__db_t *db; const char *wc_abspath; - const char *pristine_tmp_dir; const char data[] = "Blah"; svn_checksum_t *data_sha1, *data_md5; @@ -286,28 +255,47 @@ reject_mismatching_text(const svn_test_opts_t *opts, SVN_ERR(create_repos_and_wc(&wc_abspath, &db, "reject_mismatching_text", opts, pool)); - SVN_ERR(svn_wc__db_pristine_get_tempdir(&pristine_tmp_dir, db, - wc_abspath, pool, pool)); - /* Install a pristine text. 
*/ { - const char *path; - - SVN_ERR(write_and_checksum_temp_file(&path, &data_sha1, &data_md5, - data, pristine_tmp_dir, pool)); - SVN_ERR(svn_wc__db_pristine_install(db, path, data_sha1, data_md5, pool)); + svn_wc__db_install_data_t *install_data; + svn_stream_t *pristine_stream; + apr_size_t sz; + + SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream, + &install_data, + &data_sha1, &data_md5, + db, wc_abspath, + pool, pool)); + + sz = strlen(data); + SVN_ERR(svn_stream_write(pristine_stream, data, &sz)); + SVN_ERR(svn_stream_close(pristine_stream)); + + SVN_ERR(svn_wc__db_pristine_install(install_data, + data_sha1, data_md5, + pool)); } /* Try to install the wrong pristine text against the same checksum. * Should fail. */ { - svn_error_t *err; - const char *path; - - SVN_ERR(write_and_checksum_temp_file(&path, NULL, NULL, - data2, pristine_tmp_dir, pool)); - err = svn_wc__db_pristine_install(db, path, data_sha1, data_md5, pool); - SVN_TEST_ASSERT_ERROR(err, SVN_ERR_WC_CORRUPT_TEXT_BASE); + svn_wc__db_install_data_t *install_data; + svn_stream_t *pristine_stream; + apr_size_t sz; + + SVN_ERR(svn_wc__db_pristine_prepare_install(&pristine_stream, + &install_data, + &data_sha1, &data_md5, + db, wc_abspath, + pool, pool)); + + sz = strlen(data2); + SVN_ERR(svn_stream_write(pristine_stream, data2, &sz)); + SVN_ERR(svn_stream_close(pristine_stream)); + + SVN_ERR(svn_wc__db_pristine_install(install_data, + data_sha1, data_md5, + pool)); } return SVN_NO_ERROR; @@ -319,7 +307,9 @@ reject_mismatching_text(const svn_test_opts_t *opts, } -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = -1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(pristine_write_read, @@ -330,3 +320,5 @@ struct svn_test_descriptor_t test_funcs[] = "reject_mismatching_text"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_wc/utils.c b/subversion/tests/libsvn_wc/utils.c index 1682b98..bebfc8a 100644 --- 
a/subversion/tests/libsvn_wc/utils.c +++ b/subversion/tests/libsvn_wc/utils.c @@ -22,8 +22,8 @@ #include "svn_error.h" #include "svn_client.h" +#include "svn_cmdline.h" #include "svn_pools.h" -#include "private/svn_dep_compat.h" #include "utils.h" @@ -33,10 +33,26 @@ #include "../../libsvn_wc/wc-queries.h" #define SVN_WC__I_AM_WC_DB #include "../../libsvn_wc/wc_db_private.h" +#include "../../libsvn_wc/token-map.h" +svn_error_t * +svn_test__create_client_ctx(svn_client_ctx_t **ctx, + svn_test__sandbox_t *sbox, + apr_pool_t *result_pool) +{ + SVN_ERR(svn_client_create_context2(ctx, NULL, result_pool)); + + SVN_ERR(svn_test__init_auth_baton(&(*ctx)->auth_baton, + result_pool)); + + if (sbox) + (*ctx)->wc_ctx = sbox->wc_ctx; + return SVN_NO_ERROR; +} /* Create an empty repository and WC for the test TEST_NAME. Set *REPOS_URL - * to the URL of the new repository and *WC_ABSPATH to the root path of the + * to the URL of the new repository, *REPOS_DIR to its local path and + * *WC_ABSPATH to the root path of the * new WC. * * Create the repository and WC in subdirectories called @@ -46,6 +62,7 @@ * Register the repo and WC to be cleaned up when the test suite exits. */ static svn_error_t * create_repos_and_wc(const char **repos_url, + const char **repos_dir, const char **wc_abspath, const char *test_name, const svn_test_opts_t *opts, @@ -66,8 +83,6 @@ create_repos_and_wc(const char **repos_url, /* Create a repos. Register it for clean-up. Set *REPOS_URL to its path. */ { - svn_repos_t *repos; - /* Use a subpool to create the repository and then destroy the subpool so the repository's underlying filesystem is closed. If opts->fs_type is BDB this prevents any attempt to open a second environment handle @@ -75,8 +90,8 @@ create_repos_and_wc(const char **repos_url, only a single environment handle to be open per process. 
*/ apr_pool_t *subpool = svn_pool_create(pool); - SVN_ERR(svn_test__create_repos(&repos, repos_path, opts, subpool)); - SVN_ERR(svn_uri_get_file_url_from_dirent(repos_url, repos_path, pool)); + SVN_ERR(svn_test__create_repos2(NULL, repos_url, repos_dir, repos_path, + opts, pool, subpool)); svn_pool_destroy(subpool); } @@ -86,7 +101,7 @@ create_repos_and_wc(const char **repos_url, svn_client_ctx_t *ctx; svn_opt_revision_t head_rev = { svn_opt_revision_head, {0} }; - SVN_ERR(svn_client_create_context2(&ctx, NULL, subpool)); + SVN_ERR(svn_test__create_client_ctx(&ctx, NULL, subpool)); SVN_ERR(svn_dirent_get_absolute(wc_abspath, wc_path, pool)); SVN_ERR(svn_client_checkout3(NULL, *repos_url, *wc_abspath, &head_rev, &head_rev, svn_depth_infinity, @@ -102,44 +117,151 @@ create_repos_and_wc(const char **repos_url, return SVN_NO_ERROR; } - WC_QUERIES_SQL_DECLARE_STATEMENTS(statements); svn_error_t * svn_test__create_fake_wc(const char *wc_abspath, const char *extra_statements, - apr_pool_t *result_pool, + const svn_test__nodes_data_t nodes[], + const svn_test__actual_data_t actuals[], + apr_pool_t *scratch_pool) { const char *dotsvn_abspath = svn_dirent_join(wc_abspath, ".svn", scratch_pool); - const char *db_abspath = svn_dirent_join(dotsvn_abspath, "wc.db", - scratch_pool); svn_sqlite__db_t *sdb; const char **my_statements; int i; + svn_sqlite__stmt_t *stmt; + const apr_int64_t wc_id = 1; /* Allocate MY_STATEMENTS in RESULT_POOL because the SDB will continue to * refer to it over its lifetime. 
*/ - my_statements = apr_palloc(result_pool, 6 * sizeof(const char *)); + my_statements = apr_palloc(scratch_pool, 7 * sizeof(const char *)); my_statements[0] = statements[STMT_CREATE_SCHEMA]; my_statements[1] = statements[STMT_CREATE_NODES]; my_statements[2] = statements[STMT_CREATE_NODES_TRIGGERS]; my_statements[3] = statements[STMT_CREATE_EXTERNALS]; - my_statements[4] = extra_statements; - my_statements[5] = NULL; + my_statements[4] = statements[STMT_INSTALL_SCHEMA_STATISTICS]; + my_statements[5] = extra_statements; + my_statements[6] = NULL; /* Create fake-wc/SUBDIR/.svn/ for placing the metadata. */ SVN_ERR(svn_io_make_dir_recursively(dotsvn_abspath, scratch_pool)); - - svn_error_clear(svn_io_remove_file2(db_abspath, FALSE, scratch_pool)); SVN_ERR(svn_wc__db_util_open_db(&sdb, wc_abspath, "wc.db", svn_sqlite__mode_rwcreate, - FALSE /* exclusive */, my_statements, - result_pool, scratch_pool)); + FALSE /* exclusive */, 0 /* timeout */, + my_statements, + scratch_pool, scratch_pool)); for (i = 0; my_statements[i] != NULL; i++) SVN_ERR(svn_sqlite__exec_statements(sdb, /* my_statements[] */ i)); + SVN_ERR(svn_sqlite__close(sdb)); + + if (!nodes && !actuals) + return SVN_NO_ERROR; + + /* Re-open with normal set of statements */ + SVN_ERR(svn_wc__db_util_open_db(&sdb, wc_abspath, "wc.db", + svn_sqlite__mode_readwrite, + FALSE /* exclusive */, 0 /* timeout */, + statements, + scratch_pool, scratch_pool)); + + if (nodes) + { + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, + STMT_INSERT_NODE)); + + for (i = 0; nodes[i].local_relpath; i++) + { + SVN_ERR(svn_sqlite__bindf(stmt, "isdsnnns", + wc_id, + nodes[i].local_relpath, + nodes[i].op_depth, + nodes[i].local_relpath[0] + ? 
svn_relpath_dirname(nodes[i].local_relpath, + scratch_pool) + : NULL, + nodes[i].presence)); + + if (nodes[i].repos_relpath) + { + SVN_ERR(svn_sqlite__bind_int64(stmt, 5, nodes[i].repos_id)); + SVN_ERR(svn_sqlite__bind_text(stmt, 6, nodes[i].repos_relpath)); + SVN_ERR(svn_sqlite__bind_revnum(stmt, 7, nodes[i].revision)); + } + + if (nodes[i].depth) + SVN_ERR(svn_sqlite__bind_text(stmt, 9, nodes[i].depth)); + + if (nodes[i].kind != 0) + SVN_ERR(svn_sqlite__bind_token(stmt, 10, kind_map, nodes[i].kind)); + + if (nodes[i].last_author || nodes[i].last_date) + { + SVN_ERR(svn_sqlite__bind_revnum(stmt, 11, nodes[i].last_revision)); + SVN_ERR(svn_sqlite__bind_int64(stmt, 12, nodes[i].last_date)); + SVN_ERR(svn_sqlite__bind_text(stmt, 13, nodes[i].last_author)); + } + + if (nodes[i].checksum) + SVN_ERR(svn_sqlite__bind_text(stmt, 14, nodes[i].checksum)); + + if (nodes[i].properties) + SVN_ERR(svn_sqlite__bind_text(stmt, 15, nodes[i].properties)); + + if (nodes[i].recorded_size || nodes[i].recorded_time) + { + SVN_ERR(svn_sqlite__bind_int64(stmt, 16, nodes[i].recorded_size)); + SVN_ERR(svn_sqlite__bind_int64(stmt, 17, nodes[i].recorded_time)); + } + + /* 18 is DAV cache */ + + if (nodes[i].symlink_target) + SVN_ERR(svn_sqlite__bind_text(stmt, 19, nodes[i].symlink_target)); + + if (nodes[i].file_external) + SVN_ERR(svn_sqlite__bind_int(stmt, 20, 1)); + + if (nodes[i].moved_to) + SVN_ERR(svn_sqlite__bind_text(stmt, 21, nodes[i].moved_to)); + + if (nodes[i].moved_here) + SVN_ERR(svn_sqlite__bind_int(stmt, 22, 1)); + + if (nodes[i].inherited_props) + SVN_ERR(svn_sqlite__bind_text(stmt, 23, nodes[i].inherited_props)); + + SVN_ERR(svn_sqlite__step_done(stmt)); + } + } + + if (actuals) + { + SVN_ERR(svn_sqlite__get_statement(&stmt, sdb, + STMT_INSERT_ACTUAL_NODE)); + + for (i = 0; actuals[i].local_relpath; i++) + { + SVN_ERR(svn_sqlite__bindf(stmt, "isssss", + wc_id, + actuals[i].local_relpath, + actuals[i].local_relpath[0] + ? 
svn_relpath_dirname(actuals[i].local_relpath, + scratch_pool) + : NULL, + actuals[i].properties, + actuals[i].changelist, + actuals[i].conflict_data)); + + SVN_ERR(svn_sqlite__step_done(stmt)); + } + } + + SVN_ERR(svn_sqlite__close(sdb)); + return SVN_NO_ERROR; } @@ -151,19 +273,28 @@ svn_test__sandbox_create(svn_test__sandbox_t *sandbox, apr_pool_t *pool) { sandbox->pool = pool; - SVN_ERR(create_repos_and_wc(&sandbox->repos_url, &sandbox->wc_abspath, + SVN_ERR(create_repos_and_wc(&sandbox->repos_url, &sandbox->repos_dir, + &sandbox->wc_abspath, test_name, opts, pool)); SVN_ERR(svn_wc_context_create(&sandbox->wc_ctx, NULL, pool, pool)); return SVN_NO_ERROR; } -void +svn_error_t * sbox_file_write(svn_test__sandbox_t *b, const char *path, const char *text) { - FILE *f = fopen(sbox_wc_path(b, path), "w"); + apr_file_t *f; - fputs(text, f); - fclose(f); + SVN_ERR(svn_io_file_open(&f, sbox_wc_path(b, path), + (APR_WRITE | APR_CREATE | APR_TRUNCATE), + APR_OS_DEFAULT, + b->pool)); + + SVN_ERR(svn_io_file_write_full(f, text, strlen(text), NULL, b->pool)); + + SVN_ERR(svn_io_file_close(f, b->pool)); + + return SVN_NO_ERROR; } svn_error_t * @@ -175,7 +306,8 @@ sbox_wc_add(svn_test__sandbox_t *b, const char *path) parent_abspath = svn_dirent_dirname(path, b->pool); SVN_ERR(svn_wc__acquire_write_lock(NULL, b->wc_ctx, parent_abspath, FALSE, b->pool, b->pool)); - SVN_ERR(svn_wc_add_from_disk2(b->wc_ctx, path, NULL /*props*/, + SVN_ERR(svn_wc_add_from_disk3(b->wc_ctx, path, NULL /*props*/, + FALSE /* skip checks */, NULL, NULL, b->pool)); SVN_ERR(svn_wc__release_write_lock(b->wc_ctx, parent_abspath, b->pool)); return SVN_NO_ERROR; @@ -241,8 +373,7 @@ sbox_wc_copy_url(svn_test__sandbox_t *b, const char *from_url, scratch_pool, 1, sizeof(svn_client_copy_source_t *)); - SVN_ERR(svn_client_create_context2(&ctx, NULL, scratch_pool)); - ctx->wc_ctx = b->wc_ctx; + SVN_ERR(svn_test__create_client_ctx(&ctx, b, scratch_pool)); if (SVN_IS_VALID_REVNUM(revision)) { @@ -258,8 +389,14 @@ 
sbox_wc_copy_url(svn_test__sandbox_t *b, const char *from_url, APR_ARRAY_PUSH(sources, svn_client_copy_source_t *) = src; - SVN_ERR(svn_client_copy6(sources, sbox_wc_path(b, to_path), - FALSE, FALSE, FALSE, NULL, NULL, NULL, + SVN_ERR(svn_client_copy7(sources, sbox_wc_path(b, to_path), + FALSE /* copy_as_child */, + FALSE /* make_parents */, + FALSE /* ignore_externals */, + FALSE /* metadata_only */, + FALSE, NULL /* pin_external */, + NULL /* revprops */, + NULL, NULL, /* commit_callback */ ctx, scratch_pool)); ctx->wc_ctx = NULL; @@ -283,7 +420,11 @@ sbox_wc_revert(svn_test__sandbox_t *b, const char *path, svn_depth_t depth) SVN_ERR(svn_wc__acquire_write_lock(&lock_root_abspath, b->wc_ctx, dir_abspath, FALSE /* lock_anchor */, b->pool, b->pool)); - SVN_ERR(svn_wc_revert4(b->wc_ctx, abspath, depth, FALSE, NULL, + SVN_ERR(svn_wc_revert5(b->wc_ctx, abspath, depth, + FALSE /* use_commit_times */, + NULL /* changelist_filter */, + FALSE /* clear_changelists */, + FALSE /* metadata_only */, NULL, NULL, /* cancel baton + func */ NULL, NULL, /* notify baton + func */ b->pool)); @@ -335,8 +476,7 @@ sbox_wc_commit_ex(svn_test__sandbox_t *b, apr_pool_t *scratch_pool = svn_pool_create(b->pool); svn_error_t *err; - SVN_ERR(svn_client_create_context2(&ctx, NULL, scratch_pool)); - ctx->wc_ctx = b->wc_ctx; + SVN_ERR(svn_test__create_client_ctx(&ctx, b, scratch_pool)); /* A successfull commit doesn't close the ra session, but leaves that to the caller. 
This leaves the BDB handle open, which might cause @@ -377,12 +517,19 @@ sbox_wc_update_depth(svn_test__sandbox_t *b, sizeof(const char *)); svn_opt_revision_t revision; - revision.kind = svn_opt_revision_number; - revision.value.number = revnum; + if (SVN_IS_VALID_REVNUM(revnum)) + { + revision.kind = svn_opt_revision_number; + revision.value.number = revnum; + } + else + { + revision.kind = svn_opt_revision_head; + } APR_ARRAY_PUSH(paths, const char *) = sbox_wc_path(b, path); - SVN_ERR(svn_client_create_context2(&ctx, NULL, b->pool)); - ctx->wc_ctx = b->wc_ctx; + SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool)); + return svn_client_update4(&result_revs, paths, &revision, depth, sticky, FALSE, FALSE, FALSE, FALSE, ctx, b->pool); @@ -405,9 +552,9 @@ sbox_wc_switch(svn_test__sandbox_t *b, svn_revnum_t result_rev; svn_opt_revision_t head_rev = { svn_opt_revision_head, {0} }; - url = apr_pstrcat(b->pool, b->repos_url, url, (char*)NULL); - SVN_ERR(svn_client_create_context2(&ctx, NULL, b->pool)); - ctx->wc_ctx = b->wc_ctx; + url = apr_pstrcat(b->pool, b->repos_url, url, SVN_VA_NULL); + SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool)); + return svn_client_switch3(&result_rev, sbox_wc_path(b, path), url, &head_rev, &head_rev, depth, FALSE /* depth_is_sticky */, @@ -452,14 +599,43 @@ sbox_wc_resolve(svn_test__sandbox_t *b, const char *path, svn_depth_t depth, } svn_error_t * +sbox_wc_resolve_prop(svn_test__sandbox_t *b, const char *path, + const char *propname, + svn_wc_conflict_choice_t conflict_choice) +{ + const char *lock_abspath; + svn_error_t *err; + + SVN_ERR(svn_wc__acquire_write_lock_for_resolve(&lock_abspath, b->wc_ctx, + sbox_wc_path(b, path), + b->pool, b->pool)); + err = svn_wc__resolve_conflicts(b->wc_ctx, sbox_wc_path(b, path), + svn_depth_empty, + FALSE, + propname, + FALSE, + conflict_choice, + NULL, NULL, /* conflict func */ + NULL, NULL, /* cancellation */ + NULL, NULL, /* notification */ + b->pool); + + err = svn_error_compose_create(err, 
svn_wc__release_write_lock(b->wc_ctx, + lock_abspath, + b->pool)); + return err; +} + + +svn_error_t * sbox_wc_move(svn_test__sandbox_t *b, const char *src, const char *dst) { svn_client_ctx_t *ctx; apr_array_header_t *paths = apr_array_make(b->pool, 1, sizeof(const char *)); - SVN_ERR(svn_client_create_context2(&ctx, NULL, b->pool)); - ctx->wc_ctx = b->wc_ctx; + SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool)); + APR_ARRAY_PUSH(paths, const char *) = sbox_wc_path(b, src); return svn_client_move7(paths, sbox_wc_path(b, dst), FALSE /* move_as_child */, @@ -482,8 +658,8 @@ sbox_wc_propset(svn_test__sandbox_t *b, sizeof(const char *)); svn_string_t *pval = value ? svn_string_create(value, b->pool) : NULL; - SVN_ERR(svn_client_create_context2(&ctx, NULL, b->pool)); - ctx->wc_ctx = b->wc_ctx; + SVN_ERR(svn_test__create_client_ctx(&ctx, b, b->pool)); + APR_ARRAY_PUSH(paths, const char *) = sbox_wc_path(b, path); return svn_client_propset_local(name, pval, paths, svn_depth_empty, TRUE /* skip_checks */, @@ -497,8 +673,7 @@ sbox_wc_relocate(svn_test__sandbox_t *b, apr_pool_t *scratch_pool = b->pool; svn_client_ctx_t *ctx; - SVN_ERR(svn_client_create_context2(&ctx, NULL, scratch_pool)); - ctx->wc_ctx = b->wc_ctx; + SVN_ERR(svn_test__create_client_ctx(&ctx, b, scratch_pool)); SVN_ERR(svn_client_relocate2(b->wc_abspath, b->repos_url, new_repos_url, FALSE, ctx,scratch_pool)); @@ -517,7 +692,7 @@ sbox_add_and_commit_greek_tree(svn_test__sandbox_t *b) { if (node->contents) { - sbox_file_write(b, node->path, node->contents); + SVN_ERR(sbox_file_write(b, node->path, node->contents)); SVN_ERR(sbox_wc_add(b, node->path)); } else diff --git a/subversion/tests/libsvn_wc/utils.h b/subversion/tests/libsvn_wc/utils.h index 3004634..260139d 100644 --- a/subversion/tests/libsvn_wc/utils.h +++ b/subversion/tests/libsvn_wc/utils.h @@ -25,6 +25,8 @@ #include <apr_pools.h> #include "svn_error.h" +#include "svn_client.h" + #include "../svn_test.h" #ifdef __cplusplus @@ -53,6 +55,8 @@ 
typedef struct svn_test__sandbox_t svn_wc_context_t *wc_ctx; /* The repository URL. */ const char *repos_url; + /* Local path to the repository */ + const char *repos_dir; /* The absolute local path of the WC root. */ const char *wc_abspath; /* A pool that can be used for all allocations. */ @@ -83,7 +87,7 @@ svn_test__sandbox_create(svn_test__sandbox_t *sandbox, (svn_dirent_join((b)->wc_abspath, (path), (b)->pool)) /* Create a file on disk at PATH, with TEXT as its content. */ -void +svn_error_t * sbox_file_write(svn_test__sandbox_t *b, const char *path, const char *text); /* Schedule for addition the single node that exists on disk at PATH, @@ -162,6 +166,12 @@ sbox_wc_resolve(svn_test__sandbox_t *b, const char *path, svn_depth_t depth, /* */ svn_error_t * +sbox_wc_resolve_prop(svn_test__sandbox_t *b, const char *path, + const char *propname, + svn_wc_conflict_choice_t conflict_choice); + +/* */ +svn_error_t * sbox_wc_move(svn_test__sandbox_t *b, const char *src, const char *dst); /* Set property NAME to VALUE on PATH. If VALUE=NULL, delete the property. 
*/ @@ -175,6 +185,39 @@ sbox_wc_propset(svn_test__sandbox_t *b, svn_error_t * sbox_add_and_commit_greek_tree(svn_test__sandbox_t *b); +/* Initial data to store in NODES */ +typedef struct svn_test__nodes_data_t +{ + int op_depth; + const char *local_relpath; + const char *presence; + int repos_id; + const char *repos_relpath; + svn_revnum_t revision; + svn_boolean_t moved_here; + const char *moved_to; + svn_node_kind_t kind; + const char *properties; + const char *depth; + const char *checksum; + const char *symlink_target; + svn_revnum_t last_revision; + apr_time_t last_date; + const char *last_author; + svn_boolean_t file_external; + const char *inherited_props; + svn_filesize_t recorded_size; + apr_time_t recorded_time; +} svn_test__nodes_data_t; + +/* Initial data to store in ACTUAL */ +typedef struct svn_test__actual_data_t +{ + const char *local_relpath; + const char *properties; + const char *changelist; + const char *conflict_data; +} svn_test__actual_data_t; /* Create a WC directory at WC_ABSPATH containing a fake WC DB, generated by * executing the SQL statements EXTRA_STATEMENTS in addition to the standard @@ -182,10 +225,18 @@ sbox_add_and_commit_greek_tree(svn_test__sandbox_t *b); svn_error_t * svn_test__create_fake_wc(const char *wc_abspath, const char *extra_statements, - apr_pool_t *result_pool, + const svn_test__nodes_data_t nodes[], + const svn_test__actual_data_t actuals[], apr_pool_t *scratch_pool); +/* Create a client context for the specified sandbox */ +svn_error_t * +svn_test__create_client_ctx(svn_client_ctx_t **ctx, + svn_test__sandbox_t *sbox, + apr_pool_t *result_pool); + + #ifdef __cplusplus } #endif /* __cplusplus */ diff --git a/subversion/tests/libsvn_wc/wc-lock-tester.c b/subversion/tests/libsvn_wc/wc-lock-tester.c index d72c536..1daee66 100644 --- a/subversion/tests/libsvn_wc/wc-lock-tester.c +++ b/subversion/tests/libsvn_wc/wc-lock-tester.c @@ -35,16 +35,19 @@ #include "private/svn_wc_private.h" #include "../../libsvn_wc/wc.h" 
#include "../../libsvn_wc/wc_db.h" +#include "../../libsvn_wc/workqueue.h" #include "svn_private_config.h" #define USAGE_MSG \ - "Usage: %s [-r|-1] DIRNAME\n" \ + "Usage: %s [-1|-r|-w] DIRNAME\n" \ "\n" \ - "Locks one directory (-1), or a tree recursively (-r)\n" + "Locks one directory (-1), or a tree recursively (-r), or locks\n" \ + "recursively and creates an outstanding work queue item (-w)\n" static svn_error_t * obtain_lock(const char *path, svn_boolean_t recursive, + svn_boolean_t populate_work_queue, apr_pool_t *scratch_pool) { const char *local_abspath; @@ -52,9 +55,7 @@ obtain_lock(const char *path, svn_boolean_t recursive, SVN_ERR(svn_path_cstring_to_utf8(&path, path, scratch_pool)); SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, scratch_pool)); - - SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, scratch_pool, - scratch_pool)); + SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, scratch_pool, scratch_pool)); if (recursive) { @@ -68,6 +69,19 @@ obtain_lock(const char *path, svn_boolean_t recursive, scratch_pool)); } + if (populate_work_queue) + { + svn_skel_t *work_item; + + /* Add an arbitrary work item to the work queue for DB, but don't + * run the work queue. 
*/ + SVN_ERR(svn_wc__wq_build_sync_file_flags(&work_item, wc_ctx->db, + local_abspath, scratch_pool, + scratch_pool)); + SVN_ERR(svn_wc__db_wq_add(wc_ctx->db, local_abspath, work_item, + scratch_pool)); + } + SVN_ERR(svn_cmdline_printf(scratch_pool, "Lock on '%s' obtained, and we " "are not going to release it.\n", svn_dirent_local_style(local_abspath, @@ -83,9 +97,11 @@ main(int argc, const char *argv[]) int exit_code = EXIT_SUCCESS; svn_error_t *err; svn_boolean_t recursive; + svn_boolean_t populate_work_queue; if (argc != 3 - || (strcmp(argv[1], "-1") && apr_strnatcmp(argv[1], "-r"))) + || (strcmp(argv[1], "-1") && apr_strnatcmp(argv[1], "-r") && + apr_strnatcmp(argv[1], "-w"))) { fprintf(stderr, USAGE_MSG, argv[0]); exit(EXIT_FAILURE); @@ -100,9 +116,10 @@ main(int argc, const char *argv[]) /* set up the global pool */ pool = svn_pool_create(NULL); - recursive = (strcmp(argv[1], "-1") != 0); + populate_work_queue = (strcmp(argv[1], "-w") == 0); + recursive = ((strcmp(argv[1], "-1") != 0) || populate_work_queue); - err = obtain_lock(argv[2], recursive, pool); + err = obtain_lock(argv[2], recursive, populate_work_queue, pool); if (err) { diff --git a/subversion/tests/libsvn_wc/wc-queries-test.c b/subversion/tests/libsvn_wc/wc-queries-test.c index 0621720..d63aa57 100644 --- a/subversion/tests/libsvn_wc/wc-queries-test.c +++ b/subversion/tests/libsvn_wc/wc-queries-test.c @@ -22,6 +22,7 @@ */ #include "svn_pools.h" +#include "svn_hash.h" #include "svn_ctype.h" #include "private/svn_dep_compat.h" @@ -30,22 +31,17 @@ #include "../svn_test.h" #ifdef SVN_SQLITE_INLINE -/* Include sqlite3 inline, making all symbols private. */ - #define SQLITE_API static - #ifdef __APPLE__ - #include <Availability.h> - #if __MAC_OS_X_VERSION_MIN_REQUIRED < 1060 - /* <libkern/OSAtomic.h> is included on OS X by sqlite3.c, and - on old systems (Leopard or older), it cannot be compiled - with -std=c89 because it uses inline. This is a work-around. 
*/ - #define inline __inline__ - #include <libkern/OSAtomic.h> - #undef inline - #endif - #endif - #include <sqlite3.c> +/* Import the sqlite3 API vtable from sqlite3wrapper.c */ +# define SQLITE_OMIT_DEPRECATED +# include <sqlite3ext.h> +extern const sqlite3_api_routines *const svn_sqlite3__api_funcs; +extern int (*const svn_sqlite3__api_initialize)(void); +extern int (*const svn_sqlite3__api_config)(int, ...); +# define sqlite3_api svn_sqlite3__api_funcs +# define sqlite3_initialize svn_sqlite3__api_initialize +# define sqlite3_config svn_sqlite3__api_config #else - #include <sqlite3.h> +# include <sqlite3.h> #endif #include "../../libsvn_wc/wc-queries.h" @@ -56,7 +52,7 @@ WC_QUERIES_SQL_DECLARE_STATEMENT_INFO(wc_query_info); /* The first query after the normal wc queries */ #define STMT_SCHEMA_FIRST STMT_CREATE_SCHEMA -#define SQLITE_ERR(x) \ +#define SQLITE_ERR(x) do \ { \ int sqlite_err__temp = (x); \ if (sqlite_err__temp != SQLITE_OK) \ @@ -101,6 +97,7 @@ static const int slow_statements[] = /* Full temporary table read */ STMT_INSERT_ACTUAL_EMPTIES, + STMT_INSERT_ACTUAL_EMPTIES_FILES, STMT_SELECT_REVERT_LIST_RECURSIVE, STMT_SELECT_DELETE_LIST, STMT_SELECT_UPDATE_MOVE_LIST, @@ -177,15 +174,15 @@ create_memory_db(sqlite3 **db, static svn_error_t * test_sqlite_version(apr_pool_t *scratch_pool) { - printf("DBG: Using Sqlite %s\n", sqlite3_version); + printf("DBG: Using Sqlite %s\n", sqlite3_libversion()); if (sqlite3_libversion_number() != SQLITE_VERSION_NUMBER) - printf("DBG: Compiled against Sqlite %s", SQLITE_VERSION); + printf("DBG: Compiled against Sqlite %s\n", SQLITE_VERSION); if (sqlite3_libversion_number() < SQLITE_VERSION_NUMBER) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "Compiled against Sqlite %s (at runtime we have Sqlite %s)", - SQLITE_VERSION, sqlite3_version); + SQLITE_VERSION, sqlite3_libversion()); #if !SQLITE_VERSION_AT_LEAST(3, 7, 9) return svn_error_create(SVN_ERR_TEST_FAILED, NULL, @@ -307,14 +304,21 @@ 
parse_explanation_item(struct explanation_item **parsed_item, item->search = TRUE; /* Search or scan */ token = apr_strtok(NULL, " ", &last); - if (!MATCH_TOKEN(token, "TABLE")) + if (MATCH_TOKEN(token, "TABLE")) + { + item->table = apr_strtok(NULL, " ", &last); + } + else if (MATCH_TOKEN(token, "SUBQUERY")) + { + item->table = apr_psprintf(result_pool, "SUBQUERY-%s", + apr_strtok(NULL, " ", &last)); + } + else { printf("DBG: Expected 'TABLE', got '%s' in '%s'\n", token, text); return SVN_NO_ERROR; /* Nothing to parse */ } - item->table = apr_strtok(NULL, " ", &last); - token = apr_strtok(NULL, " ", &last); /* Skip alias */ @@ -418,7 +422,7 @@ parse_explanation_item(struct explanation_item **parsed_item, return SVN_NO_ERROR; } - /* Parsing successfull */ + /* Parsing successful */ } else if (MATCH_TOKEN(item->operation, "EXECUTE")) { @@ -606,7 +610,7 @@ test_query_expectations(apr_pool_t *scratch_pool) apr_pstrcat(iterpool, "EXPLAIN QUERY PLAN ", wc_queries[i], - NULL), + SVN_VA_NULL), -1, &stmt, &tail); if (r != SQLITE_OK) @@ -744,6 +748,105 @@ test_query_expectations(apr_pool_t *scratch_pool) return warnings; } +static svn_error_t * +test_query_duplicates(apr_pool_t *scratch_pool) +{ + sqlite3 *sdb; + int i; + apr_pool_t *iterpool = svn_pool_create(scratch_pool); + svn_error_t *warnings = NULL; + svn_boolean_t supports_query_info; + apr_hash_t *sha_to_query = apr_hash_make(scratch_pool); + + SVN_ERR(create_memory_db(&sdb, scratch_pool)); + + SVN_ERR(supported_explain_query_plan(&supports_query_info, sdb, + scratch_pool)); + if (!supports_query_info) + { + SQLITE_ERR(sqlite3_close(sdb)); + return svn_error_create(SVN_ERR_TEST_SKIPPED, NULL, + "Sqlite doesn't support EXPLAIN QUERY PLAN"); + } + + for (i = 0; i < STMT_SCHEMA_FIRST; i++) + { + sqlite3_stmt *stmt; + const char *tail; + int r; + svn_stringbuf_t *result; + svn_checksum_t *checksum; + + if (is_schema_statement(i)) + continue; + + /* Prepare statement to find if it is a single statement. 
*/ + r = sqlite3_prepare_v2(sdb, wc_queries[i], -1, &stmt, &tail); + + if (r != SQLITE_OK) + continue; /* Parse failure is already reported by 'test_parable' */ + + SQLITE_ERR(sqlite3_finalize(stmt)); + if (tail[0] != '\0') + continue; /* Multi-queries are currently not testable */ + + svn_pool_clear(iterpool); + + r = sqlite3_prepare_v2(sdb, + apr_pstrcat(iterpool, + "EXPLAIN ", + wc_queries[i], + SVN_VA_NULL), + -1, &stmt, &tail); + + if (r != SQLITE_OK) + continue; /* EXPLAIN not enabled or doesn't support this query */ + + result = svn_stringbuf_create_empty(iterpool); + + while (SQLITE_ROW == (r = sqlite3_step(stmt))) + { + int col; + + for (col = 0; col < sqlite3_column_count(stmt); col++) + { + const char *txt = (const char*)sqlite3_column_text(stmt, col); + if (txt) + svn_stringbuf_appendcstr(result, txt); + + svn_stringbuf_appendcstr(result, "|"); + } + + svn_stringbuf_appendcstr(result, "\n"); + } + + SQLITE_ERR(sqlite3_reset(stmt)); + SQLITE_ERR(sqlite3_finalize(stmt)); + + SVN_ERR(svn_checksum(&checksum, svn_checksum_sha1, + result->data, result->len, + iterpool)); + + { + const char *hex = svn_checksum_to_cstring(checksum, scratch_pool); + const char *other; + + other = svn_hash_gets(sha_to_query, hex); + if (other) + { + warnings = svn_error_createf(SVN_ERR_TEST_FAILED, warnings, + "Query %s has an identical execution plan as %s", + wc_query_info[i][0], other); + } + else + svn_hash_sets(sha_to_query, hex, wc_query_info[i][0]); + } + } + SQLITE_ERR(sqlite3_close(sdb)); /* Close the DB if ok; otherwise leaked */ + + return warnings; +} + /* Helper to verify a bit of data in the sqlite3 statistics */ static int parse_stat_data(const char *stat) @@ -824,6 +927,15 @@ test_schema_statistics(apr_pool_t *scratch_pool) "VALUES (1, '', '')", NULL, NULL, NULL)); + SQLITE_ERR( + sqlite3_exec(sdb, + "INSERT INTO EXTERNALS (wc_id, local_relpath," + " parent_relpath, repos_id," + " presence, kind, def_local_relpath," + " def_repos_relpath) " + "VALUES (1, 'subdir', 
'', 1, 'normal', 'dir', '', '')", + NULL, NULL, NULL)); + /* These are currently not necessary for query optimization, but it's better to tell Sqlite how we intend to use this table anyway */ SQLITE_ERR( @@ -882,7 +994,62 @@ test_schema_statistics(apr_pool_t *scratch_pool) return SVN_NO_ERROR; } -struct svn_test_descriptor_t test_funcs[] = +/* An SQLite application defined function that allows SQL queries to + use "relpath_depth(local_relpath)". */ +static void relpath_depth_sqlite(sqlite3_context* context, + int argc, + sqlite3_value* values[]) +{ + SVN_ERR_MALFUNCTION_NO_RETURN(); /* STUB! */ +} + +/* Parse all verify/check queries */ +static svn_error_t * +test_verify_parsable(apr_pool_t *scratch_pool) +{ + sqlite3 *sdb; + int i; + + SVN_ERR(create_memory_db(&sdb, scratch_pool)); + + SQLITE_ERR(sqlite3_create_function(sdb, "relpath_depth", 1, SQLITE_ANY, NULL, + relpath_depth_sqlite, NULL, NULL)); + + for (i=STMT_VERIFICATION_TRIGGERS; wc_queries[i]; i++) + { + sqlite3_stmt *stmt; + const char *text = wc_queries[i]; + + /* Some of our statement texts contain multiple queries. We prepare + them all. 
*/ + while (*text != '\0') + { + const char *tail; + int r = sqlite3_prepare_v2(sdb, text, -1, &stmt, &tail); + + if (r != SQLITE_OK) + return svn_error_createf(SVN_ERR_SQLITE_ERROR, NULL, + "Preparing %s failed: %s\n%s", + wc_query_info[i][0], + sqlite3_errmsg(sdb), + text); + + SQLITE_ERR(sqlite3_finalize(stmt)); + + /* Continue after the current statement */ + text = tail; + } + } + + SQLITE_ERR(sqlite3_close(sdb)); /* Close the DB if ok; otherwise leaked */ + + return SVN_NO_ERROR; +} + + +static int max_threads = 1; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_PASS2(test_sqlite_version, @@ -891,7 +1058,13 @@ struct svn_test_descriptor_t test_funcs[] = "queries are parsable"), SVN_TEST_PASS2(test_query_expectations, "test query expectations"), + SVN_TEST_PASS2(test_query_duplicates, + "test query duplicates"), SVN_TEST_PASS2(test_schema_statistics, "test schema statistics"), + SVN_TEST_PASS2(test_verify_parsable, + "verify queries are parsable"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/libsvn_wc/wc-test-queries.h b/subversion/tests/libsvn_wc/wc-test-queries.h new file mode 100644 index 0000000..4b5060c --- /dev/null +++ b/subversion/tests/libsvn_wc/wc-test-queries.h @@ -0,0 +1,112 @@ +/* This file is automatically generated from wc-test-queries.sql and .dist_sandbox/subversion-1.9.7/subversion/tests/libsvn_wc/token-map.h. 
+ * Do not edit this file -- edit the source and rerun gen-make.py */ + +#define STMT_SELECT_NODES_INFO 0 +#define STMT_0_INFO {"STMT_SELECT_NODES_INFO", NULL} +#define STMT_0 \ + "SELECT op_depth, n.presence, n.local_relpath, revision, " \ + " repos_path, file_external, def_local_relpath, moved_to, moved_here, " \ + " properties " \ + "FROM nodes n " \ + "LEFT OUTER JOIN externals e " \ + " ON n.wc_id = e.wc_id " \ + " AND n.local_relpath = e.local_relpath " \ + "WHERE n.wc_id = ?1 " \ + " AND (n.local_relpath = ?2 OR (((n.local_relpath) > (CASE (?2) WHEN '' THEN '' ELSE (?2) || '/' END)) AND ((n.local_relpath) < CASE (?2) WHEN '' THEN X'FFFF' ELSE (?2) || '0' END))) " \ + "" + +#define STMT_SELECT_ACTUAL_INFO 1 +#define STMT_1_INFO {"STMT_SELECT_ACTUAL_INFO", NULL} +#define STMT_1 \ + "SELECT local_relpath " \ + "FROM actual_node " \ + "WHERE wc_id = ?1 " \ + " AND conflict_data is NOT NULL " \ + " AND (local_relpath = ?2 OR (((local_relpath) > (CASE (?2) WHEN '' THEN '' ELSE (?2) || '/' END)) AND ((local_relpath) < CASE (?2) WHEN '' THEN X'FFFF' ELSE (?2) || '0' END))) " \ + "" + +#define STMT_DELETE_NODES 2 +#define STMT_2_INFO {"STMT_DELETE_NODES", NULL} +#define STMT_2 \ + "DELETE FROM nodes; " \ + "" + +#define STMT_INSERT_NODE 3 +#define STMT_3_INFO {"STMT_INSERT_NODE", NULL} +#define STMT_3 \ + "INSERT INTO nodes (local_relpath, op_depth, presence, repos_path, " \ + " revision, parent_relpath, moved_to, moved_here, " \ + " properties, wc_id, repos_id, kind, " \ + " depth) " \ + " VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, 1, " \ + " CASE WHEN ?3 != 'base-deleted' THEN 1 END, " \ + " 'dir', " \ + " CASE WHEN ?3 in ('normal', 'incomplete') " \ + " THEN 'infinity' END) " \ + "" + +#define STMT_DELETE_ACTUAL 4 +#define STMT_4_INFO {"STMT_DELETE_ACTUAL", NULL} +#define STMT_4 \ + "DELETE FROM actual_node; " \ + "" + +#define STMT_INSERT_ACTUAL 5 +#define STMT_5_INFO {"STMT_INSERT_ACTUAL", NULL} +#define STMT_5 \ + "INSERT INTO actual_node (local_relpath, 
parent_relpath, changelist, wc_id) " \ + " VALUES (?1, ?2, ?3, 1) " \ + "" + +#define STMT_ENSURE_EMPTY_PRISTINE 6 +#define STMT_6_INFO {"STMT_ENSURE_EMPTY_PRISTINE", NULL} +#define STMT_6 \ + "INSERT OR IGNORE INTO pristine (checksum, md5_checksum, size, refcount) " \ + " VALUES ('$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709', " \ + " '$md5 $d41d8cd98f00b204e9800998ecf8427e', " \ + " 0, 0) " \ + "" + +#define STMT_NODES_SET_FILE 7 +#define STMT_7_INFO {"STMT_NODES_SET_FILE", NULL} +#define STMT_7 \ + "UPDATE nodes " \ + " SET kind = 'file', " \ + " checksum = '$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709', " \ + " depth = NULL " \ + "WHERE wc_id = 1 and local_relpath = ?1 " \ + "" + +#define STMT_SELECT_ALL_ACTUAL 8 +#define STMT_8_INFO {"STMT_SELECT_ALL_ACTUAL", NULL} +#define STMT_8 \ + "SELECT local_relpath FROM actual_node WHERE wc_id = 1 " \ + "" + +#define WC_TEST_QUERIES_SQL_DECLARE_STATEMENTS(varname) \ + static const char * const varname[] = { \ + STMT_0, \ + STMT_1, \ + STMT_2, \ + STMT_3, \ + STMT_4, \ + STMT_5, \ + STMT_6, \ + STMT_7, \ + STMT_8, \ + NULL \ + } + +#define WC_TEST_QUERIES_SQL_DECLARE_STATEMENT_INFO(varname) \ + static const char * const varname[][2] = { \ + STMT_0_INFO, \ + STMT_1_INFO, \ + STMT_2_INFO, \ + STMT_3_INFO, \ + STMT_4_INFO, \ + STMT_5_INFO, \ + STMT_6_INFO, \ + STMT_7_INFO, \ + STMT_8_INFO, \ + {NULL, NULL} \ + } diff --git a/subversion/tests/libsvn_wc/wc-test-queries.sql b/subversion/tests/libsvn_wc/wc-test-queries.sql new file mode 100644 index 0000000..613819a --- /dev/null +++ b/subversion/tests/libsvn_wc/wc-test-queries.sql @@ -0,0 +1,78 @@ +/* wc-test-queries.sql -- queries used to verify wc metadata from + * the C tests. + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +-- STMT_SELECT_NODES_INFO +SELECT op_depth, n.presence, n.local_relpath, revision, + repos_path, file_external, def_local_relpath, moved_to, moved_here, + properties +FROM nodes n +LEFT OUTER JOIN externals e + ON n.wc_id = e.wc_id + AND n.local_relpath = e.local_relpath +WHERE n.wc_id = ?1 + AND (n.local_relpath = ?2 OR IS_STRICT_DESCENDANT_OF(n.local_relpath, ?2)) + +-- STMT_SELECT_ACTUAL_INFO +SELECT local_relpath +FROM actual_node +WHERE wc_id = ?1 + AND conflict_data is NOT NULL + AND (local_relpath = ?2 OR IS_STRICT_DESCENDANT_OF(local_relpath, ?2)) + +-- STMT_DELETE_NODES +DELETE FROM nodes; + +-- STMT_INSERT_NODE +INSERT INTO nodes (local_relpath, op_depth, presence, repos_path, + revision, parent_relpath, moved_to, moved_here, + properties, wc_id, repos_id, kind, + depth) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, 1, + CASE WHEN ?3 != 'base-deleted' THEN 1 END, + 'dir', + CASE WHEN ?3 in ('normal', 'incomplete') + THEN 'infinity' END) + +-- STMT_DELETE_ACTUAL +DELETE FROM actual_node; + +-- STMT_INSERT_ACTUAL +INSERT INTO actual_node (local_relpath, parent_relpath, changelist, wc_id) + VALUES (?1, ?2, ?3, 1) + +-- STMT_ENSURE_EMPTY_PRISTINE +INSERT OR IGNORE INTO pristine (checksum, md5_checksum, size, refcount) + VALUES ('$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709', + '$md5 
$d41d8cd98f00b204e9800998ecf8427e', + 0, 0) + +-- STMT_NODES_SET_FILE +UPDATE nodes + SET kind = 'file', + checksum = '$sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709', + depth = NULL +WHERE wc_id = 1 and local_relpath = ?1 + +-- STMT_SELECT_ALL_ACTUAL +SELECT local_relpath FROM actual_node WHERE wc_id = 1 + diff --git a/subversion/tests/libsvn_wc/wc-test.c b/subversion/tests/libsvn_wc/wc-test.c index 30eb18a..8910cb0 100644 --- a/subversion/tests/libsvn_wc/wc-test.c +++ b/subversion/tests/libsvn_wc/wc-test.c @@ -23,6 +23,9 @@ #include <apr_pools.h> #include <apr_general.h> +#include <apr_md5.h> + +#define SVN_DEPRECATED #include "svn_types.h" #include "svn_io.h" @@ -71,7 +74,7 @@ struct base_origin_t }; /* Data for testing node_get_base and node_get_origin. */ -struct base_origin_t base_origin_subtests[] = +static struct base_origin_t base_origin_subtests[] = { /* file copied onto nothing */ { "A/C/copy1", -1, "iota", {"iota", 1} }, @@ -138,7 +141,6 @@ test_node_get_base(const svn_test_opts_t *opts, apr_pool_t *pool) NULL, b->wc_ctx, local_abspath, TRUE /* ignore_enoent */, - FALSE /* show_hidden */, b->pool, b->pool)); SVN_TEST_ASSERT(revision == subtest->base_rev); if (SVN_IS_VALID_REVNUM(subtest->base_rev)) @@ -181,6 +183,7 @@ test_node_get_origin(const svn_test_opts_t *opts, apr_pool_t *pool) SVN_ERR(svn_wc__node_get_origin(NULL, &revision, &repos_relpath, &repos_root_url, &repos_uuid, NULL, + NULL, b->wc_ctx, local_abspath, FALSE, b->pool, b->pool)); SVN_TEST_ASSERT(revision == subtest->origin.rev); @@ -304,11 +307,139 @@ test_externals_parse_erratic(apr_pool_t *pool) return SVN_NO_ERROR; } +static svn_error_t * +test_legacy_commit1(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + svn_wc_adm_access_t *adm_access; + const char *lambda; + + SVN_ERR(svn_test__sandbox_create(&b, "legacy_commit1", opts, pool)); + SVN_ERR(sbox_add_and_commit_greek_tree(&b)); + + SVN_ERR(sbox_wc_copy(&b, "A", "A_copied")); + + lambda = sbox_wc_path(&b, 
"A_copied/B/lambda"); + + + SVN_ERR(svn_io_remove_file2(lambda, FALSE, pool)); + SVN_ERR(svn_io_copy_file(sbox_wc_path(&b, "iota"), lambda, FALSE, pool)); + SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, b.wc_abspath, TRUE, -1, + NULL, NULL, pool)); + + { + svn_wc_status2_t *status; + + SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool)); + + SVN_TEST_ASSERT(status != NULL); + SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified); + SVN_TEST_ASSERT(status->copied == TRUE); + } + + /* Simulate a very old style svn ci . -m "QQQ" on the WC root */ + SVN_ERR(svn_wc_process_committed4(sbox_wc_path(&b, "A_copied"), adm_access, + TRUE, 12, "2014-10-01T19:00:50.966679Z", + "me", NULL, TRUE, TRUE, + NULL, pool)); + + { + unsigned char digest[APR_MD5_DIGESTSIZE]; + + /* Use the fact that iota has the same checksum to ease committing */ + + SVN_ERR(svn_io_file_checksum (digest, lambda, pool)); + + SVN_ERR(svn_wc_process_committed4(lambda, adm_access, + TRUE, 12, "2014-10-01T19:00:50.966679Z", + "me", NULL, TRUE, TRUE, + digest, pool)); + } + + { + svn_wc_status2_t *status; + + SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool)); + + /* Node is still modified, as we didn't change the text base! 
*/ + SVN_TEST_ASSERT(status != NULL); + SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal); + SVN_TEST_ASSERT(status->copied == FALSE); + } + + return SVN_NO_ERROR; +} + +static svn_error_t * +test_legacy_commit2(const svn_test_opts_t *opts, apr_pool_t *pool) +{ + svn_test__sandbox_t b; + svn_wc_adm_access_t *adm_access; + const char *lambda; + svn_wc_committed_queue_t *queue; + + SVN_ERR(svn_test__sandbox_create(&b, "legacy_commit2", opts, pool)); + SVN_ERR(sbox_add_and_commit_greek_tree(&b)); + + SVN_ERR(sbox_wc_copy(&b, "A", "A_copied")); + + lambda = sbox_wc_path(&b, "A_copied/B/lambda"); + + SVN_ERR(svn_io_remove_file2(lambda, FALSE, pool)); + SVN_ERR(svn_io_copy_file(sbox_wc_path(&b, "iota"), lambda, FALSE, pool)); + + SVN_ERR(svn_wc_adm_open3(&adm_access, NULL, b.wc_abspath, TRUE, -1, + NULL, NULL, pool)); + + { + svn_wc_status2_t *status; + + SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool)); + + SVN_TEST_ASSERT(status != NULL); + SVN_TEST_ASSERT(status->text_status == svn_wc_status_modified); + SVN_TEST_ASSERT(status->copied == TRUE); + } + + /* Simulate an old style svn ci . -m "QQQ" on the WC root */ + queue = svn_wc_committed_queue_create(pool); + SVN_ERR(svn_wc_queue_committed(&queue, sbox_wc_path(&b, "A_copied"), adm_access, + TRUE, NULL, FALSE, FALSE, NULL, pool)); + { + unsigned char digest[APR_MD5_DIGESTSIZE]; + + /* Use the fact that iota has the same checksum to ease committing */ + + SVN_ERR(svn_io_file_checksum(digest, lambda, pool)); + + SVN_ERR(svn_wc_queue_committed(&queue, lambda, adm_access, FALSE, NULL, + FALSE, FALSE, digest, pool)); + } + + SVN_ERR(svn_wc_process_committed_queue(queue, adm_access, + 12, "2014-10-01T19:00:50.966679Z", + "me", pool)); + + { + svn_wc_status2_t *status; + + SVN_ERR(svn_wc_status2(&status, lambda, adm_access, pool)); + + /* Node is still modified, as we didn't change the text base! 
*/ + SVN_TEST_ASSERT(status != NULL); + SVN_TEST_ASSERT(status->text_status == svn_wc_status_normal); + SVN_TEST_ASSERT(status->copied == FALSE); + } + + return SVN_NO_ERROR; +} /* ---------------------------------------------------------------------- */ /* The list of test functions */ -struct svn_test_descriptor_t test_funcs[] = +static int max_threads = 2; + +static struct svn_test_descriptor_t test_funcs[] = { SVN_TEST_NULL, SVN_TEST_OPTS_PASS(test_node_get_base, @@ -319,5 +450,11 @@ struct svn_test_descriptor_t test_funcs[] = "test svn_wc_parse_externals_description3"), SVN_TEST_PASS2(test_externals_parse_erratic, "parse erratic externals definition"), + SVN_TEST_OPTS_PASS(test_legacy_commit1, + "test legacy commit1"), + SVN_TEST_OPTS_PASS(test_legacy_commit2, + "test legacy commit2"), SVN_TEST_NULL }; + +SVN_TEST_MAIN diff --git a/subversion/tests/svn_test.h b/subversion/tests/svn_test.h index ab3a204..23e002e 100644 --- a/subversion/tests/svn_test.h +++ b/subversion/tests/svn_test.h @@ -27,6 +27,8 @@ #define SVN_DEPRECATED #endif /* ! SVN_ENABLE_DEPRECATION_WARNINGS_IN_TESTS */ +#include <stdio.h> + #include <apr_pools.h> #include "svn_delta.h" @@ -34,6 +36,7 @@ #include "svn_types.h" #include "svn_error.h" #include "svn_string.h" +#include "svn_auth.h" #ifdef __cplusplus extern "C" { @@ -53,6 +56,23 @@ extern "C" { #expr, __FILE__, __LINE__); \ } while (0) +/** + * Macro for testing assumptions when the context does not allow + * returning an svn_error_t*. + * + * Will write to stderr and cause a segfault if EXPR is false. + */ +#define SVN_TEST_ASSERT_NO_RETURN(expr) \ + do { \ + if (!(expr)) \ + { \ + unsigned int z_e_r_o_p_a_g_e__; \ + fprintf(stderr, "TEST ASSERTION FAILED: %s\n", #expr); \ + z_e_r_o_p_a_g_e__ = *(volatile unsigned int*)0; \ + *(volatile unsigned int*)0 = z_e_r_o_p_a_g_e__; \ + } \ + } while (0) + /** Handy macro for testing an expected svn_error_t return value. * EXPECTED must be a real error (neither SVN_NO_ERROR nor APR_SUCCESS). 
* The error returned by EXPR will be cleared. @@ -63,12 +83,13 @@ extern "C" { SVN_ERR_ASSERT((expected)); \ if (err__ == SVN_NO_ERROR || err__->apr_err != (expected)) \ return err__ ? svn_error_createf(SVN_ERR_TEST_FAILED, err__, \ - "Expected error %d but got %d", \ - (expected), \ - err__->apr_err) \ + "Expected error %s but got %s", \ + svn_error_symbolic_name(expected), \ + svn_error_symbolic_name( \ + err__->apr_err)) \ : svn_error_createf(SVN_ERR_TEST_FAILED, err__, \ - "Expected error %d but got %s", \ - (expected), \ + "Expected error %s but got %s", \ + svn_error_symbolic_name(expected), \ "SVN_NO_ERROR"); \ svn_error_clear(err__); \ } while (0) @@ -77,7 +98,7 @@ extern "C" { * The result must be neither SVN_NO_ERROR nor SVN_ERR_ASSERTION_FAIL. * The error returned by EXPR will be cleared. */ -#define SVN_TEST__ASSERT_ANY_ERROR(expr) \ +#define SVN_TEST_ASSERT_ANY_ERROR(expr) \ do { \ svn_error_t *err__ = (expr); \ if (err__ == SVN_NO_ERROR || err__->apr_err == SVN_ERR_ASSERTION_FAIL)\ @@ -110,21 +131,46 @@ extern "C" { tst_str2, tst_str1, __FILE__, __LINE__); \ } while(0) + /** Handy macro for testing integer equality. + */ +#define SVN_TEST_INT_ASSERT(expr, expected_expr) \ + do { \ + apr_int64_t tst_int1 = (expr); \ + apr_int64_t tst_int2 = (expected_expr); \ + \ + if (tst_int1 != tst_int2) \ + return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, \ + "Integers not equal\n" \ + " Expected: %" APR_INT64_T_FMT "\n" \ + " Found: %" APR_INT64_T_FMT "\n" \ + "\n at %s:%d", \ + tst_int2, tst_int1, __FILE__, __LINE__); \ + } while(0) + /* Baton for any arguments that need to be passed from main() to svn * test functions. */ typedef struct svn_test_opts_t { + /* The name of the application (to generate unique names) */ + const char *prog_name; /* Description of the fs backend that should be used for testing. */ const char *fs_type; /* Config file. */ const char *config_file; /* Source dir. 
*/ const char *srcdir; + /* Repository dir: temporary directory to create repositories in as subdir */ + const char *repos_dir; + /* Repository url: The url to access REPOS_DIR as */ + const char *repos_url; + /* Repository template: pre-created repository to copy for tests */ + const char *repos_template; /* Minor version to use for servers and FS backends, or zero to use the current latest version. */ int server_minor_version; + svn_boolean_t verbose; /* Add future "arguments" here. */ } svn_test_opts_t; @@ -135,6 +181,11 @@ typedef svn_error_t* (*svn_test_driver2_t)(apr_pool_t *pool); typedef svn_error_t* (*svn_test_driver_opts_t)(const svn_test_opts_t *opts, apr_pool_t *pool); +/* Prototype for test predicate functions. */ +typedef svn_boolean_t (*svn_test_predicate_func_t)(const svn_test_opts_t *opts, + const char *predicate_value, + apr_pool_t *pool); + /* Test modes. */ enum svn_test_mode_t { @@ -144,6 +195,23 @@ enum svn_test_mode_t svn_test_all }; +/* Structure for runtime test predicates. */ +struct svn_test_predicate_t +{ + /* The predicate function. */ + svn_test_predicate_func_t func; + + /* The value that the predicate function tests. */ + const char *value; + + /* The test mode that's used if the predicate matches. */ + enum svn_test_mode_t alternate_mode; + + /* Description for the test log */ + const char *description; +}; + + /* Each test gets a test descriptor, holding the function and other * associated data. */ @@ -163,12 +231,29 @@ struct svn_test_descriptor_t /* An optional description of a work-in-progress test. */ const char *wip; + + /* An optional runtiume predicate. */ + struct svn_test_predicate_t predicate; }; /* All Subversion test programs include an array of svn_test_descriptor_t's * (all of our sub-tests) that begins and ends with a SVN_TEST_NULL entry. + * This descriptor must be passed to the svn_test_main function. + * + * MAX_THREADS is the number of concurrent tests to run. Set to 1 if + * all tests must be executed serially. 
Numbers less than 1 mean + * "unbounded". */ -extern struct svn_test_descriptor_t test_funcs[]; +int svn_test_main(int argc, const char *argv[], int max_threads, + struct svn_test_descriptor_t *test_funcs); + +/* Boilerplate for the main function for each test program. */ +#define SVN_TEST_MAIN \ + int main(int argc, const char *argv[]) \ + { \ + return svn_test_main(argc, argv, \ + max_threads, test_funcs); \ + } /* A null initializer for the test descriptor. */ #define SVN_TEST_NULL {0} @@ -192,6 +277,8 @@ extern struct svn_test_descriptor_t test_funcs[]; #define SVN_TEST_OPTS_XFAIL(func, msg) {svn_test_xfail, NULL, func, msg} #define SVN_TEST_OPTS_XFAIL_COND(func, p, msg) \ {(p) ? svn_test_xfail : svn_test_pass, NULL, func, msg} +#define SVN_TEST_OPTS_XFAIL_OTOH(func, msg, predicate) \ + {svn_test_xfail, NULL, func, msg, NULL, predicate} #define SVN_TEST_OPTS_SKIP(func, p, msg) \ {(p) ? svn_test_skip : svn_test_pass, NULL, func, msg} @@ -205,7 +292,6 @@ extern struct svn_test_descriptor_t test_funcs[]; #define SVN_TEST_OPTS_WIMP_COND(func, p, msg, wip) \ {(p) ? svn_test_xfail : svn_test_pass, NULL, func, msg, wip} - /* Return a pseudo-random number based on SEED, and modify SEED. * @@ -240,6 +326,52 @@ svn_test__tree_t; extern const svn_test__tree_entry_t svn_test__greek_tree_nodes[21]; +/* Returns a path to BASENAME within the transient data area for the + current test. */ +const char * +svn_test_data_path(const char* basename, apr_pool_t *result_pool); + + +/* Some tests require the --srcdir option and should use this function + * to get it. If not provided, print a warning and attempt to run the + * tests under the assumption that --srcdir is the current directory. 
*/ +svn_error_t * +svn_test_get_srcdir(const char **srcdir, + const svn_test_opts_t *opts, + apr_pool_t *pool); + +/* Initializes a standard auth baton for accessing the repositories */ +svn_error_t * +svn_test__init_auth_baton(svn_auth_baton_t **baton, + apr_pool_t *result_pool); + + +/* + * Test predicates + */ + +#define SVN_TEST_PASS_IF_FS_TYPE_IS(fs_type) \ + { svn_test__fs_type_is, fs_type, svn_test_pass, \ + "PASS if fs-type = " fs_type } + +#define SVN_TEST_PASS_IF_FS_TYPE_IS_NOT(fs_type) \ + { svn_test__fs_type_not, fs_type, svn_test_pass, \ + "PASS if fs-type != " fs_type } + +/* Return TRUE if the fs-type in OPTS matches PREDICATE_VALUE. */ +svn_boolean_t +svn_test__fs_type_is(const svn_test_opts_t *opts, + const char *predicate_value, + apr_pool_t *pool); + + +/* Return TRUE if the fs-type in OPTS does not matches PREDICATE_VALUE. */ +svn_boolean_t +svn_test__fs_type_not(const svn_test_opts_t *opts, + const char *predicate_value, + apr_pool_t *pool); + + #ifdef __cplusplus } #endif /* __cplusplus */ diff --git a/subversion/tests/svn_test_fs.c b/subversion/tests/svn_test_fs.c index 2dcb096..2d62f64 100644 --- a/subversion/tests/svn_test_fs.c +++ b/subversion/tests/svn_test_fs.c @@ -75,13 +75,14 @@ make_fs_config(const char *fs_type, apr_pool_t *pool) { apr_hash_t *fs_config = apr_hash_make(pool); - apr_hash_set(fs_config, SVN_FS_CONFIG_BDB_TXN_NOSYNC, - APR_HASH_KEY_STRING, "1"); - apr_hash_set(fs_config, SVN_FS_CONFIG_FS_TYPE, - APR_HASH_KEY_STRING, - fs_type); + + svn_hash_sets(fs_config, SVN_FS_CONFIG_BDB_TXN_NOSYNC, "1"); + svn_hash_sets(fs_config, SVN_FS_CONFIG_BDB_LOG_AUTOREMOVE, "1"); + svn_hash_sets(fs_config, SVN_FS_CONFIG_FS_TYPE, fs_type); if (server_minor_version) { + svn_hash_sets(fs_config, SVN_FS_CONFIG_COMPATIBLE_VERSION, + apr_psprintf(pool, "1.%d.0", server_minor_version)); if (server_minor_version == 6 || server_minor_version == 7) svn_hash_sets(fs_config, SVN_FS_CONFIG_PRE_1_8_COMPATIBLE, "1"); else if (server_minor_version == 5) @@ 
-103,28 +104,20 @@ create_fs(svn_fs_t **fs_p, const char *name, const char *fs_type, int server_minor_version, + apr_hash_t *overlay_fs_config, apr_pool_t *pool) { - apr_finfo_t finfo; apr_hash_t *fs_config = make_fs_config(fs_type, server_minor_version, pool); + if (overlay_fs_config) + fs_config = apr_hash_overlay(pool, overlay_fs_config, fs_config); + /* If there's already a repository named NAME, delete it. Doing things this way means that repositories stick around after a failure for postmortem analysis, but also that tests can be re-run without cleaning out the repositories created by prior runs. */ - if (apr_stat(&finfo, name, APR_FINFO_TYPE, pool) == APR_SUCCESS) - { - if (finfo.filetype == APR_DIR) - SVN_ERR_W(svn_io_remove_dir2(name, TRUE, NULL, NULL, pool), - apr_psprintf(pool, - "cannot create fs '%s' there is already " - "a directory of that name", name)); - else - return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, - "cannot create fs '%s' there is already " - "a file of that name", name); - } + SVN_ERR(svn_io_remove_dir2(name, TRUE, NULL, NULL, pool)); SVN_ERR(svn_fs_create(fs_p, name, fs_config, pool)); if (! *fs_p) @@ -144,21 +137,36 @@ create_fs(svn_fs_t **fs_p, * copy that file into the filesystem FS and set *MUST_REOPEN to TRUE, else * set *MUST_REOPEN to FALSE. */ static svn_error_t * -maybe_install_fsfs_conf(svn_fs_t *fs, - const svn_test_opts_t *opts, - svn_boolean_t *must_reopen, - apr_pool_t *pool) +maybe_install_fs_conf(svn_fs_t *fs, + const svn_test_opts_t *opts, + svn_boolean_t *must_reopen, + apr_pool_t *pool) { *must_reopen = FALSE; - if (strcmp(opts->fs_type, "fsfs") != 0 || ! opts->config_file) + if (! 
opts->config_file) return SVN_NO_ERROR; - *must_reopen = TRUE; - return svn_io_copy_file(opts->config_file, - svn_path_join(svn_fs_path(fs, pool), - "fsfs.conf", pool), - FALSE /* copy_perms */, - pool); + if (strcmp(opts->fs_type, "fsfs") == 0) + { + *must_reopen = TRUE; + return svn_io_copy_file(opts->config_file, + svn_path_join(svn_fs_path(fs, pool), + "fsfs.conf", pool), + FALSE /* copy_perms */, + pool); + } + + if (strcmp(opts->fs_type, "fsx") == 0) + { + *must_reopen = TRUE; + return svn_io_copy_file(opts->config_file, + svn_path_join(svn_fs_path(fs, pool), + "fsx.conf", pool), + FALSE /* copy_perms */, + pool); + } + + return SVN_NO_ERROR; } @@ -168,80 +176,151 @@ svn_test__create_bdb_fs(svn_fs_t **fs_p, const svn_test_opts_t *opts, apr_pool_t *pool) { - return create_fs(fs_p, name, "bdb", opts->server_minor_version, pool); + return create_fs(fs_p, name, "bdb", opts->server_minor_version, NULL, pool); } svn_error_t * -svn_test__create_fs(svn_fs_t **fs_p, - const char *name, - const svn_test_opts_t *opts, - apr_pool_t *pool) +svn_test__create_fs2(svn_fs_t **fs_p, + const char *name, + const svn_test_opts_t *opts, + apr_hash_t *fs_config, + apr_pool_t *pool) { svn_boolean_t must_reopen; - SVN_ERR(create_fs(fs_p, name, opts->fs_type, - opts->server_minor_version, pool)); + SVN_ERR(create_fs(fs_p, name, opts->fs_type, opts->server_minor_version, + fs_config, pool)); - SVN_ERR(maybe_install_fsfs_conf(*fs_p, opts, &must_reopen, pool)); + SVN_ERR(maybe_install_fs_conf(*fs_p, opts, &must_reopen, pool)); if (must_reopen) { - SVN_ERR(svn_fs_open(fs_p, name, NULL, pool)); + SVN_ERR(svn_fs_open2(fs_p, name, NULL, pool, pool)); svn_fs_set_warning_func(*fs_p, fs_warning_handler, NULL); } return SVN_NO_ERROR; } +svn_error_t * +svn_test__create_fs(svn_fs_t **fs_p, + const char *name, + const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + return svn_test__create_fs2(fs_p, name, opts, NULL, pool); +} svn_error_t * -svn_test__create_repos(svn_repos_t **repos_p, - const char 
*name, - const svn_test_opts_t *opts, - apr_pool_t *pool) +svn_test__create_repos2(svn_repos_t **repos_p, + const char **repos_url, + const char **repos_dirent, + const char *name, + const svn_test_opts_t *opts, + apr_pool_t *result_pool, + apr_pool_t *scratch_pool) { - apr_finfo_t finfo; svn_repos_t *repos; svn_boolean_t must_reopen; + const char *repos_abspath; + apr_pool_t *repos_pool = repos_p ? result_pool : scratch_pool; + svn_boolean_t init_svnserve = FALSE; apr_hash_t *fs_config = make_fs_config(opts->fs_type, - opts->server_minor_version, pool); + opts->server_minor_version, + repos_pool); + + if (repos_url && opts->repos_dir && opts->repos_url) + { + name = apr_psprintf(scratch_pool, "%s-%s", opts->prog_name, + svn_dirent_basename(name, NULL)); + + repos_abspath = svn_dirent_join(opts->repos_dir, name, scratch_pool); + + SVN_ERR(svn_dirent_get_absolute(&repos_abspath, repos_abspath, + scratch_pool)); + + SVN_ERR(svn_io_make_dir_recursively(repos_abspath, scratch_pool)); + + *repos_url = svn_path_url_add_component2(opts->repos_url, name, + result_pool); + + if (strstr(opts->repos_url, "svn://")) + init_svnserve = TRUE; + } + else + { + SVN_ERR(svn_dirent_get_absolute(&repos_abspath, name, scratch_pool)); + + if (repos_url) + SVN_ERR(svn_uri_get_file_url_from_dirent(repos_url, repos_abspath, + result_pool)); + } /* If there's already a repository named NAME, delete it. Doing things this way means that repositories stick around after a failure for postmortem analysis, but also that tests can be re-run without cleaning out the repositories created by prior runs. 
*/ - if (apr_stat(&finfo, name, APR_FINFO_TYPE, pool) == APR_SUCCESS) - { - if (finfo.filetype == APR_DIR) - SVN_ERR_W(svn_io_remove_dir2(name, TRUE, NULL, NULL, pool), - apr_psprintf(pool, - "cannot create repos '%s' there is already " - "a directory of that name", name)); - else - return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, - "there is already a file named '%s'", name); - } + SVN_ERR(svn_io_remove_dir2(repos_abspath, TRUE, NULL, NULL, scratch_pool)); - SVN_ERR(svn_repos_create(&repos, name, NULL, NULL, NULL, - fs_config, pool)); + SVN_ERR(svn_repos_create(&repos, repos_abspath, NULL, NULL, NULL, + fs_config, repos_pool)); /* Register this repo for cleanup. */ - svn_test_add_dir_cleanup(name); + svn_test_add_dir_cleanup(repos_abspath); - SVN_ERR(maybe_install_fsfs_conf(svn_repos_fs(repos), opts, &must_reopen, - pool)); + SVN_ERR(maybe_install_fs_conf(svn_repos_fs(repos), opts, &must_reopen, + scratch_pool)); if (must_reopen) { - SVN_ERR(svn_repos_open2(&repos, name, NULL, pool)); - svn_fs_set_warning_func(svn_repos_fs(repos), fs_warning_handler, NULL); + SVN_ERR(svn_repos_open3(&repos, repos_abspath, NULL, repos_pool, + scratch_pool)); } - *repos_p = repos; + svn_fs_set_warning_func(svn_repos_fs(repos), fs_warning_handler, NULL); + + if (init_svnserve) + { + const char *cfg; + const char *pwd; + + cfg = svn_dirent_join(repos_abspath, "conf/svnserve.conf", scratch_pool); + SVN_ERR(svn_io_remove_file2(cfg, FALSE, scratch_pool)); + SVN_ERR(svn_io_file_create(cfg, + "[general]\n" + "auth-access = write\n" + "password-db = passwd\n", + scratch_pool)); + + pwd = svn_dirent_join(repos_abspath, "conf/passwd", scratch_pool); + SVN_ERR(svn_io_remove_file2(pwd, FALSE, scratch_pool)); + SVN_ERR(svn_io_file_create(pwd, + "[users]\n" + "jrandom = rayjandom\n" + "jconstant = rayjandom\n", + scratch_pool)); + } + + if (repos_p) + *repos_p = repos; + if (repos_dirent) + *repos_dirent = apr_pstrdup(result_pool, repos_abspath); + return SVN_NO_ERROR; } svn_error_t * 
+svn_test__create_repos(svn_repos_t **repos_p, + const char *name, + const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + return svn_error_trace( + svn_test__create_repos2(repos_p, NULL, NULL, name, + opts, pool, pool)); +} + +svn_error_t * svn_test__stream_to_string(svn_stringbuf_t **string, svn_stream_t *stream, apr_pool_t *pool) @@ -264,7 +343,7 @@ svn_test__stream_to_string(svn_stringbuf_t **string, do { len = sizeof(buf); - SVN_ERR(svn_stream_read(stream, buf, &len)); + SVN_ERR(svn_stream_read_full(stream, buf, &len)); /* Now copy however many bytes were *actually* read into str. */ svn_stringbuf_appendbytes(str, buf, len); @@ -355,6 +434,8 @@ get_dir_entries(apr_hash_t *tree_entries, } +/* Verify that PATH under ROOT is: a directory if contents is NULL; + a file with contents CONTENTS otherwise. */ static svn_error_t * validate_tree_entry(svn_fs_root_t *root, const char *path, @@ -363,10 +444,19 @@ validate_tree_entry(svn_fs_root_t *root, { svn_stream_t *rstream; svn_stringbuf_t *rstring; - svn_boolean_t is_dir; + svn_node_kind_t kind; + svn_boolean_t is_dir, is_file; - /* Verify that this is the expected type of node */ + /* Verify that node types are reported consistently. */ + SVN_ERR(svn_fs_check_path(&kind, root, path, pool)); SVN_ERR(svn_fs_is_dir(&is_dir, root, path, pool)); + SVN_ERR(svn_fs_is_file(&is_file, root, path, pool)); + + SVN_TEST_ASSERT(!is_dir || kind == svn_node_dir); + SVN_TEST_ASSERT(!is_file || kind == svn_node_file); + SVN_TEST_ASSERT(is_dir || is_file); + + /* Verify that this is the expected type of node */ if ((!is_dir && !contents) || (is_dir && contents)) return svn_error_createf (SVN_ERR_FS_GENERAL, NULL, @@ -376,10 +466,17 @@ validate_tree_entry(svn_fs_root_t *root, /* Verify that the contents are as expected (files only) */ if (! is_dir) { + svn_stringbuf_t *expected = svn_stringbuf_create(contents, pool); + + /* File lengths. 
*/ + svn_filesize_t length; + SVN_ERR(svn_fs_file_length(&length, root, path, pool)); + SVN_TEST_ASSERT(expected->len == length); + + /* Text contents. */ SVN_ERR(svn_fs_file_contents(&rstream, root, path, pool)); SVN_ERR(svn_test__stream_to_string(&rstring, rstream, pool)); - if (! svn_stringbuf_compare(rstring, - svn_stringbuf_create(contents, pool))) + if (! svn_stringbuf_compare(rstring, expected)) return svn_error_createf (SVN_ERR_FS_GENERAL, NULL, "node '%s' in tree had unexpected contents", @@ -409,6 +506,9 @@ svn_test__validate_tree(svn_fs_root_t *root, apr_hash_index_t *hi; int i; + /* There should be no entry with this name. */ + const char *na_name = "es-vee-en"; + /* Create a hash for storing our expected entries */ expected_entries = apr_hash_make(subpool); @@ -497,6 +597,23 @@ svn_test__validate_tree(svn_fs_root_t *root, svn_stringbuf_appendcstr(extra_entries, "\n"); } + /* Test that non-existent paths will not be found. + * Skip this test if somebody sneakily added NA_NAME. */ + if (!svn_hash_gets(expected_entries, na_name)) + { + svn_node_kind_t kind; + svn_boolean_t is_dir, is_file; + + /* Verify that the node is reported as "n/a". 
*/ + SVN_ERR(svn_fs_check_path(&kind, root, na_name, subpool)); + SVN_ERR(svn_fs_is_dir(&is_dir, root, na_name, subpool)); + SVN_ERR(svn_fs_is_file(&is_file, root, na_name, subpool)); + + SVN_TEST_ASSERT(kind == svn_node_none); + SVN_TEST_ASSERT(!is_file); + SVN_TEST_ASSERT(!is_dir); + } + if (missing_entries || extra_entries || corrupt_entries) { return svn_error_createf @@ -530,23 +647,23 @@ svn_test__validate_changes(svn_fs_root_t *root, { int i; for (i=0, hi = apr_hash_first(pool, expected); hi; hi = apr_hash_next(hi)) - SVN_DBG(("expected[%d] = '%s'\n", i++, svn__apr_hash_index_key(hi))); + SVN_DBG(("expected[%d] = '%s'\n", i++, apr_hash_this_key(hi))); for (i=0, hi = apr_hash_first(pool, actual); hi; hi = apr_hash_next(hi)) - SVN_DBG(("actual[%d] = '%s'\n", i++, svn__apr_hash_index_key(hi))); + SVN_DBG(("actual[%d] = '%s'\n", i++, apr_hash_this_key(hi))); } #endif for (hi = apr_hash_first(pool, expected); hi; hi = apr_hash_next(hi)) - if (NULL == svn_hash_gets(actual, svn__apr_hash_index_key(hi))) + if (NULL == svn_hash_gets(actual, apr_hash_this_key(hi))) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "Path '%s' missing from actual changed-paths", - svn__apr_hash_index_key(hi)); + (const char *)apr_hash_this_key(hi)); for (hi = apr_hash_first(pool, actual); hi; hi = apr_hash_next(hi)) - if (NULL == svn_hash_gets(expected, svn__apr_hash_index_key(hi))) + if (NULL == svn_hash_gets(expected, apr_hash_this_key(hi))) return svn_error_createf(SVN_ERR_TEST_FAILED, NULL, "Path '%s' missing from expected changed-paths", - svn__apr_hash_index_key(hi)); + (const char *)apr_hash_this_key(hi)); return SVN_NO_ERROR; } diff --git a/subversion/tests/svn_test_fs.h b/subversion/tests/svn_test_fs.h index bbbbfb8..592243b 100644 --- a/subversion/tests/svn_test_fs.h +++ b/subversion/tests/svn_test_fs.h @@ -57,7 +57,16 @@ svn_test__create_bdb_fs(svn_fs_t **fs_p, /* Create a filesystem based on OPTS in a subdir NAME and return a new - FS object which points to it. 
*/ + FS object which points to it. Override the default test filesystem + config with values from FS_CONFIG. */ +svn_error_t * +svn_test__create_fs2(svn_fs_t **fs_p, + const char *name, + const svn_test_opts_t *opts, + apr_hash_t *fs_config, + apr_pool_t *pool); + +/* The same as svn_test__create_fs2() but with FS_CONFIG set to NULL. */ svn_error_t * svn_test__create_fs(svn_fs_t **fs_p, const char *name, @@ -73,6 +82,19 @@ svn_test__create_repos(svn_repos_t **repos_p, const svn_test_opts_t *opts, apr_pool_t *pool); +/* Create a repository with a filesystem based on OPTS in a subdir NAME + and return optionally new REPOS object, the directory it was created in + and/or the url of the repository . */ +svn_error_t * +svn_test__create_repos2(svn_repos_t **repos_p, + const char **repos_url, + const char **repos_dirent, + const char *name, + const svn_test_opts_t *opts, + apr_pool_t *result_pool, + apr_pool_t *scratch_pool); + + /* Read all data from a generic read STREAM, and return it in STRING. Allocate the svn_stringbuf_t in POOL. (All data in STRING will be dup'ed from STREAM using POOL too.) */ diff --git a/subversion/tests/svn_test_main.c b/subversion/tests/svn_test_main.c index 15d460c..46c0c45 100644 --- a/subversion/tests/svn_test_main.c +++ b/subversion/tests/svn_test_main.c @@ -45,16 +45,30 @@ #include "svn_path.h" #include "svn_ctype.h" #include "svn_utf.h" +#include "svn_version.h" #include "private/svn_cmdline_private.h" +#include "private/svn_atomic.h" +#include "private/svn_mutex.h" +#include "private/svn_sqlite.h" #include "svn_private_config.h" +#if APR_HAS_THREADS +# include <apr_thread_proc.h> +#endif + /* Some Subversion test programs may want to parse options in the argument list, so we remember it here. */ +extern int test_argc; +extern const char **test_argv; int test_argc; const char **test_argv; +/* Many tests write to disk. Instead of writing to the current + directory, they should use this path as the root of the test data + area. 
*/ +static const char *data_path; /* Test option: Print more output */ static svn_boolean_t verbose_mode = FALSE; @@ -70,10 +84,13 @@ static svn_boolean_t allow_segfaults = FALSE; /* Test option: Limit testing to a given mode (i.e. XFail, Skip, Pass, All). */ -enum svn_test_mode_t mode_filter = svn_test_all; +static enum svn_test_mode_t mode_filter = svn_test_all; + +/* Test option: Allow concurrent execution of tests */ +static svn_boolean_t parallel = FALSE; /* Option parsing enums and structures */ -enum { +enum test_options_e { help_opt = SVN_OPT_FIRST_LONGOPT_ID, cleanup_opt, fstype_opt, @@ -84,7 +101,13 @@ enum { server_minor_version_opt, allow_segfault_opt, srcdir_opt, - mode_filter_opt + reposdir_opt, + reposurl_opt, + repostemplate_opt, + mode_filter_opt, + sqlite_log_opt, + parallel_opt, + fsfs_version_opt }; static const apr_getopt_option_t cl_options[] = @@ -97,6 +120,8 @@ static const apr_getopt_option_t cl_options[] = N_("specify test config file ARG")}, {"fs-type", fstype_opt, 1, N_("specify a filesystem backend type ARG")}, + {"fsfs-version", fsfs_version_opt, 1, + N_("specify the FSFS version ARG")}, {"list", list_opt, 0, N_("lists all the tests with their short description")}, {"mode-filter", mode_filter_opt, 1, @@ -112,7 +137,17 @@ static const apr_getopt_option_t cl_options[] = {"allow-segfaults", allow_segfault_opt, 0, N_("don't trap seg faults (useful for debugging)")}, {"srcdir", srcdir_opt, 1, - N_("source directory")}, + N_("directory which contains test's C source files")}, + {"repos-dir", reposdir_opt, 1, + N_("directory to create repositories in")}, + {"repos-url", reposurl_opt, 1, + N_("the url to access reposdir as")}, + {"repos-template",repostemplate_opt, 1, + N_("the repository to use as template")}, + {"sqlite-logging", sqlite_log_opt, 0, + N_("enable SQLite logging")}, + {"parallel", parallel_opt, 0, + N_("allow concurrent execution of tests")}, {0, 0, 0, 0} }; @@ -123,8 +158,50 @@ static const apr_getopt_option_t cl_options[] = 
/* When non-zero, don't remove test directories */ static svn_boolean_t skip_cleanup = FALSE; -/* All cleanup actions are registered as cleanups on this pool. */ +/* All cleanup actions are registered as cleanups on the cleanup_pool, + * which may be thread-specific. */ +#if APR_HAS_THREADS +/* The thread-local data key for the cleanup pool. */ +static apr_threadkey_t *cleanup_pool_key = NULL; + +/* No-op destructor for apr_threadkey_private_create(). */ +static void null_threadkey_dtor(void *stuff) {} + +/* Set the thread-specific cleanup pool. */ +static void set_cleanup_pool(apr_pool_t *pool) +{ + apr_status_t status = apr_threadkey_private_set(pool, cleanup_pool_key); + if (status) + { + printf("apr_threadkey_private_set() failed with code %ld.\n", + (long)status); + exit(1); + } +} + +/* Get the thread-specific cleanup pool. */ +static apr_pool_t *get_cleanup_pool() +{ + void *data; + apr_status_t status = apr_threadkey_private_get(&data, cleanup_pool_key); + if (status) + { + printf("apr_threadkey_private_get() failed with code %ld.\n", + (long)status); + exit(1); + } + return data; +} + +# define cleanup_pool (get_cleanup_pool()) +# define HAVE_PER_THREAD_CLEANUP +#else static apr_pool_t *cleanup_pool = NULL; +# define set_cleanup_pool(p) (cleanup_pool = (p)) +#endif + +/* Used by test_thread to serialize access to stdout. */ +static svn_mutex__t *log_mutex = NULL; static apr_status_t cleanup_rmtree(void *data) @@ -150,19 +227,41 @@ cleanup_rmtree(void *data) } + void svn_test_add_dir_cleanup(const char *path) { if (cleanup_mode) { const char *abspath; - svn_error_t *err = svn_path_get_absolute(&abspath, path, cleanup_pool); + svn_error_t *err; + + /* All cleanup functions use the *same* pool (not subpools of it). + Thus, we need to synchronize. 
*/ + err = svn_mutex__lock(log_mutex); + if (err) + { + if (verbose_mode) + printf("FAILED svn_mutex__lock in svn_test_add_dir_cleanup.\n"); + svn_error_clear(err); + return; + } + + err = svn_path_get_absolute(&abspath, path, cleanup_pool); svn_error_clear(err); if (!err) apr_pool_cleanup_register(cleanup_pool, abspath, cleanup_rmtree, apr_pool_cleanup_null); else if (verbose_mode) printf("FAILED ABSPATH: %s\n", path); + + err = svn_mutex__unlock(log_mutex, NULL); + if (err) + { + if (verbose_mode) + printf("FAILED svn_mutex__unlock in svn_test_add_dir_cleanup.\n"); + svn_error_clear(err); + } } } @@ -183,7 +282,7 @@ svn_test_rand(apr_uint32_t *seed) /* Determine the array size of test_funcs[], the inelegant way. :) */ static int -get_array_size(void) +get_array_size(struct svn_test_descriptor_t *test_funcs) { int i; @@ -205,6 +304,91 @@ crash_handler(int signum) longjmp(jump_buffer, 1); } +/* Write the result of test number TEST_NUM to stdout. Pretty-print test + name and dots according to our test-suite spec, and return TRUE if there + has been a test failure. + + The parameters are basically the internal state of do_test_num() and + test_thread(). */ +/* */ +static svn_boolean_t +log_results(const char *progname, + int test_num, + svn_boolean_t msg_only, + svn_boolean_t run_this_test, + svn_boolean_t skip, + svn_boolean_t xfail, + svn_boolean_t wimp, + svn_error_t *err, + const char *msg, + const struct svn_test_descriptor_t *desc) +{ + svn_boolean_t test_failed; + + if (err && err->apr_err == SVN_ERR_TEST_SKIPPED) + { + svn_error_clear(err); + err = SVN_NO_ERROR; + skip = TRUE; + xfail = FALSE; /* Or all XFail tests reporting SKIP would be failing */ + } + + /* Failure means unexpected results -- FAIL or XPASS. */ + test_failed = (!wimp && ((err != SVN_NO_ERROR) != (xfail != 0))); + + /* If we got an error, print it out. 
*/ + if (err) + { + svn_handle_error2(err, stdout, FALSE, "svn_tests: "); + svn_error_clear(err); + } + + if (msg_only) + { + const svn_boolean_t otoh = !!desc->predicate.description; + + if (run_this_test) + printf(" %3d %-5s %s%s%s%s%s%s\n", + test_num, + (xfail ? "XFAIL" : (skip ? "SKIP" : "")), + msg ? msg : "(test did not provide name)", + (wimp && verbose_mode) ? " [[" : "", + (wimp && verbose_mode) ? desc->wip : "", + (wimp && verbose_mode) ? "]]" : "", + (otoh ? " / " : ""), + (otoh ? desc->predicate.description : "")); + } + else if (run_this_test && ((! quiet_mode) || test_failed)) + { + printf("%s %s %d: %s%s%s%s\n", + (err + ? (xfail ? "XFAIL:" : "FAIL: ") + : (xfail ? "XPASS:" : (skip ? "SKIP: " : "PASS: "))), + progname, + test_num, + msg ? msg : "(test did not provide name)", + wimp ? " [[WIMP: " : "", + wimp ? desc->wip : "", + wimp ? "]]" : ""); + } + + if (msg) + { + size_t len = strlen(msg); + if (len > 50) + printf("WARNING: Test docstring exceeds 50 characters\n"); + if (msg[len - 1] == '.') + printf("WARNING: Test docstring ends in a period (.)\n"); + if (svn_ctype_isupper(msg[0])) + printf("WARNING: Test docstring is capitalized\n"); + } + if (desc->msg == NULL) + printf("WARNING: New-style test descriptor is missing a docstring.\n"); + + fflush(stdout); + + return test_failed; +} /* Execute a test number TEST_NUM. Pretty-print test name and dots according to our test-suite spec, and return the result code. 
@@ -213,6 +397,7 @@ crash_handler(int signum) static svn_boolean_t do_test_num(const char *progname, int test_num, + struct svn_test_descriptor_t *test_funcs, svn_boolean_t msg_only, svn_test_opts_t *opts, const char **header_msg, @@ -220,11 +405,11 @@ do_test_num(const char *progname, { svn_boolean_t skip, xfail, wimp; svn_error_t *err = NULL; - svn_boolean_t test_failed; const char *msg = NULL; /* the message this individual test prints out */ const struct svn_test_descriptor_t *desc; - const int array_size = get_array_size(); + const int array_size = get_array_size(test_funcs); svn_boolean_t run_this_test; /* This test's mode matches DESC->MODE. */ + enum svn_test_mode_t test_mode; /* Check our array bounds! */ if (test_num < 0) @@ -239,11 +424,18 @@ do_test_num(const char *progname, } desc = &test_funcs[test_num]; - skip = desc->mode == svn_test_skip; - xfail = desc->mode == svn_test_xfail; + /* Check the test predicate. */ + if (desc->predicate.func + && desc->predicate.func(opts, desc->predicate.value, pool)) + test_mode = desc->predicate.alternate_mode; + else + test_mode = desc->mode; + + skip = test_mode == svn_test_skip; + xfail = test_mode == svn_test_xfail; wimp = xfail && desc->wip; msg = desc->msg; - run_this_test = mode_filter == svn_test_all || mode_filter == desc->mode; + run_this_test = mode_filter == svn_test_all || mode_filter == test_mode; if (run_this_test && header_msg && *header_msg) { @@ -272,13 +464,6 @@ do_test_num(const char *progname, err = (*desc->func2)(pool); else err = (*desc->func_opts)(opts, pool); - - if (err && err->apr_err == SVN_ERR_TEST_SKIPPED) - { - svn_error_clear(err); - err = SVN_NO_ERROR; - skip = TRUE; - } } else err = svn_error_create(SVN_ERR_TEST_FAILED, NULL, @@ -292,60 +477,171 @@ do_test_num(const char *progname, } /* Failure means unexpected results -- FAIL or XPASS. 
*/ - test_failed = (!wimp && ((err != SVN_NO_ERROR) != (xfail != 0))); + skip_cleanup = log_results(progname, test_num, msg_only, run_this_test, + skip, xfail, wimp, err, msg, desc); - /* If we got an error, print it out. */ - if (err) - { - svn_handle_error2(err, stdout, FALSE, "svn_tests: "); - svn_error_clear(err); - } + return skip_cleanup; +} - if (msg_only) +#if APR_HAS_THREADS + +/* Per-test parameters used by test_thread */ +typedef struct test_params_t +{ + /* Name of the application */ + const char *progname; + + /* Total number of tests to execute */ + svn_atomic_t test_count; + + /* Global test options as provided by main() */ + svn_test_opts_t *opts; + + /* Reference to the global failure flag. Set this if any test failed. */ + svn_atomic_t got_error; + + /* Test to execute next. */ + svn_atomic_t test_num; + + /* Test functions array. */ + struct svn_test_descriptor_t *test_funcs; +} test_params_t; + +/* Thread function similar to do_test_num() but with fewer options. We do + catch segfaults. All parameters are given as a test_params_t in DATA. + */ +static void * APR_THREAD_FUNC +test_thread(apr_thread_t *thread, void *data) +{ + svn_boolean_t skip, xfail, wimp; + svn_error_t *err; + const struct svn_test_descriptor_t *desc; + svn_boolean_t run_this_test; /* This test's mode matches DESC->MODE. */ + enum svn_test_mode_t test_mode; + test_params_t *params = data; + svn_atomic_t test_num; + apr_pool_t *pool; + apr_pool_t *thread_root + = apr_allocator_owner_get(svn_pool_create_allocator(FALSE)); + +#ifdef HAVE_PER_THREAD_CLEANUP + set_cleanup_pool(svn_pool_create(thread_root)); +#endif + + pool = svn_pool_create(thread_root); + + for (test_num = svn_atomic_inc(¶ms->test_num); + test_num <= params->test_count; + test_num = svn_atomic_inc(¶ms->test_num)) { - if (run_this_test) - printf(" %3d %-5s %s%s%s%s\n", - test_num, - (xfail ? "XFAIL" : (skip ? "SKIP" : "")), - msg ? msg : "(test did not provide name)", - (wimp && verbose_mode) ? 
" [[" : "", - (wimp && verbose_mode) ? desc->wip : "", - (wimp && verbose_mode) ? "]]" : ""); + svn_pool_clear(pool); +#ifdef HAVE_PER_THREAD_CLEANUP + svn_pool_clear(cleanup_pool); /* after clearing pool*/ +#endif + + desc = ¶ms->test_funcs[test_num]; + /* Check the test predicate. */ + if (desc->predicate.func + && desc->predicate.func(params->opts, desc->predicate.value, pool)) + test_mode = desc->predicate.alternate_mode; + else + test_mode = desc->mode; + + skip = test_mode == svn_test_skip; + xfail = test_mode == svn_test_xfail; + wimp = xfail && desc->wip; + run_this_test = mode_filter == svn_test_all + || mode_filter == test_mode; + + /* Do test */ + if (skip || !run_this_test) + err = NULL; /* pass */ + else if (desc->func2) + err = (*desc->func2)(pool); + else + err = (*desc->func_opts)(params->opts, pool); + + /* Write results to console */ + svn_error_clear(svn_mutex__lock(log_mutex)); + if (log_results(params->progname, test_num, FALSE, run_this_test, + skip, xfail, wimp, err, desc->msg, desc)) + svn_atomic_set(¶ms->got_error, TRUE); + svn_error_clear(svn_mutex__unlock(log_mutex, NULL)); } - else if (run_this_test && ((! quiet_mode) || test_failed)) + + svn_pool_clear(pool); /* Make sure this is cleared before cleanup_pool*/ + + /* Release all test memory. Possibly includes cleanup_pool */ + svn_pool_destroy(thread_root); + + /* End thread explicitly to prevent APR_INCOMPLETE return codes in + apr_thread_join(). */ + apr_thread_exit(thread, 0); + return NULL; +} + +/* Log an error with message MSG if the APR status of EXPR is not 0. + */ +#define CHECK_STATUS(expr,msg) \ + do { \ + apr_status_t rv = (expr); \ + if (rv) \ + { \ + svn_error_t *svn_err__temp = svn_error_wrap_apr(rv, msg); \ + svn_handle_error2(svn_err__temp, stdout, FALSE, "svn_tests: "); \ + svn_error_clear(svn_err__temp); \ + } \ + } while (0); + +/* Execute all ARRAY_SIZE tests concurrently using MAX_THREADS threads. + Pass PROGNAME and OPTS to the individual tests. 
Return TRUE if at least + one of the tests failed. Allocate all data in POOL. + + Note that cleanups are delayed until all tests have been completed. + */ +static svn_boolean_t +do_tests_concurrently(const char *progname, + struct svn_test_descriptor_t *test_funcs, + int array_size, + int max_threads, + svn_test_opts_t *opts, + apr_pool_t *pool) +{ + int i; + apr_thread_t **threads; + + /* Prepare thread parameters. */ + test_params_t params; + params.got_error = FALSE; + params.opts = opts; + params.progname = progname; + params.test_num = 1; + params.test_funcs = test_funcs; + params.test_count = array_size; + + /* Start all threads. */ + threads = apr_pcalloc(pool, max_threads * sizeof(*threads)); + for (i = 0; i < max_threads; ++i) { - printf("%s %s %d: %s%s%s%s\n", - (err - ? (xfail ? "XFAIL:" : "FAIL: ") - : (xfail ? "XPASS:" : (skip ? "SKIP: " : "PASS: "))), - progname, - test_num, - msg ? msg : "(test did not provide name)", - wimp ? " [[WIMP: " : "", - wimp ? desc->wip : "", - wimp ? "]]" : ""); + CHECK_STATUS(apr_thread_create(&threads[i], NULL, test_thread, ¶ms, + pool), + "creating test thread failed.\n"); } - if (msg) + /* Wait for all tasks (tests) to complete. 
*/ + for (i = 0; i < max_threads; ++i) { - size_t len = strlen(msg); - if (len > 50) - printf("WARNING: Test docstring exceeds 50 characters\n"); - if (msg[len - 1] == '.') - printf("WARNING: Test docstring ends in a period (.)\n"); - if (svn_ctype_isupper(msg[0])) - printf("WARNING: Test docstring is capitalized\n"); + apr_status_t result = 0; + CHECK_STATUS(apr_thread_join(&result, threads[i]), + "Waiting for test thread to finish failed."); + CHECK_STATUS(result, + "Test thread returned an error."); } - if (desc->msg == NULL) - printf("WARNING: New-style test descriptor is missing a docstring.\n"); - fflush(stdout); - - skip_cleanup = test_failed; - - return test_failed; + return params.got_error != FALSE; } +#endif static void help(const char *progname, apr_pool_t *pool) { @@ -366,12 +662,106 @@ static void help(const char *progname, apr_pool_t *pool) svn_error_clear(svn_cmdline_fprintf(stdout, pool, "\n")); } +static svn_error_t *init_test_data(const char *argv0, apr_pool_t *pool) +{ + const char *temp_path; + const char *base_name; + + /* Convert the program path to an absolute path. */ + SVN_ERR(svn_utf_cstring_to_utf8(&temp_path, argv0, pool)); + temp_path = svn_dirent_internal_style(temp_path, pool); + SVN_ERR(svn_dirent_get_absolute(&temp_path, temp_path, pool)); + SVN_ERR_ASSERT(!svn_dirent_is_root(temp_path, strlen(temp_path))); + + /* Extract the interesting bits of the path. */ + temp_path = svn_dirent_dirname(temp_path, pool); + base_name = svn_dirent_basename(temp_path, pool); + if (0 == strcmp(base_name, ".libs")) + { + /* This is a libtoolized binary, skip the .libs directory. */ + temp_path = svn_dirent_dirname(temp_path, pool); + base_name = svn_dirent_basename(temp_path, pool); + } + temp_path = svn_dirent_dirname(temp_path, pool); + + /* temp_path should now point to the root of the test + builddir. Construct the path to the transient dir. Note that we + put the path insinde the cmdline/svn-test-work area. 
This is + because trying to get the cmdline tests to use a different work + area is unprintable; so we put the C test transient dir in the + cmdline tests area, as the lesser of evils ... */ + temp_path = svn_dirent_join_many(pool, temp_path, + "cmdline", "svn-test-work", + base_name, SVN_VA_NULL); + + /* Finally, create the transient directory. */ + SVN_ERR(svn_io_make_dir_recursively(temp_path, pool)); + + data_path = temp_path; + return SVN_NO_ERROR; +} + +const char * +svn_test_data_path(const char *base_name, apr_pool_t *result_pool) +{ + return svn_dirent_join(data_path, base_name, result_pool); +} + +svn_error_t * +svn_test_get_srcdir(const char **srcdir, + const svn_test_opts_t *opts, + apr_pool_t *pool) +{ + const char *cwd; + + if (opts->srcdir) + { + *srcdir = opts->srcdir; + return SVN_NO_ERROR; + } + + fprintf(stderr, "WARNING: missing '--srcdir' option"); + SVN_ERR(svn_dirent_get_absolute(&cwd, ".", pool)); + fprintf(stderr, ", assuming '%s'\n", cwd); + *srcdir = cwd; + + return SVN_NO_ERROR; +} + +svn_error_t * +svn_test__init_auth_baton(svn_auth_baton_t **ab, + apr_pool_t *result_pool) +{ + svn_config_t *cfg_config; + + SVN_ERR(svn_config_create2(&cfg_config, FALSE, FALSE, result_pool)); + + /* Disable the crypto backends that might not be entirely + threadsafe and/or compatible with running headless. + + The windows system is just our own files, but then with user-key + encrypted data inside. 
*/ + svn_config_set(cfg_config, + SVN_CONFIG_SECTION_AUTH, + SVN_CONFIG_OPTION_PASSWORD_STORES, + "windows-cryptoapi"); + + SVN_ERR(svn_cmdline_create_auth_baton(ab, + TRUE /* non_interactive */, + "jrandom", "rayjandom", + NULL, + TRUE /* no_auth_cache */, + FALSE /* trust_server_cert */, + cfg_config, NULL, NULL, result_pool)); + + return SVN_NO_ERROR; +} /* Standard svn test program */ int -main(int argc, const char *argv[]) +svn_test_main(int argc, const char *argv[], int max_threads, + struct svn_test_descriptor_t *test_funcs) { - const char *prog_name; int i; svn_boolean_t got_error = FALSE; apr_pool_t *pool, *test_pool; @@ -383,7 +773,7 @@ main(int argc, const char *argv[]) svn_error_t *err; char errmsg[200]; /* How many tests are there? */ - int array_size = get_array_size(); + int array_size = get_array_size(test_funcs); svn_test_opts_t opts = { NULL }; @@ -400,31 +790,61 @@ main(int argc, const char *argv[]) * usage but make it thread-safe to allow for multi-threaded tests. */ pool = apr_allocator_owner_get(svn_pool_create_allocator(TRUE)); + err = svn_mutex__init(&log_mutex, TRUE, pool); + if (err) + { + svn_handle_error2(err, stderr, TRUE, "svn_tests: "); + svn_error_clear(err); + } + + /* Set up the thread-local storage key for the cleanup pool. 
*/ +#ifdef HAVE_PER_THREAD_CLEANUP + apr_err = apr_threadkey_private_create(&cleanup_pool_key, + null_threadkey_dtor, + pool); + if (apr_err) + { + printf("apr_threadkey_private_create() failed with code %ld.\n", + (long)apr_err); + exit(1); + } +#endif /* HAVE_PER_THREAD_CLEANUP */ /* Remember the command line */ test_argc = argc; test_argv = argv; + err = init_test_data(argv[0], pool); + if (err) + { + svn_handle_error2(err, stderr, TRUE, "svn_tests: "); + svn_error_clear(err); + } + err = svn_cmdline__getopt_init(&os, argc, argv, pool); + if (err) + { + svn_handle_error2(err, stderr, TRUE, "svn_tests: "); + svn_error_clear(err); + } + os->interleave = TRUE; /* Let options and arguments be interleaved */ /* Strip off any leading path components from the program name. */ - prog_name = strrchr(argv[0], '/'); - if (prog_name) - prog_name++; - else - { - /* Just check if this is that weird platform that uses \ instead - of / for the path separator. */ - prog_name = strrchr(argv[0], '\\'); - if (prog_name) - prog_name++; - else - prog_name = argv[0]; - } + opts.prog_name = svn_dirent_internal_style(argv[0], pool); + opts.prog_name = svn_dirent_basename(opts.prog_name, NULL); #ifdef WIN32 + /* Abuse cast in strstr() to remove .exe extension. 
+ Value is allocated in pool by svn_dirent_internal_style() */ + { + char *exe_ext = strstr(opts.prog_name, ".exe"); + + if (exe_ext) + *exe_ext = '\0'; + } + #if _MSC_VER >= 1400 /* ### This should work for VC++ 2002 (=1300) and later */ /* Show the abort message on STDERR instead of a dialog to allow @@ -446,7 +866,7 @@ main(int argc, const char *argv[]) #endif if (err) - return svn_cmdline_handle_exit_error(err, pool, prog_name); + return svn_cmdline_handle_exit_error(err, pool, opts.prog_name); while (1) { const char *opt_arg; @@ -457,7 +877,7 @@ main(int argc, const char *argv[]) break; else if (apr_err && (apr_err != APR_BADCH)) { - /* Ignore invalid option error to allow passing arbitary options */ + /* Ignore invalid option error to allow passing arbitrary options */ fprintf(stderr, "apr_getopt_long failed : [%d] %s\n", apr_err, apr_strerror(apr_err, errmsg, sizeof(errmsg))); exit(1); @@ -465,7 +885,7 @@ main(int argc, const char *argv[]) switch (opt_id) { case help_opt: - help(prog_name, pool); + help(opts.prog_name, pool); exit(0); case cleanup_opt: cleanup_mode = TRUE; @@ -480,6 +900,20 @@ main(int argc, const char *argv[]) SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.srcdir, opt_arg, pool)); opts.srcdir = svn_dirent_internal_style(opts.srcdir, pool); break; + case reposdir_opt: + SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.repos_dir, opt_arg, pool)); + opts.repos_dir = svn_dirent_internal_style(opts.repos_dir, pool); + break; + case reposurl_opt: + SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.repos_url, opt_arg, pool)); + opts.repos_url = svn_uri_canonicalize(opts.repos_url, pool); + break; + case repostemplate_opt: + SVN_INT_ERR(svn_utf_cstring_to_utf8(&opts.repos_template, opt_arg, + pool)); + opts.repos_template = svn_dirent_internal_style(opts.repos_template, + pool); + break; case list_opt: list_mode = TRUE; break; @@ -518,14 +952,24 @@ main(int argc, const char *argv[]) exit(1); } if ((opts.server_minor_version < 3) - || (opts.server_minor_version > 6)) + 
|| (opts.server_minor_version > SVN_VER_MINOR)) { fprintf(stderr, "FAIL: Invalid minor version given\n"); exit(1); } + break; } + case sqlite_log_opt: + svn_sqlite__dbg_enable_errorlog(); + break; +#if APR_HAS_THREADS + case parallel_opt: + parallel = TRUE; + break; +#endif } } + opts.verbose = verbose_mode; /* Disable sleeping for timestamps, to speed up the tests. */ apr_env_set( @@ -540,7 +984,7 @@ main(int argc, const char *argv[]) } /* Create an iteration pool for the tests */ - cleanup_pool = svn_pool_create(pool); + set_cleanup_pool(svn_pool_create(pool)); test_pool = svn_pool_create(pool); if (!allow_segfaults) @@ -558,8 +1002,8 @@ main(int argc, const char *argv[]) "------ ----- ----------------\n"; for (i = 1; i <= array_size; i++) { - if (do_test_num(prog_name, i, TRUE, &opts, &header_msg, - test_pool)) + if (do_test_num(opts.prog_name, i, test_funcs, + TRUE, &opts, &header_msg, test_pool)) got_error = TRUE; /* Clear the per-function pool */ @@ -579,8 +1023,8 @@ main(int argc, const char *argv[]) continue; ran_a_test = TRUE; - if (do_test_num(prog_name, test_num, FALSE, &opts, NULL, - test_pool)) + if (do_test_num(opts.prog_name, test_num, test_funcs, + FALSE, &opts, NULL, test_pool)) got_error = TRUE; /* Clear the per-function pool */ @@ -594,15 +1038,34 @@ main(int argc, const char *argv[]) if (! 
ran_a_test) { /* just run all tests */ - for (i = 1; i <= array_size; i++) + if (max_threads < 1) + max_threads = array_size; + + if (max_threads == 1 || !parallel) { - if (do_test_num(prog_name, i, FALSE, &opts, NULL, test_pool)) - got_error = TRUE; + for (i = 1; i <= array_size; i++) + { + if (do_test_num(opts.prog_name, i, test_funcs, + FALSE, &opts, NULL, test_pool)) + got_error = TRUE; - /* Clear the per-function pool */ + /* Clear the per-function pool */ + svn_pool_clear(test_pool); + svn_pool_clear(cleanup_pool); + } + } +#if APR_HAS_THREADS + else + { + got_error = do_tests_concurrently(opts.prog_name, test_funcs, + array_size, max_threads, + &opts, test_pool); + + /* Execute all cleanups */ svn_pool_clear(test_pool); svn_pool_clear(cleanup_pool); } +#endif } /* Clean up APR */ @@ -611,3 +1074,20 @@ main(int argc, const char *argv[]) return got_error; } + + +svn_boolean_t +svn_test__fs_type_is(const svn_test_opts_t *opts, + const char *predicate_value, + apr_pool_t *pool) +{ + return (0 == strcmp(predicate_value, opts->fs_type)); +} + +svn_boolean_t +svn_test__fs_type_not(const svn_test_opts_t *opts, + const char *predicate_value, + apr_pool_t *pool) +{ + return (0 != strcmp(predicate_value, opts->fs_type)); +} diff --git a/subversion/tests/templates/empty-fsfs-v4.zip b/subversion/tests/templates/empty-fsfs-v4.zip Binary files differnew file mode 100644 index 0000000..deca429 --- /dev/null +++ b/subversion/tests/templates/empty-fsfs-v4.zip diff --git a/subversion/tests/templates/empty-fsfs-v6.zip b/subversion/tests/templates/empty-fsfs-v6.zip Binary files differnew file mode 100644 index 0000000..4fce7ca --- /dev/null +++ b/subversion/tests/templates/empty-fsfs-v6.zip diff --git a/subversion/tests/templates/greek-fsfs-v4.zip b/subversion/tests/templates/greek-fsfs-v4.zip Binary files differnew file mode 100644 index 0000000..22011f9 --- /dev/null +++ b/subversion/tests/templates/greek-fsfs-v4.zip diff --git a/subversion/tests/templates/greek-fsfs-v6.zip 
b/subversion/tests/templates/greek-fsfs-v6.zip Binary files differnew file mode 100644 index 0000000..3dc2c3e --- /dev/null +++ b/subversion/tests/templates/greek-fsfs-v6.zip diff --git a/subversion/tests/templates/greek.dump b/subversion/tests/templates/greek.dump new file mode 100644 index 0000000..b70c811 --- /dev/null +++ b/subversion/tests/templates/greek.dump @@ -0,0 +1,260 @@ +SVN-fs-dump-format-version: 2 + +UUID: 77e48e13-c942-4450-8676-1d60a12bd220 + +Revision-number: 0 +Prop-content-length: 56 +Content-length: 56 + +K 8 +svn:date +V 27 +2014-08-22T10:58:13.847732Z +PROPS-END + +Revision-number: 1 +Prop-content-length: 129 +Content-length: 129 + +K 10 +svn:author +V 7 +jrandom +K 8 +svn:date +V 27 +2014-08-22T11:16:26.921067Z +K 7 +svn:log +V 27 +Log message for revision 1. +PROPS-END + +Node-path: A +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/B +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/B/E +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/B/E/alpha +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 26 +Text-content-md5: d1fa4a3ced98961674a441930a51f2d3 +Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03 +Content-length: 36 + +PROPS-END +This is the file 'alpha'. + + +Node-path: A/B/E/beta +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 25 +Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1 +Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3 +Content-length: 35 + +PROPS-END +This is the file 'beta'. 
+ + +Node-path: A/B/F +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/B/lambda +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 27 +Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6 +Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421 +Content-length: 37 + +PROPS-END +This is the file 'lambda'. + + +Node-path: A/C +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/D +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/D/G +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/D/G/pi +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 23 +Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6 +Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415 +Content-length: 33 + +PROPS-END +This is the file 'pi'. + + +Node-path: A/D/G/rho +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 24 +Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604 +Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881 +Content-length: 34 + +PROPS-END +This is the file 'rho'. + + +Node-path: A/D/G/tau +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 24 +Text-content-md5: 9936e2716e469bb686deb98c280ead58 +Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175 +Content-length: 34 + +PROPS-END +This is the file 'tau'. + + +Node-path: A/D/H +Node-kind: dir +Node-action: add +Prop-content-length: 10 +Content-length: 10 + +PROPS-END + + +Node-path: A/D/H/chi +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 24 +Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3 +Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4 +Content-length: 34 + +PROPS-END +This is the file 'chi'. 
+ + +Node-path: A/D/H/omega +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 26 +Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc +Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6 +Content-length: 36 + +PROPS-END +This is the file 'omega'. + + +Node-path: A/D/H/psi +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 24 +Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1 +Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204 +Content-length: 34 + +PROPS-END +This is the file 'psi'. + + +Node-path: A/D/gamma +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 26 +Text-content-md5: 412138bd677d64cd1c32fafbffe6245d +Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c +Content-length: 36 + +PROPS-END +This is the file 'gamma'. + + +Node-path: A/mu +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 23 +Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8 +Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7 +Content-length: 33 + +PROPS-END +This is the file 'mu'. + + +Node-path: iota +Node-kind: file +Node-action: add +Prop-content-length: 10 +Text-content-length: 25 +Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc +Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2 +Content-length: 35 + +PROPS-END +This is the file 'iota'. + + |