summaryrefslogtreecommitdiff
path: root/tools
diff options
context:
space:
mode:
authorLorry Tar Creator <lorry-tar-importer@lorry>2017-08-05 16:22:51 +0000
committerLorry Tar Creator <lorry-tar-importer@lorry>2017-08-05 16:22:51 +0000
commitcf46733632c7279a9fd0fe6ce26f9185a4ae82a9 (patch)
treeda27775a2161723ef342e91af41a8b51fedef405 /tools
parentbb0ef45f7c46b0ae221b26265ef98a768c33f820 (diff)
downloadsubversion-tarball-master.tar.gz
Diffstat (limited to 'tools')
-rwxr-xr-xtools/backup/hot-backup.py.in25
-rw-r--r--tools/buildbot/master/Feeder.py391
-rw-r--r--tools/buildbot/master/README7
-rw-r--r--tools/buildbot/master/SVNMailNotifier.py210
-rw-r--r--tools/buildbot/master/master.cfg258
-rw-r--r--tools/buildbot/master/private-sample.py32
-rw-r--r--tools/buildbot/master/public_html/buildbot.css68
-rw-r--r--tools/buildbot/master/public_html/index.html53
-rw-r--r--tools/buildbot/master/public_html/robots.txt9
-rwxr-xr-xtools/buildbot/slaves/bb-openbsd/svncheck.sh2
-rwxr-xr-xtools/buildbot/slaves/bb-openbsd/svnclean.sh2
-rwxr-xr-xtools/buildbot/slaves/i686-debian-sarge1/svnlog.sh2
-rwxr-xr-xtools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh51
-rwxr-xr-xtools/buildbot/slaves/svn-sparc-solaris/svncheck.sh39
-rwxr-xr-xtools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh30
-rw-r--r--tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh23
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh34
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svnbuild.sh (renamed from tools/buildbot/slaves/centos/svnbuild.sh)26
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svncheck-bindings.sh (renamed from tools/buildbot/slaves/centos/svncheck-bindings.sh)0
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svncheck.sh (renamed from tools/buildbot/slaves/centos/svncheck.sh)5
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svnclean.sh (renamed from tools/buildbot/slaves/centos/svnclean.sh)2
-rwxr-xr-xtools/buildbot/slaves/svn-x64-centos/svnlog.sh (renamed from tools/buildbot/slaves/centos/svnlog.sh)2
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh2
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh2
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh63
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh62
-rw-r--r--tools/buildbot/slaves/svn-x64-macosx/setenv.sh70
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh45
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svnbuild.sh101
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh58
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svncheck.sh94
-rwxr-xr-xtools/buildbot/slaves/svn-x64-macosx/svnclean.sh27
-rwxr-xr-xtools/buildbot/slaves/ubuntu-x64/svnlog.sh2
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template19
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd124
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd20
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd6
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd35
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd46
-rw-r--r--tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd8
-rw-r--r--tools/client-side/bash_completion49
-rwxr-xr-xtools/client-side/bash_completion_test8
-rwxr-xr-xtools/client-side/showchange.pl66
-rw-r--r--tools/client-side/svn-bench/cl.h198
-rw-r--r--tools/client-side/svn-bench/client_errors.h97
-rw-r--r--tools/client-side/svn-bench/help-cmd.c94
-rw-r--r--tools/client-side/svn-bench/notify.c1045
-rw-r--r--tools/client-side/svn-bench/null-export-cmd.c346
-rw-r--r--tools/client-side/svn-bench/null-list-cmd.c169
-rw-r--r--tools/client-side/svn-bench/null-log-cmd.c243
-rw-r--r--tools/client-side/svn-bench/svn-bench.c954
-rw-r--r--tools/client-side/svn-bench/util.c92
-rwxr-xr-xtools/client-side/svn-graph.pl17
-rwxr-xr-xtools/client-side/svn-ssl-fingerprints.sh2
-rwxr-xr-xtools/client-side/svn-vendor.py1065
-rw-r--r--tools/dev/aprerr.txt1
-rw-r--r--tools/dev/benchmarks/RepoPerf/ClearMemory.cpp55
-rw-r--r--tools/dev/benchmarks/RepoPerf/TimeWin.cpp118
-rw-r--r--tools/dev/benchmarks/RepoPerf/copy_repo.py313
-rw-r--r--tools/dev/benchmarks/RepoPerf/win_repo_bench.py268
-rwxr-xr-xtools/dev/benchmarks/large_dirs/create_bigdir.sh10
-rwxr-xr-xtools/dev/build-svn-deps-win.pl919
-rw-r--r--tools/dev/fsfs-access-map.c180
-rw-r--r--tools/dev/fsfs-reorg.c3147
-rw-r--r--tools/dev/gdb-py/svndbg/printers.py8
-rwxr-xr-xtools/dev/po-merge.py14
-rwxr-xr-xtools/dev/remove-trailing-whitespace.sh8
-rw-r--r--tools/dev/svnraisetreeconflict/svnraisetreeconflict.c121
-rwxr-xr-xtools/dev/trails.py18
-rw-r--r--tools/dev/unix-build/Makefile.svn467
-rw-r--r--tools/dev/wc-ng/svn-wc-db-tester.c269
-rwxr-xr-xtools/dev/which-error.py14
-rw-r--r--tools/dev/x509-parser.c178
-rw-r--r--tools/diff/diff.c15
-rw-r--r--tools/diff/diff3.c153
-rw-r--r--tools/diff/diff4.c3
-rwxr-xr-xtools/dist/backport.pl1235
-rw-r--r--tools/dist/backport_accept.dump550
-rw-r--r--tools/dist/backport_branches.dump642
-rw-r--r--tools/dist/backport_indented_entry.dump522
-rw-r--r--tools/dist/backport_multirevisions.dump534
-rwxr-xr-xtools/dist/backport_tests.py578
-rw-r--r--tools/dist/backport_two_approveds.dump961
-rwxr-xr-xtools/dist/dist.sh33
-rwxr-xr-xtools/dist/make-deps-tarball.sh121
-rwxr-xr-xtools/dist/nightly.sh6
l---------tools/dist/nominate.pl1
-rwxr-xr-xtools/dist/release.py198
-rw-r--r--tools/dist/templates/download.ezt2
-rw-r--r--tools/dist/templates/rc-news.ezt2
-rw-r--r--tools/dist/templates/rc-release-ann.ezt2
-rw-r--r--tools/dist/templates/stable-news.ezt2
-rw-r--r--tools/dist/templates/stable-release-ann.ezt2
-rw-r--r--tools/examples/ExampleAuthn.java105
-rw-r--r--tools/examples/ExampleAuthnOld.java119
-rw-r--r--tools/examples/ExampleAuthnVeryOld.java118
-rwxr-xr-xtools/examples/svnlook.py203
-rwxr-xr-xtools/examples/walk-config-auth.py4
-rwxr-xr-xtools/hook-scripts/commit-access-control.pl.in2
-rwxr-xr-xtools/hook-scripts/mailer/mailer.py38
-rwxr-xr-xtools/hook-scripts/mailer/tests/mailer-init.sh8
-rwxr-xr-xtools/hook-scripts/svn2feed.py2
-rwxr-xr-xtools/hook-scripts/svnperms.py2
-rwxr-xr-xtools/po/po-update.sh4
-rwxr-xr-xtools/server-side/fsfs-reshard.py2
-rw-r--r--tools/server-side/fsfs-stats.c2181
-rw-r--r--tools/server-side/mod_dontdothat/mod_dontdothat.c41
-rw-r--r--tools/server-side/svn-populate-node-origins-index.c2
-rw-r--r--tools/server-side/svn-rep-sharing-stats.c530
-rw-r--r--tools/server-side/svnauthz.c147
-rwxr-xr-xtools/server-side/svnpredumpfilter.py51
-rwxr-xr-xtools/server-side/svnpubsub/commit-hook.py43
-rw-r--r--tools/server-side/svnpubsub/daemonize.py193
-rwxr-xr-xtools/server-side/svnpubsub/rc.d/svnpubsub.freebsd2
-rwxr-xr-xtools/server-side/svnpubsub/revprop-change-hook.py90
-rw-r--r--tools/server-side/svnpubsub/svnpubsub/client.py54
-rw-r--r--tools/server-side/svnpubsub/svnpubsub/server.py114
-rw-r--r--tools/server-side/svnpubsub/svnpubsub/util.py36
-rwxr-xr-xtools/server-side/svnpubsub/svnwcsub.py46
-rwxr-xr-xtools/server-side/svnpubsub/watcher.py5
120 files changed, 11086 insertions, 11226 deletions
diff --git a/tools/backup/hot-backup.py.in b/tools/backup/hot-backup.py.in
index 87b0bb1..bc661c5 100755
--- a/tools/backup/hot-backup.py.in
+++ b/tools/backup/hot-backup.py.in
@@ -28,10 +28,10 @@
# under the License.
# ====================================================================
-# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/backup/hot-backup.py.in $
-# $LastChangedDate: 2010-08-20 04:30:52 +0000 (Fri, 20 Aug 2010) $
-# $LastChangedBy: cmpilato $
-# $LastChangedRevision: 987379 $
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/backup/hot-backup.py.in $
+# $LastChangedDate: 2016-03-15 04:00:40 +0000 (Tue, 15 Mar 2016) $
+# $LastChangedBy: svn-role $
+# $LastChangedRevision: 1735016 $
######################################################################
@@ -137,6 +137,11 @@ for o, a in opts:
usage()
sys.exit()
+if archive_type not in (None, 'bz2', 'gz', 'zip', 'zip64'):
+ sys.stderr.write("ERROR: Bad --archive-type\n")
+ usage(sys.stderr)
+ sys.exit(2)
+
if len(args) != 2:
sys.stderr.write("ERROR: only two arguments allowed.\n\n")
sys.stderr.flush()
@@ -198,7 +203,7 @@ def get_youngest_revision():
"""Examine the repository REPO_DIR using the svnlook binary
specified by SVNLOOK, and return the youngest revision."""
- p = subprocess.Popen([svnlook, 'youngest', repo_dir],
+ p = subprocess.Popen([svnlook, 'youngest', '--', repo_dir],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
@@ -245,7 +250,7 @@ backup_subdir = os.path.join(backup_dir, repo + "-" + youngest)
# rather than start from 1 and increment because the starting
# increments may have already been removed due to num_backups.
-regexp = re.compile("^" + repo + "-" + youngest +
+regexp = re.compile("^" + re.escape(repo) + "-" + re.escape(youngest) +
"(-(?P<increment>[0-9]+))?" + ext_re + "$")
directory_list = os.listdir(backup_dir)
young_list = [x for x in directory_list if regexp.search(x)]
@@ -262,8 +267,8 @@ if young_list:
### copied last.
print("Backing up repository to '" + backup_subdir + "'...")
-err_code = subprocess.call([svnadmin, "hotcopy", repo_dir,
- backup_subdir, "--clean-logs"])
+err_code = subprocess.call([svnadmin, "hotcopy", "--clean-logs",
+ '--', repo_dir, backup_subdir])
if err_code != 0:
sys.stderr.write("Unable to backup the repository.\n")
sys.stderr.flush()
@@ -274,7 +279,7 @@ else:
### Step 4: Verify the hotcopy
if verify_copy:
print("Verifying backup...")
- err_code = subprocess.call([svnadmin, "verify", "--quiet", backup_subdir])
+ err_code = subprocess.call([svnadmin, "verify", "--quiet", '--', backup_subdir])
if err_code != 0:
sys.stderr.write("Backup verification failed.\n")
sys.stderr.flush()
@@ -340,7 +345,7 @@ if archive_type:
### NUM_BACKUPS.
if num_backups > 0:
- regexp = re.compile("^" + repo + "-[0-9]+(-[0-9]+)?" + ext_re + "$")
+ regexp = re.compile("^" + re.escape(repo) + "-[0-9]+(-[0-9]+)?" + ext_re + "$")
directory_list = os.listdir(backup_dir)
old_list = [x for x in directory_list if regexp.search(x)]
old_list.sort(comparator)
diff --git a/tools/buildbot/master/Feeder.py b/tools/buildbot/master/Feeder.py
deleted file mode 100644
index 59e79b9..0000000
--- a/tools/buildbot/master/Feeder.py
+++ /dev/null
@@ -1,391 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-# This file is part of the Buildbot configuration for the Subversion project.
-# The original file was created by Lieven Govaerts
-#
-# Minor changes made by API (apinheiro@igalia.com) in order to fit with our
-# configuration and last buildbot changes
-#
-# Minor whitespace clean up, clean up imports, adapted to buildbot 0.7.7,
-# and finally attempt to create valid atom and RSS feeds.
-# Changes by Chandan-Dutta Chowdhury <chandan-dutta chowdhury @ hp com> and
-# Gareth Armstrong <gareth armstrong @ hp com>
-# Also integrate changes from
-# http://code.google.com/p/pybots/source/browse/trunk/master/Feeder.py
-# which adds ability to filter RSS feeds to specific builders.
-# e.g. http://localhost:8012/rss?builder=builder-log4c-rhel-4-i386
-
-import time
-import os
-import re
-import sys
-
-from twisted.web.resource import Resource
-
-from buildbot.status.web import baseweb
-from buildbot.status.builder import FAILURE, SUCCESS, WARNINGS
-
-class XmlResource(Resource):
- contentType = "text/xml; charset=UTF-8"
- def render(self, request):
- data = self.content(request)
- request.setHeader("content-type", self.contentType)
- if request.method == "HEAD":
- request.setHeader("content-length", len(data))
- return ''
- return data
- docType = ''
- def header (self, request):
- data = ('<?xml version="1.0"?>\n')
- return data
- def footer(self, request):
- data = ''
- return data
- def content(self, request):
- data = self.docType
- data += self.header(request)
- data += self.body(request)
- data += self.footer(request)
- return data
- def body(self, request):
- return ''
-
-class FeedResource(XmlResource):
- title = 'Dummy'
- link = 'http://dummylink'
- language = 'en-us'
- description = 'Dummy rss'
- status = None
-
- def __init__(self, status, categories=None):
- self.status = status
- self.categories = categories
- self.link = self.status.getBuildbotURL()
- self.title = 'Build status of ' + status.getProjectName()
- self.description = 'List of FAILED builds'
- self.pubdate = time.gmtime(int(time.time()))
-
- def getBuilds(self, request):
- builds = []
- # THIS is lifted straight from the WaterfallStatusResource Class in
- # status/web/waterfall.py
- #
- # we start with all Builders available to this Waterfall: this is
- # limited by the config-file -time categories= argument, and defaults
- # to all defined Builders.
- allBuilderNames = self.status.getBuilderNames(categories=self.categories)
- builders = [self.status.getBuilder(name) for name in allBuilderNames]
-
- # but if the URL has one or more builder= arguments (or the old show=
- # argument, which is still accepted for backwards compatibility), we
- # use that set of builders instead. We still don't show anything
- # outside the config-file time set limited by categories=.
- showBuilders = request.args.get("show", [])
- showBuilders.extend(request.args.get("builder", []))
- if showBuilders:
- builders = [b for b in builders if b.name in showBuilders]
-
- # now, if the URL has one or category= arguments, use them as a
- # filter: only show those builders which belong to one of the given
- # categories.
- showCategories = request.args.get("category", [])
- if showCategories:
- builders = [b for b in builders if b.category in showCategories]
-
- maxFeeds = 25
-
- # Copy all failed builds in a new list.
- # This could clearly be implemented much better if we had
- # access to a global list of builds.
- for b in builders:
- lastbuild = b.getLastFinishedBuild()
- if lastbuild is None:
- continue
-
- lastnr = lastbuild.getNumber()
-
- totalbuilds = 0
- i = lastnr
- while i >= 0:
- build = b.getBuild(i)
- i -= 1
- if not build:
- continue
-
- results = build.getResults()
-
- # only add entries for failed builds!
- if results == FAILURE:
- totalbuilds += 1
- builds.append(build)
-
- # stop for this builder when our total nr. of feeds is reached
- if totalbuilds >= maxFeeds:
- break
-
- # Sort build list by date, youngest first.
- if sys.version_info[:3] >= (2,4,0):
- builds.sort(key=lambda build: build.getTimes(), reverse=True)
- else:
- # If you need compatibility with python < 2.4, use this for
- # sorting instead:
- # We apply Decorate-Sort-Undecorate
- deco = [(build.getTimes(), build) for build in builds]
- deco.sort()
- deco.reverse()
- builds = [build for (b1, build) in deco]
-
- if builds:
- builds = builds[:min(len(builds), maxFeeds)]
- return builds
-
- def body (self, request):
- data = ''
- builds = self.getBuilds(request)
-
- for build in builds:
- start, finished = build.getTimes()
- finishedTime = time.gmtime(int(finished))
- projectName = self.status.getProjectName()
- link = re.sub(r'index.html', "", self.status.getURLForThing(build))
-
- # title: trunk r862265 (plus patch) failed on 'i686-debian-sarge1 shared gcc-3.3.5'
- ss = build.getSourceStamp()
- source = ""
- if ss.branch:
- source += "Branch %s " % ss.branch
- if ss.revision:
- source += "Revision %s " % str(ss.revision)
- if ss.patch:
- source += " (plus patch)"
- if ss.changes:
- pass
- if (ss.branch is None and ss.revision is None and ss.patch is None
- and not ss.changes):
- source += "Latest revision "
- got_revision = None
- try:
- got_revision = build.getProperty("got_revision")
- except KeyError:
- pass
- if got_revision:
- got_revision = str(got_revision)
- if len(got_revision) > 40:
- got_revision = "[revision string too long]"
- source += "(Got Revision: %s)" % got_revision
- title = ('%s failed on "%s"' %
- (source, build.getBuilder().getName()))
-
- # get name of the failed step and the last 30 lines of its log.
- if build.getLogs():
- log = build.getLogs()[-1]
- laststep = log.getStep().getName()
- try:
- lastlog = log.getText()
- except IOError:
- # Probably the log file has been removed
- lastlog='<b>log file not available</b>'
-
- lines = re.split('\n', lastlog)
- lastlog = ''
- for logline in lines[max(0, len(lines)-30):]:
- lastlog = lastlog + logline + '<br/>'
- lastlog = lastlog.replace('\n', '<br/>')
-
- description = ''
- description += ('Date: %s<br/><br/>' %
- time.strftime("%a, %d %b %Y %H:%M:%S GMT",
- finishedTime))
- description += ('Full details available here: <a href="%s">%s</a><br/>' % (self.link, projectName))
- builder_summary_link = ('%s/builders/%s' %
- (re.sub(r'/index.html', '', self.link),
- build.getBuilder().getName()))
- description += ('Build summary: <a href="%s">%s</a><br/><br/>' %
- (builder_summary_link,
- build.getBuilder().getName()))
- description += ('Build details: <a href="%s">%s</a><br/><br/>' %
- (link, self.link + link[1:]))
- description += ('Author list: <b>%s</b><br/><br/>' %
- ",".join(build.getResponsibleUsers()))
- description += ('Failed step: <b>%s</b><br/><br/>' % laststep)
- description += 'Last lines of the build log:<br/>'
-
- data += self.item(title, description=description, lastlog=lastlog,
- link=link, pubDate=finishedTime)
-
- return data
-
- def item(self, title='', link='', description='', pubDate=''):
- """Generates xml for one item in the feed."""
-
-class Rss20StatusResource(FeedResource):
- def __init__(self, status, categories=None):
- FeedResource.__init__(self, status, categories)
- contentType = 'application/rss+xml'
-
- def header(self, request):
- data = FeedResource.header(self, request)
- data += ('<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">\n')
- data += (' <channel>\n')
- if self.title is not None:
- data += (' <title>%s</title>\n' % self.title)
- if self.link is not None:
- data += (' <link>%s</link>\n' % self.link)
- link = re.sub(r'/index.html', '', self.link)
- data += (' <atom:link href="%s/rss" rel="self" type="application/rss+xml"/>\n' % link)
- if self.language is not None:
- data += (' <language>%s</language>\n' % self.language)
- if self.description is not None:
- data += (' <description>%s</description>\n' % self.description)
- if self.pubdate is not None:
- rfc822_pubdate = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
- self.pubdate)
- data += (' <pubDate>%s</pubDate>\n' % rfc822_pubdate)
- return data
-
- def item(self, title='', link='', description='', lastlog='', pubDate=''):
- data = (' <item>\n')
- data += (' <title>%s</title>\n' % title)
- if link is not None:
- data += (' <link>%s</link>\n' % link)
- if (description is not None and lastlog is not None):
- lastlog = re.sub(r'<br/>', "\n", lastlog)
- lastlog = re.sub(r'&', "&amp;", lastlog)
- lastlog = re.sub(r"'", "&apos;", lastlog)
- lastlog = re.sub(r'"', "&quot;", lastlog)
- lastlog = re.sub(r'<', '&lt;', lastlog)
- lastlog = re.sub(r'>', '&gt;', lastlog)
- lastlog = lastlog.replace('\n', '<br/>')
- content = '<![CDATA['
- content += description
- content += lastlog
- content += ']]>'
- data += (' <description>%s</description>\n' % content)
- if pubDate is not None:
- rfc822pubDate = time.strftime("%a, %d %b %Y %H:%M:%S GMT",
- pubDate)
- data += (' <pubDate>%s</pubDate>\n' % rfc822pubDate)
- # Every RSS item must have a globally unique ID
- guid = ('tag:%s@%s,%s:%s' % (os.environ['USER'],
- os.environ['HOSTNAME'],
- time.strftime("%Y-%m-%d", pubDate),
- time.strftime("%Y%m%d%H%M%S",
- pubDate)))
- data += (' <guid isPermaLink="false">%s</guid>\n' % guid)
- data += (' </item>\n')
- return data
-
- def footer(self, request):
- data = (' </channel>\n'
- '</rss>')
- return data
-
-class Atom10StatusResource(FeedResource):
- def __init__(self, status, categories=None):
- FeedResource.__init__(self, status, categories)
- contentType = 'application/atom+xml'
-
- def header(self, request):
- data = FeedResource.header(self, request)
- data += '<feed xmlns="http://www.w3.org/2005/Atom">\n'
- data += (' <id>%s</id>\n' % self.status.getBuildbotURL())
- if self.title is not None:
- data += (' <title>%s</title>\n' % self.title)
- if self.link is not None:
- link = re.sub(r'/index.html', '', self.link)
- data += (' <link rel="self" href="%s/atom"/>\n' % link)
- data += (' <link rel="alternate" href="%s/"/>\n' % link)
- if self.description is not None:
- data += (' <subtitle>%s</subtitle>\n' % self.description)
- if self.pubdate is not None:
- rfc3339_pubdate = time.strftime("%Y-%m-%dT%H:%M:%SZ",
- self.pubdate)
- data += (' <updated>%s</updated>\n' % rfc3339_pubdate)
- data += (' <author>\n')
- data += (' <name>Build Bot</name>\n')
- data += (' </author>\n')
- return data
-
- def item(self, title='', link='', description='', lastlog='', pubDate=''):
- data = (' <entry>\n')
- data += (' <title>%s</title>\n' % title)
- if link is not None:
- data += (' <link href="%s"/>\n' % link)
- if (description is not None and lastlog is not None):
- lastlog = re.sub(r'<br/>', "\n", lastlog)
- lastlog = re.sub(r'&', "&amp;", lastlog)
- lastlog = re.sub(r"'", "&apos;", lastlog)
- lastlog = re.sub(r'"', "&quot;", lastlog)
- lastlog = re.sub(r'<', '&lt;', lastlog)
- lastlog = re.sub(r'>', '&gt;', lastlog)
- data += (' <content type="xhtml">\n')
- data += (' <div xmlns="http://www.w3.org/1999/xhtml">\n')
- data += (' %s\n' % description)
- data += (' <pre xml:space="preserve">%s</pre>\n' % lastlog)
- data += (' </div>\n')
- data += (' </content>\n')
- if pubDate is not None:
- rfc3339pubDate = time.strftime("%Y-%m-%dT%H:%M:%SZ",
- pubDate)
- data += (' <updated>%s</updated>\n' % rfc3339pubDate)
- # Every Atom entry must have a globally unique ID
- # http://diveintomark.org/archives/2004/05/28/howto-atom-id
- guid = ('tag:%s@%s,%s:%s' % (os.environ['USER'],
- os.environ['HOSTNAME'],
- time.strftime("%Y-%m-%d", pubDate),
- time.strftime("%Y%m%d%H%M%S",
- pubDate)))
- data += (' <id>%s</id>\n' % guid)
- data += (' <author>\n')
- data += (' <name>Build Bot</name>\n')
- data += (' </author>\n')
- data += (' </entry>\n')
- return data
-
- def footer(self, request):
- data = ('</feed>')
- return data
-
-class WebStatusWithFeeds(baseweb.WebStatus):
- """Override the standard WebStatus class to add RSS and Atom feeds.
-
- This adds the following web resources in addition to /waterfall:
- /rss
- /atom
-
- The same "branch" and "category" query arguments can be passed
- as with /waterfall
- e.g. http://mybot.buildbot.com:8012/rss?branch=&builder=builder-log4c-rhel-4-i386
- or
- http://mybot.buildbot.com:8012/rss?branch=&category=log4c
- """
-
- def setupSite(self):
- baseweb.WebStatus.setupSite(self)
-
- status = self.parent.getStatus()
- sr = self.site.resource
-
- rss = Rss20StatusResource(status, categories=None)
- sr.putChild("rss", rss)
- atom = Atom10StatusResource(status, categories=None)
- sr.putChild("atom", atom)
-
diff --git a/tools/buildbot/master/README b/tools/buildbot/master/README
new file mode 100644
index 0000000..35fbff0
--- /dev/null
+++ b/tools/buildbot/master/README
@@ -0,0 +1,7 @@
+The BuildBot Master is managed by the ASF Infrastructure team.
+
+This was announced per this email:
+https://mail-archives.apache.org/mod_mbox/subversion-dev/201005.mbox/%3CAANLkTilvSpSwJHLlJVpKpGVAI2-JQyGqLqCn1Sjgo-Qf@mail.gmail.com%3E
+
+The new BuildBot Master configuration is maintained here:
+https://svn.apache.org/repos/infra/infrastructure/buildbot/aegis/buildmaster/master1/
diff --git a/tools/buildbot/master/SVNMailNotifier.py b/tools/buildbot/master/SVNMailNotifier.py
deleted file mode 100644
index 1dfe839..0000000
--- a/tools/buildbot/master/SVNMailNotifier.py
+++ /dev/null
@@ -1,210 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-import os
-import urllib
-import re
-
-from email.Message import Message
-from email.Utils import formatdate
-from email.MIMEText import MIMEText
-
-from twisted.internet import defer
-from twisted.application import service
-
-from buildbot.status.builder import FAILURE, SUCCESS, WARNINGS
-from buildbot.status.mail import MailNotifier
-
-class SVNMailNotifier(MailNotifier):
- """Implement custom status mails for the Subversion project"""
-
- def __init__(self, fromaddr, mode="all", categories=None, builders=None,
- addLogs=False, relayhost="localhost",
- subject="buildbot %(result)s in %(builder)s",
- lookup=None, extraRecipients=[],
- sendToInterestedUsers=True,
- body="",
- replytoaddr=""):
- """
- @type body: string
- @param body: a string to be used as the body of the message.
-
- @type replytoaddr: string
- @param replytoaddr: the email address to be used in the 'Reply-To' header.
- """
-
- self.body = body
- self.replytoaddr = replytoaddr
-
- # pass the rest of the parameters to our parent.
- MailNotifier.__init__(self, fromaddr, mode, categories, builders,
- addLogs, relayhost, subject, lookup, extraRecipients,
- sendToInterestedUsers)
-
- def buildMessage(self, name, build, results):
- if self.mode == "all":
- intro = "The Buildbot has finished a build of %s.\n" % name
- elif self.mode == "failing":
- intro = "The Buildbot has detected a failed build of %s.\n" % name
- else:
- intro = "The Buildbot has detected a new failure of %s.\n" % name
-
- # buildurl
- buildurl = self.status.getURLForThing(build)
-# lgo: url's are already quoted now.
-# if buildurl:
-# buildurl = urllib.quote(buildurl, '/:')
-
- # buildboturl
- buildboturl = self.status.getBuildbotURL()
-# if url:
-# buildboturl = urllib.quote(url, '/:')
-
- # reason of build
- buildreason = build.getReason()
-
- # source stamp
- patch = None
- ss = build.getSourceStamp()
- if ss is None:
- source = "unavailable"
- else:
- if build.getChanges():
- revision = max([int(c.revision) for c in build.getChanges()])
-
- source = ""
- if ss.branch is None:
- ss.branch = "trunk"
- source += "[branch %s] " % ss.branch
- if revision:
- source += str(revision)
- else:
- source += "HEAD"
- if ss.patch is not None:
- source += " (plus patch)"
-
- # actual buildslave
- buildslave = build.getSlavename()
-
- # TODO: maybe display changes here? or in an attachment?
-
- # status
- t = build.getText()
- if t:
- t = ": " + " ".join(t)
- else:
- t = ""
-
- if results == SUCCESS:
- status = "Build succeeded!\n"
- res = "PASS"
- elif results == WARNINGS:
- status = "Build Had Warnings%s\n" % t
- res = "WARN"
- else:
- status = "BUILD FAILED%s\n" % t
- res = "FAIL"
-
- if build.getLogs():
- log = build.getLogs()[-1]
- laststep = log.getStep().getName()
- lastlog = log.getText()
-
- # only give me the last lines of the log files.
- lines = re.split('\n', lastlog)
- lastlog = ''
- for logline in lines[max(0, len(lines)-100):]:
- lastlog = lastlog + logline
-
- # TODO: it would be nice to provide a URL for the specific build
- # here. That involves some coordination with html.Waterfall .
- # Ideally we could do:
- # helper = self.parent.getServiceNamed("html")
- # if helper:
- # url = helper.getURLForBuild(build)
-
- text = self.body % { 'result': res,
- 'builder': name,
- 'revision': revision,
- 'branch': ss.branch,
- 'blamelist': ",".join(build.getResponsibleUsers()),
- 'buildurl': buildurl,
- 'buildboturl': buildboturl,
- 'reason': buildreason,
- 'source': source,
- 'intro': intro,
- 'status': status,
- 'slave': buildslave,
- 'laststep': laststep,
- 'lastlog': lastlog,
- }
-
- haveAttachments = False
- if ss.patch or self.addLogs:
- haveAttachments = True
- if not canDoAttachments:
- log.msg("warning: I want to send mail with attachments, "
- "but this python is too old to have "
- "email.MIMEMultipart . Please upgrade to python-2.3 "
- "or newer to enable addLogs=True")
-
- if haveAttachments and canDoAttachments:
- m = MIMEMultipart()
- m.attach(MIMEText(text))
- else:
- m = Message()
- m.set_payload(text)
-
- m['Date'] = formatdate(localtime=True)
- m['Subject'] = self.subject % { 'result': res,
- 'builder': name,
- 'revision': revision,
- 'branch': ss.branch
- }
- m['From'] = self.fromaddr
- # m['To'] is added later
- m['Reply-To'] = self.replytoaddr
-
- if ss.patch:
- a = MIMEText(patch)
- a.add_header('Content-Disposition', "attachment",
- filename="source patch")
- m.attach(a)
- if self.addLogs:
- for log in build.getLogs():
- name = "%s.%s" % (log.getStep().getName(),
- log.getName())
- a = MIMEText(log.getText())
- a.add_header('Content-Disposition', "attachment",
- filename=name)
- m.attach(a)
-
- # now, who is this message going to?
- dl = []
- recipients = self.extraRecipients[:]
- if self.sendToInterestedUsers and self.lookup:
- for u in build.getInterestedUsers():
- d = defer.maybeDeferred(self.lookup.getAddress, u)
- d.addCallback(recipients.append)
- dl.append(d)
- d = defer.DeferredList(dl)
- d.addCallback(self._gotRecipients, recipients, m)
- return d
-
diff --git a/tools/buildbot/master/master.cfg b/tools/buildbot/master/master.cfg
deleted file mode 100644
index 96b0037..0000000
--- a/tools/buildbot/master/master.cfg
+++ /dev/null
@@ -1,258 +0,0 @@
-# -*- python -*-
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-
-import os, os.path, re
-
-from buildbot.scheduler import Scheduler
-from buildbot.process import factory
-from buildbot.steps import source, shell
-from buildbot.status.html import WebStatus
-from buildbot.scheduler import AnyBranchScheduler
-#from buildbot.twcompat import implements
-from buildbot.scheduler import Try_Userpass
-from buildbot.scheduler import Nightly
-from buildbot.changes.svnpoller import SVNPoller, split_file_branches
-from buildbot.buildslave import BuildSlave
-
-#import TigrisMailSource
-import SVNMailNotifier
-from Feeder import WebStatusWithFeeds
-import private
-
-REPO="http://svn.apache.org/repos/asf/subversion/"
-
-s = factory.s
-
-# This is the dictionary that the buildmaster pays attention to. We also use
-# a shorter alias to save typing.
-c = BuildmasterConfig = {}
-
-# slaves
-c['slaves'] = [BuildSlave("fc1-gcc3.3.2-ia32", private.slavePwd),
- BuildSlave("osx10.4-gcc4.0.1-ia32", private.slavePwd),
- BuildSlave("xp-vs2003-ia32", private.slavePwd),
- BuildSlave("dlr-fc3", private.slavePwd),
- BuildSlave("eh-debsarge1", private.slavePwd),
- BuildSlave("x64-ubuntu", private.hwrightPwd),
- BuildSlave("x64-centos", private.wandPwd),
-]
-
-# sources
-c['change_source'] = SVNPoller(REPO,
- split_file=split_file_branches,
- svnbin=private.svnbin,
- pollinterval=300)
-
-excludes = ["COMMITTERS", "STATUS", "CHANGES", "README", "INSTALL", "COPYING", "HACKING", "TRANSLATING", "BUGS", "www", "notes", "packages", "subversion/LICENSE", "subversion/po", "doc", "contrib", "tools", "dist.sh"]
-
-# function checks if this revision is interesting enough to trigger the builds.
-def isImportant(change):
- if not excludes:
- return True
-
- for file in change.files:
- triggerBuild = True
- for pattern in excludes:
- match = re.match(pattern, file)
- if match:
- triggerBuild = False
- break
- if triggerBuild:
- return True
-
-# schedulers
-bs1 = AnyBranchScheduler("main",
- [None, "branches/1.3.x", "branches/1.4.x", "branches/1.5.x",
- "branches/1.6.x"],
- 5*60, ["x86-macosx-gnu shared",
- "debian-x86_64-32 shared gcc",
- "x64-ubuntu gcc",
- "x64-centos gcc",
- ],
- fileIsImportant=isImportant)
-
-ps1 = Nightly('daily-2pm-cet', ['x86-macosx-gnu shared daily ra_serf'], hour=14, minute=0)
-
-ts = Try_Userpass("try", ["x86-macosx-gnu shared", "debian-x86_64-32 shared gcc"],
- port=private.tryPort, userpass=[(private.tryUser,private.tryPwd)] )
-c['schedulers'] = [bs1, ps1, ts]
-
-# steps and builders
-
-# define default set of steps, all under masters control.
-defSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], timeout=3600),
- source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600),
- shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_neon", command=["../svncheck.sh", "fsfs", "ra_neon"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True),
- ]
-
-defFact = factory.BuildFactory(defSteps)
-
-# define Windows custom steps
-winSteps = [source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600),
- shell.ShellCommand(name="Build", command=["..\svnbuild.bat"], timeout=3600, haltOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_local", command=["..\svncheck.bat","fsfs","ra_local"], timeout=3600, flunkOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_dav", command=["..\svncheck.bat","fsfs","ra_dav"], timeout=3600, flunkOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_svn", command=["..\svncheck.bat","fsfs","ra_svn"], timeout=3600, flunkOnFailure=True),
- shell.ShellCommand(name="Cleanup", command=["..\svnclean.bat"], timeout=3600),
- ]
-winFact = factory.BuildFactory(winSteps)
-
-# define Windows 6 way steps
-win6wSteps = [source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600),
- shell.ShellCommand(name="Cleanup", command=["..\svnclean.bat"], timeout=3600),
- shell.ShellCommand(name="Build", command=["..\svnbuild.bat", "%(branch)"], timeout=3600, haltOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_local", command=["..\svncheck.bat","fsfs","ra_local"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True),
- ]
-win6wFact = factory.BuildFactory(win6wSteps)
-
-# define set of steps for eh-x84_64-32, clean step comes first.
-ehSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], workdir='', timeout=3600),
- source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600),
- shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_svn", command=["../svncheck.sh","fsfs","ra_svn"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True),
- ]
-ehFact = factory.BuildFactory(ehSteps)
-
-# nightly build ra_serf
-serfSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], timeout=3600),
- source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600),
- shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_serf", command=["../svncheck.sh", "fsfs", "ra_serf"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True),
- ]
-serfFact = factory.BuildFactory(serfSteps)
-
-# define set of steps for x64-ubuntu, clean step comes first.
-x64ubSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], workdir='', timeout=3600),
- source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600),
- shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_local", command=["../svncheck.sh","fsfs","ra_local"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=False),
- shell.ShellCommand(name="Test bindings", command=["../svncheck-bindings.sh","fsfs","ra_local"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True),
- ]
-x64ubFact = factory.BuildFactory(x64ubSteps)
-
-x64coSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], timeout=3600),
- source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600),
- shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True),
- shell.ShellCommand(name="Test fsfs+ra_local", command=["../svncheck.sh", "fsfs", "ra_neon"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True),
- shell.ShellCommand(name="Test bindings", command=["../svncheck-bindings.sh","fsfs","ra_neon"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True),
- ]
-x64coFact = factory.BuildFactory(x64coSteps)
-
-
-c['builders'] = [
- {'name': "x86-macosx-gnu shared",
- 'slavename': "osx10.4-gcc4.0.1-ia32",
- 'builddir': "osx10.4-gcc4.0.1-ia32",
- 'factory': defFact,
- 'category': "prod",
- },
- {'name': "debian-x86_64-32 shared gcc",
- 'slavename': "eh-debsarge1",
- 'builddir': "eh-debsarge1",
- 'factory': ehFact,
- 'category': "prod",
- },
- {'name': "x86-macosx-gnu shared daily ra_serf",
- 'slavename': "osx10.4-gcc4.0.1-ia32",
- 'builddir': "osx10.4-gcc4.0.1-ia32-serf",
- 'factory': serfFact,
- 'category': "prod",
- },
- {'name': "x64-ubuntu gcc",
- 'slavename': "x64-ubuntu",
- 'builddir': "x64-ubuntu",
- 'factory': x64ubFact,
- 'category': "prod",
- },
- {'name': "x64-centos gcc",
- 'slavename': "x64-centos",
- 'builddir': "x64-centos",
- 'factory': x64coFact,
- 'category': "prod",
- },
-]
-
-# 'slavePortnum' defines the TCP port to listen on. This must match the value
-# configured into the buildslaves (with their --master option)
-
-c['slavePortnum'] = private.slavePortnum
-
-# show webpage
-c['status'] = []
-c['status'].append(WebStatusWithFeeds(http_port="tcp:"+str(private.htmlPort)+":interface=127.0.0.1", allowForce=True))
-
-# send emails
-from buildbot.status import mail
-mailbody = 'Full details are available at: \n%(buildurl)s\n\n'\
- 'Author list: %(blamelist)s\n\n'\
- 'Build Slave: %(slave)s\n\n\n'\
- 'Subversion Buildbot\n'\
- '%(buildboturl)s\n\n\n'\
- 'Last 100 lines of the build log (step: %(laststep)s ):\n\n %(lastlog)s'
-
-
-c['status'].append(SVNMailNotifier.SVNMailNotifier(
- fromaddr="buildbot@mobsol.be",
- extraRecipients=["notifications@subversion.apache.org"],
- sendToInterestedUsers=False,
- subject="svn %(branch)s r%(revision)s: %(result)s (%(builder)s)",
- body=mailbody,
- replytoaddr="dev@subversion.apache.org",
- categories=["prod"],
- relayhost=private.smtp))
-
-# from buildbot.status import words
-# c['status'].append(words.IRC(host="irc.example.com", nick="bb",
-# channels=["#example"]))
-
-
-# if you set 'debugPassword', then you can connect to the buildmaster with
-# the diagnostic tool in contrib/debugclient.py . From this tool, you can
-# manually force builds and inject changes, which may be useful for testing
-# your buildmaster without actually commiting changes to your repository (or
-# before you have a functioning 'sources' set up). The debug tool uses the
-# same port number as the slaves do: 'slavePortnum'.
-
-#c['debugPassword'] = "debugpassword"
-
-# if you set 'manhole', you can telnet into the buildmaster and get an
-# interactive python shell, which may be useful for debugging buildbot
-# internals. It is probably only useful for buildbot developers.
-# from buildbot.master import Manhole
-#c['manhole'] = Manhole(9999, "admin", "password")
-
-# the 'projectName' string will be used to describe the project that this
-# buildbot is working on. For example, it is used as the title of the
-# waterfall HTML page. The 'projectURL' string will be used to provide a link
-# from buildbot HTML pages to your project's home page.
-
-c['projectName'] = "Subversion"
-c['projectURL'] = "http://subversion.apache.org/"
-
-# the 'buildbotURL' string should point to the location where the buildbot's
-# internal web server (usually the html.Waterfall page) is visible. This
-# typically uses the port number set in the Waterfall 'status' entry, but
-# with an externally-visible host name which the buildbot cannot figure out
-# without some help.
-
-c['buildbotURL'] = "http://crest.ics.uci.edu/buildbot/"
diff --git a/tools/buildbot/master/private-sample.py b/tools/buildbot/master/private-sample.py
deleted file mode 100644
index 2d2cd8c..0000000
--- a/tools/buildbot/master/private-sample.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-# -*- python -*-
-
-svnbin="svn"
-smtp="smtp.example.com"
-htmlPort = 8010
-slavePortnum = 9989
-tryPort = 8031
-tryUser = "xxxx"
-tryPwd = "xxxx"
-slavePwd = "xxxx"
-hwrightPwd = "xxxx"
-aholst_slavePwd = "xxxx"
diff --git a/tools/buildbot/master/public_html/buildbot.css b/tools/buildbot/master/public_html/buildbot.css
deleted file mode 100644
index edf51f9..0000000
--- a/tools/buildbot/master/public_html/buildbot.css
+++ /dev/null
@@ -1,68 +0,0 @@
-/* Copied from buildbot.ethereal.com. Thanks! */
-
-* {
- font-family: verdana, arial, helvetica, sans-serif;
- font-size: 12px;
- font-weight: bold;
-}
-
-a:link,a:visited,a:active {
- color: #333;
-}
-a:hover {
- color: #999;
-}
-
-.table {
- border-spacing: 2px;
-}
-
-td.Project {
- color: #000;
- border: 1px solid #666666;
- background-color: #fff;
-}
-
-td.Event, td.Activity, td.Time, td.Builder {
-/* color: #333333; */
- border: 1px solid #666666;
- background-color: #eee;
- font-weight: normal;
-}
-
-td.Change {
- color: #fff;
- border: 1px solid #666666;
- background-color: #aaf;
-}
-
-/* LastBuild, BuildStep states */
-.success {
- color: #FFFFFF;
- border: 1px solid #666666;
- background-color: #3b0;
-}
-
-.failure {
- color: #FFFFFF;
- border: 1px solid #666666;
- background-color: #d33;
-}
-
-.warnings {
- color: #FFFFFF;
- border: 1px solid #666666;
- background-color: #fa2;
-}
-
-.exception, td.offline {
- color: #FFFFFF;
- border: 1px solid #666666;
- background-color: #e0b0ff;
-}
-
-.start,.running, td.building {
- color: #555;
- border: 1px solid #666666;
- background-color: #fffc6c;
-}
diff --git a/tools/buildbot/master/public_html/index.html b/tools/buildbot/master/public_html/index.html
deleted file mode 100644
index c2b419f..0000000
--- a/tools/buildbot/master/public_html/index.html
+++ /dev/null
@@ -1,53 +0,0 @@
-<!--
-
- Licensed to the Apache Software Foundation (ASF) under one
- or more contributor license agreements. See the NOTICE file
- distributed with this work for additional information
- regarding copyright ownership. The ASF licenses this file
- to you under the Apache License, Version 2.0 (the
- "License"); you may not use this file except in compliance
- with the License. You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing,
- software distributed under the License is distributed on an
- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- KIND, either express or implied. See the License for the
- specific language governing permissions and limitations
- under the License.
-
--->
-
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
-<html>
-<head>
-<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-15">
-<title>Welcome to the Buildbot</title>
-</head>
-
-<body>
-<h1>Welcome to the Buildbot!</h1>
-
-<ul>
- <li>the <a href="waterfall">Waterfall Display</a> will give you a
- time-oriented summary of recent buildbot activity.</li>
-
- <li>the <a href="grid">Grid Display</a> will give you a
- developer-oriented summary of recent buildbot activity.</li>
-
- <li>The <a href="one_box_per_builder">Latest Build</a> for each builder is
- here.</li>
-
- <li><a href="one_line_per_build">Recent Builds</a> are summarized here, one
- per line.</li>
-
- <li><a href="buildslaves">Buildslave</a> information</li>
- <li><a href="changes">ChangeSource</a> information.</li>
-
- <br />
- <li><a href="about">About this Buildbot</a></li>
-</ul>
-
-
-</body> </html>
diff --git a/tools/buildbot/master/public_html/robots.txt b/tools/buildbot/master/public_html/robots.txt
deleted file mode 100644
index 47a9d27..0000000
--- a/tools/buildbot/master/public_html/robots.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-User-agent: *
-Disallow: /waterfall
-Disallow: /builders
-Disallow: /changes
-Disallow: /buildslaves
-Disallow: /schedulers
-Disallow: /one_line_per_build
-Disallow: /one_box_per_builder
-Disallow: /xmlrpc
diff --git a/tools/buildbot/slaves/bb-openbsd/svncheck.sh b/tools/buildbot/slaves/bb-openbsd/svncheck.sh
index ef785b1..7d50d98 100755
--- a/tools/buildbot/slaves/bb-openbsd/svncheck.sh
+++ b/tools/buildbot/slaves/bb-openbsd/svncheck.sh
@@ -25,6 +25,8 @@ set -x
branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))"
export MALLOC_OPTIONS=S
(cd .. && gmake BRANCH="$branch" PARALLEL="" THREADING="no" \
+ MEMCACHED_SERVER="127.0.0.1:11211" \
+ EXCLUSIVE_WC_LOCKS=1 \
svn-check-local \
svn-check-svn \
svn-check-neon \
diff --git a/tools/buildbot/slaves/bb-openbsd/svnclean.sh b/tools/buildbot/slaves/bb-openbsd/svnclean.sh
index 82dbbbb..063f4dd 100755
--- a/tools/buildbot/slaves/bb-openbsd/svnclean.sh
+++ b/tools/buildbot/slaves/bb-openbsd/svnclean.sh
@@ -24,7 +24,7 @@ set -x
branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))"
(test -h ../svn-trunk || ln -s build ../svn-trunk)
-for i in 6 7; do
+for i in 6 7 8 9 10; do
(test -h ../svn-1.${i}.x || ln -s build ../svn-1.${i}.x)
done
svn update ../../unix-build
diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh b/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh
index d3b5036..c2302e1 100755
--- a/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh
+++ b/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh
@@ -25,7 +25,7 @@ set -x
# upload file to server
FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
tar -czf $FILENAME tests.log
-ftp -n www.mobsol.be < ../ftpscript
+ftp -n www.mobsol.be < ../ftpscript
rm $FILENAME
echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME"
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh b/tools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh
new file mode 100755
index 0000000..495cb21
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svnbuild.sh
@@ -0,0 +1,51 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+. ../svnenv.sh
+
+echo "============ autogen.sh"
+./autogen.sh || exit $?
+
+SVN_VER_MINOR=`awk '/define SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h`
+
+cd ../obj
+grep obj/subversion/tests /etc/mnttab > /dev/null || mount-tmpfs
+
+# --enable-optimize adds -flto which breaks the 1.8 C tests because
+# they link main() from a library.
+if [ $SVN_VER_MINOR -gt 8 ]; then
+ OPTIMIZE_OPTION='--enable-optimize'
+fi
+
+echo "============ configure"
+../build/configure CC='cc -m64 -v' \
+ --with-apr=/export/home/wandisco/buildbot/install \
+ --with-apr-util=/export/home/wandisco/buildbot/install \
+ --with-serf=/export/home/wandisco/buildbot/install \
+ --with-apxs=/export/home/wandisco/buildbot/install/bin/apxs \
+ --with-sqlite=/export/home/wandisco/buildbot/sqlite-amalgamation-3071501/sqlite3.c \
+ --disable-shared \
+ $OPTIMIZE_OPTION \
+ || exit $?
+
+echo "============ make"
+make -j30 || exit $?
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svncheck.sh b/tools/buildbot/slaves/svn-sparc-solaris/svncheck.sh
new file mode 100755
index 0000000..0ea134c
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svncheck.sh
@@ -0,0 +1,39 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+. ../svnenv.sh
+
+SVN_VER_MINOR=`awk '/define SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h`
+
+cd ../obj
+
+# Use GNU iconv since the system one does not work well enough
+LD_PRELOAD_64=/export/home/wandisco/buildbot/install/lib/preloadable_libiconv.so
+export LD_PRELOAD_64
+
+if [ $SVN_VER_MINOR -eq 9 ]; then
+ echo "============ make svnserveautocheck"
+ make svnserveautocheck CLEANUP=1 PARALLEL=30 THREADED=1 || exit $?
+else
+ echo "============ make check"
+ make check CLEANUP=1 PARALLEL=30 THREADED=1 || exit $?
+fi
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh b/tools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh
new file mode 100755
index 0000000..b828e5e
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svncleanup.sh
@@ -0,0 +1,30 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+. ../svnenv.sh
+
+cd ../obj
+
+echo "============ make extraclean"
+test -f Makefile && (make extraclean || exit $?)
+
+grep obj/subversion/tests /etc/mnttab > /dev/null && umount-tmpfs
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh b/tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh
new file mode 100644
index 0000000..48d6b42
--- /dev/null
+++ b/tools/buildbot/slaves/svn-sparc-solaris/svnenv.sh
@@ -0,0 +1,23 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+GREP=/usr/bin/grep
+export GREP
+PATH=/opt/csw/gnu:/usr/bin:/usr/ccs/bin:/opt/csw/bin:/export/home/wandisco/buildbot/install/bin
+export PATH
diff --git a/tools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh b/tools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh
new file mode 100755
index 0000000..793874c
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-centos/list-svn-deps.sh
@@ -0,0 +1,34 @@
+#!/bin/sh
+# List the versions of all of SVN's dependencies.
+# The output is suitable for putting in the buildbot slave's 'info/host'
+# file, after a general description of the slave machine.
+
+echo "=== SVN dependencies ==="
+DEPS="gcc apr apr-util apr-devel apr-util-devel httpd httpd-devel \
+ neon neon-devel python python-devel ruby ruby-devel"
+#yum -C list $DEPS
+rpm -q ${DEPS} | sort | uniq
+# The SQLite version is found by the name of the amalgamation directory,
+# which is found in the home dir. It is also explicitly referenced in the
+# './configure' line in 'svnbuild.sh'.
+(cd && echo sqlite-3.*[0-9].*[0-9])
+echo
+
+echo "=== SVN test dependencies ==="
+#rpm -q pysqlite | sort | uniq
+echo
+
+JAVA_VER=`java -fullversion 2>&1`
+PY_VER=`python -V 2>&1`
+RUBY_VER=`ruby --version`
+PERL_VER=`perl -v | grep This`
+echo "=== interpreters / bindings ==="
+echo "Java: $JAVA_VER"
+echo "Python: $PY_VER"
+echo "Ruby: $RUBY_VER"
+echo "Perl: $PERL_VER"
+echo
+
+echo "=== BuildBot version ==="
+buildbot --version
+echo
diff --git a/tools/buildbot/slaves/centos/svnbuild.sh b/tools/buildbot/slaves/svn-x64-centos/svnbuild.sh
index 9278aeb..eea0863 100755
--- a/tools/buildbot/slaves/centos/svnbuild.sh
+++ b/tools/buildbot/slaves/svn-x64-centos/svnbuild.sh
@@ -25,6 +25,22 @@ set -x
export MAKEFLAGS=-j4
export PYTHON=/usr/local/python25/bin/python
+SVN_VER_MINOR=`awk '/define SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h`
+
+if [ $SVN_VER_MINOR -ge 9 ]; then
+ # 1.9 or newer requires APR 1.3.x and Serf 1.3.4
+ # this bubbles out to httpd as well. So use the right dependencies
+ APR=/home/bt/packages/apr-1.3.9-prefix/bin/apr-1-config
+ APU=/home/bt/packages/apr-1.3.9-prefix/bin/apu-1-config
+ APXS=/home/bt/packages/apr-1.3.9-prefix/bin/apxs
+ SERF=/home/bt/packages/apr-1.3.9-prefix
+else
+ APR=/usr
+ APU=/usr
+ APXS=/usr/sbin/apxs
+ SERF=/usr/local
+fi
+
echo "========= autogen.sh"
./autogen.sh || exit $?
@@ -32,13 +48,15 @@ echo "========= configure"
# --with-junit=/usr/share/java/junit.jar
# --with-jdk=/usr/lib/jvm/java-1.6.0-openjdk-1.6.0.0.x86_64 \
# --without-berkeley-db \
+# --disable-shared \
+#CFLAGS='-fprofile-arcs -ftest-coverage' \
./configure --enable-javahl --enable-maintainer-mode \
--with-neon=/usr \
- --with-serf=/usr/local \
- --with-apxs=/usr/sbin/apxs \
+ --with-serf=$SERF \
+ --with-apxs=$APXS \
--with-berkeley-db \
- --with-apr=/usr \
- --with-apr-util=/usr \
+ --with-apr=$APR \
+ --with-apr-util=$APU \
--with-jdk=/opt/java/jdk1.6.0_15 \
--with-junit=/home/bt/junit-4.4.jar \
--with-sqlite=/home/bt/packages/sqlite-amalgamation-dir/sqlite3.c \
diff --git a/tools/buildbot/slaves/centos/svncheck-bindings.sh b/tools/buildbot/slaves/svn-x64-centos/svncheck-bindings.sh
index 4e8c1aa..4e8c1aa 100755
--- a/tools/buildbot/slaves/centos/svncheck-bindings.sh
+++ b/tools/buildbot/slaves/svn-x64-centos/svncheck-bindings.sh
diff --git a/tools/buildbot/slaves/centos/svncheck.sh b/tools/buildbot/slaves/svn-x64-centos/svncheck.sh
index 8240e36..77ed04d 100755
--- a/tools/buildbot/slaves/centos/svncheck.sh
+++ b/tools/buildbot/slaves/svn-x64-centos/svncheck.sh
@@ -35,10 +35,7 @@ test -e ../mount-ramdrive && ../mount-ramdrive
echo "========= make"
case "$2" in
- ""|ra_dav|ra_neon)
- make davautocheck FS_TYPE=$1 HTTP_LIBRARY=neon CLEANUP=1 || exit $?
- ;;
- ra_serf)
+ ""|ra_serf)
make davautocheck FS_TYPE=$1 HTTP_LIBRARY=serf CLEANUP=1 || exit $?
;;
ra_svn)
diff --git a/tools/buildbot/slaves/centos/svnclean.sh b/tools/buildbot/slaves/svn-x64-centos/svnclean.sh
index 95d4290..9a5e715 100755
--- a/tools/buildbot/slaves/centos/svnclean.sh
+++ b/tools/buildbot/slaves/svn-x64-centos/svnclean.sh
@@ -27,6 +27,6 @@ echo "========= unmount RAM disc"
test -e ../unmount-ramdrive && ../unmount-ramdrive
echo "========= make extraclean"
-test -e Makefile && { make extraclean || exit $?; }
+test -e Makefile && (make extraclean || exit $?)
exit 0
diff --git a/tools/buildbot/slaves/centos/svnlog.sh b/tools/buildbot/slaves/svn-x64-centos/svnlog.sh
index d3b5036..c2302e1 100755
--- a/tools/buildbot/slaves/centos/svnlog.sh
+++ b/tools/buildbot/slaves/svn-x64-centos/svnlog.sh
@@ -25,7 +25,7 @@ set -x
# upload file to server
FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
tar -czf $FILENAME tests.log
-ftp -n www.mobsol.be < ../ftpscript
+ftp -n www.mobsol.be < ../ftpscript
rm $FILENAME
echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME"
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh
index b4d9e0c..ce845f8 100755
--- a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh
@@ -23,7 +23,7 @@
# upload file to server
FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
tar -czf $FILENAME tests.log
-ftp -n www.mobsol.be < ../ftpscript
+ftp -n www.mobsol.be < ../ftpscript
rm $FILENAME
echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/osx10.4-gcc4.0.1-ia32/$FILENAME"
diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh
index b4d9e0c..ce845f8 100755
--- a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh
+++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh
@@ -23,7 +23,7 @@
# upload file to server
FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
tar -czf $FILENAME tests.log
-ftp -n www.mobsol.be < ../ftpscript
+ftp -n www.mobsol.be < ../ftpscript
rm $FILENAME
echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/osx10.4-gcc4.0.1-ia32/$FILENAME"
diff --git a/tools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh b/tools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh
new file mode 100755
index 0000000..27c2e87
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/mkramdisk.sh
@@ -0,0 +1,63 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+
+if [ -z "$1" ]; then
+ echo "Missing parameter: volume name"
+ exit 1
+fi
+
+if [ -z "$2" ]; then
+ echo "Missing parameter: RAMdisk config file"
+ exit 1
+fi
+
+volume="/Volumes/$1"
+ramconf="$2"
+
+ramconfpath=$(dirname "${ramconf}")
+if [ ! -d "${ramconfpath}" ]; then
+ echo "Missing RAMdisk config file path: ${ramconfpath}"
+ exit 1
+fi
+if [ -f "${ramconf}" ]; then
+ echo "RAMdisk config file exists: ${ramconf}"
+ exit 1
+fi
+
+if [ -d "${volume}" ]; then
+ echo "Mount point exists: ${volume}"
+ exit 1
+fi
+
+mount | grep "^/dev/disk[0-9][0-9]* on ${volume} (hfs" >/dev/null || {
+ set -e
+ echo -n "" > "${ramconf}"
+
+ # Make sure we strip trailing spaces from the result of older
+ # versions of hduitil.
+ device=$(echo $(hdiutil attach -nomount ram://900000))
+ newfs_hfs -M 0700 -v "$1" "${device}"
+ hdiutil mountvol "${device}"
+
+ echo -n "${device}" > "${ramconf}"
+}
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh b/tools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh
new file mode 100755
index 0000000..c5db9bb
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/rmramdisk.sh
@@ -0,0 +1,62 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -x
+
+if [ -z "$1" ]; then
+ echo "Missing parameter: volume name"
+ exit 1
+fi
+
+if [ -z "$2" ]; then
+ echo "Missing parameter: RAMdisk config file"
+ exit 1
+fi
+
+volume="/Volumes/$1"
+ramconf="$2"
+
+if [ ! -f "${ramconf}" ]; then
+ mount | grep "^/dev/disk[0-9][0-9]* on ${volume} (hfs" || {
+ echo "Not mounted: ${volume}"
+ exit 0
+ }
+ echo "Missing RAMdisk config file: ${ramconf}"
+ exit 1
+fi
+
+if [ ! -d "${volume}" ]; then
+ echo "Mount point missing: ${volume}"
+ exit 1
+fi
+
+device=$(cat "${ramconf}")
+devfmt=$(echo "${device}" | grep "^/dev/disk[0-9][0-9]*$")
+if [ "${device}" != "${devfmt}" ]; then
+ echo "Invalid device name: ${device}"
+ exit 1
+fi
+
+mount | grep "^${device} on ${volume} (hfs" >/dev/null && {
+ set -e
+ rm "${ramconf}"
+ hdiutil detach "${device}" -force
+}
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/setenv.sh b/tools/buildbot/slaves/svn-x64-macosx/setenv.sh
new file mode 100644
index 0000000..31ece51
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/setenv.sh
@@ -0,0 +1,70 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+## This script calls a helper that provides the following environment
+## variables:
+##
+## PATH The search path
+## SVNBB_OPENSSL OpenSSL installation prefix
+## SVNBB_BDB Berkeley DB installation prefix
+## SVNBB_SWIG Swig installation prefix
+## SVNBB_SERF Serf installation prefix
+## Note: Serf should be built only
+## with the system APR/-Util.
+## SVNBB_APR_13_NOTHREAD Path of APR-1.3 with threading disabled
+## SVNBB_APR_15 Path of APR-1.5
+## SVNBB_APR_20_DEV Path of APR-2.0
+## SVNBB_JUNIT The path of the junit.jar
+## SVNBB_PARALLEL Optional: parallelization; defaults to 2
+## LIBTOOL_CONFIG Optional: libtool configuration path
+##
+## The invoking script will set a local variable named ${scripts} that
+## is the absolute path of the parent directory of this file.
+
+# Modify this to suit your deployment
+environment=$(cd "${scripts}/.." && pwd)/environment.sh
+
+eval $(${environment})
+SVNBB_PARALLEL="${SVNBB_PARALLEL-2}"
+
+export PATH
+export SVNBB_BDB
+export SVNBB_SWIG
+export SVNBB_SERF
+export SVNBB_APR_13_NOTHREAD
+export SVNBB_APR_15
+export SVNBB_APR_20_DEV
+export SVNBB_JUNIT
+export SVNBB_PARALLEL
+export LIBTOOL_CONFIG
+
+
+# Set the absolute source path
+abssrc=$(pwd)
+
+# Set the path to the RAMdisk device name file
+ramconf=$(dirname "${abssrc}")/ramdisk.conf
+
+# The RAMdisk volume name is the same as the name of the builder
+volume_name=$(basename $(dirname "${abssrc}"))
+if [ -z "${volume_name}" ]; then
+ echo "Missing config parameter: RAMdisk volume name"
+ exit 1
+fi
+
+# Set the absolute build path
+absbld="/Volumes/${volume_name}"
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh b/tools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh
new file mode 100755
index 0000000..fa085dd
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svnbuild-bindings.sh
@@ -0,0 +1,45 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+#
+# Step 4: build swig-py
+#
+
+echo "============ make swig-py"
+cd ${absbld}
+make swig-py
+
+echo "============ make swig-pl"
+cd ${absbld}
+make swig-pl
+
+echo "============ make swig-rb"
+cd ${absbld}
+make swig-rb
+
+echo "============ make javahl"
+cd ${absbld}
+make javahl
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svnbuild.sh b/tools/buildbot/slaves/svn-x64-macosx/svnbuild.sh
new file mode 100755
index 0000000..4f1e4e9
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svnbuild.sh
@@ -0,0 +1,101 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+${scripts}/mkramdisk.sh ${volume_name} ${ramconf}
+
+# These are the default APR and Serf config options
+serfconfig="--with-serf=${SVNBB_SERF} --with-apxs=/usr/sbin/apxs"
+
+# An optional parameter tells build scripts which version of APR to use
+if [ ! -z "$1" ]; then
+ aprdir=$(eval 'echo $SVNBB_'"$1")
+fi
+if [ ! -z "${aprdir}" -a -d "${aprdir}" ]; then
+ aprconfig="--with-apr=${aprdir} --with-apr-util=${aprdir}"
+ serfconfig=" --without-serf --without-apxs"
+fi
+
+#
+# Step 0: Create a directory for the test log files
+#
+if [ -d "${abssrc}/.test-logs" ]; then
+ rm -fr "${abssrc}/.test-logs"
+fi
+mkdir "${abssrc}/.test-logs" || exit 1
+
+#
+# Step 1: get the latest and greatest amalgamated SQLite
+#
+
+echo "============ get-deps.sh sqlite"
+cd ${abssrc}
+rm -fr sqlite-amalgamation
+./get-deps.sh sqlite
+
+#
+# Step 2: Regenerate build scripts
+#
+
+echo "============ autogen.sh"
+cd ${abssrc}
+./autogen.sh
+
+svnminor=$(awk '/define *SVN_VER_MINOR/ { print $3 }' subversion/include/svn_version.h)
+
+# --enable-optimize adds -flto which breaks the 1.8 C tests because
+# they link main() from a library.
+if [ ${svnminor} -gt 8 ]; then
+ optimizeconfig=' --enable-optimize'
+fi
+
+#
+# Step 3: Configure
+#
+
+echo "============ configure"
+cd ${absbld}
+env CC=clang CXX=clang++ \
+${abssrc}/configure \
+ --prefix="${absbld}/.install-prefix" \
+ --disable-debug${optimizeconfig} \
+ --disable-nls \
+ --disable-mod-activation \
+ ${aprconfig}${serfconfig} \
+ --with-swig="${SVNBB_SWIG}" \
+ --with-berkeley-db=db.h:"${SVNBB_BDB}/include":${SVNBB_BDB}/lib:db \
+ --enable-javahl \
+ --without-jikes \
+ --with-junit="${SVNBB_JUNIT}"
+
+test -f config.log && mv config.log "${abssrc}/.test-logs/config.log"
+
+#
+# Step 4: build
+#
+
+echo "============ make"
+cd ${absbld}
+make -j${SVNBB_PARALLEL}
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh b/tools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh
new file mode 100755
index 0000000..943eb56
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svncheck-bindings.sh
@@ -0,0 +1,58 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+run_tests() {
+ check="$1"
+
+ echo "============ make check-${check}"
+ cd ${absbld}
+ make check-${check} || exit 1
+}
+
+
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+# Parse arguments to find out which tests we should run
+check_swig_py=false
+check_swig_pl=false
+check_swig_rb=false
+check_javahl=false
+
+while [ ! -z "$1" ]; do
+ case "$1" in
+ swig-py) check_swig_py=true;;
+ swig-pl) check_swig_pl=true;;
+ swig-rb) check_swig_rb=true;;
+ javahl) check_javahl=true;;
+ *) exit 1;;
+ esac
+ shift
+done
+
+${check_swig_py} && run_tests swig-py
+${check_swig_pl} && run_tests swig-pl
+${check_swig_rb} && run_tests swig-rb
+${check_javahl} && run_tests javahl
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svncheck.sh b/tools/buildbot/slaves/svn-x64-macosx/svncheck.sh
new file mode 100755
index 0000000..31ca3a7
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svncheck.sh
@@ -0,0 +1,94 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+
+run_tests() {
+ ra="$1"
+ fs="$2"
+ ok=true
+
+ case "${ra}" in
+ local) check=check; skipC=;;
+ svn) check=svnserveautocheck; skipC="SKIP_C_TESTS=1";;
+ dav) check=davautocheck; skipC="SKIP_C_TESTS=1";;
+ *) exit 1;;
+ esac
+
+ echo "============ make check ${ra}+${fs}"
+ cd ${absbld}
+ make ${check} FS_TYPE=${fs} PARALLEL=${SVNBB_PARALLEL} CLEANUP=1 ${skipC} || ok=false
+
+ # Move any log files to the buildbot work directory
+ test -f tests.log && mv tests.log "${abssrc}/.test-logs/tests-${ra}-${fs}.log"
+ test -f fails.log && mv fails.log "${abssrc}/.test-logs/fails-${ra}-${fs}.log"
+
+ # Remove the test working directory to make space on the RAM disk
+ # for more tests.
+ rm -fr subversion/tests/cmdline/svn-test-work
+
+ ${ok} || exit 1
+}
+
+check_tests() {
+ ra="$1"
+
+ ${check_fsfs} && run_tests ${ra} fsfs
+ ${check_fsfs_v6} && run_tests ${ra} fsfs-v6
+ ${check_fsfs_v4} && run_tests ${ra} fsfs-v4
+ ${check_bdb} && run_tests ${ra} bdb
+ ${check_fsx} && run_tests ${ra} fsx
+}
+
+
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+# Parse arguments to find out which tests we should run
+check_local=false
+check_svn=false
+check_dav=false
+check_fsfs=false
+check_fsfs_v6=false
+check_fsfs_v4=false
+check_fsx=false
+check_bdb=false
+
+while [ ! -z "$1" ]; do
+ case "$1" in
+ local) check_local=true;;
+ svn) check_svn=true;;
+ dav) check_dav=true;;
+ fsfs) check_fsfs=true;;
+ fsfs-v6) check_fsfs_v6=true;;
+ fsfs-v4) check_fsfs_v4=true;;
+ fsx) check_fsx=true;;
+ bdb) check_bdb=true;;
+ *) exit 1;;
+ esac
+ shift
+done
+
+${check_local} && check_tests local
+${check_svn} && check_tests svn
+${check_dav} && check_tests dav
+
+exit 0
diff --git a/tools/buildbot/slaves/svn-x64-macosx/svnclean.sh b/tools/buildbot/slaves/svn-x64-macosx/svnclean.sh
new file mode 100755
index 0000000..b2b0bb3
--- /dev/null
+++ b/tools/buildbot/slaves/svn-x64-macosx/svnclean.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+set -e
+set -x
+
+scripts=$(cd $(dirname "$0") && pwd)
+
+. ${scripts}/setenv.sh
+
+${scripts}/rmramdisk.sh ${volume_name} ${ramconf}
diff --git a/tools/buildbot/slaves/ubuntu-x64/svnlog.sh b/tools/buildbot/slaves/ubuntu-x64/svnlog.sh
index d3b5036..c2302e1 100755
--- a/tools/buildbot/slaves/ubuntu-x64/svnlog.sh
+++ b/tools/buildbot/slaves/ubuntu-x64/svnlog.sh
@@ -25,7 +25,7 @@ set -x
# upload file to server
FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz
tar -czf $FILENAME tests.log
-ftp -n www.mobsol.be < ../ftpscript
+ftp -n www.mobsol.be < ../ftpscript
rm $FILENAME
echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME"
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template b/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template
index 5f337c0..d283de6 100644
--- a/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template
+++ b/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template
@@ -29,3 +29,22 @@ SET TMP=%TEMP%
IF NOT EXIST "%TESTDIR%\" MKDIR "%TESTDIR%"
IF NOT EXIST "%TEMP%\" MKDIR "%TEMP%"
+
+
+
+
+
+SET SVN_URL=
+SET SVN_RELURL=
+for /F "usebackq tokens=1,* delims=:" %%i IN (`svn info .`) do (
+
+ IF "%%i" == "URL" (
+ SET SVN_URL=%%j
+ ) ELSE IF "%%i" == "Relative URL" (
+ SET SVN_RELURL=%%j
+ )
+)
+SET SVN_URL=%SVN_URL:~1%
+SET SVN_RELURL=%SVN_RELURL:~3%
+SET SVN_SUBBRANCH=%SVN_RELURL:~11%
+SET SVN_BRANCH=%SVN_SUBBRANCH:branches/=%
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd
index f3c551e..90fd5d2 100644
--- a/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd
@@ -23,63 +23,99 @@ SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
CALL ..\svn-config.cmd
IF ERRORLEVEL 1 EXIT /B 1
-svnversion . /1.6.x | find "S" > nul:
-IF ERRORLEVEL 1 (
- ECHO --- Building 1.6.x: Skipping bindings ---
- EXIT /B 0
+IF "%SVN_BRANCH%" LEQ "1.6.x" (
+ ECHO --- Building 1.6.x or older: Skipping bindings ---
+ EXIT /B 0
)
-PATH %PATH%;%TESTDIR%\bin
-SET result=0
+IF "%SVN_BRANCH%" LSS "1.9." (
+ IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin"
+ xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin"
-python win-tests.py -d -f fsfs --javahl "%TESTDIR%\tests"
-IF ERRORLEVEL 1 (
- echo [python reported error %ERRORLEVEL%]
- SET result=1
+ PATH %TESTDIR%\bin;!PATH!
)
-IF EXIST "%TESTDIR%\swig" rmdir /s /q "%TESTDIR%\swig"
-mkdir "%TESTDIR%\swig\py-release\libsvn"
-mkdir "%TESTDIR%\swig\py-release\svn"
+SET result=0
+
+if "%SVN_BRANCH%" GTR "1.9." (
-xcopy "release\subversion\bindings\swig\python\*.pyd" "%TESTDIR%\swig\py-release\libsvn\*.pyd" > nul:
-xcopy "release\subversion\bindings\swig\python\libsvn_swig_py\*.dll" "%TESTDIR%\swig\py-release\libsvn\*.dll" > nul:
-xcopy "subversion\bindings\swig\python\*.py" "%TESTDIR%\swig\py-release\libsvn\*.py" > nul:
-xcopy "subversion\bindings\swig\python\svn\*.py" "%TESTDIR%\swig\py-release\svn\*.py" > nul:
+ python win-tests.py -r -f fsfs --swig=python "%TESTDIR%\tests"
-SET PYTHONPATH=%TESTDIR%\swig\py-release
+ IF ERRORLEVEL 1 (
+ echo [Python tests reported error !ERRORLEVEL!] 1>&2
+ SET result=1
+ ) ELSE (
+ echo Done.
+ )
-python subversion\bindings\swig\python\tests\run_all.py
-IF ERRORLEVEL 1 (
- echo [Python reported error %ERRORLEVEL%]
- SET result=1
-)
+) ELSE (
+ IF EXIST "%TESTDIR%\swig" rmdir /s /q "%TESTDIR%\swig"
+ mkdir "%TESTDIR%\swig\py-release\libsvn"
+ mkdir "%TESTDIR%\swig\py-release\svn"
-mkdir "%TESTDIR%\swig\pl-release\SVN"
-mkdir "%TESTDIR%\swig\pl-release\auto\SVN"
-xcopy subversion\bindings\swig\perl\native\*.pm "%TESTDIR%\swig\pl-release\SVN" > nul:
-pushd release\subversion\bindings\swig\perl\native
-for %%i in (*.dll) do (
- set name=%%i
- mkdir "%TESTDIR%\swig\pl-release\auto\SVN\!name:~0,-4!"
- xcopy "!name:~0,-4!.*" "%TESTDIR%\swig\pl-release\auto\SVN\!name:~0,-4!" > nul:
- xcopy /y "_Core.dll" "%TESTDIR%\swig\pl-release\auto\SVN\!name:~0,-4!" > nul:
+ xcopy "release\subversion\bindings\swig\python\*.pyd" "%TESTDIR%\swig\py-release\libsvn\*.pyd" > nul:
+ xcopy "release\subversion\bindings\swig\python\libsvn_swig_py\*.dll" "%TESTDIR%\swig\py-release\libsvn\*.dll" > nul:
+ xcopy "subversion\bindings\swig\python\*.py" "%TESTDIR%\swig\py-release\libsvn\*.py" > nul:
+ xcopy "subversion\bindings\swig\python\svn\*.py" "%TESTDIR%\swig\py-release\svn\*.py" > nul:
+
+ SET PYTHONPATH=%TESTDIR%\swig\py-release
+
+ python subversion\bindings\swig\python\tests\run_all.py
+ IF ERRORLEVEL 1 (
+ echo [Python tests reported error !ERRORLEVEL!] 1>&2
+ REM SET result=1
+ ) ELSE (
+ echo Done.
+ )
)
-popd
-svnversion . /1.7.x | find "S" > nul:
-IF ERRORLEVEL 1 (
- ECHO --- Building 1.7.x: Skipping perl tests ---
- EXIT /B %result%
+if "%SVN_BRANCH%" GTR "1.9." (
+
+ python win-tests.py -d -f fsfs --swig=perl "%TESTDIR%\tests"
+
+ IF ERRORLEVEL 1 (
+ echo [Perl tests reported error !ERRORLEVEL!] 1>&2
+ SET result=1
+ ) ELSE (
+ echo Done.
+ )
+
+) ELSE IF "%SVN_BRANCH%" GTR "1.8." (
+
+ mkdir "%TESTDIR%\swig\pl-debug\SVN"
+ mkdir "%TESTDIR%\swig\pl-debug\auto\SVN"
+ xcopy subversion\bindings\swig\perl\native\*.pm "%TESTDIR%\swig\pl-debug\SVN" > nul:
+ pushd debug\subversion\bindings\swig\perl\native
+ for %%i in (*.dll) do (
+ set name=%%i
+ mkdir "%TESTDIR%\swig\pl-debug\auto\SVN\!name:~0,-4!"
+ xcopy "!name:~0,-4!.*" "%TESTDIR%\swig\pl-debug\auto\SVN\!name:~0,-4!" > nul:
+ xcopy /y "_Core.dll" "%TESTDIR%\swig\pl-debug\auto\SVN\!name:~0,-4!" > nul:
+ )
+ popd
+
+
+ SET PERL5LIB=%PERL5LIB%;%TESTDIR%\swig\pl-debug;
+ pushd subversion\bindings\swig\perl\native
+ perl -MExtUtils::Command::MM -e "test_harness()" t\*.t
+ IF ERRORLEVEL 1 (
+ echo [Test runner reported error !ERRORLEVEL!]
+ REM SET result=1
+ )
+ popd
)
-SET PERL5LIB=%PERL5LIB%;%TESTDIR%\swig\pl-release;
-pushd subversion\bindings\swig\perl\native
-perl -MExtUtils::Command::MM -e test_harness() t\*.t
-IF ERRORLEVEL 1 (
- echo [Perl reported error %ERRORLEVEL%]
- SET result=1
+if "%SVN_BRANCH%" GTR "1.9." (
+ python win-tests.py -d -f fsfs --swig=ruby "%TESTDIR%\tests"
+
+ IF ERRORLEVEL 1 (
+ echo [Ruby tests reported error !ERRORLEVEL!] 1>&2
+ REM SET result=1
+ ) ELSE (
+ echo Done.
+ )
+
+ taskkill /im svnserve.exe /f
)
-popd
exit /b %result%
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd
index 9ed5879..c51133d 100644
--- a/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd
@@ -23,14 +23,26 @@ SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
CALL ..\svn-config.cmd
IF ERRORLEVEL 1 EXIT /B 1
-svnversion . /1.6.x | find "S" > nul:
-IF ERRORLEVEL 1 (
+IF "%SVN_BRANCH%" LEQ "1.6.x" (
ECHO --- Building 1.6.x: Skipping bindings ---
EXIT /B 0
)
-msbuild subversion_vcnet.sln /p:Configuration=Debug /p:Platform=win32 /t:__JAVAHL__ /t:__JAVAHL_TESTS__
+SET DEBUG_TARGETS=/t:__ALL_TESTS__
+SET RELEASE_TARGETS=/t:__SWIG_PYTHON__
+
+if "%SVN_BRANCH%" GTR "1.8." (
+ SET DEBUG_TARGETS=%DEBUG_TARGETS% /t:__SWIG_PERL__
+)
+
+if "%SVN_BRANCH%" GTR "1.9." (
+ SET DEBUG_TARGETS=%DEBUG_TARGETS% /t:__SWIG_RUBY__
+)
+
+msbuild subversion_vcnet.sln /m /v:m /p:Configuration=Debug /p:Platform=Win32 %DEBUG_TARGETS%
IF ERRORLEVEL 1 EXIT /B 1
-msbuild subversion_vcnet.sln /p:Configuration=Release /p:Platform=win32 /t:__SWIG_PYTHON__ /t:__SWIG_PERL__
+msbuild subversion_vcnet.sln /m /v:m /p:Configuration=Release /p:Platform=Win32 %RELEASE_TARGETS%
IF ERRORLEVEL 1 EXIT /B 1
+
+EXIT /B 0
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd
index 27ca272..41a8438 100644
--- a/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd
@@ -25,10 +25,12 @@ IF ERRORLEVEL 1 EXIT /B 1
PUSHD ..\deps
-nant gen-dev -D:wc=..\build -D:impBase=../deps/build/win32 -D:botBuild=true %NANTARGS%
+nant gen-dev -D:wc=..\build -D:impBase=../deps/build/win32 -D:botBuild=true %SVN_NANT_ARGS%
IF ERRORLEVEL 1 EXIT /B 1
POPD
-msbuild subversion_vcnet.sln /p:Configuration=Debug /p:Platform=win32 /t:__ALL_TESTS__
+msbuild subversion_vcnet.sln /m /v:m /p:Configuration=Debug /p:Platform=Win32 /t:__ALL_TESTS__ %SVN_MSBUILD_ARGS%
IF ERRORLEVEL 1 EXIT /B 1
+
+EXIT /B 0
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd
index fc0adb6..571e2c0 100644
--- a/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd
@@ -28,17 +28,17 @@ PUSHD ..\deps
ECHO Checking dependencies in %CD%
IF NOT EXIST "imports\" (
- svn co --username guest --password "" http://sharpsvn.open.collab.net/svn/sharpsvn/trunk/imports imports
+ svn co https://ctf.open.collab.net/svn/repos/sharpsvn/trunk/imports imports --username guest --password ""
)
IF NOT EXIST build\imports.done (
- svn up imports
- copy /y imports\dev-default.build default.build
- nant prep-dev %NANTARGS%
- IF ERRORLEVEL 1 (
- exit /B 1
- )
- del release\bin\*svn* release\bin\_*.* 2>nul:
- echo. > build\imports.done
+ svn up imports --username guest --password ""
+ copy /y imports\dev-default.build default.build
+ nant prep-dev %NANTARGS%
+ IF ERRORLEVEL 1 (
+ EXIT /B 1
+ )
+ del release\bin\*svn* release\bin\_*.* 2>nul:
+ ECHO. > build\imports.done
)
POPD
@@ -50,6 +50,7 @@ IF NOT ERRORLEVEL 1 (
POPD
+taskkill /im msbuild.exe /f 2> nul:
taskkill /im svn.exe /f 2> nul:
taskkill /im svnlook.exe /f 2> nul:
taskkill /im svnadmin.exe /f 2> nul:
@@ -57,19 +58,23 @@ taskkill /im svnserve.exe /f 2> nul:
taskkill /im svnrdump.exe /f 2> nul:
taskkill /im svnsync.exe /f 2> nul:
taskkill /im httpd.exe /f 2> nul:
+taskkill /im client-test.exe /f 2> nul:
taskkill /im fs-test.exe /f 2> nul:
taskkill /im op-depth-test.exe /f 2> nul:
+taskkill /im atomic-ra-revprop-change.exe /f 2> nul:
taskkill /im java.exe /f 2> nul:
taskkill /im perl.exe /f 2> nul:
+taskkill /im ruby.exe /f 2> nul:
taskkill /im mspdbsrv.exe /f 2> nul:
-IF EXIST "%TESTDIR%\tests\subversion\tests\cmdline\httpd\" (
- rmdir /s /q "%TESTDIR%\tests\subversion\tests\cmdline\httpd"
-)
+
IF EXIST "%TESTDIR%\swig\" (
- rmdir /s /q "%TESTDIR%\swig"
+ rmdir /s /q "%TESTDIR%\swig"
)
-del "%TESTDIR%\tests\*.log" 2> nul:
-
+IF EXIST "%TESTDIR%\tests\" (
+ PUSHD "%TESTDIR%\tests\"
+ rmdir /s /q "%TESTDIR%\tests\" 2> nul:
+ POPD
+)
exit /B 0
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd
new file mode 100644
index 0000000..0b0a507
--- /dev/null
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-javahl.cmd
@@ -0,0 +1,46 @@
+@echo off
+REM ================================================================
+REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+REM ================================================================
+
+SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION
+
+CALL ..\svn-config.cmd
+IF ERRORLEVEL 1 EXIT /B 1
+
+IF "%SVN_BRANCH%" LEQ "1.6.x" (
+ ECHO --- Building 1.6.x or older: Skipping JavaHL ---
+ EXIT /B 0
+)
+
+IF "%SVN_BRANCH%" LSS "1.9." (
+ IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin"
+ xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin"
+
+ PATH %TESTDIR%\bin;!PATH!;!JAVADIR!
+)
+
+SET result=0
+
+python win-tests.py -d -f fsfs --javahl "%TESTDIR%\tests"
+IF ERRORLEVEL 1 (
+ ECHO [JavaHL test runner reported error !ERRORLEVEL!] 1>&2
+ SET result=1
+)
+
+EXIT /b %result%
diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd
index 522cdae..d56ec3b 100644
--- a/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd
+++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd
@@ -65,10 +65,12 @@ IF NOT "%1" == "" GOTO next
taskkill /im svnserve.exe httpd.exe /f 2> nul:
-IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin"
-xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin"
+IF "%SVN_BRANCH%" LSS "1.9." (
+ IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin"
+ xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin"
-PATH %TESTDIR%\bin;%PATH%
+ PATH %TESTDIR%\bin;!PATH!
+)
IF "%LOCAL%+%FSFS%" == "1+1" (
echo win-tests.py -c %PARALLEL% %MODE% -f fsfs %ARGS% "%TESTDIR%\tests"
diff --git a/tools/client-side/bash_completion b/tools/client-side/bash_completion
index eabc15c..8187fde 100644
--- a/tools/client-side/bash_completion
+++ b/tools/client-side/bash_completion
@@ -162,12 +162,12 @@ _svn()
cur=${COMP_WORDS[COMP_CWORD]}
# Possible expansions, without pure-prefix abbreviations such as "up".
- cmds='add blame annotate praise cat changelist cl checkout co cleanup'
+ cmds='add auth blame annotate praise cat changelist cl checkout co cleanup'
cmds="$cmds commit ci copy cp delete remove rm diff export help import"
cmds="$cmds info list ls lock log merge mergeinfo mkdir move mv rename"
cmds="$cmds patch propdel pdel propedit pedit propget pget proplist"
cmds="$cmds plist propset pset relocate resolve resolved revert status"
- cmds="$cmds switch unlock update upgrade"
+ cmds="$cmds switch unlock update upgrade"
# help options have a strange command status...
local helpOpts='--help -h'
@@ -781,7 +781,8 @@ _svn()
# otherwise build possible options for the command
pOpts="--username --password --no-auth-cache --non-interactive \
- --trust-server-cert --force-interactive"
+ --trust-server-cert-failures \
+ --force-interactive"
mOpts="-m --message -F --file --encoding --force-log --with-revprop"
rOpts="-r --revision"
qOpts="-q --quiet"
@@ -798,12 +799,15 @@ _svn()
cmdOpts="--auto-props --no-auto-props --force --targets \
--no-ignore --parents $nOpts $qOpts $pOpts"
;;
+ auth)
+ cmdOpts="--remove --show-passwords $pOpts"
+ ;;
blame|annotate|ann|praise)
cmdOpts="$rOpts $pOpts -v --verbose --incremental --xml \
-x --extensions --force $gOpts"
;;
cat)
- cmdOpts="$rOpts $pOpts"
+ cmdOpts="$rOpts $pOpts --ignore-keywords"
;;
changelist|cl)
cmdOpts="--targets $pOpts $qOpts $cOpts \
@@ -814,7 +818,8 @@ _svn()
--force"
;;
cleanup)
- cmdOpts="--diff3-cmd $pOpts"
+ cmdOpts="--diff3-cmd $pOpts --include-externals -q --quiet\
+ --remove-ignored --remove-unversioned"
;;
commit|ci)
cmdOpts="$mOpts $qOpts $nOpts --targets --editor-cmd $pOpts \
@@ -823,7 +828,7 @@ _svn()
;;
copy|cp)
cmdOpts="$mOpts $rOpts $qOpts --editor-cmd $pOpts --parents \
- --ignore-externals"
+ --ignore-externals --pin-externals"
;;
delete|del|remove|rm)
cmdOpts="--force $mOpts $qOpts --targets --editor-cmd $pOpts \
@@ -850,7 +855,8 @@ _svn()
;;
info)
cmdOpts="$pOpts $rOpts --targets -R --recursive --depth \
- --incremental --xml $cOpts"
+ --include-externals --incremental --xml \
+ --show-item --no-newline $cOpts"
;;
list|ls)
cmdOpts="$rOpts -v --verbose -R --recursive $pOpts \
@@ -870,11 +876,12 @@ _svn()
merge)
cmdOpts="$rOpts $nOpts $qOpts --force --dry-run --diff3-cmd \
$pOpts --ignore-ancestry -c --change -x --extensions \
- --record-only --accept --reintegrate \
+ --record-only --accept \
--allow-mixed-revisions -v --verbose"
;;
mergeinfo)
- cmdOpts="$rOpts $pOpts --depth --show-revs -R --recursive"
+ cmdOpts="$rOpts $pOpts --depth --show-revs -R --recursive \
+ $qOpts -v --verbose --incremental --log"
;;
mkdir)
cmdOpts="$mOpts $qOpts --editor-cmd $pOpts --parents"
@@ -898,7 +905,7 @@ _svn()
cmdOpts="$cmdOpts --revprop $rOpts"
;;
propget|pget|pg)
- cmdOpts="-v --verbose -R --recursive $rOpts --strict \
+ cmdOpts="-v --verbose -R --recursive $rOpts --no-newline \
$pOpts $cOpts --depth --xml --show-inherited-props"
[[ $isRevProp || ! $prop ]] && cmdOpts="$cmdOpts --revprop"
;;
@@ -930,7 +937,7 @@ _svn()
status|stat|st)
cmdOpts="-u --show-updates -v --verbose $nOpts $qOpts $pOpts \
--no-ignore --ignore-externals --incremental --xml \
- $cOpts"
+ $rOpts $cOpts"
;;
switch|sw)
cmdOpts="--relocate $rOpts $nOpts $qOpts $pOpts --diff3-cmd \
@@ -1030,7 +1037,8 @@ _svnadmin ()
cur=${COMP_WORDS[COMP_CWORD]}
# Possible expansions, without pure-prefix abbreviations such as "h".
- cmds='crashtest create deltify dump freeze help hotcopy list-dblogs \
+ cmds='crashtest create delrevprop deltify dump freeze help hotcopy \
+ info list-dblogs \
list-unused-dblogs load lock lslocks lstxns pack recover rmlocks \
rmtxns setlog setrevprop setuuid unlock upgrade verify --version'
@@ -1093,8 +1101,9 @@ _svnadmin ()
setlog)
cmdOpts="-r --revision --bypass-hooks"
;;
- setrevprop)
- cmdOpts="-r --revision --use-pre-revprop-change-hook \
+ setrevprop|delrevprop)
+ cmdOpts="-r --revision -t --transaction \
+ --use-pre-revprop-change-hook \
--use-post-revprop-change-hook"
;;
verify)
@@ -1126,6 +1135,8 @@ _svnadmin ()
--help) cmdOpts=${cmdOpts/ -h / } ;;
-r) cmdOpts=${cmdOpts/ --revision / } ;;
--revision) cmdOpts=${cmdOpts/ -r / } ;;
+ -t) cmdOpts=${cmdOpts/ --transaction / } ;;
+ --transaction) cmdOpts=${cmdOpts/ -t / } ;;
-F) cmdOpts=${cmdOpts/ --file / } ;;
--file) cmdOpts=${cmdOpts/ -F / } ;;
-M) cmdOpts=${cmdOpts/ --memory-cache-size / } ;;
@@ -1226,8 +1237,8 @@ _svnlook ()
cur=${COMP_WORDS[COMP_CWORD]}
# Possible expansions, without pure-prefix abbreviations such as "h".
- cmds='author cat changed date diff dirs-changed help history info \
- lock log propget proplist tree uuid youngest --version'
+ cmds='author cat changed date diff dirs-changed filesize help history \
+ info lock log propget proplist tree uuid youngest --version'
if [[ $COMP_CWORD -eq 1 ]] ; then
COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
@@ -1268,6 +1279,9 @@ _svnlook ()
dirs-changed)
cmdOpts="-r --revision -t --transaction"
;;
+ filesize)
+ cmdOpts="-r --revision -t --transaction"
+ ;;
help|h|\?)
cmdOpts="$cmds"
;;
@@ -1380,7 +1394,8 @@ _svnsync ()
copy-revprops|initialize|init|synchronize|sync)
cmdOpts="--non-interactive --no-auth-cache --trust-server-cert \
--source-username --source-password --sync-username \
- --sync-password --config-dir --config-option -q --quiet"
+ --sync-password --config-dir --config-option \
+ -q --quiet -M --memory-cache-size"
;;
help|h|\?)
cmdOpts="$cmds"
diff --git a/tools/client-side/bash_completion_test b/tools/client-side/bash_completion_test
index 49e3532..d2c1785 100755
--- a/tools/client-side/bash_completion_test
+++ b/tools/client-side/bash_completion_test
@@ -114,14 +114,18 @@ get_svn_subcommands() {
# Usage: get_svn_options SUBCMD
get_svn_options() {
{ svn help "$1" |
+ # Remove deprecated options
+ grep -v deprecated |
# Find the relevant lines; remove "arg" and description.
sed -n -e '1,/^Valid options:$/d;/^ -/!d' \
-e 's/\( ARG\)* * : .*//;p' |
# Remove brackets; put each word on its own line.
tr -d '] ' | tr '[' '\n'
# The following options are always accepted but not listed in the help
- echo "-h"
- echo "--help"
+ if [ "$1" != "help" ] ; then
+ echo "-h"
+ echo "--help"
+ fi
} | sort
}
diff --git a/tools/client-side/showchange.pl b/tools/client-side/showchange.pl
deleted file mode 100755
index e4cf7eb..0000000
--- a/tools/client-side/showchange.pl
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/perl -w
-# ====================================================================
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-# ====================================================================
-
-use strict;
-
-# ====================================================================
-# Show the log message and diff for a revision.
-#
-# $ showchange.pl REVISION [WC_PATH|URL]
-
-
-if ((scalar(@ARGV) == 0)
- or ($ARGV[0] eq '-?')
- or ($ARGV[0] eq '-h')
- or ($ARGV[0] eq '--help')) {
- print <<EOF;
-Show the log message and diff for a revision.
-usage: $0 REVISION [WC_PATH|URL]
-EOF
- exit 0;
-}
-
-my $revision = shift || die ("Revision argument required.\n");
-if ($revision =~ /r([0-9]+)/) {
- $revision = $1;
-}
-
-my $url = shift || "";
-
-my $svn = "svn";
-
-my $prev_revision = $revision - 1;
-
-if (not $url) {
- # If no URL was provided, use the repository root from the current
- # directory's working copy. We want the root, rather than the URL
- # of the current dir, because when someone's asking for a change
- # by name (that is, by revision number), they generally don't want
- # to have to cd to a particular working copy directory to get it.
- my @info_lines = `${svn} info`;
- foreach my $info_line (@info_lines) {
- if ($info_line =~ s/^Repository Root: (.*)$/$1/e) {
- $url = $info_line;
- }
- }
-}
-
-system ("${svn} log -v --incremental -r${revision} $url");
-system ("${svn} diff -r${prev_revision}:${revision} $url");
diff --git a/tools/client-side/svn-bench/cl.h b/tools/client-side/svn-bench/cl.h
deleted file mode 100644
index 7a1e48d..0000000
--- a/tools/client-side/svn-bench/cl.h
+++ /dev/null
@@ -1,198 +0,0 @@
-/*
- * cl.h: shared stuff in the command line program
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-/* ==================================================================== */
-
-
-
-#ifndef SVN_CL_H
-#define SVN_CL_H
-
-/*** Includes. ***/
-
-#include <apr_tables.h>
-
-#include "svn_client.h"
-
-#ifdef __cplusplus
-extern "C" {
-#endif /* __cplusplus */
-
-
-/*** Command dispatch. ***/
-
-/* Hold results of option processing that are shared by multiple
- commands. */
-typedef struct svn_cl__opt_state_t
-{
- /* An array of svn_opt_revision_range_t *'s representing revisions
- ranges indicated on the command-line via the -r and -c options.
- For each range in the list, if only one revision was provided
- (-rN), its 'end' member remains 'svn_opt_revision_unspecified'.
- This array always has at least one element, even if that is a
- null range in which both ends are 'svn_opt_revision_unspecified'. */
- apr_array_header_t *revision_ranges;
-
- /* These are simply a copy of the range start and end values present
- in the first item of the revision_ranges list. */
- svn_opt_revision_t start_revision;
- svn_opt_revision_t end_revision;
-
- /* Flag which is only set if the '-c' option was used. */
- svn_boolean_t used_change_arg;
-
- /* Flag which is only set if the '-r' option was used. */
- svn_boolean_t used_revision_arg;
-
- /* Max number of log messages to get back from svn_client_log2. */
- int limit;
-
- /* After option processing is done, reflects the switch actually
- given on the command line, or svn_depth_unknown if none. */
- svn_depth_t depth;
-
- svn_boolean_t quiet; /* sssh...avoid unnecessary output */
- svn_boolean_t non_interactive; /* do no interactive prompting */
- svn_boolean_t version; /* print version information */
- svn_boolean_t verbose; /* be verbose */
- svn_boolean_t strict; /* do strictly what was requested */
- const char *encoding; /* the locale/encoding of the data*/
- svn_boolean_t help; /* print usage message */
- const char *auth_username; /* auth username */ /* UTF-8! */
- const char *auth_password; /* auth password */ /* UTF-8! */
- const char *extensions; /* subprocess extension args */ /* UTF-8! */
- apr_array_header_t *targets; /* target list from file */ /* UTF-8! */
- svn_boolean_t no_auth_cache; /* do not cache authentication information */
- svn_boolean_t stop_on_copy; /* don't cross copies during processing */
- const char *config_dir; /* over-riding configuration directory */
- apr_array_header_t *config_options; /* over-riding configuration options */
- svn_boolean_t all_revprops; /* retrieve all revprops */
- svn_boolean_t no_revprops; /* retrieve no revprops */
- apr_hash_t *revprop_table; /* table of revision properties to get/set */
- svn_boolean_t use_merge_history; /* use/display extra merge information */
- svn_boolean_t trust_server_cert; /* trust server SSL certs that would
- otherwise be rejected as "untrusted" */
-} svn_cl__opt_state_t;
-
-
-typedef struct svn_cl__cmd_baton_t
-{
- svn_cl__opt_state_t *opt_state;
- svn_client_ctx_t *ctx;
-} svn_cl__cmd_baton_t;
-
-
-/* Declare all the command procedures */
-svn_opt_subcommand_t
- svn_cl__help,
- svn_cl__null_export,
- svn_cl__null_list,
- svn_cl__null_log;
-
-
-/* See definition in main.c for documentation. */
-extern const svn_opt_subcommand_desc2_t svn_cl__cmd_table[];
-
-/* See definition in main.c for documentation. */
-extern const int svn_cl__global_options[];
-
-/* See definition in main.c for documentation. */
-extern const apr_getopt_option_t svn_cl__options[];
-
-
-/* A helper for the many subcommands that wish to merely warn when
- * invoked on an unversioned, nonexistent, or otherwise innocuously
- * errorful resource. Meant to be wrapped with SVN_ERR().
- *
- * If ERR is null, return SVN_NO_ERROR.
- *
- * Else if ERR->apr_err is one of the error codes supplied in varargs,
- * then handle ERR as a warning (unless QUIET is true), clear ERR, and
- * return SVN_NO_ERROR, and push the value of ERR->apr_err into the
- * ERRORS_SEEN array, if ERRORS_SEEN is not NULL.
- *
- * Else return ERR.
- *
- * Typically, error codes like SVN_ERR_UNVERSIONED_RESOURCE,
- * SVN_ERR_ENTRY_NOT_FOUND, etc, are supplied in varargs. Don't
- * forget to terminate the argument list with SVN_NO_ERROR.
- */
-svn_error_t *
-svn_cl__try(svn_error_t *err,
- apr_array_header_t *errors_seen,
- svn_boolean_t quiet,
- ...);
-
-
-/* Our cancellation callback. */
-svn_error_t *
-svn_cl__check_cancel(void *baton);
-
-
-
-/*** Notification functions to display results on the terminal. */
-
-/* Set *NOTIFY_FUNC_P and *NOTIFY_BATON_P to a notifier/baton for all
- * operations, allocated in POOL.
- */
-svn_error_t *
-svn_cl__get_notifier(svn_wc_notify_func2_t *notify_func_p,
- void **notify_baton_p,
- apr_pool_t *pool);
-
-/* Make the notifier for use with BATON print the appropriate summary
- * line at the end of the output.
- */
-svn_error_t *
-svn_cl__notifier_mark_export(void *baton);
-
-/* Like svn_client_args_to_target_array() but, if the only error is that some
- * arguments are reserved file names, then print warning messages for those
- * targets, store the rest of the targets in TARGETS_P and return success. */
-svn_error_t *
-svn_cl__args_to_target_array_print_reserved(apr_array_header_t **targets_p,
- apr_getopt_t *os,
- const apr_array_header_t *known_targets,
- svn_client_ctx_t *ctx,
- svn_boolean_t keep_dest_origpath_on_truepath_collision,
- apr_pool_t *pool);
-
-/* Return an error if TARGET is a URL; otherwise return SVN_NO_ERROR. */
-svn_error_t *
-svn_cl__check_target_is_local_path(const char *target);
-
-/* Return a copy of PATH, converted to the local path style, skipping
- * PARENT_PATH if it is non-null and is a parent of or equal to PATH.
- *
- * This function assumes PARENT_PATH and PATH are both absolute "dirents"
- * or both relative "dirents". */
-const char *
-svn_cl__local_style_skip_ancestor(const char *parent_path,
- const char *path,
- apr_pool_t *pool);
-
-#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-#endif /* SVN_CL_H */
diff --git a/tools/client-side/svn-bench/client_errors.h b/tools/client-side/svn-bench/client_errors.h
deleted file mode 100644
index 19f0bdf..0000000
--- a/tools/client-side/svn-bench/client_errors.h
+++ /dev/null
@@ -1,97 +0,0 @@
-/*
- * client_errors.h: error codes this command line client features
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-/* ==================================================================== */
-
-
-
-#ifndef SVN_CLIENT_ERRORS_H
-#define SVN_CLIENT_ERRORS_H
-
-#ifdef __cplusplus
-extern "C" {
-#endif /* __cplusplus */
-
-/*
- * This error defining system is copied from and explained in
- * ../../include/svn_error_codes.h
- */
-
-/* Process this file if we're building an error array, or if we have
- not defined the enumerated constants yet. */
-#if defined(SVN_ERROR_BUILD_ARRAY) || !defined(SVN_CMDLINE_ERROR_ENUM_DEFINED)
-
-#if defined(SVN_ERROR_BUILD_ARRAY)
-
-#error "Need to update err_defn for r1464679 and un-typo 'CDMLINE'"
-
-#define SVN_ERROR_START \
- static const err_defn error_table[] = { \
- { SVN_ERR_CDMLINE__WARNING, "Warning" },
-#define SVN_ERRDEF(n, s) { n, s },
-#define SVN_ERROR_END { 0, NULL } };
-
-#elif !defined(SVN_CMDLINE_ERROR_ENUM_DEFINED)
-
-#define SVN_ERROR_START \
- typedef enum svn_client_errno_t { \
- SVN_ERR_CDMLINE__WARNING = SVN_ERR_LAST + 1,
-#define SVN_ERRDEF(n, s) n,
-#define SVN_ERROR_END SVN_ERR_CMDLINE__ERR_LAST } svn_client_errno_t;
-
-#define SVN_CMDLINE_ERROR_ENUM_DEFINED
-
-#endif
-
-/* Define custom command line client error numbers */
-
-SVN_ERROR_START
-
- /* BEGIN Client errors */
-
-SVN_ERRDEF(SVN_ERR_CMDLINE__TMPFILE_WRITE,
- "Failed writing to temporary file.")
-
- SVN_ERRDEF(SVN_ERR_CMDLINE__TMPFILE_STAT,
- "Failed getting info about temporary file.")
-
- SVN_ERRDEF(SVN_ERR_CMDLINE__TMPFILE_OPEN,
- "Failed opening temporary file.")
-
- /* END Client errors */
-
-
-SVN_ERROR_END
-
-#undef SVN_ERROR_START
-#undef SVN_ERRDEF
-#undef SVN_ERROR_END
-
-#endif /* SVN_ERROR_BUILD_ARRAY || !SVN_CMDLINE_ERROR_ENUM_DEFINED */
-
-
-#ifdef __cplusplus
-}
-#endif /* __cplusplus */
-
-#endif /* SVN_CLIENT_ERRORS_H */
diff --git a/tools/client-side/svn-bench/help-cmd.c b/tools/client-side/svn-bench/help-cmd.c
deleted file mode 100644
index a3302ec..0000000
--- a/tools/client-side/svn-bench/help-cmd.c
+++ /dev/null
@@ -1,94 +0,0 @@
-/*
- * help-cmd.c -- Provide help
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-/* ==================================================================== */
-
-
-
-/*** Includes. ***/
-
-#include "svn_string.h"
-#include "svn_error.h"
-#include "svn_version.h"
-#include "cl.h"
-
-#include "svn_private_config.h"
-
-
-/*** Code. ***/
-
-/* This implements the `svn_opt_subcommand_t' interface. */
-svn_error_t *
-svn_cl__help(apr_getopt_t *os,
- void *baton,
- apr_pool_t *pool)
-{
- svn_cl__opt_state_t *opt_state;
-
- /* xgettext: the %s is for SVN_VER_NUMBER. */
- char help_header_template[] =
- N_("usage: svn-bench <subcommand> [options] [args]\n"
- "Subversion command-line client, version %s.\n"
- "Type 'svn-bench help <subcommand>' for help on a specific subcommand.\n"
- "Type 'svn-bench --version' to see the program version and RA modules\n"
- " or 'svn-bench --version --quiet' to see just the version number.\n"
- "\n"
- "Most subcommands take file and/or directory arguments, recursing\n"
- "on the directories. If no arguments are supplied to such a\n"
- "command, it recurses on the current directory (inclusive) by default.\n"
- "\n"
- "Available subcommands:\n");
-
- char help_footer[] =
- N_("Subversion is a tool for version control.\n"
- "For additional information, see http://subversion.apache.org/\n");
-
- char *help_header =
- apr_psprintf(pool, _(help_header_template), SVN_VER_NUMBER);
-
- const char *ra_desc_start
- = _("The following repository access (RA) modules are available:\n\n");
-
- svn_stringbuf_t *version_footer;
-
- if (baton)
- opt_state = ((svn_cl__cmd_baton_t *) baton)->opt_state;
- else
- opt_state = NULL;
-
- version_footer = svn_stringbuf_create(ra_desc_start, pool);
- SVN_ERR(svn_ra_print_modules(version_footer, pool));
-
- return svn_opt_print_help4(os,
- "svn-bench", /* ### erm, derive somehow? */
- opt_state ? opt_state->version : FALSE,
- opt_state ? opt_state->quiet : FALSE,
- opt_state ? opt_state->verbose : FALSE,
- version_footer->data,
- help_header, /* already gettext()'d */
- svn_cl__cmd_table,
- svn_cl__options,
- svn_cl__global_options,
- _(help_footer),
- pool);
-}
diff --git a/tools/client-side/svn-bench/notify.c b/tools/client-side/svn-bench/notify.c
deleted file mode 100644
index 5e19d8a..0000000
--- a/tools/client-side/svn-bench/notify.c
+++ /dev/null
@@ -1,1045 +0,0 @@
-/*
- * notify.c: feedback handlers for cmdline client.
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-/* ==================================================================== */
-
-
-
-/*** Includes. ***/
-
-#define APR_WANT_STDIO
-#define APR_WANT_STRFUNC
-#include <apr_want.h>
-
-#include "svn_cmdline.h"
-#include "svn_pools.h"
-#include "svn_dirent_uri.h"
-#include "svn_path.h"
-#include "svn_sorts.h"
-#include "cl.h"
-
-#include "svn_private_config.h"
-
-
-/* Baton for notify and friends. */
-struct notify_baton
-{
- svn_boolean_t received_some_change;
- svn_boolean_t is_checkout;
- svn_boolean_t is_export;
- svn_boolean_t is_wc_to_repos_copy;
- svn_boolean_t sent_first_txdelta;
- svn_boolean_t in_external;
- svn_boolean_t had_print_error; /* Used to not keep printing error messages
- when we've already had one print error. */
-
- /* Conflict stats for update and merge. */
- unsigned int text_conflicts;
- unsigned int prop_conflicts;
- unsigned int tree_conflicts;
- unsigned int skipped_paths;
- apr_hash_t *conflicted_paths;
-
- /* The cwd, for use in decomposing absolute paths. */
- const char *path_prefix;
-};
-
-
-/* Add a conflicted path to the list of conflicted paths stored
- * in the notify baton. */
-static void
-add_conflicted_path(struct notify_baton *nb, const char *path)
-{
- apr_hash_set(nb->conflicted_paths,
- apr_pstrdup(apr_hash_pool_get(nb->conflicted_paths), path),
- APR_HASH_KEY_STRING, "");
-}
-
-/* This implements `svn_wc_notify_func2_t'.
- * NOTE: This function can't fail, so we just ignore any print errors. */
-static void
-notify(void *baton, const svn_wc_notify_t *n, apr_pool_t *pool)
-{
- struct notify_baton *nb = baton;
- char statchar_buf[5] = " ";
- const char *path_local;
- svn_error_t *err;
-
- if (n->url)
- path_local = n->url;
- else
- {
- if (n->path_prefix)
- path_local = svn_cl__local_style_skip_ancestor(n->path_prefix, n->path,
- pool);
- else /* skip nb->path_prefix, if it's non-null */
- path_local = svn_cl__local_style_skip_ancestor(nb->path_prefix, n->path,
- pool);
- }
-
- switch (n->action)
- {
- case svn_wc_notify_skip:
- nb->skipped_paths++;
- if (n->content_state == svn_wc_notify_state_missing)
- {
- if ((err = svn_cmdline_printf
- (pool, _("Skipped missing target: '%s'\n"),
- path_local)))
- goto print_error;
- }
- else if (n->content_state == svn_wc_notify_state_source_missing)
- {
- if ((err = svn_cmdline_printf
- (pool, _("Skipped target: '%s' -- copy-source is missing\n"),
- path_local)))
- goto print_error;
- }
- else
- {
- if ((err = svn_cmdline_printf
- (pool, _("Skipped '%s'\n"), path_local)))
- goto print_error;
- }
- break;
- case svn_wc_notify_update_skip_obstruction:
- nb->skipped_paths++;
- if ((err = svn_cmdline_printf(
- pool, _("Skipped '%s' -- An obstructing working copy was found\n"),
- path_local)))
- goto print_error;
- break;
- case svn_wc_notify_update_skip_working_only:
- nb->skipped_paths++;
- if ((err = svn_cmdline_printf(
- pool, _("Skipped '%s' -- Has no versioned parent\n"),
- path_local)))
- goto print_error;
- break;
- case svn_wc_notify_update_skip_access_denied:
- nb->skipped_paths++;
- if ((err = svn_cmdline_printf(
- pool, _("Skipped '%s' -- Access denied\n"),
- path_local)))
- goto print_error;
- break;
- case svn_wc_notify_skip_conflicted:
- nb->skipped_paths++;
- if ((err = svn_cmdline_printf(
- pool, _("Skipped '%s' -- Node remains in conflict\n"),
- path_local)))
- goto print_error;
- break;
- case svn_wc_notify_update_delete:
- case svn_wc_notify_exclude:
- nb->received_some_change = TRUE;
- if ((err = svn_cmdline_printf(pool, "D %s\n", path_local)))
- goto print_error;
- break;
- case svn_wc_notify_update_broken_lock:
- if ((err = svn_cmdline_printf(pool, "B %s\n", path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_update_external_removed:
- nb->received_some_change = TRUE;
- if (n->err && n->err->message)
- {
- if ((err = svn_cmdline_printf(pool, "Removed external '%s': %s\n",
- path_local, n->err->message)))
- goto print_error;
- }
- else
- {
- if ((err = svn_cmdline_printf(pool, "Removed external '%s'\n",
- path_local)))
- goto print_error;
- }
- break;
-
- case svn_wc_notify_left_local_modifications:
- if ((err = svn_cmdline_printf(pool, "Left local modifications as '%s'\n",
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_update_replace:
- nb->received_some_change = TRUE;
- if ((err = svn_cmdline_printf(pool, "R %s\n", path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_update_add:
- nb->received_some_change = TRUE;
- if (n->content_state == svn_wc_notify_state_conflicted)
- {
- nb->text_conflicts++;
- add_conflicted_path(nb, n->path);
- if ((err = svn_cmdline_printf(pool, "C %s\n", path_local)))
- goto print_error;
- }
- else
- {
- if ((err = svn_cmdline_printf(pool, "A %s\n", path_local)))
- goto print_error;
- }
- break;
-
- case svn_wc_notify_exists:
- nb->received_some_change = TRUE;
- if (n->content_state == svn_wc_notify_state_conflicted)
- {
- nb->text_conflicts++;
- add_conflicted_path(nb, n->path);
- statchar_buf[0] = 'C';
- }
- else
- statchar_buf[0] = 'E';
-
- if (n->prop_state == svn_wc_notify_state_conflicted)
- {
- nb->prop_conflicts++;
- add_conflicted_path(nb, n->path);
- statchar_buf[1] = 'C';
- }
- else if (n->prop_state == svn_wc_notify_state_merged)
- statchar_buf[1] = 'G';
-
- if ((err = svn_cmdline_printf(pool, "%s %s\n", statchar_buf, path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_restore:
- if ((err = svn_cmdline_printf(pool, _("Restored '%s'\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_revert:
- if ((err = svn_cmdline_printf(pool, _("Reverted '%s'\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_failed_revert:
- if (( err = svn_cmdline_printf(pool, _("Failed to revert '%s' -- "
- "try updating instead.\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_resolved:
- if ((err = svn_cmdline_printf(pool,
- _("Resolved conflicted state of '%s'\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_add:
- /* We *should* only get the MIME_TYPE if PATH is a file. If we
- do get it, and the mime-type is not textual, note that this
- is a binary addition. */
- if (n->mime_type && (svn_mime_type_is_binary(n->mime_type)))
- {
- if ((err = svn_cmdline_printf(pool, "A (bin) %s\n",
- path_local)))
- goto print_error;
- }
- else
- {
- if ((err = svn_cmdline_printf(pool, "A %s\n",
- path_local)))
- goto print_error;
- }
- break;
-
- case svn_wc_notify_delete:
- nb->received_some_change = TRUE;
- if ((err = svn_cmdline_printf(pool, "D %s\n",
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_patch:
- {
- nb->received_some_change = TRUE;
- if (n->content_state == svn_wc_notify_state_conflicted)
- {
- nb->text_conflicts++;
- add_conflicted_path(nb, n->path);
- statchar_buf[0] = 'C';
- }
- else if (n->kind == svn_node_file)
- {
- if (n->content_state == svn_wc_notify_state_merged)
- statchar_buf[0] = 'G';
- else if (n->content_state == svn_wc_notify_state_changed)
- statchar_buf[0] = 'U';
- }
-
- if (n->prop_state == svn_wc_notify_state_conflicted)
- {
- nb->prop_conflicts++;
- add_conflicted_path(nb, n->path);
- statchar_buf[1] = 'C';
- }
- else if (n->prop_state == svn_wc_notify_state_changed)
- statchar_buf[1] = 'U';
-
- if (statchar_buf[0] != ' ' || statchar_buf[1] != ' ')
- {
- if ((err = svn_cmdline_printf(pool, "%s %s\n",
- statchar_buf, path_local)))
- goto print_error;
- }
- }
- break;
-
- case svn_wc_notify_patch_applied_hunk:
- nb->received_some_change = TRUE;
- if (n->hunk_original_start != n->hunk_matched_line)
- {
- apr_uint64_t off;
- const char *s;
- const char *minus;
-
- if (n->hunk_matched_line > n->hunk_original_start)
- {
- off = n->hunk_matched_line - n->hunk_original_start;
- minus = "";
- }
- else
- {
- off = n->hunk_original_start - n->hunk_matched_line;
- minus = "-";
- }
-
- /* ### We're creating the localized strings without
- * ### APR_INT64_T_FMT since it isn't translator-friendly */
- if (n->hunk_fuzz)
- {
-
- if (n->prop_name)
- {
- s = _("> applied hunk ## -%lu,%lu +%lu,%lu ## "
- "with offset %s");
-
- err = svn_cmdline_printf(pool,
- apr_pstrcat(pool, s,
- "%"APR_UINT64_T_FMT
- " and fuzz %lu (%s)\n",
- (char *)NULL),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- minus, off, n->hunk_fuzz,
- n->prop_name);
- }
- else
- {
- s = _("> applied hunk @@ -%lu,%lu +%lu,%lu @@ "
- "with offset %s");
-
- err = svn_cmdline_printf(pool,
- apr_pstrcat(pool, s,
- "%"APR_UINT64_T_FMT
- " and fuzz %lu\n",
- (char *)NULL),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- minus, off, n->hunk_fuzz);
- }
-
- if (err)
- goto print_error;
- }
- else
- {
-
- if (n->prop_name)
- {
- s = _("> applied hunk ## -%lu,%lu +%lu,%lu ## "
- "with offset %s");
- err = svn_cmdline_printf(pool,
- apr_pstrcat(pool, s,
- "%"APR_UINT64_T_FMT" (%s)\n",
- (char *)NULL),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- minus, off, n->prop_name);
- }
- else
- {
- s = _("> applied hunk @@ -%lu,%lu +%lu,%lu @@ "
- "with offset %s");
- err = svn_cmdline_printf(pool,
- apr_pstrcat(pool, s,
- "%"APR_UINT64_T_FMT"\n",
- (char *)NULL),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- minus, off);
- }
-
- if (err)
- goto print_error;
- }
- }
- else if (n->hunk_fuzz)
- {
- if (n->prop_name)
- err = svn_cmdline_printf(pool,
- _("> applied hunk ## -%lu,%lu +%lu,%lu ## "
- "with fuzz %lu (%s)\n"),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- n->hunk_fuzz,
- n->prop_name);
- else
- err = svn_cmdline_printf(pool,
- _("> applied hunk @@ -%lu,%lu +%lu,%lu @@ "
- "with fuzz %lu\n"),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- n->hunk_fuzz);
- if (err)
- goto print_error;
-
- }
- break;
-
- case svn_wc_notify_patch_rejected_hunk:
- nb->received_some_change = TRUE;
-
- if (n->prop_name)
- err = svn_cmdline_printf(pool,
- _("> rejected hunk "
- "## -%lu,%lu +%lu,%lu ## (%s)\n"),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- n->prop_name);
- else
- err = svn_cmdline_printf(pool,
- _("> rejected hunk "
- "@@ -%lu,%lu +%lu,%lu @@\n"),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_patch_hunk_already_applied:
- nb->received_some_change = TRUE;
- if (n->prop_name)
- err = svn_cmdline_printf(pool,
- _("> hunk "
- "## -%lu,%lu +%lu,%lu ## "
- "already applied (%s)\n"),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length,
- n->prop_name);
- else
- err = svn_cmdline_printf(pool,
- _("> hunk "
- "@@ -%lu,%lu +%lu,%lu @@ "
- "already applied\n"),
- n->hunk_original_start,
- n->hunk_original_length,
- n->hunk_modified_start,
- n->hunk_modified_length);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_update_update:
- case svn_wc_notify_merge_record_info:
- {
- if (n->content_state == svn_wc_notify_state_conflicted)
- {
- nb->text_conflicts++;
- add_conflicted_path(nb, n->path);
- statchar_buf[0] = 'C';
- }
- else if (n->kind == svn_node_file)
- {
- if (n->content_state == svn_wc_notify_state_merged)
- statchar_buf[0] = 'G';
- else if (n->content_state == svn_wc_notify_state_changed)
- statchar_buf[0] = 'U';
- }
-
- if (n->prop_state == svn_wc_notify_state_conflicted)
- {
- nb->prop_conflicts++;
- add_conflicted_path(nb, n->path);
- statchar_buf[1] = 'C';
- }
- else if (n->prop_state == svn_wc_notify_state_merged)
- statchar_buf[1] = 'G';
- else if (n->prop_state == svn_wc_notify_state_changed)
- statchar_buf[1] = 'U';
-
- if (n->lock_state == svn_wc_notify_lock_state_unlocked)
- statchar_buf[2] = 'B';
-
- if (statchar_buf[0] != ' ' || statchar_buf[1] != ' ')
- nb->received_some_change = TRUE;
-
- if (statchar_buf[0] != ' ' || statchar_buf[1] != ' '
- || statchar_buf[2] != ' ')
- {
- if ((err = svn_cmdline_printf(pool, "%s %s\n",
- statchar_buf, path_local)))
- goto print_error;
- }
- }
- break;
-
- case svn_wc_notify_update_external:
- /* Remember that we're now "inside" an externals definition. */
- nb->in_external = TRUE;
-
- /* Currently this is used for checkouts and switches too. If we
- want different output, we'll have to add new actions. */
- if ((err = svn_cmdline_printf(pool,
- _("\nFetching external item into '%s':\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_failed_external:
- /* If we are currently inside the handling of an externals
- definition, then we can simply present n->err as a warning
- and feel confident that after this, we aren't handling that
- externals definition any longer. */
- if (nb->in_external)
- {
- svn_handle_warning2(stderr, n->err, "svn: ");
- nb->in_external = FALSE;
- if ((err = svn_cmdline_printf(pool, "\n")))
- goto print_error;
- }
- /* Otherwise, we'll just print two warnings. Why? Because
- svn_handle_warning2() only shows the single "best message",
- but we have two pretty important ones: that the external at
- '/some/path' didn't pan out, and then the more specific
- reason why (from n->err). */
- else
- {
- svn_error_t *warn_err =
- svn_error_createf(SVN_ERR_BASE, NULL,
- _("Error handling externals definition for '%s':"),
- path_local);
- svn_handle_warning2(stderr, warn_err, "svn: ");
- svn_error_clear(warn_err);
- svn_handle_warning2(stderr, n->err, "svn: ");
- }
- break;
-
- case svn_wc_notify_update_started:
- if (! (nb->in_external ||
- nb->is_checkout ||
- nb->is_export))
- {
- if ((err = svn_cmdline_printf(pool, _("Updating '%s':\n"),
- path_local)))
- goto print_error;
- }
- break;
-
- case svn_wc_notify_update_completed:
- {
- if (SVN_IS_VALID_REVNUM(n->revision))
- {
- if (nb->is_export)
- {
- if ((err = svn_cmdline_printf
- (pool, nb->in_external
- ? _("Exported external at revision %ld.\n")
- : _("Exported revision %ld.\n"),
- n->revision)))
- goto print_error;
- }
- else if (nb->is_checkout)
- {
- if ((err = svn_cmdline_printf
- (pool, nb->in_external
- ? _("Checked out external at revision %ld.\n")
- : _("Checked out revision %ld.\n"),
- n->revision)))
- goto print_error;
- }
- else
- {
- if (nb->received_some_change)
- {
- nb->received_some_change = FALSE;
- if ((err = svn_cmdline_printf
- (pool, nb->in_external
- ? _("Updated external to revision %ld.\n")
- : _("Updated to revision %ld.\n"),
- n->revision)))
- goto print_error;
- }
- else
- {
- if ((err = svn_cmdline_printf
- (pool, nb->in_external
- ? _("External at revision %ld.\n")
- : _("At revision %ld.\n"),
- n->revision)))
- goto print_error;
- }
- }
- }
- else /* no revision */
- {
- if (nb->is_export)
- {
- if ((err = svn_cmdline_printf
- (pool, nb->in_external
- ? _("External export complete.\n")
- : _("Export complete.\n"))))
- goto print_error;
- }
- else if (nb->is_checkout)
- {
- if ((err = svn_cmdline_printf
- (pool, nb->in_external
- ? _("External checkout complete.\n")
- : _("Checkout complete.\n"))))
- goto print_error;
- }
- else
- {
- if ((err = svn_cmdline_printf
- (pool, nb->in_external
- ? _("External update complete.\n")
- : _("Update complete.\n"))))
- goto print_error;
- }
- }
- }
-
- if (nb->in_external)
- {
- nb->in_external = FALSE;
- if ((err = svn_cmdline_printf(pool, "\n")))
- goto print_error;
- }
- break;
-
- case svn_wc_notify_status_external:
- if ((err = svn_cmdline_printf
- (pool, _("\nPerforming status on external item at '%s':\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_status_completed:
- if (SVN_IS_VALID_REVNUM(n->revision))
- if ((err = svn_cmdline_printf(pool,
- _("Status against revision: %6ld\n"),
- n->revision)))
- goto print_error;
- break;
-
- case svn_wc_notify_commit_modified:
- /* xgettext: Align the %s's on this and the following 4 messages */
- if ((err = svn_cmdline_printf(pool,
- nb->is_wc_to_repos_copy
- ? _("Sending copy of %s\n")
- : _("Sending %s\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_commit_added:
- case svn_wc_notify_commit_copied:
- if (n->mime_type && svn_mime_type_is_binary(n->mime_type))
- {
- if ((err = svn_cmdline_printf(pool,
- nb->is_wc_to_repos_copy
- ? _("Adding copy of (bin) %s\n")
- : _("Adding (bin) %s\n"),
- path_local)))
- goto print_error;
- }
- else
- {
- if ((err = svn_cmdline_printf(pool,
- nb->is_wc_to_repos_copy
- ? _("Adding copy of %s\n")
- : _("Adding %s\n"),
- path_local)))
- goto print_error;
- }
- break;
-
- case svn_wc_notify_commit_deleted:
- if ((err = svn_cmdline_printf(pool,
- nb->is_wc_to_repos_copy
- ? _("Deleting copy of %s\n")
- : _("Deleting %s\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_commit_replaced:
- case svn_wc_notify_commit_copied_replaced:
- if ((err = svn_cmdline_printf(pool,
- nb->is_wc_to_repos_copy
- ? _("Replacing copy of %s\n")
- : _("Replacing %s\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_commit_postfix_txdelta:
- if (! nb->sent_first_txdelta)
- {
- nb->sent_first_txdelta = TRUE;
- if ((err = svn_cmdline_printf(pool,
- _("Transmitting file data "))))
- goto print_error;
- }
-
- if ((err = svn_cmdline_printf(pool, ".")))
- goto print_error;
- break;
-
- case svn_wc_notify_locked:
- if ((err = svn_cmdline_printf(pool, _("'%s' locked by user '%s'.\n"),
- path_local, n->lock->owner)))
- goto print_error;
- break;
-
- case svn_wc_notify_unlocked:
- if ((err = svn_cmdline_printf(pool, _("'%s' unlocked.\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_failed_lock:
- case svn_wc_notify_failed_unlock:
- svn_handle_warning2(stderr, n->err, "svn: ");
- break;
-
- case svn_wc_notify_changelist_set:
- if ((err = svn_cmdline_printf(pool, "A [%s] %s\n",
- n->changelist_name, path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_changelist_clear:
- case svn_wc_notify_changelist_moved:
- if ((err = svn_cmdline_printf(pool,
- "D [%s] %s\n",
- n->changelist_name, path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_merge_begin:
- if (n->merge_range == NULL)
- err = svn_cmdline_printf(pool,
- _("--- Merging differences between "
- "repository URLs into '%s':\n"),
- path_local);
- else if (n->merge_range->start == n->merge_range->end - 1
- || n->merge_range->start == n->merge_range->end)
- err = svn_cmdline_printf(pool, _("--- Merging r%ld into '%s':\n"),
- n->merge_range->end, path_local);
- else if (n->merge_range->start - 1 == n->merge_range->end)
- err = svn_cmdline_printf(pool,
- _("--- Reverse-merging r%ld into '%s':\n"),
- n->merge_range->start, path_local);
- else if (n->merge_range->start < n->merge_range->end)
- err = svn_cmdline_printf(pool,
- _("--- Merging r%ld through r%ld into "
- "'%s':\n"),
- n->merge_range->start + 1,
- n->merge_range->end, path_local);
- else /* n->merge_range->start > n->merge_range->end - 1 */
- err = svn_cmdline_printf(pool,
- _("--- Reverse-merging r%ld through r%ld "
- "into '%s':\n"),
- n->merge_range->start,
- n->merge_range->end + 1, path_local);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_merge_record_info_begin:
- if (!n->merge_range)
- {
- err = svn_cmdline_printf(pool,
- _("--- Recording mergeinfo for merge "
- "between repository URLs into '%s':\n"),
- path_local);
- }
- else
- {
- if (n->merge_range->start == n->merge_range->end - 1
- || n->merge_range->start == n->merge_range->end)
- err = svn_cmdline_printf(
- pool,
- _("--- Recording mergeinfo for merge of r%ld into '%s':\n"),
- n->merge_range->end, path_local);
- else if (n->merge_range->start - 1 == n->merge_range->end)
- err = svn_cmdline_printf(
- pool,
- _("--- Recording mergeinfo for reverse merge of r%ld into '%s':\n"),
- n->merge_range->start, path_local);
- else if (n->merge_range->start < n->merge_range->end)
- err = svn_cmdline_printf(
- pool,
- _("--- Recording mergeinfo for merge of r%ld through r%ld into '%s':\n"),
- n->merge_range->start + 1, n->merge_range->end, path_local);
- else /* n->merge_range->start > n->merge_range->end - 1 */
- err = svn_cmdline_printf(
- pool,
- _("--- Recording mergeinfo for reverse merge of r%ld through r%ld into '%s':\n"),
- n->merge_range->start, n->merge_range->end + 1, path_local);
- }
-
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_merge_elide_info:
- if ((err = svn_cmdline_printf(pool,
- _("--- Eliding mergeinfo from '%s':\n"),
- path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_foreign_merge_begin:
- if (n->merge_range == NULL)
- err = svn_cmdline_printf(pool,
- _("--- Merging differences between "
- "foreign repository URLs into '%s':\n"),
- path_local);
- else if (n->merge_range->start == n->merge_range->end - 1
- || n->merge_range->start == n->merge_range->end)
- err = svn_cmdline_printf(pool,
- _("--- Merging (from foreign repository) "
- "r%ld into '%s':\n"),
- n->merge_range->end, path_local);
- else if (n->merge_range->start - 1 == n->merge_range->end)
- err = svn_cmdline_printf(pool,
- _("--- Reverse-merging (from foreign "
- "repository) r%ld into '%s':\n"),
- n->merge_range->start, path_local);
- else if (n->merge_range->start < n->merge_range->end)
- err = svn_cmdline_printf(pool,
- _("--- Merging (from foreign repository) "
- "r%ld through r%ld into '%s':\n"),
- n->merge_range->start + 1,
- n->merge_range->end, path_local);
- else /* n->merge_range->start > n->merge_range->end - 1 */
- err = svn_cmdline_printf(pool,
- _("--- Reverse-merging (from foreign "
- "repository) r%ld through r%ld into "
- "'%s':\n"),
- n->merge_range->start,
- n->merge_range->end + 1, path_local);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_tree_conflict:
- nb->tree_conflicts++;
- add_conflicted_path(nb, n->path);
- if ((err = svn_cmdline_printf(pool, " C %s\n", path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_update_shadowed_add:
- nb->received_some_change = TRUE;
- if ((err = svn_cmdline_printf(pool, " A %s\n", path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_update_shadowed_update:
- nb->received_some_change = TRUE;
- if ((err = svn_cmdline_printf(pool, " U %s\n", path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_update_shadowed_delete:
- nb->received_some_change = TRUE;
- if ((err = svn_cmdline_printf(pool, " D %s\n", path_local)))
- goto print_error;
- break;
-
- case svn_wc_notify_property_modified:
- case svn_wc_notify_property_added:
- err = svn_cmdline_printf(pool,
- _("property '%s' set on '%s'\n"),
- n->prop_name, path_local);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_property_deleted:
- err = svn_cmdline_printf(pool,
- _("property '%s' deleted from '%s'.\n"),
- n->prop_name, path_local);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_property_deleted_nonexistent:
- err = svn_cmdline_printf(pool,
- _("Attempting to delete nonexistent "
- "property '%s' on '%s'\n"), n->prop_name,
- path_local);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_revprop_set:
- err = svn_cmdline_printf(pool,
- _("property '%s' set on repository revision %ld\n"),
- n->prop_name, n->revision);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_revprop_deleted:
- err = svn_cmdline_printf(pool,
- _("property '%s' deleted from repository revision %ld\n"),
- n->prop_name, n->revision);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_upgraded_path:
- err = svn_cmdline_printf(pool, _("Upgraded '%s'\n"), path_local);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_url_redirect:
- err = svn_cmdline_printf(pool, _("Redirecting to URL '%s':\n"),
- n->url);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_path_nonexistent:
- err = svn_cmdline_printf(pool, _("'%s' is not under version control"),
- path_local);
- if (err)
- goto print_error;
- break;
-
- case svn_wc_notify_conflict_resolver_starting:
- /* Once all operations invoke the interactive conflict resolution after
- * they've completed, we can run svn_cl__print_conflict_stats() here. */
- break;
-
- case svn_wc_notify_conflict_resolver_done:
- break;
-
- default:
- break;
- }
-
- if ((err = svn_cmdline_fflush(stdout)))
- goto print_error;
-
- return;
-
- print_error:
- /* If we had no errors before, print this error to stderr. Else, don't print
- anything. The user already knows there were some output errors,
- so there is no point in flooding her with an error per notification. */
- if (!nb->had_print_error)
- {
- nb->had_print_error = TRUE;
- /* Issue #3014:
- * Don't print anything on broken pipes. The pipe was likely
- * closed by the process at the other end. We expect that
- * process to perform error reporting as necessary.
- *
- * ### This assumes that there is only one error in a chain for
- * ### SVN_ERR_IO_PIPE_WRITE_ERROR. See svn_cmdline_fputs(). */
- if (err->apr_err != SVN_ERR_IO_PIPE_WRITE_ERROR)
- svn_handle_error2(err, stderr, FALSE, "svn: ");
- }
- svn_error_clear(err);
-}
-
-
-svn_error_t *
-svn_cl__get_notifier(svn_wc_notify_func2_t *notify_func_p,
- void **notify_baton_p,
- apr_pool_t *pool)
-{
- struct notify_baton *nb = apr_pcalloc(pool, sizeof(*nb));
-
- nb->received_some_change = FALSE;
- nb->sent_first_txdelta = FALSE;
- nb->is_checkout = FALSE;
- nb->is_export = FALSE;
- nb->is_wc_to_repos_copy = FALSE;
- nb->in_external = FALSE;
- nb->had_print_error = FALSE;
- nb->text_conflicts = 0;
- nb->prop_conflicts = 0;
- nb->tree_conflicts = 0;
- nb->skipped_paths = 0;
- nb->conflicted_paths = apr_hash_make(pool);
- SVN_ERR(svn_dirent_get_absolute(&nb->path_prefix, "", pool));
-
- *notify_func_p = notify;
- *notify_baton_p = nb;
- return SVN_NO_ERROR;
-}
-
-svn_error_t *
-svn_cl__notifier_mark_export(void *baton)
-{
- struct notify_baton *nb = baton;
-
- nb->is_export = TRUE;
- return SVN_NO_ERROR;
-}
diff --git a/tools/client-side/svn-bench/null-export-cmd.c b/tools/client-side/svn-bench/null-export-cmd.c
deleted file mode 100644
index 8220bfb..0000000
--- a/tools/client-side/svn-bench/null-export-cmd.c
+++ /dev/null
@@ -1,346 +0,0 @@
-/*
- * export-cmd.c -- Subversion export command
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-/* ==================================================================== */
-
-
-
-/*** Includes. ***/
-
-#include "svn_client.h"
-#include "svn_error.h"
-#include "svn_dirent_uri.h"
-#include "svn_path.h"
-#include "svn_cmdline.h"
-#include "cl.h"
-
-#include "svn_private_config.h"
-#include "private/svn_string_private.h"
-#include "private/svn_client_private.h"
-
-/*** The export editor code. ***/
-
-/* ---------------------------------------------------------------------- */
-
-/*** A dedicated 'export' editor, which does no .svn/ accounting. ***/
-
-typedef struct edit_baton_t
-{
- apr_int64_t file_count;
- apr_int64_t dir_count;
- apr_int64_t byte_count;
- apr_int64_t prop_count;
- apr_int64_t prop_byte_count;
-} edit_baton_t;
-
-static svn_error_t *
-set_target_revision(void *edit_baton,
- svn_revnum_t target_revision,
- apr_pool_t *pool)
-{
- return SVN_NO_ERROR;
-}
-
-
-/* Just ensure that the main export directory exists. */
-static svn_error_t *
-open_root(void *edit_baton,
- svn_revnum_t base_revision,
- apr_pool_t *pool,
- void **root_baton)
-{
- *root_baton = edit_baton;
- return SVN_NO_ERROR;
-}
-
-
-/* Ensure the directory exists, and send feedback. */
-static svn_error_t *
-add_directory(const char *path,
- void *parent_baton,
- const char *copyfrom_path,
- svn_revnum_t copyfrom_revision,
- apr_pool_t *pool,
- void **baton)
-{
- edit_baton_t *eb = parent_baton;
- eb->dir_count++;
-
- *baton = parent_baton;
- return SVN_NO_ERROR;
-}
-
-
-/* Build a file baton. */
-static svn_error_t *
-add_file(const char *path,
- void *parent_baton,
- const char *copyfrom_path,
- svn_revnum_t copyfrom_revision,
- apr_pool_t *pool,
- void **baton)
-{
- edit_baton_t *eb = parent_baton;
- eb->file_count++;
-
- *baton = parent_baton;
- return SVN_NO_ERROR;
-}
-
-static svn_error_t *
-window_handler(svn_txdelta_window_t *window, void *baton)
-{
- edit_baton_t *eb = baton;
- if (window != NULL)
- eb->byte_count += window->tview_len;
-
- return SVN_NO_ERROR;
-}
-
-/* Write incoming data into the tmpfile stream */
-
-static svn_error_t *
-apply_textdelta(void *file_baton,
- const char *base_checksum,
- apr_pool_t *pool,
- svn_txdelta_window_handler_t *handler,
- void **handler_baton)
-{
- *handler_baton = file_baton;
- *handler = window_handler;
-
- return SVN_NO_ERROR;
-}
-
-static svn_error_t *
-change_file_prop(void *file_baton,
- const char *name,
- const svn_string_t *value,
- apr_pool_t *pool)
-{
- edit_baton_t *eb = file_baton;
- eb->prop_count++;
- eb->prop_byte_count += value->len;
-
- return SVN_NO_ERROR;
-}
-
-static svn_error_t *
-change_dir_prop(void *dir_baton,
- const char *name,
- const svn_string_t *value,
- apr_pool_t *pool)
-{
- edit_baton_t *eb = dir_baton;
- eb->prop_count++;
-
- return SVN_NO_ERROR;
-}
-
-static svn_error_t *
-close_file(void *file_baton,
- const char *text_checksum,
- apr_pool_t *pool)
-{
- return SVN_NO_ERROR;
-}
-
-
-/*** Public Interfaces ***/
-
-static svn_error_t *
-bench_null_export(svn_revnum_t *result_rev,
- const char *from_path_or_url,
- svn_opt_revision_t *peg_revision,
- svn_opt_revision_t *revision,
- svn_depth_t depth,
- void *baton,
- svn_client_ctx_t *ctx,
- svn_boolean_t quiet,
- apr_pool_t *pool)
-{
- svn_revnum_t edit_revision = SVN_INVALID_REVNUM;
- svn_boolean_t from_is_url = svn_path_is_url(from_path_or_url);
-
- SVN_ERR_ASSERT(peg_revision != NULL);
- SVN_ERR_ASSERT(revision != NULL);
-
- if (peg_revision->kind == svn_opt_revision_unspecified)
- peg_revision->kind = svn_path_is_url(from_path_or_url)
- ? svn_opt_revision_head
- : svn_opt_revision_working;
-
- if (revision->kind == svn_opt_revision_unspecified)
- revision = peg_revision;
-
- if (from_is_url || ! SVN_CLIENT__REVKIND_IS_LOCAL_TO_WC(revision->kind))
- {
- svn_client__pathrev_t *loc;
- svn_ra_session_t *ra_session;
- svn_node_kind_t kind;
-
- /* Get the RA connection. */
- SVN_ERR(svn_client__ra_session_from_path2(&ra_session, &loc,
- from_path_or_url, NULL,
- peg_revision,
- revision, ctx, pool));
-
- SVN_ERR(svn_ra_check_path(ra_session, "", loc->rev, &kind, pool));
-
- if (kind == svn_node_file)
- {
- apr_hash_t *props;
-
- /* Since you cannot actually root an editor at a file, we
- * manually drive a few functions of our editor. */
-
- /* Step outside the editor-likeness for a moment, to actually talk
- * to the repository. */
- /* ### note: the stream will not be closed */
- SVN_ERR(svn_ra_get_file(ra_session, "", loc->rev,
- svn_stream_empty(pool),
- NULL, &props, pool));
- }
- else if (kind == svn_node_dir)
- {
- void *edit_baton = NULL;
- const svn_delta_editor_t *export_editor = NULL;
- const svn_ra_reporter3_t *reporter;
- void *report_baton;
-
- svn_delta_editor_t *editor = svn_delta_default_editor(pool);
-
- editor->set_target_revision = set_target_revision;
- editor->open_root = open_root;
- editor->add_directory = add_directory;
- editor->add_file = add_file;
- editor->apply_textdelta = apply_textdelta;
- editor->close_file = close_file;
- editor->change_file_prop = change_file_prop;
- editor->change_dir_prop = change_dir_prop;
-
- /* for ra_svn, we don't need an editior in quiet mode */
- if (!quiet || strncmp(loc->repos_root_url, "svn:", 4))
- SVN_ERR(svn_delta_get_cancellation_editor(ctx->cancel_func,
- ctx->cancel_baton,
- editor,
- baton,
- &export_editor,
- &edit_baton,
- pool));
-
- /* Manufacture a basic 'report' to the update reporter. */
- SVN_ERR(svn_ra_do_update3(ra_session,
- &reporter, &report_baton,
- loc->rev,
- "", /* no sub-target */
- depth,
- FALSE, /* don't want copyfrom-args */
- FALSE, /* don't want ignore_ancestry */
- export_editor, edit_baton,
- pool, pool));
-
- SVN_ERR(reporter->set_path(report_baton, "", loc->rev,
- /* Depth is irrelevant, as we're
- passing start_empty=TRUE anyway. */
- svn_depth_infinity,
- TRUE, /* "help, my dir is empty!" */
- NULL, pool));
-
- SVN_ERR(reporter->finish_report(report_baton, pool));
- }
- else if (kind == svn_node_none)
- {
- return svn_error_createf(SVN_ERR_RA_ILLEGAL_URL, NULL,
- _("URL '%s' doesn't exist"),
- from_path_or_url);
- }
- /* kind == svn_node_unknown not handled */
- }
-
-
- if (result_rev)
- *result_rev = edit_revision;
-
- return SVN_NO_ERROR;
-}
-
-
-/*** Code. ***/
-
-/* This implements the `svn_opt_subcommand_t' interface. */
-svn_error_t *
-svn_cl__null_export(apr_getopt_t *os,
- void *baton,
- apr_pool_t *pool)
-{
- svn_cl__opt_state_t *opt_state = ((svn_cl__cmd_baton_t *) baton)->opt_state;
- svn_client_ctx_t *ctx = ((svn_cl__cmd_baton_t *) baton)->ctx;
- const char *from;
- apr_array_header_t *targets;
- svn_error_t *err;
- svn_opt_revision_t peg_revision;
- const char *truefrom;
- edit_baton_t eb = { 0 };
-
- SVN_ERR(svn_cl__args_to_target_array_print_reserved(&targets, os,
- opt_state->targets,
- ctx, FALSE, pool));
-
- /* We want exactly 1 or 2 targets for this subcommand. */
- if (targets->nelts < 1)
- return svn_error_create(SVN_ERR_CL_INSUFFICIENT_ARGS, 0, NULL);
- if (targets->nelts > 2)
- return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, 0, NULL);
-
- /* The first target is the `from' path. */
- from = APR_ARRAY_IDX(targets, 0, const char *);
-
- /* Get the peg revision if present. */
- SVN_ERR(svn_opt_parse_path(&peg_revision, &truefrom, from, pool));
-
- if (opt_state->depth == svn_depth_unknown)
- opt_state->depth = svn_depth_infinity;
-
- /* Do the export. */
- err = bench_null_export(NULL, truefrom, &peg_revision,
- &(opt_state->start_revision),
- opt_state->depth,
- &eb,
- ctx, opt_state->quiet, pool);
-
- if (!opt_state->quiet)
- SVN_ERR(svn_cmdline_printf(pool,
- _("%15s directories\n"
- "%15s files\n"
- "%15s bytes in files\n"
- "%15s properties\n"
- "%15s bytes in properties\n"),
- svn__ui64toa_sep(eb.dir_count, ',', pool),
- svn__ui64toa_sep(eb.file_count, ',', pool),
- svn__ui64toa_sep(eb.byte_count, ',', pool),
- svn__ui64toa_sep(eb.prop_count, ',', pool),
- svn__ui64toa_sep(eb.prop_byte_count, ',', pool)));
-
- return svn_error_trace(err);
-}
diff --git a/tools/client-side/svn-bench/null-list-cmd.c b/tools/client-side/svn-bench/null-list-cmd.c
deleted file mode 100644
index 8aa08cd..0000000
--- a/tools/client-side/svn-bench/null-list-cmd.c
+++ /dev/null
@@ -1,169 +0,0 @@
-/*
- * list-cmd.c -- list a URL
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-#include "svn_cmdline.h"
-#include "svn_client.h"
-#include "svn_error.h"
-#include "svn_pools.h"
-#include "svn_time.h"
-#include "svn_xml.h"
-#include "svn_dirent_uri.h"
-#include "svn_path.h"
-#include "svn_utf.h"
-#include "svn_opt.h"
-
-#include "cl.h"
-
-#include "svn_private_config.h"
-#include "private/svn_string_private.h"
-
-
-
-/* Baton used when printing directory entries. */
-struct print_baton {
- svn_boolean_t verbose;
- apr_int64_t directories;
- apr_int64_t files;
- apr_int64_t locks;
- svn_client_ctx_t *ctx;
-};
-
-/* This implements the svn_client_list_func2_t API, printing a single
- directory entry in text format. */
-static svn_error_t *
-print_dirent(void *baton,
- const char *path,
- const svn_dirent_t *dirent,
- const svn_lock_t *lock,
- const char *abs_path,
- const char *external_parent_url,
- const char *external_target,
- apr_pool_t *pool)
-{
- struct print_baton *pb = baton;
-
- if (pb->ctx->cancel_func)
- SVN_ERR(pb->ctx->cancel_func(pb->ctx->cancel_baton));
-
- if (dirent->kind == svn_node_dir)
- pb->directories++;
- if (dirent->kind == svn_node_file)
- pb->files++;
- if (lock)
- pb->locks++;
-
- return SVN_NO_ERROR;
-}
-
-
-/* This implements the `svn_opt_subcommand_t' interface. */
-svn_error_t *
-svn_cl__null_list(apr_getopt_t *os,
- void *baton,
- apr_pool_t *pool)
-{
- svn_cl__opt_state_t *opt_state = ((svn_cl__cmd_baton_t *) baton)->opt_state;
- svn_client_ctx_t *ctx = ((svn_cl__cmd_baton_t *) baton)->ctx;
- apr_array_header_t *targets;
- int i;
- apr_pool_t *subpool = svn_pool_create(pool);
- apr_uint32_t dirent_fields;
- struct print_baton pb = { FALSE };
- svn_boolean_t seen_nonexistent_target = FALSE;
- svn_error_t *err;
-
- SVN_ERR(svn_cl__args_to_target_array_print_reserved(&targets, os,
- opt_state->targets,
- ctx, FALSE, pool));
-
- /* Add "." if user passed 0 arguments */
- svn_opt_push_implicit_dot_target(targets, pool);
-
- if (opt_state->verbose)
- dirent_fields = SVN_DIRENT_ALL;
- else
- dirent_fields = SVN_DIRENT_KIND; /* the only thing we actually need... */
-
- pb.ctx = ctx;
- pb.verbose = opt_state->verbose;
-
- if (opt_state->depth == svn_depth_unknown)
- opt_state->depth = svn_depth_immediates;
-
- /* For each target, try to list it. */
- for (i = 0; i < targets->nelts; i++)
- {
- const char *target = APR_ARRAY_IDX(targets, i, const char *);
- const char *truepath;
- svn_opt_revision_t peg_revision;
-
- svn_pool_clear(subpool);
-
- SVN_ERR(svn_cl__check_cancel(ctx->cancel_baton));
-
- /* Get peg revisions. */
- SVN_ERR(svn_opt_parse_path(&peg_revision, &truepath, target,
- subpool));
-
- err = svn_client_list3(truepath, &peg_revision,
- &(opt_state->start_revision),
- opt_state->depth,
- dirent_fields,
- opt_state->verbose,
- FALSE, /* include externals */
- print_dirent,
- &pb, ctx, subpool);
-
- if (err)
- {
- /* If one of the targets is a non-existent URL or wc-entry,
- don't bail out. Just warn and move on to the next target. */
- if (err->apr_err == SVN_ERR_WC_PATH_NOT_FOUND ||
- err->apr_err == SVN_ERR_FS_NOT_FOUND)
- svn_handle_warning2(stderr, err, "svn-bench: ");
- else
- return svn_error_trace(err);
-
- svn_error_clear(err);
- err = NULL;
- seen_nonexistent_target = TRUE;
- }
- else if (!opt_state->quiet)
- SVN_ERR(svn_cmdline_printf(pool,
- _("%15s directories\n"
- "%15s files\n"
- "%15s locks\n"),
- svn__ui64toa_sep(pb.directories, ',', pool),
- svn__ui64toa_sep(pb.files, ',', pool),
- svn__ui64toa_sep(pb.locks, ',', pool)));
- }
-
- svn_pool_destroy(subpool);
-
- if (seen_nonexistent_target)
- return svn_error_create(
- SVN_ERR_ILLEGAL_TARGET, NULL,
- _("Could not list all targets because some targets don't exist"));
- else
- return SVN_NO_ERROR;
-}
diff --git a/tools/client-side/svn-bench/null-log-cmd.c b/tools/client-side/svn-bench/null-log-cmd.c
deleted file mode 100644
index b35c8f2..0000000
--- a/tools/client-side/svn-bench/null-log-cmd.c
+++ /dev/null
@@ -1,243 +0,0 @@
-/*
- * log-cmd.c -- Display log messages
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-#define APR_WANT_STRFUNC
-#define APR_WANT_STDIO
-#include <apr_want.h>
-
-#include "svn_cmdline.h"
-#include "svn_compat.h"
-#include "svn_path.h"
-#include "svn_props.h"
-
-#include "cl.h"
-
-#include "svn_private_config.h"
-#include "private/svn_string_private.h"
-
-
-/*** Code. ***/
-
-/* Baton for log_entry_receiver() and log_entry_receiver_xml(). */
-struct log_receiver_baton
-{
- /* Client context. */
- svn_client_ctx_t *ctx;
-
- /* Level of merge revision nesting */
- apr_size_t merge_depth;
-
- /* collect counters? */
- svn_boolean_t quiet;
-
- /* total revision counters */
- apr_int64_t revisions;
- apr_int64_t changes;
- apr_int64_t message_lines;
-
- /* part that came from merges */
- apr_int64_t merges;
- apr_int64_t merged_revs;
- apr_int64_t merged_changes;
- apr_int64_t merged_message_lines;
-};
-
-
-/* Implement `svn_log_entry_receiver_t', printing the logs in
- * a human-readable and machine-parseable format.
- *
- * BATON is of type `struct log_receiver_baton'.
- */
-static svn_error_t *
-log_entry_receiver(void *baton,
- svn_log_entry_t *log_entry,
- apr_pool_t *pool)
-{
- struct log_receiver_baton *lb = baton;
- const char *author;
- const char *date;
- const char *message;
-
- if (lb->ctx->cancel_func)
- SVN_ERR(lb->ctx->cancel_func(lb->ctx->cancel_baton));
-
- if (! SVN_IS_VALID_REVNUM(log_entry->revision))
- {
- lb->merge_depth--;
- return SVN_NO_ERROR;
- }
-
- /* if we don't want counters, we are done */
- if (lb->quiet)
- return SVN_NO_ERROR;
-
- /* extract the message and do all the other counting */
- svn_compat_log_revprops_out(&author, &date, &message, log_entry->revprops);
- if (log_entry->revision == 0 && message == NULL)
- return SVN_NO_ERROR;
-
- lb->revisions++;
- if (lb->merge_depth)
- lb->merged_revs++;
-
- if (message != NULL)
- {
- int count = svn_cstring_count_newlines(message) + 1;
- lb->message_lines += count;
- if (lb->merge_depth)
- lb->merged_message_lines += count;
- }
-
- if (log_entry->changed_paths2)
- {
- unsigned count = apr_hash_count(log_entry->changed_paths2);
- lb->changes += count;
- if (lb->merge_depth)
- lb->merged_changes += count;
- }
-
- if (log_entry->has_children)
- {
- lb->merge_depth++;
- lb->merges++;
- }
-
- return SVN_NO_ERROR;
-}
-
-/* This implements the `svn_opt_subcommand_t' interface. */
-svn_error_t *
-svn_cl__null_log(apr_getopt_t *os,
- void *baton,
- apr_pool_t *pool)
-{
- svn_cl__opt_state_t *opt_state = ((svn_cl__cmd_baton_t *) baton)->opt_state;
- svn_client_ctx_t *ctx = ((svn_cl__cmd_baton_t *) baton)->ctx;
- apr_array_header_t *targets;
- struct log_receiver_baton lb = { 0 };
- const char *target;
- int i;
- apr_array_header_t *revprops;
- svn_opt_revision_t target_peg_revision;
- const char *target_path_or_url;
-
- SVN_ERR(svn_cl__args_to_target_array_print_reserved(&targets, os,
- opt_state->targets,
- ctx, FALSE, pool));
-
- /* Add "." if user passed 0 arguments */
- svn_opt_push_implicit_dot_target(targets, pool);
-
- /* Determine if they really want a two-revision range. */
- if (opt_state->used_change_arg)
- {
- if (opt_state->used_revision_arg && opt_state->revision_ranges->nelts > 1)
- {
- return svn_error_create
- (SVN_ERR_CLIENT_BAD_REVISION, NULL,
- _("-c and -r are mutually exclusive"));
- }
- for (i = 0; i < opt_state->revision_ranges->nelts; i++)
- {
- svn_opt_revision_range_t *range;
- range = APR_ARRAY_IDX(opt_state->revision_ranges, i,
- svn_opt_revision_range_t *);
- if (range->start.value.number < range->end.value.number)
- range->start.value.number++;
- else
- range->end.value.number++;
- }
- }
-
- /* Parse the first target into path-or-url and peg revision. */
- target = APR_ARRAY_IDX(targets, 0, const char *);
- SVN_ERR(svn_opt_parse_path(&target_peg_revision, &target_path_or_url,
- target, pool));
- if (target_peg_revision.kind == svn_opt_revision_unspecified)
- target_peg_revision.kind = (svn_path_is_url(target)
- ? svn_opt_revision_head
- : svn_opt_revision_working);
- APR_ARRAY_IDX(targets, 0, const char *) = target_path_or_url;
-
- if (svn_path_is_url(target))
- {
- for (i = 1; i < targets->nelts; i++)
- {
- target = APR_ARRAY_IDX(targets, i, const char *);
-
- if (svn_path_is_url(target) || target[0] == '/')
- return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("Only relative paths can be specified"
- " after a URL for 'svn-bench log', "
- "but '%s' is not a relative path"),
- target);
- }
- }
-
- lb.ctx = ctx;
- lb.quiet = opt_state->quiet;
-
- revprops = apr_array_make(pool, 3, sizeof(char *));
- APR_ARRAY_PUSH(revprops, const char *) = SVN_PROP_REVISION_AUTHOR;
- APR_ARRAY_PUSH(revprops, const char *) = SVN_PROP_REVISION_DATE;
- if (!opt_state->quiet)
- APR_ARRAY_PUSH(revprops, const char *) = SVN_PROP_REVISION_LOG;
- SVN_ERR(svn_client_log5(targets,
- &target_peg_revision,
- opt_state->revision_ranges,
- opt_state->limit,
- opt_state->verbose,
- opt_state->stop_on_copy,
- opt_state->use_merge_history,
- revprops,
- log_entry_receiver,
- &lb,
- ctx,
- pool));
-
- if (!opt_state->quiet)
- {
- if (opt_state->use_merge_history)
- SVN_ERR(svn_cmdline_printf(pool,
- _("%15s revisions, %15s merged in %s merges\n"
- "%15s msg lines, %15s in merged revisions\n"
- "%15s changes, %15s in merged revisions\n"),
- svn__ui64toa_sep(lb.revisions, ',', pool),
- svn__ui64toa_sep(lb.merged_revs, ',', pool),
- svn__ui64toa_sep(lb.merges, ',', pool),
- svn__ui64toa_sep(lb.message_lines, ',', pool),
- svn__ui64toa_sep(lb.merged_message_lines, ',', pool),
- svn__ui64toa_sep(lb.changes, ',', pool),
- svn__ui64toa_sep(lb.merged_changes, ',', pool)));
- else
- SVN_ERR(svn_cmdline_printf(pool,
- _("%15s revisions\n"
- "%15s msg lines\n"
- "%15s changes\n"),
- svn__ui64toa_sep(lb.revisions, ',', pool),
- svn__ui64toa_sep(lb.message_lines, ',', pool),
- svn__ui64toa_sep(lb.changes, ',', pool)));
- }
-
- return SVN_NO_ERROR;
-}
diff --git a/tools/client-side/svn-bench/svn-bench.c b/tools/client-side/svn-bench/svn-bench.c
deleted file mode 100644
index bf8964e..0000000
--- a/tools/client-side/svn-bench/svn-bench.c
+++ /dev/null
@@ -1,954 +0,0 @@
-/*
- * main.c: Subversion command line client.
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-/* ==================================================================== */
-
-
-
-/*** Includes. ***/
-
-#include <string.h>
-#include <assert.h>
-
-#include <apr_signal.h>
-
-#include "svn_cmdline.h"
-#include "svn_dirent_uri.h"
-#include "svn_pools.h"
-#include "svn_utf.h"
-#include "svn_version.h"
-
-#include "cl.h"
-
-#include "private/svn_opt_private.h"
-#include "private/svn_cmdline_private.h"
-
-#include "svn_private_config.h"
-
-
-/*** Option Processing ***/
-
-/* Add an identifier here for long options that don't have a short
- option. Options that have both long and short options should just
- use the short option letter as identifier. */
-typedef enum svn_cl__longopt_t {
- opt_auth_password = SVN_OPT_FIRST_LONGOPT_ID,
- opt_auth_username,
- opt_config_dir,
- opt_config_options,
- opt_depth,
- opt_no_auth_cache,
- opt_non_interactive,
- opt_stop_on_copy,
- opt_strict,
- opt_targets,
- opt_version,
- opt_with_revprop,
- opt_with_all_revprops,
- opt_with_no_revprops,
- opt_trust_server_cert
-} svn_cl__longopt_t;
-
-
-/* Option codes and descriptions for the command line client.
- *
- * The entire list must be terminated with an entry of nulls.
- */
-const apr_getopt_option_t svn_cl__options[] =
-{
- {"help", 'h', 0, N_("show help on a subcommand")},
- {NULL, '?', 0, N_("show help on a subcommand")},
- {"quiet", 'q', 0, N_("print nothing, or only summary information")},
- {"recursive", 'R', 0, N_("descend recursively, same as --depth=infinity")},
- {"non-recursive", 'N', 0, N_("obsolete; try --depth=files or --depth=immediates")},
- {"change", 'c', 1,
- N_("the change made by revision ARG (like -r ARG-1:ARG)\n"
- " "
- "If ARG is negative this is like -r ARG:ARG-1\n"
- " "
- "If ARG is of the form ARG1-ARG2 then this is like\n"
- " "
- "ARG1:ARG2, where ARG1 is inclusive")},
- {"revision", 'r', 1,
- N_("ARG (some commands also take ARG1:ARG2 range)\n"
- " "
- "A revision argument can be one of:\n"
- " "
- " NUMBER revision number\n"
- " "
- " '{' DATE '}' revision at start of the date\n"
- " "
- " 'HEAD' latest in repository\n"
- " "
- " 'BASE' base rev of item's working copy\n"
- " "
- " 'COMMITTED' last commit at or before BASE\n"
- " "
- " 'PREV' revision just before COMMITTED")},
- {"version", opt_version, 0, N_("show program version information")},
- {"verbose", 'v', 0, N_("print extra information")},
- {"username", opt_auth_username, 1, N_("specify a username ARG")},
- {"password", opt_auth_password, 1, N_("specify a password ARG")},
- {"targets", opt_targets, 1,
- N_("pass contents of file ARG as additional args")},
- {"depth", opt_depth, 1,
- N_("limit operation by depth ARG ('empty', 'files',\n"
- " "
- "'immediates', or 'infinity')")},
- {"strict", opt_strict, 0, N_("use strict semantics")},
- {"stop-on-copy", opt_stop_on_copy, 0,
- N_("do not cross copies while traversing history")},
- {"no-auth-cache", opt_no_auth_cache, 0,
- N_("do not cache authentication tokens")},
- {"trust-server-cert", opt_trust_server_cert, 0,
- N_("accept SSL server certificates from unknown\n"
- " "
- "certificate authorities without prompting (but only\n"
- " "
- "with '--non-interactive')") },
- {"non-interactive", opt_non_interactive, 0,
- N_("do no interactive prompting")},
- {"config-dir", opt_config_dir, 1,
- N_("read user configuration files from directory ARG")},
- {"config-option", opt_config_options, 1,
- N_("set user configuration option in the format:\n"
- " "
- " FILE:SECTION:OPTION=[VALUE]\n"
- " "
- "For example:\n"
- " "
- " servers:global:http-library=serf")},
- {"limit", 'l', 1, N_("maximum number of log entries")},
- {"with-all-revprops", opt_with_all_revprops, 0,
- N_("retrieve all revision properties")},
- {"with-no-revprops", opt_with_no_revprops, 0,
- N_("retrieve no revision properties")},
- {"with-revprop", opt_with_revprop, 1,
- N_("set revision property ARG in new revision\n"
- " "
- "using the name[=value] format")},
- {"use-merge-history", 'g', 0,
- N_("use/display additional information from merge\n"
- " "
- "history")},
-
- /* Long-opt Aliases
- *
- * These have NULL desriptions, but an option code that matches some
- * other option (whose description should probably mention its aliases).
- */
-
- {0, 0, 0, 0},
-};
-
-
-
-/*** Command dispatch. ***/
-
-/* Our array of available subcommands.
- *
- * The entire list must be terminated with an entry of nulls.
- *
- * In most of the help text "PATH" is used where a working copy path is
- * required, "URL" where a repository URL is required and "TARGET" when
- * either a path or a url can be used. Hmm, should this be part of the
- * help text?
- */
-
-/* Options that apply to all commands. (While not every command may
- currently require authentication or be interactive, allowing every
- command to take these arguments allows scripts to just pass them
- willy-nilly to every invocation of 'svn') . */
-const int svn_cl__global_options[] =
-{ opt_auth_username, opt_auth_password, opt_no_auth_cache, opt_non_interactive,
- opt_trust_server_cert, opt_config_dir, opt_config_options, 0
-};
-
-const svn_opt_subcommand_desc2_t svn_cl__cmd_table[] =
-{
- { "help", svn_cl__help, {"?", "h"}, N_
- ("Describe the usage of this program or its subcommands.\n"
- "usage: help [SUBCOMMAND...]\n"),
- {0} },
- /* This command is also invoked if we see option "--help", "-h" or "-?". */
-
- { "null-export", svn_cl__null_export, {0}, N_
- ("Create an unversioned copy of a tree.\n"
- "usage: null-export [-r REV] URL[@PEGREV]\n"
- "\n"
- " Exports a clean directory tree from the repository specified by\n"
- " URL, at revision REV if it is given, otherwise at HEAD.\n"
- "\n"
- " If specified, PEGREV determines in which revision the target is first\n"
- " looked up.\n"),
- {'r', 'q', 'N', opt_depth} },
-
- { "null-list", svn_cl__null_list, {"ls"}, N_
- ("List directory entries in the repository.\n"
- "usage: list [TARGET[@REV]...]\n"
- "\n"
- " List each TARGET file and the contents of each TARGET directory as\n"
- " they exist in the repository. If TARGET is a working copy path, the\n"
- " corresponding repository URL will be used. If specified, REV determines\n"
- " in which revision the target is first looked up.\n"
- "\n"
- " The default TARGET is '.', meaning the repository URL of the current\n"
- " working directory.\n"
- "\n"
- " With --verbose, the following fields will be fetched for each item:\n"
- "\n"
- " Revision number of the last commit\n"
- " Author of the last commit\n"
- " If locked, the letter 'O'. (Use 'svn info URL' to see details)\n"
- " Size (in bytes)\n"
- " Date and time of the last commit\n"),
- {'r', 'v', 'q', 'R', opt_depth} },
-
- { "null-log", svn_cl__null_log, {0}, N_
- ("Fetch the log messages for a set of revision(s) and/or path(s).\n"
- "usage: 1. null-log [PATH][@REV]\n"
- " 2. null-log URL[@REV] [PATH...]\n"
- "\n"
- " 1. Fetch the log messages for the URL corresponding to PATH\n"
- " (default: '.'). If specified, REV is the revision in which the\n"
- " URL is first looked up, and the default revision range is REV:1.\n"
- " If REV is not specified, the default revision range is BASE:1,\n"
- " since the URL might not exist in the HEAD revision.\n"
- "\n"
- " 2. Fetch the log messages for the PATHs (default: '.') under URL.\n"
- " If specified, REV is the revision in which the URL is first\n"
- " looked up, and the default revision range is REV:1; otherwise,\n"
- " the URL is looked up in HEAD, and the default revision range is\n"
- " HEAD:1.\n"
- "\n"
- " Multiple '-c' or '-r' options may be specified (but not a\n"
- " combination of '-c' and '-r' options), and mixing of forward and\n"
- " reverse ranges is allowed.\n"
- "\n"
- " With -v, also print all affected paths with each log message.\n"
- " With -q, don't print the log message body itself (note that this is\n"
- " compatible with -v).\n"
- "\n"
- " Each log message is printed just once, even if more than one of the\n"
- " affected paths for that revision were explicitly requested. Logs\n"
- " follow copy history by default. Use --stop-on-copy to disable this\n"
- " behavior, which can be useful for determining branchpoints.\n"),
- {'r', 'q', 'v', 'g', 'c', opt_targets, opt_stop_on_copy,
- 'l', opt_with_all_revprops, opt_with_no_revprops, opt_with_revprop,
- 'x',},
- {{opt_with_revprop, N_("retrieve revision property ARG")},
- {'c', N_("the change made in revision ARG")}} },
-
- { NULL, NULL, {0}, NULL, {0} }
-};
-
-
-/* Version compatibility check */
-static svn_error_t *
-check_lib_versions(void)
-{
- static const svn_version_checklist_t checklist[] =
- {
- { "svn_subr", svn_subr_version },
- { "svn_client", svn_client_version },
- { "svn_wc", svn_wc_version },
- { "svn_ra", svn_ra_version },
- { "svn_delta", svn_delta_version },
- { NULL, NULL }
- };
- SVN_VERSION_DEFINE(my_version);
-
- return svn_ver_check_list(&my_version, checklist);
-}
-
-
-/* A flag to see if we've been cancelled by the client or not. */
-static volatile sig_atomic_t cancelled = FALSE;
-
-/* A signal handler to support cancellation. */
-static void
-signal_handler(int signum)
-{
- apr_signal(signum, SIG_IGN);
- cancelled = TRUE;
-}
-
-/* Our cancellation callback. */
-svn_error_t *
-svn_cl__check_cancel(void *baton)
-{
- if (cancelled)
- return svn_error_create(SVN_ERR_CANCELLED, NULL, _("Caught signal"));
- else
- return SVN_NO_ERROR;
-}
-
-
-/*** Main. ***/
-
-/* Report and clear the error ERR, and return EXIT_FAILURE. */
-#define EXIT_ERROR(err) \
- svn_cmdline_handle_exit_error(err, NULL, "svn: ")
-
-/* A redefinition of the public SVN_INT_ERR macro, that suppresses the
- * error message if it is SVN_ERR_IO_PIPE_WRITE_ERROR. */
-#undef SVN_INT_ERR
-#define SVN_INT_ERR(expr) \
- do { \
- svn_error_t *svn_err__temp = (expr); \
- if (svn_err__temp) \
- return EXIT_ERROR(svn_err__temp); \
- } while (0)
-
-static int
-sub_main(int argc, const char *argv[], apr_pool_t *pool)
-{
- svn_error_t *err;
- int opt_id;
- apr_getopt_t *os;
- svn_cl__opt_state_t opt_state = { 0, { 0 } };
- svn_client_ctx_t *ctx;
- apr_array_header_t *received_opts;
- int i;
- const svn_opt_subcommand_desc2_t *subcommand = NULL;
- svn_cl__cmd_baton_t command_baton;
- svn_auth_baton_t *ab;
- svn_config_t *cfg_config;
- svn_boolean_t descend = TRUE;
- svn_boolean_t use_notifier = TRUE;
-
- received_opts = apr_array_make(pool, SVN_OPT_MAX_OPTIONS, sizeof(int));
-
- /* Check library versions */
- SVN_INT_ERR(check_lib_versions());
-
-#if defined(WIN32) || defined(__CYGWIN__)
- /* Set the working copy administrative directory name. */
- if (getenv("SVN_ASP_DOT_NET_HACK"))
- {
- SVN_INT_ERR(svn_wc_set_adm_dir("_svn", pool));
- }
-#endif
-
- /* Initialize the RA library. */
- SVN_INT_ERR(svn_ra_initialize(pool));
-
- /* Begin processing arguments. */
- opt_state.start_revision.kind = svn_opt_revision_unspecified;
- opt_state.end_revision.kind = svn_opt_revision_unspecified;
- opt_state.revision_ranges =
- apr_array_make(pool, 0, sizeof(svn_opt_revision_range_t *));
- opt_state.depth = svn_depth_unknown;
-
- /* No args? Show usage. */
- if (argc <= 1)
- {
- SVN_INT_ERR(svn_cl__help(NULL, NULL, pool));
- return EXIT_FAILURE;
- }
-
- /* Else, parse options. */
- SVN_INT_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
-
- os->interleave = 1;
- while (1)
- {
- const char *opt_arg;
- const char *utf8_opt_arg;
-
- /* Parse the next option. */
- apr_status_t apr_err = apr_getopt_long(os, svn_cl__options, &opt_id,
- &opt_arg);
- if (APR_STATUS_IS_EOF(apr_err))
- break;
- else if (apr_err)
- {
- SVN_INT_ERR(svn_cl__help(NULL, NULL, pool));
- return EXIT_FAILURE;
- }
-
- /* Stash the option code in an array before parsing it. */
- APR_ARRAY_PUSH(received_opts, int) = opt_id;
-
- switch (opt_id) {
- case 'l':
- {
- err = svn_cstring_atoi(&opt_state.limit, opt_arg);
- if (err)
- {
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, err,
- _("Non-numeric limit argument given"));
- return EXIT_ERROR(err);
- }
- if (opt_state.limit <= 0)
- {
- err = svn_error_create(SVN_ERR_INCORRECT_PARAMS, NULL,
- _("Argument to --limit must be positive"));
- return EXIT_ERROR(err);
- }
- }
- break;
- case 'c':
- {
- apr_array_header_t *change_revs =
- svn_cstring_split(opt_arg, ", \n\r\t\v", TRUE, pool);
-
- for (i = 0; i < change_revs->nelts; i++)
- {
- char *end;
- svn_revnum_t changeno, changeno_end;
- const char *change_str =
- APR_ARRAY_IDX(change_revs, i, const char *);
- const char *s = change_str;
- svn_boolean_t is_negative;
-
- /* Check for a leading minus to allow "-c -r42".
- * The is_negative flag is used to handle "-c -42" and "-c -r42".
- * The "-c r-42" case is handled by strtol() returning a
- * negative number. */
- is_negative = (*s == '-');
- if (is_negative)
- s++;
-
- /* Allow any number of 'r's to prefix a revision number. */
- while (*s == 'r')
- s++;
- changeno = changeno_end = strtol(s, &end, 10);
- if (end != s && *end == '-')
- {
- if (changeno < 0 || is_negative)
- {
- err = svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR,
- NULL,
- _("Negative number in range (%s)"
- " not supported with -c"),
- change_str);
- return EXIT_ERROR(err);
- }
- s = end + 1;
- while (*s == 'r')
- s++;
- changeno_end = strtol(s, &end, 10);
- }
- if (end == change_str || *end != '\0')
- {
- err = svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("Non-numeric change argument (%s) "
- "given to -c"), change_str);
- return EXIT_ERROR(err);
- }
-
- if (changeno == 0)
- {
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("There is no change 0"));
- return EXIT_ERROR(err);
- }
-
- if (is_negative)
- changeno = -changeno;
-
- /* Figure out the range:
- -c N -> -r N-1:N
- -c -N -> -r N:N-1
- -c M-N -> -r M-1:N for M < N
- -c M-N -> -r M:N-1 for M > N
- -c -M-N -> error (too confusing/no valid use case)
- */
- if (changeno > 0)
- {
- if (changeno <= changeno_end)
- changeno--;
- else
- changeno_end--;
- }
- else
- {
- changeno = -changeno;
- changeno_end = changeno - 1;
- }
-
- opt_state.used_change_arg = TRUE;
- APR_ARRAY_PUSH(opt_state.revision_ranges,
- svn_opt_revision_range_t *)
- = svn_opt__revision_range_from_revnums(changeno, changeno_end,
- pool);
- }
- }
- break;
- case 'r':
- opt_state.used_revision_arg = TRUE;
- if (svn_opt_parse_revision_to_range(opt_state.revision_ranges,
- opt_arg, pool) != 0)
- {
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
- err = svn_error_createf
- (SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("Syntax error in revision argument '%s'"),
- utf8_opt_arg);
- return EXIT_ERROR(err);
- }
- break;
- case 'v':
- opt_state.verbose = TRUE;
- break;
- case 'h':
- case '?':
- opt_state.help = TRUE;
- break;
- case 'q':
- opt_state.quiet = TRUE;
- break;
- case opt_targets:
- {
- svn_stringbuf_t *buffer, *buffer_utf8;
-
- /* We need to convert to UTF-8 now, even before we divide
- the targets into an array, because otherwise we wouldn't
- know what delimiter to use for svn_cstring_split(). */
-
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool));
- SVN_INT_ERR(svn_stringbuf_from_file2(&buffer, utf8_opt_arg, pool));
- SVN_INT_ERR(svn_utf_stringbuf_to_utf8(&buffer_utf8, buffer, pool));
- opt_state.targets = svn_cstring_split(buffer_utf8->data, "\n\r",
- TRUE, pool);
- }
- break;
- case 'N':
- descend = FALSE;
- break;
- case opt_depth:
- err = svn_utf_cstring_to_utf8(&utf8_opt_arg, opt_arg, pool);
- if (err)
- return EXIT_ERROR
- (svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, err,
- _("Error converting depth "
- "from locale to UTF-8")));
- opt_state.depth = svn_depth_from_word(utf8_opt_arg);
- if (opt_state.depth == svn_depth_unknown
- || opt_state.depth == svn_depth_exclude)
- {
- return EXIT_ERROR
- (svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("'%s' is not a valid depth; try "
- "'empty', 'files', 'immediates', "
- "or 'infinity'"),
- utf8_opt_arg));
- }
- break;
- case opt_version:
- opt_state.version = TRUE;
- break;
- case opt_auth_username:
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.auth_username,
- opt_arg, pool));
- break;
- case opt_auth_password:
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.auth_password,
- opt_arg, pool));
- break;
- case opt_stop_on_copy:
- opt_state.stop_on_copy = TRUE;
- break;
- case opt_strict:
- opt_state.strict = TRUE;
- break;
- case opt_no_auth_cache:
- opt_state.no_auth_cache = TRUE;
- break;
- case opt_non_interactive:
- opt_state.non_interactive = TRUE;
- break;
- case opt_trust_server_cert:
- opt_state.trust_server_cert = TRUE;
- break;
- case 'x':
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.extensions,
- opt_arg, pool));
- break;
- case opt_config_dir:
- {
- const char *path_utf8;
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&path_utf8, opt_arg, pool));
- opt_state.config_dir = svn_dirent_internal_style(path_utf8, pool);
- }
- break;
- case opt_config_options:
- if (!opt_state.config_options)
- opt_state.config_options =
- apr_array_make(pool, 1,
- sizeof(svn_cmdline__config_argument_t*));
-
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_arg, opt_arg, pool));
- SVN_INT_ERR(svn_cmdline__parse_config_option(opt_state.config_options,
- opt_arg, pool));
- break;
- case opt_with_all_revprops:
- /* If --with-all-revprops is specified along with one or more
- * --with-revprops options, --with-all-revprops takes precedence. */
- opt_state.all_revprops = TRUE;
- break;
- case opt_with_no_revprops:
- opt_state.no_revprops = TRUE;
- break;
- case opt_with_revprop:
- SVN_INT_ERR(svn_opt_parse_revprop(&opt_state.revprop_table,
- opt_arg, pool));
- break;
- case 'g':
- opt_state.use_merge_history = TRUE;
- break;
- default:
- /* Hmmm. Perhaps this would be a good place to squirrel away
- opts that commands like svn diff might need. Hmmm indeed. */
- break;
- }
- }
-
- /* ### This really belongs in libsvn_client. The trouble is,
- there's no one place there to run it from, no
- svn_client_init(). We'd have to add it to all the public
- functions that a client might call. It's unmaintainable to do
- initialization from within libsvn_client itself, but it seems
- burdensome to demand that all clients call svn_client_init()
- before calling any other libsvn_client function... On the other
- hand, the alternative is effectively to demand that they call
- svn_config_ensure() instead, so maybe we should have a generic
- init function anyway. Thoughts? */
- SVN_INT_ERR(svn_config_ensure(opt_state.config_dir, pool));
-
- /* If the user asked for help, then the rest of the arguments are
- the names of subcommands to get help on (if any), or else they're
- just typos/mistakes. Whatever the case, the subcommand to
- actually run is svn_cl__help(). */
- if (opt_state.help)
- subcommand = svn_opt_get_canonical_subcommand2(svn_cl__cmd_table, "help");
-
- /* If we're not running the `help' subcommand, then look for a
- subcommand in the first argument. */
- if (subcommand == NULL)
- {
- if (os->ind >= os->argc)
- {
- if (opt_state.version)
- {
- /* Use the "help" subcommand to handle the "--version" option. */
- static const svn_opt_subcommand_desc2_t pseudo_cmd =
- { "--version", svn_cl__help, {0}, "",
- {opt_version, /* must accept its own option */
- 'q', /* brief output */
- 'v', /* verbose output */
- opt_config_dir /* all commands accept this */
- } };
-
- subcommand = &pseudo_cmd;
- }
- else
- {
- svn_error_clear
- (svn_cmdline_fprintf(stderr, pool,
- _("Subcommand argument required\n")));
- SVN_INT_ERR(svn_cl__help(NULL, NULL, pool));
- return EXIT_FAILURE;
- }
- }
- else
- {
- const char *first_arg = os->argv[os->ind++];
- subcommand = svn_opt_get_canonical_subcommand2(svn_cl__cmd_table,
- first_arg);
- if (subcommand == NULL)
- {
- const char *first_arg_utf8;
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&first_arg_utf8,
- first_arg, pool));
- svn_error_clear
- (svn_cmdline_fprintf(stderr, pool,
- _("Unknown subcommand: '%s'\n"),
- first_arg_utf8));
- SVN_INT_ERR(svn_cl__help(NULL, NULL, pool));
- return EXIT_FAILURE;
- }
- }
- }
-
- /* Check that the subcommand wasn't passed any inappropriate options. */
- for (i = 0; i < received_opts->nelts; i++)
- {
- opt_id = APR_ARRAY_IDX(received_opts, i, int);
-
- /* All commands implicitly accept --help, so just skip over this
- when we see it. Note that we don't want to include this option
- in their "accepted options" list because it would be awfully
- redundant to display it in every commands' help text. */
- if (opt_id == 'h' || opt_id == '?')
- continue;
-
- if (! svn_opt_subcommand_takes_option3(subcommand, opt_id,
- svn_cl__global_options))
- {
- const char *optstr;
- const apr_getopt_option_t *badopt =
- svn_opt_get_option_from_code2(opt_id, svn_cl__options,
- subcommand, pool);
- svn_opt_format_option(&optstr, badopt, FALSE, pool);
- if (subcommand->name[0] == '-')
- SVN_INT_ERR(svn_cl__help(NULL, NULL, pool));
- else
- svn_error_clear
- (svn_cmdline_fprintf
- (stderr, pool, _("Subcommand '%s' doesn't accept option '%s'\n"
- "Type 'svn-bench help %s' for usage.\n"),
- subcommand->name, optstr, subcommand->name));
- return EXIT_FAILURE;
- }
- }
-
- /* Only merge and log support multiple revisions/revision ranges. */
- if (subcommand->cmd_func != svn_cl__null_log)
- {
- if (opt_state.revision_ranges->nelts > 1)
- {
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("Multiple revision arguments "
- "encountered; can't specify -c twice, "
- "or both -c and -r"));
- return EXIT_ERROR(err);
- }
- }
-
- /* Disallow simultaneous use of both --with-all-revprops and
- --with-no-revprops. */
- if (opt_state.all_revprops && opt_state.no_revprops)
- {
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("--with-all-revprops and --with-no-revprops "
- "are mutually exclusive"));
- return EXIT_ERROR(err);
- }
-
- /* Disallow simultaneous use of both --with-revprop and
- --with-no-revprops. */
- if (opt_state.revprop_table && opt_state.no_revprops)
- {
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("--with-revprop and --with-no-revprops "
- "are mutually exclusive"));
- return EXIT_ERROR(err);
- }
-
- /* --trust-server-cert can only be used with --non-interactive */
- if (opt_state.trust_server_cert && !opt_state.non_interactive)
- {
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("--trust-server-cert requires "
- "--non-interactive"));
- return EXIT_ERROR(err);
- }
-
- /* Ensure that 'revision_ranges' has at least one item, and make
- 'start_revision' and 'end_revision' match that item. */
- if (opt_state.revision_ranges->nelts == 0)
- {
- svn_opt_revision_range_t *range = apr_palloc(pool, sizeof(*range));
- range->start.kind = svn_opt_revision_unspecified;
- range->end.kind = svn_opt_revision_unspecified;
- APR_ARRAY_PUSH(opt_state.revision_ranges,
- svn_opt_revision_range_t *) = range;
- }
- opt_state.start_revision = APR_ARRAY_IDX(opt_state.revision_ranges, 0,
- svn_opt_revision_range_t *)->start;
- opt_state.end_revision = APR_ARRAY_IDX(opt_state.revision_ranges, 0,
- svn_opt_revision_range_t *)->end;
-
- /* Create a client context object. */
- command_baton.opt_state = &opt_state;
- SVN_INT_ERR(svn_client_create_context2(&ctx, NULL, pool));
- command_baton.ctx = ctx;
-
- /* Only a few commands can accept a revision range; the rest can take at
- most one revision number. */
- if (subcommand->cmd_func != svn_cl__null_log)
- {
- if (opt_state.end_revision.kind != svn_opt_revision_unspecified)
- {
- err = svn_error_create(SVN_ERR_CLIENT_REVISION_RANGE, NULL, NULL);
- return EXIT_ERROR(err);
- }
- }
-
- /* -N has a different meaning depending on the command */
- if (!descend)
- opt_state.depth = svn_depth_files;
-
- err = svn_config_get_config(&(ctx->config),
- opt_state.config_dir, pool);
- if (err)
- {
- /* Fallback to default config if the config directory isn't readable
- or is not a directory. */
- if (APR_STATUS_IS_EACCES(err->apr_err)
- || SVN__APR_STATUS_IS_ENOTDIR(err->apr_err))
- {
- svn_handle_warning2(stderr, err, "svn: ");
- svn_error_clear(err);
- }
- else
- return EXIT_ERROR(err);
- }
-
- cfg_config = apr_hash_get(ctx->config, SVN_CONFIG_CATEGORY_CONFIG,
- APR_HASH_KEY_STRING);
-
- /* Update the options in the config */
- if (opt_state.config_options)
- {
- svn_error_clear(
- svn_cmdline__apply_config_options(ctx->config,
- opt_state.config_options,
- "svn: ", "--config-option"));
- }
-
- /* Set up the notifier.
-
- In general, we use it any time we aren't in --quiet mode. 'svn
- status' is unique, though, in that we don't want it in --quiet mode
- unless we're also in --verbose mode. When in --xml mode,
- though, we never want it. */
- if (opt_state.quiet)
- use_notifier = FALSE;
- if (use_notifier)
- {
- SVN_INT_ERR(svn_cl__get_notifier(&ctx->notify_func2, &ctx->notify_baton2,
- pool));
- }
-
- /* Set up our cancellation support. */
- ctx->cancel_func = svn_cl__check_cancel;
- apr_signal(SIGINT, signal_handler);
-#ifdef SIGBREAK
- /* SIGBREAK is a Win32 specific signal generated by ctrl-break. */
- apr_signal(SIGBREAK, signal_handler);
-#endif
-#ifdef SIGHUP
- apr_signal(SIGHUP, signal_handler);
-#endif
-#ifdef SIGTERM
- apr_signal(SIGTERM, signal_handler);
-#endif
-
-#ifdef SIGPIPE
- /* Disable SIGPIPE generation for the platforms that have it. */
- apr_signal(SIGPIPE, SIG_IGN);
-#endif
-
-#ifdef SIGXFSZ
- /* Disable SIGXFSZ generation for the platforms that have it, otherwise
- * working with large files when compiled against an APR that doesn't have
- * large file support will crash the program, which is uncool. */
- apr_signal(SIGXFSZ, SIG_IGN);
-#endif
-
- /* Set up Authentication stuff. */
- SVN_INT_ERR(svn_cmdline_create_auth_baton(&ab,
- opt_state.non_interactive,
- opt_state.auth_username,
- opt_state.auth_password,
- opt_state.config_dir,
- opt_state.no_auth_cache,
- opt_state.trust_server_cert,
- cfg_config,
- ctx->cancel_func,
- ctx->cancel_baton,
- pool));
-
- ctx->auth_baton = ab;
-
- /* The new svn behavior is to postpone everything until after the operation
- completed */
- ctx->conflict_func = NULL;
- ctx->conflict_baton = NULL;
- ctx->conflict_func2 = NULL;
- ctx->conflict_baton2 = NULL;
-
- /* And now we finally run the subcommand. */
- err = (*subcommand->cmd_func)(os, &command_baton, pool);
- if (err)
- {
- /* For argument-related problems, suggest using the 'help'
- subcommand. */
- if (err->apr_err == SVN_ERR_CL_INSUFFICIENT_ARGS
- || err->apr_err == SVN_ERR_CL_ARG_PARSING_ERROR)
- {
- err = svn_error_quick_wrap(
- err, apr_psprintf(pool,
- _("Try 'svn-bench help %s' for more information"),
- subcommand->name));
- }
- if (err->apr_err == SVN_ERR_WC_UPGRADE_REQUIRED)
- {
- err = svn_error_quick_wrap(err,
- _("Please see the 'svn upgrade' command"));
- }
-
- /* Tell the user about 'svn cleanup' if any error on the stack
- was about locked working copies. */
- if (svn_error_find_cause(err, SVN_ERR_WC_LOCKED))
- {
- err = svn_error_quick_wrap(
- err, _("Run 'svn cleanup' to remove locks "
- "(type 'svn help cleanup' for details)"));
- }
-
- return EXIT_ERROR(err);
- }
- else
- {
- /* Ensure that stdout is flushed, so the user will see any write errors.
- This makes sure that output is not silently lost. */
- SVN_INT_ERR(svn_cmdline_fflush(stdout));
-
- return EXIT_SUCCESS;
- }
-}
-
-int
-main(int argc, const char *argv[])
-{
- apr_pool_t *pool;
- int exit_code;
-
- /* Initialize the app. */
- if (svn_cmdline_init("svn", stderr) != EXIT_SUCCESS)
- return EXIT_FAILURE;
-
- /* Create our top-level pool. Use a separate mutexless allocator,
- * given this application is single threaded.
- */
- pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
-
- exit_code = sub_main(argc, argv, pool);
-
- svn_pool_destroy(pool);
- return exit_code;
-}
diff --git a/tools/client-side/svn-bench/util.c b/tools/client-side/svn-bench/util.c
deleted file mode 100644
index 2aedde6..0000000
--- a/tools/client-side/svn-bench/util.c
+++ /dev/null
@@ -1,92 +0,0 @@
-/*
- * util.c: Subversion command line client utility functions. Any
- * functions that need to be shared across subcommands should be put
- * in here.
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-/* ==================================================================== */
-
-
-
-/*** Includes. ***/
-
-#include <string.h>
-#include <ctype.h>
-#include <assert.h>
-
-#include "svn_private_config.h"
-#include "svn_error.h"
-#include "svn_path.h"
-
-#include "cl.h"
-
-
-
-svn_error_t *
-svn_cl__args_to_target_array_print_reserved(apr_array_header_t **targets,
- apr_getopt_t *os,
- const apr_array_header_t *known_targets,
- svn_client_ctx_t *ctx,
- svn_boolean_t keep_last_origpath_on_truepath_collision,
- apr_pool_t *pool)
-{
- svn_error_t *err = svn_client_args_to_target_array2(targets,
- os,
- known_targets,
- ctx,
- keep_last_origpath_on_truepath_collision,
- pool);
- if (err)
- {
- if (err->apr_err == SVN_ERR_RESERVED_FILENAME_SPECIFIED)
- {
- svn_handle_error2(err, stderr, FALSE, "svn: Skipping argument: ");
- svn_error_clear(err);
- }
- else
- return svn_error_trace(err);
- }
- return SVN_NO_ERROR;
-}
-
-svn_error_t *
-svn_cl__check_target_is_local_path(const char *target)
-{
- if (svn_path_is_url(target))
- return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- _("'%s' is not a local path"), target);
- return SVN_NO_ERROR;
-}
-
-const char *
-svn_cl__local_style_skip_ancestor(const char *parent_path,
- const char *path,
- apr_pool_t *pool)
-{
- const char *relpath = NULL;
-
- if (parent_path)
- relpath = svn_dirent_skip_ancestor(parent_path, path);
-
- return svn_dirent_local_style(relpath ? relpath : path, pool);
-}
-
diff --git a/tools/client-side/svn-graph.pl b/tools/client-side/svn-graph.pl
index cd76d04..0675e8a 100755
--- a/tools/client-side/svn-graph.pl
+++ b/tools/client-side/svn-graph.pl
@@ -43,7 +43,6 @@ use Getopt::Std;
$|=1;
require SVN::Core;
-require SVN::Ra;
require SVN::Client;
# The URL of the Subversion repository we wish to graph
@@ -60,17 +59,6 @@ my $startpath;
# Set the variables declared above.
parse_commandline();
-# Point at the root of a repository so we get can look at
-# every revision.
-my $auth = (new SVN::Client())->auth;
-my $ra = SVN::Ra->new(url => $repos_url, auth => $auth);
-
-# Handle identifier for the aboslutely youngest revision.
-if ($youngest eq 'HEAD')
-{
- $youngest = $ra->get_latest_revnum();
-}
-
# The "interesting" nodes are potential sources for copies. This list
# grows as we move through time.
# The "tracking" nodes are the most recent revisions of paths we're
@@ -110,7 +98,7 @@ usage: svn-graph.pl [-r START_REV:END_REV] [-p PATH] REPOS_URL
getopts('r:p:h', \%cmd_opts) or die $usage;
die $usage if scalar(@ARGV) < 1;
- $repos_url = $ARGV[0];
+ $repos_url = SVN::Core::uri_canonicalize($ARGV[0]);
$cmd_opts{'r'} =~ m/(\d+)(:(.+))?/;
if ($3)
@@ -207,6 +195,7 @@ sub process_revision
# Write a descriptor for the graph in GraphViz .dot format to stdout.
sub write_graph_descriptor
{
+ my $client = SVN::Client->new;
# Begin writing the graph descriptor.
print "digraph tree {\n";
print "\tgraph [bgcolor=white];\n";
@@ -215,7 +204,7 @@ sub write_graph_descriptor
print "\n";
# Retrieve the requested history.
- $ra->get_log(['/'], $startrev, $youngest, 0, 1, 0, \&process_revision);
+ $client->log($repos_url, $startrev, $youngest, 1, 0, \&process_revision);
# Now ensure that everything is linked.
foreach my $codeline_change (keys %codeline_changes_forward)
diff --git a/tools/client-side/svn-ssl-fingerprints.sh b/tools/client-side/svn-ssl-fingerprints.sh
index 6fed58b..828ea4a 100755
--- a/tools/client-side/svn-ssl-fingerprints.sh
+++ b/tools/client-side/svn-ssl-fingerprints.sh
@@ -20,7 +20,7 @@
#
#
# $0 --- list the fingerprints of SSL certificates that svn has seen before.
-#
+#
# SYNOPSIS:
# $0
# $0 /path/to/.subversion
diff --git a/tools/client-side/svn-vendor.py b/tools/client-side/svn-vendor.py
new file mode 100755
index 0000000..d0c862c
--- /dev/null
+++ b/tools/client-side/svn-vendor.py
@@ -0,0 +1,1065 @@
+#!/usr/bin/python3
+# vim: set sw=4 expandtab :
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+##############################################################################
+# svn-vendor.py
+#
+# Overview
+# --------
+# Replacement for svn_load_dirs.pl (included as a 'contributed utility' in
+# Subversion sources). Main difference is some heuristics in detection of
+# the renames. Note that this script does not attempt to automate remote
+# SVN operations (check-out, check-in and tagging), so it is possible to
+# review the state of sources that are about to be checked in. Another
+# difference is an ability to save the detected renames, review/re-apply
+# them.
+#
+# This script requires Python 3.3.x or higher. Sorry, I was too lazy
+# to write shell quoting routines that are already available in recent
+# Python versions.
+#
+# Using this script
+# -----------------
+# First, it is necessary to check out the working copy from the URL that
+# will host the imported sources. E.g., if the versions of FOO are being
+# imported into svn://example.com/vendor/FOO/current:
+#
+# svn co svn://example.com/vendor/FOO/current wc
+#
+# Then, unpack the sources of the version to be imported:
+#
+# tar xzf foo-1.1.tar.gz
+#
+# Examples below assume the command above created a `foo-1.1' directory.
+# After that, there are three different modes of operation:
+#
+# 1. Fully automatic
+#
+# svn-vendor.py --auto wc foo-1.1
+# svn st wc
+# svn ci wc
+#
+# In this mode, the script fully relies on its heuristics in detection of
+# renames. In many cases, it "just works". There can be spurious moves
+# detected in this mode, though. For example, consider a deleted header
+# that consists of 50 lines of GPL text, 1 line of copyright, and
+# 3 lines of declarations, and a similar unrelated header in the imported
+# sources. From the script's point of view, the files are nearly identical
+# (4 lines removed, 4 lines added, 50 lines unchanged).
+#
+# After the script completes, examine the working copy by doing 'svn diff'
+# and/or 'svn status', paying particular attention to renames. If all the
+# moves are detected correctly, check in the changes in the working copy.
+#
+# 2. Semi-automatic
+#
+# svn-vendor.py --detect moves-foo-1.1.txt wc foo-1.1
+# vi moves-foo-1.1.txt
+# svn-vendor.py --apply moves-foo-1.1.txt wc foo-1.1
+# svn ci wc
+#
+# If the fully automatic mode mis-detected some spurious moves, or did not
+# detect some renames you want to be performed, it is still possible to
+# leverage what the script has detected automatically. First command above
+# does the automatic detection, just as it does in fully automatic mode,
+# but stops short of performing any modification of the working copy.
+# The list of detected copies and renames is saved into a text file,
+# `moves-foo-1.1.txt'.
+#
+# That file can be inspected after the script finishes. Spurious moves can
+# be deleted from the file, and new copies/renames can be added. Then the
+# changes can be applied to the working copy.
+#
+# 3. Manual
+#
+# svn-vendor.py wc foo-1.1
+# (svn-vendor) detect
+# (svn-vendor) move x.c y.c
+# (svn-vendor) move include/1.h include/2.h
+# (svn-vendor) copy include/3.h include/3-copy.h
+# (svn-vendor) lsprep
+# (svn-vendor) save /tmp/renames-to-be-applied.txt
+# (svn-vendor) apply
+#
+# If the automatic detection does not help, it is possible to do the renames
+# manually (similarly to svn_load_dirs.pl). Use the 'help' command to get
+# the list of supported commands and their description. Feel free to play
+# around - since the script does not perform any remote SVN operation,
+# there is no chance to commit the changes accidentally.
+#
+# Notes
+# -----
+# I. The time for rename detection is O(Fs*Fd) + O(Ds*Dd), where Fs is
+# the number of files removed from current directory, Fd is number of files
+# added in imported sources, and Ds/Dd is the same for directories. That is,
+# the running time may become an issue if the numbers of added/removed files
+# go into a few thousands (e.g. if updating Linux kernel 2.6.35 to 3.10).
+# As a workaround, import interim releases first so that the number of
+# renames remains sane at each step. That makes reviewing the renames
+# performed by the script much easier.
+#
+# Enjoy!
+#
+##############################################################################
+
+import argparse
+import cmd
+import difflib
+import filecmp
+import os
+import readline
+import shlex
+import shutil
+import subprocess
+import sys
+
+def name_similarity(n1, n2):
+ '''
+ Function to be used as a key for sorting dirs/files by name matching
+ '''
+ sm = difflib.SequenceMatcher(a=n1, b=n2)
+ return 1.0 - sm.ratio()
+
+
+def filename_sort_key(s):
+ '''
+ Function to sort filenames so that parent directory is always followed
+ by its children. Without it, [ "/a", "/a-b", "/a/b", "/a-b/c" ] would
+ not be sorted correctly.
+ '''
+ return s.replace('/', '\001')
+
+
+def descendant_or_self(path, ancestor):
+ '''
+ Check if path is somewhere in hierarchy under ancestor.
+ '''
+ return path == ancestor or path.startswith(ancestor + os.sep)
+
+def path_rebase(path, old_base, new_base):
+ '''
+ Return a path name that has the same relative path to new_base as path
+ had to old_base. Assumes path is a descendant of old_base.
+ '''
+ if path == old_base:
+ return new_base
+ return os.path.normpath(os.path.join(new_base,
+ os.path.relpath(path, old_base)))
+
+
+def for_all_parents(path, func):
+ '''
+ Invoke func for each parent path.
+ '''
+ d = os.path.dirname(path)
+ while d != "":
+ func(d)
+ d = os.path.dirname(d)
+
+class InvalidUsageException(Exception):
+ '''
+ Raised if command line arguments are invalid
+ '''
+ def __init__(self, cmd, msg):
+ Exception.__init__(self, msg)
+ self.cmd = cmd
+
+
+class NotImplementedException(Exception):
+ '''
+ Raised if some code path is not implemented
+ '''
+ pass
+
+
+# Indexes into FSO.state
+S_WC = 0
+S_IM = 1
+
+class FSO(object):
+ '''
+ File system object (file/dir either in imported dir or in WC)
+ '''
+ def __init__(self):
+ self.wc_path = None
+ self.state = [ "-", "-" ] # '-': absent, 'F': file, 'D': dir
+
+ def status(self):
+ return "[%s%s]" % (self.state[S_WC], self.state[S_IM])
+
+ def orig_reference(self, curpath):
+ if self.wc_path and self.wc_path != curpath:
+ return " (original: %s)" % shlex.quote(self.wc_path)
+ return ""
+
+
+class FSOCollection(dict):
+ '''
+ Collection of FSOs
+ '''
+ def print(self):
+ print(" / Status in working copy (-:absent, F:file, D:dir)")
+ print(" |/ Status in imported sources (-:absent, F:file, D:dir)")
+ for k in sorted(self.keys(), key=filename_sort_key):
+ e = self[k]
+ print("%s %s%s" % (e.status(), shlex.quote(k),
+ e.orig_reference(k)))
+
+ def get(self, path):
+ 'Get existing FSO or create a new one'
+ if path in self:
+ return self[path]
+ e = FSO()
+ self[path] = e
+ return e
+
+ def add(self, path, where, kind):
+ 'Adding entries during initial scan'
+ path = os.path.normpath(path)
+ e = self.get(path)
+ e.state[where] = kind
+ if where == S_WC:
+ e.wc_path = path
+
+ def wc_copy(self, src, dst):
+ 'Handle move in a working copy'
+ keys = list(self.keys())
+ for k in keys:
+ if descendant_or_self(k, src):
+ esrc = self[k]
+ if esrc.state[S_WC] == "-":
+ continue
+ kn = path_rebase(k, src, dst)
+ edst = self.get(kn)
+ if edst.state[S_WC] != "-":
+ # Copying into existing destination.
+ # Caller should've checked this.
+ raise NotImplementedException
+ edst.wc_path = esrc.wc_path
+ edst.state[S_WC] = esrc.state[S_WC]
+
+ def wc_remove(self, path):
+ 'Handle removal in a working copy'
+ keys = list(self.keys())
+ for k in keys:
+ if descendant_or_self(k, path):
+ self[k].state[S_WC] = "-"
+
+
+class ConfigOpt(object):
+ 'Helper class - single option (string)'
+ def __init__(self, value, helpmsg):
+ self.value = value
+ self.helpmsg = helpmsg
+
+ def set(self, new_value):
+ self.value = new_value
+
+ def __str__(self):
+ return "<none>" if self.value is None else "`%s'" % self.value
+
+
+class ConfigOptInt(ConfigOpt):
+ 'Helper class - single option (integer)'
+ def set(self, new_value):
+ try:
+ self.value = int(new_value)
+ except ValueError:
+ raise InvalidUsageException(None, "Value must be integer")
+
+ def __str__(self):
+ return "%d" % self.value
+
+
+class Config(dict):
+ '''
+ Store configuration options.
+ '''
+ def add_option(self, name, cfgopt):
+ self[name] = cfgopt
+
+ def set(self, name, value):
+ if name not in self:
+ raise InvalidUsageException(None,
+ "Unknown config variable '%s'" % name)
+ self[name].set(value)
+
+ def get(self, name):
+ if name not in self:
+ raise NotImplementedException()
+ return self[name].value
+
+ def print(self):
+ for k in sorted(self):
+ o = self[k]
+ print("# %s" % o.helpmsg)
+ print("%-20s: %s" % (k, str(o)))
+ print()
+
+
+class SvnVndImport(cmd.Cmd):
+ '''
+ Main driving class.
+ '''
+ intro = "Welcome to SVN vendor import helper. " + \
+ "Type help or ? to list commands.\n"
+ prompt = "(svn-vendor) "
+ prepare_ops = []
+
+ def __init__(self, wcdir, importdir, svninfo):
+ cmd.Cmd.__init__(self)
+ self.wcdir = wcdir
+ self.importdir = importdir
+ self.svninfo = svninfo
+ self.config = Config()
+ self.config.add_option('save-diff-copied',
+ ConfigOpt(None, "Save 'svn diff' output on the " +
+ "moved/copied files and directories to this " +
+ "file as part of 'apply'"))
+ self.config.add_option('dir-similarity',
+ ConfigOptInt(600, "Similarity between dirs to assume " +
+ "a copy/move [0..1000]"))
+ self.config.add_option('file-similarity',
+ ConfigOptInt(600, "Similarity between files to assume a " +
+ "copy/move [0..1000]"))
+ self.config.add_option('file-min-lines',
+ ConfigOptInt(10, "Minimal number of lines in a file for " +
+ "meaningful comparison"))
+ self.config.add_option('verbose',
+ ConfigOptInt(3, "Verbosity of the output [0..5]"))
+ try:
+ self.termwidth = os.get_terminal_size()[0]
+ except OSError:
+ # Not running in a terminal - probably redirected to file
+ self.termwidth = 150 # arbitrary number
+
+ def info(self, level, msg):
+ 'Print message with specified verbosity'
+ if level <= self.config.get('verbose'):
+ print(msg, flush=True)
+
+ def scan(self):
+ self.items = FSOCollection()
+ self.info(1, "Scanning working copy directory...")
+ self.get_lists(self.wcdir, S_WC)
+ self.info(1, "Scanning imported directory...")
+ self.get_lists(self.importdir, S_IM)
+
+ def get_lists(self, top, where):
+ for d, dn, fn in os.walk(top, followlinks=True):
+ dr = os.path.relpath(d, top)
+ # If under .svn directory at the top (SVN 1.7+) or has .svn
+ # in the path (older SVN), ignore
+ if descendant_or_self(dr, '.svn') or \
+ os.path.basename(dr) == '.svn' or \
+ (os.sep + '.svn' + os.sep) in dr:
+ continue
+ if dr != '.':
+ self.items.add(dr, where, "D")
+ for f in fn:
+ fr = os.path.normpath(os.path.join(dr, f))
+ self.items.add(fr, where, "F")
+
+ def onecmd(self, str):
+ 'Override for checking number of arguments'
+ try:
+ return cmd.Cmd.onecmd(self, str)
+ except InvalidUsageException as e:
+ if e.cmd is not None:
+ print("!!! Invalid usage of `%s' command: %s" % (e.cmd, e))
+ print()
+ self.onecmd("help " + e.cmd)
+ else:
+ print("!!! %s" % e)
+
+ def parse_args(self, line, nargs, cmd):
+ 'Parse arguments for a command'
+ args = shlex.split(line)
+ if len(args) != nargs:
+ raise InvalidUsageException(cmd, "expect %d arguments" % nargs)
+ return args
+
+ def run_svn(self, args_fixed, args_split=[]):
+ 'Run SVN command(s), potentially splitting long argument lists'
+ rv = True
+ pos = 0
+ atatime = 100
+ output = ""
+ while pos < len(args_split) or (pos == 0 and len(args_split) == 0):
+ svnargs = ['svn'] + args_fixed + args_split[pos : pos + atatime]
+ pos += atatime
+ self.info(5, "Running: " + " ".join(map(shlex.quote, svnargs)))
+ p = subprocess.Popen(args=svnargs, stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE, cwd=self.wcdir)
+ so, se = p.communicate()
+ if p.returncode != 0:
+ print("`%s' exited with %d status:" %
+ (" ".join(map(shlex.quote, svnargs)), p.returncode))
+ print(se.decode())
+ rv = False
+ else:
+ output += so.decode()
+ return rv, output
+
+ def copy_or_move(self, op, src, dst):
+ 'Handle copy or move operation'
+ if src not in self.items or self.items[src].state[S_WC] == "-":
+ raise InvalidUsageException(None,
+ "Nothing known about `%s'" % src)
+ if dst in self.items and self.items[dst].state[S_WC] != "-":
+ raise InvalidUsageException(None,
+ "Destination path `%s' already exists" % dst)
+ # Check that we're not creating dst under a file (not a dir)
+ new_dirs = []
+ def check_parent(d):
+ if d not in self.items or self.items[d].state[S_WC] == "-":
+ new_dirs.append(d)
+ elif self.items[d].state[S_WC] == "F":
+ raise InvalidUsageException(None,
+ "Destination path `%s' created under `%s' " +
+ "which is a file" % (dst, d))
+ for_all_parents(dst, check_parent)
+ # All ok, record new directories that may be created
+ for d in new_dirs:
+ self.items.get(d).state[S_WC] = "D"
+ # Record the operation and update the FSO collection
+ self.prepare_ops.append((op, src, dst))
+ self.items.wc_copy(src, dst)
+ if op == "mv":
+ self.items.wc_remove(src)
+
+ def remove(self, path):
+ if path not in self.items or self.items[path].state[S_WC] == "-":
+ raise InvalidUsageException(None,
+ "Nothing known about `%s'" % path)
+ self.prepare_ops.append(("rm", path))
+ self.items.wc_remove(path)
+
+ def similarity_file(self, src, dst, threshold, lst_removal):
+ 'Compare two files, return similarity ratio on 0..1000 scale'
+ if self.items[src].state[S_WC] != "F":
+ return 0
+ # Source is in working copy
+ fn1 = os.path.join(self.wcdir, self.items[src].wc_path)
+ # Destination is in imported dir
+ fn2 = os.path.join(self.importdir, dst)
+ minlines = self.config.get('file-min-lines')
+ try:
+ f1 = open(fn1, 'r')
+ l1 = f1.readlines()
+ f1.close()
+ if len(l1) < minlines:
+ return 0
+ f2 = open(fn2, 'r')
+ l2 = f2.readlines()
+ f2.close()
+ if len(l2) < minlines:
+ return 0
+ sm = difflib.SequenceMatcher(a=l1, b=l2)
+ return int(1000 * sm.quick_ratio())
+ except UnicodeDecodeError:
+ # Oops, file seems to be binary. Fall back to comparing whole
+ # file contents.
+ if filecmp.cmp(fn1, fn2, shallow=False):
+ return 1000
+ return 0
+
+ def _similarity_dir(self, src, dst, get_file_similarity, lst_removal):
+ 'Iterate over FSOs, using callback to compare file entries'
+ common = 0
+ total = 0
+ for xsrc in self.items:
+ if xsrc.startswith(src + os.sep):
+ esrc = self.items[xsrc]
+ if esrc.state[S_WC] == "-":
+ # Source not in WC - ignore for similarity calculation
+ continue
+ skip = False
+ if lst_removal is not None:
+ for i in lst_removal:
+ if descendant_or_self(xsrc, i):
+ skip = True
+ if skip:
+ # Moved to another place, do not consider in score
+ continue
+ total += 1000
+ xdst = path_rebase(xsrc, src, dst)
+ if xdst not in self.items:
+ # Destination not in imported sources - non-similar item
+ continue
+ edst = self.items[xdst]
+ if edst.state[S_IM] == esrc.state[S_WC]:
+ if esrc.state[S_WC] == "D":
+ common += 1000
+ else:
+ common += get_file_similarity(xsrc, xdst)
+ if total == 0:
+ # No files/subdirs in source directory - avoid copying empty dirs
+ return 0
+ return 1000 * common / total
+
+ def similarity_dir(self, src, dst, threshold, lst_removal):
+ '''
+ Compare two dirs recursively, return similarity ratio on
+ 0..1000 scale.
+ '''
+ common = 0
+ total = 0
+ # Quickly estimate upper boundary by comparing file names. Only
+ # concern ourselves with files in source directory. I.e., if
+ # files were added after the move in the destination directory,
+ # it's ok. If most of the files from the source directory were
+ # removed, the directory is not considered similar - instead,
+ # file move detection would move files one by one.
+ upper = self._similarity_dir(src, dst, lambda s, d: 1000, lst_removal)
+ if upper <= threshold:
+ # Even the best estimate is worse than current cut-off
+ return 0
+ # Okay, looks roughly similar. Now redo the above procedure, but also
+ # compare the file content.
+ return self._similarity_dir(src, dst,
+ lambda s, d: self.similarity_file(s, d, 0, lst_removal),
+ lst_removal)
+
+ def similar(self, src, dst, threshold=0, lst_removal=None):
+ 'Compare two FSOs, source in WC and destination in imported dir'
+ if src not in self.items:
+ print("Source `%s' not in the working copy" % src)
+ return
+ xsrc = self.items[src]
+ if xsrc.state[S_WC] == "-":
+ print("Source `%s' not in the working copy" % src)
+ return
+ if dst not in self.items:
+ print("Destination `%s' not in imported sources" % dst)
+ return
+ xdst = self.items[dst]
+ if xdst.state[S_IM] == "-":
+ print("Destination `%s' not in imported sources" % dst)
+ return
+ if xsrc.state[S_WC] != xdst.state[S_IM]:
+ # Different kinds - definitely not the same object
+ return 0
+ if xsrc.state[S_WC] == "D":
+ return self.similarity_dir(src, dst, threshold, lst_removal)
+ else:
+ return self.similarity_file(src, dst, threshold, lst_removal)
+
+ def handle_op(self, op_tuple):
+ 'Handle one SVN operation, recorded as a tuple'
+ def x_mv(src, dst):
+ self.info(2, " Move `%s' to `%s'" % (src, dst))
+ self.copy_or_move("mv", src, dst)
+ def x_cp(src, dst):
+ self.info(2, " Copy `%s' to `%s'" % (src, dst))
+ self.copy_or_move("cp", src, dst)
+ def x_rm(path):
+ self.info(2, " Remove `%s'" % path)
+ self.remove(path)
+ known_ops = {
+ # key: (nargs, handler)
+ 'cp' : (3, x_cp),
+ 'mv' : (3, x_mv),
+ 'rm' : (2, x_rm),
+ }
+ if len(op_tuple) == 0:
+ raise InvalidUsageException
+ op = op_tuple[0]
+ if op not in known_ops:
+ return False
+ nargs, func = known_ops[op]
+ if nargs != len(op_tuple):
+ return False
+ func(*op_tuple[1:])
+ return True
+
+ def detect(self, thresholds):
+ 'Helper for finding copy/move destinations'
+ ilst = []
+ wlst = {}
+ ilst_map = {}
+ for p in self.items:
+ e = self.items[p]
+ if e.state[S_WC] != "-" and e.state[S_IM] == "-":
+ wlst[p] = [] # wlst hash stores copy destinations
+ elif e.state[S_WC] == "-" and e.state[S_IM] != "-":
+ # ilst just lists destination paths as tuples with node kind
+ ilst.append((e.state[S_IM], p))
+ iteration = 0
+ # Do not apply operations immediately - we'll need to post-process
+ # them to account for files/dirs moved inside a moved parent dir.
+ ops = []
+ to_be_removed = []
+ def get_renamed_name(path, rename_ops):
+ '''
+ Check if path was renamed/removed in the recorded operations,
+ return new name.
+ '''
+ for op_tuple in rename_ops:
+ # Since copies do not remove the source file, ignore them.
+ # We push no 'rm' ops in this function
+ if op_tuple[0] == "mv":
+ src = op_tuple[1]
+ dst = op_tuple[2]
+ if descendant_or_self(path, src):
+ path = path_rebase(path, src, dst)
+ return path
+
+ while len(wlst):
+ iteration += 1
+ self.info(2, ("Iteration %d: Possible sources: %d, " +
+ "possible destinations: %d") %
+ (iteration, len(wlst), len(ilst)))
+ ndst = len(ilst)
+ for idx, (nk, dst) in enumerate(sorted(ilst,
+ key=lambda s: filename_sort_key(s[1]))):
+ class SkipDestFile(Exception):
+ pass
+ # Check if moved as a part of a parent directory.
+ def check_moved_parent(xdst):
+ if xdst in ilst_map:
+ src = path_rebase(dst, xdst, ilst_map[xdst])
+ # Did it exist in copied directory?
+ if src in self.items and \
+ self.items[src].state[S_WC] == nk:
+ sim = self.similar(src, dst, thresholds[nk],
+ to_be_removed)
+ if sim > thresholds[nk]:
+ self.info(2, (" [%04d/%04d] Skipping `%s' " +
+ "(copied as part of `%s')") %
+ (idx, ndst, dst, xdst))
+ raise SkipDestFile
+ # Copied, not similar - search for other sources
+ raise StopIteration
+ try:
+ for_all_parents(dst, check_moved_parent)
+ except SkipDestFile:
+ continue
+ except StopIteration:
+ pass
+ self.info(2, (" [%04d/%04d] Looking for possible source " +
+ "for `%s'") % (idx, ndst, dst))
+ bestsrc = None
+ # Won't even consider those lower than threshold
+ bestsim = thresholds[nk]
+ for src in sorted(wlst.keys(),
+ key=lambda x: name_similarity(x, dst)):
+ sim = self.similar(src, dst, bestsim, to_be_removed)
+ if sim > bestsim:
+ self.info(3, " [similarity %4d] %s" % (sim, src))
+ bestsim = sim
+ bestsrc = src
+ if bestsim == 1000:
+ # No chance we're finding anything better
+ break
+ if bestsrc is not None:
+ wlst[bestsrc].append(dst)
+ ilst_map[dst] = bestsrc
+
+ # Discovered all copies/moves, now record them.
+ new_wlst = {}
+ for src in sorted(wlst.keys(), key=filename_sort_key):
+ dlist = wlst[src]
+ if len(dlist) == 0:
+ continue
+ if len(dlist) == 1:
+ ops.append(("mv", src, dlist[0]))
+ to_be_removed.append(src)
+ else:
+ # We don't remove the source here, it will be done when
+ # the changes are applied (it will remove all the WC files
+ # not found in imported sources). Avoiding removal here
+ # simplifies operation sorting below, since we would not
+ # be concerned with source file/dir disappearing before
+ # it is copied to its destination.
+ to_be_removed.append(src)
+ for d in dlist:
+ ops.append(("cp", src, d))
+ # If we copied something - recheck parent source directories.
+ # Since some source file/dir was scheduled to be removed,
+ # this may have increased the similarity to some destination.
+ def recheck_parent(x):
+ if x in wlst and len(wlst) == 0:
+ new_wlst[x] = []
+ for_all_parents(src, recheck_parent)
+
+ # At this point, if we're going to have the next iteration, we
+ # are only concerned about directories (by the way new_wlst is
+ # created above). So, filter out all files from ilst as well.
+ wlst = new_wlst
+ ilst = list(filter(lambda t: t[0] == 'D', ilst))
+
+ # Finished collecting the operations - now can post-process and
+ # apply them. First, sort copies/moves by destination (so that
+ # parent directories are created before files/subdirs are
+ # copied/renamed inside)
+ ops = sorted(ops, key=lambda op: filename_sort_key(op[2]))
+ for i, op_tuple in enumerate(ops):
+ # For each operation, go over its precedents to see if the source
+ # has been renamed. If it is, find out new name.
+ op = op_tuple[0]
+ src = get_renamed_name(op_tuple[1], reversed(ops[:i]))
+ if src != op_tuple[2]:
+ # Unless it became the same file after renames
+ try:
+ # Try to remove the destination, if it existed
+ self.remove(op_tuple[2])
+ except InvalidUsageException:
+ # Okay, it didn't exist
+ pass
+ self.handle_op((op, src, op_tuple[2]))
+
+ def do_detect(self, arg):
+ '''
+ detect : auto-detect possible moves (where source/destination name
+ is unique). If not all moves are applicable, save move list,
+ edit and load.
+ '''
+ self.parse_args(arg, 0, "detect")
+ self.detect({ "D": self.config.get('dir-similarity'),
+ "F": self.config.get('file-similarity')})
+
+ def do_apply(self, arg):
+ '''
+ apply : Perform copies/renames; then copy imported sources into
+ the working copy. Modifies working copy. Exits after
+ completion.
+ '''
+ self.info(1, "Copying imported sources into working copy...")
+ # Perform the recorded copies/moves/removals
+ self.info(2, " Preparatory operations (copies/renames/removals)")
+ to_be_diffed = []
+ for o in self.prepare_ops:
+ op = o[0]
+ if op == "mv":
+ self.run_svn(["mv", "--parents", o[1], o[2]])
+ to_be_diffed.append(o[2])
+ elif op == "cp":
+ self.run_svn(["cp", "--parents", o[1], o[2]])
+ to_be_diffed.append(o[2])
+ elif op == "rm":
+ # --force, as the removed path is likely created as a result
+ # of previous copy/rename
+ self.run_svn(["rm", "--force", o[1]])
+ dirs_added = []
+ dirs_removed = []
+ files_added = []
+ files_removed = []
+ self.info(2, " Creating dirs and copying files...")
+ for i in sorted(self.items.keys()):
+ e = self.items[i]
+ nk_wc = e.state[S_WC]
+ nk_im = e.state[S_IM]
+ flg = None
+ if nk_wc == "-":
+ # Absent in working copy
+ if nk_im == "D":
+ # Directory added
+ os.mkdir(os.path.join(self.wcdir, i))
+ dirs_added.append(i)
+ flg = "(added dir)"
+ elif nk_im == "F":
+ # New file added
+ shutil.copyfile(os.path.join(self.importdir, i),
+ os.path.join(self.wcdir, i))
+ files_added.append(i)
+ flg = "(added file)"
+ else:
+ # Not in imported sources, not in WC (moved
+ # away/removed) - nothing to do
+ pass
+ elif nk_wc == "F":
+ # File in a working copy
+ if nk_im == "D":
+ # File replaced with a directory. See comment above.
+ self.run_svn(["rm", "--force", i])
+ os.mkdir(os.path.join(self.wcdir, i))
+ dirs_added.append(i)
+ flg = "(replaced file with dir)"
+ elif nk_im == "F":
+ # Was a file, is a file - just copy contents
+ shutil.copyfile(os.path.join(self.importdir, i),
+ os.path.join(self.wcdir, i))
+ flg = "(copied)"
+ else:
+ # Was a file, removed
+ files_removed.append(i)
+ flg = "(removed file)"
+ elif nk_wc == "D":
+ # Directory in a working copy
+ if nk_im == "D":
+ # Was a directory, is a directory - nothing to do
+ pass
+ elif nk_im == "F":
+ # Directory replaced with file. Need to remove dir
+ # immediately, as bulk removals/additions assume new files
+ # and dirs already in place.
+ self.run_svn(["rm", "--force", i])
+ shutil.copyfile(os.path.join(self.importdir, i),
+ os.path.join(self.wcdir, i))
+ files_added.append(i)
+ flg = "(replaced dir with file)"
+ else:
+ # Directory removed
+ dirs_removed.append(i)
+ flg = "(removed dir)"
+ if flg is not None:
+ self.info(4, " %s %s %s" % (e.status(), i, flg))
+ # Filter files/directories removed as a part of parent directory
+ files_removed = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_removed, files_removed))
+ dirs_removed = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_removed, dirs_removed))
+ files_added = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_added, files_added))
+ dirs_added = list(filter(lambda x: os.path.dirname(x) not in
+ dirs_added, dirs_added))
+ self.info(2, " Running SVN add/rm commands");
+ if len(dirs_added):
+ self.run_svn(["add"], dirs_added)
+ if len(files_added):
+ self.run_svn(["add"], files_added)
+ if len(dirs_removed):
+ self.run_svn(["rm"], dirs_removed)
+ if len(files_removed):
+ self.run_svn(["rm"], files_removed)
+ # Save the diff for the copied/moved items
+ diff_save = self.config.get('save-diff-copied')
+ if diff_save is not None:
+ self.info(2, " Saving 'svn diff' on copied files/dirs to `%s'" %
+ diff_save)
+ to_be_diffed = list(filter(lambda x: os.path.dirname(x) not in
+ to_be_diffed, to_be_diffed))
+ if len(to_be_diffed):
+ try:
+ rv, out = self.run_svn(["diff"], to_be_diffed)
+ except UnicodeDecodeError:
+ # Some binary files not marked with appropriate MIME type,
+ # or broken text files
+ rv, out = (True, "WARNING: diff contained binary files\n")
+ else:
+ rv, out = (True, "")
+ if rv:
+ f = open(diff_save, "w")
+ f.write(out)
+ f.close()
+ # Exiting, as the resulting working copy can no longer be used
+ # for move analysis
+ self.info(1, "Done. Exiting; please examine the working copy " +
+ "and commit.")
+ return True
+
+ def do_similarity(self, arg):
+ '''
+        similarity SRC DST : estimate whether SRC could be potential source
+ for DST (0=no match, 1000=perfect match)
+ '''
+ src, dst = self.parse_args(arg, 2, "similarity")
+ sim = self.similar(src, dst)
+ if sim is not None:
+ print("Similarity between source `%s' and destination `%s': %4d" %
+ (src, dst, sim))
+
+ def do_set(self, arg):
+ '''
+ set : display current settings
+ set CFG VAL : set a config variable
+ '''
+ if arg.strip() == '':
+ self.config.print()
+ else:
+ cfg, val = self.parse_args(arg, 2, "set")
+ self.config.set(cfg, val)
+
+ def do_move(self, arg):
+ '''
+ move SRC DST : Perform a move from source to destination
+ '''
+ src, dst = self.parse_args(arg, 2, "move")
+ self.copy_or_move("mv", src, dst)
+
+ def do_copy(self, arg):
+ '''
+ copy SRC DST : Perform a copy from source to destination
+ '''
+ src, dst = self.parse_args(arg, 2, "copy")
+ self.copy_or_move("cp", src, dst)
+
+ def do_remove(self, arg):
+ '''
+ remove PATH : Remove a path
+ '''
+ path = self.parse_args(arg, 1, "remove")[0]
+ self.copy_or_move("rm", path)
+
+ def do_lsprep(self, arg):
+ '''
+ lsprep : List the currently recorded moves/copies/removals
+ '''
+ self.parse_args(arg, 0, "lsprep")
+ colsz = int((self.termwidth - 14) / 2)
+ if len(self.prepare_ops):
+ print("Currently recorded preparatory operations:")
+ print()
+ print("%5s %s %-*s %-*s" %
+ ("#", "Op", colsz, "Source", colsz, "Destination"))
+ for id, o in enumerate(self.prepare_ops):
+ if id % 10 == 0:
+ print("%5s %s %*s %*s" %
+ ("-"*5, "--", colsz, "-"*colsz, colsz, "-"*colsz))
+ if len(o) == 3:
+ print("%5d %s %-*s %-*s" %
+ (id, o[0], colsz, o[1], colsz, o[2]))
+ else:
+ print("%5d %s %-*s" % (id, o[0], colsz, o[1]))
+ print()
+ else:
+ print("No copies/moves/removals recorded")
+ print()
+
+ def do_save(self, arg):
+ '''
+ save FILENAME : Save current preparation operations to a file
+ '''
+ fn = self.parse_args(arg, 1, "save")[0]
+ f = open(fn, 'w')
+ longestname = 0
+ for o in self.prepare_ops:
+ if len(o[1]) > longestname:
+ longestname = len(o[1])
+ if len(o) == 3 and len(o[2]) > longestname:
+ longestname = len(o[2])
+ for o in self.prepare_ops:
+ if len(o) == 2:
+ f.write("svn %s %-*s\n" %
+ (o[0], longestname, shlex.quote(o[1])))
+ else:
+ f.write("svn %s %-*s %-*s\n" %
+ (o[0], longestname, shlex.quote(o[1]),
+ longestname, shlex.quote(o[2])))
+ pass
+ f.close()
+
+ def do_load(self, arg):
+ '''
+ load FILENAME : Load/append preparation operations from a file
+ '''
+ fn = self.parse_args(arg, 1, "load")[0]
+ self.info(1, "Performing operations from `%s'" % fn)
+ f = open(fn, 'r')
+ for l in f.readlines():
+ if l[0] == '#':
+ continue
+ args = shlex.split(l)
+ try:
+ if len(args) < 2 or args[0] != 'svn':
+ raise InvalidUsageException(None, "")
+ self.handle_op(args[1:])
+ except InvalidUsageException as e:
+ # Rethrow
+ raise InvalidUsageException(None,
+ "Invalid line in file: %s(%s)" % (l, e))
+ f.close()
+
+ def do_svninfo(self, arg):
+ '''
+ svninfo : Display SVN info on the working copy (debug)
+ '''
+ self.parse_args(arg, 0, "svninfo")
+ print(str(self.svninfo))
+
+ def do_printlst(self, arg):
+ '''
+ printlst WHAT : Print list of files; WHAT is one of {dir,file} (debug)
+ '''
+ self.parse_args(arg, 0, "printlst")
+ self.items.print()
+
+ def do_help(self, arg):
+ '''
+ help [COMMAND] : Print the help message
+ '''
+ cmd.Cmd.do_help(self, arg)
+
+ def do_EOF(self, arg):
+ '''
+ Quit the script
+ '''
+ return True
+
+ def do_quit(self, arg):
+ '''
+ quit : Quit the script
+ '''
+ return True
+
+
+if __name__ == '__main__':
+ parser = argparse.ArgumentParser(
+ description="Prepare a working copy for SVN vendor import.")
+ parser.add_argument('wcdir',
+ help="Path to working copy (destination of import)")
+ parser.add_argument('importdir',
+ help="Path to imported sources (source of import)")
+ grp = parser.add_mutually_exclusive_group()
+ grp.add_argument('--auto', action='store_true',
+ help="Automatic mode: detect moves, apply them and copy sources")
+ grp.add_argument('--detect', metavar='FILE',
+ help="Semi-automatic mode: detect moves and save them to FILE")
+ grp.add_argument('--apply', metavar='FILE',
+ help="Semi-automatic mode: apply the moves from FILE " +
+ "and copy the sources")
+ parser.add_argument('--save', metavar='FILE',
+ help="Automatic mode: save moves to FILE after detection, " +
+ "then proceed to apply the changes")
+ parser.add_argument('--config', metavar=('OPT','VALUE'), action='append',
+ nargs=2, help="Set configuration option OPT to VALUE")
+ args = parser.parse_args()
+ p = subprocess.Popen(args=['svn', 'info', args.wcdir],
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ so, se = p.communicate()
+ if p.returncode != 0:
+ print("%s: does not appear to be SVN working copy." % args.wcdir)
+ print("`svn info' exited with status %d and returned:" % p.returncode)
+ print()
+ print(se.decode())
+ sys.exit(1)
+ imp = SvnVndImport(args.wcdir, args.importdir, so.decode())
+ if args.config:
+ try:
+ for o, v in args.config:
+ imp.config.set(o, v)
+ except InvalidUsageException as e:
+ parser.error(e)
+ imp.scan()
+ if args.auto:
+ imp.onecmd("detect")
+ if args.save:
+ imp.onecmd("save " + shlex.quote(args.save))
+ imp.onecmd("apply")
+ elif args.detect:
+ imp.onecmd("detect")
+ imp.onecmd("save " + shlex.quote(args.detect))
+ elif args.apply:
+ imp.onecmd("load " + shlex.quote(args.apply))
+ imp.onecmd("apply")
+ else:
+ imp.cmdloop()
diff --git a/tools/dev/aprerr.txt b/tools/dev/aprerr.txt
index 7b532db..281c424 100644
--- a/tools/dev/aprerr.txt
+++ b/tools/dev/aprerr.txt
@@ -1,3 +1,4 @@
+# This file is used by which-error.py and gen_base.py:write_errno_table()
APR_SUCCESS = 0
SOCBASEERR = 10000
SOCEPERM = 10001
diff --git a/tools/dev/benchmarks/RepoPerf/ClearMemory.cpp b/tools/dev/benchmarks/RepoPerf/ClearMemory.cpp
new file mode 100644
index 0000000..06ef6f5
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/ClearMemory.cpp
@@ -0,0 +1,55 @@
+/* ClearMemory.cpp --- A simple Windows memory cleaning tool
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "targetver.h"
+
+#include <Windows.h>
+
+#include <stdio.h>
+#include <tchar.h>
+
+int _tmain(int argc, _TCHAR* argv[])
+{
+ // Get the current memory usage stats
+ MEMORYSTATUSEX statex;
+ statex.dwLength = sizeof (statex);
+ GlobalMemoryStatusEx(&statex);
+
+ // (Clean) cache memory will be listed under "available".
+ // So, allocate all available RAM, touch it and release it again.
+ unsigned char *memory = new unsigned char[statex.ullAvailPhys];
+ if (memory)
+ {
+ // Make every page dirty.
+ for (DWORDLONG i = 0; i < statex.ullAvailPhys; i += 4096)
+ memory[i]++;
+
+ // Give everything back to the OS.
+ // The in-RAM file read cache is empty now. There may still be bits in
+ // the swap file as well as dirty write buffers. But we don't care
+ // much about these here ...
+      delete[] memory;
+ }
+
+ return 0;
+}
+
diff --git a/tools/dev/benchmarks/RepoPerf/TimeWin.cpp b/tools/dev/benchmarks/RepoPerf/TimeWin.cpp
new file mode 100644
index 0000000..4acab99
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/TimeWin.cpp
@@ -0,0 +1,118 @@
+/* TimeWin.cpp --- A simple Windows tool inspired by Unix' "time".
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "targetver.h"
+
+#include <Windows.h>
+
+#include <stdio.h>
+#include <tchar.h>
+
+void usage()
+{
+ _tprintf(_T("Execute a command, redirect its stdout to NUL and print\n"));
+ _tprintf(_T("execution times ELAPSED\\tUSER\\tKERNEL in seconds.\n"));
+ _tprintf(_T("\n"));
+ _tprintf(_T("Usage: TimeWin.EXE COMMAND [PARAMETERS]\n"));
+}
+
+LPCTSTR skip_first_arg(LPCTSTR targv)
+{
+ LPCTSTR s = _tcschr(targv, ' ');
+ while (s && *s == ' ')
+ ++s;
+
+ return s;
+}
+
+double as_seconds(FILETIME time)
+{
+ return (double)*reinterpret_cast<LONGLONG *>(&time) / 10000000.0;
+}
+
+int _tmain(int argc, LPTSTR argv[])
+{
+ // Minimal CL help support
+ if (argc < 2 || _tcscmp(argv[1], _T("/?")) == 0)
+ {
+ usage();
+ return 0;
+ }
+
+ // Get a file handle for NUL.
+ SECURITY_ATTRIBUTES sa;
+ sa.nLength = sizeof(sa);
+ sa.lpSecurityDescriptor = NULL;
+ sa.bInheritHandle = TRUE;
+
+ HANDLE nul = CreateFile(_T("nul"), FILE_APPEND_DATA, FILE_SHARE_WRITE,
+ &sa, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL);
+
+ // Construct a process startup info that uses the same handles as this
+ // one but redirects stdout to NUL.
+ STARTUPINFO startup_info;
+ GetStartupInfo(&startup_info);
+ startup_info.dwFlags |= STARTF_USESTDHANDLES;
+ startup_info.hStdOutput = nul;
+
+ // Execute the command line.
+ PROCESS_INFORMATION process_info;
+  CreateProcess(NULL, _tcsdup(skip_first_arg(GetCommandLine())), NULL, NULL,
+ TRUE, NORMAL_PRIORITY_CLASS, NULL, NULL, &startup_info,
+ &process_info);
+
+ // Get a handle with the needed access rights to the child process.
+ HANDLE child = INVALID_HANDLE_VALUE;
+ DuplicateHandle(GetCurrentProcess(), process_info.hProcess,
+ GetCurrentProcess(), &child,
+ PROCESS_QUERY_INFORMATION | SYNCHRONIZE, FALSE, 0);
+
+ // Wait for the child to finish.
+ // If there was problem earlier (application not found etc.), this will fail.
+ bool success = false;
+ if (WaitForSingleObject(child, INFINITE) == WAIT_OBJECT_0)
+ {
+ // Finally, query the timers and show the result
+ FILETIME start_time, end_time, user_time, kernel_time;
+ if (GetProcessTimes(child, &start_time, &end_time, &kernel_time,
+ &user_time))
+ {
+ _tprintf(_T("%1.3f\t%1.3f\t%1.3f\n"),
+ as_seconds(end_time) - as_seconds(start_time),
+ as_seconds(user_time), as_seconds(kernel_time));
+ success = true;
+ }
+ }
+
+ // In case of failure, give some indication that something went wrong.
+ if (!success)
+    _tprintf(_T("?.???\t?.???\t?.???\n"));
+
+ // Be good citizens and clean up our mess
+ CloseHandle(child);
+ CloseHandle(process_info.hThread);
+ CloseHandle(process_info.hProcess);
+
+ CloseHandle(nul);
+
+ return 0;
+}
diff --git a/tools/dev/benchmarks/RepoPerf/copy_repo.py b/tools/dev/benchmarks/RepoPerf/copy_repo.py
new file mode 100644
index 0000000..a95a82d
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/copy_repo.py
@@ -0,0 +1,313 @@
+#!/usr/bin/env python
+#
+# copy_repo.py: create multiple, interleaved copies of a set of repositories.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import random
+import shutil
+import sys
+
+class Separators:
+ """ This class is a container for dummy / filler files.
+ It will be used to create spaces between repository
+ versions on disk, i.e. to simulate some aspect of
+ real-world FS fragmentation.
+
+ It gets initialized with some parent path as well as
+ the desired average file size and will create a new
+ such file with each call to write(). Automatic
+ sharding keeps FS specific overhead at bay. Call
+ cleanup() to eventually delete all dummy files. """
+
+ buffer = "A" * 4096
+ """ Write this non-NULL contents into the dummy files. """
+
+ def __init__(self, path, average_size):
+ """ Initialize and store all dummy files in a '__tmp'
+ sub-folder of PATH. The size of each dummy file
+ is a random value and will be slightly AVERAGE_SIZE
+ kBytes on average. A value of 0 will effectively
+ disable dummy file creation. """
+
+ self.path = os.path.join(path, '__tmp')
+ self.size = average_size
+ self.count = 0
+
+ if os.path.exists(self.path):
+ shutil.rmtree(self.path)
+
+ os.mkdir(self.path)
+
+ def write(self):
+ """ Add a new dummy file """
+
+ # Throw dice of a file size.
+ # Factor 1024 for kBytes, factor 2 for being an average.
+ size = (int)(float(self.size) * random.random() * 2 * 1024.0)
+
+ # Don't create empty files. This also implements the
+ # "average = 0 means no files" rule.
+ if size > 0:
+ self.count += 1
+
+ # Create a new shard for every 1000 files
+ subfolder = os.path.join(self.path, str(self.count / 1000))
+ if not os.path.exists(subfolder):
+ os.mkdir(subfolder)
+
+ # Create and write the file in 4k chunks.
+ # Writing full chunks will result in average file sizes
+ # being slightly above the SELF.SIZE. That's good enough
+ # for our purposes.
+ f = open(os.path.join(subfolder, str(self.count)), "wb")
+ while size > 0:
+ f.write(self.buffer)
+ size -= len(self.buffer)
+
+ f.close()
+
+ def cleanup(self):
+ """ Get rid of all the files (and folders) that we created. """
+
+ shutil.rmtree(self.path)
+
+class Repository:
+  """ Encapsulates key information of a repository. It is being
+ used for copy sources only and contains information about
+ its NAME, PATH, SHARD_SIZE, HEAD revision and MIN_UNPACKED_REV. """
+
+ def _read_config(self, filename):
+ """ Read and return all lines from FILENAME.
+ This will be used to read 'format', 'current' etc. . """
+
+ f = open(os.path.join(self.path, 'db', filename), "rb")
+ lines = f.readlines()
+ f.close()
+
+ return lines
+
+ def __init__(self, parent, name):
+ """ Constructor collecting everything we need to know about
+ the repository NAME within PARENT folder. """
+
+ self.name = name
+ self.path = os.path.join(parent, name)
+
+ self.shard_size = int(self._read_config('format')[1].split(' ')[2])
+ self.min_unpacked_rev = int(self._read_config('min-unpacked-rev')[0])
+ self.head = int(self._read_config('current')[0])
+
+ def needs_copy(self, revision):
+ """ Return True if REVISION is a revision in this repository
+ and is "directly copyable", i.e. is either non-packed or
+ the first rev in a packed shard. Everything else is either
+ not a valid rev or already gets / got copied as part of
+ some packed shard. """
+
+ if revision > self.head:
+ return False
+ if revision < self.min_unpacked_rev:
+ return revision % self.shard_size == 0
+
+ return True
+
+ @classmethod
+ def is_repository(cls, path):
+ """ Quick check that PATH is (probably) a repository.
+ This is mainly to filter out aux files put next to
+ (not inside) the repositories to copy. """
+
+ format_path = os.path.join(path, 'db', 'format')
+ return os.path.isfile(format_path)
+
+class Multicopy:
+ """ Helper class doing the actual copying. It copies individual
+ revisions and packed shards from the one source repository
+ to multiple copies of it. The copies have the same name
+ as the source repo but with numbers 0 .. N-1 appended to it.
+
+ The copy process is being initiated by the constructor
+ (copies the repo skeleton w/o revision contents). Revision
+ contents is then copied by successive calls to the copy()
+ method. """
+
+ def _init_copy(self, number):
+ """ Called from the constructor, this will copy SELF.SOURCE_REPO
+ into NUMBER new repos below SELF.DEST_BASE but omit everything
+ below db/revs and db/revprops. """
+
+ src = self.source_repo.path
+ dst = self.dest_base + str(number)
+
+ # Copy the repo skeleton w/o revs and revprops
+ shutil.copytree(src, dst, ignore=shutil.ignore_patterns('revs', 'revprops'))
+
+ # Add revs and revprops
+ self.dst_revs.append(os.path.join(dst, 'db', 'revs'))
+ self.dst_revprops.append(os.path.join(dst, 'db', 'revprops'))
+
+ os.mkdir(self.dst_revs[number])
+ os.mkdir(self.dst_revprops[number])
+
+ def _copy_packed_shard(self, shard, number):
+ """ Copy packed shard number SHARD from SELF.SOURCE_REPO to
+ the copy NUMBER below SELF.DEST_BASE. """
+
+ # Shards are simple subtrees
+ src_revs = os.path.join(self.src_revs, str(shard) + '.pack')
+ dst_revs = os.path.join(self.dst_revs[number], str(shard) + '.pack')
+ src_revprops = os.path.join(self.src_revprops, str(shard) + '.pack')
+ dst_revprops = os.path.join(self.dst_revprops[number], str(shard) + '.pack')
+
+ shutil.copytree(src_revs, dst_revs)
+ shutil.copytree(src_revprops, dst_revprops)
+
+ # Special case: revprops of rev 0 are never packed => extra copy
+ if shard == 0:
+ src_revprops = os.path.join(self.src_revprops, '0')
+ dest_revprops = os.path.join(self.dst_revprops[number], '0')
+
+ shutil.copytree(src_revprops, dest_revprops)
+
+ def _copy_single_revision(self, revision, number):
+ """ Copy non-packed REVISION from SELF.SOURCE_REPO to the copy
+ NUMBER below SELF.DEST_BASE. """
+
+ shard = str(revision / self.source_repo.shard_size)
+
+ # Auto-create shard folder
+ if revision % self.source_repo.shard_size == 0:
+ os.mkdir(os.path.join(self.dst_revs[number], shard))
+ os.mkdir(os.path.join(self.dst_revprops[number], shard))
+
+ # Copy the rev file and the revprop file
+ src_rev = os.path.join(self.src_revs, shard, str(revision))
+ dest_rev = os.path.join(self.dst_revs[number], shard, str(revision))
+ src_revprop = os.path.join(self.src_revprops, shard, str(revision))
+ dest_revprop = os.path.join(self.dst_revprops[number], shard, str(revision))
+
+ shutil.copyfile(src_rev, dest_rev)
+ shutil.copyfile(src_revprop, dest_revprop)
+
+ def __init__(self, source, target_parent, count):
+ """ Initiate the copy process for the SOURCE repository to
+ be copied COUNT times into the TARGET_PARENT directory. """
+
+ self.source_repo = source
+ self.dest_base = os.path.join(target_parent, source.name)
+
+ self.src_revs = os.path.join(source.path, 'db', 'revs')
+ self.src_revprops = os.path.join(source.path, 'db', 'revprops')
+
+ self.dst_revs = []
+ self.dst_revprops = []
+ for i in range(0, count):
+ self._init_copy(i)
+
+ def copy(self, revision, number):
+ """ Copy (packed or non-packed) REVISION from SELF.SOURCE_REPO
+ to the copy NUMBER below SELF.DEST_BASE.
+
+ SELF.SOURCE_REPO.needs_copy(REVISION) must be True. """
+
+ if revision < self.source_repo.min_unpacked_rev:
+ self._copy_packed_shard(revision / self.source_repo.shard_size, number)
+ else:
+ self._copy_single_revision(revision, number)
+
+def copy_repos(src, dst, count, separator_size):
+ """ Under DST, create COUNT copies of all repositories immediately
+ below SRC.
+
+ All copies will "interleaved" such that we copy each individual
+ revision / packed shard to all target repos first before
+ continuing with the next revision / packed shard. After each
+ round (revision / packed shard) insert a temporary file of
+ SEPARATOR_SIZE kBytes on average to add more spacing between
+ revisions. The temp files get automatically removed at the end.
+
+ Please note that this function will clear DST before copying
+ anything into it. """
+
+ # Remove any remnants from the target folder.
+ # (DST gets auto-created by the first repo copy.)
+ shutil.rmtree(dst)
+
+ # Repositories to copy and the respective copy utilities
+ repositories = []
+ copies = []
+
+ # Find repositories, initiate copies and determine the range of
+ # revisions to copy in total
+ max_revision = 0
+ for name in os.listdir(src):
+ if Repository.is_repository(os.path.join(src, name)):
+ repository = Repository(src, name)
+ repositories.append(repository)
+ copies.append(Multicopy(repository, dst, count))
+
+ if repository.head > max_revision:
+ max_revision = repository.head
+
+ # Temp file collection (spacers)
+ separators = Separators(dst, separator_size)
+
+ # Copy all repos in revision,number-major order
+ for revision in xrange(0, max_revision + 1):
+ for number in xrange(0, count):
+
+ any_copy = False
+ for i in xrange(0, len(repositories)):
+ if repositories[i].needs_copy(revision):
+ any_copy = True
+ copies[i].copy(revision, number)
+
+ # Don't add spacers when nothing got copied (REVISION is
+ # packed in all repositories).
+ if any_copy:
+ separators.write()
+
+ # Now that all data is in position, remove the spacers
+ separators.cleanup()
+
+def show_usage():
+ """ Write a simple CL docstring """
+
+ print "Copies and duplicates repositories in a way that mimics larger deployments."
+ print
+ print "Usage:"
+ print "copy_repo.py SRC DST COUNT SEPARATOR_SIZE"
+ print
+ print "SRC Immediate parent folder of all the repositories to copy."
+ print "DST Folder to copy into; current contents will be lost."
+ print "COUNT Number of copies to create of each source repository."
+ print "SEPARATOR_SIZE Additional spacing, in kBytes, between revisions."
+
+#main function
+if len(sys.argv) == 5:
+ copy_repos(sys.argv[1], sys.argv[2], int(sys.argv[3]), int(sys.argv[4]))
+else:
+ show_usage()
diff --git a/tools/dev/benchmarks/RepoPerf/win_repo_bench.py b/tools/dev/benchmarks/RepoPerf/win_repo_bench.py
new file mode 100644
index 0000000..d470a04
--- /dev/null
+++ b/tools/dev/benchmarks/RepoPerf/win_repo_bench.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python
+#
+# win_repo_bench.py: run repository / server performance tests on Windows.
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import os
+import shutil
+import sys
+import subprocess
+import time
+
+from win32com.shell import shell, shellcon
+
+# Adapt these paths to your needs
+
+# Contains all the REPOSITORIES
+repo_parent = "C:\\repos"
+
+# Where to create working copies
+wc_path = "C:\\wc"
+exe_path = "C:\\develop\\Subversion\\trunk\\Release"
+apache_path = "C:\\develop\\Subversion"
+
+# Test these repositories and in this order.
+# Actual repository names have numbers 0 .. REPETITIONS-1 appended to them
+repositories = ["ruby-f6-nonpacked", "ruby-f7-nonpacked",
+ "ruby-f6-packed", "ruby-f7-packed",
+ "bsd-f6-nonpacked", "bsd-f7-nonpacked",
+ "bsd-f6-packed", "bsd-f7-packed"]
+
+# Basically lists the RA backends to test but as long as all repositories
+# can be accessed using any of them, arbitrary URLs are possible.
+prefixes = ["svn://localhost/", "http://localhost/svn/", "file:///C:/repos/"]
+
+# Number of time to repeat the tests. For each iteration, there must be
+# a separate copy of all repositories.
+repetitions = 3
+
+# Server configurations to test
+configurations = ['slow', 'medium', 'fast']
+svnserve_params = {
+ 'slow':"",
+ 'medium':"-M 256" ,
+ 'fast':"-M 1024 -c 0 --cache-revprops yes --block-read yes --client-speed 1000"
+}
+
+
+def clear_memory():
+ """ Clear in-RAM portion of the file / disk cache """
+ subprocess.call(["ClearMemory.exe"])
+
+def start_server(prefix, config):
+ """ Depending on the url PREFIX, start the corresponding server with the
+ given CONFIGuration. file: and http: access will actually have been
+ configured by set_config(). """
+
+ if prefix[:4] == "svn:":
+ exe = os.path.join(exe_path, "svnserve.exe")
+ command = "cmd.exe /c start " + exe + " -dr " + repo_parent + \
+ " " + svnserve_params[config]
+ subprocess.call(command)
+ time.sleep(2)
+ elif prefix[:5] == "http:":
+ exe = os.path.join(apache_path, 'bin', 'httpd.exe')
+ subprocess.call(exe + " -k start")
+ time.sleep(2)
+
+def stop_server(prefix):
+ """ Depending on the url PREFIX, stop / kill the corresponding server. """
+
+ if prefix[:4] == "svn:":
+ subprocess.call("cmd.exe /c taskkill /im svnserve.exe /f > nul 2>&1")
+ time.sleep(1)
+ elif prefix[:5] == "http:":
+ exe = os.path.join(apache_path, 'bin', 'httpd.exe')
+ subprocess.call(exe + " -k stop")
+ time.sleep(1)
+
+def run_cs_command(state, config, repository, prefix, args):
+ """ Run the client-side command given in ARGS. Log the STATE of the
+ caches, the CONFIG we are using, the REPOSITORY, the url PREFIX
+ and finally the execution times. """
+
+ # Make sure we can create a new working copy if we want to.
+ if os.path.exists(wc_path):
+ shutil.rmtree(wc_path)
+
+ # Select the client to use.
+ if ('null-export' in args) or ('null-log' in args):
+ exe = os.path.join(exe_path, "svn-bench.exe")
+ else:
+ exe = os.path.join(exe_path, "svn.exe")
+
+ # Display the operation
+ repo_title = repository.replace('nonpacked', 'nopack')
+ print state, "\t", repo_title, "\t", prefix, "\t", config, "\t",
+ sys.stdout.flush()
+
+ # Execute the command and show the execution times
+ subprocess.call(["TimeWin.exe", exe] + args)
+
+
+def run_test_cs_sequence(config, repository, run, prefix, command, args):
+ """ Run the client-side COMMAND with the given ARGS in various stages
+ of cache heat-up. Execute the test with server CONFIG on REPOSITORY
+ with the given url PREFIX. """
+
+ # Build the full URL to use. Exports operate on the main dev line only.
+ url = prefix + repository + str(run)
+ if (command == 'export') or (command == 'null-export'):
+ if repository[:3] == 'bsd':
+ url += '/head'
+ else:
+ url += '/trunk'
+
+ # Full set of command arguments
+ args = [command, url] + args
+
+ # Free up caches best we can.
+ clear_memory()
+
+ # Caches are quite cool now and ready to take up new data
+ start_server(prefix, config)
+ run_cs_command("Cold", config, repository, prefix, args)
+ stop_server(prefix)
+
+ # OS caches are quite hot now.
+ # Run operation from hot OS caches but cold SVN caches.
+ start_server(prefix, config)
+ run_cs_command("WarmOS", config, repository, prefix, args)
+ stop_server(prefix)
+
+ # OS caches may be even hotter now.
+ # Run operation from hot OS caches but cold SVN caches.
+ start_server(prefix, config)
+ run_cs_command("HotOS", config, repository, prefix, args)
+
+ # Keep server process and thus the warmed up SVN caches.
+ # Run operation from hot OS and SVN caches.
+ run_cs_command("WrmSVN", config, repository, prefix, args)
+ run_cs_command("HotSVN", config, repository, prefix, args)
+ stop_server(prefix)
+
+
+def set_config(config):
+ """ Switch configuration files to CONFIG. This overwrites the client
+ config file with config.$CONFIG and the server config file with
+ subversion.$CONFIG.conf. """
+
+ appdata = shell.SHGetFolderPath(0, shellcon.CSIDL_APPDATA, None, 0)
+ svn_config_folder = os.path.join(appdata, 'Subversion')
+ svn_config_file = os.path.join(svn_config_folder, 'config')
+ svn_config_template = svn_config_file + '.' + config
+
+ shutil.copyfile(svn_config_template, svn_config_file)
+
+ apache_config_folder = os.path.join(apache_path, 'conf', 'extra')
+ apache_config_file = os.path.join(apache_config_folder, 'subversion.conf')
+ apache_config_template = os.path.join(apache_config_folder,
+ 'subversion.' + config + '.conf')
+
+ shutil.copyfile(apache_config_template, apache_config_file)
+
+
+def run_test_cs_configurations(command, args):
+ """ Run client COMMAND with basic arguments ARGS in all configurations
+ repeatedly with all servers on all repositories. """
+
+ print
+ print command
+ print
+
+ for config in configurations:
+ set_config(config)
+ for prefix in prefixes:
+ # These two must be the innermost loops and must be in that order.
+ # It gives us the coldest caches and the least temporal favoritism.
+ for run in range(0, repetitions):
+ for repository in repositories:
+ run_test_cs_sequence(config, repository, run, prefix, command, args)
+
+def run_admin_command(state, config, repository, args):
+ """ Run the svnadmin command given in ARGS. Log the STATE of the
+ caches, the CONFIG we are using, the REPOSITORY and finally
+ the execution times. """
+
+ exe = os.path.join(exe_path, "svnadmin.exe")
+
+ if config == 'medium':
+ extra = ['-M', '256']
+ elif config == 'fast':
+ extra = ['-M', '1024']
+ else:
+ extra = []
+
+ print state, "\t", repository, "\t", config, "\t",
+ sys.stdout.flush()
+ subprocess.call(["TimeWin.exe", exe] + args + extra)
+
+def run_test_admin_sequence(config, repository, run, command, args):
+ """ Run the svnadmin COMMAND with the given ARGS in various stages
+ of cache heat-up. Execute the test with server CONFIG on
+ REPOSITORY. """
+
+ # Full set of command arguments
+ path = os.path.join(repo_parent, repository + str(run))
+ args = [command, path] + args
+
+ # Free up caches best we can.
+ clear_memory()
+
+ # svnadmin runs can be quite costly and are usually CPU-bound.
+ # Test with "cold" and "hot" CPU caches only.
+ run_admin_command("Cold", config, repository, args)
+ run_admin_command("Hot", config, repository, args)
+
+
+def run_test_admin_configurations(command, args):
+ """ Run svnadmin COMMAND with basic arguments ARGS in all configurations
+ repeatedly on all repositories. """
+
+ print
+ print command
+ print
+
+ for config in configurations:
+ # These two must be the innermost loops and must be in that order.
+ # It gives us the coldest caches and the least temporal favoritism.
+ for run in range(0, repetitions):
+ for repository in repositories:
+ run_test_admin_sequence(config, repository, run, command, args)
+
+
+def bench():
+ """ Run all performance tests. """
+
+ run_test_cs_configurations('log', ['-v', '--limit', '50000'])
+ run_test_cs_configurations('export', [wc_path, '-q'])
+
+ run_test_cs_configurations('null-log', ['-v', '--limit', '50000', '-q'])
+ run_test_cs_configurations('null-export', ['-q'])
+
+ run_test_admin_configurations('dump', ['-q'])
+
+# main function
+bench()
diff --git a/tools/dev/benchmarks/large_dirs/create_bigdir.sh b/tools/dev/benchmarks/large_dirs/create_bigdir.sh
index a389dcc..c2830c8 100755
--- a/tools/dev/benchmarks/large_dirs/create_bigdir.sh
+++ b/tools/dev/benchmarks/large_dirs/create_bigdir.sh
@@ -29,7 +29,7 @@ SVNPATH="$('pwd')/subversion"
# Comment the SVNSERVE line to use file:// instead of svn://.
SVN=${SVNPATH}/svn/svn
-SVNADMIN=${SVNPATH}/svnadmin/svnadmin
+SVNADMIN=${SVNPATH}/svnadmin/svnadmin
SVNSERVE=${SVNPATH}/svnserve/svnserve
# VALGRIND="valgrind --tool=callgrind"
@@ -45,7 +45,7 @@ REPOROOT=/dev/shm
FILECOUNT=1
MAXCOUNT=20000
-# only 1.7 supports server-side caching and uncompressed data transfer
+# only 1.7 supports server-side caching and uncompressed data transfer
SERVEROPTS="-c 0 -M 400"
@@ -162,7 +162,7 @@ run_svn_get() {
fi
}
-# main loop
+# main loop
while [ $FILECOUNT -lt $MAXCOUNT ]; do
echo "Processing $FILECOUNT files in the same folder"
@@ -172,7 +172,7 @@ while [ $FILECOUNT -lt $MAXCOUNT ]; do
mkdir $WC/$FILECOUNT
for i in 1 $sequence; do
echo "File number $i" > $WC/$FILECOUNT/$i
- done
+ done
printf "\tAdding files ... \t"
run_svn add $FILECOUNT -q
@@ -182,7 +182,7 @@ while [ $FILECOUNT -lt $MAXCOUNT ]; do
printf "\tCommit files ... \t"
run_svn_ci $FILECOUNT add
-
+
printf "\tListing files ... \t"
run_svn ls $FILECOUNT
diff --git a/tools/dev/build-svn-deps-win.pl b/tools/dev/build-svn-deps-win.pl
new file mode 100755
index 0000000..d936369
--- /dev/null
+++ b/tools/dev/build-svn-deps-win.pl
@@ -0,0 +1,919 @@
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+# ====================================================================
+#
+# Script to build all the dependencies for Subversion on Windows
+# It's been written for Windows 8 and Visual Studio 2012, but
+# it's entirely possible it will work with older versions of both.
+
+# The goal here is not to necessarily have everyone using this script.
+# But rather to be able to produce binary packages of the dependencies
+# already built to allow developers to be able to download or checkout
+# Subversion and quickly get up a development environment.
+
+# Prerequisites:
+# Perl: http://www.activestate.com/activeperl/downloads
+# Python: http://www.activestate.com/activepython/downloads
+# 7-Zip: http://www.7-zip.org/download.html
+# CMake: http://www.cmake.org/cmake/resources/software.html
+# Microsoft Visual Studio 2012 (Ultimate has been tested, Express does not work)
+#
+# You probably want these on your PATH. The installers usually
+# offer an option to do that for you, so let them if you can.
+#
+# You are expected to run this script within the correct Visual Studio
+# Shell. Probably "VS2012 x86 Native Tools Command Prompt". This
+# sets the proper PATH arguments so that the compiler tools are
+# available.
+#
+# TODO:
+# Find some way to work around the lack of devenv in Express (msbuild will help some)
+# Include a package target that zips everything up.
+# Perl script that runs the Subversion gen-make.py tool with the right args.
+# Alternatively update gen-make.py with an arg that knows about our layout.
+# Make the Windows build not expect to go looking into source code (httpd/zlib)
+# Add SWIG (to support checkout builds where SWIG generation hasn't been done).
+# Usage/help output from the usual flags/on error input.
+# Make SQLITE_VER friendly since we're using no dots right now.
+# Work out the fixes to the projects' sources and contribute them back.
+# Allow selection of Arch (x86 and x64)
+# ZLib support for OpenSSL (have to patch openssl)
+# Use CMake zlib build instead.
+# Assembler support for OpenSSL.
+# Add more specific commands to the command line (e.g. build-httpd)
+
+###################################
+###### V A R I A B L E S ######
+###################################
+package Vars;
+# variables in the Vars package can be overridden from the command
+# line with the FOO=BAR syntax. If you want any defaults to reference
+# other variables the defaults need to be in set_defaults() below to
+# allow the defaults to be set after processing user set variables.
+
+# Paths to commands to use, provide full paths if it's not
+# on your PATH already.
+our $SEVEN_ZIP = 'C:\Program Files\7-Zip\7z.exe';
+our $CMAKE = 'cmake';
+our $NMAKE = 'nmake';
+# Use the .com version so we get output, the .exe doesn't produce any output
+our $DEVENV = 'devenv.com';
+our $VCUPGRADE = 'vcupgrade';
+our $PYTHON = 'python';
+
+# Versions of the dependencies we will use
+# Change these if you want but these are known to work with
+# this script as is.
+our $HTTPD_VER = '2.4.4';
+our $APR_VER = '1.4.6';
+our $APU_VER = '1.5.2'; # apr-util version
+our $API_VER = '1.2.1'; # apr-iconv version
+our $ZLIB_VER = '1.2.8';
+our $OPENSSL_VER = '1.0.1e';
+our $PCRE_VER = '8.35';
+our $BDB_VER = '5.3.21';
+our $SQLITE_VER = '3071602';
+our $SERF_VER = '1.3.6';
+our $NEON_VER = '0.29.6';
+
+# Sources for files to download
+our $AWK_URL = 'http://www.cs.princeton.edu/~bwk/btl.mirror/awk95.exe';
+our $HTTPD_URL;
+our $APR_URL;
+our $APU_URL;
+our $API_URL;
+our $ZLIB_URL;
+our $OPENSSL_URL;
+our $PCRE_URL;
+our $BDB_URL;
+our $SQLITE_URL;
+our $SERF_URL;
+our $NEON_URL;
+our $PROJREF_URL = 'https://downloads.redhoundsoftware.com/blog/ProjRef.py';
+
+# Location of the already downloaded file.
+# by default these are undefined and set by the downloader.
+# However, they can be overridden from the command line and then
+# the downloader is skipped. Note that BDB has no downloader
+# so it must be overridden from the command line.
+our $AWK_FILE;
+our $HTTPD_FILE;
+our $APR_FILE;
+our $APU_FILE;
+our $API_FILE;
+our $ZLIB_FILE;
+our $OPENSSL_FILE;
+our $PCRE_FILE;
+our $BDB_FILE;
+our $SQLITE_FILE;
+our $SERF_FILE;
+our $NEON_FILE;
+our $PROJREF_FILE;
+
+# Various directories we use
+our $TOPDIR = Cwd::cwd(); # top of our tree
+our $INSTDIR; # where we install to
+our $BLDDIR; # directory where we actually build
+our $SRCDIR; # directory where we store package files
+
+# Some other options
+our $VS_VER;
+our $NEON;
+our $SVN_VER = '1.9.x';
+our $DEBUG = 0;
+
+# Utility function to remove dots from a string
+sub remove_dots {
+ my $in = shift;
+
+ $in =~ tr/.//d;
+ return $in;
+}
+
+# unless the variable is already defined set the value
+sub set_default {
+ my $var = shift;
+ my $value = shift;
+
+ unless (defined($$var)) {
+ $$var = $value;
+ }
+}
+
+sub set_svn_ver_defaults {
+ my ($svn_major, $svn_minor, $svn_patch) = $SVN_VER =~ /^(\d+)\.(\d+)\.(.+)$/;
+
+ if ($svn_major > 1 or ($svn_major == 1 and $svn_minor >= 8)) {
+ $NEON=0 unless defined($NEON);
+ } else {
+ $NEON=1 unless defined($NEON);
+ }
+}
+
+# Any variables with defaults that reference other values
+# should be set here. This defers setting of the default until runtime in these cases.
+sub set_defaults {
+ set_default(\$HTTPD_URL, "http://archive.apache.org/dist/httpd/httpd-$HTTPD_VER.tar.bz2");
+ set_default(\$APR_URL, "http://archive.apache.org/dist/apr/apr-$APR_VER.tar.bz2");
+ set_default(\$APU_URL, "http://archive.apache.org/dist/apr/apr-util-$APU_VER.tar.bz2");
+ set_default(\$API_URL, "http://archive.apache.org/dist/apr/apr-iconv-$API_VER.tar.bz2");
+ set_default(\$ZLIB_URL, "http://sourceforge.net/projects/libpng/files/zlib/$ZLIB_VER/zlib" . remove_dots($ZLIB_VER) . '.zip');
+ set_default(\$OPENSSL_URL, "http://www.openssl.org/source/openssl-$OPENSSL_VER.tar.gz");
+ set_default(\$PCRE_URL, "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-$PCRE_VER.zip");
+ set_default(\$BDB_URL, "http://download.oracle.com/berkeley-db/db-5.3.21.zip");
+ set_default(\$SQLITE_URL, "http://www.sqlite.org/2013/sqlite-amalgamation-$SQLITE_VER.zip");
+ set_default(\$SERF_URL, "https://archive.apache.org/dist/serf/serf-$SERF_VER.zip");
+ set_default(\$NEON_URL, "http://www.webdav.org/neon/neon-$NEON_VER.tar.gz");
+ set_default(\$INSTDIR, $TOPDIR);
+ set_default(\$BLDDIR, "$TOPDIR\\build");
+ set_default(\$SRCDIR, "$TOPDIR\\sources");
+ set_svn_ver_defaults();
+}
+
+#################################
+###### M A I N ######
+#################################
+# You shouldn't have any reason to modify below this unless you've changed
+# versions of something.
+package main;
+
+use warnings;
+use strict;
+
+use LWP::Simple;
+use File::Path;
+use File::Copy;
+use File::Basename;
+use File::Find;
+use Cwd;
+use Config;
+
+# Full path to perl, this shouldn't need to be messed with
+my $PERL = $Config{perlpath};
+
+# Directory constants that we setup for convenience, but that
+# shouldn't be changed since they are assumed in the build systems
+# of the various dependencies.
+my $HTTPD; # Where httpd gets built
+my $BDB; # Where bdb gets built
+my $BINDIR; # where binaries are installed
+my $LIBDIR; # where libraries are installed
+my $INCDIR; # where headers are installed
+my $SRCLIB; # httpd's srclib dir
+
+# defer setting these values till runtime so users can override the
+# user controlled vars they derive from.
+sub set_paths {
+ $HTTPD = "$BLDDIR\\httpd";
+ $BDB = "$BLDDIR\\bdb";
+ $BINDIR = "$INSTDIR\\bin";
+ $LIBDIR = "$INSTDIR\\lib";
+ $INCDIR = "$INSTDIR\\include";
+ $SRCLIB = "$HTTPD\\srclib";
+ # Add bin to PATH this will be needed for at least awk later on
+ $ENV{PATH} = "$BINDIR;$ENV{PATH}";
+ # Setup LIB and INCLUDE so we can find BDB
+ $ENV{LIB} = "$LIBDIR;$ENV{LIB}";
+ $ENV{INCLUDE} = "$INCDIR;$ENV{INCLUDE}";
+}
+
+#####################
+# UTILITY FUNCTIONS #
+#####################
+
+# copy a file with error handling
+sub copy_or_die {
+ my $src = shift;
+ my $dest = shift;
+
+ copy($src, $dest) or die "Failed to copy $src to $dest: $!";
+}
+
+# Rename a file and deal with errors.
+sub rename_or_die {
+ my $src = shift;
+ my $dest = shift;
+
+ rename($src, $dest) or die "Failed to rename $src to $dest: $!";
+}
+
+# Utility function to chdir with error handling.
+sub chdir_or_die {
+ my $dir = shift;
+
+ chdir($dir) or die "Failed to chdir to $dir: $!";
+}
+
+# Utility function to call system with error handling.
+# First arg is an error message to print if something fails.
+# Remaining args are passed to system.
+sub system_or_die {
+ my $error_msg = shift;
+ unless (system(@_) == 0) {
+ if (defined($error_msg)) {
+ die "$error_msg (exit code: $?)";
+ } else {
+ die "Failed while running '@_' (exit code: $?)";
+ }
+ }
+}
+
+# Like perl -pi.orig the second arg is a reference to a
+# function that does whatever line processing you want.
+# Note that $_ is used for the input and output of the
+# function. So modifying $_ changes the line in the file.
+# bak can be passed to set the backup extension. If the
+# backup file already exists, shortcut this step.
+sub modify_file_in_place {
+ my $file = shift;
+ my $func = shift;
+ my $bak = shift;
+
+ unless (defined($bak)) {
+ $bak = '.orig';
+ }
+
+ my $backup = $file . $bak;
+ return if -e $backup;
+ rename_or_die($file, $backup);
+ open(IN, "<$backup") or die "Failed to open $backup: $!";
+ open(OUT, ">$file") or die "Failed to open $file: $!";
+ while (<IN>) {
+ &{$func}();
+ print OUT;
+ }
+ close(IN);
+ close(OUT);
+}
+
+sub check_vs_ver {
+ return if defined($VS_VER);
+
+ # using the vcupgrade command here because it has a consistent name and version
+ # numbering across versions including express versions.
+ my $help_output = `"$VCUPGRADE" /?`;
+ my ($major_version) = $help_output =~ /Version (\d+)\./s;
+
+ if (defined($major_version)) {
+ if ($major_version eq '12') {
+ $VS_VER = '2013';
+ return;
+ } elsif ($major_version eq '11') {
+ $VS_VER = '2012';
+ return;
+ } elsif ($major_version eq '10') {
+ $VS_VER = '2010';
+ return;
+ }
+ }
+
+ die("Visual Studio Version Not Supported");
+}
+
+##################
+# TREE STRUCTURE #
+##################
+
+# Create directories that this script directly needs
+sub prepare_structure {
+ # ignore errors the directories may already exist.
+ mkdir($BINDIR);
+ mkdir($SRCDIR);
+ mkdir($BLDDIR);
+ mkdir($LIBDIR);
+ mkdir($INCDIR);
+}
+
+# Remove paths created by this script (directly or indirectly)
+# If the first arg is 1 it'll remove the downloaded files otherwise it
+# leaves them alone.
+sub clean_structure {
+ # ignore errors in this function the paths may not exist
+ my $real_clean = shift;
+
+ if ($real_clean) {
+ rmtree($SRCDIR);
+ }
+ rmtree($BINDIR);
+ rmtree($BLDDIR);
+ rmtree($INCDIR);
+ rmtree($LIBDIR);
+ rmtree("$INSTDIR\\serf");
+ rmtree("$INSTDIR\\neon");
+ rmtree("$INSTDIR\\sqlite-amalgamation");
+
+ # Dirs created indirectly by the install targets
+ rmtree("$INSTDIR\\man");
+ rmtree("$INSTDIR\\share");
+ rmtree("$INSTDIR\\ssl");
+ rmtree("$INSTDIR\\cgi-bin");
+ rmtree("$INSTDIR\\conf");
+ rmtree("$INSTDIR\\error");
+ rmtree("$INSTDIR\\htdocs");
+ rmtree("$INSTDIR\\icons");
+ rmtree("$INSTDIR\\logs");
+ rmtree("$INSTDIR\\manual");
+ rmtree("$INSTDIR\\modules");
+ unlink("$INSTDIR\\ABOUT_APACHE.txt");
+ unlink("$INSTDIR\\CHANGES.txt");
+ unlink("$INSTDIR\\INSTALL.txt");
+ unlink("$INSTDIR\\LICENSE.txt");
+ unlink("$INSTDIR\\NOTICE.txt");
+ unlink("$INSTDIR\\OPENSSL-NEWS.txt");
+ unlink("$INSTDIR\\OPENSSL-README.txt");
+ unlink("$INSTDIR\\README.txt");
+}
+
+############
+# DOWNLOAD #
+############
+
+# Download a url into a file if successful put the destination into the
+# variable referenced by $dest_ref.
+sub download_file {
+ my $url = shift;
+ my $file = shift;
+ my $dest_ref = shift;
+
+ # If the variable referenced by $dest_ref is already set, skip downloading
+ # means we've been asked to use an already downloaded file.
+ return if (defined($$dest_ref));
+
+ print "Downloading $url\n";
+ # Using mirror() here so that repeated runs shouldn't try to keep downloading
+ # the file.
+ my $response = mirror($url, $file);
+ if (is_error($response)) {
+ die "Couldn't save $url to $file received $response";
+ }
+ $$dest_ref = $file;
+}
+
+# Download all the dependencies we need
+sub download_dependencies {
+ # putting awk in sources is a bit of a hack but it lets us
+ # avoid having to figure out what to delete when cleaning bin
+ download_file($AWK_URL, "$SRCDIR\\awk.exe", \$AWK_FILE);
+ unless(-x "$BINDIR\\awk.exe") { # skip the copy if it exists
+ copy_or_die($AWK_FILE, "$BINDIR\\awk.exe");
+ }
+ download_file($PROJREF_URL, "$SRCDIR\\ProjRef.py", \$PROJREF_FILE);
+ unless(-x "$BINDIR\\ProjRef.py") { # skip the copy if it exists
+ copy_or_die($PROJREF_FILE, $BINDIR);
+ }
+ download_file($BDB_URL, "$SRCDIR\\db.zip", \$BDB_FILE);
+ download_file($ZLIB_URL, "$SRCDIR\\zlib.zip", \$ZLIB_FILE);
+ download_file($OPENSSL_URL, "$SRCDIR\\openssl.tar.gz", \$OPENSSL_FILE);
+ download_file($HTTPD_URL, "$SRCDIR\\httpd.tar.bz2", \$HTTPD_FILE);
+ download_file($APR_URL, "$SRCDIR\\apr.tar.bz2", \$APR_FILE);
+ download_file($APU_URL, "$SRCDIR\\apr-util.tar.bz2", \$APU_FILE);
+ download_file($API_URL, "$SRCDIR\\apr-iconv.tar.bz2", \$API_FILE);
+ download_file($PCRE_URL, "$SRCDIR\\pcre.zip", \$PCRE_FILE);
+ download_file($SQLITE_URL, "$SRCDIR\\sqlite-amalgamation.zip", \$SQLITE_FILE);
+ download_file($SERF_URL, "$SRCDIR\\serf.zip", \$SERF_FILE);
+ download_file($NEON_URL, "$SRCDIR\\neon.tar.gz", \$NEON_FILE) if $NEON;
+}
+
+##############
+# EXTRACTION #
+##############
+
+# Extract a compressed file with 7-zip into a given directory
+# Skip extraction if destination of rename_to or expected_name exists
+# if rename_to is set rename the path from expected_name to rename_to
+sub extract_file {
+ my $file = shift;
+ my $container = shift;
+ my $expected_name = shift;
+ my $rename_to = shift;
+
+ if (defined($rename_to)) {
+ return if -d $rename_to;
+ } elsif (defined($expected_name)) {
+ return if -d $expected_name;
+ }
+
+ my $dest_opt = "";
+ if (defined($container)) {
+ $dest_opt = qq(-o"$container" );
+ }
+
+ my $cmd;
+ if ($file =~ /\.tar\.(bz2|gz)$/) {
+ $cmd = qq("$SEVEN_ZIP" x "$file" -so | "$SEVEN_ZIP" x -y -si -ttar $dest_opt);
+ } else {
+ $cmd = qq("$SEVEN_ZIP" x -y $dest_opt $file);
+ }
+
+ system_or_die("Problem extracting $file", $cmd);
+ if (defined($rename_to)) {
+ rename_or_die($expected_name, $rename_to);
+ }
+}
+
+sub extract_dependencies {
+ extract_file($BDB_FILE, $BLDDIR,
+ "$BLDDIR\\db-$BDB_VER", "$BLDDIR\\bdb");
+ extract_file($HTTPD_FILE, $BLDDIR,
+ "$BLDDIR\\httpd-$HTTPD_VER", "$BLDDIR\\httpd");
+ extract_file($APR_FILE, $SRCLIB,
+ "$SRCLIB\\apr-$APR_VER", "$SRCLIB\\apr");
+ extract_file($APU_FILE, $SRCLIB,
+ "$SRCLIB\\apr-util-$APU_VER", "$SRCLIB\\apr-util");
+ extract_file($API_FILE, $SRCLIB,
+ "$SRCLIB\\apr-iconv-$API_VER", "$SRCLIB\\apr-iconv");
+ # We fix the line endings before putting the non-Apache deps in place since it
+ # touches everything under httpd and there's no point in doing other things.
+ httpd_fix_lineends();
+ extract_file($ZLIB_FILE, $SRCLIB,
+ "$SRCLIB\\zlib-$ZLIB_VER", "$SRCLIB\\zlib");
+ extract_file($OPENSSL_FILE, $SRCLIB,
+ "$SRCLIB\\openssl-$OPENSSL_VER", "$SRCLIB\\openssl");
+ extract_file($PCRE_FILE, $SRCLIB,
+ "$SRCLIB\\pcre-$PCRE_VER", "$SRCLIB\\pcre");
+ extract_file($SQLITE_FILE, $INSTDIR,
+ "$INSTDIR\\sqlite-amalgamation-$SQLITE_VER",
+ "$INSTDIR\\sqlite-amalgamation");
+ extract_file($SERF_FILE, $INSTDIR,
+ "$INSTDIR\\serf-$SERF_VER", "$INSTDIR\\serf");
+ extract_file($NEON_FILE, $INSTDIR,
+ "$INSTDIR\\neon-$NEON_VER", "$INSTDIR\\neon") if $NEON;
+}
+
+#########
+# BUILD #
+#########
+
+sub build_pcre {
+ chdir_or_die("$SRCLIB\\pcre");
+ my $pcre_generator = 'NMake Makefiles';
+ # Have to use RelWithDebInfo since httpd looks for the pdb files
+ my $pcre_build_type = '-DCMAKE_BUILD_TYPE:STRING=' . ($DEBUG ? 'Debug' : 'RelWithDebInfo');
+ my $pcre_options = '-DPCRE_NO_RECURSE:BOOL=ON';
+ my $pcre_shared_libs = '-DBUILD_SHARED_LIBS:BOOL=ON';
+ my $pcre_install_prefix = "-DCMAKE_INSTALL_PREFIX:PATH=$INSTDIR";
+ my $cmake_cmd = qq("$CMAKE" -G "$pcre_generator" "$pcre_build_type" "$pcre_shared_libs" "$pcre_install_prefix" "$pcre_options" .);
+ system_or_die("Failure generating pcre Makefiles", $cmake_cmd);
+ system_or_die("Failure building pcre", qq("$NMAKE"));
+ system_or_die("Failure testing pcre", qq("$NMAKE" test));
+ system_or_die("Failure installing pcre", qq("$NMAKE" install));
+ chdir_or_die($TOPDIR);
+}
+
+# This is based roughly off the build_zlib.bat that the Subversion Windows
+# build generates, if it doesn't match that then Subversion will fail to build.
+sub build_zlib {
+ chdir_or_die("$SRCLIB\\zlib");
+ $ENV{CC_OPTS} = $DEBUG ? '/MDd /Gm /ZI /Od /GZ /D_DEBUG' : '/MD /O2 /Zi'; # /O2 (letter O) optimizes; '/02' (zero) is not a valid cl option
+ $ENV{COMMON_CC_OPTS} = '/nologo /W3 /DWIN32 /D_WINDOWS';
+
+ system_or_die("Failure building zlib", qq("$NMAKE" /nologo -f win32\\Makefile.msc STATICLIB=zlibstat.lib all));
+
+ delete $ENV{CC_OPTS};
+ delete $ENV{COMMON_CC_OPTS};
+
+ chdir_or_die($TOPDIR);
+}
+
+sub build_openssl {
+ chdir_or_die("$SRCLIB\\openssl");
+
+ # We're building openssl without an assembler. If someone wants to
+ # use this for production they should probably download NASM and
+ # remove the no-asm below and use ms\do_nasm.bat instead.
+
+ # TODO: Enable openssl to use zlib. openssl needs some patching to do
+ # this since it wants to look for zlib as zlib1.dll and as the httpd
+ # build instructions note you probably don't want to dynamic link zlib.
+
+ # TODO: OpenSSL requires perl on the path since it uses perl without a full
+ # path in the batch file and the makefiles. Probably should determine
+ # if PERL is on the path and add it here if not.
+
+ # The apache build docs suggest no-rc5 no-idea enable-mdc2 on top of what
+ # is used below, the primary driver behind that is patents, but I believe
+ # the rc5 and idea patents have expired.
+ my $platform = $DEBUG ? 'debug-VC-WIN32' : 'VC-WIN32';
+ system_or_die("Failure configuring openssl",
+ qq("$PERL" Configure no-asm "--prefix=$INSTDIR" $platform));
+ system_or_die("Failure building openssl (bat)", 'ms\do_ms.bat');
+ system_or_die("Failure building openssl (nmake)", qq("$NMAKE" /f ms\\ntdll.mak));
+ system_or_die("Failure testing openssl", qq("$NMAKE" /f ms\\ntdll.mak test));
+ system_or_die("Failure installing openssl",
+ qq("$NMAKE" /f ms\\ntdll.mak install));
+ chdir_or_die($TOPDIR);
+}
+
+# Run devenv /Upgrade on file.
+# If the file isn't a .sln file and the sln file isn't empty shortcut this
+# If the file isn't a .sln file touch the basename.sln of file to avoid
+# Visual Studio whining about its backup step.
+sub upgrade_solution {
+ my $file = shift;
+ my $interactive = shift;
+ my $flags = "";
+
+ my ($basename, $directories) = fileparse($file, qr/\.[^.]*$/);
+ my $sln = $directories . $basename . '.sln';
+ return if $file ne $sln and -s $sln; # shortcut if sln file is unique and isn't empty
+ # 'touch' the sln file so that Visual Studio 2012
+ # doesn't try to say there was an error while upgrading because
+ # it was unable to backup the original solution file.
+ unless (-e $sln) {
+ open(SLN, ">$sln") or die "Can't create $sln: $!";
+ close(SLN);
+ }
+ print "Upgrading $file (this may take a while)\n";
+ $flags = " /Upgrade" unless $interactive;
+ system_or_die("Failure upgrading $file", qq("$DEVENV" "$file"$flags));
+ if ($interactive) {
+ print "Can't do automatic upgrade, doing interactive upgrade\n";
+ print "IDE will load, choose to convert all projects, exit the IDE and\n";
+ print "save the resulting solution file\n\n";
+ print "Press Enter to Continue\n";
+ <>;
+ }
+}
+
+# Run the lineends.pl script
+sub httpd_fix_lineends {
+ chdir_or_die($HTTPD);
+ # This script fixes the lineendings to be CRLF in appropriate files.
+ # If we don't run this script then the DSW Upgrade will fail.
+ system_or_die(undef, qq("$PERL" "$SRCLIB\\apr\\build\\lineends.pl"));
+ chdir_or_die($TOPDIR);
+}
+
+# The httpd makefile in 2.4.4 doesn't know about .vcxproj files and
+# still thinks it's got an older version of Visual Studio because
+# .vcproj files have become .vcxproj.
+sub httpd_fix_makefile {
+ my $file = shift;
+
+ modify_file_in_place($file, sub {
+ s/\.vcproj/.vcxproj/i;
+ # below fixes that installd breaks when trying to install pcre because
+ # dll is named pcred.dll when a Debug build.
+ s/^(\s*copy srclib\\pcre\\pcre\.\$\(src_dll\)\s+"\$\(inst_dll\)"\s+<\s*\.y\s*)$/!IF EXISTS("srclib\\pcre\\pcre\.\$(src_dll)")\n$1!ENDIF\n!IF EXISTS("srclib\\pcre\\pcred\.\$(src_dll)")\n\tcopy srclib\\pcre\\pcred.\$(src_dll)\t\t\t"\$(inst_dll)" <.y\n!ENDIF\n/;
+ });
+}
+
+# This is a poor mans way of inserting a property group into a
+# vcxproj file. It assumes that the ending Project tag will
+# be the start and end of the line with no whitespace, probably
+# not an entirely valid assumption but it works in this case.
+sub insert_property_group {
+ my $file = shift;
+ my $xml = shift;
+ my $bak = shift;
+
+ modify_file_in_place($file, sub {
+ s#(^</Project>$)#<PropertyGroup>$xml</PropertyGroup>\n$1#i;
+ }, $bak);
+}
+
+# Strip pre-compiled headers compile and linker flags from file they follow
+# the form: /Ycfoo.h or /Yufoo.h.
+sub disable_pch {
+ my $file = shift;
+
+ modify_file_in_place($file, sub {
+ s#/Y[cu][^ ]+##;
+ });
+}
+
+# Find the first .exe .dll or .so OutputFile in the project
+# provided by file. There may be macros or paths in the
+# result.
+sub get_output_file {
+ my $file = shift;
+ my $result;
+ local $_; # Don't mess with the $_ from the find callback
+
+ open(IN, "<$file") or die "Couldn't open file $file: $!";
+ while (<IN>) {
+ if (m#<OutputFile>(.*?\.(?:exe|dll|so))</OutputFile>#) { # 'exec' never matched .exe outputs
+ $result = $1;
+ last;
+ }
+ }
+ close(IN);
+ return $result;
+}
+
+# Find the name of the bdb library we've installed in our LIBDIR.
+sub find_bdb_lib {
+ my $result;
+ my $debug = $DEBUG ? 'd' : '';
+ find(sub {
+ if (not defined($result) and /^libdb\d+$debug\.lib$/) {
+ $result = $_;
+ }
+ }, $LIBDIR);
+ return $result;
+}
+
+# Insert the dependency dep into project file.
+# bak can be set to set the backup filename made of the project.
+sub insert_dependency_in_proj {
+ my $file = shift;
+ my $dep = shift;
+ my $bak = shift;
+
+ modify_file_in_place($file, sub {
+ s/(%\(AdditionalDependencies\))/$dep;$1/;
+ }, $bak);
+}
+
+# Do what's needed to enable BDB in the httpd and apr-util builds
+sub httpd_enable_bdb {
+ # Make APU_HAVE_DB be true so the code builds.
+ modify_file_in_place('srclib\apr-util\include\apu.hw', sub {
+ s/(#define\s+APU_HAVE_DB\s+)0/${1}1/;
+ });
+
+ # Fix the linkage, apr_dbm_db is hardcoded to libdb47.lib
+ my $bdb_lib = find_bdb_lib();
+ modify_file_in_place('srclib\apr-util\dbm\apr_dbm_db.vcxproj', sub {
+ s/libdb\d+\.lib/$bdb_lib/g;
+ }, '.bdb');
+
+ # httxt2dbm and htdbm need a BDB dependency and don't have one.
+ insert_dependency_in_proj('support\httxt2dbm.vcxproj', $bdb_lib, '.bdb');
+ insert_dependency_in_proj('support\htdbm.vcxproj', $bdb_lib, '.bdb');
+}
+
+# Apply the same fix as found in r1486937 on httpd 2.4.x branch.
+sub httpd_fix_debug {
+ my ($httpd_major, $httpd_minor, $httpd_patch) = $HTTPD_VER =~ /^(\d+)\.(\d+)\.(.+)$/;
+ return unless ($httpd_major <= 2 && $httpd_minor <= 4 && $httpd_patch < 5);
+
+ modify_file_in_place('libhttpd.dsp', sub {
+ s/^(!MESSAGE "libhttpd - Win32 Debug" \(based on "Win32 \(x86\) Dynamic-Link Library"\))$/$1\n!MESSAGE "libhttpd - Win32 Lexical" (based on "Win32 (x86) Dynamic-Link Library")/;
+ s/^(# Begin Group "headers")$/# Name "libhttpd - Win32 Lexical"\n$1/;
+ }, '.lexical');
+}
+
+sub build_httpd {
+ chdir_or_die($HTTPD);
+
+ my $vs_2013 = $VS_VER eq '2013';
+ my $vs_2012 = $VS_VER eq '2012';
+ my $vs_2010 = $VS_VER eq '2010';
+
+ httpd_fix_debug();
+
+ # I don't think cvtdsp.pl is necessary with Visual Studio 2012
+ # but it shouldn't hurt anything either. Including it allows
+ # for the possibility that this may work for older Visual Studio
+ # versions.
+ system_or_die("Failure converting DSP files",
+ qq("$PERL" srclib\\apr\\build\\cvtdsp.pl -2005));
+
+ upgrade_solution('Apache.dsw', $vs_2010);
+ httpd_enable_bdb();
+ httpd_fix_makefile('Makefile.win');
+
+ # Modules and support projects randomly fail due to an error about the
+ # CL.read.1.tlog file already existing. This is really because of the
+ # intermediate dirs being shared between modules, but for the time being
+ # this works around it.
+ find(sub {
+ if (/\.vcxproj$/) {
+ insert_property_group($_, '<TrackFileAccess>false</TrackFileAccess>')
+ }
+ }, 'modules', 'support');
+
+ if ($vs_2012 or $vs_2013) {
+ # Turn off pre-compiled headers for apr-iconv to avoid:
+ # LNK2011: http://msdn.microsoft.com/en-us/library/3ay26wa2(v=vs.110).aspx
+ disable_pch('srclib\apr-iconv\build\modules.mk.win');
+
+ # ApacheMonitor build fails due a duplicate manifest, turn off
+ # GenerateManifest
+ insert_property_group('support\win32\ApacheMonitor.vcxproj',
+ '<GenerateManifest>false</GenerateManifest>',
+ '.dupman');
+
+ # The APR libraries have projects named libapr but produce output named libapr-1
+ # The problem with this is in newer versions of Visual Studio TargetName defaults
+ # to the project name and not the basename of the output. Since the PDB file
+ # is named based on the TargetName the pdb file ends up being named libapr.pdb
+ # instead of libapr-1.pdb. The below call fixes this by explicitly providing
+ # a TargetName definition and shuts up some warnings about this problem as well.
+ # Without this fix the install fails when it tries to copy libapr-1.pdb.
+ # See this thread for details of the changes:
+ # http://social.msdn.microsoft.com/Forums/en-US/vcprerelease/thread/3c03e730-6a0e-4ee4-a0d6-6a5c3ce4343c
+ find(sub {
+ return unless (/\.vcxproj$/);
+ my $output_file = get_output_file($_);
+ return unless (defined($output_file));
+ my ($project_name) = fileparse($_, qr/\.[^.]*$/);
+ my ($old_style_target_name) = fileparse($output_file, qr/\.[^.]*$/);
+ return if ($old_style_target_name eq $project_name);
+ insert_property_group($_,
+ "<TargetName>$old_style_target_name</TargetName>", '.torig');
+ }, "$SRCLIB\\apr", "$SRCLIB\\apr-util", "$SRCLIB\\apr-iconv");
+ } elsif ($vs_2010) {
+ system_or_die("Failed fixing project guid references",
+ qq("$PYTHON" "$BINDIR\\ProjRef.py" -i Apache.sln"));
+ }
+
+ # If you're looking here it's possible that something went
+ # wrong with the httpd build. Debugging it can be a bit of a pain
+ # when using this script. There are log files created in the
+ # Release dirs named with the same basename as the project. E.G.
+ # for support\httxt2dbm.vcxproj you can find the log in
+ # support\Release\httxt2dbm.log. You can also run a similar build
+ # from in the IDE, but you'll need to disable some projects since
+ # they are separately driven by the Makefile.win. Grepping for
+ # '/project' in Makefile.win should tell you which projects. You'll
+ # also need to add the bin, include and lib paths to the appropriate
+ # configurations inside the project since we get them from the environment.
+ # Once all that is done the BuildBin project should be buildable for you to
+ # diagnose the problem.
+ my $target = $DEBUG ? "installd" : "installr";
+ system_or_die("Failed building/installing httpd/apr/apu/api",
+ qq("$NMAKE" /f Makefile.win $target "DBM_LIST=db" "INSTDIR=$INSTDIR"));
+
+ chdir_or_die($TOPDIR);
+}
+
+sub build_bdb {
+ chdir_or_die($BDB);
+
+ print(cwd(),$/);
+ my $sln = 'build_windows\Berkeley_DB_vs2010.sln';
+ upgrade_solution($sln);
+
+ my $platform = $DEBUG ? 'Debug|Win32' : 'Release|Win32';
+
+ # Build the db Project first since the full solution fails due to a broken
+ # dependency with the current version of BDB if we don't.
+ system_or_die("Failed building DBD (Project db)",
+ qq("$DEVENV" "$sln" /Build "$platform" /Project db));
+
+ system_or_die("Failed building DBD",
+ qq("$DEVENV" "$sln" /Build "$platform"));
+
+ # BDB doesn't seem to have its own install routines so we'll do it ourselves
+ copy_or_die('build_windows\db.h', $INCDIR);
+ find(sub {
+ if (/\.(exe|dll|pdb)$/) {
+ copy_or_die($_, $BINDIR);
+ } elsif (/\.lib$/) {
+ copy_or_die($_, $LIBDIR);
+ }
+ }, 'build_windows\\Win32\\' . ($DEBUG ? 'Debug' : 'Release'));
+
+ chdir_or_die($TOPDIR);
+}
+
+# Right now this doesn't actually build serf but just patches it so that it
+# can build against a debug build of OpenSSL.
+sub build_serf {
+ chdir_or_die("$TOPDIR\\serf");
+
+ modify_file_in_place('serf.mak', sub {
+ s/^(INTDIR = Release)$/$1\nOPENSSL_OUT_SUFFIX =/;
+ s/^(INTDIR = Debug)$/$1\nOPENSSL_OUT_SUFFIX = .dbg/;
+ s/(\$\(OPENSSL_SRC\)\\out32(?:dll)?)/$1\$(OPENSSL_OUT_SUFFIX)/g;
+ }, '.debug');
+
+ chdir_or_die($TOPDIR);
+}
+
+sub build_dependencies {
+ build_bdb();
+ build_zlib();
+ build_pcre();
+ build_openssl();
+ build_serf();
+ build_httpd();
+}
+
+###############
+# COMMANDLINE #
+###############
+
+# Implement an interface somewhat similar to the make command line
+# You can give a list of commands and variable assignments interspersed.
+# Variable assignments are always VAR=VALUE with no spaces (in a single
+# argv entry).
+sub main {
+ my @commands;
+ while (my $arg = shift @ARGV) {
+ # Look for variable assignment
+ if (my ($lhs, $rhs) = $arg =~ /([^=]+)=(.*)/) {
+ # Bit of hackery to allow the global values in the
+ # Vars package to be overridden from the command line.
+ # E.G. "CMAKE=C:\CMake\cmake.exe" would replace the
+ # default value with this value.
+ if (exists($Vars::{$lhs})) {
+ ${$Vars::{$lhs}} = $rhs;
+ } else {
+ # Don't allow variables that don't exist already to be touched.
+ die "$lhs is an unknown variable.";
+ }
+ } else {
+ # Not a variable so must be a command
+ push @commands, $arg;
+ }
+ }
+
+ # No commands so add the implicit all command
+ if ($#commands == -1) {
+ push @commands, 'all';
+ }
+
+ # Set defaults and paths that have to be set at runtime since they are based
+ # on other variables.
+ Vars::set_defaults();
+ set_paths();
+
+ # Determine the Visual Studio Version and die if not supported.
+ check_vs_ver();
+
+ # change directory to our TOPDIR before running any commands
+ # the variable assignment might have changed it.
+ chdir_or_die($TOPDIR);
+
+ # Run the commands in the order given.
+ foreach my $command (@commands) {
+ if ($command eq 'clean') {
+ clean_structure(0);
+ } elsif ($command eq 'real-clean') {
+ clean_structure(1);
+ } elsif ($command eq 'prepare') {
+ prepare_structure();
+ } elsif ($command eq 'download') {
+ download_dependencies();
+ } elsif ($command eq 'extract') {
+ extract_dependencies();
+ } elsif ($command eq 'all') {
+ prepare_structure();
+ download_dependencies();
+ extract_dependencies();
+ build_dependencies();
+ } else {
+ die "Command '$command' is unknown";
+ }
+ }
+}
+
+main();
diff --git a/tools/dev/fsfs-access-map.c b/tools/dev/fsfs-access-map.c
index 5fbd221..ac65182 100644
--- a/tools/dev/fsfs-access-map.c
+++ b/tools/dev/fsfs-access-map.c
@@ -52,9 +52,15 @@ typedef struct file_stats_t
/* number of lseek calls to clusters not previously read */
apr_int64_t uncached_seek_count;
+ /* number of lseek counts not followed by a read */
+ apr_int64_t unnecessary_seeks;
+
/* number of read() calls */
apr_int64_t read_count;
+ /* number of read() calls that returned 0 bytes */
+ apr_int64_t empty_reads;
+
/* total number of bytes returned by those reads */
apr_int64_t read_size;
@@ -86,12 +92,17 @@ typedef struct handle_info_t
/* bytes read so far in the current series of reads started (default: 0) */
apr_int64_t last_read_size;
+ /* number of read() calls in this series */
+ apr_int64_t read_count;
} handle_info_t;
/* useful typedef */
typedef unsigned char byte;
typedef unsigned short word;
+/* an RGB color */
+typedef byte color_t[3];
+
/* global const char * file name -> *file_info_t map */
static apr_hash_t *files = NULL;
@@ -136,6 +147,11 @@ store_read_info(handle_info_t *handle_info)
++*count;
}
}
+ else if (handle_info->read_count == 0)
+ {
+ /* two consecutive seeks */
+ handle_info->file->unnecessary_seeks++;
+ }
}
/* Handle a open() call. Ensures that a file_info_t for the given NAME
@@ -152,18 +168,18 @@ open_file(const char *name, int handle)
if (!file)
{
apr_pool_t *pool = apr_hash_pool_get(files);
- apr_pool_t *sub_pool = svn_pool_create(pool);
+ apr_pool_t *subpool = svn_pool_create(pool);
apr_file_t *apr_file = NULL;
apr_finfo_t finfo = { 0 };
- apr_size_t cluster_count = 0;
+ int cluster_count = 0;
/* determine file size (if file still exists) */
apr_file_open(&apr_file, name,
- APR_READ | APR_BUFFERED, APR_OS_DEFAULT, sub_pool);
+ APR_READ | APR_BUFFERED, APR_OS_DEFAULT, subpool);
if (apr_file)
apr_file_info_get(&finfo, APR_FINFO_SIZE, apr_file);
- svn_pool_destroy(sub_pool);
+ svn_pool_destroy(subpool);
file = apr_pcalloc(pool, sizeof(*file));
file->name = apr_pstrdup(pool, name);
@@ -171,7 +187,7 @@ open_file(const char *name, int handle)
/* pre-allocate cluster map accordingly
* (will be auto-expanded later if necessary) */
- cluster_count = (apr_size_t)(1 + (file->size - 1) / cluster_size);
+ cluster_count = (int)(1 + (file->size - 1) / cluster_size);
file->read_map = apr_array_make(pool, file->size
? cluster_count
: 1, sizeof(word));
@@ -188,6 +204,14 @@ open_file(const char *name, int handle)
else
file->rev_num = -1;
+ /* filter out log/phys index files */
+ if (file->rev_num >= 0)
+ {
+ const char *suffix = name + strlen(name) - 4;
+ if (strcmp(suffix, ".l2p") == 0 || strcmp(suffix, ".p2l") == 0)
+ file->rev_num = -1;
+ }
+
apr_hash_set(files, file->name, APR_HASH_KEY_STRING, file);
}
@@ -220,9 +244,13 @@ read_file(int handle, apr_int64_t count)
{
/* known file handle -> expand current read sequence */
+ handle_info->read_count++;
handle_info->last_read_size += count;
handle_info->file->read_count++;
handle_info->file->read_size += count;
+
+ if (count == 0)
+ handle_info->file->empty_reads++;
}
}
@@ -242,6 +270,7 @@ seek_file(int handle, apr_int64_t location)
handle_info->last_read_size = 0;
handle_info->last_read_start = location;
+ handle_info->read_count = 0;
handle_info->file->seek_count++;
/* if we seek to a location that had not been read from before,
@@ -275,10 +304,17 @@ parse_line(svn_stringbuf_t *line)
char *return_value = strrchr(line->data, ' ');
char *first_param_end;
apr_int64_t func_return = 0;
+ char *func_start = strchr(line->data, ' ');
if (func_end == NULL || return_value == NULL)
return;
+ if (func_start == NULL || func_start > func_end)
+ func_start = line->data;
+ else
+ while(*func_start == ' ')
+ func_start++;
+
first_param_end = strchr(func_end, ',');
if (first_param_end == NULL)
first_param_end = strchr(func_end, ')');
@@ -295,7 +331,7 @@ parse_line(svn_stringbuf_t *line)
svn_error_clear(svn_cstring_atoi64(&func_return, return_value));
/* process those operations that we care about */
- if (strcmp(line->data, "open") == 0)
+ if (strcmp(func_start, "open") == 0)
{
/* remove double quotes from file name parameter */
*func_end++ = 0;
@@ -303,11 +339,11 @@ parse_line(svn_stringbuf_t *line)
open_file(func_end, (int)func_return);
}
- else if (strcmp(line->data, "read") == 0)
+ else if (strcmp(func_start, "read") == 0)
read_file(atoi(func_end), func_return);
- else if (strcmp(line->data, "lseek") == 0)
+ else if (strcmp(func_start, "lseek") == 0)
seek_file(atoi(func_end), func_return);
- else if (strcmp(line->data, "close") == 0)
+ else if (strcmp(func_start, "close") == 0)
close_file(atoi(func_end));
}
@@ -317,7 +353,7 @@ static void
parse_file(apr_file_t *file)
{
apr_pool_t *pool = svn_pool_create(NULL);
- apr_pool_t *iter_pool = svn_pool_create(pool);
+ apr_pool_t *iterpool = svn_pool_create(pool);
/* limit lines to 4k (usually, we need less than 200 bytes) */
svn_stringbuf_t *line = svn_stringbuf_create_ensure(4096, pool);
@@ -327,13 +363,13 @@ parse_file(apr_file_t *file)
svn_error_t *err = NULL;
line->len = line->blocksize-1;
- err = svn_io_read_length_line(file, line->data, &line->len, iter_pool);
+ err = svn_io_read_length_line(file, line->data, &line->len, iterpool);
svn_error_clear(err);
if (err)
break;
parse_line(line);
- svn_pool_clear(iter_pool);
+ svn_pool_clear(iterpool);
}
while (line->len > 0);
}
@@ -494,17 +530,82 @@ write_bitmap_header(apr_file_t *file, int xsize, int ysize)
apr_file_write(file, header, &written);
}
-/* write the cluster read map for all files in INFO as BMP image to FILE.
+/* To COLOR, add the fractional value of SOURCE from fractional indexes
+ * SOURCE_START to SOURCE_END and apply the SCALING_FACTOR.
+ */
+static void
+add_sample(color_t color,
+ color_t *source,
+ double source_start,
+ double source_end,
+ double scaling_factor)
+{
+ double factor = (source_end - source_start) / scaling_factor;
+
+ apr_size_t i;
+ for (i = 0; i < sizeof(color_t) / sizeof(*color); ++i)
+ color[i] += (source_end - source_start < 0.5) && source_start > 1.0
+ ? factor * source[(apr_size_t)source_start - 1][i]
+ : factor * source[(apr_size_t)source_start][i];
+}
+
+/* Scale the IN_LEN RGB values from IN to OUT_LEN RGB values in OUT.
+ */
+static void
+scale_line(color_t* out,
+ int out_len,
+ color_t *in,
+ int in_len)
+{
+ double scaling_factor = (double)(in_len) / (double)(out_len);
+
+ apr_size_t i;
+ memset(out, 0, out_len * sizeof(color_t));
+ for (i = 0; i < out_len; ++i)
+ {
+ color_t color = { 0 };
+
+ double source_start = i * scaling_factor;
+ double source_end = (i + 1) * scaling_factor;
+
+ if ((apr_size_t)source_start == (apr_size_t)source_end)
+ {
+ add_sample(color, in, source_start, source_end, scaling_factor);
+ }
+ else
+ {
+ apr_size_t k;
+ apr_size_t first_sample_end = (apr_size_t)source_start + 1;
+ apr_size_t last_sample_start = (apr_size_t)source_end;
+
+ add_sample(color, in, source_start, first_sample_end, scaling_factor);
+ for (k = first_sample_end; k < last_sample_start; ++k)
+ add_sample(color, in, k, k + 1, scaling_factor);
+
+ add_sample(color, in, last_sample_start, source_end, scaling_factor);
+ }
+
+ memcpy(out[i], color, sizeof(color));
+ }
+}
+
+/* Write the cluster read map for all files in INFO as BMP image to FILE.
+ * If MAX_X is not 0, scale all lines to MAX_X pixels. Use POOL for
+ * allocations.
*/
static void
-write_bitmap(apr_array_header_t *info, apr_file_t *file)
+write_bitmap(apr_array_header_t *info,
+ int max_x,
+ apr_file_t *file,
+ apr_pool_t *pool)
{
int ysize = info->nelts;
int xsize = 0;
int x, y;
- int row_size;
- int padding;
+ apr_size_t row_size;
apr_size_t written;
+ color_t *line, *scaled_line;
+ svn_boolean_t do_scale = max_x > 0;
/* xsize = max cluster number */
for (y = 0; y < ysize; ++y)
@@ -516,37 +617,40 @@ write_bitmap(apr_array_header_t *info, apr_file_t *file)
xsize = 0x3fff;
if (ysize >= 0x4000)
ysize = 0x3fff;
+ if (max_x == 0)
+ max_x = xsize;
/* rows in BMP files must be aligned to 4 bytes */
- row_size = APR_ALIGN(xsize * 3, 4);
- padding = row_size - xsize * 3;
+ row_size = APR_ALIGN(max_x * sizeof(color_t), 4);
+
+ /**/
+ line = apr_pcalloc(pool, xsize * sizeof(color_t));
+ scaled_line = apr_pcalloc(pool, row_size);
/* write header to file */
- write_bitmap_header(file, xsize, ysize);
+ write_bitmap_header(file, max_x, ysize);
/* write all rows */
for (y = 0; y < ysize; ++y)
{
file_stats_t *file_info = APR_ARRAY_IDX(info, y, file_stats_t *);
+ int block_count = file_info->read_map->nelts;
for (x = 0; x < xsize; ++x)
{
- byte color[3] = { 128, 128, 128 };
- if (x < file_info->read_map->nelts)
+ color_t color = { 128, 128, 128 };
+ if (x < block_count)
{
word count = APR_ARRAY_IDX(file_info->read_map, x, word);
select_color(color, count);
}
- written = sizeof(color);
- apr_file_write(file, color, &written);
+ memcpy(line[x], color, sizeof(color));
}
- if (padding)
- {
- char pad[3] = { 0 };
- written = padding;
- apr_file_write(file, pad, &written);
- }
+ scale_line(scaled_line, max_x, line, block_count ? block_count : 1);
+
+ written = row_size;
+ apr_file_write(file, do_scale ? scaled_line : line, &written);
}
}
@@ -592,6 +696,8 @@ print_stats(apr_pool_t *pool)
apr_int64_t clusters_read = 0;
apr_int64_t unique_clusters_read = 0;
apr_int64_t uncached_seek_count = 0;
+ apr_int64_t unnecessary_seek_count = 0;
+ apr_int64_t empty_read_count = 0;
apr_hash_index_t *hi;
for (hi = apr_hash_first(pool, files); hi; hi = apr_hash_next(hi))
@@ -609,13 +715,17 @@ print_stats(apr_pool_t *pool)
clusters_read += file->clusters_read;
unique_clusters_read += file->unique_clusters_read;
uncached_seek_count += file->uncached_seek_count;
+ unnecessary_seek_count += file->unnecessary_seeks;
+ empty_read_count += file->empty_reads;
}
printf("%20s files\n", svn__i64toa_sep(apr_hash_count(files), ',', pool));
printf("%20s files opened\n", svn__i64toa_sep(open_count, ',', pool));
printf("%20s seeks\n", svn__i64toa_sep(seek_count, ',', pool));
+ printf("%20s unnecessary seeks\n", svn__i64toa_sep(unnecessary_seek_count, ',', pool));
printf("%20s uncached seeks\n", svn__i64toa_sep(uncached_seek_count, ',', pool));
printf("%20s reads\n", svn__i64toa_sep(read_count, ',', pool));
+ printf("%20s empty reads\n", svn__i64toa_sep(empty_read_count, ',', pool));
printf("%20s unique clusters read\n", svn__i64toa_sep(unique_clusters_read, ',', pool));
printf("%20s clusters read\n", svn__i64toa_sep(clusters_read, ',', pool));
printf("%20s bytes read\n", svn__i64toa_sep(read_size, ',', pool));
@@ -629,7 +739,7 @@ print_usage(void)
printf("Reads strace of some FSFS-based tool from <file>, prints some stats\n");
printf("and writes a cluster access map to 'access.bmp' the current folder.\n");
printf("Each pixel corresponds to one 64kB cluster and every line to a rev\n");
- printf("or packed rev file in the repository. Turquoise and greed indicate\n");
+ printf("or packed rev file in the repository. Turquoise and green indicate\n");
printf("1 and 2 hits, yellow to read-ish colors for up to 20, shares of\n");
printf("for up to 100 and black for > 200 hits.\n\n");
printf("A typical strace invocation looks like this:\n");
@@ -665,7 +775,13 @@ int main(int argc, const char *argv[])
apr_file_open(&file, "access.bmp",
APR_WRITE | APR_CREATE | APR_TRUNCATE | APR_BUFFERED,
APR_OS_DEFAULT, pool);
- write_bitmap(get_rev_files(pool), file);
+ write_bitmap(get_rev_files(pool), 0, file, pool);
+ apr_file_close(file);
+
+ apr_file_open(&file, "access_scaled.bmp",
+ APR_WRITE | APR_CREATE | APR_TRUNCATE | APR_BUFFERED,
+ APR_OS_DEFAULT, pool);
+ write_bitmap(get_rev_files(pool), 1024, file, pool);
apr_file_close(file);
apr_file_open(&file, "scale.bmp",
@@ -675,4 +791,4 @@ int main(int argc, const char *argv[])
apr_file_close(file);
return 0;
-} \ No newline at end of file
+}
diff --git a/tools/dev/fsfs-reorg.c b/tools/dev/fsfs-reorg.c
deleted file mode 100644
index 052ad39..0000000
--- a/tools/dev/fsfs-reorg.c
+++ /dev/null
@@ -1,3147 +0,0 @@
-/* fsfs-reorg.c -- prototypic tool to reorganize packed FSFS repositories
- * to reduce seeks
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-
-#include <assert.h>
-
-#include <apr.h>
-#include <apr_general.h>
-#include <apr_file_io.h>
-#include <apr_poll.h>
-
-#include "svn_pools.h"
-#include "svn_diff.h"
-#include "svn_io.h"
-#include "svn_utf.h"
-#include "svn_dirent_uri.h"
-#include "svn_sorts.h"
-#include "svn_delta.h"
-#include "svn_hash.h"
-
-#include "private/svn_string_private.h"
-#include "private/svn_subr_private.h"
-#include "private/svn_dep_compat.h"
-
-#ifndef _
-#define _(x) x
-#endif
-
-#define ERROR_TAG "fsfs-reporg: "
-
-/* forward declarations */
-typedef struct noderev_t noderev_t;
-typedef struct revision_info_t revision_info_t;
-
-/* A FSFS rev file is sequence of fragments and unused space (the latter
- * only being inserted by this tool and not during ordinary SVN operation).
- *
- * This type defines the type of any fragment.
- *
- * Please note that the classification as "property", "dir" or "file"
- * fragments is only to be used while determining the future placement
- * of a representation. If the rep is shared, the same rep may be used
- * as *any* of the 3 kinds.
- */
-enum fragment_kind_t
-{
- /* the 2 number line containing changes and root node offsets */
- header_fragment,
-
- /* list of all changes in a revision */
- changes_fragment,
-
- /* (the textual representation of) a noderev */
- noderev_fragment,
-
- /* a property rep (including PLAIN / DELTA header) */
- property_fragment,
-
- /* a directory rep (including PLAIN / DELTA header) */
- dir_fragment,
-
- /* a file rep (including PLAIN / DELTA header) */
- file_fragment
-};
-
-/* A fragment. This is used to represent the final ordering, i.e. there
- * will be an array containing elements of this type that basically put
- * a fragment at some location in the target file.
- */
-typedef struct fragment_t
-{
- /* position in the target file */
- apr_size_t position;
-
- /* kind of fragment */
- enum fragment_kind_t kind;
-
- /* pointer to the fragment struct; type depends on KIND */
- void *data;
-} fragment_t;
-
-/* Location info for a single revision.
- */
-typedef struct revision_location_t
-{
- /* pack file offset (manifest value), 0 for non-packed files */
- apr_size_t offset;
-
- /* offset of the changes list relative to OFFSET */
- apr_size_t changes;
-
- /* length of the changes list on bytes */
- apr_size_t changes_len;
-
- /* first offset behind the revision data in the pack file (file length
- * for non-packed revs) */
- apr_size_t end;
-} revision_location_t;
-
-/* Absolute position and size of some item.
- */
-typedef struct location_t
-{
- /* absolute offset in the file */
- apr_size_t offset;
-
- /* item length in bytes */
- apr_size_t size;
-} location_t;
-
-/* A parsed directory entry. Note that instances of this struct may be
- * shared between different DIRECTORY_T containers.
- */
-typedef struct direntry_t
-{
- /* (local) entry / path name */
- const char *name;
-
- /* strlen (name) */
- apr_size_t name_len;
-
- /* node rev providing ID and representation(s) */
- noderev_t *node;
-} direntry_t;
-
-/* Representation of a parsed directory content.
- */
-typedef struct directory_t
-{
- /* array of pointers to DIRENTRY_T */
- apr_array_header_t *entries;
-
- /* MD5 of the textual representation. Will be set lazily as a side-effect
- * of determining the length of this dir's textual representation. */
- unsigned char target_md5[16];
-
- /* (expanded) length of the textual representation.
- * Determined lazily during the write process. */
- apr_size_t size;
-} directory_t;
-
-/* A representation fragment.
- */
-typedef struct representation_t
-{
- /* location in the source file */
- location_t original;
-
- /* location in the reordered target file */
- location_t target;
-
- /* length of the PLAIN / DELTA line in the source file in bytes */
- apr_size_t header_size;
-
- /* deltification base, or NULL if there is none */
- struct representation_t *delta_base;
-
- /* revision that contains this representation
- * (may be referenced by other revisions, though) */
- revision_info_t *revision;
-
- /* representation content parsed as a directory. This will be NULL, if
- * *no* directory noderev uses this representation. */
- directory_t *dir;
-
- /* the source content has a PLAIN header, so we may simply copy the
- * source content into the target */
- svn_boolean_t is_plain;
-
- /* coloring flag used in the reordering algorithm to keep track of
- * representations that still need to be placed. */
- svn_boolean_t covered;
-} representation_t;
-
-/* A node rev.
- */
-struct noderev_t
-{
- /* location within the source file */
- location_t original;
-
- /* location within the reorganized target file. */
- location_t target;
-
- /* predecessor node, or NULL if there is none */
- noderev_t *predecessor;
-
- /* content representation; may be NULL if there is none */
- representation_t *text;
-
- /* properties representation; may be NULL if there is none */
- representation_t *props;
-
- /* revision that this noderev belongs to */
- revision_info_t *revision;
-
- /* coloring flag used in the reordering algorithm to keep track of
- * representations that still need to be placed. */
- svn_boolean_t covered;
-};
-
-/* Represents a single revision.
- * There will be only one instance per revision. */
-struct revision_info_t
-{
- /* number of this revision */
- svn_revnum_t revision;
-
- /* position in the source file */
- revision_location_t original;
-
- /* position in the reorganized target file */
- revision_location_t target;
-
- /* noderev of the root directory */
- noderev_t *root_noderev;
-
- /* all noderevs_t of this revision (ordered by source file offset),
- * i.e. those that point back to this struct */
- apr_array_header_t *node_revs;
-
- /* all representation_t of this revision (ordered by source file offset),
- * i.e. those that point back to this struct */
- apr_array_header_t *representations;
-};
-
-/* Represents a packed revision file.
- */
-typedef struct revision_pack_t
-{
- /* first revision in the pack file */
- svn_revnum_t base;
-
- /* revision_info_t* of all revisions in the pack file; in revision order. */
- apr_array_header_t *info;
-
- /* list of fragments to place in the target pack file; in target order. */
- apr_array_header_t *fragments;
-
- /* source pack file length */
- apr_size_t filesize;
-
- /* temporary value. Equal to the number of bytes in the target pack file
- * already allocated to fragments. */
- apr_size_t target_offset;
-} revision_pack_t;
-
-/* Cache for revision source content. All content is stored in DATA and
- * the HASH maps revision number to an svn_string_t instance whose data
- * member points into DATA.
- *
- * Once TOTAL_SIZE exceeds LIMIT, all content will be discarded. Similarly,
- * the hash gets cleared every 10000 insertions to keep the HASH_POOL
- * memory usage in check.
- */
-typedef struct content_cache_t
-{
- /* pool used for HASH */
- apr_pool_t *hash_pool;
-
- /* svn_revnum_t -> svn_string_t.
- * The strings become (potentially) invalid when adding new cache entries. */
- apr_hash_t *hash;
-
- /* data buffer. the first TOTAL_SIZE bytes are actually being used. */
- char *data;
-
- /* DATA capacity */
- apr_size_t limit;
-
- /* number of bytes used in DATA */
- apr_size_t total_size;
-
- /* number of insertions since the last hash cleanup */
- apr_size_t insert_count;
-} content_cache_t;
-
-/* A cached directory. In contrast to directory_t, this stored the data as
- * the plain hash that the normal FSFS will use to serialize & diff dirs.
- */
-typedef struct dir_cache_entry_t
-{
- /* revision containing the representation */
- svn_revnum_t revision;
-
- /* offset of the representation within that revision */
- apr_size_t offset;
-
- /* key-value representation of the directory entries */
- apr_hash_t *hash;
-} dir_cache_entry_t;
-
-/* Directory cache. (revision, offset) will be mapped directly into the
- * ENTRIES array of ENTRY_COUNT buckets (many entries will be NULL).
- * Two alternating pools will be used to allocate dir content.
- *
- * If the INSERT_COUNT exceeds a given limit, the pools get exchanged and
- * the older of the two will be cleared. This is to keep dir objects valid
- * for at least one insertion.
- */
-typedef struct dir_cache_t
-{
- /* fixed-size array of ENTRY_COUNT elements */
- dir_cache_entry_t *entries;
-
- /* currently used for entry allocations */
- apr_pool_t *pool1;
-
- /* previously used for entry allocations */
- apr_pool_t *pool2;
-
- /* size of ENTRIES in elements */
- apr_size_t entry_count;
-
- /* number of directory elements added. I.e. usually >> #cached dirs */
- apr_size_t insert_count;
-} dir_cache_t;
-
-/* A cached, undeltified txdelta window.
- */
-typedef struct window_cache_entry_t
-{
- /* revision containing the window */
- svn_revnum_t revision;
-
- /* offset of the deltified window within that revision */
- apr_size_t offset;
-
- /* window content */
- svn_stringbuf_t *window;
-} window_cache_entry_t;
-
-/* Cache for undeltified txdelta windows. (revision, offset) will be mapped
- * directly into the ENTRIES array of INSERT_COUNT buckets (most entries
- * will be NULL).
- *
- * The cache will be cleared when USED exceeds CAPACITY.
- */
-typedef struct window_cache_t
-{
- /* fixed-size array of ENTRY_COUNT elements */
- window_cache_entry_t *entries;
-
- /* used to allocate windows */
- apr_pool_t *pool;
-
- /* size of ENTRIES in elements */
- apr_size_t entry_count;
-
- /* maximum combined size of all cached windows */
- apr_size_t capacity;
-
- /* current combined size of all cached windows */
- apr_size_t used;
-} window_cache_t;
-
-/* Root data structure containing all information about a given repository.
- */
-typedef struct fs_fs_t
-{
- /* repository to reorg */
- const char *path;
-
- /* revision to start at (must be 0, ATM) */
- svn_revnum_t start_revision;
-
- /* FSFS format number */
- int format;
-
- /* highest revision number in the repo */
- svn_revnum_t max_revision;
-
- /* first non-packed revision */
- svn_revnum_t min_unpacked_rev;
-
- /* sharing size*/
- int max_files_per_dir;
-
- /* all revisions */
- apr_array_header_t *revisions;
-
- /* all packed files */
- apr_array_header_t *packs;
-
- /* empty representation.
- * Used as a dummy base for DELTA reps without base. */
- representation_t *null_base;
-
- /* revision content cache */
- content_cache_t *cache;
-
- /* directory hash cache */
- dir_cache_t *dir_cache;
-
- /* undeltified txdelta window cache */
- window_cache_t *window_cache;
-} fs_fs_t;
-
-/* Return the rev pack folder for revision REV in FS.
- */
-static const char *
-get_pack_folder(fs_fs_t *fs,
- svn_revnum_t rev,
- apr_pool_t *pool)
-{
- return apr_psprintf(pool, "%s/db/revs/%ld.pack",
- fs->path, rev / fs->max_files_per_dir);
-}
-
-/* Return the path of the file containing revision REV in FS.
- */
-static const char *
-rev_or_pack_file_name(fs_fs_t *fs,
- svn_revnum_t rev,
- apr_pool_t *pool)
-{
- return fs->min_unpacked_rev > rev
- ? svn_dirent_join(get_pack_folder(fs, rev, pool), "pack", pool)
- : apr_psprintf(pool, "%s/db/revs/%ld/%ld", fs->path,
- rev / fs->max_files_per_dir, rev);
-}
-
-/* Open the file containing revision REV in FS and return it in *FILE.
- */
-static svn_error_t *
-open_rev_or_pack_file(apr_file_t **file,
- fs_fs_t *fs,
- svn_revnum_t rev,
- apr_pool_t *pool)
-{
- return svn_io_file_open(file,
- rev_or_pack_file_name(fs, rev, pool),
- APR_READ | APR_BUFFERED,
- APR_OS_DEFAULT,
- pool);
-}
-
-/* Read the whole content of the file containing REV in FS and return that
- * in *CONTENT.
- */
-static svn_error_t *
-read_rev_or_pack_file(svn_stringbuf_t **content,
- fs_fs_t *fs,
- svn_revnum_t rev,
- apr_pool_t *pool)
-{
- return svn_stringbuf_from_file2(content,
- rev_or_pack_file_name(fs, rev, pool),
- pool);
-}
-
-/* Return a new content cache with the given size LIMIT. Use POOL for
- * all cache-related allocations.
- */
-static content_cache_t *
-create_content_cache(apr_pool_t *pool,
- apr_size_t limit)
-{
- content_cache_t *result = apr_pcalloc(pool, sizeof(*result));
-
- result->hash_pool = svn_pool_create(pool);
- result->hash = svn_hash__make(result->hash_pool);
- result->limit = limit;
- result->total_size = 0;
- result->insert_count = 0;
- result->data = apr_palloc(pool, limit);
-
- return result;
-}
-
-/* Return the content of revision REVISION from CACHE. Return NULL upon a
- * cache miss. This is a cache-internal function.
- */
-static svn_string_t *
-get_cached_content(content_cache_t *cache,
- svn_revnum_t revision)
-{
- return apr_hash_get(cache->hash, &revision, sizeof(revision));
-}
-
-/* Take the content in DATA and store it under REVISION in CACHE.
- * This is a cache-internal function.
- */
-static void
-set_cached_content(content_cache_t *cache,
- svn_revnum_t revision,
- svn_string_t *data)
-{
- svn_string_t *content;
- svn_revnum_t *key;
-
- /* double insertion? -> broken cache logic */
- assert(get_cached_content(cache, revision) == NULL);
-
- /* purge the cache upon overflow */
- if (cache->total_size + data->len > cache->limit)
- {
- /* the hash pool grows slowly over time; clear it once in a while */
- if (cache->insert_count > 10000)
- {
- svn_pool_clear(cache->hash_pool);
- cache->hash = svn_hash__make(cache->hash_pool);
- cache->insert_count = 0;
- }
- else
- cache->hash = svn_hash__make(cache->hash_pool);
-
- cache->total_size = 0;
-
- /* buffer overflow / revision too large */
- if (data->len > cache->limit)
- SVN_ERR_MALFUNCTION_NO_RETURN();
- }
-
- /* copy data to cache and update he index (hash) */
- content = apr_palloc(cache->hash_pool, sizeof(*content));
- content->data = cache->data + cache->total_size;
- content->len = data->len;
-
- memcpy(cache->data + cache->total_size, data->data, data->len);
- cache->total_size += data->len;
-
- key = apr_palloc(cache->hash_pool, sizeof(*key));
- *key = revision;
-
- apr_hash_set(cache->hash, key, sizeof(*key), content);
- ++cache->insert_count;
-}
-
-/* Get the file content of revision REVISION in FS and return it in *DATA.
- * Use SCRATCH_POOL for temporary allocations.
- */
-static svn_error_t *
-get_content(svn_string_t **data,
- fs_fs_t *fs,
- svn_revnum_t revision,
- apr_pool_t *scratch_pool)
-{
- apr_file_t *file;
- revision_info_t *revision_info;
- svn_stringbuf_t *temp;
- apr_off_t temp_offset;
-
- /* try getting the data from our cache */
- svn_string_t *result = get_cached_content(fs->cache, revision);
- if (result)
- {
- *data = result;
- return SVN_NO_ERROR;
- }
-
- /* not in cache. Is the revision valid at all? */
- if (revision - fs->start_revision > fs->revisions->nelts)
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("Unknown revision %ld"), revision);
- revision_info = APR_ARRAY_IDX(fs->revisions,
- revision - fs->start_revision,
- revision_info_t*);
-
- /* read the revision content. Assume that the file has *not* been
- * reorg'ed, yet, i.e. all data is in one place. */
- temp = svn_stringbuf_create_ensure( revision_info->original.end
- - revision_info->original.offset,
- scratch_pool);
- temp->len = revision_info->original.end - revision_info->original.offset;
- SVN_ERR(open_rev_or_pack_file(&file, fs, revision, scratch_pool));
-
- temp_offset = revision_info->original.offset;
- SVN_ERR(svn_io_file_seek(file, APR_SET, &temp_offset,
- scratch_pool));
- SVN_ERR_ASSERT(temp_offset < APR_SIZE_MAX);
- revision_info->original.offset = (apr_size_t)temp_offset;
- SVN_ERR(svn_io_file_read(file, temp->data, &temp->len, scratch_pool));
-
- /* cache the result and return it */
- set_cached_content(fs->cache, revision,
- svn_stringbuf__morph_into_string(temp));
- *data = get_cached_content(fs->cache, revision);
-
- return SVN_NO_ERROR;
-}
-
-/* Return a new directory cache with ENTRY_COUNT buckets in its index.
- * Use POOL for all cache-related allocations.
- */
-static dir_cache_t *
-create_dir_cache(apr_pool_t *pool,
- apr_size_t entry_count)
-{
- dir_cache_t *result = apr_pcalloc(pool, sizeof(*result));
-
- result->pool1 = svn_pool_create(pool);
- result->pool2 = svn_pool_create(pool);
- result->entry_count = entry_count;
- result->insert_count = 0;
- result->entries = apr_pcalloc(pool, sizeof(*result->entries) * entry_count);
-
- return result;
-}
-
-/* Return the position within FS' dir cache ENTRIES index for the given
- * (REVISION, OFFSET) pair. This is a cache-internal function.
- */
-static apr_size_t
-get_dir_cache_index(fs_fs_t *fs,
- svn_revnum_t revision,
- apr_size_t offset)
-{
- return (revision + offset * 0xd1f3da69) % fs->dir_cache->entry_count;
-}
-
-/* Return the currently active pool of FS' dir cache. Note that it may be
- * cleared after *2* insertions.
- */
-static apr_pool_t *
-get_cached_dir_pool(fs_fs_t *fs)
-{
- return fs->dir_cache->pool1;
-}
-
-/* Return the cached directory content stored in REPRESENTATION within FS.
- * If that has not been found in cache, return NULL.
- */
-static apr_hash_t *
-get_cached_dir(fs_fs_t *fs,
- representation_t *representation)
-{
- svn_revnum_t revision = representation->revision->revision;
- apr_size_t offset = representation->original.offset;
-
- apr_size_t i = get_dir_cache_index(fs, revision, offset);
- dir_cache_entry_t *entry = &fs->dir_cache->entries[i];
-
- return entry->offset == offset && entry->revision == revision
- ? entry->hash
- : NULL;
-}
-
-/* Cache the directory HASH for REPRESENTATION within FS.
- */
-static void
-set_cached_dir(fs_fs_t *fs,
- representation_t *representation,
- apr_hash_t *hash)
-{
- /* select the entry to use */
- svn_revnum_t revision = representation->revision->revision;
- apr_size_t offset = representation->original.offset;
-
- apr_size_t i = get_dir_cache_index(fs, revision, offset);
- dir_cache_entry_t *entry = &fs->dir_cache->entries[i];
-
- /* clean the cache and rotate pools at regular intervals */
- fs->dir_cache->insert_count += apr_hash_count(hash);
- if (fs->dir_cache->insert_count >= fs->dir_cache->entry_count * 100)
- {
- apr_pool_t *pool;
-
- svn_pool_clear(fs->dir_cache->pool2);
- memset(fs->dir_cache->entries,
- 0,
- sizeof(*fs->dir_cache->entries) * fs->dir_cache->entry_count);
- fs->dir_cache->insert_count = 0;
-
- pool = fs->dir_cache->pool2;
- fs->dir_cache->pool2 = fs->dir_cache->pool1;
- fs->dir_cache->pool1 = pool;
- }
-
- /* write data to cache */
- entry->hash = hash;
- entry->offset = offset;
- entry->revision = revision;
-}
-
-/* Return a new txdelta window cache with ENTRY_COUNT buckets in its index
- * and a the total CAPACITY given in bytes.
- * Use POOL for all cache-related allocations.
- */
-static window_cache_t *
-create_window_cache(apr_pool_t *pool,
- apr_size_t entry_count,
- apr_size_t capacity)
-{
- window_cache_t *result = apr_pcalloc(pool, sizeof(*result));
-
- result->pool = svn_pool_create(pool);
- result->entry_count = entry_count;
- result->capacity = capacity;
- result->used = 0;
- result->entries = apr_pcalloc(pool, sizeof(*result->entries) * entry_count);
-
- return result;
-}
-
-/* Return the position within FS' window cache ENTRIES index for the given
- * (REVISION, OFFSET) pair. This is a cache-internal function.
- */
-static apr_size_t
-get_window_cache_index(fs_fs_t *fs,
- svn_revnum_t revision,
- apr_size_t offset)
-{
- return (revision + offset * 0xd1f3da69) % fs->window_cache->entry_count;
-}
-
-/* Return the cached txdelta window stored in REPRESENTATION within FS.
- * If that has not been found in cache, return NULL.
- */
-static svn_stringbuf_t *
-get_cached_window(fs_fs_t *fs,
- representation_t *representation,
- apr_pool_t *pool)
-{
- svn_revnum_t revision = representation->revision->revision;
- apr_size_t offset = representation->original.offset;
-
- apr_size_t i = get_window_cache_index(fs, revision, offset);
- window_cache_entry_t *entry = &fs->window_cache->entries[i];
-
- return entry->offset == offset && entry->revision == revision
- ? svn_stringbuf_dup(entry->window, pool)
- : NULL;
-}
-
-/* Cache the undeltified txdelta WINDOW for REPRESENTATION within FS.
- */
-static void
-set_cached_window(fs_fs_t *fs,
- representation_t *representation,
- svn_stringbuf_t *window)
-{
- /* select entry */
- svn_revnum_t revision = representation->revision->revision;
- apr_size_t offset = representation->original.offset;
-
- apr_size_t i = get_window_cache_index(fs, revision, offset);
- window_cache_entry_t *entry = &fs->window_cache->entries[i];
-
- /* if the capacity is exceeded, clear the cache */
- fs->window_cache->used += window->len;
- if (fs->window_cache->used >= fs->window_cache->capacity)
- {
- svn_pool_clear(fs->window_cache->pool);
- memset(fs->window_cache->entries,
- 0,
- sizeof(*fs->window_cache->entries) * fs->window_cache->entry_count);
- fs->window_cache->used = window->len;
- }
-
- /* set the entry to a copy of the window data */
- entry->window = svn_stringbuf_dup(window, fs->window_cache->pool);
- entry->offset = offset;
- entry->revision = revision;
-}
-
-/* Given rev pack PATH in FS, read the manifest file and return the offsets
- * in *MANIFEST. Use POOL for allocations.
- */
-static svn_error_t *
-read_manifest(apr_array_header_t **manifest,
- fs_fs_t *fs,
- const char *path,
- apr_pool_t *pool)
-{
- svn_stream_t *manifest_stream;
- apr_pool_t *iterpool;
-
- /* Open the manifest file. */
- SVN_ERR(svn_stream_open_readonly(&manifest_stream,
- svn_dirent_join(path, "manifest", pool),
- pool, pool));
-
- /* While we're here, let's just read the entire manifest file into an array,
- so we can cache the entire thing. */
- iterpool = svn_pool_create(pool);
- *manifest = apr_array_make(pool, fs->max_files_per_dir, sizeof(apr_size_t));
- while (1)
- {
- svn_stringbuf_t *sb;
- svn_boolean_t eof;
- apr_uint64_t val;
- svn_error_t *err;
-
- svn_pool_clear(iterpool);
- SVN_ERR(svn_stream_readline(manifest_stream, &sb, "\n", &eof, iterpool));
- if (eof)
- break;
-
- err = svn_cstring_strtoui64(&val, sb->data, 0, APR_SIZE_MAX, 10);
- if (err)
- return svn_error_createf(SVN_ERR_FS_CORRUPT, err,
- _("Manifest offset '%s' too large"),
- sb->data);
- APR_ARRAY_PUSH(*manifest, apr_size_t) = (apr_size_t)val;
- }
- svn_pool_destroy(iterpool);
-
- return svn_stream_close(manifest_stream);
-}
-
-/* Read header information for the revision stored in FILE_CONTENT at
- * offsets START or END. Return the offsets within FILE_CONTENT for the
- * *ROOT_NODEREV, the list of *CHANGES and its len in *CHANGES_LEN.
- * Use POOL for temporary allocations. */
-static svn_error_t *
-read_revision_header(apr_size_t *changes,
- apr_size_t *changes_len,
- apr_size_t *root_noderev,
- svn_stringbuf_t *file_content,
- apr_size_t start,
- apr_size_t end,
- apr_pool_t *pool)
-{
- char buf[64];
- const char *line;
- char *space;
- apr_uint64_t val;
- apr_size_t len;
-
- /* Read in this last block, from which we will identify the last line. */
- len = sizeof(buf);
- if (start + len > end)
- len = end - start;
-
- memcpy(buf, file_content->data + end - len, len);
-
- /* The last byte should be a newline. */
- if (buf[(apr_ssize_t)len - 1] != '\n')
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL,
- _("Revision lacks trailing newline"));
-
- /* Look for the next previous newline. */
- buf[len - 1] = 0;
- line = strrchr(buf, '\n');
- if (line == NULL)
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL,
- _("Final line in revision file longer "
- "than 64 characters"));
-
- space = strchr(line, ' ');
- if (space == NULL)
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL,
- _("Final line in revision file missing space"));
-
- /* terminate the header line */
- *space = 0;
-
- /* extract information */
- SVN_ERR(svn_cstring_strtoui64(&val, line+1, 0, APR_SIZE_MAX, 10));
- *root_noderev = (apr_size_t)val;
- SVN_ERR(svn_cstring_strtoui64(&val, space+1, 0, APR_SIZE_MAX, 10));
- *changes = (apr_size_t)val;
- *changes_len = end - *changes - start - (buf + len - line) + 1;
-
- return SVN_NO_ERROR;
-}
-
-/* Read the FSFS format number and sharding size from the format file at
- * PATH and return it in *PFORMAT and *MAX_FILES_PER_DIR respectively.
- * Use POOL for temporary allocations.
- */
-static svn_error_t *
-read_format(int *pformat, int *max_files_per_dir,
- const char *path, apr_pool_t *pool)
-{
- svn_error_t *err;
- apr_file_t *file;
- char buf[80];
- apr_size_t len;
-
- /* open format file and read the first line */
- err = svn_io_file_open(&file, path, APR_READ | APR_BUFFERED,
- APR_OS_DEFAULT, pool);
- if (err && APR_STATUS_IS_ENOENT(err->apr_err))
- {
- /* Treat an absent format file as format 1. Do not try to
- create the format file on the fly, because the repository
- might be read-only for us, or this might be a read-only
- operation, and the spirit of FSFS is to make no changes
- whatseover in read-only operations. See thread starting at
- http://subversion.tigris.org/servlets/ReadMsg?list=dev&msgNo=97600
- for more. */
- svn_error_clear(err);
- *pformat = 1;
- *max_files_per_dir = 0;
-
- return SVN_NO_ERROR;
- }
- SVN_ERR(err);
-
- len = sizeof(buf);
- err = svn_io_read_length_line(file, buf, &len, pool);
- if (err && APR_STATUS_IS_EOF(err->apr_err))
- {
- /* Return a more useful error message. */
- svn_error_clear(err);
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("Can't read first line of format file '%s'"),
- svn_dirent_local_style(path, pool));
- }
- SVN_ERR(err);
-
- /* Check that the first line contains only digits. */
- SVN_ERR(svn_cstring_atoi(pformat, buf));
-
- /* Set the default values for anything that can be set via an option. */
- *max_files_per_dir = 0;
-
- /* Read any options. */
- while (1)
- {
- len = sizeof(buf);
- err = svn_io_read_length_line(file, buf, &len, pool);
- if (err && APR_STATUS_IS_EOF(err->apr_err))
- {
- /* No more options; that's okay. */
- svn_error_clear(err);
- break;
- }
- SVN_ERR(err);
-
- if (strncmp(buf, "layout ", 7) == 0)
- {
- if (strcmp(buf+7, "linear") == 0)
- {
- *max_files_per_dir = 0;
- continue;
- }
-
- if (strncmp(buf+7, "sharded ", 8) == 0)
- {
- /* Check that the argument is numeric. */
- SVN_ERR(svn_cstring_atoi(max_files_per_dir, buf + 15));
- continue;
- }
- }
-
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("'%s' contains invalid filesystem format option '%s'"),
- svn_dirent_local_style(path, pool), buf);
- }
-
- return svn_io_file_close(file, pool);
-}
-
-/* Read the content of the file at PATH and return it in *RESULT.
- * Use POOL for temporary allocations.
- */
-static svn_error_t *
-read_number(svn_revnum_t *result, const char *path, apr_pool_t *pool)
-{
- svn_stringbuf_t *content;
- apr_uint64_t number;
-
- SVN_ERR(svn_stringbuf_from_file2(&content, path, pool));
-
- content->data[content->len-1] = 0;
- SVN_ERR(svn_cstring_strtoui64(&number, content->data, 0, LONG_MAX, 10));
- *result = (svn_revnum_t)number;
-
- return SVN_NO_ERROR;
-}
-
-/* Create *FS for the repository at PATH and read the format and size info.
- * Use POOL for temporary allocations.
- */
-static svn_error_t *
-fs_open(fs_fs_t **fs, const char *path, apr_pool_t *pool)
-{
- *fs = apr_pcalloc(pool, sizeof(**fs));
- (*fs)->path = apr_pstrdup(pool, path);
- (*fs)->max_files_per_dir = 1000;
-
- /* Read the FS format number. */
- SVN_ERR(read_format(&(*fs)->format,
- &(*fs)->max_files_per_dir,
- svn_dirent_join(path, "db/format", pool),
- pool));
- if (((*fs)->format != 4) && ((*fs)->format != 6))
- return svn_error_create(SVN_ERR_FS_UNSUPPORTED_FORMAT, NULL, NULL);
-
- /* read size (HEAD) info */
- SVN_ERR(read_number(&(*fs)->min_unpacked_rev,
- svn_dirent_join(path, "db/min-unpacked-rev", pool),
- pool));
- return read_number(&(*fs)->max_revision,
- svn_dirent_join(path, "db/current", pool),
- pool);
-}
-
-/* Utility function that returns true if STRING->DATA matches KEY.
- */
-static svn_boolean_t
-key_matches(svn_string_t *string, const char *key)
-{
- return strcmp(string->data, key) == 0;
-}
-
-/* Comparator used for binary search comparing the absolute file offset
- * of a noderev to some other offset. DATA is a *noderev_t, KEY is pointer
- * to an apr_size_t.
- */
-static int
-compare_noderev_offsets(const void *data, const void *key)
-{
- apr_ssize_t diff = (*(const noderev_t *const *)data)->original.offset
- - *(const apr_size_t *)key;
-
- /* sizeof(int) may be < sizeof(ssize_t) */
- if (diff < 0)
- return -1;
- return diff > 0 ? 1 : 0;
-}
-
-/* Get the revision and offset info from the node ID with FS. Return the
- * data as *REVISION_INFO and *OFFSET, respectively.
- *
- * Note that we assume that the revision_info_t object ID's revision has
- * already been created. That can be guaranteed for standard FSFS pack
- * files as IDs never point to future revisions.
- */
-static svn_error_t *
-parse_revnode_pos(revision_info_t **revision_info,
- apr_size_t *offset,
- fs_fs_t *fs,
- svn_string_t *id)
-{
- int revision;
- apr_uint64_t temp;
-
- /* split the ID and verify the format */
- const char *revision_pos = strrchr(id->data, 'r');
- char *offset_pos = (char *)strchr(id->data, '/');
-
- if (revision_pos == NULL || offset_pos == NULL)
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("Invalid node id '%s'"), id->data);
-
- /* extract the numbers (temp. modifying the ID)*/
- *offset_pos = 0;
- SVN_ERR(svn_cstring_atoi(&revision, revision_pos + 1));
- SVN_ERR(svn_cstring_strtoui64(&temp, offset_pos + 1, 0, APR_SIZE_MAX, 10));
- *offset = (apr_size_t)temp;
- *offset_pos = '/';
-
- /* validate the revision number and return the revision info */
- if (revision - fs->start_revision > fs->revisions->nelts)
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("Unknown revision %d"), revision);
-
- *revision_info = APR_ARRAY_IDX(fs->revisions,
- revision - fs->start_revision,
- revision_info_t*);
-
- return SVN_NO_ERROR;
-}
-
-/* Returns in *RESULT the noderev at OFFSET relative the revision given in
- * REVISION_INFO. If no such noderev has been parsed, yet, error out.
- *
- * Since we require the noderev to already have been parsed, we can use
- * this functions only to access "older", i.e. predecessor noderevs.
- */
-static svn_error_t *
-find_noderev(noderev_t **result,
- revision_info_t *revision_info,
- apr_size_t offset)
-{
- int idx = svn_sort__bsearch_lower_bound(&offset,
- revision_info->node_revs,
- compare_noderev_offsets);
- if ((idx < 0) || (idx >= revision_info->node_revs->nelts))
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("No noderev found at offset %ld"),
- (long)offset);
-
- *result = APR_ARRAY_IDX(revision_info->node_revs, idx, noderev_t *);
- if ((*result)->original.offset != offset)
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("No noderev found at offset %ld"),
- (long)offset);
-
- return SVN_NO_ERROR;
-}
-
-/* In *RESULT, return the noderev given by ID in FS. The noderev must
- * already have been parsed and put into the FS data structures.
- */
-static svn_error_t *
-parse_pred(noderev_t **result,
- fs_fs_t *fs,
- svn_string_t *id)
-{
- apr_size_t offset;
- revision_info_t *revision_info;
-
- SVN_ERR(parse_revnode_pos(&revision_info, &offset, fs, id));
- SVN_ERR(find_noderev(result, revision_info, offset));
-
- return SVN_NO_ERROR;
-}
-
-/* Comparator used for binary search comparing the absolute file offset
- * of a representation to some other offset. DATA is a *representation_t,
- * KEY is a pointer to an apr_size_t.
- */
-static int
-compare_representation_offsets(const void *data, const void *key)
-{
- apr_ssize_t diff = (*(const representation_t *const *)data)->original.offset
- - *(const apr_size_t *)key;
-
- /* sizeof(int) may be < sizeof(ssize_t) */
- if (diff < 0)
- return -1;
- return diff > 0 ? 1 : 0;
-}
-
-/* Find the revision_info_t object to the given REVISION in FS and return
- * it in *REVISION_INFO. For performance reasons, we skip the lookup if
- * the info is already provided.
- *
- * In that revision, look for the representation_t object for offset OFFSET.
- * If it already exists, set *idx to its index in *REVISION_INFO's
- * representations list and return the representation object. Otherwise,
- * set the index to where it must be inserted and return NULL.
- */
-static representation_t *
-find_representation(int *idx,
- fs_fs_t *fs,
- revision_info_t **revision_info,
- int revision,
- apr_size_t offset)
-{
- revision_info_t *info;
- *idx = -1;
-
- /* first let's find the revision '*/
- info = revision_info ? *revision_info : NULL;
- if (info == NULL || info->revision != revision)
- {
- info = APR_ARRAY_IDX(fs->revisions,
- revision - fs->start_revision,
- revision_info_t*);
- if (revision_info)
- *revision_info = info;
- }
-
- /* not found -> no result */
- if (info == NULL)
- return NULL;
-
- assert(revision == info->revision);
-
- /* look for the representation */
- *idx = svn_sort__bsearch_lower_bound(&offset,
- info->representations,
- compare_representation_offsets);
- if (*idx < info->representations->nelts)
- {
- /* return the representation, if this is the one we were looking for */
- representation_t *result
- = APR_ARRAY_IDX(info->representations, *idx, representation_t *);
- if (result->original.offset == offset)
- return result;
- }
-
- /* not parsed, yet */
- return NULL;
-}
-
-/* Read the representation header in FILE_CONTENT at OFFSET. Return its
- * size in *HEADER_SIZE, set *IS_PLAIN if no deltification was used and
- * return the deltification base representation in *REPRESENTATION. If
- * there is none, set it to NULL. Use FS to it look up.
- *
- * Use SCRATCH_POOL for temporary allocations.
- */
-static svn_error_t *
-read_rep_base(representation_t **representation,
- apr_size_t *header_size,
- svn_boolean_t *is_plain,
- fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- apr_size_t offset,
- apr_pool_t *scratch_pool)
-{
- char *str, *last_str;
- int idx, revision;
- apr_uint64_t temp;
-
- /* identify representation header (1 line) */
- const char *buffer = file_content->data + offset;
- const char *line_end = strchr(buffer, '\n');
- *header_size = line_end - buffer + 1;
-
- /* check for PLAIN rep */
- if (strncmp(buffer, "PLAIN\n", *header_size) == 0)
- {
- *is_plain = TRUE;
- *representation = NULL;
- return SVN_NO_ERROR;
- }
-
- /* check for DELTA against empty rep */
- *is_plain = FALSE;
- if (strncmp(buffer, "DELTA\n", *header_size) == 0)
- {
- /* This is a delta against the empty stream. */
- *representation = fs->null_base;
- return SVN_NO_ERROR;
- }
-
- /* it's delta against some other rep. Duplicate the header info such
- * that we may modify it during parsing. */
- str = apr_pstrndup(scratch_pool, buffer, line_end - buffer);
- last_str = str;
-
- /* parse it. */
- str = svn_cstring_tokenize(" ", &last_str);
- str = svn_cstring_tokenize(" ", &last_str);
- SVN_ERR(svn_cstring_atoi(&revision, str));
-
- str = svn_cstring_tokenize(" ", &last_str);
- SVN_ERR(svn_cstring_strtoui64(&temp, str, 0, APR_SIZE_MAX, 10));
-
- /* it should refer to a rep in an earlier revision. Look it up */
- *representation = find_representation(&idx, fs, NULL, revision, (apr_size_t)temp);
- return SVN_NO_ERROR;
-}
-
-/* Parse the representation reference (text: or props:) in VALUE, look
- * it up in FS and return it in *REPRESENTATION. To be able to parse the
- * base rep, we pass the FILE_CONTENT as well.
- *
- * If necessary, allocate the result in POOL; use SCRATCH_POOL for temp.
- * allocations.
- */
-static svn_error_t *
-parse_representation(representation_t **representation,
- fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- svn_string_t *value,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- representation_t *result;
- int revision;
-
- apr_uint64_t offset;
- apr_uint64_t size;
- int idx;
-
- /* read location (revision, offset) and size */
- char *c = (char *)value->data;
- SVN_ERR(svn_cstring_atoi(&revision, svn_cstring_tokenize(" ", &c)));
- SVN_ERR(svn_cstring_strtoui64(&offset, svn_cstring_tokenize(" ", &c), 0, APR_SIZE_MAX, 10));
- SVN_ERR(svn_cstring_strtoui64(&size, svn_cstring_tokenize(" ", &c), 0, APR_SIZE_MAX, 10));
-
- /* look it up */
- result = find_representation(&idx, fs, &revision_info, revision, (apr_size_t)offset);
- if (!result)
- {
- /* not parsed, yet (probably a rep in the same revision).
- * Create a new rep object and determine its base rep as well.
- */
- result = apr_pcalloc(pool, sizeof(*result));
- result->revision = revision_info;
- result->original.offset = (apr_size_t)offset;
- result->original.size = (apr_size_t)size;
- SVN_ERR(read_rep_base(&result->delta_base, &result->header_size,
- &result->is_plain, fs, file_content,
- (apr_size_t)offset + revision_info->original.offset,
- scratch_pool));
-
- svn_sort__array_insert(&result, revision_info->representations, idx);
- }
-
- *representation = result;
-
- return SVN_NO_ERROR;
-}
-
-/* Read the delta window contents of all windows in REPRESENTATION in FS.
- * Return the data as svn_txdelta_window_t* instances in *WINDOWS.
- * Use POOL for allocations.
- */
-static svn_error_t *
-read_windows(apr_array_header_t **windows,
- fs_fs_t *fs,
- representation_t *representation,
- apr_pool_t *pool)
-{
- svn_string_t *content;
- svn_string_t data;
- svn_stream_t *stream;
- apr_size_t offset = representation->original.offset
- + representation->header_size;
- char version;
- apr_size_t len = sizeof(version);
-
- *windows = apr_array_make(pool, 0, sizeof(svn_txdelta_window_t *));
-
- /* get the whole revision content */
- SVN_ERR(get_content(&content, fs, representation->revision->revision, pool));
-
- /* create a read stream and position it directly after the rep header */
- data.data = content->data + offset + 3;
- data.len = representation->original.size - 3;
- stream = svn_stream_from_string(&data, pool);
- SVN_ERR(svn_stream_read(stream, &version, &len));
-
- /* read the windows from that stream */
- while (TRUE)
- {
- svn_txdelta_window_t *window;
- svn_stream_mark_t *mark;
- char dummy;
-
- len = sizeof(dummy);
- SVN_ERR(svn_stream_mark(stream, &mark, pool));
- SVN_ERR(svn_stream_read(stream, &dummy, &len));
- if (len == 0)
- break;
-
- SVN_ERR(svn_stream_seek(stream, mark));
- SVN_ERR(svn_txdelta_read_svndiff_window(&window, stream, version, pool));
- APR_ARRAY_PUSH(*windows, svn_txdelta_window_t *) = window;
- }
-
- return SVN_NO_ERROR;
-}
-
-/* Read the content of the PLAIN REPRESENTATION in FS and return it in
- * *CONTENT. Use POOL for allocations.
- */
-static svn_error_t *
-read_plain(svn_stringbuf_t **content,
- fs_fs_t *fs,
- representation_t *representation,
- apr_pool_t *pool)
-{
- svn_string_t *data;
- apr_size_t offset = representation->original.offset
- + representation->header_size;
-
- SVN_ERR(get_content(&data, fs, representation->revision->revision, pool));
-
- /* content is stored as fulltext already */
- *content = svn_stringbuf_ncreate(data->data + offset,
- representation->original.size,
- pool);
-
- return SVN_NO_ERROR;
-}
-
-/* Get the undeltified representation that is a result of combining all
- * deltas from the current desired REPRESENTATION in FS with its base
- * representation. Store the result in *CONTENT.
- * Use POOL for allocations. */
-static svn_error_t *
-get_combined_window(svn_stringbuf_t **content,
- fs_fs_t *fs,
- representation_t *representation,
- apr_pool_t *pool)
-{
- int i;
- apr_array_header_t *windows;
- svn_stringbuf_t *base_content, *result;
- const char *source;
- apr_pool_t *sub_pool;
- apr_pool_t *iter_pool;
-
- /* special case: no un-deltification necessary */
- if (representation->is_plain)
- return read_plain(content, fs, representation, pool);
-
- /* special case: data already in cache */
- *content = get_cached_window(fs, representation, pool);
- if (*content)
- return SVN_NO_ERROR;
-
- /* read the delta windows for this representation */
- sub_pool = svn_pool_create(pool);
- iter_pool = svn_pool_create(pool);
- SVN_ERR(read_windows(&windows, fs, representation, sub_pool));
-
- /* fetch the / create a base content */
- if (representation->delta_base && representation->delta_base->revision)
- SVN_ERR(get_combined_window(&base_content, fs,
- representation->delta_base, sub_pool));
- else
- base_content = svn_stringbuf_create_empty(sub_pool);
-
- /* apply deltas */
- result = svn_stringbuf_create_empty(pool);
- source = base_content->data;
-
- for (i = 0; i < windows->nelts; ++i)
- {
- svn_txdelta_window_t *window
- = APR_ARRAY_IDX(windows, i, svn_txdelta_window_t *);
- svn_stringbuf_t *buf
- = svn_stringbuf_create_ensure(window->tview_len, iter_pool);
-
- buf->len = window->tview_len;
- svn_txdelta_apply_instructions(window, window->src_ops ? source : NULL,
- buf->data, &buf->len);
-
- svn_stringbuf_appendbytes(result, buf->data, buf->len);
- source += window->sview_len;
-
- svn_pool_clear(iter_pool);
- }
-
- svn_pool_destroy(iter_pool);
- svn_pool_destroy(sub_pool);
-
- /* cache result and return it */
- set_cached_window(fs, representation, result);
- *content = result;
-
- return SVN_NO_ERROR;
-}
-
-/* forward declaration */
-static svn_error_t *
-read_noderev(noderev_t **noderev,
- fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- apr_size_t offset,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool);
-
-/* Get the noderev at OFFSET in FILE_CONTENT in FS. The file content must
- * pertain to the revision given in REVISION_INFO. If the data has not
- * been read yet, parse it and store it in REVISION_INFO. Return the result
- * in *NODEREV.
- *
- * Use POOL for allocations and SCRATCH_POOL for temporaries.
- */
-static svn_error_t *
-get_noderev(noderev_t **noderev,
- fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- apr_size_t offset,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- int idx = svn_sort__bsearch_lower_bound(&offset,
- revision_info->node_revs,
- compare_noderev_offsets);
- if ((idx < 0) || (idx >= revision_info->node_revs->nelts))
- SVN_ERR(read_noderev(noderev, fs, file_content, offset, revision_info,
- pool, scratch_pool));
- else
- {
- *noderev = APR_ARRAY_IDX(revision_info->node_revs, idx, noderev_t *);
- if ((*noderev)->original.offset != offset)
- SVN_ERR(read_noderev(noderev, fs, file_content, offset, revision_info,
- pool, scratch_pool));
- }
-
- return SVN_NO_ERROR;
-}
-
-/* Read the directory stored in REPRESENTATION in FS into *HASH. The result
- * will be allocated in FS' directory cache and it will be plain key-value
- * hash. Use SCRATCH_POOL for temporary allocations.
- */
-static svn_error_t *
-read_dir(apr_hash_t **hash,
- fs_fs_t *fs,
- representation_t *representation,
- apr_pool_t *scratch_pool)
-{
- svn_stringbuf_t *text;
- apr_pool_t *text_pool;
- svn_stream_t *stream;
- apr_pool_t *pool;
-
- /* chances are, we find the info in cache already */
- *hash = get_cached_dir(fs, representation);
- if (*hash)
- return SVN_NO_ERROR;
-
- /* create the result container */
- pool = get_cached_dir_pool(fs);
- *hash = svn_hash__make(pool);
-
- /* if this is a non-empty rep, read it and de-serialize the hash */
- if (representation != NULL)
- {
- text_pool = svn_pool_create(scratch_pool);
- SVN_ERR(get_combined_window(&text, fs, representation, text_pool));
- stream = svn_stream_from_stringbuf(text, text_pool);
- SVN_ERR(svn_hash_read2(*hash, stream, SVN_HASH_TERMINATOR, pool));
- svn_pool_destroy(text_pool);
- }
-
- /* cache the result */
- set_cached_dir(fs, representation, *hash);
-
- return SVN_NO_ERROR;
-}
-
-/* Starting at the directory in REPRESENTATION in FILE_CONTENT, read all
- * DAG nodes, directories and representations linked in that tree structure.
- * Store them in FS and read them only once.
- *
- * Use POOL for persistent allocations and SCRATCH_POOL for temporaries.
- */
-static svn_error_t *
-parse_dir(fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- representation_t *representation,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- apr_hash_t *hash;
- apr_hash_index_t *hi;
- apr_pool_t *iter_pool = svn_pool_create(scratch_pool);
- apr_hash_t *base_dir = svn_hash__make(scratch_pool);
-
- /* special case: empty dir rep */
- if (representation == NULL)
- return SVN_NO_ERROR;
-
- /* if we have a previous representation of that dir, hash it by name */
- if (representation->delta_base && representation->delta_base->dir)
- {
- apr_array_header_t *dir = representation->delta_base->dir->entries;
- int i;
-
- for (i = 0; i < dir->nelts; ++i)
- {
- direntry_t *entry = APR_ARRAY_IDX(dir, i, direntry_t *);
- apr_hash_set(base_dir, entry->name, entry->name_len, entry);
- }
- }
-
- /* read this directory */
- SVN_ERR(read_dir(&hash, fs, representation, scratch_pool));
-
- /* add it as an array to the representation (entries yet to be filled) */
- representation->dir = apr_pcalloc(pool, sizeof(*representation->dir));
- representation->dir->entries
- = apr_array_make(pool, apr_hash_count(hash), sizeof(direntry_t *));
-
- /* Translate the string dir entries into real entries. Reuse existing
- * objects as much as possible to keep memory consumption low.
- */
- for (hi = apr_hash_first(pool, hash); hi; hi = apr_hash_next(hi))
- {
- const char *name = svn__apr_hash_index_key(hi);
- svn_string_t *str_val = svn__apr_hash_index_val(hi);
- apr_size_t offset;
- revision_info_t *revision_info;
-
- /* look for corresponding entry in previous version */
- apr_size_t name_len = strlen(name);
- direntry_t *entry = base_dir
- ? apr_hash_get(base_dir, name, name_len)
- : NULL;
-
- /* parse the new target revnode ID (revision, offset) */
- SVN_ERR(parse_revnode_pos(&revision_info, &offset, fs, str_val));
-
- /* if this is a new entry or if the content changed, create a new
- * instance for it. */
- if ( !entry
- || !entry->node->text
- || entry->node->text->revision != revision_info
- || entry->node->original.offset != offset)
- {
- /* create & init the new entry. Reuse the name string if possible */
- direntry_t *new_entry = apr_pcalloc(pool, sizeof(*entry));
- new_entry->name_len = name_len;
- if (entry)
- new_entry->name = entry->name;
- else
- new_entry->name = apr_pstrdup(pool, name);
-
- /* Link it to the content noderev. Recurse. */
- entry = new_entry;
- SVN_ERR(get_noderev(&entry->node, fs, file_content, offset,
- revision_info, pool, iter_pool));
- }
-
- /* set the directory entry */
- APR_ARRAY_PUSH(representation->dir->entries, direntry_t *) = entry;
- svn_pool_clear(iter_pool);
- }
-
- svn_pool_destroy(iter_pool);
- return SVN_NO_ERROR;
-}
-
-/* Starting at the noderev at OFFSET in FILE_CONTENT, read all DAG nodes,
- * directories and representations linked in that tree structure. Store
- * them in FS and read them only once. Return the result in *NODEREV.
- *
- * Use POOL for persistent allocations and SCRATCH_POOL for temporaries.
- */
-static svn_error_t *
-read_noderev(noderev_t **noderev,
- fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- apr_size_t offset,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- noderev_t *result = apr_pcalloc(pool, sizeof(*result));
- svn_string_t *line;
- svn_boolean_t is_dir = FALSE;
-
- scratch_pool = svn_pool_create(scratch_pool);
-
- /* parse the noderev line-by-line until we find an empty line */
- result->original.offset = offset;
- while (1)
- {
- /* for this line, extract key and value. Ignore invalid values */
- svn_string_t key;
- svn_string_t value;
- char *sep;
- const char *start = file_content->data + offset
- + revision_info->original.offset;
- const char *end = strchr(start, '\n');
-
- line = svn_string_ncreate(start, end - start, scratch_pool);
- offset += end - start + 1;
-
- /* empty line -> end of noderev data */
- if (line->len == 0)
- break;
-
- sep = strchr(line->data, ':');
- if (sep == NULL)
- continue;
-
- key.data = line->data;
- key.len = sep - key.data;
- *sep = 0;
-
- if (key.len + 2 > line->len)
- continue;
-
- value.data = sep + 2;
- value.len = line->len - (key.len + 2);
-
- /* translate (key, value) into noderev elements */
- if (key_matches(&key, "type"))
- is_dir = strcmp(value.data, "dir") == 0;
- else if (key_matches(&key, "pred"))
- SVN_ERR(parse_pred(&result->predecessor, fs, &value));
- else if (key_matches(&key, "text"))
- SVN_ERR(parse_representation(&result->text, fs, file_content,
- &value, revision_info,
- pool, scratch_pool));
- else if (key_matches(&key, "props"))
- SVN_ERR(parse_representation(&result->props, fs, file_content,
- &value, revision_info,
- pool, scratch_pool));
- }
-
- /* link noderev to revision info */
- result->revision = revision_info;
- result->original.size = offset - result->original.offset;
-
- svn_sort__array_insert(&result,
- revision_info->node_revs,
- svn_sort__bsearch_lower_bound(&offset,
- revision_info->node_revs,
- compare_noderev_offsets));
-
- /* if this is a directory, read and process that recursively */
- if (is_dir)
- SVN_ERR(parse_dir(fs, file_content, result->text,
- pool, scratch_pool));
-
- /* done */
- svn_pool_destroy(scratch_pool);
- *noderev = result;
-
- return SVN_NO_ERROR;
-}
-
-/* Simple utility to print a REVISION number and make it appear immediately.
- */
-static void
-print_progress(svn_revnum_t revision)
-{
- printf("%8ld", revision);
- fflush(stdout);
-}
-
-/* Read the content of the pack file staring at revision BASE and store it
- * in FS. Use POOL for allocations.
- */
-static svn_error_t *
-read_pack_file(fs_fs_t *fs,
- svn_revnum_t base,
- apr_pool_t *pool)
-{
- apr_array_header_t *manifest = NULL;
- apr_pool_t *local_pool = svn_pool_create(pool);
- apr_pool_t *iter_pool = svn_pool_create(local_pool);
- int i;
- svn_stringbuf_t *file_content;
- revision_pack_t *revisions;
- const char *pack_folder = get_pack_folder(fs, base, local_pool);
-
- /* read the whole pack file into memory */
- SVN_ERR(read_rev_or_pack_file(&file_content, fs, base, local_pool));
-
- /* create the revision container */
- revisions = apr_pcalloc(pool, sizeof(*revisions));
- revisions->base = base;
- revisions->fragments = NULL;
- revisions->info = apr_array_make(pool,
- fs->max_files_per_dir,
- sizeof(revision_info_t*));
- revisions->filesize = file_content->len;
- APR_ARRAY_PUSH(fs->packs, revision_pack_t*) = revisions;
-
- /* parse the manifest file */
- SVN_ERR(read_manifest(&manifest, fs, pack_folder, local_pool));
- if (manifest->nelts != fs->max_files_per_dir)
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL, NULL);
-
- /* process each revision in the pack file */
- for (i = 0; i < manifest->nelts; ++i)
- {
- apr_size_t root_node_offset;
- svn_string_t rev_content;
-
- /* create the revision info for the current rev */
- revision_info_t *info = apr_pcalloc(pool, sizeof(*info));
- info->node_revs = apr_array_make(iter_pool, 4, sizeof(noderev_t*));
- info->representations = apr_array_make(iter_pool, 4, sizeof(representation_t*));
-
- info->revision = base + i;
- info->original.offset = APR_ARRAY_IDX(manifest, i, apr_size_t);
- info->original.end = i+1 < manifest->nelts
- ? APR_ARRAY_IDX(manifest, i+1 , apr_size_t)
- : file_content->len;
- SVN_ERR(read_revision_header(&info->original.changes,
- &info->original.changes_len,
- &root_node_offset,
- file_content,
- APR_ARRAY_IDX(manifest, i , apr_size_t),
- info->original.end,
- iter_pool));
-
- /* put it into our containers */
- APR_ARRAY_PUSH(revisions->info, revision_info_t*) = info;
- APR_ARRAY_PUSH(fs->revisions, revision_info_t*) = info;
-
- /* cache the revision content */
- rev_content.data = file_content->data + info->original.offset;
- rev_content.len = info->original.end - info->original.offset;
- set_cached_content(fs->cache, info->revision, &rev_content);
-
- /* parse the revision content recursively. */
- SVN_ERR(read_noderev(&info->root_noderev, fs, file_content,
- root_node_offset, info, pool, iter_pool));
-
- /* copy dynamically grown containers from temp into result pool */
- info->node_revs = apr_array_copy(pool, info->node_revs);
- info->representations = apr_array_copy(pool, info->representations);
-
- /* destroy temps */
- svn_pool_clear(iter_pool);
- }
-
- /* one more pack file processed */
- print_progress(base);
- svn_pool_destroy(local_pool);
-
- return SVN_NO_ERROR;
-}
-
-/* Read the content of REVSION file and store it in FS.
- * Use POOL for allocations.
- */
-static svn_error_t *
-read_revision_file(fs_fs_t *fs,
- svn_revnum_t revision,
- apr_pool_t *pool)
-{
- apr_size_t root_node_offset;
- apr_pool_t *local_pool = svn_pool_create(pool);
- svn_stringbuf_t *file_content;
- svn_string_t rev_content;
- revision_pack_t *revisions = apr_pcalloc(pool, sizeof(*revisions));
- revision_info_t *info = apr_pcalloc(pool, sizeof(*info));
-
- /* read the whole pack file into memory */
- SVN_ERR(read_rev_or_pack_file(&file_content, fs, revision, local_pool));
-
- /* create the revision info for the current rev */
- info->node_revs = apr_array_make(pool, 4, sizeof(noderev_t*));
- info->representations = apr_array_make(pool, 4, sizeof(representation_t*));
-
- info->revision = revision;
- info->original.offset = 0;
- info->original.end = file_content->len;
- SVN_ERR(read_revision_header(&info->original.changes,
- &info->original.changes_len,
- &root_node_offset,
- file_content,
- 0,
- info->original.end,
- local_pool));
-
- /* put it into our containers */
- APR_ARRAY_PUSH(fs->revisions, revision_info_t*) = info;
-
- /* create a pseudo-pack file container for just this rev to keep our
- * data structures as uniform as possible.
- */
- revisions->base = revision;
- revisions->fragments = NULL;
- revisions->info = apr_array_make(pool, 1, sizeof(revision_info_t*));
- revisions->filesize = file_content->len;
- APR_ARRAY_PUSH(revisions->info, revision_info_t*) = info;
- APR_ARRAY_PUSH(fs->packs, revision_pack_t*) = revisions;
-
- /* cache the revision content */
- rev_content.data = file_content->data + info->original.offset;
- rev_content.len = info->original.end - info->original.offset;
- set_cached_content(fs->cache, info->revision, &rev_content);
-
- /* parse the revision content recursively. */
- SVN_ERR(read_noderev(&info->root_noderev, fs, file_content,
- root_node_offset, info,
- pool, local_pool));
- APR_ARRAY_PUSH(info->node_revs, noderev_t*) = info->root_noderev;
-
- /* show progress every 1000 revs or so */
- if (revision % fs->max_files_per_dir == 0)
- print_progress(revision);
-
- svn_pool_destroy(local_pool);
-
- return SVN_NO_ERROR;
-}
-
-/* Read the repository at PATH beginning with revision START_REVISION and
- * return the result in *FS. Allocate caches with MEMSIZE bytes total
- * capacity. Use POOL for non-cache allocations.
- */
-static svn_error_t *
-read_revisions(fs_fs_t **fs,
- const char *path,
- svn_revnum_t start_revision,
- apr_size_t memsize,
- apr_pool_t *pool)
-{
- svn_revnum_t revision;
- apr_size_t content_cache_size;
- apr_size_t window_cache_size;
- apr_size_t dir_cache_size;
-
- /* determine cache sizes */
- if (memsize < 100)
- memsize = 100;
-
- content_cache_size = memsize * 7 / 10 > 4000 ? 4000 : memsize * 7 / 10;
- window_cache_size = memsize * 2 / 10 * 1024 * 1024;
- dir_cache_size = (memsize / 10) * 16000;
-
- /* read repo format and such */
- SVN_ERR(fs_open(fs, path, pool));
-
- /* create data containers and caches */
- (*fs)->start_revision = start_revision
- - (start_revision % (*fs)->max_files_per_dir);
- (*fs)->revisions = apr_array_make(pool,
- (*fs)->max_revision + 1 - (*fs)->start_revision,
- sizeof(revision_info_t *));
- (*fs)->packs = apr_array_make(pool,
- ((*fs)->min_unpacked_rev - (*fs)->start_revision)
- / (*fs)->max_files_per_dir,
- sizeof(revision_pack_t *));
- (*fs)->null_base = apr_pcalloc(pool, sizeof(*(*fs)->null_base));
- (*fs)->cache = create_content_cache
- (apr_allocator_owner_get
- (svn_pool_create_allocator(FALSE)),
- content_cache_size * 1024 * 1024);
- (*fs)->dir_cache = create_dir_cache
- (apr_allocator_owner_get
- (svn_pool_create_allocator(FALSE)),
- dir_cache_size);
- (*fs)->window_cache = create_window_cache
- (apr_allocator_owner_get
- (svn_pool_create_allocator(FALSE)),
- 10000, window_cache_size);
-
- /* read all packed revs */
- for ( revision = start_revision
- ; revision < (*fs)->min_unpacked_rev
- ; revision += (*fs)->max_files_per_dir)
- SVN_ERR(read_pack_file(*fs, revision, pool));
-
- /* read non-packed revs */
- for ( ; revision <= (*fs)->max_revision; ++revision)
- SVN_ERR(read_revision_file(*fs, revision, pool));
-
- return SVN_NO_ERROR;
-}
-
-/* Return the maximum number of decimal digits required to represent offsets
- * in the given PACK file.
- */
-static apr_size_t
-get_max_offset_len(const revision_pack_t *pack)
-{
- /* the pack files may grow a few percent.
- * Fudge it up to be on safe side.
- */
- apr_size_t max_future_size = pack->filesize * 2 + 10000;
- apr_size_t result = 0;
-
- while (max_future_size > 0)
- {
- ++result;
- max_future_size /= 10;
- }
-
- return result;
-}
-
-/* Create the fragments container in PACK and add revision header fragments
- * to it. Use POOL for allocations.
- */
-static svn_error_t *
-add_revisions_pack_heads(revision_pack_t *pack,
- apr_pool_t *pool)
-{
- int i;
- revision_info_t *info;
- apr_size_t offset_len = get_max_offset_len(pack);
- fragment_t fragment;
-
- /* allocate fragment arrays */
-
- int fragment_count = 1;
- for (i = 0; i < pack->info->nelts; ++i)
- {
- info = APR_ARRAY_IDX(pack->info, i, revision_info_t*);
- fragment_count += info->node_revs->nelts
- + info->representations->nelts
- + 2;
- }
-
- pack->target_offset = pack->info->nelts > 1 ? 64 : 0;
- pack->fragments = apr_array_make(pool,
- fragment_count,
- sizeof(fragment_t));
-
- /* put revision headers first */
-
- for (i = 0; i < pack->info->nelts - 1; ++i)
- {
- info = APR_ARRAY_IDX(pack->info, i, revision_info_t*);
- info->target.offset = pack->target_offset;
-
- fragment.data = info;
- fragment.kind = header_fragment;
- fragment.position = pack->target_offset;
- APR_ARRAY_PUSH(pack->fragments, fragment_t) = fragment;
-
- pack->target_offset += 2 * offset_len + 3;
- }
-
- info = APR_ARRAY_IDX(pack->info, pack->info->nelts - 1, revision_info_t*);
- info->target.offset = pack->target_offset;
-
- /* followed by the changes list */
-
- for (i = 0; i < pack->info->nelts; ++i)
- {
- info = APR_ARRAY_IDX(pack->info, i, revision_info_t*);
-
- info->target.changes = pack->target_offset - info->target.offset;
- info->target.changes_len = info->original.changes_len;
-
- fragment.data = info;
- fragment.kind = changes_fragment;
- fragment.position = pack->target_offset;
- APR_ARRAY_PUSH(pack->fragments, fragment_t) = fragment;
-
- pack->target_offset += info->original.changes_len;
- }
-
- return SVN_NO_ERROR;
-}
-
-/* For the revision given by INFO in FS, return the fragment container in
- * *FRAGMENTS and the current placement offset in *CURRENT_POS.
- */
-static svn_error_t *
-get_target_offset(apr_size_t **current_pos,
- apr_array_header_t **fragments,
- fs_fs_t *fs,
- revision_info_t *info)
-{
- int i;
- revision_pack_t *pack;
- svn_revnum_t revision = info->revision;
-
- /* identify the pack object */
- if (fs->min_unpacked_rev > revision)
- {
- i = (revision - fs->start_revision) / fs->max_files_per_dir;
- }
- else
- {
- i = (fs->min_unpacked_rev - fs->start_revision) / fs->max_files_per_dir;
- i += revision - fs->min_unpacked_rev;
- }
-
- /* extract the desired info from it */
- pack = APR_ARRAY_IDX(fs->packs, i, revision_pack_t*);
- *current_pos = &pack->target_offset;
- *fragments = pack->fragments;
-
- return SVN_NO_ERROR;
-}
-
-/* forward declaration */
-static svn_error_t *
-add_noderev_recursively(fs_fs_t *fs,
- noderev_t *node,
- apr_pool_t *pool);
-
-/* Place fragments for the given REPRESENTATION of the given KIND, iff it
- * has not been covered, yet. Place the base reps along the deltification
- * chain as far as those reps have not been covered, yet. If REPRESENTATION
- * is a directory, recursively place its elements.
- *
- * Use POOL for allocations.
- */
-static svn_error_t *
-add_representation_recursively(fs_fs_t *fs,
- representation_t *representation,
- enum fragment_kind_t kind,
- apr_pool_t *pool)
-{
- apr_size_t *current_pos;
- apr_array_header_t *fragments;
- fragment_t fragment;
-
- /* place REPRESENTATION only once and only if it exists and will not
- * be covered later as a directory. */
- if ( representation == NULL
- || representation->covered
- || (representation->dir && kind != dir_fragment)
- || representation == fs->null_base)
- return SVN_NO_ERROR;
-
- /* add and place a fragment for REPRESENTATION */
- SVN_ERR(get_target_offset(&current_pos, &fragments,
- fs, representation->revision));
- representation->target.offset = *current_pos;
- representation->covered = TRUE;
-
- fragment.data = representation;
- fragment.kind = kind;
- fragment.position = *current_pos;
- APR_ARRAY_PUSH(fragments, fragment_t) = fragment;
-
- /* determine the size of data to be added to the target file */
- if ( kind != dir_fragment
- && representation->delta_base && representation->delta_base->dir)
- {
- /* base rep is a dir -> would change -> need to store it as fulltext
- * in our target file */
- apr_pool_t *text_pool = svn_pool_create(pool);
- svn_stringbuf_t *content;
-
- SVN_ERR(get_combined_window(&content, fs, representation, text_pool));
- representation->target.size = content->len;
- *current_pos += representation->target.size + 13;
-
- svn_pool_destroy(text_pool);
- }
- else
- if ( kind == dir_fragment
- || (representation->delta_base && representation->delta_base->dir))
- {
- /* deltified directories may grow considerably */
- if (representation->original.size < 50)
- *current_pos += 300;
- else
- *current_pos += representation->original.size * 3 + 150;
- }
- else
- {
- /* plain / deltified content will not change but the header may
- * grow slightly due to larger offsets. */
- representation->target.size = representation->original.size;
-
- if (representation->delta_base &&
- (representation->delta_base != fs->null_base))
- *current_pos += representation->original.size + 50;
- else
- *current_pos += representation->original.size + 13;
- }
-
- /* follow the delta chain and place base revs immediately after this */
- if (representation->delta_base)
- SVN_ERR(add_representation_recursively(fs,
- representation->delta_base,
- kind,
- pool));
-
- /* finally, recurse into directories */
- if (representation->dir)
- {
- int i;
- apr_array_header_t *entries = representation->dir->entries;
-
- for (i = 0; i < entries->nelts; ++i)
- {
- direntry_t *entry = APR_ARRAY_IDX(entries, i, direntry_t *);
- if (entry->node)
- SVN_ERR(add_noderev_recursively(fs, entry->node, pool));
- }
- }
-
- return SVN_NO_ERROR;
-}
-
-/* Place fragments for the given NODE in FS, iff it has not been covered,
- * yet. Place the reps (text, props) immediately after the node.
- *
- * Use POOL for allocations.
- */
-static svn_error_t *
-add_noderev_recursively(fs_fs_t *fs,
- noderev_t *node,
- apr_pool_t *pool)
-{
- apr_size_t *current_pos;
- apr_array_header_t *fragments;
- fragment_t fragment;
-
- /* don't add it twice */
- if (node->covered)
- return SVN_NO_ERROR;
-
- /* add and place a fragment for NODE */
- SVN_ERR(get_target_offset(&current_pos, &fragments, fs, node->revision));
- node->covered = TRUE;
- node->target.offset = *current_pos;
-
- fragment.data = node;
- fragment.kind = noderev_fragment;
- fragment.position = *current_pos;
- APR_ARRAY_PUSH(fragments, fragment_t) = fragment;
-
- /* size may slightly increase */
- *current_pos += node->original.size + 40;
-
- /* recurse into representations */
- if (node->text && node->text->dir)
- SVN_ERR(add_representation_recursively(fs, node->text, dir_fragment, pool));
- else
- SVN_ERR(add_representation_recursively(fs, node->text, file_fragment, pool));
-
- SVN_ERR(add_representation_recursively(fs, node->props, property_fragment, pool));
-
- return SVN_NO_ERROR;
-}
-
-/* Place a fragment for the last revision in PACK. Use POOL for allocations.
- */
-static svn_error_t *
-add_revisions_pack_tail(revision_pack_t *pack,
- apr_pool_t *pool)
-{
- int i;
- revision_info_t *info;
- apr_size_t offset_len = get_max_offset_len(pack);
- fragment_t fragment;
-
- /* put final revision header last and fix up revision lengths */
-
- info = APR_ARRAY_IDX(pack->info, pack->info->nelts-1, revision_info_t*);
-
- fragment.data = info;
- fragment.kind = header_fragment;
- fragment.position = pack->target_offset;
- APR_ARRAY_PUSH(pack->fragments, fragment_t) = fragment;
-
- pack->target_offset += 2 * offset_len + 3;
-
- /* end of target file reached. Store that info in all revs. */
- for (i = 0; i < pack->info->nelts; ++i)
- {
- info = APR_ARRAY_IDX(pack->info, i, revision_info_t*);
- info->target.end = pack->target_offset;
- }
-
- return SVN_NO_ERROR;
-}
-
-/* Place all fragments for all revisions / packs in FS.
- * Use POOL for allocations.
- */
-static svn_error_t *
-reorder_revisions(fs_fs_t *fs,
- apr_pool_t *pool)
-{
- int i, k;
-
- /* headers and changes */
-
- for (i = 0; i < fs->packs->nelts; ++i)
- {
- revision_pack_t *pack = APR_ARRAY_IDX(fs->packs, i, revision_pack_t*);
- SVN_ERR(add_revisions_pack_heads(pack, pool));
- }
-
- /* representations & nodes */
-
- for (i = fs->revisions->nelts-1; i >= 0; --i)
- {
- revision_info_t *info = APR_ARRAY_IDX(fs->revisions, i, revision_info_t*);
- for (k = info->node_revs->nelts - 1; k >= 0; --k)
- {
- noderev_t *node = APR_ARRAY_IDX(info->node_revs, k, noderev_t*);
- SVN_ERR(add_noderev_recursively(fs, node, pool));
- }
-
- if (info->revision % fs->max_files_per_dir == 0)
- print_progress(info->revision);
- }
-
- /* pack file tails */
-
- for (i = 0; i < fs->packs->nelts; ++i)
- {
- revision_pack_t *pack = APR_ARRAY_IDX(fs->packs, i, revision_pack_t*);
- SVN_ERR(add_revisions_pack_tail(pack, pool));
- }
-
- return SVN_NO_ERROR;
-}
-
-/* forward declaration */
-static svn_error_t *
-get_fragment_content(svn_string_t **content,
- fs_fs_t *fs,
- fragment_t *fragment,
- apr_pool_t *pool);
-
-/* Directory content may change and with it, the deltified representations
- * may significantly. This function causes all directory target reps in
- * PACK of FS to be built and their new MD5 as well as rep sizes be updated.
- * We must do that before attempting to write noderevs.
- *
- * Use POOL for allocations.
- */
-static svn_error_t *
-update_noderevs(fs_fs_t *fs,
- revision_pack_t *pack,
- apr_pool_t *pool)
-{
- int i;
- apr_pool_t *itempool = svn_pool_create(pool);
-
- for (i = 0; i < pack->fragments->nelts; ++i)
- {
- fragment_t *fragment = &APR_ARRAY_IDX(pack->fragments, i, fragment_t);
- if (fragment->kind == dir_fragment)
- {
- svn_string_t *content;
-
- /* request updated rep content but ignore the result.
- * We are only interested in the MD5, content and rep size updates. */
- SVN_ERR(get_fragment_content(&content, fs, fragment, itempool));
- svn_pool_clear(itempool);
- }
- }
-
- svn_pool_destroy(itempool);
-
- return SVN_NO_ERROR;
-}
-
-/* Determine the target size of the FRAGMENT in FS and return the value
- * in *LENGTH. If ADD_PADDING has been set, slightly fudge the numbers
- * to account for changes in offset lengths etc. Use POOL for temporary
- * allocations.
- */
-static svn_error_t *
-get_content_length(apr_size_t *length,
- fs_fs_t *fs,
- fragment_t *fragment,
- svn_boolean_t add_padding,
- apr_pool_t *pool)
-{
- svn_string_t *content;
-
- SVN_ERR(get_fragment_content(&content, fs, fragment, pool));
- if (add_padding)
- switch (fragment->kind)
- {
- case dir_fragment:
- *length = content->len + 16;
- break;
- case noderev_fragment:
- *length = content->len + 3;
- break;
- default:
- *length = content->len;
- break;
- }
- else
- *length = content->len;
-
- return SVN_NO_ERROR;
-}
-
-/* Move the FRAGMENT to global file offset NEW_POSITION. Update the target
- * location info of the underlying object as well.
- */
-static void
-move_fragment(fragment_t *fragment,
- apr_size_t new_position)
-{
- revision_info_t *info;
- representation_t *representation;
- noderev_t *node;
-
- /* move the fragment */
- fragment->position = new_position;
-
- /* move the underlying object */
- switch (fragment->kind)
- {
- case header_fragment:
- info = fragment->data;
- info->target.offset = new_position;
- break;
-
- case changes_fragment:
- info = fragment->data;
- info->target.changes = new_position - info->target.offset;
- break;
-
- case property_fragment:
- case file_fragment:
- case dir_fragment:
- representation = fragment->data;
- representation->target.offset = new_position;
- break;
-
- case noderev_fragment:
- node = fragment->data;
- node->target.offset = new_position;
- break;
- }
-}
-
-/* Move the fragments in PACK's target fragment list to their final offsets.
- * This may require several iterations if the fudge factors turned out to
- * be insufficient. Use POOL for allocations.
- */
-static svn_error_t *
-pack_revisions(fs_fs_t *fs,
- revision_pack_t *pack,
- apr_pool_t *pool)
-{
- int i;
- fragment_t *fragment, *next;
- svn_boolean_t needed_to_expand;
- revision_info_t *info;
- apr_size_t current_pos, len, old_len;
-
- apr_pool_t *itempool = svn_pool_create(pool);
-
- /* update all directory reps. Chances are that most of the target rep
- * sizes are now close to accurate. */
- SVN_ERR(update_noderevs(fs, pack, pool));
-
- /* compression phase: pack all fragments tightly with only a very small
- * fudge factor. This should cause offsets to shrink, thus all the
- * actual fragment rate should tend to be even smaller afterwards. */
- current_pos = pack->info->nelts > 1 ? 64 : 0;
- for (i = 0; i + 1 < pack->fragments->nelts; ++i)
- {
- fragment = &APR_ARRAY_IDX(pack->fragments, i, fragment_t);
- SVN_ERR(get_content_length(&len, fs, fragment, TRUE, itempool));
- move_fragment(fragment, current_pos);
- current_pos += len;
-
- svn_pool_clear(itempool);
- }
-
- /* don't forget the final fragment (last revision's revision header) */
- fragment = &APR_ARRAY_IDX(pack->fragments, pack->fragments->nelts-1, fragment_t);
- fragment->position = current_pos;
-
- /* expansion phase: check whether all fragments fit into their allotted
- * slots. Grow them geometrically if they don't fit. Retry until they
- * all do fit.
- * Note: there is an upper limit to which fragments can grow. So, this
- * loop will terminate. Often, no expansion will be necessary at all. */
- do
- {
- needed_to_expand = FALSE;
- current_pos = pack->info->nelts > 1 ? 64 : 0;
-
- for (i = 0; i + 1 < pack->fragments->nelts; ++i)
- {
- fragment = &APR_ARRAY_IDX(pack->fragments, i, fragment_t);
- next = &APR_ARRAY_IDX(pack->fragments, i + 1, fragment_t);
- old_len = next->position - fragment->position;
-
- SVN_ERR(get_content_length(&len, fs, fragment, FALSE, itempool));
-
- if (len > old_len)
- {
- len = (apr_size_t)(len * 1.1) + 10;
- needed_to_expand = TRUE;
- }
- else
- len = old_len;
-
- if (i == pack->info->nelts - 1)
- {
- info = APR_ARRAY_IDX(pack->info, pack->info->nelts - 1, revision_info_t*);
- info->target.offset = current_pos;
- }
-
- move_fragment(fragment, current_pos);
- current_pos += len;
-
- svn_pool_clear(itempool);
- }
-
- fragment = &APR_ARRAY_IDX(pack->fragments, pack->fragments->nelts-1, fragment_t);
- fragment->position = current_pos;
-
- /* update the revision
- * sizes (they all end at the end of the pack file now) */
- SVN_ERR(get_content_length(&len, fs, fragment, FALSE, itempool));
- current_pos += len;
-
- for (i = 0; i < pack->info->nelts; ++i)
- {
- info = APR_ARRAY_IDX(pack->info, i, revision_info_t*);
- info->target.end = current_pos;
- }
- }
- while (needed_to_expand);
-
- svn_pool_destroy(itempool);
-
- return SVN_NO_ERROR;
-}
-
-/* Write reorg'ed target content for PACK in FS. Use POOL for allocations.
- */
-static svn_error_t *
-write_revisions(fs_fs_t *fs,
- revision_pack_t *pack,
- apr_pool_t *pool)
-{
- int i;
- fragment_t *fragment = NULL;
- svn_string_t *content;
-
- apr_pool_t *itempool = svn_pool_create(pool);
- apr_pool_t *iterpool = svn_pool_create(pool);
-
- apr_file_t *file;
- apr_size_t current_pos = 0;
- svn_stringbuf_t *null_buffer = svn_stringbuf_create_empty(iterpool);
-
- /* create the target file */
- const char *dir = apr_psprintf(iterpool, "%s/new/%ld%s",
- fs->path, pack->base / fs->max_files_per_dir,
- pack->info->nelts > 1 ? ".pack" : "");
- SVN_ERR(svn_io_make_dir_recursively(dir, pool));
- SVN_ERR(svn_io_file_open(&file,
- pack->info->nelts > 1
- ? apr_psprintf(iterpool, "%s/pack", dir)
- : apr_psprintf(iterpool, "%s/%ld", dir, pack->base),
- APR_WRITE | APR_CREATE | APR_BUFFERED,
- APR_OS_DEFAULT,
- iterpool));
-
- /* write all fragments */
- for (i = 0; i < pack->fragments->nelts; ++i)
- {
- apr_size_t padding;
-
- /* get fragment content to write */
- fragment = &APR_ARRAY_IDX(pack->fragments, i, fragment_t);
- SVN_ERR(get_fragment_content(&content, fs, fragment, itempool));
- SVN_ERR_ASSERT(fragment->position >= current_pos);
-
- /* number of bytes between this and the previous fragment */
- if ( fragment->kind == header_fragment
- && i+1 < pack->fragments->nelts)
- /* special case: header fragments are aligned to the slot end */
- padding = APR_ARRAY_IDX(pack->fragments, i+1, fragment_t).position -
- content->len - current_pos;
- else
- /* standard case: fragments are aligned to the slot start */
- padding = fragment->position - current_pos;
-
- /* write padding between fragments */
- if (padding)
- {
- while (null_buffer->len < padding)
- svn_stringbuf_appendbyte(null_buffer, 0);
-
- SVN_ERR(svn_io_file_write_full(file,
- null_buffer->data,
- padding,
- NULL,
- itempool));
- current_pos += padding;
- }
-
- /* write fragment content */
- SVN_ERR(svn_io_file_write_full(file,
- content->data,
- content->len,
- NULL,
- itempool));
- current_pos += content->len;
-
- svn_pool_clear(itempool);
- }
-
- apr_file_close(file);
-
- /* write new manifest file */
- if (pack->info->nelts > 1)
- {
- svn_stream_t *stream;
- SVN_ERR(svn_io_file_open(&file,
- apr_psprintf(iterpool, "%s/manifest", dir),
- APR_WRITE | APR_CREATE | APR_BUFFERED,
- APR_OS_DEFAULT,
- iterpool));
- stream = svn_stream_from_aprfile2(file, FALSE, iterpool);
-
- for (i = 0; i < pack->info->nelts; ++i)
- {
- revision_info_t *info = APR_ARRAY_IDX(pack->info, i,
- revision_info_t *);
- SVN_ERR(svn_stream_printf(stream, itempool,
- "%" APR_SIZE_T_FMT "\n",
- info->target.offset));
- svn_pool_clear(itempool);
- }
- }
-
- /* cleanup */
- svn_pool_destroy(itempool);
- svn_pool_destroy(iterpool);
-
- return SVN_NO_ERROR;
-}
-
-/* Write reorg'ed target content for all revisions in FS. To maximize
- * data locality, pack and write in one go per pack file.
- * Use POOL for allocations.
- */
-static svn_error_t *
-pack_and_write_revisions(fs_fs_t *fs,
- apr_pool_t *pool)
-{
- int i;
-
- SVN_ERR(svn_io_make_dir_recursively(apr_psprintf(pool, "%s/new",
- fs->path),
- pool));
-
- for (i = 0; i < fs->packs->nelts; ++i)
- {
- revision_pack_t *pack = APR_ARRAY_IDX(fs->packs, i, revision_pack_t*);
- if (pack->base % fs->max_files_per_dir == 0)
- print_progress(pack->base);
-
- SVN_ERR(pack_revisions(fs, pack, pool));
- SVN_ERR(write_revisions(fs, pack, pool));
- }
-
- return SVN_NO_ERROR;
-}
-
-/* For the directory REPRESENTATION in FS, construct the new (target)
- * serialized plaintext representation and return it in *CONTENT.
- * Allocate the result in POOL and temporaries in SCRATCH_POOL.
- */
-static svn_error_t *
-get_updated_dir(svn_string_t **content,
- fs_fs_t *fs,
- representation_t *representation,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- apr_hash_t *hash;
- apr_pool_t *hash_pool = svn_pool_create(scratch_pool);
- apr_array_header_t *dir = representation->dir->entries;
- int i;
- svn_stream_t *stream;
- svn_stringbuf_t *result;
-
- /* get the original content */
- SVN_ERR(read_dir(&hash, fs, representation, scratch_pool));
- hash = apr_hash_copy(hash_pool, hash);
-
- /* update all entries */
- for (i = 0; i < dir->nelts; ++i)
- {
- char buffer[256];
- svn_string_t *new_val;
- apr_size_t pos;
-
- /* find the original entry for for the current name */
- direntry_t *entry = APR_ARRAY_IDX(dir, i, direntry_t *);
- svn_string_t *str_val = apr_hash_get(hash, entry->name, entry->name_len);
- if (str_val == NULL)
- return svn_error_createf(SVN_ERR_FS_CORRUPT, NULL,
- _("Dir entry '%s' not found"), entry->name);
-
- SVN_ERR_ASSERT(str_val->len < sizeof(buffer));
-
- /* create and updated node ID */
- memcpy(buffer, str_val->data, str_val->len+1);
- pos = strchr(buffer, '/') - buffer + 1;
- pos += svn__ui64toa(buffer + pos, entry->node->target.offset - entry->node->revision->target.offset);
- new_val = svn_string_ncreate(buffer, pos, hash_pool);
-
- /* store it in the hash */
- apr_hash_set(hash, entry->name, entry->name_len, new_val);
- }
-
- /* serialize the updated hash */
- result = svn_stringbuf_create_ensure(representation->target.size, pool);
- stream = svn_stream_from_stringbuf(result, hash_pool);
- SVN_ERR(svn_hash_write2(hash, stream, SVN_HASH_TERMINATOR, hash_pool));
- svn_pool_destroy(hash_pool);
-
- /* done */
- *content = svn_stringbuf__morph_into_string(result);
-
- return SVN_NO_ERROR;
-}
-
-/* Calculate the delta representation for the given CONTENT and BASE.
- * Return the rep in *DIFF. Use POOL for allocations.
- */
-static svn_error_t *
-diff_stringbufs(svn_stringbuf_t *diff,
- svn_string_t *base,
- svn_string_t *content,
- apr_pool_t *pool)
-{
- svn_txdelta_window_handler_t diff_wh;
- void *diff_whb;
-
- svn_stream_t *stream;
- svn_stream_t *source = svn_stream_from_string(base, pool);
- svn_stream_t *target = svn_stream_from_stringbuf(diff, pool);
-
- /* Prepare to write the svndiff data. */
- svn_txdelta_to_svndiff3(&diff_wh,
- &diff_whb,
- target,
- 1,
- SVN_DELTA_COMPRESSION_LEVEL_DEFAULT,
- pool);
-
- /* create delta stream */
- stream = svn_txdelta_target_push(diff_wh, diff_whb, source, pool);
-
- /* run delta */
- SVN_ERR(svn_stream_write(stream, content->data, &content->len));
- SVN_ERR(svn_stream_close(stream));
-
- return SVN_NO_ERROR;
-}
-
-/* Update the noderev id value for KEY in the textual noderev representation
- * in NODE_REV. Take the new id from NODE. This is a no-op if the KEY
- * cannot be found.
- */
-static void
-update_id(svn_stringbuf_t *node_rev,
- const char *key,
- noderev_t *node)
-{
- char *newline_pos = 0;
- char *pos;
-
- /* we need to update the offset only -> find its position */
- pos = strstr(node_rev->data, key);
- if (pos)
- pos = strchr(pos, '/');
- if (pos)
- newline_pos = strchr(++pos, '\n');
-
- if (pos && newline_pos)
- {
- /* offset data has been found -> replace it */
- char temp[SVN_INT64_BUFFER_SIZE];
- apr_size_t len = svn__i64toa(temp, node->target.offset - node->revision->target.offset);
- svn_stringbuf_replace(node_rev,
- pos - node_rev->data, newline_pos - pos,
- temp, len);
- }
-}
-
-/* Update the representation id value for KEY in the textual noderev
- * representation in NODE_REV. Take the offset, sizes and new MD5 from
- * REPRESENTATION. Use SCRATCH_POOL for allocations.
- * This is a no-op if the KEY cannot be found.
- */
-static void
-update_text(svn_stringbuf_t *node_rev,
- const char *key,
- representation_t *representation,
- apr_pool_t *scratch_pool)
-{
- apr_size_t key_len = strlen(key);
- char *pos = strstr(node_rev->data, key);
- char *val_pos;
-
- if (!pos)
- return;
-
- val_pos = pos + key_len;
- if (representation->dir)
- {
- /* for directories, we need to write all rep info anew */
- char *newline_pos = strchr(val_pos, '\n');
- svn_checksum_t checksum;
- const char* temp = apr_psprintf(scratch_pool, "%ld %" APR_SIZE_T_FMT " %"
- APR_SIZE_T_FMT" %" APR_SIZE_T_FMT " %s",
- representation->revision->revision,
- representation->target.offset - representation->revision->target.offset,
- representation->target.size,
- representation->dir->size,
- svn_checksum_to_cstring(&checksum,
- scratch_pool));
-
- checksum.digest = representation->dir->target_md5;
- checksum.kind = svn_checksum_md5;
- svn_stringbuf_replace(node_rev,
- val_pos - node_rev->data, newline_pos - val_pos,
- temp, strlen(temp));
- }
- else
- {
- /* ordinary representation: replace offset and rep size only.
- * Content size and checksums are unchanged. */
- const char* temp;
- char *end_pos = strchr(val_pos, ' ');
-
- val_pos = end_pos + 1;
- end_pos = strchr(strchr(val_pos, ' ') + 1, ' ');
- temp = apr_psprintf(scratch_pool, "%" APR_SIZE_T_FMT " %" APR_SIZE_T_FMT,
- representation->target.offset - representation->revision->target.offset,
- representation->target.size);
-
- svn_stringbuf_replace(node_rev,
- val_pos - node_rev->data, end_pos - val_pos,
- temp, strlen(temp));
- }
-}
-
-/* Get the target content (data block as to be written to the file) for
- * the given FRAGMENT in FS. Return the content in *CONTENT. Use POOL
- * for allocations.
- *
- * Note that, as a side-effect, this will update the target rep. info for
- * directories.
- */
-static svn_error_t *
-get_fragment_content(svn_string_t **content,
- fs_fs_t *fs,
- fragment_t *fragment,
- apr_pool_t *pool)
-{
- revision_info_t *info;
- representation_t *representation;
- noderev_t *node;
- svn_string_t *revision_content, *base_content;
- svn_stringbuf_t *header, *node_rev, *text;
- apr_size_t header_size;
- svn_checksum_t *checksum = NULL;
-
- switch (fragment->kind)
- {
- /* revision headers can be constructed from target position info */
- case header_fragment:
- info = fragment->data;
- *content = svn_string_createf(pool,
- "\n%" APR_SIZE_T_FMT " %" APR_SIZE_T_FMT "\n",
- info->root_noderev->target.offset - info->target.offset,
- info->target.changes);
- return SVN_NO_ERROR;
-
- /* The changes list remains untouched */
- case changes_fragment:
- info = fragment->data;
- SVN_ERR(get_content(&revision_content, fs, info->revision, pool));
-
- *content = svn_string_create_empty(pool);
- (*content)->data = revision_content->data + info->original.changes;
- (*content)->len = info->target.changes_len;
- return SVN_NO_ERROR;
-
- /* property and file reps get new headers any need to be rewritten,
- * iff the base rep is a directory. The actual (deltified) content
- * remains unchanged, though. MD5 etc. do not change. */
- case property_fragment:
- case file_fragment:
- representation = fragment->data;
- SVN_ERR(get_content(&revision_content, fs,
- representation->revision->revision, pool));
-
- if (representation->delta_base)
- if (representation->delta_base->dir)
- {
- /* if the base happens to be a directory, reconstruct the
- * full text and represent it as PLAIN rep. */
- SVN_ERR(get_combined_window(&text, fs, representation, pool));
- representation->target.size = text->len;
-
- svn_stringbuf_insert(text, 0, "PLAIN\n", 6);
- svn_stringbuf_appendcstr(text, "ENDREP\n");
- *content = svn_stringbuf__morph_into_string(text);
-
- return SVN_NO_ERROR;
- }
- else
- /* construct a new rep header */
- if (representation->delta_base == fs->null_base)
- header = svn_stringbuf_create("DELTA\n", pool);
- else
- header = svn_stringbuf_createf(pool,
- "DELTA %ld %" APR_SIZE_T_FMT " %" APR_SIZE_T_FMT "\n",
- representation->delta_base->revision->revision,
- representation->delta_base->target.offset
- - representation->delta_base->revision->target.offset,
- representation->delta_base->target.size);
- else
- header = svn_stringbuf_create("PLAIN\n", pool);
-
- /* if it exists, the actual delta base is unchanged. Hence, this
- * rep is unchanged even if it has been deltified. */
- header_size = strchr(revision_content->data +
- representation->original.offset, '\n') -
- revision_content->data -
- representation->original.offset + 1;
- svn_stringbuf_appendbytes(header,
- revision_content->data +
- representation->original.offset +
- header_size,
- representation->original.size);
- svn_stringbuf_appendcstr(header, "ENDREP\n");
- *content = svn_stringbuf__morph_into_string(header);
- return SVN_NO_ERROR;
-
- /* directory reps need to be rewritten (and deltified) completely.
- * As a side-effect, update the MD5 and target content size. */
- case dir_fragment:
- /* construct new content and update MD5 */
- representation = fragment->data;
- SVN_ERR(get_updated_dir(&revision_content, fs, representation,
- pool, pool));
- SVN_ERR(svn_checksum(&checksum, svn_checksum_md5,
- revision_content->data, revision_content->len,
- pool));
- memcpy(representation->dir->target_md5,
- checksum->digest,
- sizeof(representation->dir->target_md5));
-
- /* deltify against the base rep if necessary */
- if (representation->delta_base)
- {
- if (representation->delta_base->dir == NULL)
- {
- /* dummy or non-dir base rep -> self-compress only */
- header = svn_stringbuf_create("DELTA\n", pool);
- base_content = svn_string_create_empty(pool);
- }
- else
- {
- /* deltify against base rep (which is a directory, too)*/
- representation_t *base_rep = representation->delta_base;
- header = svn_stringbuf_createf(pool,
- "DELTA %ld %" APR_SIZE_T_FMT " %" APR_SIZE_T_FMT "\n",
- base_rep->revision->revision,
- base_rep->target.offset - base_rep->revision->target.offset,
- base_rep->target.size);
- SVN_ERR(get_updated_dir(&base_content, fs, base_rep,
- pool, pool));
- }
-
- /* run deltification and update target content size */
- header_size = header->len;
- SVN_ERR(diff_stringbufs(header, base_content,
- revision_content, pool));
- representation->dir->size = revision_content->len;
- representation->target.size = header->len - header_size;
- svn_stringbuf_appendcstr(header, "ENDREP\n");
- *content = svn_stringbuf__morph_into_string(header);
- }
- else
- {
- /* no delta base (not even a dummy) -> PLAIN rep */
- representation->target.size = revision_content->len;
- representation->dir->size = revision_content->len;
- *content = svn_string_createf(pool, "PLAIN\n%sENDREP\n",
- revision_content->data);
- }
-
- return SVN_NO_ERROR;
-
- /* construct the new noderev content. No side-effects.*/
- case noderev_fragment:
- /* get the original noderev as string */
- node = fragment->data;
- SVN_ERR(get_content(&revision_content, fs,
- node->revision->revision, pool));
- node_rev = svn_stringbuf_ncreate(revision_content->data +
- node->original.offset,
- node->original.size,
- pool);
-
- /* update the values that may have hanged for target */
- update_id(node_rev, "id: ", node);
- update_id(node_rev, "pred: ", node->predecessor);
- update_text(node_rev, "text: ", node->text, pool);
- update_text(node_rev, "props: ", node->props, pool);
-
- *content = svn_stringbuf__morph_into_string(node_rev);
- return SVN_NO_ERROR;
- }
-
- SVN_ERR_ASSERT(0);
-
- return SVN_NO_ERROR;
-}
-
-/* In the repository at PATH, restore the original content in case we ran
- * this reorg tool before. Use POOL for allocations.
- */
-static svn_error_t *
-prepare_repo(const char *path, apr_pool_t *pool)
-{
- svn_node_kind_t kind;
-
- const char *old_path = svn_dirent_join(path, "db/old", pool);
- const char *new_path = svn_dirent_join(path, "new", pool);
- const char *revs_path = svn_dirent_join(path, "db/revs", pool);
- const char *old_rep_cache_path = svn_dirent_join(path, "db/rep-cache.db.old", pool);
- const char *rep_cache_path = svn_dirent_join(path, "db/rep-cache.db", pool);
-
- /* is there a backup? */
- SVN_ERR(svn_io_check_path(old_path, &kind, pool));
- if (kind == svn_node_dir)
- {
- /* yes, restore the org content from it */
- SVN_ERR(svn_io_remove_dir2(new_path, TRUE, NULL, NULL, pool));
- SVN_ERR(svn_io_file_move(revs_path, new_path, pool));
- SVN_ERR(svn_io_file_move(old_path, revs_path, pool));
- SVN_ERR(svn_io_remove_dir2(new_path, TRUE, NULL, NULL, pool));
- }
-
- /* same for the rep cache db */
- SVN_ERR(svn_io_check_path(old_rep_cache_path, &kind, pool));
- if (kind == svn_node_file)
- SVN_ERR(svn_io_file_move(old_rep_cache_path, rep_cache_path, pool));
-
- return SVN_NO_ERROR;
-}
-
-/* In the repository at PATH, create a backup of the orig content and
- * replace it with the reorg'ed. Use POOL for allocations.
- */
-static svn_error_t *
-activate_new_revs(const char *path, apr_pool_t *pool)
-{
- svn_node_kind_t kind;
-
- const char *old_path = svn_dirent_join(path, "db/old", pool);
- const char *new_path = svn_dirent_join(path, "new", pool);
- const char *revs_path = svn_dirent_join(path, "db/revs", pool);
- const char *old_rep_cache_path = svn_dirent_join(path, "db/rep-cache.db.old", pool);
- const char *rep_cache_path = svn_dirent_join(path, "db/rep-cache.db", pool);
-
- /* if there is no backup, yet, move the current repo content to the backup
- * and place it with the new (reorg'ed) data. */
- SVN_ERR(svn_io_check_path(old_path, &kind, pool));
- if (kind == svn_node_none)
- {
- SVN_ERR(svn_io_file_move(revs_path, old_path, pool));
- SVN_ERR(svn_io_file_move(new_path, revs_path, pool));
- }
-
- /* same for the rep cache db */
- SVN_ERR(svn_io_check_path(old_rep_cache_path, &kind, pool));
- if (kind == svn_node_none)
- SVN_ERR(svn_io_file_move(rep_cache_path, old_rep_cache_path, pool));
-
- return SVN_NO_ERROR;
-}
-
-/* Write tool usage info text to OSTREAM using PROGNAME as a prefix and
- * POOL for allocations.
- */
-static void
-print_usage(svn_stream_t *ostream, const char *progname,
- apr_pool_t *pool)
-{
- svn_error_clear(svn_stream_printf(ostream, pool,
- "\n"
- "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!! WARNING !!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
- "!!! This is an experimental tool. Don't use it on production data !!!\n"
- "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
- "\n"
- "Usage: %s <repo> <cachesize>\n"
- "\n"
- "Optimize the repository at local path <repo> staring from revision 0.\n"
- "Use up to <cachesize> MB of memory for caching. This does not include\n"
- "temporary representation of the repository structure, i.e. the actual\n"
- "memory will be higher and <cachesize> be the lower limit.\n",
- progname));
-}
-
-/* linear control flow */
-int main(int argc, const char *argv[])
-{
- apr_pool_t *pool;
- svn_stream_t *ostream;
- svn_error_t *svn_err;
- const char *repo_path = NULL;
- svn_revnum_t start_revision = 0;
- apr_size_t memsize = 0;
- apr_uint64_t temp = 0;
- fs_fs_t *fs;
-
- apr_initialize();
- atexit(apr_terminate);
-
- pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
-
- svn_err = svn_stream_for_stdout(&ostream, pool);
- if (svn_err)
- {
- svn_handle_error2(svn_err, stdout, FALSE, ERROR_TAG);
- return 2;
- }
-
- if (argc != 3)
- {
- print_usage(ostream, argv[0], pool);
- return 2;
- }
-
- svn_err = svn_cstring_strtoui64(&temp, argv[2], 0, APR_SIZE_MAX, 10);
- if (svn_err)
- {
- print_usage(ostream, argv[0], pool);
- svn_error_clear(svn_err);
- return 2;
- }
-
- memsize = (apr_size_t)temp;
- repo_path = argv[1];
- start_revision = 0;
-
- printf("\nPreparing repository\n");
- svn_err = prepare_repo(repo_path, pool);
-
- if (!svn_err)
- {
- printf("Reading revisions\n");
- svn_err = read_revisions(&fs, repo_path, start_revision, memsize, pool);
- }
-
- if (!svn_err)
- {
- printf("\nReordering revision content\n");
- svn_err = reorder_revisions(fs, pool);
- }
-
- if (!svn_err)
- {
- printf("\nPacking and writing revisions\n");
- svn_err = pack_and_write_revisions(fs, pool);
- }
-
- if (!svn_err)
- {
- printf("\nSwitch to new revs\n");
- svn_err = activate_new_revs(repo_path, pool);
- }
-
- if (svn_err)
- {
- svn_handle_error2(svn_err, stdout, FALSE, ERROR_TAG);
- return 2;
- }
-
- return 0;
-}
diff --git a/tools/dev/gdb-py/svndbg/printers.py b/tools/dev/gdb-py/svndbg/printers.py
index da041b4..f1ee085 100644
--- a/tools/dev/gdb-py/svndbg/printers.py
+++ b/tools/dev/gdb-py/svndbg/printers.py
@@ -145,8 +145,8 @@ cstringType = gdb.lookup_type('char').pointer()
apr_hash_count = InferiorFunction('apr_hash_count')
apr_hash_first = InferiorFunction('apr_hash_first')
apr_hash_next = InferiorFunction('apr_hash_next')
-svn__apr_hash_index_key = InferiorFunction('svn__apr_hash_index_key')
-svn__apr_hash_index_val = InferiorFunction('svn__apr_hash_index_val')
+apr_hash_this_key = InferiorFunction('apr_hash_this_key')
+apr_hash_this_val = InferiorFunction('apr_hash_this_val')
def children_of_apr_hash(hash_p, value_type=None):
"""Iterate over an 'apr_hash_t *' GDB value, in the way required for a
@@ -156,9 +156,9 @@ def children_of_apr_hash(hash_p, value_type=None):
"""
hi = apr_hash_first(0, hash_p)
while (hi):
- k = svn__apr_hash_index_key(hi).reinterpret_cast(cstringType)
+ k = apr_hash_this_key(hi).reinterpret_cast(cstringType)
if value_type:
- val = svn__apr_hash_index_val(hi).reinterpret_cast(value_type)
+ val = apr_hash_this_val(hi).reinterpret_cast(value_type)
else:
val = '...'
try:
diff --git a/tools/dev/po-merge.py b/tools/dev/po-merge.py
index 15f0897..e63a739 100755
--- a/tools/dev/po-merge.py
+++ b/tools/dev/po-merge.py
@@ -146,6 +146,7 @@ def main(argv):
string_count = 0
update_count = 0
untranslated = 0
+ fuzzy = 0
while True:
comments, msgid, msgid_plural, msgstr = parse_translation(infile)
if not comments and msgid is None:
@@ -177,14 +178,19 @@ def main(argv):
for i in msgstr:
outfile.write('msgstr[%s] %s\n' % (n, msgstr[n]))
n += 1
- for m in msgstr:
- if m == '""':
- untranslated += 1
+ if msgstr is not None:
+ for m in msgstr:
+ if m == '""':
+ untranslated += 1
+ for c in comments:
+ if c.startswith('#,') and 'fuzzy' in c.split(', '):
+ fuzzy += 1
# We're done. Tell the user what we did.
print(('%d strings updated. '
+ '%d fuzzy strings. '
'%d of %d strings are still untranslated (%.0f%%).' %
- (update_count, untranslated, string_count,
+ (update_count, fuzzy, untranslated, string_count,
100.0 * untranslated / string_count)))
if __name__ == '__main__':
diff --git a/tools/dev/remove-trailing-whitespace.sh b/tools/dev/remove-trailing-whitespace.sh
index 440dfaa..1dbde0c 100755
--- a/tools/dev/remove-trailing-whitespace.sh
+++ b/tools/dev/remove-trailing-whitespace.sh
@@ -17,8 +17,8 @@
# specific language governing permissions and limitations
# under the License.
- for ext in c h cpp java py pl rb hpp cmd bat; do
- find . -name "*.$ext" -exec \
+ for ext in c h cpp java py pl rb hpp cmd bat sql sh; do
+ find . -name "*.$ext" -not -type l -exec \
perl -pi -e 's/[ \t]*$//' {} + ;
- # don't use \t to not strip ^L pagebreaks
- done
+ # don't use \s to not strip ^L pagebreaks
+ done
diff --git a/tools/dev/svnraisetreeconflict/svnraisetreeconflict.c b/tools/dev/svnraisetreeconflict/svnraisetreeconflict.c
index aa39816..65825d5 100644
--- a/tools/dev/svnraisetreeconflict/svnraisetreeconflict.c
+++ b/tools/dev/svnraisetreeconflict/svnraisetreeconflict.c
@@ -49,22 +49,6 @@
#define OPT_VERSION SVN_OPT_FIRST_LONGOPT_ID
-/** A statement macro, similar to @c SVN_INT_ERR, but issues a
- * message saying "svnraisetreeconflict:" instead of "svn:".
- *
- * Evaluate @a expr. If it yields an error, handle that error and
- * return @c EXIT_FAILURE.
- */
-#define SVNRAISETC_INT_ERR(expr) \
- do { \
- svn_error_t *svn_err__temp = (expr); \
- if (svn_err__temp) { \
- svn_handle_error2(svn_err__temp, stderr, FALSE, \
- "svnraisetreeconflict: "); \
- svn_error_clear(svn_err__temp); \
- return EXIT_FAILURE; } \
- } while (0)
-
static svn_error_t *
version(apr_pool_t *pool)
{
@@ -78,7 +62,6 @@ usage(apr_pool_t *pool)
svn_error_clear(svn_cmdline_fprintf
(stderr, pool,
_("Type 'svnraisetreeconflict --help' for usage.\n")));
- exit(1);
}
/***************************************************************************
@@ -224,7 +207,7 @@ raise_tree_conflict(int argc, const char **argv, apr_pool_t *pool)
right = svn_wc_conflict_version_create2(repos_url2, NULL, path_in_repos2,
peg_rev2, kind2, pool);
c = svn_wc_conflict_description_create_tree2(wc_abspath, kind,
- operation, left, right, pool);
+ operation, left, right, pool);
c->action = (svn_wc_conflict_action_t)action;
c->reason = (svn_wc_conflict_reason_t)reason;
@@ -295,7 +278,6 @@ help(const apr_getopt_option_t *options, apr_pool_t *pool)
get_enum_str(node_kind_map, svn_node_file),
get_enum_str(node_kind_map, svn_node_none)
));
- exit(0);
}
@@ -311,14 +293,17 @@ check_lib_versions(void)
};
SVN_VERSION_DEFINE(my_version);
- return svn_ver_check_list(&my_version, checklist);
+ return svn_ver_check_list2(&my_version, checklist, svn_ver_equal);
}
-int
-main(int argc, const char *argv[])
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
{
- apr_pool_t *pool;
- svn_error_t *err;
apr_getopt_t *os;
const apr_getopt_option_t options[] =
{
@@ -329,33 +314,18 @@ main(int argc, const char *argv[])
};
apr_array_header_t *remaining_argv;
- /* Initialize the app. */
- if (svn_cmdline_init("svnraisetreeconflict", stderr) != EXIT_SUCCESS)
- return EXIT_FAILURE;
-
- /* Create our top-level pool. Use a separate mutexless allocator,
- * given this application is single threaded.
- */
- pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
-
/* Check library versions */
- err = check_lib_versions();
- if (err)
- return svn_cmdline_handle_exit_error(err, pool, "svnraisetreeconflict: ");
+ SVN_ERR(check_lib_versions());
#if defined(WIN32) || defined(__CYGWIN__)
/* Set the working copy administrative directory name. */
if (getenv("SVN_ASP_DOT_NET_HACK"))
{
- err = svn_wc_set_adm_dir("_svn", pool);
- if (err)
- return svn_cmdline_handle_exit_error(err, pool, "svnraisetreeconflict: ");
+ SVN_ERR(svn_wc_set_adm_dir("_svn", pool));
}
#endif
- err = svn_cmdline__getopt_init(&os, argc, argv, pool);
- if (err)
- return svn_cmdline_handle_exit_error(err, pool, "svnraisetreeconflict: ");
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
os->interleave = 1;
while (1)
@@ -366,19 +336,24 @@ main(int argc, const char *argv[])
if (APR_STATUS_IS_EOF(status))
break;
if (status != APR_SUCCESS)
- usage(pool); /* this will exit() */
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
switch (opt)
{
case 'h':
help(options, pool);
- break;
+ return SVN_NO_ERROR;
case OPT_VERSION:
- SVNRAISETC_INT_ERR(version(pool));
- exit(0);
- break;
+ SVN_ERR(version(pool));
+ return SVN_NO_ERROR;
default:
- usage(pool); /* this will exit() */
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
}
}
@@ -388,23 +363,53 @@ main(int argc, const char *argv[])
{
const char *s;
- SVNRAISETC_INT_ERR(svn_utf_cstring_to_utf8(&s, os->argv[os->ind++],
- pool));
+ SVN_ERR(svn_utf_cstring_to_utf8(&s, os->argv[os->ind++], pool));
APR_ARRAY_PUSH(remaining_argv, const char *) = s;
}
if (remaining_argv->nelts < 1)
- usage(pool); /* this will exit() */
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
/* Do the main task */
- SVNRAISETC_INT_ERR(raise_tree_conflict(remaining_argv->nelts,
- (const char **)remaining_argv->elts,
- pool));
+ SVN_ERR(raise_tree_conflict(remaining_argv->nelts,
+ (const char **)remaining_argv->elts,
+ pool));
- svn_pool_destroy(pool);
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+
+ /* Initialize the app. */
+ if (svn_cmdline_init("svnraisetreeconflict", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create our top-level pool. Use a separate mutexless allocator,
+ * given this application is single threaded.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
- /* Flush stdout to make sure that the user will see any printing errors. */
- SVNRAISETC_INT_ERR(svn_cmdline_fflush(stdout));
+ err = sub_main(&exit_code, argc, argv, pool);
- return EXIT_SUCCESS;
+ /* Flush stdout and report if it fails. It would be flushed on exit anyway
+ but this makes sure that output is not silently lost if it fails. */
+ err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+ if (err)
+ {
+ exit_code = EXIT_FAILURE;
+ svn_cmdline_handle_exit_error(err, NULL, "svnraisetreeconflict: ");
+ }
+
+ svn_pool_destroy(pool);
+ return exit_code;
}
diff --git a/tools/dev/trails.py b/tools/dev/trails.py
index 9717c6c..917d234 100755
--- a/tools/dev/trails.py
+++ b/tools/dev/trails.py
@@ -35,7 +35,7 @@ import operator
_re_trail = re.compile('\((?P<txn_body>[a-z_]*), (?P<filename>[a-z_\-./]*), (?P<lineno>[0-9]*), (?P<txn>0|1)\): (?P<ops>.*)')
_re_table_op = re.compile('\(([a-z]*), ([a-z]*)\)')
-_seperator = '------------------------------------------------------------\n'
+_separator = '------------------------------------------------------------\n'
def parse_trails_log(infile):
trails = []
@@ -79,9 +79,9 @@ def output_summary(trails, outfile):
median_ops = ops[total_trails / 2]
average_ops = float(total_ops) / total_trails
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('Summary\n')
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('Total number of trails: %10i\n' % total_trails)
outfile.write('Total number of ops: %10i\n' % total_ops)
outfile.write('max ops/trail: %10i\n' % max_ops)
@@ -123,9 +123,9 @@ def output_trail_length_frequencies(trails, outfile):
total_trails = len(ops)
frequencies = list_frequencies(ops)
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('Trail length frequencies\n')
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('ops/trail frequency percentage\n')
for (r, f) in frequencies:
p = float(f) * 100 / total_trails
@@ -164,9 +164,9 @@ def output_trail_frequencies(trails, outfile):
frequencies = list_frequencies(ttrails)
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('Trail frequencies\n')
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('frequency percentage ops/trail trail\n')
for (((txn_body, file, line), trail), f) in frequencies:
p = float(f) * 100 / total_trails
@@ -183,9 +183,9 @@ def output_txn_body_frequencies(trails, outfile):
total_trails = len(trails)
frequencies = list_frequencies(bodies)
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('txn_body frequencies\n')
- outfile.write(_seperator)
+ outfile.write(_separator)
outfile.write('frequency percentage txn_body\n')
for ((txn_body, file, line), f) in frequencies:
p = float(f) * 100 / total_trails
diff --git a/tools/dev/unix-build/Makefile.svn b/tools/dev/unix-build/Makefile.svn
index 0bdddd5..d6032e3 100644
--- a/tools/dev/unix-build/Makefile.svn
+++ b/tools/dev/unix-build/Makefile.svn
@@ -29,16 +29,19 @@
# | the bot's health after making changes to this file. |
# |______________________________________________________________|
-ENABLE_PERL_BINDINGS ?= yes
+PERL ?= yes
+ENABLE_PERL_BINDINGS = $(PERL)
THREADING ?= yes
ifeq ($(THREADING),yes)
-ENABLE_JAVA_BINDINGS ?= yes
+JAVA ?= yes
else
-ENABLE_JAVA_BINDINGS ?= no
+JAVA ?= no
endif
+ENABLE_JAVA_BINDINGS = $(JAVA)
USE_APR_ICONV ?= no # set to yes to use APR iconv instead of GNU iconv
PARALLEL ?= 1
CLEANUP ?= 1
+EXCLUSIVE_WC_LOCKS ?= 1
USE_HTTPV1 ?= no
USE_AUTHZ_SHORT_CIRCUIT ?= no
RAMDISK ?= /ramdisk
@@ -65,21 +68,22 @@ OBJDIR = $(PWD)/objdir
BDB_MAJOR_VER = 4.7
BDB_VER = $(BDB_MAJOR_VER).25
-APR_VER = 1.4.6
+APR_VER = 1.5.1
APR_ICONV_VER = 1.2.1
GNU_ICONV_VER = 1.14
-APR_UTIL_VER = 1.4.1
-HTTPD_VER = 2.2.22
-NEON_VER = 0.29.6
-SERF_VER = 1.2.0
+APR_UTIL_VER = 1.5.3
+HTTPD_VER = 2.2.29
+NEON_VER = 0.30.0
+SERF_VER = 1.3.8
SERF_OLD_VER = 0.3.1
CYRUS_SASL_VER = 2.1.25
-SQLITE_VER = 3071600
-LIBMAGIC_VER = 5.11
+SQLITE_VER = 3080500
+LIBMAGIC_VER = 5.19
RUBY_VER = 1.8.7-p358
BZ2_VER = 1.0.6
-PYTHON_VER = 2.7.3
+PYTHON_VER = 2.7.8
JUNIT_VER = 4.10
+GETTEXT_VER = 0.18.3.1
BDB_DIST = db-$(BDB_VER).tar.gz
APR_ICONV_DIST = apr-iconv-$(APR_ICONV_VER).tar.gz
@@ -87,12 +91,40 @@ GNU_ICONV_DIST = libiconv-$(GNU_ICONV_VER).tar.gz
NEON_DIST = neon-$(NEON_VER).tar.gz
SQLITE_DIST = sqlite-autoconf-$(SQLITE_VER).tar.gz
CYRUS_SASL_DIST = cyrus-sasl-$(CYRUS_SASL_VER).tar.gz
-HTTPD_DIST = httpd-$(HTTPD_VER).tar.bz2
+HTTPD_DIST = httpd-$(HTTPD_VER).tar.gz
LIBMAGIC_DIST = file-$(LIBMAGIC_VER).tar.gz
RUBY_DIST = ruby-$(RUBY_VER).tar.gz
BZ2_DIST = bzip2-$(BZ2_VER).tar.gz
PYTHON_DIST = Python-$(PYTHON_VER).tgz
JUNIT_DIST = junit-${JUNIT_VER}.jar
+GETTEXT_DIST = gettext-$(GETTEXT_VER).tar.gz
+
+SHA256_${BDB_DIST} = f14fd96dd38915a1d63dcb94a63fbb8092334ceba6b5060760427096f631263e
+SHA256_${APR_ICONV_DIST} = 19381959d50c4a5f3b9c84d594a5f9ffb3809786919b3058281f4c87e1f4b245
+SHA256_${GNU_ICONV_DIST} = 72b24ded17d687193c3366d0ebe7cde1e6b18f0df8c55438ac95be39e8a30613
+SHA256_${HTTPD_DIST} = cec2878884b758b0d159a1385b2667a2ae0ca21b0bc7bcc8a9a41b5cfa5452ff
+SHA256_${NEON_DIST} = 2962cfcb5d30f3272e3d2fa0e473434419770a3801afe3d46e5d1650787990c2
+SHA256_${CYRUS_SASL_DIST} = 418c16e6240a4f9b637cbe3d62937b9675627bad27c622191d47de8686fe24fe
+SHA256_${SQLITE_DIST} = 98c33abe4106e508e73fda648b2657ac9e969fe24695f543dcde68cc71f3091b
+SHA256_${LIBMAGIC_DIST} = 9484b3bbda1acc7b13a4f71031a85ce10c77bd0ffec7226741a219ef587e3a7c
+SHA256_${RUBY_DIST} = 9e0856d58830e08f1e38233947d859898ae09d4780cb1a502108e41308de33cb
+SHA256_${BZ2_DIST} = a2848f34fcd5d6cf47def00461fcb528a0484d8edef8208d6d2e2909dc61d9cd
+SHA256_${PYTHON_DIST} = 74d70b914da4487aa1d97222b29e9554d042f825f26cb2b93abd20fdda56b557
+SHA256_${JUNIT_DIST} = 36a747ca1e0b86f6ea88055b8723bb87030d627766da6288bf077afdeeb0f75a
+SHA256_${GETTEXT_DIST} = 0d8f9a33531b77776b3dc473e7940019ca19bfca5b4c06db6e96065eeb07245d
+
+define do_check_sha256
+if [ -x /bin/sha256 ]; then \
+ (cd $(DISTDIR) && \
+ echo "SHA256 (${1}) = ${SHA256_${1}}" | /bin/sha256 -C /dev/stdin "${1}"); \
+elif [ -x /usr/bin/sha256sum ]; then \
+ (cd $(DISTDIR) && \
+ echo "${SHA256_${1}} ${1}" | /usr/bin/sha256sum --quiet --check); \
+else \
+ echo "Error: No tool found to verify checksum"; \
+ false; \
+fi
+endef
DISTFILES = $(DISTDIR)/$(NEON_DIST) \
$(DISTDIR)/$(SERF_DIST) \
@@ -105,28 +137,29 @@ DISTFILES = $(DISTDIR)/$(NEON_DIST) \
$(DISTDIR)/$(RUBY_DIST) \
$(DISTDIR)/$(BZ2_DIST) \
$(DISTDIR)/$(PYTHON_DIST) \
- $(DISTDIR)/$(JUNIT_DIST)
+ $(DISTDIR)/$(JUNIT_DIST) \
+ $(DISTDIR)/$(GETTEXT_DIST)
FETCH_CMD = wget -c
SUBVERSION_REPOS_URL = https://svn.apache.org/repos/asf/subversion
-BDB_URL = http://ftp2.de.freebsd.org/pub/FreeBSD/distfiles/bdb/$(BDB_DIST)
-APR_URL = http://svn.apache.org/repos/asf/apr/apr
-APR_ICONV_URL = http://www.apache.org/dist/apr/$(APR_ICONV_DIST)
-GNU_ICONV_URL = http://ftp.gnu.org/pub/gnu/libiconv/$(GNU_ICONV_DIST)
-APR_UTIL_URL = http://svn.apache.org/repos/asf/apr/apr-util
-HTTPD_URL = http://archive.apache.org/dist/httpd/$(HTTPD_DIST)
+BDB_URL = http://download.oracle.com/berkeley-db/$(BDB_DIST)
+APR_URL = https://svn.apache.org/repos/asf/apr/apr
+APR_ICONV_URL = https://www.apache.org/dist/apr/$(APR_ICONV_DIST)
+GNU_ICONV_URL = https://ftp.gnu.org/pub/gnu/libiconv/$(GNU_ICONV_DIST)
+APR_UTIL_URL = https://svn.apache.org/repos/asf/apr/apr-util
+HTTPD_URL = https://archive.apache.org/dist/httpd/$(HTTPD_DIST)
NEON_URL = http://webdav.org/neon/$(NEON_DIST)
-#SERF_URL = http://serf.googlecode.com/files/$(SERF_DIST)
-SERF_URL = http://serf.googlecode.com/svn/tags/$(SERF_VER)
-SERF_OLD_URL = http://serf.googlecode.com/svn/tags/$(SERF_OLD_VER)
-SQLITE_URL = http://www.sqlite.org/2013/$(SQLITE_DIST)
+SERF_URL = https://svn.apache.org/repos/asf/serf/tags/$(SERF_VER)
+SERF_OLD_URL = https://svn.apache.org/repos/asf/serf/tags/$(SERF_OLD_VER)
+SQLITE_URL = https://www.sqlite.org/2014/$(SQLITE_DIST)
CYRUS_SASL_URL = ftp://ftp.andrew.cmu.edu/pub/cyrus-mail/$(CYRUS_SASL_DIST)
LIBMAGIC_URL = ftp://ftp.astron.com/pub/file/$(LIBMAGIC_DIST)
RUBY_URL = http://ftp.ruby-lang.org/pub/ruby/1.8/$(RUBY_DIST)
BZ2_URL = http://bzip.org/$(BZ2_VER)/$(BZ2_DIST)
-PYTHON_URL = http://python.org/ftp/python/$(PYTHON_VER)/$(PYTHON_DIST)
-JUNIT_URL = http://cloud.github.com/downloads/KentBeck/junit/$(JUNIT_DIST)
+PYTHON_URL = https://python.org/ftp/python/$(PYTHON_VER)/$(PYTHON_DIST)
+JUNIT_URL = https://downloads.sourceforge.net/project/junit/junit/$(JUNIT_VER)/$(JUNIT_DIST)
+GETTEXT_URL = https://ftp.gnu.org/pub/gnu/gettext/$(GETTEXT_DIST)
BDB_SRCDIR = $(SRCDIR)/db-$(BDB_VER)
@@ -144,6 +177,7 @@ LIBMAGIC_SRCDIR = $(SRCDIR)/file-$(LIBMAGIC_VER)
RUBY_SRCDIR = $(SRCDIR)/ruby-$(RUBY_VER)
BZ2_SRCDIR = $(SRCDIR)/bzip2-$(BZ2_VER)
PYTHON_SRCDIR = $(SRCDIR)/Python-$(PYTHON_VER)
+GETTEXT_SRCDIR = $(SRCDIR)/gettext-$(GETTEXT_VER)
SVN_SRCDIR = $(SVN_WC)
BDB_OBJDIR = $(OBJDIR)/db-$(BDB_VER)
@@ -161,6 +195,7 @@ LIBMAGIC_OBJDIR = $(OBJDIR)/file-$(LIBMAGIC_VER)
RUBY_OBJDIR = $(OBJDIR)/ruby-$(RUBY_VER)
BZ2_OBJDIR = $(OBJDIR)/bzip2-$(BZ2_VER)
PYTHON_OBJDIR = $(OBJDIR)/python-$(PYTHON_VER)
+GETTEXT_OBJDIR = $(OBJDIR)/gettext-$(GETTEXT_VER)
SVN_OBJDIR = $(OBJDIR)/$(SVN_REL_WC)
# Tweak this for out-of-tree builds. Note that running individual
@@ -173,30 +208,30 @@ PROFILE_CFLAGS=-pg
endif
# We need this to make sure some targets below pick up the right libraries
-LD_LIBRARY_PATH=$(PREFIX)/apr/lib:$(PREFIX)/iconv/lib:$(PREFIX)/bdb/lib:$(PREFIX)/neon/lib:$(PREFIX)/serf/lib:$(PREFIX)/sqlite/lib:$(PREFIX)/cyrus-sasl/lib:$(PREFIX)/iconv/lib:$(PREFIX)/libmagic/lib:$(PREFIX)/ruby/lib:$(PREFIX)/python/lib:$(PREFIX)/svn-$(WC)/lib
+LD_LIBRARY_PATH=$(PREFIX)/apr/lib:$(PREFIX)/gettext/lib:$(PREFIX)/iconv/lib:$(PREFIX)/bdb/lib:$(PREFIX)/neon/lib:$(PREFIX)/serf/lib:$(PREFIX)/sqlite/lib:$(PREFIX)/cyrus-sasl/lib:$(PREFIX)/iconv/lib:$(PREFIX)/libmagic/lib:$(PREFIX)/ruby/lib:$(PREFIX)/python/lib:$(PREFIX)/svn-$(WC)/lib
#######################################################################
# Main targets.
#######################################################################
-.PHONY: all reset clean nuke
+.PHONY: all reset clean nuke fetch
all: dirs-create bdb-install apr-install iconv-install apr-util-install \
httpd-install neon-install serf-install serf-old-install \
sqlite-install cyrus-sasl-install libmagic-install \
- ruby-install bz2-install python-install \
+ ruby-install bz2-install python-install gettext-install \
svn-install svn-bindings-install
# Use these to start a build from the beginning.
reset: dirs-reset bdb-reset apr-reset iconv-reset apr-util-reset \
httpd-reset neon-reset serf-reset serf-old-reset sqlite-reset \
cyrus-sasl-reset libmagic-reset ruby-reset python-reset \
- bz2-reset svn-reset
+ bz2-reset gettext-reset svn-reset
# Use to save disk space.
clean: bdb-clean apr-clean iconv-clean apr-util-clean httpd-clean \
neon-clean serf-clean serf-old-clean sqlite-clean cyrus-sasl-clean \
- libmagic-clean ruby-clean bz2-clean python-clean svn-clean
+ libmagic-clean ruby-clean bz2-clean python-clean gettext-clean svn-clean
# Nukes everything (including installed binaries!)
# Use this to start ALL OVER AGAIN! Use with caution!
@@ -222,6 +257,8 @@ nuke:
;; \
esac
+fetch: $(DISTFILES)
+
#######################################################################
# directories
#######################################################################
@@ -248,7 +285,7 @@ bdb-reset:
rm -f $(BDB_OBJDIR)/$(f);)
bdb-clean:
- -(cd $(BDB_SRCDIR)/build_unix/ && make clean)
+ -(cd $(BDB_SRCDIR)/build_unix/ && env MAKEFLAGS= make clean)
# fetch distfile for bdb
$(DISTDIR)/$(BDB_DIST):
@@ -256,6 +293,7 @@ $(DISTDIR)/$(BDB_DIST):
# retrieve bdb
$(BDB_OBJDIR)/.retrieved: $(DISTDIR)/$(BDB_DIST)
+ $(call do_check_sha256,$(BDB_DIST))
[ -d $(BDB_OBJDIR) ] || mkdir -p $(BDB_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(BDB_DIST)
touch $@
@@ -271,12 +309,12 @@ $(BDB_OBJDIR)/.configured: $(BDB_OBJDIR)/.retrieved
# compile bdb
$(BDB_OBJDIR)/.compiled: $(BDB_OBJDIR)/.configured
- (cd $(BDB_SRCDIR)/build_unix && make)
+ (cd $(BDB_SRCDIR)/build_unix && env MAKEFLAGS= make)
touch $@
# install bdb
$(BDB_OBJDIR)/.installed: $(BDB_OBJDIR)/.compiled
- (cd $(BDB_SRCDIR)/build_unix && make install)
+ (cd $(BDB_SRCDIR)/build_unix && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -292,7 +330,7 @@ apr-reset:
rm -f $(APR_OBJDIR)/$(f);)
apr-clean:
- -(cd $(APR_OBJDIR) && make clean)
+ -(cd $(APR_OBJDIR) && env MAKEFLAGS= make clean)
# retrieve apr if not present yet
$(APR_OBJDIR)/.retrieved:
@@ -310,10 +348,14 @@ endif
ifdef POOL_DEBUG
POOL_DEBUG_FLAG=--enable-pool-debug=all
+else
+# Map apr_palloc()/apr_pool_{clear,destroy}() to malloc()/free().
+# This also puts poison bytes into freed memory to help detect use after free.
+POOL_DEBUG_FLAG=--enable-pool-debug=yes
endif
# configure apr
-$(APR_OBJDIR)/.configured: $(APR_OBJDIR)/.retrieved
+$(APR_OBJDIR)/.configured: $(APR_OBJDIR)/.retrieved $(BDB_OBJDIR)/.installed
cd $(APR_SRCDIR) && ./buildconf
cd $(APR_OBJDIR) \
&& env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" GREP="`which grep`" \
@@ -326,12 +368,12 @@ $(APR_OBJDIR)/.configured: $(APR_OBJDIR)/.retrieved
# compile apr
$(APR_OBJDIR)/.compiled: $(APR_OBJDIR)/.configured
- (cd $(APR_OBJDIR) && make)
+ (cd $(APR_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install apr
$(APR_OBJDIR)/.installed: $(APR_OBJDIR)/.compiled
- (cd $(APR_OBJDIR) && make install)
+ (cd $(APR_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -347,7 +389,7 @@ apr-iconv-reset:
rm -f $(APR_ICONV_OBJDIR)/$(f);)
apr-iconv-clean:
- -(cd $(APR_ICONV_OBJDIR) && make clean)
+ -(cd $(APR_ICONV_OBJDIR) && env MAKEFLAGS= make clean)
# fetch distfile for apr-iconv
$(DISTDIR)/$(APR_ICONV_DIST):
@@ -355,14 +397,17 @@ $(DISTDIR)/$(APR_ICONV_DIST):
# retrieve apr-iconv
$(APR_ICONV_OBJDIR)/.retrieved: $(DISTDIR)/$(APR_ICONV_DIST)
+ $(call do_check_sha256,$(APR_ICONV_DIST))
[ -d $(APR_ICONV_OBJDIR) ] || mkdir -p $(APR_ICONV_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(APR_ICONV_DIST)
touch $@
# configure apr-iconv
-$(APR_ICONV_OBJDIR)/.configured: $(APR_ICONV_OBJDIR)/.retrieved
+$(APR_ICONV_OBJDIR)/.configured: $(APR_ICONV_OBJDIR)/.retrieved \
+ $(APR_OBJDIR)/.installed
cd $(APR_ICONV_OBJDIR) \
- && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \
+ && env CFLAGS="-g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ GREP="`which grep`" \
$(APR_ICONV_SRCDIR)/configure \
--prefix=$(PREFIX)/apr \
--with-apr=$(PREFIX)/apr
@@ -371,12 +416,12 @@ $(APR_ICONV_OBJDIR)/.configured: $(APR_ICONV_OBJDIR)/.retrieved
# compile apr-iconv
$(APR_ICONV_OBJDIR)/.compiled: $(APR_ICONV_OBJDIR)/.configured
(cd $(APR_ICONV_OBJDIR) \
- && make CPPFLAGS="-D_OSD_POSIX" CFLAGS="-g -O0 $(PROFILE_CFLAGS)")
+ && env MAKEFLAGS= make CPPFLAGS="-D_OSD_POSIX" CFLAGS="-g -O0 $(PROFILE_CFLAGS)")
touch $@
# install apr-iconv
$(APR_ICONV_OBJDIR)/.installed: $(APR_ICONV_OBJDIR)/.compiled
- (cd $(APR_ICONV_OBJDIR) && make install)
+ (cd $(APR_ICONV_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -392,7 +437,7 @@ gnu-iconv-reset:
rm -f $(GNU_ICONV_OBJDIR)/$(f);)
gnu-iconv-clean:
- -(cd $(GNU_ICONV_OBJDIR) && make clean)
+ -(cd $(GNU_ICONV_OBJDIR) && env MAKEFLAGS= make clean)
rm -f $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff
rm -f $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff
@@ -433,6 +478,7 @@ $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff:
$(GNU_ICONV_OBJDIR)/.retrieved: $(DISTDIR)/$(GNU_ICONV_DIST) \
$(GNU_ICONV_OBJDIR)/lib_encodings.def.diff \
$(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff
+ $(call do_check_sha256,$(GNU_ICONV_DIST))
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(GNU_ICONV_DIST)
cd $(SRCDIR)/libiconv-$(GNU_ICONV_VER) && \
patch -p0 < $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff && \
@@ -452,12 +498,12 @@ $(GNU_ICONV_OBJDIR)/.configured: $(GNU_ICONV_OBJDIR)/.retrieved
# compile gnu-iconv
$(GNU_ICONV_OBJDIR)/.compiled: $(GNU_ICONV_OBJDIR)/.configured
- (cd $(GNU_ICONV_OBJDIR) && make)
+ (cd $(GNU_ICONV_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install gnu-iconv
$(GNU_ICONV_OBJDIR)/.installed: $(GNU_ICONV_OBJDIR)/.compiled
- (cd $(GNU_ICONV_OBJDIR) && make install)
+ (cd $(GNU_ICONV_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -489,7 +535,7 @@ apr-util-reset:
rm -f $(APR_UTIL_OBJDIR)/$(f);)
apr-util-clean:
- -(cd $(APR_UTIL_OBJDIR) && make clean)
+ -(cd $(APR_UTIL_OBJDIR) && env MAKEFLAGS= make clean)
# retrieve apr-util if not present yet
@@ -503,16 +549,19 @@ $(APR_UTIL_OBJDIR)/.retrieved:
ifeq ($(USE_APR_ICONV),yes)
ICONV_FLAG=--with-iconv=$(PREFIX)/apr
+ICONV_OBJDIR=$(APR_ICONV_OBJDIR)
else
ICONV_FLAG=--with-iconv=$(PREFIX)/iconv
+ICONV_OBJDIR=$(GNU_ICONV_OBJDIR)
endif
# configure apr-util
-$(APR_UTIL_OBJDIR)/.configured: $(APR_UTIL_OBJDIR)/.retrieved
+$(APR_UTIL_OBJDIR)/.configured: $(APR_UTIL_OBJDIR)/.retrieved \
+ $(APR_OBJDIR)/.installed $(ICONV_OBJDIR)/.installed
cd $(APR_UTIL_SRCDIR) && ./buildconf --with-apr=$(APR_SRCDIR)
cd $(APR_UTIL_OBJDIR) \
&& env LD_LIBRARY_PATH=$(PREFIX)/bdb/lib \
- CFLAGS="-O0 -g $(PROFILE_CFLAGS)" \
+ CFLAGS="-O0 -g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
GREP="`which grep`" \
$(APR_UTIL_SRCDIR)/configure \
--prefix=$(PREFIX)/apr \
@@ -524,12 +573,12 @@ $(APR_UTIL_OBJDIR)/.configured: $(APR_UTIL_OBJDIR)/.retrieved
# compile apr-util
$(APR_UTIL_OBJDIR)/.compiled: $(APR_UTIL_OBJDIR)/.configured
- (cd $(APR_UTIL_OBJDIR) && make)
+ (cd $(APR_UTIL_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install apr-util
$(APR_UTIL_OBJDIR)/.installed: $(APR_UTIL_OBJDIR)/.compiled
- (cd $(APR_UTIL_OBJDIR) && make install)
+ (cd $(APR_UTIL_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -546,22 +595,64 @@ httpd-reset:
rm -f $(HTTPD_OBJDIR)/$(f);)
httpd-clean:
- -(cd $(HTTPD_OBJDIR) && make clean)
+ -(cd $(HTTPD_OBJDIR) && env MAKEFLAGS= make clean)
# fetch distfile for httpd
$(DISTDIR)/$(HTTPD_DIST):
cd $(DISTDIR) && $(FETCH_CMD) $(HTTPD_URL)
+$(HTTPD_OBJDIR)/chil-engine.diff:
+ mkdir -p $(dir $@)
+ echo > $@.tmp '--- modules/ssl/ssl_engine_init.c.orig Mon Apr 14 13:20:57 2014'
+ echo >>$@.tmp '+++ modules/ssl/ssl_engine_init.c Mon Apr 14 13:21:22 2014'
+ echo >>$@.tmp '@@ -406,9 +406,11 @@ void ssl_init_Engine(server_rec *s, apr_pool_t *p)'
+ echo >>$@.tmp ' ssl_die();'
+ echo >>$@.tmp ' }'
+ echo >>$@.tmp ' '
+ echo >>$@.tmp '+#ifdef ENGINE_CTRL_CHIL_SET_FORKCHECK'
+ echo >>$@.tmp ' if (strEQ(mc->szCryptoDevice, "chil")) {'
+ echo >>$@.tmp ' ENGINE_ctrl(e, ENGINE_CTRL_CHIL_SET_FORKCHECK, 1, 0, 0);'
+ echo >>$@.tmp ' }'
+ echo >>$@.tmp '+#endif'
+ echo >>$@.tmp ' '
+ echo >>$@.tmp ' if (!ENGINE_set_default(e, ENGINE_METHOD_ALL)) {'
+ echo >>$@.tmp ' ap_log_error(APLOG_MARK, APLOG_ERR, 0, s,'
+ mv -f $@.tmp $@
+
+
# retrieve httpd
-$(HTTPD_OBJDIR)/.retrieved: $(DISTDIR)/$(HTTPD_DIST)
+$(HTTPD_OBJDIR)/.retrieved: $(DISTDIR)/$(HTTPD_DIST) \
+ $(HTTPD_OBJDIR)/chil-engine.diff
+ $(call do_check_sha256,$(HTTPD_DIST))
[ -d $(HTTPD_OBJDIR) ] || mkdir -p $(HTTPD_OBJDIR)
- tar -C $(SRCDIR) -jxf $(DISTDIR)/$(HTTPD_DIST)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(HTTPD_DIST)
+ cd $(HTTPD_SRCDIR) && patch -p0 < $(HTTPD_OBJDIR)/chil-engine.diff
+ cp $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h \
+ $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h.orig
+ sed '/^#define HAVE_SSL_RAND_EGD/d' \
+ < $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h.orig \
+ > $(HTTPD_SRCDIR)/modules/ssl/ssl_toolkit_compat.h
+ cp $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c \
+ $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c.orig
+ sed 's/^\(#if (OPENSSL_VERSION_NUMBER >= 0x00908000)\)$$/\1 \&\& !defined(OPENSSL_NO_COMP)/' \
+ < $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c.orig \
+ > $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_vars.c
+ cp $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_init.c \
+ $(HTTPD_SRCDIR)/modules/ssl/ssl_engine_init.c.orig
+ $(foreach f, ssl_engine_init.c ssl_util_ssl.c ssl_util_ssl.h, \
+ cp $(HTTPD_SRCDIR)/modules/ssl/${f} $(HTTPD_SRCDIR)/modules/ssl/${f}.orig; \
+ sed 's/SSL_CTX_use_certificate_chain/_SSL_CTX_use_certificate_chain/' \
+ < $(HTTPD_SRCDIR)/modules/ssl/${f}.orig \
+ > $(HTTPD_SRCDIR)/modules/ssl/${f};\
+ )
touch $@
# configure httpd
-$(HTTPD_OBJDIR)/.configured: $(HTTPD_OBJDIR)/.retrieved
+$(HTTPD_OBJDIR)/.configured: $(HTTPD_OBJDIR)/.retrieved \
+ $(APR_UTIL_OBJDIR)/.installed
cd $(HTTPD_OBJDIR) \
- && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \
+ && env CFLAGS="-g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ GREP="`which grep`" \
$(HTTPD_SRCDIR)/configure \
--prefix=$(PREFIX)/httpd \
--enable-maintainer-mode \
@@ -574,12 +665,12 @@ $(HTTPD_OBJDIR)/.configured: $(HTTPD_OBJDIR)/.retrieved
# compile httpd
$(HTTPD_OBJDIR)/.compiled: $(HTTPD_OBJDIR)/.configured
- (cd $(HTTPD_OBJDIR) && make)
+ (cd $(HTTPD_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install httpd
$(HTTPD_OBJDIR)/.installed: $(HTTPD_OBJDIR)/.compiled
- (cd $(HTTPD_OBJDIR) && make install)
+ (cd $(HTTPD_OBJDIR) && env MAKEFLAGS= make install)
touch $@
# create a httpd.conf for mod_dav_svn
@@ -617,26 +708,17 @@ neon-reset:
rm -f $(NEON_OBJDIR)/$(f);)
neon-clean:
- -(cd $(NEON_OBJDIR) && make clean)
+ -(cd $(NEON_OBJDIR) && env MAKEFLAGS= make clean)
# fetch distfile for neon
$(DISTDIR)/$(NEON_DIST):
cd $(DISTDIR) && $(FETCH_CMD) $(NEON_URL)
# retrieve neon
-NEON_SVN_URL=http://svn.webdav.org/repos/projects/neon/trunk
$(NEON_OBJDIR)/.retrieved: $(DISTDIR)/$(NEON_DIST)
+ $(call do_check_sha256,$(NEON_DIST))
[ -d $(NEON_OBJDIR) ] || mkdir -p $(NEON_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(NEON_DIST)
- # fix build with OpenSSL lacking SSLv2 support:
- cd $(NEON_SRCDIR)/src && svn diff -c 1865 \
- $(NEON_SVN_URL)/src/ne_openssl.c | patch -p0
- cd $(NEON_SRCDIR)/src && svn diff -c 1872 \
- $(NEON_SVN_URL)/src/ne_openssl.c | patch -p0
- cd $(NEON_SRCDIR)/src && svn diff -c 1865 \
- $(NEON_SVN_URL)/src/ne_ssl.h | patch -p0
- cd $(NEON_SRCDIR)/src && svn diff -c 1865 \
- $(NEON_SVN_URL)/src/ne_session.c | patch -p0
touch $@
# OpenBSD does not have krb5-config in PATH, but the neon port has
@@ -664,12 +746,12 @@ $(NEON_OBJDIR)/.configured: $(NEON_OBJDIR)/.retrieved
# compile neon
$(NEON_OBJDIR)/.compiled: $(NEON_OBJDIR)/.configured
- (cd $(NEON_OBJDIR) && make)
+ (cd $(NEON_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install neon
$(NEON_OBJDIR)/.installed: $(NEON_OBJDIR)/.compiled
- (cd $(NEON_OBJDIR) && make install)
+ (cd $(NEON_OBJDIR) && env MAKEFLAGS= make install)
touch $@
@@ -686,7 +768,7 @@ serf-reset:
rm -f $(SERF_OBJDIR)/$(f);)
serf-clean:
- -(cd $(SERF_SRCDIR) && ./serfmake clean)
+ -(cd $(SERF_SRCDIR) && scons -c)
# fetch distfile for serf
@@ -710,21 +792,24 @@ $(SERF_OBJDIR)/.retrieved:
touch $@
# compile serf (serf won't compile outside its source tree)
-$(SERF_OBJDIR)/.compiled: $(SERF_OBJDIR)/.retrieved
+$(SERF_OBJDIR)/.compiled: $(SERF_OBJDIR)/.retrieved \
+ $(APR_UTIL_OBJDIR)/.installed
cd $(SERF_SRCDIR) && \
- env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" \
- ./serfmake --with-apr=$(PREFIX)/apr \
- --prefix=$(PREFIX)/serf \
- build
+ scons DEBUG=1 \
+ CFLAGS="-O0 -g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
+ APR=$(PREFIX)/apr \
+ APU=$(PREFIX)/apr \
+ PREFIX=$(PREFIX)/serf
touch $@
# install serf
$(SERF_OBJDIR)/.installed: $(SERF_OBJDIR)/.compiled
+ rm -rf $(PREFIX)/serf # XXX scons cannot reinstall :(
cd $(SERF_SRCDIR) && \
- ./serfmake --with-apr=$(PREFIX)/apr \
- --with-apr-util=$(PREFIX)/apr \
- --prefix=$(PREFIX)/serf \
- install
+ scons install
+ # work around unportable scons shared lib support
+ -ln -s libserf-1.so.$(shell echo $(SERF_VER) | sed -e 's/[0-9]$$/0/') \
+ $(PREFIX)/serf/lib/libserf-1.so
touch $@
#######################################################################
@@ -751,9 +836,10 @@ $(SERF_OLD_OBJDIR)/.retrieved:
touch $@
# compile serf (serf won't compile outside its source tree)
-$(SERF_OLD_OBJDIR)/.compiled: $(SERF_OLD_OBJDIR)/.retrieved
+$(SERF_OLD_OBJDIR)/.compiled: $(SERF_OLD_OBJDIR)/.retrieved \
+ $(APR_UTIL_OBJDIR)/.installed
cd $(SERF_OLD_SRCDIR) && \
- env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" \
+ env CFLAGS="-O0 -g $(PROFILE_CFLAGS) -DAPR_POOL_DEBUG" \
./serfmake --with-apr=$(PREFIX)/apr \
--prefix=$(PREFIX)/serf-old \
build
@@ -782,7 +868,7 @@ sqlite-reset:
rm -f $(SQLITE_OBJDIR)/$(f);)
sqlite-clean:
- -cd $(SQLITE_OBJDIR) && make clean
+ -cd $(SQLITE_OBJDIR) && env MAKEFLAGS= make clean
# fetch distfile for sqlite
$(DISTDIR)/$(SQLITE_DIST):
@@ -790,6 +876,7 @@ $(DISTDIR)/$(SQLITE_DIST):
# retrieve sqlite
$(SQLITE_OBJDIR)/.retrieved: $(DISTDIR)/$(SQLITE_DIST)
+ $(call do_check_sha256,$(SQLITE_DIST))
[ -d $(SQLITE_OBJDIR) ] || mkdir -p $(SQLITE_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(SQLITE_DIST)
touch $@
@@ -811,12 +898,12 @@ $(SQLITE_OBJDIR)/.configured: $(SQLITE_OBJDIR)/.retrieved
# compile sqlite
$(SQLITE_OBJDIR)/.compiled: $(SQLITE_OBJDIR)/.configured
- (cd $(SQLITE_OBJDIR) && make)
+ (cd $(SQLITE_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install sqlite
$(SQLITE_OBJDIR)/.installed: $(SQLITE_OBJDIR)/.compiled
- (cd $(SQLITE_OBJDIR) && make install)
+ (cd $(SQLITE_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -832,7 +919,7 @@ cyrus-sasl-reset:
rm -f $(CYRUS_SASL_OBJDIR)/$(f);)
cyrus-sasl-clean:
- -(cd $(CYRUS_SASL_OBJDIR) && make distclean)
+ -(cd $(CYRUS_SASL_OBJDIR) && env MAKEFLAGS= make distclean)
# fetch distfile for cyrus-sasl
$(DISTDIR)/$(CYRUS_SASL_DIST):
@@ -840,6 +927,7 @@ $(DISTDIR)/$(CYRUS_SASL_DIST):
# retrieve cyrus-sasl
$(CYRUS_SASL_OBJDIR)/.retrieved: $(DISTDIR)/$(CYRUS_SASL_DIST)
+ $(call do_check_sha256,$(CYRUS_SASL_DIST))
[ -d $(CYRUS_SASL_OBJDIR) ] || mkdir -p $(CYRUS_SASL_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(CYRUS_SASL_DIST)
# fixes build on Debian:
@@ -867,7 +955,8 @@ endif
touch $@
# configure cyrus-sasl
-$(CYRUS_SASL_OBJDIR)/.configured: $(CYRUS_SASL_OBJDIR)/.retrieved
+$(CYRUS_SASL_OBJDIR)/.configured: $(CYRUS_SASL_OBJDIR)/.retrieved \
+ $(BDB_OBJDIR)/.installed $(SQLITE_OBJDIR)/.installed
cd $(CYRUS_SASL_OBJDIR) \
&& env CFLAGS="-g $(PROFILE_CFLAGS)" \
CPPFLAGS="-I/usr/include/kerberosV" \
@@ -885,12 +974,12 @@ $(CYRUS_SASL_OBJDIR)/.configured: $(CYRUS_SASL_OBJDIR)/.retrieved
# compile cyrus-sasl
$(CYRUS_SASL_OBJDIR)/.compiled: $(CYRUS_SASL_OBJDIR)/.configured
- (cd $(CYRUS_SASL_OBJDIR) && make)
+ (cd $(CYRUS_SASL_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install cyrus-sasl
$(CYRUS_SASL_OBJDIR)/.installed: $(CYRUS_SASL_OBJDIR)/.compiled
- (cd $(CYRUS_SASL_OBJDIR) && make install)
+ (cd $(CYRUS_SASL_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -906,7 +995,7 @@ libmagic-reset:
rm -f $(LIBMAGIC_OBJDIR)/$(f);)
libmagic-clean:
- -(cd $(LIBMAGIC_OBJDIR) && make distclean)
+ -(cd $(LIBMAGIC_OBJDIR) && env MAKEFLAGS= make distclean)
# fetch distfile for libmagic
$(DISTDIR)/$(LIBMAGIC_DIST):
@@ -914,6 +1003,7 @@ $(DISTDIR)/$(LIBMAGIC_DIST):
# retrieve libmagic
$(LIBMAGIC_OBJDIR)/.retrieved: $(DISTDIR)/$(LIBMAGIC_DIST)
+ $(call do_check_sha256,$(LIBMAGIC_DIST))
[ -d $(LIBMAGIC_OBJDIR) ] || mkdir -p $(LIBMAGIC_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(LIBMAGIC_DIST)
touch $@
@@ -929,12 +1019,12 @@ $(LIBMAGIC_OBJDIR)/.configured: $(LIBMAGIC_OBJDIR)/.retrieved
# compile libmagic
$(LIBMAGIC_OBJDIR)/.compiled: $(LIBMAGIC_OBJDIR)/.configured
- (cd $(LIBMAGIC_OBJDIR) && make)
+ (cd $(LIBMAGIC_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install libmagic
$(LIBMAGIC_OBJDIR)/.installed: $(LIBMAGIC_OBJDIR)/.compiled
- (cd $(LIBMAGIC_OBJDIR) && make install)
+ (cd $(LIBMAGIC_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -950,7 +1040,7 @@ ruby-reset:
rm -f $(RUBY_OBJDIR)/$(f);)
ruby-clean:
- -(cd $(RUBY_OBJDIR) && make distclean)
+ -(cd $(RUBY_OBJDIR) && env MAKEFLAGS= make distclean)
# fetch distfile for ruby
$(DISTDIR)/$(RUBY_DIST):
@@ -959,6 +1049,7 @@ $(DISTDIR)/$(RUBY_DIST):
# retrieve ruby
#
$(RUBY_OBJDIR)/.retrieved: $(DISTDIR)/$(RUBY_DIST)
+ $(call do_check_sha256,$(RUBY_DIST))
[ -d $(RUBY_OBJDIR) ] || mkdir -p $(RUBY_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(RUBY_DIST)
touch $@
@@ -981,12 +1072,12 @@ $(RUBY_OBJDIR)/.configured: $(RUBY_OBJDIR)/.retrieved
# compile ruby
$(RUBY_OBJDIR)/.compiled: $(RUBY_OBJDIR)/.configured
- (cd $(RUBY_OBJDIR) && make)
+ (cd $(RUBY_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install ruby
$(RUBY_OBJDIR)/.installed: $(RUBY_OBJDIR)/.compiled
- (cd $(RUBY_OBJDIR) && make install)
+ (cd $(RUBY_OBJDIR) && env MAKEFLAGS= make install)
touch $@
#######################################################################
@@ -1001,7 +1092,7 @@ bz2-reset:
rm -f $(BZ2_OBJDIR)/$(f);)
bz2-clean:
- -(cd $(BZ2_SRCDIR) && make distclean)
+ -(cd $(BZ2_SRCDIR) && env MAKEFLAGS= make distclean)
# fetch distfile for bz2
$(DISTDIR)/$(BZ2_DIST):
@@ -1009,18 +1100,19 @@ $(DISTDIR)/$(BZ2_DIST):
# retrieve bz2
$(BZ2_OBJDIR)/.retrieved: $(DISTDIR)/$(BZ2_DIST)
+ $(call do_check_sha256,$(BZ2_DIST))
[ -d $(BZ2_OBJDIR) ] || mkdir -p $(BZ2_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(BZ2_DIST)
touch $@
# compile bz2
$(BZ2_OBJDIR)/.compiled: $(BZ2_OBJDIR)/.retrieved
- (cd $(BZ2_SRCDIR) && make CFLAGS="-g $(PROFILE_CFLAGS) -fPIC")
+ (cd $(BZ2_SRCDIR) && env MAKEFLAGS= make CFLAGS="-g $(PROFILE_CFLAGS) -fPIC")
touch $@
# install bz2
$(BZ2_OBJDIR)/.installed: $(BZ2_OBJDIR)/.compiled
- (cd $(BZ2_SRCDIR) && make install PREFIX=$(PREFIX)/bz2)
+ (cd $(BZ2_SRCDIR) && env MAKEFLAGS= make install PREFIX=$(PREFIX)/bz2)
touch $@
@@ -1037,7 +1129,7 @@ python-reset:
rm -f $(PYTHON_OBJDIR)/$(f);)
python-clean:
- -(cd $(PYTHON_OBJDIR) && make distclean)
+ -(cd $(PYTHON_OBJDIR) && env MAKEFLAGS= make distclean)
# fetch distfile for python
$(DISTDIR)/$(PYTHON_DIST):
@@ -1046,13 +1138,14 @@ $(DISTDIR)/$(PYTHON_DIST):
# retrieve python
#
$(PYTHON_OBJDIR)/.retrieved: $(DISTDIR)/$(PYTHON_DIST)
+ $(call do_check_sha256,$(PYTHON_DIST))
[ -d $(PYTHON_OBJDIR) ] || mkdir -p $(PYTHON_OBJDIR)
tar -C $(SRCDIR) -zxf $(DISTDIR)/$(PYTHON_DIST)
# Make setup.py use our own dependencies instead of system ones
sed -e "s#sqlite_inc_paths = \[ '/usr/include',#sqlite_inc_paths = [ '$(PREFIX)/sqlite/include',#" \
-e "s#'/usr/include/db4'#'$(PREFIX)/bdb/include'#" \
- -e "s|\(add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')\)|#\1|" \
- -e "s|\(add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')\)|#\1|" \
+ -e "s|\(add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')\)|pass #\1|" \
+ -e "s|\(add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')\)|pass #\1|" \
-e "s#find_library_file(lib_dirs, 'bz2'#find_library_file(['$(PREFIX)/bz2/lib'] + lib_dirs, 'bz2'#" \
< $(PYTHON_SRCDIR)/setup.py \
> $(PYTHON_SRCDIR)/setup.py.patched
@@ -1074,7 +1167,8 @@ $(PYTHON_OBJDIR)/.retrieved: $(DISTDIR)/$(PYTHON_DIST)
ifdef PROFILE
PYTHON_PROFILING=--enable-profiling
endif
-$(PYTHON_OBJDIR)/.configured: $(PYTHON_OBJDIR)/.retrieved
+$(PYTHON_OBJDIR)/.configured: $(PYTHON_OBJDIR)/.retrieved \
+ $(BZ2_OBJDIR)/.installed
cd $(PYTHON_OBJDIR) \
&& env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \
CPPFLAGS="-I$(PREFIX)/bz2/include" \
@@ -1089,12 +1183,12 @@ $(PYTHON_OBJDIR)/.configured: $(PYTHON_OBJDIR)/.retrieved
# compile python
$(PYTHON_OBJDIR)/.compiled: $(PYTHON_OBJDIR)/.configured
- (cd $(PYTHON_OBJDIR) && make)
+ (cd $(PYTHON_OBJDIR) && env MAKEFLAGS= make)
touch $@
# install python
$(PYTHON_OBJDIR)/.installed: $(PYTHON_OBJDIR)/.compiled
- (cd $(PYTHON_OBJDIR) && make install)
+ (cd $(PYTHON_OBJDIR) && env MAKEFLAGS= make install)
touch $@
@@ -1105,6 +1199,65 @@ $(PYTHON_OBJDIR)/.installed: $(PYTHON_OBJDIR)/.compiled
# fetch distfile for junit
$(DISTDIR)/$(JUNIT_DIST):
cd $(DISTDIR) && $(FETCH_CMD) $(JUNIT_URL)
+ $(call do_check_sha256,$(JUNIT_DIST))
+
+
+#######################################################################
+# gettext
+#######################################################################
+
+gettext-retrieve: $(GETTEXT_OBJDIR)/.retrieved
+gettext-configure: $(GETTEXT_OBJDIR)/.configured
+gettext-compile: $(GETTEXT_OBJDIR)/.compiled
+gettext-install: $(GETTEXT_OBJDIR)/.installed
+gettext-reset:
+ $(foreach f, .retrieved .configured .compiled .installed, \
+ rm -f $(GETTEXT_OBJDIR)/$(f);)
+
+gettext-clean:
+ -(cd $(GETTEXT_OBJDIR) && env MAKEFLAGS= make clean)
+
+# fetch distfile for gettext
+$(DISTDIR)/$(GETTEXT_DIST):
+ cd $(DISTDIR) && $(FETCH_CMD) $(GETTEXT_URL)
+
+# retrieve gettext
+$(GETTEXT_OBJDIR)/.retrieved: $(DISTDIR)/$(GETTEXT_DIST)
+ $(call do_check_sha256,$(GETTEXT_DIST))
+ [ -d $(GETTEXT_OBJDIR) ] || mkdir -p $(GETTEXT_OBJDIR)
+ tar -C $(SRCDIR) -zxf $(DISTDIR)/$(GETTEXT_DIST)
+ touch $@
+
+# (gettext won't compile outside its source tree)
+# configure gettext
+$(GETTEXT_OBJDIR)/.configured: $(GETTEXT_OBJDIR)/.retrieved
+ cd $(GETTEXT_SRCDIR) \
+ && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\
+ LDFLAGS="-L$(PREFIX)/iconv/lib" \
+ $(GETTEXT_SRCDIR)/configure \
+ --prefix=$(PREFIX)/gettext \
+ --with-libiconv-prefix=$(PREFIX)/iconv \
+ --disable-c++ \
+ --disable-java \
+ --disable-csharp \
+ $(THREADS_FLAG)
+ -which gsed && \
+ sed -e 's/sed /gsed /g' < $(GETTEXT_SRCDIR)/build-aux/moopp \
+ > $(GETTEXT_SRCDIR)/build-aux/moopp.fixed && \
+ mv $(GETTEXT_SRCDIR)/build-aux/moopp.fixed \
+ $(GETTEXT_SRCDIR)/build-aux/moopp && \
+ chmod +x $(GETTEXT_SRCDIR)/build-aux/moopp
+ touch $@
+
+# compile gettext
+$(GETTEXT_OBJDIR)/.compiled: $(GETTEXT_OBJDIR)/.configured
+ (cd $(GETTEXT_SRCDIR) && env MAKEFLAGS= make)
+ touch $@
+
+# install gettext
+$(GETTEXT_OBJDIR)/.installed: $(GETTEXT_OBJDIR)/.compiled
+ (cd $(GETTEXT_SRCDIR) && env MAKEFLAGS= make install)
+ touch $@
#######################################################################
# svn
@@ -1129,7 +1282,7 @@ svn-reset: svn-bindings-reset
rm -f $(SVN_OBJDIR)/$(f);)
svn-clean:
- -(cd $(svn_builddir) && make distclean)
+ -(cd $(svn_builddir) && env MAKEFLAGS= make distclean)
# retrieve svn if not present yet
$(SVN_OBJDIR)/.retrieved:
@@ -1153,16 +1306,20 @@ $(SVN_OBJDIR)/.retrieved:
ifeq ($(BRANCH_MAJOR),1.7)
BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
SERF_FLAG=--with-serf="$(PREFIX)/serf"
+SERF_LDFLAG=-Wl,-rpath,$(PREFIX)/serf/lib -Wl,-rpath,$(PREFIX)/bdb/lib
MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/svn-$(WC)/mod_dontdothat.so
LIBMAGIC_FLAG=--with-libmagic=$(PREFIX)/libmagic
NEON_FLAG=--with-neon="$(PREFIX)/neon"
JAVAHL_CHECK_TARGET=check-javahl
else ifeq ($(BRANCH_MAJOR),1.6)
BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
SERF_FLAG=--with-serf="$(PREFIX)/serf"
+SERF_LDFLAG=-Wl,-rpath,$(PREFIX)/serf/lib -Wl,-rpath,$(PREFIX)/bdb/lib
MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/svn-$(WC)/mod_dontdothat.so
W_NO_SYSTEM_HEADERS=-Wno-system-headers
NEON_FLAG=--with-neon="$(PREFIX)/neon"
JAVAHL_CHECK_TARGET=check-javahl
@@ -1171,6 +1328,7 @@ BDB_FLAG=$(PREFIX)/bdb
SERF_FLAG=--with-serf="$(PREFIX)/serf-old"
MOD_DAV_SVN=modules/mod_dav_svn.so
MOD_AUTHZ_SVN=modules/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/mod_dontdothat.so
DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check
W_NO_SYSTEM_HEADERS=-Wno-system-headers
NEON_FLAG=--with-neon="$(PREFIX)/neon"
@@ -1178,8 +1336,12 @@ JAVAHL_CHECK_TARGET=check-javahl
else # 1.8
BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER)
SERF_FLAG=--with-serf="$(PREFIX)/serf"
+# serf >= 1.3.0 is built with scons and no longer sets up rpath linker flags,
+# so we have to do that ourselves :(
+SERF_LDFLAG=-Wl,-rpath,$(PREFIX)/serf/lib -Wl,-rpath,$(PREFIX)/bdb/lib
MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so
MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so
+MOD_DONTDOTHAT=modules/svn-$(WC)/mod_dontdothat.so
LIBMAGIC_FLAG=--with-libmagic=$(PREFIX)/libmagic
JAVAHL_CHECK_TARGET=check-all-javahl
endif
@@ -1192,21 +1354,29 @@ else
endif
ifdef PROFILE
-SVN_ALL_STATIC=--enable-all-static
+SVN_STATIC_FLAG=--enable-all-static
else
+SVN_STATIC_FLAG=--disable-static
SVN_WITH_HTTPD=--with-apxs="$(PREFIX)/httpd/bin/apxs" \
--with-apache-libexecdir="$(PREFIX)/httpd/modules/svn-$(WC)"
SVN_WITH_SASL=--with-sasl="$(PREFIX)/cyrus-sasl"
endif
-# configure svn
-$(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)/.retrieved $(DISTDIR)/$(JUNIT_DIST)
+$(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)/.retrieved $(DISTDIR)/$(JUNIT_DIST) \
+ $(APR_OBJDIR)/.installed $(APR_UTIL_OBJDIR)/.installed \
+ $(BDB_OBJDIR)/.installed $(SQLITE_OBJDIR)/.installed \
+ $(HTTPD_OBJDIR)/.installed $(CYRUS_SASL_OBJDIR)/.installed \
+ $(LIBMAGIC_OBJDIR)/.installed $(NEON_OBJDIR)/.installed \
+ $(SERF_OBJDIR)/.installed $(SERF_OLD_OBJDIR)/.installed \
+ $(RUBY_OBJDIR)/.installed $(PYTHON_OBJDIR)/.installed
cd $(SVN_SRCDIR) && ./autogen.sh
cd $(svn_builddir) && \
- env LDFLAGS="-L$(PREFIX)/neon/lib -L$(PREFIX)/apr/lib" \
+ env LDFLAGS="-L$(PREFIX)/neon/lib -L$(PREFIX)/apr/lib $(SERF_LDFLAG) -L$(PREFIX)/gettext/lib -L$(PREFIX)/iconv/lib" \
+ CFLAGS="-I$(PREFIX)/gettext/include -DAPR_POOL_DEBUG" \
+ CXXFLAGS="-I$(PREFIX)/gettext/include -DAPR_POOL_DEBUG" \
LD_LIBRARY_PATH="$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH" \
GREP="`which grep`" \
- PATH=$(PREFIX)/ruby/bin:$(PREFIX)/python/bin:$$PATH \
+ PATH=$(PREFIX)/ruby/bin:$(PREFIX)/python/bin:$(PREFIX)/gettext/bin:$$PATH \
$(SVN_SRCDIR)/configure \
--enable-maintainer-mode \
--with-ssl \
@@ -1223,20 +1393,20 @@ $(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)/.retrieved $(DISTDIR)/$(JUNIT_DIST)
--disable-mod-activation \
$(JAVAHL_FLAG) \
$(LIBMAGIC_FLAG) \
- $(SVN_ALL_STATIC) \
+ $(SVN_STATIC_FLAG) \
$(DISABLE_NEON_VERSION_CHECK)
touch $@
# compile svn
$(SVN_OBJDIR)/.compiled: $(SVN_OBJDIR)/.configured
cd $(svn_builddir) \
- && make EXTRA_CFLAGS="$(PROFILE_CFLAGS) $(W_NO_SYSTEM_HEADERS)"
+ && env MAKEFLAGS= make EXTRA_CFLAGS="$(PROFILE_CFLAGS) $(W_NO_SYSTEM_HEADERS)"
touch $@
# install svn
$(SVN_OBJDIR)/.installed: $(SVN_OBJDIR)/.compiled
cd $(svn_builddir) \
- && make install
+ && env MAKEFLAGS= make install install-tools
touch $@
# SWIG 1.x and 2.x are not compatible. If SWIG 2.x is used to generated .swg
@@ -1246,44 +1416,44 @@ $(SVN_OBJDIR)/.installed: $(SVN_OBJDIR)/.compiled
# by the same version of SWIG.
$(SVN_OBJDIR)/.pre-generated-swig-cleaned:
-cd $(svn_builddir) \
- && make extraclean-swig
+ && env MAKEFLAGS= make clean-swig
touch $@
$(SVN_OBJDIR)/.bindings-compiled: $(SVN_OBJDIR)/.installed $(SVN_OBJDIR)/.pre-generated-swig-cleaned
cd $(svn_builddir) \
&& env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
- make swig-py
+ env MAKEFLAGS= make swig-py
cd $(svn_builddir) && \
env PATH=$(PREFIX)/ruby/bin:$$PATH \
- LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) make swig-rb
+ LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) env MAKEFLAGS= make swig-rb
if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \
cd $(svn_builddir) \
&& env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
- make swig-pl; \
+ env MAKEFLAGS= make swig-pl; \
fi
if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
cd $(svn_builddir) \
- && make javahl; \
+ && env MAKEFLAGS= make javahl; \
fi
touch $@
$(SVN_OBJDIR)/.bindings-installed: $(SVN_OBJDIR)/.bindings-compiled
cd $(svn_builddir) \
&& env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
- make install-swig-py
+ env MAKEFLAGS= make install-swig-py
cd $(svn_builddir) && \
env PATH=$(PREFIX)/ruby/bin:$$PATH \
- LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) make install-swig-rb
+ LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) env MAKEFLAGS= make install-swig-rb
if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \
cd $(svn_builddir) \
- && make install-swig-pl-lib; \
+ && env MAKEFLAGS= make install-swig-pl-lib; \
cd subversion/bindings/swig/perl/native \
&& perl Makefile.PL PREFIX="$(SVN_PREFIX)" \
- && make install; \
+ && env MAKEFLAGS= make install; \
fi
if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
cd $(svn_builddir) \
- && make install-javahl; \
+ && env MAKEFLAGS= make install-javahl; \
fi
touch $@
@@ -1291,6 +1461,13 @@ $(SVN_OBJDIR)/.bindings-installed: $(SVN_OBJDIR)/.bindings-compiled
HTTPD_CHECK_CONF=$(PREFIX)/httpd/conf/httpd-svn-check-$(WC).conf
HTTPD_CHECK_USERS=$(PREFIX)/httpd/conf/httpd-svn-check-users
HTTPD_CHECK_PORT=8081
+MOD_DONTDOTHAT_CONF=$(PREFIX)/httpd/conf/dontdothat
+
+$(MOD_DONTDOTHAT_CONF):
+ mkdir -p $(dir $@)
+ echo > $@.tmp '[recursive-actions]'
+ echo >>$@.tmp '/ = deny'
+ mv -f $@.tmp $@
$(HTTPD_CHECK_USERS):
mkdir -p $(dir $@)
@@ -1298,12 +1475,13 @@ $(HTTPD_CHECK_USERS):
echo >>$@.tmp 'jconstant:xCGl35kV9oWCY'
mv -f $@.tmp $@
-$(HTTPD_CHECK_CONF): $(HTTPD_CHECK_USERS)
+$(HTTPD_CHECK_CONF): $(HTTPD_CHECK_USERS) $(MOD_DONTDOTHAT_CONF)
echo > $@.tmp '# httpd config for make check'
echo >>$@.tmp 'ServerRoot "$(PREFIX)/httpd"'
echo >>$@.tmp 'Listen localhost:$(HTTPD_CHECK_PORT)'
echo >>$@.tmp 'LoadModule dav_svn_module $(MOD_DAV_SVN)'
echo >>$@.tmp 'LoadModule authz_svn_module $(MOD_AUTHZ_SVN)'
+ echo >>$@.tmp 'LoadModule dontdothat_module $(MOD_DONTDOTHAT)'
echo >>$@.tmp 'DocumentRoot "$(PREFIX)/httpd/htdocs"'
echo >>$@.tmp '# These two Locations are used for "make check"'
echo >>$@.tmp '<Directory />'
@@ -1347,15 +1525,38 @@ endif
echo >>$@.tmp ' DAV svn'
echo >>$@.tmp ' SVNParentPath /tmp'
echo >>$@.tmp ' Allow from all'
+ echo >>$@.tmp ' #AuthType Basic'
+ echo >>$@.tmp ' #AuthName "Subversion Repository"'
+ echo >>$@.tmp ' #AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >>$@.tmp ' #Require valid-user'
ifeq ($(USE_HTTPV1),yes)
- echo >>$@.tmp ' SVNAdvertiseV2Protocol off'
+ echo >> $@.tmp ' SVNAdvertiseV2Protocol off'
endif
ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes)
- echo >>$@.tmp ' SVNPathAuthz short_circuit'
+ echo >> $@.tmp ' SVNPathAuthz short_circuit'
endif
echo >>$@.tmp '</Location>'
+ echo >>$@.tmp '# Location for tests using mod_dontdothat'
+ echo >>$@.tmp '<Location /ddt-test-work/repositories>'
+ echo >> $@.tmp 'DAV svn'
+ echo >> $@.tmp 'SVNParentPath "$(SVN_WC)/subversion/tests/cmdline/svn-test-work/repositories"'
+ echo >> $@.tmp 'AuthzSVNAccessFile "$(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz"'
+ echo >> $@.tmp 'AuthType Basic'
+ echo >> $@.tmp 'AuthName "Subversion Repository"'
+ echo >> $@.tmp 'AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz'
+ echo >> $@.tmp 'AuthUserFile $(HTTPD_CHECK_USERS)'
+ echo >> $@.tmp 'Require valid-user'
+ifeq ($(USE_HTTPV1),yes)
+ echo >> $@.tmp ' SVNAdvertiseV2Protocol off'
+endif
+ echo >> $@.tmp 'DontDoThatConfigFile "$(MOD_DONTDOTHAT_CONF)"'
+ echo >> $@.tmp '</Location>'
echo >>$@.tmp 'RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)$$ /svn-test-work/repositories/$$1'
echo >>$@.tmp 'RedirectMatch ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)$$ /svn-test-work/repositories/$$1'
+ echo >>$@.tmp 'Include "conf/$(SVN_REL_WC)*-custom.conf"'
+ echo >> $@.tmp '#SVNInMemoryCacheSize 0'
+ echo >> $@.tmp '#SVNCacheTextDeltas Off'
+ echo >> $@.tmp '#SVNCacheRevProps Off'
mv -f $@.tmp $@
.PHONY: libpath
@@ -1407,7 +1608,7 @@ start-httpd-debug: $(HTTPD_CHECK_CONF)
@sleep 1
gdb $(PREFIX)/httpd/bin/httpd `cat $(PREFIX)/httpd/logs/httpd.pid`
-stop-httpd:
+stop-httpd: $(HTTPD_CHECK_CONF)
$(HTTPD_STOP_CMD)
start-svnserve: $(SVN_OBJDIR)/.compiled
@@ -1421,7 +1622,9 @@ define do_check
echo "Begin test: $(subst svn-check-,,$@) x $$fs"; \
test -d "$(RAMDISK)/tmp" && export TMPDIR="$(RAMDISK)/tmp"; \
env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) $(LIB_PTHREAD_HACK) \
- make check PARALLEL=$(PARALLEL) CLEANUP=$(CLEANUP) $1 FS_TYPE=$$fs; \
+ env MAKEFLAGS= make check PARALLEL=$(PARALLEL) CLEANUP=$(CLEANUP) \
+ EXCLUSIVE_WC_LOCKS=$(EXCLUSIVE_WC_LOCKS) \
+ MEMCACHED_SERVER=$(MEMCACHED_SERVER) $1 FS_TYPE=$$fs; \
for log in tests.log fails.log; do \
test -f $$log && mv -f $$log $$log.$@-$$fs; \
done; \
@@ -1477,14 +1680,14 @@ svn-check-swig-pl:
(cd $(svn_builddir) && \
env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
$(LIB_PTHREAD_HACK) \
- make check-swig-pl 2>&1) | \
+ env MAKEFLAGS= make check-swig-pl 2>&1) | \
tee $(svn_builddir)/tests.log.bindings.pl; \
fi
svn-check-swig-py:
-(cd $(svn_builddir) && \
env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
- make check-swig-py 2>&1) | \
+ env MAKEFLAGS= make check-swig-py 2>&1) | \
tee $(svn_builddir)/tests.log.bindings.py
# We add the svn prefix to PATH here because the ruby tests
@@ -1495,14 +1698,14 @@ svn-check-swig-rb:
LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
PATH=$(SVN_PREFIX)/bin:$$PATH \
$(LIB_PTHREAD_HACK) \
- make check-swig-rb 2>&1) | \
+ env MAKEFLAGS= make check-swig-rb 2>&1) | \
tee $(svn_builddir)/tests.log.bindings.rb
svn-check-javahl:
-if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \
(cd $(svn_builddir) && \
env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \
- make $(JAVAHL_CHECK_TARGET) 2>&1) | \
+ env MAKEFLAGS= make $(JAVAHL_CHECK_TARGET) 2>&1) | \
tee $(svn_builddir)/tests.log.bindings.javahl; \
fi
diff --git a/tools/dev/wc-ng/svn-wc-db-tester.c b/tools/dev/wc-ng/svn-wc-db-tester.c
new file mode 100644
index 0000000..ccdd102
--- /dev/null
+++ b/tools/dev/wc-ng/svn-wc-db-tester.c
@@ -0,0 +1,269 @@
+/* svn-wc-db-tester.c
+ *
+ * This is a crude command line tool that makes it possible to
+ * run the wc-db validation checks directly.
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_cmdline.h"
+#include "svn_pools.h"
+#include "svn_wc.h"
+#include "svn_utf.h"
+#include "svn_path.h"
+#include "svn_opt.h"
+#include "svn_version.h"
+
+#include "private/svn_wc_private.h"
+#include "private/svn_cmdline_private.h"
+
+#include "../../../subversion/libsvn_wc/wc.h"
+#include "../../../subversion/libsvn_wc/wc_db.h"
+
+#include "svn_private_config.h"
+
+#define OPT_VERSION SVN_OPT_FIRST_LONGOPT_ID
+
+static svn_error_t *
+version(apr_pool_t *pool)
+{
+ return svn_opt_print_help4(NULL, "svn-wc-db-tester", TRUE, FALSE, FALSE,
+ NULL, NULL, NULL, NULL, NULL, NULL, pool);
+}
+
+static void
+usage(apr_pool_t *pool)
+{
+ svn_error_clear(svn_cmdline_fprintf
+ (stderr, pool,
+ _("Type 'svn-wc-db-tester --help' for usage.\n")));
+}
+
+struct verify_baton
+{
+ svn_boolean_t found_err;
+};
+
+static svn_error_t *
+verify_cb(void *baton,
+ const char *wc_abspath,
+ const char *local_relpath,
+ int op_depth,
+ int id,
+ const char *msg,
+ apr_pool_t *scratch_pool)
+{
+ struct verify_baton *vb = baton;
+
+ if (op_depth >= 0)
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool, "%s (depth=%d) DBV%04d: %s\n",
+ local_relpath, op_depth, id, msg));
+ }
+ else
+ {
+ SVN_ERR(svn_cmdline_printf(scratch_pool, "%s DBV%04d: %s\n",
+ local_relpath, id, msg));
+ }
+
+ vb->found_err = TRUE;
+ return SVN_NO_ERROR;
+}
+
+static svn_error_t *
+verify_db(int argc, const char *path, apr_pool_t *pool)
+{
+ const char *local_abspath;
+ svn_wc_context_t *wc_ctx;
+ struct verify_baton vb = { FALSE };
+
+ /* Read the parameters */
+ path = svn_dirent_internal_style(path, pool);
+
+ SVN_ERR(svn_dirent_get_absolute(&local_abspath, path, pool));
+
+ SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, pool, pool));
+
+ SVN_ERR(svn_wc__db_verify_db_full(wc_ctx->db, local_abspath,
+ verify_cb, &vb, pool));
+
+ if (vb.found_err)
+ return svn_error_create(SVN_ERR_WC_PATH_UNEXPECTED_STATUS, NULL,
+ _("Found one or more potential wc.db inconsistencies"));
+
+ return SVN_NO_ERROR;
+}
+
+
+static void
+help(const apr_getopt_option_t *options, apr_pool_t *pool)
+{
+ svn_error_clear
+ (svn_cmdline_fprintf
+ (stdout, pool,
+ _("usage: svn-wc-db-tester [OPTIONS] WC_PATH\n\n"
+ " Run verifications on the working copy\n"
+ "\n"
+ " WC_PATH's parent directory must be a working copy, otherwise a\n"
+ " tree conflict cannot be raised.\n"
+ "\n"
+ "Valid options:\n")));
+ while (options->description)
+ {
+ const char *optstr;
+ svn_opt_format_option(&optstr, options, TRUE, pool);
+ svn_error_clear(svn_cmdline_fprintf(stdout, pool, " %s\n", optstr));
+ ++options;
+ }
+}
+
+
+/* Version compatibility check */
+static svn_error_t *
+check_lib_versions(void)
+{
+ static const svn_version_checklist_t checklist[] =
+ {
+ { "svn_subr", svn_subr_version },
+ { "svn_wc", svn_wc_version },
+ { NULL, NULL }
+ };
+ SVN_VERSION_DEFINE(my_version);
+
+ return svn_ver_check_list2(&my_version, checklist, svn_ver_equal);
+}
+
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
+{
+ apr_getopt_t *os;
+ const apr_getopt_option_t options[] =
+ {
+ {"help", 'h', 0, N_("display this help")},
+ {"version", OPT_VERSION, 0,
+ N_("show program version information")},
+ {0, 0, 0, 0}
+ };
+ apr_array_header_t *remaining_argv;
+
+ /* Check library versions */
+ SVN_ERR(check_lib_versions());
+
+#if defined(WIN32) || defined(__CYGWIN__)
+ /* Set the working copy administrative directory name. */
+ if (getenv("SVN_ASP_DOT_NET_HACK"))
+ {
+ SVN_ERR(svn_wc_set_adm_dir("_svn", pool));
+ }
+#endif
+
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
+
+ os->interleave = 1;
+ while (1)
+ {
+ int opt;
+ const char *arg;
+ apr_status_t status = apr_getopt_long(os, options, &opt, &arg);
+ if (APR_STATUS_IS_EOF(status))
+ break;
+ if (status != APR_SUCCESS)
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ switch (opt)
+ {
+ case 'h':
+ help(options, pool);
+ return SVN_NO_ERROR;
+ case OPT_VERSION:
+ SVN_ERR(version(pool));
+ return SVN_NO_ERROR;
+ default:
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+ }
+
+ /* Convert the remaining arguments to UTF-8. */
+ remaining_argv = apr_array_make(pool, 0, sizeof(const char *));
+ while (os->ind < argc)
+ {
+ const char *s;
+
+ SVN_ERR(svn_utf_cstring_to_utf8(&s, os->argv[os->ind++], pool));
+ APR_ARRAY_PUSH(remaining_argv, const char *) = s;
+ }
+
+ if (remaining_argv->nelts != 1)
+ {
+ usage(pool);
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
+ }
+
+ /* Do the main task */
+ SVN_ERR(verify_db(remaining_argv->nelts,
+ APR_ARRAY_IDX(remaining_argv, 0, const char *),
+ pool));
+
+ return SVN_NO_ERROR;
+}
+
+int
+main(int argc, const char *argv[])
+{
+ apr_pool_t *pool;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
+
+ /* Initialize the app. */
+ if (svn_cmdline_init("svn-wc-db-tester", stderr) != EXIT_SUCCESS)
+ return EXIT_FAILURE;
+
+ /* Create our top-level pool. Use a separate mutexless allocator,
+ * given this application is single threaded.
+ */
+ pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
+
+ err = sub_main(&exit_code, argc, argv, pool);
+
+ /* Flush stdout and report if it fails. It would be flushed on exit anyway
+ but this makes sure that output is not silently lost if it fails. */
+ err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+ if (err)
+ {
+ exit_code = EXIT_FAILURE;
+ svn_cmdline_handle_exit_error(err, NULL, "svn-wc-db-tester: ");
+ }
+
+ svn_pool_destroy(pool);
+ return exit_code;
+}
diff --git a/tools/dev/which-error.py b/tools/dev/which-error.py
index dc6a8f5..46086ac 100755
--- a/tools/dev/which-error.py
+++ b/tools/dev/which-error.py
@@ -23,10 +23,10 @@
# under the License.
# ====================================================================
#
-# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/dev/which-error.py $
-# $LastChangedDate: 2012-03-30 20:29:32 +0000 (Fri, 30 Mar 2012) $
-# $LastChangedBy: danielsh $
-# $LastChangedRevision: 1307598 $
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/dev/which-error.py $
+# $LastChangedDate: 2013-12-05 00:42:34 +0000 (Thu, 05 Dec 2013) $
+# $LastChangedBy: breser $
+# $LastChangedRevision: 1547977 $
#
import errno
@@ -72,7 +72,11 @@ def get_errors():
## errno values.
errs.update(errno.errorcode)
## APR-defined errors, from apr_errno.h.
- for line in open(os.path.join(os.path.dirname(sys.argv[0]), 'aprerr.txt')):
+ dirname = os.path.dirname(os.path.realpath(__file__))
+ for line in open(os.path.join(dirname, 'aprerr.txt')):
+ # aprerr.txt parsing duplicated in gen_base.py:write_errno_table()
+ if line.startswith('#'):
+ continue
key, _, val = line.split()
errs[int(val)] = key
## Subversion errors, from svn_error_codes.h.
diff --git a/tools/dev/x509-parser.c b/tools/dev/x509-parser.c
new file mode 100644
index 0000000..882bf6c
--- /dev/null
+++ b/tools/dev/x509-parser.c
@@ -0,0 +1,178 @@
+/* x509-parser.c -- print human readable info from an X.509 certificate
+ *
+ * ====================================================================
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ * ====================================================================
+ */
+
+#include "svn_pools.h"
+#include "svn_cmdline.h"
+#include "svn_string.h"
+#include "svn_dirent_uri.h"
+#include "svn_io.h"
+#include "svn_base64.h"
+#include "svn_x509.h"
+#include "svn_time.h"
+
+#include "svn_private_config.h"
+
+#define PEM_BEGIN_CERT "-----BEGIN CERTIFICATE-----"
+#define PEM_END_CERT "-----END CERTIFICATE-----"
+
+static svn_error_t *
+show_cert(const svn_string_t *der_cert, apr_pool_t *scratch_pool)
+{
+ svn_x509_certinfo_t *certinfo;
+ const apr_array_header_t *hostnames;
+
+ SVN_ERR(svn_x509_parse_cert(&certinfo, der_cert->data, der_cert->len,
+ scratch_pool, scratch_pool));
+
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Subject: %s\n"),
+ svn_x509_certinfo_get_subject(certinfo, scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Valid from: %s\n"),
+ svn_time_to_human_cstring(
+ svn_x509_certinfo_get_valid_from(certinfo),
+ scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Valid until: %s\n"),
+ svn_time_to_human_cstring(
+ svn_x509_certinfo_get_valid_to(certinfo),
+ scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Issuer: %s\n"),
+ svn_x509_certinfo_get_issuer(certinfo, scratch_pool)));
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Fingerprint: %s\n"),
+ svn_checksum_to_cstring_display(
+ svn_x509_certinfo_get_digest(certinfo),
+ scratch_pool)));
+
+ hostnames = svn_x509_certinfo_get_hostnames(certinfo);
+ if (hostnames && !apr_is_empty_array(hostnames))
+ {
+ int i;
+ svn_stringbuf_t *buf = svn_stringbuf_create_empty(scratch_pool);
+ for (i = 0; i < hostnames->nelts; ++i)
+ {
+ const char *hostname = APR_ARRAY_IDX(hostnames, i, const char*);
+ if (i > 0)
+ svn_stringbuf_appendbytes(buf, ", ", 2);
+ svn_stringbuf_appendbytes(buf, hostname, strlen(hostname));
+ }
+ SVN_ERR(svn_cmdline_printf(scratch_pool, _("Hostnames: %s\n"),
+ buf->data));
+ }
+
+ return SVN_NO_ERROR;
+}
+
+static svn_boolean_t
+is_der_cert(const svn_string_t *raw)
+{
+ /* really simplistic fingerprinting of a DER. By definition it must
+ * start with an ASN.1 tag of a constructed (0x20) sequence (0x10).
+ * It's somewhat unfortunate that 0x30 happens to also come out to the
+ * ASCII for '0' which may mean this will create false positives. */
+ return raw->data[0] == 0x30 ? TRUE : FALSE;
+}
+
+static svn_error_t *
+get_der_cert_from_stream(const svn_string_t **der_cert, svn_stream_t *in,
+ apr_pool_t *pool)
+{
+ svn_string_t *raw;
+ SVN_ERR(svn_string_from_stream(&raw, in, pool, pool));
+
+ *der_cert = NULL;
+
+ /* look for a DER cert */
+ if (is_der_cert(raw))
+ {
+ *der_cert = raw;
+ return SVN_NO_ERROR;
+ }
+ else
+ {
+ const svn_string_t *base64_decoded;
+ const char *start, *end;
+
+ /* Try decoding as base64 without headers */
+ base64_decoded = svn_base64_decode_string(raw, pool);
+ if (base64_decoded && is_der_cert(base64_decoded))
+ {
+ *der_cert = base64_decoded;
+ return SVN_NO_ERROR;
+ }
+
+      /* Try decoding as a PEM with beginning and ending headers. */
+ start = strstr(raw->data, PEM_BEGIN_CERT);
+ end = strstr(raw->data, PEM_END_CERT);
+ if (start && end && end > start)
+ {
+ svn_string_t *encoded;
+
+ start += sizeof(PEM_BEGIN_CERT) - 1;
+ end -= 1;
+ encoded = svn_string_ncreate(start, end - start, pool);
+ base64_decoded = svn_base64_decode_string(encoded, pool);
+ if (is_der_cert(base64_decoded))
+ {
+ *der_cert = base64_decoded;
+ return SVN_NO_ERROR;
+ }
+ }
+ }
+
+ return svn_error_create(SVN_ERR_X509_CERT_INVALID_PEM, NULL,
+ _("Couldn't find certificate in input data"));
+}
+
+int main (int argc, const char *argv[])
+{
+ apr_pool_t *pool = NULL;
+ svn_error_t *err;
+ svn_stream_t *in;
+
+ apr_initialize();
+ atexit(apr_terminate);
+
+ pool = svn_pool_create(NULL);
+
+ if (argc == 2)
+ {
+ const char *target = svn_dirent_canonicalize(argv[1], pool);
+ err = svn_stream_open_readonly(&in, target, pool, pool);
+ }
+ else if (argc == 1)
+ {
+ err = svn_stream_for_stdin(&in, pool);
+ }
+ else
+ err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL, _("Too many arguments"));
+
+ if (!err)
+ {
+ const svn_string_t *der_cert;
+ err = get_der_cert_from_stream(&der_cert, in, pool);
+ if (!err)
+ err = show_cert(der_cert, pool);
+ }
+
+ if (err)
+ return svn_cmdline_handle_exit_error(err, pool, "x509-parser: ");
+
+ return 0;
+}
diff --git a/tools/diff/diff.c b/tools/diff/diff.c
index c4b5e9d..c69a6d9 100644
--- a/tools/diff/diff.c
+++ b/tools/diff/diff.c
@@ -43,9 +43,11 @@ do_diff(svn_stream_t *ostream,
SVN_ERR(svn_diff_file_diff_2(&diff, original, modified, options, pool));
*has_changes = svn_diff_contains_diffs(diff);
- return svn_diff_file_output_unified3(ostream, diff, original, modified,
+ return svn_diff_file_output_unified4(ostream, diff, original, modified,
NULL, NULL, SVN_APR_LOCALE_CHARSET,
- NULL, show_c_function, pool);
+ NULL, show_c_function,
+ options->context_size,
+ NULL, NULL, pool);
}
static void
@@ -112,7 +114,16 @@ int main(int argc, const char *argv[])
diff_options->ignore_space = svn_diff_file_ignore_space_all;
continue;
}
+
APR_ARRAY_PUSH(options_array, const char *) = argv[i];
+
+ /* Special case: '-U' takes an argument, so capture the
+ * next argument in the array. */
+ if (argv[i][1] == 'U' && !argv[i][2])
+ {
+ i++;
+ APR_ARRAY_PUSH(options_array, const char *) = argv[i];
+ }
}
else
{
diff --git a/tools/diff/diff3.c b/tools/diff/diff3.c
index 63d7dec..5502254 100644
--- a/tools/diff/diff3.c
+++ b/tools/diff/diff3.c
@@ -28,11 +28,19 @@
#include "svn_pools.h"
#include "svn_diff.h"
#include "svn_io.h"
+#include "svn_opt.h"
+#include "private/svn_token.h"
static svn_error_t *
do_diff3(svn_stream_t *ostream,
- const char *original, const char *modified, const char *latest,
+ const char *original,
+ const char *modified,
+ const char *latest,
+ const char *conflict_original,
+ const char *conflict_modified,
+ const char *conflict_latest,
+ svn_diff_conflict_display_style_t conflict_style,
svn_boolean_t *has_changes,
apr_pool_t *pool)
{
@@ -43,38 +51,129 @@ do_diff3(svn_stream_t *ostream,
*has_changes = svn_diff_contains_diffs(diff);
- SVN_ERR(svn_diff_file_output_merge2(ostream, diff,
+ SVN_ERR(svn_diff_file_output_merge3(ostream, diff,
original, modified, latest,
- NULL, NULL, NULL, NULL,
- svn_diff_conflict_display_modified_latest,
+ conflict_original,
+ conflict_modified,
+ conflict_latest,
+ "=======",
+ conflict_style,
+ NULL, NULL, /* cancel */
pool));
return NULL;
}
-int main(int argc, char *argv[])
+int main(int argc, const char *argv[])
{
apr_pool_t *pool;
svn_stream_t *ostream;
int rc;
- svn_error_t *svn_err;
+ svn_error_t *svn_err = SVN_NO_ERROR;
+ apr_getopt_t *opts;
+ svn_boolean_t help = FALSE;
+
+ enum {
+ conflict_style_opt = SVN_OPT_FIRST_LONGOPT_ID
+ };
+ static const apr_getopt_option_t options[] = {
+ {"conflict-style", conflict_style_opt, 1, ""},
+ {"label", 'L', 1, ""},
+ {"show-overlap", 'E', 0, ""},
+ {"merge", 'm', 0, ""},
+ {"help", 'h', 0, ""},
+ {NULL, '?', 0, ""},
+ {NULL, 0, 0, NULL}
+ };
+ svn_diff_conflict_display_style_t conflict_style
+ = svn_diff_conflict_display_modified_latest;
+ const svn_token_map_t style_map[] = {
+ { "modified-latest",
+ svn_diff_conflict_display_modified_latest },
+ { "resolved-modified-latest",
+ svn_diff_conflict_display_resolved_modified_latest },
+ { "modified-original-latest",
+ svn_diff_conflict_display_modified_original_latest },
+ { "modified",
+ svn_diff_conflict_display_modified },
+ { "latest",
+ svn_diff_conflict_display_latest },
+ { "only-conflicts",
+ svn_diff_conflict_display_only_conflicts },
+ {NULL, 0}
+ };
+ const char *conflict_original = NULL;
+ const char *conflict_modified = NULL;
+ const char *conflict_latest = NULL;
apr_initialize();
pool = svn_pool_create(NULL);
- svn_err = svn_stream_for_stdout(&ostream, pool);
+ apr_getopt_init(&opts, pool, argc, argv);
+ opts->interleave = 1;
+ while (!svn_err)
+ {
+ int opt;
+ const char *arg;
+ apr_status_t status = apr_getopt_long(opts, options, &opt, &arg);
+
+ if (APR_STATUS_IS_EOF(status))
+ break;
+ if (status != APR_SUCCESS)
+ {
+ svn_err = svn_error_wrap_apr(status, "getopt failure");
+ break;
+ }
+ switch (opt)
+ {
+ case conflict_style_opt:
+ {
+ int val;
+ svn_err = svn_token__from_word_err(&val, style_map, arg);
+ conflict_style = val;
+ break;
+ }
+ case 'L':
+ if (!conflict_modified)
+ conflict_modified = apr_pstrcat(pool, "<<<<<<< ", arg, SVN_VA_NULL);
+ else if (!conflict_original)
+ conflict_original = apr_pstrcat(pool, "||||||| ", arg, SVN_VA_NULL);
+ else if (!conflict_latest)
+ conflict_latest = apr_pstrcat(pool, ">>>>>>> ", arg, SVN_VA_NULL);
+ else
+ svn_err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ "too many labels");
+ break;
+ case 'E':
+ case 'm':
+ /* These are allowed and ignored so that all the options
+ passed when invoking --diff3-cmd are accepted as that
+ makes it easier to use this as an external diff3
+ program. */
+ break;
+ case 'h':
+ case '?':
+ help = TRUE;
+ break;
+ }
+ }
+
+ if (!svn_err)
+ svn_err = svn_stream_for_stdout(&ostream, pool);
if (svn_err)
{
svn_handle_error2(svn_err, stdout, FALSE, "diff3: ");
+ svn_error_clear(svn_err);
rc = 2;
}
- else if (argc == 4)
+ else if (argc - opts->ind == 3 && !help)
{
svn_boolean_t has_changes;
- svn_err = do_diff3(ostream, argv[2], argv[1], argv[3],
- &has_changes, pool);
+ svn_err = do_diff3(ostream, argv[argc-2], argv[argc-3], argv[argc-1],
+ conflict_original, conflict_modified, conflict_latest,
+ conflict_style, &has_changes, pool);
if (svn_err == NULL)
{
rc = has_changes ? 1 : 0;
@@ -88,8 +187,38 @@ int main(int argc, char *argv[])
else
{
svn_error_clear(svn_stream_printf(ostream, pool,
- "Usage: %s <mine> <older> <yours>\n",
- argv[0]));
+ "Usage: %s [options] <mine> <older> <yours>\n"
+ "Options:\n"
+ " --conflict-style STYLE\n"
+ " where STYLE can be:\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ " %s\n"
+ "\n"
+ " --label [-L] LABEL\n"
+ " can be repeated up to three times\n"
+ "\n"
+ " --merge [-m]\n"
+ " ignored (present for compatibility)\n"
+ "\n"
+ " --show-overlap [-E]\n"
+ " ignored (present for compatibility)\n",
+ argv[0],
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_modified_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_resolved_modified_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_modified_original_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_modified),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_latest),
+ svn_token__to_word(style_map,
+ svn_diff_conflict_display_only_conflicts)));
rc = 2;
}
diff --git a/tools/diff/diff4.c b/tools/diff/diff4.c
index 084184c..58216a8 100644
--- a/tools/diff/diff4.c
+++ b/tools/diff/diff4.c
@@ -42,10 +42,11 @@ do_diff4(svn_stream_t *ostream,
SVN_ERR(svn_diff_file_diff4_2(&diff, original, modified, latest, ancestor,
svn_diff_file_options_create(pool), pool));
- SVN_ERR(svn_diff_file_output_merge2(ostream, diff,
+ SVN_ERR(svn_diff_file_output_merge3(ostream, diff,
original, modified, latest,
NULL, NULL, NULL, NULL,
svn_diff_conflict_display_modified_latest,
+ NULL, NULL, /* cancel */
pool));
return NULL;
diff --git a/tools/dist/backport.pl b/tools/dist/backport.pl
index ab5c823..0c5f6be 100755
--- a/tools/dist/backport.pl
+++ b/tools/dist/backport.pl
@@ -1,8 +1,10 @@
-#!/usr/bin/perl -l
+#!/usr/bin/perl
use warnings;
use strict;
use feature qw/switch say/;
+#no warnings 'experimental::smartmatch';
+
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
@@ -20,252 +22,1259 @@ use feature qw/switch say/;
# specific language governing permissions and limitations
# under the License.
+use Carp qw/croak confess carp cluck/;
+use Digest ();
use Term::ReadKey qw/ReadMode ReadKey/;
+use File::Basename qw/basename dirname/;
+use File::Copy qw/copy move/;
use File::Temp qw/tempfile/;
-use POSIX qw/ctermid/;
+use IO::Select ();
+use IPC::Open3 qw/open3/;
+use POSIX qw/ctermid strftime isprint isspace/;
+use Text::Wrap qw/wrap/;
+use Tie::File ();
+
+############### Start of reading values from environment ###############
+# Programs we use.
+#
+# TODO: document which are interpreted by sh and which should point to binary.
my $SVN = $ENV{SVN} || 'svn'; # passed unquoted to sh
+my $SHELL = $ENV{SHELL} // '/bin/sh';
my $VIM = 'vim';
+my $EDITOR = $ENV{SVN_EDITOR} // $ENV{VISUAL} // $ENV{EDITOR} // 'ed';
+my $PAGER = $ENV{PAGER} // 'less' // 'cat';
+
+# Mode flags.
+package Mode {
+ use constant {
+ AutoCommitApproveds => 1, # used by nightly commits (svn-role)
+ Conflicts => 2, # used by the hourly conflicts-detection buildbot
+ Interactive => 3,
+ };
+};
+my $YES = ($ENV{YES} // "0") =~ /^(1|yes|true)$/i; # batch mode: eliminate prompts, add sleeps
+my $MAY_COMMIT = ($ENV{MAY_COMMIT} // "false") =~ /^(1|yes|true)$/i;
+my $MODE = ($YES ? ($MAY_COMMIT ? Mode::AutoCommitApproveds : Mode::Conflicts )
+ : Mode::Interactive );
+
+# Other knobs.
+my $VERBOSE = 0;
+my $DEBUG = (exists $ENV{DEBUG}); # 'set -x', etc
+
+# Force all these knobs to be usable via @sh.
+my @sh = qw/false true/;
+die if grep { ($sh[$_] eq 'true') != !!$_ } $DEBUG, $MAY_COMMIT, $VERBOSE, $YES;
+
+# Username for entering votes.
+my $SVN_A_O_REALM = '<https://svn.apache.org:443> ASF Committers';
+my ($AVAILID) = $ENV{AVAILID} // do {
+ local $_ = `$SVN auth svn.apache.org:443 2>/dev/null`; # TODO: pass $SVN_A_O_REALM
+ ($? == 0 && /Auth.*realm: \Q$SVN_A_O_REALM\E\nUsername: (.*)/) ? $1 : undef
+} // do {
+ local $/; # slurp mode
+ my $fh;
+ my $dir = "$ENV{HOME}/.subversion/auth/svn.simple/";
+ my $filename = Digest->new("MD5")->add($SVN_A_O_REALM)->hexdigest;
+ open $fh, '<', "$dir/$filename"
+ and <$fh> =~ /K 8\nusername\nV \d+\n(.*)/
+ ? $1
+ : undef
+};
+
+unless (defined $AVAILID) {
+ unless ($MODE == Mode::Conflicts) {
+ warn "Username for commits (of votes/merges) not found; "
+ ."it will be possible to review nominations but not to commit votes "
+ ."or merges.\n";
+ warn "Press the 'any' key to continue...\n";
+ die if $MODE == Mode::AutoCommitApproveds; # unattended mode; can't prompt.
+ ReadMode 'cbreak';
+ ReadKey 0;
+ ReadMode 'restore';
+ }
+}
+
+############## End of reading values from the environment ##############
+
+# Constants.
my $STATUS = './STATUS';
+my $STATEFILE = './.backports1';
my $BRANCHES = '^/subversion/branches';
+my $TRUNK = '^/subversion/trunk';
+$ENV{LC_ALL} = "C"; # since we parse 'svn info' output and use isprint()
-my $YES = $ENV{YES}; # batch mode: eliminate prompts, add sleeps
-my $WET_RUN = qw[false true][1]; # don't commit
-my $DEBUG = qw[false true][0]; # 'set -x', etc
-
-# derived values
+# Globals.
+my %ERRORS = ();
+# TODO: can $MERGED_SOMETHING be removed and references to it replaced by scalar(@MERGES_TODAY) ?
+# alternately, does @MERGES_TODAY need to be purged whenever $MERGED_SOMETHING is reset?
+# The scalar is only used in interactive runs, but the array is used in
+# svn-role batch mode too.
+my @MERGES_TODAY;
+my $MERGED_SOMETHING = 0;
my $SVNq;
+# Derived values.
+my $SVNvsn = do {
+ my ($major, $minor, $patch) = `$SVN --version -q` =~ /^(\d+)\.(\d+)\.(\d+)/;
+ 1e6*$major + 1e3*$minor + $patch;
+};
$SVN .= " --non-interactive" if $YES or not defined ctermid;
$SVNq = "$SVN -q ";
-$SVNq =~ s/-q// if $DEBUG eq 'true';
+$SVNq =~ s/-q// if $DEBUG;
-sub usage {
- my $basename = $0;
- $basename =~ s#.*/##;
+
+sub backport_usage {
+ my $basename = basename $0;
print <<EOF;
-Run this from the root of your release branch (e.g., 1.6.x) working copy.
+backport.pl: a tool for reviewing, merging, and voting on STATUS entries.
+
+Normally, invoke this with CWD being the root of the stable branch (e.g.,
+1.8.x):
-For each entry in STATUS, you will be prompted whether to merge it.
+ Usage: test -e \$d/STATUS && cd \$d && \\
+ backport.pl [PATTERN]
+ (where \$d is a working copy of branches/1.8.x)
-WARNING:
-If you accept the prompt, $basename will revert all local changes and will
-commit the merge immediately.
+Alternatively, invoke this via a symlink named "b" placed at the same directory
+as the STATUS file, in which case the CWD doesn't matter (the script will cd):
+
+ Usage: ln -s /path/to/backport.pl \$d/b && \\
+ \$d/b [PATTERN]
+ (where \$d is a working copy of branches/1.8.x)
+
+In either case, the ./STATUS file should be at HEAD. If it has local mods,
+they will be preserved through 'revert' operations but included in 'commit'
+operations.
+
+If PATTERN is provided, only entries which match PATTERN are considered. The
+sense of "match" is either substring (fgrep) or Perl regexp (with /msi).
+
+In interactive mode (the default), you will be prompted once per STATUS entry.
+At a prompt, you have the following options:
+
+y: Run a merge. It will not be committed.
+ WARNING: This will run 'update' and 'revert -R ./'.
+l: Show logs for the entries being nominated.
+v: Show the full entry (the prompt only shows an abridged version).
+q: Quit the "for each nomination" loop.
+±1: Enter a +1 or -1 vote
+ You will be prompted to commit your vote at the end.
+±0: Enter a +0 or -0 vote
+ You will be prompted to commit your vote at the end.
+a: Move the entry to the "Approved changes" section.
+ When both approving and voting on an entry, approve first: for example,
+ to enter a third +1 vote, type "a" "+" "1".
+e: Edit the entry in $EDITOR.
+ You will be prompted to commit your edits at the end.
+N: Move to the next entry. Cache the entry in '$STATEFILE' and do not
+ prompt for it again (even across runs) until it is changed.
+ : Move to the next entry, without adding the current one to the cache.
+ (That's a space character, ASCII 0x20.)
+
+After running a merge, you have the following options:
+
+y: Open a shell.
+d: View a diff.
+N: Move to the next entry.
+
+To commit a merge, you have two options: either answer 'y' to the second prompt
+to open a shell, and manually run 'svn commit' therein; or set \$MAY_COMMIT=1
+in the environment before running the script, in which case answering 'y'
+to the first prompt will not only run the merge but also commit it.
+
+There are two batch modes. The first mode is used by the nightly svn-role
+mergebot. It is enabled by setting \$YES and \$MAY_COMMIT to '1' in the
+environment. In this mode, the script will iterate the "Approved changes:"
+section and merge and commit each entry therein. To prevent an entry from
+being auto-merged, veto it or move it to a new section named "Approved, but
+merge manually:".
+
+The second batch mode is used by the hourly conflicts detector bot. It is
+triggered by having \$YES defined in the environment to '1' and \$MAY_COMMIT
+undefined. In this mode, the script will locally merge every nomination
+(including unapproved and vetoed ones), and complain to stderr if the merge
+failed due to a conflict. This mode never commits anything.
+
+The hourly conflicts detector bot turns red if any entry produced a merge
+conflict. When entry A depends on entry B for a clean merge, put a "Depends:"
+header on entry A to instruct the bot not to turn red due to A. (The header
+is not parsed; only its presence or absence matters.)
+
+Both batch modes also perform a basic sanity-check on entries that declare
+backport branches (via the "Branch:" header): if a backport branch is used, but
+at least one of the revisions enumerated in the entry title had not been merged
+from $TRUNK to the branch root, the hourly bot will turn red and
+nightly bot will skip the entry and email its admins. (The nightly bot does
+not email the list on failure, since it doesn't use buildbot.)
The 'svn' binary defined by the environment variable \$SVN, or otherwise the
'svn' found in \$PATH, will be used to manage the working copy.
EOF
}
+sub nominate_usage {
+ my $availid = $AVAILID // "(your username)";
+ my $basename = basename $0;
+ print <<EOF;
+nominate.pl: a tool for adding entries to STATUS.
+
+Usage: $0 "foo r42 bar r43 qux 45." "\$Some_justification"
+
+Will add:
+ * r42, r43, r45
+ (log message of r42)
+ Justification:
+ \$Some_justification
+ Votes:
+ +1: $availid
+to STATUS. Backport branches are detected automatically.
+
+The STATUS file in the current directory is used (unless argv[0] is "n", in
+which case the STATUS file in the directory of argv[0] is used; the intent
+is to create a symlink named "n" in the branch wc root).
+
+EOF
+# TODO: Optionally add a "Notes" section.
+# TODO: Look for backport branches named after issues.
+# TODO: Do a dry-run merge on added entries.
+# TODO: Do a dry-run merge on interactively-edited entries in backport.pl
+}
+
+# If $AVAILID is undefined, warn about it and return true.
+# Else return false.
+#
+# $_[0] is a string for inclusion in generated error messages.
+sub warned_cannot_commit {
+ my $caller_error_string = shift;
+ return 0 if defined $AVAILID;
+
+ warn "$0: $caller_error_string: unable to determine your username via \$AVAILID or svnauth(1) or ~/.subversion/auth/";
+ return 1;
+}
+
+sub digest_string {
+ Digest->new("MD5")->add(@_)->hexdigest
+}
+
+sub digest_entry($) {
+  # Canonicalize the number of trailing EOLs to two.  This matters when there's
+  # an empty line after the last entry in Approved, for example.
+ local $_ = shift;
+ s/\n*\z// and $_ .= "\n\n";
+ digest_string($_)
+}
+
sub prompt {
- local $\; # disable 'perl -l' effects
- print "Go ahead? ";
-
- # TODO: this part was written by trial-and-error
- ReadMode 'cbreak';
- my $answer = (ReadKey 0);
- print $answer, "\n";
- return ($answer =~ /^y/i) ? 1 : 0;
+ print $_[0]; shift;
+ my %args = @_;
+ my $getchar = sub {
+ my $answer;
+ do {
+ ReadMode 'cbreak';
+ $answer = (ReadKey 0);
+ ReadMode 'normal';
+ die if $@ or not defined $answer;
+ # Swallow terminal escape codes (e.g., arrow keys).
+ unless (isprint $answer or isspace $answer) {
+ $answer = (ReadKey -1) while defined $answer;
+ # TODO: provide an indication that the keystroke was sensed and ignored.
+ }
+ } until defined $answer and (isprint $answer or isspace $answer);
+ print $answer;
+ return $answer;
+ };
+
+ die "$0: called prompt() in non-interactive mode!" if $YES;
+ my $answer = $getchar->();
+ $answer .= $getchar->() if exists $args{extra} and $answer =~ $args{extra};
+ say "" unless $args{dontprint};
+ return $args{verbose}
+ ? $answer
+ : ($answer =~ /^y/i) ? 1 : 0;
+}
+
+# Bourne-escape a string.
+# Example:
+# >>> shell_escape(q[foo'bar]) eq q['foo'\''bar']
+# True
+sub shell_escape {
+ my (@reply) = map {
+ local $_ = $_; # the LHS $_ is mutable; the RHS $_ may not be.
+ s/\x27/'\\\x27'/g;
+ "'$_'"
+ } @_;
+ wantarray ? @reply : $reply[0]
+}
+
+sub shell_safe_path_or_url($) {
+ local $_ = shift;
+ return (m{^[A-Za-z0-9._:+/-]+$} and !/^-|^[+]/);
+}
+
+# Shell-safety-validating wrapper for File::Temp::tempfile
+sub my_tempfile {
+ my ($fh, $fn) = tempfile();
+ croak "Tempfile name '$fn' not shell-safe; aborting"
+ unless shell_safe_path_or_url $fn;
+ return ($fh, $fn);
+}
+
+# The first argument is a shell script. Run it and return the shell's
+# exit code, and stdout and stderr as references to arrays of lines.
+sub run_in_shell($) {
+ my $script = shift;
+ my $pid = open3 \*SHELL_IN, \*SHELL_OUT, \*SHELL_ERR, qw#/bin/sh#;
+ # open3 raises exception when it fails; no need to error check
+
+ print SHELL_IN $script;
+ close SHELL_IN;
+
+ # Read loop: tee stdout,stderr to arrays.
+ my $select = IO::Select->new(\*SHELL_OUT, \*SHELL_ERR);
+ my (@readable, $outlines, $errlines);
+ while (@readable = $select->can_read) {
+ for my $fh (@readable) {
+ my $line = <$fh>;
+ $select->remove($fh) if eof $fh or not defined $line;
+ next unless defined $line;
+
+ if ($fh == \*SHELL_OUT) {
+ push @$outlines, $line;
+ print STDOUT $line;
+ }
+ if ($fh == \*SHELL_ERR) {
+ push @$errlines, $line;
+ print STDERR $line;
+ }
+ }
+ }
+ waitpid $pid, 0; # sets $?
+ return $?, $outlines, $errlines;
}
+
+# EXPECTED_ERROR_P is a subref called with EXIT_CODE, OUTLINES, ERRLINES,
+# expected to return TRUE if the error was expected (and should therefore not
+# be recorded in %ERRORS or cause backport.pl to exit non-zero). It may be
+# undef for the default behaviour, which treats all errors as unexpected.
sub merge {
- my %entry = @_;
+ my %entry = %{ +shift };
+ my $expected_error_p = shift // sub { 0 }; # by default, errors are unexpected
+ my $parno = $entry{parno} - scalar grep { $_->{parno} < $entry{parno} } @MERGES_TODAY;
- my ($logmsg_fh, $logmsg_filename) = tempfile();
- my ($mergeargs, $pattern);
+ my ($logmsg_fh, $logmsg_filename) = my_tempfile();
+ my (@mergeargs);
- my $backupfile = "backport_pl.$$.tmp";
+ my $shell_escaped_branch = shell_escape($entry{branch})
+ if defined($entry{branch});
if ($entry{branch}) {
- # NOTE: This doesn't escape the branch into the pattern.
- $pattern = sprintf '\V\(%s branch(es)?\|branches\/%s\|Branch(es)?:\n *%s\)', $entry{branch}, $entry{branch}, $entry{branch};
- $mergeargs = "--reintegrate $BRANCHES/$entry{branch}";
- print $logmsg_fh "Reintegrate the $entry{header}:";
- print $logmsg_fh "";
- } elsif (@{$entry{revisions}}) {
- $pattern = '^ [*] \V' . 'r' . $entry{revisions}->[0];
- $mergeargs = join " ", (map { "-c$_" } @{$entry{revisions}}), '^/subversion/trunk';
- if (@{$entry{revisions}} > 1) {
- print $logmsg_fh "Merge the $entry{header} from trunk:";
- print $logmsg_fh "";
+ if ($SVNvsn >= 1_008_000) {
+ @mergeargs = shell_escape "$BRANCHES/$entry{branch}";
+ say $logmsg_fh "Merge $entry{header}:";
} else {
- print $logmsg_fh "Merge r$entry{revisions}->[0] from trunk:";
- print $logmsg_fh "";
+ @mergeargs = shell_escape qw/--reintegrate/, "$BRANCHES/$entry{branch}";
+ say $logmsg_fh "Reintegrate $entry{header}:";
}
+ say $logmsg_fh "";
+ } elsif (@{$entry{revisions}}) {
+ @mergeargs = shell_escape(
+ ($entry{accept} ? "--accept=$entry{accept}" : ()),
+ (map { "-c$_" } @{$entry{revisions}}),
+ '--',
+ '^/subversion/trunk',
+ );
+ say $logmsg_fh
+ "Merge $entry{header} from trunk",
+ $entry{accept} ? ", with --accept=$entry{accept}" : "",
+ ":";
+ say $logmsg_fh "";
} else {
die "Don't know how to call $entry{header}";
}
- print $logmsg_fh $_ for @{$entry{entry}};
+ say $logmsg_fh $_ for @{$entry{entry}};
close $logmsg_fh or die "Can't close $logmsg_filename: $!";
+ my $reintegrated_word = ($SVNvsn >= 1_008_000) ? "merged" : "reintegrated";
my $script = <<"EOF";
#!/bin/sh
set -e
-if $DEBUG; then
+if $sh[$DEBUG]; then
set -x
fi
-$SVN diff > $backupfile
-$SVNq revert -R .
$SVNq up
-$SVNq merge $mergeargs
-$VIM -e -s -n -N -i NONE -u NONE -c '/$pattern/normal! dap' -c wq $STATUS
-if $WET_RUN; then
+$SVNq merge @mergeargs
+if [ "`$SVN status -q | wc -l`" -eq 1 ]; then
+ if [ -n "`$SVN diff | perl -lne 'print if s/^(Added|Deleted|Modified): //' | grep -vx svn:mergeinfo`" ]; then
+ # This check detects STATUS entries that name non-^/subversion/ revnums.
+ # ### Q: What if we actually commit a mergeinfo fix to trunk and then want
+ # ### to backport it?
+ # ### A: We don't merge it using the script.
+ echo "Bogus merge: includes only svn:mergeinfo changes!" >&2
+ exit 2
+ fi
+fi
+if $sh[$MAY_COMMIT]; then
+ # Remove the approved entry. The sentinel is important when the entry being
+ # removed is the very last one in STATUS, and in that case it has two effects:
+ # (1) keeps STATUS from ending in a run of multiple empty lines;
+ # (2) makes the \x{7d}k motion behave the same as in all other cases.
+ #
+ # Use a tempfile because otherwise backport_main() would see the "sentinel paragraph".
+ # Since backport_main() has an open descriptor, it will continue to see
+ # the STATUS inode that existed when control flow entered backport_main();
+ # since we replace the file on disk, when this block of code runs in the
+ # next iteration, it will see the new contents.
+ cp $STATUS $STATUS.t
+ (echo; echo; echo "sentinel paragraph") >> $STATUS.t
+ $VIM -e -s -n -N -i NONE -u NONE -c ':0normal! $parno\x{7d}kdap' -c wq $STATUS.t
+ $VIM -e -s -n -N -i NONE -u NONE -c '\$normal! dap' -c wq $STATUS.t
+ mv $STATUS.t $STATUS
$SVNq commit -F $logmsg_filename
-else
- echo "Committing:"
+elif ! $sh[$YES]; then
+ echo "Would have committed:"
+ echo '[[['
$SVN status -q
+ echo 'M STATUS (not shown in the diff)'
cat $logmsg_filename
+ echo ']]]'
fi
EOF
+ if ($MAY_COMMIT) {
+ # STATUS has been edited and the change has been committed
+ push @MERGES_TODAY, \%entry;
+ }
+
$script .= <<"EOF" if $entry{branch};
reinteg_rev=\`$SVN info $STATUS | sed -ne 's/Last Changed Rev: //p'\`
-if $WET_RUN; then
+if $sh[$MAY_COMMIT]; then
# Sleep to avoid out-of-order commit notifications
- if [ -n "\$YES" ]; then sleep 15; fi
- $SVNq rm $BRANCHES/$entry{branch} -m "Remove the '$entry{branch}' branch, reintegrated in r\$reinteg_rev."
- if [ -n "\$YES" ]; then sleep 1; fi
-else
- echo "Removing reintegrated '$entry{branch}' branch"
+ if $sh[$YES]; then sleep 15; fi
+ $SVNq rm $BRANCHES/$shell_escaped_branch -m "Remove the '"$shell_escaped_branch"' branch, $reintegrated_word in r\$reinteg_rev."
+ if $sh[$YES]; then sleep 1; fi
+elif ! $sh[$YES]; then
+ echo "Would remove $reintegrated_word '"$shell_escaped_branch"' branch"
fi
EOF
- open SHELL, '|-', qw#/bin/sh# or die $!;
- print SHELL $script;
- close SHELL or warn "$0: sh($?): $!";
+ # Include the time so it's easier to find the interesting backups.
+ my $backupfile = strftime "backport_pl.%Y%m%d-%H%M%S.$$.tmp", localtime;
+ die if -s $backupfile;
+ system("$SVN diff > $backupfile") == 0
+ or die "Saving a backup diff ($backupfile) failed ($?): $!";
+ if (-z $backupfile) {
+ unlink $backupfile;
+ } else {
+ warn "Local mods saved to '$backupfile'\n";
+ }
+
+ # If $MAY_COMMIT, then $script will edit STATUS anyway.
+ revert(verbose => 0, discard_STATUS => $MAY_COMMIT);
+
+ $MERGED_SOMETHING++;
+ my ($exit_code, $outlines, $errlines) = run_in_shell $script;
+ unless ($! == 0) {
+ die "system() failed to spawn subshell ($!); aborting";
+ }
+ unless ($exit_code == 0) {
+ warn "$0: subshell exited with code $exit_code (in '$entry{header}') "
+ ."(maybe due to 'set -e'?)";
+
+ # If we're committing, don't attempt to guess the problem and gracefully
+ # continue; just abort.
+ if ($MAY_COMMIT) {
+ die "Lost track of paragraph numbers; aborting";
+ }
- unlink $backupfile if -z $backupfile;
- unlink $logmsg_filename unless $? or $!;
+ # Record the error, unless the caller wants not to.
+ $ERRORS{$entry{id}} = [\%entry, "subshell exited with code $exit_code"]
+ unless $expected_error_p->($exit_code, $outlines, $errlines);
+ }
+
+ unlink $logmsg_filename unless $exit_code;
}
+# Input formats:
+# "1.8.x-r42",
+# "branches/1.8.x-r42",
+# "branches/1.8.x-r42/",
+# "subversion/branches/1.8.x-r42",
+# "subversion/branches/1.8.x-r42/",
+# "^/subversion/branches/1.8.x-r42",
+# "^/subversion/branches/1.8.x-r42/",
+# Return value:
+# "1.8.x-r42"
+# Works for any branch name that doesn't include slashes.
sub sanitize_branch {
local $_ = shift;
- s#.*/##;
s/^\s*//;
s/\s*$//;
+ s#/*$##;
+ s#.*/##;
return $_;
}
+sub logsummarysummary {
+ my $entry = shift;
+ join "",
+ $entry->{logsummary}->[0], ('[...]' x (0 < $#{$entry->{logsummary}}))
+}
+
# TODO: may need to parse other headers too?
sub parse_entry {
+ my $raw = shift;
+ my $parno = shift;
my @lines = @_;
+ my $depends;
+ my $accept;
my (@revisions, @logsummary, $branch, @votes);
# @lines = @_;
- # strip first three spaces
- $_[0] =~ s/^ \* / /;
- s/^ // for @_;
+  # strip spaces to match up with the indentation
+ $_[0] =~ s/^( *)\* //;
+ my $indentation = ' ' x (length($1) + 2);
+ s/^$indentation// for @_;
+
+ # Ignore trailing spaces: it is not significant on any field, and makes the
+ # regexes simpler.
+ s/\s*$// for @_;
# revisions
- $branch = sanitize_branch $1 if $_[0] =~ /^(\S*) branch$/;
- while ($_[0] =~ /^r/) {
- while ($_[0] =~ s/^r(\d+)(?:$|[,; ]+)//) {
- push @revisions, $1;
- }
+ $branch = sanitize_branch $1
+ and shift
+ if $_[0] =~ /^(\S*) branch$/ or $_[0] =~ m#branches/(\S+)#;
+ while ($_[0] =~ /^(?:r?\d+[,; ]*)+$/) {
+ push @revisions, ($_[0] =~ /(\d+)/g);
shift;
}
# summary
- push @logsummary, shift until $_[0] =~ /^\s*\w+:/ or not defined $_[0];
+ do {
+ push @logsummary, shift
+ } until $_[0] =~ /^\s*[][\w]+:/ or not defined $_[0];
# votes
unshift @votes, pop until $_[-1] =~ /^\s*Votes:/ or not defined $_[-1];
pop;
- # branch
+ # depends, branch, notes
+ # Ignored headers: Changes[*]
while (@_) {
- shift and next unless $_[0] =~ s/^\s*Branch(es)?:\s*//;
- $branch = sanitize_branch (shift || shift || die "Branch header found without value");
+ given (shift) {
+ when (/^Depends:/) {
+ $depends++;
+ }
+ if (s/^Branch:\s*//) {
+ $branch = sanitize_branch ($_ || shift || die "Branch header found without value");
+ }
+ if (s/^Notes:\s*//) {
+ my $notes = $_;
+ $notes .= shift while @_ and $_[0] !~ /^\w/;
+ my %accepts = map { $_ => 1 } ($notes =~ /--accept[ =]([a-z-]+)/g);
+ given (scalar keys %accepts) {
+ when (0) { }
+ when (1) { $accept = [keys %accepts]->[0]; }
+ default {
+ warn "Too many --accept values at '",
+ logsummarysummary({ logsummary => [@logsummary] }),
+ "'";
+ }
+ }
+ }
+ }
}
# Compute a header.
- my $header;
- $header = "r$revisions[0] group" if @revisions;
- $header = "$branch branch" if $branch;
- warn "No header for [@lines]" unless $header;
+ my ($header, $id);
+ if ($branch) {
+ $header = "the $branch branch";
+ $id = $branch;
+ } elsif (@revisions == 1) {
+ $header = "r$revisions[0]";
+ $id = "r$revisions[0]";
+ } elsif (@revisions) {
+ $header = "the r$revisions[0] group";
+ $id = "r$revisions[0]";
+ } else {
+ die "Entry '$raw' has neither revisions nor branch";
+ }
+ my $header_start = ($header =~ /^the/ ? ucfirst($header) : $header);
+
+ warn "Entry has both branch '$branch' and --accept=$accept specified\n"
+ if $branch and $accept;
return (
revisions => [@revisions],
logsummary => [@logsummary],
branch => $branch,
header => $header,
+ header_start => $header_start,
+ depends => $depends,
+ id => $id,
votes => [@votes],
entry => [@lines],
+ accept => $accept,
+ raw => $raw,
+ digest => digest_entry($raw),
+ parno => $parno, # $. from backport_main()
);
}
+sub edit_string {
+ # Edits $_[0] in an editor.
+ # $_[1] is used in error messages.
+ die "$0: called edit_string() in non-interactive mode!" if $YES;
+ my $string = shift;
+ my $name = shift;
+ my %args = @_;
+ my $trailing_eol = $args{trailing_eol};
+ my ($fh, $fn) = my_tempfile();
+ print $fh $string;
+ $fh->flush or die $!;
+ system("$EDITOR -- $fn") == 0
+ or warn "\$EDITOR failed editing $name: $! ($?); "
+ ."edit results ($fn) ignored.";
+ my $rv = `cat $fn`;
+ $rv =~ s/\n*\z// and $rv .= ("\n" x $trailing_eol) if defined $trailing_eol;
+ $rv;
+}
+
+sub vote {
+ my ($state, $approved, $votes) = @_;
+ # TODO: use votesarray instead of votescheck
+ my (%approvedcheck, %votescheck);
+ my $raw_approved = "";
+ my @votesarray;
+ return unless %$approved or %$votes;
+
+ # If $AVAILID is undef, we can only process 'edit' pseudovotes; handle_entry() is
+ # supposed to prevent numeric (±1,±0) votes from getting to this point.
+ die "Assertion failed" if not defined $AVAILID
+ and grep { $_ ne 'edit' } map { $_->[0] } values %$votes;
+
+ my $had_empty_line;
+
+ $. = 0;
+ open STATUS, "<", $STATUS;
+ open VOTES, ">", "$STATUS.$$.tmp";
+ while (<STATUS>) {
+ $had_empty_line = /\n\n\z/;
+ my $key = digest_entry $_;
+
+ $approvedcheck{$key}++ if exists $approved->{$key};
+ $votescheck{$key}++ if exists $votes->{$key};
+
+ unless (exists $votes->{$key} or exists $approved->{$key}) {
+ print VOTES;
+ next;
+ }
+
+ unless (exists $votes->{$key}) {
+ push @votesarray, {
+ entry => $approved->{$key},
+ approval => 1,
+ digest => $key,
+ };
+ $raw_approved .= $_;
+ next;
+ }
+
+ # We have a vote, and potentially an approval.
+
+ my ($vote, $entry) = @{$votes->{$key}};
+ push @votesarray, {
+ entry => $entry,
+ vote => $vote,
+ approval => (exists $approved->{$key}),
+ digest => $key,
+ };
+
+ if ($vote eq 'edit') {
+ local $_ = $entry->{raw};
+ $votesarray[-1]->{digest} = digest_entry $_;
+ (exists $approved->{$key}) ? ($raw_approved .= $_) : (print VOTES);
+ next;
+ }
+
+ s/^(\s*\Q$vote\E:.*)/"$1, $AVAILID"/me
+ or s/(.*\w.*?\n)/"$1 $vote: $AVAILID\n"/se;
+ $_ = edit_string $_, $entry->{header}, trailing_eol => 2
+ if $vote ne '+1';
+ $votesarray[-1]->{digest} = digest_entry $_;
+ (exists $approved->{$key}) ? ($raw_approved .= $_) : (print VOTES);
+ }
+ close STATUS;
+ print VOTES "\n" if $raw_approved and !$had_empty_line;
+ print VOTES $raw_approved;
+ close VOTES;
+ warn "Some vote chunks weren't found: ",
+ join ',',
+ map $votes->{$_}->[1]->{id},
+ grep { !$votescheck{$_} } keys %$votes
+ if scalar(keys %$votes) != scalar(keys %votescheck);
+ warn "Some approval chunks weren't found: ",
+ join ',',
+ map $approved->{$_}->{id},
+ grep { !$approvedcheck{$_} } keys %$approved
+ if scalar(keys %$approved) != scalar(keys %approvedcheck);
+ prompt "Press the 'any' key to continue...\n", dontprint => 1
+ if scalar(keys %$approved) != scalar(keys %approvedcheck)
+ or scalar(keys %$votes) != scalar(keys %votescheck);
+ move "$STATUS.$$.tmp", $STATUS;
+
+ my $logmsg = do {
+ my @sentences = map {
+ my $words_vote = ", approving" x $_->{approval};
+ my $words_edit = " and approve" x $_->{approval};
+ exists $_->{vote}
+ ? (
+ ( $_->{vote} eq 'edit'
+ ? "Edit$words_edit the $_->{entry}->{id} entry"
+ : "Vote $_->{vote} on $_->{entry}->{header}$words_vote"
+ )
+ . "."
+ )
+ : # exists only in $approved
+ "Approve $_->{entry}->{header}."
+ } @votesarray;
+ (@sentences == 1)
+ ? $sentences[0]
+ : "* STATUS:\n" . join "", map " $_\n", @sentences;
+ };
+
+ system "$SVN diff -- $STATUS";
+ printf "[[[\n%s%s]]]\n", $logmsg, ("\n" x ($logmsg !~ /\n\z/));
+ if (prompt "Commit these votes? ") {
+ my ($logmsg_fh, $logmsg_filename) = my_tempfile();
+ print $logmsg_fh $logmsg;
+ close $logmsg_fh;
+ system("$SVN commit -F $logmsg_filename -- $STATUS") == 0
+ or warn("Committing the votes failed($?): $!") and return;
+ unlink $logmsg_filename;
+
+ # Add to state votes that aren't '+0' or 'edit'
+ $state->{$_->{digest}}++ for grep
+ +{ qw/-1 t -0 t +1 t/ }->{$_->{vote}},
+ @votesarray;
+ }
+}
+
+sub check_local_mods_to_STATUS {
+ if (`$SVN status -q $STATUS`) {
+ die "Local mods to STATUS file $STATUS" if $YES;
+ warn "Local mods to STATUS file $STATUS";
+ system "$SVN diff -- $STATUS";
+ prompt "Press the 'any' key to continue...\n", dontprint => 1;
+ return 1;
+ }
+ return 0;
+}
+
+sub renormalize_STATUS {
+ my $vimscript = <<'EOVIM';
+:"" Strip trailing whitespace before entries and section headers, but not
+:"" inside entries (e.g., multi-paragraph Notes: fields).
+:""
+:"" Since an entry is always followed by another entry, section header, or EOF,
+:"" there is no need to separately strip trailing whitespace from lines following
+:"" entries.
+:%s/\v\s+\n(\s*\n)*\ze(\s*[*]|\w)/\r\r/g
+
+:"" Ensure there is exactly one blank line around each entry and header.
+:""
+:"" First, inject a new empty line above and below each entry and header; then,
+:"" squeeze runs of empty lines together.
+:0/^=/,$ g/^ *[*]/normal! O
+:g/^=/normal! o
+:g/^=/-normal! O
+:
+:%s/\n\n\n\+/\r\r/g
+
+:"" Save.
+:wq
+EOVIM
+ open VIM, '|-', $VIM, qw/-e -s -n -N -i NONE -u NONE --/, $STATUS
+ or die "Can't renormalize STATUS: $!";
+ print VIM $vimscript;
+ close VIM or warn "$0: renormalize_STATUS failed ($?): $!)";
+
+ system("$SVN commit -m '* STATUS: Whitespace changes only.' -- $STATUS") == 0
+ or die "$0: Can't renormalize STATUS ($?): $!"
+ if $MAY_COMMIT;
+}
+
+sub revert {
+ my %args = @_;
+ die "Bug: \$args{verbose} undefined" unless exists $args{verbose};
+ die "Bug: unknown argument" if grep !/^(?:verbose|discard_STATUS)$/, keys %args;
+
+ copy $STATUS, "$STATUS.$$.tmp" unless $args{discard_STATUS};
+ system("$SVN revert -q $STATUS") == 0
+ or die "revert failed ($?): $!";
+ system("$SVN revert -R ./" . (" -q" x !$args{verbose})) == 0
+ or die "revert failed ($?): $!";
+ move "$STATUS.$$.tmp", $STATUS unless $args{discard_STATUS};
+ $MERGED_SOMETHING = 0;
+}
+
+sub maybe_revert {
+ # This is both a SIGINT handler, and the tail end of main() in normal runs.
+ # @_ is 'INT' in the former case and () in the latter.
+ delete $SIG{INT} unless @_;
+ revert verbose => 1 if !$YES and $MERGED_SOMETHING and prompt 'Revert? ';
+ (@_ ? exit : return);
+}
+
+sub signal_handler {
+ my $sig = shift;
+
+ # Clean up after prompt()
+ ReadMode 'normal';
+
+ # Fall back to default action
+ delete $SIG{$sig};
+ kill $sig, $$;
+}
+
+sub warning_summary {
+ return unless %ERRORS;
+
+ warn "Warning summary\n";
+ warn "===============\n";
+ warn "\n";
+ for my $id (keys %ERRORS) {
+ my $title = logsummarysummary $ERRORS{$id}->[0];
+ warn "$id ($title): $ERRORS{$id}->[1]\n";
+ }
+}
+
+sub read_state {
+ # die "$0: called read_state() in non-interactive mode!" if $YES;
+
+ open my $fh, '<', $STATEFILE or do {
+ return {} if $!{ENOENT};
+ die "Can't read statefile: $!";
+ };
+
+ my %rv;
+ while (<$fh>) {
+ chomp;
+ $rv{$_}++;
+ }
+ return \%rv;
+}
+
+sub write_state {
+ my $state = shift;
+ open STATE, '>', $STATEFILE or warn("Can't write state: $!"), return;
+ say STATE for keys %$state;
+ close STATE;
+}
+
+sub exit_stage_left {
+ my $state = shift;
+ maybe_revert;
+ warning_summary if $YES;
+ vote $state, @_;
+ write_state $state;
+ exit scalar keys %ERRORS;
+}
+
+# Given an ENTRY, check whether all ENTRY->{revisions} have been merged
+# into ENTRY->{branch}, if it has one. If revisions are missing, record
+# a warning in %ERRORS. Return TRUE if the entry passed the validation
+# and FALSE otherwise.
+sub validate_branch_contains_named_revisions {
+ my %entry = @_;
+ return 1 unless defined $entry{branch};
+ my %present;
+
+ return "Why are you running so old versions?" # true in boolean context
+ if $SVNvsn < 1_005_000; # doesn't have the 'mergeinfo' subcommand
+
+ my $shell_escaped_branch = shell_escape($entry{branch});
+ %present = do {
+ my @present = `$SVN mergeinfo --show-revs=merged -- $TRUNK $BRANCHES/$shell_escaped_branch`;
+ chomp @present;
+ @present = map /(\d+)/g, @present;
+ map +($_ => 1), @present;
+ };
+
+ my @absent = grep { not exists $present{$_} } @{$entry{revisions}};
+
+ if (@absent) {
+ $ERRORS{$entry{id}} //= [\%entry,
+ sprintf("Revisions '%s' nominated but not included in branch",
+ (join ", ", map { "r$_" } @absent)),
+ ];
+ }
+ return @absent ? 0 : 1;
+}
+
sub handle_entry {
- my %entry = parse_entry @_;
- my @vetoes = grep { /^ -1:/ } @{$entry{votes}};
+ my $in_approved = shift;
+ my $approved = shift;
+ my $votes = shift;
+ my $state = shift;
+ my $raw = shift;
+ my $parno = shift;
+ my $skip = shift;
+ my %entry = parse_entry $raw, $parno, @_;
+ my @vetoes = grep /^\s*-1:/, @{$entry{votes}};
+
+ my $match = defined($skip) ? ($raw =~ /\Q$skip\E/ or $raw =~ /$skip/msi) : 0
+ unless $YES;
if ($YES) {
- merge %entry unless @vetoes;
+ # Run a merge if:
+ unless (@vetoes) {
+ if ($MAY_COMMIT and $in_approved) {
+ # svn-role mode
+ merge \%entry if validate_branch_contains_named_revisions %entry;
+ } elsif (!$MAY_COMMIT) {
+ # Scan-for-conflicts mode
+
+ # First, sanity-check the entry. We ignore the result; even if it
+ # failed, we do want to check for conflicts, in the remainder of this
+ # block.
+ validate_branch_contains_named_revisions %entry;
+
+ # E155015 is SVN_ERR_WC_FOUND_CONFLICT
+ my $expected_error_p = sub {
+ my ($exit_code, $outlines, $errlines) = @_;
+ ($exit_code == 0)
+ or
+ (grep /svn: E155015:/, @$errlines)
+ };
+ merge \%entry, ($entry{depends} ? $expected_error_p : undef);
+
+ my $output = `$SVN status`;
+
+ # Pre-1.6 svn's don't have the 7th column, so fake it.
+ $output =~ s/^(......)/$1 /mg if $SVNvsn < 1_006_000;
+
+ my (@conflicts) = ($output =~ m#^(?:C......|.C.....|......C)\s(.*)#mg);
+ if (@conflicts and !$entry{depends}) {
+ $ERRORS{$entry{id}} //= [\%entry,
+ sprintf "Conflicts on %s%s%s",
+ '[' x !!$#conflicts,
+ (join ', ',
+ map { basename $_ }
+ @conflicts),
+ ']' x !!$#conflicts,
+ ];
+ say STDERR "Conflicts merging $entry{header}!";
+ say STDERR "";
+ say STDERR $output;
+ system "$SVN diff -- " . join ' ', shell_escape @conflicts;
+ } elsif (!@conflicts and $entry{depends}) {
+ # Not a warning since svn-role may commit the dependency without
+ # also committing the dependent in the same pass.
+ print "No conflicts merging $entry{header}, but conflicts were "
+ ."expected ('Depends:' header set)\n";
+ } elsif (@conflicts) {
+ say "Conflicts found merging $entry{header}, as expected.";
+ }
+ revert verbose => 0;
+ }
+ }
+ } elsif (defined($skip) ? not $match : $state->{$entry{digest}}) {
+ print "\n\n";
+ my $reason = defined($skip) ? "doesn't match pattern"
+ : "remove $STATEFILE to reset";
+ say "Skipping $entry{header} ($reason):";
+ say logsummarysummary \%entry;
+ } elsif ($match or not defined $skip) {
+ # This loop is just a hack because 'goto' panics. The goto should be where
+ # the "next PROMPT;" is; there's a "last;" at the end of the loop body.
+ PROMPT: while (1) {
+ say "";
+ say "\n>>> $entry{header_start}:";
+ say join ", ", map { "r$_" } @{$entry{revisions}} if @{$entry{revisions}};
+ say "$BRANCHES/$entry{branch}" if $entry{branch};
+ say "--accept=$entry{accept}" if $entry{accept};
+ say "";
+ say for @{$entry{logsummary}};
+ say "";
+ say for @{$entry{votes}};
+ say "";
+ say "Vetoes found!" if @vetoes;
+
+ # See above for why the while(1).
+ QUESTION: while (1) {
+ my $key = $entry{digest};
+ given (prompt 'Run a merge? [y,l,v,±1,±0,q,e,a, ,N] ',
+ verbose => 1, extra => qr/[+-]/) {
+ when (/^y/i) {
+ #validate_branch_contains_named_revisions %entry;
+ merge \%entry;
+ while (1) {
+ given (prompt "Shall I open a subshell? [ydN] ", verbose => 1) {
+ when (/^y/i) {
+ # TODO: if $MAY_COMMIT, save the log message to a file (say,
+ # backport.logmsg in the wcroot).
+ system($SHELL) == 0
+ or warn "Creating an interactive subshell failed ($?): $!"
+ }
+ when (/^d/) {
+ system("$SVN diff | $PAGER") == 0
+ or warn "diff failed ($?): $!";
+ next;
+ }
+ when (/^N/i) {
+ # fall through.
+ }
+ default {
+ next;
+ }
+ }
+ revert verbose => 1;
+ next PROMPT;
+ }
+ # NOTREACHED
+ }
+ when (/^l/i) {
+ if ($entry{branch}) {
+ system "$SVN log --stop-on-copy -v -g -r 0:HEAD -- "
+ .shell_escape("$BRANCHES/$entry{branch}")." "
+ ."| $PAGER";
+ } elsif (@{$entry{revisions}}) {
+ system "$SVN log ".(join ' ', map { "-r$_" } @{$entry{revisions}})
+ ." -- ^/subversion | $PAGER";
+ } else {
+ die "Assertion failed: entry has neither branch nor revisions:\n",
+ '[[[', (join ';;', %entry), ']]]';
+ }
+ next PROMPT;
+ }
+ when (/^v/i) {
+ say "";
+ say for @{$entry{entry}};
+ say "";
+ next QUESTION;
+ }
+ when (/^q/i) {
+ exit_stage_left $state, $approved, $votes;
+ }
+ when (/^a/i) {
+ $approved->{$key} = \%entry;
+ next PROMPT;
+ }
+ when (/^([+-][01])\s*$/i) {
+ next QUESTION if warned_cannot_commit "Entering a vote failed";
+ $votes->{$key} = [$1, \%entry];
+ say "Your '$1' vote has been recorded." if $VERBOSE;
+ last PROMPT;
+ }
+ when (/^e/i) {
+ prompt "Press the 'any' key to continue...\n"
+ if warned_cannot_commit "Committing this edit later on may fail";
+ my $original = $entry{raw};
+ $entry{raw} = edit_string $entry{raw}, $entry{header},
+ trailing_eol => 2;
+ # TODO: parse the edited entry (empty lines, logsummary+votes, etc.)
+ $votes->{$key} = ['edit', \%entry] # marker for the 2nd pass
+ if $original ne $entry{raw};
+ last PROMPT;
+ }
+ when (/^N/i) {
+ $state->{$entry{digest}}++;
+ last PROMPT;
+ }
+ when (/^\x20/) {
+ last PROMPT; # Fall off the end of the given/when block.
+ }
+ default {
+ say "Please use one of the options in brackets (q to quit)!";
+ next QUESTION;
+ }
+ }
+ last; } # QUESTION
+ last; } # PROMPT
} else {
- print "";
- print "\n>>> The $entry{header}:";
- print join ", ", map { "r$_" } @{$entry{revisions}};
- print "$BRANCHES/$entry{branch}" if $entry{branch};
- print "";
- print for @{$entry{logsummary}};
- print "";
- print for @{$entry{votes}};
- print "";
- print "Vetoes found!" if @vetoes;
-
- merge %entry if prompt;
+ # NOTREACHED
+ die "Unreachable code reached.";
}
- # TODO: merge() changes ./STATUS, which we're reading below, but
- # on my system the loop in main() doesn't seem to care.
-
1;
}
-sub main {
- usage, exit 0 if @ARGV;
+
+sub backport_main {
+ my %approved;
+ my %votes;
+ my $state = read_state;
+ my $renormalize;
- open STATUS, "<", $STATUS or (usage, exit 1);
+ if (@ARGV && $ARGV[0] eq '--renormalize') {
+ $renormalize = 1;
+ shift;
+ }
+
+ backport_usage, exit 0 if @ARGV > ($YES ? 0 : 1) or grep /^--help$/, @ARGV;
+ backport_usage, exit 0 if grep /^(?:-h|-\?|--help|help)$/, @ARGV;
+ my $skip = shift; # maybe undef
+ # assert not defined $skip if $YES;
+
+ open STATUS, "<", $STATUS or (backport_usage, exit 1);
# Because we use the ':normal' command in Vim...
- die "A vim with the +ex_extra feature is required"
- if `${VIM} --version` !~ /[+]ex_extra/;
+ die "A vim with the +ex_extra feature is required for --renormalize and "
+ ."\$MAY_COMMIT modes"
+ if ($renormalize or $MAY_COMMIT) and `${VIM} --version` !~ /[+]ex_extra/;
# ### TODO: need to run 'revert' here
# ### TODO: both here and in merge(), unlink files that previous merges added
- die "Local mods to STATUS file $STATUS" if `$SVN status -q $STATUS`;
+ # When running from cron, there shouldn't be local mods. (For interactive
+ # usage, we preserve local mods to STATUS.)
+ system("$SVN info $STATUS >/dev/null") == 0
+ or die "$0: svn error; point \$SVN to an appropriate binary";
+
+ check_local_mods_to_STATUS;
+ renormalize_STATUS if $renormalize;
# Skip most of the file
+ $/ = ""; # paragraph mode
while (<STATUS>) {
- last if /^Approved changes/;
- }
- while (<STATUS>) {
- last unless /^=+$/;
+ last if /^Status of \d+\.\d+/;
}
- $/ = ""; # paragraph mode
+ $SIG{INT} = \&maybe_revert unless $YES;
+ $SIG{TERM} = \&signal_handler unless $YES;
+
+ my $in_approved = 0;
while (<STATUS>) {
+ my $lines = $_;
my @lines = split /\n/;
given ($lines[0]) {
# Section header
when (/^[A-Z].*:$/i) {
- print "\n\n=== $lines[0]" unless $YES;
+ say "\n\n=== $lines[0]" unless $YES;
+ $in_approved = $lines[0] =~ /^Approved changes/;
+ }
+ # Comment
+ when (/^[#\x5b]/i) {
+ next;
}
# Separator after section header
when (/^=+$/i) {
break;
}
# Backport entry?
- when (/^ \*/) {
+ when (/^ *\*/) {
warn "Too many bullets in $lines[0]" and next
- if grep /^ \*/, @lines[1..$#lines];
- handle_entry @lines;
+ if grep /^ *\*/, @lines[1..$#lines];
+ handle_entry $in_approved, \%approved, \%votes, $state, $lines, $.,
+ $skip,
+ @lines;
}
default {
- warn "Unknown entry '$lines[0]' at $ARGV:$.\n";
+ warn "Unknown entry '$lines[0]'";
}
}
}
+
+ exit_stage_left $state, \%approved, \%votes;
}
-&main
+sub nominate_main {
+ my $had_local_mods;
+
+ local $Text::Wrap::columns = 79;
+
+ $had_local_mods = check_local_mods_to_STATUS;
+
+ # Argument parsing.
+ nominate_usage, exit 0 if @ARGV != 2;
+ my (@revnums) = (+shift) =~ /(\d+)/g;
+ my $justification = shift;
+
+ die "Unable to proceed." if warned_cannot_commit "Nominating failed";
+
+ @revnums = sort { $a <=> $b } keys %{{ map { $_ => 1 } @revnums }};
+ die "No revision numbers specified" unless @revnums;
+
+ # Determine whether a backport branch exists
+ my ($URL) = `$SVN info` =~ /^URL: (.*)$/m;
+ die "Can't retrieve URL of cwd" unless $URL;
+
+ die unless shell_safe_path_or_url $URL;
+ system "$SVN info -- $URL-r$revnums[0] 2>/dev/null";
+ my $branch = ($? == 0) ? basename("$URL-r$revnums[0]") : undef;
+
+ # Construct entry.
+ my $logmsg = `$SVN propget --revprop -r $revnums[0] --strict svn:log '^/'`;
+ die "Can't fetch log message of r$revnums[0]: $!" unless $logmsg;
+
+ unless ($logmsg =~ s/^(.*?)\n\n.*/$1/s) {
+ # "* file\n (symbol): Log message."
+
+ # Strip before and after the first symbol's log message.
+ $logmsg =~ s/^.*?: //s;
+ $logmsg =~ s/^ \x28.*//ms;
+
+ # Undo line wrapping. (We'll re-do it later.)
+ $logmsg =~ s/\s*\n\s+/ /g;
+ }
+
+ my @lines;
+ warn "Wrapping [$logmsg]\n";
+ push @lines, wrap " * ", ' 'x3, join ', ', map "r$_", @revnums;
+ push @lines, wrap ' 'x3, ' 'x3, split /\n/, $logmsg;
+ push @lines, " Justification:";
+ push @lines, wrap ' 'x5, ' 'x5, $justification;
+ push @lines, " Branch: $branch" if defined $branch;
+ push @lines, " Votes:";
+ push @lines, " +1: $AVAILID";
+ push @lines, "";
+ my $raw = join "", map "$_\n", @lines;
+
+ # Open the file in line-mode (not paragraph-mode).
+ my @STATUS;
+ tie @STATUS, "Tie::File", $STATUS, recsep => "\n";
+ my ($index) = grep { $STATUS[$_] =~ /^Veto/ } (0..$#STATUS);
+ die "Couldn't find where to add an entry" unless $index;
+
+ # Add an empty line if needed.
+ if ($STATUS[$index-1] =~ /\S/) {
+ splice @STATUS, $index, 0, "";
+ $index++;
+ }
+
+ # Add the entry.
+ splice @STATUS, $index, 0, @lines;
+
+ # Save.
+ untie @STATUS;
+
+ # Done!
+ system "$SVN diff -- $STATUS";
+ if (prompt "Commit this nomination? ") {
+ system "$SVN commit -m 'Nominate r$revnums[0].' -- $STATUS";
+ exit $?;
+ }
+ elsif (!$had_local_mods or prompt "Revert STATUS (destroying local mods)? ") {
+ # TODO: we could be smarter and just un-splice the lines we'd added.
+ system "$SVN revert -- $STATUS";
+ exit $?;
+ }
+
+ exit 0;
+}
+
+# Dispatch to the appropriate main().
+given (basename($0)) {
+ when (/^b$|backport/) {
+ chdir dirname $0 or die "Can't chdir: $!" if /^b$/;
+ &backport_main(@ARGV);
+ }
+ when (/^n$|nominate/) {
+ chdir dirname $0 or die "Can't chdir: $!" if /^n$/;
+ &nominate_main(@ARGV);
+ }
+ default {
+ &backport_main(@ARGV);
+ }
+}
diff --git a/tools/dist/backport_accept.dump b/tools/dist/backport_accept.dump
new file mode 100644
index 0000000..9532dc3
--- /dev/null
+++ b/tools/dist/backport_accept.dump
@@ -0,0 +1,550 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000003
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 82
+Content-length: 82
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 26
+Conflicting change on iota
+PROPS-END
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 53
+Text-content-md5: 0c42f8c8b103bf00045cdf514238cfab
+Text-content-sha1: 440ad0a1673258aea8ba78fef0845e182757f8f9
+Content-length: 53
+
+This is the file 'iota'.
+Conflicts with first change
+
+
+Revision-number: 7
+Prop-content-length: 67
+Content-length: 67
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 11
+Nominate r4
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 284
+Text-content-md5: f1f6d73c681587eba4082139a9f2b724
+Text-content-sha1: 251bb84036790a810b1f4cc7f7a4e64c6a54ce9b
+Content-length: 284
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+* r4
+ default logsummary
+ Notes: Merge with --accept=theirs-conflict.
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 8
+Prop-content-length: 206
+Content-length: 206
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 150
+Merge r4 from trunk, with --accept=theirs-conflict:
+
+* r4
+ default logsummary
+ Notes: Merge with --accept=theirs-conflict.
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
diff --git a/tools/dist/backport_branches.dump b/tools/dist/backport_branches.dump
new file mode 100644
index 0000000..de6c800
--- /dev/null
+++ b/tools/dist/backport_branches.dump
@@ -0,0 +1,642 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000004
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 82
+Content-length: 82
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 26
+Conflicting change on iota
+PROPS-END
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 52
+Text-content-md5: 2309abeef2762865a65aef15a23bd613
+Text-content-sha1: d3339d12dee6df117675e9abf30ebfa1a1dde889
+Content-length: 52
+
+This is the file 'iota'.
+Conflicts with first change
+
+Revision-number: 7
+Prop-content-length: 80
+Content-length: 80
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 24
+Create a backport branch
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 6
+Node-copyfrom-path: branch
+
+
+Revision-number: 8
+Prop-content-length: 85
+Content-length: 85
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 29
+Conflict resolution via mkdir
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: subversion/branches/r4/A_resolved
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches/r4/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 9
+Text-content-md5: 1d0413d4da6866dae63f902165786614
+Text-content-sha1: e2cb0815ec8f0a8b36c6aa910c1f894ec1487da3
+Content-length: 9
+
+resolved
+
+
+Revision-number: 9
+Prop-content-length: 67
+Content-length: 67
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 11
+Nominate r4
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 256
+Text-content-md5: 76f9bca3ededa2eb3c196ef0bbc9ee1b
+Text-content-sha1: 283a9f7ec716dc64b5ec8e5e1d9739d55e34b2d5
+Content-length: 256
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Branch: r4
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 10
+Prop-content-length: 146
+Content-length: 146
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 91
+Merge the r4 branch:
+
+ * r4
+ default logsummary
+ Branch: r4
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 82
+Content-length: 82
+
+K 13
+svn:mergeinfo
+V 47
+/subversion/branches/r4:7-9
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/A_resolved
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 9
+Node-copyfrom-path: subversion/branches/r4/A_resolved
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 9
+Text-content-md5: 1d0413d4da6866dae63f902165786614
+Text-content-sha1: e2cb0815ec8f0a8b36c6aa910c1f894ec1487da3
+Content-length: 9
+
+resolved
+
+
+Revision-number: 11
+Prop-content-length: 93
+Content-length: 93
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 38
+Remove the 'r4' branch, merged in r10.
+PROPS-END
+
+Node-path: subversion/branches/r4
+Node-action: delete
+
+
diff --git a/tools/dist/backport_indented_entry.dump b/tools/dist/backport_indented_entry.dump
new file mode 100644
index 0000000..bbc501d
--- /dev/null
+++ b/tools/dist/backport_indented_entry.dump
@@ -0,0 +1,522 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000001
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 67
+Content-length: 67
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 11
+Nominate r4
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 238
+Text-content-md5: d746b12362ddd59c13d39f291710b25b
+Text-content-sha1: aafcdde209c276ffd2d63d6cd4c4b5ab35b36c27
+Content-length: 238
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+* r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 7
+Prop-content-length: 128
+Content-length: 128
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 73
+Merge r4 from trunk:
+
+* r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
diff --git a/tools/dist/backport_multirevisions.dump b/tools/dist/backport_multirevisions.dump
new file mode 100644
index 0000000..d04c850
--- /dev/null
+++ b/tools/dist/backport_multirevisions.dump
@@ -0,0 +1,534 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000005
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 73
+Content-length: 73
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 17
+Nominate a group.
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 246
+Text-content-md5: 50068058cd9700828164f97c8bc9e44e
+Text-content-sha1: 02f8ed7e3256e1eabd302b8f5b6e35000e2d4ce8
+Content-length: 246
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4, r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 7
+Prop-content-length: 146
+Content-length: 146
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 91
+Merge the r4 group from trunk:
+
+ * r4, r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 56
+Content-length: 56
+
+K 13
+svn:mergeinfo
+V 21
+/subversion/trunk:4-5
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
diff --git a/tools/dist/backport_tests.py b/tools/dist/backport_tests.py
new file mode 100755
index 0000000..e2b4862
--- /dev/null
+++ b/tools/dist/backport_tests.py
@@ -0,0 +1,578 @@
+#!/usr/bin/env python
+# py:encoding=utf-8
+#
+# backport_tests.py: Test backport.pl
+#
+# Subversion is a tool for revision control.
+# See http://subversion.apache.org for more information.
+#
+# ====================================================================
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+######################################################################
+
+# General modules
+import contextlib
+import functools
+import os
+import re
+import sys
+
@contextlib.contextmanager
def chdir(dir):
    """Context manager: run the body with DIR as the current working
    directory, then restore the previous working directory on exit
    (even if the body raises)."""
    # Capture the cwd *before* entering the try block: if os.getcwd()
    # failed inside the try, the finally clause would raise NameError on
    # saved_dir while attempting the restore.
    saved_dir = os.getcwd()
    os.chdir(dir)
    try:
        yield
    finally:
        os.chdir(saved_dir)
+
+# Our testing module
+# HACK: chdir to cause svntest.main.svn_binary to be set correctly
# Make the in-tree cmdline test harness importable from this out-of-tree
# script; the chdir is needed because svntest inspects the cwd at import time.
sys.path.insert(0, os.path.abspath('../../subversion/tests/cmdline'))
with chdir('../../subversion/tests/cmdline'):
    import svntest
+
+# (abbreviations)
# Shorthand aliases for the svntest test-decorator factories.
Skip = svntest.testcase.Skip_deco
SkipUnless = svntest.testcase.SkipUnless_deco
XFail = svntest.testcase.XFail_deco
Issues = svntest.testcase.Issues_deco
Issue = svntest.testcase.Issue_deco
Wimp = svntest.testcase.Wimp_deco
+
+######################################################################
+# Helper functions
+
# Absolute path of the script under test, which lives next to this file.
BACKPORT_PL = os.path.abspath(os.path.join(os.path.dirname(__file__),
                                           'backport.pl'))
# Repository-relative path of the nominations file every test appends to.
STATUS = 'branch/STATUS'
+
class BackportTest(object):
    """Decorator.  See self.__call__()."""

    def __init__(self, uuid):
        """The argument is the UUID embedded in the dump file.
        If the argument is None, then there is no dump file."""
        self.uuid = uuid

    def __call__(self, test_func):
        """Return a decorator that: builds TEST_FUNC's sbox, creates
        ^/subversion/trunk, and calls TEST_FUNC, then compare its output to the
        expected dump file named after TEST_FUNC."""

        # .wraps() propagates the wrappee's docstring to the wrapper.
        @functools.wraps(test_func)
        def wrapped_test_func(sbox):
            # Use __name__ rather than the Python-2-only func_name alias:
            # identical value on Python 2, and keeps working on Python 3.
            expected_dump_file = './%s.dump' % (test_func.__name__,)

            sbox.build()

            # r2: prepare ^/subversion/ tree
            sbox.simple_mkdir('subversion', 'subversion/trunk')
            sbox.simple_mkdir('subversion/tags', 'subversion/branches')
            sbox.simple_move('A', 'subversion/trunk')
            sbox.simple_move('iota', 'subversion/trunk')
            sbox.simple_commit(message='Create trunk')

            # r3: branch
            sbox.simple_copy('subversion/trunk', 'branch')
            sbox.simple_append('branch/STATUS', '')
            sbox.simple_add('branch/STATUS')
            sbox.simple_commit(message='Create branch, with STATUS file')

            # r4: random change on trunk
            sbox.simple_append('subversion/trunk/iota', 'First change\n')
            sbox.simple_commit(message='First change')

            # r5: random change on trunk
            sbox.simple_append('subversion/trunk/A/mu', 'Second change\n')
            sbox.simple_commit(message='Second change')

            # Do the work.
            test_func(sbox)

            # Verify it.
            verify_backport(sbox, expected_dump_file, self.uuid)
        return wrapped_test_func
+
def make_entry(revisions=None, logsummary=None, notes=None, branch=None,
               depends=None, votes=None):
    """Return a nomination-entry dict for serialize_entry().

    REVISIONS (required) is a list of revision numbers (ints).  LOGSUMMARY
    and VOTES default to 'default logsummary' and a single +1 by jrandom;
    NOTES, BRANCH, and DEPENDS default to None (omitted on serialization).
    """
    assert revisions
    return {
        'revisions': revisions,
        'logsummary': ('default logsummary' if logsummary is None
                       else logsummary),
        'notes': notes,
        'branch': branch,
        'depends': depends,
        'votes': {+1: ['jrandom']} if votes is None else votes,
    }
+
def serialize_entry(entry):
    """Render one nomination ENTRY dict (see make_entry()) as STATUS text."""
    sign = {1: '+1', 0: '+0', -1: '-1', -0: '-0'}
    chunks = []

    # revisions, e.g. "* r4, r5"
    chunks.append(' * %s\n'
                  % (", ".join("r%ld" % revision
                               for revision in entry['revisions'])))

    # one-line log summary
    chunks.append(' %s\n' % (entry['logsummary'],))

    # optional annotations; each is omitted entirely when empty/None
    for label, key in (('Notes', 'notes'), ('Branch', 'branch'),
                       ('Depends', 'depends')):
        if entry[key]:
            chunks.append(' %s: %s\n' % (label, entry[key]))

    # votes block
    chunks.append(' Votes:\n')
    for vote in entry['votes']:
        chunks.append(' %s: %s\n' % (sign[vote],
                                     ", ".join(entry['votes'][vote])))

    chunks.append('\n')  # empty line after entry
    return ''.join(chunks)
+
def serialize_STATUS(approveds,
                     serialize_entry=serialize_entry):
    """Construct and return the contents of a STATUS file.

    APPROVEDS is an iterable of ENTRY dicts.  The dicts are defined
    to have the following keys: 'revisions', a list of revision numbers
    (ints); 'logsummary'; and 'votes', a dict mapping +1/+0/-1/-0 (int)
    to list of voters.
    """
    # Fixed boilerplate: every section heading, with all nominations filed
    # under "Approved changes".
    header = ("Status of 1.8.x:\n\n"

              "Candidate changes:\n"
              "==================\n\n"

              "Random new subheading:\n"
              "======================\n\n"

              "Veto-blocked changes:\n"
              "=====================\n\n"

              "Approved changes:\n"
              "=================\n\n")

    return header + "".join(serialize_entry(entry) for entry in approveds)
+
def run_backport(sbox, error_expected=False, extra_env=()):
    """Run backport.pl.  EXTRA_ENV is an iterable of key=value pairs (str) to
    set in the child's environment.  ERROR_EXPECTED is propagated to
    run_command()."""
    # TODO: if the test is run in verbose mode, pass DEBUG=1 in the environment,
    #       and pass error_expected=True to run_command() to not croak on
    #       stderr output from the child (because it uses 'sh -x').
    # NOTE: the default is an immutable tuple rather than the mutable [] the
    #       function originally used (mutable-default-argument anti-pattern);
    #       list(()) == list([]) so callers see identical behavior.
    args = [
        '/usr/bin/env',
        'SVN=' + svntest.main.svn_binary,
        'YES=1', 'MAY_COMMIT=1', 'AVAILID=jrandom',
    ] + list(extra_env) + [
        'perl', BACKPORT_PL,
    ]
    # backport.pl operates on the STATUS file in the cwd, so run it from
    # the branch working-copy directory.
    with chdir(sbox.ospath('branch')):
        return svntest.main.run_command(args[0], error_expected, False, *(args[1:]))
+
def verify_backport(sbox, expected_dump_file, uuid):
    """Compare the contents of the SBOX repository with EXPECTED_DUMP_FILE.
    Set the UUID of SBOX to UUID beforehand.
    Based on svnsync_tests.py:verify_mirror."""

    if uuid is None:
        # There is no expected dump file.
        return

    # Remove some SVNSync-specific housekeeping properties from the
    # mirror repository in preparation for the comparison dump.
    svntest.actions.enable_revprop_changes(sbox.repo_dir)
    for revnum in range(0, 1+int(sbox.youngest())):
        svntest.actions.run_and_verify_svnadmin([], [],
            "delrevprop", "-r", revnum, sbox.repo_dir, "svn:date")

    # Create a dump file from the mirror repository.
    # Use a context manager so the expected-dump file handle is closed
    # promptly instead of leaking until garbage collection (the original
    # used a bare open() with no close).
    with open(expected_dump_file) as f:
        dest_dump = f.readlines()
    svntest.actions.run_and_verify_svnadmin(None, [],
        'setuuid', '--', sbox.repo_dir, uuid)
    src_dump = svntest.actions.run_and_verify_dump(sbox.repo_dir)

    svntest.verify.compare_dump_files(
        "Dump files", "DUMP", src_dump, dest_dump)
+
+######################################################################
+# Tests
+#
+# Each test must return on success or raise on failure.
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000001')
def backport_indented_entry(sbox):
    "parsing of entries with nonstandard indentation"

    # r6: nominate r4
    approved_entries = [
        make_entry([4]),
    ]
    def reindenting_serialize_entry(*args, **kwargs):
        # Serialize as usual, then strip one leading space from every line
        # so the entry is written with nonstandard (reduced) indentation.
        entry = serialize_entry(*args, **kwargs)
        return ('\n' + entry).replace('\n ', '\n')[1:]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries,
                       serialize_entry=reindenting_serialize_entry))
    sbox.simple_commit(message='Nominate r4')

    # Run it.
    run_backport(sbox)
+
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000002')
def backport_two_approveds(sbox):
    "backport with two approveds"

    # r6: Enter votes
    approved_entries = [
        make_entry([4]),
        make_entry([5]),
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
    sbox.simple_commit(message='Nominate r4. Nominate r5.')

    # r7, r8: Run it.
    run_backport(sbox)

    # Now back up and do three entries.
    # r9: revert r7, r8
    # (First assert HEAD is r8, i.e. the run above made exactly two commits.)
    svntest.actions.run_and_verify_svnlook(["8\n"], [],
                                           'youngest', sbox.repo_dir)
    sbox.simple_update()
    svntest.main.run_svn(None, 'merge', '-r8:6',
                         '^/branch', sbox.ospath('branch'))
    sbox.simple_commit(message='Revert the merges.')

    # r10: Another change on trunk.
    # (Note that this change must be merged after r5.)
    sbox.simple_rm('subversion/trunk/A')
    sbox.simple_commit(message='Third change on trunk.')

    # r11: Nominate r10.
    sbox.simple_append(STATUS, serialize_entry(make_entry([10])))
    sbox.simple_commit(message='Nominate r10.')

    # r12, r13, r14: Run it.
    run_backport(sbox)
+
+
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000003')
def backport_accept(sbox):
    "test --accept parsing"

    # r6: conflicting change on branch
    sbox.simple_append('branch/iota', 'Conflicts with first change\n')
    sbox.simple_commit(message="Conflicting change on iota")

    # r7: nominate r4 with --accept (because of r6)
    approved_entries = [
        make_entry([4], notes="Merge with --accept=theirs-conflict."),
    ]
    def reindenting_serialize_entry(*args, **kwargs):
        # Same one-space dedent trick as in backport_indented_entry().
        entry = serialize_entry(*args, **kwargs)
        return ('\n' + entry).replace('\n ', '\n')[1:]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries,
                       serialize_entry=reindenting_serialize_entry))
    sbox.simple_commit(message='Nominate r4')

    # Run it.
    run_backport(sbox)
+
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000004')
def backport_branches(sbox):
    "test branches"

    # r6: conflicting change on branch
    sbox.simple_append('branch/iota', 'Conflicts with first change')
    sbox.simple_commit(message="Conflicting change on iota")

    # r7: backport branch
    sbox.simple_update()
    sbox.simple_copy('branch', 'subversion/branches/r4')
    sbox.simple_commit(message='Create a backport branch')

    # r8: merge into backport branch
    # --record-only marks r4 as merged (mergeinfo only); the "resolution"
    # itself is done by hand below by overwriting iota.
    sbox.simple_update()
    svntest.main.run_svn(None, 'merge', '--record-only', '-c4',
                         '^/subversion/trunk', sbox.ospath('subversion/branches/r4'))
    sbox.simple_mkdir('subversion/branches/r4/A_resolved')
    sbox.simple_append('subversion/branches/r4/iota', "resolved\n", truncate=1)
    sbox.simple_commit(message='Conflict resolution via mkdir')

    # r9: nominate r4 with branch
    approved_entries = [
        make_entry([4], branch="r4")
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
    sbox.simple_commit(message='Nominate r4')

    # Run it.
    run_backport(sbox)

    # This also serves as the 'success mode' part of backport_branch_contains().
+
+
+#----------------------------------------------------------------------
@BackportTest('76cee987-25c9-4d6c-ad40-000000000005')
def backport_multirevisions(sbox):
    "test multirevision entries"

    # r6: nominate r4,r5 as a single entry; the expected dump (UUID ...0005)
    # shows both revisions merged in one commit with mergeinfo
    # /subversion/trunk:4-5.
    approved_entries = [
        make_entry([4,5])
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
    sbox.simple_commit(message='Nominate a group.')

    # Run it.
    run_backport(sbox)
+
+
+#----------------------------------------------------------------------
@BackportTest(None) # would be 000000000006
def backport_conflicts_detection(sbox):
    "test the conflicts detector"

    # r6: conflicting change on branch
    sbox.simple_append('branch/iota', 'Conflicts with first change\n')
    sbox.simple_commit(message="Conflicting change on iota")

    # r7: nominate r4, but without the requisite --accept
    approved_entries = [
        make_entry([4], notes="This will conflict."),
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
    sbox.simple_commit(message='Nominate r4')

    # Run it.
    exit_code, output, errput = run_backport(sbox, True,
                                             # Choose conflicts mode:
                                             ["MAY_COMMIT=0"])

    # Verify the conflict is detected.
    expected_output = svntest.verify.RegexOutput(
        'Index: iota',
        match_all=False,
    )
    # (The original assigned a string-typed regex to expected_errput here
    # and then immediately overwrote it; the dead assignment is removed.)
    expected_errput = svntest.verify.RegexListOutput(
        [
            r'Warning summary',
            r'===============',
            r'r4 [(]default logsummary[)]: Conflicts on iota',
        ],
        match_all=False)
    svntest.verify.verify_outputs(None, output, errput,
                                  expected_output, expected_errput)
    svntest.verify.verify_exit_code(None, exit_code, 1)

    ## Now, let's test the "Depends:" annotation silences the error.

    # Re-nominate.
    approved_entries = [
        make_entry([4], depends="World peace."),
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries), truncate=True)
    sbox.simple_commit(message='Re-nominate r4')

    # Detect conflicts.
    exit_code, output, errput = run_backport(sbox, extra_env=["MAY_COMMIT=0"])

    # Verify stdout. (exit_code and errput were verified by run_backport().)
    svntest.verify.verify_outputs(None, output, errput,
                                  "Conflicts found.*, as expected.", [])
+
+
+#----------------------------------------------------------------------
@BackportTest(None) # would be 000000000007
def backport_branch_contains(sbox):
    "branch must contain the revisions"

    # r6: conflicting change on branch
    sbox.simple_append('branch/iota', 'Conflicts with first change')
    sbox.simple_commit(message="Conflicting change on iota")

    # r7: backport branch
    sbox.simple_update()
    sbox.simple_copy('branch', 'subversion/branches/r4')
    sbox.simple_commit(message='Create a backport branch')

    # r8: merge into backport branch
    sbox.simple_update()
    svntest.main.run_svn(None, 'merge', '--record-only', '-c4',
                         '^/subversion/trunk', sbox.ospath('subversion/branches/r4'))
    sbox.simple_mkdir('subversion/branches/r4/A_resolved')
    sbox.simple_append('subversion/branches/r4/iota', "resolved\n", truncate=1)
    sbox.simple_commit(message='Conflict resolution via mkdir')

    # r9: nominate r4,r5 with branch that contains not all of them
    # (only r4 was record-only-merged above, so r5 is missing from the branch)
    approved_entries = [
        make_entry([4,5], branch="r4")
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
    sbox.simple_commit(message='Nominate r4')

    # Run it.
    exit_code, output, errput = run_backport(sbox, error_expected=True)

    # Verify the error message.
    expected_errput = svntest.verify.RegexOutput(
        ".*Revisions 'r5' nominated but not included in branch",
        match_all=False,
    )
    svntest.verify.verify_outputs(None, output, errput,
                                  [], expected_errput)
    svntest.verify.verify_exit_code(None, exit_code, 1)

    # Verify no commit occurred.
    svntest.actions.run_and_verify_svnlook(["9\n"], [],
                                           'youngest', sbox.repo_dir)

    # Verify the working copy has been reverted.
    # NOTE(review): this runs 'svn status -q' against sbox.repo_dir (the
    # repository directory), while the comment above speaks of the working
    # copy -- presumably sbox.wc_dir was intended; confirm before relying
    # on this check.
    svntest.actions.run_and_verify_svn([], [], 'status', '-q',
                                       sbox.repo_dir)

    # The sibling test backport_branches() verifies the success mode.
+
+
+
+
+#----------------------------------------------------------------------
@BackportTest(None) # would be 000000000008
def backport_double_conflict(sbox):
    "two-revisioned entry with two conflicts"

    # r6: conflicting change on branch
    sbox.simple_append('branch/iota', 'Conflicts with first change')
    sbox.simple_commit(message="Conflicting change on iota")

    # r7: further conflicting change to same file
    sbox.simple_update()
    sbox.simple_append('subversion/trunk/iota', 'Third line\n')
    sbox.simple_commit(message="iota's third line")

    # r8: nominate both trunk revisions as one entry, silenced via Depends:
    approved_entries = [
        make_entry([4,7], depends="World peace.")
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries))
    sbox.simple_commit(message='Nominate the r4 group')

    # Run it, in conflicts mode.
    exit_code, output, errput = run_backport(sbox, True, ["MAY_COMMIT=0"])

    # Verify the failure mode: "merge conflict" error on stderr, but backport.pl
    # itself exits with code 0, since conflicts were confined to Depends:-ed
    # entries.
    #
    # The error only happens with multi-pass merges where the first pass
    # conflicts and the second pass touches the conflict victim.
    #
    # The error would be:
    #   subversion/libsvn_client/merge.c:5499: (apr_err=SVN_ERR_WC_FOUND_CONFLICT)
    #   svn: E155015: One or more conflicts were produced while merging r3:4
    #   into '/tmp/stw/working_copies/backport_tests-8/branch' -- resolve all
    #   conflicts and rerun the merge to apply the remaining unmerged revisions
    #   ...
    #   Warning summary
    #   ===============
    #
    #   r4 (default logsummary): subshell exited with code 256
    # And backport.pl would exit with exit code 1.

    expected_output = 'Conflicts found.*, as expected.'
    expected_errput = svntest.verify.RegexOutput(
        ".*svn: E155015:.*", # SVN_ERR_WC_FOUND_CONFLICT
        match_all=False,
    )
    svntest.verify.verify_outputs(None, output, errput,
                                  expected_output, expected_errput)
    svntest.verify.verify_exit_code(None, exit_code, 0)
    # A "Warning summary" on stderr would mean the conflict was *not*
    # silenced by the Depends: annotation -- treat that as a failure.
    if any("Warning summary" in line for line in errput):
        raise svntest.verify.SVNUnexpectedStderr(errput)

    ## Now, let's ensure this does get detected if not silenced.
    # r9: Re-nominate
    approved_entries = [
        make_entry([4,7]) # no depends=
    ]
    sbox.simple_append(STATUS, serialize_STATUS(approved_entries), truncate=True)
    sbox.simple_commit(message='Re-nominate the r4 group')

    exit_code, output, errput = run_backport(sbox, True, ["MAY_COMMIT=0"])

    # [1-9]\d+ matches multi-digit non-zero exit codes, such as the raw
    # wait status 256 shown in the sample output above.
    expected_errput = r'r4 .*: subshell exited with code (?:[1-9]\d+)'
    svntest.verify.verify_exit_code(None, exit_code, 1)
    svntest.verify.verify_outputs(None, output, errput,
                                  svntest.verify.AnyOutput, expected_errput)
+
+
+
+#----------------------------------------------------------------------
+
+########################################################################
+# Run the tests
+
+# list all tests here, starting with None:
# Registration order defines the test numbers svntest reports.
test_list = [ None,
              backport_indented_entry,
              backport_two_approveds,
              backport_accept,
              backport_branches,
              backport_multirevisions,
              backport_conflicts_detection,
              backport_branch_contains,
              backport_double_conflict,
              # When adding a new test, include the test number in the last
              # 6 bytes of the UUID.
            ]

if __name__ == '__main__':
    # run_tests() exits the process itself after running TEST_LIST.
    svntest.main.run_tests(test_list)
    # NOTREACHED
+
+
+### End of file.
diff --git a/tools/dist/backport_two_approveds.dump b/tools/dist/backport_two_approveds.dump
new file mode 100644
index 0000000..c4349b2
--- /dev/null
+++ b/tools/dist/backport_two_approveds.dump
@@ -0,0 +1,961 @@
+SVN-fs-dump-format-version: 2
+
+UUID: 76cee987-25c9-4d6c-ad40-000000000002
+
+Revision-number: 0
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+Revision-number: 1
+Prop-content-length: 83
+Content-length: 83
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 27
+Log message for revision 1.
+PROPS-END
+
+Node-path: A
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/E/alpha
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: d1fa4a3ced98961674a441930a51f2d3
+Text-content-sha1: b347d1da69df9a6a70433ceeaa0d46c8483e8c03
+Content-length: 36
+
+PROPS-END
+This is the file 'alpha'.
+
+
+Node-path: A/B/E/beta
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 67c756078f24f946f6ec2d00d02f50e1
+Text-content-sha1: d001710ac8e622c6d1fe59b1e265a3908acdd2a3
+Content-length: 35
+
+PROPS-END
+This is the file 'beta'.
+
+
+Node-path: A/B/F
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/B/lambda
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 27
+Text-content-md5: 911c7a8d869b8c1e566f57da54d889c6
+Text-content-sha1: 784a9298366863da2b65ebf82b4e1123755a2421
+Content-length: 37
+
+PROPS-END
+This is the file 'lambda'.
+
+
+Node-path: A/C
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/G/pi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: adddfc3e6b605b5f90ceeab11b4e8ab6
+Text-content-sha1: 411e258dc14b42701fdc29b75f653e93f8686415
+Content-length: 33
+
+PROPS-END
+This is the file 'pi'.
+
+
+Node-path: A/D/G/rho
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 82f2211cf4ab22e3555fc7b835fbc604
+Text-content-sha1: 56388a031dffbf9df7c32e1f299b1d5d7ef60881
+Content-length: 34
+
+PROPS-END
+This is the file 'rho'.
+
+
+Node-path: A/D/G/tau
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 9936e2716e469bb686deb98c280ead58
+Text-content-sha1: 62e8c07d56bee94ea4577e80414fa8805aaf0175
+Content-length: 34
+
+PROPS-END
+This is the file 'tau'.
+
+
+Node-path: A/D/H
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: A/D/H/chi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: 8f5ebad6d1f7775c2682e54417cbe4d3
+Text-content-sha1: abeac1bf62099ab66b44779198dc19f40e3244f4
+Content-length: 34
+
+PROPS-END
+This is the file 'chi'.
+
+
+Node-path: A/D/H/omega
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: fe4ec8bdd3d2056db4f55b474a10fadc
+Text-content-sha1: c06e671bf15a6af55086176a0931d3b5034c82e6
+Content-length: 36
+
+PROPS-END
+This is the file 'omega'.
+
+
+Node-path: A/D/H/psi
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 24
+Text-content-md5: e81f8f68ba50e749c200cb3c9ce5d2b1
+Text-content-sha1: 9c438bde39e8ccbbd366df2638e3cb6700950204
+Content-length: 34
+
+PROPS-END
+This is the file 'psi'.
+
+
+Node-path: A/D/gamma
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 26
+Text-content-md5: 412138bd677d64cd1c32fafbffe6245d
+Text-content-sha1: 74b75d7f2e1a0292f17d5a57c570bd89783f5d1c
+Content-length: 36
+
+PROPS-END
+This is the file 'gamma'.
+
+
+Node-path: A/mu
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 33
+
+PROPS-END
+This is the file 'mu'.
+
+
+Node-path: iota
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 35
+
+PROPS-END
+This is the file 'iota'.
+
+
+Revision-number: 2
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+Create trunk
+PROPS-END
+
+Node-path: subversion
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/branches
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/tags
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk
+Node-kind: dir
+Node-action: add
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: subversion/trunk/A
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: A
+
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: add
+Node-copyfrom-rev: 1
+Node-copyfrom-path: iota
+Text-copy-source-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-copy-source-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+
+
+Node-path: A
+Node-action: delete
+
+
+Node-path: iota
+Node-action: delete
+
+
+Revision-number: 3
+Prop-content-length: 87
+Content-length: 87
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 31
+Create branch, with STATUS file
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: add
+Node-copyfrom-rev: 2
+Node-copyfrom-path: subversion/trunk
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: add
+Prop-content-length: 10
+Text-content-length: 0
+Text-content-md5: d41d8cd98f00b204e9800998ecf8427e
+Text-content-sha1: da39a3ee5e6b4b0d3255bfef95601890afd80709
+Content-length: 10
+
+PROPS-END
+
+
+Revision-number: 4
+Prop-content-length: 68
+Content-length: 68
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 12
+First change
+PROPS-END
+
+Node-path: subversion/trunk/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 5
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Second change
+PROPS-END
+
+Node-path: subversion/trunk/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Revision-number: 6
+Prop-content-length: 82
+Content-length: 82
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 26
+Nominate r4. Nominate r5.
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 298
+Text-content-md5: 4ebc11d7e1ec3a5cb75d3cfdcf0c1399
+Text-content-sha1: 86dd246b9072d6baeaac50f58ee2fa6444f6f889
+Content-length: 298
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 7
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r4 from trunk:
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 241
+Text-content-md5: cd8d55451e22cd8f83599bc64e67b515
+Text-content-sha1: 6b54b54b2711d0de2f252f34c26f2ac8f222ce35
+Content-length: 241
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 8
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r5 from trunk:
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 56
+Content-length: 56
+
+K 13
+svn:mergeinfo
+V 21
+/subversion/trunk:4-5
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Revision-number: 9
+Prop-content-length: 74
+Content-length: 74
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 18
+Revert the merges.
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 10
+Content-length: 10
+
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 23
+Text-content-md5: baf78ae06a2d5b7d9554c5f1280d3fa8
+Text-content-sha1: b4d00c56351d1a752e24d839d41a362d8da4a4c7
+Content-length: 23
+
+This is the file 'mu'.
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 298
+Text-content-md5: 4ebc11d7e1ec3a5cb75d3cfdcf0c1399
+Text-content-sha1: 86dd246b9072d6baeaac50f58ee2fa6444f6f889
+Content-length: 298
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 25
+Text-content-md5: 2d18c5e57e84c5b8a5e9a6e13fa394dc
+Text-content-sha1: 2c0aa9014a0cd07f01795a333d82485ef6d083e2
+Content-length: 25
+
+This is the file 'iota'.
+
+
+Revision-number: 10
+Prop-content-length: 78
+Content-length: 78
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 22
+Third change on trunk.
+PROPS-END
+
+Node-path: subversion/trunk/A
+Node-action: delete
+
+
+Revision-number: 11
+Prop-content-length: 69
+Content-length: 69
+
+K 10
+svn:author
+V 7
+jrandom
+K 7
+svn:log
+V 13
+Nominate r10.
+PROPS-END
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 355
+Text-content-md5: cc8dd910efc8d555f5dc51e5c331b403
+Text-content-sha1: c67ec7e762d8f7dfa6d2b876e540a6038781171f
+Content-length: 355
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+
+Revision-number: 12
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r4 from trunk:
+
+ * r4
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 54
+Content-length: 54
+
+K 13
+svn:mergeinfo
+V 19
+/subversion/trunk:4
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 298
+Text-content-md5: 41e1f764781ee0b7874dc92607e9b9f6
+Text-content-sha1: 19e57ad83073cc50d86033ab0f03d3b8574c68fc
+Content-length: 298
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+Node-path: branch/iota
+Node-kind: file
+Node-action: change
+Text-content-length: 38
+Text-content-md5: 67f471c2ecc2c9e561d122d6e6b0f847
+Text-content-sha1: 750accb6e7f880a1d05ce725c19eb60183bb4b26
+Content-length: 38
+
+This is the file 'iota'.
+First change
+
+
+Revision-number: 13
+Prop-content-length: 132
+Content-length: 132
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 77
+Merge r5 from trunk:
+
+ * r5
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 56
+Content-length: 56
+
+K 13
+svn:mergeinfo
+V 21
+/subversion/trunk:4-5
+PROPS-END
+
+
+Node-path: branch/A/mu
+Node-kind: file
+Node-action: change
+Text-content-length: 37
+Text-content-md5: eab751301b4e650c83324dfef4aad667
+Text-content-sha1: ab36cad564c7c50dec5ac1eb0bf879cf4e3a5f99
+Content-length: 37
+
+This is the file 'mu'.
+Second change
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 242
+Text-content-md5: 30f964a922fe4e9f01b25a274c0a8efb
+Text-content-sha1: f1180ea711cbbbbfb2af52cac509da15313ca319
+Content-length: 242
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+
+Revision-number: 14
+Prop-content-length: 134
+Content-length: 134
+
+K 10
+svn:author
+V 6
+daniel
+K 7
+svn:log
+V 79
+Merge r10 from trunk:
+
+ * r10
+ default logsummary
+ Votes:
+ +1: jrandom
+
+PROPS-END
+
+Node-path: branch
+Node-kind: dir
+Node-action: change
+Prop-content-length: 59
+Content-length: 59
+
+K 13
+svn:mergeinfo
+V 24
+/subversion/trunk:4-5,10
+PROPS-END
+
+
+Node-path: branch/STATUS
+Node-kind: file
+Node-action: change
+Text-content-length: 185
+Text-content-md5: 6f71fec92afeaa5c1ebe02349f548ca9
+Text-content-sha1: eece02003d9c51610249e3fdd0d4e191e02ba3b7
+Content-length: 185
+
+Status of 1.8.x:
+
+Candidate changes:
+==================
+
+Random new subheading:
+======================
+
+Veto-blocked changes:
+=====================
+
+Approved changes:
+=================
+
+
+Node-path: branch/A
+Node-action: delete
+
+
diff --git a/tools/dist/dist.sh b/tools/dist/dist.sh
index 03d5c39..676db68 100755
--- a/tools/dist/dist.sh
+++ b/tools/dist/dist.sh
@@ -22,7 +22,7 @@
# USAGE: ./dist.sh -v VERSION -r REVISION -pr REPOS-PATH
# [-alpha ALPHA_NUM|-beta BETA_NUM|-rc RC_NUM|pre PRE_NUM]
-# [-apr PATH-TO-APR ] [-apru PATH-TO-APR-UTIL]
+# [-apr PATH-TO-APR ] [-apru PATH-TO-APR-UTIL]
# [-apri PATH-TO-APR-ICONV] [-neon PATH-TO-NEON]
# [-serf PATH-TO-SERF] [-zlib PATH-TO-ZLIB]
# [-sqlite PATH-TO-SQLITE] [-zip] [-sign]
@@ -47,13 +47,13 @@
# working copy, so you may wish to create a dist-resources directory
# containing the apr/, apr-util/, neon/, serf/, zlib/ and sqlite/
# dependencies, and run dist.sh from that.
-#
+#
# When building alpha, beta or rc tarballs pass the appropriate flag
# followed by a number. For example "-alpha 5", "-beta 3", "-rc 2".
-#
+#
# If neither an -alpha, -beta, -pre or -rc option is specified, a release
# tarball will be built.
-#
+#
# To build a Windows zip file package, additionally pass -zip and the
# path to apr-iconv with -apri.
@@ -119,7 +119,7 @@ if [ -n "$ALPHA" ] && [ -n "$BETA" ] && [ -n "$NIGHTLY" ] && [ -n "$PRE" ] ||
exit 1
elif [ -n "$ALPHA" ] ; then
VER_TAG="Alpha $ALPHA"
- VER_NUMTAG="-alpha$ALPHA"
+ VER_NUMTAG="-alpha$ALPHA"
elif [ -n "$BETA" ] ; then
VER_TAG="Beta $BETA"
VER_NUMTAG="-beta$BETA"
@@ -183,20 +183,6 @@ if [ $? -ne 0 ] && [ -z "$ZIP" ]; then
exit 1
fi
-# Default to 'wget', but allow 'curl' to be used if available.
-HTTP_FETCH=wget
-HTTP_FETCH_OUTPUT="-O"
-type wget > /dev/null 2>&1
-if [ $? -ne 0 ]; then
- type curl > /dev/null 2>&1
- if [ $? -ne 0 ]; then
- echo "Neither curl or wget found."
- exit 2
- fi
- HTTP_FETCH=curl
- HTTP_FETCH_OUTPUT="-o"
-fi
-
DISTNAME="subversion-${VERSION}${VER_NUMTAG}"
DIST_SANDBOX=.dist_sandbox
DISTPATH="$DIST_SANDBOX/$DISTNAME"
@@ -306,6 +292,15 @@ if [ -z "$ZIP" ] ; then
(cd "$DISTPATH" && ./autogen.sh --release) || exit 1
fi
+# Generate the .pot file, for use by translators.
+echo "Running po-update.sh in sandbox, to create subversion.pot..."
+# Can't use the po-update.sh in the packaged export since it might have CRLF
+# line endings, in which case it won't run. So first we export it again.
+${svn:-svn} export -q -r "$REVISION" \
+ "http://svn.apache.org/repos/asf/subversion/$REPOS_PATH/tools/po/po-update.sh" \
+ --username none --password none "$DIST_SANDBOX/po-update.sh"
+(cd "$DISTPATH" && ../po-update.sh pot) || exit 1
+
# Pre-translate the various sql-derived header files
echo "Generating SQL-derived headers..."
for f in `find "$DISTPATH/subversion" -name '*.sql'`; do
diff --git a/tools/dist/make-deps-tarball.sh b/tools/dist/make-deps-tarball.sh
deleted file mode 100755
index 318adc6..0000000
--- a/tools/dist/make-deps-tarball.sh
+++ /dev/null
@@ -1,121 +0,0 @@
-#!/bin/sh
-#
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-#
-set -e
-
-APR=apr-1.4.6
-APR_UTIL=apr-util-1.4.1
-NEON=neon-0.29.6
-SERF=serf-0.3.1
-ZLIB=zlib-1.2.7
-SQLITE_VERSION=3071400
-SQLITE=sqlite-amalgamation-$SQLITE_VERSION
-
-HTTPD=httpd-2.2.22
-HTTPD_OOPS=
-APR_ICONV=apr-iconv-1.2.1
-APR_ICONV_OOPS=
-
-WIN32_APR_VIA_HTTPD=1
-
-BASEDIR=`pwd`
-TEMPDIR=$BASEDIR/temp
-
-APACHE_MIRROR=http://archive.apache.org/dist
-
-create_deps() {
- SVN_VERSION="$1"
- set -x
-
- mkdir -p $TEMPDIR
- cd $TEMPDIR
- wget -qnc $APACHE_MIRROR/apr/$APR.tar.bz2
- wget -qnc $APACHE_MIRROR/apr/$APR_UTIL.tar.bz2
- if [ -n "$WIN32_APR_VIA_HTTPD" ]; then
- wget -qnc $APACHE_MIRROR/httpd/$HTTPD-win32-src$HTTPD_OOPS.zip
- else
- wget -qnc $APACHE_MIRROR/apr/$APR-win32-src.zip
- wget -qnc $APACHE_MIRROR/apr/$APR_UTIL-win32-src.zip
- wget -qnc $APACHE_MIRROR/apr/$APR_ICONV-win32-src$APR_ICONV_OOPS.zip
- fi
- wget -qnc http://webdav.org/neon/$NEON.tar.gz
- wget -qnc http://serf.googlecode.com/files/$SERF.tar.bz2
- wget -qnc http://www.zlib.net/$ZLIB.tar.bz2
- wget -qnc http://www.sqlite.org/$SQLITE.zip
-
- mkdir $BASEDIR/unix-dependencies
- cd $BASEDIR/unix-dependencies
- tar zxf $TEMPDIR/$NEON.tar.gz
- tar jxf $TEMPDIR/$ZLIB.tar.bz2
- tar jxf $TEMPDIR/$SERF.tar.bz2
- unzip -q $TEMPDIR/$SQLITE.zip
- mv $NEON neon
- mv $ZLIB zlib
- mv $SERF serf
- mv $SQLITE sqlite-amalgamation
- tar jxf $TEMPDIR/$APR.tar.bz2
- tar jxf $TEMPDIR/$APR_UTIL.tar.bz2
- mv $APR apr
- mv $APR_UTIL apr-util
- cd $TEMPDIR
-
- mkdir $BASEDIR/win32-dependencies
- cd $BASEDIR/win32-dependencies
- tar zxf $TEMPDIR/$NEON.tar.gz
- tar jxf $TEMPDIR/$ZLIB.tar.bz2
- tar jxf $TEMPDIR/$SERF.tar.bz2
- unzip -q $TEMPDIR/$SQLITE.zip
- mv $NEON neon
- mv $ZLIB zlib
- mv $SERF serf
- mv $SQLITE sqlite-amalgamation
- if [ -n "$WIN32_APR_VIA_HTTPD" ]; then
- unzip -q $TEMPDIR/$HTTPD-win32-src$HTTPD_OOPS.zip
- for i in apr apr-util apr-iconv; do
- mv $HTTPD/srclib/$i .
- done
- rm -rf $HTTPD
- else
- unzip -q $TEMPDIR/$APR-win32-src.zip
- unzip -q $TEMPDIR/$APR_UTIL-win32-src.zip
- unzip -q $TEMPDIR/$APR_ICONV-win32-src$APR_ICONV_OOPS.zip
- mv $APR apr
- mv $APR_UTIL apr-util
- mv $APR_ICONV apr-iconv
- fi
-
- cd $BASEDIR
- mv unix-dependencies subversion-$SVN_VERSION
- tar jcf subversion-deps-$SVN_VERSION.tar.bz2 subversion-$SVN_VERSION
- tar zcf subversion-deps-$SVN_VERSION.tar.gz subversion-$SVN_VERSION
- rm -rf subversion-$SVN_VERSION
- mv win32-dependencies subversion-$SVN_VERSION
- zip -qr subversion-deps-$SVN_VERSION.zip subversion-$SVN_VERSION
- rm -rf subversion-$SVN_VERSION
-}
-
-if [ -z "$1" ]; then
- echo "Please provide a Subversion release number."
- echo "Example: ./`basename $0` 1.6.19"
- exit 1
-fi
-
-create_deps "$1"
diff --git a/tools/dist/nightly.sh b/tools/dist/nightly.sh
index 0f2f991..b167ab3 100755
--- a/tools/dist/nightly.sh
+++ b/tools/dist/nightly.sh
@@ -54,7 +54,7 @@ head=`$svn info $repo/trunk | grep '^Revision' | cut -d ' ' -f 2`
# Get the latest versions of the rolling scripts
for i in release.py dist.sh
-do
+do
$svn export --force -r $head $repo/trunk/tools/dist/$i@$head $dir/$i
done
# We also need ezt
@@ -63,11 +63,11 @@ $svn export --force -r $head $repo/trunk/build/generator/ezt.py@$head $dir/ezt.p
# Create the environment
cd roll
echo '----------------building environment------------------'
-../release.py --base-dir ${abscwd}/roll build-env trunk-nightly
+../release.py --verbose --base-dir ${abscwd}/roll build-env trunk-nightly
# Roll the tarballs
echo '-------------------rolling tarball--------------------'
-../release.py --base-dir ${abscwd}/roll roll --branch trunk trunk-nightly $head
+../release.py --verbose --base-dir ${abscwd}/roll roll --branch trunk trunk-nightly $head
cd ..
# Create the information page
diff --git a/tools/dist/nominate.pl b/tools/dist/nominate.pl
new file mode 120000
index 0000000..411377e
--- /dev/null
+++ b/tools/dist/nominate.pl
@@ -0,0 +1 @@
+backport.pl \ No newline at end of file
diff --git a/tools/dist/release.py b/tools/dist/release.py
index bc80549..30a1f0b 100755
--- a/tools/dist/release.py
+++ b/tools/dist/release.py
@@ -66,16 +66,42 @@ except ImportError:
import ezt
+try:
+ subprocess.check_output
+except AttributeError:
+ def check_output(cmd):
+ proc = subprocess.Popen(['svn', 'list', dist_dev_url],
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ (stdout, stderr) = proc.communicate()
+ rc = proc.wait()
+ if rc or stderr:
+ logging.error('%r failed with stderr %r', cmd, stderr)
+ raise subprocess.CalledProcessError(rc, cmd)
+ return stdout
+ subprocess.check_output = check_output
+ del check_output
+
# Our required / recommended release tool versions by release branch
tool_versions = {
'trunk' : {
- 'autoconf' : '2.68',
- 'libtool' : '2.4',
- 'swig' : '2.0.4',
+ 'autoconf' : '2.69',
+ 'libtool' : '2.4.3',
+ 'swig' : '3.0.0',
+ },
+ '1.9' : {
+ 'autoconf' : '2.69',
+ 'libtool' : '2.4.3',
+ 'swig' : '3.0.0'
+ },
+ '1.8' : {
+ 'autoconf' : '2.69',
+ 'libtool' : '2.4.3',
+ 'swig' : '2.0.9',
},
'1.7' : {
'autoconf' : '2.68',
- 'libtool' : '2.4',
+ 'libtool' : '2.4.3',
'swig' : '2.0.4',
},
'1.6' : {
@@ -85,6 +111,9 @@ tool_versions = {
},
}
+# The version that is our current recommended release
+recommended_release = '1.8'
+
# Some constants
repos = 'http://svn.apache.org/repos/asf/subversion'
secure_repos = 'https://svn.apache.org/repos/asf/subversion'
@@ -99,7 +128,7 @@ extns = ['zip', 'tar.gz', 'tar.bz2']
# Utility functions
class Version(object):
- regex = re.compile('(\d+).(\d+).(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?')
+ regex = re.compile(r'(\d+).(\d+).(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?')
def __init__(self, ver_str):
# Special case the 'trunk-nightly' version
@@ -135,6 +164,18 @@ class Version(object):
def is_prerelease(self):
return self.pre != None
+ def is_recommended(self):
+ return self.branch == recommended_release
+
+ def get_download_anchor(self):
+ if self.is_prerelease():
+ return 'pre-releases'
+ else:
+ if self.is_recommended():
+ return 'recommended-release'
+ else:
+ return 'supported-releases'
+
def __lt__(self, that):
if self.major < that.major: return True
if self.major > that.major: return False
@@ -155,7 +196,7 @@ class Version(object):
else:
return self.pre_num < that.pre_num
- def __str(self):
+ def __str__(self):
if self.pre:
if self.pre == 'nightly':
return 'nightly'
@@ -168,11 +209,7 @@ class Version(object):
def __repr__(self):
- return "Version('%s')" % self.__str()
-
- def __str__(self):
- return self.__str()
-
+ return "Version(%s)" % repr(str(self))
def get_prefix(base_dir):
return os.path.join(base_dir, 'prefix')
@@ -183,6 +220,13 @@ def get_tempdir(base_dir):
def get_deploydir(base_dir):
return os.path.join(base_dir, 'deploy')
+def get_target(args):
+ "Return the location of the artifacts"
+ if args.target:
+ return args.target
+ else:
+ return get_deploydir(args.base_dir)
+
def get_tmpldir():
return os.path.join(os.path.abspath(sys.path[0]), 'templates')
@@ -194,8 +238,7 @@ def get_tmplfile(filename):
return urllib2.urlopen(repos + '/trunk/tools/dist/templates/' + filename)
def get_nullfile():
- # This is certainly not cross platform
- return open('/dev/null', 'w')
+ return open(os.path.devnull, 'w')
def run_script(verbose, script):
if verbose:
@@ -371,12 +414,7 @@ def compare_changes(repos, branch, revision):
mergeinfo_cmd = ['svn', 'mergeinfo', '--show-revs=eligible',
repos + '/trunk/CHANGES',
repos + '/' + branch + '/' + 'CHANGES']
- proc = subprocess.Popen(mergeinfo_cmd, stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- rc = proc.wait()
- if stderr:
- raise RuntimeError('svn mergeinfo failed: %s' % stderr)
+ stdout = subprocess.check_output(mergeinfo_cmd)
if stdout:
# Treat this as a warning since we are now putting entries for future
# minor releases in CHANGES on trunk.
@@ -463,15 +501,11 @@ def sign_candidates(args):
def sign_file(filename):
asc_file = open(filename + '.asc', 'a')
logging.info("Signing %s" % filename)
- proc = subprocess.Popen(['gpg', '-ba', '-o', '-', filename],
- stdout=asc_file)
- proc.wait()
+ proc = subprocess.check_call(['gpg', '-ba', '-o', '-', filename],
+ stdout=asc_file)
asc_file.close()
- if args.target:
- target = args.target
- else:
- target = get_deploydir(args.base_dir)
+ target = get_target(args)
for e in extns:
filename = os.path.join(target, 'subversion-%s.%s' % (args.version, e))
@@ -488,17 +522,17 @@ def sign_candidates(args):
def post_candidates(args):
'Post candidate artifacts to the dist development directory.'
+ target = get_target(args)
+
logging.info('Importing tarballs to %s' % dist_dev_url)
svn_cmd = ['svn', 'import', '-m',
'Add %s candidate release artifacts' % args.version.base,
'--auto-props', '--config-option',
'config:auto-props:*.asc=svn:eol-style=native;svn:mime-type=text/plain',
- get_deploydir(args.base_dir), dist_dev_url]
+ target, dist_dev_url]
if (args.username):
svn_cmd += ['--username', args.username]
- proc = subprocess.Popen(svn_cmd)
- (stdout, stderr) = proc.communicate()
- proc.wait()
+ subprocess.check_call(svn_cmd)
#----------------------------------------------------------------------
# Create tag
@@ -513,6 +547,7 @@ def create_tag(args):
else:
branch = secure_repos + '/branches/%d.%d.x' % (args.version.major,
args.version.minor)
+ target = get_target(args)
tag = secure_repos + '/tags/' + str(args.version)
@@ -521,13 +556,63 @@ def create_tag(args):
if (args.username):
svnmucc_cmd += ['--username', args.username]
svnmucc_cmd += ['cp', str(args.revnum), branch, tag]
- svnmucc_cmd += ['put', os.path.join(get_deploydir(args.base_dir),
- 'svn_version.h.dist'),
+ svnmucc_cmd += ['put', os.path.join(target, 'svn_version.h.dist' + '-' +
+ str(args.version)),
tag + '/subversion/include/svn_version.h']
# don't redirect stdout/stderr since svnmucc might ask for a password
- proc = subprocess.Popen(svnmucc_cmd)
- proc.wait()
+ subprocess.check_call(svnmucc_cmd)
+
+ if not args.version.is_prerelease():
+ logging.info('Bumping revisions on the branch')
+ def replace_in_place(fd, startofline, flat, spare):
+ """In file object FD, replace FLAT with SPARE in the first line
+ starting with STARTOFLINE."""
+
+ fd.seek(0, os.SEEK_SET)
+ lines = fd.readlines()
+ for i, line in enumerate(lines):
+ if line.startswith(startofline):
+ lines[i] = line.replace(flat, spare)
+ break
+ else:
+ raise RuntimeError('Definition of %r not found' % startofline)
+
+ fd.seek(0, os.SEEK_SET)
+ fd.writelines(lines)
+ fd.truncate() # for current callers, new value is never shorter.
+
+ new_version = Version('%d.%d.%d' %
+ (args.version.major, args.version.minor,
+ args.version.patch + 1))
+
+ def file_object_for(relpath):
+ fd = tempfile.NamedTemporaryFile()
+ url = branch + '/' + relpath
+ fd.url = url
+ subprocess.check_call(['svn', 'cat', '%s@%d' % (url, args.revnum)],
+ stdout=fd)
+ return fd
+
+ svn_version_h = file_object_for('subversion/include/svn_version.h')
+ replace_in_place(svn_version_h, '#define SVN_VER_PATCH ',
+ str(args.version.patch), str(new_version.patch))
+
+ STATUS = file_object_for('STATUS')
+ replace_in_place(STATUS, 'Status of ',
+ str(args.version), str(new_version))
+
+ svn_version_h.seek(0, os.SEEK_SET)
+ STATUS.seek(0, os.SEEK_SET)
+ subprocess.check_call(['svnmucc', '-r', str(args.revnum),
+ '-m', 'Post-release housekeeping: '
+ 'bump the %s branch to %s.'
+ % (branch.split('/')[-1], str(new_version)),
+ 'put', svn_version_h.name, svn_version_h.url,
+ 'put', STATUS.name, STATUS.url,
+ ])
+ del svn_version_h
+ del STATUS
#----------------------------------------------------------------------
# Clean dist
@@ -535,13 +620,7 @@ def create_tag(args):
def clean_dist(args):
'Clean the distribution directory of all but the most recent artifacts.'
- proc = subprocess.Popen(['svn', 'list', dist_release_url],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- proc.wait()
- if stderr:
- raise RuntimeError(stderr)
+ stdout = subprocess.check_output(['svn', 'list', dist_release_url])
filenames = stdout.split('\n')
tar_gz_archives = []
@@ -570,8 +649,7 @@ def clean_dist(args):
svnmucc_cmd += ['rm', dist_release_url + '/' + filename]
# don't redirect stdout/stderr since svnmucc might ask for a password
- proc = subprocess.Popen(svnmucc_cmd)
- proc.wait()
+ subprocess.check_call(svnmucc_cmd)
#----------------------------------------------------------------------
# Move to dist
@@ -579,13 +657,7 @@ def clean_dist(args):
def move_to_dist(args):
'Move candidate artifacts to the distribution directory.'
- proc = subprocess.Popen(['svn', 'list', dist_dev_url],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- (stdout, stderr) = proc.communicate()
- proc.wait()
- if stderr:
- raise RuntimeError(stderr)
+ stdout = subprocess.check_output(['svn', 'list', dist_dev_url])
filenames = []
for entry in stdout.split('\n'):
@@ -603,8 +675,7 @@ def move_to_dist(args):
# don't redirect stdout/stderr since svnmucc might ask for a password
logging.info('Moving release artifacts to %s' % dist_release_url)
- proc = subprocess.Popen(svnmucc_cmd)
- proc.wait()
+ subprocess.check_call(svnmucc_cmd)
#----------------------------------------------------------------------
# Write announcements
@@ -613,9 +684,10 @@ def write_news(args):
'Write text for the Subversion website.'
data = { 'date' : datetime.date.today().strftime('%Y%m%d'),
'date_pres' : datetime.date.today().strftime('%Y-%m-%d'),
- 'major-minor' : '%d.%d' % (args.version.major, args.version.minor),
+ 'major-minor' : args.version.branch,
'version' : str(args.version),
'version_base' : args.version.base,
+ 'anchor': args.version.get_download_anchor(),
}
if args.version.is_prerelease():
@@ -631,10 +703,7 @@ def write_news(args):
def get_sha1info(args, replace=False):
'Return a list of sha1 info for the release'
- if args.target:
- target = args.target
- else:
- target = get_deploydir(args.base_dir)
+ target = get_target(args)
sha1s = glob.glob(os.path.join(target, 'subversion*-%s*.sha1' % args.version))
@@ -665,9 +734,9 @@ def write_announcement(args):
data = { 'version' : str(args.version),
'sha1info' : sha1info,
'siginfo' : siginfo,
- 'major-minor' : '%d.%d' % (args.version.major,
- args.version.minor),
+ 'major-minor' : args.version.branch,
'major-minor-patch' : args.version.base,
+ 'anchor' : args.version.get_download_anchor(),
}
if args.version.is_prerelease():
@@ -708,10 +777,7 @@ def get_siginfo(args, quiet=False):
import _gnupg as gnupg
gpg = gnupg.GPG()
- if args.target:
- target = args.target
- else:
- target = get_deploydir(args.base_dir)
+ target = get_target(args)
good_sigs = {}
fingerprints = {}
@@ -842,6 +908,9 @@ def main():
help='''The release label, such as '1.7.0-alpha1'.''')
subparser.add_argument('--username',
help='''Username for ''' + dist_repos + '''.''')
+ subparser.add_argument('--target',
+ help='''The full path to the directory containing
+ release artifacts.''')
# Setup the parser for the create-tag subcommand
subparser = subparsers.add_parser('create-tag',
@@ -855,6 +924,9 @@ def main():
help='''The branch to base the release on.''')
subparser.add_argument('--username',
help='''Username for ''' + secure_repos + '''.''')
+ subparser.add_argument('--target',
+ help='''The full path to the directory containing
+ release artifacts.''')
# The clean-dist subcommand
subparser = subparsers.add_parser('clean-dist',
diff --git a/tools/dist/templates/download.ezt b/tools/dist/templates/download.ezt
index 601818d..d5fcb54 100644
--- a/tools/dist/templates/download.ezt
+++ b/tools/dist/templates/download.ezt
@@ -1,4 +1,4 @@
-<p style="font-size: 150%; text-align: center;">Subversion [version]</p>
+<p style="font-size: 150%; text-align: center;">Apache Subversion [version]</p>
<table class="centered">
<tr>
<th>File</th>
diff --git a/tools/dist/templates/rc-news.ezt b/tools/dist/templates/rc-news.ezt
index 959735c..704899a 100644
--- a/tools/dist/templates/rc-news.ezt
+++ b/tools/dist/templates/rc-news.ezt
@@ -16,7 +16,7 @@
in the [version_base] release.</p>
<p>To get this release from the nearest mirror, please visit our
- <a href="/download/#pre-releases">download page</a>.</p>
+ <a href="/download/#[anchor]">download page</a>.</p>
</div> <!-- #news-[date] -->
diff --git a/tools/dist/templates/rc-release-ann.ezt b/tools/dist/templates/rc-release-ann.ezt
index f9af5c1..b3085f7 100644
--- a/tools/dist/templates/rc-release-ann.ezt
+++ b/tools/dist/templates/rc-release-ann.ezt
@@ -1,7 +1,7 @@
I'm happy to announce the release of Apache Subversion [version].
Please choose the mirror closest to you by visiting:
- http://subversion.apache.org/download/#pre-releases
+ http://subversion.apache.org/download/#[anchor]
The SHA1 checksums are:
diff --git a/tools/dist/templates/stable-news.ezt b/tools/dist/templates/stable-news.ezt
index aee573f..63ee9da 100644
--- a/tools/dist/templates/stable-news.ezt
+++ b/tools/dist/templates/stable-news.ezt
@@ -13,7 +13,7 @@
>change log</a> for more information about this release.</p>
<p>To get this release from the nearest mirror, please visit our
- <a href="/download/#recommended-release">download page</a>.</p>
+ <a href="/download/#[anchor]">download page</a>.</p>
</div> <!-- #news-[date] -->
diff --git a/tools/dist/templates/stable-release-ann.ezt b/tools/dist/templates/stable-release-ann.ezt
index c865a84..a6ffa9a 100644
--- a/tools/dist/templates/stable-release-ann.ezt
+++ b/tools/dist/templates/stable-release-ann.ezt
@@ -1,7 +1,7 @@
I'm happy to announce the release of Apache Subversion [version].
Please choose the mirror closest to you by visiting:
- http://subversion.apache.org/download/#recommended-release
+ http://subversion.apache.org/download/#[anchor]
The SHA1 checksums are:
diff --git a/tools/examples/ExampleAuthn.java b/tools/examples/ExampleAuthn.java
new file mode 100644
index 0000000..9e0a23e
--- /dev/null
+++ b/tools/examples/ExampleAuthn.java
@@ -0,0 +1,105 @@
+/* Username/password prompt/save using 1.9 org.apache.subversion API.
+
+ Compile against non-installed Subversion JavaHL build:
+
+ javac -cp subversion/bindings/javahl/classes -d subversion/bindings/javahl/classes ExampleAuthn.java
+
+ Run:
+
+ LD_LIBRARY_PATH=subversion/libsvn_auth_gnome_keyring/.libs java -cp subversion/bindings/javahl/classes -Djava.library.path=subversion/bindings/javahl/native/.libs ExampleAuthn <URL> <config-dir>
+
+ */
+import org.apache.subversion.javahl.*;
+import org.apache.subversion.javahl.types.*;
+import org.apache.subversion.javahl.remote.*;
+import org.apache.subversion.javahl.callback.*;
+import java.io.Console;
+
+public class ExampleAuthn {
+
+ protected static class MyAuthn {
+
+ public static AuthnCallback getAuthn() {
+ return new MyAuthnCallback();
+ }
+
+ private static class MyAuthnCallback
+ implements AuthnCallback {
+
+ public UserPasswordResult
+ userPasswordPrompt(String realm, String username, boolean maySave) {
+ System.out.println("userPasswordPrompt");
+ System.out.println("Realm: " + realm);
+ String prompt;
+ if (username == null) {
+ System.out.print("Username: ");
+ username = System.console().readLine();
+ prompt = "Password: ";
+ } else {
+ prompt = "Password for " + username + ": ";
+ }
+ String password = new String(System.console().readPassword(prompt));
+ return new UserPasswordResult(username, password, maySave);
+ }
+
+ public boolean
+ allowStorePlaintextPassword(String realm) {
+ System.out.println("allowStorePlaintextPassword");
+ System.out.println("Realm: " + realm);
+ System.out.print("Store plaintext password on disk? (y/n): ");
+ String s = System.console().readLine();
+ return s.equals("y") ? true : false;
+ }
+
+ public UsernameResult
+ usernamePrompt(String realm, boolean maySave) {
+ System.out.println("usernamePrompt not implemented!");
+ return null;
+ }
+
+ public boolean
+ allowStorePlaintextPassphrase(String realm) {
+ System.out.println("allowStorePlaintextPassphrase not implemented!");
+ return false;
+ }
+
+ public SSLServerTrustResult
+ sslServerTrustPrompt(String realm,
+ SSLServerCertFailures failures,
+ SSLServerCertInfo info,
+ boolean maySave) {
+ System.out.println("sslServerTrustPrompt not implemented!");
+ return SSLServerTrustResult.acceptTemporarily();
+ }
+
+ public SSLClientCertResult
+ sslClientCertPrompt(String realm, boolean maySave) {
+ System.out.println("sslClientCertPrompt not implemented!");
+ return null;
+ }
+
+ public SSLClientCertPassphraseResult
+ sslClientCertPassphrasePrompt(String realm, boolean maySave) {
+ System.out.println("sslClientCertPassphrasePrompt not implemented!");
+ return null;
+ }
+ }
+ }
+
+ public static void main(String argv[]) {
+
+ if (argv.length != 2) {
+ System.err.println("usage: ExampleAuthn <URL> <config-dir>");
+ return;
+ }
+ RemoteFactory factory = new RemoteFactory();
+ factory.setConfigDirectory(argv[1]);
+ factory.setPrompt(MyAuthn.getAuthn());
+ try {
+ ISVNRemote raSession = factory.openRemoteSession(argv[0]);
+ raSession.getReposUUID();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/tools/examples/ExampleAuthnOld.java b/tools/examples/ExampleAuthnOld.java
new file mode 100644
index 0000000..cbcd92d
--- /dev/null
+++ b/tools/examples/ExampleAuthnOld.java
@@ -0,0 +1,119 @@
+/* Username/password prompt/save using old org.apache.subversion API.
+
+ Compile against non-installed Subversion JavaHL build:
+
+ javac -cp subversion/bindings/javahl/classes -d subversion/bindings/javahl/classes ExampleAuthnOld.java
+
+ Run:
+
+ LD_LIBRARY_PATH=subversion/libsvn_auth_gnome_keyring/.libs java -cp subversion/bindings/javahl/classes -Djava.library.path=subversion/bindings/javahl/native/.libs ExampleAuthnOld <URL> <config-dir>
+
+ */
+import org.apache.subversion.javahl.*;
+import org.apache.subversion.javahl.types.*;
+import org.apache.subversion.javahl.callback.*;
+import java.io.Console;
+
+public class ExampleAuthnOld {
+
+ protected static class MyAuthn {
+
+ public static UserPasswordCallback getAuthn() {
+ return new MyUserPasswordCallback();
+ }
+
+ private static class MyUserPasswordCallback
+ implements UserPasswordCallback {
+
+ private String _username = null;
+
+ public String
+ getUsername() {
+ System.out.println("getUsername");
+ return _username;
+ }
+
+ private String _password = null;
+
+ public String
+ getPassword() {
+ System.out.println("getPassword");
+ return _password;
+ }
+
+ public boolean
+ userAllowedSave() {
+ System.out.println("userAllowedSave");
+ return true;
+ }
+
+ public boolean
+ askYesNo(String realm, String question, boolean yesIsDefault) {
+ System.out.println("askYesNo");
+ System.out.print(question + " (y/n): ");
+ String s = System.console().readLine();
+ return s.equals("y") ? true : s.equals("") ? yesIsDefault : false;
+ }
+
+ public boolean
+ prompt(String realm, String username, boolean maySave) {
+ System.out.println("prompt");
+ System.out.println("Realm: " + realm);
+ String prompt;
+ if (username == null) {
+ System.out.print("Username: ");
+ _username = System.console().readLine();
+ prompt = "Password: ";
+ } else {
+ _username = username;
+ prompt = "Password for " + username + ": ";
+ }
+ _password = new String(System.console().readPassword(prompt));
+ return maySave;
+ }
+
+ public boolean
+ prompt(String realm, String username) {
+ System.out.println("prompt not implemented!");
+ return true;
+ }
+
+ public String
+ askQuestion(String realm,
+ String question,
+ boolean showAnswer,
+ boolean maySave) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public String
+ askQuestion(String realm, String question, boolean showAnswer) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public int
+ askTrustSSLServer(String info, boolean allowPermanently) {
+ System.out.println("askTrustSSLServer not implemented!");
+ return UserPasswordCallback.AcceptTemporary;
+ }
+ }
+ }
+
+ public static void main(String argv[]) {
+
+ if (argv.length != 2) {
+ System.err.println("usage: ExampleAuthnOld <URL> <config-dir>");
+ return;
+ }
+ ISVNClient client = new SVNClient();
+ client.setPrompt(MyAuthn.getAuthn());
+ try {
+ client.setConfigDirectory(argv[1]);
+ client.revProperty(argv[0], "svn:log", Revision.getInstance(0));
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/tools/examples/ExampleAuthnVeryOld.java b/tools/examples/ExampleAuthnVeryOld.java
new file mode 100644
index 0000000..b2bd8d0
--- /dev/null
+++ b/tools/examples/ExampleAuthnVeryOld.java
@@ -0,0 +1,118 @@
+/* Username/password prompt/save using very old org.tigris.subversion API.
+
+ Compile against non-installed Subversion JavaHL build:
+
+ javac -cp subversion/bindings/javahl/classes -d subversion/bindings/javahl/classes ExampleAuthnVeryOld.java
+
+ Run:
+
+ LD_LIBRARY_PATH=subversion/libsvn_auth_gnome_keyring/.libs java -cp subversion/bindings/javahl/classes -Djava.library.path=subversion/bindings/javahl/native/.libs ExampleAuthnVeryOld <URL> <config-dir>
+
+ */
+import org.tigris.subversion.javahl.*;
+import java.io.Console;
+
+public class ExampleAuthnVeryOld {
+
+ protected static class MyAuthn {
+
+ public static PromptUserPassword3 getAuthn() {
+ return new MyUserPasswordCallback();
+ }
+
+ private static class MyUserPasswordCallback
+ implements PromptUserPassword3 {
+
+ private String _username = null;
+
+ public String
+ getUsername() {
+ System.out.println("getUsername");
+ return _username;
+ }
+
+ private String _password = null;
+
+ public String
+ getPassword() {
+ System.out.println("getPassword");
+ return _password;
+ }
+
+ public boolean
+ userAllowedSave() {
+ System.out.println("userAllowedSave");
+ return true;
+ }
+
+ public boolean
+ askYesNo(String realm, String question, boolean yesIsDefault) {
+ System.out.println("askYesNo");
+ System.out.print(question + " (y/n): ");
+ String s = System.console().readLine();
+ return s.equals("y") ? true : s.equals("") ? yesIsDefault : false;
+ }
+
+ public boolean
+ prompt(String realm, String username, boolean maySave) {
+ System.out.println("prompt");
+ System.out.println("Realm: " + realm);
+ String prompt;
+ if (username == null) {
+ System.out.print("Username: ");
+ _username = System.console().readLine();
+ prompt = "Password: ";
+ } else {
+ _username = username;
+ prompt = "Password for " + username + ": ";
+ }
+ _password = new String(System.console().readPassword(prompt));
+ return maySave;
+ }
+
+ public boolean
+ prompt(String realm, String username) {
+ System.out.println("prompt not implemented!");
+ return true;
+ }
+
+ public String
+ askQuestion(String realm,
+ String question,
+ boolean showAnswer,
+ boolean maySave) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public String
+ askQuestion(String realm, String question, boolean showAnswer) {
+ System.out.println("askQuestion not implemented!");
+ return null;
+ }
+
+ public int
+ askTrustSSLServer(String info, boolean allowPermanently) {
+ System.out.println("askTrustSSLServer not implemented!");
+ return PromptUserPassword3.AcceptTemporary;
+ }
+ }
+ }
+
+ public static void main(String argv[]) {
+
+ if (argv.length != 2) {
+ System.err.println("usage: ExampleAuthnVeryOld <URL> <config-dir>");
+ return;
+ }
+ SVNClientInterface client = new SVNClient();
+ client.setPrompt(MyAuthn.getAuthn());
+ try {
+ client.setConfigDirectory(argv[1]);
+ client.logMessages(argv[0], Revision.getInstance(0),
+ Revision.getInstance(0), false, false, 0);
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/tools/examples/svnlook.py b/tools/examples/svnlook.py
index 72043e6..bd36951 100755
--- a/tools/examples/svnlook.py
+++ b/tools/examples/svnlook.py
@@ -1,6 +1,6 @@
#!/usr/bin/env python
#
-# svnlook.py : a Python-based replacement for svnlook
+# svnlook.py : alternative svnlook in Python with library API
#
######################################################################
# Licensed to the Apache Software Foundation (ASF) under one
@@ -21,6 +21,34 @@
# under the License.
######################################################################
#
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/examples/svnlook.py $
+# $LastChangedDate: 2013-11-14 11:11:07 +0000 (Thu, 14 Nov 2013) $
+# $LastChangedRevision: 1541878 $
+
+"""
+svnlook.py can also be used as a Python module::
+
+ >>> import svnlook
+ >>> svnlook = svnlook.SVNLook("/testrepo")
+ >>> svnlook.get_author()
+ 'randomjoe'
+
+
+Accessible API::
+
+[x] author
+[x] changed
+[x] date
+[ ] diff
+[x] dirs-changed
+[ ] ids
+[x] info
+[x] log
+[ ] tree
+---
+[ ] generator API to avoid passing lists
+"""
+
import sys
import time
@@ -28,86 +56,155 @@ import os
from svn import core, fs, delta, repos
-class SVNLook:
- def __init__(self, path, cmd, rev, txn):
+class SVNLook(object):
+ def __init__(self, path, rev=None, txn=None, cmd=None):
+ """
+ path - path to repository
+ rev - revision number
+ txn - name of transaction (usually the one about to be committed)
+ cmd - if set, specifies cmd_* method to execute
+
+ txn takes precedence over rev; if both are None, inspect the head revision
+ """
path = core.svn_path_canonicalize(path)
repos_ptr = repos.open(path)
self.fs_ptr = repos.fs(repos_ptr)
+ # if set, txn takes precedence
if txn:
self.txn_ptr = fs.open_txn(self.fs_ptr, txn)
else:
self.txn_ptr = None
if rev is None:
rev = fs.youngest_rev(self.fs_ptr)
+ else:
+ rev = int(rev)
self.rev = rev
- getattr(self, 'cmd_' + cmd)()
+ if cmd != None:
+ getattr(self, 'cmd_' + cmd)()
def cmd_default(self):
self.cmd_info()
self.cmd_tree()
def cmd_author(self):
- # get the author property, or empty string if the property is not present
- author = self._get_property(core.SVN_PROP_REVISION_AUTHOR) or ''
- print(author)
+ print(self.get_author() or '')
def cmd_changed(self):
- self._print_tree(ChangedEditor, pass_root=1)
+ for status, path in self.get_changed():
+ print("%-3s %s" % (status, path))
def cmd_date(self):
- if self.txn_ptr:
+ # duplicate original svnlook format, which is
+ # 2010-02-08 21:53:15 +0200 (Mon, 08 Feb 2010)
+ secs = self.get_date(unixtime=True)
+ if secs is None:
print("")
else:
- date = self._get_property(core.SVN_PROP_REVISION_DATE)
- if date:
- aprtime = core.svn_time_from_cstring(date)
- # ### convert to a time_t; this requires intimate knowledge of
- # ### the apr_time_t type
- secs = aprtime / 1000000 # aprtime is microseconds; make seconds
-
- # assume secs in local TZ, convert to tuple, and format
- ### we don't really know the TZ, do we?
- print(time.strftime('%Y-%m-%d %H:%M', time.localtime(secs)))
- else:
- print("")
+ # convert to tuple, detect time zone and format
+ stamp = time.localtime(secs)
+ isdst = stamp.tm_isdst
+ utcoffset = -(time.altzone if (time.daylight and isdst) else time.timezone) // 60
+
+ suffix = "%+03d%02d" % (utcoffset // 60, abs(utcoffset) % 60)
+ outstr = time.strftime('%Y-%m-%d %H:%M:%S ', stamp) + suffix
+ outstr += time.strftime(' (%a, %d %b %Y)', stamp)
+ print(outstr)
+
def cmd_diff(self):
self._print_tree(DiffEditor, pass_root=1)
def cmd_dirs_changed(self):
- self._print_tree(DirsChangedEditor)
+ for dir in self.get_changed_dirs():
+ print(dir)
def cmd_ids(self):
self._print_tree(Editor, base_rev=0, pass_root=1)
def cmd_info(self):
+ """print the author, date, log_size, and log message"""
self.cmd_author()
self.cmd_date()
- self.cmd_log(1)
-
- def cmd_log(self, print_size=0):
- # get the log property, or empty string if the property is not present
- log = self._get_property(core.SVN_PROP_REVISION_LOG) or ''
- if print_size:
- print(len(log))
+ log = self.get_log() or ''
+ print(len(log))
print(log)
+ def cmd_log(self):
+ print(self.get_log() or '')
+
def cmd_tree(self):
self._print_tree(Editor, base_rev=0)
+
+ # --- API getters
+ def get_author(self):
+ """return string with the author name or None"""
+ return self._get_property(core.SVN_PROP_REVISION_AUTHOR)
+
+ def get_changed(self):
+ """return list of tuples (status, path)"""
+ ret = []
+ def list_callback(status, path):
+ ret.append( (status, path) )
+ self._walk_tree(ChangedEditor, pass_root=1, callback=list_callback)
+ return ret
+
+ def get_date(self, unixtime=False):
+ """return commit timestamp in RFC 3339 format (2010-02-08T20:37:25.195000Z)
+ if unixtime is True, return unix timestamp
+ return None for a txn, or if date property is not set
+ """
+ if self.txn_ptr:
+ return None
+
+ date = self._get_property(core.SVN_PROP_REVISION_DATE)
+ if not unixtime or date == None:
+ return date
+
+ # convert to unix time
+ aprtime = core.svn_time_from_cstring(date)
+ # ### convert to a time_t; this requires intimate knowledge of
+ # ### the apr_time_t type
+ secs = aprtime / 1000000 # aprtime is microseconds; make seconds
+ return secs
+
+ def get_changed_dirs(self):
+ """return list of changed dirs
+ dir names end with trailing forward slash even on windows
+ """
+ dirlist = []
+ def list_callback(item):
+ dirlist.append(item)
+ self._walk_tree(DirsChangedEditor, callback=list_callback)
+ return dirlist
+
+ def get_log(self):
+ """return log message string or None if not present"""
+ return self._get_property(core.SVN_PROP_REVISION_LOG)
+
+
+ # --- Internal helpers
def _get_property(self, name):
if self.txn_ptr:
return fs.txn_prop(self.txn_ptr, name)
return fs.revision_prop(self.fs_ptr, self.rev, name)
def _print_tree(self, e_factory, base_rev=None, pass_root=0):
+ def print_callback(msg):
+ print(msg)
+ self._walk_tree(e_factory, base_rev, pass_root, callback=print_callback)
+
+ # svn fs, delta, repos calls need review according to DeltaEditor documentation
+ def _walk_tree(self, e_factory, base_rev=None, pass_root=0, callback=None):
if base_rev is None:
# a specific base rev was not provided. use the transaction base,
# or the previous revision
if self.txn_ptr:
base_rev = fs.txn_base_revision(self.txn_ptr)
+ elif self.rev == 0:
+ base_rev = 0
else:
base_rev = self.rev - 1
@@ -120,10 +217,13 @@ class SVNLook:
# the base of the comparison
base_root = fs.revision_root(self.fs_ptr, base_rev)
+ if callback == None:
+ callback = lambda msg: None
+
if pass_root:
- editor = e_factory(root, base_root)
+ editor = e_factory(root, base_root, callback)
else:
- editor = e_factory()
+ editor = e_factory(callback=callback)
# construct the editor for printing these things out
e_ptr, e_baton = delta.make_editor(editor)
@@ -135,8 +235,17 @@ class SVNLook:
e_ptr, e_baton, authz_cb, 0, 1, 0, 0)
+# ---------------------------------------------------------
+# Delta Editors. For documentation see:
+# http://subversion.apache.org/docs/community-guide/#docs
+
+# this one doesn't process delete_entry, change_dir_prop, apply_text_delta,
+# change_file_prop, close_file, close_edit, abort_edit
+# ?set_target_revision
+# need review
class Editor(delta.Editor):
- def __init__(self, root=None, base_root=None):
+ def __init__(self, root=None, base_root=None, callback=None):
+ """callback argument is unused for this editor"""
self.root = root
# base_root ignored
@@ -172,7 +281,14 @@ class Editor(delta.Editor):
return ' <%s>' % fs.unparse_id(id)
return ''
+# doesn't process close_directory, apply_text_delta,
+# change_file_prop, close_file, close_edit, abort_edit
+# ?set_target_revision
class DirsChangedEditor(delta.Editor):
+ """print names of changed dirs, callback(dir) is a printer function"""
+ def __init__(self, callback):
+ self.callback = callback
+
def open_root(self, base_revision, dir_pool):
return [ 1, '' ]
@@ -201,13 +317,15 @@ class DirsChangedEditor(delta.Editor):
def _dir_changed(self, baton):
if baton[0]:
# the directory hasn't been printed yet. do it.
- print(baton[1] + '/')
+ self.callback(baton[1] + '/')
baton[0] = 0
class ChangedEditor(delta.Editor):
- def __init__(self, root, base_root):
+ def __init__(self, root, base_root, callback):
+ """callback(status, path) is a printer function"""
self.root = root
self.base_root = base_root
+ self.callback = callback
def open_root(self, base_revision, dir_pool):
return [ 1, '' ]
@@ -215,13 +333,13 @@ class ChangedEditor(delta.Editor):
def delete_entry(self, path, revision, parent_baton, pool):
### need more logic to detect 'replace'
if fs.is_dir(self.base_root, '/' + path):
- print('D ' + path + '/')
+ self.callback('D', path + '/')
else:
- print('D ' + path)
+ self.callback('D', path)
def add_directory(self, path, parent_baton,
copyfrom_path, copyfrom_revision, dir_pool):
- print('A ' + path + '/')
+ self.callback('A', path + '/')
return [ 0, path ]
def open_directory(self, path, parent_baton, base_revision, dir_pool):
@@ -230,12 +348,12 @@ class ChangedEditor(delta.Editor):
def change_dir_prop(self, dir_baton, name, value, pool):
if dir_baton[0]:
# the directory hasn't been printed yet. do it.
- print('_U ' + dir_baton[1] + '/')
+ self.callback('_U', dir_baton[1] + '/')
dir_baton[0] = 0
def add_file(self, path, parent_baton,
copyfrom_path, copyfrom_revision, file_pool):
- print('A ' + path)
+ self.callback('A', path)
return [ '_', ' ', None ]
def open_file(self, path, parent_baton, base_revision, file_pool):
@@ -257,11 +375,12 @@ class ChangedEditor(delta.Editor):
status = text_mod + prop_mod
# was there some kind of change?
if status != '_ ':
- print(status + ' ' + path)
+ self.callback(status.rstrip(), path)
class DiffEditor(delta.Editor):
- def __init__(self, root, base_root):
+ def __init__(self, root, base_root, callback=None):
+ """callback argument is unused for this editor"""
self.root = root
self.base_root = base_root
self.target_revision = 0
@@ -435,7 +554,7 @@ def main():
if not hasattr(SVNLook, 'cmd_' + cmd):
usage(1)
- SVNLook(sys.argv[1], cmd, rev, txn)
+ SVNLook(sys.argv[1], rev, txn, cmd)
if __name__ == '__main__':
main()
diff --git a/tools/examples/walk-config-auth.py b/tools/examples/walk-config-auth.py
index b3298f3..5841b6c 100755
--- a/tools/examples/walk-config-auth.py
+++ b/tools/examples/walk-config-auth.py
@@ -37,8 +37,8 @@ def print_help():
def show_creds(hash):
hash_keys = hash.keys()
- maxkeylen = max(map(lambda x: len(x), hash_keys))
- maxvallen = max(map(lambda x: len(x), hash.values()))
+ maxkeylen = max(map(len, hash_keys))
+ maxvallen = max(map(len, hash.values()))
hash_keys.sort()
sys.stdout.write("+")
sys.stdout.write("-" * (maxkeylen + 2))
diff --git a/tools/hook-scripts/commit-access-control.pl.in b/tools/hook-scripts/commit-access-control.pl.in
index e2a968b..d30f23a 100755
--- a/tools/hook-scripts/commit-access-control.pl.in
+++ b/tools/hook-scripts/commit-access-control.pl.in
@@ -6,7 +6,7 @@
# commit in repository REPOS using the permissions listed in the
# configuration file CONF_FILE.
#
-# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/hook-scripts/commit-access-control.pl.in $
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/hook-scripts/commit-access-control.pl.in $
# $LastChangedDate: 2009-11-16 19:07:17 +0000 (Mon, 16 Nov 2009) $
# $LastChangedBy: hwright $
# $LastChangedRevision: 880911 $
diff --git a/tools/hook-scripts/mailer/mailer.py b/tools/hook-scripts/mailer/mailer.py
index 65146f1..fd41714 100755
--- a/tools/hook-scripts/mailer/mailer.py
+++ b/tools/hook-scripts/mailer/mailer.py
@@ -22,10 +22,10 @@
#
# mailer.py: send email describing a commit
#
-# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/hook-scripts/mailer/mailer.py $
-# $LastChangedDate: 2013-04-12 07:44:37 +0000 (Fri, 12 Apr 2013) $
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/hook-scripts/mailer/mailer.py $
+# $LastChangedDate: 2015-02-13 11:17:40 +0000 (Fri, 13 Feb 2015) $
# $LastChangedBy: rhuijben $
-# $LastChangedRevision: 1467191 $
+# $LastChangedRevision: 1659509 $
#
# USAGE: mailer.py commit REPOS REVISION [CONFIG-FILE]
# mailer.py propchange REPOS REVISION AUTHOR REVPROPNAME [CONFIG-FILE]
@@ -236,16 +236,30 @@ class MailedOutput(OutputBase):
and self.reply_to[2] == ']':
self.reply_to = self.reply_to[3:]
+ def _rfc2047_encode(self, hdr):
+ # Return the result of splitting HDR into tokens (on space
+ # characters), encoding (per RFC2047) each token as necessary, and
+ # slapping 'em back to together again.
+ from email.Header import Header
+
+ def _maybe_encode_header(hdr_token):
+ try:
+ hdr_token.encode('ascii')
+ return hdr_token
+ except UnicodeError:
+ return Header(hdr_token, 'utf-8').encode()
+
+ return ' '.join(map(_maybe_encode_header, hdr.split()))
+
def mail_headers(self, group, params):
from email import Utils
- subject = self.make_subject(group, params)
- try:
- subject.encode('ascii')
- except UnicodeError:
- from email.Header import Header
- subject = Header(subject, 'utf-8').encode()
- hdrs = 'From: %s\n' \
- 'To: %s\n' \
+
+ subject = self._rfc2047_encode(self.make_subject(group, params))
+ from_hdr = self._rfc2047_encode(self.from_addr)
+ to_hdr = self._rfc2047_encode(', '.join(self.to_addrs))
+
+ hdrs = 'From: %s\n' \
+ 'To: %s\n' \
'Subject: %s\n' \
'Date: %s\n' \
'Message-ID: %s\n' \
@@ -256,7 +270,7 @@ class MailedOutput(OutputBase):
'X-Svn-Commit-Author: %s\n' \
'X-Svn-Commit-Revision: %d\n' \
'X-Svn-Commit-Repository: %s\n' \
- % (self.from_addr, ', '.join(self.to_addrs), subject,
+ % (from_hdr, to_hdr, subject,
Utils.formatdate(), Utils.make_msgid(), group,
self.repos.author or 'no_author', self.repos.rev,
os.path.basename(self.repos.repos_dir))
diff --git a/tools/hook-scripts/mailer/tests/mailer-init.sh b/tools/hook-scripts/mailer/tests/mailer-init.sh
index ef961b4..d0a4a79 100755
--- a/tools/hook-scripts/mailer/tests/mailer-init.sh
+++ b/tools/hook-scripts/mailer/tests/mailer-init.sh
@@ -101,15 +101,15 @@ echo change C6 >> dir6/file4
svn commit -m "copy dir, then make a change"
# add a binary file and set property to binary value
-echo -e "\x00\x01\x02\x03\x04" > file11
+printf "\x00\x01\x02\x03\x04\n" > file11
svn add file11
svn ps svn:mime-type application/octect-stream file11
-svn ps prop2 -F file11 file9
+svn ps prop2 -F file11 file9
svn commit -m "add binary file"
# change the binary file and set property to non binary value
-echo -e "\x20\x01\x02\x20" > file11
-svn ps prop2 propval2 file9
+printf "\x20\x01\x02\x20\n" > file11
+svn ps prop2 propval2 file9
svn commit -m "change binary file"
# tweak the commit dates to known quantities
diff --git a/tools/hook-scripts/svn2feed.py b/tools/hook-scripts/svn2feed.py
index b4ba2ac..ecb8605 100755
--- a/tools/hook-scripts/svn2feed.py
+++ b/tools/hook-scripts/svn2feed.py
@@ -70,7 +70,7 @@ Options:
# is actually set only on initial feed creation, and thereafter simply
# re-used from the pickle each time.
-# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/hook-scripts/svn2feed.py $
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/hook-scripts/svn2feed.py $
# $LastChangedDate: 2009-11-16 19:07:17 +0000 (Mon, 16 Nov 2009) $
# $LastChangedBy: hwright $
# $LastChangedRevision: 880911 $
diff --git a/tools/hook-scripts/svnperms.py b/tools/hook-scripts/svnperms.py
index 14fbf7a..8fae998 100755
--- a/tools/hook-scripts/svnperms.py
+++ b/tools/hook-scripts/svnperms.py
@@ -21,7 +21,7 @@
#
#
-# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/hook-scripts/svnperms.py $
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/hook-scripts/svnperms.py $
# $LastChangedDate: 2011-07-12 18:37:44 +0000 (Tue, 12 Jul 2011) $
# $LastChangedBy: blair $
# $LastChangedRevision: 1145712 $
diff --git a/tools/po/po-update.sh b/tools/po/po-update.sh
index 9891531..2aca523 100755
--- a/tools/po/po-update.sh
+++ b/tools/po/po-update.sh
@@ -93,8 +93,8 @@ update_po()
# GNU gettext-tools 0.14.6 implementation) inverts the order of obsolete
# messages every time it is run. Therefore, run it twice, to invert and
# then re-invert, to minimize spurious diffs.
- $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot
- $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot
+ $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot
+ $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot
done )
}
diff --git a/tools/server-side/fsfs-reshard.py b/tools/server-side/fsfs-reshard.py
index 16d2fcd..bd82080 100755
--- a/tools/server-side/fsfs-reshard.py
+++ b/tools/server-side/fsfs-reshard.py
@@ -46,7 +46,7 @@
# under the License.
# ====================================================================
#
-# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.8.x/tools/server-side/fsfs-reshard.py $
+# $HeadURL: https://svn.apache.org/repos/asf/subversion/branches/1.9.x/tools/server-side/fsfs-reshard.py $
# $LastChangedDate: 2009-11-16 19:07:17 +0000 (Mon, 16 Nov 2009) $
# $LastChangedBy: hwright $
# $LastChangedRevision: 880911 $
diff --git a/tools/server-side/fsfs-stats.c b/tools/server-side/fsfs-stats.c
deleted file mode 100644
index 80a09f9..0000000
--- a/tools/server-side/fsfs-stats.c
+++ /dev/null
@@ -1,2181 +0,0 @@
-/* fsfs-stats.c -- gather size statistics on FSFS repositories
- *
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-
-#include <assert.h>
-
-#include <apr.h>
-#include <apr_general.h>
-#include <apr_file_io.h>
-#include <apr_poll.h>
-
-#include "svn_pools.h"
-#include "svn_diff.h"
-#include "svn_io.h"
-#include "svn_utf.h"
-#include "svn_dirent_uri.h"
-#include "svn_sorts.h"
-#include "svn_delta.h"
-#include "svn_hash.h"
-#include "svn_cache_config.h"
-
-#include "private/svn_string_private.h"
-#include "private/svn_subr_private.h"
-#include "private/svn_dep_compat.h"
-#include "private/svn_cache.h"
-
-#ifndef _
-#define _(x) x
-#endif
-
-#define ERROR_TAG "fsfs-stats: "
-
-/* We group representations into 2x2 different kinds plus one default:
- * [dir / file] x [text / prop]. The assignment is done by the first node
- * that references the respective representation.
- */
-typedef enum rep_kind_t
-{
- /* The representation is _directly_ unused, i.e. not referenced by any
- * noderev. However, some other representation may use it as delta base.
- * null value. Should not occur in real-word repositories. */
- unused_rep,
-
- /* a properties on directory rep */
- dir_property_rep,
-
- /* a properties on file rep */
- file_property_rep,
-
- /* a directory rep */
- dir_rep,
-
- /* a file rep */
- file_rep
-} rep_kind_t;
-
-/* A representation fragment.
- */
-typedef struct representation_t
-{
- /* absolute offset in the file */
- apr_size_t offset;
-
- /* item length in bytes */
- apr_size_t size;
-
- /* item length after de-deltification */
- apr_size_t expanded_size;
-
- /* deltification base, or NULL if there is none */
- struct representation_t *delta_base;
-
- /* revision that contains this representation
- * (may be referenced by other revisions, though) */
- svn_revnum_t revision;
-
- /* number of nodes that reference this representation */
- apr_uint32_t ref_count;
-
- /* length of the PLAIN / DELTA line in the source file in bytes */
- apr_uint16_t header_size;
-
- /* classification of the representation. values of rep_kind_t */
- char kind;
-
- /* the source content has a PLAIN header, so we may simply copy the
- * source content into the target */
- char is_plain;
-
-} representation_t;
-
-/* Represents a single revision.
- * There will be only one instance per revision. */
-typedef struct revision_info_t
-{
- /* number of this revision */
- svn_revnum_t revision;
-
- /* pack file offset (manifest value), 0 for non-packed files */
- apr_size_t offset;
-
- /* offset of the changes list relative to OFFSET */
- apr_size_t changes;
-
- /* length of the changes list on bytes */
- apr_size_t changes_len;
-
- /* offset of the changes list relative to OFFSET */
- apr_size_t change_count;
-
- /* first offset behind the revision data in the pack file (file length
- * for non-packed revs) */
- apr_size_t end;
-
- /* number of directory noderevs in this revision */
- apr_size_t dir_noderev_count;
-
- /* number of file noderevs in this revision */
- apr_size_t file_noderev_count;
-
- /* total size of directory noderevs (i.e. the structs - not the rep) */
- apr_size_t dir_noderev_size;
-
- /* total size of file noderevs (i.e. the structs - not the rep) */
- apr_size_t file_noderev_size;
-
- /* all representation_t of this revision (in no particular order),
- * i.e. those that point back to this struct */
- apr_array_header_t *representations;
-} revision_info_t;
-
-/* Data type to identify a representation. It will be used to address
- * cached combined (un-deltified) windows.
- */
-typedef struct window_cache_key_t
-{
- /* revision of the representation */
- svn_revnum_t revision;
-
- /* its offset */
- apr_size_t offset;
-} window_cache_key_t;
-
-/* Description of one large representation. It's content will be reused /
- * overwritten when it gets replaced by an even larger representation.
- */
-typedef struct large_change_info_t
-{
- /* size of the (deltified) representation */
- apr_size_t size;
-
- /* revision of the representation */
- svn_revnum_t revision;
-
- /* node path. "" for unused instances */
- svn_stringbuf_t *path;
-} large_change_info_t;
-
-/* Container for the largest representations found so far. The capacity
- * is fixed and entries will be inserted by reusing the last one and
- * reshuffling the entry pointers.
- */
-typedef struct largest_changes_t
-{
- /* number of entries allocated in CHANGES */
- apr_size_t count;
-
- /* size of the smallest change */
- apr_size_t min_size;
-
- /* changes kept in this struct */
- large_change_info_t **changes;
-} largest_changes_t;
-
-/* Information we gather per size bracket.
- */
-typedef struct histogram_line_t
-{
- /* number of item that fall into this bracket */
- apr_int64_t count;
-
- /* sum of values in this bracket */
- apr_int64_t sum;
-} histogram_line_t;
-
-/* A histogram of 64 bit integer values.
- */
-typedef struct histogram_t
-{
- /* total sum over all brackets */
- histogram_line_t total;
-
- /* one bracket per binary step.
- * line[i] is the 2^(i-1) <= x < 2^i bracket */
- histogram_line_t lines[64];
-} histogram_t;
-
-/* Information we collect per file ending.
- */
-typedef struct extension_info_t
-{
- /* file extension, including leading "."
- * "(none)" in the container for files w/o extension. */
- const char *extension;
-
- /* histogram of representation sizes */
- histogram_t rep_histogram;
-
- /* histogram of sizes of changed files */
- histogram_t node_histogram;
-} extension_info_t;
-
-/* Root data structure containing all information about a given repository.
- */
-typedef struct fs_fs_t
-{
- /* repository to reorg */
- const char *path;
-
- /* revision to start at (must be 0, ATM) */
- svn_revnum_t start_revision;
-
- /* FSFS format number */
- int format;
-
- /* highest revision number in the repo */
- svn_revnum_t max_revision;
-
- /* first non-packed revision */
- svn_revnum_t min_unpacked_rev;
-
- /* sharing size*/
- int max_files_per_dir;
-
- /* all revisions */
- apr_array_header_t *revisions;
-
- /* empty representation.
- * Used as a dummy base for DELTA reps without base. */
- representation_t *null_base;
-
- /* undeltified txdelta window cache */
- svn_cache__t *window_cache;
-
- /* track the biggest contributors to repo size */
- largest_changes_t *largest_changes;
-
- /* history of representation sizes */
- histogram_t rep_size_histogram;
-
- /* history of sizes of changed nodes */
- histogram_t node_size_histogram;
-
- /* history of unused representations */
- histogram_t unused_rep_histogram;
-
- /* history of sizes of changed files */
- histogram_t file_histogram;
-
- /* history of sizes of file representations */
- histogram_t file_rep_histogram;
-
- /* history of sizes of changed file property sets */
- histogram_t file_prop_histogram;
-
- /* history of sizes of file property representations */
- histogram_t file_prop_rep_histogram;
-
- /* history of sizes of changed directories (in bytes) */
- histogram_t dir_histogram;
-
- /* history of sizes of directories representations */
- histogram_t dir_rep_histogram;
-
- /* history of sizes of changed directories property sets */
- histogram_t dir_prop_histogram;
-
- /* history of sizes of directories property representations */
- histogram_t dir_prop_rep_histogram;
-
- /* extension -> extension_info_t* map */
- apr_hash_t *by_extension;
-} fs_fs_t;
-
-/* Return the rev pack folder for revision REV in FS.
- */
-static const char *
-get_pack_folder(fs_fs_t *fs,
- svn_revnum_t rev,
- apr_pool_t *pool)
-{
- return apr_psprintf(pool, "%s/db/revs/%ld.pack",
- fs->path, rev / fs->max_files_per_dir);
-}
-
-/* Return the path of the file containing revision REV in FS.
- */
-static const char *
-rev_or_pack_file_name(fs_fs_t *fs,
- svn_revnum_t rev,
- apr_pool_t *pool)
-{
- return fs->min_unpacked_rev > rev
- ? svn_dirent_join(get_pack_folder(fs, rev, pool), "pack", pool)
- : apr_psprintf(pool, "%s/db/revs/%ld/%ld", fs->path,
- rev / fs->max_files_per_dir, rev);
-}
-
-/* Open the file containing revision REV in FS and return it in *FILE.
- */
-static svn_error_t *
-open_rev_or_pack_file(apr_file_t **file,
- fs_fs_t *fs,
- svn_revnum_t rev,
- apr_pool_t *pool)
-{
- return svn_io_file_open(file,
- rev_or_pack_file_name(fs, rev, pool),
- APR_READ | APR_BUFFERED,
- APR_OS_DEFAULT,
- pool);
-}
-
-/* Return the length of FILE in *FILE_SIZE. Use POOL for allocations.
-*/
-static svn_error_t *
-get_file_size(apr_off_t *file_size,
- apr_file_t *file,
- apr_pool_t *pool)
-{
- apr_finfo_t finfo;
-
- SVN_ERR(svn_io_file_info_get(&finfo, APR_FINFO_SIZE, file, pool));
-
- *file_size = finfo.size;
- return SVN_NO_ERROR;
-}
-
-/* Get the file content of revision REVISION in FS and return it in *CONTENT.
- * Read the LEN bytes starting at file OFFSET. When provided, use FILE as
- * packed or plain rev file.
- * Use POOL for temporary allocations.
- */
-static svn_error_t *
-get_content(svn_stringbuf_t **content,
- apr_file_t *file,
- fs_fs_t *fs,
- svn_revnum_t revision,
- apr_off_t offset,
- apr_size_t len,
- apr_pool_t *pool)
-{
- apr_pool_t * file_pool = svn_pool_create(pool);
- apr_size_t large_buffer_size = 0x10000;
-
- if (file == NULL)
- SVN_ERR(open_rev_or_pack_file(&file, fs, revision, file_pool));
-
- *content = svn_stringbuf_create_ensure(len, pool);
- (*content)->len = len;
-
-#if APR_VERSION_AT_LEAST(1,3,0)
- /* for better efficiency use larger buffers on large reads */
- if ( (len >= large_buffer_size)
- && (apr_file_buffer_size_get(file) < large_buffer_size))
- apr_file_buffer_set(file,
- apr_palloc(apr_file_pool_get(file),
- large_buffer_size),
- large_buffer_size);
-#endif
-
- SVN_ERR(svn_io_file_seek(file, APR_SET, &offset, pool));
- SVN_ERR(svn_io_file_read_full2(file, (*content)->data, len,
- NULL, NULL, pool));
- svn_pool_destroy(file_pool);
-
- return SVN_NO_ERROR;
-}
-
-/* In *RESULT, return the cached txdelta window stored in REPRESENTATION
- * within FS. If that has not been found in cache, return NULL.
- * Allocate the result in POOL.
- */
-static svn_error_t *
-get_cached_window(svn_stringbuf_t **result,
- fs_fs_t *fs,
- representation_t *representation,
- apr_pool_t *pool)
-{
- svn_boolean_t found = FALSE;
- window_cache_key_t key;
- key.revision = representation->revision;
- key.offset = representation->offset;
-
- *result = NULL;
- return svn_error_trace(svn_cache__get((void**)result, &found,
- fs->window_cache,
- &key, pool));
-}
-
-/* Cache the undeltified txdelta WINDOW for REPRESENTATION within FS.
- * Use POOL for temporaries.
- */
-static svn_error_t *
-set_cached_window(fs_fs_t *fs,
- representation_t *representation,
- svn_stringbuf_t *window,
- apr_pool_t *pool)
-{
- /* select entry */
- window_cache_key_t key;
- key.revision = representation->revision;
- key.offset = representation->offset;
-
- return svn_error_trace(svn_cache__set(fs->window_cache, &key, window,
- pool));
-}
-
-/* Initialize the LARGEST_CHANGES member in FS with a capacity of COUNT
- * entries. Use POOL for allocations.
- */
-static void
-initialize_largest_changes(fs_fs_t *fs,
- apr_size_t count,
- apr_pool_t *pool)
-{
- apr_size_t i;
-
- fs->largest_changes = apr_pcalloc(pool, sizeof(*fs->largest_changes));
- fs->largest_changes->count = count;
- fs->largest_changes->min_size = 1;
- fs->largest_changes->changes
- = apr_palloc(pool, count * sizeof(*fs->largest_changes->changes));
-
- /* allocate *all* entries before the path stringbufs. This increases
- * cache locality and enhances performance significantly. */
- for (i = 0; i < count; ++i)
- fs->largest_changes->changes[i]
- = apr_palloc(pool, sizeof(**fs->largest_changes->changes));
-
- /* now initialize them and allocate the stringbufs */
- for (i = 0; i < count; ++i)
- {
- fs->largest_changes->changes[i]->size = 0;
- fs->largest_changes->changes[i]->revision = SVN_INVALID_REVNUM;
- fs->largest_changes->changes[i]->path
- = svn_stringbuf_create_ensure(1024, pool);
- }
-}
-
-/* Add entry for SIZE to HISTOGRAM.
- */
-static void
-add_to_histogram(histogram_t *histogram,
- apr_int64_t size)
-{
- apr_int64_t shift = 0;
-
- while (((apr_int64_t)(1) << shift) <= size)
- shift++;
-
- histogram->total.count++;
- histogram->total.sum += size;
- histogram->lines[(apr_size_t)shift].count++;
- histogram->lines[(apr_size_t)shift].sum += size;
-}
-
-/* Update data aggregators in FS with this representation of type KIND, on-
- * disk REP_SIZE and expanded node size EXPANDED_SIZE for PATH in REVSION.
- */
-static void
-add_change(fs_fs_t *fs,
- apr_int64_t rep_size,
- apr_int64_t expanded_size,
- svn_revnum_t revision,
- const char *path,
- rep_kind_t kind)
-{
- /* identify largest reps */
- if (rep_size >= fs->largest_changes->min_size)
- {
- apr_size_t i;
- large_change_info_t *info
- = fs->largest_changes->changes[fs->largest_changes->count - 1];
- info->size = rep_size;
- info->revision = revision;
- svn_stringbuf_set(info->path, path);
-
- /* linear insertion but not too bad since count is low and insertions
- * near the end are more likely than close to front */
- for (i = fs->largest_changes->count - 1; i > 0; --i)
- if (fs->largest_changes->changes[i-1]->size >= rep_size)
- break;
- else
- fs->largest_changes->changes[i] = fs->largest_changes->changes[i-1];
-
- fs->largest_changes->changes[i] = info;
- fs->largest_changes->min_size
- = fs->largest_changes->changes[fs->largest_changes->count-1]->size;
- }
-
- /* global histograms */
- add_to_histogram(&fs->rep_size_histogram, rep_size);
- add_to_histogram(&fs->node_size_histogram, expanded_size);
-
- /* specific histograms by type */
- switch (kind)
- {
- case unused_rep: add_to_histogram(&fs->unused_rep_histogram,
- rep_size);
- break;
- case dir_property_rep: add_to_histogram(&fs->dir_prop_rep_histogram,
- rep_size);
- add_to_histogram(&fs->dir_prop_histogram,
- expanded_size);
- break;
- case file_property_rep: add_to_histogram(&fs->file_prop_rep_histogram,
- rep_size);
- add_to_histogram(&fs->file_prop_histogram,
- expanded_size);
- break;
- case dir_rep: add_to_histogram(&fs->dir_rep_histogram,
- rep_size);
- add_to_histogram(&fs->dir_histogram,
- expanded_size);
- break;
- case file_rep: add_to_histogram(&fs->file_rep_histogram,
- rep_size);
- add_to_histogram(&fs->file_histogram,
- expanded_size);
- break;
- }
-
- /* by extension */
- if (kind == file_rep)
- {
- /* determine extension */
- extension_info_t *info;
- const char * file_name = strrchr(path, '/');
- const char * extension = file_name ? strrchr(file_name, '.') : NULL;
-
- if (extension == NULL || extension == file_name + 1)
- extension = "(none)";
-
- /* get / auto-insert entry for this extension */
- info = apr_hash_get(fs->by_extension, extension, APR_HASH_KEY_STRING);
- if (info == NULL)
- {
- apr_pool_t *pool = apr_hash_pool_get(fs->by_extension);
- info = apr_pcalloc(pool, sizeof(*info));
- info->extension = apr_pstrdup(pool, extension);
-
- apr_hash_set(fs->by_extension, info->extension,
- APR_HASH_KEY_STRING, info);
- }
-
- /* update per-extension histogram */
- add_to_histogram(&info->node_histogram, expanded_size);
- add_to_histogram(&info->rep_histogram, rep_size);
- }
-}
-
-/* Given rev pack PATH in FS, read the manifest file and return the offsets
- * in *MANIFEST. Use POOL for allocations.
- */
-static svn_error_t *
-read_manifest(apr_array_header_t **manifest,
- fs_fs_t *fs,
- const char *path,
- apr_pool_t *pool)
-{
- svn_stream_t *manifest_stream;
- apr_pool_t *iterpool;
-
- /* Open the manifest file. */
- SVN_ERR(svn_stream_open_readonly(&manifest_stream,
- svn_dirent_join(path, "manifest", pool),
- pool, pool));
-
- /* While we're here, let's just read the entire manifest file into an array,
- so we can cache the entire thing. */
- iterpool = svn_pool_create(pool);
- *manifest = apr_array_make(pool, fs->max_files_per_dir, sizeof(apr_size_t));
- while (1)
- {
- svn_stringbuf_t *sb;
- svn_boolean_t eof;
- apr_uint64_t val;
- svn_error_t *err;
-
- svn_pool_clear(iterpool);
- SVN_ERR(svn_stream_readline(manifest_stream, &sb, "\n", &eof, iterpool));
- if (eof)
- break;
-
- err = svn_cstring_strtoui64(&val, sb->data, 0, APR_SIZE_MAX, 10);
- if (err)
- return svn_error_createf(SVN_ERR_FS_CORRUPT, err,
- _("Manifest offset '%s' too large"),
- sb->data);
- APR_ARRAY_PUSH(*manifest, apr_size_t) = (apr_size_t)val;
- }
- svn_pool_destroy(iterpool);
-
- return svn_stream_close(manifest_stream);
-}
-
-/* Read header information for the revision stored in FILE_CONTENT (one
- * whole revision). Return the offsets within FILE_CONTENT for the
- * *ROOT_NODEREV, the list of *CHANGES and its len in *CHANGES_LEN.
- * Use POOL for temporary allocations. */
-static svn_error_t *
-read_revision_header(apr_size_t *changes,
- apr_size_t *changes_len,
- apr_size_t *root_noderev,
- svn_stringbuf_t *file_content,
- apr_pool_t *pool)
-{
- char buf[64];
- const char *line;
- char *space;
- apr_uint64_t val;
- apr_size_t len;
-
- /* Read in this last block, from which we will identify the last line. */
- len = sizeof(buf);
- if (len > file_content->len)
- len = file_content->len;
-
- memcpy(buf, file_content->data + file_content->len - len, len);
-
- /* The last byte should be a newline. */
- if (buf[(apr_ssize_t)len - 1] != '\n')
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL,
- _("Revision lacks trailing newline"));
-
- /* Look for the next previous newline. */
- buf[len - 1] = 0;
- line = strrchr(buf, '\n');
- if (line == NULL)
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL,
- _("Final line in revision file longer "
- "than 64 characters"));
-
- space = strchr(line, ' ');
- if (space == NULL)
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL,
- _("Final line in revision file missing space"));
-
- /* terminate the header line */
- *space = 0;
-
- /* extract information */
- SVN_ERR(svn_cstring_strtoui64(&val, line+1, 0, APR_SIZE_MAX, 10));
- *root_noderev = (apr_size_t)val;
- SVN_ERR(svn_cstring_strtoui64(&val, space+1, 0, APR_SIZE_MAX, 10));
- *changes = (apr_size_t)val;
- *changes_len = file_content->len - *changes - (buf + len - line) + 1;
-
- return SVN_NO_ERROR;
-}
-
-/* Read the FSFS format number and sharding size from the format file at
- * PATH and return it in *PFORMAT and *MAX_FILES_PER_DIR respectively.
- * Use POOL for temporary allocations.
- */
-static svn_error_t *
-read_format(int *pformat, int *max_files_per_dir,
- const char *path, apr_pool_t *pool)
-{
- svn_error_t *err;
- apr_file_t *file;
- char buf[80];
- apr_size_t len;
-
- /* open format file and read the first line */
- err = svn_io_file_open(&file, path, APR_READ | APR_BUFFERED,
- APR_OS_DEFAULT, pool);
- if (err && APR_STATUS_IS_ENOENT(err->apr_err))
- {
- /* Treat an absent format file as format 1. Do not try to
- create the format file on the fly, because the repository
- might be read-only for us, or this might be a read-only
- operation, and the spirit of FSFS is to make no changes
- whatseover in read-only operations. See thread starting at
- http://subversion.tigris.org/servlets/ReadMsg?list=dev&msgNo=97600
- for more. */
- svn_error_clear(err);
- *pformat = 1;
- *max_files_per_dir = 0;
-
- return SVN_NO_ERROR;
- }
- SVN_ERR(err);
-
- len = sizeof(buf);
- err = svn_io_read_length_line(file, buf, &len, pool);
- if (err && APR_STATUS_IS_EOF(err->apr_err))
- {
- /* Return a more useful error message. */
- svn_error_clear(err);
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("Can't read first line of format file '%s'"),
- svn_dirent_local_style(path, pool));
- }
- SVN_ERR(err);
-
- /* Check that the first line contains only digits. */
- SVN_ERR(svn_cstring_atoi(pformat, buf));
-
- /* Set the default values for anything that can be set via an option. */
- *max_files_per_dir = 0;
-
- /* Read any options. */
- while (1)
- {
- len = sizeof(buf);
- err = svn_io_read_length_line(file, buf, &len, pool);
- if (err && APR_STATUS_IS_EOF(err->apr_err))
- {
- /* No more options; that's okay. */
- svn_error_clear(err);
- break;
- }
- SVN_ERR(err);
-
- if (strncmp(buf, "layout ", 7) == 0)
- {
- if (strcmp(buf+7, "linear") == 0)
- {
- *max_files_per_dir = 0;
- continue;
- }
-
- if (strncmp(buf+7, "sharded ", 8) == 0)
- {
- /* Check that the argument is numeric. */
- SVN_ERR(svn_cstring_atoi(max_files_per_dir, buf + 15));
- continue;
- }
- }
-
- return svn_error_createf(SVN_ERR_BAD_VERSION_FILE_FORMAT, NULL,
- _("'%s' contains invalid filesystem format option '%s'"),
- svn_dirent_local_style(path, pool), buf);
- }
-
- return svn_io_file_close(file, pool);
-}
-
-/* Read the content of the file at PATH and return it in *RESULT.
- * Use POOL for temporary allocations.
- */
-static svn_error_t *
-read_number(svn_revnum_t *result, const char *path, apr_pool_t *pool)
-{
- svn_stringbuf_t *content;
- apr_uint64_t number;
-
- SVN_ERR(svn_stringbuf_from_file2(&content, path, pool));
-
- content->data[content->len-1] = 0;
- SVN_ERR(svn_cstring_strtoui64(&number, content->data, 0, LONG_MAX, 10));
- *result = (svn_revnum_t)number;
-
- return SVN_NO_ERROR;
-}
-
-/* Create *FS for the repository at PATH and read the format and size info.
- * Use POOL for temporary allocations.
- */
-static svn_error_t *
-fs_open(fs_fs_t **fs, const char *path, apr_pool_t *pool)
-{
- *fs = apr_pcalloc(pool, sizeof(**fs));
- (*fs)->path = apr_pstrdup(pool, path);
- (*fs)->max_files_per_dir = 1000;
-
- /* Read the FS format number. */
- SVN_ERR(read_format(&(*fs)->format,
- &(*fs)->max_files_per_dir,
- svn_dirent_join(path, "db/format", pool),
- pool));
- if (((*fs)->format != 4) && ((*fs)->format != 6))
- return svn_error_create(SVN_ERR_FS_UNSUPPORTED_FORMAT, NULL, NULL);
-
- /* read size (HEAD) info */
- SVN_ERR(read_number(&(*fs)->min_unpacked_rev,
- svn_dirent_join(path, "db/min-unpacked-rev", pool),
- pool));
- return read_number(&(*fs)->max_revision,
- svn_dirent_join(path, "db/current", pool),
- pool);
-}
-
-/* Utility function that returns true if STRING->DATA matches KEY.
- */
-static svn_boolean_t
-key_matches(svn_string_t *string, const char *key)
-{
- return strcmp(string->data, key) == 0;
-}
-
-/* Comparator used for binary search comparing the absolute file offset
- * of a representation to some other offset. DATA is a *representation_t,
- * KEY is a pointer to an apr_size_t.
- */
-static int
-compare_representation_offsets(const void *data, const void *key)
-{
- apr_ssize_t diff = (*(const representation_t *const *)data)->offset
- - *(const apr_size_t *)key;
-
- /* sizeof(int) may be < sizeof(ssize_t) */
- if (diff < 0)
- return -1;
- return diff > 0 ? 1 : 0;
-}
-
-/* Find the revision_info_t object to the given REVISION in FS and return
- * it in *REVISION_INFO. For performance reasons, we skip the lookup if
- * the info is already provided.
- *
- * In that revision, look for the representation_t object for offset OFFSET.
- * If it already exists, set *IDX to its index in *REVISION_INFO's
- * representations list and return the representation object. Otherwise,
- * set the index to where it must be inserted and return NULL.
- */
-static representation_t *
-find_representation(int *idx,
- fs_fs_t *fs,
- revision_info_t **revision_info,
- svn_revnum_t revision,
- apr_size_t offset)
-{
- revision_info_t *info;
- *idx = -1;
-
- /* first let's find the revision */
- info = revision_info ? *revision_info : NULL;
- if (info == NULL || info->revision != revision)
- {
- info = APR_ARRAY_IDX(fs->revisions,
- revision - fs->start_revision,
- revision_info_t*);
- if (revision_info)
- *revision_info = info;
- }
-
- /* not found -> no result */
- if (info == NULL)
- return NULL;
-
- assert(revision == info->revision);
-
- /* look for the representation */
- *idx = svn_sort__bsearch_lower_bound(&offset,
- info->representations,
- compare_representation_offsets);
- if (*idx < info->representations->nelts)
- {
- /* return the representation, if this is the one we were looking for */
- representation_t *result
- = APR_ARRAY_IDX(info->representations, *idx, representation_t *);
- if (result->offset == offset)
- return result;
- }
-
- /* not parsed, yet */
- return NULL;
-}
-
-/* Read the representation header in FILE_CONTENT at OFFSET. Return its
- * size in *HEADER_SIZE, set *IS_PLAIN if no deltification was used and
- * return the deltification base representation in *REPRESENTATION. If
- * there is none, set it to NULL. Use FS to it look up.
- *
- * Use POOL for allocations and SCRATCH_POOL for temporaries.
- */
-static svn_error_t *
-read_rep_base(representation_t **representation,
- apr_size_t *header_size,
- svn_boolean_t *is_plain,
- fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- apr_size_t offset,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- char *str, *last_str;
- int idx;
- svn_revnum_t revision;
- apr_uint64_t temp;
-
- /* identify representation header (1 line) */
- const char *buffer = file_content->data + offset;
- const char *line_end = strchr(buffer, '\n');
- *header_size = line_end - buffer + 1;
-
- /* check for PLAIN rep */
- if (strncmp(buffer, "PLAIN\n", *header_size) == 0)
- {
- *is_plain = TRUE;
- *representation = NULL;
- return SVN_NO_ERROR;
- }
-
- /* check for DELTA against empty rep */
- *is_plain = FALSE;
- if (strncmp(buffer, "DELTA\n", *header_size) == 0)
- {
- /* This is a delta against the empty stream. */
- *representation = fs->null_base;
- return SVN_NO_ERROR;
- }
-
- str = apr_pstrndup(scratch_pool, buffer, line_end - buffer);
- last_str = str;
-
- /* parse it. */
- str = svn_cstring_tokenize(" ", &last_str);
- str = svn_cstring_tokenize(" ", &last_str);
- SVN_ERR(svn_revnum_parse(&revision, str, NULL));
-
- str = svn_cstring_tokenize(" ", &last_str);
- SVN_ERR(svn_cstring_strtoui64(&temp, str, 0, APR_SIZE_MAX, 10));
-
- /* it should refer to a rep in an earlier revision. Look it up */
- *representation = find_representation(&idx, fs, NULL, revision, (apr_size_t)temp);
- return SVN_NO_ERROR;
-}
-
-/* Parse the representation reference (text: or props:) in VALUE, look
- * it up in FS and return it in *REPRESENTATION. To be able to parse the
- * base rep, we pass the FILE_CONTENT as well.
- *
- * If necessary, allocate the result in POOL; use SCRATCH_POOL for temp.
- * allocations.
- */
-static svn_error_t *
-parse_representation(representation_t **representation,
- fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- svn_string_t *value,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- representation_t *result;
- svn_revnum_t revision;
-
- apr_uint64_t offset;
- apr_uint64_t size;
- apr_uint64_t expanded_size;
- int idx;
-
- /* read location (revision, offset) and size */
- char *c = (char *)value->data;
- SVN_ERR(svn_revnum_parse(&revision, svn_cstring_tokenize(" ", &c), NULL));
- SVN_ERR(svn_cstring_strtoui64(&offset, svn_cstring_tokenize(" ", &c), 0, APR_SIZE_MAX, 10));
- SVN_ERR(svn_cstring_strtoui64(&size, svn_cstring_tokenize(" ", &c), 0, APR_SIZE_MAX, 10));
- SVN_ERR(svn_cstring_strtoui64(&expanded_size, svn_cstring_tokenize(" ", &c), 0, APR_SIZE_MAX, 10));
-
- /* look it up */
- result = find_representation(&idx, fs, &revision_info, revision, (apr_size_t)offset);
- if (!result)
- {
- /* not parsed, yet (probably a rep in the same revision).
- * Create a new rep object and determine its base rep as well.
- */
- apr_size_t header_size;
- svn_boolean_t is_plain;
-
- result = apr_pcalloc(pool, sizeof(*result));
- result->revision = revision;
- result->expanded_size = (apr_size_t)(expanded_size ? expanded_size : size);
- result->offset = (apr_size_t)offset;
- result->size = (apr_size_t)size;
- SVN_ERR(read_rep_base(&result->delta_base, &header_size,
- &is_plain, fs, file_content,
- (apr_size_t)offset,
- pool, scratch_pool));
-
- result->header_size = header_size;
- result->is_plain = is_plain;
- svn_sort__array_insert(&result, revision_info->representations, idx);
- }
-
- *representation = result;
-
- return SVN_NO_ERROR;
-}
-
-/* Get the unprocessed (i.e. still deltified) content of REPRESENTATION in
- * FS and return it in *CONTENT. If no NULL, FILE_CONTENT must contain
- * the contents of the revision that also contains the representation.
- * Use POOL for allocations.
- */
-static svn_error_t *
-get_rep_content(svn_stringbuf_t **content,
- fs_fs_t *fs,
- representation_t *representation,
- svn_stringbuf_t *file_content,
- apr_pool_t *pool)
-{
- apr_off_t offset;
- svn_revnum_t revision = representation->revision;
- revision_info_t *revision_info = APR_ARRAY_IDX(fs->revisions,
- revision - fs->start_revision,
- revision_info_t*);
-
- /* not in cache. Is the revision valid at all? */
- if (revision - fs->start_revision > fs->revisions->nelts)
- return svn_error_createf(SVN_ERR_FS_CORRUPT, NULL,
- _("Unknown revision %ld"), revision);
-
- if (file_content)
- {
- offset = representation->offset
- + representation->header_size;
- *content = svn_stringbuf_ncreate(file_content->data + offset,
- representation->size, pool);
- }
- else
- {
- offset = revision_info->offset
- + representation->offset
- + representation->header_size;
- SVN_ERR(get_content(content, NULL, fs, revision, offset,
- representation->size, pool));
- }
-
- return SVN_NO_ERROR;
-}
-
-
-/* Read the delta window contents of all windows in REPRESENTATION in FS.
- * If no NULL, FILE_CONTENT must contain the contents of the revision that
- * also contains the representation.
- * Return the data as svn_txdelta_window_t* instances in *WINDOWS.
- * Use POOL for allocations.
- */
-static svn_error_t *
-read_windows(apr_array_header_t **windows,
- fs_fs_t *fs,
- representation_t *representation,
- svn_stringbuf_t *file_content,
- apr_pool_t *pool)
-{
- svn_stringbuf_t *content;
- svn_stream_t *stream;
- char version;
- apr_size_t len = sizeof(version);
-
- *windows = apr_array_make(pool, 0, sizeof(svn_txdelta_window_t *));
-
- /* get the whole revision content */
- SVN_ERR(get_rep_content(&content, fs, representation, file_content, pool));
-
- /* create a read stream and position it directly after the rep header */
- content->data += 3;
- content->len -= 3;
- stream = svn_stream_from_stringbuf(content, pool);
- SVN_ERR(svn_stream_read(stream, &version, &len));
-
- /* read the windows from that stream */
- while (TRUE)
- {
- svn_txdelta_window_t *window;
- svn_stream_mark_t *mark;
- char dummy;
-
- len = sizeof(dummy);
- SVN_ERR(svn_stream_mark(stream, &mark, pool));
- SVN_ERR(svn_stream_read(stream, &dummy, &len));
- if (len == 0)
- break;
-
- SVN_ERR(svn_stream_seek(stream, mark));
- SVN_ERR(svn_txdelta_read_svndiff_window(&window, stream, version, pool));
- APR_ARRAY_PUSH(*windows, svn_txdelta_window_t *) = window;
- }
-
- return SVN_NO_ERROR;
-}
-
-/* Get the undeltified representation that is a result of combining all
- * deltas from the current desired REPRESENTATION in FS with its base
- * representation. If no NULL, FILE_CONTENT must contain the contents of
- * the revision that also contains the representation. Store the result
- * in *CONTENT. Use POOL for allocations.
- */
-static svn_error_t *
-get_combined_window(svn_stringbuf_t **content,
- fs_fs_t *fs,
- representation_t *representation,
- svn_stringbuf_t *file_content,
- apr_pool_t *pool)
-{
- int i;
- apr_array_header_t *windows;
- svn_stringbuf_t *base_content, *result;
- const char *source;
- apr_pool_t *sub_pool;
- apr_pool_t *iter_pool;
-
- /* special case: no un-deltification necessary */
- if (representation->is_plain)
- {
- SVN_ERR(get_rep_content(content, fs, representation, file_content,
- pool));
- SVN_ERR(set_cached_window(fs, representation, *content, pool));
- return SVN_NO_ERROR;
- }
-
- /* special case: data already in cache */
- SVN_ERR(get_cached_window(content, fs, representation, pool));
- if (*content)
- return SVN_NO_ERROR;
-
- /* read the delta windows for this representation */
- sub_pool = svn_pool_create(pool);
- iter_pool = svn_pool_create(pool);
- SVN_ERR(read_windows(&windows, fs, representation, file_content, sub_pool));
-
- /* fetch the / create a base content */
- if (representation->delta_base && representation->delta_base->revision)
- SVN_ERR(get_combined_window(&base_content, fs,
- representation->delta_base, NULL, sub_pool));
- else
- base_content = svn_stringbuf_create_empty(sub_pool);
-
- /* apply deltas */
- result = svn_stringbuf_create_empty(pool);
- source = base_content->data;
-
- for (i = 0; i < windows->nelts; ++i)
- {
- svn_txdelta_window_t *window
- = APR_ARRAY_IDX(windows, i, svn_txdelta_window_t *);
- svn_stringbuf_t *buf
- = svn_stringbuf_create_ensure(window->tview_len, iter_pool);
-
- buf->len = window->tview_len;
- svn_txdelta_apply_instructions(window, window->src_ops ? source : NULL,
- buf->data, &buf->len);
-
- svn_stringbuf_appendbytes(result, buf->data, buf->len);
- source += window->sview_len;
-
- svn_pool_clear(iter_pool);
- }
-
- /* cache result and return it */
- SVN_ERR(set_cached_window(fs, representation, result, sub_pool));
- *content = result;
-
- svn_pool_destroy(iter_pool);
- svn_pool_destroy(sub_pool);
-
- return SVN_NO_ERROR;
-}
-
-/* forward declaration */
-static svn_error_t *
-read_noderev(fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- apr_size_t offset,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool);
-
-/* Starting at the directory in REPRESENTATION in FILE_CONTENT, read all
- * DAG nodes, directories and representations linked in that tree structure.
- * Store them in FS and REVISION_INFO. Also, read them only once.
- *
- * Use POOL for persistent allocations and SCRATCH_POOL for temporaries.
- */
-static svn_error_t *
-parse_dir(fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- representation_t *representation,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- svn_stringbuf_t *text;
- apr_pool_t *iter_pool;
- apr_pool_t *text_pool;
- const char *current;
- const char *revision_key;
- apr_size_t key_len;
-
- /* special case: empty dir rep */
- if (representation == NULL)
- return SVN_NO_ERROR;
-
- /* get the directory as unparsed string */
- iter_pool = svn_pool_create(scratch_pool);
- text_pool = svn_pool_create(scratch_pool);
-
- SVN_ERR(get_combined_window(&text, fs, representation, file_content,
- text_pool));
- current = text->data;
-
- /* calculate some invariants */
- revision_key = apr_psprintf(text_pool, "r%ld/", representation->revision);
- key_len = strlen(revision_key);
-
- /* Parse and process all directory entries. */
- while (*current != 'E')
- {
- char *next;
-
- /* skip "K ???\n<name>\nV ???\n" lines*/
- current = strchr(current, '\n');
- if (current)
- current = strchr(current+1, '\n');
- if (current)
- current = strchr(current+1, '\n');
- next = current ? strchr(++current, '\n') : NULL;
- if (next == NULL)
- return svn_error_createf(SVN_ERR_FS_CORRUPT, NULL,
- _("Corrupt directory representation in r%ld at offset %ld"),
- representation->revision,
- (long)representation->offset);
-
- /* iff this entry refers to a node in the same revision as this dir,
- * recurse into that node */
- *next = 0;
- current = strstr(current, revision_key);
- if (current)
- {
- /* recurse */
- apr_uint64_t offset;
-
- SVN_ERR(svn_cstring_strtoui64(&offset, current + key_len, 0,
- APR_SIZE_MAX, 10));
- SVN_ERR(read_noderev(fs, file_content, (apr_size_t)offset,
- revision_info, pool, iter_pool));
-
- svn_pool_clear(iter_pool);
- }
- current = next+1;
- }
-
- svn_pool_destroy(iter_pool);
- svn_pool_destroy(text_pool);
- return SVN_NO_ERROR;
-}
-
-/* Starting at the noderev at OFFSET in FILE_CONTENT, read all DAG nodes,
- * directories and representations linked in that tree structure. Store
- * them in FS and REVISION_INFO. Also, read them only once. Return the
- * result in *NODEREV.
- *
- * Use POOL for persistent allocations and SCRATCH_POOL for temporaries.
- */
-static svn_error_t *
-read_noderev(fs_fs_t *fs,
- svn_stringbuf_t *file_content,
- apr_size_t offset,
- revision_info_t *revision_info,
- apr_pool_t *pool,
- apr_pool_t *scratch_pool)
-{
- svn_string_t *line;
- representation_t *text = NULL;
- representation_t *props = NULL;
- apr_size_t start_offset = offset;
- svn_boolean_t is_dir = FALSE;
- const char *path = "???";
-
- scratch_pool = svn_pool_create(scratch_pool);
-
- /* parse the noderev line-by-line until we find an empty line */
- while (1)
- {
- /* for this line, extract key and value. Ignore invalid values */
- svn_string_t key;
- svn_string_t value;
- char *sep;
- const char *start = file_content->data + offset;
- const char *end = strchr(start, '\n');
-
- line = svn_string_ncreate(start, end - start, scratch_pool);
- offset += end - start + 1;
-
- /* empty line -> end of noderev data */
- if (line->len == 0)
- break;
-
- sep = strchr(line->data, ':');
- if (sep == NULL)
- continue;
-
- key.data = line->data;
- key.len = sep - key.data;
- *sep = 0;
-
- if (key.len + 2 > line->len)
- continue;
-
- value.data = sep + 2;
- value.len = line->len - (key.len + 2);
-
- /* translate (key, value) into noderev elements */
- if (key_matches(&key, "type"))
- is_dir = strcmp(value.data, "dir") == 0;
- else if (key_matches(&key, "text"))
- {
- SVN_ERR(parse_representation(&text, fs, file_content,
- &value, revision_info,
- pool, scratch_pool));
-
- /* if we are the first to use this rep, mark it as "text rep" */
- if (++text->ref_count == 1)
- text->kind = is_dir ? dir_rep : file_rep;
- }
- else if (key_matches(&key, "props"))
- {
- SVN_ERR(parse_representation(&props, fs, file_content,
- &value, revision_info,
- pool, scratch_pool));
-
- /* if we are the first to use this rep, mark it as "prop rep" */
- if (++props->ref_count == 1)
- props->kind = is_dir ? dir_property_rep : file_property_rep;
- }
- else if (key_matches(&key, "cpath"))
- path = value.data;
- }
-
- /* record largest changes */
- if (text && text->ref_count == 1)
- add_change(fs, (apr_int64_t)text->size, (apr_int64_t)text->expanded_size,
- text->revision, path, text->kind);
- if (props && props->ref_count == 1)
- add_change(fs, (apr_int64_t)props->size, (apr_int64_t)props->expanded_size,
- props->revision, path, props->kind);
-
- /* if this is a directory and has not been processed, yet, read and
- * process it recursively */
- if (is_dir && text && text->ref_count == 1)
- SVN_ERR(parse_dir(fs, file_content, text, revision_info,
- pool, scratch_pool));
-
- /* update stats */
- if (is_dir)
- {
- revision_info->dir_noderev_size += offset - start_offset;
- revision_info->dir_noderev_count++;
- }
- else
- {
- revision_info->file_noderev_size += offset - start_offset;
- revision_info->file_noderev_count++;
- }
- svn_pool_destroy(scratch_pool);
-
- return SVN_NO_ERROR;
-}
-
-/* Given the unparsed changes list in CHANGES with LEN chars, return the
- * number of changed paths encoded in it.
- */
-static apr_size_t
-get_change_count(const char *changes,
- apr_size_t len)
-{
- apr_size_t lines = 0;
- const char *end = changes + len;
-
- /* line count */
- for (; changes < end; ++changes)
- if (*changes == '\n')
- ++lines;
-
- /* two lines per change */
- return lines / 2;
-}
-
-/* Simple utility to print a REVISION number and make it appear immediately.
- */
-static void
-print_progress(svn_revnum_t revision)
-{
- printf("%8ld", revision);
- fflush(stdout);
-}
-
-/* Read the content of the pack file staring at revision BASE and store it
- * in FS. Use POOL for allocations.
- */
-static svn_error_t *
-read_pack_file(fs_fs_t *fs,
- svn_revnum_t base,
- apr_pool_t *pool)
-{
- apr_array_header_t *manifest = NULL;
- apr_pool_t *local_pool = svn_pool_create(pool);
- apr_pool_t *iter_pool = svn_pool_create(local_pool);
- int i;
- apr_off_t file_size = 0;
- apr_file_t *file;
- const char *pack_folder = get_pack_folder(fs, base, local_pool);
-
- /* parse the manifest file */
- SVN_ERR(read_manifest(&manifest, fs, pack_folder, local_pool));
- if (manifest->nelts != fs->max_files_per_dir)
- return svn_error_create(SVN_ERR_FS_CORRUPT, NULL, NULL);
-
- SVN_ERR(open_rev_or_pack_file(&file, fs, base, local_pool));
- SVN_ERR(get_file_size(&file_size, file, local_pool));
-
- /* process each revision in the pack file */
- for (i = 0; i < manifest->nelts; ++i)
- {
- apr_size_t root_node_offset;
- svn_stringbuf_t *rev_content;
-
- /* create the revision info for the current rev */
- revision_info_t *info = apr_pcalloc(pool, sizeof(*info));
- info->representations = apr_array_make(iter_pool, 4, sizeof(representation_t*));
-
- info->revision = base + i;
- info->offset = APR_ARRAY_IDX(manifest, i, apr_size_t);
- info->end = i+1 < manifest->nelts
- ? APR_ARRAY_IDX(manifest, i+1 , apr_size_t)
- : file_size;
-
- SVN_ERR(get_content(&rev_content, file, fs, info->revision,
- info->offset,
- info->end - info->offset,
- iter_pool));
-
- SVN_ERR(read_revision_header(&info->changes,
- &info->changes_len,
- &root_node_offset,
- rev_content,
- iter_pool));
-
- info->change_count
- = get_change_count(rev_content->data + info->changes,
- info->changes_len);
- SVN_ERR(read_noderev(fs, rev_content,
- root_node_offset, info, pool, iter_pool));
-
- info->representations = apr_array_copy(pool, info->representations);
- APR_ARRAY_PUSH(fs->revisions, revision_info_t*) = info;
-
- /* destroy temps */
- svn_pool_clear(iter_pool);
- }
-
- /* one more pack file processed */
- print_progress(base);
- svn_pool_destroy(local_pool);
-
- return SVN_NO_ERROR;
-}
-
-/* Read the content of the file for REVSION and store its contents in FS.
- * Use POOL for allocations.
- */
-static svn_error_t *
-read_revision_file(fs_fs_t *fs,
- svn_revnum_t revision,
- apr_pool_t *pool)
-{
- apr_size_t root_node_offset;
- apr_pool_t *local_pool = svn_pool_create(pool);
- svn_stringbuf_t *rev_content;
- revision_info_t *info = apr_pcalloc(pool, sizeof(*info));
- apr_off_t file_size = 0;
- apr_file_t *file;
-
- /* read the whole pack file into memory */
- SVN_ERR(open_rev_or_pack_file(&file, fs, revision, local_pool));
- SVN_ERR(get_file_size(&file_size, file, local_pool));
-
- /* create the revision info for the current rev */
- info->representations = apr_array_make(pool, 4, sizeof(representation_t*));
-
- info->revision = revision;
- info->offset = 0;
- info->end = file_size;
-
- SVN_ERR(get_content(&rev_content, file, fs, revision, 0, file_size,
- local_pool));
-
- SVN_ERR(read_revision_header(&info->changes,
- &info->changes_len,
- &root_node_offset,
- rev_content,
- local_pool));
-
- /* put it into our containers */
- APR_ARRAY_PUSH(fs->revisions, revision_info_t*) = info;
-
- info->change_count
- = get_change_count(rev_content->data + info->changes,
- info->changes_len);
-
- /* parse the revision content recursively. */
- SVN_ERR(read_noderev(fs, rev_content,
- root_node_offset, info,
- pool, local_pool));
-
- /* show progress every 1000 revs or so */
- if (revision % fs->max_files_per_dir == 0)
- print_progress(revision);
-
- svn_pool_destroy(local_pool);
-
- return SVN_NO_ERROR;
-}
-
-/* Read the repository at PATH beginning with revision START_REVISION and
- * return the result in *FS. Allocate caches with MEMSIZE bytes total
- * capacity. Use POOL for non-cache allocations.
- */
-static svn_error_t *
-read_revisions(fs_fs_t **fs,
- const char *path,
- svn_revnum_t start_revision,
- apr_size_t memsize,
- apr_pool_t *pool)
-{
- svn_revnum_t revision;
- svn_cache_config_t cache_config = *svn_cache_config_get();
-
- /* determine cache sizes */
-
- if (memsize < 100)
- memsize = 100;
-
- cache_config.cache_size = memsize * 1024 * 1024;
- svn_cache_config_set(&cache_config);
-
- SVN_ERR(fs_open(fs, path, pool));
-
- /* create data containers and caches */
- (*fs)->start_revision = start_revision
- - (start_revision % (*fs)->max_files_per_dir);
- (*fs)->revisions = apr_array_make(pool,
- (*fs)->max_revision + 1 - (*fs)->start_revision,
- sizeof(revision_info_t *));
- (*fs)->null_base = apr_pcalloc(pool, sizeof(*(*fs)->null_base));
- initialize_largest_changes(*fs, 64, pool);
- (*fs)->by_extension = apr_hash_make(pool);
-
- SVN_ERR(svn_cache__create_membuffer_cache(&(*fs)->window_cache,
- svn_cache__get_global_membuffer_cache(),
- NULL, NULL,
- sizeof(window_cache_key_t),
- "", FALSE, pool));
-
- /* read all packed revs */
- for ( revision = start_revision
- ; revision < (*fs)->min_unpacked_rev
- ; revision += (*fs)->max_files_per_dir)
- SVN_ERR(read_pack_file(*fs, revision, pool));
-
- /* read non-packed revs */
- for ( ; revision <= (*fs)->max_revision; ++revision)
- SVN_ERR(read_revision_file(*fs, revision, pool));
-
- return SVN_NO_ERROR;
-}
-
-/* Compression statistics we collect over a given set of representations.
- */
-typedef struct rep_pack_stats_t
-{
- /* number of representations */
- apr_int64_t count;
-
- /* total size after deltification (i.e. on disk size) */
- apr_int64_t packed_size;
-
- /* total size after de-deltification (i.e. plain text size) */
- apr_int64_t expanded_size;
-
- /* total on-disk header size */
- apr_int64_t overhead_size;
-} rep_pack_stats_t;
-
-/* Statistics we collect over a given set of representations.
- * We group them into shared and non-shared ("unique") reps.
- */
-typedef struct representation_stats_t
-{
- /* stats over all representations */
- rep_pack_stats_t total;
-
- /* stats over those representations with ref_count == 1 */
- rep_pack_stats_t uniques;
-
- /* stats over those representations with ref_count > 1 */
- rep_pack_stats_t shared;
-
- /* sum of all ref_counts */
- apr_int64_t references;
-
- /* sum of ref_count * expanded_size,
- * i.e. total plaintext content if there was no rep sharing */
- apr_int64_t expanded_size;
-} representation_stats_t;
-
-/* Basic statistics we collect over a given set of noderevs.
- */
-typedef struct node_stats_t
-{
- /* number of noderev structs */
- apr_int64_t count;
-
- /* their total size on disk (structs only) */
- apr_int64_t size;
-} node_stats_t;
-
-/* Accumulate stats of REP in STATS.
- */
-static void
-add_rep_pack_stats(rep_pack_stats_t *stats,
- representation_t *rep)
-{
- stats->count++;
-
- stats->packed_size += rep->size;
- stats->expanded_size += rep->expanded_size;
- stats->overhead_size += rep->header_size + 7 /* ENDREP\n */;
-}
-
-/* Accumulate stats of REP in STATS.
- */
-static void
-add_rep_stats(representation_stats_t *stats,
- representation_t *rep)
-{
- add_rep_pack_stats(&stats->total, rep);
- if (rep->ref_count == 1)
- add_rep_pack_stats(&stats->uniques, rep);
- else
- add_rep_pack_stats(&stats->shared, rep);
-
- stats->references += rep->ref_count;
- stats->expanded_size += rep->ref_count * rep->expanded_size;
-}
-
-/* Print statistics for the given group of representations to console.
- * Use POOL for allocations.
- */
-static void
-print_rep_stats(representation_stats_t *stats,
- apr_pool_t *pool)
-{
- printf(_("%20s bytes in %12s reps\n"
- "%20s bytes in %12s shared reps\n"
- "%20s bytes expanded size\n"
- "%20s bytes expanded shared size\n"
- "%20s bytes with rep-sharing off\n"
- "%20s shared references\n"),
- svn__i64toa_sep(stats->total.packed_size, ',', pool),
- svn__i64toa_sep(stats->total.count, ',', pool),
- svn__i64toa_sep(stats->shared.packed_size, ',', pool),
- svn__i64toa_sep(stats->shared.count, ',', pool),
- svn__i64toa_sep(stats->total.expanded_size, ',', pool),
- svn__i64toa_sep(stats->shared.expanded_size, ',', pool),
- svn__i64toa_sep(stats->expanded_size, ',', pool),
- svn__i64toa_sep(stats->references - stats->total.count, ',', pool));
-}
-
-/* Print the (used) contents of CHANGES. Use POOL for allocations.
- */
-static void
-print_largest_reps(largest_changes_t *changes,
- apr_pool_t *pool)
-{
- apr_size_t i;
- for (i = 0; i < changes->count && changes->changes[i]->size; ++i)
- printf(_("%12s r%-8ld %s\n"),
- svn__i64toa_sep(changes->changes[i]->size, ',', pool),
- changes->changes[i]->revision,
- changes->changes[i]->path->data);
-}
-
-/* Print the non-zero section of HISTOGRAM to console.
- * Use POOL for allocations.
- */
-static void
-print_histogram(histogram_t *histogram,
- apr_pool_t *pool)
-{
- int first = 0;
- int last = 63;
- int i;
-
- /* identify non-zero range */
- while (last > 0 && histogram->lines[last].count == 0)
- --last;
-
- while (first <= last && histogram->lines[first].count == 0)
- ++first;
-
- /* display histogram lines */
- for (i = last; i >= first; --i)
- printf(_(" [2^%2d, 2^%2d) %15s (%2d%%) bytes in %12s (%2d%%) items\n"),
- i-1, i,
- svn__i64toa_sep(histogram->lines[i].sum, ',', pool),
- (int)(histogram->lines[i].sum * 100 / histogram->total.sum),
- svn__i64toa_sep(histogram->lines[i].count, ',', pool),
- (int)(histogram->lines[i].count * 100 / histogram->total.count));
-}
-
-/* COMPARISON_FUNC for svn_sort__hash.
- * Sort extension_info_t values by total count in descending order.
- */
-static int
-compare_count(const svn_sort__item_t *a,
- const svn_sort__item_t *b)
-{
- const extension_info_t *lhs = a->value;
- const extension_info_t *rhs = b->value;
- apr_int64_t diff = lhs->node_histogram.total.count
- - rhs->node_histogram.total.count;
-
- return diff > 0 ? -1 : (diff < 0 ? 1 : 0);
-}
-
-/* COMPARISON_FUNC for svn_sort__hash.
- * Sort extension_info_t values by total uncompressed size in descending order.
- */
-static int
-compare_node_size(const svn_sort__item_t *a,
- const svn_sort__item_t *b)
-{
- const extension_info_t *lhs = a->value;
- const extension_info_t *rhs = b->value;
- apr_int64_t diff = lhs->node_histogram.total.sum
- - rhs->node_histogram.total.sum;
-
- return diff > 0 ? -1 : (diff < 0 ? 1 : 0);
-}
-
-/* COMPARISON_FUNC for svn_sort__hash.
- * Sort extension_info_t values by total prep count in descending order.
- */
-static int
-compare_rep_size(const svn_sort__item_t *a,
- const svn_sort__item_t *b)
-{
- const extension_info_t *lhs = a->value;
- const extension_info_t *rhs = b->value;
- apr_int64_t diff = lhs->rep_histogram.total.sum
- - rhs->rep_histogram.total.sum;
-
- return diff > 0 ? -1 : (diff < 0 ? 1 : 0);
-}
-
-/* Return an array of extension_info_t* for the (up to) 16 most prominent
- * extensions in FS according to the sort criterion COMPARISON_FUNC.
- * Allocate results in POOL.
- */
-static apr_array_header_t *
-get_by_extensions(fs_fs_t *fs,
- int (*comparison_func)(const svn_sort__item_t *,
- const svn_sort__item_t *),
- apr_pool_t *pool)
-{
- /* sort all data by extension */
- apr_array_header_t *sorted
- = svn_sort__hash(fs->by_extension, comparison_func, pool);
-
- /* select the top (first) 16 entries */
- int count = MIN(sorted->nelts, 16);
- apr_array_header_t *result
- = apr_array_make(pool, count, sizeof(extension_info_t*));
- int i;
-
- for (i = 0; i < count; ++i)
- APR_ARRAY_PUSH(result, extension_info_t*)
- = APR_ARRAY_IDX(sorted, i, svn_sort__item_t).value;
-
- return result;
-}
-
-/* Add all extension_info_t* entries of TO_ADD not already in TARGET to
- * TARGET.
- */
-static void
-merge_by_extension(apr_array_header_t *target,
- apr_array_header_t *to_add)
-{
- int i, k, count;
-
- count = target->nelts;
- for (i = 0; i < to_add->nelts; ++i)
- {
- extension_info_t *info = APR_ARRAY_IDX(to_add, i, extension_info_t *);
- for (k = 0; k < count; ++k)
- if (info == APR_ARRAY_IDX(target, k, extension_info_t *))
- break;
-
- if (k == count)
- APR_ARRAY_PUSH(target, extension_info_t*) = info;
- }
-}
-
-/* Print the (up to) 16 extensions in FS with the most changes.
- * Use POOL for allocations.
- */
-static void
-print_extensions_by_changes(fs_fs_t *fs,
- apr_pool_t *pool)
-{
- apr_array_header_t *data = get_by_extensions(fs, compare_count, pool);
- apr_int64_t sum = 0;
- int i;
-
- for (i = 0; i < data->nelts; ++i)
- {
- extension_info_t *info = APR_ARRAY_IDX(data, i, extension_info_t *);
- sum += info->node_histogram.total.count;
- printf(_(" %9s %12s (%2d%%) changes\n"),
- info->extension,
- svn__i64toa_sep(info->node_histogram.total.count, ',', pool),
- (int)(info->node_histogram.total.count * 100 /
- fs->file_histogram.total.count));
- }
-
- printf(_(" %9s %12s (%2d%%) changes\n"),
- "(others)",
- svn__i64toa_sep(fs->file_histogram.total.count - sum, ',', pool),
- (int)((fs->file_histogram.total.count - sum) * 100 /
- fs->file_histogram.total.count));
-}
-
-/* Print the (up to) 16 extensions in FS with the largest total size of
- * changed file content. Use POOL for allocations.
- */
-static void
-print_extensions_by_nodes(fs_fs_t *fs,
- apr_pool_t *pool)
-{
- apr_array_header_t *data = get_by_extensions(fs, compare_node_size, pool);
- apr_int64_t sum = 0;
- int i;
-
- for (i = 0; i < data->nelts; ++i)
- {
- extension_info_t *info = APR_ARRAY_IDX(data, i, extension_info_t *);
- sum += info->node_histogram.total.sum;
- printf(_(" %9s %20s (%2d%%) bytes\n"),
- info->extension,
- svn__i64toa_sep(info->node_histogram.total.sum, ',', pool),
- (int)(info->node_histogram.total.sum * 100 /
- fs->file_histogram.total.sum));
- }
-
- printf(_(" %9s %20s (%2d%%) bytes\n"),
- "(others)",
- svn__i64toa_sep(fs->file_histogram.total.sum - sum, ',', pool),
- (int)((fs->file_histogram.total.sum - sum) * 100 /
- fs->file_histogram.total.sum));
-}
-
-/* Print the (up to) 16 extensions in FS with the largest total size of
- * changed file content. Use POOL for allocations.
- */
-static void
-print_extensions_by_reps(fs_fs_t *fs,
- apr_pool_t *pool)
-{
- apr_array_header_t *data = get_by_extensions(fs, compare_rep_size, pool);
- apr_int64_t sum = 0;
- int i;
-
- for (i = 0; i < data->nelts; ++i)
- {
- extension_info_t *info = APR_ARRAY_IDX(data, i, extension_info_t *);
- sum += info->rep_histogram.total.sum;
- printf(_(" %9s %20s (%2d%%) bytes\n"),
- info->extension,
- svn__i64toa_sep(info->rep_histogram.total.sum, ',', pool),
- (int)(info->rep_histogram.total.sum * 100 /
- fs->rep_size_histogram.total.sum));
- }
-
- printf(_(" %9s %20s (%2d%%) bytes\n"),
- "(others)",
- svn__i64toa_sep(fs->rep_size_histogram.total.sum - sum, ',', pool),
- (int)((fs->rep_size_histogram.total.sum - sum) * 100 /
- fs->rep_size_histogram.total.sum));
-}
-
-/* Print per-extension histograms for the most frequent extensions in FS.
- * Use POOL for allocations. */
-static void
-print_histograms_by_extension(fs_fs_t *fs,
- apr_pool_t *pool)
-{
- apr_array_header_t *data = get_by_extensions(fs, compare_count, pool);
- int i;
-
- merge_by_extension(data, get_by_extensions(fs, compare_node_size, pool));
- merge_by_extension(data, get_by_extensions(fs, compare_rep_size, pool));
-
- for (i = 0; i < data->nelts; ++i)
- {
- extension_info_t *info = APR_ARRAY_IDX(data, i, extension_info_t *);
- printf("\nHistogram of '%s' file sizes:\n", info->extension);
- print_histogram(&info->node_histogram, pool);
- printf("\nHistogram of '%s' file representation sizes:\n",
- info->extension);
- print_histogram(&info->rep_histogram, pool);
- }
-}
-
-/* Post-process stats for FS and print them to the console.
- * Use POOL for allocations.
- */
-static void
-print_stats(fs_fs_t *fs,
- apr_pool_t *pool)
-{
- int i, k;
-
- /* initialize stats to collect */
- representation_stats_t file_rep_stats = { { 0 } };
- representation_stats_t dir_rep_stats = { { 0 } };
- representation_stats_t file_prop_rep_stats = { { 0 } };
- representation_stats_t dir_prop_rep_stats = { { 0 } };
- representation_stats_t total_rep_stats = { { 0 } };
-
- node_stats_t dir_node_stats = { 0 };
- node_stats_t file_node_stats = { 0 };
- node_stats_t total_node_stats = { 0 };
-
- apr_int64_t total_size = 0;
- apr_int64_t change_count = 0;
- apr_int64_t change_len = 0;
-
- /* aggregate info from all revisions */
- for (i = 0; i < fs->revisions->nelts; ++i)
- {
- revision_info_t *revision = APR_ARRAY_IDX(fs->revisions, i,
- revision_info_t *);
-
- /* data gathered on a revision level */
- change_count += revision->change_count;
- change_len += revision->changes_len;
- total_size += revision->end - revision->offset;
-
- dir_node_stats.count += revision->dir_noderev_count;
- dir_node_stats.size += revision->dir_noderev_size;
- file_node_stats.count += revision->file_noderev_count;
- file_node_stats.size += revision->file_noderev_size;
- total_node_stats.count += revision->dir_noderev_count
- + revision->file_noderev_count;
- total_node_stats.size += revision->dir_noderev_size
- + revision->file_noderev_size;
-
- /* process representations */
- for (k = 0; k < revision->representations->nelts; ++k)
- {
- representation_t *rep = APR_ARRAY_IDX(revision->representations,
- k, representation_t *);
-
- /* accumulate in the right bucket */
- switch(rep->kind)
- {
- case file_rep:
- add_rep_stats(&file_rep_stats, rep);
- break;
- case dir_rep:
- add_rep_stats(&dir_rep_stats, rep);
- break;
- case file_property_rep:
- add_rep_stats(&file_prop_rep_stats, rep);
- break;
- case dir_property_rep:
- add_rep_stats(&dir_prop_rep_stats, rep);
- break;
- default:
- break;
- }
-
- add_rep_stats(&total_rep_stats, rep);
- }
- }
-
- /* print results */
- printf("\nGlobal statistics:\n");
- printf(_("%20s bytes in %12s revisions\n"
- "%20s bytes in %12s changes\n"
- "%20s bytes in %12s node revision records\n"
- "%20s bytes in %12s representations\n"
- "%20s bytes expanded representation size\n"
- "%20s bytes with rep-sharing off\n"),
- svn__i64toa_sep(total_size, ',', pool),
- svn__i64toa_sep(fs->revisions->nelts, ',', pool),
- svn__i64toa_sep(change_len, ',', pool),
- svn__i64toa_sep(change_count, ',', pool),
- svn__i64toa_sep(total_node_stats.size, ',', pool),
- svn__i64toa_sep(total_node_stats.count, ',', pool),
- svn__i64toa_sep(total_rep_stats.total.packed_size, ',', pool),
- svn__i64toa_sep(total_rep_stats.total.count, ',', pool),
- svn__i64toa_sep(total_rep_stats.total.expanded_size, ',', pool),
- svn__i64toa_sep(total_rep_stats.expanded_size, ',', pool));
-
- printf("\nNoderev statistics:\n");
- printf(_("%20s bytes in %12s nodes total\n"
- "%20s bytes in %12s directory noderevs\n"
- "%20s bytes in %12s file noderevs\n"),
- svn__i64toa_sep(total_node_stats.size, ',', pool),
- svn__i64toa_sep(total_node_stats.count, ',', pool),
- svn__i64toa_sep(dir_node_stats.size, ',', pool),
- svn__i64toa_sep(dir_node_stats.count, ',', pool),
- svn__i64toa_sep(file_node_stats.size, ',', pool),
- svn__i64toa_sep(file_node_stats.count, ',', pool));
-
- printf("\nRepresentation statistics:\n");
- printf(_("%20s bytes in %12s representations total\n"
- "%20s bytes in %12s directory representations\n"
- "%20s bytes in %12s file representations\n"
- "%20s bytes in %12s directory property representations\n"
- "%20s bytes in %12s file property representations\n"
- "%20s bytes in header & footer overhead\n"),
- svn__i64toa_sep(total_rep_stats.total.packed_size, ',', pool),
- svn__i64toa_sep(total_rep_stats.total.count, ',', pool),
- svn__i64toa_sep(dir_rep_stats.total.packed_size, ',', pool),
- svn__i64toa_sep(dir_rep_stats.total.count, ',', pool),
- svn__i64toa_sep(file_rep_stats.total.packed_size, ',', pool),
- svn__i64toa_sep(file_rep_stats.total.count, ',', pool),
- svn__i64toa_sep(dir_prop_rep_stats.total.packed_size, ',', pool),
- svn__i64toa_sep(dir_prop_rep_stats.total.count, ',', pool),
- svn__i64toa_sep(file_prop_rep_stats.total.packed_size, ',', pool),
- svn__i64toa_sep(file_prop_rep_stats.total.count, ',', pool),
- svn__i64toa_sep(total_rep_stats.total.overhead_size, ',', pool));
-
- printf("\nDirectory representation statistics:\n");
- print_rep_stats(&dir_rep_stats, pool);
- printf("\nFile representation statistics:\n");
- print_rep_stats(&file_rep_stats, pool);
- printf("\nDirectory property representation statistics:\n");
- print_rep_stats(&dir_prop_rep_stats, pool);
- printf("\nFile property representation statistics:\n");
- print_rep_stats(&file_prop_rep_stats, pool);
-
- printf("\nLargest representations:\n");
- print_largest_reps(fs->largest_changes, pool);
- printf("\nExtensions by number of changes:\n");
- print_extensions_by_changes(fs, pool);
- printf("\nExtensions by size of changed files:\n");
- print_extensions_by_nodes(fs, pool);
- printf("\nExtensions by size of representations:\n");
- print_extensions_by_reps(fs, pool);
-
- printf("\nHistogram of expanded node sizes:\n");
- print_histogram(&fs->node_size_histogram, pool);
- printf("\nHistogram of representation sizes:\n");
- print_histogram(&fs->rep_size_histogram, pool);
- printf("\nHistogram of file sizes:\n");
- print_histogram(&fs->file_histogram, pool);
- printf("\nHistogram of file representation sizes:\n");
- print_histogram(&fs->file_rep_histogram, pool);
- printf("\nHistogram of file property sizes:\n");
- print_histogram(&fs->file_prop_histogram, pool);
- printf("\nHistogram of file property representation sizes:\n");
- print_histogram(&fs->file_prop_rep_histogram, pool);
- printf("\nHistogram of directory sizes:\n");
- print_histogram(&fs->dir_histogram, pool);
- printf("\nHistogram of directory representation sizes:\n");
- print_histogram(&fs->dir_rep_histogram, pool);
- printf("\nHistogram of directory property sizes:\n");
- print_histogram(&fs->dir_prop_histogram, pool);
- printf("\nHistogram of directory property representation sizes:\n");
- print_histogram(&fs->dir_prop_rep_histogram, pool);
-
- print_histograms_by_extension(fs, pool);
-}
-
-/* Write tool usage info text to OSTREAM using PROGNAME as a prefix and
- * POOL for allocations.
- */
-static void
-print_usage(svn_stream_t *ostream, const char *progname,
- apr_pool_t *pool)
-{
- svn_error_clear(svn_stream_printf(ostream, pool,
- "\n"
- "Usage: %s <repo> [cachesize]\n"
- "\n"
- "Read the repository at local path <repo> starting at revision 0,\n"
- "count statistical information and write that data to stdout.\n"
- "Use up to [cachesize] MB of memory for caching. This does not include\n"
- "temporary representation of the repository structure, i.e. the actual\n"
- "memory may be considerably higher. If not given, defaults to 100 MB.\n",
- progname));
-}
-
-/* linear control flow */
-int main(int argc, const char *argv[])
-{
- apr_pool_t *pool;
- svn_stream_t *ostream;
- svn_error_t *svn_err;
- const char *repo_path = NULL;
- svn_revnum_t start_revision = 0;
- apr_size_t memsize = 100;
- apr_uint64_t temp = 0;
- fs_fs_t *fs;
-
- apr_initialize();
- atexit(apr_terminate);
-
- pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
-
- svn_err = svn_stream_for_stdout(&ostream, pool);
- if (svn_err)
- {
- svn_handle_error2(svn_err, stdout, FALSE, ERROR_TAG);
- return 2;
- }
-
- if (argc < 2 || argc > 3)
- {
- print_usage(ostream, argv[0], pool);
- return 2;
- }
-
- if (argc == 3)
- {
- svn_err = svn_cstring_strtoui64(&temp, argv[2], 0, APR_SIZE_MAX, 10);
- if (svn_err)
- {
- print_usage(ostream, argv[0], pool);
- svn_error_clear(svn_err);
- return 2;
- }
-
- memsize = (apr_size_t)temp;
- }
-
- repo_path = svn_dirent_canonicalize(argv[1], pool);
- start_revision = 0;
-
- printf("Reading revisions\n");
- svn_err = read_revisions(&fs, repo_path, start_revision, memsize, pool);
- printf("\n");
-
- if (svn_err)
- {
- svn_handle_error2(svn_err, stdout, FALSE, ERROR_TAG);
- return 2;
- }
-
- print_stats(fs, pool);
-
- return 0;
-}
diff --git a/tools/server-side/mod_dontdothat/mod_dontdothat.c b/tools/server-side/mod_dontdothat/mod_dontdothat.c
index b4801ed..b939ca7 100644
--- a/tools/server-side/mod_dontdothat/mod_dontdothat.c
+++ b/tools/server-side/mod_dontdothat/mod_dontdothat.c
@@ -40,7 +40,15 @@
#include "svn_path.h"
#include "private/svn_fspath.h"
-module AP_MODULE_DECLARE_DATA dontdothat_module;
+extern module AP_MODULE_DECLARE_DATA dontdothat_module;
+
+#ifndef XML_VERSION_AT_LEAST
+#define XML_VERSION_AT_LEAST(major,minor,patch) \
+(((major) < XML_MAJOR_VERSION) \
+ || ((major) == XML_MAJOR_VERSION && (minor) < XML_MINOR_VERSION) \
+ || ((major) == XML_MAJOR_VERSION && (minor) == XML_MINOR_VERSION && \
+ (patch) <= XML_MICRO_VERSION))
+#endif /* XML_VERSION_AT_LEAST */
typedef struct dontdothat_config_rec {
const char *config_file;
@@ -551,6 +559,31 @@ end_element(void *baton, const char *name)
}
}
+#if XML_VERSION_AT_LEAST(1, 95, 8)
+static void
+expat_entity_declaration(void *userData,
+ const XML_Char *entityName,
+ int is_parameter_entity,
+ const XML_Char *value,
+ int value_length,
+ const XML_Char *base,
+ const XML_Char *systemId,
+ const XML_Char *publicId,
+ const XML_Char *notationName)
+{
+ dontdothat_filter_ctx *ctx = userData;
+
+ /* Stop the parser if an entity declaration is hit. */
+ XML_StopParser(ctx->xmlp, 0 /* resumable */);
+}
+#else
+/* A noop default_handler. */
+static void
+expat_default_handler(void *userData, const XML_Char *s, int len)
+{
+}
+#endif
+
static svn_boolean_t
is_valid_wildcard(const char *wc)
{
@@ -696,6 +729,12 @@ dontdothat_insert_filters(request_rec *r)
XML_SetElementHandler(ctx->xmlp, start_element, end_element);
XML_SetCharacterDataHandler(ctx->xmlp, cdata);
+#if XML_VERSION_AT_LEAST(1, 95, 8)
+ XML_SetEntityDeclHandler(ctx->xmlp, expat_entity_declaration);
+#else
+ XML_SetDefaultHandler(ctx->xmlp, expat_default_handler);
+#endif
+
ap_add_input_filter("DONTDOTHAT_FILTER", ctx, r, r->connection);
}
}
diff --git a/tools/server-side/svn-populate-node-origins-index.c b/tools/server-side/svn-populate-node-origins-index.c
index b9762c4..5d74c0c 100644
--- a/tools/server-side/svn-populate-node-origins-index.c
+++ b/tools/server-side/svn-populate-node-origins-index.c
@@ -122,7 +122,7 @@ build_index(const char *repos_path, apr_pool_t *pool)
apr_pool_t *subpool;
/* Open the repository. */
- SVN_ERR(svn_repos_open2(&repos, repos_path, NULL, pool));
+ SVN_ERR(svn_repos_open3(&repos, repos_path, NULL, pool, pool));
/* Get a filesystem object. */
fs = svn_repos_fs(repos);
diff --git a/tools/server-side/svn-rep-sharing-stats.c b/tools/server-side/svn-rep-sharing-stats.c
deleted file mode 100644
index f610409..0000000
--- a/tools/server-side/svn-rep-sharing-stats.c
+++ /dev/null
@@ -1,530 +0,0 @@
-/*
- * ====================================================================
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied. See the License for the
- * specific language governing permissions and limitations
- * under the License.
- * ====================================================================
- */
-
-#include <apr_signal.h>
-
-#include "svn_cmdline.h"
-#include "svn_dirent_uri.h"
-#include "svn_pools.h"
-#include "svn_repos.h"
-#include "svn_opt.h"
-#include "svn_utf.h"
-#include "svn_version.h"
-
-#include "../../subversion/libsvn_fs_fs/fs.h"
-#include "../../subversion/libsvn_fs_fs/fs_fs.h"
-/* for svn_fs_fs__id_* (used in assertions only) */
-#include "../../subversion/libsvn_fs_fs/id.h"
-
-#include "private/svn_cmdline_private.h"
-
-#include "svn_private_config.h"
-
-
-/** Help messages and version checking. **/
-
-static svn_error_t *
-version(apr_pool_t *pool)
-{
- return svn_opt_print_help4(NULL, "svn-rep-sharing-stats", TRUE, FALSE, FALSE,
- NULL, NULL, NULL, NULL, NULL, NULL, pool);
-}
-
-static void
-usage(apr_pool_t *pool)
-{
- svn_error_clear(svn_cmdline_fprintf
- (stderr, pool,
- _("Type 'svn-rep-sharing-stats --help' for usage.\n")));
-}
-
-
-static void
-help(const apr_getopt_option_t *options, apr_pool_t *pool)
-{
- svn_error_clear
- (svn_cmdline_fprintf
- (stdout, pool,
- _("usage: svn-rep-sharing-stats [OPTIONS] REPOS_PATH\n\n"
- " Prints the reference count statistics for representations\n"
- " in an FSFS repository.\n"
- "\n"
- " At least one of the options --data/--prop/--both must be specified.\n"
- "\n"
- "Valid options:\n")));
- while (options->description)
- {
- const char *optstr;
- svn_opt_format_option(&optstr, options, TRUE, pool);
- svn_error_clear(svn_cmdline_fprintf(stdout, pool, " %s\n", optstr));
- ++options;
- }
- svn_error_clear(svn_cmdline_fprintf(stdout, pool, "\n"));
- exit(0);
-}
-
-
-/* Version compatibility check */
-static svn_error_t *
-check_lib_versions(void)
-{
- static const svn_version_checklist_t checklist[] =
- {
- /* ### check FSFS version */
- { "svn_subr", svn_subr_version },
- { "svn_fs", svn_fs_version },
- { NULL, NULL }
- };
- SVN_VERSION_DEFINE(my_version);
-
- return svn_error_trace(svn_ver_check_list(&my_version, checklist));
-}
-
-
-
-/** Cancellation stuff, ### copied from subversion/svn/main.c */
-
-/* A flag to see if we've been cancelled by the client or not. */
-static volatile sig_atomic_t cancelled = FALSE;
-
-/* A signal handler to support cancellation. */
-static void
-signal_handler(int signum)
-{
- apr_signal(signum, SIG_IGN);
- cancelled = TRUE;
-}
-
-/* Our cancellation callback. */
-static svn_error_t *
-svn_cl__check_cancel(void *baton)
-{
- if (cancelled)
- return svn_error_create(SVN_ERR_CANCELLED, NULL, _("Caught signal"));
- else
- return SVN_NO_ERROR;
-}
-
-static svn_cancel_func_t cancel_func = svn_cl__check_cancel;
-
-static void set_up_cancellation(void)
-{
- /* Set up our cancellation support. */
- apr_signal(SIGINT, signal_handler);
-#ifdef SIGBREAK
- /* SIGBREAK is a Win32 specific signal generated by ctrl-break. */
- apr_signal(SIGBREAK, signal_handler);
-#endif
-#ifdef SIGHUP
- apr_signal(SIGHUP, signal_handler);
-#endif
-#ifdef SIGTERM
- apr_signal(SIGTERM, signal_handler);
-#endif
-
-#ifdef SIGPIPE
- /* Disable SIGPIPE generation for the platforms that have it. */
- apr_signal(SIGPIPE, SIG_IGN);
-#endif
-
-#ifdef SIGXFSZ
- /* Disable SIGXFSZ generation for the platforms that have it, otherwise
- * working with large files when compiled against an APR that doesn't have
- * large file support will crash the program, which is uncool. */
- apr_signal(SIGXFSZ, SIG_IGN);
-#endif
-}
-
-
-/** Program-specific code. **/
-enum {
- OPT_VERSION = SVN_OPT_FIRST_LONGOPT_ID,
- OPT_DATA,
- OPT_PROP,
- OPT_BOTH
-};
-
-static svn_error_t *check_experimental(void)
-{
- if (getenv("SVN_REP_SHARING_STATS_IS_EXPERIMENTAL"))
- return SVN_NO_ERROR;
-
- return svn_error_create(APR_EGENERAL, NULL,
- "This code is experimental and should not "
- "be used on live data.");
-}
-
-/* The parts of a rep that determine whether it's being shared. */
-struct key_t
-{
- svn_revnum_t revision;
- apr_off_t offset;
-};
-
-/* What we need to know about a rep. */
-struct value_t
-{
- svn_checksum_t *sha1_checksum;
- apr_uint64_t refcount;
-};
-
-/* Increment records[rep] if both are non-NULL and REP contains a sha1.
- * Allocate keys and values in RESULT_POOL.
- */
-static svn_error_t *record(apr_hash_t *records,
- representation_t *rep,
- apr_pool_t *result_pool)
-{
- struct key_t *key;
- struct value_t *value;
-
- /* Skip if we ignore this particular kind of reps, or if the rep doesn't
- * exist or doesn't have the checksum we are after. (The latter case
- * often corresponds to node_rev->kind == svn_node_dir.)
- */
- if (records == NULL || rep == NULL || rep->sha1_checksum == NULL)
- return SVN_NO_ERROR;
-
- /* Construct the key.
- *
- * Must use calloc() because apr_hash_* pay attention to padding bytes too.
- */
- key = apr_pcalloc(result_pool, sizeof(*key));
- key->revision = rep->revision;
- key->offset = rep->offset;
-
- /* Update or create the value. */
- if ((value = apr_hash_get(records, key, sizeof(*key))))
- {
- /* Paranoia. */
- SVN_ERR_ASSERT(value->sha1_checksum != NULL);
- SVN_ERR_ASSERT(svn_checksum_match(value->sha1_checksum,
- rep->sha1_checksum));
- /* Real work. */
- value->refcount++;
- }
- else
- {
- value = apr_palloc(result_pool, sizeof(*value));
- value->sha1_checksum = svn_checksum_dup(rep->sha1_checksum, result_pool);
- value->refcount = 1;
- }
-
- /* Store them. */
- apr_hash_set(records, key, sizeof(*key), value);
-
- return SVN_NO_ERROR;
-}
-
-/* Inspect the data and/or prop reps of revision REVNUM in FS. Store
- * reference count tallies in passed hashes (allocated in RESULT_POOL).
- *
- * If PROP_REPS or DATA_REPS is NULL, the respective kind of reps are not
- * tallied.
- *
- * Print progress report to STDERR unless QUIET is true.
- *
- * Use SCRATCH_POOL for temporary allocations.
- */
-static svn_error_t *
-process_one_revision(svn_fs_t *fs,
- svn_revnum_t revnum,
- svn_boolean_t quiet,
- apr_hash_t *prop_reps,
- apr_hash_t *data_reps,
- apr_hash_t *both_reps,
- apr_pool_t *result_pool,
- apr_pool_t *scratch_pool)
-{
- svn_fs_root_t *rev_root;
- apr_hash_t *paths_changed;
- apr_hash_index_t *hi;
-
- if (! quiet)
- SVN_ERR(svn_cmdline_fprintf(stderr, scratch_pool,
- "processing r%ld\n", revnum));
-
- /* Get the changed paths. */
- SVN_ERR(svn_fs_revision_root(&rev_root, fs, revnum, scratch_pool));
- SVN_ERR(svn_fs_paths_changed2(&paths_changed, rev_root, scratch_pool));
-
- /* Iterate them. */
- /* ### use iterpool? */
- for (hi = apr_hash_first(scratch_pool, paths_changed);
- hi; hi = apr_hash_next(hi))
- {
- const char *path;
- const svn_fs_path_change2_t *change;
- const svn_fs_id_t *node_rev_id1, *node_rev_id2;
- const svn_fs_id_t *the_id;
-
- node_revision_t *node_rev;
-
- path = svn__apr_hash_index_key(hi);
- change = svn__apr_hash_index_val(hi);
- if (! quiet)
- SVN_ERR(svn_cmdline_fprintf(stderr, scratch_pool,
- "processing r%ld:%s\n", revnum, path));
-
- if (change->change_kind == svn_fs_path_change_delete)
- /* Can't ask for reps of PATH at REVNUM if the path no longer exists
- * at that revision! */
- continue;
-
- /* Okay, we have two node_rev id's for this change: the txn one and
- * the revision one. We'll use the latter. */
- node_rev_id1 = change->node_rev_id;
- SVN_ERR(svn_fs_node_id(&node_rev_id2, rev_root, path, scratch_pool));
-
- SVN_ERR_ASSERT(svn_fs_fs__id_txn_id(node_rev_id1) != NULL);
- SVN_ERR_ASSERT(svn_fs_fs__id_rev(node_rev_id2) != SVN_INVALID_REVNUM);
-
- the_id = node_rev_id2;
-
- /* Get the node_rev using the chosen node_rev_id. */
- SVN_ERR(svn_fs_fs__get_node_revision(&node_rev, fs, the_id, scratch_pool));
-
- /* Maybe record the sha1's. */
- SVN_ERR(record(prop_reps, node_rev->prop_rep, result_pool));
- SVN_ERR(record(data_reps, node_rev->data_rep, result_pool));
- SVN_ERR(record(both_reps, node_rev->prop_rep, result_pool));
- SVN_ERR(record(both_reps, node_rev->data_rep, result_pool));
- }
-
- return SVN_NO_ERROR;
-}
-
-/* Print REPS_REF_COUNT (a hash as for process_one_revision())
- * to stdout in "refcount => sha1" format. A sha1 may appear
- * more than once if not all its instances are shared. Prepend
- * each line by NAME.
- *
- * Use SCRATCH_POOL for temporary allocations.
- */
-static svn_error_t *
-pretty_print(const char *name,
- apr_hash_t *reps_ref_counts,
- apr_pool_t *scratch_pool)
-{
- apr_hash_index_t *hi;
-
- if (reps_ref_counts == NULL)
- return SVN_NO_ERROR;
-
- for (hi = apr_hash_first(scratch_pool, reps_ref_counts);
- hi; hi = apr_hash_next(hi))
- {
- struct value_t *value;
-
- SVN_ERR(cancel_func(NULL));
-
- value = svn__apr_hash_index_val(hi);
- SVN_ERR(svn_cmdline_printf(scratch_pool, "%s %" APR_UINT64_T_FMT " %s\n",
- name, value->refcount,
- svn_checksum_to_cstring_display(
- value->sha1_checksum,
- scratch_pool)));
- }
-
- return SVN_NO_ERROR;
-}
-
-/* Return an error unless FS is an fsfs fs. */
-static svn_error_t *is_fs_fsfs(svn_fs_t *fs, apr_pool_t *scratch_pool)
-{
- const char *actual, *expected, *path;
-
- path = svn_fs_path(fs, scratch_pool);
-
- expected = SVN_FS_TYPE_FSFS;
- SVN_ERR(svn_fs_type(&actual, path, scratch_pool));
-
- if (strcmp(actual, expected) != 0)
- return svn_error_createf(SVN_ERR_FS_UNKNOWN_FS_TYPE, NULL,
- "Filesystem '%s' is not of type '%s'",
- svn_dirent_local_style(path, scratch_pool),
- actual);
-
- return SVN_NO_ERROR;
-}
-
-/* The core logic. This function iterates the repository REPOS_PATH
- * and sends all the (DATA and/or PROP) reps in each revision for counting
- * by process_one_revision(). QUIET is passed to process_one_revision().
- */
-static svn_error_t *process(const char *repos_path,
- svn_boolean_t prop,
- svn_boolean_t data,
- svn_boolean_t quiet,
- apr_pool_t *scratch_pool)
-{
- apr_hash_t *prop_reps = NULL;
- apr_hash_t *data_reps = NULL;
- apr_hash_t *both_reps = NULL;
- svn_revnum_t rev, youngest;
- apr_pool_t *iterpool;
- svn_repos_t *repos;
- svn_fs_t *fs;
-
- if (prop)
- prop_reps = apr_hash_make(scratch_pool);
- if (data)
- data_reps = apr_hash_make(scratch_pool);
- if (prop && data)
- both_reps = apr_hash_make(scratch_pool);
-
- /* Open the FS. */
- SVN_ERR(svn_repos_open2(&repos, repos_path, NULL, scratch_pool));
- fs = svn_repos_fs(repos);
-
- SVN_ERR(is_fs_fsfs(fs, scratch_pool));
-
- SVN_ERR(svn_fs_youngest_rev(&youngest, fs, scratch_pool));
-
- /* Iterate the revisions. */
- iterpool = svn_pool_create(scratch_pool);
- for (rev = 0; rev <= youngest; rev++)
- {
- svn_pool_clear(iterpool);
- SVN_ERR(cancel_func(NULL));
- SVN_ERR(process_one_revision(fs, rev, quiet,
- prop_reps, data_reps, both_reps,
- scratch_pool, iterpool));
- }
- svn_pool_destroy(iterpool);
-
- /* Print stats. */
- SVN_ERR(pretty_print("prop", prop_reps, scratch_pool));
- SVN_ERR(pretty_print("data", data_reps, scratch_pool));
- SVN_ERR(pretty_print("both", both_reps, scratch_pool));
-
- return SVN_NO_ERROR;
-}
-
-int
-main(int argc, const char *argv[])
-{
- const char *repos_path;
- apr_pool_t *pool;
- svn_boolean_t prop = FALSE, data = FALSE;
- svn_boolean_t quiet = FALSE;
- svn_error_t *err;
- apr_getopt_t *os;
- const apr_getopt_option_t options[] =
- {
- {"data", OPT_DATA, 0, N_("display data reps stats")},
- {"prop", OPT_PROP, 0, N_("display prop reps stats")},
- {"both", OPT_BOTH, 0, N_("display combined (data+prop) reps stats")},
- {"quiet", 'q', 0, N_("no progress (only errors) to stderr")},
- {"help", 'h', 0, N_("display this help")},
- {"version", OPT_VERSION, 0,
- N_("show program version information")},
- {0, 0, 0, 0}
- };
-
- /* Initialize the app. */
- if (svn_cmdline_init("svn-rep-sharing-stats", stderr) != EXIT_SUCCESS)
- return EXIT_FAILURE;
-
- /* Create our top-level pool. Use a separate mutexless allocator,
- * given this application is single threaded.
- */
- pool = apr_allocator_owner_get(svn_pool_create_allocator(FALSE));
-
- /* Check library versions */
- err = check_lib_versions();
- if (err)
- return svn_cmdline_handle_exit_error(err, pool, "svn-rep-sharing-stats: ");
-
- err = svn_cmdline__getopt_init(&os, argc, argv, pool);
- if (err)
- return svn_cmdline_handle_exit_error(err, pool, "svn-rep-sharing-stats: ");
-
- SVN_INT_ERR(check_experimental());
-
- os->interleave = 1;
- while (1)
- {
- int opt;
- const char *arg;
- apr_status_t status = apr_getopt_long(os, options, &opt, &arg);
- if (APR_STATUS_IS_EOF(status))
- break;
- if (status != APR_SUCCESS)
- {
- usage(pool);
- return EXIT_FAILURE;
- }
- switch (opt)
- {
- case OPT_DATA:
- data = TRUE;
- break;
- /* It seems we don't actually rep-share props yet. */
- case OPT_PROP:
- prop = TRUE;
- break;
- case OPT_BOTH:
- data = TRUE;
- prop = TRUE;
- break;
- case 'q':
- quiet = TRUE;
- break;
- case 'h':
- help(options, pool);
- break;
- case OPT_VERSION:
- SVN_INT_ERR(version(pool));
- exit(0);
- break;
- default:
- usage(pool);
- return EXIT_FAILURE;
- }
- }
-
- /* Exactly 1 non-option argument,
- * and at least one of "--data"/"--prop"/"--both".
- */
- if (os->ind + 1 != argc || (!data && !prop))
- {
- usage(pool);
- return EXIT_FAILURE;
- }
-
- /* Grab REPOS_PATH from argv. */
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&repos_path, os->argv[os->ind], pool));
- repos_path = svn_dirent_internal_style(repos_path, pool);
-
- set_up_cancellation();
-
- /* Do something. */
- SVN_INT_ERR(process(repos_path, prop, data, quiet, pool));
-
- /* We're done. */
-
- svn_pool_destroy(pool);
- /* Flush stdout to make sure that the user will see any printing errors. */
- SVN_INT_ERR(svn_cmdline_fflush(stdout));
-
- return EXIT_SUCCESS;
-}
diff --git a/tools/server-side/svnauthz.c b/tools/server-side/svnauthz.c
index ab8c62d..3fadd23 100644
--- a/tools/server-side/svnauthz.c
+++ b/tools/server-side/svnauthz.c
@@ -234,7 +234,7 @@ get_authz_from_txn(svn_authz_t **authz, const char *repos_path,
svn_error_t *err;
/* Open up the repository and find the transaction root */
- SVN_ERR(svn_repos_open2(&repos, repos_path, NULL, pool));
+ SVN_ERR(svn_repos_open3(&repos, repos_path, NULL, pool, pool));
fs = svn_repos_fs(repos);
SVN_ERR(svn_fs_open_txn(&txn, fs, txn_name, pool));
SVN_ERR(svn_fs_txn_root(&root, txn, pool));
@@ -382,42 +382,6 @@ subcommand_accessof(apr_getopt_t *os, void *baton, apr_pool_t *pool)
#undef EXIT_FAILURE
#define EXIT_FAILURE 2
-/* Similar to svn_cmdline_handle_exit_error but with an exit_code argument
- so we can comply with our contract and exit with 2 for internal failures.
- Also is missing the pool argument since we don't need it given
- main/sub_main. */
-static int
-handle_exit_error(svn_error_t *err, const char *prefix, int exit_code)
-{
- /* Issue #3014:
- * Don't print anything on broken pipes. The pipe was likely
- * closed by the process at the other end. We expect that
- * process to perform error reporting as necessary.
- *
- * ### This assumes that there is only one error in a chain for
- * ### SVN_ERR_IO_PIPE_WRITE_ERROR. See svn_cmdline_fputs(). */
- if (err->apr_err != SVN_ERR_IO_PIPE_WRITE_ERROR)
- svn_handle_error2(err, stderr, FALSE, prefix);
- svn_error_clear(err);
- return exit_code;
-}
-
-/* Report and clear the error ERR, and return EXIT_FAILURE. */
-#define EXIT_ERROR(err, exit_code) \
- handle_exit_error(err, "svnauthz: ", exit_code)
-
-/* A redefinition of the public SVN_INT_ERR macro, that suppresses the
- * error message if it is SVN_ERR_IO_PIPE_WRITE_ERROR, amd with the
- * program name 'svnauthz' instead of 'svn'. */
-#undef SVN_INT_ERR
-#define SVN_INT_ERR(expr) \
- do { \
- svn_error_t *svn_err__temp = (expr); \
- if (svn_err__temp) \
- return EXIT_ERROR(svn_err__temp, EXIT_FAILURE); \
- } while (0)
-
-
/* Return TRUE if the UI of 'svnauthz-validate' (svn 1.7 and earlier)
should be emulated, given argv[0]. */
static svn_boolean_t
@@ -485,8 +449,13 @@ canonicalize_access_file(const char **canonicalized_access_file,
return SVN_NO_ERROR;
}
-static int
-sub_main(int argc, const char *argv[], apr_pool_t *pool)
+/*
+ * On success, leave *EXIT_CODE untouched and return SVN_NO_ERROR. On error,
+ * either return an error to be displayed, or set *EXIT_CODE to non-zero and
+ * return SVN_NO_ERROR.
+ */
+static svn_error_t *
+sub_main(int *exit_code, int argc, const char *argv[], apr_pool_t *pool)
{
svn_error_t *err;
@@ -497,7 +466,7 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
int i;
/* Initialize the FS library. */
- SVN_INT_ERR(svn_fs_initialize(pool));
+ SVN_ERR(svn_fs_initialize(pool));
received_opts = apr_array_make(pool, SVN_OPT_MAX_OPTIONS, sizeof(int));
@@ -506,7 +475,7 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
opt_state.txn = opt_state.repos_path = opt_state.groups_file = NULL;
/* Parse options. */
- SVN_INT_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
+ SVN_ERR(svn_cmdline__getopt_init(&os, argc, argv, pool));
os->interleave = 1;
if (!use_compat_mode(argv[0], pool))
@@ -521,8 +490,9 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
break;
if (status != APR_SUCCESS)
{
- SVN_INT_ERR(subcommand_help(NULL, NULL, pool));
- return EXIT_FAILURE;
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
}
/* Stash the option code in an array before parsing it. */
@@ -535,7 +505,7 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
opt_state.help = TRUE;
break;
case 't':
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.txn, arg, pool));
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.txn, arg, pool));
break;
case 'R':
opt_state.recursive = TRUE;
@@ -544,28 +514,29 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
opt_state.version = TRUE;
break;
case svnauthz__username:
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.username, arg, pool));
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.username, arg, pool));
break;
case svnauthz__path:
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.fspath, arg, pool));
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.fspath, arg, pool));
opt_state.fspath = svn_fspath__canonicalize(opt_state.fspath,
pool);
break;
case svnauthz__repos:
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.repos_name, arg, pool));
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.repos_name, arg, pool));
break;
case svnauthz__is:
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.is, arg, pool));
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.is, arg, pool));
break;
case svnauthz__groups_file:
- SVN_INT_ERR(
+ SVN_ERR(
svn_utf_cstring_to_utf8(&opt_state.groups_file,
arg, pool));
break;
default:
{
- SVN_INT_ERR(subcommand_help(NULL, NULL, pool));
- return EXIT_FAILURE;
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
}
}
}
@@ -603,8 +574,9 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
{
svn_error_clear(svn_cmdline_fprintf(stderr, pool,
("subcommand argument required\n")));
- SVN_INT_ERR(subcommand_help(NULL, NULL, pool));
- return EXIT_FAILURE;
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
}
}
else
@@ -616,14 +588,15 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
const char *first_arg_utf8;
os->ind++;
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&first_arg_utf8,
+ SVN_ERR(svn_utf_cstring_to_utf8(&first_arg_utf8,
first_arg, pool));
svn_error_clear(
svn_cmdline_fprintf(stderr, pool,
("Unknown subcommand: '%s'\n"),
first_arg_utf8));
- SVN_INT_ERR(subcommand_help(NULL, NULL, pool));
- return EXIT_FAILURE;
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
}
}
}
@@ -637,13 +610,12 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
{
if (os->ind +2 != argc)
{
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- ("Repository and authz file arguments "
- "required"));
- return EXIT_ERROR(err, EXIT_FAILURE);
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("Repository and authz file arguments "
+ "required"));
}
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.repos_path, os->argv[os->ind],
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.repos_path, os->argv[os->ind],
pool));
os->ind++;
@@ -653,24 +625,23 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
/* Exactly 1 non-option argument */
if (os->ind + 1 != argc)
{
- err = svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
- ("Authz file argument required"));
- return EXIT_ERROR(err, EXIT_FAILURE);
+ return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL,
+ ("Authz file argument required"));
}
/* Grab AUTHZ_FILE from argv. */
- SVN_INT_ERR(svn_utf_cstring_to_utf8(&opt_state.authz_file, os->argv[os->ind],
+ SVN_ERR(svn_utf_cstring_to_utf8(&opt_state.authz_file, os->argv[os->ind],
pool));
/* Canonicalize opt_state.authz_file appropriately. */
- SVN_INT_ERR(canonicalize_access_file(&opt_state.authz_file,
+ SVN_ERR(canonicalize_access_file(&opt_state.authz_file,
opt_state.authz_file,
opt_state.txn != NULL, pool));
/* Same for opt_state.groups_file if it is present. */
if (opt_state.groups_file)
{
- SVN_INT_ERR(canonicalize_access_file(&opt_state.groups_file,
+ SVN_ERR(canonicalize_access_file(&opt_state.groups_file,
opt_state.groups_file,
opt_state.txn != NULL, pool));
}
@@ -696,13 +667,14 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
pool);
svn_opt_format_option(&optstr, badopt, FALSE, pool);
if (subcommand->name[0] == '-')
- SVN_INT_ERR(subcommand_help(NULL, NULL, pool));
+ SVN_ERR(subcommand_help(NULL, NULL, pool));
else
svn_error_clear(svn_cmdline_fprintf(stderr, pool,
("Subcommand '%s' doesn't accept option '%s'\n"
"Type 'svnauthz help %s' for usage.\n"),
subcommand->name, optstr, subcommand->name));
- return EXIT_FAILURE;
+ *exit_code = EXIT_FAILURE;
+ return SVN_NO_ERROR;
}
}
@@ -724,7 +696,8 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
{
/* Follow our contract that says we exit with 1 if the file does not
validate. */
- return EXIT_ERROR(err, 1);
+ *exit_code = 1;
+ return err;
}
else if (err->apr_err == SVN_ERR_AUTHZ_UNREADABLE
|| err->apr_err == SVN_ERR_AUTHZ_UNWRITABLE
@@ -732,31 +705,22 @@ sub_main(int argc, const char *argv[], apr_pool_t *pool)
{
/* Follow our contract that says we exit with 3 if --is does not
* match. */
- return EXIT_ERROR(err, 3);
+ *exit_code = 3;
+ return err;
}
-
- return EXIT_ERROR(err, EXIT_FAILURE);
- }
- else
- {
- /* Ensure that everything is written to stdout, so the user will
- see any print errors. */
- err = svn_cmdline_fflush(stdout);
- if (err)
- {
- return EXIT_ERROR(err, EXIT_FAILURE);
- }
- return EXIT_SUCCESS;
+ return err;
}
+ return SVN_NO_ERROR;
}
int
main(int argc, const char *argv[])
{
apr_pool_t *pool;
- int exit_code;
+ int exit_code = EXIT_SUCCESS;
+ svn_error_t *err;
/* Initialize the app. Send all error messages to 'stderr'. */
if (svn_cmdline_init(argv[0], stderr) != EXIT_SUCCESS)
@@ -764,7 +728,18 @@ main(int argc, const char *argv[])
pool = svn_pool_create(NULL);
- exit_code = sub_main(argc, argv, pool);
+ err = sub_main(&exit_code, argc, argv, pool);
+
+ /* Flush stdout and report if it fails. It would be flushed on exit anyway
+ but this makes sure that output is not silently lost if it fails. */
+ err = svn_error_compose_create(err, svn_cmdline_fflush(stdout));
+
+ if (err)
+ {
+ if (exit_code == 0)
+ exit_code = EXIT_FAILURE;
+ svn_cmdline_handle_exit_error(err, NULL, "svnauthz: ");
+ }
svn_pool_destroy(pool);
return exit_code;
diff --git a/tools/server-side/svnpredumpfilter.py b/tools/server-side/svnpredumpfilter.py
index 5a74755..f6a97c2 100755
--- a/tools/server-side/svnpredumpfilter.py
+++ b/tools/server-side/svnpredumpfilter.py
@@ -38,6 +38,10 @@ Use the default ordering of revisions (that is, '-r HEAD:0').
Return errorcode 0 if there are no additional dependencies found, 1 if
there were; any other errorcode indicates a fatal error.
+Paths in mergeinfo are not considered as additional dependencies so the
+--skip-missing-merge-sources option of 'svndumpfilter' may be required
+for successful filtering with the resulting path list.
+
Options:
--help (-h) Show this usage message and exit.
@@ -68,7 +72,7 @@ def sanitize_path(path):
def subsumes(path, maybe_child):
if path == maybe_child:
return True
- if maybe_child.find(path + '/') == 0:
+ if maybe_child.startswith(path + '/'):
return True
return False
@@ -117,20 +121,35 @@ def log(msg, min_verbosity):
class DependencyTracker:
def __init__(self, include_paths):
- self.include_paths = include_paths[:]
- self.dependent_paths = []
+ self.include_paths = set(include_paths)
+ self.dependent_paths = set()
def path_included(self, path):
- for include_path in self.include_paths + self.dependent_paths:
+ for include_path in self.include_paths | self.dependent_paths:
if subsumes(include_path, path):
return True
return False
- def handle_changes(self, path_copies):
- for path, copyfrom_path in path_copies.items():
- if self.path_included(path) and copyfrom_path:
- if not self.path_included(copyfrom_path):
- self.dependent_paths.append(copyfrom_path)
+ def include_missing_copies(self, path_copies):
+ while True:
+ log("Cross-checking %d included paths with %d copies "
+ "for missing path dependencies..." % (
+ len(self.include_paths) + len(self.dependent_paths),
+ len(path_copies)),
+ 1)
+ included_copies = []
+ for path, copyfrom_path in path_copies:
+ if self.path_included(path):
+ log("Adding copy '%s' -> '%s'" % (copyfrom_path, path), 1)
+ self.dependent_paths.add(copyfrom_path)
+ included_copies.append((path, copyfrom_path))
+ if not included_copies:
+ log("Found all missing path dependencies", 1)
+ break
+ for path, copyfrom_path in included_copies:
+ path_copies.remove((path, copyfrom_path))
+ log("Found %d new copy dependencies, need to re-check for more"
+ % len(included_copies), 1)
def readline(stream):
line = stream.readline()
@@ -151,7 +170,7 @@ def svn_log_stream_get_dependencies(stream, included_paths):
line_buf = None
last_revision = 0
eof = False
- path_copies = {}
+ path_copies = set()
found_changed_path = False
while not eof:
@@ -195,16 +214,15 @@ def svn_log_stream_get_dependencies(stream, included_paths):
except EOFError:
eof = True
break
- match = action_re.search(line)
+ match = copy_action_re.search(line)
if match:
found_changed_path = True
- match = copy_action_re.search(line)
- if match:
- path_copies[sanitize_path(match.group(1))] = \
- sanitize_path(match.group(2))
+ path_copies.add((sanitize_path(match.group(1)),
+ sanitize_path(match.group(2))))
+ elif action_re.search(line):
+ found_changed_path = True
else:
break
- dt.handle_changes(path_copies)
# Finally, skip any log message lines. (If there are none,
# remember the last line we read, because it probably has
@@ -221,6 +239,7 @@ def svn_log_stream_get_dependencies(stream, included_paths):
"'svn log' with the --verbose (-v) option when "
"generating the input to this script?")
+ dt.include_missing_copies(path_copies)
return dt
def analyze_logs(included_paths):
diff --git a/tools/server-side/svnpubsub/commit-hook.py b/tools/server-side/svnpubsub/commit-hook.py
index 4a1a3f3..4e6a1cc 100755
--- a/tools/server-side/svnpubsub/commit-hook.py
+++ b/tools/server-side/svnpubsub/commit-hook.py
@@ -23,7 +23,6 @@ HOST="127.0.0.1"
PORT=2069
import sys
-import subprocess
try:
import simplejson as json
except ImportError:
@@ -31,32 +30,32 @@ except ImportError:
import urllib2
-def svncmd(cmd):
- return subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
+import svnpubsub.util
-def svncmd_uuid(repo):
- cmd = "%s uuid %s" % (SVNLOOK, repo)
- p = svncmd(cmd)
- return p.stdout.read().strip()
+def svnlook(cmd, **kwargs):
+ args = [SVNLOOK] + cmd
+ return svnpubsub.util.check_output(args, **kwargs)
-def svncmd_info(repo, revision):
- cmd = "%s info -r %s %s" % (SVNLOOK, revision, repo)
- p = svncmd(cmd)
- data = p.stdout.read().split("\n")
+def svnlook_uuid(repo):
+ cmd = ["uuid", "--", repo]
+ return svnlook(cmd).strip()
+
+def svnlook_info(repo, revision):
+ cmd = ["info", "-r", revision, "--", repo]
+ data = svnlook(cmd, universal_newlines=True).split("\n")
#print data
return {'author': data[0].strip(),
'date': data[1].strip(),
'log': "\n".join(data[3:]).strip()}
-def svncmd_changed(repo, revision):
- cmd = "%s changed -r %s %s" % (SVNLOOK, revision, repo)
- p = svncmd(cmd)
+def svnlook_changed(repo, revision):
+ cmd = ["changed", "-r", revision, "--", repo]
+ lines = svnlook(cmd, universal_newlines=True).split("\n")
changed = {}
- while True:
- line = p.stdout.readline()
- if not line:
- break
+ for line in lines:
line = line.strip()
+ if not line:
+ continue
(flags, filename) = (line[0:3], line[4:])
changed[filename] = {'flags': flags}
return changed
@@ -71,23 +70,23 @@ def do_put(body):
def main(repo, revision):
revision = revision.lstrip('r')
- i = svncmd_info(repo, revision)
+ i = svnlook_info(repo, revision)
data = {'type': 'svn',
'format': 1,
'id': int(revision),
'changed': {},
- 'repository': svncmd_uuid(repo),
+ 'repository': svnlook_uuid(repo),
'committer': i['author'],
'log': i['log'],
'date': i['date'],
}
- data['changed'].update(svncmd_changed(repo, revision))
+ data['changed'].update(svnlook_changed(repo, revision))
body = json.dumps(data)
do_put(body)
if __name__ == "__main__":
if len(sys.argv) not in (3, 4):
sys.stderr.write("invalid args\n")
- sys.exit(0)
+ sys.exit(1)
main(*sys.argv[1:3])
diff --git a/tools/server-side/svnpubsub/daemonize.py b/tools/server-side/svnpubsub/daemonize.py
index 8b85258..41b1bec 100644
--- a/tools/server-side/svnpubsub/daemonize.py
+++ b/tools/server-side/svnpubsub/daemonize.py
@@ -24,6 +24,7 @@ import os
import signal
import sys
import time
+import multiprocessing # requires Python 2.6
# possible return values from Daemon.daemonize()
@@ -50,11 +51,11 @@ class Daemon(object):
def daemonize_exit(self):
try:
result = self.daemonize()
- except (ChildFailed, DaemonFailed) as e:
+ except (ChildFailed, DaemonFailed), e:
# duplicate the exit code
sys.exit(e.code)
except (ChildTerminatedAbnormally, ChildForkFailed,
- DaemonTerminatedAbnormally, DaemonForkFailed) as e:
+ DaemonTerminatedAbnormally, DaemonForkFailed), e:
sys.stderr.write('ERROR: %s\n' % e)
sys.exit(1)
except ChildResumedIncorrectly:
@@ -71,29 +72,41 @@ class Daemon(object):
# in original process. daemon is up and running. we're done.
def daemonize(self):
- # fork off a child that can detach itself from this process.
- try:
- pid = os.fork()
- except OSError as e:
- raise ChildForkFailed(e.errno, e.strerror)
-
- if pid > 0:
- # we're in the parent. let's wait for the child to finish setting
- # things up -- on our exit, we want to ensure the child is accepting
- # connections.
- cpid, status = os.waitpid(pid, 0)
- assert pid == cpid
- if os.WIFEXITED(status):
- code = os.WEXITSTATUS(status)
- if code:
- raise ChildFailed(code)
- return DAEMON_RUNNING
-
- # the child did not exit cleanly.
- raise ChildTerminatedAbnormally(status)
-
+ ### review error situations. map to backwards compat. ??
+ ### be mindful of daemonize_exit().
+ ### we should try and raise ChildFailed / ChildTerminatedAbnormally.
+ ### ref: older revisions. OR: remove exceptions.
+
+ child_is_ready = multiprocessing.Event()
+ child_completed = multiprocessing.Event()
+
+ p = multiprocessing.Process(target=self._first_child,
+ args=(child_is_ready, child_completed))
+ p.start()
+
+ # Wait for the child to finish setting things up (in case we need
+ # to communicate with it). It will only exit when ready.
+ ### use a timeout here! (parameterized, of course)
+ p.join()
+
+ ### need to propagate errors, to adjust the return codes
+ if child_completed.is_set():
+ ### what was the exit status?
+ return DAEMON_COMPLETE
+ if child_is_ready.is_set():
+ return DAEMON_RUNNING
+
+ ### how did we get here?! the immediate child should not exit without
+ ### signalling ready/complete. some kind of error.
+ return DAEMON_STARTED
+
+ def _first_child(self, child_is_ready, child_completed):
# we're in the child.
+ ### NOTE: the original design was a bit bunk. Exceptions raised from
+ ### this point are within the child processes. We need to signal the
+ ### errors to the parent in other ways.
+
# decouple from the parent process
os.chdir('/')
os.umask(0)
@@ -102,63 +115,86 @@ class Daemon(object):
# remember this pid so the second child can signal it.
thispid = os.getpid()
- # register a signal handler so the SIGUSR1 doesn't stop the process.
- # this object will also record whether if got signalled.
- daemon_accepting = SignalCatcher(signal.SIGUSR1)
-
- # if the daemon process exits before sending SIGUSR1, then we need to see
- # the problem. trap SIGCHLD with a SignalCatcher.
+ # if the daemon process exits before signalling readiness, then we
+ # need to see the problem. trap SIGCHLD with a SignalCatcher.
daemon_exit = SignalCatcher(signal.SIGCHLD)
# perform the second fork
try:
pid = os.fork()
- except OSError as e:
+ except OSError, e:
+ ### this won't make it to the parent process
raise DaemonForkFailed(e.errno, e.strerror)
if pid > 0:
# in the parent.
- # we want to wait for the daemon to signal that it has created and
- # bound the socket, and is (thus) ready for connections. if the
- # daemon improperly exits before serving, we'll see SIGCHLD and the
- # .pause will return.
- ### we should add a timeout to this. allow an optional parameter to
- ### specify the timeout, in case it takes a long time to start up.
- signal.pause()
+
+ # Wait for the child to be ready for operation.
+ while True:
+ # The readiness event will invariably be signalled early/first.
+ # If it *doesn't* get signalled because the child has prematurely
+ # exited, then we will pause 10ms before noticing the exit. The
+ # pause is acceptable since that is aberrant/unexpected behavior.
+ ### is there a way to break this wait() on a signal such as SIGCHLD?
+ ### parameterize this wait, in case the app knows children may
+ ### fail quickly?
+ if child_is_ready.wait(timeout=0.010):
+ # The child signalled readiness. Yay!
+ break
+ if daemon_exit.signalled:
+ # Whoops. The child exited without signalling :-(
+ break
+ # Python 2.6 compat: .wait() may exit when set, but return None
+ if child_is_ready.is_set():
+ break
+ # A simple timeout. The child is taking a while to prepare. Go
+ # back and wait for readiness.
if daemon_exit.signalled:
+ # Tell the parent that the child has exited.
+ ### we need to communicate the exit status, if possible.
+ child_completed.set()
+
# reap the daemon process, getting its exit code. bubble it up.
cpid, status = os.waitpid(pid, 0)
assert pid == cpid
if os.WIFEXITED(status):
code = os.WEXITSTATUS(status)
if code:
+ ### this won't make it to the parent process
raise DaemonFailed(code)
+ ### this return value is ignored
return DAEMON_NOT_RUNNING
# the daemon did not exit cleanly.
+ ### this won't make it to the parent process
raise DaemonTerminatedAbnormally(status)
- if daemon_accepting.signalled:
- # the daemon is up and running, so save the pid and return success.
- if self.pidfile:
- # Be wary of symlink attacks
- try:
- os.remove(self.pidfile)
- except OSError:
- pass
- fd = os.open(self.pidfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0444)
- os.write(fd, '%d\n' % pid)
- os.close(fd)
- return DAEMON_STARTED
-
+ # child_is_ready got asserted. the daemon is up and running, so
+ # save the pid and return success.
+ if self.pidfile:
+ # Be wary of symlink attacks
+ try:
+ os.remove(self.pidfile)
+ except OSError:
+ pass
+ fd = os.open(self.pidfile, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0444)
+ os.write(fd, '%d\n' % pid)
+ os.close(fd)
+
+ ### this return value is ignored
+ return DAEMON_STARTED
+
+ ### old code. what to do with this? throw ChildResumedIncorrectly
+ ### or just toss this and the exception.
# some other signal popped us out of the pause. the daemon might not
# be running.
+ ### this won't make it to the parent process
raise ChildResumedIncorrectly()
- # we're a deamon now. get rid of the final remnants of the parent.
- # start by restoring default signal handlers
+ # we're a daemon now. get rid of the final remnants of the parent:
+ # restore the signal handlers and switch std* to the proper files.
signal.signal(signal.SIGUSR1, signal.SIG_DFL)
signal.signal(signal.SIGCHLD, signal.SIG_DFL)
sys.stdout.flush()
@@ -176,30 +212,31 @@ class Daemon(object):
so.close()
se.close()
- # TEST: don't release the parent immediately. the whole parent stack
- # should pause along with this sleep.
+ ### TEST: don't release the parent immediately. the whole parent stack
+ ### should pause along with this sleep.
#time.sleep(10)
# everything is set up. call the initialization function.
self.setup()
- # sleep for one second before signalling. we want to make sure the
- # parent has called signal.pause()
- ### we should think of a better wait around the race condition.
- time.sleep(1)
+ ### TEST: exit before signalling.
+ #sys.exit(0)
+ #sys.exit(1)
- # okay. the daemon is ready. signal the parent to tell it we're set.
- os.kill(thispid, signal.SIGUSR1)
+ # the child is now ready for parent/anyone to communicate with it.
+ child_is_ready.set()
# start the daemon now.
self.run()
# The daemon is shutting down, so toss the pidfile.
- try:
- os.remove(self.pidfile)
- except OSError:
- pass
+ if self.pidfile:
+ try:
+ os.remove(self.pidfile)
+ except OSError:
+ pass
+ ### this return value is ignored
return DAEMON_COMPLETE
def setup(self):
@@ -209,6 +246,34 @@ class Daemon(object):
raise NotImplementedError
+class _Detacher(Daemon):
+ def __init__(self, target, logfile='/dev/null', pidfile=None,
+ args=(), kwargs={}):
+ Daemon.__init__(self, logfile, pidfile)
+ self.target = target
+ self.args = args
+ self.kwargs = kwargs
+
+ def setup(self):
+ pass
+
+ def run(self):
+ self.target(*self.args, **self.kwargs)
+
+
+def run_detached(target, *args, **kwargs):
+ """Simple function to run TARGET as a detached daemon.
+
+ The additional arguments/keywords will be passed along. This function
+ does not return -- sys.exit() will be called as appropriate.
+
+ (capture SystemExit if logging/reporting is necessary)
+ ### if needed, a variant of this func could be written to not exit
+ """
+ d = _Detacher(target, args=args, kwargs=kwargs)
+ d.daemonize_exit()
+
+
class SignalCatcher(object):
def __init__(self, signum):
self.signalled = False
diff --git a/tools/server-side/svnpubsub/rc.d/svnpubsub.freebsd b/tools/server-side/svnpubsub/rc.d/svnpubsub.freebsd
index 71fc8c8..79b5901 100755
--- a/tools/server-side/svnpubsub/rc.d/svnpubsub.freebsd
+++ b/tools/server-side/svnpubsub/rc.d/svnpubsub.freebsd
@@ -26,7 +26,7 @@ pidfile="${svnpubsub_pidfile}"
export PYTHON_EGG_CACHE="/home/svn/.python-eggs"
command="/usr/local/bin/twistd"
-command_interpreter="/usr/local/bin/${svnwcsub_cmd_int}"
+command_interpreter="/usr/local/bin/${svnpubsub_cmd_int}"
command_args="-y /usr/local/svnpubsub/svnpubsub.tac \
--logfile=/var/log/vc/svnpubsub.log \
--pidfile=${pidfile} \
diff --git a/tools/server-side/svnpubsub/revprop-change-hook.py b/tools/server-side/svnpubsub/revprop-change-hook.py
new file mode 100755
index 0000000..3aa857b
--- /dev/null
+++ b/tools/server-side/svnpubsub/revprop-change-hook.py
@@ -0,0 +1,90 @@
+#!/usr/local/bin/python
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+SVNLOOK="/usr/local/svn-install/current/bin/svnlook"
+#SVNLOOK="/usr/local/bin/svnlook"
+
+HOST="127.0.0.1"
+PORT=2069
+
+import sys
+try:
+ import simplejson as json
+except ImportError:
+ import json
+
+import urllib2
+
+
+import svnpubsub.util
+
+def svnlook(cmd, **kwargs):
+ args = [SVNLOOK] + cmd
+ return svnpubsub.util.check_output(args, **kwargs)
+
+def svnlook_uuid(repo):
+ cmd = ["uuid", "--", repo]
+ return svnlook(cmd).strip()
+
+def svnlook_revprop(repo, revision, propname):
+ cmd = ["propget", "-r", revision, "--revprop", "--", repo, propname]
+ data = svnlook(cmd)
+ #print data
+ return data
+
+def do_put(body):
+ opener = urllib2.build_opener(urllib2.HTTPHandler)
+ request = urllib2.Request("http://%s:%d/metadata" %(HOST, PORT), data=body)
+ request.add_header('Content-Type', 'application/json')
+ request.get_method = lambda: 'PUT'
+ url = opener.open(request)
+
+
+def main(repo, revision, author, propname, action):
+ revision = revision.lstrip('r')
+ if action in ('A', 'M'):
+ new_value = svnlook_revprop(repo, revision, propname)
+ elif action == 'D':
+ new_value = None
+ else:
+ sys.stderr.write('Unknown revprop change action "%s"\n' % action)
+ sys.exit(1)
+ if action in ('D', 'M'):
+ old_value = sys.stdin.read()
+ else:
+ old_value = None
+ data = {'type': 'svn',
+ 'format': 1,
+ 'id': int(revision),
+ 'repository': svnlook_uuid(repo),
+ 'revprop': {
+ 'name': propname,
+ 'committer': author,
+ 'value': new_value,
+ 'old_value': old_value,
+ }
+ }
+ body = json.dumps(data)
+ do_put(body)
+
+if __name__ == "__main__":
+ if len(sys.argv) != 6:
+ sys.stderr.write("invalid args\n")
+ sys.exit(1)
+
+ main(*sys.argv[1:6])
diff --git a/tools/server-side/svnpubsub/svnpubsub/client.py b/tools/server-side/svnpubsub/svnpubsub/client.py
index c1631d6..871a5e9 100644
--- a/tools/server-side/svnpubsub/svnpubsub/client.py
+++ b/tools/server-side/svnpubsub/svnpubsub/client.py
@@ -62,7 +62,8 @@ class SvnpubsubClientException(Exception):
class Client(asynchat.async_chat):
- def __init__(self, url, commit_callback, event_callback):
+ def __init__(self, url, commit_callback, event_callback,
+ metadata_callback = None):
asynchat.async_chat.__init__(self)
self.last_activity = time.time()
@@ -82,7 +83,8 @@ class Client(asynchat.async_chat):
self.event_callback = event_callback
- self.parser = JSONRecordHandler(commit_callback, event_callback)
+ self.parser = JSONRecordHandler(commit_callback, event_callback,
+ metadata_callback)
# Wait for the end of headers. Then we start parsing JSON.
self.set_terminator(b'\r\n\r\n')
@@ -126,36 +128,50 @@ class Client(asynchat.async_chat):
self.ibuffer.append(data)
+class Notification(object):
+ def __init__(self, data):
+ self.__dict__.update(data)
+
+class Commit(Notification):
+ KIND = 'COMMIT'
+
+class Metadata(Notification):
+ KIND = 'METADATA'
+
+
class JSONRecordHandler:
- def __init__(self, commit_callback, event_callback):
+ def __init__(self, commit_callback, event_callback, metadata_callback):
self.commit_callback = commit_callback
self.event_callback = event_callback
+ self.metadata_callback = metadata_callback
+
+ EXPECTED_VERSION = 1
def feed(self, record):
obj = json.loads(record)
if 'svnpubsub' in obj:
actual_version = obj['svnpubsub'].get('version')
- EXPECTED_VERSION = 1
- if actual_version != EXPECTED_VERSION:
- raise SvnpubsubClientException("Unknown svnpubsub format: %r != %d"
- % (actual_format, expected_format))
+ if actual_version != self.EXPECTED_VERSION:
+ raise SvnpubsubClientException(
+ "Unknown svnpubsub format: %r != %d"
+ % (actual_version, self.EXPECTED_VERSION))
self.event_callback('version', obj['svnpubsub']['version'])
elif 'commit' in obj:
commit = Commit(obj['commit'])
self.commit_callback(commit)
elif 'stillalive' in obj:
self.event_callback('ping', obj['stillalive'])
-
-
-class Commit(object):
- def __init__(self, commit):
- self.__dict__.update(commit)
+ elif 'metadata' in obj and self.metadata_callback:
+ metadata = Metadata(obj['metadata'])
+ self.metadata_callback(metadata)
class MultiClient(object):
- def __init__(self, urls, commit_callback, event_callback):
+ def __init__(self, urls, commit_callback, event_callback,
+ metadata_callback = None):
self.commit_callback = commit_callback
self.event_callback = event_callback
+ self.metadata_callback = metadata_callback
# No target time, as no work to do
self.target_time = 0
@@ -185,9 +201,15 @@ class MultiClient(object):
def _add_channel(self, url):
# Simply instantiating the client will install it into the global map
# for processing in the main event loop.
- Client(url,
- functools.partial(self.commit_callback, url),
- functools.partial(self._reconnect, url))
+ if self.metadata_callback:
+ Client(url,
+ functools.partial(self.commit_callback, url),
+ functools.partial(self._reconnect, url),
+ functools.partial(self.metadata_callback, url))
+ else:
+ Client(url,
+ functools.partial(self.commit_callback, url),
+ functools.partial(self._reconnect, url))
def _check_stale(self):
now = time.time()
diff --git a/tools/server-side/svnpubsub/svnpubsub/server.py b/tools/server-side/svnpubsub/svnpubsub/server.py
index faee423..d0cdff9 100644
--- a/tools/server-side/svnpubsub/svnpubsub/server.py
+++ b/tools/server-side/svnpubsub/svnpubsub/server.py
@@ -25,20 +25,27 @@
# Instead of using a complicated XMPP/AMPQ/JMS/super messaging service,
# we have simple HTTP GETs and PUTs to get data in and out.
#
-# Currently supports both XML and JSON serialization.
+# Currently supports JSON serialization.
#
# Example Sub clients:
-# curl -sN http://127.0.0.1:2069/commits
-# curl -sN http://127.0.0.1:2069/commits/svn/*
-# curl -sN http://127.0.0.1:2069/commits/svn
-# curl -sN http://127.0.0.1:2069/commits/*/13f79535-47bb-0310-9956-ffa450edef68
-# curl -sN http://127.0.0.1:2069/commits/svn/13f79535-47bb-0310-9956-ffa450edef68
+# curl -sN http://127.0.0.1:2069/commits
+# curl -sN 'http://127.0.0.1:2069/commits/svn/*'
+# curl -sN http://127.0.0.1:2069/commits/svn
+# curl -sN 'http://127.0.0.1:2069/commits/*/13f79535-47bb-0310-9956-ffa450edef68'
+# curl -sN http://127.0.0.1:2069/commits/svn/13f79535-47bb-0310-9956-ffa450edef68
#
-# URL is built into 2 parts:
-# /commits/${optional_type}/${optional_repository}
+# curl -sN http://127.0.0.1:2069/metadata
+# curl -sN 'http://127.0.0.1:2069/metadata/svn/*'
+# curl -sN http://127.0.0.1:2069/metadata/svn
+# curl -sN 'http://127.0.0.1:2069/metadata/*/13f79535-47bb-0310-9956-ffa450edef68'
+# curl -sN http://127.0.0.1:2069/metadata/svn/13f79535-47bb-0310-9956-ffa450edef68
#
-# If the type is included in the URL, you will only get commits of that type.
-# The type can be * and then you will receive commits of any type.
+# URLs are constructed from 3 parts:
+# /${notification}/${optional_type}/${optional_repository}
+#
+# Notifications can be sent for commits or metadata (e.g., revprop) changes.
+# If the type is included in the URL, you will only get notifications of that type.
+# The type can be * and then you will receive notifications of any type.
#
# If the repository is included in the URL, you will only receive
# messages about that repository. The repository can be * and then you
@@ -71,7 +78,7 @@ from twisted.python import log
import time
-class Commit:
+class Notification(object):
def __init__(self, r):
self.__dict__.update(r)
if not self.check_value('repository'):
@@ -86,7 +93,16 @@ class Commit:
def check_value(self, k):
return hasattr(self, k) and self.__dict__[k]
- def render_commit(self):
+ def render(self):
+ raise NotImplementedError
+
+ def render_log(self):
+ raise NotImplementedError
+
+class Commit(Notification):
+ KIND = 'COMMIT'
+
+ def render(self):
obj = {'commit': {}}
obj['commit'].update(self.__dict__)
return json.dumps(obj)
@@ -96,20 +112,32 @@ class Commit:
paths_changed = " %d paths changed" % len(self.changed)
except:
paths_changed = ""
- return "%s:%s repo '%s' id '%s'%s" % (self.type,
- self.format,
- self.repository,
- self.id,
- paths_changed)
+ return "commit %s:%s repo '%s' id '%s'%s" % (
+ self.type, self.format, self.repository, self.id,
+ paths_changed)
+
+class Metadata(Notification):
+ KIND = 'METADATA'
+
+ def render(self):
+ obj = {'metadata': {}}
+ obj['metadata'].update(self.__dict__)
+ return json.dumps(obj)
+
+ def render_log(self):
+ return "metadata %s:%s repo '%s' id '%s' revprop '%s'" % (
+ self.type, self.format, self.repository, self.id,
+ self.revprop['name'])
HEARTBEAT_TIME = 15
class Client(object):
- def __init__(self, pubsub, r, type, repository):
+ def __init__(self, pubsub, r, kind, type, repository):
self.pubsub = pubsub
r.notifyFinish().addErrback(self.finished)
self.r = r
+ self.kind = kind
self.type = type
self.repository = repository
self.alive = True
@@ -123,11 +151,14 @@ class Client(object):
except ValueError:
pass
- def interested_in(self, commit):
- if self.type and self.type != commit.type:
+ def interested_in(self, notification):
+ if self.kind != notification.KIND:
+ return False
+
+ if self.type and self.type != notification.type:
return False
- if self.repository and self.repository != commit.repository:
+ if self.repository and self.repository != notification.repository:
return False
return True
@@ -164,6 +195,13 @@ class SvnPubSub(resource.Resource):
isLeaf = True
clients = []
+ __notification_uri_map = {'commits': Commit.KIND,
+ 'metadata': Metadata.KIND}
+
+ def __init__(self, notification_class):
+ resource.Resource.__init__(self)
+ self.__notification_class = notification_class
+
def cc(self):
return len(self.clients)
@@ -183,6 +221,11 @@ class SvnPubSub(resource.Resource):
request.setResponseCode(400)
return "Invalid path\n"
+ kind = self.__notification_uri_map.get(uri[1], None)
+ if kind is None:
+ request.setResponseCode(400)
+ return "Invalid path\n"
+
if uri_len >= 3:
type = uri[2]
@@ -195,17 +238,18 @@ class SvnPubSub(resource.Resource):
if repository == '*':
repository = None
- c = Client(self, request, type, repository)
+ c = Client(self, request, kind, type, repository)
self.clients.append(c)
c.start()
return twisted.web.server.NOT_DONE_YET
- def notifyAll(self, commit):
- data = commit.render_commit()
+ def notifyAll(self, notification):
+ data = notification.render()
- log.msg("COMMIT: %s (%d clients)" % (commit.render_log(), self.cc()))
+ log.msg("%s: %s (%d clients)"
+ % (notification.KIND, notification.render_log(), self.cc()))
for client in self.clients:
- if client.interested_in(commit):
+ if client.interested_in(notification):
client.write_data(data)
def render_PUT(self, request):
@@ -218,19 +262,23 @@ class SvnPubSub(resource.Resource):
#import pdb;pdb.set_trace()
#print "input: %s" % (input)
try:
- c = json.loads(input)
- commit = Commit(c)
+ data = json.loads(input)
+ notification = self.__notification_class(data)
except ValueError as e:
request.setResponseCode(400)
- log.msg("COMMIT: failed due to: %s" % str(e))
- return str(e)
- self.notifyAll(commit)
+ errstr = str(e)
+      log.msg("%s: failed due to: %s" % (self.__notification_class.KIND, errstr))
+ return errstr
+ self.notifyAll(notification)
return "Ok"
+
def svnpubsub_server():
root = resource.Resource()
- s = SvnPubSub()
- root.putChild("commits", s)
+ c = SvnPubSub(Commit)
+ m = SvnPubSub(Metadata)
+ root.putChild('commits', c)
+ root.putChild('metadata', m)
return server.Site(root)
if __name__ == "__main__":
diff --git a/tools/server-side/svnpubsub/svnpubsub/util.py b/tools/server-side/svnpubsub/svnpubsub/util.py
new file mode 100644
index 0000000..e254f8b
--- /dev/null
+++ b/tools/server-side/svnpubsub/svnpubsub/util.py
@@ -0,0 +1,36 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import subprocess as __subprocess
+
+# check_output() is only available in Python 2.7. Allow us to run with
+# earlier versions
+try:
+ __check_output = __subprocess.check_output
+ def check_output(args, env=None, universal_newlines=False):
+ return __check_output(args, shell=False, env=env,
+ universal_newlines=universal_newlines)
+except AttributeError:
+ def check_output(args, env=None, universal_newlines=False):
+ # note: we only use these three args
+ pipe = __subprocess.Popen(args, shell=False, env=env,
+ stdout=__subprocess.PIPE,
+ universal_newlines=universal_newlines)
+ output, _ = pipe.communicate()
+ if pipe.returncode:
+        raise __subprocess.CalledProcessError(pipe.returncode, args)
+ return output
diff --git a/tools/server-side/svnpubsub/svnwcsub.py b/tools/server-side/svnpubsub/svnwcsub.py
index 366df7c..8105d87 100755
--- a/tools/server-side/svnpubsub/svnwcsub.py
+++ b/tools/server-side/svnpubsub/svnwcsub.py
@@ -69,27 +69,22 @@ except ImportError:
import daemonize
import svnpubsub.client
-
-# check_output() is only available in Python 2.7. Allow us to run with
-# earlier versions
-try:
- check_output = subprocess.check_output
-except AttributeError:
- def check_output(args, env): # note: we only use these two args
- pipe = subprocess.Popen(args, stdout=subprocess.PIPE, env=env)
- output, _ = pipe.communicate()
- if pipe.returncode:
- raise subprocess.CalledProcessError(pipe.returncode, args)
- return output
+import svnpubsub.util
assert hasattr(subprocess, 'check_call')
def check_call(*args, **kwds):
- """Wrapper around subprocess.check_call() that logs stderr upon failure."""
+ """Wrapper around subprocess.check_call() that logs stderr upon failure,
+ with an optional list of exit codes to consider non-failure."""
assert 'stderr' not in kwds
+ if '__okayexits' in kwds:
+ __okayexits = kwds['__okayexits']
+ del kwds['__okayexits']
+ else:
+ __okayexits = set([0]) # EXIT_SUCCESS
kwds.update(stderr=subprocess.PIPE)
pipe = subprocess.Popen(*args, **kwds)
output, errput = pipe.communicate()
- if pipe.returncode:
+ if pipe.returncode not in __okayexits:
cmd = args[0] if len(args) else kwds.get('args', '(no command)')
# TODO: log stdout too?
logging.error('Command failed: returncode=%d command=%r stderr=%r',
@@ -103,7 +98,7 @@ def check_call(*args, **kwds):
def svn_info(svnbin, env, path):
"Run 'svn info' on the target path, returning a dict of info data."
args = [svnbin, "info", "--non-interactive", "--", path]
- output = check_output(args, env=env).strip()
+ output = svnpubsub.util.check_output(args, env=env).strip()
info = { }
for line in output.split('\n'):
idx = line.index(':')
@@ -303,6 +298,21 @@ class BackgroundWorker(threading.Thread):
logging.info("updating: %s", wc.path)
+ ## Run the hook
+ HEAD = svn_info(self.svnbin, self.env, wc.url)['Revision']
+ if self.hook:
+ hook_mode = ['pre-update', 'pre-boot'][boot]
+ logging.info('running hook: %s at %s',
+ wc.path, hook_mode)
+ args = [self.hook, hook_mode, wc.path, HEAD, wc.url]
+ rc = check_call(args, env=self.env, __okayexits=[0, 1])
+ if rc == 1:
+ # TODO: log stderr
+ logging.warn('hook denied update of %s at %s',
+ wc.path, hook_mode)
+ return
+ del rc
+
### we need to move some of these args into the config. these are
### still specific to the ASF setup.
args = [self.svnbin, 'switch',
@@ -313,12 +323,13 @@ class BackgroundWorker(threading.Thread):
'--config-option',
'config:miscellany:use-commit-times=on',
'--',
- wc.url,
+ wc.url + '@' + HEAD,
wc.path]
check_call(args, env=self.env)
### check the loglevel before running 'svn info'?
info = svn_info(self.svnbin, self.env, wc.path)
+ assert info['Revision'] == HEAD
logging.info("updated: %s now at r%s", wc.path, info['Revision'])
## Run the hook
@@ -533,7 +544,8 @@ def main(args):
# We manage the logfile ourselves (along with possible rotation). The
# daemon process can just drop stdout/stderr into /dev/null.
- d = Daemon('/dev/null', options.pidfile, options.umask, bdec)
+ d = Daemon('/dev/null', os.path.abspath(options.pidfile),
+ options.umask, bdec)
if options.daemon:
# Daemonize the process and call sys.exit() with appropriate code
d.daemonize_exit()
diff --git a/tools/server-side/svnpubsub/watcher.py b/tools/server-side/svnpubsub/watcher.py
index 340b100..11bf066 100755
--- a/tools/server-side/svnpubsub/watcher.py
+++ b/tools/server-side/svnpubsub/watcher.py
@@ -35,6 +35,9 @@ def _commit(url, commit):
print('COMMIT: from %s' % url)
pprint.pprint(vars(commit), indent=2)
+def _metadata(url, metadata):
+ print('METADATA: from %s' % url)
+ pprint.pprint(vars(metadata), indent=2)
def _event(url, event_name, event_arg):
if event_arg:
@@ -44,7 +47,7 @@ def _event(url, event_name, event_arg):
def main(urls):
- mc = svnpubsub.client.MultiClient(urls, _commit, _event)
+ mc = svnpubsub.client.MultiClient(urls, _commit, _event, _metadata)
mc.run_forever()