diff options
author | Lorry <lorry@roadtrain.codethink.co.uk> | 2012-08-22 14:29:52 +0100 |
---|---|---|
committer | Lorry <lorry@roadtrain.codethink.co.uk> | 2012-08-22 14:29:52 +0100 |
commit | f1bdf13786f0752c0846cf36f0d91e4fc6747929 (patch) | |
tree | 4223b2035bf2240d681a53822808b3c7f687b905 /tools | |
download | subversion-tarball-f1bdf13786f0752c0846cf36f0d91e4fc6747929.tar.gz |
Tarball conversion
Diffstat (limited to 'tools')
196 files changed, 35588 insertions, 0 deletions
diff --git a/tools/backup/hot-backup.py.in b/tools/backup/hot-backup.py.in new file mode 100755 index 0000000..20dd794 --- /dev/null +++ b/tools/backup/hot-backup.py.in @@ -0,0 +1,354 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# hot-backup.py: perform a "hot" backup of a Subversion repository +# and clean any old Berkeley DB logfiles after the +# backup completes, if the repository backend is +# Berkeley DB. +# +# Subversion is a tool for revision control. +# See http://subversion.apache.org for more information. +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# ==================================================================== + +# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.7.x/tools/backup/hot-backup.py.in $ +# $LastChangedDate: 2010-08-20 04:30:52 +0000 (Fri, 20 Aug 2010) $ +# $LastChangedBy: cmpilato $ +# $LastChangedRevision: 987379 $ + +###################################################################### + +import sys, os, getopt, stat, re, time, shutil, subprocess + +###################################################################### +# Global Settings + +# Path to svnlook utility +svnlook = r"@SVN_BINDIR@/svnlook" + +# Path to svnadmin utility +svnadmin = r"@SVN_BINDIR@/svnadmin" + +# Default number of backups to keep around (0 for "keep them all") +num_backups = int(os.environ.get("SVN_HOTBACKUP_BACKUPS_NUMBER", 64)) + +# Archive types/extensions +archive_map = { + 'gz' : ".tar.gz", + 'bz2' : ".tar.bz2", + 'zip' : ".zip", + 'zip64' : ".zip" + } + +# Chmod recursively on a whole subtree +def chmod_tree(path, mode, mask): + for dirpath, dirs, files in os.walk(path): + for name in dirs + files: + fullname = os.path.join(dirpath, name) + if not os.path.islink(fullname): + new_mode = (os.stat(fullname)[stat.ST_MODE] & ~mask) | mode + os.chmod(fullname, new_mode) + +# For clearing away read-only directories +def safe_rmtree(dirname, retry=0): + "Remove the tree at DIRNAME, making it writable first" + def rmtree(dirname): + chmod_tree(dirname, 0666, 0666) + shutil.rmtree(dirname) + + if not os.path.exists(dirname): + return + + if retry: + for delay in (0.5, 1, 2, 4): + try: + rmtree(dirname) + break + except: + time.sleep(delay) + else: + rmtree(dirname) + else: + rmtree(dirname) + +###################################################################### +# Command line arguments + +def usage(out = sys.stdout): + scriptname = os.path.basename(sys.argv[0]) + out.write( +"""USAGE: %s [OPTIONS] REPOS_PATH BACKUP_PATH + +Create a backup of the repository at REPOS_PATH in a subdirectory of +the 
BACKUP_PATH location, named after the youngest revision. + +Options: + --archive-type=FMT Create an archive of the backup. FMT can be one of: + bz2 : Creates a bzip2 compressed tar file. + gz : Creates a gzip compressed tar file. + zip : Creates a compressed zip file. + zip64: Creates a zip64 file (can be > 2GB). + --num-backups=N Number of prior backups to keep around (0 to keep all). + --verify Verify the backup. + --help -h Print this help message and exit. + +""" % (scriptname,)) + + +try: + opts, args = getopt.gnu_getopt(sys.argv[1:], "h?", ["archive-type=", + "num-backups=", + "verify", + "help"]) +except getopt.GetoptError, e: + sys.stderr.write("ERROR: %s\n\n" % e) + sys.stderr.flush() + usage(sys.stderr) + sys.exit(2) + +archive_type = None +verify_copy = False + +for o, a in opts: + if o == "--archive-type": + archive_type = a + elif o == "--num-backups": + num_backups = int(a) + elif o == "--verify": + verify_copy = True + elif o in ("-h", "--help", "-?"): + usage() + sys.exit() + +if len(args) != 2: + sys.stderr.write("ERROR: only two arguments allowed.\n\n") + sys.stderr.flush() + usage(sys.stderr) + sys.exit(2) + +# Path to repository +repo_dir = args[0] +repo = os.path.basename(os.path.abspath(repo_dir)) + +# Where to store the repository backup. The backup will be placed in +# a *subdirectory* of this location, named after the youngest +# revision. +backup_dir = args[1] + +# Added to the filename regexp, set when using --archive-type. +ext_re = "" + +# Do we want to create an archive of the backup +if archive_type: + if archive_type in archive_map: + # Additionally find files with the archive extension. + ext_re = "(" + re.escape(archive_map[archive_type]) + ")?" 
+ else: + sys.stderr.write("Unknown archive type '%s'.\n\n\n" % archive_type) + sys.stderr.flush() + usage(sys.stderr) + sys.exit(2) + + +###################################################################### +# Helper functions + +def comparator(a, b): + # We pass in filenames so there is never a case where they are equal. + regexp = re.compile("-(?P<revision>[0-9]+)(-(?P<increment>[0-9]+))?" + + ext_re + "$") + matcha = regexp.search(a) + matchb = regexp.search(b) + reva = int(matcha.groupdict()['revision']) + revb = int(matchb.groupdict()['revision']) + if (reva < revb): + return -1 + elif (reva > revb): + return 1 + else: + inca = matcha.groupdict()['increment'] + incb = matchb.groupdict()['increment'] + if not inca: + return -1 + elif not incb: + return 1; + elif (int(inca) < int(incb)): + return -1 + else: + return 1 + +def get_youngest_revision(): + """Examine the repository REPO_DIR using the svnlook binary + specified by SVNLOOK, and return the youngest revision.""" + + p = subprocess.Popen([svnlook, 'youngest', repo_dir], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + infile, outfile, errfile = p.stdin, p.stdout, p.stderr + + stdout_lines = outfile.readlines() + stderr_lines = errfile.readlines() + outfile.close() + infile.close() + errfile.close() + + if stderr_lines: + raise Exception("Unable to find the youngest revision for repository '%s'" + ": %s" % (repo_dir, stderr_lines[0].rstrip())) + + return stdout_lines[0].strip() + +###################################################################### +# Main + +print("Beginning hot backup of '"+ repo_dir + "'.") + + +### Step 1: get the youngest revision. 
+ +try: + youngest = get_youngest_revision() +except Exception, e: + sys.stderr.write("%s\n" % e) + sys.stderr.flush() + sys.exit(1) + +print("Youngest revision is %s" % youngest) + + +### Step 2: Find next available backup path + +backup_subdir = os.path.join(backup_dir, repo + "-" + youngest) + +# If there is already a backup of this revision, then append the +# next highest increment to the path. We still need to do a backup +# because the repository might have changed despite no new revision +# having been created. We find the highest increment and add one +# rather than start from 1 and increment because the starting +# increments may have already been removed due to num_backups. + +regexp = re.compile("^" + repo + "-" + youngest + + "(-(?P<increment>[0-9]+))?" + ext_re + "$") +directory_list = os.listdir(backup_dir) +young_list = [x for x in directory_list if regexp.search(x)] +if young_list: + young_list.sort(comparator) + increment = regexp.search(young_list.pop()).groupdict()['increment'] + if increment: + backup_subdir = os.path.join(backup_dir, repo + "-" + youngest + "-" + + str(int(increment) + 1)) + else: + backup_subdir = os.path.join(backup_dir, repo + "-" + youngest + "-1") + +### Step 3: Ask subversion to make a hot copy of a repository. +### copied last. + +print("Backing up repository to '" + backup_subdir + "'...") +err_code = subprocess.call([svnadmin, "hotcopy", repo_dir, + backup_subdir, "--clean-logs"]) +if err_code != 0: + sys.stderr.write("Unable to backup the repository.\n") + sys.stderr.flush() + sys.exit(err_code) +else: + print("Done.") + +### Step 4: Verify the hotcopy +if verify_copy: + print("Verifying backup...") + err_code = subprocess.call([svnadmin, "verify", "--quiet", backup_subdir]) + if err_code != 0: + sys.stderr.write("Backup verification failed.\n") + sys.stderr.flush() + sys.exit(err_code) + else: + print("Done.") + +### Step 5: Make an archive of the backup if required. 
+if archive_type: + archive_path = backup_subdir + archive_map[archive_type] + err_msg = "" + + print("Archiving backup to '" + archive_path + "'...") + if archive_type == 'gz' or archive_type == 'bz2': + try: + import tarfile + tar = tarfile.open(archive_path, 'w:' + archive_type) + tar.add(backup_subdir, os.path.basename(backup_subdir)) + tar.close() + except ImportError, e: + err_msg = "Import failed: " + str(e) + err_code = -2 + except tarfile.TarError, e: + err_msg = "Tar failed: " + str(e) + err_code = -3 + + elif archive_type == 'zip' or archive_type == 'zip64': + try: + import zipfile + + def add_to_zip(zp, root, dirname, names): + root = os.path.join(root, '') + + for file in names: + path = os.path.join(dirname, file) + if os.path.isfile(path): + zp.write(path, path[len(root):]) + elif os.path.isdir(path) and os.path.islink(path): + for dirpath, dirs, files in os.walk(path): + add_to_zip(zp, path, dirpath, dirs + files) + + zp = zipfile.ZipFile(archive_path, 'w', zipfile.ZIP_DEFLATED, archive_type == 'zip64') + for dirpath, dirs, files in os.walk(backup_subdir): + add_to_zip(zp, backup_dir, dirpath, dirs + files) + zp.close() + except ImportError, e: + err_msg = "Import failed: " + str(e) + err_code = -4 + except zipfile.error, e: + err_msg = "Zip failed: " + str(e) + err_code = -5 + + + if err_code != 0: + sys.stderr.write("Unable to create an archive for the backup.\n%s\n" % err_msg) + sys.stderr.flush() + sys.exit(err_code) + else: + print("Archive created, removing backup '" + backup_subdir + "'...") + safe_rmtree(backup_subdir, 1) + +### Step 6: finally, remove all repository backups other than the last +### NUM_BACKUPS. + +if num_backups > 0: + regexp = re.compile("^" + repo + "-[0-9]+(-[0-9]+)?" 
+ ext_re + "$") + directory_list = os.listdir(backup_dir) + old_list = [x for x in directory_list if regexp.search(x)] + old_list.sort(comparator) + del old_list[max(0,len(old_list)-num_backups):] + for item in old_list: + old_backup_item = os.path.join(backup_dir, item) + print("Removing old backup: " + old_backup_item) + if os.path.isdir(old_backup_item): + safe_rmtree(old_backup_item, 1) + else: + os.remove(old_backup_item) diff --git a/tools/bdb/erase-all-text-data.py b/tools/bdb/erase-all-text-data.py new file mode 100755 index 0000000..288c4ab --- /dev/null +++ b/tools/bdb/erase-all-text-data.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +# +# Erases the text of every file in a BDB repository +# + +import sys, os +import skel, svnfs + +def main(): + if len(sys.argv) == 2: + dbhome = os.path.join(sys.argv[1], 'db') + for i in ('', 'uuids', 'revisions', 'transactions', 'representations', + 'strings', 'changes', 'copies', 'nodes'): + if not os.path.exists(os.path.join(dbhome, i)): + sys.stderr.write("%s: '%s' is not a valid bdb svn repository\n" % + (sys.argv[0], sys.argv[1])) + sys.exit(1) + else: + sys.stderr.write("Usage: %s <bdb-svn-repository>\n" % sys.argv[0]) + sys.exit(1) + + print("WARNING!: This program will destroy all text data in the subversion") + print("repository '%s'" % sys.argv[1]) + print("Do not proceed unless this is a *COPY* of your real repository") + print("If this is really what you want to do, " \ + "type 'YESERASE' and press Return") + if sys.version_info[0] >= 3: + # Python >=3.0 + confirmation = input("Confirmation string> ") + else: + # Python <3.0 + confirmation = raw_input("Confirmation string> ") + if confirmation != "YESERASE": + print("Cancelled - confirmation string not matched") + sys.exit(0) + print("Opening database environment...") + cur = None + ctx = svnfs.Ctx(dbhome) + try: + cur = ctx.nodes_db.cursor() + nodecount = 0 + newrep = skel.Rep() + newrep.str = "empty" + empty_fulltext_rep_skel = newrep.unparse() + del newrep + ctx.strings_db['empty'] = "" + rec = cur.first() + while rec: + if rec[0] != "next-key": + if (nodecount % 10000 == 0) and nodecount != 0: + print("Processed %d nodes..." 
% nodecount) + nodecount += 1 + node = skel.Node(rec[1]) + if node.kind == "file": + rep = skel.Rep(ctx.reps_db[node.datarep]) + if rep.kind == "fulltext": + if rep.str in ctx.strings_db: + del ctx.strings_db[rep.str] + ctx.reps_db[node.datarep] = empty_fulltext_rep_skel + else: + for w in rep.windows: + if w.str in ctx.strings_db: + del ctx.strings_db[w.str] + ctx.reps_db[node.datarep] = empty_fulltext_rep_skel + rec = cur.next() + print("Processed %d nodes" % nodecount) + finally: + if cur: + cur.close() + ctx.close() + print("Done") + +if __name__ == '__main__': + main() diff --git a/tools/bdb/skel.py b/tools/bdb/skel.py new file mode 100644 index 0000000..3e0a255 --- /dev/null +++ b/tools/bdb/skel.py @@ -0,0 +1,226 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +# Python parser for Subversion skels + +import string, re + +def parse(s): + if s[0] != '(' and s[-1] != ')': + raise ValueError("Improperly bounded skel: '%s'" % s) + wholeskel = s + s = s[1:-1].lstrip() + prev_accums = [] + accum = [] + while True: + if len(s) == 0: + return accum + if s[0] in string.digits: + split_tuple = s.split(' ',1) + count = int(split_tuple[0]) + if len(split_tuple) > 1: + s = split_tuple[1] + else: + s = "" + accum.append(s[:count]) + s = s[count:].lstrip() + continue + if s[0] in string.ascii_letters: + i = 0 + while (s[i] not in ' ()'): + i += 1 + if i == len(s): + break + accum.append(s[:i]) + s = s[i:].lstrip() + continue + if s[0] == '(': + new_accum = [] + accum.append(new_accum) + prev_accums.append(accum) + accum = new_accum + s = s[1:].lstrip() + continue + if s[0] == ')': + accum = prev_accums.pop() + s = s[1:].lstrip() + continue + if s[0] == ' ': + s = str.lstrip(' ') + continue + raise ValueError("Unexpected contents in skel: '%s'\n'%s'" % (s, wholeskel)) + + +_ok_implicit = re.compile(r'^[A-Za-z]([^\(\) \r\n\t\f]*)$') +def unparse(struc): + accum = [] + for ent in struc: + if isinstance(ent, str): + if len(ent) > 0 and _ok_implicit.match(ent[0]): + accum.append(ent) + else: + accum.append(str(len(ent))) + accum.append(ent) + else: + accum.append(unparse(ent)) + return "("+" ".join(accum)+")" + + +class Rev: + def __init__(self, skelstring="(revision null)"): + sk = parse(skelstring) + if len(sk) == 2 and sk[0] == "revision" and isinstance(sk[1], str): + self.txn = sk[1] + else: + raise ValueError("Invalid revision skel: %s" % skelstring) + + def unparse(self): + return unparse( ("revision", self.txn) ) + + +class Change: + def __init__(self, skelstring="(change null null null 0 0 )"): + sk = parse(skelstring) + if len(sk) == 6 and sk[0] == "change" and type(sk[1]) == type(sk[2]) \ + == type(sk[3]) == type(sk[4]) == type(sk[5]) == str: + self.path = sk[1] + self.node = sk[2] + self.kind = sk[3] + self.textmod = sk[4] + 
self.propmod = sk[5] + else: + raise ValueError("Invalid change skel: %s" % skelstring) + + def unparse(self): + return unparse( ("change", self.path, self.node, self.kind, + self.textmod and "1" or "", self.propmod and "1" or "") ) + + +class Copy: + def __init__(self, skelstring="(copy null null null)"): + sk = parse(skelstring) + if len(sk) == 4 and sk[0] in ("copy", "soft-copy") and type(sk[1]) \ + == type(sk[2]) == type(sk[3]) == str: + self.kind = sk[0] + self.srcpath = sk[1] + self.srctxn = sk[2] + self.destnode = sk[3] + else: + raise ValueError("Invalid copy skel: %s" % skelstring) + + def unparse(self): + return unparse( (self.kind, self.srcpath, self.srctxn, self.destnode) ) + + +class Node: + def __init__(self,skelstring="((file null null 1 0) null null)"): + sk = parse(skelstring) + if (len(sk) == 3 or (len(sk) == 4 and isinstance(sk[3], str))) \ + and isinstance(sk[0], list) and isinstance(sk[1], str) \ + and isinstance(sk[2], str) and sk[0][0] in ("file", "dir") \ + and type(sk[0][1]) == type(sk[0][2]) == type(sk[0][3]) == str: + self.kind = sk[0][0] + self.createpath = sk[0][1] + self.prednode = sk[0][2] + self.predcount = int(sk[0][3]) + self.proprep = sk[1] + self.datarep = sk[2] + if len(sk) > 3: + self.editrep = sk[3] + else: + self.editrep = None + else: + raise ValueError("Invalid node skel: %s" % skelstring) + + def unparse(self): + structure = [ (self.kind, self.createpath, self.prednode, + str(self.predcount)), self.proprep, self.datarep ] + if self.editrep: + structure.append(self.editrep) + return unparse( structure ) + + +class Txn: + def __init__(self,skelstring="(transaction null null () ())"): + sk = parse(skelstring) + if len(sk) == 5 and sk[0] in ("transaction", "committed", "dead") \ + and type(sk[1]) == type(sk[2]) == str \ + and type(sk[3]) == type(sk[4]) == list and len(sk[3]) % 2 == 0: + self.kind = sk[0] + self.rootnode = sk[1] + if self.kind == "committed": + self.rev = sk[2] + else: + self.basenode = sk[2] + self.proplist = 
sk[3] + self.copies = sk[4] + else: + raise ValueError("Invalid transaction skel: %s" % skelstring) + + def unparse(self): + if self.kind == "committed": + base_item = self.rev + else: + base_item = self.basenode + return unparse( (self.kind, self.rootnode, base_item, self.proplist, + self.copies) ) + + +class SvnDiffWindow: + def __init__(self, skelstructure): + self.offset = skelstructure[0] + self.svndiffver = skelstructure[1][0][1] + self.str = skelstructure[1][0][2] + self.size = skelstructure[1][1] + self.vs_rep = skelstructure[1][2] + + def _unparse_structure(self): + return ([ self.offset, [ [ 'svndiff', self.svndiffver, self.str ], + self.size, self.vs_rep ] ]) + + +class Rep: + def __init__(self, skelstring="((fulltext 0 (md5 16 \0\0\0\0\0\0\0\0" \ + "\0\0\0\0\0\0\0\0)) null)"): + sk = parse(skelstring) + if isinstance(sk[0], list) and len(sk[0]) == 3 \ + and isinstance(sk[0][1], str) \ + and isinstance(sk[0][2], list) and len(sk[0][2]) == 2 \ + and type(sk[0][2][0]) == type(sk[0][2][1]) == str: + self.kind = sk[0][0] + self.txn = sk[0][1] + self.cksumtype = sk[0][2][0] + self.cksum = sk[0][2][1] + if len(sk) == 2 and sk[0][0] == "fulltext": + self.str = sk[1] + elif len(sk) >= 2 and sk[0][0] == "delta": + self.windows = list(map(SvnDiffWindow, sk[1:])) + else: + raise ValueError("Invalid representation skel: %s" % repr(skelstring)) + + def unparse(self): + structure = [ [self.kind, self.txn, [self.cksumtype, self.cksum] ] ] + if self.kind == "fulltext": + structure.append(self.str) + elif self.kind == "delta": + for w in self.windows: + structure.append(w._unparse_structure()) + return unparse( structure ) + diff --git a/tools/bdb/svn-bdb-view.py b/tools/bdb/svn-bdb-view.py new file mode 100755 index 0000000..43dcc21 --- /dev/null +++ b/tools/bdb/svn-bdb-view.py @@ -0,0 +1,295 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# This is a pretty-printer for subversion BDB repository databases. +# + +import sys, os, re, codecs, textwrap +import skel, svnfs + +# Parse arguments +if len(sys.argv) == 2: + dbhome = os.path.join(sys.argv[1], 'db') + if not os.path.exists(dbhome): + sys.stderr.write("%s: '%s' is not a valid svn repository\n" % + (sys.argv[0], dbhome)) + sys.exit(1) +else: + sys.stderr.write("Usage: %s <svn-repository>\n" % sys.argv[0]) + sys.exit(1) + +# Helper Classes +class RepositoryProblem(Exception): + pass + +# Helper Functions +def ok(bool, comment): + if not bool: + raise RepositoryProblem(text) + +# Helper Data +opmap = { + 'add': 'A', + 'modify': 'M', + 'delete': 'D', + 'replace': 'R', + 'reset': 'X', +} + +# Analysis Modules +def am_uuid(ctx): + "uuids" + db = ctx.uuids_db + ok(list(db.keys()) == [1], 'uuid Table Structure') + ok(re.match(r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$', + db[1]), 'UUID format') + print("Repos UUID: %s" % db[1]) + +def am_revisions(ctx): + "revisions" + cur = ctx.revs_db.cursor() + try: + rec = cur.first() + ctx.txn2rev = txn2rev = {} + prevrevnum = -1 + while rec: + rev = skel.Rev(rec[1]) + revnum = rec[0] - 1 + print("r%d: txn %s%s" % (revnum, rev.txn, + (rev.txn not in ctx.txns_db) and "*** MISSING TXN ***" or "")) + ok(rev.txn 
not in txn2rev, 'Multiple revs bound to same txn') + txn2rev[rev.txn] = revnum + rec = cur.next() + finally: + cur.close() + +def am_changes(ctx): + "changes" + cur = ctx.changes_db.cursor() + try: + current_txnid_len = 0 + maximum_txnid_len = 0 + while current_txnid_len <= maximum_txnid_len: + current_txnid_len += 1 + rec = cur.first() + prevtxn = None + while rec: + if len(rec[0]) != current_txnid_len: + rec = cur.next() + continue + ch = skel.Change(rec[1]) + lead = "txn %s:" % rec[0] + if prevtxn == rec[0]: + lead = " " * len(lead) + print("%s %s %s %s %s %s%s" % (lead, opmap[ch.kind], ch.path, ch.node, + ch.textmod and "T" or "-", ch.propmod and "P" or "-", + (ch.node not in ctx.nodes_db) \ + and "*** MISSING NODE ***" or "")) + prevtxn = rec[0] + if len(rec[0]) > maximum_txnid_len: + maximum_txnid_len = len(rec[0]) + rec = cur.next() + finally: + cur.close() + +def am_copies(ctx): + "copies" + cur = ctx.copies_db.cursor() + try: + print("next-key: %s" % ctx.copies_db['next-key']) + rec = cur.first() + while rec: + if rec[0] != 'next-key': + cp = skel.Copy(rec[1]) + destnode = ctx.nodes_db.get(cp.destnode) + if not destnode: + destpath = "*** MISSING NODE ***" + else: + destpath = skel.Node(destnode).createpath + print("cpy %s: %s %s @txn %s to %s (%s)" % (rec[0], + {'copy':'C','soft-copy':'S'}[cp.kind], cp.srcpath or "-", + cp.srctxn or "-", cp.destnode, destpath)) + rec = cur.next() + finally: + cur.close() + +def am_txns(ctx): + "transactions" + cur = ctx.txns_db.cursor() + try: + print("next-key: %s" % ctx.txns_db['next-key']) + length = 1 + found_some = True + while found_some: + found_some = False + rec = cur.first() + while rec: + if rec[0] != 'next-key' and len(rec[0]) == length: + found_some = True + txn = skel.Txn(rec[1]) + if txn.kind == "committed": + label = "r%s" % txn.rev + ok(ctx.txn2rev[rec[0]] == int(txn.rev), 'Txn->rev not <-txn') + else: + label = "%s based-on %s" % (txn.kind, txn.basenode) + print("txn %s: %s root-node %s props %d copies 
%s" % (rec[0], + label, txn.rootnode, len(txn.proplist) / 2, ",".join(txn.copies))) + rec = cur.next() + length += 1 + finally: + cur.close() + +def am_nodes(ctx): + "nodes" + cur = ctx.nodes_db.cursor() + try: + print("next-key: %s" % ctx.txns_db['next-key']) + rec = cur.first() + data = {} + while rec: + if rec[0] == 'next-key': + rec = cur.next() + continue + nd = skel.Node(rec[1]) + nid,cid,tid = rec[0].split(".") + data[tid.rjust(20)+nd.createpath] = (rec[0], nd) + rec = cur.next() + k = sorted(data.keys()) + reptype = {"fulltext":"F", "delta":"D"} + for i in k: + nd = data[i][1] + prkind = drkind = " " + if nd.proprep: + try: + rep = skel.Rep(ctx.reps_db[nd.proprep]) + prkind = reptype[rep.kind] + if nd.proprep in ctx.bad_reps: + prkind += " *** BAD ***" + except KeyError: + prkind = "*** MISSING ***" + if nd.datarep: + try: + rep = skel.Rep(ctx.reps_db[nd.datarep]) + drkind = reptype[rep.kind] + if nd.datarep in ctx.bad_reps: + drkind += " *** BAD ***" + except KeyError: + drkind = "*** MISSING ***" + stringdata = "%s: %s %s pred %s count %s prop %s %s data %s %s edit %s" \ + % ( data[i][0], {"file":"F", "dir":"D"}[nd.kind], nd.createpath, + nd.prednode or "-", nd.predcount, prkind, nd.proprep or "-", + drkind, nd.datarep or "-", nd.editrep or "-") + if nd.createpath == "/": + print("") + print(stringdata) + finally: + cur.close() + +def get_string(ctx, id): + try: + return ctx.get_whole_string(id) + except DbNotFoundError: + return "*** MISSING STRING ***" + +def am_reps(ctx): + "representations" + ctx.bad_reps = {} + cur = ctx.reps_db.cursor() + try: + print("next-key: %s" % ctx.txns_db['next-key']) + rec = cur.first() + while rec: + if rec[0] != 'next-key': + rep = skel.Rep(rec[1]) + lead = "rep %s: txn %s: %s %s " % (rec[0], rep.txn, rep.cksumtype, + codecs.getencoder('hex_codec')(rep.cksum)[0]) + if rep.kind == "fulltext": + note = "" + if rep.str not in ctx.strings_db: + note = " *MISS*" + ctx.bad_reps[rec[0]] = None + print(lead+("fulltext str %s%s" % 
(rep.str, note))) + if ctx.verbose: + print(textwrap.fill(get_string(ctx, rep.str), initial_indent=" ", + subsequent_indent=" ", width=78)) + elif rep.kind == "delta": + print(lead+("delta of %s window%s" % (len(rep.windows), + len(rep.windows) != 1 and "s" or ""))) + for window in rep.windows: + noterep = notestr = "" + if window.vs_rep not in ctx.reps_db: + noterep = " *MISS*" + ctx.bad_reps[rec[0]] = None + if window.str not in ctx.strings_db: + notestr = " *MISS*" + ctx.bad_reps[rec[0]] = None + print("\toff %s len %s vs-rep %s%s str %s%s" % (window.offset, + window.size, window.vs_rep, noterep, window.str, notestr)) + else: + print(lead+"*** UNKNOWN REPRESENTATION TYPE ***") + rec = cur.next() + finally: + cur.close() + + +def am_stringsize(ctx): + "string size" + if not ctx.verbose: + return + cur = ctx.strings_db.cursor() + try: + rec = cur.first() + size = 0 + while rec: + size = size + len(rec[1] or "") + rec = cur.next() + print("%s %s %s" % (size, size/1024.0, size/1024.0/1024.0)) + finally: + cur.close() + +modules = ( + am_uuid, + am_revisions, + am_changes, + am_copies, + am_txns, + am_reps, + am_nodes, + # Takes too long: am_stringsize, + ) + +def main(): + print("Repository View for '%s'" % dbhome) + print("") + ctx = svnfs.Ctx(dbhome, readonly=1) + # Stash process state in a library data structure. Yuck! + ctx.verbose = 0 + try: + for am in modules: + print("MODULE: %s" % am.__doc__) + am(ctx) + print("") + finally: + ctx.close() + +if __name__ == '__main__': + main() diff --git a/tools/bdb/svnfs.py b/tools/bdb/svnfs.py new file mode 100644 index 0000000..c67fa61 --- /dev/null +++ b/tools/bdb/svnfs.py @@ -0,0 +1,97 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# A handle object for convenience in opening a svn repository + +import sys + +# We need a bsddb linked to the same version of Berkeley DB as Subversion is +try: + import bsddb3 as bsddb +except ImportError: + import bsddb + +# Publish the result +sys.modules['svnfs_bsddb'] = bsddb + +from svnfs_bsddb.db import * + +class Ctx: + def __init__(self, dbhome, readonly=None): + self.env = self.uuids_db = self.revs_db = self.txns_db = self.changes_db \ + = self.copies_db = self.nodes_db = self.reps_db = self.strings_db = \ + None + try: + self.env = DBEnv() + self.env.set_lk_detect(DB_LOCK_RANDOM) + self.env.set_get_returns_none(1) + self.env.open(dbhome, DB_CREATE | DB_INIT_MPOOL | DB_INIT_TXN \ + | DB_INIT_LOCK | DB_INIT_LOG) + def open_db(dbname): + db = DB(self.env) + dbflags = 0 + if readonly: + dbflags = DB_RDONLY + db.open(dbname, flags=dbflags) + return db + self.uuids_db = open_db('uuids') + self.revs_db = open_db('revisions') + self.txns_db = open_db('transactions') + self.changes_db = open_db('changes') + self.copies_db = open_db('copies') + self.nodes_db = open_db('nodes') + self.reps_db = open_db('representations') + self.strings_db = open_db('strings') + except: + self.close() + raise + + def close(self): + def close_if_not_None(i): + if i is not None: + i.close() + close_if_not_None(self.uuids_db ) + close_if_not_None(self.revs_db ) + close_if_not_None(self.txns_db ) + 
close_if_not_None(self.changes_db ) + close_if_not_None(self.copies_db ) + close_if_not_None(self.nodes_db ) + close_if_not_None(self.reps_db ) + close_if_not_None(self.strings_db ) + close_if_not_None(self.env ) + self.env = self.uuids_db = self.revs_db = self.txns_db = self.changes_db \ + = self.copies_db = self.nodes_db = self.reps_db = self.strings_db = \ + None + + # And now, some utility functions + def get_whole_string(self, key): + cur = self.strings_db.cursor() + try: + rec = cur.set(key) + if rec is None: + raise DBNotFoundError + str = "" + while rec: + str = str + (rec[1] or "") + rec = cur.next_dup() + finally: + cur.close() + return str + diff --git a/tools/bdb/whatis-rep.py b/tools/bdb/whatis-rep.py new file mode 100755 index 0000000..55ed4b7 --- /dev/null +++ b/tools/bdb/whatis-rep.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# Print a description (including data, path, and revision) of the +# specified node reps in a Subversion filesystem. Walks as much of +# the reps table as necessary to locate the data (e.g. does a table +# scan). 
+ +# Standard modules +import sys, os, re, codecs + +# Local support modules +import skel, svnfs + +def main(): + progname = os.path.basename(sys.argv[0]) + if len(sys.argv) >= 3: + dbhome = os.path.join(sys.argv[1], 'db') + if not os.path.exists(dbhome): + sys.stderr.write("%s: '%s' is not a valid svn repository\n" % + (sys.argv[0], dbhome)) + sys.stderr.flush() + sys.exit(1) + rep_ids = sys.argv[2:] + else: + sys.stderr.write("Usage: %s <svn-repository> <rep-id>...\n" % progname) + sys.stderr.flush() + sys.exit(1) + + print("%s running on repository '%s'" % (progname, dbhome)) + print("") + rep_ids = dict.fromkeys(rep_ids) + ctx = svnfs.Ctx(dbhome) + try: + cur = ctx.nodes_db.cursor() + try: + rec = cur.first() + while rec: + if rec[0] != 'next-key': + nid, cid, tid = rec[0].split(".") + nd = skel.Node(rec[1]) + if nd.datarep in rep_ids: + rev = skel.Txn(ctx.txns_db[tid]).rev + print("%s: data of '%s%s' in r%s" % (nd.datarep, + nd.createpath, {"dir":'/', "file":''}[nd.kind], rev)) + if nd.proprep in rep_ids: + rev = skel.Txn(ctx.txns_db[tid]).rev + print("%s: properties of '%s%s' in r%s" % (nd.datarep, + nd.createpath, {"dir":'/', "file":''}[nd.kind], rev)) + rec = cur.next() + finally: + cur.close() + finally: + ctx.close() + +if __name__ == '__main__': + main() diff --git a/tools/buildbot/master/Feeder.py b/tools/buildbot/master/Feeder.py new file mode 100644 index 0000000..59e79b9 --- /dev/null +++ b/tools/buildbot/master/Feeder.py @@ -0,0 +1,391 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# This file is part of the Buildbot configuration for the Subversion project. +# The original file was created by Lieven Govaerts +# +# Minor changes made by API (apinheiro@igalia.com) in order to fit with our +# configuration and last buildbot changes +# +# Minor whitespace clean up, clean up imports, adapted to buildbot 0.7.7, +# and finally attempt to create valid atom and RSS feeds. +# Changes by Chandan-Dutta Chowdhury <chandan-dutta chowdhury @ hp com> and +# Gareth Armstrong <gareth armstrong @ hp com> +# Also integrate changes from +# http://code.google.com/p/pybots/source/browse/trunk/master/Feeder.py +# which adds ability to filter RSS feeds to specific builders. +# e.g. 
http://localhost:8012/rss?builder=builder-log4c-rhel-4-i386 + +import time +import os +import re +import sys + +from twisted.web.resource import Resource + +from buildbot.status.web import baseweb +from buildbot.status.builder import FAILURE, SUCCESS, WARNINGS + +class XmlResource(Resource): + contentType = "text/xml; charset=UTF-8" + def render(self, request): + data = self.content(request) + request.setHeader("content-type", self.contentType) + if request.method == "HEAD": + request.setHeader("content-length", len(data)) + return '' + return data + docType = '' + def header (self, request): + data = ('<?xml version="1.0"?>\n') + return data + def footer(self, request): + data = '' + return data + def content(self, request): + data = self.docType + data += self.header(request) + data += self.body(request) + data += self.footer(request) + return data + def body(self, request): + return '' + +class FeedResource(XmlResource): + title = 'Dummy' + link = 'http://dummylink' + language = 'en-us' + description = 'Dummy rss' + status = None + + def __init__(self, status, categories=None): + self.status = status + self.categories = categories + self.link = self.status.getBuildbotURL() + self.title = 'Build status of ' + status.getProjectName() + self.description = 'List of FAILED builds' + self.pubdate = time.gmtime(int(time.time())) + + def getBuilds(self, request): + builds = [] + # THIS is lifted straight from the WaterfallStatusResource Class in + # status/web/waterfall.py + # + # we start with all Builders available to this Waterfall: this is + # limited by the config-file -time categories= argument, and defaults + # to all defined Builders. + allBuilderNames = self.status.getBuilderNames(categories=self.categories) + builders = [self.status.getBuilder(name) for name in allBuilderNames] + + # but if the URL has one or more builder= arguments (or the old show= + # argument, which is still accepted for backwards compatibility), we + # use that set of builders instead. 
We still don't show anything + # outside the config-file time set limited by categories=. + showBuilders = request.args.get("show", []) + showBuilders.extend(request.args.get("builder", [])) + if showBuilders: + builders = [b for b in builders if b.name in showBuilders] + + # now, if the URL has one or category= arguments, use them as a + # filter: only show those builders which belong to one of the given + # categories. + showCategories = request.args.get("category", []) + if showCategories: + builders = [b for b in builders if b.category in showCategories] + + maxFeeds = 25 + + # Copy all failed builds in a new list. + # This could clearly be implemented much better if we had + # access to a global list of builds. + for b in builders: + lastbuild = b.getLastFinishedBuild() + if lastbuild is None: + continue + + lastnr = lastbuild.getNumber() + + totalbuilds = 0 + i = lastnr + while i >= 0: + build = b.getBuild(i) + i -= 1 + if not build: + continue + + results = build.getResults() + + # only add entries for failed builds! + if results == FAILURE: + totalbuilds += 1 + builds.append(build) + + # stop for this builder when our total nr. of feeds is reached + if totalbuilds >= maxFeeds: + break + + # Sort build list by date, youngest first. 
+ if sys.version_info[:3] >= (2,4,0): + builds.sort(key=lambda build: build.getTimes(), reverse=True) + else: + # If you need compatibility with python < 2.4, use this for + # sorting instead: + # We apply Decorate-Sort-Undecorate + deco = [(build.getTimes(), build) for build in builds] + deco.sort() + deco.reverse() + builds = [build for (b1, build) in deco] + + if builds: + builds = builds[:min(len(builds), maxFeeds)] + return builds + + def body (self, request): + data = '' + builds = self.getBuilds(request) + + for build in builds: + start, finished = build.getTimes() + finishedTime = time.gmtime(int(finished)) + projectName = self.status.getProjectName() + link = re.sub(r'index.html', "", self.status.getURLForThing(build)) + + # title: trunk r862265 (plus patch) failed on 'i686-debian-sarge1 shared gcc-3.3.5' + ss = build.getSourceStamp() + source = "" + if ss.branch: + source += "Branch %s " % ss.branch + if ss.revision: + source += "Revision %s " % str(ss.revision) + if ss.patch: + source += " (plus patch)" + if ss.changes: + pass + if (ss.branch is None and ss.revision is None and ss.patch is None + and not ss.changes): + source += "Latest revision " + got_revision = None + try: + got_revision = build.getProperty("got_revision") + except KeyError: + pass + if got_revision: + got_revision = str(got_revision) + if len(got_revision) > 40: + got_revision = "[revision string too long]" + source += "(Got Revision: %s)" % got_revision + title = ('%s failed on "%s"' % + (source, build.getBuilder().getName())) + + # get name of the failed step and the last 30 lines of its log. 
+ if build.getLogs(): + log = build.getLogs()[-1] + laststep = log.getStep().getName() + try: + lastlog = log.getText() + except IOError: + # Probably the log file has been removed + lastlog='<b>log file not available</b>' + + lines = re.split('\n', lastlog) + lastlog = '' + for logline in lines[max(0, len(lines)-30):]: + lastlog = lastlog + logline + '<br/>' + lastlog = lastlog.replace('\n', '<br/>') + + description = '' + description += ('Date: %s<br/><br/>' % + time.strftime("%a, %d %b %Y %H:%M:%S GMT", + finishedTime)) + description += ('Full details available here: <a href="%s">%s</a><br/>' % (self.link, projectName)) + builder_summary_link = ('%s/builders/%s' % + (re.sub(r'/index.html', '', self.link), + build.getBuilder().getName())) + description += ('Build summary: <a href="%s">%s</a><br/><br/>' % + (builder_summary_link, + build.getBuilder().getName())) + description += ('Build details: <a href="%s">%s</a><br/><br/>' % + (link, self.link + link[1:])) + description += ('Author list: <b>%s</b><br/><br/>' % + ",".join(build.getResponsibleUsers())) + description += ('Failed step: <b>%s</b><br/><br/>' % laststep) + description += 'Last lines of the build log:<br/>' + + data += self.item(title, description=description, lastlog=lastlog, + link=link, pubDate=finishedTime) + + return data + + def item(self, title='', link='', description='', pubDate=''): + """Generates xml for one item in the feed.""" + +class Rss20StatusResource(FeedResource): + def __init__(self, status, categories=None): + FeedResource.__init__(self, status, categories) + contentType = 'application/rss+xml' + + def header(self, request): + data = FeedResource.header(self, request) + data += ('<rss version="2.0" xmlns:atom="http://www.w3.org/2005/Atom">\n') + data += (' <channel>\n') + if self.title is not None: + data += (' <title>%s</title>\n' % self.title) + if self.link is not None: + data += (' <link>%s</link>\n' % self.link) + link = re.sub(r'/index.html', '', self.link) + data += (' 
<atom:link href="%s/rss" rel="self" type="application/rss+xml"/>\n' % link) + if self.language is not None: + data += (' <language>%s</language>\n' % self.language) + if self.description is not None: + data += (' <description>%s</description>\n' % self.description) + if self.pubdate is not None: + rfc822_pubdate = time.strftime("%a, %d %b %Y %H:%M:%S GMT", + self.pubdate) + data += (' <pubDate>%s</pubDate>\n' % rfc822_pubdate) + return data + + def item(self, title='', link='', description='', lastlog='', pubDate=''): + data = (' <item>\n') + data += (' <title>%s</title>\n' % title) + if link is not None: + data += (' <link>%s</link>\n' % link) + if (description is not None and lastlog is not None): + lastlog = re.sub(r'<br/>', "\n", lastlog) + lastlog = re.sub(r'&', "&", lastlog) + lastlog = re.sub(r"'", "'", lastlog) + lastlog = re.sub(r'"', """, lastlog) + lastlog = re.sub(r'<', '<', lastlog) + lastlog = re.sub(r'>', '>', lastlog) + lastlog = lastlog.replace('\n', '<br/>') + content = '<![CDATA[' + content += description + content += lastlog + content += ']]>' + data += (' <description>%s</description>\n' % content) + if pubDate is not None: + rfc822pubDate = time.strftime("%a, %d %b %Y %H:%M:%S GMT", + pubDate) + data += (' <pubDate>%s</pubDate>\n' % rfc822pubDate) + # Every RSS item must have a globally unique ID + guid = ('tag:%s@%s,%s:%s' % (os.environ['USER'], + os.environ['HOSTNAME'], + time.strftime("%Y-%m-%d", pubDate), + time.strftime("%Y%m%d%H%M%S", + pubDate))) + data += (' <guid isPermaLink="false">%s</guid>\n' % guid) + data += (' </item>\n') + return data + + def footer(self, request): + data = (' </channel>\n' + '</rss>') + return data + +class Atom10StatusResource(FeedResource): + def __init__(self, status, categories=None): + FeedResource.__init__(self, status, categories) + contentType = 'application/atom+xml' + + def header(self, request): + data = FeedResource.header(self, request) + data += '<feed xmlns="http://www.w3.org/2005/Atom">\n' + 
data += (' <id>%s</id>\n' % self.status.getBuildbotURL()) + if self.title is not None: + data += (' <title>%s</title>\n' % self.title) + if self.link is not None: + link = re.sub(r'/index.html', '', self.link) + data += (' <link rel="self" href="%s/atom"/>\n' % link) + data += (' <link rel="alternate" href="%s/"/>\n' % link) + if self.description is not None: + data += (' <subtitle>%s</subtitle>\n' % self.description) + if self.pubdate is not None: + rfc3339_pubdate = time.strftime("%Y-%m-%dT%H:%M:%SZ", + self.pubdate) + data += (' <updated>%s</updated>\n' % rfc3339_pubdate) + data += (' <author>\n') + data += (' <name>Build Bot</name>\n') + data += (' </author>\n') + return data + + def item(self, title='', link='', description='', lastlog='', pubDate=''): + data = (' <entry>\n') + data += (' <title>%s</title>\n' % title) + if link is not None: + data += (' <link href="%s"/>\n' % link) + if (description is not None and lastlog is not None): + lastlog = re.sub(r'<br/>', "\n", lastlog) + lastlog = re.sub(r'&', "&", lastlog) + lastlog = re.sub(r"'", "'", lastlog) + lastlog = re.sub(r'"', """, lastlog) + lastlog = re.sub(r'<', '<', lastlog) + lastlog = re.sub(r'>', '>', lastlog) + data += (' <content type="xhtml">\n') + data += (' <div xmlns="http://www.w3.org/1999/xhtml">\n') + data += (' %s\n' % description) + data += (' <pre xml:space="preserve">%s</pre>\n' % lastlog) + data += (' </div>\n') + data += (' </content>\n') + if pubDate is not None: + rfc3339pubDate = time.strftime("%Y-%m-%dT%H:%M:%SZ", + pubDate) + data += (' <updated>%s</updated>\n' % rfc3339pubDate) + # Every Atom entry must have a globally unique ID + # http://diveintomark.org/archives/2004/05/28/howto-atom-id + guid = ('tag:%s@%s,%s:%s' % (os.environ['USER'], + os.environ['HOSTNAME'], + time.strftime("%Y-%m-%d", pubDate), + time.strftime("%Y%m%d%H%M%S", + pubDate))) + data += (' <id>%s</id>\n' % guid) + data += (' <author>\n') + data += (' <name>Build Bot</name>\n') + data += (' </author>\n') + 
data += (' </entry>\n') + return data + + def footer(self, request): + data = ('</feed>') + return data + +class WebStatusWithFeeds(baseweb.WebStatus): + """Override the standard WebStatus class to add RSS and Atom feeds. + + This adds the following web resources in addition to /waterfall: + /rss + /atom + + The same "branch" and "category" query arguments can be passed + as with /waterfall + e.g. http://mybot.buildbot.com:8012/rss?branch=&builder=builder-log4c-rhel-4-i386 + or + http://mybot.buildbot.com:8012/rss?branch=&category=log4c + """ + + def setupSite(self): + baseweb.WebStatus.setupSite(self) + + status = self.parent.getStatus() + sr = self.site.resource + + rss = Rss20StatusResource(status, categories=None) + sr.putChild("rss", rss) + atom = Atom10StatusResource(status, categories=None) + sr.putChild("atom", atom) + diff --git a/tools/buildbot/master/SVNMailNotifier.py b/tools/buildbot/master/SVNMailNotifier.py new file mode 100644 index 0000000..1dfe839 --- /dev/null +++ b/tools/buildbot/master/SVNMailNotifier.py @@ -0,0 +1,210 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +import os +import urllib +import re + +from email.Message import Message +from email.Utils import formatdate +from email.MIMEText import MIMEText + +from twisted.internet import defer +from twisted.application import service + +from buildbot.status.builder import FAILURE, SUCCESS, WARNINGS +from buildbot.status.mail import MailNotifier + +class SVNMailNotifier(MailNotifier): + """Implement custom status mails for the Subversion project""" + + def __init__(self, fromaddr, mode="all", categories=None, builders=None, + addLogs=False, relayhost="localhost", + subject="buildbot %(result)s in %(builder)s", + lookup=None, extraRecipients=[], + sendToInterestedUsers=True, + body="", + replytoaddr=""): + """ + @type body: string + @param body: a string to be used as the body of the message. + + @type replytoaddr: string + @param replytoaddr: the email address to be used in the 'Reply-To' header. + """ + + self.body = body + self.replytoaddr = replytoaddr + + # pass the rest of the parameters to our parent. + MailNotifier.__init__(self, fromaddr, mode, categories, builders, + addLogs, relayhost, subject, lookup, extraRecipients, + sendToInterestedUsers) + + def buildMessage(self, name, build, results): + if self.mode == "all": + intro = "The Buildbot has finished a build of %s.\n" % name + elif self.mode == "failing": + intro = "The Buildbot has detected a failed build of %s.\n" % name + else: + intro = "The Buildbot has detected a new failure of %s.\n" % name + + # buildurl + buildurl = self.status.getURLForThing(build) +# lgo: url's are already quoted now. 
+# if buildurl: +# buildurl = urllib.quote(buildurl, '/:') + + # buildboturl + buildboturl = self.status.getBuildbotURL() +# if url: +# buildboturl = urllib.quote(url, '/:') + + # reason of build + buildreason = build.getReason() + + # source stamp + patch = None + ss = build.getSourceStamp() + if ss is None: + source = "unavailable" + else: + if build.getChanges(): + revision = max([int(c.revision) for c in build.getChanges()]) + + source = "" + if ss.branch is None: + ss.branch = "trunk" + source += "[branch %s] " % ss.branch + if revision: + source += str(revision) + else: + source += "HEAD" + if ss.patch is not None: + source += " (plus patch)" + + # actual buildslave + buildslave = build.getSlavename() + + # TODO: maybe display changes here? or in an attachment? + + # status + t = build.getText() + if t: + t = ": " + " ".join(t) + else: + t = "" + + if results == SUCCESS: + status = "Build succeeded!\n" + res = "PASS" + elif results == WARNINGS: + status = "Build Had Warnings%s\n" % t + res = "WARN" + else: + status = "BUILD FAILED%s\n" % t + res = "FAIL" + + if build.getLogs(): + log = build.getLogs()[-1] + laststep = log.getStep().getName() + lastlog = log.getText() + + # only give me the last lines of the log files. + lines = re.split('\n', lastlog) + lastlog = '' + for logline in lines[max(0, len(lines)-100):]: + lastlog = lastlog + logline + + # TODO: it would be nice to provide a URL for the specific build + # here. That involves some coordination with html.Waterfall . 
+ # Ideally we could do: + # helper = self.parent.getServiceNamed("html") + # if helper: + # url = helper.getURLForBuild(build) + + text = self.body % { 'result': res, + 'builder': name, + 'revision': revision, + 'branch': ss.branch, + 'blamelist': ",".join(build.getResponsibleUsers()), + 'buildurl': buildurl, + 'buildboturl': buildboturl, + 'reason': buildreason, + 'source': source, + 'intro': intro, + 'status': status, + 'slave': buildslave, + 'laststep': laststep, + 'lastlog': lastlog, + } + + haveAttachments = False + if ss.patch or self.addLogs: + haveAttachments = True + if not canDoAttachments: + log.msg("warning: I want to send mail with attachments, " + "but this python is too old to have " + "email.MIMEMultipart . Please upgrade to python-2.3 " + "or newer to enable addLogs=True") + + if haveAttachments and canDoAttachments: + m = MIMEMultipart() + m.attach(MIMEText(text)) + else: + m = Message() + m.set_payload(text) + + m['Date'] = formatdate(localtime=True) + m['Subject'] = self.subject % { 'result': res, + 'builder': name, + 'revision': revision, + 'branch': ss.branch + } + m['From'] = self.fromaddr + # m['To'] is added later + m['Reply-To'] = self.replytoaddr + + if ss.patch: + a = MIMEText(patch) + a.add_header('Content-Disposition', "attachment", + filename="source patch") + m.attach(a) + if self.addLogs: + for log in build.getLogs(): + name = "%s.%s" % (log.getStep().getName(), + log.getName()) + a = MIMEText(log.getText()) + a.add_header('Content-Disposition', "attachment", + filename=name) + m.attach(a) + + # now, who is this message going to? 
+ dl = [] + recipients = self.extraRecipients[:] + if self.sendToInterestedUsers and self.lookup: + for u in build.getInterestedUsers(): + d = defer.maybeDeferred(self.lookup.getAddress, u) + d.addCallback(recipients.append) + dl.append(d) + d = defer.DeferredList(dl) + d.addCallback(self._gotRecipients, recipients, m) + return d + diff --git a/tools/buildbot/master/master.cfg b/tools/buildbot/master/master.cfg new file mode 100644 index 0000000..96b0037 --- /dev/null +++ b/tools/buildbot/master/master.cfg @@ -0,0 +1,258 @@ +# -*- python -*- +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +import os, os.path, re + +from buildbot.scheduler import Scheduler +from buildbot.process import factory +from buildbot.steps import source, shell +from buildbot.status.html import WebStatus +from buildbot.scheduler import AnyBranchScheduler +#from buildbot.twcompat import implements +from buildbot.scheduler import Try_Userpass +from buildbot.scheduler import Nightly +from buildbot.changes.svnpoller import SVNPoller, split_file_branches +from buildbot.buildslave import BuildSlave + +#import TigrisMailSource +import SVNMailNotifier +from Feeder import WebStatusWithFeeds +import private + +REPO="http://svn.apache.org/repos/asf/subversion/" + +s = factory.s + +# This is the dictionary that the buildmaster pays attention to. We also use +# a shorter alias to save typing. +c = BuildmasterConfig = {} + +# slaves +c['slaves'] = [BuildSlave("fc1-gcc3.3.2-ia32", private.slavePwd), + BuildSlave("osx10.4-gcc4.0.1-ia32", private.slavePwd), + BuildSlave("xp-vs2003-ia32", private.slavePwd), + BuildSlave("dlr-fc3", private.slavePwd), + BuildSlave("eh-debsarge1", private.slavePwd), + BuildSlave("x64-ubuntu", private.hwrightPwd), + BuildSlave("x64-centos", private.wandPwd), +] + +# sources +c['change_source'] = SVNPoller(REPO, + split_file=split_file_branches, + svnbin=private.svnbin, + pollinterval=300) + +excludes = ["COMMITTERS", "STATUS", "CHANGES", "README", "INSTALL", "COPYING", "HACKING", "TRANSLATING", "BUGS", "www", "notes", "packages", "subversion/LICENSE", "subversion/po", "doc", "contrib", "tools", "dist.sh"] + +# function checks if this revision is interesting enough to trigger the builds. 
+def isImportant(change): + if not excludes: + return True + + for file in change.files: + triggerBuild = True + for pattern in excludes: + match = re.match(pattern, file) + if match: + triggerBuild = False + break + if triggerBuild: + return True + +# schedulers +bs1 = AnyBranchScheduler("main", + [None, "branches/1.3.x", "branches/1.4.x", "branches/1.5.x", + "branches/1.6.x"], + 5*60, ["x86-macosx-gnu shared", + "debian-x86_64-32 shared gcc", + "x64-ubuntu gcc", + "x64-centos gcc", + ], + fileIsImportant=isImportant) + +ps1 = Nightly('daily-2pm-cet', ['x86-macosx-gnu shared daily ra_serf'], hour=14, minute=0) + +ts = Try_Userpass("try", ["x86-macosx-gnu shared", "debian-x86_64-32 shared gcc"], + port=private.tryPort, userpass=[(private.tryUser,private.tryPwd)] ) +c['schedulers'] = [bs1, ps1, ts] + +# steps and builders + +# define default set of steps, all under masters control. +defSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], timeout=3600), + source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600), + shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_neon", command=["../svncheck.sh", "fsfs", "ra_neon"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True), + ] + +defFact = factory.BuildFactory(defSteps) + +# define Windows custom steps +winSteps = [source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600), + shell.ShellCommand(name="Build", command=["..\svnbuild.bat"], timeout=3600, haltOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_local", command=["..\svncheck.bat","fsfs","ra_local"], timeout=3600, flunkOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_dav", command=["..\svncheck.bat","fsfs","ra_dav"], timeout=3600, flunkOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_svn", command=["..\svncheck.bat","fsfs","ra_svn"], timeout=3600, flunkOnFailure=True), + 
shell.ShellCommand(name="Cleanup", command=["..\svnclean.bat"], timeout=3600), + ] +winFact = factory.BuildFactory(winSteps) + +# define Windows 6 way steps +win6wSteps = [source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600), + shell.ShellCommand(name="Cleanup", command=["..\svnclean.bat"], timeout=3600), + shell.ShellCommand(name="Build", command=["..\svnbuild.bat", "%(branch)"], timeout=3600, haltOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_local", command=["..\svncheck.bat","fsfs","ra_local"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True), + ] +win6wFact = factory.BuildFactory(win6wSteps) + +# define set of steps for eh-x84_64-32, clean step comes first. +ehSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], workdir='', timeout=3600), + source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600), + shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_svn", command=["../svncheck.sh","fsfs","ra_svn"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True), + ] +ehFact = factory.BuildFactory(ehSteps) + +# nightly build ra_serf +serfSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], timeout=3600), + source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600), + shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_serf", command=["../svncheck.sh", "fsfs", "ra_serf"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True), + ] +serfFact = factory.BuildFactory(serfSteps) + +# define set of steps for x64-ubuntu, clean step comes first. 
+x64ubSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], workdir='', timeout=3600), + source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600), + shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_local", command=["../svncheck.sh","fsfs","ra_local"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=False), + shell.ShellCommand(name="Test bindings", command=["../svncheck-bindings.sh","fsfs","ra_local"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True), + ] +x64ubFact = factory.BuildFactory(x64ubSteps) + +x64coSteps = [shell.ShellCommand(name="Cleanup", command=["../svnclean.sh"], timeout=3600), + source.SVN(baseURL=REPO,defaultBranch='trunk', timeout=3600), + shell.ShellCommand(name="Build", command=["../svnbuild.sh"], logfiles={"configlog": "config.log"}, timeout=3600, haltOnFailure=True), + shell.ShellCommand(name="Test fsfs+ra_local", command=["../svncheck.sh", "fsfs", "ra_neon"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True), + shell.ShellCommand(name="Test bindings", command=["../svncheck-bindings.sh","fsfs","ra_neon"], logfiles={"testlog": "tests.log"}, timeout=3600, flunkOnFailure=True), + ] +x64coFact = factory.BuildFactory(x64coSteps) + + +c['builders'] = [ + {'name': "x86-macosx-gnu shared", + 'slavename': "osx10.4-gcc4.0.1-ia32", + 'builddir': "osx10.4-gcc4.0.1-ia32", + 'factory': defFact, + 'category': "prod", + }, + {'name': "debian-x86_64-32 shared gcc", + 'slavename': "eh-debsarge1", + 'builddir': "eh-debsarge1", + 'factory': ehFact, + 'category': "prod", + }, + {'name': "x86-macosx-gnu shared daily ra_serf", + 'slavename': "osx10.4-gcc4.0.1-ia32", + 'builddir': "osx10.4-gcc4.0.1-ia32-serf", + 'factory': serfFact, + 'category': "prod", + }, + {'name': "x64-ubuntu gcc", + 'slavename': "x64-ubuntu", + 'builddir': "x64-ubuntu", + 'factory': 
x64ubFact, + 'category': "prod", + }, + {'name': "x64-centos gcc", + 'slavename': "x64-centos", + 'builddir': "x64-centos", + 'factory': x64coFact, + 'category': "prod", + }, +] + +# 'slavePortnum' defines the TCP port to listen on. This must match the value +# configured into the buildslaves (with their --master option) + +c['slavePortnum'] = private.slavePortnum + +# show webpage +c['status'] = [] +c['status'].append(WebStatusWithFeeds(http_port="tcp:"+str(private.htmlPort)+":interface=127.0.0.1", allowForce=True)) + +# send emails +from buildbot.status import mail +mailbody = 'Full details are available at: \n%(buildurl)s\n\n'\ + 'Author list: %(blamelist)s\n\n'\ + 'Build Slave: %(slave)s\n\n\n'\ + 'Subversion Buildbot\n'\ + '%(buildboturl)s\n\n\n'\ + 'Last 100 lines of the build log (step: %(laststep)s ):\n\n %(lastlog)s' + + +c['status'].append(SVNMailNotifier.SVNMailNotifier( + fromaddr="buildbot@mobsol.be", + extraRecipients=["notifications@subversion.apache.org"], + sendToInterestedUsers=False, + subject="svn %(branch)s r%(revision)s: %(result)s (%(builder)s)", + body=mailbody, + replytoaddr="dev@subversion.apache.org", + categories=["prod"], + relayhost=private.smtp)) + +# from buildbot.status import words +# c['status'].append(words.IRC(host="irc.example.com", nick="bb", +# channels=["#example"])) + + +# if you set 'debugPassword', then you can connect to the buildmaster with +# the diagnostic tool in contrib/debugclient.py . From this tool, you can +# manually force builds and inject changes, which may be useful for testing +# your buildmaster without actually commiting changes to your repository (or +# before you have a functioning 'sources' set up). The debug tool uses the +# same port number as the slaves do: 'slavePortnum'. + +#c['debugPassword'] = "debugpassword" + +# if you set 'manhole', you can telnet into the buildmaster and get an +# interactive python shell, which may be useful for debugging buildbot +# internals. 
It is probably only useful for buildbot developers. +# from buildbot.master import Manhole +#c['manhole'] = Manhole(9999, "admin", "password") + +# the 'projectName' string will be used to describe the project that this +# buildbot is working on. For example, it is used as the title of the +# waterfall HTML page. The 'projectURL' string will be used to provide a link +# from buildbot HTML pages to your project's home page. + +c['projectName'] = "Subversion" +c['projectURL'] = "http://subversion.apache.org/" + +# the 'buildbotURL' string should point to the location where the buildbot's +# internal web server (usually the html.Waterfall page) is visible. This +# typically uses the port number set in the Waterfall 'status' entry, but +# with an externally-visible host name which the buildbot cannot figure out +# without some help. + +c['buildbotURL'] = "http://crest.ics.uci.edu/buildbot/" diff --git a/tools/buildbot/master/private-sample.py b/tools/buildbot/master/private-sample.py new file mode 100644 index 0000000..2d2cd8c --- /dev/null +++ b/tools/buildbot/master/private-sample.py @@ -0,0 +1,32 @@ +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +# -*- python -*- + +svnbin="svn" +smtp="smtp.example.com" +htmlPort = 8010 +slavePortnum = 9989 +tryPort = 8031 +tryUser = "xxxx" +tryPwd = "xxxx" +slavePwd = "xxxx" +hwrightPwd = "xxxx" +aholst_slavePwd = "xxxx" diff --git a/tools/buildbot/master/public_html/buildbot.css b/tools/buildbot/master/public_html/buildbot.css new file mode 100644 index 0000000..edf51f9 --- /dev/null +++ b/tools/buildbot/master/public_html/buildbot.css @@ -0,0 +1,68 @@ +/* Copied from buildbot.ethereal.com. Thanks! */ + +* { + font-family: verdana, arial, helvetica, sans-serif; + font-size: 12px; + font-weight: bold; +} + +a:link,a:visited,a:active { + color: #333; +} +a:hover { + color: #999; +} + +.table { + border-spacing: 2px; +} + +td.Project { + color: #000; + border: 1px solid #666666; + background-color: #fff; +} + +td.Event, td.Activity, td.Time, td.Builder { +/* color: #333333; */ + border: 1px solid #666666; + background-color: #eee; + font-weight: normal; +} + +td.Change { + color: #fff; + border: 1px solid #666666; + background-color: #aaf; +} + +/* LastBuild, BuildStep states */ +.success { + color: #FFFFFF; + border: 1px solid #666666; + background-color: #3b0; +} + +.failure { + color: #FFFFFF; + border: 1px solid #666666; + background-color: #d33; +} + +.warnings { + color: #FFFFFF; + border: 1px solid #666666; + background-color: #fa2; +} + +.exception, td.offline { + color: #FFFFFF; + border: 1px solid #666666; + background-color: #e0b0ff; +} + +.start,.running, td.building { + color: #555; + border: 1px solid #666666; + background-color: #fffc6c; +} diff --git a/tools/buildbot/master/public_html/index.html b/tools/buildbot/master/public_html/index.html new file mode 100644 index 0000000..c2b419f --- /dev/null +++ b/tools/buildbot/master/public_html/index.html @@ -0,0 +1,53 @@ +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +--> + +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd"> +<html> +<head> +<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-15"> +<title>Welcome to the Buildbot</title> +</head> + +<body> +<h1>Welcome to the Buildbot!</h1> + +<ul> + <li>the <a href="waterfall">Waterfall Display</a> will give you a + time-oriented summary of recent buildbot activity.</li> + + <li>the <a href="grid">Grid Display</a> will give you a + developer-oriented summary of recent buildbot activity.</li> + + <li>The <a href="one_box_per_builder">Latest Build</a> for each builder is + here.</li> + + <li><a href="one_line_per_build">Recent Builds</a> are summarized here, one + per line.</li> + + <li><a href="buildslaves">Buildslave</a> information</li> + <li><a href="changes">ChangeSource</a> information.</li> + + <br /> + <li><a href="about">About this Buildbot</a></li> +</ul> + + +</body> </html> diff --git a/tools/buildbot/master/public_html/robots.txt b/tools/buildbot/master/public_html/robots.txt new file mode 100644 index 0000000..47a9d27 --- /dev/null +++ b/tools/buildbot/master/public_html/robots.txt @@ -0,0 +1,9 @@ +User-agent: * +Disallow: /waterfall +Disallow: /builders +Disallow: /changes +Disallow: /buildslaves +Disallow: /schedulers +Disallow: 
/one_line_per_build +Disallow: /one_box_per_builder +Disallow: /xmlrpc diff --git a/tools/buildbot/slaves/README b/tools/buildbot/slaves/README new file mode 100644 index 0000000..c5ed69a --- /dev/null +++ b/tools/buildbot/slaves/README @@ -0,0 +1,95 @@ +How to setup a buildslave? +-------------------------- + +1. Install Buildbot +------------------- +Buildbot is a Python application, you need to install Python 2.2+. + +Download & install: +- Twisted 2.2: http://twistedmatrix.com/trac/ +- ZopeInterface 3.0.1: http://www.zope.org/Products/ZopeInterface +- Buildbot 0.7.2: http://sourceforge.net/project/showfiles.php?group_id=73177 + +The Buildbot installer creates a buildbot script in /usr/bin. On Windows, +buildbot.bat is installed in the %PYTHON\Scripts folder. + +Test your buildbot install by running the buildbot script. + +2. Creating a buildslave +------------------------ +Creating a buildslave is explained in the Buildbot manual: +http://buildbot.sourceforge.net/manual-0.7.2.html#Creating-a-buildslave + +Before you can install the buildslave, you'll need some information from +the buildmaster administrator: + +- your botname and password. The botname is a unique id for your buildslave. +Current botnames include the OS+version, compiler and processor architecture. +(eg. osx10.4-gcc4.0.1-ia32, xp-vs2003-ia32...) +- hostname and port on which the buildmaster is listening. +- hostname and port on which the TryScheduler is listening. +- username and password for the FTP server where you can send the test log- + files. + +The buildmaster administrator for http://www.mobsol.be/buildbot/ can be +reached at lgo@mobsol.be. + +Make sure you fill in the info/host file with some information concerning your +buildslave. + +3. Setting up the buildslave +---------------------------- +A build on a slave is always triggered and managed by the Buildmaster. In fact, +by connecting to the buildmaster, the slave gives full shell access on his +machine to the buildmaster! 
Make sure you run the slave in an account with +limited access. + +The build process is split into 4 steps. +- svn checkout or update +- build +- check +- clean + +The first step is handled by the buildbot code (SVNStep) and can not be +further configured on the buildslave. +For all the other steps you have to provide scripts and put them in the +slavedir/[botname] folder. + +You can copy and reuse the scripts the other buildbot slave owners use for +Windows, Linux or Mac buildslaves. You can find them here: +http://svn.apache.org/repos/asf/subversion/trunk/tools/buildbot/slaves + +4. Builds +--------- +A build will be triggered each time someone commits changes on trunk or +branches/1.4.x. If the buildslave is not online, then the build will be +stored in the queue and started when the buildslave connects. + +Normally each build only includes the changes of one commit. However, when +multiple changes arrive during a previous build, those will be combined in +a next build. + +The buildmaster looks at each commit and decides if they are important enough +to trigger a build. Documentation, contrib, www... changes are typically +ignored. + +5. Try +------ +Buildbot has a feature that allows a Subversion developer to test a patch she's +working on on one of the builders. For example, someone working on Linux might want +to test a major change first on a Windows builder before committing the patch. + +It's up to you to choose whether your buildslave can accept patches or not, +there's no obligation. Basically by allowing patches to be tried on your +buildslave you give people the ability to compile and execute arbitrary code +on your machine. +Try access is password protected and will be reserved to Subversion full +committers. + +How to use the try feature: +TryScheduler is installed on its own dedicated port.
It can be used by +a client with 'buildbot try': +http://buildbot.sourceforge.net/manual-0.7.5.html#try + +Contact the buildmaster administrator for the username & password which you +need to use 'buildbot try'.
\ No newline at end of file diff --git a/tools/buildbot/slaves/bb-openbsd/svnbuild.sh b/tools/buildbot/slaves/bb-openbsd/svnbuild.sh new file mode 100755 index 0000000..9486acd --- /dev/null +++ b/tools/buildbot/slaves/bb-openbsd/svnbuild.sh @@ -0,0 +1,26 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -e +set -x + +branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))" +(cd .. && gmake BRANCH="$branch") diff --git a/tools/buildbot/slaves/bb-openbsd/svncheck-bindings.sh b/tools/buildbot/slaves/bb-openbsd/svncheck-bindings.sh new file mode 100755 index 0000000..3184010 --- /dev/null +++ b/tools/buildbot/slaves/bb-openbsd/svncheck-bindings.sh @@ -0,0 +1,32 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -e +set -x + +branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))" +export MALLOC_OPTIONS=S +(cd .. && gmake BRANCH="$branch" svn-check-bindings) +grep -q "^Result: PASS$" tests.log.bindings.pl || exit 1 +grep -q "^OK$" tests.log.bindings.py || exit 1 +tail -n 1 tests.log.bindings.rb | grep -q ", 0 failures, 0 errors" || exit 1 +#TODO javahl +exit 0 diff --git a/tools/buildbot/slaves/bb-openbsd/svncheck.sh b/tools/buildbot/slaves/bb-openbsd/svncheck.sh new file mode 100755 index 0000000..e5d1bca --- /dev/null +++ b/tools/buildbot/slaves/bb-openbsd/svncheck.sh @@ -0,0 +1,34 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -e +set -x + +branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))" +export MALLOC_OPTIONS=S +(cd .. 
&& gmake BRANCH="$branch" PARALLEL="" \ + svn-check-local \ + svn-check-svn \ + svn-check-neon \ + svn-check-serf) +grep -q "^FAIL:" tests.log* && exit 1 +grep -q "^XPASS:" tests.log* && exit 1 +exit 0 diff --git a/tools/buildbot/slaves/bb-openbsd/svnclean.sh b/tools/buildbot/slaves/bb-openbsd/svnclean.sh new file mode 100755 index 0000000..6273790 --- /dev/null +++ b/tools/buildbot/slaves/bb-openbsd/svnclean.sh @@ -0,0 +1,32 @@ +#!/bin/sh +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -e +set -x + +branch="$(basename $(svn info . | grep ^URL | cut -d' ' -f2))" +(test -h ../svn-trunk || ln -s build ../svn-trunk) +for i in 3 4 5 6 7; do + (test -h ../svn-1.${i}.x || ln -s build ../svn-1.${i}.x) +done +svn update ../../unix-build +(test -h ../GNUmakefile || ln -s ../unix-build/Makefile.svn ../GNUmakefile) +(cd .. && gmake BRANCH="$branch" reset clean) diff --git a/tools/buildbot/slaves/centos/svnbuild.sh b/tools/buildbot/slaves/centos/svnbuild.sh new file mode 100755 index 0000000..c7d9145 --- /dev/null +++ b/tools/buildbot/slaves/centos/svnbuild.sh @@ -0,0 +1,59 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +export MAKEFLAGS=-j4 + +echo "========= autogen.sh" +./autogen.sh || exit $? + +echo "========= configure" +# --with-junit=/usr/share/java/junit.jar +# --with-jdk=/usr/lib/jvm/java-1.6.0-openjdk-1.6.0.0.x86_64 \ +./configure --enable-javahl --enable-maintainer-mode \ + --with-neon=/usr \ + --with-apxs=/usr/sbin/apxs \ + --without-berkeley-db \ + --with-apr=/usr \ + --with-apr-util=/usr \ + --with-jdk=/opt/java/jdk1.6.0_15 \ + --with-junit=/home/bt/junit-4.4.jar \ + --with-sqlite=/home/bt/sqlite-3.6.17/sqlite3.c \ + || exit $? + +echo "========= make" +make || exit $? + +echo "========= make javahl" +make javahl -j1 || exit $? + +echo "========= make swig-py" +make swig-py || exit $? + +echo "========= make swig-pl" +make swig-pl -j1 || exit $? + +echo "========= make swig-rb" +make swig-rb -j1 || exit $? + +exit 0 diff --git a/tools/buildbot/slaves/centos/svncheck-bindings.sh b/tools/buildbot/slaves/centos/svncheck-bindings.sh new file mode 100755 index 0000000..4e8c1aa --- /dev/null +++ b/tools/buildbot/slaves/centos/svncheck-bindings.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +RC=0 + +echo "========= make check-javahl" +make check-javahl || RC=$? + +echo "========= make check-swig-pl" +make check-swig-pl || RC=$? + +echo "========= make check-swig-py" +make check-swig-py || RC=$? + +# ruby test currently failing, generating SEGV on centos +#echo "========= make check-swig-rb" +#make check-swig-rb # || RC=$? + +exit ${RC} diff --git a/tools/buildbot/slaves/centos/svncheck.sh b/tools/buildbot/slaves/centos/svncheck.sh new file mode 100755 index 0000000..8240e36 --- /dev/null +++ b/tools/buildbot/slaves/centos/svncheck.sh @@ -0,0 +1,57 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + + +set -x + +if test -z "$1" ; then + echo "Missing FS_TYPE specifier (arg #1)." + exit 1 +fi + +echo "========= mount RAM disc" +# ignore the result: if it fails, the test will just take longer... +mkdir -p subversion/tests/cmdline/svn-test-work +test -e ../mount-ramdrive && ../mount-ramdrive + +echo "========= make" +case "$2" in + ""|ra_dav|ra_neon) + make davautocheck FS_TYPE=$1 HTTP_LIBRARY=neon CLEANUP=1 || exit $? + ;; + ra_serf) + make davautocheck FS_TYPE=$1 HTTP_LIBRARY=serf CLEANUP=1 || exit $? + ;; + ra_svn) + make svnserveautocheck FS_TYPE="$1" CLEANUP=1 || exit $? + ;; + ra_local) + make check FS_TYPE="$1" CLEANUP=1 || exit $? + ;; + *) + echo "Bad RA specifier (arg #2): '$2'." + exit 1 + ;; +esac + +# the bindings are checked with svncheck-bindings.sh +exit 0 diff --git a/tools/buildbot/slaves/centos/svnclean.sh b/tools/buildbot/slaves/centos/svnclean.sh new file mode 100755 index 0000000..9a5e715 --- /dev/null +++ b/tools/buildbot/slaves/centos/svnclean.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +set -x + +echo "========= unmount RAM disc" +# ignore the result: if there was no ramdisc, that's fine +test -e ../unmount-ramdrive && ../unmount-ramdrive + +echo "========= make extraclean" +test -e Makefile && (make extraclean || exit $?) + +exit 0 diff --git a/tools/buildbot/slaves/centos/svnlog.sh b/tools/buildbot/slaves/centos/svnlog.sh new file mode 100755 index 0000000..d3b5036 --- /dev/null +++ b/tools/buildbot/slaves/centos/svnlog.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +# upload file to server +FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz +tar -czf $FILENAME tests.log +ftp -n www.mobsol.be < ../ftpscript +rm $FILENAME + +echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME" + +exit 0 diff --git a/tools/buildbot/slaves/i686-debian-sarge1/mount-ramdrive.c b/tools/buildbot/slaves/i686-debian-sarge1/mount-ramdrive.c new file mode 100644 index 0000000..1c54ea5 --- /dev/null +++ b/tools/buildbot/slaves/i686-debian-sarge1/mount-ramdrive.c @@ -0,0 +1,35 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + + +#include <stdio.h> +#include <unistd.h> + + +int main() +{ + const char *cmd = "/bin/mount -t tmpfs -o size=50M tmpfs `subversion/tests/cmdline/svn-test-work"; + + setuid(0); + + system(cmd); + +} diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svnbuild.sh b/tools/buildbot/slaves/i686-debian-sarge1/svnbuild.sh new file mode 100755 index 0000000..c1c20ae --- /dev/null +++ b/tools/buildbot/slaves/i686-debian-sarge1/svnbuild.sh @@ -0,0 +1,49 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +set -x + +echo "========= autogen.sh" +./autogen.sh || exit $? + +echo "========= configure" +./configure --disable-static --enable-shared \ + --enable-maintainer-mode \ + --with-neon=/usr/local/neon-0.25.5 \ + --with-apxs=/usr/sbin/apxs \ + --without-berkeley-db \ + --with-apr=/usr/local/apr \ + --with-apr-util=/usr/local/apr || exit $? + +echo "========= make" +make || exit $? + +# echo "========= make swig-py" +# make swig-py || exit $? + +# echo "========= make swig-pl" +# make swig-pl || exit $? + +#echo "========= make swig-rb" +#make swig-rb || exit $? + +exit 0 diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svncheck.sh b/tools/buildbot/slaves/i686-debian-sarge1/svncheck.sh new file mode 100755 index 0000000..dc06a82 --- /dev/null +++ b/tools/buildbot/slaves/i686-debian-sarge1/svncheck.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + + +set -x + +if test -z "$1" ; then + echo "Missing FS_TYPE specifier (arg #1)." + exit 1 +fi + +echo "========= mount RAM disc" +# ignore the result: if it fails, the test will just take longer... 
+mkdir -p subversion/tests/cmdline/svn-test-work +test -e ../mount-ramdrive && ../mount-ramdrive + +echo "========= make check" +make check FS_TYPE=$1 CLEANUP=1 || exit $? + +# echo "========= make check-swig-pl" +# make check-swig-pl || exit $? + +#echo "========= make check-swig-rb" +#make check-swig-rb || exit $? + +exit 0 diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svnclean.sh b/tools/buildbot/slaves/i686-debian-sarge1/svnclean.sh new file mode 100755 index 0000000..9a5e715 --- /dev/null +++ b/tools/buildbot/slaves/i686-debian-sarge1/svnclean.sh @@ -0,0 +1,32 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +echo "========= unmount RAM disc" +# ignore the result: if there was no ramdisc, that's fine +test -e ../unmount-ramdrive && ../unmount-ramdrive + +echo "========= make extraclean" +test -e Makefile && (make extraclean || exit $?) 
+ +exit 0 diff --git a/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh b/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh new file mode 100755 index 0000000..d3b5036 --- /dev/null +++ b/tools/buildbot/slaves/i686-debian-sarge1/svnlog.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +# upload file to server +FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz +tar -czf $FILENAME tests.log +ftp -n www.mobsol.be < ../ftpscript +rm $FILENAME + +echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME" + +exit 0 diff --git a/tools/buildbot/slaves/i686-debian-sarge1/unmount-ramdrive.c b/tools/buildbot/slaves/i686-debian-sarge1/unmount-ramdrive.c new file mode 100644 index 0000000..5c3dbe7 --- /dev/null +++ b/tools/buildbot/slaves/i686-debian-sarge1/unmount-ramdrive.c @@ -0,0 +1,36 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * + */ + + +#include <stdio.h> +#include <unistd.h> + + +int main() +{ + const char *cmd = "/bin/umount `/usr/bin/dirname $0`/build/subversion/tests/cmdline/svn-test-work"; + + setuid(0); + + return system(cmd); + +} + diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnbuild.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnbuild.sh new file mode 100755 index 0000000..74bc436 --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnbuild.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +echo "========= autogen.sh" +./autogen.sh || exit $? + +echo "========= configure" +./configure --with-serf=./serf --with-apxs=/usr/sbin/apxs --without-berkeley-db --prefix=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64-serf/build/svninstall --with-apache-libexecdir=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64-serf/build/libexec || exit $? + +echo "========= make" +make || exit $? + +echo "========= make swig-py" +make swig-py || exit $? + +#echo "========= make swig-pl" +#make swig-pl || exit $? + +#echo "========= make swig-rb" +#make swig-rb || exit $? + +echo "========= make install" +make install || exit $? + +exit 0 diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svncheck.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svncheck.sh new file mode 100755 index 0000000..7233d3c --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svncheck.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +if test -z "$1" ; then + echo "Missing FS_TYPE specifier (arg #1)." 
+ exit 1 +fi + +echo "========= make check" +if [ "$2" = "ra_serf" ]; then + make davautocheck FS_TYPE=$1 HTTP_LIBRARY=serf CLEANUP=1 || s=$?; +else + make davautocheck FS_TYPE=$1 CLEANUP=1 || s=$?; +fi + +echo "========= cat tests.log" +cat tests.log + +exit $s diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnclean.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnclean.sh new file mode 100755 index 0000000..b5877a5 --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnclean.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# ../unmount_ramd.sh + +echo "========= make extraclean" +test -e Makefile && (make extraclean || exit $?) 
+rm -rf ../build/* +rm -rf .svn +rm -rf .buildbot-sourcedata + +exit 0 diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh new file mode 100755 index 0000000..b4d9e0c --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared-daily-ra_serf/svnlog.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# upload file to server +FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz +tar -czf $FILENAME tests.log +ftp -n www.mobsol.be < ../ftpscript +rm $FILENAME + +echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/osx10.4-gcc4.0.1-ia32/$FILENAME" + +exit 0 diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnbuild.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnbuild.sh new file mode 100755 index 0000000..a8750eb --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnbuild.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +echo "========= autogen.sh" +./autogen.sh || exit $? + +echo "========= configure" +./configure --without-serf --with-apxs=/usr/sbin/apxs --without-berkeley-db --prefix=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64/build/svninstall --with-apache-libexecdir=/Users/buildslave17/slave17/osx10.6-gcc4.2.1-x64/build/libexec || exit $? + +echo "========= make" +make || exit $? + +echo "========= make swig-py" +make swig-py || exit $? + +#echo "========= make swig-pl" +#make swig-pl || exit $? + +#echo "========= make swig-rb" +#make swig-rb || exit $? + +echo "========= make install" +make install || exit $? + +exit 0 diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svncheck.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svncheck.sh new file mode 100755 index 0000000..7233d3c --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svncheck.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +if test -z "$1" ; then + echo "Missing FS_TYPE specifier (arg #1)." + exit 1 +fi + +echo "========= make check" +if [ "$2" = "ra_serf" ]; then + make davautocheck FS_TYPE=$1 HTTP_LIBRARY=serf CLEANUP=1 || s=$?; +else + make davautocheck FS_TYPE=$1 CLEANUP=1 || s=$?; +fi + +echo "========= cat tests.log" +cat tests.log + +exit $s diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnclean.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnclean.sh new file mode 100755 index 0000000..b5877a5 --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnclean.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# ../unmount_ramd.sh + +echo "========= make extraclean" +test -e Makefile && (make extraclean || exit $?) 
+rm -rf ../build/* +rm -rf .svn +rm -rf .buildbot-sourcedata + +exit 0 diff --git a/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh new file mode 100755 index 0000000..b4d9e0c --- /dev/null +++ b/tools/buildbot/slaves/svn-x64-macosx-gnu-shared/svnlog.sh @@ -0,0 +1,31 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# upload file to server +FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz +tar -czf $FILENAME tests.log +ftp -n www.mobsol.be < ../ftpscript +rm $FILENAME + +echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/osx10.4-gcc4.0.1-ia32/$FILENAME" + +exit 0 diff --git a/tools/buildbot/slaves/ubuntu-x64/svnbuild.sh b/tools/buildbot/slaves/ubuntu-x64/svnbuild.sh new file mode 100755 index 0000000..31edb1a --- /dev/null +++ b/tools/buildbot/slaves/ubuntu-x64/svnbuild.sh @@ -0,0 +1,51 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +export MAKEFLAGS=-j4 + +echo "========= autogen.sh" +./autogen.sh || exit $? + +echo "========= configure" +./configure --enable-javahl --enable-maintainer-mode \ + --without-berkeley-db \ + --with-jdk=/usr/lib/jvm/java-6-openjdk/ \ + --with-junit=/usr/share/java/junit.jar || exit $? + +echo "========= make" +make || exit $? + +echo "========= make javahl" +make javahl -j1 || exit $? + +echo "========= make swig-py" +make swig-py || exit $? + +echo "========= make swig-pl" +make swig-pl -j1 || exit $? + +echo "========= make swig-rb" +make swig-rb -j1 || exit $? + +exit 0 diff --git a/tools/buildbot/slaves/ubuntu-x64/svncheck-bindings.sh b/tools/buildbot/slaves/ubuntu-x64/svncheck-bindings.sh new file mode 100755 index 0000000..f42c0ec --- /dev/null +++ b/tools/buildbot/slaves/ubuntu-x64/svncheck-bindings.sh @@ -0,0 +1,39 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +RC=0 + +echo "========= make check-javahl" +make check-javahl || RC=$? + +echo "========= make check-swig-pl" +make check-swig-pl || RC=$? + +echo "========= make check-swig-py" +make check-swig-py || RC=$? + +echo "========= make check-swig-rb" +make check-swig-rb || RC=$? + +exit ${RC} diff --git a/tools/buildbot/slaves/ubuntu-x64/svncheck.sh b/tools/buildbot/slaves/ubuntu-x64/svncheck.sh new file mode 100755 index 0000000..227e968 --- /dev/null +++ b/tools/buildbot/slaves/ubuntu-x64/svncheck.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + + +set -x + +if test -z "$1" ; then + echo "Missing FS_TYPE specifier (arg #1)." + exit 1 +fi + +echo "========= mount RAM disc" +test ! 
-e /dev/shm/svn-test-work && mkdir /dev/shm/svn-test-work +test -e subversion/tests/cmdline/svn-test-work && rm -rf subversion/tests/cmdline/svn-test-work +ln -s /dev/shm/svn-test-work subversion/tests/cmdline/ + +echo "========= make check" +make check FS_TYPE=$1 CLEANUP=1 || exit $? + +# the bindings are checked with svncheck-bindings.sh +exit 0 diff --git a/tools/buildbot/slaves/ubuntu-x64/svnclean.sh b/tools/buildbot/slaves/ubuntu-x64/svnclean.sh new file mode 100755 index 0000000..8cee0c4 --- /dev/null +++ b/tools/buildbot/slaves/ubuntu-x64/svnclean.sh @@ -0,0 +1,29 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +echo "========= make extraclean" +cd build +test -e Makefile && (make extraclean || exit $?) + +exit 0 diff --git a/tools/buildbot/slaves/ubuntu-x64/svnlog.sh b/tools/buildbot/slaves/ubuntu-x64/svnlog.sh new file mode 100755 index 0000000..d3b5036 --- /dev/null +++ b/tools/buildbot/slaves/ubuntu-x64/svnlog.sh @@ -0,0 +1,33 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +set -x + +# upload file to server +FILENAME=tests-`date +%Y%m%d%H%M`.log.tgz +tar -czf $FILENAME tests.log +ftp -n www.mobsol.be < ../ftpscript +rm $FILENAME + +echo "Logs of the testrun can be found here: http://www.mobsol.be/logs/eh-debsarge1/$FILENAME" + +exit 0 diff --git a/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template b/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template new file mode 100644 index 0000000..5f337c0 --- /dev/null +++ b/tools/buildbot/slaves/win32-SharpSvn/svn-config.cmd.template @@ -0,0 +1,31 @@ +@echo off +REM ================================================================ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. 
See the License for the +REM specific language governing permissions and limitations +REM under the License. +REM ================================================================ + +CALL "c:\Program Files\Microsoft Visual Studio 9.0\VC\vcvarsall.bat" x86 + +SET TESTDIR=E:\Full +SET TESTPORT=1234 +SET "NANTARGS= " + +SET TEMP=%TESTDIR%\temp +SET TMP=%TEMP% + +IF NOT EXIST "%TESTDIR%\" MKDIR "%TESTDIR%" +IF NOT EXIST "%TEMP%\" MKDIR "%TEMP%" diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd new file mode 100644 index 0000000..f830812 --- /dev/null +++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-bindings.cmd @@ -0,0 +1,60 @@ +@echo off +REM ================================================================ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. See the License for the +REM specific language governing permissions and limitations +REM under the License. +REM ================================================================ + +SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION + +CALL ..\svn-config.cmd +IF ERRORLEVEL 1 EXIT /B 1 + +svnversion . 
/1.6.x | find "S" > nul: +IF ERRORLEVEL 1 ( + ECHO --- Building 1.6.x: Skipping bindings --- + EXIT /B 0 +) + +PATH %PATH%;%TESTDIR%\bin +SET result=0 + + +echo python win-tests.py -r -f fsfs --javahl "%TESTDIR%\tests" +python win-tests.py -r -f fsfs --javahl "%TESTDIR%\tests" +IF ERRORLEVEL 1 ( + echo [python reported error %ERRORLEVEL%] + SET result=1 +) + +IF EXIST "%TESTDIR%\swig" rmdir /s /q "%TESTDIR%\swig" +mkdir "%TESTDIR%\swig\py-release\libsvn" +mkdir "%TESTDIR%\swig\py-release\svn" + +xcopy "release\subversion\bindings\swig\python\*.pyd" "%TESTDIR%\swig\py-release\libsvn\*.pyd" +xcopy "release\subversion\bindings\swig\python\libsvn_swig_py\*.dll" "%TESTDIR%\swig\py-release\libsvn\*.dll" +xcopy "subversion\bindings\swig\python\*.py" "%TESTDIR%\swig\py-release\libsvn\*.py" +xcopy "subversion\bindings\swig\python\svn\*.py" "%TESTDIR%\swig\py-release\svn\*.py" + +SET PYTHONPATH=%TESTDIR%\swig\py-release + +python subversion\bindings\swig\python\tests\run_all.py +IF ERRORLEVEL 1 ( + echo [Python reported error %ERRORLEVEL%] + SET result=1 +) + +exit /b %result% diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd new file mode 100644 index 0000000..a7f185f --- /dev/null +++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-build-bindings.cmd @@ -0,0 +1,33 @@ +@echo off +REM ================================================================ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. 
You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. See the License for the +REM specific language governing permissions and limitations +REM under the License. +REM ================================================================ + +SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION + +CALL ..\svn-config.cmd +IF ERRORLEVEL 1 EXIT /B 1 + +svnversion . /1.6.x | find "S" > nul: +IF ERRORLEVEL 1 ( + ECHO --- Building 1.6.x: Skipping bindings --- + EXIT /B 0 +) + +msbuild subversion_vcnet.sln /p:Configuration=Release /p:Platform=win32 /t:__JAVAHL__ /t:__SWIG_PYTHON__ /t:__SWIG_PERL__ /t:__JAVAHL_TESTS__ +IF ERRORLEVEL 1 EXIT /B 1 diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd new file mode 100644 index 0000000..23c7cb9 --- /dev/null +++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-build.cmd @@ -0,0 +1,34 @@ +@echo off +REM ================================================================ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. 
See the License for the +REM specific language governing permissions and limitations +REM under the License. +REM ================================================================ + +SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION + +CALL ..\svn-config.cmd +IF ERRORLEVEL 1 EXIT /B 1 + +PUSHD ..\deps + +nant gen-dev -D:wc=..\build -D:impBase=../deps/build/win32 %NANTARGS% +IF ERRORLEVEL 1 EXIT /B 1 + +POPD + +msbuild subversion_vcnet.sln /p:Configuration=Debug /p:Platform=win32 /t:__ALL_TESTS__ +IF ERRORLEVEL 1 EXIT /B 1 diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd new file mode 100644 index 0000000..8e4ea01 --- /dev/null +++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-cleanup.cmd @@ -0,0 +1,64 @@ +@echo off +REM ================================================================ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. See the License for the +REM specific language governing permissions and limitations +REM under the License. 
+REM ================================================================ + +SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION + +CALL ..\svn-config.cmd + +IF NOT EXIST "..\deps\" MKDIR "..\deps" + +PUSHD ..\deps +ECHO Checking dependencies in %CD% + +IF NOT EXIST "imports\" ( + svn co --username guest --password "" http://sharpsvn.open.collab.net/svn/sharpsvn/trunk/imports imports +) +IF NOT EXIST build\imports.done ( + copy /y imports\dev-default.build default.build + nant build %NANTARGS% + IF ERRORLEVEL 1 ( + exit /B 1 + ) + del release\bin\*svn* release\bin\_*.* + echo. > build\imports.done +) + +POPD + +PUSHD "%TEMP%" +IF NOT ERRORLEVEL 1 ( + rmdir /s /q "%TEMP%" 2> nul: +) +POPD + +taskkill /im svn.exe /f 2> nul: +taskkill /im svnadmin.exe /f 2> nul: +taskkill /im svnserve.exe /f 2> nul: +taskkill /im svnrdump.exe /f 2> nul: +taskkill /im svnsync.exe /f 2> nul: +taskkill /im httpd.exe /f 2> nul: +IF EXIST "%TESTDIR%\tests\subversion\tests\cmdline\httpd\" ( + rmdir /s /q "%TESTDIR%\tests\subversion\tests\cmdline\httpd" +) + +del "%TESTDIR%\tests\*.log" 2> nul: + + +exit /B 0 diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-template.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-template.cmd new file mode 100644 index 0000000..272d437 --- /dev/null +++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-template.cmd @@ -0,0 +1,24 @@ +@echo off +REM ================================================================ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. 
You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. See the License for the +REM specific language governing permissions and limitations +REM under the License. +REM ================================================================ + +SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION + +CALL ..\svn-config.cmd +IF ERRORLEVEL 1 EXIT /B 1 diff --git a/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd b/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd new file mode 100644 index 0000000..0ab32f1 --- /dev/null +++ b/tools/buildbot/slaves/win32-SharpSvn/svntest-test.cmd @@ -0,0 +1,97 @@ +@ECHO off +REM ================================================================ +REM Licensed to the Apache Software Foundation (ASF) under one +REM or more contributor license agreements. See the NOTICE file +REM distributed with this work for additional information +REM regarding copyright ownership. The ASF licenses this file +REM to you under the Apache License, Version 2.0 (the +REM "License"); you may not use this file except in compliance +REM with the License. You may obtain a copy of the License at +REM +REM http://www.apache.org/licenses/LICENSE-2.0 +REM +REM Unless required by applicable law or agreed to in writing, +REM software distributed under the License is distributed on an +REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +REM KIND, either express or implied. See the License for the +REM specific language governing permissions and limitations +REM under the License. 
+REM ================================================================ + +SETLOCAL ENABLEEXTENSIONS ENABLEDELAYEDEXPANSION + +CALL ..\svn-config.cmd +IF ERRORLEVEL 1 EXIT /B 1 + + +SET MODE=-d +SET PARALLEL= +SET ARGS= + +SET FSFS= +SET LOCAL= +:next + +IF "%1" == "-r" ( + SET MODE=-r + SHIFT +) ELSE IF "%1" == "-d" ( + SET MODE=-d + SHIFT +) ELSE IF "%1" == "-p" ( + SET PARALLEL=-p + SHIFT +) ELSE IF "%1" == "fsfs" ( + SET FSFS=1 + SHIFT +) ELSE IF "%1" == "local" ( + SET LOCAL=1 + SHIFT +) ELSE IF "%1" == "svn" ( + SET SVN=1 + SHIFT +) ELSE IF "%1" == "serf" ( + SET SERF=1 + SHIFT +) ELSE IF "%1" == "neon" ( + SET NEON=1 + SHIFT +) ELSE ( + SET ARGS=!ARGS! -t %1 + SHIFT +) + +IF NOT "%1" == "" GOTO next + + +IF NOT EXIST "%TESTDIR%\bin" MKDIR "%TESTDIR%\bin" +xcopy /y /i ..\deps\release\bin\*.dll "%TESTDIR%\bin" + +PATH %TESTDIR%\bin;%PATH% + +IF "%LOCAL%+%FSFS%" == "1+1" ( + echo win-tests.py -c %PARALLEL% %MODE% -f fsfs %ARGS% "%TESTDIR%\tests" + win-tests.py -c %PARALLEL% %MODE% -f fsfs %ARGS% "%TESTDIR%\tests" + IF ERRORLEVEL 1 EXIT /B 1 +) + +IF "%SVN%+%FSFS%" == "1+1" ( + taskkill /im svnserve.exe /f 2> nul: + echo win-tests.py -c %PARALLEL% %MODE% -f fsfs -u svn://localhost %ARGS% "%TESTDIR%\tests" + win-tests.py -c %PARALLEL% %MODE% -f fsfs -u svn://localhost %ARGS% "%TESTDIR%\tests" + IF ERRORLEVEL 1 EXIT /B 1 +) + +IF "%SERF%+%FSFS%" == "1+1" ( + taskkill /im httpd.exe /f 2> nul: + echo win-tests.py -c %PARALLEL% %MODE% -f fsfs --http-library serf --httpd-dir "%CD%\..\deps\release\httpd" --httpd-port %TESTPORT% -u http://localhost:%TESTPORT% %ARGS% "%TESTDIR%\tests" + win-tests.py -c %PARALLEL% %MODE% -f fsfs --http-library serf --httpd-dir "%CD%\..\deps\release\httpd" --httpd-port %TESTPORT% -u http://localhost:%TESTPORT% %ARGS% "%TESTDIR%\tests" + IF ERRORLEVEL 1 EXIT /B 1 +) + +IF "%NEON%+%FSFS%" == "1+1" ( + taskkill /im httpd.exe /f 2> nul: + echo win-tests.py -c %PARALLEL% %MODE% -f fsfs --http-library neon --httpd-dir "%CD%\..\deps\release\httpd" 
--httpd-port %TESTPORT% -u http://localhost:%TESTPORT% %ARGS% "%TESTDIR%\tests" + win-tests.py -c %PARALLEL% %MODE% -f fsfs --http-library neon --httpd-dir "%CD%\..\deps\release\httpd" --httpd-port %TESTPORT% -u http://localhost:%TESTPORT% %ARGS% "%TESTDIR%\tests" + IF ERRORLEVEL 1 EXIT /B 1 +) diff --git a/tools/buildbot/slaves/win32-xp-VS2005/config.bat b/tools/buildbot/slaves/win32-xp-VS2005/config.bat new file mode 100644 index 0000000..19e4cf0 --- /dev/null +++ b/tools/buildbot/slaves/win32-xp-VS2005/config.bat @@ -0,0 +1,40 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+@echo off
+set HTTPD_BIN_DIR=C:\Apache2
+set GETTEXT_DIR=C:\svn-builder\djh-xp-vse2005\gettext
+set TEST_DIR=M:\svn-auto-test
+
+set HTTPD_SRC_DIR=..\httpd
+set BDB_DIR=..\db4-win32
+set NEON_DIR=..\neon
+set ZLIB_DIR=..\zlib
+set OPENSSL_DIR=..\openssl
+set INTL_DIR=..\svn-libintl
+
+REM Uncomment this if you want clean subversion build, after testing
+REM set CLEAN_SVN=1
+
+REM Uncomment this if you want disable ra_svn tests
+REM set NO_RA_SVN=1
+
+REM Uncomment this if you want disable ra_dav tests
+REM set NO_RA_HTTP=1
+
+set PATH=%GETTEXT_DIR%\bin;%PATH%
+call C:\VCX2005\VC\vcvarsall.bat x86
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/do_all.bat b/tools/buildbot/slaves/win32-xp-VS2005/do_all.bat new file mode 100644 index 0000000..5adbe5b --- /dev/null +++ b/tools/buildbot/slaves/win32-xp-VS2005/do_all.bat @@ -0,0 +1,24 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Driver for one full buildbot cycle: build once, then run the test
+REM suite for every fsfs/bdb x ra_local/ra_svn/ra_dav combination.
+REM Each step runs in its own cmd.exe instance so an EXIT inside the
+REM child script cannot terminate this driver; stdout of each step is
+REM captured in its own log file for buildbot to collect.
+cmd.exe /c ..\svnbuild.bat > build.log
+cmd.exe /c ..\svncheck.bat fsfs ra_local > fsfs_local.log
+cmd.exe /c ..\svncheck.bat fsfs ra_svn > fsfs_svn.log
+cmd.exe /c ..\svncheck.bat fsfs ra_dav > fsfs_dav.log
+cmd.exe /c ..\svncheck.bat bdb ra_local > bdb_local.log
+cmd.exe /c ..\svncheck.bat bdb ra_svn > bdb_svn.log
+cmd.exe /c ..\svncheck.bat bdb ra_dav > bdb_dav.log
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svnbuild.bat b/tools/buildbot/slaves/win32-xp-VS2005/svnbuild.bat new file mode 100644 index 0000000..40f33fc --- /dev/null +++ b/tools/buildbot/slaves/win32-xp-VS2005/svnbuild.bat @@ -0,0 +1,56 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Generate the VS2005 project files and build Subversion plus its test,
+REM SWIG (Python/Perl) and JavaHL targets in Release configuration.
+REM Exit codes: 0 = success, 1 = build failure, 2 = missing config.bat.
+@echo off
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+REM Remove the previous build output and test area before rebuilding.
+cmd.exe /c call ..\svnclean.bat
+
+set PARAMS=-t vcproj --vsnet-version=2005 --with-berkeley-db=%BDB_DIR% --with-zlib=%ZLIB_DIR% --with-httpd=%HTTPD_SRC_DIR% --with-neon=%NEON_DIR% --with-libintl=%INTL_DIR%
+REM set PARAMS=-t vcproj --vsnet-version=2005 --with-berkeley-db=%BDB_DIR% --with-zlib=%ZLIB_DIR% --with-httpd=%HTTPD_SRC_DIR% --with-neon=%NEON_DIR%
+IF NOT "%OPENSSL_DIR%"=="" set PARAMS=%PARAMS% --with-openssl=%OPENSSL_DIR%
+
+REM Generate subversion_vcnet.sln and the .vcproj files.
+python gen-make.py %PARAMS%
+IF ERRORLEVEL 1 GOTO ERROR
+
+REM MSDEV.COM %HTTPD_SRC_DIR%\apache.dsw /MAKE "BuildBin - Win32 Release"
+REM IF ERRORLEVEL 1 GOTO ERROR
+
+REM Build each target group; bail out to :ERROR on the first failure.
+rem MSBUILD subversion_vcnet.sln /t:__ALL_TESTS__ /p:Configuration=Debug
+MSBUILD subversion_vcnet.sln /t:__ALL_TESTS__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+MSBUILD subversion_vcnet.sln /t:__SWIG_PYTHON__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+MSBUILD subversion_vcnet.sln /t:__SWIG_PERL__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+MSBUILD subversion_vcnet.sln /t:__JAVAHL__ /p:Configuration=Release
+IF ERRORLEVEL 1 GOTO ERROR
+
+EXIT 0
+
+REM ----------------------------------------------------
+:ERROR
+ECHO.
+ECHO *** Whoops, something choked.
+ECHO.
+CD ..
+EXIT 1
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svncheck.bat b/tools/buildbot/slaves/win32-xp-VS2005/svncheck.bat new file mode 100644 index 0000000..9061449 --- /dev/null +++ b/tools/buildbot/slaves/win32-xp-VS2005/svncheck.bat @@ -0,0 +1,76 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Run the Subversion test suite for one filesystem/RA combination.
+REM Usage: svncheck.bat FS_TYPE RA_TYPE
+REM   FS_TYPE: fsfs or bdb
+REM   RA_TYPE: ra_local, ra_svn or ra_dav
+REM Exit codes: 0 = tests passed, 1 = test failure,
+REM             2 = missing config.bat, 3 = unknown RA_TYPE.
+@echo off
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+set FS_TYPE=%1
+set RA_TYPE=%2
+
+REM By default, return zero
+set ERR=0
+
+REM Dispatch on the requested RA layer.
+if "%RA_TYPE%"=="ra_local" goto ra_local
+if "%RA_TYPE%"=="ra_svn" goto ra_svn
+if "%RA_TYPE%"=="ra_dav" goto ra_dav
+
+echo Unknown ra method '%RA_TYPE%'
+EXIT 3
+
+REM Each branch brackets the run with "time /T" so elapsed wall-clock
+REM time is visible in the log, then dumps the detailed per-test log
+REM so buildbot captures it in this step's output.
+:ra_local
+time /T
+python win-tests.py %TEST_DIR%\%FS_TYPE% -f %FS_TYPE% -c -r
+if ERRORLEVEL 1 set ERR=1
+time /T
+echo.
+echo.
+echo Detailed log for %FS_TYPE%\tests.log:
+type %TEST_DIR%\%FS_TYPE%\tests.log
+echo End of log for %FS_TYPE%\tests.log
+echo.
+EXIT %ERR%
+
+:ra_svn
+time /T
+python win-tests.py %TEST_DIR%\%FS_TYPE% -f %FS_TYPE% -c -r -u svn://localhost
+if ERRORLEVEL 1 set ERR=1
+time /T
+echo.
+echo.
+echo Detailed log for %FS_TYPE%\svn-tests.log:
+type %TEST_DIR%\%FS_TYPE%\svn-tests.log
+echo End of log for %FS_TYPE%\svn-tests.log
+echo.
+EXIT %ERR%
+
+:ra_dav
+time /T
+python win-tests.py %TEST_DIR%\%FS_TYPE% -f %FS_TYPE% -c -r --httpd-dir="%HTTPD_BIN_DIR%" --httpd-port 1234
+if ERRORLEVEL 1 set ERR=1
+time /T
+echo.
+echo.
+echo Detailed log for %FS_TYPE%\dav-tests.log:
+type %TEST_DIR%\%FS_TYPE%\dav-tests.log
+echo End of log for %FS_TYPE%\dav-tests.log
+echo.
+EXIT %ERR%
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svnclean.bat b/tools/buildbot/slaves/win32-xp-VS2005/svnclean.bat new file mode 100644 index 0000000..cc9e626 --- /dev/null +++ b/tools/buildbot/slaves/win32-xp-VS2005/svnclean.bat @@ -0,0 +1,30 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Delete build output and the test scratch area before a fresh build.
+REM WARNING: removes the Release tree and %TEST_DIR% recursively and
+REM without confirmation; rmdir failures are deliberately ignored
+REM (the directories may not exist on the first run).
+REM Exit codes: 0 = done, 2 = missing config.bat.
+@echo off
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+REM if NOT "%CLEAN_SVN%"=="" MSBUILD subversion_vcnet.sln /t:Clean /p:Configuration=Release
+rmdir /s /q Release
+rmdir /s /q %TEST_DIR%
+
+EXIT 0
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/win32-xp-VS2005/svnlog.bat b/tools/buildbot/slaves/win32-xp-VS2005/svnlog.bat new file mode 100644 index 0000000..8aa0501 --- /dev/null +++ b/tools/buildbot/slaves/win32-xp-VS2005/svnlog.bat @@ -0,0 +1,30 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Buildbot "log" step placeholder: the detailed test logs are already
+REM emitted by svncheck.bat, so this script only prints a pointer.
+@echo off
+REM IF NOT EXIST ..\config.bat GOTO noconfig
+REM call ..\config.bat
+
+echo.
+echo Detailed test logs included in svncheck.bat log.
+echo.
+
+EXIT 0
+
+REM NOTE(review): unreachable while the config.bat check above stays
+REM commented out; kept for symmetry with the other svn*.bat scripts.
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/config.bat.tmpl b/tools/buildbot/slaves/xp-vc60-ia32/config.bat.tmpl new file mode 100644 index 0000000..1221b7a --- /dev/null +++ b/tools/buildbot/slaves/xp-vc60-ia32/config.bat.tmpl @@ -0,0 +1,44 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Template buildbot slave configuration for the xp-vc60-ia32 builder.
+REM Copy to config.bat and adjust the paths for the local machine.
+REM Absolute paths: Visual Studio 6, Apache httpd, test area, Platform SDK.
+set MSDEV=C:\Program Files\VS6.0
+set HTTPD_BIN_DIR=C:\Program Files\Apache2
+set TEST_DIR=C:\tmp\svn-auto-test
+set SDK_DIR=C:\Program Files\VS.NET\Vc7\PlatformSDK\
+
+REM Build dependencies, relative to the Subversion source checkout.
+set HTTPD_SRC_DIR=..\httpd
+set BDB_DIR=..\db4-win32
+set NEON_DIR=..\neon
+set ZLIB_DIR=..\zlib
+set OPENSSL_DIR=..\openssl
+set INTL_DIR=..\svn-win32-libintl
+
+REM Uncomment this if you want a clean subversion build after testing
+REM set CLEAN_SVN=1
+
+REM Uncomment this if you want to disable ra_svn tests
+REM set NO_RA_SVN=1
+
+REM Uncomment this if you want to disable ra_dav tests
+REM set NO_RA_HTTP=1
+
+set SDKINC=%SDK_DIR%\include
+set SDKLIB=%SDK_DIR%\lib
+
+REM Expose the VC6 toolchain and Platform SDK to the build scripts
+REM (SDK dirs go first so its newer headers/libs win over VC98's).
+set PATH=%MSDEV%\VC98\Bin;%MSDEV%\Common\MSDev98\Bin\;%PATH%
+set INCLUDE=%SDKINC%;%MSDEV%\VC98\ATL\INCLUDE;%MSDEV%\VC98\INCLUDE;%MSDEV%\VC98\MFC\INCLUDE
+set LIB=%SDKLIB%;%MSDEV%\VC98\LIB;%MSDEV%\VC98\MFC\LIB
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svnbuild.bat b/tools/buildbot/slaves/xp-vc60-ia32/svnbuild.bat new file mode 100644 index 0000000..a8852de --- /dev/null +++ b/tools/buildbot/slaves/xp-vc60-ia32/svnbuild.bat @@ -0,0 +1,77 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Build OpenSSL (if configured), Apache httpd and Subversion with
+REM Visual C++ 6 from generated .dsp/.dsw project files.
+REM Exit codes: 0 = success, 1 = build failure, 2 = missing config.bat.
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+REM Build OpenSSL first when a source dir is configured; the :openssl
+REM subroutine returns its status via EXIT /B so we can bail to :ERROR.
+IF NOT "%OPENSSL_DIR%"=="" call :openssl
+IF ERRORLEVEL 1 GOTO ERROR
+
+set PARAMS=-t dsp --with-berkeley-db=%BDB_DIR% --with-libintl=%INTL_DIR% --with-zlib=%ZLIB_DIR% --with-httpd=%HTTPD_SRC_DIR% --with-neon=%NEON_DIR% --enable-bdb-in-apr-util
+IF NOT "%OPENSSL_DIR%"=="" set PARAMS=%PARAMS% --with-openssl=%OPENSSL_DIR%
+
+REM Generate subversion_msvc.dsw and the .dsp project files.
+python gen-make.py %PARAMS%
+IF ERRORLEVEL 1 GOTO ERROR
+
+MSDEV.COM %HTTPD_SRC_DIR%\apache.dsw /MAKE "BuildBin - Win32 Release"
+IF ERRORLEVEL 1 GOTO ERROR
+
+REM /USEENV picks up the INCLUDE/LIB/PATH set by config.bat.
+MSDEV.COM subversion_msvc.dsw /USEENV /MAKE "__ALL_TESTS__ - Win32 Release"
+IF ERRORLEVEL 1 GOTO ERROR
+
+
+EXIT 0
+
+REM ----------------------------------------------------
+:ERROR
+ECHO.
+ECHO *** Whoops, something choked.
+ECHO.
+CD ..
+EXIT 1
+
+
+REM Subroutine: configure, build and self-test OpenSSL in %OPENSSL_DIR%.
+REM Returns 0 on success, 1 on failure (via EXIT /B, back to the caller).
+:openssl
+rem ====== Build openssl.
+pushd %OPENSSL_DIR%
+perl Configure VC-WIN32
+IF ERRORLEVEL 1 goto openssl-err1
+
+call ms\do_ms
+IF ERRORLEVEL 1 goto openssl-err1
+
+nmake -f ms\ntdll.mak /NOLOGO /S
+IF ERRORLEVEL 1 goto openssl-err1
+
+pushd out32dll
+call ..\ms\test
+IF ERRORLEVEL 1 goto openssl-err2
+
+popd
+popd
+EXIT /B 0
+
+REM err2 needs an extra popd to unwind the out32dll pushd, then falls
+REM through to err1 for the %OPENSSL_DIR% popd.
+:openssl-err2
+popd
+
+:openssl-err1
+popd
+REM Was "EXIT 1": a plain EXIT here terminated the whole cmd.exe
+REM instance, so the caller's "IF ERRORLEVEL 1 GOTO ERROR" never ran and
+REM the :ERROR handler (message + CD ..) was skipped. EXIT /B returns
+REM control (and errorlevel 1) to the call site as intended.
+EXIT /B 1
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svncheck.bat b/tools/buildbot/slaves/xp-vc60-ia32/svncheck.bat new file mode 100644 index 0000000..70f102f --- /dev/null +++ b/tools/buildbot/slaves/xp-vc60-ia32/svncheck.bat @@ -0,0 +1,51 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Run the Subversion test suite for one filesystem/RA combination
+REM on the xp-vc60-ia32 slave.
+REM Usage: svncheck.bat FS_TYPE RA_TYPE
+REM   FS_TYPE: fsfs or bdb
+REM   RA_TYPE: ra_local, ra_svn or ra_dav
+REM Exit codes: 0 = tests passed, 1 = test failure,
+REM             2 = missing config.bat, 3 = unknown RA_TYPE.
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+set FS_TYPE=%1
+set RA_TYPE=%2
+
+REM By default, return zero
+set ERR=0
+
+REM Dispatch on the requested RA layer.
+if "%RA_TYPE%"=="ra_local" goto ra_local
+if "%RA_TYPE%"=="ra_svn" goto ra_svn
+if "%RA_TYPE%"=="ra_dav" goto ra_dav
+
+echo Unknown ra method '%RA_TYPE%'
+EXIT 3
+
+:ra_local
+python win-tests.py %TEST_DIR% -f %FS_TYPE% -c -r
+if ERRORLEVEL 1 set ERR=1
+EXIT %ERR%
+
+:ra_svn
+python win-tests.py %TEST_DIR% -f %FS_TYPE% -c -r -u svn://localhost
+if ERRORLEVEL 1 set ERR=1
+EXIT %ERR%
+
+:ra_dav
+python win-tests.py %TEST_DIR% -f %FS_TYPE% -c -r --httpd-dir="%HTTPD_BIN_DIR%" --httpd-port 1234
+if ERRORLEVEL 1 set ERR=1
+EXIT %ERR%
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svnclean.bat b/tools/buildbot/slaves/xp-vc60-ia32/svnclean.bat new file mode 100644 index 0000000..071610d --- /dev/null +++ b/tools/buildbot/slaves/xp-vc60-ia32/svnclean.bat @@ -0,0 +1,28 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Clean the VC6 build output, but only when CLEAN_SVN is set in
+REM config.bat (a full rebuild is expensive on this slave).
+REM Exit codes: 0 = done, 1 = clean failed, 2 = missing config.bat.
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+if NOT "%CLEAN_SVN%"=="" MSDEV.COM subversion_msvc.dsw /MAKE "__ALL_TESTS__ - Win32 Release" /CLEAN
+if ERRORLEVEL 1 EXIT 1
+
+EXIT 0
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/buildbot/slaves/xp-vc60-ia32/svnlog.bat b/tools/buildbot/slaves/xp-vc60-ia32/svnlog.bat new file mode 100644 index 0000000..e93a381 --- /dev/null +++ b/tools/buildbot/slaves/xp-vc60-ia32/svnlog.bat @@ -0,0 +1,25 @@ +REM Licensed to the Apache Software Foundation (ASF) under one
+REM or more contributor license agreements. See the NOTICE file
+REM distributed with this work for additional information
+REM regarding copyright ownership. The ASF licenses this file
+REM to you under the Apache License, Version 2.0 (the
+REM "License"); you may not use this file except in compliance
+REM with the License. You may obtain a copy of the License at
+REM
+REM http://www.apache.org/licenses/LICENSE-2.0
+REM
+REM Unless required by applicable law or agreed to in writing,
+REM software distributed under the License is distributed on an
+REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+REM KIND, either express or implied. See the License for the
+REM specific language governing permissions and limitations
+REM under the License.
+
+REM Buildbot "log" step placeholder for the xp-vc60-ia32 slave: it only
+REM verifies config.bat exists and exits successfully.
+IF NOT EXIST ..\config.bat GOTO noconfig
+call ..\config.bat
+
+EXIT 0
+
+:noconfig
+echo File config.bat not found. Please copy it from config.bat.tmpl and tweak for you.
+EXIT 2
diff --git a/tools/client-side/bash_completion b/tools/client-side/bash_completion new file mode 100644 index 0000000..e45c3f6 --- /dev/null +++ b/tools/client-side/bash_completion @@ -0,0 +1,1504 @@ +# ------------------------------------------------------------ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ------------------------------------------------------------ + +# Programmable completion for the Subversion svn command under bash. Source +# this file (or on some systems add it to ~/.bash_completion and start a new +# shell) and bash's completion mechanism will know all about svn's options! +# Provides completion for the svnadmin, svndumpfilter, svnlook and svnsync +# commands as well. Who wants to read man pages/help text... + +# Known to work with bash 3.* with programmable completion and extended +# pattern matching enabled (use 'shopt -s extglob progcomp' to enable +# these if they are not already enabled). + +shopt -s extglob + +# Tree helper functions which only use bash, to ease readability. 
+ +# look for value associated to key from stdin in K/V hash file format +# val=$(_svn_read_hashfile svn:realmstring < some/file) +function _svn_read_hashfile() +{ + local tkey=$1 key= val= + while true; do + read tag len + [ $tag = 'END' ] && break + [ $tag != 'K' ] && { + #echo "unexpected tag '$tag' instead of 'K'" >&2 + return + } + read -r -n $len key ; read + read tag len + [ $tag != 'V' ] && { + #echo "unexpected tag '$tag' instead of 'V'" >&2 + return + } + read -r -n $len val ; read + if [[ $key = $tkey ]] ; then + echo "$val" + return + fi + done + #echo "target key '$tkey' not found" >&2 +} + +# _svn_grcut shell-regular-expression +# extract filenames from 'svn status' output +function _svn_grcut() +{ + local re=$1 line= old_IFS + # fix IFS, so that leading spaces are not ignored by next read. + # (there is a leading space in svn status output if only a prop is changed) + old_IFS="$IFS" + IFS=$'\n' + while read -r line ; do + [[ ! $re || $line == $re ]] && echo "${line/????????/}" + done + IFS="$old_IFS" +} + +# extract stuff from svn info output +# _svn_info (URL|Repository Root) +function _svn_info() +{ + local what=$1 line= + LANG=C LC_MESSAGES=C svn info --non-interactive 2> /dev/null | \ + while read line ; do + [[ $line == *"$what: "* ]] && echo ${line#*: } + done +} + +# _svn_lls (dir|file|all) files... +# list svn-managed files from list +# some 'svn status --all-files' would be welcome here? +function _svn_lls() +{ + local opt=$1 f= + shift + for f in "$@" ; do + # could try to check in .svn/entries? hmmm... + if [[ $opt == @(dir|all) && -d "$f" ]] ; then + echo "$f/" + elif [[ $opt == @(file|all) ]] ; then + # split f in directory/file names + local dn= fn="$f" + [[ "$f" == */* ]] && dn=${f%\/*}/ fn=${f##*\/} + # ??? this does not work for just added files, because they + # do not have a content reference yet... 
+ [ -f "${dn}.svn/text-base/${fn}.svn-base" ] && echo "$f" + fi + done +} + +# This completion guides the command/option order along the one suggested +# by "svn help", although other syntaxes are allowed. +# +# - there is a "real" parser to check for what is available and deduce what +# can be suggested further. +# - the syntax should be coherent with subversion/svn/{cl.h,main.c} +# - although it is not a good practice, mixed options and arguments +# is supported by the completion as it is by the svn command. +# - the completion works in the middle of a line, +# but not really in the middle of an argument or option. +# - property names are completed: see comments about issues related to handling +# ":" within property names although it is a word completion separator. +# - unknown properties are assumed to be simple file properties. +# - --revprop and --revision options are forced to revision properties +# as they are mandatory in this case. +# - argument values are suggested to some other options, eg directory names +# for --config-dir. +# - values for some options can be extended with environment variables: +# SVN_BASH_FILE_PROPS: other properties on files/directories +# SVN_BASH_REV_PROPS: other properties on revisions +# SVN_BASH_ENCODINGS: encodings to be suggested +# SVN_BASH_MIME_TYPE: mime types to be suggested +# SVN_BASH_KEYWORDS: "svn:keywords" substitutions to be suggested +# SVN_BASH_USERNAME: usernames suggested for --username +# SVN_BASH_COMPL_EXT: completion extensions for file arguments, based on the +# current subcommand, so that for instance only modified files are +# suggested for 'revert', only not svn-managed files for 'add', and so on. +# Possible values are: +# - username: guess usernames from ~/.subversion/auth/... +# - urls: guess urls from ~/.subversion/auth/... 
or others +# - svnstatus: use 'svn status' for completion +# - recurse: allow recursion (expensive) +# - externals: recurse into externals (very expensive) +# Former options are reasonable, but beware that both later options +# may be unadvisable if used on large working copies. +# None of these costly completions are activated by default. +# Argument completion outside a working copy results in an error message. +# Filenames with spaces are not completed properly. +# +# TODO +# - other options? +# - obsolete options could be removed from auto-comp? (e.g. -N) +# - obsolete commands could be removed? (e.g. resolved) +# - completion does not work properly when editing in the middle of the line +# status/previous are those at the end of the line, not at the entry position +# - url completion should select more cases where it is relevant +# - url completion of http:// schemas could suggest sub directories? +# - add completion for experimental 'obliterate' feature? +_svn() +{ + local cur cmds cmdOpts pOpts mOpts rOpts qOpts nOpts optsParam opt + + COMPREPLY=() + cur=${COMP_WORDS[COMP_CWORD]} + + # Possible expansions, without pure-prefix abbreviations such as "up". + cmds='add blame annotate praise cat changelist cl checkout co cleanup' + cmds="$cmds commit ci copy cp delete remove rm diff export help import" + cmds="$cmds info list ls lock log merge mergeinfo mkdir move mv rename" + cmds="$cmds patch propdel pdel propedit pedit propget pget proplist" + cmds="$cmds plist propset pset relocate resolve resolved revert status" + cmds="$cmds switch unlock update upgrade" + + # help options have a strange command status... 
+ local helpOpts='--help -h' + # all special options that have a command status + local specOpts="--version $helpOpts" + + # options that require a parameter + # note: continued lines must end '|' continuing lines must start '|' + optsParam="-r|--revision|--username|--password|--targets" + optsParam="$optsParam|-x|--extensions|-m|--message|-F|--file" + optsParam="$optsParam|--encoding|--diff-cmd|--diff3-cmd|--editor-cmd" + optsParam="$optsParam|--old|--new|--config-dir|--config-option" + optsParam="$optsParam|--native-eol|-l|--limit|-c|--change" + optsParam="$optsParam|--depth|--set-depth|--with-revprop" + optsParam="$optsParam|--cl|--changelist|--accept|--show-revs" + + # svn:* and other (env SVN_BASH_*_PROPS) properties + local svnProps revProps allProps psCmds propCmds + + # svn and user configured "file" (or directory) properties + # the "svn:mergeinfo" prop is not included by default because it is + # managed automatically, so there should be no need to edit it by hand. + svnProps="svn:keywords svn:executable svn:needs-lock svn:externals + svn:ignore svn:eol-style svn:mime-type $SVN_BASH_FILE_PROPS" + + # svn and user configured revision properties + revProps="svn:author svn:log svn:date $SVN_BASH_REV_PROPS" + + # all properties as an array variable + allProps=( $svnProps $revProps ) + + # subcommands that expect property names + psCmds='propset|pset|ps' + propCmds="$psCmds|propget|pget|pg|propedit|pedit|pe|propdel|pdel|pd" + + # possible URL schemas to access a subversion server + local urlSchemas='file:/// http:// https:// svn:// svn+ssh://' + + # Parse arguments and set various variables about what was found. 
+ # + # cmd: the current command if available + # isPropCmd: whether it expects a property name argument + # isPsCmd: whether it also expects a property value argument + # isHelpCmd: whether it is about help + # nExpectArgs: how many arguments are expected by the command + # help: help requested about this command (if cmd=='help') + # prop: property name (if appropriate) + # isRevProp: is it a special revision property + # val: property value (if appropriate, under pset) + # options: all options encountered + # hasRevPropOpt: is --revprop set + # hasRevisionOpt: is --revision set + # hasRelocateOpt: is --relocate set + # hasReintegrateOpt: is --reintegrate set + # acceptOpt: the value of --accept + # nargs: how many arguments were found + # stat: status of parsing at the 'current' word + # + # prev: previous command in the loop + # last: status of last parameter analyzed + # i: index + local cmd= isPropCmd= isPsCmd= isHelpCmd= nExpectArgs= isCur= i=0 + local prev= help= prop= val= isRevProp= last='none' nargs=0 stat= + local options= hasRevPropOpt= hasRevisionOpt= hasRelocateOpt= + local acceptOpt= URL= hasReintegrateOpt= + + for opt in "${COMP_WORDS[@]}" + do + # get status of current word (from previous iteration) + [[ $isCur ]] && stat=$last + + # are we processing the current word + isCur= + [[ $i -eq $COMP_CWORD ]] && isCur=1 + let i++ + + # FIRST must be the "svn" command + [ $last = 'none' ] && { last='first'; continue ; } + + # SKIP option arguments + if [[ $prev == @($optsParam) ]] ; then + + # record accept value + [[ $prev = '--accept' ]] && acceptOpt=$opt + + prev='' + last='skip' + continue ; + fi + + # Argh... This looks like a bash bug... + # Redirections are passed to the completion function + # although it is managed by the shell directly... + # It matters because we want to tell the user when no more + # completion is available, so it does not necessary + # fallback to the default case. 
+ if [[ $prev == @(<|>|>>|[12]>|[12]>>) ]] ; then + prev='' + last='skip' + continue ; + fi + prev=$opt + + # get the subCoMmanD + if [[ ! $cmd && $opt \ + && ( $opt != -* || $opt == @(${specOpts// /|}) ) ]] + then + cmd=$opt + [[ $cmd == @($propCmds) ]] && isPropCmd=1 + [[ $cmd == @($psCmds) ]] && isPsCmd=1 + [[ $cmd == @(${helpOpts// /|}) ]] && cmd='help' + [[ $cmd = 'help' ]] && isHelpCmd=1 + # HELP about a command asked with an option + if [[ $isHelpCmd && $cmd && $cmd != 'help' && ! $help ]] + then + help=$cmd + cmd='help' + fi + last='cmd' + continue + fi + + # HELP about a command + if [[ $isHelpCmd && ! $help && $opt && $opt != -* ]] + then + help=$opt + last='help' + continue + fi + + # PROPerty name + if [[ $isPropCmd && ! $prop && $opt && $opt != -* ]] + then + prop=$opt + [[ $prop == @(${revProps// /|}) ]] && isRevProp=1 + last='prop' + continue + fi + + # property VALue + if [[ $isPsCmd && $prop && ! $val && $opt != -* ]] ; + then + val=$opt + last='val' + continue + fi + + if [[ $last != 'onlyarg' ]] + then + # more OPTions + case $opt in + -r|--revision|--revision=*) + hasRevisionOpt=1 + ;; + --revprop) + hasRevPropOpt=1 + # restrict to revision properties! + allProps=( $revProps ) + # on revprops, only one URL is expected + nExpectArgs=1 + ;; + -h|--help) + isHelpCmd=1 + ;; + -F|--file) + val='-F' + ;; + --relocate) + hasRelocateOpt=1 + ;; + --reintegrate) + hasReintegrateOpt=1 + ;; + esac + + # no more options, only arguments, whatever they look like. + if [[ $opt = '--' && ! $isCur ]] ; then + last='onlyarg' + continue + fi + + # options are recorded... + if [[ $opt == -* ]] ; then + # but not the current one! + [[ ! $isCur ]] && options="$options $opt " + last='opt' + continue + fi + else + # onlyarg + let nargs++ + continue + fi + + # then we have an argument + if [[ $cmd = 'merge' && ! $URL ]] ; then + # fist argument is the source URL for the merge + URL=$opt + fi + + last='arg' + let nargs++ + done + # end opt option processing... 
+ [[ $stat ]] || stat=$last + + # suggest all subcommands, including special help + if [[ ! $cmd || $stat = 'cmd' ]] + then + COMPREPLY=( $( compgen -W "$cmds $specOpts" -- $cur ) ) + return 0 + fi + + # suggest all subcommands + if [[ $stat = 'help' || ( $isHelpCmd && ! $help ) ]] + then + COMPREPLY=( $( compgen -W "$cmds" -- $cur ) ) + return 0 + fi + + # URL completion + if [[ $cmd == @(co|checkout|ls|list) && $stat = 'arg' && \ + $SVN_BASH_COMPL_EXT == *urls* ]] + then + # see about COMP_WORDBREAKS workaround in prop completion + if [[ $cur == file:* ]] + then + # file completion for file:// urls + local where=${cur/file:/} + COMPREPLY=( $(compgen -d -S '/' -X '*/.*' -- $where ) ) + return + elif [[ $cur == *:* ]] + then + # get known urls + local urls= file= + for file in ~/.subversion/auth/svn.simple/* ; do + if [ -r $file ] ; then + local url=$(_svn_read_hashfile svn:realmstring < $file) + url=${url/*</} + url=${url/>*/} + urls="$urls $url" + fi + done + + # only suggest/show possible suffixes + local prefix=${cur%:*} suffix=${cur#*:} c= choices= + for c in $urls ; do + [[ $c == $prefix:* ]] && choices="$choices ${c#*:}" + done + + COMPREPLY=( $(compgen -W "$choices" -- $suffix ) ) + return + else + # show schemas + COMPREPLY=( $(compgen -W "$urlSchemas" -- $cur) ) + return + fi + fi + + if [[ $cmd = 'merge' || $cmd = 'mergeinfo' ]] + then + local here=$(_svn_info URL) + # suggest a possible URL for merging + if [[ ! $URL && $stat = 'arg' ]] ; then + # we assume a 'standard' repos with branches and trunk + if [[ "$here" == */branches/* ]] ; then + # we guess that it is a merge from the trunk + COMPREPLY=( $(compgen -W ${here/\/branches\/*/\/trunk} -- $cur ) ) + return 0 + elif [[ "$here" == */trunk* ]] ; then + # we guess that it is a merge from a branch + COMPREPLY=( $(compgen -W ${here/\/trunk*/\/branches\/} -- $cur ) ) + return 0 + else + # no se, let us suggest the repository root... 
+ COMPREPLY=( $(compgen -W $(_svn_info Root) -- $cur ) ) + return 0 + fi + elif [[ $URL == */branches/* && $here == */trunk* && \ + ! $hasReintegrateOpt && $cur = '' && $stat = 'arg' ]] ; then + # force --reintegrate only if the current word is empty + COMPREPLY=( $(compgen -W '--reintegrate' -- $cur ) ) + return 0 + fi + fi + + # help about option arguments + if [[ $stat = 'skip' ]] + then + local previous=${COMP_WORDS[COMP_CWORD-1]} + local values= dirs= beep= exes= + + [[ $previous = '--config-dir' ]] && dirs=1 + + # external editor, diff, diff3... + [[ $previous = --*-cmd ]] && exes=1 + + [[ $previous = '--native-eol' ]] && values='LF CR CRLF' + + # just to suggest that a number is expected. hummm. + [[ $previous = '--limit' ]] && values='0 1 2 3 4 5 6 7 8 9' + + # some special partial help about --revision option. + [[ $previous = '--revision' || $previous = '-r' ]] && \ + values='HEAD BASE PREV COMMITTED 0 {' + + [[ $previous = '--encoding' ]] && \ + values="latin1 utf8 $SVN_BASH_ENCODINGS" + + [[ $previous = '--extensions' || $previous = '-x' ]] && \ + values="--unified --ignore-space-change \ + --ignore-all-space --ignore-eol-style" + + [[ $previous = '--depth' ]] && \ + values='empty files immediates infinity' + + [[ $previous = '--set-depth' ]] && \ + values='empty exclude files immediates infinity' + + [[ $previous = '--accept' ]] && \ + { + # the list is different for 'resolve' + if [[ $cmd = 'resolve' ]] ; then + # from svn help resolve + values='base working mine-full theirs-full' + else # checkout merge switch update + # not implemented yet: mine-conflict theirs-conflict + values='postpone base mine-full theirs-full edit launch' + fi + } + + [[ $previous = '--show-revs' ]] && values='merged eligible' + + if [[ $previous = '--username' ]] ; then + values="$SVN_BASH_USERNAME" + if [[ $SVN_BASH_COMPL_EXT == *username* ]] ; then + local file= + # digest? others? 
+ for file in ~/.subversion/auth/svn.simple/* ; do + if [ -r $file ] ; then + values="$values $(_svn_read_hashfile username < $file)" + fi + done + fi + [[ ! "$values" ]] && beep=1 + fi + + # could look at ~/.subversion/ ? + # hmmm... this option should not exist + [[ $previous = '--password' ]] && beep=1 + + # TODO: provide help about other options such as: + # --old --new --with-revprop + + # if the previous option required a parameter, do something + # or fallback on ordinary filename expansion + [[ $values ]] && COMPREPLY=( $( compgen -W "$values" -- $cur ) ) + [[ $dirs ]] && COMPREPLY=( $( compgen -o dirnames -- $cur ) ) + [[ $exes ]] && COMPREPLY=( $( compgen -c -- $cur ) ) + [[ $beep ]] && + { + # 'no known completion'. hummm. + echo -en "\a" + COMPREPLY=( '' ) + } + return 0 + fi + + # provide allowed property names after property commands + if [[ $isPropCmd && ( ! $prop || $stat = 'prop' ) && $cur != -* ]] + then + # + # Ok, this part is pretty ugly. + # + # The issue is that ":" is a completion word separator, + # which is a good idea for file:// urls but not within + # property names... + # + # The first idea was to remove locally ":" from COMP_WORDBREAKS + # and then put it back in all cases but in property name + # completion. It does not always work. There is a strange bug + # where one may get "svn:svn:xxx" in some unclear cases. + # + # Thus the handling is reprogrammed here... + # The code assumes that property names look like *:*, + # but it also works reasonably well with simple names. + # + # This hack is broken in bash4... 
not sure what to do about it, + # especially while keeping the bash3 compatibility:-( + local choices= + + if [[ $cur == *:* ]] + then + # only suggest/show possible suffixes + local prefix=${cur%:*} suffix=${cur#*:} c= + for c in ${allProps[@]} ; do + [[ $c == $prefix:* ]] && choices="$choices ${c#*:}" + done + # everything will be appended to the prefix because ':' is + # a separator, so cur is restricted to the suffix part. + cur=$suffix + else + # only one choice is fine + COMPREPLY=( $( compgen -W "${allProps[*]}" -- $cur ) ) + [ ${#COMPREPLY[@]} -eq 1 ] && return 0 + + # no ':' so only suggest prefixes? + local seen= n=0 last= c= + for c in ${allProps[@]%:*} ; do + # do not put the same prefix twice... + if [[ $c == $cur* && ( ! $seen || $c != @($seen) ) ]] + then + let n++ + last=$c + choices="$choices $c:" + if [[ $seen ]] + then + seen="$seen|$c*" + else + seen="$c*" + fi + fi + done + + # supply two choices to force a partial completion and a beep + [[ $n -eq 1 ]] && choices="$last:1 $last:2" + fi + + COMPREPLY=( $( compgen -W "$choices" -- $cur ) ) + return 0 + fi + + # force mandatory --revprop option on revision properties + if [[ $isRevProp && ! $hasRevPropOpt ]] + then + COMPREPLY=( $( compgen -W '--revprop' -- $cur ) ) + return 0 + fi + + # force mandatory --revision option on revision properties + if [[ $isRevProp && $hasRevPropOpt && ! $hasRevisionOpt ]] + then + COMPREPLY=( $( compgen -W '--revision' -- $cur ) ) + return 0 + fi + + # possible completion when setting property values + if [[ $isPsCmd && $prop && ( ! $val || $stat = 'val' ) ]] + then + # ' is a reminder for an arbitrary value + local values="\' --file" + case $prop in + svn:keywords) + # just a subset? + values="Id Rev URL Date Author Header \' $SVN_BASH_KEYWORDS" + ;; + svn:executable|svn:needs-lock) + # hmmm... canonical value * is special to the shell. 
+ values='\\*' + ;; + svn:eol-style) + values='native LF CR CRLF' + ;; + svn:mime-type) + # could read /etc/mime.types if available. overkill. + values="text/ text/plain text/html text/xml text/rtf + image/ image/png image/gif image/jpeg image/tiff + audio/ audio/midi audio/mpeg + video/ video/mpeg video/mp4 + application/ application/octet-stream + $SVN_BASH_MIME_TYPE" + ;; + esac + + COMPREPLY=( $( compgen -W "$values" -- $cur ) ) + # special case for --file... return even if within an option + [[ ${COMPREPLY} ]] && return 0 + fi + + # force mandatory --accept option for 'resolve' command + if [[ $cmd = 'resolve' && ! $acceptOpt ]] + then + COMPREPLY=( $( compgen -W '--accept' -- $cur ) ) + # force option now! others will be available on later completions + return 0 + fi + + # maximum number of additional arguments expected in various forms + case $cmd in + merge) + nExpectArgs=3 + ;; + mergeinfo) + nExpectArgs=1 + ;; + copy|cp|move|mv|rename|ren|export|import) + nExpectArgs=2 + ;; + switch|sw) + [[ ! $hasRelocateOpt ]] && nExpectArgs=2 + ;; + help|h) + nExpectArgs=0 + ;; + --version) + nExpectArgs=0 + ;; + esac + + # the maximum number of arguments is reached for a command + if [[ $nExpectArgs && $nargs -gt $nExpectArgs ]] + then + # some way to tell 'no completion at all'... is there a better one? + # Do not say 'file completion' here. + echo -en "\a" + COMPREPLY=( '' ) + return 0 + fi + + # if not typing an option, + # then fallback on filename expansion... + if [[ $cur != -* || $stat = 'onlyarg' ]] ; then + + # do we allow possible expensive completion here? 
+ if [[ $SVN_BASH_COMPL_EXT == *svnstatus* ]] ; then + + # build status command and options + # "--quiet" removes 'unknown' files + local status='svn status --non-interactive' + + [[ $SVN_BASH_COMPL_EXT == *recurse* ]] || \ + status="$status --non-recursive" + + # I'm not sure that it can work with externals in call cases + # the output contains translatable sentences (even with quiet) + [[ $SVN_BASH_COMPL_EXT == *externals* ]] || \ + status="$status --ignore-externals" + + local cs= files= + # subtlety: must not set $cur* if $cur is empty in some cases + [[ $cur ]] && cs=$cur* + + # 'files' is set according to the current subcommand + case $cmd in + st*) # status completion must include all files + files=$cur* + ;; + ci|commit|revert|di*) # anything edited + files=$($status $cs| _svn_grcut '@([MADR!]*| M*|_M*)') + ;; + add) # unknown files + files=$($status $cs| _svn_grcut '\?*') + ;; + unlock) # unlock locked files + files=$($status $cs| _svn_grcut '@(??L*|?????[KOTB]*)') + ;; + resolve*) # files in conflict + files=$($status $cs| _svn_grcut '@(?C*|C*)') + ;; + praise|blame|ann*) # any svn file but added + files=$( _svn_lls all $cur* ) + ;; + p*) # prop commands + if [[ $cmd == @($propCmds) && \ + $prop == @(svn:ignore|svn:externals) ]] ; then + # directory specific props + files=$( _svn_lls dir . $cur* ) + else + # ??? added directories appear twice: foo foo/ + files="$( _svn_lls all $cur* ) + $($status $cs | _svn_grcut 'A*' )" + fi + ;; + info) # information on any file + files="$( _svn_lls all $cur* ) + $($status $cs | _svn_grcut 'A*' )" + ;; + remove|rm|del*|move|mv|rename) # changing existing files + files=$( _svn_lls all $cur* ) + ;; + mkdir) # completion in mkdir can only be for subdirs? + files=$( _svn_lls dir $cur* ) + ;; + log|lock|up*|cl*|switch) # misc, all but added files + files=$( _svn_lls all $cur* ) + ;; + merge) # may do a better job? URL/WCPATH + files=$( _svn_lls all $cur* ) + ;; + ls|list) # better job? what about URLs? 
+ files=$( _svn_lls all $cur* ) + ;; + *) # other commands: changelist export import cat mergeinfo + local fallback=1 + ;; + esac + + # when not recursive, some relevant files may exist + # within subdirectories, so they are added here. + # should it be restricted to svn-managed subdirs? no?? + if [[ $SVN_BASH_COMPL_EXT != *recurse* ]] ; then + files="$files $( _svn_lls dir $cur* )" + fi + + # set completion depending on computed 'files' + if [[ $files ]] ; then + COMPREPLY=( $( compgen -W "$files" -- $cur ) ) + # if empty, set to nope? + [[ "${COMPREPLY[*]}" ]] || COMPREPLY=( '' ) + elif [[ ! $fallback ]] ; then + # this suggests no completion... + echo -en "\a" + COMPREPLY=( '' ) + fi + fi + # else fallback to ordinary filename completion... + return 0 + fi + + # otherwise build possible options for the command + pOpts="--username --password --no-auth-cache --non-interactive \ + --trust-server-cert" + mOpts="-m --message -F --file --encoding --force-log --with-revprop" + rOpts="-r --revision" + qOpts="-q --quiet" + nOpts="-N --non-recursive --depth" + gOpts="-g --use-merge-history" + cOpts="--cl --changelist" + + cmdOpts= + case $cmd in + --version) + cmdOpts="$qOpts" + ;; + add) + cmdOpts="--auto-props --no-auto-props --force --targets \ + --no-ignore --parents $nOpts $qOpts $pOpts" + ;; + blame|annotate|ann|praise) + cmdOpts="$rOpts $pOpts -v --verbose --incremental --xml \ + -x --extensions --force $gOpts" + ;; + cat) + cmdOpts="$rOpts $pOpts" + ;; + changelist|cl) + cmdOpts="--targets $pOpts $qOpts $cOpts \ + -R --recursive --depth --remove" + ;; + checkout|co) + cmdOpts="$rOpts $qOpts $nOpts $pOpts --ignore-externals \ + --force" + ;; + cleanup) + cmdOpts="--diff3-cmd $pOpts" + ;; + commit|ci) + cmdOpts="$mOpts $qOpts $nOpts --targets --editor-cmd $pOpts \ + --no-unlock $cOpts --keep-changelists" + ;; + copy|cp) + cmdOpts="$mOpts $rOpts $qOpts --editor-cmd $pOpts --parents \ + --ignore-externals" + ;; + delete|del|remove|rm) + cmdOpts="--force $mOpts $qOpts 
--targets --editor-cmd $pOpts \ + --keep-local" + ;; + diff|di) + cmdOpts="$rOpts -x --extensions --diff-cmd --no-diff-deleted \ + $nOpts $pOpts --force --old --new --notice-ancestry \ + -c --change --summarize $cOpts --xml --git \ + --internal-diff --show-copies-as-adds" + ;; + export) + cmdOpts="$rOpts $qOpts $pOpts $nOpts --force --native-eol \ + --ignore-externals --ignore-keywords" + ;; + help|h|\?) + cmdOpts= + ;; + import) + cmdOpts="--auto-props --no-auto-props $mOpts $qOpts $nOpts \ + --no-ignore --editor-cmd $pOpts --force" + ;; + info) + cmdOpts="$pOpts $rOpts --targets -R --recursive --depth \ + --incremental --xml $cOpts" + ;; + list|ls) + cmdOpts="$rOpts -v --verbose -R --recursive $pOpts \ + --incremental --xml --depth" + ;; + lock) + cmdOpts="-m --message -F --file --encoding --force-log \ + --targets --force $pOpts" + ;; + log) + cmdOpts="$rOpts -v --verbose --targets $pOpts --stop-on-copy \ + --incremental --xml $qOpts -l --limit -c --change \ + $gOpts --with-all-revprops --with-revprop --depth \ + --diff --diff-cmd -x --extensions --internal-diff \ + --with-no-revprops" + ;; + merge) + cmdOpts="$rOpts $nOpts $qOpts --force --dry-run --diff3-cmd \ + $pOpts --ignore-ancestry -c --change -x --extensions \ + --record-only --accept --reintegrate \ + --allow-mixed-revisions" + ;; + mergeinfo) + cmdOpts="$rOpts $pOpts --depth --show-revs -R --recursive" + ;; + mkdir) + cmdOpts="$mOpts $qOpts --editor-cmd $pOpts --parents" + ;; + move|mv|rename|ren) + cmdOpts="$mOpts $rOpts $qOpts --force --editor-cmd $pOpts \ + --parents" + ;; + patch) + cmdOpts="$qOpts $pOpts --dry-run --ignore-whitespace --reverse-diff --strip" + ;; + propdel|pdel|pd) + cmdOpts="$qOpts -R --recursive $rOpts $pOpts $cOpts \ + --depth" + [[ $isRevProp || ! $prop ]] && cmdOpts="$cmdOpts --revprop" + ;; + propedit|pedit|pe) + cmdOpts="--editor-cmd $pOpts $mOpts --force" + [[ $isRevProp || ! 
$prop ]] && \ + cmdOpts="$cmdOpts --revprop $rOpts" + ;; + propget|pget|pg) + cmdOpts="-v --verbose -R --recursive $rOpts --strict $pOpts $cOpts \ + --depth --xml" + [[ $isRevProp || ! $prop ]] && cmdOpts="$cmdOpts --revprop" + ;; + proplist|plist|pl) + cmdOpts="-v --verbose -R --recursive $rOpts --revprop $qOpts \ + $pOpts $cOpts --depth --xml" + ;; + propset|pset|ps) + cmdOpts="$qOpts --targets -R --recursive \ + --encoding $pOpts --force $cOpts --depth" + [[ $isRevProp || ! $prop ]] && \ + cmdOpts="$cmdOpts --revprop $rOpts" + [[ $val ]] || cmdOpts="$cmdOpts -F --file" + ;; + relocate) + cmdOpts="--ignore-externals $pOpts" + ;; + resolve) + cmdOpts="--targets -R --recursive $qOpts $pOpts --accept \ + --depth" + ;; + resolved) + cmdOpts="--targets -R --recursive $qOpts $pOpts --depth" + ;; + revert) + cmdOpts="--targets -R --recursive $qOpts $cOpts \ + --depth $pOpts" + ;; + status|stat|st) + cmdOpts="-u --show-updates -v --verbose $nOpts $qOpts $pOpts \ + --no-ignore --ignore-externals --incremental --xml \ + $cOpts" + ;; + switch|sw) + cmdOpts="--relocate $rOpts $nOpts $qOpts $pOpts --diff3-cmd \ + --force --accept --ignore-externals --set-depth \ + --ignore-ancestry" + ;; + unlock) + cmdOpts="--targets --force $pOpts" + ;; + update|up) + cmdOpts="$rOpts $nOpts $qOpts $pOpts --diff3-cmd \ + --ignore-externals --force --accept $cOpts \ + --parents --editor-cmd --set-depth" + ;; + upgrade) + cmdOpts="$qOpts $pOpts" + ;; + *) + ;; + esac + + # add options that are nearly always available + [[ "$cmd" != "--version" ]] && cmdOpts="$cmdOpts $helpOpts" + cmdOpts="$cmdOpts --config-dir --config-option" + + # --accept (edit|launch) incompatible with --non-interactive + if [[ $acceptOpt == @(edit|launch) ]] ; + then + cmdOpts=${cmdOpts/ --non-interactive / } + fi + + # take out options already given + for opt in $options + do + local optBase + + # remove leading dashes and arguments + case $opt in + --*) optBase=${opt/=*/} ;; + -*) optBase=${opt:0:2} ;; + esac + + 
# Programmable completion for the "svnadmin" repository-administration tool.
# First word: complete subcommand names.  Later words: complete the options
# valid for the chosen subcommand, removing any option (and its synonym)
# already present on the command line.
# NOTE(review): the @(...) patterns require extglob; presumably enabled
# earlier in this file (not visible in this chunk) — confirm.
_svnadmin ()
{
    local cur cmds cmdOpts optsParam opt helpCmds optBase i

    COMPREPLY=()
    cur=${COMP_WORDS[COMP_CWORD]}

    # Possible expansions, without pure-prefix abbreviations such as "h".
    cmds='crashtest create deltify dump help hotcopy list-dblogs \
          list-unused-dblogs load lslocks lstxns pack recover rmlocks \
          rmtxns setlog setrevprop setuuid upgrade verify --version'

    # Completing the first word: offer the subcommands themselves.
    if [[ $COMP_CWORD -eq 1 ]] ; then
        COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
        return 0
    fi

    # options that require a parameter
    # note: continued lines must end '|' continuing lines must start '|'
    optsParam="-r|--revision|--parent-dir|--fs-type"

    # if not typing an option, or if the previous option required a
    # parameter, then fallback on ordinary filename expansion
    # (&& binds tighter than ||, so help subcommands still get option
    # completion even when $cur does not start with '-')
    helpCmds='help|--help|h|\?'
    if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
       [[ "$cur" != -* ]] || \
       [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
        return 0
    fi

    # Per-subcommand option lists.
    cmdOpts=
    case ${COMP_WORDS[1]} in
    create)
        cmdOpts="--bdb-txn-nosync --bdb-log-keep --config-dir \
                 --fs-type --pre-1.4-compatible --pre-1.5-compatible"
        ;;
    deltify)
        cmdOpts="-r --revision -q --quiet"
        ;;
    dump)
        cmdOpts="-r --revision --incremental -q --quiet --deltas"
        ;;
    help|h|\?)
        cmdOpts="$cmds"
        ;;
    hotcopy)
        cmdOpts="--clean-logs"
        ;;
    load)
        cmdOpts="--ignore-uuid --force-uuid --parent-dir -q --quiet \
                 --use-pre-commit-hook --use-post-commit-hook"
        ;;
    recover)
        cmdOpts="--wait"
        ;;
    rmtxns)
        cmdOpts="-q --quiet"
        ;;
    setlog)
        cmdOpts="-r --revision --bypass-hooks"
        ;;
    setrevprop)
        cmdOpts="-r --revision --use-pre-revprop-change-hook \
                 --use-post-revprop-change-hook"
        ;;
    verify)
        cmdOpts="-r --revision -q --quiet"
        ;;
    *)
        ;;
    esac

    # Help options are accepted by every subcommand.
    cmdOpts="$cmdOpts --help -h"

    # take out options already given
    for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
        opt=${COMP_WORDS[$i]}

        # Normalize: strip '=value' from long options, truncate short ones.
        case $opt in
        --*)    optBase=${opt/=*/} ;;
        -*)     optBase=${opt:0:2} ;;
        esac

        # Pad with spaces so the ' opt ' substring removal below is anchored
        # on whole words.
        cmdOpts=" $cmdOpts "
        cmdOpts=${cmdOpts/ ${optBase} / }

        # take out alternatives (short/long synonym of the option just seen)
        case $optBase in
        -q)         cmdOpts=${cmdOpts/ --quiet / } ;;
        --quiet)    cmdOpts=${cmdOpts/ -q / } ;;
        -h)         cmdOpts=${cmdOpts/ --help / } ;;
        --help)     cmdOpts=${cmdOpts/ -h / } ;;
        -r)         cmdOpts=${cmdOpts/ --revision / } ;;
        --revision) cmdOpts=${cmdOpts/ -r / } ;;
        esac

        # skip next option if this one requires a parameter
        if [[ $opt == @($optsParam) ]] ; then
            ((++i))
        fi
    done

    COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )

    return 0
}
complete -F _svnadmin -o default svnadmin
# Programmable completion for "svndumpfilter".
# Same skeleton as _svnadmin: subcommands on the first word, then the
# subcommand's options with already-typed options (and their synonyms)
# pruned out.
_svndumpfilter ()
{
    local cur cmds cmdOpts optsParam opt helpCmds optBase i

    COMPREPLY=()
    cur=${COMP_WORDS[COMP_CWORD]}

    # Possible expansions, without pure-prefix abbreviations such as "h".
    cmds='exclude help include --version'

    if [[ $COMP_CWORD -eq 1 ]] ; then
        COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
        return 0
    fi

    # options that require a parameter
    # note: continued lines must end '|' continuing lines must start '|'
    optsParam="--targets"

    # if not typing an option, or if the previous option required a
    # parameter, then fallback on ordinary filename expansion
    helpCmds='help|--help|h|\?'
    if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
       [[ "$cur" != -* ]] || \
       [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
        return 0
    fi

    cmdOpts=
    case ${COMP_WORDS[1]} in
    exclude|include)
        cmdOpts="--drop-empty-revs --renumber-revs
                 --skip-missing-merge-sources --targets
                 --preserve-revprops --quiet"
        ;;
    help|h|\?)
        cmdOpts="$cmds"
        ;;
    *)
        ;;
    esac

    # Help options are accepted by every subcommand.
    cmdOpts="$cmdOpts --help -h"

    # take out options already given
    for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
        opt=${COMP_WORDS[$i]}

        # Normalize: strip '=value' from long options, truncate short ones.
        case $opt in
        --*)    optBase=${opt/=*/} ;;
        -*)     optBase=${opt:0:2} ;;
        esac

        # Space-pad so whole-word removal below cannot clip a longer option.
        cmdOpts=" $cmdOpts "
        cmdOpts=${cmdOpts/ ${optBase} / }

        # take out alternatives
        case $optBase in
        -h)     cmdOpts=${cmdOpts/ --help / } ;;
        --help) cmdOpts=${cmdOpts/ -h / } ;;
        esac

        # skip next option if this one requires a parameter
        if [[ $opt == @($optsParam) ]] ; then
            ((++i))
        fi
    done

    COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )

    return 0
}
complete -F _svndumpfilter -o default svndumpfilter
# Programmable completion for "svnlook".
# Subcommand names on the first word, then per-subcommand options with
# already-typed options (and their short/long synonyms) removed.
_svnlook ()
{
    local cur cmds cmdOpts optsParam opt helpCmds optBase i

    COMPREPLY=()
    cur=${COMP_WORDS[COMP_CWORD]}

    # Possible expansions, without pure-prefix abbreviations such as "h".
    cmds='author cat changed date diff dirs-changed help history info \
          lock log propget proplist tree uuid youngest --version'

    if [[ $COMP_CWORD -eq 1 ]] ; then
        COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
        return 0
    fi

    # options that require a parameter
    # note: continued lines must end '|' continuing lines must start '|'
    optsParam="-r|--revision|-t|--transaction|-l|--limit|-x|--extensions"

    # if not typing an option, or if the previous option required a
    # parameter, then fallback on ordinary filename expansion
    helpCmds='help|--help|h|\?'
    if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
       [[ "$cur" != -* ]] || \
       [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
        return 0
    fi

    # Per-subcommand option lists (read-only subcommands mostly take
    # -r/--revision or -t/--transaction selectors).
    cmdOpts=
    case ${COMP_WORDS[1]} in
    author)
        cmdOpts="-r --revision -t --transaction"
        ;;
    cat)
        cmdOpts="-r --revision -t --transaction"
        ;;
    changed)
        cmdOpts="-r --revision -t --transaction --copy-info"
        ;;
    date)
        cmdOpts="-r --revision -t --transaction"
        ;;
    diff)
        cmdOpts="-r --revision -t --transaction --diff-copy-from \
                 --no-diff-added --no-diff-deleted -x --extensions"
        ;;
    dirs-changed)
        cmdOpts="-r --revision -t --transaction"
        ;;
    help|h|\?)
        cmdOpts="$cmds"
        ;;
    history)
        cmdOpts="-r --revision -l --limit --show-ids"
        ;;
    info)
        cmdOpts="-r --revision -t --transaction"
        ;;
    lock)
        cmdOpts=
        ;;
    log)
        cmdOpts="-r --revision -t --transaction"
        ;;
    propget|pget|pg)
        cmdOpts="-r --revision -t --transaction --revprop"
        ;;
    proplist|plist|pl)
        cmdOpts="-r --revision -t --transaction --revprop -v --verbose --xml"
        ;;
    tree)
        cmdOpts="-r --revision -t --transaction --full-paths -N --non-recursive --show-ids"
        ;;
    uuid)
        cmdOpts=
        ;;
    youngest)
        cmdOpts=
        ;;
    *)
        ;;
    esac

    # Help options are accepted by every subcommand.
    cmdOpts="$cmdOpts --help -h"

    # take out options already given
    for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
        opt=${COMP_WORDS[$i]}

        # Normalize: strip '=value' from long options, truncate short ones.
        case $opt in
        --*)    optBase=${opt/=*/} ;;
        -*)     optBase=${opt:0:2} ;;
        esac

        # Space-pad so the whole-word ' opt ' removal is anchored.
        cmdOpts=" $cmdOpts "
        cmdOpts=${cmdOpts/ ${optBase} / }

        # take out alternatives
        case $optBase in
        -N)              cmdOpts=${cmdOpts/ --non-recursive / } ;;
        --non-recursive) cmdOpts=${cmdOpts/ -N / } ;;
        -h)              cmdOpts=${cmdOpts/ --help / } ;;
        --help)          cmdOpts=${cmdOpts/ -h / } ;;
        -l)              cmdOpts=${cmdOpts/ --limit / } ;;
        --limit)         cmdOpts=${cmdOpts/ -l / } ;;
        -r)              cmdOpts=${cmdOpts/ --revision / } ;;
        --revision)      cmdOpts=${cmdOpts/ -r / } ;;
        -t)              cmdOpts=${cmdOpts/ --transaction / } ;;
        --transaction)   cmdOpts=${cmdOpts/ -t / } ;;
        -v)              cmdOpts=${cmdOpts/ --verbose / } ;;
        --verbose)       cmdOpts=${cmdOpts/ -v / } ;;
        -x)              cmdOpts=${cmdOpts/ --extensions / } ;;
        --extensions)    cmdOpts=${cmdOpts/ -x / } ;;
        esac

        # skip next option if this one requires a parameter
        if [[ $opt == @($optsParam) ]] ; then
            ((++i))
        fi
    done

    COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )

    return 0
}
complete -F _svnlook -o default svnlook
# Programmable completion for "svnsync".
# Same skeleton as the other svn* helpers: subcommands first, then
# per-subcommand options with already-given options pruned.
_svnsync ()
{
    local cur cmds cmdOpts optsParam opt helpCmds optBase i

    COMPREPLY=()
    cur=${COMP_WORDS[COMP_CWORD]}

    # Possible expansions, without pure-prefix abbreviations such as "h".
    cmds='copy-revprops help info initialize synchronize --version'

    if [[ $COMP_CWORD -eq 1 ]] ; then
        COMPREPLY=( $( compgen -W "$cmds" -- $cur ) )
        return 0
    fi

    # options that require a parameter
    # note: continued lines must end '|' continuing lines must start '|'
    optsParam="--config-dir|--config-option|--source-username|--source-password"
    optsParam="$optsParam|--sync-username|--sync-password"

    # if not typing an option, or if the previous option required a
    # parameter, then fallback on ordinary filename expansion
    helpCmds='help|--help|h|\?'
    if [[ ${COMP_WORDS[1]} != @($helpCmds) ]] && \
       [[ "$cur" != -* ]] || \
       [[ ${COMP_WORDS[COMP_CWORD-1]} == @($optsParam) ]] ; then
        return 0
    fi

    cmdOpts=
    case ${COMP_WORDS[1]} in
    copy-revprops|initialize|init|synchronize|sync)
        cmdOpts="--non-interactive --no-auth-cache --trust-server-cert \
                 --source-username --source-password --sync-username \
                 --sync-password --config-dir --config-option -q --quiet"
        ;;
    help|h|\?)
        cmdOpts="$cmds"
        ;;
    info)
        cmdOpts="--non-interactive --no-auth-cache --trust-server-cert \
                 --source-username --source-password --sync-username \
                 --sync-password --config-dir --config-option"
        ;;
    *)
        ;;
    esac

    # Help options are accepted by every subcommand.
    cmdOpts="$cmdOpts --help -h"

    # take out options already given
    for (( i=2; i<=$COMP_CWORD-1; ++i )) ; do
        opt=${COMP_WORDS[$i]}

        # Normalize: strip '=value' from long options, truncate short ones.
        case $opt in
        --*)    optBase=${opt/=*/} ;;
        -*)     optBase=${opt:0:2} ;;
        esac

        # Space-pad so the whole-word ' opt ' removal is anchored.
        cmdOpts=" $cmdOpts "
        cmdOpts=${cmdOpts/ ${optBase} / }

        # take out alternatives
        case $optBase in
        -h)      cmdOpts=${cmdOpts/ --help / } ;;
        --help)  cmdOpts=${cmdOpts/ -h / } ;;
        -q)      cmdOpts=${cmdOpts/ --quiet / } ;;
        --quiet) cmdOpts=${cmdOpts/ -q / } ;;
        esac

        # skip next option if this one requires a parameter
        if [[ $opt == @($optsParam) ]] ; then
            ((++i))
        fi
    done

    COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )

    return 0
}
complete -F _svnsync -o default svnsync
# reasonable completion for 'svnversion'
# One pass over COMP_WORDS classifies each word as option or argument
# (WC_PATH then TRAIL_URL); then either stops completing (both arguments
# already present) or offers the remaining unused options.
_svnversion ()
{
    # Leading/trailing spaces let whole-word ' opt ' removal work below.
    local cmdOpts=" -n --no-newline -c --committed -h --help --version "
    local cur=${COMP_WORDS[COMP_CWORD]}

    COMPREPLY=()

    # parse current options
    # 'last' tracks what the previous word was (first/opt/arg); 'stat' is
    # the state just before the word being completed.
    local options= wcpath= trailurl= last='none' stat= opt= i=-1 isCur=
    for opt in ${COMP_WORDS[@]}
    do
        [[ $i -eq $COMP_CWORD ]] && stat=$last
        let i++

        # are we processing the current word?
        isCur=
        [[ $i -eq $COMP_CWORD ]] && isCur=1

        # skip first command, should be 'svnversion'
        if [ $last = 'none' ] ; then
            last='first'
            continue
        fi

        # get options
        if [[ $last != 'arg' && $opt == -* ]]
        then
            # if '--' is at the current position, it means that we are looking
            # for '--*' options, and not the end of option processing.
            if [[ $opt = '--' && ! $isCur ]]
            then
                last='arg'
            else
                options="$options $opt "
                last='opt'
            fi
            continue
        fi
        # get arguments: first non-option is WC_PATH, second is TRAIL_URL
        if [[ $opt != -* ]]
        then
            last='arg'
            if [[ ! $wcpath ]]
            then
                wcpath=$opt
            elif [[ ! $trailurl ]]
            then
                trailurl=$opt
            fi
        fi
    done
    [[ $stat ]] || stat=$last

    # argument part: fall through to the shell's dirname completion, unless
    # both positional arguments are already present (then suggest nothing).
    if [[ $cur != -* || $stat = 'arg' ]]
    then
        [[ $wcpath && $trailurl ]] && COMPREPLY=( '' )
        return 0
    fi

    # suggest options, and take out already given options
    for opt in $options
    do
        # take out options
        cmdOpts=${cmdOpts/ $opt / }

        # take out alternatives
        case $opt in
        -n)           cmdOpts=${cmdOpts/ --no-newline / } ;;
        --no-newline) cmdOpts=${cmdOpts/ -n / } ;;
        -h)           cmdOpts=${cmdOpts/ --help / } ;;
        --help)       cmdOpts=${cmdOpts/ -h / } ;;
        -c)           cmdOpts=${cmdOpts/ --committed / } ;;
        --committed)  cmdOpts=${cmdOpts/ -c / } ;;
        esac
    done

    COMPREPLY=( $( compgen -W "$cmdOpts" -- $cur ) )

    return 0
}
# -X option does not seem to work?
complete -F _svnversion -o dirnames -X '*.svn*' svnversion
# Checks that the "_svn" function defined in the specified "bash_completion"
# script produces appropriate lists of completions for various incomplete svn
# command lines.

if [ ! -f "$1" ] || [ "$2" ]; then
  echo "Usage: bash_completion_test BASH_COMPLETION_PATHNAME"
  echo "Tests the specified \"bash_completion\" script,"
  echo "including checking it against the \"svn\" program found in the current PATH."
  exit 1
fi

set -e # Exit on error
shopt -s extglob
# Force the C locale so that "svn help" output and sort order are stable.
export LC_ALL=C

# Execute the script which is to be tested.
. "$1"

# From the given incomplete svn command, print a space-separated list of
# possible completions of the last argument (or of an empty first argument
# if no subcommand is given).
# Usage: get_svn_completions [SVN-SUBCOMMAND [SVN-OPTION...]]
get_svn_completions() {
  COMP_WORDS=(svn "$@")
  if [ $# == 0 ]; then
    COMP_CWORD=1
  else
    COMP_CWORD=$#
  fi
  _svn
  echo -n "${COMPREPLY[*]}"
}

# Print a failure message, record the failure, and return "false".
# Usage: fail MESSAGE
fail() {
  PREFIX="FAIL: "
  for LINE in "$@"; do
    echo "$PREFIX$LINE"
    PREFIX="      "
  done
  TESTS_FAILED=1
  false
}

# Check that EXPECTED-WORD is among the completions of the last word in
# SVN-COMMAND. SVN-COMMAND is a single argument to this function, split
# into multiple arguments when passed to "get_svn_completions()".
# Usage: includes SVN-COMMAND EXPECTED-WORD
includes() {
  COMPLETIONS=`get_svn_completions $1`
  # extglob @(a|b|...) alternation tests exact membership in the list.
  if [[ "$2" != @(${COMPLETIONS// /|}) ]]; then
    fail "completions of \"svn $1\" should include \"$2\"" \
         "(completions: $COMPLETIONS)"
  fi
}

# Check that EXPECTED-WORD is NOT among the completions of the last word
# in SVN-COMMAND (inverse of "includes", same argument conventions).
# Usage: excludes SVN-COMMAND EXPECTED-WORD
excludes() {
  COMPLETIONS=`get_svn_completions $1`
  if [[ "$2" == @(${COMPLETIONS// /|}) ]]; then
    fail "completions of \"svn $1\" should exclude \"$2\"" \
         "(completions: $COMPLETIONS)"
  fi
}

# Print the valid subcommands for "svn", one per line, sorted.
# Exclude any synonym that is just a truncation of its full name.
# Usage: get_svn_subcommands
get_svn_subcommands() {
  svn help |
  # Find the relevant lines.
  sed -n -e '1,/^Available subcommands:$/d;/^$/q;p' |
  # Remove brackets and commas
  tr -d ' )' | tr '(,' ' ' |
  # Remove simple abbreviations
  ( while read SYNONYMS; do
      for CMD in $SYNONYMS; do
        if [ "$CMD" != "?" ]; then
          for SYNONYM in $SYNONYMS; do
            case $SYNONYM in
            $CMD) ;;
            $CMD*) CMD= ; break ;;
            esac
          done
          if [ $CMD ]; then
            echo $CMD
          fi
        fi
      done
    done
  ) |
  sort
}

# Print the valid option switches for "svn SUBCMD", one per line, sorted.
# Usage: get_svn_options SUBCMD
get_svn_options() {
  { svn help "$1" |
    # Find the relevant lines; remove "arg" and description.
    sed -n -e '1,/^Valid options:$/d;/^  -/!d' \
           -e 's/\( ARG\)* * : .*//;p' |
    # Remove brackets; put each word on its own line.
    tr -d '] ' | tr '[' '\n'
    # The following options are always accepted but not listed in the help
    echo "-h"
    echo "--help"
  } | sort

}


# The tests.
set +e # Do not exit on error
TESTS_FAILED=

echo "Checking general completion"
includes "he" "help"
includes "" "help"
includes "" "--version"

echo "Checking list of subcommands"
HELP_SUBCMDS=`get_svn_subcommands | tr "\n" " "`
COMPLETION_SUBCMDS=`get_svn_completions | tr " " "\n" | grep -v "^-" | sort | tr "\n" " "`
if [ "$HELP_SUBCMDS" != "$COMPLETION_SUBCMDS" ]; then
  fail "non-option completions for \"svn \" != subcommands accepted" \
       "  (non-o. cmpl.: $COMPLETION_SUBCMDS)" \
       "  (svn accepts:  $HELP_SUBCMDS)"
fi

echo "Checking list of options for each subcommand"
for SUBCMD in $HELP_SUBCMDS; do
  HELP_OPTIONS=`get_svn_options $SUBCMD | tr "\n" " "`
  COMPLETION_OPTIONS=`get_svn_completions $SUBCMD - | tr " " "\n" | sort | tr "\n" " "`
  if [ "$HELP_OPTIONS" != "$COMPLETION_OPTIONS" ]; then
    fail "completions for \"svn $SUBCMD -\" != options accepted" \
         "  (completions: $COMPLETION_OPTIONS)" \
         "  (svn accepts: $HELP_OPTIONS)"
  fi
done

echo "Checking rejection of synonyms"
excludes "diff -x -u -" "-x"
excludes "diff -x -u --e" "--extensions"
excludes "diff --extensions -u -" "--extensions"
excludes "diff --extensions -u -" "-x"
excludes "diff --extensions=-u -" "-x"

if [ $TESTS_FAILED ]; then
  echo "FAILURE: at least one bash_completion test failed."
else
  echo "All bash_completion tests passed."
fi
See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +import sys +import os +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt + +### The entries file parser in subversion/tests/cmdline/svntest/entry.py +### handles the XML-based WC entries file format used by Subversion +### 1.3 and lower. It could be rolled into this script. + +LATEST_FORMATS = { "1.4" : 8, + "1.5" : 9, + "1.6" : 10, + # Do NOT add format 11 here. See comment in must_retain_fields + # for why. + } + +def usage_and_exit(error_msg=None): + """Write usage information and exit. If ERROR_MSG is provide, that + error message is printed first (to stderr), the usage info goes to + stderr, and the script exits with a non-zero status. Otherwise, + usage info goes to stdout and the script exits with a zero status.""" + progname = os.path.basename(sys.argv[0]) + + stream = error_msg and sys.stderr or sys.stdout + if error_msg: + stream.write("ERROR: %s\n\n" % error_msg) + stream.write("""\ +usage: %s WC_PATH SVN_VERSION [--verbose] [--force] [--skip-unknown-format] + %s --help + +Change the format of a Subversion working copy to that of SVN_VERSION. + + --skip-unknown-format : skip directories with unknown working copy + format and continue the update + +""" % (progname, progname)) + stream.flush() + sys.exit(error_msg and 1 or 0) + +def get_adm_dir(): + """Return the name of Subversion's administrative directory, + adjusted for the SVN_ASP_DOT_NET_HACK environment variable. See + <http://svn.apache.org/repos/asf/subversion/trunk/notes/asp-dot-net-hack.txt> + for details.""" + return "SVN_ASP_DOT_NET_HACK" in os.environ and "_svn" or ".svn" + +class WCFormatConverter: + "Performs WC format conversions." 
+ root_path = None + error_on_unrecognized = True + force = False + verbosity = 0 + + def write_dir_format(self, format_nbr, dirname, paths): + """Attempt to write the WC format FORMAT_NBR to the entries file + for DIRNAME. Throws LossyConversionException when not in --force + mode, and unconvertable WC data is encountered.""" + + # Avoid iterating in unversioned directories. + if not (get_adm_dir() in paths): + del paths[:] + return + + # Process the entries file for this versioned directory. + if self.verbosity: + print("Processing directory '%s'" % dirname) + entries = Entries(os.path.join(dirname, get_adm_dir(), "entries")) + entries_parsed = True + if self.verbosity: + print("Parsing file '%s'" % entries.path) + try: + entries.parse(self.verbosity) + except UnrecognizedWCFormatException, e: + if self.error_on_unrecognized: + raise + sys.stderr.write("%s, skipping\n" % e) + sys.stderr.flush() + entries_parsed = False + + if entries_parsed: + format = Format(os.path.join(dirname, get_adm_dir(), "format")) + if self.verbosity: + print("Updating file '%s'" % format.path) + format.write_format(format_nbr, self.verbosity) + else: + if self.verbosity: + print("Skipping file '%s'" % format.path) + + if self.verbosity: + print("Checking whether WC format can be converted") + try: + entries.assert_valid_format(format_nbr, self.verbosity) + except LossyConversionException, e: + # In --force mode, ignore complaints about lossy conversion. + if self.force: + print("WARNING: WC format conversion will be lossy. Dropping "\ + "field(s) %s " % ", ".join(e.lossy_fields)) + else: + raise + + if self.verbosity: + print("Writing WC format") + entries.write_format(format_nbr) + + def change_wc_format(self, format_nbr): + """Walk all paths in a WC tree, and change their format to + FORMAT_NBR. 
Throw LossyConversionException or NotImplementedError + if the WC format should not be converted, or is unrecognized.""" + for dirpath, dirs, files in os.walk(self.root_path): + self.write_dir_format(format_nbr, dirpath, dirs + files) + +class Entries: + """Represents a .svn/entries file. + + 'The entries file' section in subversion/libsvn_wc/README is a + useful reference.""" + + # The name and index of each field composing an entry's record. + entry_fields = ( + "name", + "kind", + "revision", + "url", + "repos", + "schedule", + "text-time", + "checksum", + "committed-date", + "committed-rev", + "last-author", + "has-props", + "has-prop-mods", + "cachable-props", + "present-props", + "conflict-old", + "conflict-new", + "conflict-wrk", + "prop-reject-file", + "copied", + "copyfrom-url", + "copyfrom-rev", + "deleted", + "absent", + "incomplete", + "uuid", + "lock-token", + "lock-owner", + "lock-comment", + "lock-creation-date", + "changelist", + "keep-local", + "working-size", + "depth", + "tree-conflicts", + "file-external", + ) + + # The format number. + format_nbr = -1 + + # How many bytes the format number takes in the file. (The format number + # may have leading zeroes after using this script to convert format 10 to + # format 9 -- which would write the format number as '09'.) + format_nbr_bytes = -1 + + def __init__(self, path): + self.path = path + self.entries = [] + + def parse(self, verbosity=0): + """Parse the entries file. Throw NotImplementedError if the WC + format is unrecognized.""" + + input = open(self.path, "r") + + # Read WC format number from INPUT. Validate that it + # is a supported format for conversion. 
+ format_line = input.readline() + try: + self.format_nbr = int(format_line) + self.format_nbr_bytes = len(format_line.rstrip()) # remove '\n' + except ValueError: + self.format_nbr = -1 + self.format_nbr_bytes = -1 + if not self.format_nbr in LATEST_FORMATS.values(): + raise UnrecognizedWCFormatException(self.format_nbr, self.path) + + # Parse file into individual entries, to later inspect for + # non-convertable data. + entry = None + while True: + entry = self.parse_entry(input, verbosity) + if entry is None: + break + self.entries.append(entry) + + input.close() + + def assert_valid_format(self, format_nbr, verbosity=0): + if verbosity >= 2: + print("Validating format for entries file '%s'" % self.path) + for entry in self.entries: + if verbosity >= 3: + print("Validating format for entry '%s'" % entry.get_name()) + try: + entry.assert_valid_format(format_nbr) + except LossyConversionException: + if verbosity >= 3: + sys.stderr.write("Offending entry:\n%s\n" % entry) + sys.stderr.flush() + raise + + def parse_entry(self, input, verbosity=0): + "Read an individual entry from INPUT stream." + entry = None + + while True: + line = input.readline() + if line in ("", "\x0c\n"): + # EOF or end of entry terminator encountered. + break + + if entry is None: + entry = Entry() + + # Retain the field value, ditching its field terminator ("\x0a"). + entry.fields.append(line[:-1]) + + if entry is not None and verbosity >= 3: + sys.stdout.write(str(entry)) + print("-" * 76) + return entry + + def write_format(self, format_nbr): + # Overwrite all bytes of the format number (which are the first bytes in + # the file). Overwrite format '10' by format '09', which will be converted + # to '9' by Subversion when it rewrites the file. (Subversion 1.4 and later + # ignore leading zeroes in the format number.) 
+ assert len(str(format_nbr)) <= self.format_nbr_bytes + format_string = '%0' + str(self.format_nbr_bytes) + 'd' + + os.chmod(self.path, 0600) + output = open(self.path, "r+", 0) + output.write(format_string % format_nbr) + output.close() + os.chmod(self.path, 0400) + +class Entry: + "Describes an entry in a WC." + + # Maps format numbers to indices of fields within an entry's record that must + # be retained when downgrading to that format. + must_retain_fields = { + # Not in 1.4: changelist, keep-local, depth, tree-conflicts, file-externals + 8 : (30, 31, 33, 34, 35), + # Not in 1.5: tree-conflicts, file-externals + 9 : (34, 35), + 10 : (), + # Downgrading from format 11 (1.7-dev) to format 10 is not possible, + # because 11 does not use has-props and cachable-props (but 10 does). + # Naively downgrading in that situation causes properties to disappear + # from the wc. + # + # Downgrading from the 1.7 SQLite-based format to format 10 is not + # implemented. + } + + def __init__(self): + self.fields = [] + + def assert_valid_format(self, format_nbr): + "Assure that conversion will be non-lossy by examining fields." + + # Check whether lossy conversion is being attempted. + lossy_fields = [] + for field_index in self.must_retain_fields[format_nbr]: + if len(self.fields) - 1 >= field_index and self.fields[field_index]: + lossy_fields.append(Entries.entry_fields[field_index]) + if lossy_fields: + raise LossyConversionException(lossy_fields, + "Lossy WC format conversion requested for entry '%s'\n" + "Data for the following field(s) is unsupported by older versions " + "of\nSubversion, and is likely to be subsequently discarded, and/or " + "have\nunexpected side-effects: %s\n\n" + "WC format conversion was cancelled, use the --force option to " + "override\nthe default behavior." + % (self.get_name(), ", ".join(lossy_fields))) + + def get_name(self): + "Return the name of this entry." 
+ return len(self.fields) > 0 and self.fields[0] or "" + + def __str__(self): + "Return all fields from this entry as a multi-line string." + rep = "" + for i in range(0, len(self.fields)): + rep += "[%s] %s\n" % (Entries.entry_fields[i], self.fields[i]) + return rep + +class Format: + """Represents a .svn/format file.""" + + def __init__(self, path): + self.path = path + + def write_format(self, format_nbr, verbosity=0): + format_string = '%d\n' + if os.path.exists(self.path): + if verbosity >= 1: + print("%s will be updated." % self.path) + os.chmod(self.path,0600) + else: + if verbosity >= 1: + print("%s does not exist, creating it." % self.path) + format = open(self.path, "w") + format.write(format_string % format_nbr) + format.close() + os.chmod(self.path, 0400) + +class LocalException(Exception): + """Root of local exception class hierarchy.""" + pass + +class LossyConversionException(LocalException): + "Exception thrown when a lossy WC format conversion is requested." + def __init__(self, lossy_fields, str): + self.lossy_fields = lossy_fields + self.str = str + def __str__(self): + return self.str + +class UnrecognizedWCFormatException(LocalException): + def __init__(self, format, path): + self.format = format + self.path = path + def __str__(self): + return ("Unrecognized WC format %d in '%s'; " + "only formats 8, 9, and 10 can be supported") % (self.format, self.path) + + +def main(): + try: + opts, args = my_getopt(sys.argv[1:], "vh?", + ["debug", "force", "skip-unknown-format", + "verbose", "help"]) + except: + usage_and_exit("Unable to process arguments/options") + + converter = WCFormatConverter() + + # Process arguments. + if len(args) == 2: + converter.root_path = args[0] + svn_version = args[1] + else: + usage_and_exit() + + # Process options. 
+ debug = False + for opt, value in opts: + if opt in ("--help", "-h", "-?"): + usage_and_exit() + elif opt == "--force": + converter.force = True + elif opt == "--skip-unknown-format": + converter.error_on_unrecognized = False + elif opt in ("--verbose", "-v"): + converter.verbosity += 1 + elif opt == "--debug": + debug = True + else: + usage_and_exit("Unknown option '%s'" % opt) + + try: + new_format_nbr = LATEST_FORMATS[svn_version] + except KeyError: + usage_and_exit("Unsupported version number '%s'; " + "only 1.4, 1.5, and 1.6 can be supported" % svn_version) + + try: + converter.change_wc_format(new_format_nbr) + except LocalException, e: + if debug: + raise + sys.stderr.write("%s\n" % e) + sys.stderr.flush() + sys.exit(1) + + print("Converted WC at '%s' into format %d for Subversion %s" % \ + (converter.root_path, new_format_nbr, svn_version)) + +if __name__ == "__main__": + main() diff --git a/tools/client-side/server-version.py b/tools/client-side/server-version.py new file mode 100755 index 0000000..e61a320 --- /dev/null +++ b/tools/client-side/server-version.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +# +# server-version.py: print a Subversion server's version number +# +# USAGE: server-version.py URL +# +# The URL can contain any path on the server, as we are simply looking +# for Apache's response to OPTIONS, and its Server: header. +# +# EXAMPLE: +# +# $ ./server-version.py http://svn.collab.net/ +# or +# $ ./server-version.py https://svn.collab.net/ +# + +import sys +try: + # Python >=3.0 + from http.client import HTTPConnection as http_client_HTTPConnection + from http.client import HTTPSConnection as http_client_HTTPSConnection + from urllib.parse import urlparse as urllib_parse_urlparse +except ImportError: + # Python <3.0 + from httplib import HTTPConnection as http_client_HTTPConnection + from httplib import HTTPSConnection as http_client_HTTPSConnection + from urlparse import urlparse as urllib_parse_urlparse + + +def print_version(url): + scheme, netloc, path, params, query, fragment = urllib_parse_urlparse(url) + if scheme == 'http': + conn = http_client_HTTPConnection(netloc) + elif scheme == 'https': + conn = http_client_HTTPSConnection(netloc) + else: + print('ERROR: this script only supports "http" and "https" URLs') + sys.exit(1) + conn.putrequest('OPTIONS', path) + conn.putheader('Host', netloc) + conn.endheaders() + resp = conn.getresponse() + status, msg, server = (resp.status, resp.msg, resp.getheader('Server')) + conn.close() + + # 1) Handle "OK" (200) + # 2) Handle redirect requests (302), if requested resource + # resides temporarily under a different URL + # 3) Handle authorization (401), if server requests for authorization + # ignore it, since we are interested in server version only + if status != 200 and status != 302 and status != 401: + print('ERROR: bad status response: %s %s' % (status, msg)) + sys.exit(1) + if not server: + # a missing Server: header. Bad, bad server! Go sit in the corner! 
+ print('WARNING: missing header') + else: + for part in server.split(' '): + if part[:4] == 'SVN/': + print(part[4:]) + break + else: + # the server might be configured to hide this information, or it + # might not have mod_dav_svn loaded into it. + print('NOTICE: version unknown') + + +if __name__ == '__main__': + if len(sys.argv) != 2: + print('USAGE: %s URL' % sys.argv[0]) + sys.exit(1) + print_version(sys.argv[1]) diff --git a/tools/client-side/showchange.pl b/tools/client-side/showchange.pl new file mode 100755 index 0000000..e4cf7eb --- /dev/null +++ b/tools/client-side/showchange.pl @@ -0,0 +1,66 @@ +#!/usr/bin/perl -w +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +use strict; + +# ==================================================================== +# Show the log message and diff for a revision. +# +# $ showchange.pl REVISION [WC_PATH|URL] + + +if ((scalar(@ARGV) == 0) + or ($ARGV[0] eq '-?') + or ($ARGV[0] eq '-h') + or ($ARGV[0] eq '--help')) { + print <<EOF; +Show the log message and diff for a revision. 
+usage: $0 REVISION [WC_PATH|URL] +EOF + exit 0; +} + +my $revision = shift || die ("Revision argument required.\n"); +if ($revision =~ /r([0-9]+)/) { + $revision = $1; +} + +my $url = shift || ""; + +my $svn = "svn"; + +my $prev_revision = $revision - 1; + +if (not $url) { + # If no URL was provided, use the repository root from the current + # directory's working copy. We want the root, rather than the URL + # of the current dir, because when someone's asking for a change + # by name (that is, by revision number), they generally don't want + # to have to cd to a particular working copy directory to get it. + my @info_lines = `${svn} info`; + foreach my $info_line (@info_lines) { + if ($info_line =~ s/^Repository Root: (.*)$/$1/e) { + $url = $info_line; + } + } +} + +system ("${svn} log -v --incremental -r${revision} $url"); +system ("${svn} diff -r${prev_revision}:${revision} $url"); diff --git a/tools/client-side/svn-graph.pl b/tools/client-side/svn-graph.pl new file mode 100755 index 0000000..cd76d04 --- /dev/null +++ b/tools/client-side/svn-graph.pl @@ -0,0 +1,265 @@ +#!/usr/bin/perl -w +# vim:ts=2:sw=2:expandtab +# +# svn-graph.pl - produce a GraphViz .dot graph for the branch history +# of a node +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== +# +# View graphs using a command like: +# +# svn-graph.pl file:///tmp/repos | dotty - +# +# TODO: +# - Calculate the repository root at runtime so the user can pass +# the node of interest as a single URL. +# - (Also?) produce the graphical output ourselves (SVG?) instead +# of writing a GraphViz ".dot" data file. This can be done with +# GraphViz using 'dot'. +# - Display svnmerge.py/Subversion merge history. +# + +use strict; +use Getopt::Std; + +# Turn off output buffering +$|=1; + +require SVN::Core; +require SVN::Ra; +require SVN::Client; + +# The URL of the Subversion repository we wish to graph +# (e.g. "http://svn.apache.org/repos/asf/subversion"). +my $repos_url; + +# The revision range we operate on, from $startrev -> $youngest. +my $youngest; +my $startrev; + +# This is the node we're interested in +my $startpath; + +# Set the variables declared above. +parse_commandline(); + +# Point at the root of a repository so we get can look at +# every revision. +my $auth = (new SVN::Client())->auth; +my $ra = SVN::Ra->new(url => $repos_url, auth => $auth); + +# Handle identifier for the aboslutely youngest revision. +if ($youngest eq 'HEAD') +{ + $youngest = $ra->get_latest_revnum(); +} + +# The "interesting" nodes are potential sources for copies. This list +# grows as we move through time. +# The "tracking" nodes are the most recent revisions of paths we're +# following as we move through time. If we hit a delete of a path +# we remove it from the tracking array (i.e. we're no longer interested +# in it). 
+my %interesting = ("$startpath:$startrev", 1);
+my %tracking = ("$startpath", $startrev);
+
+my %codeline_changes_forward = ();
+my %codeline_changes_back = ();
+my %copysource = ();
+my %copydest = ();
+
+write_graph_descriptor();
+#print STDERR "\n";
+
+
+
+# Validate the command-line arguments, and set the global variables
+# $repos_url, $youngest, $startrev, and $startpath.
+sub parse_commandline
+{
+  my %cmd_opts;
+  my $usage = "
+usage: svn-graph.pl [-r START_REV:END_REV] [-p PATH] REPOS_URL
+
+  -r the revision range (defaults to 0 through HEAD)
+  -p the repository-relative path (defaults to /trunk)
+  -h show this help information (other options will be ignored)
+";
+
+  # Defaults.
+  $cmd_opts{'r'} = '1:HEAD';
+  $cmd_opts{'p'} = '/trunk';
+
+  getopts('r:p:h', \%cmd_opts) or die $usage;
+
+  # Print help info (and exit nicely) if requested.  Do this before
+  # requiring a REPOS_URL argument, so that "svn-graph.pl -h" works
+  # without one instead of dying with the usage message.
+  if ($cmd_opts{'h'})
+  {
+    print($usage);
+    exit 0;
+  }
+
+  die $usage if scalar(@ARGV) < 1;
+  $repos_url = $ARGV[0];
+
+  $cmd_opts{'r'} =~ m/(\d+)(:(.+))?/;
+  if ($3)
+  {
+    $youngest = ($3 eq 'HEAD' ? $3 : int($3));
+    $startrev = int($1);
+  }
+  else
+  {
+    # Only a single revision was given; $3 is undefined here, so do
+    # not test it (doing so triggered an "uninitialized value"
+    # warning under -w).  The lone revision is the youngest one.
+    $youngest = int($1);
+    $startrev = 1;
+  }
+
+  $startpath = $cmd_opts{'p'};
+}
+
+# This function is a callback which is invoked for every revision as
+# we traverse change log messages. 
+sub process_revision +{ + my $changed_paths = shift; + my $revision = shift || ''; + my $author = shift || ''; + my $date = shift || ''; + my $message = shift || ''; + my $pool = shift; + + #print STDERR "$revision\r"; + + foreach my $path (keys %$changed_paths) + { + my $copyfrom_path = $$changed_paths{$path}->copyfrom_path; + my $copyfrom_rev = undef; + my $action = $$changed_paths{$path}->action; + + # See if we're deleting one of our tracking nodes + if ($action eq 'D' and exists($tracking{$path})) + { + print "\t\"$path:$tracking{$path}\" "; + print "[label=\"$path:$tracking{$path}\\nDeleted in r$revision\",color=red];\n"; + delete($tracking{$path}); + next; + } + + ### TODO: Display a commit which was the result of a merge + ### operation with [sytle=dashed,color=blue] + + # If this is a copy, work out if it was from somewhere interesting + if (defined($copyfrom_path)) + { + $copyfrom_rev = $tracking{$copyfrom_path}; + } + if (defined($copyfrom_rev) && + exists($interesting{$copyfrom_path . ':' . $copyfrom_rev})) + { + $interesting{$path . ':' . $revision} = 1; + $tracking{$path} = $revision; + print "\t\"$copyfrom_path:$copyfrom_rev\" -> "; + print " \"$path:$revision\""; + print " [label=\"copy at r$revision\",color=green];\n"; + + $copysource{"$copyfrom_path:$copyfrom_rev"} = 1; + $copydest{"$path:$revision"} = 1; + } + + # For each change, we'll walk up the path one component at a time, + # updating any parents that we're tracking (i.e. a change to + # /trunk/asdf/foo updates /trunk). We mark that parent as + # interesting (a potential source for copies), draw a link, and + # update its tracking revision. + do + { + if (exists($tracking{$path}) && $tracking{$path} != $revision) + { + $codeline_changes_forward{"$path:$tracking{$path}"} = + "$path:$revision"; + $codeline_changes_back{"$path:$revision"} = + "$path:$tracking{$path}"; + $interesting{$path . ':' . 
$revision} = 1; + $tracking{$path} = $revision; + } + $path =~ s:/[^/]*$::; + } until ($path eq ''); + } +} + +# Write a descriptor for the graph in GraphViz .dot format to stdout. +sub write_graph_descriptor +{ + # Begin writing the graph descriptor. + print "digraph tree {\n"; + print "\tgraph [bgcolor=white];\n"; + print "\tnode [color=lightblue2, style=filled];\n"; + print "\tedge [color=black, labeljust=r];\n"; + print "\n"; + + # Retrieve the requested history. + $ra->get_log(['/'], $startrev, $youngest, 0, 1, 0, \&process_revision); + + # Now ensure that everything is linked. + foreach my $codeline_change (keys %codeline_changes_forward) + { + # If this node is not the first in its codeline chain, and it isn't + # the source of a copy, it won't be the source of an edge + if (exists($codeline_changes_back{$codeline_change}) && + !exists($copysource{$codeline_change})) + { + next; + } + + # If this node is the first in its chain, or the source of + # a copy, then we'll print it, and then find the next in + # the chain that needs to be printed too + if (!exists($codeline_changes_back{$codeline_change}) or + exists($copysource{$codeline_change}) ) + { + print "\t\"$codeline_change\" -> "; + my $nextchange = $codeline_changes_forward{$codeline_change}; + my $changecount = 1; + while (defined($nextchange)) + { + if (exists($copysource{$nextchange}) or + !exists($codeline_changes_forward{$nextchange}) ) + { + print "\"$nextchange\" [label=\"$changecount change"; + if ($changecount > 1) + { + print 's'; + } + print '"];'; + last; + } + $changecount++; + $nextchange = $codeline_changes_forward{$nextchange}; + } + print "\n"; + } + } + + # Complete the descriptor (delaying write of font size to avoid + # inheritance by any subgraphs). 
+ #my $title = "Family Tree\n$startpath, $startrev through $youngest"; + #print "\tgraph [label=\"$title\", fontsize=18];\n"; + print "}\n"; +} diff --git a/tools/client-side/svn-ssl-fingerprints.sh b/tools/client-side/svn-ssl-fingerprints.sh new file mode 100755 index 0000000..6d1fd92 --- /dev/null +++ b/tools/client-side/svn-ssl-fingerprints.sh @@ -0,0 +1,33 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# $0 --- list the fingerprints of SSL certificates that svn has seen before. +# +# SYNOPSIS: +# $0 +# $0 /path/to/.subversion + +CONFIG_DIR=${1-$HOME/.subversion} +for i in $CONFIG_DIR/auth/svn.ssl.server/????????????????????????????????; do + grep :// $i + grep '.\{80\}' $i | sed 's/\(.\{64\}\)/\1\n/g' | openssl base64 -d | openssl x509 -inform der -noout -fingerprint | sed 's/=/\n/' + echo +done diff --git a/tools/client-side/svn-viewspec.py b/tools/client-side/svn-viewspec.py new file mode 100755 index 0000000..794460a --- /dev/null +++ b/tools/client-side/svn-viewspec.py @@ -0,0 +1,346 @@ +#!/usr/bin/env python +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +"""\ +Usage: 1. __SCRIPTNAME__ checkout VIEWSPEC-FILE TARGET-DIR + 2. __SCRIPTNAME__ examine VIEWSPEC-FILE + 3. __SCRIPTNAME__ help + 4. __SCRIPTNAME__ help-format + +VIEWSPEC-FILE is the path of a file whose contents describe a +Subversion sparse checkouts layout, or '-' if that description should +be read from stdin. TARGET-DIR is the working copy directory created +by this script as it checks out the specified layout. + +1. Parse VIEWSPEC-FILE and execute the necessary 'svn' command-line + operations to build out a working copy tree at TARGET-DIR. + +2. Parse VIEWSPEC-FILE and dump out a human-readable representation of + the tree described in the specification. + +3. Show this usage message. + +4. Show information about the file format this program expects. + +""" + +FORMAT_HELP = """\ +Viewspec File Format +==================== + +The viewspec file format used by this tool is a collection of headers +(using the typical one-per-line name:value syntax), followed by an +empty line, followed by a set of one-per-line rules. 
+ +The headers must contain at least the following: + + Format - version of the viewspec format used throughout the file + Url - base URL applied to all rules; tree checkout location + +The following headers are optional: + + Revision - version of the tree items to checkout + +Following the headers and blank line separator are the path rules. +The rules are list of URLs -- relative to the base URL stated in the +headers -- with optional annotations to specify the desired working +copy depth of each item: + + PATH/** - checkout PATH and all its children to infinite depth + PATH/* - checkout PATH and its immediate children + PATH/~ - checkout PATH and its file children + PATH - checkout PATH non-recursively + +By default, the top-level directory (associated with the base URL) is +checked out with empty depth. You can override this using the special +rules '**', '*', and '~' as appropriate. + +It is not necessary to explicitly list the parent directories of each +path associated with a rule. If the parent directory of a given path +is not "covered" by a previous rule, it will be checked out with empty +depth. + +Examples +======== + +Here's a sample viewspec file: + + Format: 1 + Url: http://svn.apache.org/repos/asf/subversion + Revision: 36366 + + trunk/** + branches/1.5.x/** + branches/1.6.x/** + README + branches/1.4.x/STATUS + branches/1.4.x/subversion/tests/cmdline/~ + +You may wish to version your viewspec files. If so, you can use this +script in conjunction with 'svn cat' to fetch, parse, and act on a +versioned viewspec file: + + $ svn cat http://svn.example.com/specs/dev-spec.txt | + __SCRIPTNAME__ checkout - /path/to/target/directory + +""" + +######################################################################### +### Possible future improvements that could be made: +### +### - support for excluded paths (PATH!) 
+### - support for static revisions of individual paths (PATH@REV/**) +### + +import sys +import os +import urllib + +DEPTH_EMPTY = 'empty' +DEPTH_FILES = 'files' +DEPTH_IMMEDIATES = 'immediates' +DEPTH_INFINITY = 'infinity' + + +class TreeNode: + """A representation of a single node in a Subversion sparse + checkout tree.""" + + def __init__(self, name, depth): + self.name = name # the basename of this tree item + self.depth = depth # its depth (one of the DEPTH_* values) + self.children = {} # its children (basename -> TreeNode) + + def add_child(self, child_node): + child_name = child_node.name + assert not self.children.has_key(child_node) + self.children[child_name] = child_node + + def dump(self, recurse=False, indent=0): + sys.stderr.write(" " * indent) + sys.stderr.write("Path: %s (depth=%s)\n" % (self.name, self.depth)) + if recurse: + child_names = self.children.keys() + child_names.sort(svn_path_compare_paths) + for child_name in child_names: + self.children[child_name].dump(recurse, indent + 2) + +class SubversionViewspec: + """A representation of a Subversion sparse checkout specification.""" + + def __init__(self, base_url, revision, tree): + self.base_url = base_url # base URL of the checkout + self.revision = revision # revision of the checkout (-1 == HEAD) + self.tree = tree # the top-most TreeNode item + +def svn_path_compare_paths(path1, path2): + """Compare PATH1 and PATH2 as paths, sorting depth-first-ily. + + NOTE: Stolen unapologetically from Subversion's Python bindings + module svn.core.""" + + path1_len = len(path1); + path2_len = len(path2); + min_len = min(path1_len, path2_len) + i = 0 + + # Are the paths exactly the same? 
+ if path1 == path2: + return 0 + + # Skip past common prefix + while (i < min_len) and (path1[i] == path2[i]): + i = i + 1 + + # Children of paths are greater than their parents, but less than + # greater siblings of their parents + char1 = '\0' + char2 = '\0' + if (i < path1_len): + char1 = path1[i] + if (i < path2_len): + char2 = path2[i] + + if (char1 == '/') and (i == path2_len): + return 1 + if (char2 == '/') and (i == path1_len): + return -1 + if (i < path1_len) and (char1 == '/'): + return -1 + if (i < path2_len) and (char2 == '/'): + return 1 + + # Common prefix was skipped above, next character is compared to + # determine order + return cmp(char1, char2) + +def parse_viewspec_headers(viewspec_fp): + """Parse the headers from the viewspec file, return them as a + dictionary mapping header names to values.""" + + headers = {} + while 1: + line = viewspec_fp.readline().strip() + if not line: + break + name, value = [x.strip() for x in line.split(':', 1)] + headers[name] = value + return headers + +def parse_viewspec(viewspec_fp): + """Parse the viewspec file, returning a SubversionViewspec object + that represents the specification.""" + + headers = parse_viewspec_headers(viewspec_fp) + format = headers['Format'] + assert format == '1' + base_url = headers['Url'] + revision = int(headers.get('Revision', -1)) + root_depth = DEPTH_EMPTY + rules = {} + while 1: + line = viewspec_fp.readline() + if not line: + break + line = line.rstrip() + + # These are special rules for the top-most dir; don't fall thru. + if line == '**': + root_depth = DEPTH_INFINITY + continue + elif line == '*': + root_depth = DEPTH_IMMEDIATES + continue + elif line == '~': + root_depth = DEPTH_FILES + continue + + # These are the regular per-path rules. 
+ elif line[-3:] == '/**': + depth = DEPTH_INFINITY + path = line[:-3] + elif line[-2:] == '/*': + depth = DEPTH_IMMEDIATES + path = line[:-2] + elif line[-2:] == '/~': + depth = DEPTH_FILES + path = line[:-2] + else: + depth = DEPTH_EMPTY + path = line + + # Add our rule to the set thereof. + assert not rules.has_key(path) + rules[path] = depth + + tree = TreeNode('', root_depth) + paths = rules.keys() + paths.sort(svn_path_compare_paths) + for path in paths: + depth = rules[path] + path_parts = filter(None, path.split('/')) + tree_ptr = tree + for part in path_parts[:-1]: + child_node = tree_ptr.children.get(part, None) + if not child_node: + child_node = TreeNode(part, DEPTH_EMPTY) + tree_ptr.add_child(child_node) + tree_ptr = child_node + tree_ptr.add_child(TreeNode(path_parts[-1], depth)) + return SubversionViewspec(base_url, revision, tree) + +def checkout_tree(base_url, revision, tree_node, target_dir, is_top=True): + """Checkout from BASE_URL, and into TARGET_DIR, the TREE_NODE + sparse checkout item. IS_TOP is set iff this node represents the + root of the checkout tree. 
REVISION is the revision to checkout, + or -1 if checking out HEAD.""" + + depth = tree_node.depth + revision_str = '' + if revision != -1: + revision_str = "--revision=%d " % (revision) + if is_top: + os.system('svn checkout "%s" "%s" --depth=%s %s' + % (base_url, target_dir, depth, revision_str)) + else: + os.system('svn update "%s" --set-depth=%s %s' + % (target_dir, depth, revision_str)) + child_names = tree_node.children.keys() + child_names.sort(svn_path_compare_paths) + for child_name in child_names: + checkout_tree(base_url + '/' + child_name, + revision, + tree_node.children[child_name], + os.path.join(target_dir, urllib.unquote(child_name)), + False) + +def checkout_spec(viewspec, target_dir): + """Checkout the view specification VIEWSPEC into TARGET_DIR.""" + + checkout_tree(viewspec.base_url, + viewspec.revision, + viewspec.tree, + target_dir) + +def usage_and_exit(errmsg=None): + stream = errmsg and sys.stderr or sys.stdout + msg = __doc__.replace("__SCRIPTNAME__", os.path.basename(sys.argv[0])) + stream.write(msg) + if errmsg: + stream.write("ERROR: %s\n" % (errmsg)) + sys.exit(errmsg and 1 or 0) + +def main(): + argc = len(sys.argv) + if argc < 2: + usage_and_exit('Not enough arguments.') + subcommand = sys.argv[1] + if subcommand == 'help': + usage_and_exit() + elif subcommand == 'help-format': + msg = FORMAT_HELP.replace("__SCRIPTNAME__", + os.path.basename(sys.argv[0])) + sys.stdout.write(msg) + elif subcommand == 'examine': + if argc < 3: + usage_and_exit('No viewspec file specified.') + fp = (sys.argv[2] == '-') and sys.stdin or open(sys.argv[2], 'r') + viewspec = parse_viewspec(fp) + sys.stdout.write("Url: %s\n" % (viewspec.base_url)) + revision = viewspec.revision + if revision != -1: + sys.stdout.write("Revision: %s\n" % (revision)) + else: + sys.stdout.write("Revision: HEAD\n") + sys.stdout.write("\n") + viewspec.tree.dump(True) + elif subcommand == 'checkout': + if argc < 3: + usage_and_exit('No viewspec file specified.') + if argc < 4: + 
usage_and_exit('No target directory specified.') + fp = (sys.argv[2] == '-') and sys.stdin or open(sys.argv[2], 'r') + checkout_spec(parse_viewspec(fp), sys.argv[3]) + else: + usage_and_exit('Unknown subcommand "%s".' % (subcommand)) + +if __name__ == "__main__": + main() diff --git a/tools/client-side/svnmucc/svnmucc-test.py b/tools/client-side/svnmucc/svnmucc-test.py new file mode 100755 index 0000000..c09d15c --- /dev/null +++ b/tools/client-side/svnmucc/svnmucc-test.py @@ -0,0 +1,359 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# Usage: svnmucc-test.py [build-dir-top [base-url]] + +import sys +import os +import re +import shutil + +# calculate the absolute directory in which this test script lives +this_dir = os.path.dirname(os.path.abspath(sys.argv[0])) + +# add the Subversion Python test suite libraries to the path, and import +sys.path.insert(0, '%s/../../../subversion/tests/cmdline' % (this_dir)) +import svntest + +# setup the global 'svntest.main.options' object so functions in the +# module don't freak out. 
+svntest.main._parse_options(arglist=[]) + +# calculate the top of the build tree +if len(sys.argv) > 1: + build_top = os.path.abspath(sys.argv[1]) +else: + build_top = os.path.abspath('%s/../../../' % (this_dir)) + +# where lives svnmucc? +svnmucc_binary = \ + os.path.abspath('%s/tools/client-side/svnmucc/svnmucc' % (build_top)) + +# override some svntest binary locations +svntest.main.svn_binary = \ + os.path.abspath('%s/subversion/svn/svn' % (build_top)) +svntest.main.svnlook_binary = \ + os.path.abspath('%s/subversion/svnlook/svnlook' % (build_top)) +svntest.main.svnadmin_binary = \ + os.path.abspath('%s/subversion/svnadmin/svnadmin' % (build_top)) + +# where lives the test repository? +repos_path = \ + os.path.abspath(('%s/tools/client-side/svnmucc/svnmucc-test-repos' + % (build_top))) + +if (len(sys.argv) > 2): + repos_url = sys.argv[2] + '/svnmucc-test-repos' +else: + repos_url = 'file://' + repos_path + +def die(msg): + """Write MSG (formatted as a failure) to stderr, and exit with a + non-zero errorcode.""" + + sys.stderr.write("FAIL: " + msg + "\n") + sys.exit(1) + + +_svnmucc_re = re.compile('^(r[0-9]+) committed by svnmuccuser at (.*)$') +_log_re = re.compile('^ ([ADRM] /[^\(]+($| \(from .*:[0-9]+\)$))') +_err_re = re.compile('^svnmucc: (.*)$') + +def xrun_svnmucc(expected_errors, *varargs): + """Run svnmucc with the list of SVNMUCC_ARGS arguments. Verify that + its run results match the list of EXPECTED_ERRORS.""" + + # First, run svnmucc. + exit_code, outlines, errlines = \ + svntest.main.run_command(svnmucc_binary, 1, 0, + '-U', repos_url, + '-u', 'svnmuccuser', + '-p', 'svnmuccpass', + '--config-dir', 'dummy', + *varargs) + errors = [] + for line in errlines: + match = _err_re.match(line) + if match: + errors.append(line.rstrip('\n\r')) + if errors != expected_errors: + raise svntest.main.SVNUnmatchedError(str(errors)) + + +def run_svnmucc(expected_path_changes, *varargs): + """Run svnmucc with the list of SVNMUCC_ARGS arguments. 
Verify that + its run results in a new commit with 'svn log -rHEAD' changed paths + that match the list of EXPECTED_PATH_CHANGES.""" + + # First, run svnmucc. + exit_code, outlines, errlines = \ + svntest.main.run_command(svnmucc_binary, 1, 0, + '-U', repos_url, + '-u', 'svnmuccuser', + '-p', 'svnmuccpass', + '--config-dir', 'dummy', + *varargs) + if errlines: + raise svntest.main.SVNCommitFailure(str(errlines)) + if len(outlines) != 1 or not _svnmucc_re.match(outlines[0]): + raise svntest.main.SVNLineUnequal(str(outlines)) + + # Now, run 'svn log -vq -rHEAD' + changed_paths = [] + exit_code, outlines, errlines = \ + svntest.main.run_svn(None, 'log', '-vqrHEAD', repos_url) + if errlines: + raise svntest.Failure("Unable to verify commit with 'svn log': %s" + % (str(errlines))) + for line in outlines: + match = _log_re.match(line) + if match: + changed_paths.append(match.group(1).rstrip('\n\r')) + + expected_path_changes.sort() + changed_paths.sort() + if changed_paths != expected_path_changes: + raise svntest.Failure("Logged path changes differ from expectations\n" + " expected: %s\n" + " actual: %s" % (str(expected_path_changes), + str(changed_paths))) + + +def main(): + """Test svnmucc.""" + + # revision 1 + run_svnmucc(['A /foo' + ], # --------- + 'mkdir', 'foo') + + # revision 2 + run_svnmucc(['A /z.c', + ], # --------- + 'put', '/dev/null', 'z.c') + + # revision 3 + run_svnmucc(['A /foo/z.c (from /z.c:2)', + 'A /foo/bar (from /foo:2)', + ], # --------- + 'cp', '2', 'z.c', 'foo/z.c', + 'cp', '2', 'foo', 'foo/bar') + + # revision 4 + run_svnmucc(['A /zig (from /foo:3)', + 'D /zig/bar', + 'D /foo', + 'A /zig/zag (from /foo:3)', + ], # --------- + 'cp', '3', 'foo', 'zig', + 'rm', 'zig/bar', + 'mv', 'foo', 'zig/zag') + + # revision 5 + run_svnmucc(['D /z.c', + 'A /zig/zag/bar/y.c (from /z.c:4)', + 'A /zig/zag/bar/x.c (from /z.c:2)', + ], # --------- + 'mv', 'z.c', 'zig/zag/bar/y.c', + 'cp', '2', 'z.c', 'zig/zag/bar/x.c') + + # revision 6 + run_svnmucc(['D 
/zig/zag/bar/y.c', + 'A /zig/zag/bar/y y.c (from /zig/zag/bar/y.c:5)', + 'A /zig/zag/bar/y%20y.c (from /zig/zag/bar/y.c:5)', + ], # --------- + 'mv', 'zig/zag/bar/y.c', 'zig/zag/bar/y%20y.c', + 'cp', 'HEAD', 'zig/zag/bar/y.c', 'zig/zag/bar/y%2520y.c') + + # revision 7 + run_svnmucc(['D /zig/zag/bar/y y.c', + 'A /zig/zag/bar/z z1.c (from /zig/zag/bar/y y.c:6)', + 'A /zig/zag/bar/z%20z.c (from /zig/zag/bar/y%20y.c:6)', + 'A /zig/zag/bar/z z2.c (from /zig/zag/bar/y y.c:6)', + ], #--------- + 'mv', 'zig/zag/bar/y%20y.c', 'zig/zag/bar/z z1.c', + 'cp', 'HEAD', 'zig/zag/bar/y%2520y.c', 'zig/zag/bar/z%2520z.c', + 'cp', 'HEAD', 'zig/zag/bar/y y.c', 'zig/zag/bar/z z2.c') + + # revision 8 + run_svnmucc(['D /zig/zag', + 'A /zig/foo (from /zig/zag:7)', + 'D /zig/foo/bar/z%20z.c', + 'D /zig/foo/bar/z z2.c', + 'R /zig/foo/bar/z z1.c (from /zig/zag/bar/x.c:5)', + ], #--------- + 'mv', 'zig/zag', 'zig/foo', + 'rm', 'zig/foo/bar/z z1.c', + 'rm', 'zig/foo/bar/z%20z2.c', + 'rm', 'zig/foo/bar/z%2520z.c', + 'cp', '5', 'zig/zag/bar/x.c', 'zig/foo/bar/z%20z1.c') + + # revision 9 + run_svnmucc(['R /zig/foo/bar (from /zig/z.c:8)', + ], #--------- + 'rm', 'zig/foo/bar', + 'cp', '8', 'zig/z.c', 'zig/foo/bar') + + # revision 10 + run_svnmucc(['R /zig/foo/bar (from /zig/foo/bar:8)', + 'D /zig/foo/bar/z z1.c', + ], #--------- + 'rm', 'zig/foo/bar', + 'cp', '8', 'zig/foo/bar', 'zig/foo/bar', + 'rm', 'zig/foo/bar/z%20z1.c') + + # revision 11 + run_svnmucc(['R /zig/foo (from /zig/foo/bar:10)', + ], #--------- + 'rm', 'zig/foo', + 'cp', 'head', 'zig/foo/bar', 'zig/foo') + + # revision 12 + run_svnmucc(['D /zig', + 'A /foo (from /foo:3)', + 'A /foo/foo (from /foo:3)', + 'A /foo/foo/foo (from /foo:3)', + 'D /foo/foo/bar', + 'R /foo/foo/foo/bar (from /foo:3)', + ], #--------- + 'rm', 'zig', + 'cp', '3', 'foo', 'foo', + 'cp', '3', 'foo', 'foo/foo', + 'cp', '3', 'foo', 'foo/foo/foo', + 'rm', 'foo/foo/bar', + 'rm', 'foo/foo/foo/bar', + 'cp', '3', 'foo', 'foo/foo/foo/bar') + + # revision 13 + 
run_svnmucc(['A /boozle (from /foo:3)', + 'A /boozle/buz', + 'A /boozle/buz/nuz', + ], #--------- + 'cp', '3', 'foo', 'boozle', + 'mkdir', 'boozle/buz', + 'mkdir', 'boozle/buz/nuz') + + # revision 14 + run_svnmucc(['A /boozle/buz/svnmucc-test.py', + 'A /boozle/guz (from /boozle/buz:13)', + 'A /boozle/guz/svnmucc-test.py', + ], #--------- + 'put', '/dev/null', 'boozle/buz/svnmucc-test.py', + 'cp', '13', 'boozle/buz', 'boozle/guz', + 'put', '/dev/null', 'boozle/guz/svnmucc-test.py') + + # revision 15 + run_svnmucc(['M /boozle/buz/svnmucc-test.py', + 'R /boozle/guz/svnmucc-test.py', + ], #--------- + 'put', sys.argv[0], 'boozle/buz/svnmucc-test.py', + 'rm', 'boozle/guz/svnmucc-test.py', + 'put', sys.argv[0], 'boozle/guz/svnmucc-test.py') + + # revision 16 + run_svnmucc(['R /foo/bar (from /foo/foo:15)'], #--------- + 'rm', 'foo/bar', + 'cp', '15', 'foo/foo', 'foo/bar', + 'propset', 'testprop', 'true', 'foo/bar') + + # revision 17 + run_svnmucc(['M /foo/bar'], #--------- + 'propdel', 'testprop', 'foo/bar') + + # revision 18 + run_svnmucc(['M /foo/z.c', + 'M /foo/foo', + ], #--------- + 'propset', 'testprop', 'true', 'foo/z.c', + 'propset', 'testprop', 'true', 'foo/foo') + + # revision 19 + run_svnmucc(['M /foo/z.c', + 'M /foo/foo', + ], #--------- + 'propsetf', 'testprop', sys.argv[0], 'foo/z.c', + 'propsetf', 'testprop', sys.argv[0], 'foo/foo') + + # Expected missing revision error + xrun_svnmucc(["svnmucc: E200004: 'a' is not a revision" + ], #--------- + 'cp', 'a', 'b') + + # Expected cannot be younger error + xrun_svnmucc(['svnmucc: E205000: Copy source revision cannot be younger ' + + 'than base revision', + ], #--------- + 'cp', '42', 'a', 'b') + + # Expected already exists error + xrun_svnmucc(["svnmucc: E125002: 'foo' already exists", + ], #--------- + 'cp', '17', 'a', 'foo') + + # Expected copy_src already exists error + xrun_svnmucc(["svnmucc: E125002: 'a/bar' (from 'foo/bar:17') already exists", + ], #--------- + 'cp', '17', 'foo', 'a', + 'cp', '17', 
'foo/foo', 'a/bar') + + # Expected not found error + xrun_svnmucc(["svnmucc: E125002: 'a' not found", + ], #--------- + 'cp', '17', 'a', 'b') + +if __name__ == "__main__": + try: + # remove any previously existing repository, then create a new one + if os.path.exists(repos_path): + shutil.rmtree(repos_path) + exit_code, outlines, errlines = \ + svntest.main.run_svnadmin('create', '--fs-type', + 'fsfs', repos_path) + if errlines: + raise svntest.main.SVNRepositoryCreateFailure(repos_path) + fp = open(os.path.join(repos_path, 'conf', 'svnserve.conf'), 'w') + fp.write('[general]\nauth-access = write\npassword-db = passwd\n') + fp.close() + fp = open(os.path.join(repos_path, 'conf', 'passwd'), 'w') + fp.write('[users]\nsvnmuccuser = svnmuccpass\n') + fp.close() + main() + except SystemExit, e: + raise + except svntest.main.SVNCommitFailure, e: + die("Error committing via svnmucc: %s" % (str(e))) + except svntest.main.SVNLineUnequal, e: + die("Unexpected svnmucc output line: %s" % (str(e))) + except svntest.main.SVNRepositoryCreateFailure, e: + die("Error creating test repository: %s" % (str(e))) + except svntest.Failure, e: + die("Test failed: %s" % (str(e))) + except Exception, e: + die("Something bad happened: %s" % (str(e))) + + # cleanup the repository on a successful run + try: + if os.path.exists(repos_path): + shutil.rmtree(repos_path) + except: + pass + print("SUCCESS!") diff --git a/tools/client-side/svnmucc/svnmucc.c b/tools/client-side/svnmucc/svnmucc.c new file mode 100644 index 0000000..b33d6a9 --- /dev/null +++ b/tools/client-side/svnmucc/svnmucc.c @@ -0,0 +1,1206 @@ +/* + * svnmucc.c: Subversion Multiple URL Client + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + * + */ + +/* Multiple URL Command Client + + Combine a list of mv, cp and rm commands on URLs into a single commit. + + How it works: the command line arguments are parsed into an array of + action structures. The action structures are interpreted to build a + tree of operation structures. The tree of operation structures is + used to drive an RA commit editor to produce a single commit. + + To build this client, type 'make svnmucc' from the root of your + Subversion source directory. 
+*/ + +#include <stdio.h> +#include <string.h> + +#include <apr_lib.h> + +#include "svn_client.h" +#include "svn_cmdline.h" +#include "svn_config.h" +#include "svn_error.h" +#include "svn_path.h" +#include "svn_pools.h" +#include "svn_props.h" +#include "svn_ra.h" +#include "svn_string.h" +#include "svn_subst.h" +#include "svn_utf.h" +#include "svn_version.h" +#include "private/svn_cmdline_private.h" + +static void handle_error(svn_error_t *err, apr_pool_t *pool) +{ + if (err) + svn_handle_error2(err, stderr, FALSE, "svnmucc: "); + svn_error_clear(err); + if (pool) + svn_pool_destroy(pool); + exit(EXIT_FAILURE); +} + +static apr_pool_t * +init(const char *application) +{ + apr_allocator_t *allocator; + apr_pool_t *pool; + svn_error_t *err; + const svn_version_checklist_t checklist[] = { + {"svn_client", svn_client_version}, + {"svn_subr", svn_subr_version}, + {"svn_ra", svn_ra_version}, + {NULL, NULL} + }; + + SVN_VERSION_DEFINE(my_version); + + if (svn_cmdline_init(application, stderr) + || apr_allocator_create(&allocator)) + exit(EXIT_FAILURE); + + err = svn_ver_check_list(&my_version, checklist); + if (err) + handle_error(err, NULL); + + apr_allocator_max_free_set(allocator, SVN_ALLOCATOR_RECOMMENDED_MAX_FREE); + pool = svn_pool_create_ex(NULL, allocator); + apr_allocator_owner_set(allocator, pool); + + return pool; +} + +static svn_error_t * +open_tmp_file(apr_file_t **fp, + void *callback_baton, + apr_pool_t *pool) +{ + /* Open a unique file; use APR_DELONCLOSE. 
*/ + return svn_io_open_unique_file3(fp, NULL, NULL, svn_io_file_del_on_close, + pool, pool); +} + +static svn_error_t * +create_ra_callbacks(svn_ra_callbacks2_t **callbacks, + const char *username, + const char *password, + const char *config_dir, + svn_config_t *cfg_config, + svn_boolean_t non_interactive, + svn_boolean_t no_auth_cache, + apr_pool_t *pool) +{ + SVN_ERR(svn_ra_create_callbacks(callbacks, pool)); + + SVN_ERR(svn_cmdline_create_auth_baton(&(*callbacks)->auth_baton, + non_interactive, + username, password, config_dir, + no_auth_cache, + FALSE /* trust_server_certs */, + cfg_config, NULL, NULL, pool)); + + (*callbacks)->open_tmp_file = open_tmp_file; + + return SVN_NO_ERROR; +} + + + +static svn_error_t * +commit_callback(const svn_commit_info_t *commit_info, + void *baton, + apr_pool_t *pool) +{ + SVN_ERR(svn_cmdline_printf(pool, "r%ld committed by %s at %s\n", + commit_info->revision, + (commit_info->author + ? commit_info->author : "(no author)"), + commit_info->date)); + return SVN_NO_ERROR; +} + +typedef enum action_code_t { + ACTION_MV, + ACTION_MKDIR, + ACTION_CP, + ACTION_PROPSET, + ACTION_PROPSETF, + ACTION_PROPDEL, + ACTION_PUT, + ACTION_RM +} action_code_t; + +struct operation { + enum { + OP_OPEN, + OP_DELETE, + OP_ADD, + OP_REPLACE, + OP_PROPSET /* only for files for which no other operation is + occuring; directories are OP_OPEN with non-empty + props */ + } operation; + svn_node_kind_t kind; /* to copy, mkdir, put or set revprops */ + svn_revnum_t rev; /* to copy, valid for add and replace */ + const char *url; /* to copy, valid for add and replace */ + const char *src_file; /* for put, the source file for contents */ + apr_hash_t *children; /* const char *path -> struct operation * */ + apr_hash_t *prop_mods; /* const char *prop_name -> + const svn_string_t *prop_value */ + apr_array_header_t *prop_dels; /* const char *prop_name deletions */ + void *baton; /* as returned by the commit editor */ +}; + + +/* An iterator (for use via 
apr_table_do) which sets node properties. + REC is a pointer to a struct driver_state. */ +static svn_error_t * +change_props(const svn_delta_editor_t *editor, + void *baton, + struct operation *child, + apr_pool_t *pool) +{ + apr_pool_t *iterpool = svn_pool_create(pool); + + if (child->prop_dels) + { + int i; + for (i = 0; i < child->prop_dels->nelts; i++) + { + const char *prop_name; + + svn_pool_clear(iterpool); + prop_name = APR_ARRAY_IDX(child->prop_dels, i, const char *); + if (child->kind == svn_node_dir) + SVN_ERR(editor->change_dir_prop(baton, prop_name, + NULL, iterpool)); + else + SVN_ERR(editor->change_file_prop(baton, prop_name, + NULL, iterpool)); + } + } + if (apr_hash_count(child->prop_mods)) + { + apr_hash_index_t *hi; + for (hi = apr_hash_first(pool, child->prop_mods); + hi; hi = apr_hash_next(hi)) + { + const void *key; + void *val; + + svn_pool_clear(iterpool); + apr_hash_this(hi, &key, NULL, &val); + if (child->kind == svn_node_dir) + SVN_ERR(editor->change_dir_prop(baton, key, val, iterpool)); + else + SVN_ERR(editor->change_file_prop(baton, key, val, iterpool)); + } + } + + svn_pool_destroy(iterpool); + return SVN_NO_ERROR; +} + + +/* Drive EDITOR to affect the change represented by OPERATION. HEAD + is the last-known youngest revision in the repository. */ +static svn_error_t * +drive(struct operation *operation, + svn_revnum_t head, + const svn_delta_editor_t *editor, + apr_pool_t *pool) +{ + apr_pool_t *subpool = svn_pool_create(pool); + apr_hash_index_t *hi; + + for (hi = apr_hash_first(pool, operation->children); + hi; hi = apr_hash_next(hi)) + { + const void *key; + void *val; + struct operation *child; + void *file_baton = NULL; + + svn_pool_clear(subpool); + apr_hash_this(hi, &key, NULL, &val); + child = val; + + /* Deletes and replacements are simple -- delete something. 
*/ + if (child->operation == OP_DELETE || child->operation == OP_REPLACE) + { + SVN_ERR(editor->delete_entry(key, head, operation->baton, subpool)); + } + /* Opens could be for directories or files. */ + if (child->operation == OP_OPEN || child->operation == OP_PROPSET) + { + if (child->kind == svn_node_dir) + { + SVN_ERR(editor->open_directory(key, operation->baton, head, + subpool, &child->baton)); + } + else + { + SVN_ERR(editor->open_file(key, operation->baton, head, + subpool, &file_baton)); + } + } + /* Adds and replacements could also be for directories or files. */ + if (child->operation == OP_ADD || child->operation == OP_REPLACE) + { + if (child->kind == svn_node_dir) + { + SVN_ERR(editor->add_directory(key, operation->baton, + child->url, child->rev, + subpool, &child->baton)); + } + else + { + SVN_ERR(editor->add_file(key, operation->baton, child->url, + child->rev, subpool, &file_baton)); + } + } + /* If there's a source file and an open file baton, we get to + change textual contents. */ + if ((child->src_file) && (file_baton)) + { + svn_txdelta_window_handler_t handler; + void *handler_baton; + svn_stream_t *contents; + apr_file_t *f = NULL; + + SVN_ERR(editor->apply_textdelta(file_baton, NULL, subpool, + &handler, &handler_baton)); + if (strcmp(child->src_file, "-")) + { + SVN_ERR(svn_io_file_open(&f, child->src_file, APR_READ, + APR_OS_DEFAULT, pool)); + } + else + { + apr_status_t apr_err = apr_file_open_stdin(&f, pool); + if (apr_err) + return svn_error_wrap_apr(apr_err, "Can't open stdin"); + } + contents = svn_stream_from_aprfile2(f, FALSE, pool); + SVN_ERR(svn_txdelta_send_stream(contents, handler, + handler_baton, NULL, pool)); + } + /* If we opened a file, we need to apply outstanding propmods, + then close it. 
*/ + if (file_baton) + { + if (child->kind == svn_node_file) + { + SVN_ERR(change_props(editor, file_baton, child, subpool)); + } + SVN_ERR(editor->close_file(file_baton, NULL, subpool)); + } + /* If we opened, added, or replaced a directory, we need to + recurse, apply outstanding propmods, and then close it. */ + if ((child->kind == svn_node_dir) + && (child->operation == OP_OPEN + || child->operation == OP_ADD + || child->operation == OP_REPLACE)) + { + SVN_ERR(drive(child, head, editor, subpool)); + if (child->kind == svn_node_dir) + { + SVN_ERR(change_props(editor, child->baton, child, subpool)); + } + SVN_ERR(editor->close_directory(child->baton, subpool)); + } + } + svn_pool_destroy(subpool); + return SVN_NO_ERROR; +} + + +/* Find the operation associated with PATH, which is a single-path + component representing a child of the path represented by + OPERATION. If no such child operation exists, create a new one of + type OP_OPEN. */ +static struct operation * +get_operation(const char *path, + struct operation *operation, + apr_pool_t *pool) +{ + struct operation *child = apr_hash_get(operation->children, path, + APR_HASH_KEY_STRING); + if (! child) + { + child = apr_pcalloc(pool, sizeof(*child)); + child->children = apr_hash_make(pool); + child->operation = OP_OPEN; + child->rev = SVN_INVALID_REVNUM; + child->kind = svn_node_dir; + child->prop_mods = apr_hash_make(pool); + child->prop_dels = apr_array_make(pool, 1, sizeof(const char *)); + apr_hash_set(operation->children, path, APR_HASH_KEY_STRING, child); + } + return child; +} + +/* Return the portion of URL that is relative to ANCHOR (URI-decoded). */ +static const char * +subtract_anchor(const char *anchor, const char *url, apr_pool_t *pool) +{ + if (! strcmp(url, anchor)) + return ""; + else + return svn_uri__is_child(anchor, url, pool); +} + +/* Add PATH to the operations tree rooted at OPERATION, creating any + intermediate nodes that are required. 
Here's what's expected for + each action type: + + ACTION URL REV SRC-FILE PROPNAME + ------------ ----- ------- -------- -------- + ACTION_MKDIR NULL invalid NULL NULL + ACTION_CP valid valid NULL NULL + ACTION_PUT NULL invalid valid NULL + ACTION_RM NULL invalid NULL NULL + ACTION_PROPSET valid invalid NULL valid + ACTION_PROPDEL valid invalid NULL valid + + Node type information is obtained for any copy source (to determine + whether to create a file or directory) and for any deleted path (to + ensure it exists since svn_delta_editor_t->delete_entry doesn't + return an error on non-existent nodes). */ +static svn_error_t * +build(action_code_t action, + const char *path, + const char *url, + svn_revnum_t rev, + const char *prop_name, + const svn_string_t *prop_value, + const char *src_file, + svn_revnum_t head, + const char *anchor, + svn_ra_session_t *session, + struct operation *operation, + apr_pool_t *pool) +{ + apr_array_header_t *path_bits = svn_path_decompose(path, pool); + const char *path_so_far = ""; + const char *copy_src = NULL; + svn_revnum_t copy_rev = SVN_INVALID_REVNUM; + int i; + + /* Look for any previous operations we've recognized for PATH. If + any of PATH's ancestors have not yet been traversed, we'll be + creating OP_OPEN operations for them as we walk down PATH's path + components. */ + for (i = 0; i < path_bits->nelts; ++i) + { + const char *path_bit = APR_ARRAY_IDX(path_bits, i, const char *); + path_so_far = svn_relpath_join(path_so_far, path_bit, pool); + operation = get_operation(path_so_far, operation, pool); + + /* If we cross a replace- or add-with-history, remember the + source of those things in case we need to lookup the node kind + of one of their children. And if this isn't such a copy, + but we've already seen one in of our parent paths, we just need + to extend that copy source path by our current path + component. 
*/ + if (operation->url + && SVN_IS_VALID_REVNUM(operation->rev) + && (operation->operation == OP_REPLACE + || operation->operation == OP_ADD)) + { + copy_src = subtract_anchor(anchor, operation->url, pool); + copy_rev = operation->rev; + } + else if (copy_src) + { + copy_src = svn_relpath_join(copy_src, path_bit, pool); + } + } + + /* Handle property changes. */ + if (prop_name) + { + if (operation->operation == OP_DELETE) + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "cannot set properties on a location being" + " deleted ('%s')", path); + /* If we're not adding this thing ourselves, check for existence. */ + if (! ((operation->operation == OP_ADD) || + (operation->operation == OP_REPLACE))) + { + SVN_ERR(svn_ra_check_path(session, + copy_src ? copy_src : path, + copy_src ? copy_rev : head, + &operation->kind, pool)); + if (operation->kind == svn_node_none) + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "propset: '%s' not found", path); + else if ((operation->kind == svn_node_file) + && (operation->operation == OP_OPEN)) + operation->operation = OP_PROPSET; + } + if (! prop_value) + APR_ARRAY_PUSH(operation->prop_dels, const char *) = prop_name; + else + apr_hash_set(operation->prop_mods, prop_name, + APR_HASH_KEY_STRING, prop_value); + if (!operation->rev) + operation->rev = rev; + return SVN_NO_ERROR; + } + + /* We won't fuss about multiple operations on the same path in the + following cases: + + - the prior operation was, in fact, a no-op (open) + - the prior operation was a propset placeholder + - the prior operation was a deletion + + Note: while the operation structure certainly supports the + ability to do a copy of a file followed by a put of new contents + for the file, we don't let that happen (yet). 
+ */ + if (operation->operation != OP_OPEN + && operation->operation != OP_PROPSET + && operation->operation != OP_DELETE) + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "unsupported multiple operations on '%s'", path); + + /* For deletions, we validate that there's actually something to + delete. If this is a deletion of the child of a copied + directory, we need to remember to look in the copy source tree to + verify that this thing actually exists. */ + if (action == ACTION_RM) + { + operation->operation = OP_DELETE; + SVN_ERR(svn_ra_check_path(session, + copy_src ? copy_src : path, + copy_src ? copy_rev : head, + &operation->kind, pool)); + if (operation->kind == svn_node_none) + { + if (copy_src && strcmp(path, copy_src)) + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "'%s' (from '%s:%ld') not found", + path, copy_src, copy_rev); + else + return svn_error_createf(SVN_ERR_BAD_URL, NULL, "'%s' not found", + path); + } + } + /* Handle copy operations (which can be adds or replacements). */ + else if (action == ACTION_CP) + { + if (rev > head) + return svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL, + "Copy source revision cannot be younger " + "than base revision"); + operation->operation = + operation->operation == OP_DELETE ? OP_REPLACE : OP_ADD; + if (operation->operation == OP_ADD) + { + /* There is a bug in the current version of mod_dav_svn + which incorrectly replaces existing directories. + Therefore we need to check if the target exists + and raise an error here. */ + SVN_ERR(svn_ra_check_path(session, + copy_src ? copy_src : path, + copy_src ? 
copy_rev : head, + &operation->kind, pool)); + if (operation->kind != svn_node_none) + { + if (copy_src && strcmp(path, copy_src)) + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "'%s' (from '%s:%ld') already exists", + path, copy_src, copy_rev); + else + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "'%s' already exists", path); + } + } + SVN_ERR(svn_ra_check_path(session, subtract_anchor(anchor, url, pool), + rev, &operation->kind, pool)); + if (operation->kind == svn_node_none) + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "'%s' not found", + subtract_anchor(anchor, url, pool)); + operation->url = url; + operation->rev = rev; + } + /* Handle mkdir operations (which can be adds or replacements). */ + else if (action == ACTION_MKDIR) + { + operation->operation = + operation->operation == OP_DELETE ? OP_REPLACE : OP_ADD; + operation->kind = svn_node_dir; + } + /* Handle put operations (which can be adds, replacements, or opens). */ + else if (action == ACTION_PUT) + { + if (operation->operation == OP_DELETE) + { + operation->operation = OP_REPLACE; + } + else + { + SVN_ERR(svn_ra_check_path(session, + copy_src ? copy_src : path, + copy_src ? copy_rev : head, + &operation->kind, pool)); + if (operation->kind == svn_node_file) + operation->operation = OP_OPEN; + else if (operation->kind == svn_node_none) + operation->operation = OP_ADD; + else + return svn_error_createf(SVN_ERR_BAD_URL, NULL, + "'%s' is not a file", path); + } + operation->kind = svn_node_file; + operation->src_file = src_file; + } + else + { + /* We shouldn't get here. 
*/ + SVN_ERR_MALFUNCTION(); + } + + return SVN_NO_ERROR; +} + +struct action { + action_code_t action; + + /* revision (copy-from-rev of path[0] for cp; base-rev for put) */ + svn_revnum_t rev; + + /* action path[0] path[1] + * ------ ------- ------- + * mv source target + * mkdir target (null) + * cp source target + * put target source + * rm target (null) + * propset target (null) + */ + const char *path[2]; + + /* property name/value */ + const char *prop_name; + const svn_string_t *prop_value; +}; + +static svn_error_t * +execute(const apr_array_header_t *actions, + const char *anchor, + apr_hash_t *revprops, + const char *username, + const char *password, + const char *config_dir, + const apr_array_header_t *config_options, + svn_boolean_t non_interactive, + svn_boolean_t no_auth_cache, + svn_revnum_t base_revision, + apr_pool_t *pool) +{ + svn_ra_session_t *session; + svn_revnum_t head; + const svn_delta_editor_t *editor; + svn_ra_callbacks2_t *ra_callbacks; + void *editor_baton; + struct operation root; + svn_error_t *err; + apr_hash_t *config; + svn_config_t *cfg_config; + int i; + + SVN_ERR(svn_config_get_config(&config, config_dir, pool)); + SVN_ERR(svn_cmdline__apply_config_options(config, config_options, + "svnmucc: ", "--config-option")); + cfg_config = apr_hash_get(config, SVN_CONFIG_CATEGORY_CONFIG, + APR_HASH_KEY_STRING); + SVN_ERR(create_ra_callbacks(&ra_callbacks, username, password, config_dir, + cfg_config, non_interactive, no_auth_cache, + pool)); + SVN_ERR(svn_ra_open4(&session, NULL, anchor, NULL, ra_callbacks, + NULL, config, pool)); + + SVN_ERR(svn_ra_get_latest_revnum(session, &head, pool)); + if (SVN_IS_VALID_REVNUM(base_revision)) + { + if (base_revision > head) + return svn_error_createf(SVN_ERR_FS_NO_SUCH_REVISION, NULL, + "No such revision %ld (youngest is %ld)", + base_revision, head); + head = base_revision; + } + + root.children = apr_hash_make(pool); + root.operation = OP_OPEN; + for (i = 0; i < actions->nelts; ++i) + { + struct 
action *action = APR_ARRAY_IDX(actions, i, struct action *); + switch (action->action) + { + const char *path1, *path2; + case ACTION_MV: + path1 = subtract_anchor(anchor, action->path[0], pool); + path2 = subtract_anchor(anchor, action->path[1], pool); + SVN_ERR(build(ACTION_RM, path1, NULL, + SVN_INVALID_REVNUM, NULL, NULL, NULL, head, anchor, + session, &root, pool)); + SVN_ERR(build(ACTION_CP, path2, action->path[0], + head, NULL, NULL, NULL, head, anchor, + session, &root, pool)); + break; + case ACTION_CP: + path2 = subtract_anchor(anchor, action->path[1], pool); + if (action->rev == SVN_INVALID_REVNUM) + action->rev = head; + SVN_ERR(build(ACTION_CP, path2, action->path[0], + action->rev, NULL, NULL, NULL, head, anchor, + session, &root, pool)); + break; + case ACTION_RM: + path1 = subtract_anchor(anchor, action->path[0], pool); + SVN_ERR(build(ACTION_RM, path1, NULL, + SVN_INVALID_REVNUM, NULL, NULL, NULL, head, anchor, + session, &root, pool)); + break; + case ACTION_MKDIR: + path1 = subtract_anchor(anchor, action->path[0], pool); + SVN_ERR(build(ACTION_MKDIR, path1, action->path[0], + SVN_INVALID_REVNUM, NULL, NULL, NULL, head, anchor, + session, &root, pool)); + break; + case ACTION_PUT: + path1 = subtract_anchor(anchor, action->path[0], pool); + SVN_ERR(build(ACTION_PUT, path1, action->path[0], + SVN_INVALID_REVNUM, NULL, NULL, action->path[1], + head, anchor, session, &root, pool)); + break; + case ACTION_PROPSET: + case ACTION_PROPDEL: + path1 = subtract_anchor(anchor, action->path[0], pool); + SVN_ERR(build(action->action, path1, action->path[0], + SVN_INVALID_REVNUM, + action->prop_name, action->prop_value, + NULL, head, anchor, session, &root, pool)); + break; + case ACTION_PROPSETF: + default: + SVN_ERR_MALFUNCTION_NO_RETURN(); + } + } + + SVN_ERR(svn_ra_get_commit_editor3(session, &editor, &editor_baton, revprops, + commit_callback, NULL, NULL, FALSE, pool)); + + SVN_ERR(editor->open_root(editor_baton, head, pool, &root.baton)); + err = 
drive(&root, head, editor, pool); + if (!err) + err = editor->close_edit(editor_baton, pool); + if (err) + svn_error_clear(editor->abort_edit(editor_baton, pool)); + + return err; +} + +static svn_error_t * +read_propvalue_file(const svn_string_t **value_p, + const char *filename, + apr_pool_t *pool) +{ + svn_stringbuf_t *value; + apr_pool_t *scratch_pool = svn_pool_create(pool); + apr_file_t *f; + + SVN_ERR(svn_io_file_open(&f, filename, APR_READ | APR_BINARY | APR_BUFFERED, + APR_OS_DEFAULT, scratch_pool)); + SVN_ERR(svn_stringbuf_from_aprfile(&value, f, scratch_pool)); + *value_p = svn_string_create_from_buf(value, pool); + svn_pool_destroy(scratch_pool); + return SVN_NO_ERROR; +} + +/* Perform the typical suite of manipulations for user-provided URLs + on URL, returning the result (allocated from POOL): IRI-to-URI + conversion, auto-escaping, and canonicalization. */ +static const char * +sanitize_url(const char *url, + apr_pool_t *pool) +{ + url = svn_path_uri_from_iri(url, pool); + url = svn_path_uri_autoescape(url, pool); + return svn_uri_canonicalize(url, pool); +} + +static void +usage(apr_pool_t *pool, int exit_val) +{ + FILE *stream = exit_val == EXIT_SUCCESS ? stdout : stderr; + const char msg[] = + "Multiple URL Command Client (for Subversion)\n" + "\nUsage: svnmucc [OPTION]... 
[ACTION]...\n" + "\nActions:\n" + " cp REV URL1 URL2 copy URL1@REV to URL2\n" + " mkdir URL create new directory URL\n" + " mv URL1 URL2 move URL1 to URL2\n" + " rm URL delete URL\n" + " put SRC-FILE URL add or modify file URL with contents copied from\n" + " SRC-FILE (use \"-\" to read from standard input)\n" + " propset NAME VAL URL set property NAME on URL to value VAL\n" + " propsetf NAME VAL URL set property NAME on URL to value from file VAL\n" + " propdel NAME URL delete property NAME from URL\n" + "\nOptions:\n" + " -h, --help display this text\n" + " -m, --message ARG use ARG as a log message\n" + " -F, --file ARG read log message from file ARG\n" + " -u, --username ARG commit the changes as username ARG\n" + " -p, --password ARG use ARG as the password\n" + " -U, --root-url ARG interpret all action URLs are relative to ARG\n" + " -r, --revision ARG use revision ARG as baseline for changes\n" + " --with-revprop A[=B] set revision property A in new revision to B\n" + " if specified, else to the empty string\n" + " -n, --non-interactive don't prompt the user about anything\n" + " -X, --extra-args ARG append arguments from file ARG (one per line;\n" + " use \"-\" to read from standard input)\n" + " --config-dir ARG use ARG to override the config directory\n" + " --config-option ARG use ARG so override a configuration option\n" + " --no-auth-cache do not cache authentication tokens\n" + " --version print version information\n"; + svn_error_clear(svn_cmdline_fputs(msg, stream, pool)); + apr_pool_destroy(pool); + exit(exit_val); +} + +static void +insufficient(apr_pool_t *pool) +{ + handle_error(svn_error_create(SVN_ERR_INCORRECT_PARAMS, NULL, + "insufficient arguments"), + pool); +} + +static svn_error_t * +display_version(apr_getopt_t *os, apr_pool_t *pool) +{ + const char *ra_desc_start + = "The following repository access (RA) modules are available:\n\n"; + svn_stringbuf_t *version_footer; + + version_footer = svn_stringbuf_create(ra_desc_start, pool); + 
SVN_ERR(svn_ra_print_modules(version_footer, pool)); + + SVN_ERR(svn_opt_print_help3(os, "svnmucc", TRUE, FALSE, version_footer->data, + NULL, NULL, NULL, NULL, NULL, pool)); + + return SVN_NO_ERROR; +} + +int +main(int argc, const char **argv) +{ + apr_pool_t *pool = init("svnmucc"); + apr_array_header_t *actions = apr_array_make(pool, 1, + sizeof(struct action *)); + const char *anchor = NULL; + svn_error_t *err = SVN_NO_ERROR; + apr_getopt_t *getopt; + enum { + config_dir_opt = SVN_OPT_FIRST_LONGOPT_ID, + config_inline_opt, + no_auth_cache_opt, + version_opt, + with_revprop_opt + }; + const apr_getopt_option_t options[] = { + {"message", 'm', 1, ""}, + {"file", 'F', 1, ""}, + {"username", 'u', 1, ""}, + {"password", 'p', 1, ""}, + {"root-url", 'U', 1, ""}, + {"revision", 'r', 1, ""}, + {"with-revprop", with_revprop_opt, 1, ""}, + {"extra-args", 'X', 1, ""}, + {"help", 'h', 0, ""}, + {"non-interactive", 'n', 0, ""}, + {"config-dir", config_dir_opt, 1, ""}, + {"config-option", config_inline_opt, 1, ""}, + {"no-auth-cache", no_auth_cache_opt, 0, ""}, + {"version", version_opt, 0, ""}, + {NULL, 0, 0, NULL} + }; + const char *message = NULL; + const char *username = NULL, *password = NULL; + const char *root_url = NULL, *extra_args_file = NULL; + const char *config_dir = NULL; + apr_array_header_t *config_options; + svn_boolean_t non_interactive = FALSE; + svn_boolean_t no_auth_cache = FALSE; + svn_revnum_t base_revision = SVN_INVALID_REVNUM; + apr_array_header_t *action_args; + apr_hash_t *revprops = apr_hash_make(pool); + int i; + + config_options = apr_array_make(pool, 0, + sizeof(svn_cmdline__config_argument_t*)); + + apr_getopt_init(&getopt, pool, argc, argv); + getopt->interleave = 1; + while (1) + { + int opt; + const char *arg; + const char *opt_arg; + + apr_status_t status = apr_getopt_long(getopt, options, &opt, &arg); + if (APR_STATUS_IS_EOF(status)) + break; + if (status != APR_SUCCESS) + handle_error(svn_error_wrap_apr(status, "getopt failure"), pool); + 
switch(opt) + { + case 'm': + err = svn_utf_cstring_to_utf8(&message, arg, pool); + if (err) + handle_error(err, pool); + break; + case 'F': + { + const char *arg_utf8; + svn_stringbuf_t *contents; + err = svn_utf_cstring_to_utf8(&arg_utf8, arg, pool); + if (! err) + err = svn_stringbuf_from_file2(&contents, arg, pool); + if (! err) + err = svn_utf_cstring_to_utf8(&message, contents->data, pool); + if (err) + handle_error(err, pool); + } + break; + case 'u': + username = apr_pstrdup(pool, arg); + break; + case 'p': + password = apr_pstrdup(pool, arg); + break; + case 'U': + err = svn_utf_cstring_to_utf8(&root_url, arg, pool); + if (err) + handle_error(err, pool); + if (! svn_path_is_url(root_url)) + handle_error(svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL, + "'%s' is not a URL\n", root_url), + pool); + root_url = sanitize_url(root_url, pool); + break; + case 'r': + { + char *digits_end = NULL; + base_revision = strtol(arg, &digits_end, 10); + if ((! SVN_IS_VALID_REVNUM(base_revision)) + || (! 
digits_end) + || *digits_end) + handle_error(svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, + NULL, "Invalid revision number"), + pool); + } + break; + case with_revprop_opt: + err = svn_opt_parse_revprop(&revprops, arg, pool); + if (err != SVN_NO_ERROR) + handle_error(err, pool); + break; + case 'X': + extra_args_file = apr_pstrdup(pool, arg); + break; + case 'n': + non_interactive = TRUE; + break; + case config_dir_opt: + err = svn_utf_cstring_to_utf8(&config_dir, arg, pool); + if (err) + handle_error(err, pool); + break; + case config_inline_opt: + err = svn_utf_cstring_to_utf8(&opt_arg, arg, pool); + if (err) + handle_error(err, pool); + + err = svn_cmdline__parse_config_option(config_options, opt_arg, + pool); + if (err) + handle_error(err, pool); + break; + case no_auth_cache_opt: + no_auth_cache = TRUE; + break; + case version_opt: + SVN_INT_ERR(display_version(getopt, pool)); + exit(EXIT_SUCCESS); + break; + case 'h': + usage(pool, EXIT_SUCCESS); + break; + } + } + + /* Copy the rest of our command-line arguments to an array, + UTF-8-ing them along the way. */ + action_args = apr_array_make(pool, getopt->argc, sizeof(const char *)); + while (getopt->ind < getopt->argc) + { + const char *arg = getopt->argv[getopt->ind++]; + if ((err = svn_utf_cstring_to_utf8(&(APR_ARRAY_PUSH(action_args, + const char *)), + arg, pool))) + handle_error(err, pool); + } + + /* If there are extra arguments in a supplementary file, tack those + on, too (again, in UTF8 form). */ + if (extra_args_file) + { + const char *extra_args_file_utf8; + svn_stringbuf_t *contents, *contents_utf8; + + err = svn_utf_cstring_to_utf8(&extra_args_file_utf8, + extra_args_file, pool); + if (! err) + err = svn_stringbuf_from_file2(&contents, extra_args_file_utf8, pool); + if (! 
err) + err = svn_utf_stringbuf_to_utf8(&contents_utf8, contents, pool); + if (err) + handle_error(err, pool); + svn_cstring_split_append(action_args, contents_utf8->data, "\n\r", + FALSE, pool); + } + + /* Now, we iterate over the combined set of arguments -- our actions. */ + for (i = 0; i < action_args->nelts; ) + { + int j, num_url_args; + const char *action_string = APR_ARRAY_IDX(action_args, i, const char *); + struct action *action = apr_palloc(pool, sizeof(*action)); + + /* First, parse the action. */ + if (! strcmp(action_string, "mv")) + action->action = ACTION_MV; + else if (! strcmp(action_string, "cp")) + action->action = ACTION_CP; + else if (! strcmp(action_string, "mkdir")) + action->action = ACTION_MKDIR; + else if (! strcmp(action_string, "rm")) + action->action = ACTION_RM; + else if (! strcmp(action_string, "put")) + action->action = ACTION_PUT; + else if (! strcmp(action_string, "propset")) + action->action = ACTION_PROPSET; + else if (! strcmp(action_string, "propsetf")) + action->action = ACTION_PROPSETF; + else if (! strcmp(action_string, "propdel")) + action->action = ACTION_PROPDEL; + else if (! strcmp(action_string, "?") || ! strcmp(action_string, "h") + || ! strcmp(action_string, "help")) + usage(pool, EXIT_SUCCESS); + else + handle_error(svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL, + "'%s' is not an action\n", + action_string), pool); + if (++i == action_args->nelts) + insufficient(pool); + + /* For copies, there should be a revision number next. 
*/ + if (action->action == ACTION_CP) + { + const char *rev_str = APR_ARRAY_IDX(action_args, i, const char *); + if (strcmp(rev_str, "head") == 0) + action->rev = SVN_INVALID_REVNUM; + else if (strcmp(rev_str, "HEAD") == 0) + action->rev = SVN_INVALID_REVNUM; + else + { + char *end; + + while (*rev_str == 'r') + ++rev_str; + + action->rev = strtol(rev_str, &end, 0); + if (*end) + handle_error(svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL, + "'%s' is not a revision\n", + rev_str), pool); + } + if (++i == action_args->nelts) + insufficient(pool); + } + else + { + action->rev = SVN_INVALID_REVNUM; + } + + /* For puts, there should be a local file next. */ + if (action->action == ACTION_PUT) + { + action->path[1] = + svn_dirent_canonicalize(APR_ARRAY_IDX(action_args, i, + const char *), pool); + if (++i == action_args->nelts) + insufficient(pool); + } + + /* For propset, propsetf, and propdel, a property name (and + maybe a property value or file which contains one) comes next. */ + if ((action->action == ACTION_PROPSET) + || (action->action == ACTION_PROPSETF) + || (action->action == ACTION_PROPDEL)) + { + action->prop_name = APR_ARRAY_IDX(action_args, i, const char *); + if (++i == action_args->nelts) + insufficient(pool); + + if (action->action == ACTION_PROPDEL) + { + action->prop_value = NULL; + } + else if (action->action == ACTION_PROPSET) + { + action->prop_value = + svn_string_create(APR_ARRAY_IDX(action_args, i, + const char *), pool); + if (++i == action_args->nelts) + insufficient(pool); + } + else + { + const char *propval_file = + svn_dirent_canonicalize(APR_ARRAY_IDX(action_args, i, + const char *), pool); + + if (++i == action_args->nelts) + insufficient(pool); + + err = read_propvalue_file(&(action->prop_value), + propval_file, pool); + if (err) + handle_error(err, pool); + + action->action = ACTION_PROPSET; + } + + if (action->prop_value + && svn_prop_needs_translation(action->prop_name)) + { + svn_string_t *translated_value; + err = 
svn_subst_translate_string2(&translated_value, NULL, + NULL, action->prop_value, NULL, + FALSE, pool, pool); + if (err) + handle_error( + svn_error_quick_wrap(err, + "Error normalizing property value"), + pool); + action->prop_value = translated_value; + } + } + + /* How many URLs does this action expect? */ + if (action->action == ACTION_RM + || action->action == ACTION_MKDIR + || action->action == ACTION_PUT + || action->action == ACTION_PROPSET + || action->action == ACTION_PROPSETF /* shouldn't see this one */ + || action->action == ACTION_PROPDEL) + num_url_args = 1; + else + num_url_args = 2; + + /* Parse the required number of URLs. */ + for (j = 0; j < num_url_args; ++j) + { + const char *url = APR_ARRAY_IDX(action_args, i, const char *); + + /* If there's a ROOT_URL, we expect URL to be a path + relative to ROOT_URL (and we build a full url from the + combination of the two). Otherwise, it should be a full + url. */ + if (! svn_path_is_url(url)) + { + if (! root_url) + handle_error(svn_error_createf(SVN_ERR_INCORRECT_PARAMS, NULL, + "'%s' is not a URL, and " + "--root-url (-U) not provided\n", + url), pool); + /* ### These relpaths are already URI-encoded. */ + url = apr_pstrcat(pool, root_url, "/", + svn_relpath_canonicalize(url, pool), + (char *)NULL); + } + url = sanitize_url(url, pool); + action->path[j] = url; + + /* The cp source could be the anchor, but the other URLs should be + children of the anchor. */ + if (! (action->action == ACTION_CP && j == 0)) + url = svn_uri_dirname(url, pool); + if (! anchor) + anchor = url; + else + anchor = svn_uri_get_longest_ancestor(anchor, url, pool); + + if ((++i == action_args->nelts) && (j >= num_url_args)) + insufficient(pool); + } + APR_ARRAY_PUSH(actions, struct action *) = action; + } + + if (! 
actions->nelts) + usage(pool, EXIT_FAILURE); + + if (message == NULL) + { + if (apr_hash_get(revprops, SVN_PROP_REVISION_LOG, + APR_HASH_KEY_STRING) == NULL) + /* None of -F, -m, or --with-revprop=svn:log specified; default. */ + apr_hash_set(revprops, SVN_PROP_REVISION_LOG, APR_HASH_KEY_STRING, + svn_string_create("committed using svnmucc", pool)); + } + else + { + /* -F or -m specified; use that even if --with-revprop=svn:log. */ + apr_hash_set(revprops, SVN_PROP_REVISION_LOG, APR_HASH_KEY_STRING, + svn_string_create(message, pool)); + } + + if ((err = execute(actions, anchor, revprops, username, password, + config_dir, config_options, non_interactive, + no_auth_cache, base_revision, pool))) + handle_error(err, pool); + + svn_pool_destroy(pool); + return EXIT_SUCCESS; +} diff --git a/tools/client-side/wcfind b/tools/client-side/wcfind new file mode 100755 index 0000000..2aca83c --- /dev/null +++ b/tools/client-side/wcfind @@ -0,0 +1,81 @@ +#!/bin/sh + +# ------------------------------------------------------------ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# ------------------------------------------------------------ + +# Copyright 2007 Max Bowsher +# Licensed under the terms Subversion ships under + +# Runs the 'find' program, with arguments munged such that '.svn' or 'CVS' +# working copy administrative directories and their contents are ignored. + +optspaths= +expropts= +exproptarg= +exprmain= +depth= +somethingseen= +phase=optspaths +print=-print +for option in "$@"; do + if [ "$phase" = "optspaths" ]; then + case $option in + -H|-L|-P) ;; + -*|"("*|")"*|,*|!*) phase=exprmain ;; + *) ;; + esac + fi + if [ "$exproptarg" = "yes" ]; then + exproptarg= + phase=expropts + fi + if [ "$phase" = "exprmain" ]; then + case $option in + -depth|-d) depth=yes ;; + -delete|-exec|-exec|-execdir|-execdir|-fls|-fprint|-fprint0 \ + |-fprintf|-ok|-print|-okdir|-print0|-printf|-quit|-ls) print= ;; + esac + case $option in + -depth|-d|-noleaf|-mount|-xdev|-warn|-nowarn \ + |-ignore_readdir_race|-noignore_readdir_race) phase=expropts ;; + -maxdepth|-mindepth|-regextype) phase=expropts; exproptarg=yes ;; + -follow|-daystart) ;; + *) somethingseen=yes + esac + fi + eval "$phase=\"\$$phase \\\"$option\\\"\"" + if [ "$phase" = "expropts" ]; then + phase=exprmain + fi +done + +if [ -z "$somethingseen" ]; then + exprmain="$exprmain -print" + print= +fi + +if [ "$depth" = "yes" ]; then + eval find $optspaths $expropts \ + -regex \''.*/\.svn'\' -o -regex \''.*/\.svn/.*'\' \ + -o -regex \''.*/CVS'\' -o -regex \''.*/CVS/.*'\' \ + -o '\(' $exprmain '\)' $print +else + eval find $optspaths $expropts '\(' -name .svn -o -name CVS '\)' -prune \ + -o '\(' $exprmain '\)' $print +fi diff --git a/tools/dev/analyze-svnlogs.py b/tools/dev/analyze-svnlogs.py new file mode 100755 index 0000000..883b413 --- /dev/null +++ b/tools/dev/analyze-svnlogs.py @@ -0,0 +1,193 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# Generate a report of each area each committer has touched over all time. +# +# $ svn log -v ^/ > svnlogdata +# $ ./analyze-svnlogs.py < svnlogdata > report.txt +# +# NOTE: ./logdata.py is written with a cached version of the data extracted +# from 'svnlogdata'. That data can be analyzed in many ways, beyond +# what this script is reporting. +# + +import sys +import re + + +RE_LOG_HEADER = re.compile('^(r[0-9]+) ' + '\| ([^|]+) ' + '\| ([^|]+) ' + '\| ([0-9]+) line') +RE_PATH = re.compile(r' [MARD] (.*?)( \(from .*\))?$') +SEPARATOR = '-' * 72 + + +def parse_one_commit(logfile): + line = logfile.readline().strip() + if line != SEPARATOR: + raise ParseError('missing separator: %s' % line) + + line = logfile.readline() + if not line: + # end of file! + return None, None + + m = RE_LOG_HEADER.match(line) + if not m: + raise ParseError('could not match log header') + revision = m.group(1) + author = m.group(2) + num_lines = int(m.group(4)) + paths = set() + + # skip "Changed paths:" + line = logfile.readline().strip() + if not line: + # there were no paths. just a blank before the log message. continue on. + sys.stderr.write('Funny revision: %s\n' % revision) + else: + if not line.startswith('Changed'): + raise ParseError('log not run with -v. 
paths missing in %s' % revision) + + # gather all the affected paths + while 1: + line = logfile.readline().rstrip() + if not line: + # just hit end of the changed paths + break + m = RE_PATH.match(line) + if not m: + raise ParseError('bad path in %s: %s' % (revision, line)) + paths.add(m.group(1)) + + # suck up the log message + for i in range(num_lines): + logfile.readline() + + return author, paths + + +def parse_file(logfile): + authors = { } + + while True: + author, paths = parse_one_commit(logfile) + if author is None: + return authors + + if author in authors: + authors[author] = authors[author].union(paths) + else: + authors[author] = paths + + +def write_logdata(authors): + out = open('logdata.py', 'w') + out.write('authors = {\n') + for author, paths in authors.items(): + out.write(" '%s': set([\n" % author) + for path in paths: + out.write(' %s,\n' % repr(path)) + out.write(' ]),\n') + out.write('}\n') + + +def get_key(sectionroots, path): + key = None + for section in sectionroots: + if path.startswith(section): + # add one path element below top section to the key. 
+ elmts = len(section.split('/')) + 1 + # strip first element (always empty because path starts with '/') + key = tuple(path.split('/', elmts)[1:elmts]) + break + if key == None: + # strip first element (always empty because path starts with '/') + key = tuple(path.split('/', 3)[1:3]) + return key + + +def print_report(authors, sectionroots=[ ]): + for author, paths in sorted(authors.items()): + topdirs = { } + for path in paths: + key = get_key(sectionroots, path) + if key in topdirs: + topdirs[key] += 1 + else: + topdirs[key] = 1 + + print(author) + tags = [ ] + branches = [ ] + for topdir in sorted(topdirs): + if len(topdir) == 1: + assert topdirs[topdir] == 1 + print(' %s (ROOT)' % topdir[0]) + else: + if topdir[0] == 'tags': + if not topdir[1] in tags: + tags.append(topdir[1]) + elif topdir[0] == 'branches': + if not topdir[1] in branches: + branches.append(topdir[1]) + else: + print(' %s (%d items)' % ('/'.join(topdir), topdirs[topdir])) + if tags: + print(' TAGS: %s' % ', '.join(tags)) + if branches: + print(' BRANCHES: %s' % ', '.join(branches)) + + print('') + + +def run(logfile): + try: + import logdata + authors = logdata.authors + except ImportError: + authors = parse_file(logfile) + write_logdata(authors) + + sectionroots = [ + '/trunk/subversion/include/private', + '/trunk/subversion/include', + '/trunk/subversion/tests', + '/trunk/subversion', + '/trunk/tools', + '/trunk/contrib', + '/trunk/doc', + ]; + print_report(authors, sectionroots) + + +class ParseError(Exception): + pass + + +if __name__ == '__main__': + if len(sys.argv) > 1: + logfile = open(sys.argv[1]) + else: + logfile = sys.stdin + run(logfile) diff --git a/tools/dev/benchmarks/large_dirs/create_bigdir.sh b/tools/dev/benchmarks/large_dirs/create_bigdir.sh new file mode 100755 index 0000000..9193ee5 --- /dev/null +++ b/tools/dev/benchmarks/large_dirs/create_bigdir.sh @@ -0,0 +1,217 @@ +#!/bin/sh + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license 
agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# usage: run this script from the root of your working copy +# and / or adjust the path settings below as needed + +# set SVNPATH to the 'subversion' folder of your SVN source code w/c + +SVNPATH="$('pwd')/subversion" + +# if using the installed svn, you may need to adapt the following. +# Uncomment the VALGRIND line to use that tool instead of "time". +# Comment the SVNSERVE line to use file:// instead of svn://. + +SVN=${SVNPATH}/svn/svn +SVNADMIN=${SVNPATH}/svnadmin/svnadmin +SVNSERVE=${SVNPATH}/svnserve/svnserve +# VALGRIND="valgrind --tool=callgrind" + +# set your data paths here + +WC=/dev/shm/wc +REPOROOT=/dev/shm + +# number of items per folder on the first run. It will be doubled +# after every iteration. The test will stop if MAXCOUNT has been +# reached or exceeded (and will not be executed for MAXCOUNT). 
+ +FILECOUNT=1 +MAXCOUNT=20000 + +# only 1.7 supports server-side caching and uncompressed data transfer + +SERVEROPTS="-c 0 -M 400" + +# from here on, we should be good + +TIMEFORMAT='%3R %3U %3S' +REPONAME=dirs +PORT=54321 +if [ "${SVNSERVE}" != "" ] ; then + URL=svn://localhost:$PORT/$REPONAME +else + URL=file://${REPOROOT}/$REPONAME +fi + +# create repository + +rm -rf $WC $REPOROOT/$REPONAME +mkdir $REPOROOT/$REPONAME +${SVNADMIN} create $REPOROOT/$REPONAME +echo "[general] +anon-access = write" > $REPOROOT/$REPONAME/conf/svnserve.conf + +# fire up svnserve + +if [ "${SVNSERVE}" != "" ] ; then + VERSION=$( ${SVNSERVE} --version | grep " version" | sed 's/.*\ 1\.\([0-9]\).*/\1/' ) + if [ "$VERSION" -lt "7" ]; then + SERVEROPTS="" + fi + + ${SVNSERVE} -Tdr ${REPOROOT} ${SERVEROPTS} --listen-port ${PORT} --foreground & + PID=$! + sleep 1 +fi + +# construct valgrind parameters + +if [ "${VALGRIND}" != "" ] ; then + VG_TOOL=$( echo ${VALGRIND} | sed 's/.*\ --tool=\([a-z]*\).*/\1/' ) + VG_OUTFILE="--${VG_TOOL}-out-file" +fi + +# print header + +printf "using " +${SVN} --version | grep " version" +echo + +# init working copy + +rm -rf $WC +${SVN} co $URL $WC > /dev/null + +# helpers + +get_sequence() { + # three equivalents... 
+ (jot - "$1" "$2" "1" 2>/dev/null || seq -s ' ' "$1" "$2" 2>/dev/null || python -c "for i in range($1,$2+1): print(i)") +} + +# functions that execute an SVN command + +run_svn() { + if [ "${VALGRIND}" = "" ] ; then + time ${SVN} $1 $WC/$2 $3 > /dev/null + else + ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.$1.$2" ${SVN} $1 $WC/$2 $3 > /dev/null + fi +} + +run_svn_del() { + if [ "${VALGRIND}" = "" ] ; then + time ${SVN} del $WC/${1}_c/$2 -q > /dev/null + else + ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.del.$1" ${SVN} del $WC/${1}_c/$2 -q > /dev/null + fi +} + +run_svn_ci() { + if [ "${VALGRIND}" = "" ] ; then + time ${SVN} ci $WC/$1 -m "" -q > /dev/null + else + ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.ci_$2.$1" ${SVN} ci $WC/$1 -m "" -q > /dev/null + fi +} + +run_svn_cp() { + if [ "${VALGRIND}" = "" ] ; then + time ${SVN} cp $WC/$1 $WC/$2 > /dev/null + else + ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.cp.$1" ${SVN} cp $WC/$1 $WC/$2 > /dev/null + fi +} + +run_svn_get() { + if [ "${VALGRIND}" = "" ] ; then + time ${SVN} $1 $URL $WC -q > /dev/null + else + ${VALGRIND} ${VG_OUTFILE}="${VG_TOOL}.out.$1.$2" ${SVN} $1 $URL $WC -q > /dev/null + fi +} + +# main loop + +while [ $FILECOUNT -lt $MAXCOUNT ]; do + echo "Processing $FILECOUNT files in the same folder" + + sequence=`get_sequence 2 $FILECOUNT` + printf "\tCreating files ... \t real user sys\n" + mkdir $WC/$FILECOUNT + for i in 1 $sequence; do + echo "File number $i" > $WC/$FILECOUNT/$i + done + + printf "\tAdding files ... \t" + run_svn add $FILECOUNT -q + + printf "\tRunning status ... \t" + run_svn st $FILECOUNT -q + + printf "\tCommit files ... \t" + run_svn_ci $FILECOUNT add + + printf "\tListing files ... \t" + run_svn ls $FILECOUNT + + printf "\tUpdating files ... \t" + run_svn up $FILECOUNT -q + + printf "\tLocal copy ... \t" + run_svn_cp $FILECOUNT ${FILECOUNT}_c + + printf "\tCommit copy ... \t" + run_svn_ci ${FILECOUNT}_c copy + + printf "\tDelete 1 file ... 
\t" + run_svn_del ${FILECOUNT} 1 + + printf "\tDeleting files ... \t" + time sh -c " + for i in $sequence; do + ${SVN} del $WC/${FILECOUNT}_c/\$i -q + done " + + printf "\tCommit deletions ...\t" + run_svn_ci ${FILECOUNT}_c del + + rm -rf $WC + + printf "\tExport all ... \t" + run_svn_get export $FILECOUNT + + rm -rf $WC + mkdir $WC + + printf "\tCheck out all ... \t" + run_svn_get co $FILECOUNT + + FILECOUNT=`echo 2 \* $FILECOUNT | bc` + echo "" +done + +# tear down + +if [ "${SVNSERVE}" != "" ] ; then + echo "killing svnserve ... " + kill $PID +fi + diff --git a/tools/dev/benchmarks/suite1/benchmark.py b/tools/dev/benchmarks/suite1/benchmark.py new file mode 100755 index 0000000..7eb3dd9 --- /dev/null +++ b/tools/dev/benchmarks/suite1/benchmark.py @@ -0,0 +1,638 @@ +#!/usr/bin/env python + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +usage: benchmark.py run <run_file> <levels> <spread> [N] + benchmark.py show <run_file> + benchmark.py compare <run_file1> <run_file2> + benchmark.py combine <new_file> <run_file1> <run_file2> ... + +Test data is written to run_file. +If a run_file exists, data is added to it. 
+<levels> is the number of directory levels to create +<spread> is the number of child trees spreading off each dir level +If <N> is provided, the run is repeated N times. +""" + +import os +import sys +import tempfile +import subprocess +import datetime +import random +import shutil +import cPickle +import optparse +import stat + +TOTAL_RUN = 'TOTAL RUN' + +timings = None + +def run_cmd(cmd, stdin=None, shell=False): + + if shell: + printable_cmd = 'CMD: ' + cmd + else: + printable_cmd = 'CMD: ' + ' '.join(cmd) + if options.verbose: + print printable_cmd + + if stdin: + stdin_arg = subprocess.PIPE + else: + stdin_arg = None + + p = subprocess.Popen(cmd, + stdin=stdin_arg, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=shell) + stdout,stderr = p.communicate(input=stdin) + + if options.verbose: + if (stdout): + print "STDOUT: [[[\n%s]]]" % ''.join(stdout) + if (stderr): + print "STDERR: [[[\n%s]]]" % ''.join(stderr) + + return stdout,stderr + +def timedelta_to_seconds(td): + return ( float(td.seconds) + + float(td.microseconds) / (10**6) + + td.days * 24 * 60 * 60 ) + + +class Timings: + + def __init__(self, *ignore_svn_cmds): + self.timings = {} + self.current_name = None + self.tic_at = None + self.ignore = ignore_svn_cmds + self.name = None + + def tic(self, name): + if name in self.ignore: + return + self.toc() + self.current_name = name + self.tic_at = datetime.datetime.now() + + def toc(self): + if self.current_name and self.tic_at: + toc_at = datetime.datetime.now() + self.submit_timing(self.current_name, + timedelta_to_seconds(toc_at - self.tic_at)) + self.current_name = None + self.tic_at = None + + def submit_timing(self, name, seconds): + times = self.timings.get(name) + if not times: + times = [] + self.timings[name] = times + times.append(seconds) + + def min_max_avg(self, name): + ttimings = self.timings.get(name) + return ( min(ttimings), + max(ttimings), + reduce(lambda x,y: x + y, ttimings) / len(ttimings) ) + + def summary(self): + s = 
[] + if self.name: + s.append('Timings for %s' % self.name) + s.append(' N min max avg operation (unit is seconds)') + + names = sorted(self.timings.keys()) + + for name in names: + timings = self.timings.get(name) + if not name or not timings: continue + + tmin, tmax, tavg = self.min_max_avg(name) + + s.append('%5d %7.2f %7.2f %7.2f %s' % ( + len(timings), + tmin, + tmax, + tavg, + name)) + + return '\n'.join(s) + + + def compare_to(self, other): + def do_div(a, b): + if b: + return float(a) / float(b) + else: + return 0.0 + + def do_diff(a, b): + return float(a) - float(b) + + selfname = self.name + if not selfname: + selfname = 'unnamed' + othername = other.name + if not othername: + othername = 'the other' + + selftotal = self.min_max_avg(TOTAL_RUN)[2] + othertotal = other.min_max_avg(TOTAL_RUN)[2] + + s = ['COMPARE %s to %s' % (othername, selfname)] + + if TOTAL_RUN in self.timings and TOTAL_RUN in other.timings: + s.append(' %s times: %5.1f seconds avg for %s' % (TOTAL_RUN, + othertotal, othername)) + s.append(' %s %5.1f seconds avg for %s' % (' ' * len(TOTAL_RUN), + selftotal, selfname)) + + + s.append(' min max avg operation') + + names = sorted(self.timings.keys()) + + for name in names: + if not name in other.timings: + continue + + + min_me, max_me, avg_me = self.min_max_avg(name) + min_other, max_other, avg_other = other.min_max_avg(name) + + s.append('%-16s %-16s %-16s %s' % ( + '%7.2f|%+7.3f' % ( + do_div(min_me, min_other), + do_diff(min_me, min_other) + ), + + '%7.2f|%+7.3f' % ( + do_div(max_me, max_other), + do_diff(max_me, max_other) + ), + + '%7.2f|%+7.3f' % ( + do_div(avg_me, avg_other), + do_diff(avg_me, avg_other) + ), + + name)) + + s.extend([ + '("1.23|+0.45" means factor=1.23, difference in seconds = 0.45', + 'factor < 1 or difference < 0 means \'%s\' is faster than \'%s\')' + % (self.name, othername)]) + + return '\n'.join(s) + + + def add(self, other): + for name, other_times in other.timings.items(): + my_times = self.timings.get(name) + 
if not my_times: + my_times = [] + self.timings[name] = my_times + my_times.extend(other_times) + + + + +j = os.path.join + +_create_count = 0 + +def next_name(prefix): + global _create_count + _create_count += 1 + return '_'.join((prefix, str(_create_count))) + +def create_tree(in_dir, levels, spread=5): + try: + os.mkdir(in_dir) + except: + pass + + for i in range(spread): + # files + fn = j(in_dir, next_name('file')) + f = open(fn, 'w') + f.write('This is %s\n' % fn) + f.close() + + # dirs + if (levels > 1): + dn = j(in_dir, next_name('dir')) + create_tree(dn, levels - 1, spread) + + +def svn(*args): + name = args[0] + + ### options comes from the global namespace; it should be passed + cmd = [options.svn] + list(args) + if options.verbose: + print 'svn cmd:', ' '.join(cmd) + + stdin = None + if stdin: + stdin_arg = subprocess.PIPE + else: + stdin_arg = None + + ### timings comes from the global namespace; it should be passed + timings.tic(name) + try: + p = subprocess.Popen(cmd, + stdin=stdin_arg, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=False) + stdout,stderr = p.communicate(input=stdin) + except OSError: + stdout = stderr = None + finally: + timings.toc() + + if options.verbose: + if (stdout): + print "STDOUT: [[[\n%s]]]" % ''.join(stdout) + if (stderr): + print "STDERR: [[[\n%s]]]" % ''.join(stderr) + + return stdout,stderr + + +def add(*args): + return svn('add', *args) + +def ci(*args): + return svn('commit', '-mm', *args) + +def up(*args): + return svn('update', *args) + +def st(*args): + return svn('status', *args) + +_chars = [chr(x) for x in range(ord('a'), ord('z') +1)] + +def randstr(len=8): + return ''.join( [random.choice(_chars) for i in range(len)] ) + +def _copy(path): + dest = next_name(path + '_copied') + svn('copy', path, dest) + +def _move(path): + dest = path + '_moved' + svn('move', path, dest) + +def _propmod(path): + so, se = svn('proplist', path) + propnames = [line.strip() for line in so.strip().split('\n')[1:]] + + 
# modify? + if len(propnames): + svn('ps', propnames[len(propnames) / 2], randstr(), path) + + # del? + if len(propnames) > 1: + svn('propdel', propnames[len(propnames) / 2], path) + + +def _propadd(path): + # set a new one. + svn('propset', randstr(), randstr(), path) + + +def _mod(path): + if os.path.isdir(path): + return _propmod(path) + + f = open(path, 'a') + f.write('\n%s\n' % randstr()) + f.close() + +def _add(path): + if os.path.isfile(path): + return _mod(path) + + if random.choice((True, False)): + # create a dir + svn('mkdir', j(path, next_name('new_dir'))) + else: + # create a file + new_path = j(path, next_name('new_file')) + f = open(new_path, 'w') + f.write(randstr()) + f.close() + svn('add', new_path) + +def _del(path): + svn('delete', path) + +_mod_funcs = (_mod, _add, _propmod, _propadd, )#_copy,) # _move, _del) + +def modify_tree(in_dir, fraction): + child_names = os.listdir(in_dir) + for child_name in child_names: + if child_name[0] == '.': + continue + if random.random() < fraction: + path = j(in_dir, child_name) + random.choice(_mod_funcs)(path) + + for child_name in child_names: + if child_name[0] == '.': continue + path = j(in_dir, child_name) + if os.path.isdir(path): + modify_tree(path, fraction) + +def propadd_tree(in_dir, fraction): + for child_name in os.listdir(in_dir): + if child_name[0] == '.': continue + path = j(in_dir, child_name) + if random.random() < fraction: + _propadd(path) + if os.path.isdir(path): + propadd_tree(path, fraction) + + +def rmtree_onerror(func, path, exc_info): + """Error handler for ``shutil.rmtree``. + + If the error is due to an access error (read only file) + it attempts to add write permission and then retries. + + If the error is for another reason it re-raises the error. + + Usage : ``shutil.rmtree(path, onerror=onerror)`` + """ + if not os.access(path, os.W_OK): + # Is the error an access error ? 
+ os.chmod(path, stat.S_IWUSR) + func(path) + else: + raise + + +def run(levels, spread, N): + for i in range(N): + base = tempfile.mkdtemp() + + # ensure identical modifications for every run + random.seed(0) + + try: + repos = j(base, 'repos') + repos = repos.replace('\\', '/') + wc = j(base, 'wc') + wc2 = j(base, 'wc2') + + if repos.startswith('/'): + file_url = 'file://%s' % repos + else: + file_url = 'file:///%s' % repos + + so, se = svn('--version') + if not so: + print "Can't find svn." + exit(1) + version = ', '.join([s.strip() for s in so.split('\n')[:2]]) + + print '\nRunning svn benchmark in', base + print 'dir levels: %s; new files and dirs per leaf: %s; run %d of %d' %( + levels, spread, i + 1, N) + + print version + started = datetime.datetime.now() + + try: + run_cmd(['svnadmin', 'create', repos]) + svn('checkout', file_url, wc) + + trunk = j(wc, 'trunk') + create_tree(trunk, levels, spread) + add(trunk) + st(wc) + ci(wc) + up(wc) + propadd_tree(trunk, 0.5) + ci(wc) + up(wc) + st(wc) + + trunk_url = file_url + '/trunk' + branch_url = file_url + '/branch' + + svn('copy', '-mm', trunk_url, branch_url) + st(wc) + + up(wc) + st(wc) + + svn('checkout', trunk_url, wc2) + st(wc2) + modify_tree(wc2, 0.5) + st(wc2) + ci(wc2) + up(wc2) + up(wc) + + svn('switch', branch_url, wc2) + modify_tree(wc2, 0.5) + st(wc2) + ci(wc2) + up(wc2) + up(wc) + + modify_tree(trunk, 0.5) + st(wc) + ci(wc) + up(wc2) + up(wc) + + svn('merge', '--accept=postpone', trunk_url, wc2) + st(wc2) + svn('resolve', '--accept=mine-conflict', wc2) + st(wc2) + svn('resolved', '-R', wc2) + st(wc2) + ci(wc2) + up(wc2) + up(wc) + + svn('merge', '--accept=postpone', '--reintegrate', branch_url, trunk) + st(wc) + svn('resolve', '--accept=mine-conflict', wc) + st(wc) + svn('resolved', '-R', wc) + st(wc) + ci(wc) + up(wc2) + up(wc) + + svn('delete', j(wc, 'branch')) + ci(wc) + up(wc2) + up(wc) + + + finally: + stopped = datetime.datetime.now() + print '\nDone with svn benchmark in', (stopped - 
started) + + ### timings comes from the global namespace; it should be passed + timings.submit_timing(TOTAL_RUN, + timedelta_to_seconds(stopped - started)) + + # rename ps to prop mod + if timings.timings.get('ps'): + has = timings.timings.get('prop mod') + if not has: + has = [] + timings.timings['prop mod'] = has + has.extend( timings.timings['ps'] ) + del timings.timings['ps'] + + print timings.summary() + finally: + shutil.rmtree(base, onerror=rmtree_onerror) + + +def read_from_file(file_path): + f = open(file_path, 'rb') + try: + instance = cPickle.load(f) + instance.name = os.path.basename(file_path) + finally: + f.close() + return instance + + +def write_to_file(file_path, instance): + f = open(file_path, 'wb') + cPickle.dump(instance, f) + f.close() + +def cmd_compare(path1, path2): + t1 = read_from_file(path1) + t2 = read_from_file(path2) + + print t1.summary() + print '---' + print t2.summary() + print '---' + print t2.compare_to(t1) + +def cmd_combine(dest, *paths): + total = Timings('--version'); + + for path in paths: + t = read_from_file(path) + total.add(t) + + print total.summary() + write_to_file(dest, total) + +def cmd_run(timings_path, levels, spread, N=1): + levels = int(levels) + spread = int(spread) + N = int(N) + + print '\n\nHi, going to run a Subversion benchmark series of %d runs...' % N + + ### UGH! should pass to run() + global timings + + if os.path.isfile(timings_path): + print 'Going to add results to existing file', timings_path + timings = read_from_file(timings_path) + else: + print 'Going to write results to new file', timings_path + timings = Timings('--version') + + run(levels, spread, N) + + write_to_file(timings_path, timings) + +def cmd_show(*paths): + for timings_path in paths: + timings = read_from_file(timings_path) + print '---\n%s' % timings_path + print timings.summary() + + +def usage(): + print __doc__ + +if __name__ == '__main__': + parser = optparse.OptionParser() + # -h is automatically added. 
+ ### should probably expand the help for that. and see about -? + parser.add_option('-v', '--verbose', action='store_true', dest='verbose', + help='Verbose operation') + parser.add_option('--svn', action='store', dest='svn', default='svn', + help='Specify Subversion executable to use') + + ### should start passing this, but for now: make it global + global options + + options, args = parser.parse_args() + + # there should be at least one arg left: the sub-command + if not args: + usage() + exit(1) + + cmd = args[0] + del args[0] + + if cmd == 'compare': + if len(args) != 2: + usage() + exit(1) + cmd_compare(*args) + + elif cmd == 'combine': + if len(args) < 3: + usage() + exit(1) + cmd_combine(*args) + + elif cmd == 'run': + if len(args) < 3 or len(args) > 4: + usage() + exit(1) + cmd_run(*args) + + elif cmd == 'show': + if not args: + usage() + exit(1) + cmd_show(*args) + + else: + usage() diff --git a/tools/dev/benchmarks/suite1/cronjob b/tools/dev/benchmarks/suite1/cronjob new file mode 100755 index 0000000..ca8b632 --- /dev/null +++ b/tools/dev/benchmarks/suite1/cronjob @@ -0,0 +1,78 @@ +#!/bin/bash +# This is the cronjob as run on our ASF box aka svn-qavm. +# It uses neels' mad bash script magic called 'pat' to update and +# build the latest trunk, invokes a benchmark and sends as mail. + +#EMAILS=your@email.addresses +EMAILS="" + +if [ "$USER" = "neels" ]; then + # I don't want to keep editing files after every update. ~Neels + EMAILS=dev@subversion.apache.org +fi + + +echo +echo "--------------------------------------------------------------------" +date +echo + +results="$(tempfile)" + + +# first update trunk to HEAD and rebuild. +# update/build is logged to the cronjob log (via stdout) + +cd /home/neels/pat/trunk +/home/neels/bin/pat update + +if [ "$?" -ne "0" ]; then + subject="Failed to update to HEAD." 
+ echo "$subject" > "$results" + echo "$subject" +else + + rev="$(svn info /home/neels/pat/trunk/src | grep Revision)" + if [ -z "$rev" ]; then + subject="Working copy problem." + echo "$subject" > "$results" + echo "$subject" + else + + /home/neels/bin/pat remake + if [ "$?" -ne "0" ]; then + subject="Failed to build $rev." + echo "$subject" > "$results" + echo "$subject" + else + + + # updating and building succeeded! + # run the benchmark: + + compiled="$(/home/neels/pat/trunk/prefix/bin/svn --version | grep "compiled")" + subject="$rev$compiled" + + cd /home/neels/svnbench/ + + # make more or less sure that runs don't leak into each other via + # I/O caching. + sync + + # basically, just run it. But also, I want to + # - append output to stdout, for cronjob logging. + # - send output as mail, but only this run's output less update&build + "$(which time)" -p ./run 2>&1 | tee "$results" + + fi + fi +fi + +if [ -n "$EMAILS" ]; then + cat "$results" | mail -s "[svnbench] $subject" $EMAILS +else + echo "No email addresses configured." +fi + +rm "$results" + diff --git a/tools/dev/benchmarks/suite1/crontab.entry b/tools/dev/benchmarks/suite1/crontab.entry new file mode 100644 index 0000000..23f7aa4 --- /dev/null +++ b/tools/dev/benchmarks/suite1/crontab.entry @@ -0,0 +1,5 @@ +# This invokes the benchmarking cronjob as run on our ASF box aka svn-qavm +# (ask danielsh about the VM). +# m h dom mon dow command +21 0 * * Mon /home/neels/svnbench/cronjob >>/home/neels/cronjob.log 2>&1 + diff --git a/tools/dev/benchmarks/suite1/run b/tools/dev/benchmarks/suite1/run new file mode 100755 index 0000000..ce02fb3 --- /dev/null +++ b/tools/dev/benchmarks/suite1/run @@ -0,0 +1,98 @@ +#!/usr/bin/env bash + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# Where are the svn binaries you want to benchmark? +if [ "$USER" = "neels" ]; then + SVN_1_6="$HOME/pat/stable/prefix/bin/svn" + SVN_trunk="$HOME/pat/trunk/prefix/bin/svn" +else + SVN_1_6="$HOME/src/svn-1.6.x/subversion/svn/svn" + SVN_trunk="$HOME/src/svn/subversion/svn/svn" +fi + +benchmark="$PWD/benchmark.py" + +parent="$(date +"%Y%m%d-%H%M%S")" +inital_workdir="$PWD" +mkdir "$parent" +cd "$parent" +pwd + + +batch(){ + levels="$1" + spread="$2" + N="$3" + pre="${levels}x${spread}_" + echo + echo "---------------------------------------------------------------------" + echo + echo "Results for dir levels: $levels spread: $spread" + "$benchmark" "--svn=${SVN_1_6}" run ${pre}1.6 $levels $spread $N >/dev/null + "$benchmark" "--svn=${SVN_trunk}" run ${pre}trunk $levels $spread $N > /dev/null + "$benchmark" compare ${pre}1.6 ${pre}trunk +} + +N=6 +al=5 +as=5 +bl=100 +bs=1 +cl=1 +cs=100 + +##DEBUG +#N=1 +#al=1 +#as=1 +#bl=2 +#bs=1 +#cl=1 +#cs=2 +##DEBUG + + +{ +started="$(date)" +echo "Started at $started" +echo "" + +batch $al $as $N +batch $bl $bs $N +batch $cl $cs $N + +echo "" +echo ========================================================================= +echo "" +"$benchmark" combine total_1.6 *x*_1.6 >/dev/null +"$benchmark" combine total_trunk *x*_trunk >/dev/null + +echo "comparing averaged totals..." 
+"$benchmark" compare total_1.6 total_trunk + +echo "" +echo "Had started at $started," +echo " done at $(date)" +pwd +} 2>&1 | tee results.txt + +cd "$inital_workdir" +if [ -f "$parent/total_trunk" ]; then + rm -rf "$parent" +fi diff --git a/tools/dev/benchmarks/suite1/run.bat b/tools/dev/benchmarks/suite1/run.bat new file mode 100644 index 0000000..b2c71e7 --- /dev/null +++ b/tools/dev/benchmarks/suite1/run.bat @@ -0,0 +1,101 @@ +:: Licensed to the Apache Software Foundation (ASF) under one
+:: or more contributor license agreements. See the NOTICE file
+:: distributed with this work for additional information
+:: regarding copyright ownership. The ASF licenses this file
+:: to you under the Apache License, Version 2.0 (the
+:: "License"); you may not use this file except in compliance
+:: with the License. You may obtain a copy of the License at
+::
+:: http://www.apache.org/licenses/LICENSE-2.0
+::
+:: Unless required by applicable law or agreed to in writing,
+:: software distributed under the License is distributed on an
+:: "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+:: KIND, either express or implied. See the License for the
+:: specific language governing permissions and limitations
+:: under the License.
+
+@ECHO OFF
+SETLOCAL EnableDelayedExpansion
+
+:: Where are the svn binaries you want to benchmark?
+SET SVN_1_6=C:\path\to\1.6-svn\bin\svn
+SET SVN_trunk=C:\path\to\trunk-svn\bin\svn
+
+SET benchmark=%CD%\benchmark.py
+
+SET my_datetime=%date%-%time%
+SET my_datetime=%my_datetime: =_%
+SET my_datetime=%my_datetime:/=_%
+SET my_datetime=%my_datetime::=%
+SET my_datetime=%my_datetime:.=%
+SET my_datetime=%my_datetime:,=%
+SET parent=%my_datetime%
+SET inital_workdir=%CD%
+mkdir "%parent%"
+cd "%parent%"
+ECHO %CD%
+
+GOTO main
+
+:batch
+ SET levels=%1
+ SET spread=%2
+ SET N=%3
+ SET pre=%levels%x%spread%_
+ ECHO.
+ ECHO.---------------------------------------------------------------------
+ ECHO.
+ ECHO.Results for dir levels: %levels% spread: %spread%
+ CALL "%benchmark%" --svn="%SVN_1_6%" run %pre%1.6 %levels% %spread% %N% > NUL
+ CALL "%benchmark%" --svn="%SVN_trunk%" run %pre%trunk %levels% %spread% %N% > NUL
+ CALL "%benchmark%" compare %pre%1.6 %pre%trunk
+ GOTO :EOF
+
+:main
+SET N=6
+SET al=5
+SET as=5
+SET bl=25
+SET bs=1
+SET cl=1
+SET cs=100
+
+::::DEBUG
+::SET N=1
+::SET al=1
+::SET as=1
+::SET bl=2
+::SET bs=1
+::SET cl=1
+::SET cs=2
+::::DEBUG
+
+SET started=%date%-%time%
+ECHO.Started at %started%
+ECHO.
+
+CALL :batch %al% %as% %N%
+CALL :batch %bl% %bs% %N%
+CALL :batch %cl% %cs% %N%
+
+ECHO.
+ECHO.=========================================================================
+ECHO.
+FOR %%F IN (*x*_1.6) DO SET all_1.6=!all_1.6! %%F
+CALL "%benchmark%" combine total_1.6 %all_1.6% > NUL
+FOR %%F IN (*x*_trunk) DO SET all_trunk=!all_trunk! %%F
+CALL "%benchmark%" combine total_trunk %all_trunk% > NUL
+
+ECHO.comparing averaged totals..."
+CALL "%benchmark%" compare total_1.6 total_trunk
+
+ECHO.
+ECHO.Had started at %started%,
+ECHO. done at %date%-%time%
+ECHO %CD%
+
+cd "%inital_workdir%"
+IF EXIST %parent%\total_trunk rmdir /S /Q "%parent%"
+
+ENDLOCAL
diff --git a/tools/dev/check-license.py b/tools/dev/check-license.py new file mode 100755 index 0000000..37041be --- /dev/null +++ b/tools/dev/check-license.py @@ -0,0 +1,142 @@ +#!/usr/bin/env python +# +# check if a file has the proper license in it +# +# USAGE: check-license.py [-C] file1 file2 ... fileN +# +# A 'file' may in fact be a directory, in which case it is recursively +# searched. +# +# If the license cannot be found, then the filename is printed to stdout. +# Typical usage: +# $ check-license.py . > bad-files +# +# -C switch is used to change licenses. +# Typical usage: +# $ check-license.py -C file1 file2 ... fileN +# + +import sys, os, re + +# Note: Right now, OLD_LICENSE and NEW_LICENSE are the same, because +# r878444 updated all the license blocks. In the future, if we update +# the license block again, change just NEW_LICENSE and use this script. + +OLD_LICENSE = '''\ + \* ==================================================================== + \* Licensed to the Subversion Corporation \(SVN Corp\.\) under one + \* or more contributor license agreements\. See the NOTICE file + \* distributed with this work for additional information + \* regarding copyright ownership\. The SVN Corp\. licenses this file + \* to you under the Apache License, Version 2\.0 \(the + \* "License"\); you may not use this file except in compliance + \* with the License\. You may obtain a copy of the License at + \* + \* http://www\.apache\.org/licenses/LICENSE-2\.0 + \* + \* Unless required by applicable law or agreed to in writing, + \* software distributed under the License is distributed on an + \* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + \* KIND, either express or implied\. See the License for the + \* specific language governing permissions and limitations + \* under the License\. 
+ \* ==================================================================== +''' + +SH_OLD_LICENSE = re.subn(r'(?m)^ \\\*', '#', OLD_LICENSE)[0] + +# Remember not to do regexp quoting for NEW_LICENSE. Only OLD_LICENSE +# is used for matching; NEW_LICENSE is inserted as-is. +NEW_LICENSE = '''\ + * ==================================================================== + * Licensed to the Subversion Corporation (SVN Corp.) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The SVN Corp. licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== +''' + +SH_NEW_LICENSE = re.subn(r'(?m)^ \*', '#', NEW_LICENSE)[0] + +re_OLD = re.compile(OLD_LICENSE) +re_SH_OLD = re.compile(SH_OLD_LICENSE) +re_EXCLUDE = re.compile( + r'automatically generated by SWIG' + + r'|Generated from configure\.in' + + r'|placed into the public domain' + ) + +c_comment_suffices = ('.c', '.java', '.h', '.cpp', '.hw', '.pas') + +# Yes, this is an empty tuple. No types that fit in this category uniformly +# have a copyright block. 
+# Possible types to add here: +# ('.bat', '.py', '.pl', '.in') +sh_comment_suffices = () + +def check_file(fname, old_re, new_lic): + s = open(fname).read() + if (not old_re.search(s) + and not re_EXCLUDE.search(s)): + print(fname) + +def change_license(fname, old_re, new_lic): + s = open(fname).read() + m = old_re.search(s) + if not m: + print('ERROR: missing old license: %s' % fname) + else: + s = s[:m.start()] + new_lic + s[m.end():] + open(fname, 'w').write(s) + print('Changed: %s' % fname) + +def visit(baton, dirname, dircontents): + file_func = baton + for i in dircontents: + # Don't recurse into certain directories + if i in ('.svn', '.libs'): + dircontents.remove(i) + continue + + extension = os.path.splitext(i)[1] + fullname = os.path.join(dirname, i) + + if os.path.isdir(fullname): + continue + + if extension in c_comment_suffices: + file_func(fullname, re_OLD, NEW_LICENSE) + elif extension in sh_comment_suffices: + file_func(fullname, re_SH_OLD, SH_NEW_LICENSE) + +def main(): + file_func = check_file + if sys.argv[1] == '-C': + print('Changing license text...') + del sys.argv[1] + file_func = change_license + + for f in sys.argv[1:]: + if os.path.isdir(f): + baton = file_func + for dirpath, dirs, files in os.walk(f): + visit(baton, dirpath, dirs + files) + else: + baton = file_func + dir, i = os.path.split(f) + visit(baton, dir, i) + +if __name__ == '__main__': + main() diff --git a/tools/dev/contribulyze.py b/tools/dev/contribulyze.py new file mode 100755 index 0000000..58123b0 --- /dev/null +++ b/tools/dev/contribulyze.py @@ -0,0 +1,762 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# See usage() for details, or run with --help option. +# +# .-------------------------------------------------. +# | "An ad hoc format deserves an ad hoc parser." | +# `-------------------------------------------------' +# +# Some Subversion project log messages include parseable data to help +# track who's contributing what. The exact syntax is described in +# http://subversion.apache.org/docs/community-guide/conventions.html#crediting, +# but here's an example, indented by three spaces, i.e., the "Patch by:" +# starts at the beginning of a line: +# +# Patch by: David Anderson <david.anderson@calixo.net> +# <justin@erenkrantz.com> +# me +# (I wrote the regression tests.) +# Found by: Phineas T. Phinder <phtph@ph1nderz.com> +# Suggested by: Snosbig Q. Ptermione <sqptermione@example.com> +# Review by: Justin Erenkrantz <justin@erenkrantz.com> +# rooneg +# (They caught an off-by-one error in the main loop.) +# +# This is a pathological example, but it shows all the things we might +# need to parse. We need to: +# +# - Detect the officially-approved "WORD by: " fields. +# - Grab every name (one per line) in each field. +# - Handle names in various formats, unifying where possible. +# - Expand "me" to the committer name for this revision. +# - Associate a parenthetical aside following a field with that field. +# +# NOTES: You might be wondering, why not take 'svn log --xml' input? +# Well, that would be the Right Thing to do, but in practice this was +# a lot easier to whip up for straight 'svn log' output. 
I'd have no +# objection to it being rewritten to take XML input. + +import os +import sys +import re +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt +try: + # Python >=3.0 + from urllib.parse import quote as urllib_parse_quote +except ImportError: + # Python <3.0 + from urllib import quote as urllib_parse_quote + + +# Warnings and errors start with these strings. They are typically +# followed by a colon and a space, as in "%s: " ==> "WARNING: ". +warning_prefix = 'WARNING' +error_prefix = 'ERROR' + +def complain(msg, fatal=False): + """Print MSG as a warning, or if FATAL is true, print it as an error + and exit.""" + prefix = 'WARNING: ' + if fatal: + prefix = 'ERROR: ' + sys.stderr.write(prefix + msg + '\n') + if fatal: + sys.exit(1) + + +def html_spam_guard(addr, entities_only=False): + """Return a spam-protected version of email ADDR that renders the + same in HTML as the original address. If ENTITIES_ONLY, use a less + thorough mangling scheme involving entities only, avoiding the use + of tags.""" + if entities_only: + def mangle(x): + return "&#%d;" % ord (x) + else: + def mangle(x): + return "<span>&#%d;</span>" % ord(x) + return "".join(map(mangle, addr)) + + +def escape_html(str): + """Return an HTML-escaped version of STR.""" + return str.replace('&', '&').replace('<', '<').replace('>', '>') + + +_spam_guard_in_html_block_re = re.compile(r'<([^&]*@[^&]*)>') +def _spam_guard_in_html_block_func(m): + return "<%s>" % html_spam_guard(m.group(1)) +def spam_guard_in_html_block(str): + """Take a block of HTML data, and run html_spam_guard() on parts of it.""" + return _spam_guard_in_html_block_re.subn(_spam_guard_in_html_block_func, + str)[0] + +def html_header(title, page_heading=None, highlight_targets=False): + """Write HTML file header. TITLE and PAGE_HEADING parameters are + expected to already by HTML-escaped if needed. 
If HIGHLIGHT_TARGETS +is true, then write out a style header that causes anchor targets to be +surrounded by a red border when they are jumped to.""" + if not page_heading: + page_heading = title + s = '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"\n' + s += ' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n' + s += '<html><head>\n' + s += '<meta http-equiv="Content-Type"' + s += ' content="text/html; charset=UTF-8" />\n' + if highlight_targets: + s += '<style type="text/css">\n' + s += ':target { border: 2px solid red; }\n' + s += '</style>\n' + s += '<title>%s</title>\n' % title + s += '</head>\n\n' + s += '<body style="text-color: black; background-color: white">\n\n' + s += '<h1 style="text-align: center">%s</h1>\n\n' % page_heading + s += '<hr />\n\n' + return s + + +def html_footer(): + return '\n</body>\n</html>\n' + + +class Contributor(object): + # Map contributor names to contributor instances, so that there + # exists exactly one instance associated with a given name. + # Fold names with email addresses. That is, if we see someone + # listed first with just an email address, but later with a real + # name and that same email address together, we create only one + # instance, and store it under both the email and the real name. + all_contributors = { } + + def __init__(self, username, real_name, email): + """Instantiate a contributor. Don't use this to generate a + Contributor for an external caller, though, use .get() instead.""" + self.real_name = real_name + self.username = username + self.email = email + self.is_committer = False # Assume not until hear otherwise. + self.is_full_committer = False # Assume not until hear otherwise. + # Map verbs (e.g., "Patch", "Suggested", "Review") to lists of + # LogMessage objects. For example, the log messages stored under + # "Patch" represent all the revisions for which this contributor + # contributed a patch. 
+ self.activities = { } + + def add_activity(self, field_name, log): + """Record that this contributor was active in FIELD_NAME in LOG.""" + logs = self.activities.get(field_name) + if not logs: + logs = [ ] + self.activities[field_name] = logs + if not log in logs: + logs.append(log) + + @staticmethod + def get(username, real_name, email): + """If this contributor is already registered, just return it; + otherwise, register it then return it. Hint: use parse() to + generate the arguments.""" + c = None + for key in username, real_name, email: + if key and key in Contributor.all_contributors: + c = Contributor.all_contributors[key] + break + # If we didn't get a Contributor, create one now. + if not c: + c = Contributor(username, real_name, email) + # If we know identifying information that the Contributor lacks, + # then give it to the Contributor now. + if username: + if not c.username: + c.username = username + Contributor.all_contributors[username] = c + if real_name: + if not c.real_name: + c.real_name = real_name + Contributor.all_contributors[real_name] = c + if email: + if not c.email: + c.email = email + Contributor.all_contributors[email] = c + # This Contributor has never been in better shape; return it. + return c + + def score(self): + """Return a contribution score for this contributor.""" + # Right now we count a patch as 2, anything else as 1. 
+ score = 0 + for activity in self.activities.keys(): + if activity == 'Patch': + score += len(self.activities[activity]) * 2 + else: + score += len(self.activities[activity]) + return score + + def score_str(self): + """Return a contribution score HTML string for this contributor.""" + patch_score = 0 + other_score = 0 + for activity in self.activities.keys(): + if activity == 'Patch': + patch_score += len(self.activities[activity]) + else: + other_score += len(self.activities[activity]) + if patch_score == 0: + patch_str = "" + elif patch_score == 1: + patch_str = "1 patch" + else: + patch_str = "%d patches" % patch_score + if other_score == 0: + other_str = "" + elif other_score == 1: + other_str = "1 non-patch" + else: + other_str = "%d non-patches" % other_score + if patch_str: + if other_str: + return ", ".join((patch_str, other_str)) + else: + return patch_str + else: + return other_str + + def __cmp__(self, other): + if self.is_full_committer and not other.is_full_committer: + return 1 + if other.is_full_committer and not self.is_full_committer: + return -1 + result = cmp(self.score(), other.score()) + if result == 0: + return cmp(self.big_name(), other.big_name()) + else: + return 0 - result + + @staticmethod + def parse(name): + """Parse NAME, which can be + + - A committer username, or + - A space-separated real name, or + - A space-separated real name followed by an email address in + angle brackets, or + - Just an email address in angle brackets. + + (The email address may have '@' disguised as '{_AT_}'.) 
+ + Return a tuple of (committer_username, real_name, email_address) + any of which can be None if not available in NAME.""" + username = None + real_name = None + email = None + name_components = name.split() + if len(name_components) == 1: + name = name_components[0] # Effectively, name = name.strip() + if name[0] == '<' and name[-1] == '>': + email = name[1:-1] + elif name.find('@') != -1 or name.find('{_AT_}') != -1: + email = name + else: + username = name + elif name_components[-1][0] == '<' and name_components[-1][-1] == '>': + real_name = ' '.join(name_components[0:-1]) + email = name_components[-1][1:-1] + else: + real_name = ' '.join(name_components) + + if email is not None: + # We unobfuscate here and work with the '@' internally, since + # we'll obfuscate it again (differently) before writing it out. + email = email.replace('{_AT_}', '@') + + return username, real_name, email + + def canonical_name(self): + """Return a canonical name for this contributor. The canonical + name may or may not be based on the contributor's actual email + address. + + The canonical name will not contain filename-unsafe characters. + + This method is guaranteed to return the same canonical name every + time only if no further contributions are recorded from this + contributor after the first call. This is because a contribution + may bring a new form of the contributor's name, one which affects + the algorithm used to construct canonical names.""" + retval = None + if self.username: + retval = self.username + elif self.email: + # Take some rudimentary steps to shorten the email address, to + # make it more manageable. If this is ever discovered to result + # in collisions, we can always just use to the full address. 
+ try: + at_posn = self.email.index('@') + first_dot_after_at = self.email.index('.', at_posn) + retval = self.email[0:first_dot_after_at] + except ValueError: + retval = self.email + elif self.real_name: + # Last resort: construct canonical name based on real name. + retval = ''.join(self.real_name.lower().split(' ')) + if retval is None: + complain('Unable to construct a canonical name for Contributor.', True) + return urllib_parse_quote(retval, safe="!#$&'()+,;<=>@[]^`{}~") + + def big_name(self, html=False, html_eo=False): + """Return as complete a name as possible for this contributor. + If HTML, then call html_spam_guard() on email addresses. + If HTML_EO, then do the same, but specifying entities_only mode.""" + html = html or html_eo + name_bits = [] + if self.real_name: + if html: + name_bits.append(escape_html(self.real_name)) + else: + name_bits.append(self.real_name) + if self.email: + if not self.real_name and not self.username: + name_bits.append(self.email) + elif html: + name_bits.append("<%s>" % html_spam_guard(self.email, html_eo)) + else: + name_bits.append("<%s>" % self.email) + if self.username: + if not self.real_name and not self.email: + name_bits.append(self.username) + else: + name_bits.append("(%s)" % self.username) + return " ".join(name_bits) + + def __str__(self): + s = 'CONTRIBUTOR: ' + s += self.big_name() + s += "\ncanonical name: '%s'" % self.canonical_name() + if len(self.activities) > 0: + s += '\n ' + for activity in self.activities.keys(): + val = self.activities[activity] + s += '[%s:' % activity + for log in val: + s += ' %s' % log.revision + s += ']' + return s + + def html_out(self, revision_url_pattern, filename): + """Create an HTML file named FILENAME, showing all the revisions in which + this contributor was active.""" + out = open(filename, 'w') + out.write(html_header(self.big_name(html_eo=True), + self.big_name(html=True), True)) + unique_logs = { } + + sorted_activities = sorted(self.activities.keys()) + + 
out.write('<div class="h2" id="activities" title="activities">\n\n') + out.write('<table border="1">\n') + out.write('<tr>\n') + for activity in sorted_activities: + out.write('<td>%s</td>\n\n' % activity) + out.write('</tr>\n') + out.write('<tr>\n') + for activity in sorted_activities: + out.write('<td>\n') + first_activity = True + for log in self.activities[activity]: + s = ',\n' + if first_activity: + s = '' + first_activity = False + out.write('%s<a href="#%s">%s</a>' % (s, log.revision, log.revision)) + unique_logs[log] = True + out.write('</td>\n') + out.write('</tr>\n') + out.write('</table>\n\n') + out.write('</div>\n\n') + + sorted_logs = sorted(unique_logs.keys()) + for log in sorted_logs: + out.write('<hr />\n') + out.write('<div class="h3" id="%s" title="%s">\n' % (log.revision, + log.revision)) + out.write('<pre>\n') + if revision_url_pattern: + revision_url = revision_url_pattern % log.revision[1:] + revision = '<a href="%s">%s</a>' \ + % (escape_html(revision_url), log.revision) + else: + revision = log.revision + out.write('<b>%s | %s | %s</b>\n\n' % (revision, + escape_html(log.committer), + escape_html(log.date))) + out.write(spam_guard_in_html_block(escape_html(log.message))) + out.write('</pre>\n') + out.write('</div>\n\n') + out.write('<hr />\n') + + out.write(html_footer()) + out.close() + + +class Field: + """One field in one log message.""" + def __init__(self, name, alias = None): + # The name of this field (e.g., "Patch", "Review", etc). + self.name = name + # An alias for the name of this field (e.g., "Reviewed"). + self.alias = alias + # A list of contributor objects, in the order in which they were + # encountered in the field. + self.contributors = [ ] + # Any parenthesized asides immediately following the field. The + # parentheses and trailing newline are left on. In theory, this + # supports concatenation of consecutive asides. 
In practice, the + # parser only detects the first one anyway, because additional + # ones are very uncommon and furthermore by that point one should + # probably be looking at the full log message. + self.addendum = '' + def add_contributor(self, contributor): + self.contributors.append(contributor) + def add_endum(self, addendum): + self.addendum += addendum + def __str__(self): + s = 'FIELD: %s (%d contributors)\n' % (self.name, len(self.contributors)) + for contributor in self.contributors: + s += str(contributor) + '\n' + s += self.addendum + return s + + +class LogMessage(object): + # Maps revision strings (e.g., "r12345") onto LogMessage instances, + # holding all the LogMessage instances ever created. + all_logs = { } + # Keep track of youngest rev. + max_revnum = 0 + def __init__(self, revision, committer, date): + """Instantiate a log message. All arguments are strings, + including REVISION, which should retain its leading 'r'.""" + self.revision = revision + self.committer = committer + self.date = date + self.message = '' + # Map field names (e.g., "Patch", "Review", "Suggested") onto + # Field objects. + self.fields = { } + if revision in LogMessage.all_logs: + complain("Revision '%s' seen more than once" % revision, True) + LogMessage.all_logs[revision] = self + rev_as_number = int(revision[1:]) + if rev_as_number > LogMessage.max_revnum: + LogMessage.max_revnum = rev_as_number + def add_field(self, field): + self.fields[field.name] = field + def accum(self, line): + """Accumulate one more line of raw message.""" + self.message += line + + def __cmp__(self, other): + """Compare two log messages by revision number, for sort(). + Return -1, 0 or 1 depending on whether a > b, a == b, or a < b. 
+ Note that this is reversed from normal sorting behavior, but it's + what we want for reverse chronological ordering of revisions.""" + a = int(self.revision[1:]) + b = int(other.revision[1:]) + if a > b: return -1 + if a < b: return 1 + else: return 0 + + def __str__(self): + s = '=' * 15 + header = ' LOG: %s | %s ' % (self.revision, self.committer) + s += header + s += '=' * 15 + s += '\n' + for field_name in self.fields.keys(): + s += str(self.fields[field_name]) + '\n' + s += '-' * 15 + s += '-' * len(header) + s += '-' * 15 + s += '\n' + return s + + + +### Code to parse the logs. ## + +log_separator = '-' * 72 + '\n' +log_header_re = re.compile\ + ('^(r[0-9]+) \| ([^|]+) \| ([^|]+) \| ([0-9]+)[^0-9]') +field_re = re.compile('^(Patch|Review(ed)?|Suggested|Found|Inspired) by:\s*\S.*$') +field_aliases = { 'Reviewed' : 'Review' } +parenthetical_aside_re = re.compile('^\s*\(.*\)\s*$') + +def graze(input): + just_saw_separator = False + + while True: + line = input.readline() + if line == '': break + if line == log_separator: + if just_saw_separator: + sys.stderr.write('Two separators in a row.\n') + sys.exit(1) + else: + just_saw_separator = True + num_lines = None + continue + else: + if just_saw_separator: + m = log_header_re.match(line) + if not m: + sys.stderr.write('Could not match log message header.\n') + sys.stderr.write('Line was:\n') + sys.stderr.write("'%s'\n" % line) + sys.exit(1) + else: + log = LogMessage(m.group(1), m.group(2), m.group(3)) + num_lines = int(m.group(4)) + just_saw_separator = False + saw_patch = False + line = input.readline() + # Handle 'svn log -v' by waiting for the blank line. + while line != '\n': + line = input.readline() + # Parse the log message. + field = None + while num_lines > 0: + line = input.readline() + log.accum(line) + m = field_re.match(line) + if m: + # We're on the first line of a field. Parse the field. 
+ while m: + if not field: + ident = m.group(1) + if ident in field_aliases: + field = Field(field_aliases[ident], ident) + else: + field = Field(ident) + # Each line begins either with "WORD by:", or with whitespace. + in_field_re = re.compile('^(' + + (field.alias or field.name) + + ' by:\s+|\s+)([^\s(].*)') + m = in_field_re.match(line) + if m is None: + sys.stderr.write("Error matching: %s\n" % (line)) + user, real, email = Contributor.parse(m.group(2)) + if user == 'me': + user = log.committer + c = Contributor.get(user, real, email) + c.add_activity(field.name, log) + if (field.name == 'Patch'): + saw_patch = True + field.add_contributor(c) + line = input.readline() + if line == log_separator: + # If the log message doesn't end with its own + # newline (that is, there's the newline added by the + # svn client, but no further newline), then just move + # on to the next log entry. + just_saw_separator = True + num_lines = 0 + break + log.accum(line) + num_lines -= 1 + m = in_field_re.match(line) + if not m: + m = field_re.match(line) + if not m: + aside_match = parenthetical_aside_re.match(line) + if aside_match: + field.add_endum(line) + log.add_field(field) + field = None + num_lines -= 1 + if not saw_patch and log.committer != '(no author)': + c = Contributor.get(log.committer, None, None) + c.add_activity('Patch', log) + continue + +index_introduction = ''' +<p>The following list of contributors and their contributions is meant +to help us keep track of whom to consider for commit access. 
The list +was generated from "svn log" output by <a +href="http://svn.apache.org/repos/asf/subversion/trunk/tools/dev/contribulyze.py" +>contribulyze.py</a>, which looks for log messages that use the <a +href="http://subversion.apache.org/docs/community-guide/conventions.html#crediting" +>special contribution format</a>.</p> + +<p><i>Please do not use this list as a generic guide to who has +contributed what to Subversion!</i> It omits existing <a +href="http://svn.apache.org/repos/asf/subversion/trunk/COMMITTERS" +>full committers</a>, for example, because they are irrelevant to our +search for new committers. Also, it merely counts changes, it does +not evaluate them. To truly understand what someone has contributed, +you have to read their changes in detail. This page can only assist +human judgement, not substitute for it.</p> + +''' + +def drop(revision_url_pattern): + # Output the data. + # + # The data structures are all linked up nicely to one another. You + # can get all the LogMessages, and each LogMessage contains all the + # Contributors involved with that commit; likewise, each Contributor + # points back to all the LogMessages it contributed to. + # + # However, the HTML output is pretty simple right now. It's not take + # full advantage of all that cross-linking. For each contributor, we + # just create a file listing all the revisions contributed to; and we + # build a master index of all contributors, each name being a link to + # that contributor's individual file. Much more is possible... but + # let's just get this up and running first. + + for key in LogMessage.all_logs.keys(): + # You could print out all log messages this way, if you wanted to. 
+ pass + # print LogMessage.all_logs[key] + + detail_subdir = "detail" + if not os.path.exists(detail_subdir): + os.mkdir(detail_subdir) + + index = open('index.html', 'w') + index.write(html_header('Contributors as of r%d' % LogMessage.max_revnum)) + index.write(index_introduction) + index.write('<ol>\n') + # The same contributor appears under multiple keys, so uniquify. + seen_contributors = { } + # Sorting alphabetically is acceptable, but even better would be to + # sort by number of contributions, so the most active people appear at + # the top -- that way we know whom to look at first for commit access + # proposals. + sorted_contributors = sorted(Contributor.all_contributors.values()) + for c in sorted_contributors: + if c not in seen_contributors: + if c.score() > 0: + if c.is_full_committer: + # Don't even bother to print out full committers. They are + # a distraction from the purposes for which we're here. + continue + else: + committerness = '' + if c.is_committer: + committerness = ' (partial committer)' + urlpath = "%s/%s.html" % (detail_subdir, c.canonical_name()) + fname = os.path.join(detail_subdir, "%s.html" % c.canonical_name()) + index.write('<li><p><a href="%s">%s</a> [%s]%s</p></li>\n' + % (urllib_parse_quote(urlpath), + c.big_name(html=True), + c.score_str(), committerness)) + c.html_out(revision_url_pattern, fname) + seen_contributors[c] = True + index.write('</ol>\n') + index.write(html_footer()) + index.close() + + +def process_committers(committers): + """Read from open file handle COMMITTERS, which should be in + the same format as the Subversion 'COMMITTERS' file. Create + Contributor objects based on the contents.""" + line = committers.readline() + while line != 'Blanket commit access:\n': + line = committers.readline() + in_full_committers = True + matcher = re.compile('(\S+)\s+([^\(\)]+)\s+(\([^()]+\)){0,1}') + line = committers.readline() + while line: + # Every @-sign we see after this point indicates a committer line... 
+ if line == 'Commit access for specific areas:\n': + in_full_committers = False + # ...except in the "dormant committers" area, which comes last anyway. + if line == 'Committers who have asked to be listed as dormant:\n': + in_full_committers = True + elif line.find('@') >= 0: + line = line.lstrip() + m = matcher.match(line) + user = m.group(1) + real_and_email = m.group(2).strip() + ignored, real, email = Contributor.parse(real_and_email) + c = Contributor.get(user, real, email) + c.is_committer = True + c.is_full_committer = in_full_committers + line = committers.readline() + + +def usage(): + print('USAGE: %s [-C COMMITTERS_FILE] < SVN_LOG_OR_LOG-V_OUTPUT' \ + % os.path.basename(sys.argv[0])) + print('') + print('Create HTML files in the current directory, rooted at index.html,') + print('in which you can browse to see who contributed what.') + print('') + print('The log input should use the contribution-tracking format defined') + print('in http://subversion.apache.org/docs/community-guide/conventions.html#crediting.') + print('') + print('Options:') + print('') + print(' -h, -H, -?, --help Print this usage message and exit') + print(' -C FILE Use FILE as the COMMITTERS file') + print(' -U URL Use URL as a Python interpolation pattern to') + print(' generate URLs to link revisions to some kind') + print(' of web-based viewer (e.g. ViewCVS). The') + print(' interpolation pattern should contain exactly') + print(' one format specifier, \'%s\', which will be') + print(' replaced with the revision number.') + print('') + + +def main(): + try: + opts, args = my_getopt(sys.argv[1:], 'C:U:hH?', [ 'help' ]) + except getopt.GetoptError, e: + complain(str(e) + '\n\n') + usage() + sys.exit(1) + + # Parse options. + revision_url_pattern = None + for opt, value in opts: + if opt in ('--help', '-h', '-H', '-?'): + usage() + sys.exit(0) + elif opt == '-C': + process_committers(open(value)) + elif opt == '-U': + revision_url_pattern = value + + # Gather the data. 
+ graze(sys.stdin) + + # Output the data. + drop(revision_url_pattern) + +if __name__ == '__main__': + main() diff --git a/tools/dev/datecheck.py b/tools/dev/datecheck.py new file mode 100755 index 0000000..c5b4caf --- /dev/null +++ b/tools/dev/datecheck.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +### This is a debugging script to test date-ordering in an SVN repository. + +'''Tell which revisions are out of order w.r.t. date in a repository. +Takes "svn log -q -r1:HEAD" output, prints results like this: + + $ svn log -q -r1:HEAD | ./datecheck.py + [...] + r42 OK 2003-06-02 22:20:31 -0500 + r43 OK 2003-06-02 22:20:31 -0500 + r44 OK 2003-06-02 23:29:14 -0500 + r45 OK 2003-06-02 23:29:14 -0500 + r46 OK 2003-06-02 23:33:13 -0500 + r47 OK 2003-06-10 15:19:47 -0500 + r48 NOT OK 2003-06-02 23:33:13 -0500 + r49 OK 2003-06-10 15:19:48 -0500 + r50 NOT OK 2003-06-02 23:33:13 -0500 + [...] 
+''' + +import sys +import time + +log_msg_separator = "-" * 72 + "\n" + +line = sys.stdin.readline() +last_date = 0 +while line: + + if not line: + break + + if line == log_msg_separator: + line = sys.stdin.readline() + continue + + # We're looking at a revision line like this: + # + # "r1 | svn | 2001-08-30 23:24:14 -0500 (Thu, 30 Aug 2001)" + # + # Parse out + + rev, ignored, date_full = line.split("|") + rev = rev.strip() + date_full = date_full.strip() + + # We only need the machine-readable portion of the date, so ignore + # the parenthesized part on the end, which is meant for humans. + + # Get the "2004-06-02 00:15:08" part of "2004-06-02 00:15:08 -0500". + date = date_full[0:19] + # Get the "-0500" part of "2004-06-02 00:15:08 -0500". + offset = date_full[20:25] + + # Parse the offset by hand and adjust the date accordingly, because + # http://docs.python.org/lib/module-time.html doesn't seem to offer + # a standard way to parse "-0500", "-0600", etc, suffixes. Arggh. + offset_sign = offset[0:1] + offset_hours = int(offset[1:3]) + offset_minutes = int(offset[3:5]) + + # Get a first draft of the date... + date_as_int = time.mktime(time.strptime(date, "%Y-%m-%d %H:%M:%S")) + # ... but it's still not correct, we must adjust for the offset. 
+ if offset_sign == "-": + date_as_int -= (offset_hours * 3600) + date_as_int -= (offset_minutes * 60) + elif offset_sign == "+": + date_as_int += (offset_hours * 3600) + date_as_int += (offset_minutes * 60) + else: + sys.stderr.write("Error: unknown offset sign '%s'.\n" % offset_sign) + sys.exit(1) + + ok_not_ok = " OK" + if last_date > date_as_int: + ok_not_ok = "NOT OK" + + print("%-8s %s %s %s" % (rev, ok_not_ok, date, offset)) + last_date = date_as_int + line = sys.stdin.readline() diff --git a/tools/dev/find-bad-style.py b/tools/dev/find-bad-style.py new file mode 100755 index 0000000..537cc3d --- /dev/null +++ b/tools/dev/find-bad-style.py @@ -0,0 +1,57 @@ +#!/usr/bin/python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# Find places in our code where whitespace is erroneously used before +# the open-paren on a function all. This is typically manifested like: +# +# return svn_some_function +# (param1, param2, param3) +# +# +# USAGE: find-bad-style.py FILE1 FILE2 ... 
+# + +import sys +import re + +re_call = re.compile(r'^\s*\(') +re_func = re.compile(r'.*[a-z0-9_]{1,}\s*$') + + +def scan_file(fname): + lines = open(fname).readlines() + + prev = None + line_num = 1 + + for line in lines: + if re_call.match(line): + if prev and re_func.match(prev): + print('%s:%d:%s' % (fname, line_num - 1, prev.rstrip())) + + prev = line + line_num += 1 + + +if __name__ == '__main__': + for fname in sys.argv[1:]: + scan_file(fname) diff --git a/tools/dev/find-unmoved-deprecated.sh b/tools/dev/find-unmoved-deprecated.sh new file mode 100755 index 0000000..c689853 --- /dev/null +++ b/tools/dev/find-unmoved-deprecated.sh @@ -0,0 +1,36 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# Find functions marked a SVN_DEPRECATED, but which have not been moved +# to their associated deprecated.c file. +# +# Run this from within the subversion/include/ directory. 
+# + +deprecated="`cat svn_*.h | fgrep -A 2 SVN_DEPRECATED | sed -n '/^svn_/s/(.*//p'`" +for func in $deprecated ; do + if grep -q "${func}(" ../*/deprecated.c ; then + /usr/bin/true + else + echo $func was not found + fi +done diff --git a/tools/dev/gcov.patch b/tools/dev/gcov.patch new file mode 100644 index 0000000..b90db97 --- /dev/null +++ b/tools/dev/gcov.patch @@ -0,0 +1,73 @@ +This patch can be used to generate a report showing what C source +lines are executed when the testsuite is run. gcc is required. After +applying this patch do: + +$ ./configure --enable-gcov +$ make check +$ make gcov + +Now look at gcov-report.html and the annotated source files it links +to. + +See also gcov(1), gcc(1). + +Index: Makefile.in +=================================================================== +--- Makefile.in (revision 32484) ++++ Makefile.in (working copy) +@@ -443,6 +443,36 @@ + @$(MAKE) check \ + BASE_URL=svn+ssh://localhost`pwd`/subversion/tests/cmdline + ++gcov: ++ @echo -n "Collecting source files ..." ; \ ++ FILES=`find subversion/ -path '*/tests/*' -prune -o \ ++ -name '*.c' -print`; \ ++ echo "Done." 
; \ ++ echo "Coverage report Subversion r`svnversion .`<br>" \ ++ echo "`date`<br>" \ ++ > gcov-report.html; \ ++ echo `uname -o -r -m` "<br>" \ ++ >> gcov-report.html; \ ++ (for file in $$FILES; do \ ++ echo $$file 1>&2 ; \ ++ base=`echo $$file | sed -e 's/.c$$//' `; \ ++ if [ -f "$$base.da" ] ; then \ ++ obj=$$base.o; \ ++ else \ ++ obj=`dirname $$base`/.libs/`basename $$base`.o; \ ++ fi; \ ++ stats=`gcov --preserve-paths \ ++ --object-directory=$$obj \ ++ $$file | sed -e "s/Creating.*//" | \ ++ sed -s "s|$$PWD/||"` \ ++ mangled=`echo $$base | tr '/' '#'`; \ ++ fixed=`echo $$base | tr '/' '_'`; \ ++ mv *$$mangled.c.gcov $$fixed.c.gcov; \ ++ echo -n $$stats | \ ++ sed -e "s/in file/in file <a href=\"$$fixed.c.gcov\">/"; \ ++ echo "</a><br>"; \ ++ done) | sort -g >> gcov-report.html ++ + bdbcheck: + @$(MAKE) check FS_TYPE=bdb + +Index: configure.ac +=================================================================== +--- configure.ac (revision 32484) ++++ configure.ac (working copy) +@@ -684,6 +684,14 @@ + # # do nothing + fi + ++AC_ARG_ENABLE(gcov, ++AC_HELP_STRING([--enable-gcov], ++ [Turn on coverage testing (GCC only).]), ++[ ++ if test "$enableval" = "yes" ; then ++ CFLAGS="$CFLAGS -fprofile-arcs -ftest-coverage" ++ fi ++]) + + AC_ARG_WITH(editor, + AS_HELP_STRING([--with-editor=PATH], diff --git a/tools/dev/gen-javahl-errors.py b/tools/dev/gen-javahl-errors.py new file mode 100755 index 0000000..d00ed08 --- /dev/null +++ b/tools/dev/gen-javahl-errors.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python +# +# gen-javahl-errors.py: Generate a Java class containing an enum for the +# C error codes +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== +# + +import sys, os + +try: + from svn import core +except ImportError, e: + sys.stderr.write("ERROR: Unable to import Subversion's Python bindings: '%s'\n" \ + "Hint: Set your PYTHONPATH environment variable, or adjust your " \ + "PYTHONSTARTUP\nfile to point to your Subversion install " \ + "location's svn-python directory.\n" % e) + sys.stderr.flush() + sys.exit(1) + +def get_errors(): + errs = {} + for key in vars(core): + if key.find('SVN_ERR_') == 0: + try: + val = int(vars(core)[key]) + errs[val] = key + except: + pass + return errs + +def gen_javahl_class(error_codes, output_filename): + jfile = open(output_filename, 'w') + jfile.write( +"""/** ErrorCodes.java - This file is autogenerated by gen-javahl-errors.py + */ + +package org.tigris.subversion.javahl; + +/** + * Provide mappings from error codes generated by the C runtime to meaningful + * Java values. For a better description of each error, please see + * svn_error_codes.h in the C source. 
+ */ +public class ErrorCodes +{ +""") + + keys = sorted(error_codes.keys()) + + for key in keys: + # Format the code name to be more Java-esque + code_name = error_codes[key][8:].replace('_', ' ').title().replace(' ', '') + code_name = code_name[0].lower() + code_name[1:] + + jfile.write(" public static final int %s = %d;\n" % (code_name, key)) + + jfile.write("}\n") + jfile.close() + +if __name__ == "__main__": + if len(sys.argv) > 1: + output_filename = sys.argv[1] + else: + output_filename = os.path.join('..', '..', 'subversion', 'bindings', + 'javahl', 'src', 'org', 'tigris', + 'subversion', 'javahl', 'ErrorCodes.java') + + gen_javahl_class(get_errors(), output_filename) diff --git a/tools/dev/gen-py-errors.py b/tools/dev/gen-py-errors.py new file mode 100755 index 0000000..6204589 --- /dev/null +++ b/tools/dev/gen-py-errors.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# +# gen-py-errors.py: Generate a python module which maps error names to numbers. +# (The purpose being easier writing of the python tests.) +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# ==================================================================== +# +# +# Meant to be run from the root of a Subversion working copy. If anybody +# wants to do some path magic to improve that use, feel free. + +import sys, os +sys.path.append(os.path.join('subversion', 'bindings', 'swig', + 'python', 'tests')) + + +import setup_path + +header = '''#!/usr/bin/env python +### This file automatically generated by tools/dev/gen-py-error.py, +### which see for more information +### +### It is versioned for convenience. + +''' + + +def write_output(errs, filename): + out = open(filename, 'w') + out.write(header) + + for name, val in errs: + out.write('%s = %d\n' % (name, val)) + + out.close() + + +def main(output_filename): + import core + + errs = [e for e in dir(core.svn.core) if e.startswith('SVN_ERR_')] + codes = [] + for e in errs: + codes.append((e[8:], getattr(core.svn.core, e))) + write_output(codes, output_filename) + + +if __name__ == '__main__': + main(os.path.join('subversion', 'tests', 'cmdline', 'svntest', 'err.py')) diff --git a/tools/dev/gen_junit_report.py b/tools/dev/gen_junit_report.py new file mode 100755 index 0000000..968c769 --- /dev/null +++ b/tools/dev/gen_junit_report.py @@ -0,0 +1,301 @@ +#!/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# $Id: gen_junit_report.py 1141953 2011-07-01 14:42:56Z rhuijben $ +""" +gen_junit_report.py -- The script is to generate the junit report for +Subversion tests. The script uses the log file, tests.log created by +"make check" process. It parses the log file and generate the junit +files for each test separately in the specified output directory. The +script can take --log-file and --output-dir arguments. +""" + +import sys +import os +import getopt + +def replace_from_map(data, encode): + """replace substrings in DATA with replacements defined in ENCODING""" + for pattern, replacement in encode.items(): + data = data.replace(pattern, replacement) + return data + +xml_encode_map = { + '&': '&', + '<': '<', + '>': '>', + '"': '"', + "'": ''', + } + +def xml_encode(data): + """encode the xml characters in the data""" + return replace_from_map(data, xml_encode_map) + +special_encode_map = { + ']]>': ']]]]><![CDATA[>', # CDATA terminator sequence + '\000': '␀', # U+2400 SYMBOL FOR NULL + '\001': '␁', # U+2401 SYMBOL FOR START OF HEADING + '\002': '␂', # U+2402 SYMBOL FOR START OF TEXT + '\003': '␃', # U+2403 SYMBOL FOR END OF TEXT + '\004': '␄', # U+2404 SYMBOL FOR END OF TRANSMISSION + '\005': '␅', # U+2405 SYMBOL FOR ENQUIRY + '\006': '␆', # U+2406 SYMBOL FOR ACKNOWLEDGE + '\007': '␇', # U+2407 SYMBOL FOR BELL + '\010': '␈', # U+2408 SYMBOL FOR BACKSPACE + '\011': '␉', # U+2409 SYMBOL FOR HORIZONTAL TABULATION + #'\012': '␊', # U+240A SYMBOL FOR LINE FEED + '\013': '␋', # U+240B SYMBOL FOR VERTICAL TABULATION + '\014': '␌', # U+240C SYMBOL FOR FORM FEED + #'\015': '␍', # U+240D SYMBOL FOR CARRIAGE RETURN + '\016': '␎', # U+240E SYMBOL FOR SHIFT OUT + '\017': '␏', # U+240F SYMBOL FOR SHIFT IN + '\020': '␐', # U+2410 SYMBOL FOR DATA LINK ESCAPE + '\021': '␑', # U+2411 SYMBOL FOR DEVICE CONTROL ONE + '\022': '␒', # U+2412 SYMBOL FOR DEVICE CONTROL TWO + 
'\023': '␓', # U+2413 SYMBOL FOR DEVICE CONTROL THREE + '\024': '␔', # U+2414 SYMBOL FOR DEVICE CONTROL FOUR + '\025': '␕', # U+2415 SYMBOL FOR NEGATIVE ACKNOWLEDGE + '\026': '␖', # U+2416 SYMBOL FOR SYNCHRONOUS IDLE + '\027': '␗', # U+2417 SYMBOL FOR END OF TRAMSNISSION BLOCK + '\030': '␘', # U+2418 SYMBOL FOR CANCEL + '\031': '␙', # U+2419 SYMBOL FOR END OF MEDIUM + '\032': '␚', # U+241A SYMBOL FOR SUBSTITUTE + '\033': '␛', # U+241B SYMBOL FOR ESCAPE + '\034': '␜', # U+241C SYMBOL FOR FILE SEPARATOR + '\035': '␝', # U+241D SYMBOL FOR GROUP SEPARATOR + '\036': '␞', # U+241E SYMBOL FOR RECORD SEPARATOR + '\037': '␟', # U+241F SYMBOL FOR UNIT SEPARATOR + } + +def escape_special_characters(data): + """remove special characters in test failure reasons""" + if data: + data = replace_from_map(data, special_encode_map) + return data + +def start_junit(): + """define the beginning of xml document""" + head = """<?xml version="1.0" encoding="UTF-8"?>""" + return head + +def start_testsuite(test_name): + """start testsuite. 
The value for the attributes are replaced later + when the junit file handling is concluded""" + sub_test_name = test_name.replace('.', '-') + start = """<testsuite time="ELAPSED_%s" tests="TOTAL_%s" name="%s" + failures="FAIL_%s" errors="FAIL_%s" skipped="SKIP_%s">""" % \ + (test_name, test_name, sub_test_name, test_name, test_name, test_name) + return start + +def junit_testcase_ok(test_name, casename): + """mark the test case as PASSED""" + casename = xml_encode(casename) + sub_test_name = test_name.replace('.', '-') + case = """<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s"/>""" % \ + (test_name, casename, sub_test_name) + return case + +def junit_testcase_fail(test_name, casename, reason=None): + """mark the test case as FAILED""" + casename = xml_encode(casename) + sub_test_name = test_name.replace('.', '-') + reason = escape_special_characters(reason) + case = """<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s"> + <failure type="Failed"><![CDATA[%s]]></failure> + </testcase>""" % (test_name, casename, sub_test_name, reason) + return case + +def junit_testcase_xfail(test_name, casename, reason=None): + """mark the test case as XFAILED""" + casename = xml_encode(casename) + sub_test_name = test_name.replace('.', '-') + reason = escape_special_characters(reason) + case = """<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s"> + <system-out><![CDATA[%s]]></system-out> + </testcase>""" % (test_name, casename, sub_test_name, reason) + return case + +def junit_testcase_skip(test_name, casename): + """mark the test case as SKIPPED""" + casename = xml_encode(casename) + sub_test_name = test_name.replace('.', '-') + case = """<testcase time="ELAPSED_CASE_%s" name="%s" classname="%s"> + <skipped message="Skipped"/> + </testcase>""" % (test_name, casename, sub_test_name) + return case + +def end_testsuite(): + """mark the end of testsuite""" + end = """</testsuite>""" + return end + +def update_stat(test_name, junit, count): + """update the 
test statistics in the junit string""" + junit_str = '\n'.join(junit) + t_count = count[test_name] + total = float(t_count['pass'] + t_count['fail'] + t_count['skip']) + elapsed = float(t_count['elapsed']) + case_time = 0 + if total > 0: # there are tests with no test cases + case_time = elapsed/total + + total_patt = 'TOTAL_%s' % test_name + fail_patt = 'FAIL_%s' % test_name + skip_patt = 'SKIP_%s' % test_name + elapsed_patt = 'ELAPSED_%s' % test_name + elapsed_case_patt = 'ELAPSED_CASE_%s' % test_name + + # replace the pattern in junit string with actual statistics + junit_str = junit_str.replace(total_patt, "%s" % total) + junit_str = junit_str.replace(fail_patt, "%s" % t_count['fail']) + junit_str = junit_str.replace(skip_patt, "%s" % t_count['skip']) + junit_str = junit_str.replace(elapsed_patt, "%.3f" % elapsed) + junit_str = junit_str.replace(elapsed_case_patt, "%.3f" % case_time) + return junit_str + +def main(): + """main method""" + try: + opts, args = getopt.getopt(sys.argv[1:], 'l:d:h', + ['log-file=', 'output-dir=', 'help']) + except getopt.GetoptError, err: + usage(err) + + log_file = None + output_dir = None + for opt, value in opts: + if (opt in ('-h', '--help')): + usage() + elif (opt in ('-l', '--log-file')): + log_file = value + elif (opt in ('-d', '--output-dir')): + output_dir = value + else: + usage('Unable to recognize option') + + if not log_file or not output_dir: + usage("The options --log-file and --output-dir are mandatory") + + # create junit output directory, if not exists + if not os.path.exists(output_dir): + print("Directory '%s' not exists, creating ..." 
% output_dir) + try: + os.makedirs(output_dir) + except OSError, err: + sys.stderr.write("ERROR: %s\n" % err) + sys.exit(1) + patterns = { + 'start' : 'START:', + 'end' : 'END:', + 'pass' : 'PASS:', + 'skip' : 'SKIP:', + 'fail' : 'FAIL:', + 'xfail' : 'XFAIL:', + 'elapsed' : 'ELAPSED:' + } + + junit = [] + junit.append(start_junit()) + reason = None + count = {} + fp = None + try: + fp = open(log_file, 'r') + except IOError, err: + sys.stderr.write("ERROR: %s\n" % err) + sys.exit(1) + + for line in fp.readlines(): + line = line.strip() + if line.startswith(patterns['start']): + reason = "" + test_name = line.split(' ')[1] + # replace '.' in test name with '_' to avoid confusing class + # name in test result displayed in the CI user interface + test_name.replace('.', '_') + count[test_name] = { + 'pass' : 0, + 'skip' : 0, + 'fail' : 0, + 'xfail' : 0, + 'elapsed' : 0, + 'total' : 0 + } + junit.append(start_testsuite(test_name)) + elif line.startswith(patterns['end']): + junit.append(end_testsuite()) + elif line.startswith(patterns['pass']): + reason = "" + casename = line.strip(patterns['pass']).strip() + junit.append(junit_testcase_ok(test_name, casename)) + count[test_name]['pass'] += 1 + elif line.startswith(patterns['skip']): + reason = "" + casename = line.strip(patterns['skip']).strip() + junit.append(junit_testcase_skip(test_name, casename)) + count[test_name]['skip'] += 1 + elif line.startswith(patterns['fail']): + casename = line.strip(patterns['fail']).strip() + junit.append(junit_testcase_fail(test_name, casename, reason)) + count[test_name]['fail'] += 1 + reason = "" + elif line.startswith(patterns['xfail']): + casename = line.strip(patterns['xfail']).strip() + junit.append(junit_testcase_xfail(test_name, casename, reason)) + count[test_name]['pass'] += 1 + reason = "" + elif line.startswith(patterns['elapsed']): + reason = "" + elapsed = line.split(' ')[2].strip() + (hrs, mins, secs) = elapsed.split(':') + secs_taken = int(hrs)*24 + int(mins)*60 + 
float(secs) + count[test_name]['elapsed'] = secs_taken + + junit_str = update_stat(test_name, junit, count) + test_junit_file = os.path.join(output_dir, + "%s.junit.xml" % test_name) + w_fp = open (test_junit_file, 'w') + w_fp.writelines(junit_str) + w_fp.close() + junit = [] + elif len(line): + reason = "%s\n%s" % (reason, line) + fp.close() + +def usage(errorMsg=None): + script_name = os.path.basename(sys.argv[0]) + sys.stdout.write("""USAGE: %s: [--help|h] --log-file|l --output-dir|d + +Options: + --help|-h Display help message + --log-file|l The log file to parse for generating junit xml files + --output-dir|d The directory to create the junit xml file for each + test +""" % script_name) + if errorMsg is not None: + sys.stderr.write("\nERROR: %s\n" % errorMsg) + sys.exit(1) + sys.exit(0) + +if __name__ == '__main__': + main() diff --git a/tools/dev/gnuify-changelog.pl b/tools/dev/gnuify-changelog.pl new file mode 100755 index 0000000..a4112c7 --- /dev/null +++ b/tools/dev/gnuify-changelog.pl @@ -0,0 +1,164 @@ +#!/usr/bin/perl -w +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# ==================================================================== +# a script to munge the output of 'svn log' into something approaching the +# style of a GNU ChangeLog. +# +# to use this, just fill in the 'hackers' hash with the usernames and +# name/emails of the people who work on your project, go to the top level +# of your working copy, and run: +# +# $ svn log | /path/to/gnuify-changelog.pl > ChangeLog + +require 5.0; +use strict; + +my %hackers = ( + "svn" => 'Collab.net Subversion Team', + "jimb" => 'Jim Blandy <jimb@redhat.com>', + "sussman" => 'Ben Collins-Sussman <sussman@collab.net>', + "kfogel" => 'Karl Fogel <kfogel@collab.net>', + "gstein" => 'Greg Stein <gstein@lyra.org>', + "brane" => 'Branko Cibej <brane@xbc.nu>', + "joe" => 'Joe Orton <joe@light.plus.com>', + "ghudson" => 'Greg Hudson <ghudson@mit.edu>', + "lefty" => 'Lee P. W. Burgess <lefty@red-bean.com>', + "fitz" => 'Brian Fitzpatrick <fitz@red-bean.com>', + "mab" => 'Matthew Braithwaite <matt@braithwaite.net>', + "daniel" => 'Daniel Stenberg <daniel@haxx.se>', + "mmurphy" => 'Mark Murphy <mmurphy@collab.net>', + "cmpilato" => 'C. Michael Pilato <cmpilato@collab.net>', + "kevin" => 'Kevin Pilch-Bisson <kevin@pilch-bisson.net>', + "philip" => 'Philip Martin <philip@codematters.co.uk>', + "jerenkrantz" => 'Justin Erenkrantz <jerenkrantz@apache.org>', + "rooneg" => 'Garrett Rooney <rooneg@electricjellyfish.net>', + "bcollins" => 'Ben Collins <bcollins@debian.org>', + "blair" => 'Blair Zajac <blair@orcaware.com>', + "striker" => 'Sander Striker <striker@apache.org>', + "XelaRellum" => 'Alexander Mueller <alex@littleblue.de>', + "yoshiki" => 'Yoshiki Hayashi <yoshiki@xemacs.org>', + "david" => 'David Summers <david@summersoft.fay.ar.us>', + "rassilon" => 'Bill Tutt <rassilon@lyra.org>', + "kbohling" => 'Kirby C. 
Bohling <kbohling@birddog.com>', + "breser" => 'Ben Reser <ben@reser.org>', + "bliss" => 'Tobias Ringstrom <tobias@ringstrom.mine.nu>', + "dionisos" => 'Erik Huelsmann <e.huelsmann@gmx.net>', + "josander" => 'Jostein Andersen <jostein@josander.net>', + "julianfoad" => 'Julian Foad <julianfoad@btopenworld.com>', + "clkao" => 'Chia-Liang Kao <clkao@clkao.org>', + "xsteve" => 'Stefan Reichör <reichoer@web.de>', + "mbk" => 'Mark Benedetto King <mbk@lowlatency.com>', + "patrick" => 'Patrick Mayweg <mayweg@qint.de>', + "jrepenning" => 'Jack Repenning <jrepenning@collab.net>', + "epg" => 'Eric Gillespie <epg@pretzelnet.org>', + "dwhedon" => 'David Kimdon <David_Kimdon@alumni.hmc.edu>', + "djh" => 'D.J. Heap <dj@shadyvale.net>', + "mprice" => 'Michael Price <mprice@atl.lmco.com>', + "jszakmeister" => 'John Szakmeister <john@szakmeister.net>', + "bdenny" => 'Brian Denny <brian@briandenny.net>', + "rey4" => 'Russell Yanofsky <rey4@columbia.edu>', + "maxb" => 'Max Bowsher <maxb@ukf.net>', + "dlr" => 'Daniel Rall <dlr@finemaltcoding.com>', + "jaa" => 'Jani Averbach <jaa@iki.fi>', + "pll" => 'Paul Lussier <p.lussier@comcast.net>', + "shlomif" => 'Shlomi Fish <shlomif@vipe.technion.ac.il>', + "jpieper" => 'Josh Pieper <jpieper@andrew.cmu.edu>', + "dimentiy" => 'Dmitriy O. Popkov <dimentiy@dimentiy.info>', + "kellin" => 'Shamim Islam <files@poetryunlimited.com>', + "sergeyli" => 'Sergey A. Lipnevich <sergey@optimaltec.com>', + "kraai" => 'Matt Kraai <kraai@alumni.cmu.edu>', + "ballbach" => 'Michael Ballbach <ballbach@rten.net>', + "kon" => 'Kalle Olavi Niemitalo <kon@iki.fi>', + "knacke" => 'Kai Nacke <kai.nacke@redstar.de>', + "gthompson" => 'Glenn A. 
Thompson <gthompson@cdr.net>', + "jespersm" => 'Jesper Steen Møller <jesper@selskabet.org>', + "naked" => 'Nuutti Kotivuori <naked@iki.fi>', + "niemeyer" => 'Gustavo Niemeyer <niemeyer@conectiva.com>', + "trow" => 'Jon Trowbridge <trow@ximian.com>', + "mmacek" => 'Marko Macek <Marko.Macek@gmx.net>', + "zbrown" => 'Zack Brown <zbrown@tumblerings.org>', + "morten" => 'Morten Ludvigsen <morten@2ps.dk>', + "fmatias" => 'Féliciano Matias <feliciano.matias@free.fr>', + "nsd" => 'Nick Duffek <nick@duffek.com>', +); + +my $parse_next_line = 0; +my $last_line_empty = 0; +my $last_rev = ""; + +while (my $entry = <>) { + + # Axe windows style line endings, since we should try to be consistent, and + # the repos has both styles in its log entries + $entry =~ s/\r\n$/\n/; + + # Remove trailing whitespace + $entry =~ s/\s+$/\n/; + + my $this_line_empty = $entry eq "\n"; + + # Avoid duplicate empty lines + next if $this_line_empty and $last_line_empty; + + # Don't fail on valid dash-only lines + if ($entry =~ /^-+$/ and length($entry) >= 72) { + + # We're at the start of a log entry, so we need to parse the next line + $parse_next_line = 1; + + # Check to see if the final line of the commit message was blank, + # if not insert one + print "\n" if $last_rev ne "" and !$last_line_empty; + + } elsif ($parse_next_line) { + + # Transform from svn style to GNU style + $parse_next_line = 0; + + my @parts = split (/ /, $entry); + $last_rev = $parts[0]; + my $hacker = $parts[2]; + my $tstamp = $parts[4]; + + # Use alias if we can't resolve to name, email + $hacker = $hackers{$hacker} if defined $hackers{$hacker}; + + printf "%s %s\n", $tstamp, $hacker; + + } elsif ($this_line_empty) { + + print "\n"; + + } else { + + print "\t$entry"; + + } + + $last_line_empty = $this_line_empty; +} + +# As a HERE doc so it also sets the final changelog's coding +print <<LOCAL; +;; Local Variables: +;; coding: utf-8 +;; End: +LOCAL + +1; diff --git a/tools/dev/graph-dav-servers.py 
b/tools/dev/graph-dav-servers.py new file mode 100755 index 0000000..c2174f2 --- /dev/null +++ b/tools/dev/graph-dav-servers.py @@ -0,0 +1,194 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# graph-svn-dav.py by Brian W. Fitzpatrick <fitz@red-bean.com> +# +# This was originally a quick hack to make a pretty picture of svn DAV servers. +# +# I've dropped it in Subversion's repository at the request of Karl Fogel. +# +# Be warned this this script has many dependencies that don't ship with Python. 
+ +import sys +import os +import fileinput +import datetime +import time +import datetime +from matplotlib import dates +import matplotlib +matplotlib.use('Agg') +from matplotlib import pylab +import Image + +OUTPUT_FILE = '../../www/images/svn-dav-securityspace-survey.png' +OUTPUT_IMAGE_WIDTH = 800 + +STATS = [ + ('1/1/2003', 70), + ('2/1/2003', 158), + ('3/1/2003', 222), + ('4/1/2003', 250), + ('5/1/2003', 308), + ('6/1/2003', 369), + ('7/1/2003', 448), + ('8/1/2003', 522), + ('9/1/2003', 665), + ('10/1/2003', 782), + ('11/1/2003', 969), + ('12/1/2003', 1009), + ('1/1/2004', 1162), + ('2/1/2004', 1307), + ('3/1/2004', 1424), + ('4/1/2004', 1792), + ('5/1/2004', 2113), + ('6/1/2004', 2502), + ('7/1/2004', 2941), + ('8/1/2004', 3863), + ('9/1/2004', 4174), + ('10/1/2004', 4187), + ('11/1/2004', 4783), + ('12/1/2004', 4995), + ('1/1/2005', 5565), + ('2/1/2005', 6505), + ('3/1/2005', 7897), + ('4/1/2005', 8751), + ('5/1/2005', 9793), + ('6/1/2005', 11534), + ('7/1/2005', 12808), + ('8/1/2005', 13545), + ('9/1/2005', 15233), + ('10/1/2005', 17588), + ('11/1/2005', 18893), + ('12/1/2005', 20278), + ('1/1/2006', 21084), + ('2/1/2006', 23861), + ('3/1/2006', 26540), + ('4/1/2006', 29396), + ('5/1/2006', 33001), + ('6/1/2006', 35082), + ('7/1/2006', 38939), + ('8/1/2006', 40672), + ('9/1/2006', 46525), + ('10/1/2006', 54247), + ('11/1/2006', 63145), + ('12/1/2006', 68988), + ('1/1/2007', 77027), + ('2/1/2007', 84813), + ('3/1/2007', 95679), + ('4/1/2007', 103852), + ('5/1/2007', 117267), + ('6/1/2007', 133665), + ('7/1/2007', 137575), + ('8/1/2007', 155426), + ('9/1/2007', 159055), + ('10/1/2007', 169939), + ('11/1/2007', 180831), + ('12/1/2007', 187093), + ('1/1/2008', 199432), + ('2/1/2008', 221547), + ('3/1/2008', 240794), + ('4/1/2008', 255520), + ('5/1/2008', 269478), + ('6/1/2008', 286614), + ('7/1/2008', 294579), + ('8/1/2008', 307923), + ('9/1/2008', 254757), + ('10/1/2008', 268081), + ('11/1/2008', 299071), + ('12/1/2008', 330884), + ('1/1/2009', 369719), + 
('2/1/2009', 378434), + ('3/1/2009', 390502), + ('4/1/2009', 408658), + ('5/1/2009', 407044), + ('6/1/2009', 406520), + ('7/1/2009', 334276), + ] + + +def get_date(raw_date): + month, day, year = map(int, raw_date.split('/')) + return datetime.datetime(year, month, day) + + +def get_ordinal_date(date): + # This is the only way I can get matplotlib to do the dates right. + return int(dates.date2num(get_date(date))) + + +def load_stats(): + dates = [get_ordinal_date(date) for date, value in STATS] + counts = [x[1] for x in STATS] + + return dates, counts + + +def draw_graph(dates, counts): + ########################################################### + # Drawing takes place here. + pylab.figure(1) + + ax = pylab.subplot(111) + pylab.plot_date(dates, counts, + color='r', linestyle='-', marker='o', markersize=3) + + ax.xaxis.set_major_formatter( pylab.DateFormatter('%Y') ) + ax.xaxis.set_major_locator( pylab.YearLocator() ) + ax.xaxis.set_minor_locator( pylab.MonthLocator() ) + ax.set_xlim( (dates[0] - 92, dates[len(dates) - 1] + 92) ) + + ax.yaxis.set_major_formatter( pylab.FormatStrFormatter('%d') ) + + pylab.ylabel('Total # of Public DAV Servers') + + lastdate = datetime.datetime.fromordinal(dates[len(dates) - 1]).strftime("%B %Y") + pylab.xlabel("Data as of " + lastdate) + pylab.title('Security Space Survey of\nPublic Subversion DAV Servers') + # End drawing + ########################################################### + png = open(OUTPUT_FILE, 'w') + pylab.savefig(png) + png.close() + os.rename(OUTPUT_FILE, OUTPUT_FILE + ".tmp.png") + try: + im = Image.open(OUTPUT_FILE + ".tmp.png", 'r') + (width, height) = im.size + print("Original size: %d x %d pixels" % (width, height)) + scale = float(OUTPUT_IMAGE_WIDTH) / float(width) + width = OUTPUT_IMAGE_WIDTH + height = int(float(height) * scale) + print("Final size: %d x %d pixels" % (width, height)) + im = im.resize((width, height), Image.ANTIALIAS) + im.save(OUTPUT_FILE, im.format) + os.unlink(OUTPUT_FILE + ".tmp.png") 
+ except Exception, e: + sys.stderr.write("Error attempting to resize the graphic: %s\n" % (str(e))) + os.rename(OUTPUT_FILE + ".tmp.png", OUTPUT_FILE) + raise + pylab.close() + + +if __name__ == '__main__': + dates, counts = load_stats() + draw_graph(dates, counts) + print("Don't forget to update ../../www/svn-dav-securityspace-survey.html!") diff --git a/tools/dev/iz/defect.dem b/tools/dev/iz/defect.dem new file mode 100644 index 0000000..7756b7c --- /dev/null +++ b/tools/dev/iz/defect.dem @@ -0,0 +1,6 @@ +set title "Subversion DEFECT Activity" +set boxwidth 0.5 +set data style lines +set key 10, 60 +plot "/tmp/points.found.DEFECT" title "found" with boxes, "/tmp/points.fixed.DEFECT" title "fixed" with boxes, "/tmp/points.avg.DEFECT" title "moving avg", "/tmp/points.open.DEFECT" title "open" +pause -1 "Hit return to continue" diff --git a/tools/dev/iz/ff2csv.command b/tools/dev/iz/ff2csv.command new file mode 100755 index 0000000..6826e34 --- /dev/null +++ b/tools/dev/iz/ff2csv.command @@ -0,0 +1,27 @@ +#!/bin/sh + +# MacOS X do-hickie to run ff2csv.py, with parameters, by double-click. + + +flags="hq" +Usage () { + args="$*" + if [[ -n "$args" ]] ; then + echo >&2 "$args" + fi + echo >&2 "Usage: $0 [-$flags] [querysetfile [csvfile]] +Run ff2csv.py, fetching and summarizing SVN bug status." +} +while getopts $flags flag; do + case "$flag" in + h|q) Usage; exit 0;; + esac +done + +# we want to run in the same folder as this script, not +# the users home folder +cd `dirname $0` + + +date=`date +%m%d` +./ff2csv.py ${1:-query-set-1-$date.tsv} ${2:-core-history-$date.csv} diff --git a/tools/dev/iz/ff2csv.py b/tools/dev/iz/ff2csv.py new file mode 100755 index 0000000..1820845 --- /dev/null +++ b/tools/dev/iz/ff2csv.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# -*- Python -*- +"""Transform find-fix.py output into Excellable csv.""" + +__date__ = "Time-stamp: <2003-10-16 13:26:27 jrepenning>"[13:30] +__author__ = "Jack Repenning <jrepenning@collab.net>" + +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt +import inspect +import os +import os.path +import pydoc +import re +import shutil +import string +import sys +import time + +# Long options and their usage strings; "=" means it takes an argument. +# To get a list suitable for getopt, just do +# +# [x[0] for x in long_opts] +# +# Make sure to sacrifice a lamb to Guido for each element of the list. 
+long_opts = [ + ["doc", """Optional, print pydocs."""], + ["help", """Optional, print usage (this text)."""], + ["verbose", """Optional, print more progress messages."""], + ] + +help = 0 +verbose = 0 +me = os.path.basename(sys.argv[0]) + +DATA_FILE = "http://subversion.tigris.org/iz-data/query-set-1.tsv" + +def main(): + """Run find-fix.py with arguments du jour for drawing pretty +manager-speak pictures.""" + + global verbose + + try: + opts, args = my_getopt(sys.argv[1:], "", [x[0] for x in long_opts]) + except getopt.GetoptError, e: + print("Error: %s" % e.msg) + shortusage() + print(me + " --help for options.") + sys.exit(1) + + for opt, arg in opts: + if opt == "--help": + usage() + sys.exit(0) + elif opt == "--verbose": + verbose = 1 + elif opt == "--doc": + pydoc.doc(pydoc.importfile(sys.argv[0])) + sys.exit(0) + + # do something fruitful with your life + if len(args) == 0: + args = ["query-set-1.tsv", "core-history.csv"] + print(("ff2csv %s %s" % args)) + + if len(args) != 2: + print("%s: Wrong number of args." 
% me) + shortusage() + sys.exit(1) + + if os.system("curl " + DATA_FILE + "> " + args[0]): + os.system("wget " + DATA_FILE) + + outfile = open(args[1], "w") + outfile.write("Date,found,fixed,inval,dup,other,remain\n") + + totalsre = re.compile("totals:.*found= +([0-9]+) +" + "fixed= +([0-9]+) +" + "inval= +([0-9]+) +" + "dup= +([0-9]+) +" + "other= +([0-9]+) +" + "remain= *([0-9]+)") + for year in ("2001", "2002", "2003", "2004"): + for month in ("01", "02", "03", "04", "05", "06", "07", "08", + "09", "10", "11", "12"): + for dayrange in (("01", "08"), + ("08", "15"), + ("15", "22"), + ("22", "28")): + if verbose: + print("searching %s-%s-%s to %s" % (year, + month, + dayrange[0], + dayrange[1])) + ffpy = os.popen("python ./find-fix.py --m=beta " + "%s %s-%s-%s %s-%s-%s" + % (args[0], + year, month, dayrange[0], + year, month, dayrange[1])) + if verbose: + print("ffpy: %s" % ffpy) + + line = ffpy.readline() + if verbose: + print("initial line is: %s" % line) + matches = totalsre.search(line) + if verbose: + print("initial match is: %s" % matches) + while line and not matches: + line = ffpy.readline() + if verbose: + print("%s: read line '%s'" % (me, line)) + matches = totalsre.search(line) + if verbose: + print("subsequent line is: %s" % line) + + ffpy.close() + + if verbose: + print("line is %s" % line) + + if matches.group(1) != "0" \ + or matches.group(2) != "0" \ + or matches.group(3) != "0" \ + or matches.group(4) != "0" \ + or matches.group(5) != "0": + + outfile.write("%s-%s-%s,%s,%s,%s,%s,%s,%s\n" + % (year, month, dayrange[1], + matches.group(1), + matches.group(2), + matches.group(3), + matches.group(4), + matches.group(5), + matches.group(6), + )) + elif matches.group(6) != "0": + # quit at first nothing-done week + # allows slop in loop controls + break + outfile.close() + + +def shortusage(): + "Print one-line usage summary." + print("%s - %s" % (me, pydoc.synopsis(sys.argv[0]))) + +def usage(): + "Print multi-line usage tome." 
+ shortusage() + print('''%s [opts] [queryfile [outfile]] +Option keywords may be abbreviated to any unique prefix. +Option order is not important. +Most options require "=xxx" arguments:''' % me) + for x in long_opts: + padding_limit = 18 + if x[0][-1:] == '=': + sys.stdout.write(" --%s " % x[0][:-1]) + padding_limit = 19 + else: + sys.stdout.write(" --%s " % x[0]) + print("%s %s" % ((' ' * (padding_limit - len(x[0]))), x[1])) + +if __name__ == "__main__": + main() diff --git a/tools/dev/iz/find-fix.py b/tools/dev/iz/find-fix.py new file mode 100755 index 0000000..513ccad --- /dev/null +++ b/tools/dev/iz/find-fix.py @@ -0,0 +1,454 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# -*- Python -*- +"""find-fix.py: produce a find/fix report for Subversion's IZ database + +For simple text summary: + find-fix.py query-set-1.tsv YYYY-MM-DD YYYY-MM-DD +Statistics will be printed for bugs found or fixed within the +time frame. + +For gnuplot presentation: + find-fix.py query-set-1.tsv outfile +Gnuplot provides its own way to select date ranges. 
+ +Either way, get a query-set-1.tsv from: + http://subversion.tigris.org/iz-data/query-set-1.tsv (updated nightly) +See http://subversion.tigris.org/iz-data/README for more info on that file. + +For more usage info on this script: + find-fix.py --help +""" + +_version = "$Revision:" + +# +# This can be run over the data file found at: +# http://subversion.tigris.org/iz-data/query-set-1.tsv +# + +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt +import operator +import os +import os.path +import pydoc +import re +try: + # Python >=2.6 + from functools import reduce +except ImportError: + # Python <2.6 + pass +import sys +import time + +me = os.path.basename(sys.argv[0]) + +# Long options and their usage strings; "=" means it takes an argument. +# To get a list suitable for getopt, just do +# +# [x[0] for x in long_opts] +# +# Make sure to sacrifice a lamb to Guido for each element of the list. +long_opts = [ + ["milestones=", """Optional, milestones NOT to report on + (one or more of Beta, 1.0, Post-1.0, cvs2svn-1.0, cvs2svn-opt, + inapplicable)"""], + ["update", """Optional, update the statistics first."""], + ["doc", """Optional, print pydocs."""], + ["help", """Optional, print usage (this text)."""], + ["verbose", """Optional, print more progress messages."""], + ] + +help = 0 +verbose = 0 +update = 0 + +DATA_FILE = "http://subversion.tigris.org/iz-data/query-set-1.tsv" +ONE_WEEK = 7 * 24 * 60 * 60 + +_types = [] +_milestone_filter = [] + +noncore_milestone_filter = [ + 'Post-1.0', + '1.1', + 'cvs2svn-1.0', + 'cvs2svn-opt', + 'inapplicable', + 'no milestone', + ] + +one_point_oh_milestone_filter = noncore_milestone_filter + [] + +beta_milestone_filter = one_point_oh_milestone_filter + ['1.0'] + + +_types = [ + 'DEFECT', + 'TASK', + 'FEATURE', + 'ENHANCEMENT', + 'PATCH', + ] + + +def main(): + """Report bug find/fix rate statistics for Subversion.""" + + global verbose + global update + global _types + global 
_milestone_filter + global noncore_milestone_filter + + try: + opts, args = my_getopt(sys.argv[1:], "", [x[0] for x in long_opts]) + except getopt.GetoptError, e: + sys.stderr.write("Error: %s\n" % e.msg) + shortusage() + sys.stderr.write("%s --help for options.\n" % me) + sys.exit(1) + + for opt, arg in opts: + if opt == "--help": + usage() + sys.exit(0) + elif opt == "--verbose": + verbose = 1 + elif opt == "--milestones": + for mstone in arg.split(","): + if mstone == "noncore": + _milestone_filter = noncore_milestone_filter + elif mstone == "beta": + _milestone_filter = beta_milestone_filter + elif mstone == "one": + _milestone_filter = one_point_oh_milestone_filter + elif mstone[0] == '-': + if mstone[1:] in _milestone_filter: + spot = _milestone_filter.index(mstone[1:]) + _milestone_filter = _milestone_filter[:spot] \ + + _milestone_filter[(spot+1):] + else: + _milestone_filter += [mstone] + + elif opt == "--update": + update = 1 + elif opt == "--doc": + pydoc.doc(pydoc.importfile(sys.argv[0])) + sys.exit(0) + + if len(_milestone_filter) == 0: + _milestone_filter = noncore_milestone_filter + + if verbose: + sys.stderr.write("%s: Filtering out milestones %s.\n" + % (me, ", ".join(_milestone_filter))) + + if len(args) == 2: + if verbose: + sys.stderr.write("%s: Generating gnuplot data.\n" % me) + if update: + if verbose: + sys.stderr.write("%s: Updating %s from %s.\n" % (me, args[0], DATA_FILE)) + if os.system("curl " + DATA_FILE + "> " + args[0]): + os.system("wget " + DATA_FILE) + plot(args[0], args[1]) + + elif len(args) == 3: + if verbose: + sys.stderr.write("%s: Generating summary from %s to %s.\n" + % (me, args[1], args[2])) + if update: + if verbose: + sys.stderr.write("%s: Updating %s from %s.\n" % (me, args[0], DATA_FILE)) + if os.system("curl " + DATA_FILE + "> " + args[0]): + os.system("wget " + DATA_FILE) + + try: + t_start = parse_time(args[1] + " 00:00:00") + except ValueError: + sys.stderr.write('%s: ERROR: bad time value: %s\n' % (me, args[1])) 
+ sys.exit(1) + + try: + t_end = parse_time(args[2] + " 00:00:00") + except ValueError: + sys.stderr.write('%s: ERROR: bad time value: %s\n' % (me, args[2])) + sys.exit(1) + + summary(args[0], t_start, t_end) + else: + usage() + + sys.exit(0) + + +def summary(datafile, d_start, d_end): + "Prints a summary of activity within a specified date range." + + data = load_data(datafile) + + # activity during the requested period + found, fixed, inval, dup, other = extract(data, 1, d_start, d_end) + + # activity from the beginning of time to the end of the request + # used to compute remaining + # XXX It would be faster to change extract to collect this in one + # pass. But we don't presently have enough data, nor use this + # enough, to justify that rework. + fromzerofound, fromzerofixed, fromzeroinval, fromzerodup, fromzeroother \ + = extract(data, 1, 0, d_end) + + alltypes_found = alltypes_fixed = alltypes_inval = alltypes_dup \ + = alltypes_other = alltypes_rem = 0 + for t in _types: + fromzerorem_t = fromzerofound[t]\ + - (fromzerofixed[t] + fromzeroinval[t] + fromzerodup[t] + + fromzeroother[t]) + print('%12s: found=%3d fixed=%3d inval=%3d dup=%3d ' \ + 'other=%3d remain=%3d' \ + % (t, found[t], fixed[t], inval[t], dup[t], other[t], fromzerorem_t)) + alltypes_found = alltypes_found + found[t] + alltypes_fixed = alltypes_fixed + fixed[t] + alltypes_inval = alltypes_inval + inval[t] + alltypes_dup = alltypes_dup + dup[t] + alltypes_other = alltypes_other + other[t] + alltypes_rem = alltypes_rem + fromzerorem_t + + print('-' * 77) + print('%12s: found=%3d fixed=%3d inval=%3d dup=%3d ' \ + 'other=%3d remain=%3d' \ + % ('totals', alltypes_found, alltypes_fixed, alltypes_inval, + alltypes_dup, alltypes_other, alltypes_rem)) + # print '%12s find/fix ratio: %g%%' \ + # % (" "*12, (alltypes_found*100.0/(alltypes_fixed + # + alltypes_inval + alltypes_dup + alltypes_other))) + + +def plot(datafile, outbase): + "Generates data files intended for use by gnuplot." 
+ + global _types + + data = load_data(datafile) + + t_min = 1L<<32 + for issue in data: + if issue.created < t_min: + t_min = issue.created + + # break the time up into a tuple, then back up to Sunday + t_start = time.localtime(t_min) + t_start = time.mktime((t_start[0], t_start[1], t_start[2] - t_start[6] - 1, + 0, 0, 0, 0, 0, 0)) + + plots = { } + for t in _types: + # for each issue type, we will record per-week stats, compute a moving + # average of the find/fix delta, and track the number of open issues + plots[t] = [ [ ], MovingAverage(), 0 ] + + week = 0 + for date in range(t_start, time.time(), ONE_WEEK): + ### this is quite inefficient, as we could just sort by date, but + ### I'm being lazy + found, fixed = extract(data, None, date, date + ONE_WEEK - 1) + + for t in _types: + per_week, avg, open_issues = plots[t] + delta = found[t] - fixed[t] + per_week.append((week, date, + found[t], -fixed[t], avg.add(delta), open_issues)) + plots[t][2] = open_issues + delta + + week = week + 1 + + for t in _types: + week_data = plots[t][0] + write_file(week_data, outbase, t, 'found', 2) + write_file(week_data, outbase, t, 'fixed', 3) + write_file(week_data, outbase, t, 'avg', 4) + write_file(week_data, outbase, t, 'open', 5) + +def write_file(week_data, base, type, tag, idx): + f = open('%s.%s.%s' % (base, tag, type), 'w') + for info in week_data: + f.write('%s %s # %s\n' % (info[0], info[idx], time.ctime(info[1]))) + + +class MovingAverage: + "Helper class to compute moving averages." + def __init__(self, n=4): + self.n = n + self.data = [ 0 ] * n + def add(self, value): + self.data.pop(0) + self.data.append(float(value) / self.n) + return self.avg() + def avg(self): + return reduce(operator.add, self.data) + + +def extract(data, details, d_start, d_end): + """Extract found/fixed counts for each issue type within the data range. 
+ + If DETAILS is false, then return two dictionaries: + + found, fixed + + ...each mapping issue types to the number of issues of that type + found or fixed respectively. + + If DETAILS is true, return five dictionaries: + + found, fixed, invalid, duplicate, other + + The first is still the found issues, but the other four break down + the resolution into 'FIXED', 'INVALID', 'DUPLICATE', and a grab-bag + category for 'WORKSFORME', 'LATER', 'REMIND', and 'WONTFIX'.""" + + global _types + global _milestone_filter + + found = { } + fixed = { } + invalid = { } + duplicate = { } + other = { } # "WORKSFORME", "LATER", "REMIND", and "WONTFIX" + + for t in _types: + found[t] = fixed[t] = invalid[t] = duplicate[t] = other[t] = 0 + + for issue in data: + # filter out disrespected milestones + if issue.milestone in _milestone_filter: + continue + + # record the found/fixed counts + if d_start <= issue.created <= d_end: + found[issue.type] = found[issue.type] + 1 + if d_start <= issue.resolved <= d_end: + if details: + if issue.resolution == "FIXED": + fixed[issue.type] = fixed[issue.type] + 1 + elif issue.resolution == "INVALID": + invalid[issue.type] = invalid[issue.type] + 1 + elif issue.resolution == "DUPLICATE": + duplicate[issue.type] = duplicate[issue.type] + 1 + else: + other[issue.type] = other[issue.type] + 1 + else: + fixed[issue.type] = fixed[issue.type] + 1 + + if details: + return found, fixed, invalid, duplicate, other + else: + return found, fixed + + +def load_data(datafile): + "Return a list of Issue objects for the specified data." + return list(map(Issue, open(datafile).readlines())) + + +class Issue: + "Represents a single issue from the exported IssueZilla data." 
+ + def __init__(self, line): + row = line.strip().split('\t') + + self.id = int(row[0]) + self.type = row[1] + self.reporter = row[2] + if row[3] == 'NULL': + self.assigned = None + else: + self.assigned = row[3] + self.milestone = row[4] + self.created = parse_time(row[5]) + self.resolution = row[7] + if not self.resolution: + # If the resolution is empty, then force the resolved date to None. + # When an issue is reopened, there will still be activity showing + # a "RESOLVED", thus we get a resolved date. But we simply want to + # ignore that date. + self.resolved = None + else: + self.resolved = parse_time(row[6]) + self.summary = row[8] + + +parse_time_re = re.compile('([0-9]{4})-([0-9]{2})-([0-9]{2}) ' + '([0-9]{2}):([0-9]{2}):([0-9]{2})') + +def parse_time(t): + "Convert an exported MySQL timestamp into seconds since the epoch." + + global parse_time_re + + if t == 'NULL': + return None + try: + matches = parse_time_re.match(t) + return time.mktime((int(matches.group(1)), + int(matches.group(2)), + int(matches.group(3)), + int(matches.group(4)), + int(matches.group(5)), + int(matches.group(6)), + 0, 0, -1)) + except ValueError: + sys.stderr.write('ERROR: bad time value: %s\n'% t) + sys.exit(1) + +def shortusage(): + print(pydoc.synopsis(sys.argv[0])) + print(""" +For simple text summary: + find-fix.py [options] query-set-1.tsv YYYY-MM-DD YYYY-MM-DD + +For gnuplot presentation: + find-fix.py [options] query-set-1.tsv outfile +""") + +def usage(): + shortusage() + for x in long_opts: + padding_limit = 18 + if x[0][-1:] == '=': + sys.stdout.write(" --%s " % x[0][:-1]) + padding_limit = 19 + else: + sys.stdout.write(" --%s " % x[0]) + print("%s %s" % ((' ' * (padding_limit - len(x[0]))), x[1])) + print(''' +Option keywords may be abbreviated to any unique prefix. +Most options require "=xxx" arguments. 
+Option order is not important.''') + +if __name__ == '__main__': + main() diff --git a/tools/dev/iz/run-queries.sh b/tools/dev/iz/run-queries.sh new file mode 100755 index 0000000..990caf5 --- /dev/null +++ b/tools/dev/iz/run-queries.sh @@ -0,0 +1,62 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +if test $# != 3; then + echo "USAGE: $0 DATABASE_USER DATABASE_PASSWORD MYSQL_DATABASE" + exit 1 +fi + +dbuser="$1" +dbpass="$2" +dbdb="$3" + +q1='select issues.issue_id, issue_type, user1.LOGIN_NAME "reporter", + user2.LOGIN_NAME "assigned_to", target_milestone, creation_ts, + max(issue_when) "resolved_ts", resolution, short_desc + from issues left join issues_activity + on issues.issue_id=issues_activity.issue_id and newvalue="RESOLVED", + profiles prof1, + profiles prof2 left join tigris.HELM_USER user1 + on user1.USER_ID=prof1.helm_user_id + left join tigris.HELM_USER user2 + on user2.USER_ID=prof2.helm_user_id + where prof1.userid=reporter and prof2.userid=assigned_to + group by issues.issue_id + order by issues.issue_id' + +q2='select issues.issue_id, issue_type, user1.LOGIN_NAME "reporter", + user2.LOGIN_NAME "assigned_to", target_milestone, creation_ts, + max(issue_when) "resolved_ts", resolution, short_desc, + priority + from issues left join issues_activity + on issues.issue_id=issues_activity.issue_id and newvalue="RESOLVED", + profiles prof1, + profiles prof2 left join tigris.HELM_USER user1 + on user1.USER_ID=prof1.helm_user_id + left join tigris.HELM_USER user2 + on user2.USER_ID=prof2.helm_user_id + where prof1.userid=reporter and prof2.userid=assigned_to + group by issues.issue_id + order by issues.issue_id' + +mysql --batch -e "use $dbdb; $q1" --user=$dbuser --password=$dbpass --silent > iz-data/query-set-1.tsv +mysql --batch -e "use $dbdb; $q2" --user=$dbuser --password=$dbpass --silent > iz-data/query-set-2.tsv diff --git a/tools/dev/lock-check.py b/tools/dev/lock-check.py new file mode 100755 index 0000000..710bf48 --- /dev/null +++ b/tools/dev/lock-check.py @@ -0,0 +1,114 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +### Repository lock checker. Gets an exclusive lock on the provided +### repository, then runs db_stat to see if the lock counts have been +### reset to 0. If not, prints the timestamp of the run and a message +### about accumulation. + +DB_STAT = 'db_stat' + + +import sys +import os +import os.path +import time +import fcntl +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt + +def usage_and_exit(retval): + if retval: + out = sys.stderr + else: + out = sys.stdout + out.write("""Usage: %s [OPTIONS] REPOS-PATH + +Options: + --help (-h) : Show this usage message + --non-blocking : Don't wait for a lock that can't be immediately obtained + +Obtain an exclusive lock (waiting for one unless --non-blocking is +passed) on REPOS-PATH, then check its lock usage counts. If there is +any accumulation present, report that accumulation to stdout. +""" % (os.path.basename(sys.argv[0]))) + sys.exit(retval) + +def main(): + now_time = time.asctime() + repos_path = None + nonblocking = 0 + + # Parse the options. + optlist, args = my_getopt(sys.argv[1:], "h", ['non-blocking', 'help']) + for opt, arg in optlist: + if opt == '--help' or opt == '-h': + usage_and_exit(0) + if opt == '--non-blocking': + nonblocking = 1 + else: + usage_and_exit(1) + + # We need at least a path to work with, here. 
+ argc = len(args) + if argc < 1 or argc > 1: + usage_and_exit(1) + repos_path = args[0] + + fd = open(os.path.join(repos_path, 'locks', 'db.lock'), 'a') + try: + # Get an exclusive lock on the repository lock file, but maybe + # don't wait for it. + try: + mode = fcntl.LOCK_EX + if nonblocking: + mode = mode | fcntl.LOCK_NB + fcntl.lockf(fd, mode) + except IOError: + sys.stderr.write("Error obtaining exclusive lock.\n") + sys.exit(1) + + # Grab the db_stat results. + lines = os.popen('%s -ch %s' % (DB_STAT, os.path.join(repos_path, 'db'))) + log_lines = [] + for line in lines: + pieces = line.split('\t') + if (pieces[1].find('current lock') != -1) and (int(pieces[0]) > 0): + log = '' + if not len(log_lines): + log = log + "[%s] Lock accumulation for '%s'\n" \ + % (now_time, repos_path) + log = log + ' ' * 27 + log = log + "%s\t%s" % (pieces[0], pieces[1]) + log_lines.append(log) + if len(log_lines): + sys.stdout.write(''.join(log_lines)) + finally: + # Unlock the lockfile + fcntl.lockf(fd, fcntl.LOCK_UN) + fd.close() + +if __name__ == "__main__": + main() diff --git a/tools/dev/log_revnum_change_asf.py b/tools/dev/log_revnum_change_asf.py new file mode 100755 index 0000000..30f5507 --- /dev/null +++ b/tools/dev/log_revnum_change_asf.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +Script to change old (svn.collab.net) revision numbers in subversion log +messages to new ASF subversion repository revision numbers. +""" + +USAGE = """python log_revnum_change_asf.py [OPTION]... URL + +Change the revision numbers relatively in the log messages of new ASF +subversion repository. +""" + +from csvn.repos import RemoteRepository +from csvn.auth import User +import csvn.core +from optparse import OptionParser +import sys +import re + +def repl_newrev(matchobj): + """ + Revision to be substituted is provided here. + """ + if matchobj.group(0): + old_rev = int(matchobj.group(0)[1:]) + if old_rev <= 45000: + return 'r'+str(old_rev + 840074) + else: + return 'r'+str(old_rev) + +def main(): + """ + Script execution starts here. + """ + + parser = OptionParser(usage=USAGE) + parser.add_option("-u", "", dest="username", + help="commit the changes as USERNAME") + parser.add_option("-p", "", dest="password", + help="commit the changes with PASSWORD") + parser.add_option("-r", "", dest="rev", + help="revision range") + + (options, args) = parser.parse_args() + + if len(args) != 1: + parser.print_help() + sys.exit(1) + + csvn.core.svn_cmdline_init("", csvn.core.stderr) + repos_url = args[0] + revs = options.rev + if revs and ":" in revs: + [start_rev, end_rev] = revs.split(":") + elif revs: + start_rev = revs + end_rev = revs + else: + start_rev = 1 + end_rev = "HEAD" + + session = RemoteRepository(repos_url, user=User(options.username, + options.password)) + + if end_rev == "HEAD": + end_rev = session.latest_revnum() + if start_rev == "HEAD": + start_rev = session.latest_revnum() + start_rev = int(start_rev) + end_rev = int(end_rev) + + for entry in session.log(start_rev, end_rev): + new_log = re.sub(r'(r\d+)', repl_newrev, entry.message) + session.revprop_set(propname='svn:log', + propval=new_log, + revnum=entry.revision, + force=True) + +if 
__name__ == "__main__": + main() diff --git a/tools/dev/min-includes.sh b/tools/dev/min-includes.sh new file mode 100755 index 0000000..53dfb84 --- /dev/null +++ b/tools/dev/min-includes.sh @@ -0,0 +1,80 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# Attempt to figure out the minimum set of includes for our header files. +# +# ### this is incomplete. it merely lists the header files in order from +# ### "no dependencies on other svn headers" to the larger header files +# ### which have dependencies. manually working through the headers in +# ### this order will minimize includes. +# +# Each header file is test-compiled to ensure that it has enough headers. +# Of course, this could be false-positive because another header that +# has been included has further included something to enable compilation +# of the header in question. More sophisticated testing (e.g. filtering +# includes out of the included header) would be necessary for detection. +# + +files="*.h private/*.h" +deps="deps.$$" + +INCLUDES="-I. -I.. 
-I/usr/include/apr-1 -I/usr/include/apache2" + +rm -f "$deps" +for f in $files ; do + sed -n "s%#include \"\(svn_[a-z0-9_]*\.h\)\".*%$f \1%p" $f | fgrep -v svn_private_config.h >> "$deps" +done + + +function process_file () +{ + echo "Processing $header" + + echo "#include \"$header\"" > "$deps".c + gcc -o /dev/null -S $INCLUDES "$deps".c + + ### monkey the includes and recompile to find the minimal set +} + +while test -s "$deps" ; do +#wc -l $deps + + for header in $files ; do + + if grep -q "^$header" "$deps" ; then + continue + fi + + process_file + + fgrep -v "$header" "$deps" > "$deps".new + mv "$deps".new "$deps" + + files="`echo $files | sed s%$header%%`" + break + done + +done + +for header in $files ; do + process_file +done diff --git a/tools/dev/mklog.py b/tools/dev/mklog.py new file mode 100755 index 0000000..a60712e --- /dev/null +++ b/tools/dev/mklog.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# Read a diff from stdin, and output a log message template to stdout. +# Hint: It helps if the diff was generated using 'svn diff -x -p' +# +# Note: Don't completely trust the generated log message. 
This script +# depends on the correct output of 'diff -x -p', which can sometimes get +# confused. + +import sys, re + +rm = re.compile('@@.*@@ (.*)\(.*$') + +def main(): + for line in sys.stdin: + if line[0:6] == 'Index:': + print('\n* %s' % line[7:-1]) + prev_funcname = '' + continue + match = rm.search(line[:-1]) + if match: + if prev_funcname == match.group(1): + continue + print(' (%s):' % match.group(1)) + prev_funcname = match.group(1) + + +if __name__ == '__main__': + main() diff --git a/tools/dev/mlpatch.py b/tools/dev/mlpatch.py new file mode 100755 index 0000000..d74d820 --- /dev/null +++ b/tools/dev/mlpatch.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +# mlpatch.py: Run with no arguments for usage + +import sys, os +import sgmllib +try: + # Python >=3.0 + from html.entities import entitydefs + from urllib.request import urlopen as urllib_request_urlopen +except ImportError: + # Python <3.0 + from htmlentitydefs import entitydefs + from urllib2 import urlopen as urllib_request_urlopen +import fileinput + +CHUNKSIZE = 8 * 1024 + +class MyParser(sgmllib.SGMLParser): + def __init__(self): + self.baseclass = sgmllib.SGMLParser + self.baseclass.__init__(self) + self.entitydefs = entitydefs + self.entitydefs["nbsp"] = " " + self.inbody = False + self.complete_line = False + self.discard_gathered() + + def discard_gathered(self): + self.gather_data = False + self.gathered_data = "" + + def noop(self): + pass + + def out(self, data): + sys.stdout.write(data) + + def handle_starttag(self, tag, method, attrs): + if not self.inbody: return + self.baseclass.handle_starttag(self, tag, method, attrs) + + def handle_endtag(self, tag, method): + if not self.inbody: return + self.baseclass.handle_endtag(self, tag, method) + + def handle_data(self, data): + if not self.inbody: return + data = data.replace('\n','') + if len(data) == 0: return + if self.gather_data: + self.gathered_data += data + else: + if self.complete_line: + if data[0] in ('+', '-', ' ', '#') \ + or data.startswith("Index:") \ + or data.startswith("@@ ") \ + or data.startswith("======"): + # Real new line + self.out('\n') + else: + # Presume that we are wrapped + self.out(' ') + self.complete_line = False + self.out(data) + + def handle_charref(self, ref): + if not self.inbody: return + self.baseclass.handle_charref(self, ref) + + def handle_entityref(self, ref): + if not self.inbody: return + self.baseclass.handle_entityref(self, ref) + + def handle_comment(self, comment): + if comment == ' body="start" ': + self.inbody = True + elif comment == ' body="end" ': + self.inbody = False + + def handle_decl(self, data): + if not self.inbody: return + 
print("DECL: " + data) + + def unknown_starttag(self, tag, attrs): + if not self.inbody: return + print("UNKTAG: %s %s" % (tag, attrs)) + + def unknown_endtag(self, tag): + if not self.inbody: return + print("UNKTAG: /%s" % (tag)) + + def do_br(self, attrs): + self.complete_line = True + + def do_p(self, attrs): + if self.complete_line: + self.out('\n') + self.out(' ') + self.complete_line = True + + def start_a(self, attrs): + self.gather_data = True + + def end_a(self): + self.out(self.gathered_data.replace('_at_', '@')) + self.discard_gathered() + + def close(self): + if self.complete_line: + self.out('\n') + self.baseclass.close(self) + + +def main(): + if len(sys.argv) == 1: + sys.stderr.write( + "usage: mlpatch.py dev|users year month msgno > foobar.patch\n" + + "example: mlpatch.py dev 2005 01 0001 > issue-XXXX.patch\n" + + """ + Very annoyingly, the http://svn.haxx.se/ subversion mailing list archives + mangle inline patches, and provide no raw message download facility + (other than for an entire month's email as an mbox). + + So, I wrote this script, to demangle them. 
It's not perfect, as it has to + guess about whitespace, but it does an acceptable job.\n""") + sys.exit(0) + elif len(sys.argv) != 5: + sys.stderr.write("error: mlpatch.py: Bad parameters - run with no " + + "parameters for usage\n") + sys.exit(1) + else: + list, year, month, msgno = sys.argv[1:] + url = "http://svn.haxx.se/" \ + + "%(list)s/archive-%(year)s-%(month)s/%(msgno)s.shtml" % locals() + print("MsgUrl: " + url) + msgfile = urllib_request_urlopen(url) + p = MyParser() + buffer = msgfile.read(CHUNKSIZE) + while buffer: + p.feed(buffer) + buffer = msgfile.read(CHUNKSIZE) + p.close() + msgfile.close() + +if __name__ == '__main__': + main() diff --git a/tools/dev/normalize-dump.py b/tools/dev/normalize-dump.py new file mode 100755 index 0000000..10cde4e --- /dev/null +++ b/tools/dev/normalize-dump.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
#
#

import sys
import re

# Matches a dumpfile header line of the form "Key: value".
header_re = re.compile(r'^([^:]*): ?(.*)$')

class NodePath:
    """A single node-path record from a dumpfile: its path plus the raw
    header dictionary that described it."""

    def __init__(self, path, headers):
        self.path = path
        self.headers = headers

    def dump(self):
        """Print this node's path and its headers, sorted by header name."""
        print((' ' * 3) + self.path)
        headers = sorted(self.headers.keys())
        for header in headers:
            print((' ' * 6) + header + ': ' + self.headers[header])


def dump_revision(rev, nodepaths):
    """Print revision REV followed by all its node-paths in sorted order,
    so two equivalent dumpfiles produce identical (diffable) output."""
    sys.stderr.write('* Normalizing revision ' + rev + '...')
    print('Revision ' + rev)
    paths = sorted(nodepaths.keys())
    for path in paths:
        nodepath = nodepaths[path]
        nodepath.dump()
    sys.stderr.write('done\n')



def parse_header_block(fp):
    """Read one block of "Key: value" headers from FP.

    Returns (headers, eof): HEADERS is a dict of the parsed lines; EOF
    is 1 if end-of-file was reached, 0 if the block ended at a blank
    line. Raises Exception on a line that is not a valid header."""
    headers = {}
    while True:
        line = fp.readline()
        if line == '':
            return headers, 1
        line = line.strip()
        if line == '':
            return headers, 0
        matches = header_re.match(line)
        if not matches:
            raise Exception('Malformed header block')
        headers[matches.group(1)] = matches.group(2)


def parse_file(fp):
    """Parse an entire dumpfile from FP, printing each revision's
    normalized summary as the next revision (or EOF) is reached."""
    nodepaths = {}
    current_rev = None

    while True:
        # Parse a block of headers
        headers, eof = parse_header_block(fp)

        # This is a revision header block
        if 'Revision-number' in headers:

            # If there was a previous revision, dump it
            if current_rev:
                dump_revision(current_rev, nodepaths)

            # Reset the data for this revision
            current_rev = headers['Revision-number']
            nodepaths = {}

            # Skip the contents
            prop_len = headers.get('Prop-content-length', 0)
            fp.read(int(prop_len))

        # This is a node header block
        elif 'Node-path' in headers:

            # Make a new NodePath object, and add it to the
            # dictionary thereof
            path = headers['Node-path']
            node = NodePath(path, headers)
            nodepaths[path] = node

            # Skip the content
            text_len = headers.get('Text-content-length', 0)
            prop_len = headers.get('Prop-content-length', 0)
            fp.read(int(text_len) + int(prop_len))

        # Not a revision, not a node -- if we've already seen at least
        # one revision block, we are in an errorful state.
        elif current_rev and len(headers.keys()):
            raise Exception('Header block from outta nowhere')

        if eof:
            if current_rev:
                dump_revision(current_rev, nodepaths)
            break

def usage():
    print('Usage: ' + sys.argv[0] + ' [DUMPFILE]')
    print('')
    print('Reads a Subversion dumpfile from DUMPFILE (or, if not provided,')
    print('from stdin) and normalizes the metadata contained therein,')
    print('printing summarized and sorted information. This is useful for')
    print('generating data about dumpfiles in a diffable fashion.')
    sys.exit(0)

def main():
    # NOTE(review): the dumpfile is opened in binary mode, presumably so
    # the Text-content-length/Prop-content-length byte counts line up
    # exactly; under Python 3 that makes parse_file compare bytes
    # against str -- confirm before running this under Python 3.
    if len(sys.argv) > 1:
        if sys.argv[1] == '--help':
            usage()
        fp = open(sys.argv[1], 'rb')
    else:
        fp = sys.stdin
    parse_file(fp)


if __name__ == '__main__':
    main()




# diff --git a/tools/dev/po-merge.py b/tools/dev/po-merge.py
# new file mode 100755
# index 0000000..be515bb
# --- /dev/null
# +++ b/tools/dev/po-merge.py
# @@ -0,0 +1,191 @@
#!/usr/bin/env python
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#

import os, re, sys

# Matches the start of a plural translation line: msgstr[N] "...
# (raw string, so \[ and \d are not treated as Python string escapes).
msgstr_re = re.compile(r'msgstr\[\d+\] "')

def parse_translation(f):
    """Read a single translation entry from the file F and return a
    tuple with the comments, msgid, msgid_plural and msgstr. The comments is
    returned as a list of lines which do not end in new-lines. The msgid is
    string. The msgid_plural is string or None. The msgstr is a list of
    strings. The msgid, msgid_plural and msgstr strings can contain embedded
    newlines"""
    line = f.readline()

    # Parse comments
    comments = []
    while True:
        if line.strip() == '' or line[:2] == '#~':
            # Blank separator line or obsolete (#~) entry: no entry here.
            return comments, None, None, None
        elif line[0] == '#':
            comments.append(line[:-1])
        else:
            break
        line = f.readline()

    # Parse msgid
    if line[:7] != 'msgid "' or line[-2] != '"':
        raise RuntimeError("parse error")
    msgid = line[6:-1]
    while True:
        line = f.readline()
        if line[0] != '"':
            break
        # Continuation line: keep the quotes, join with an embedded newline.
        msgid += '\n' + line[:-1]

    # Parse optional msgid_plural
    msgid_plural = None
    if line[:14] == 'msgid_plural "':
        if line[-2] != '"':
            raise RuntimeError("parse error")
        msgid_plural = line[13:-1]
        while True:
            line = f.readline()
            if line[0] != '"':
                break
            msgid_plural += '\n' + line[:-1]

    # Parse msgstr
    msgstr = []
    if not msgid_plural:
        # Singular entry: exactly one msgstr.
        if line[:8] != 'msgstr "' or line[-2] != '"':
            raise RuntimeError("parse error")
        msgstr.append(line[7:-1])
        while True:
            line = f.readline()
            if len(line) == 0 or line[0] != '"':
                break
            msgstr[0] += '\n' + line[:-1]
    else:
        # Plural entry: one msgstr[i] per plural form.
        if line[:7] != 'msgstr[' or line[-2] != '"':
            raise RuntimeError("parse error")
        i = 0
        while True:
            matched_msgstr = msgstr_re.match(line)
            if matched_msgstr:
                matched_msgstr_len = len(matched_msgstr.group(0))
                msgstr.append(line[matched_msgstr_len-1:-1])
            else:
                break
            while True:
                line = f.readline()
                if len(line) == 0 or line[0] != '"':
                    break
                msgstr[i] += '\n' + line[:-1]
            i += 1

    if line.strip() != '':
        raise RuntimeError("parse error")

    return comments, msgid, msgid_plural, msgstr

def split_comments(comments):
    """Split COMMENTS into flag comments and other comments. Flag
    comments are those that begin with '#,', e.g. '#,fuzzy'."""
    flags = []
    other = []
    for c in comments:
        if len(c) > 1 and c[1] == ',':
            flags.append(c)
        else:
            other.append(c)
    return flags, other

def main(argv):
    if len(argv) != 2:
        argv0 = os.path.basename(argv[0])
        sys.exit('Usage: %s <lang.po>\n'
                 '\n'
                 'This script will replace the translations and flags in lang.po with\n'
                 'the translations and flags in the source po file read from standard\n'
                 'input. Strings that are not found in the source file are left untouched.\n'
                 'A backup copy of lang.po is saved as lang.po.bak.\n'
                 '\n'
                 'Example:\n'
                 '    svn cat http://svn.apache.org/repos/asf/subversion/trunk/subversion/po/sv.po | \\\n'
                 '        %s sv.po' % (argv0, argv0))

    # Read the source po file into a hash
    source = {}
    while True:
        comments, msgid, msgid_plural, msgstr = parse_translation(sys.stdin)
        if not comments and msgid is None:
            break
        if msgid is not None:
            source[msgid] = msgstr, split_comments(comments)[0]

    # Make a backup of the output file, open the copy for reading
    # and the original for writing.
    os.rename(argv[1], argv[1] + '.bak')
    infile = open(argv[1] + '.bak')
    outfile = open(argv[1], 'w')

    # Loop through the original and replace stuff as we go
    first = 1
    string_count = 0
    update_count = 0
    untranslated = 0
    while True:
        comments, msgid, msgid_plural, msgstr = parse_translation(infile)
        if not comments and msgid is None:
            break
        if not first:
            outfile.write('\n')
        first = 0
        if msgid is None:
            outfile.write('\n'.join(comments) + '\n')
        else:
            string_count += 1
            # Do not update the header, and only update if the source
            # has a non-empty translation.
            # FIX: the old test compared the msgstr *list* against the
            # string '""', which was true for every entry found in the
            # source file -- even completely untranslated ones. Check
            # the individual msgstr strings instead.
            src = source.get(msgid)
            if (msgid != '""' and src is not None
                    and any(m != '""' for m in src[0])):
                other = split_comments(comments)[1]
                new_msgstr, new_flags = src
                new_comments = other + new_flags
                if new_msgstr != msgstr or new_comments != comments:
                    update_count += 1
                    msgstr = new_msgstr
                    comments = new_comments
            outfile.write('\n'.join(comments) + '\n')
            outfile.write('msgid ' + msgid + '\n')
            if not msgid_plural:
                outfile.write('msgstr ' + msgstr[0] + '\n')
            else:
                outfile.write('msgid_plural ' + msgid_plural + '\n')
                for n, form in enumerate(msgstr):
                    outfile.write('msgstr[%s] %s\n' % (n, form))
            for m in msgstr:
                if m == '""':
                    untranslated += 1

    # FIX: close both files instead of leaking the handles.
    infile.close()
    outfile.close()

    # We're done. Tell the user what we did.
    # FIX: guard against an empty .po file (string_count == 0), which
    # previously raised ZeroDivisionError.
    if string_count:
        pct = 100.0 * untranslated / string_count
    else:
        pct = 0.0
    print(('%d strings updated. '
           '%d of %d strings are still untranslated (%.0f%%).' %
           (update_count, untranslated, string_count, pct)))

if __name__ == '__main__':
    main(sys.argv)
# diff --git a/tools/dev/prebuild-cleanup.sh b/tools/dev/prebuild-cleanup.sh
# new file mode 100755
# index 0000000..2ef6b80
# --- /dev/null
# +++ b/tools/dev/prebuild-cleanup.sh
# @@ -0,0 +1,45 @@
#!/bin/sh
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#

### Purify a system, to simulate building Subversion on a "clean" box.
###
### You'll probably need to run this as `root', and may need to change
### some paths for your system.

# Clean out old apr, apr-util config scripts.
# FIX: use -f like every other section below, so the script neither
# errors nor prompts when the scripts are already absent.
rm -f /usr/local/bin/apr-config
rm -f /usr/local/bin/apu-config

# Clean out libs.
rm -f /usr/local/lib/APRVARS
rm -f /usr/local/lib/libapr*
rm -f /usr/local/lib/libexpat*
rm -f /usr/local/lib/libneon*
rm -f /usr/local/lib/libsvn*

# Clean out headers.
rm -f /usr/local/include/apr*
rm -f /usr/local/include/svn*
rm -f /usr/local/include/neon/*

### Not sure this would be useful:
# rm -f /usr/local/apache2/lib/*
# diff --git a/tools/dev/random-commits.py b/tools/dev/random-commits.py
# new file mode 100755
# index 0000000..a243990
# --- /dev/null
# +++ b/tools/dev/random-commits.py
# @@ -0,0 +1,50 @@
#!/usr/bin/env python
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
#
# USAGE: random-commits.py
#
# Using the FILELIST (see config below), a series of COUNT commits will be
# constructed, each changing up to MAXFILES files per commit. The commands
# will be sent to stdout (formatted as a shell script).
#
# The FILELIST can be constructed using the find-textfiles script.
#

import random

# Configuration: FILELIST names a file with one versioned path per line
# (see the find-textfiles script); COUNT commits are generated, each
# touching up to MAXFILES files.
FILELIST = 'textfiles'
COUNT = 1000        # this many commits
MAXFILES = 10       # up to 10 files at a time

# FIX: close the file-list handle instead of leaking it.
with open(FILELIST) as filelist_fp:
    files = filelist_fp.readlines()

print('#!/bin/sh')

for i in range(COUNT):
    n = random.randrange(1, MAXFILES+1)
    l = [ ]
    print("echo '--- begin commit #%d -----------------------------------'" % (i+1,))
    for j in range(n):
        # FIX: rstrip('\n') instead of [:-1], so a file list whose last
        # line lacks a trailing newline does not lose its final character.
        # NOTE(review): random.choice may pick the same file twice within
        # one commit; that matches the original behavior.
        fname = random.choice(files).rstrip('\n')
        print("echo 'part of change #%d' >> %s" % (i+1, fname))
        l.append(fname)
    print("svn commit -m 'commit #%d' %s" % (i+1, ' '.join(l)))
# diff --git a/tools/dev/scramble-tree.py b/tools/dev/scramble-tree.py
# new file mode 100755
# index 0000000..1400d5b
# --- /dev/null
# +++ b/tools/dev/scramble-tree.py
# @@ -0,0 +1,309 @@
#!/usr/bin/env python
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
#
# scramble-tree.py: (See scramble-tree.py --help.)
#
# Makes multiple random file changes to a directory tree, for testing.
#
# This script will add some new files, remove some existing files, add
# text to some existing files, and delete text from some existing
# files. It will also leave some files completely untouched.
#
# The exact set of changes made is always the same for identical trees,
# where "identical" means the names of files and directories are the
# same, and they are arranged in the same tree structure (the actual
# contents of files may differ). If two are not identical, the sets of
# changes scramble-tree.py will make may differ arbitrarily.
#
# Directories named .svn/ and CVS/ are ignored.
#
# Example scenario, starting with a pristine Subversion working copy:
#
#   $ ls
#   foo/
#   $ svn st foo
#   $ cp -r foo bar
#   $ svn st bar
#   $ scramble-tree.py foo
#   $ svn st foo
#   [... see lots of scary status output ...]
#   $ scramble-tree.py bar
#   [... see the exact same scary status output ...]
#   $ scramble-tree.py foo
#   [... see a new bunch of scary status output ...]
#   $

import os
import sys
import getopt
try:
    my_getopt = getopt.gnu_getopt
except AttributeError:
    my_getopt = getopt.getopt
import random
try:
    # Python >=2.5
    from hashlib import md5 as hashlib_md5
except ImportError:
    # Python <2.5
    from md5 import md5 as hashlib_md5
import base64


class VCActions:
    """Interface for the version-control operations the scrambler needs."""
    def __init__(self):
        pass
    def add_file(self, path):
        """Add an existing file to version control."""
        pass
    def remove_file(self, path):
        """Remove an existing file from version control, and delete it."""
        pass


class NoVCActions(VCActions):
    """Plain filesystem behavior: no version control at all."""
    def remove_file(self, path):
        os.unlink(path)


class CVSActions(VCActions):
    """Drive CVS via the 'cvs' command-line client."""
    def add_file(self, path):
        cwd = os.getcwd()
        try:
            # CVS must be run from within the file's own directory.
            dirname, basename = os.path.split(path)
            os.chdir(os.path.join(cwd, dirname))
            os.system('cvs -Q add -m "Adding file to repository" "%s"' % (basename))
        finally:
            os.chdir(cwd)
    def remove_file(self, path):
        cwd = os.getcwd()
        try:
            dirname, basename = os.path.split(path)
            os.chdir(os.path.join(cwd, dirname))
            os.system('cvs -Q rm -f "%s"' % (basename))
        finally:
            os.chdir(cwd)


class SVNActions(VCActions):
    """Drive Subversion via the 'svn' command-line client."""
    def add_file(self, path):
        os.system('svn add --quiet "%s"' % (path))
    def remove_file(self, path):
        os.remove(path)
        os.system('svn rm --quiet --force "%s"' % (path))


class hashDir:
    """Given a directory, creates a string containing all directories
    and files under that directory (sorted alphanumerically) and makes a
    base64-encoded md5 hash of the resulting string. Call
    hashDir.gen_seed() to generate a seed value for this tree."""

    def __init__(self, rootdir):
        self.allfiles = []
        for dirpath, dirs, files in os.walk(rootdir):
            self.walker_callback(len(rootdir), dirpath, dirs + files)

    def gen_seed(self):
        # Return a base64-encoded (kinda ... strip the '==\n' from the
        # end) MD5 hash of sorted tree listing.
        # FIX: md5() requires bytes and base64.encodestring was removed
        # in Python 3.9; encode the listing and prefer the encodebytes
        # spelling. The generated seed string itself is unchanged.
        self.allfiles.sort()
        listing = ''.join(self.allfiles).encode('utf-8')
        b64encode = getattr(base64, 'encodebytes', None) or base64.encodestring
        return b64encode(hashlib_md5(listing).digest())[:-3].decode('ascii')

    def walker_callback(self, baselen, dirname, fnames):
        # Record DIRNAME and every non-directory entry, with the root
        # prefix stripped so identical trees hash identically anywhere.
        if ((dirname == '.svn') or (dirname == 'CVS')):
            return
        self.allfiles.append(dirname[baselen:])
        for filename in fnames:
            path = os.path.join(dirname, filename)
            if not os.path.isdir(path):
                self.allfiles.append(path[baselen:])


class Scrambler:
    """Schedules and performs random additions and mutations in a tree,
    deterministically for a given seed."""

    def __init__(self, seed, vc_actions, dry_run, quiet):
        if not quiet:
            print('SEED: ' + seed)

        self.rand = random.Random(seed)
        self.vc_actions = vc_actions
        self.dry_run = dry_run
        self.quiet = quiet
        self.ops = []  ### ["add" | "munge", path]
        self.greeking = """
======================================================================
This is some text that was inserted into this file by the lovely and
talented scramble-tree.py script.
======================================================================
"""

    ### Helpers
    def shrink_list(self, list, remove_count):
        """Randomly delete REMOVE_COUNT items from LIST (in place) and
        return it; return [] when that would remove everything."""
        # NOTE(review): randrange(len(list) - 1) can never select the
        # current last element. Left untouched: changing it would change
        # which files a given seed scrambles.
        if len(list) <= remove_count:
            return []
        for i in range(remove_count):
            j = self.rand.randrange(len(list) - 1)
            del list[j]
        return list

    def _make_new_file(self, dir):
        """Create (and fill with greeking) a uniquely-named new file in
        DIR, returning its path."""
        i = 0
        path = None
        for i in range(99999):
            path = os.path.join(dir, "newfile.%05d.txt" % i)
            if not os.path.exists(path):
                open(path, 'w').write(self.greeking)
                return path
        raise Exception("Ran out of unique new filenames in directory '%s'" % dir)

    ### File Mungers
    def _mod_append_to_file(self, path):
        if not self.quiet:
            print('append_to_file: %s' % path)
        if self.dry_run:
            return
        fh = open(path, "a")
        fh.write(self.greeking)
        fh.close()

    def _mod_remove_from_file(self, path):
        if not self.quiet:
            print('remove_from_file: %s' % path)
        if self.dry_run:
            return
        # Drop up to 5 random lines from the file.
        lines = self.shrink_list(open(path, "r").readlines(), 5)
        open(path, "w").writelines(lines)

    def _mod_delete_file(self, path):
        if not self.quiet:
            print('delete_file: %s' % path)
        if self.dry_run:
            return
        self.vc_actions.remove_file(path)

    ### Public Interfaces
    def get_randomizer(self):
        return self.rand

    def schedule_munge(self, path):
        self.ops.append(tuple(["munge", path]))

    def schedule_addition(self, dir):
        self.ops.append(tuple(["add", dir]))

    def enact(self, limit):
        """Perform the scheduled operations, at most LIMIT of them.
        LIMIT of None (or a negative value) means no limit; 0 means do
        nothing at all."""
        num_ops = len(self.ops)
        if limit == 0:
            return
        # FIX: the old 'limit > 0' comparison raises TypeError under
        # Python 3 when limit is None (the default when --limit is not
        # given); test for None explicitly.
        elif limit is not None and 0 < limit <= num_ops:
            self.ops = self.shrink_list(self.ops, num_ops - limit)
        for op, path in self.ops:
            if op == "add":
                # NOTE(review): _make_new_file writes the file to disk
                # even under --dry-run; only the version-control add is
                # skipped. Preserved as-is -- confirm before relying on
                # --dry-run being truly read-only for additions.
                path = self._make_new_file(path)
                if not self.quiet:
                    print("add_file: %s" % path)
                if self.dry_run:
                    # FIX: this was 'return', which silently abandoned
                    # every remaining scheduled operation in dry-run
                    # mode; only this op's VC action should be skipped.
                    continue
                self.vc_actions.add_file(path)
            elif op == "munge":
                # Appends are three times as likely as removals, which
                # are three times as likely as outright deletion.
                file_mungers = [self._mod_append_to_file,
                                self._mod_append_to_file,
                                self._mod_append_to_file,
                                self._mod_remove_from_file,
                                self._mod_remove_from_file,
                                self._mod_remove_from_file,
                                self._mod_delete_file,
                                ]
                self.rand.choice(file_mungers)(path)


def usage(retcode=255):
    print('Usage: %s [OPTIONS] DIRECTORY' % (sys.argv[0]))
    print('')
    print('Options:')
    print('  --help, -h  : Show this usage message.')
    print('  --seed ARG  : Use seed ARG to scramble the tree.')
    print('  --use-svn   : Use Subversion (as "svn") to perform file additions')
    print('                and removals.')
    print('  --use-cvs   : Use CVS (as "cvs") to perform file additions')
    print('                and removals.')
    print('  --dry-run   : Don\'t actually change the disk.')
    print('  --limit N   : Limit the scrambling to a maximum of N operations.')
    print('  --quiet, -q : Run in stealth mode!')
    sys.exit(retcode)


def walker_callback(scrambler, dirname, fnames):
    """Schedule random additions/munges for one directory's entries,
    skipping anything under .svn/ or CVS/."""
    if ((dirname.find('.svn') != -1) or dirname.find('CVS') != -1):
        return
    rand = scrambler.get_randomizer()
    # 1-in-5 chance of adding a brand-new file to this directory.
    if rand.randrange(5) == 1:
        scrambler.schedule_addition(dirname)
    for filename in fnames:
        path = os.path.join(dirname, filename)
        # 1-in-3 chance of munging each existing file.
        if not os.path.isdir(path) and rand.randrange(3) == 1:
            scrambler.schedule_munge(path)


def main():
    seed = None
    vc_actions = NoVCActions()
    dry_run = 0
    quiet = 0
    limit = None

    # Mm... option parsing.
    optlist, args = my_getopt(sys.argv[1:], "hq",
                              ['seed=', 'use-svn', 'use-cvs',
                               'help', 'quiet', 'dry-run', 'limit='])
    for opt, arg in optlist:
        if opt == '--help' or opt == '-h':
            usage(0)
        if opt == '--seed':
            seed = arg
        if opt == '--use-svn':
            vc_actions = SVNActions()
        if opt == '--use-cvs':
            vc_actions = CVSActions()
        if opt == '--dry-run':
            dry_run = 1
        if opt == '--limit':
            limit = int(arg)
        if opt == '--quiet' or opt == '-q':
            quiet = 1

    # We need at least a path to work with, here.
    argc = len(args)
    if argc < 1 or argc > 1:
        usage()
    rootdir = args[0]

    # If a seed wasn't provided, calculate one.
    if seed is None:
        seed = hashDir(rootdir).gen_seed()
    scrambler = Scrambler(seed, vc_actions, dry_run, quiet)
    for dirpath, dirs, files in os.walk(rootdir):
        walker_callback(scrambler, dirpath, dirs + files)
    scrambler.enact(limit)

if __name__ == '__main__':
    main()
# diff --git a/tools/dev/stress.pl b/tools/dev/stress.pl
# new file mode 100755
# index 0000000..5b76be3
# --- /dev/null
# +++ b/tools/dev/stress.pl
# @@ -0,0 +1,498 @@
#!/usr/bin/perl -w
# ====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ====================================================================

# A script that allows some simple testing of Subversion, in
# particular concurrent read, write and read-write access by the 'svn'
# client. It can also create working copy trees containing a large
# number of files and directories. All repository access is via the
# 'svnadmin' and 'svn' commands.
#
# This script constructs a repository, and populates it with
# files. Then it loops making changes to a subset of the files and
# committing the tree. Thus when two, or more, instances are run in
# parallel there is concurrent read and write access.
Sometimes a
# commit will fail due to a commit conflict. This is expected, and is
# automatically resolved by updating the working copy.
#
# Each file starts off containing:
#   A0
#   0
#   A1
#   1
#   A2
#   .
#   .
#   A9
#   9
#
# The script runs with an ID in the range 0-9, and when it modifies a
# file it modifies the line that starts with its ID. Thus scripts with
# different IDs will make changes that can be merged automatically.
#
# The main loop is then:
#
#   step 1: modify a random selection of files
#
#   step 2: optional sleep or wait for RETURN keypress
#
#   step 3: update the working copy automatically merging out-of-date files
#
#   step 4: try to commit, if not successful go to step 3 otherwise go to step 1
#
# To allow break-out of potentially infinite loops, the script will
# terminate if it detects the presence of a "stop file", the path to
# which is specified with the -S option (default ./stop). This allows
# the script to be stopped without any danger of interrupting an 'svn'
# command, which experiment shows may require Berkeley db_recover to
# be used on the repository.
#
# Running the Script
# ==================
#
# Use three xterms all with shells on the same directory. In the
# first xterm run (note, this will remove anything called repostress
# in the current directory)
#
#   % stress.pl -c -s1
#
# When the message "Committed revision 1." scrolls past, use the second
# xterm to run
#
#   % stress.pl -s1
#
# Both xterms will modify, update and commit separate working copies to
# the same repository.
#
# Use the third xterm to touch a file 'stop' to cause the scripts to
# exit cleanly, i.e. without interrupting an svn command.
#
# To run a third, fourth, etc. instance of the script use -i
#
#   % stress.pl -s1 -i2
#   % stress.pl -s1 -i3
#
# Running several instances at once will cause a *lot* of disk
# activity.
I have run ten instances simultaneously on a Linux tmpfs
# (RAM based) filesystem -- watching ten xterms scroll irregularly
# can be quite hypnotic!

use strict;
use IPC::Open3;
use Getopt::Std;
use File::Find;
use File::Path;
use File::Spec::Functions;
use Cwd;

# The name of this script, for error messages.
my $stress = 'stress.pl';

# When testing BDB 4.4 and later with DB_RECOVER enabled, the criteria
# for a failed update and commit are a bit looser than otherwise.
my $dbrecover = undef;

# Repository check/create
# Arguments: repository path, whether to (re)create it, whether to pass
# --bdb-txn-nosync, and whether to use the fsfs backend instead of bdb.
# Returns the absolute path to the repository.
sub init_repo
  {
    my ( $repo, $create, $no_sync, $fsfs ) = @_;
    if ( $create )
      {
        # Start from scratch: any existing repository is removed first.
        rmtree([$repo]) if -e $repo;
        my $svnadmin_cmd = "svnadmin create $repo";
        $svnadmin_cmd .= " --fs-type bdb" if not $fsfs;
        $svnadmin_cmd .= " --bdb-txn-nosync" if $no_sync;
        system( $svnadmin_cmd) and die "$stress: $svnadmin_cmd: failed: $?\n";
        # Allow anonymous write access, so svnserve clients need no auth.
        open ( CONF, ">>$repo/conf/svnserve.conf")
          or die "$stress: open svnserve.conf: $!\n";
        print CONF "[general]\nanon-access = write\n";
        close CONF or die "$stress: close svnserve.conf: $!\n";
      }
    $repo = getcwd . "/$repo" if not file_name_is_absolute $repo;
    # The presence of __db.register indicates BDB automatic recovery.
    $dbrecover = 1 if -e "$repo/db/__db.register";
    print "$stress: BDB automatic database recovery enabled\n" if $dbrecover;
    return $repo;
  }

# Check-out a working copy
# Arguments: repository URL and extra svn client options. The working
# copy directory name embeds the process id so instances don't collide.
sub check_out
  {
    my ( $url, $options ) = @_;
    my $wc_dir = "wcstress.$$";
    mkdir "$wc_dir", 0755 or die "$stress: mkdir wcstress.$$: $!\n";
    my $svn_cmd = "svn co $url $wc_dir $options";
    system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";
    return $wc_dir;
  }

# Print status and update. The update is to do any required merges.
sub status_update
  {
    # Arguments: svn client options string, working-copy directory, a
    # flag to wait for a keypress before updating, a flag to skip the
    # status step, and a flag to run 'svn resolved' on any conflicts
    # the update reports.
    my ( $options, $wc_dir, $wait_for_key, $disable_status,
         $resolve_conflicts ) = @_;
    my $svn_cmd = "svn st -u $options $wc_dir";
    if ( not $disable_status ) {
      print "Status:\n";
      system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";
    }
    print "Press return to update/commit\n" if $wait_for_key;
    read STDIN, $wait_for_key, 1 if $wait_for_key;
    print "Updating:\n";
    $svn_cmd = "svn up --non-interactive $options $wc_dir";

    # Check for conflicts during the update. If any exist, we resolve them.
    my $pid = open3(\*UPDATE_WRITE, \*UPDATE_READ, \*UPDATE_ERR_READ,
                    $svn_cmd);
    my @conflicts = ();
    while ( <UPDATE_READ> )
      {
        print;
        s/\r*$//;  # [Windows compat] Remove trailing \r's
        # An update line starting "C " names a conflicted path.
        if ( /^C (.*)$/ )
          {
            push(@conflicts, ($1))
          }
      }

    # Print any errors.
    my $acceptable_error = 0;
    while ( <UPDATE_ERR_READ> )
      {
        print;
        if ($dbrecover)
          {
            s/\r*$//;  # [Windows compat] Remove trailing \r's
            # With BDB auto-recovery enabled, a PANIC/DB_RUNRECOVERY
            # error from the update is expected and tolerated.
            $acceptable_error = 1 if ( /^svn:[ ]
                                       (
                                        bdb:[ ]PANIC
                                        |
                                        DB_RUNRECOVERY
                                       )
                                       /x );
          }
      }

    # Close up the streams.
    close UPDATE_ERR_READ or die "$stress: close UPDATE_ERR_READ: $!\n";
    close UPDATE_WRITE or die "$stress: close UPDATE_WRITE: $!\n";
    close UPDATE_READ or die "$stress: close UPDATE_READ: $!\n";

    # Get commit subprocess exit status
    # ($? == 256 is exit code 1 from the svn client.)
    die "$stress: waitpid: $!\n" if $pid != waitpid $pid, 0;
    die "$stress: unexpected update fail: exit status: $?\n"
      unless $? == 0 or ( $? == 256 and $acceptable_error );

    if ($resolve_conflicts)
      {
        foreach my $conflict (@conflicts)
          {
            $svn_cmd = "svn resolved $conflict";
            system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n";
          }
      }
  }

# Print status, update and commit. The update is to do any required
# merges. Returns 0 if the commit succeeds and 1 if it fails due to a
# conflict.
+sub status_update_commit + { + my ( $options, $wc_dir, $wait_for_key, $disable_status, + $resolve_conflicts ) = @_; + status_update $options, $wc_dir, $wait_for_key, $disable_status, \ + $resolve_conflicts; + print "Committing:\n"; + # Use current time as log message + my $now_time = localtime; + # [Windows compat] Must use double quotes for the log message. + my $svn_cmd = "svn ci $options $wc_dir -m \"$now_time\""; + + # Need to handle the commit carefully. It could fail for all sorts + # of reasons, but errors that indicate a conflict are "acceptable" + # while other errors are not. Thus there is a need to check the + # return value and parse the error text. + my $pid = open3(\*COMMIT_WRITE, \*COMMIT_READ, \*COMMIT_ERR_READ, + $svn_cmd); + print while ( <COMMIT_READ> ); + + # Look for acceptable errors, ones we expect to occur due to conflicts + my $acceptable_error = 0; + while ( <COMMIT_ERR_READ> ) + { + print; + s/\r*$//; # [Windows compat] Remove trailing \r's + $acceptable_error = 1 if ( /^svn:[ ] + ( + .*out[ ]of[ ]date + | + Conflict[ ]at + | + Baseline[ ]incorrect + | + ) + /ix ) + or ( $dbrecover and ( /^svn:[ ] + ( + bdb:[ ]PANIC + | + DB_RUNRECOVERY + ) + /x )); + + + } + close COMMIT_ERR_READ or die "$stress: close COMMIT_ERR_READ: $!\n"; + close COMMIT_WRITE or die "$stress: close COMMIT_WRITE: $!\n"; + close COMMIT_READ or die "$stress: close COMMIT_READ: $!\n"; + + # Get commit subprocess exit status + die "$stress: waitpid: $!\n" if $pid != waitpid $pid, 0; + die "$stress: unexpected commit fail: exit status: $?\n" + if ( $? != 0 and $? != 256 ) or ( $? == 256 and $acceptable_error != 1 ); + + return $? == 256 ? 
1 : 0; + } + +# Get a list of all versioned files in the working copy +{ + my @get_list_of_files_helper_array; + sub GetListOfFilesHelper + { + $File::Find::prune = 1 if $File::Find::name =~ m[/.svn]; + return if $File::Find::prune or -d; + push @get_list_of_files_helper_array, $File::Find::name; + } + sub GetListOfFiles + { + my ( $wc_dir ) = @_; + @get_list_of_files_helper_array = (); + find( \&GetListOfFilesHelper, $wc_dir); + return @get_list_of_files_helper_array; + } +} + +# Populate a working copy +sub populate + { + my ( $dir, $dir_width, $file_width, $depth, $pad, $props ) = @_; + return if not $depth--; + + for my $nfile ( 1..$file_width ) + { + my $filename = "$dir/foo$nfile"; + open( FOO, ">$filename" ) or die "$stress: open $filename: $!\n"; + + for my $line ( 0..9 ) + { + print FOO "A$line\n$line\n" + or die "$stress: write to $filename: $!\n"; + map { print FOO $_ x 255, "\n"; } ("a", "b", "c", "d") + foreach (1..$pad); + } + print FOO "\$HeadURL: \$\n" + or die "$stress: write to $filename: $!\n" if $props; + close FOO or die "$stress: close $filename: $!\n"; + + my $svn_cmd = "svn add $filename"; + system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n"; + + if ( $props ) + { + $svn_cmd = "svn propset svn:eol-style native $filename"; + system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n"; + + $svn_cmd = "svn propset svn:keywords HeadURL $filename"; + system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n"; + } + } + + if ( $depth ) + { + for my $ndir ( 1..$dir_width ) + { + my $dirname = "$dir/bar$ndir"; + my $svn_cmd = "svn mkdir $dirname"; + system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n"; + + populate( "$dirname", $dir_width, $file_width, $depth, $pad, + $props ); + } + } + } + +# Modify a versioned file in the working copy +sub ModFile + { + my ( $filename, $mod_number, $id ) = @_; + + # Read file into memory replacing the line that starts with our ID + open( FOO, "<$filename" ) or die "$stress: open $filename: 
$!\n"; + my @lines = map { s[(^$id.*)][$1,$mod_number]; $_ } <FOO>; + close FOO or die "$stress: close $filename: $!\n"; + + # Write the memory back to the file + open( FOO, ">$filename" ) or die "$stress: open $filename: $!\n"; + print FOO or die "$stress: print $filename: $!\n" foreach @lines; + close FOO or die "$stress: close $filename: $!\n"; + } + +sub ParseCommandLine + { + my %cmd_opts; + my $usage = " +usage: stress.pl [-cdfhprW] [-i num] [-n num] [-s secs] [-x num] [-o options] + [-D num] [-F num] [-N num] [-P num] [-R path] [-S path] + [-U url] + +where + -c cause repository creation + -d don't make the status calls + -f use --fs-type fsfs during repository creation + -h show this help information (other options will be ignored) + -i the ID (valid IDs are 0 to 9, default is 0 if -c given, 1 otherwise) + -n the number of sets of changes to commit + -p add svn:eol-style and svn:keywords properties to the files + -r perform update-time conflict resolution + -s the sleep delay (-1 wait for key, 0 none) + -x the number of files to modify in each commit + -o options to pass for subversion client + -D the number of sub-directories per directory in the tree + -F the number of files per directory in the tree + -N the depth of the tree + -P the number of 10K blocks with which to pad the file + -R the path to the repository + -S the path to the file whose presence stops this script + -U the URL to the repository (file:///<-R path> by default) + -W use --bdb-txn-nosync during repository creation +"; + + # defaults + $cmd_opts{'D'} = 2; # number of subdirs per dir + $cmd_opts{'F'} = 2; # number of files per dir + $cmd_opts{'N'} = 2; # depth + $cmd_opts{'P'} = 0; # padding blocks + $cmd_opts{'R'} = "repostress"; # repository name + $cmd_opts{'S'} = "stop"; # path of file to stop the script + $cmd_opts{'U'} = "none"; # URL + $cmd_opts{'W'} = 0; # create with --bdb-txn-nosync + $cmd_opts{'c'} = 0; # create repository + $cmd_opts{'d'} = 0; # disable status + 
$cmd_opts{'f'} = 0; # create with --fs-type fsfs + $cmd_opts{'h'} = 0; # help + $cmd_opts{'i'} = 0; # ID + $cmd_opts{'n'} = 200; # sets of changes + $cmd_opts{'p'} = 0; # add file properties + $cmd_opts{'r'} = 0; # conflict resolution + $cmd_opts{'s'} = -1; # sleep interval + $cmd_opts{'x'} = 4; # files to modify + $cmd_opts{'o'} = ""; # no options passed + + getopts( 'cdfhi:n:prs:x:o:D:F:N:P:R:S:U:W', \%cmd_opts ) or die $usage; + + # print help info (and exit nicely) if requested + if ( $cmd_opts{'h'} ) + { + print( $usage ); + exit 0; + } + + # default ID if not set + $cmd_opts{'i'} = 1 - $cmd_opts{'c'} if not $cmd_opts{'i'}; + die $usage if $cmd_opts{'i'} !~ /^[0-9]$/; + + return %cmd_opts; + } + +############################################################################ +# Main + +# Why the fixed seed? I use this script for more than stress testing, +# I also use it to create test repositories. When creating a test +# repository, while I don't care exactly which files get modified, I +# find it useful for the repositories to be reproducible, i.e. to have +# the same files modified each time. When using this script for +# stress testing one could remove this fixed seed and Perl will +# automatically use a pseudo-random seed. However it doesn't much +# matter, the stress testing really depends on the real-time timing +# differences between mutiple instances of the script, rather than the +# randomness of the chosen files. +srand 123456789; + +my %cmd_opts = ParseCommandLine(); + +my $repo = init_repo( $cmd_opts{'R'}, $cmd_opts{'c'}, $cmd_opts{'W'}, + $cmd_opts{'f'} ); + +# [Windows compat] +# Replace backslashes in the path, and tweak the number of slashes +# in the scheme separator to make the URL always correct. +my $urlsep = ($repo =~ m/^\// ? 
'//' : '///'); +$repo =~ s/\\/\//g; + +# Make URL from path if URL not explicitly specified +$cmd_opts{'U'} = "file:$urlsep$repo" if $cmd_opts{'U'} eq "none"; + +my $wc_dir = check_out $cmd_opts{'U'}, $cmd_opts{'o'}; + +if ( $cmd_opts{'c'} ) + { + my $svn_cmd = "svn mkdir $wc_dir/trunk"; + system( $svn_cmd ) and die "$stress: $svn_cmd: failed: $?\n"; + populate( "$wc_dir/trunk", $cmd_opts{'D'}, $cmd_opts{'F'}, $cmd_opts{'N'}, + $cmd_opts{'P'}, $cmd_opts{'p'} ); + status_update_commit $cmd_opts{'o'}, $wc_dir, 0, 1 + and die "$stress: populate checkin failed\n"; + } + +my @wc_files = GetListOfFiles $wc_dir; +die "$stress: not enough files in repository\n" + if $#wc_files + 1 < $cmd_opts{'x'}; + +my $wait_for_key = $cmd_opts{'s'} < 0; + +my $stop_file = $cmd_opts{'S'}; + +for my $mod_number ( 1..$cmd_opts{'n'} ) + { + my @chosen; + for ( 1..$cmd_opts{'x'} ) + { + # Extract random file from list and modify it + my $mod_file = splice @wc_files, int rand $#wc_files, 1; + ModFile $mod_file, $mod_number, $cmd_opts{'i'}; + push @chosen, $mod_file; + } + # Reinstate list of files, the order doesn't matter + push @wc_files, @chosen; + + if ( $cmd_opts{'x'} > 0 ) { + # Loop committing until successful or the stop file is created + 1 while not -e $stop_file + and status_update_commit $cmd_opts{'o'}, $wc_dir, $wait_for_key, \ + $cmd_opts{'d'}, $cmd_opts{'r'}; + } else { + status_update $cmd_opts{'o'}, $wc_dir, $wait_for_key, $cmd_opts{'d'}, \ + $cmd_opts{'r'}; + } + + # Break out of loop, or sleep, if required + print( "stop file '$stop_file' detected\n" ), last if -e $stop_file; + sleep $cmd_opts{'s'} if $cmd_opts{'s'} > 0; + } + diff --git a/tools/dev/svn-dev.el b/tools/dev/svn-dev.el new file mode 100644 index 0000000..2fc32c3 --- /dev/null +++ b/tools/dev/svn-dev.el @@ -0,0 +1,566 @@ +;;;; Emacs Lisp help for writing Subversion code. ;;;; + +;; Licensed to the Apache Software Foundation (ASF) under one +;; or more contributor license agreements. 
See the NOTICE file +;; distributed with this work for additional information +;; regarding copyright ownership. The ASF licenses this file +;; to you under the Apache License, Version 2.0 (the +;; "License"); you may not use this file except in compliance +;; with the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, +;; software distributed under the License is distributed on an +;; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +;; KIND, either express or implied. See the License for the +;; specific language governing permissions and limitations +;; under the License. + + +;; Later on, there will be auto-detection of svn files, modeline +;; status, and a whole library of routines to interface with the +;; command-line client. For now, there's this, at Ben's request. +;; +;; All this stuff should get folded into Emacs VC mode, really. + +(defun svn-revert () + "Revert the current buffer and its file to its svn base revision." + (interactive) + (let ((obuf (current-buffer)) + (fname (buffer-file-name)) + (outbuf (get-buffer-create "*svn output*"))) + (set-buffer outbuf) + (delete-region (point-min) (point-max)) + (call-process "svn" nil outbuf nil "status" fname) + (goto-char (point-min)) + (search-forward fname) + (beginning-of-line) + (if (looking-at "^?") + (error "\"%s\" is not a Subversion-controlled file" fname)) + (call-process "svn" nil outbuf nil "revert" fname) + (set-buffer obuf) + ;; todo: make a backup~ file? + (save-excursion + (revert-buffer nil t) + (save-buffer)) + (message "Reverted \"%s\"." fname))) + +(defun svn-resolved () + "Tell Subversion that conflicts in the current buffer and its file have +been resolved." 
+ (interactive) + (let ((obuf (current-buffer)) + (fname (buffer-file-name)) + (outbuf (get-buffer-create "*svn output*"))) + (set-buffer outbuf) + (delete-region (point-min) (point-max)) + (call-process "svn" nil outbuf nil "status" fname) + (goto-char (point-min)) + (search-forward fname) + (beginning-of-line) + (if (looking-at "^?") + (error "\"%s\" is not a Subversion-controlled file" fname)) + (call-process "svn" nil outbuf nil "resolved" fname) + (set-buffer obuf) + ;; todo: make a backup~ file? + (save-excursion + (revert-buffer nil t) + (save-buffer)) + (message "Marked \"%s\" as conflict-free." fname))) + +(defconst svn-adm-area ".svn" + "The name of the Subversion administrative subdirectory.") + +(defconst svn-adm-entries ".svn/entries" + "The path from cwd to the Subversion entries file.") + +(defun svn-controlled-path-p (path) + "Return non-nil if PATH is under Subversion version control, else +return nil. If PATH does not exist, return nil. + +In the future, this will return an Emacs Lisp reflection of PATH's +entry, either an explicit svn-entry-struct, or a list of the form +\(LAST-COMMIT-REV CURRENT-REV LAST-COMMITTER ...\), so we can display +svn information in the mode line. But that requires truly parsing the +entries file, instead of just detecting PATH among the entries." + (interactive "f") ; any use for interactive, other than testing? 
+ (cond + ((not (file-exists-p path)) + nil) + ((file-directory-p path) + (let ((adm-area (concat path "/" svn-adm-area))) + (if (file-directory-p adm-area) + t + nil))) + (t + (let ((entries (concat (file-name-directory path) svn-adm-entries)) + (basename (file-name-nondirectory path)) + (found nil)) + (save-excursion + (if (file-directory-p (concat (file-name-directory path) svn-adm-area)) + (progn + (let ((find-file-hooks nil)) + (set-buffer (find-file-noselect entries t))) + (goto-char (point-min)) + (if (search-forward (format "name=\"%s\"" basename) nil t) + (setq found t) + (setq found nil)) + (kill-buffer nil))) + found))))) + + +(defun svn-text-base-path (file) + "Return the path to the text base for FILE (a string). +If FILE is a directory or not under version control, return nil." + (cond + ((not (svn-controlled-path-p file)) nil) + ((file-directory-p file) nil) + (t + (let* ((pdir (file-name-directory file)) + (base (file-name-nondirectory file))) + (format "%s%s/text-base/%s.svn-base" (or pdir "") svn-adm-area base))))) + + +(defun svn-ediff (file) + "Ediff FILE against its text base." + (interactive "fsvn ediff: ") + (let ((tb (svn-text-base-path file))) + (if (not tb) + (error "No text base for %s" file) + (ediff-files file tb)))) + + +(defun svn-find-file-hook () + "Function for find-file-hooks. +Inhibit backup files unless `vc-make-backup-files' is non-nil." + (if (svn-controlled-path-p (buffer-file-name)) + (progn + (if (string-match "XEMACS\\|XEmacs\\|xemacs" emacs-version) + (vc-load-vc-hooks)) ; for `vc-make-backup-files' + (unless vc-make-backup-files + (make-local-variable 'backup-inhibited) + (setq backup-inhibited t))))) + +(add-hook 'find-file-hooks 'svn-find-file-hook) + + + +;;; Dynamic generation of common Subversion URLs. +;;; +;;; (I have a version of this that actually fetches the stuff from the +;;; Net if you don't have a local copy, but it requires a very recent +;;; version of Emacs, so I didn't bother with it here. 
-kfogel) + +(defvar svn-site-source-tree-top (expand-file-name "~/projects/svn/site/") + "*Top directory of your Subversion site source tree of +repository \"http://svn.apache.org/repos/asf/subversion/site\". +You almost certainly want to set this in your .emacs, to override +the default; use `(setq svn-site-source-tree-top +\"/path/to/the/site/tree\")'.") + +(defvar svn-faq-file (concat svn-site-source-tree-top "/publish/faq.html") + "*A local copy of the Subversion FAQ.") + +(defvar svn-hacking-file (concat svn-site-source-tree-top + "/docs/community-guide/community-guide.html") + "*A local copy of the Subversion hacking.html file.") + +;; Helper for referring to issue numbers in a user-friendly way. +(defun svn-bug-url (n) + "Insert the url for Subversion issue number N. Interactively, prompt for N." + (interactive "nSubversion issue number: ") + (insert (format "http://subversion.tigris.org/issues/show_bug.cgi?id=%d" n))) + +;; Helper for referring to revisions in a browser-friendly way. +(defun svn-rev-url (rev &optional transform) + "Insert the url for Subversion revision REV, or if TRANSFORM is not +nil, then transform the revision at or around point into an HTML link. + +Interactively, if at or inside a revision number, transform it into +full HTML link; otherwise, prompt for revision number and insert just +the resulting URL." 
+ (interactive (let ((thing (thing-at-point 'word))) + (if (and thing (string-match "r[0-9]+" thing)) + (list thing t) + (list (read-string "Subversion revision number: ") nil)))) + (if (string-match "^r[0-9]+" rev) + (setq rev (substring rev 1))) + (if transform + (let* ((bounds (bounds-of-thing-at-point 'word)) + (start (car bounds)) + (end (cdr bounds))) + (delete-region start end))) + (insert (format "http://svn.apache.org/viewcvs?view=revision&revision=%s" + rev))) + +(defconst svn-url-base "http://subversion.apache.org/") +(defconst svn-faq-url (concat svn-url-base "faq.html")) +(defconst svn-hacking-url (concat svn-url-base + "docs/community-guide/community-guide.html")) + +(defun svn-html-get-targets (file) + "Build a list of targets for the Subversion web file FILE." + (let* ((lst nil) + (already-buffer (find-buffer-visiting file)) + (faq-buffer (or already-buffer (find-file-noselect file)))) + (save-excursion + (set-buffer faq-buffer) + (goto-char (point-min)) + ;; TODO: Ideally, this wouldn't depend on the presence of a + ;; table of contents with "#" URLs, it would read the divs and + ;; anchors themselves. + (while (search-forward "href=\"#" nil t) + (let ((b (point)) + (e (progn (search-forward "\"") (forward-char -1) (point)))) + (setq lst (cons (buffer-substring b e) lst)))) + (if (not already-buffer) + (kill-buffer (current-buffer))) + lst))) + +(defun svn-url-completing-read (file prompt &optional hist-list) + "Completingly read an HTML target for FILE, prompting with PROMPT. +If HIST-LIST is non-nil, it is a symbol: the completion history list to use." 
+ (progn + (let* ((targets (svn-html-get-targets file)) + (target-str (completing-read prompt targets nil t nil hist-list))) + (list target-str)))) + +(defvar svn-faq-history-list nil + "History list for the 'svn-faq-url' prompt.") + +(defvar svn-hacking-history-list nil + "History list for the 'svn-hacking-url' prompt.") + +(defun svn-faq-url (target) + "Prompt with completion for a targeted SVN FAQ item, then insert it. +If called non-interactively, TARGET is the target within the faq (an +HTML anchor name, that is, the part after the \"#\")." + (interactive + (svn-url-completing-read svn-faq-file "FAQ entry: " + 'svn-faq-history-list)) + (insert svn-faq-url "#" target)) + +(defun svn-hacking-url (target) + "Prompt with completion for a targeted hacking.html item, then insert it. +If called non-interactively, TARGET is the target within hacking.html +(an HTML anchor name, that is, the part after the \"#\")." + (interactive + (svn-url-completing-read svn-hacking-file "hacking.html entry: " + 'svn-hacking-history-list)) + (insert svn-hacking-url "#" target)) + + + +;;; Subversion C conventions +(if (eq major-mode 'c-mode) + (progn + (c-add-style "svn" '("gnu" (c-offsets-alist . ((inextern-lang . 0))))) + (c-set-style "svn"))) +(setq indent-tabs-mode nil) +(setq angry-mob-with-torches-and-pitchforks t) + + + +;; Subversion Python conventions, plus some harmless helpers for +;; people who don't have python mode set up by default. +(autoload 'python-mode "python-mode" nil t) +(or (assoc "\\.py$" auto-mode-alist) + (setq auto-mode-alist + (cons '("\\.py$" . python-mode) auto-mode-alist))) + +(defun svn-python-mode-hook () + "Set up the Subversion python conventions. The effect of this is +local to the current buffer, which is presumably visiting a file in +the Subversion project. Python setup in other buffers will not be +affected." 
+ (when (string-match "/subversion/" (buffer-file-name)) + (make-local-variable 'py-indent-offset) + (setq indent-tabs-mode nil) + (setq py-indent-offset 2) + (make-local-variable 'py-smart-indentation) + (setq py-smart-indentation nil))) + +(add-hook 'python-mode-hook 'svn-python-mode-hook) + + + +;; Much of the APR documentation is embedded perldoc format. The +;; perldoc program itself sucks, however. If you're the author of +;; perldoc, I'm sorry, but what were you thinking? Don't you know +;; that there are people in the world who don't work in vt100 +;; terminals? If I want to view a perldoc page in my Emacs shell +;; buffer, I have to run the ridiculous command +;; +;; $ PAGER=cat perldoc -t target_file +;; +;; (Not that this was documented anywhere, I had to figure it out for +;; myself by reading /usr/bin/perldoc). +;; +;; Non-paging behavior should be a standard command-line option. No +;; program that can output text should *ever* insist on invoking the +;; pager. +;; +;; Anyway, these Emacs commands will solve the problem for us. +;; +;; Acknowledgements: +;; Much of this code is copied from man.el in the FSF Emacs 21.x +;; sources. + +(defcustom svn-perldoc-overstrike-face 'bold + "*Face to use when fontifying overstrike." + :type 'face + :group 'svn-dev) + +(defcustom svn-perldoc-underline-face 'underline + "*Face to use when fontifying underlining." + :type 'face + :group 'svn-dev) + + +(defun svn-perldoc-softhyphen-to-minus () + ;; \255 is some kind of dash in Latin-N. Versions of Debian man, at + ;; least, emit it even when not in a Latin-N locale. + (unless (eq t (compare-strings "latin-" 0 nil + current-language-environment 0 6 t)) + (goto-char (point-min)) + (let ((str "\255")) + (if enable-multibyte-characters + (setq str (string-as-multibyte str))) + (while (search-forward str nil t) (replace-match "-"))))) + + +(defun svn-perldoc-fontify-buffer () + "Convert overstriking and underlining to the correct fonts. 
+Same for the ANSI bold and normal escape sequences." + (interactive) + (message "Please wait, making up the page...") + (goto-char (point-min)) + (while (search-forward "\e[1m" nil t) + (delete-backward-char 4) + (put-text-property (point) + (progn (if (search-forward "\e[0m" nil 'move) + (delete-backward-char 4)) + (point)) + 'face svn-perldoc-overstrike-face)) + (goto-char (point-min)) + (while (search-forward "_\b" nil t) + (backward-delete-char 2) + (put-text-property (point) (1+ (point)) 'face svn-perldoc-underline-face)) + (goto-char (point-min)) + (while (search-forward "\b_" nil t) + (backward-delete-char 2) + (put-text-property (1- (point)) (point) 'face svn-perldoc-underline-face)) + (goto-char (point-min)) + (while (re-search-forward "\\(.\\)\\(\b\\1\\)+" nil t) + (replace-match "\\1") + (put-text-property (1- (point)) (point) 'face svn-perldoc-overstrike-face)) + (goto-char (point-min)) + (while (re-search-forward "o\b\\+\\|\\+\bo" nil t) + (replace-match "o") + (put-text-property (1- (point)) (point) 'face 'bold)) + (goto-char (point-min)) + (while (re-search-forward "[-|]\\(\b[-|]\\)+" nil t) + (replace-match "+") + (put-text-property (1- (point)) (point) 'face 'bold)) + (svn-perldoc-softhyphen-to-minus) + (message "Please wait, making up the page...done")) + + +(defun svn-perldoc-cleanup-buffer () + "Remove overstriking and underlining from the current buffer." 
+ (interactive) + (message "Please wait, cleaning up the page...") + (progn + (goto-char (point-min)) + (while (search-forward "_\b" nil t) (backward-delete-char 2)) + (goto-char (point-min)) + (while (search-forward "\b_" nil t) (backward-delete-char 2)) + (goto-char (point-min)) + (while (re-search-forward "\\(.\\)\\(\b\\1\\)+" nil t) + (replace-match "\\1")) + (goto-char (point-min)) + (while (re-search-forward "\e\\[[0-9]+m" nil t) (replace-match "")) + (goto-char (point-min)) + (while (re-search-forward "o\b\\+\\|\\+\bo" nil t) (replace-match "o")) + (goto-char (point-min)) + (while (re-search-forward "" nil t) (replace-match " "))) + (goto-char (point-min)) + (while (re-search-forward "[-|]\\(\b[-|]\\)+" nil t) (replace-match "+")) + (svn-perldoc-softhyphen-to-minus) + (message "Please wait, cleaning up the page...done")) + + +;; Entry point to svn-perldoc functionality. +(defun svn-perldoc (file) + "Run perldoc on FILE, display the output in a buffer." + (interactive "fRun perldoc on file: ") + (let ((outbuf (get-buffer-create + (format "*%s PerlDoc*" (file-name-nondirectory file)))) + (savepg (getenv "PAGER"))) + (setenv "PAGER" "cat") ;; for perldoc + (save-excursion + (set-buffer outbuf) + (delete-region (point-min) (point-max)) + (call-process "perldoc" nil outbuf nil (expand-file-name file)) + (svn-perldoc-fontify-buffer) + (svn-perldoc-cleanup-buffer) + ;; Clean out the inevitable leading dead space. + (goto-char (point-min)) + (re-search-forward "[^ \i\n]") + (beginning-of-line) + (delete-region (point-min) (point))) + (setenv "PAGER" savepg) + (display-buffer outbuf))) + + + +;;; Help developers write log messages. + +;; How to use this: just run `svn-log-message'. You might want to +;; bind it to a key, for example, +;; +;; (define-key "\C-cl" 'svn-log-message) +;; +;; The log message will accumulate in a file. Later, you can use +;; that file when you commit: +;; +;; $ svn ci -F msg ... 
+ +(defun svn-log-path-derive (path) + "Derive a relative directory path for absolute PATH, for a log entry." + (save-match-data + (let ((base (file-name-nondirectory path)) + (chop-spot (string-match + "\\(code/\\)\\|\\(src/\\)\\|\\(projects/\\)" + path))) + (if chop-spot + (progn + (setq path (substring path (match-end 0))) + ;; Kluge for Subversion developers. + (if (string-match "subversion/" path) + (substring path (+ (match-beginning 0) 11)) + path)) + (string-match (expand-file-name "~/") path) + (substring path (match-end 0)))))) + + +(defun svn-log-message-file () + "Return the name of the appropriate log message accumulation file. +Usually this is just the file `msg' in the current directory, but +certain areas are treated specially, for example, the Subversion +source tree." + (save-match-data + (if (string-match "subversion" default-directory) + (concat (substring default-directory 0 (match-end 0)) "/msg") + "msg"))) + + +(defun svn-log-message (short-file-names) + "Add to an in-progress log message, based on context around point. +If prefix arg SHORT-FILE-NAMES is non-nil, then use basenames only in +log messages, otherwise use full paths. The current defun name is +always used. + +If the log message already contains material about this defun, then put +point there, so adding to that material is easy. + +Else if the log message already contains material about this file, put +point there, and push onto the kill ring the defun name with log +message dressing around it, plus the raw defun name, so yank and +yank-next are both useful. + +Else if there is no material about this defun nor file anywhere in the +log message, then put point at the end of the message and insert a new +entry for file with defun. + +See also the function `svn-log-message-file'." 
+ (interactive "P") + (let ((this-file (if short-file-names + (file-name-nondirectory buffer-file-name) + (svn-log-path-derive buffer-file-name))) + (this-defun (or (add-log-current-defun) + (save-excursion + (save-match-data + (if (eq major-mode 'c-mode) + (progn + (if (fboundp 'c-beginning-of-statement-1) + (c-beginning-of-statement-1) + (c-beginning-of-statement)) + (search-forward "(" nil t) + (forward-char -1) + (forward-sexp -1) + (buffer-substring + (point) + (progn (forward-sexp 1) (point))))))))) + (log-file (svn-log-message-file))) + (find-file log-file) + (goto-char (point-min)) + ;; Strip text properties from strings + (set-text-properties 0 (length this-file) nil this-file) + (set-text-properties 0 (length this-defun) nil this-defun) + ;; If log message for defun already in progress, add to it + (if (and + this-defun ;; we have a defun to work with + (search-forward this-defun nil t) ;; it's in the log msg already + (save-excursion ;; and it's about the same file + (save-match-data + (if (re-search-backward ; Ick, I want a real filename regexp! + "^\\*\\s-+\\([a-zA-Z0-9-_.@=+^$/%!?(){}<>]+\\)" nil t) + (string-equal (match-string 1) this-file) + t)))) + (if (re-search-forward ":" nil t) + (if (looking-at " ") (forward-char 1))) + ;; Else no log message for this defun in progress... + (goto-char (point-min)) + ;; But if log message for file already in progress, add to it. + (if (search-forward this-file nil t) + (progn + (if this-defun (progn + (kill-new (format "(%s): " this-defun)) + (kill-new this-defun))) + (search-forward ")" nil t) + (if (looking-at " ") (forward-char 1))) + ;; Found neither defun nor its file, so create new entry. + (goto-char (point-max)) + (if (not (bolp)) (insert "\n")) + (insert (format "\n* %s (%s): " this-file (or this-defun ""))) + ;; Finally, if no derived defun, put point where the user can + ;; type it themselves. + (if (not this-defun) (forward-char -3)))))) + + + +;;; Log message helpers. 
+ +(defconst svn-log-msg-sep-line + "------------------------------------------------------------------------" + "The line of dashes that separates log messages in 'svn log' output.") + +(defconst svn-log-msg-boundary-regexp + (concat "^" svn-log-msg-sep-line "\n" "r[0-9]+ | ") + "Regular expression matching the start of a log msg. The start is +the beginning of the separator line, not the rev/author/date line that +follows the separator line.") + +(defun svn-narrow-to-log-msg () + "Narrow to the current Subversion log message. +This meant to be used while browsing the output of 'svn log'. +If point is not in such output, error." + (interactive) + (let ((start nil) (end nil)) + (save-excursion + (re-search-backward svn-log-msg-boundary-regexp) + (forward-line 1) + (setq start (point)) + (end-of-line) + (re-search-backward "| \\([0-9]+\\) ") + (let ((num (match-string 1))) + (re-search-forward "^\n") + (forward-line (string-to-number num))) + (setq end (point))) + (narrow-to-region start end))) + + + +(message "loaded svn-dev.el") diff --git a/tools/dev/svn-dev.vim b/tools/dev/svn-dev.vim new file mode 100644 index 0000000..cf2c50d --- /dev/null +++ b/tools/dev/svn-dev.vim @@ -0,0 +1,76 @@ +" This file sets vim up to use subversion's coding style. It can be applied on +" a per buffer basis with :source svn-dev.vim, or can be source from ~/.vimrc to +" apply settings to all files vim uses. For other variation try :help autocmd. +" +" Licensed to the Apache Software Foundation (ASF) under one +" or more contributor license agreements. See the NOTICE file +" distributed with this work for additional information +" regarding copyright ownership. The ASF licenses this file +" to you under the Apache License, Version 2.0 (the +" "License"); you may not use this file except in compliance +" with the License. 
You may obtain a copy of the License at +" +" http://www.apache.org/licenses/LICENSE-2.0 +" +" Unless required by applicable law or agreed to in writing, +" software distributed under the License is distributed on an +" "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +" KIND, either express or implied. See the License for the +" specific language governing permissions and limitations +" under the License. +" +" TODO: Try to find a way to wrap comments without putting a * on the next line, +" since most of subversion doesn't use that style. (Note that taking cro out of +" formatoptions won't quite work, because then comments won't be wrapped by +" default). +" +" Expand tab characters to spaces +set expandtab + +" Tab key moves 8 spaces +set tabstop=8 + +" '>>' moves 4 spaces +set shiftwidth=4 + +" Wrap lines at 78 columns. +" 78 so that vim won't swap over to the right before it wraps a line. +set textwidth=78 + +" What counts as part of a word (used for tag matching, and motion commands) +set iskeyword=a-z,A-Z,48-57,_,.,-,> + +" How to wrap lines +" t=wrap lines, c=wrap comments, inserting comment leader, r=insert comment +" leader after an <ENTER>, o=Insert comment leader after an 'o', q=Allow +" formatting of comments with 'gq' +set formatoptions=tcroq + +" Use C style indenting +set cindent + +" Use the following rules to do C style indenting +" (Note that an s mean number*shiftwidth) +" >=normal indent, +" e=indent inside braces(brace at end of line), +" n=Added to normal indent if no braces, +" f=opening brace of function, +" {=opening braces, +" }=close braces (from opening), +" ^s=indent after brace, if brace is on column 0, +" := case labels from switch, ==statements after case, +" t=function return type, +" +=continuation line, +" c=comment lines from opener, +" (=unclosed parens (0 means match), +" u=same as ( but for second set of parens +" +" Try :help cinoptions-values +set cinoptions=>1s,e0,n-2,f0,{.5s,}0,^-.5s,=.5s,t0,+1s,c3,(0,u0,\:2 + +" 
The following modelines can also be used to set the same options. +"/* +" * vim:ts=8:sw=4:expandtab:tw=78:fo=tcroq cindent +" * vim:isk=a-z,A-Z,48-57,_,.,-,> +" * vim:cino=>1s,e0,n-2,f0,{.5s,}0,^-.5s,=.5s,t0,+1s,c3,(0,u0,\:2 +" */ diff --git a/tools/dev/svn-entries.el b/tools/dev/svn-entries.el new file mode 100644 index 0000000..fff322a --- /dev/null +++ b/tools/dev/svn-entries.el @@ -0,0 +1,156 @@ +;;; svn-entries.el --- Display .svn/entries field names to the left + +;; Copyright (C) 2007 David Glasser + +;; Licensed under the same license as Subversion. + +;; Licensed to the Apache Software Foundation (ASF) under one +;; or more contributor license agreements. See the NOTICE file +;; distributed with this work for additional information +;; regarding copyright ownership. The ASF licenses this file +;; to you under the Apache License, Version 2.0 (the +;; "License"); you may not use this file except in compliance +;; with the License. You may obtain a copy of the License at +;; +;; http://www.apache.org/licenses/LICENSE-2.0 +;; +;; Unless required by applicable law or agreed to in writing, +;; software distributed under the License is distributed on an +;; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +;; KIND, either express or implied. See the License for the +;; specific language governing permissions and limitations +;; under the License. + +;;; Commentary: + +;; Display field names to the left of the lines in a .svn/entries +;; buffer. Copy svn-entries.el to your load-path and add to your +;; .emacs: + +;; (require 'svn-entries) + +;; After opening or editing an entries file, run + +;; M-x svn-entries-show + +;; To hide: + +;; M-x svn-entries-hide + +;; (I tried doing this as a minor mode but setting margins during +;; alist initialization didn't work...) + +;; Tested on FSF Emacs 22. 
+ + +(defvar svn-entries-overlays nil "Overlays used in this buffer.") +(make-variable-buffer-local 'svn-entries-overlays) + +(defgroup svn-entries nil + "Show labels to the left of .svn/entries buffers" + :group 'convenience) + +(defface svn-entries + '((t :inherit shadow)) + "Face for displaying line numbers in the display margin." + :group 'svn-entries) + +(defun svn-entries-set-margins (buf margin) + (dolist (w (get-buffer-window-list buf nil t)) + (set-window-margins w margin))) + +(defun svn-entries-hide () + "Delete all overlays displaying labels for this buffer." + (interactive) + (mapc #'delete-overlay svn-entries-overlays) + (setq svn-entries-overlays nil) + (svn-entries-set-margins (current-buffer) 0) + (remove-hook 'window-configuration-change-hook + 'svn-entries-after-config t)) + +(defun svn-entries-show () + "Update labels for the current buffer." + (interactive) + (svn-entries-update (current-buffer)) + (add-hook 'window-configuration-change-hook + 'svn-entries-after-config nil t)) + +(defconst svn-entries-labels + ["name" + "kind" + "revision" + "url" + "repos" + "schedule" + "text-time" + "checksum" + "committed-date" + "committed-rev" + "last-author" + "has-props" + "has-prop-mods" + "cachable-props" + "present-props" + "conflict-old" + "conflict-new" + "conflict-wrk" + "prop-reject-file" + "copied" + "copyfrom-url" + "copyfrom-rev" + "deleted" + "absent" + "incomplete" + "uuid" + "lock-token" + "lock-owner" + "lock-comment" + "lock-creation-date" + "changelist" + "keep-local" + "working-size" + "depth"]) + +(defconst svn-entries-margin-width (length "lock-creation-date")) + +(defun svn-entries-update (buffer) + "Update labels for all windows displaying BUFFER." 
+ (with-current-buffer buffer + (svn-entries-hide) + (save-excursion + (save-restriction + (widen) + (let ((last-line (line-number-at-pos (point-max))) + (field 0) + (done nil)) + (goto-char (point-min)) + (while (not done) + (cond ((= (point) 1) + (svn-entries-overlay-here "format")) + ((= (following-char) 12) ; ^L + (setq field 0)) + ((not (eobp)) + (svn-entries-overlay-here (elt svn-entries-labels field)) + (setq field (1+ field)))) + (setq done (> (forward-line) 0)))))) + (svn-entries-set-margins buffer svn-entries-margin-width))) + +(defun svn-entries-overlay-here (label) + (let* ((fmt-label (propertize label 'face 'svn-entries)) + (left-label (propertize " " 'display `((margin left-margin) + ,fmt-label))) + (ov (make-overlay (point) (point)))) + (push ov svn-entries-overlays) + (overlay-put ov 'before-string left-label))) + +(defun svn-entries-after-config () + (walk-windows (lambda (w) (svn-entries-set-margins-if-overlaid (window-buffer))) + nil 'visible)) + +(defun svn-entries-set-margins-if-overlaid (b) + (with-current-buffer b + (when svn-entries-overlays + (svn-entries-set-margins b svn-entries-margin-width)))) + +(provide 'svn-entries) +;;; svn-entries.el ends here diff --git a/tools/dev/svn-merge-revs.py b/tools/dev/svn-merge-revs.py new file mode 100755 index 0000000..f67dae4 --- /dev/null +++ b/tools/dev/svn-merge-revs.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +import sys +import os + +progname = os.path.basename(sys.argv[0]) + +def usage(): + print("Usage: %s SOURCEURL WCPATH [r]REVNUM[,] [...]" % progname) + print("Try '%s --help' for more information" % progname) + +def help(): + val = """This script is meant to ease the pain of merging and +reviewing revision(s) on a release branch (although it can be used to +merge and review revisions from any line of development to another). + +To allow cutting and pasting from the STATUS file, revision numbers +can be space or comma-separated, and may also include the prefix +'r'. + +Lastly, a file (named 'rev1-rev2-rev3.log') is created for you. +This file contains each merge command that was run, the log of the +revision that was merged, and the diff from the previous revision. + +Examples: + + %s http://svn.apache.org/repos/asf/subversion/trunk svn-1.2.x-branch \ + r14041, r14149, r14186, r14194, r14238, r14273 + + %s http://svn.apache.org/repos/asf/subversion/trunk svn-1.2.x-branch \ + 14041 14149 14186 14194 14238 14273""" % (progname, progname) + print(val) + + +if len(sys.argv) > 1 and sys.argv[1] == '--help': + help() + sys.exit(0) + +if len(sys.argv) < 4: + usage() + sys.exit(255) + +src_url = sys.argv[1] +wc_path = sys.argv[2] + +# Tolerate comma separated lists of revs (e.g. 
"r234, r245, r251") +revs = [] +for rev in sys.argv[3:]: + orig_rev = rev + if rev[-1:] == ',': + rev = rev[:-1] + + if rev[:1] == 'r': + rev = rev[1:] + + try: + rev = int(rev) + except ValueError: + print("Encountered non integer revision '%s'" % orig_rev) + usage() + sys.exit(254) + revs.append(rev) + +# Make an easily reviewable logfile +logfile = "-".join([str(x) for x in revs]) + ".log" +log = open(logfile, 'w') + +for rev in revs: + merge_cmd = ("svn merge -r%i:%i %s %s" % (rev - 1, rev, src_url, wc_path)) + log_cmd = 'svn log -v -r%i %s' % (rev, src_url) + diff_cmd = 'svn diff -r%i:%i %s' % (rev -1, rev, src_url) + + # Do the merge + os.system(merge_cmd) + + # Write our header + log.write("=" * 72 + '\n') + log.write(merge_cmd + '\n') + + # Get our log + fh = os.popen(log_cmd) + while True: + line = fh.readline() + if not line: + break + log.write(line) + fh.close() + + # Get our diff + fh = os.popen(diff_cmd) + while True: + line = fh.readline() + if not line: + break + log.write(line) + + # Write our footer + log.write("=" * 72 + '\n' * 10) + + +log.close() +print("\nYour logfile is '%s'" % logfile) diff --git a/tools/dev/svnqlite3-dump b/tools/dev/svnqlite3-dump new file mode 100755 index 0000000..1b94f38 --- /dev/null +++ b/tools/dev/svnqlite3-dump @@ -0,0 +1,50 @@ +#!/usr/bin/perl -lpw + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +# USAGE: +# sqlite3 .svn/wc.db .dump | $0 +# $0 /path/to/wc +# $0 /path/to/wc/.svn/wc.db +# DOES: +# decodes blobs (eg, property skels) and dates to human-readable form +# REQUIRES: +# sqlite3(1) (second and third usage forms only) + +BEGIN { + # locate sqlite3 + my $sqlite3 = $ENV{SQLITE3} || "sqlite3"; + # set stdin + my $file = shift; + $file = "." if -t and not $file; + if ($file) { + $file .= "/.svn/wc.db" if -e "$file/.svn/wc.db"; + close STDIN; + open STDIN, "-|", $sqlite3, $file, '.dump'; + } else { + # filter stdin to stdout + } +} + +# X'68656C6C6F' => "hello" +1 while s/X'([0-9A-F]{2})/chr(hex $1) . q[X']/e; +s/X''//g; +s/\n/\\n/g; # multiline props + +# 1288312835000000 => "Fri Oct 29 02:40:35 2010" +s/(?<=,)(\d\d\d\d\d\d\d\d\d\d)\d\d\d\d\d\d(?=,)/sprintf '"%s"', scalar localtime $1/eg; diff --git a/tools/dev/svnraisetreeconflict/main.c b/tools/dev/svnraisetreeconflict/main.c new file mode 100644 index 0000000..752aae6 --- /dev/null +++ b/tools/dev/svnraisetreeconflict/main.c @@ -0,0 +1,422 @@ +/* svnraisetreeconflict + * + * This is a crude command line tool that publishes API to create + * tree-conflict markings in a working copy. + * + * To compile this, go to the root of the Subversion source tree and + * call `make svnraisetreeconflict'. You will find the executable file + * next to this source file. + * + * If you want to "install" svnraisetreeconflict, you may call + * `make install-tools' in the Subversion source tree root. + * (Note: This also installs any other installable tools.) 
+ * + * svnraisetreeconflict cannot be compiled separate from a Subversion + * source tree. + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include "svn_cmdline.h" +#include "svn_pools.h" +#include "svn_wc.h" +#include "svn_utf.h" +#include "svn_path.h" +#include "svn_opt.h" +#include "svn_version.h" + +#include "private/svn_wc_private.h" + +#include "svn_private_config.h" + +#define OPT_VERSION SVN_OPT_FIRST_LONGOPT_ID + +/** A statement macro, similar to @c SVN_INT_ERR, but issues a + * message saying "svnraisetreeconflict:" instead of "svn:". + * + * Evaluate @a expr. If it yields an error, handle that error and + * return @c EXIT_FAILURE. 
+ */ +#define SVNRAISETC_INT_ERR(expr) \ + do { \ + svn_error_t *svn_err__temp = (expr); \ + if (svn_err__temp) { \ + svn_handle_error2(svn_err__temp, stderr, FALSE, \ + "svnraisetreeconflict: "); \ + svn_error_clear(svn_err__temp); \ + return EXIT_FAILURE; } \ + } while (0) + +static svn_error_t * +version(apr_pool_t *pool) +{ + return svn_opt_print_help3(NULL, "svnraisetreeconflict", TRUE, FALSE, NULL, + NULL, NULL, NULL, NULL, NULL, pool); +} + +static void +usage(apr_pool_t *pool) +{ + svn_error_clear(svn_cmdline_fprintf + (stderr, pool, + _("Type 'svnraisetreeconflict --help' for usage.\n"))); + exit(1); +} + +/*************************************************************************** + * "enum mapping" functions copied from subversion/libsvn_wc/tree_conflicts.c + **************************************************************************/ + +/* A mapping between a string STR and an enumeration value VAL. */ +typedef struct enum_mapping_t +{ + const char *str; + int val; +} enum_mapping_t; + +/* A map for svn_node_kind_t values. */ +static const enum_mapping_t node_kind_map[] = +{ + { "none", svn_node_none }, + { "file", svn_node_file }, + { "dir", svn_node_dir }, + { "unknown", svn_node_unknown }, + { NULL, 0 } +}; + +/* A map for svn_wc_operation_t values. */ +static const enum_mapping_t operation_map[] = +{ + { "update", svn_wc_operation_update }, + { "switch", svn_wc_operation_switch }, + { "merge", svn_wc_operation_merge }, + { NULL, 0 } +}; + +/* A map for svn_wc_conflict_action_t values. */ +static const enum_mapping_t action_map[] = +{ + { "edit", svn_wc_conflict_action_edit }, + { "delete", svn_wc_conflict_action_delete }, + { "add", svn_wc_conflict_action_add }, + { NULL, 0 } +}; + +/* A map for svn_wc_conflict_reason_t values. 
*/ +static const enum_mapping_t reason_map[] = +{ + { "edited", svn_wc_conflict_reason_edited }, + { "deleted", svn_wc_conflict_reason_deleted }, + { "missing", svn_wc_conflict_reason_missing }, + { "obstructed", svn_wc_conflict_reason_obstructed }, + { "added", svn_wc_conflict_reason_added }, + { NULL, 0 } +}; + +/* Parse the enumeration field pointed to by *START into *RESULT as a plain + * 'int', using MAP to convert from strings to enumeration values. + * In MAP, a null STR field marks the end of the map. + * Don't read further than END. + * After reading, make *START point to the character after the field. + */ +static svn_error_t * +read_enum_field(int *result, + const enum_mapping_t *map, + const char *str, + apr_pool_t *pool) +{ + int i; + + /* Find STR in MAP; error if not found. */ + for (i = 0; ; i++) + { + if (map[i].str == NULL) + return svn_error_createf(SVN_ERR_CL_ARG_PARSING_ERROR, NULL, + "Unrecognised parameter value: '%s'", str); + if (strcmp(str, map[i].str) == 0) + break; + } + + *result = map[i].val; + return SVN_NO_ERROR; +} + +static const char* +get_enum_str(const enum_mapping_t *map, + int enum_val) +{ + int i; + for (i = 0; map[i].str != NULL; i++) + { + if (map[i].val == enum_val) + return map[i].str; + } + return NULL; +} + +static void +print_enum_map(const enum_mapping_t *map, + apr_pool_t *pool) +{ + int i; + for (i = 0; map[i].str != NULL; i++) + svn_error_clear(svn_cmdline_fprintf(stdout, pool, + " %s", map[i].str)); +} + +static svn_error_t * +raise_tree_conflict(int argc, const char **argv, apr_pool_t *pool) +{ + int i = 0; + svn_wc_conflict_version_t *left, *right; + svn_wc_conflict_description2_t *c; + svn_wc_context_t *wc_ctx; + + /* Conflict description parameters */ + const char *wc_path, *wc_abspath; + const char *repos_url1, *repos_url2, *path_in_repos1, *path_in_repos2; + int operation, action, reason; + long peg_rev1, peg_rev2; + int kind, kind1, kind2; + + if (argc != 13) + return 
svn_error_create(SVN_ERR_CL_ARG_PARSING_ERROR, NULL, + "Wrong number of arguments"); + + /* Read the parameters */ + wc_path = svn_dirent_internal_style(argv[i++], pool); + SVN_ERR(read_enum_field(&kind, node_kind_map, argv[i++], pool)); + SVN_ERR(read_enum_field(&operation, operation_map, argv[i++], pool)); + SVN_ERR(read_enum_field(&action, action_map, argv[i++], pool)); + SVN_ERR(read_enum_field(&reason, reason_map, argv[i++], pool)); + repos_url1 = argv[i++]; + path_in_repos1 = argv[i++]; + peg_rev1 = atol(argv[i++]); + SVN_ERR(read_enum_field(&kind1, node_kind_map, argv[i++], pool)); + repos_url2 = argv[i++]; + path_in_repos2 = argv[i++]; + peg_rev2 = atol(argv[i++]); + SVN_ERR(read_enum_field(&kind2, node_kind_map, argv[i++], pool)); + + + /* Allocate and fill in the description data structures */ + SVN_ERR(svn_dirent_get_absolute(&wc_abspath, wc_path, pool)); + left = svn_wc_conflict_version_create(repos_url1, path_in_repos1, peg_rev1, + kind1, pool); + right = svn_wc_conflict_version_create(repos_url2, path_in_repos2, peg_rev2, + kind2, pool); + c = svn_wc_conflict_description_create_tree2(wc_abspath, kind, + operation, left, right, pool); + c->action = (svn_wc_conflict_action_t)action; + c->reason = (svn_wc_conflict_reason_t)reason; + + /* Raise the conflict */ + SVN_ERR(svn_wc_context_create(&wc_ctx, NULL, pool, pool)); + SVN_ERR(svn_wc__add_tree_conflict(wc_ctx, c, pool)); + + return SVN_NO_ERROR; +} + + +static void +help(const apr_getopt_option_t *options, apr_pool_t *pool) +{ + svn_error_clear + (svn_cmdline_fprintf + (stdout, pool, + _("usage: svnraisetreeconflict [OPTIONS] WC_PATH NODE_KIND OPERATION ACTION REASON REPOS_URL1 PATH_IN_REPOS1 PEG_REV1 NODE_KIND1 REPOS_URL2 PATH_IN_REPOS2 PEG_REV2 NODE_KIND2\n\n" + " Mark the working-copy node WC_PATH as being the victim of a tree conflict.\n" + "\n" + " WC_PATH's parent directory must be a working copy, otherwise a\n" + " tree conflict cannot be raised.\n" + "\n" + "Valid options:\n"))); + while 
(options->description) + { + const char *optstr; + svn_opt_format_option(&optstr, options, TRUE, pool); + svn_error_clear(svn_cmdline_fprintf(stdout, pool, " %s\n", optstr)); + ++options; + } + svn_error_clear(svn_cmdline_fprintf(stdout, pool, + _("\n" + "Valid enum argument values:\n" + " NODE_KIND, NODE_KIND1, NODE_KIND2:\n" + " "))); + print_enum_map(node_kind_map, pool); + svn_error_clear(svn_cmdline_fprintf(stdout, pool, + _("\n" + " OPERATION:\n" + " "))); + print_enum_map(operation_map, pool); + svn_error_clear(svn_cmdline_fprintf(stdout, pool, + _("\n" + " ACTION (what svn tried to do):\n" + " "))); + print_enum_map(action_map, pool); + svn_error_clear(svn_cmdline_fprintf(stdout, pool, + _("\n" + " REASON (what local change made svn fail):\n" + " "))); + print_enum_map(reason_map, pool); + svn_error_clear(svn_cmdline_fprintf(stdout, pool, + _("\n" + " REPOS_URL1, REPOS_URL2:\n" + " The URL of the repository itself, e.g.: file://usr/repos\n" + " PATH_IN_REPOS1, PATH_IN_REPOS2:\n" + " The complete path of the node in the repository, e.g.: sub/dir/foo\n" + " PEG_REV1, PEG_REV2:\n" + " The revision number at which the given path is relevant.\n" + "\n" + "Example:\n" + " svnraisetreeconflict ./foo %s %s %s %s file://usr/repos sub/dir/foo 1 %s file://usr/repos sub/dir/foo 3 %s\n\n"), + get_enum_str(node_kind_map, svn_node_file), + get_enum_str(operation_map, svn_wc_operation_update), + get_enum_str(action_map, svn_wc_conflict_action_delete), + get_enum_str(reason_map, svn_wc_conflict_reason_deleted), + get_enum_str(node_kind_map, svn_node_file), + get_enum_str(node_kind_map, svn_node_none) + )); + exit(0); +} + + +/* Version compatibility check */ +static svn_error_t * +check_lib_versions(void) +{ + static const svn_version_checklist_t checklist[] = + { + { "svn_subr", svn_subr_version }, + { "svn_wc", svn_wc_version }, + { NULL, NULL } + }; + + SVN_VERSION_DEFINE(my_version); + return svn_ver_check_list(&my_version, checklist); +} + +int +main(int argc, const 
char *argv[]) +{ + apr_allocator_t *allocator; + apr_pool_t *pool; + svn_error_t *err; + apr_getopt_t *os; + const apr_getopt_option_t options[] = + { + {"help", 'h', 0, N_("display this help")}, + {"version", OPT_VERSION, 0, + N_("show program version information")}, + {0, 0, 0, 0} + }; + apr_array_header_t *remaining_argv; + + /* Initialize the app. */ + if (svn_cmdline_init("svnraisetreeconflict", stderr) != EXIT_SUCCESS) + return EXIT_FAILURE; + + /* Create our top-level pool. Use a separate mutexless allocator, + * given this application is single threaded. + */ + if (apr_allocator_create(&allocator)) + return EXIT_FAILURE; + + apr_allocator_max_free_set(allocator, SVN_ALLOCATOR_RECOMMENDED_MAX_FREE); + + pool = svn_pool_create_ex(NULL, allocator); + apr_allocator_owner_set(allocator, pool); + + /* Check library versions */ + err = check_lib_versions(); + if (err) + return svn_cmdline_handle_exit_error(err, pool, "svnraisetreeconflict: "); + +#if defined(WIN32) || defined(__CYGWIN__) + /* Set the working copy administrative directory name. */ + if (getenv("SVN_ASP_DOT_NET_HACK")) + { + err = svn_wc_set_adm_dir("_svn", pool); + if (err) + return svn_cmdline_handle_exit_error(err, pool, "svnraisetreeconflict: "); + } +#endif + + err = svn_cmdline__getopt_init(&os, argc, argv, pool); + if (err) + return svn_cmdline_handle_exit_error(err, pool, "svnraisetreeconflict: "); + + os->interleave = 1; + while (1) + { + int opt; + const char *arg; + apr_status_t status = apr_getopt_long(os, options, &opt, &arg); + if (APR_STATUS_IS_EOF(status)) + break; + if (status != APR_SUCCESS) + { + usage(pool); + return EXIT_FAILURE; + } + switch (opt) + { + case 'h': + help(options, pool); + break; + case OPT_VERSION: + SVNRAISETC_INT_ERR(version(pool)); + exit(0); + break; + default: + usage(pool); + return EXIT_FAILURE; + } + } + + /* Convert the remaining arguments to UTF-8. 
*/ + remaining_argv = apr_array_make(pool, 0, sizeof(const char *)); + while (os->ind < argc) + { + const char *s; + + SVNRAISETC_INT_ERR(svn_utf_cstring_to_utf8(&s, os->argv[os->ind++], + pool)); + APR_ARRAY_PUSH(remaining_argv, const char *) = s; + } + + if (remaining_argv->nelts < 1) + { + usage(pool); + return EXIT_FAILURE; + } + + /* Do the main task */ + SVNRAISETC_INT_ERR(raise_tree_conflict(remaining_argv->nelts, + (const char **)remaining_argv->elts, + pool)); + + svn_pool_destroy(pool); + + /* Flush stdout to make sure that the user will see any printing errors. */ + SVNRAISETC_INT_ERR(svn_cmdline_fflush(stdout)); + + return EXIT_SUCCESS; +} diff --git a/tools/dev/trails.py b/tools/dev/trails.py new file mode 100755 index 0000000..9717c6c --- /dev/null +++ b/tools/dev/trails.py @@ -0,0 +1,229 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +## See the usage() function for operating instructions. 
## + +import re +try: + # Python >=2.6 + from functools import reduce +except ImportError: + # Python <2.6 + pass +import sys +import operator + +_re_trail = re.compile('\((?P<txn_body>[a-z_]*), (?P<filename>[a-z_\-./]*), (?P<lineno>[0-9]*), (?P<txn>0|1)\): (?P<ops>.*)') +_re_table_op = re.compile('\(([a-z]*), ([a-z]*)\)') + +_seperator = '------------------------------------------------------------\n' + +def parse_trails_log(infile): + trails = [] + lineno = 0 + for line in infile.readlines(): + m = _re_trail.match(line) + + lineno = lineno + 1 + + if not m: + sys.stderr.write('Invalid input, line %u:\n%s\n' % (lineno, line)) + sys.exit(1) + + txn = int(m.group('txn')) + if not txn: + ### We're not interested in trails that don't use txns at this point. + continue + + txn_body = (m.group('txn_body'), m.group('filename'), + int(m.group('lineno'))) + trail = _re_table_op.findall(m.group('ops')) + trail.reverse() + + if not trail: + sys.stderr.write('Warning! Empty trail at line %u:\n%s' % (lineno, line)) + + trails.append((txn_body, trail)) + + return trails + + +def output_summary(trails, outfile): + ops = [] + for (txn_body, trail) in trails: + ops.append(len(trail)) + ops.sort() + + total_trails = len(ops) + total_ops = reduce(operator.add, ops) + max_ops = ops[-1] + median_ops = ops[total_trails / 2] + average_ops = float(total_ops) / total_trails + + outfile.write(_seperator) + outfile.write('Summary\n') + outfile.write(_seperator) + outfile.write('Total number of trails: %10i\n' % total_trails) + outfile.write('Total number of ops: %10i\n' % total_ops) + outfile.write('max ops/trail: %10i\n' % max_ops) + outfile.write('median ops/trail: %10i\n' % median_ops) + outfile.write('average ops/trail: %10.2f\n' % average_ops) + outfile.write('\n') + + +# custom compare function +def _freqtable_cmp(a_b, c_d): + (a, b) = a_b + (c, d) = c_d + c = cmp(d, b) + if not c: + c = cmp(a, c) + return c + +def list_frequencies(list): + """ + Given a list, return a list composed 
of (item, frequency) + in sorted order + """ + + counter = {} + for item in list: + counter[item] = counter.get(item, 0) + 1 + + frequencies = list(counter.items()) + frequencies.sort(_freqtable_cmp) + + return frequencies + + +def output_trail_length_frequencies(trails, outfile): + ops = [] + for (txn_body, trail) in trails: + ops.append(len(trail)) + + total_trails = len(ops) + frequencies = list_frequencies(ops) + + outfile.write(_seperator) + outfile.write('Trail length frequencies\n') + outfile.write(_seperator) + outfile.write('ops/trail frequency percentage\n') + for (r, f) in frequencies: + p = float(f) * 100 / total_trails + outfile.write('%4i %6i %5.2f\n' % (r, f, p)) + outfile.write('\n') + + +def output_trail(outfile, trail, column = 0): + ### Output the trail itself, in its own column + + if len(trail) == 0: + outfile.write('<empty>\n') + return + + line = str(trail[0]) + for op in trail[1:]: + op_str = str(op) + if len(line) + len(op_str) > 75 - column: + outfile.write('%s,\n' % line) + outfile.write(''.join(' ' * column)) + line = op_str + else: + line = line + ', ' + op_str + outfile.write('%s\n' % line) + + outfile.write('\n') + + +def output_trail_frequencies(trails, outfile): + + total_trails = len(trails) + + ttrails = [] + for (txn_body, trail) in trails: + ttrails.append((txn_body, tuple(trail))) + + frequencies = list_frequencies(ttrails) + + outfile.write(_seperator) + outfile.write('Trail frequencies\n') + outfile.write(_seperator) + outfile.write('frequency percentage ops/trail trail\n') + for (((txn_body, file, line), trail), f) in frequencies: + p = float(f) * 100 / total_trails + outfile.write('-- %s - %s:%u --\n' % (txn_body, file, line)) + outfile.write('%6i %5.2f %4i ' % (f, p, len(trail))) + output_trail(outfile, trail, 37) + + +def output_txn_body_frequencies(trails, outfile): + bodies = [] + for (txn_body, trail) in trails: + bodies.append(txn_body) + + total_trails = len(trails) + frequencies = list_frequencies(bodies) + + 
outfile.write(_seperator) + outfile.write('txn_body frequencies\n') + outfile.write(_seperator) + outfile.write('frequency percentage txn_body\n') + for ((txn_body, file, line), f) in frequencies: + p = float(f) * 100 / total_trails + outfile.write('%6i %5.2f %s - %s:%u\n' + % (f, p, txn_body, file, line)) + + +def usage(pgm): + w = sys.stderr.write + w("%s: a program for analyzing Subversion trail usage statistics.\n" % pgm) + w("\n") + w("Usage:\n") + w("\n") + w(" Compile Subversion with -DSVN_FS__TRAIL_DEBUG, which will cause it\n") + w(" it to print trail statistics to stderr. Save the stats to a file,\n") + w(" invoke %s on the file, and ponder the output.\n" % pgm) + w("\n") + + +if __name__ == '__main__': + if len(sys.argv) > 2: + sys.stderr.write("Error: too many arguments\n\n") + usage(sys.argv[0]) + sys.exit(1) + + if len(sys.argv) == 1: + infile = sys.stdin + else: + try: + infile = open(sys.argv[1]) + except (IOError): + sys.stderr.write("Error: unable to open '%s'\n\n" % sys.argv[1]) + usage(sys.argv[0]) + sys.exit(1) + + trails = parse_trails_log(infile) + + output_summary(trails, sys.stdout) + output_trail_length_frequencies(trails, sys.stdout) + output_trail_frequencies(trails, sys.stdout) + output_txn_body_frequencies(trails, sys.stdout) diff --git a/tools/dev/unix-build/Makefile.svn b/tools/dev/unix-build/Makefile.svn new file mode 100644 index 0000000..0c6615d --- /dev/null +++ b/tools/dev/unix-build/Makefile.svn @@ -0,0 +1,1382 @@ +# vim: noexpandtab tabstop=8 shiftwidth=8 syntax=make +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# WARNING: This may or may not work on your system. This Makefile is +# an example, rather than a ready-made universal solution. + +# **************************************************************** +# ** IMPORTANT NOTE FOR SVN COMMITTERS: READ THIS. ** +# ** ** +# **************************************************************** +# | This Makefile is used by the bb-openbsd buildbot | +# | (http://ci.apache.org/builders/bb-openbsd). Please check | +# | the bot's health after making changes to this file. | +# |______________________________________________________________| + +ENABLE_PYTHON_BINDINGS ?= yes +ENABLE_PERL_BINDINGS ?= yes +ENABLE_JAVA_BINDINGS ?= no # they don't build with thread-less APR... 
+USE_APR_ICONV ?= no # set to yes to use APR iconv instead of GNU iconv +PARALLEL ?= 1 +CLEANUP ?= 1 +USE_HTTPV1 ?= no +USE_AUTHZ_SHORT_CIRCUIT ?= no +RAMDISK ?= /ramdisk + +PWD = $(shell pwd) +UNAME = $(shell uname) + +TAG ?= none +ifeq ($(TAG),none) +BRANCH ?= trunk +else +BRANCH = $(TAG) +endif +WC ?= $(BRANCH) +BRANCH_MAJOR = $(shell echo $(BRANCH) | \ + sed -e 's/\([0-9]\)\.\([0-9]\)\.[x0-9].*$$/\1.\2/') +SVN_REL_WC = svn-$(WC) +SVN_WC = $(PWD)/$(SVN_REL_WC) +PREFIX = $(PWD)/prefix +SVN_PREFIX = $(PREFIX)/svn-$(WC) +DISTDIR = $(PWD)/distfiles +SRCDIR = $(PWD)/src +OBJDIR = $(PWD)/objdir + +BDB_MAJOR_VER = 4.7 +BDB_VER = $(BDB_MAJOR_VER).25 +APR_VER = 1.4.5 +APR_ICONV_VER = 1.2.1 +GNU_ICONV_VER = 1.13.1 +APR_UTIL_VER = 1.3.12 +HTTPD_VER = 2.2.19 +NEON_VER = 0.29.6 +SERF_VER = 0.7.x +SERF_OLD_VER = 0.3.1 +CYRUS_SASL_VER = 2.1.23 +SQLITE_VER = 3070603 +LIBMAGIC_VER = 5.07 +RUBY_VER = 1.8.7-p334 + +BDB_DIST = db-$(BDB_VER).tar.gz +APR_ICONV_DIST = apr-iconv-$(APR_ICONV_VER).tar.gz +GNU_ICONV_DIST = libiconv-$(GNU_ICONV_VER).tar.gz +NEON_DIST = neon-$(NEON_VER).tar.gz +SQLITE_DIST = sqlite-autoconf-$(SQLITE_VER).tar.gz +CYRUS_SASL_DIST = cyrus-sasl-$(CYRUS_SASL_VER).tar.gz +HTTPD_DIST = httpd-$(HTTPD_VER).tar.bz2 +LIBMAGIC_DIST = file-$(LIBMAGIC_VER).tar.gz +RUBY_DIST = ruby-$(RUBY_VER).tar.gz + +DISTFILES = $(DISTDIR)/$(NEON_DIST) \ + $(DISTDIR)/$(SERF_DIST) \ + $(DISTDIR)/$(SQLITE_DIST) \ + $(DISTDIR)/$(HTTPD_DIST) \ + $(DISTDIR)/$(APR_ICONV_DIST) \ + $(DISTDIR)/$(GNU_ICONV_DIST) \ + $(DISTDIR)/$(CYRUS_SASL_DIST) \ + $(DISTDIR)/$(LIBMAGIC_DIST) \ + $(DISTDIR)/$(RUBY_DIST) + +FETCH_CMD = wget -c + +SUBVERSION_REPOS_URL = https://svn.apache.org/repos/asf/subversion +BDB_URL = http://ftp2.de.freebsd.org/pub/FreeBSD/distfiles/bdb/$(BDB_DIST) +APR_URL = http://svn.apache.org/repos/asf/apr/apr +APR_ICONV_URL = http://www.apache.org/dist/apr/$(APR_ICONV_DIST) +GNU_ICONV_URL = http://ftp.gnu.org/pub/gnu/libiconv/$(GNU_ICONV_DIST) +APR_UTIL_URL = 
http://svn.apache.org/repos/asf/apr/apr-util +HTTPD_URL = http://archive.apache.org/dist/httpd/$(HTTPD_DIST) +NEON_URL = http://webdav.org/neon/$(NEON_DIST) +#SERF_URL = http://serf.googlecode.com/files/$(SERF_DIST) +SERF_URL = http://serf.googlecode.com/svn/branches/$(SERF_VER) +SERF_OLD_URL = http://serf.googlecode.com/svn/tags/$(SERF_OLD_VER) +SQLITE_URL = http://www.sqlite.org/$(SQLITE_DIST) +CYRUS_SASL_URL = ftp://ftp.andrew.cmu.edu/pub/cyrus-mail/$(CYRUS_SASL_DIST) +LIBMAGIC_URL = ftp://ftp.astron.com/pub/file/$(LIBMAGIC_DIST) +RUBY_URL = http://ftp.ruby-lang.org/pub/ruby/1.8/$(RUBY_DIST) + +BDB_SRCDIR = $(SRCDIR)/db-$(BDB_VER) +APR_SRCDIR = $(SRCDIR)/apr-$(APR_VER) +APR_ICONV_SRCDIR = $(SRCDIR)/apr-iconv-$(APR_ICONV_VER) +GNU_ICONV_SRCDIR = $(SRCDIR)/libiconv-$(GNU_ICONV_VER) +APR_UTIL_SRCDIR = $(SRCDIR)/apr-util-$(APR_UTIL_VER) +HTTPD_SRCDIR = $(SRCDIR)/httpd-$(HTTPD_VER) +NEON_SRCDIR = $(SRCDIR)/neon-$(NEON_VER) +SERF_SRCDIR = $(SRCDIR)/serf-$(SERF_VER) +SERF_OLD_SRCDIR = $(SRCDIR)/serf-$(SERF_OLD_VER) +SQLITE_SRCDIR = $(SRCDIR)/sqlite-autoconf-$(SQLITE_VER) +CYRUS_SASL_SRCDIR = $(SRCDIR)/cyrus-sasl-$(CYRUS_SASL_VER) +LIBMAGIC_SRCDIR = $(SRCDIR)/file-$(LIBMAGIC_VER) +RUBY_SRCDIR = $(SRCDIR)/ruby-$(RUBY_VER) +SVN_SRCDIR = $(SVN_WC) + +BDB_OBJDIR = $(OBJDIR)/db-$(BDB_VER) +APR_OBJDIR = $(OBJDIR)/apr-$(APR_VER) +APR_ICONV_OBJDIR = $(OBJDIR)/apr-iconv-$(APR_ICONV_VER) +GNU_ICONV_OBJDIR = $(OBJDIR)/libiconv-$(GNU_ICONV_VER) +APR_UTIL_OBJDIR = $(OBJDIR)/apr-util-$(APR_UTIL_VER) +HTTPD_OBJDIR = $(OBJDIR)/httpd-$(HTTPD_VER) +NEON_OBJDIR = $(OBJDIR)/neon-$(NEON_VER) +SERF_OBJDIR = $(OBJDIR)/serf-$(SERF_VER) +SERF_OLD_OBJDIR = $(OBJDIR)/serf-$(SERF_OLD_VER) +SQLITE_OBJDIR = $(OBJDIR)/sqlite-$(SQLITE_VER) +CYRUS_SASL_OBJDIR = $(OBJDIR)/cyrus-sasl-$(CYRUS_SASL_VER) +LIBMAGIC_OBJDIR = $(OBJDIR)/file-$(LIBMAGIC_VER) +RUBY_OBJDIR = $(OBJDIR)/ruby-$(RUBY_VER) +SVN_OBJDIR = $(OBJDIR)/$(SVN_REL_WC) + +# Tweak this for out-of-tree builds. 
Note that running individual +# tests in the test suite won't work conveniently with out-of-tree +# builds! +svn_builddir ?=$(SVN_WC) + +ifdef PROFILE +PROFILE_CFLAGS=-pg +endif + +####################################################################### +# Main targets. +####################################################################### + +.PHONY: all reset clean nuke + +all: dirs-create bdb-install apr-install iconv-install apr-util-install \ + httpd-install neon-install serf-install serf-old-install \ + sqlite-install cyrus-sasl-install libmagic-install \ + ruby-install svn-install svn-bindings-install + +# Use these to start a build from the beginning. +reset: dirs-reset bdb-reset apr-reset iconv-reset apr-util-reset \ + httpd-reset neon-reset serf-reset serf-old-reset sqlite-reset \ + cyrus-sasl-reset libmagic-reset ruby-reset svn-reset + +# Use to save disk space. +clean: bdb-clean apr-clean iconv-clean apr-util-clean httpd-clean \ + neon-clean serf-clean serf-old-clean sqlite-clean cyrus-sasl-clean \ + libmagic-clean ruby-clean svn-clean + +# Nukes everything (including installed binaries!) +# Use this to start ALL OVER AGAIN! Use with caution! +nuke: + @echo + @echo "I will now remove the following directories PERMANENTLY:" + @echo + @echo " $(SRCDIR)" + @echo " $(OBJDIR)" + @echo " $(PREFIX)" + @echo + @echo -n 'Do you want me to continue? ([no]/yes): ' + @read ANSWER ; \ + case $$ANSWER in \ + yes) echo "You said $$ANSWER. 
I will continue."; \ + echo rm -rf $(SRCDIR) $(OBJDIR) $(PREFIX); \ + rm -rf $(SRCDIR) $(OBJDIR) $(PREFIX); \ + $(MAKE) reset; \ + ;; \ + "") echo "You said no."; \ + ;; \ + *) echo "You said $$ANSWER."; \ + ;; \ + esac + +####################################################################### +# directories +####################################################################### + +dirs-create: $(PWD)/.dirs-created +dirs-reset: + rm -f $(PWD)/.dirs-created + +$(PWD)/.dirs-created: + $(foreach d, $(PREFIX) $(DISTDIR) $(SRCDIR) $(OBJDIR), \ + [ -d $(d) ] || mkdir -p $(d);) + touch $@ + +####################################################################### +# bdb +####################################################################### + +bdb-retrieve: $(BDB_OBJDIR)/.retrieved +bdb-configure: $(BDB_OBJDIR)/.configured +bdb-compile: $(BDB_OBJDIR)/.compiled +bdb-install: $(BDB_OBJDIR)/.installed +bdb-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(BDB_OBJDIR)/$(f);) + +bdb-clean: + -(cd $(BDB_SRCDIR)/build_unix/ && make clean) + +# fetch distfile for bdb +$(DISTDIR)/$(BDB_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(BDB_URL) + +# retrieve bdb +$(BDB_OBJDIR)/.retrieved: $(DISTDIR)/$(BDB_DIST) + [ -d $(BDB_OBJDIR) ] || mkdir -p $(BDB_OBJDIR) + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(BDB_DIST) + touch $@ + +# configure bdb +$(BDB_OBJDIR)/.configured: $(BDB_OBJDIR)/.retrieved + cd $(BDB_SRCDIR)/build_unix \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \ + ../dist/configure \ + --prefix=$(PREFIX)/bdb \ + --enable-debug + touch $@ + +# compile bdb +$(BDB_OBJDIR)/.compiled: $(BDB_OBJDIR)/.configured + (cd $(BDB_SRCDIR)/build_unix && make) + touch $@ + +# install bdb +$(BDB_OBJDIR)/.installed: $(BDB_OBJDIR)/.compiled + (cd $(BDB_SRCDIR)/build_unix && make install) + touch $@ + +####################################################################### +# apr +####################################################################### + 
+apr-retrieve: $(APR_OBJDIR)/.retrieved +apr-configure: $(APR_OBJDIR)/.configured +apr-compile: $(APR_OBJDIR)/.compiled +apr-install: $(APR_OBJDIR)/.installed +apr-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(APR_OBJDIR)/$(f);) + +apr-clean: + -(cd $(APR_OBJDIR) && make clean) + +# retrieve apr if not present yet +$(APR_OBJDIR)/.retrieved: + [ -d $(APR_OBJDIR) ] || mkdir -p $(APR_OBJDIR) + if [ ! -d $(APR_SRCDIR) ]; then \ + svn export $(APR_URL)/tags/$(APR_VER)/ $(APR_SRCDIR); \ + fi + touch $@ + +ifdef THREADING +THREADS_FLAG=--enable-threads +else +THREADS_FLAG=--disable-threads +endif + +# configure apr +$(APR_OBJDIR)/.configured: $(APR_OBJDIR)/.retrieved + cp $(APR_SRCDIR)/build/apr_hints.m4 \ + $(APR_SRCDIR)/build/apr_hints.m4.orig + cat $(APR_SRCDIR)/build/apr_hints.m4.orig \ + | sed -e '/^.*APR_ADDTO(CPPFLAGS, \[-D_POSIX_THREADS\]).*$$/d' \ + > $(APR_SRCDIR)/build/apr_hints.m4 + cd $(APR_SRCDIR) && ./buildconf + cd $(APR_OBJDIR) \ + && env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" GREP="`which grep`" \ + $(APR_SRCDIR)/configure \ + --prefix=$(PREFIX)/apr \ + --enable-maintainer-mode \ + $(THREADS_FLAG) + touch $@ + +# compile apr +$(APR_OBJDIR)/.compiled: $(APR_OBJDIR)/.configured + (cd $(APR_OBJDIR) && make) + touch $@ + +# install apr +$(APR_OBJDIR)/.installed: $(APR_OBJDIR)/.compiled + (cd $(APR_OBJDIR) && make install) + touch $@ + +####################################################################### +# apr-iconv +####################################################################### + +apr-iconv-retrieve: $(APR_ICONV_OBJDIR)/.retrieved +apr-iconv-configure: $(APR_ICONV_OBJDIR)/.configured +apr-iconv-compile: $(APR_ICONV_OBJDIR)/.compiled +apr-iconv-install: $(APR_ICONV_OBJDIR)/.installed +apr-iconv-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(APR_ICONV_OBJDIR)/$(f);) + +apr-iconv-clean: + -(cd $(APR_ICONV_OBJDIR) && make clean) + +# fetch distfile for apr-iconv 
+$(DISTDIR)/$(APR_ICONV_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(APR_ICONV_URL) + +# retrieve apr-iconv +$(APR_ICONV_OBJDIR)/.retrieved: $(DISTDIR)/$(APR_ICONV_DIST) + [ -d $(APR_ICONV_OBJDIR) ] || mkdir -p $(APR_ICONV_OBJDIR) + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(APR_ICONV_DIST) + touch $@ + +# configure apr-iconv +$(APR_ICONV_OBJDIR)/.configured: $(APR_ICONV_OBJDIR)/.retrieved + cd $(APR_ICONV_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \ + $(APR_ICONV_SRCDIR)/configure \ + --prefix=$(PREFIX)/apr \ + --with-apr=$(PREFIX)/apr + touch $@ + +# compile apr-iconv +$(APR_ICONV_OBJDIR)/.compiled: $(APR_ICONV_OBJDIR)/.configured + (cd $(APR_ICONV_OBJDIR) \ + && make CPPFLAGS="-D_OSD_POSIX" CFLAGS="-g -O0 $(PROFILE_CFLAGS)") + touch $@ + +# install apr-iconv +$(APR_ICONV_OBJDIR)/.installed: $(APR_ICONV_OBJDIR)/.compiled + (cd $(APR_ICONV_OBJDIR) && make install) + touch $@ + +####################################################################### +# gnu-iconv +####################################################################### + +gnu-iconv-retrieve: $(GNU_ICONV_OBJDIR)/.retrieved +gnu-iconv-configure: $(GNU_ICONV_OBJDIR)/.configured +gnu-iconv-compile: $(GNU_ICONV_OBJDIR)/.compiled +gnu-iconv-install: $(GNU_ICONV_OBJDIR)/.installed +gnu-iconv-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(GNU_ICONV_OBJDIR)/$(f);) + +gnu-iconv-clean: + -(cd $(GNU_ICONV_OBJDIR) && make clean) + rm -f $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff + rm -f $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff + +# fetch distfile for gnu-iconv +$(DISTDIR)/$(GNU_ICONV_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(GNU_ICONV_URL) + +$(GNU_ICONV_OBJDIR)/lib_encodings.def.diff: + mkdir -p $(dir $@) + echo > $@.tmp '--- lib/encodings.def.orig Wed Oct 24 23:41:41 2007' + echo >>$@.tmp '+++ lib/encodings.def Wed Oct 24 23:43:47 2007' + echo >>$@.tmp '@@ -37,6 +37,7 @@' + echo >>$@.tmp ' ' + echo >>$@.tmp ' ' + echo >>$@.tmp ' DEFENCODING(( "US-ASCII", /* IANA 
*/' + echo >>$@.tmp '+ "646",' + echo >>$@.tmp ' "ASCII", /* IANA, JDK 1.1 */' + echo >>$@.tmp ' "ISO646-US", /* IANA */' + echo >>$@.tmp ' "ISO_646.IRV:1991", /* IANA */' + mv -f $@.tmp $@ + +$(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff: + mkdir -p $(dir $@) + echo > $@.tmp '--- lib/aliases.gperf.orig Wed Oct 24 23:41:32 2007' + echo >>$@.tmp '+++ lib/aliases.gperf Wed Oct 24 23:47:38 2007' + echo >>$@.tmp '@@ -10,6 +10,7 @@ struct alias { int name; unsigned int encoding_index; ' + echo >>$@.tmp ' %pic' + echo >>$@.tmp ' %%' + echo >>$@.tmp ' US-ASCII, ei_ascii' + echo >>$@.tmp '+646, ei_ascii' + echo >>$@.tmp ' ASCII, ei_ascii' + echo >>$@.tmp ' ISO646-US, ei_ascii' + echo >>$@.tmp ' ISO_646.IRV:1991, ei_ascii' + mv -f $@.tmp $@ + +# retrieve gnu-iconv +# Add 646 as an alias for ASCII to fix prop_test 22 on OpenBSD +$(GNU_ICONV_OBJDIR)/.retrieved: $(DISTDIR)/$(GNU_ICONV_DIST) \ + $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff \ + $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(GNU_ICONV_DIST) + cd $(SRCDIR)/libiconv-$(GNU_ICONV_VER) && \ + patch -p0 < $(GNU_ICONV_OBJDIR)/lib_encodings.def.diff && \ + patch -p0 < $(GNU_ICONV_OBJDIR)/lib_aliases.gperf.diff + touch $@ + +# configure gnu-iconv +$(GNU_ICONV_OBJDIR)/.configured: $(GNU_ICONV_OBJDIR)/.retrieved + cd $(SRCDIR)/libiconv-${GNU_ICONV_VER} && \ + ${MAKE} -f Makefile.devel lib/aliases.h + cd $(GNU_ICONV_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\ + $(GNU_ICONV_SRCDIR)/configure \ + --prefix=$(PREFIX)/iconv \ + --enable-extra-encodings + touch $@ + +# compile gnu-iconv +$(GNU_ICONV_OBJDIR)/.compiled: $(GNU_ICONV_OBJDIR)/.configured + (cd $(GNU_ICONV_OBJDIR) && make) + touch $@ + +# install gnu-iconv +$(GNU_ICONV_OBJDIR)/.installed: $(GNU_ICONV_OBJDIR)/.compiled + (cd $(GNU_ICONV_OBJDIR) && make install) + touch $@ + +####################################################################### +# iconv 
+####################################################################### + +.PHONY: iconv-install iconv-reset iconv-clean + +ifeq ($(USE_APR_ICONV),yes) +iconv-install: apr-iconv-install +iconv-reset: apr-iconv-reset +iconv-clean: apr-iconv-clean +else +iconv-install: gnu-iconv-install +iconv-reset: gnu-iconv-reset +iconv-clean: gnu-iconv-clean +endif + +####################################################################### +# apr-util +####################################################################### + +apr-util-retrieve: $(APR_UTIL_OBJDIR)/.retrieved +apr-util-configure: $(APR_UTIL_OBJDIR)/.configured +apr-util-compile: $(APR_UTIL_OBJDIR)/.compiled +apr-util-install: $(APR_UTIL_OBJDIR)/.installed +apr-util-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(APR_UTIL_OBJDIR)/$(f);) + +apr-util-clean: + -(cd $(APR_UTIL_OBJDIR) && make clean) + + +# retrieve apr-util if not present yet +$(APR_UTIL_OBJDIR)/.retrieved: + [ -d $(APR_UTIL_OBJDIR) ] || mkdir -p $(APR_UTIL_OBJDIR) + if [ ! 
-d $(APR_UTIL_SRCDIR) ]; then \ + svn export $(APR_UTIL_URL)/tags/$(APR_UTIL_VER)/ \ + $(APR_UTIL_SRCDIR); \ + fi + touch $@ + +ifeq ($(USE_APR_ICONV),yes) +ICONV_FLAG=--with-iconv=$(PREFIX)/apr +else +ICONV_FLAG=--with-iconv=$(PREFIX)/iconv +endif + +# configure apr-util +$(APR_UTIL_OBJDIR)/.configured: $(APR_UTIL_OBJDIR)/.retrieved + cd $(APR_UTIL_SRCDIR) && ./buildconf --with-apr=$(APR_SRCDIR) + cd $(APR_UTIL_OBJDIR) \ + && env LD_LIBRARY_PATH=$(PREFIX)/bdb/lib \ + CFLAGS="-O0 -g $(PROFILE_CFLAGS)" \ + GREP="`which grep`" \ + $(APR_UTIL_SRCDIR)/configure \ + --prefix=$(PREFIX)/apr \ + --enable-maintainer-mode \ + --with-apr=$(PREFIX)/apr \ + --with-berkeley-db=$(PREFIX)/bdb \ + $(ICONV_FLAG) + touch $@ + +# compile apr-util +$(APR_UTIL_OBJDIR)/.compiled: $(APR_UTIL_OBJDIR)/.configured + (cd $(APR_UTIL_OBJDIR) && make) + touch $@ + +# install apr-util +$(APR_UTIL_OBJDIR)/.installed: $(APR_UTIL_OBJDIR)/.compiled + (cd $(APR_UTIL_OBJDIR) && make install) + touch $@ + +####################################################################### +# httpd +####################################################################### + +HTTPD_CONF= $(PREFIX)/httpd/conf/httpd-$(SVN_REL_WC).conf +httpd-retrieve: $(HTTPD_OBJDIR)/.retrieved +httpd-configure: $(HTTPD_OBJDIR)/.configured +httpd-compile: $(HTTPD_OBJDIR)/.compiled +httpd-install: $(HTTPD_OBJDIR)/.installed $(HTTPD_CONF) +httpd-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(HTTPD_OBJDIR)/$(f);) + +httpd-clean: + -(cd $(HTTPD_OBJDIR) && make clean) + +# fetch distfile for httpd +$(DISTDIR)/$(HTTPD_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(HTTPD_URL) + +# retrieve httpd +$(HTTPD_OBJDIR)/.retrieved: $(DISTDIR)/$(HTTPD_DIST) + [ -d $(HTTPD_OBJDIR) ] || mkdir -p $(HTTPD_OBJDIR) + tar -C $(SRCDIR) -jxf $(DISTDIR)/$(HTTPD_DIST) + touch $@ + +# configure httpd +$(HTTPD_OBJDIR)/.configured: $(HTTPD_OBJDIR)/.retrieved + cd $(HTTPD_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \ 
+ $(HTTPD_SRCDIR)/configure \ + --prefix=$(PREFIX)/httpd \ + --enable-maintainer-mode \ + --enable-ssl \ + --enable-dav \ + --with-mpm=prefork \ + --with-apr="$(PREFIX)/apr" \ + --with-apr-util="$(PREFIX)/apr" + touch $@ + +# compile httpd +$(HTTPD_OBJDIR)/.compiled: $(HTTPD_OBJDIR)/.configured + (cd $(HTTPD_OBJDIR) && make) + touch $@ + +# install httpd +$(HTTPD_OBJDIR)/.installed: $(HTTPD_OBJDIR)/.compiled + (cd $(HTTPD_OBJDIR) && make install) + touch $@ + +# create a httpd.conf for mod_dav_svn +$(HTTPD_CONF): + mkdir -p $(dir $@) + echo > $@.tmp '# httpd config for use with mod_dav_svn' + echo >>$@.tmp 'ServerRoot "$(PREFIX)/httpd"' + echo >>$@.tmp 'Listen localhost:8080' + echo >>$@.tmp 'LoadModule dav_svn_module modules/svn-$(WC)/mod_dav_svn.so' + echo >>$@.tmp 'LoadModule authz_svn_module modules/svn-$(WC)/mod_authz_svn.so' + echo >>$@.tmp 'DocumentRoot "$(PREFIX)/httpd/htdocs"' + echo >>$@.tmp '<Directory />' + echo >>$@.tmp ' Options FollowSymLinks' + echo >>$@.tmp ' AllowOverride None' + echo >>$@.tmp ' Order deny,allow' + echo >>$@.tmp ' Deny from all' + echo >>$@.tmp '</Directory>' + echo >>$@.tmp '<Location /repos>' + echo >>$@.tmp ' DAV svn' + echo >>$@.tmp ' SVNPath /tmp/svn-sandbox/repos' + echo >>$@.tmp ' Allow from localhost' + echo >>$@.tmp '</Location>' + mv -f $@.tmp $@ + +####################################################################### +# neon +####################################################################### + +neon-retrieve: $(NEON_OBJDIR)/.retrieved +neon-configure: $(NEON_OBJDIR)/.configured +neon-compile: $(NEON_OBJDIR)/.compiled +neon-install: $(NEON_OBJDIR)/.installed +neon-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(NEON_OBJDIR)/$(f);) + +neon-clean: + -(cd $(NEON_OBJDIR) && make clean) + +# fetch distfile for neon +$(DISTDIR)/$(NEON_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(NEON_URL) + +# retrieve neon +$(NEON_OBJDIR)/.retrieved: $(DISTDIR)/$(NEON_DIST) + [ -d $(NEON_OBJDIR) ] || mkdir -p 
$(NEON_OBJDIR) + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(NEON_DIST) + touch $@ + +# OpenBSD does not have krb5-config in PATH, but the neon port has +# a suitable replacement. +ifeq ($(UNAME),OpenBSD) +KRB5_CONFIG_PATH=/usr/ports/net/neon/files +endif + +# configure neon +$(NEON_OBJDIR)/.configured: $(NEON_OBJDIR)/.retrieved + cd $(NEON_SRCDIR) && ./autogen.sh + if [ -n "$(KRB5_CONFIG_PATH)" ] && [ -d "$(KRB5_CONFIG_PATH)" ]; then \ + cp $(KRB5_CONFIG_PATH)/krb5-config $(NEON_OBJDIR); \ + chmod +x $(NEON_OBJDIR)/krb5-config; \ + fi + cd $(NEON_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \ + $(NEON_SRCDIR)/configure \ + PATH=$(NEON_OBJDIR):$$PATH \ + --prefix=$(PREFIX)/neon \ + --with-ssl \ + --enable-shared \ + --without-libproxy + touch $@ + +# compile neon +$(NEON_OBJDIR)/.compiled: $(NEON_OBJDIR)/.configured + (cd $(NEON_OBJDIR) && make) + touch $@ + +# install neon +$(NEON_OBJDIR)/.installed: $(NEON_OBJDIR)/.compiled + (cd $(NEON_OBJDIR) && make install) + touch $@ + + +####################################################################### +# serf +####################################################################### + +serf-retrieve: $(SERF_OBJDIR)/.retrieved +serf-configure: $(SERF_OBJDIR)/.configured +serf-compile: $(SERF_OBJDIR)/.compiled +serf-install: $(SERF_OBJDIR)/.installed +serf-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(SERF_OBJDIR)/$(f);) + +serf-clean: + -(cd $(SERF_SRCDIR) && ./serfmake clean) + + +# fetch distfile for serf +#$(DISTDIR)/$(SERF_DIST): +# cd $(DISTDIR) && $(FETCH_CMD) $(SERF_URL) +# +# retrieve serf +#$(SERF_OBJDIR)/.retrieved: $(DISTDIR)/$(SERF_DIST) +# [ -d $(SERF_OBJDIR) ] || mkdir -p $(SERF_OBJDIR) +# tar -C $(SRCDIR) -zxf $(DISTDIR)/$(SERF_DIST) +# cd $(SRCDIR)/serf-$(SERF_VER) && \ +# svn diff -c1394 http://serf.googlecode.com/svn | patch -p2 +# touch $@ + +# retrieve serf if not present yet +$(SERF_OBJDIR)/.retrieved: + [ -d $(SERF_OBJDIR) ] || mkdir -p $(SERF_OBJDIR) 
+ if [ ! -d $(SERF_SRCDIR) ]; then \ + svn export $(SERF_URL) $(SERF_SRCDIR); \ + fi + touch $@ + +# compile serf (serf won't compile outside its source tree) +$(SERF_OBJDIR)/.compiled: $(SERF_OBJDIR)/.retrieved + cd $(SERF_SRCDIR) && \ + env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" \ + ./serfmake --with-apr=$(PREFIX)/apr \ + --prefix=$(PREFIX)/serf \ + build + touch $@ + +# install serf +$(SERF_OBJDIR)/.installed: $(SERF_OBJDIR)/.compiled + cd $(SERF_SRCDIR) && \ + ./serfmake --with-apr=$(PREFIX)/apr \ + --with-apr-util=$(PREFIX)/apr \ + --prefix=$(PREFIX)/serf \ + install + touch $@ + +####################################################################### +# serf-old (compatible with Subversion 1.5) +####################################################################### + +serf-old-retrieve: $(SERF_OLD_OBJDIR)/.retrieved +serf-old-configure: $(SERF_OLD_OBJDIR)/.configured +serf-old-compile: $(SERF_OLD_OBJDIR)/.compiled +serf-old-install: $(SERF_OLD_OBJDIR)/.installed +serf-old-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(SERF_OLD_OBJDIR)/$(f);) + +serf-old-clean: + -(cd $(SERF_OLD_SRCDIR) && ./serfmake clean) + +# retrieve serf if not present yet +$(SERF_OLD_OBJDIR)/.retrieved: + [ -d $(SERF_OLD_OBJDIR) ] || mkdir -p $(SERF_OLD_OBJDIR) + if [ ! 
-d $(SERF_OLD_SRCDIR) ]; then \ + svn export $(SERF_OLD_URL) $(SERF_OLD_SRCDIR); \ + fi + touch $@ + +# compile serf (serf won't compile outside its source tree) +$(SERF_OLD_OBJDIR)/.compiled: $(SERF_OLD_OBJDIR)/.retrieved + cd $(SERF_OLD_SRCDIR) && \ + env CFLAGS="-O0 -g $(PROFILE_CFLAGS)" \ + ./serfmake --with-apr=$(PREFIX)/apr \ + --prefix=$(PREFIX)/serf-old \ + build + touch $@ + +# install serf +$(SERF_OLD_OBJDIR)/.installed: $(SERF_OLD_OBJDIR)/.compiled + cd $(SERF_OLD_SRCDIR) && \ + ./serfmake --with-apr=$(PREFIX)/apr \ + --with-apr-util=$(PREFIX)/apr \ + --prefix=$(PREFIX)/serf-old \ + install + touch $@ + + +####################################################################### +# sqlite +####################################################################### + +sqlite-retrieve: $(SQLITE_OBJDIR)/.retrieved +sqlite-configure: $(SQLITE_OBJDIR)/.configured +sqlite-compile: $(SQLITE_OBJDIR)/.compiled +sqlite-install: $(SQLITE_OBJDIR)/.installed +sqlite-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(SQLITE_OBJDIR)/$(f);) + +sqlite-clean: + -cd $(SQLITE_OBJDIR) && make clean + +# fetch distfile for sqlite +$(DISTDIR)/$(SQLITE_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(SQLITE_URL) + +# retrieve sqlite +$(SQLITE_OBJDIR)/.retrieved: $(DISTDIR)/$(SQLITE_DIST) + [ -d $(SQLITE_OBJDIR) ] || mkdir -p $(SQLITE_OBJDIR) + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(SQLITE_DIST) + touch $@ + +ifdef THREADING +THREADSAFE_FLAG=--enable-threadsafe +else +THREADSAFE_FLAG=--disable-threadsafe +endif + +# configure sqlite +$(SQLITE_OBJDIR)/.configured: $(SQLITE_OBJDIR)/.retrieved + cd $(SQLITE_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`" \ + $(SQLITE_SRCDIR)/configure \ + --prefix=$(PREFIX)/sqlite \ + $(THREADSAFE_FLAG) + touch $@ + +# compile sqlite +$(SQLITE_OBJDIR)/.compiled: $(SQLITE_OBJDIR)/.configured + (cd $(SQLITE_OBJDIR) && make) + touch $@ + +# install sqlite +$(SQLITE_OBJDIR)/.installed: $(SQLITE_OBJDIR)/.compiled + (cd 
$(SQLITE_OBJDIR) && make install) + touch $@ + +####################################################################### +# cyrus-sasl +####################################################################### + +cyrus-sasl-retrieve: $(CYRUS_SASL_OBJDIR)/.retrieved +cyrus-sasl-configure: $(CYRUS_SASL_OBJDIR)/.configured +cyrus-sasl-compile: $(CYRUS_SASL_OBJDIR)/.compiled +cyrus-sasl-install: $(CYRUS_SASL_OBJDIR)/.installed +cyrus-sasl-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(CYRUS_SASL_OBJDIR)/$(f);) + +cyrus-sasl-clean: + -(cd $(CYRUS_SASL_OBJDIR) && make distclean) + +# fetch distfile for cyrus-sasl +$(DISTDIR)/$(CYRUS_SASL_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(CYRUS_SASL_URL) + +# retrieve cyrus-sasl +$(CYRUS_SASL_OBJDIR)/.retrieved: $(DISTDIR)/$(CYRUS_SASL_DIST) + [ -d $(CYRUS_SASL_OBJDIR) ] || mkdir -p $(CYRUS_SASL_OBJDIR) + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(CYRUS_SASL_DIST) + # fixes build on Debian: + sed 's/#elif WITH_DES/#elif defined(WITH_DES)/' \ + < $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c \ + > $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c.patched + mv $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c.patched \ + $(CYRUS_SASL_SRCDIR)/plugins/digestmd5.c +ifeq ($(UNAME),OpenBSD) + # Fixes GSSAPI support on OpenBSD, which hasn't got libroken: + for f in `grep -l -R -- -lroken $(CYRUS_SASL_SRCDIR)`; do \ + sed -e 's/-lroken//g' < $$f > $$f.tmp && \ + mv $$f.tmp $$f; \ + done + chmod +x $(CYRUS_SASL_SRCDIR)/configure +endif + touch $@ + +# configure cyrus-sasl +$(CYRUS_SASL_OBJDIR)/.configured: $(CYRUS_SASL_OBJDIR)/.retrieved + cd $(CYRUS_SASL_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" \ + CPPFLAGS="-I/usr/include/kerberosV" \ + GREP="`which grep`" \ + $(CYRUS_SASL_SRCDIR)/configure \ + --with-dbpath=$(PREFIX)/cyrus-sasl/etc/sasldb2 \ + --with-plugindir=$(PREFIX)/cyrus-sasl/lib/sasl2 \ + --with-configdir=$(PREFIX)/cyrus-sasl/lib/sasl2 \ + --with-bdb-libdir=$(PREFIX)/bdb/lib \ + --with-bdb-incdir=$(PREFIX)/bdb/include \ + 
--with-dblib=berkeley \ + --with-sqlite=$(PREFIX)/sqlite \ + --prefix=$(PREFIX)/cyrus-sasl + touch $@ + +# compile cyrus-sasl +$(CYRUS_SASL_OBJDIR)/.compiled: $(CYRUS_SASL_OBJDIR)/.configured + (cd $(CYRUS_SASL_OBJDIR) && make) + touch $@ + +# install cyrus-sasl +$(CYRUS_SASL_OBJDIR)/.installed: $(CYRUS_SASL_OBJDIR)/.compiled + (cd $(CYRUS_SASL_OBJDIR) && make install) + touch $@ + +####################################################################### +# libmagic +####################################################################### + +libmagic-retrieve: $(LIBMAGIC_OBJDIR)/.retrieved +libmagic-configure: $(LIBMAGIC_OBJDIR)/.configured +libmagic-compile: $(LIBMAGIC_OBJDIR)/.compiled +libmagic-install: $(LIBMAGIC_OBJDIR)/.installed +libmagic-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(LIBMAGIC_OBJDIR)/$(f);) + +libmagic-clean: + -(cd $(LIBMAGIC_OBJDIR) && make distclean) + +# fetch distfile for libmagic +$(DISTDIR)/$(LIBMAGIC_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(LIBMAGIC_URL) + +# retrieve libmagic +$(LIBMAGIC_OBJDIR)/.retrieved: $(DISTDIR)/$(LIBMAGIC_DIST) + [ -d $(LIBMAGIC_OBJDIR) ] || mkdir -p $(LIBMAGIC_OBJDIR) + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(LIBMAGIC_DIST) + touch $@ + +# configure libmagic +$(LIBMAGIC_OBJDIR)/.configured: $(LIBMAGIC_OBJDIR)/.retrieved + cd $(LIBMAGIC_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\ + $(LIBMAGIC_SRCDIR)/configure \ + --enable-fsect-man5 \ + --prefix=$(PREFIX)/libmagic + touch $@ + +# compile libmagic +$(LIBMAGIC_OBJDIR)/.compiled: $(LIBMAGIC_OBJDIR)/.configured + (cd $(LIBMAGIC_OBJDIR) && make) + touch $@ + +# install libmagic +$(LIBMAGIC_OBJDIR)/.installed: $(LIBMAGIC_OBJDIR)/.compiled + (cd $(LIBMAGIC_OBJDIR) && make install) + touch $@ + +####################################################################### +# ruby +####################################################################### + +ruby-retrieve: $(RUBY_OBJDIR)/.retrieved +ruby-configure: 
$(RUBY_OBJDIR)/.configured +ruby-compile: $(RUBY_OBJDIR)/.compiled +ruby-install: $(RUBY_OBJDIR)/.installed +ruby-reset: + $(foreach f, .retrieved .configured .compiled .installed, \ + rm -f $(RUBY_OBJDIR)/$(f);) + +ruby-clean: + -(cd $(RUBY_OBJDIR) && make distclean) + +# fetch distfile for ruby +$(DISTDIR)/$(RUBY_DIST): + cd $(DISTDIR) && $(FETCH_CMD) $(RUBY_URL) + +# retrieve ruby +# +$(RUBY_OBJDIR)/.retrieved: $(DISTDIR)/$(RUBY_DIST) + [ -d $(RUBY_OBJDIR) ] || mkdir -p $(RUBY_OBJDIR) + tar -C $(SRCDIR) -zxf $(DISTDIR)/$(RUBY_DIST) + touch $@ + +# configure ruby +$(RUBY_OBJDIR)/.configured: $(RUBY_OBJDIR)/.retrieved + cd $(RUBY_OBJDIR) \ + && env CFLAGS="-g $(PROFILE_CFLAGS)" GREP="`which grep`"\ + $(RUBY_SRCDIR)/configure \ + --prefix=$(PREFIX)/ruby \ + --enable-shared \ + --disable-pthread + touch $@ + +# compile ruby +$(RUBY_OBJDIR)/.compiled: $(RUBY_OBJDIR)/.configured + (cd $(RUBY_OBJDIR) && make) + touch $@ + +# install ruby and the test-unit gem needed to run SVN bindings tests +$(RUBY_OBJDIR)/.installed: $(RUBY_OBJDIR)/.compiled + (cd $(RUBY_OBJDIR) && make install) + touch $@ + +####################################################################### +# svn +####################################################################### + +.PHONY: svn-configure svn-compile svn-install svn-bindings-compile \ + svn-bindings-install svn-bindings-reset svn-clean + +svn-install-all: svn-install svn-bindings-install + +svn-configure: $(SVN_OBJDIR)/.configured +svn-compile: $(SVN_OBJDIR)/.compiled +svn-bindings-compile: $(SVN_OBJDIR)/.bindings-compiled +svn-install: $(SVN_OBJDIR)/.installed +svn-bindings-install: $(SVN_OBJDIR)/.bindings-installed +svn-bindings-reset: + $(foreach f, .bindings-compiled .bindings-installed, \ + rm -f $(SVN_OBJDIR)/$(f);) +svn-reset: svn-bindings-reset + $(foreach f, .retrieved .configured .compiled .installed \ + .bindings-compiled .bindings-installed, \ + rm -f $(SVN_OBJDIR)/$(f);) + +svn-clean: + -(cd $(svn_builddir) && make distclean) 
+ +# retrieve svn if not present yet +$(SVN_OBJDIR)/.retrieved: + [ -d $(SVN_OBJDIR) ] || mkdir -p $(SVN_OBJDIR) + if [ "$(TAG)" != "none" ]; then \ + branchdir="tags/$(TAG)"; \ + co="export"; \ + elif [ $(BRANCH) != trunk ]; then \ + branchdir="branches/$(BRANCH)"; \ + co="co"; \ + else \ + branchdir="$(BRANCH)"; \ + co="co"; \ + fi; \ + if [ ! -d $(SVN_WC) ] && [ ! -h $(SVN_WC) ]; then \ + svn $${co} $(SUBVERSION_REPOS_URL)/$${branchdir} \ + $(SVN_WC); \ + fi + touch $@ + +ifeq ($(BRANCH_MAJOR),1.6) +BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER) +SERF_FLAG=--with-serf="$(PREFIX)/serf" +MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so +W_NO_SYSTEM_HEADERS=-Wno-system-headers +else ifeq ($(BRANCH_MAJOR),1.5) +BDB_FLAG=$(PREFIX)/bdb +SERF_FLAG=--with-serf="$(PREFIX)/serf-old" +MOD_DAV_SVN=modules/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/mod_authz_svn.so +DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check +W_NO_SYSTEM_HEADERS=-Wno-system-headers +else ifeq ($(BRANCH_MAJOR),1.4) +BDB_FLAG=$(PREFIX)/bdb +MOD_DAV_SVN=modules/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/mod_authz_svn.so +DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check +W_NO_SYSTEM_HEADERS=-Wno-system-headers +else ifeq ($(BRANCH_MAJOR),1.3) +BDB_FLAG=$(PREFIX)/bdb +MOD_DAV_SVN=modules/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/mod_authz_svn.so +DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check +W_NO_SYSTEM_HEADERS=-Wno-system-headers +else ifeq ($(BRANCH_MAJOR),1.2) +BDB_FLAG=$(PREFIX)/bdb +MOD_DAV_SVN=modules/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/mod_authz_svn.so +DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check +W_NO_SYSTEM_HEADERS=-Wno-system-headers +else ifeq ($(BRANCH_MAJOR),1.1) +BDB_FLAG=$(PREFIX)/bdb +MOD_DAV_SVN=modules/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/mod_authz_svn.so +DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check +W_NO_SYSTEM_HEADERS=-Wno-system-headers +else ifeq ($(BRANCH_MAJOR),1.0) 
+BDB_FLAG=$(PREFIX)/bdb +MOD_DAV_SVN=modules/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/mod_authz_svn.so +DISABLE_NEON_VERSION_CHECK=--disable-neon-version-check +W_NO_SYSTEM_HEADERS=-Wno-system-headers +else +BDB_FLAG=db.h:$(PREFIX)/bdb/include:$(PREFIX)/bdb/lib:db-$(BDB_MAJOR_VER) +SERF_FLAG=--with-serf="$(PREFIX)/serf" +MOD_DAV_SVN=modules/svn-$(WC)/mod_dav_svn.so +MOD_AUTHZ_SVN=modules/svn-$(WC)/mod_authz_svn.so +LIBMAGIC_FLAG=--with-libmagic=$(PREFIX)/libmagic +endif + +ifeq ($(ENABLE_JAVA_BINDINGS),yes) + JAVAHL_FLAG=--enable-javahl=yes --with-jdk --with-jikes=no \ + --with-junit=$(PWD)/junit.jar +else + JAVAHL_FLAG=--enable-javahl=no +endif + +ifdef PROFILE +SVN_ALL_STATIC=--enable-all-static +else +SVN_WITH_HTTPD=--with-apxs="$(PREFIX)/httpd/bin/apxs" \ + --with-apache-libexecdir="$(PREFIX)/httpd/modules/svn-$(WC)" +SVN_WITH_SASL=--with-sasl="$(PREFIX)/cyrus-sasl" +endif + +# configure svn +$(SVN_OBJDIR)/.configured: $(SVN_OBJDIR)/.retrieved + @if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \ + if [ ! 
-e $(PWD)/junit.jar ]; then \ + echo "Please provide $(PWD)/junit.jar"; \ + exit 1; \ + fi; \ + fi + cd $(SVN_SRCDIR) && ./autogen.sh + cd $(svn_builddir) && \ + env LDFLAGS="-L$(PREFIX)/neon/lib -L$(PREFIX)/apr/lib" \ + LD_LIBRARY_PATH="$(PREFIX)/bdb/lib:$(PREFIX)/iconv/lib:$$LD_LIBRARY_PATH" \ + GREP="`which grep`" \ + PATH=$(PREFIX)/ruby/bin:$$PATH \ + $(SVN_SRCDIR)/configure \ + --enable-maintainer-mode \ + --with-ssl \ + --prefix="$(SVN_PREFIX)" \ + --with-apr="$(PREFIX)/apr" \ + --with-apr-util="$(PREFIX)/apr" \ + --with-neon="$(PREFIX)/neon" \ + $(SVN_WITH_HTTPD) \ + $(SVN_WITH_SASL) \ + $(SERF_FLAG) \ + --with-sqlite="$(PREFIX)/sqlite" \ + --with-berkeley-db="$(BDB_FLAG)" \ + --with-ruby-sitedir="$(SVN_PREFIX)/lib/ruby/site_ruby" \ + --disable-mod-activation \ + $(JAVAHL_FLAG) \ + $(LIBMAGIC_FLAG) \ + $(SVN_ALL_STATIC) \ + $(DISABLE_NEON_VERSION_CHECK) + touch $@ + +# compile svn +$(SVN_OBJDIR)/.compiled: $(SVN_OBJDIR)/.configured + cd $(svn_builddir) \ + && make EXTRA_CFLAGS="$(PROFILE_CFLAGS) $(W_NO_SYSTEM_HEADERS)" + touch $@ + +# install svn +$(SVN_OBJDIR)/.installed: $(SVN_OBJDIR)/.compiled + cd $(svn_builddir) \ + && make install + touch $@ + +$(SVN_OBJDIR)/.bindings-compiled: $(SVN_OBJDIR)/.installed + if [ $(ENABLE_PYTHON_BINDINGS) = yes ]; then \ + cd $(svn_builddir) \ + && make swig-py; \ + fi + cd $(svn_builddir) && \ + PATH=$(PREFIX)/ruby/bin:$$PATH make swig-rb + if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \ + cd $(svn_builddir) \ + && make swig-pl; \ + fi + if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \ + cd $(svn_builddir) \ + && make javahl; \ + fi + touch $@ + +$(SVN_OBJDIR)/.bindings-installed: $(SVN_OBJDIR)/.bindings-compiled + if [ $(ENABLE_PYTHON_BINDINGS) = yes ]; then \ + cd $(svn_builddir) \ + && make install-swig-py; \ + fi + cd $(svn_builddir) && \ + PATH=$(PREFIX)/ruby/bin:$$PATH make install-swig-rb + if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \ + cd $(svn_builddir) \ + && make install-swig-pl-lib; \ + cd 
subversion/bindings/swig/perl/native \ + && perl Makefile.PL PREFIX="$(SVN_PREFIX)" \ + && make install; \ + fi + if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \ + cd $(svn_builddir) \ + && make install-javahl; \ + fi + touch $@ + +# run svn regression tests +HTTPD_CHECK_CONF=$(PREFIX)/httpd/conf/httpd-svn-check-$(WC).conf +HTTPD_CHECK_USERS=$(PREFIX)/httpd/conf/httpd-svn-check-users +HTTPD_CHECK_PORT=8081 + +$(HTTPD_CHECK_USERS): + mkdir -p $(dir $@) + echo > $@.tmp 'jrandom:xCGl35kV9oWCY' + echo >>$@.tmp 'jconstant:xCGl35kV9oWCY' + mv -f $@.tmp $@ + +$(HTTPD_CHECK_CONF): $(HTTPD_CHECK_USERS) + echo > $@.tmp '# httpd config for make check' + echo >>$@.tmp 'ServerRoot "$(PREFIX)/httpd"' + echo >>$@.tmp 'Listen localhost:$(HTTPD_CHECK_PORT)' + echo >>$@.tmp 'LoadModule dav_svn_module $(MOD_DAV_SVN)' + echo >>$@.tmp 'LoadModule authz_svn_module $(MOD_AUTHZ_SVN)' + echo >>$@.tmp 'DocumentRoot "$(PREFIX)/httpd/htdocs"' + echo >>$@.tmp '# These two Locations are used for "make check"' + echo >>$@.tmp '<Directory />' + echo >>$@.tmp ' Options FollowSymLinks' + echo >>$@.tmp ' AllowOverride None' + echo >>$@.tmp ' Order deny,allow' + echo >>$@.tmp ' Allow from all' + echo >>$@.tmp '</Directory>' + echo >>$@.tmp '<Location /svn-test-work/repositories>' + echo >>$@.tmp ' DAV svn' + echo >>$@.tmp ' SVNParentPath $(SVN_WC)/subversion/tests/cmdline/svn-test-work/repositories' + echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz' + echo >>$@.tmp ' AuthType Basic' + echo >>$@.tmp ' AuthName "Subversion Repository"' + echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)' + echo >>$@.tmp ' Require valid-user' +ifeq ($(USE_HTTPV1),yes) + echo >>$@.tmp ' SVNAdvertiseV2Protocol off' +endif +ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes) + echo >>$@.tmp ' SVNPathAuthz short_circuit' +endif + echo >>$@.tmp '</Location>' + echo >>$@.tmp '<Location /svn-test-work/local_tmp/repos>' + echo >>$@.tmp ' DAV svn' + echo >>$@.tmp ' SVNPath 
$(SVN_WC)/subversion/tests/cmdline/svn-test-work/local_tmp/repos' + echo >>$@.tmp ' AuthzSVNAccessFile $(SVN_WC)/subversion/tests/cmdline/svn-test-work/authz' + echo >>$@.tmp ' AuthType Basic' + echo >>$@.tmp ' AuthName "Subversion Repository"' + echo >>$@.tmp ' AuthUserFile $(HTTPD_CHECK_USERS)' + echo >>$@.tmp ' Require valid-user' +ifeq ($(USE_HTTPV1),yes) + echo >>$@.tmp ' SVNAdvertiseV2Protocol off' +endif +ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes) + echo >>$@.tmp ' SVNPathAuthz short_circuit' +endif + echo >>$@.tmp '</Location>' + echo >>$@.tmp '# This Location lets you access repositories dropped in /tmp/' + echo >>$@.tmp '<Location /svn>' + echo >>$@.tmp ' DAV svn' + echo >>$@.tmp ' SVNParentPath /tmp' + echo >>$@.tmp ' Allow from all' +ifeq ($(USE_HTTPV1),yes) + echo >>$@.tmp ' SVNAdvertiseV2Protocol off' +endif +ifeq ($(USE_AUTHZ_SHORT_CIRCUIT),yes) + echo >>$@.tmp ' SVNPathAuthz short_circuit' +endif + echo >>$@.tmp '</Location>' + echo >>$@.tmp 'RedirectMatch permanent ^/svn-test-work/repositories/REDIRECT-PERM-(.*)$$ /svn-test-work/repositories/$$1' + echo >>$@.tmp 'RedirectMatch ^/svn-test-work/repositories/REDIRECT-TEMP-(.*)$$ /svn-test-work/repositories/$$1' + mv -f $@.tmp $@ + +# We need this to make sure some targets below pick up the right libraries +LD_LIBRARY_PATH=$(PREFIX)/apr/lib:$(PREFIX)/iconv/lib:$(PREFIX)/bdb/lib:$(PREFIX)/neon/lib:$(PREFIX)/serf/lib:$(PREFIX)/sqlite/lib:$(PREFIX)/cyrus-sasl/lib:$(PREFIX)/iconv/lib:$(PREFIX)/libmagic/lib:$(PREFIX)/ruby/lib:$(PREFIX)/svn-$(WC)/lib + +.PHONY: libpath +libpath: + @echo export LD_LIBRARY_PATH=$(LD_LIBRARY_PATH):$$LD_LIBRARY_PATH + +.PHONY: start-svnserve stop-svnserve start-httpd stop-httpd + +HTTPD_CMD = env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \ + $(PREFIX)/httpd/bin/apachectl -f $(HTTPD_CHECK_CONF) +HTTPD_START_CMD = $(HTTPD_CMD) -k start +HTTPD_START_CMD_DEBUG = $(HTTPD_START_CMD) -X +HTTPD_STOP_CMD = $(HTTPD_CMD) -k stop; sleep 3 + +SVNSERVE_START_CMD = (ls $(PWD)/svnserve-*.pid | while read 
pidfile; do \ + kill `cat "$$pidfile"`; sleep 3; \ + rm -f $$pidfile; \ + done); \ + $(SVN_PREFIX)/bin/svnserve \ + --listen-host 127.0.0.1 \ + --pid-file $(PWD)/svnserve-$(WC).pid \ + -d -r $(svn_builddir)/subversion/tests/cmdline +SVNSERVE_STOP_CMD = kill `cat $(PWD)/svnserve-$(WC).pid`; sleep 3; \ + rm -f $(PWD)/svnserve-$(WC).pid + +start-httpd: $(HTTPD_CHECK_CONF) + $(HTTPD_START_CMD) + @echo "To run tests over http, run:" + @echo " make check BASE_URL=http://localhost:$(HTTPD_CHECK_PORT)" + @echo "The URL http://localhost:$(HTTPD_CHECK_PORT)/svn/" + @echo "lets you access repositories dropped into /tmp" + +start-httpd-debug: $(HTTPD_CHECK_CONF) + $(HTTPD_START_CMD_DEBUG) & + @echo "To run tests over http, run:" + @echo " make check BASE_URL=http://localhost:$(HTTPD_CHECK_PORT)" + @echo "The URL http://localhost:$(HTTPD_CHECK_PORT)/svn/" + @echo "lets you access repositories dropped into /tmp" + @echo "Trying to attach gdb to httpd..." + @sleep 1 + gdb $(PREFIX)/httpd/bin/httpd `cat $(PREFIX)/httpd/logs/httpd.pid` + +stop-httpd: + $(HTTPD_STOP_CMD) + +start-svnserve: $(SVN_OBJDIR)/.compiled + $(SVNSERVE_START_CMD) + +stop-svnserve: + $(SVNSERVE_STOP_CMD) + +define do_check +-cd $(svn_builddir) && for fs in fsfs bdb; do \ + echo "Begin test: $(subst svn-check-,,$@) x $$fs"; \ + test -d "$(RAMDISK)/tmp" && export TMPDIR="$(RAMDISK)/tmp"; \ + make check PARALLEL=$(PARALLEL) CLEANUP=$(CLEANUP) $1 FS_TYPE=$$fs; \ + for log in tests.log fails.log; do \ + test -f $$log && mv -f $$log $$log.$@-$$fs; \ + done; \ +done +endef + +TEST_WORK=$(svn_builddir)/subversion/tests/cmdline/svn-test-work +svn-check-prepare-ramdisk: + -rm -rf "$(TEST_WORK)"; \ + if [ -d "$(RAMDISK)" ] && \ + touch "$(RAMDISK)/$(SVN_REL_WC).writetest" && \ + mkdir -p "$(RAMDISK)/$(SVN_REL_WC)"; then \ + rm -f "$(RAMDISK)/$(SVN_REL_WC).writetest"; \ + ln -s "$(RAMDISK)/$(SVN_REL_WC)" "$(TEST_WORK)"; \ + mkdir -p "$(RAMDISK)/tmp"; \ + fi + +svn-check-neon: $(HTTPD_CHECK_CONF) $(SVN_OBJDIR)/.compiled 
$(SVN_OBJDIR)/.bindings-compiled svn-check-prepare-ramdisk + $(HTTPD_START_CMD) + $(call do_check,BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=neon) + $(HTTPD_STOP_CMD) + +svn-check-serf: $(HTTPD_CHECK_CONF) $(SVN_OBJDIR)/.compiled $(SVN_OBJDIR)/.bindings-compiled svn-check-prepare-ramdisk + $(HTTPD_START_CMD) + $(call do_check,BASE_URL=http://localhost:$(HTTPD_CHECK_PORT) HTTP_LIBRARY=serf) + $(HTTPD_STOP_CMD) + +svn-check-local: svn-check-prepare-ramdisk + $(call do_check) + +svn-check-svn: svn-check-prepare-ramdisk + $(SVNSERVE_START_CMD) + $(call do_check,BASE_URL=svn://127.0.0.1) + $(SVNSERVE_STOP_CMD) + +.PHONY: svn-check-swig-pl svn-check-swig-py svn-check-swig-rb svn-check-javahl +svn-check-bindings: svn-check-swig-pl svn-check-swig-py svn-check-swig-rb \ + svn-check-javahl + +RUBYLIB=$(SVN_PREFIX)/lib/ruby/site_ruby$(shell grep \ + ^svn_cv_ruby_sitedir_archsuffix $(svn_builddir)/config.log | \ + cut -d'=' -f2):$(SVN_PREFIX)/lib/ruby/site_ruby$(shell \ + grep ^svn_cv_ruby_sitedir_libsuffix $(svn_builddir)/config.log | \ + cut -d'=' -f2) +svn-check-swig-pl: + -if [ $(ENABLE_PERL_BINDINGS) = yes ]; then \ + (cd $(svn_builddir) && \ + env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \ + make check-swig-pl 2>&1) | \ + tee $(svn_builddir)/tests.log.bindings.pl; \ + fi + +svn-check-swig-py: + -if [ $(ENABLE_PYTHON_BINDINGS) = yes ]; then \ + (cd $(svn_builddir) && \ + env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \ + make check-swig-py 2>&1) | \ + tee $(svn_builddir)/tests.log.bindings.py; \ + fi + +# We add the svn prefix to PATH here because the ruby tests +# attempt to start an svnserve binary found in PATH. 
+svn-check-swig-rb: + (cd $(svn_builddir)/subversion/bindings/swig/ruby/test && \ + env RUBYLIB=$(RUBYLIB) \ + LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \ + PATH=$(SVN_PREFIX)/bin:$$PATH \ + $(PREFIX)/ruby/bin/ruby run-test.rb \ + --verbose=verbose 2>&1) | \ + tee $(svn_builddir)/tests.log.bindings.rb + +svn-check-javahl: + -if [ $(ENABLE_JAVA_BINDINGS) = yes ]; then \ + (cd $(svn_builddir) && \ + env LD_LIBRARY_PATH=$(LD_LIBRARY_PATH) \ + make check-javahl 2>&1) | \ + tee $(svn_builddir)/tests.log.bindings.javahl; \ + fi + +svn-check: svn-check-prepare-ramdisk svn-check-local svn-check-svn \ + svn-check-neon svn-check-serf svn-check-bindings + +.PHONY: sign-email +sign-email: + @echo "Summary: +1 to release" + @echo "" + @echo "Tested: [bdb | fsfs] x [ra_local | ra_svn | ra_neon | ra_serf]" + @echo " swig bindings" +ifeq ($(ENABLE_JAVA_BINDINGS),yes) + @echo " javahl bindings" +endif + @echo "" + @echo "Test results: All passed." + @echo "" + @echo "Platform: `uname -r -s -m`" + @echo "" + @echo "Dependencies:" + @echo "bdb: $(BDB_VER)" +ifeq ($(USE_APR_ICONV),yes) + @echo "apr-iconv: $(APR_ICONV_VER)" +else + @echo "GNU-iconv: $(GNU_ICONV_VER)" +endif + @echo "apr: $(APR_VER)" + @echo "apr-util: $(APR_UTIL_VER)" + @echo "httpd: $(HTTPD_VER)" + @echo "neon: $(NEON_VER)" + @echo "serf: $(SERF_VER)" + @echo "cyrus-sasl: $(CYRUS_SASL_VER)" + @echo "sqlite: $(SQLITE_VER)" + @echo "openssl: `openssl version | cut -d' ' -f2`" + @echo "swig: `swig -version | grep Version | cut -d' ' -f3`" + @echo "python: `python --version 2>&1 | cut -d' ' -f2`" + @echo "perl: `eval \`perl -V:version\`; echo $$version`" + @echo "ruby: $(RUBY_VER)" +ifeq ($(ENABLE_JAVA_BINDINGS),yes) + @echo "java: `java -version 2>&1 | grep version | cut -d' ' -f3 | sed -e 's/\"//g'`" +endif + @echo "" + @echo "Signatures:" diff --git a/tools/dev/unix-build/README b/tools/dev/unix-build/README new file mode 100644 index 0000000..6110d8e --- /dev/null +++ b/tools/dev/unix-build/README @@ -0,0 +1,66 @@ 
+Introduction
+============
+Makefile.svn helps Subversion developers on unix-like systems set up an
+SVN development environment without requiring root privileges. It does
+this by fetching Subversion along with many of its dependencies from
+the internet and building them using sane defaults suitable for
+development (for example, it invokes --enable-maintainer-mode while
+compiling Subversion itself). However, indirect dependencies are not
+covered; you need OpenSSL installed to get SSL support in neon and
+serf for example. Also, it doesn't build all the bindings by default
+(javahl for example).
+
+This README only covers basic usage. Please read Makefile.svn for more
+details.
+
+Requirements
+============
+In addition to the usual GNU buildtools including a sane compiler and
+GNU autotools, some version of Subversion is required to be in
+$PATH. It is used to fetch the desired version of Subversion from the
+repository.
+
+Usage
+=====
+First, choose a directory $(SVN_DEV) to set up the environment. Note
+that this directory cannot be changed later because the script
+hardcodes build and link paths relative to the current working
+directory.
+
+To fetch and build trunk, simply don't pass anything.
+Pass the branch you want to build in BRANCH, e.g.
+ $ make BRANCH="1.5.x"
+You can also pass a tag to build:
+ $ make TAG="1.6.6"
+And you can specify a working copy to use, in case you need more
+than one working copy of the same branch:
+ $ make BRANCH="1.6.x" WC="1.6.x-test2"
+
+When the script has finished fetching and building, it uses
+$(SVN_DEV)/prefix to install Subversion libraries and
+binaries. $(SVN_DEV)/prefix/svn-trunk (or whatever you choose to
+build) will contain the latest Subversion binaries: you should add
+$(SVN_DEV)/prefix/svn-trunk/bin to your $PATH to use them.
The +Makefile in $(SVN_DEV)/svn-trunk is configured to build with sane +options: while developing Subversion, simply `svn up` to pull the +latest changes, `make` and `make install` to install the binaries in +$(SVN_DEV)/prefix/svn-trunk. + +If at any point, you want to re-configure any of the packages to the +default configuration in Makefile.svn, just run the "<PACKAGE>-reset" +target in Makefile.svn before trying to rebuild again. If, in the +extreme case, you want to remove everything including the installed +binaries effectively returning to the starting point, use the "nuke" +target. + +Extended usage +============== +The script can also run Subversion's regression test suite via all +repository backends and RA methods. It generates the necessary +configuration files and starts svnserve and httpd daemons +automatically on non-privileged ports. The default test target to test +everything is "svn-check". + +Notes +===== +The script currently doesn't build Ctypes Python bindings. diff --git a/tools/dev/verify-history.py b/tools/dev/verify-history.py new file mode 100755 index 0000000..a408cc7 --- /dev/null +++ b/tools/dev/verify-history.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
# Verify the FS history code: given a repository, a path, and a starting
# revision, walk the history (towards r1) via svn_repos_history() and check
# that every reported (path, revision) pair corresponds to a change -- or a
# parent-directory copy -- in that revision's changed-paths list.

class HistoryChecker:
  """Holds the FS object and validates history locations against the
  paths-changed list of each reported revision."""

  def __init__(self, fs_ptr):
    self.fs_ptr = fs_ptr

  def _check_history(self, path, revision):
    """Return 1 if PATH, or one of its parent directories, appears in
    REVISION's changed-paths list; return 0 otherwise."""
    changed = fs.paths_changed(fs.revision_root(self.fs_ptr, revision))
    while path not in changed:
      if path == '/':
        return 0
      slash = path.rfind('/')
      if slash == -1:
        return 0
      # Climb to the parent: a copy of an ancestor also explains history.
      path = path[:slash]
    return 1

  def add_history(self, path, revision, pool=None):
    """svn_repos_history() callback: print one history location, flagging
    it when the changed-paths list cannot explain it."""
    if self._check_history(path, revision):
      print("          %8d %s" % (revision, path))
    else:
      print("**WRONG** %8d %s" % (revision, path))


def check_history(fs_ptr, path, revision):
  """Walk PATH's history from REVISION towards r1, validating each step."""
  checker = HistoryChecker(fs_ptr)
  repos.history(fs_ptr, path, checker.add_history, 1, revision, 1)


def main():
  argc = len(sys.argv)
  if argc < 3 or argc > 4:
    print("Usage: %s PATH-TO-REPOS PATH-IN-REPOS [REVISION]" % sys.argv[0])
    sys.exit(1)

  fs_ptr = repos.fs(repos.open(sys.argv[1]))
  if argc == 3:
    revision = fs.youngest_rev(fs_ptr)
  else:
    revision = int(sys.argv[3])
  check_history(fs_ptr, sys.argv[2], revision)
  sys.exit(0)


if __name__ == '__main__':
  main()
+# - Possibly a function or pointer-to-function declarator: +# - "identifier" +# - "(identifier)" (used in some typedefs) +# - "(*identifier)" +# with either nothing more, or a "(" next (especially not "," or ";" or "=" +# which all indicate a variable rather than a function). + +# Regular expressions for "sed" +# Note: take care in matching back-reference numbers to parentheses +PREFIX="^\( *\| *static *\| *typedef *\)" +RET_TYPE="\(svn_error_t *\* *\)" +IDENT="[a-zA-Z_][a-zA-Z0-9_]*" +DECLR="\($IDENT\|( *\(\*\|\) *$IDENT *)\)" +SUFFIX="\($DECLR *\((.*\|\)\|\)$" + +# The attribute string to be inserted or removed +ATTRIB_RE="__attribute__((warn_unused_result))" # regex version of it +ATTRIB_STR="__attribute__((warn_unused_result))" # plain text version of it + +if [ $REMOVE ]; then + SUBST="s/$PREFIX$ATTRIB_RE $RET_TYPE$SUFFIX/\1\2\3/" +else + SUBST="s/$PREFIX$RET_TYPE$SUFFIX/\1$ATTRIB_STR \2\3/" +fi + +for F do + # Edit the file, leaving a backup suffixed with a tilde + { sed -e "$SUBST" "$F" > "$F~1" && + { ! cmp -s "$F" "$F~1"; } && + mv "$F" "$F~" && # F is briefly absent now; a copy could avoid this + mv "$F~1" "$F" + } || + # If anything went wrong or no change was made, remove the temporary file + rm "$F~1" +done diff --git a/tools/dev/wc-format.py b/tools/dev/wc-format.py new file mode 100755 index 0000000..158f529 --- /dev/null +++ b/tools/dev/wc-format.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
import os
import sqlite3
import sys

MIN_SINGLE_DB_FORMAT = 19

def get_format(wc_path):
  """Return the working-copy format of WC_PATH.

  The format is read from .svn/wc.db (single-DB working copies) or from
  .svn/entries (older formats).  For a path below a single-DB working
  copy root, the nearest ancestor's format is returned.  Returns the
  string 'not under version control' when no format can be determined.
  """
  entries = os.path.join(wc_path, '.svn', 'entries')
  wc_db = os.path.join(wc_path, '.svn', 'wc.db')

  formatno = 'not under version control'

  if os.path.exists(wc_db):
    conn = sqlite3.connect(wc_db)
    try:
      curs = conn.cursor()
      curs.execute('pragma user_version;')
      formatno = curs.fetchone()[0]
    finally:
      conn.close()  # the original leaked the connection
  elif os.path.exists(entries):
    # First line of the old entries file is the format number.
    with open(entries) as f:  # the original leaked the file handle
      formatno = int(f.readline())
  elif os.path.exists(wc_path):
    parent_path = os.path.dirname(os.path.abspath(wc_path))
    if wc_path != parent_path:
      formatno = get_format(parent_path)
      # Guard the comparison: the recursion may return the
      # 'not under version control' string, and str >= int is a
      # TypeError under Python 3.  Either way the value falls through
      # to the final return, so behavior is unchanged.
      if isinstance(formatno, int) and formatno >= MIN_SINGLE_DB_FORMAT:
        return formatno

  return formatno

def print_format(wc_path):
  """Print 'WC_PATH: FORMAT' for the working copy at WC_PATH."""
  # see subversion/libsvn_wc/wc.h for format values and information
  #   1.0.x -> 1.3.x: format 4
  #   1.4.x: format 8
  #   1.5.x: format 9
  #   1.6.x: format 10
  #   1.7.x: format XXX
  formatno = get_format(wc_path)
  # print() with a single argument works on both Python 2 and 3;
  # the original used a Python-2-only print statement.
  print('%s: %s' % (wc_path, formatno))


if __name__ == '__main__':
  paths = sys.argv[1:]
  if not paths:
    paths = ['.']
  for wc_path in paths:
    print_format(wc_path)
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +"""This program converts a Subversion WC from 1.7-dev format 18 to + 1.7-dev format 19 by migrating data from multiple DBs to a single DB. + + Usage: bump-to-19.py WC_ROOT_DIR + where WC_ROOT_DIR is the path to the WC root directory. + + Skips non-WC dirs and WC dirs that are not at format 18.""" + +# TODO: Detect '_svn' as an alternative to '.svn'. + +# TODO: Probably should remove any directory that is in state to-be-deleted +# and doesn't have its 'keep_local' flag set. Otherwise it will +# become unversioned after commit, whereas format-18 and earlier would +# have deleted it after commit. Before deleting we should check there +# are no unversioned things inside, and maybe even check for "local +# mods" even though that's logically impossible. On the other hand +# it's not a big deal for the user to clean these up manually. 
# Name of the administrative subdirectory.
dot_svn = '.svn'

def _admin_path(wc_path, *components):
  """Join WC_PATH's administrative directory with COMPONENTS."""
  return os.path.join(wc_path, dot_svn, *components)

def dotsvn_path(wc_path):
  """Return the '.svn' directory of the WC dir WC_PATH."""
  return _admin_path(wc_path)

def db_path(wc_path):
  """Return the SQLite database path of the WC dir WC_PATH."""
  return _admin_path(wc_path, 'wc.db')

def pristine_path(wc_path):
  """Return the pristine-text store of the WC dir WC_PATH."""
  return _admin_path(wc_path, 'pristine')

def tmp_path(wc_path):
  """Return the temporary area of the WC dir WC_PATH."""
  return _admin_path(wc_path, 'tmp')

class NotASubversionWC(Exception):
  """A directory that carries no (readable) Subversion metadata."""
  def __init__(self, wc_path):
    self.wc_path = wc_path
  def __str__(self):
    return "not a Subversion WC: '%s'" % self.wc_path

class WrongFormatException(Exception):
  """A WC dir whose format is not the expected format 18."""
  def __init__(self, wc_dir, format):
    self.wc_dir = wc_dir
    self.format = format
  def __str__(self):
    return "format is %s not 18: '%s'" % (self.format, self.wc_dir)
changed_rev, " \ + " changed_date, changed_author, depth, symlink_target, last_mod_time, " \ + " properties, dav_cache, incomplete_children, file_external " \ + "FROM BASE_NODE WHERE local_relpath != ''; " + +STMT_COPY_WORKING_NODE_TABLE_TO_WCROOT_DB1 = \ + "INSERT OR REPLACE INTO root.WORKING_NODE ( " \ + " wc_id, local_relpath, parent_relpath, presence, kind, checksum, " \ + " translated_size, changed_rev, changed_date, changed_author, depth, " \ + " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \ + " moved_here, moved_to, last_mod_time, properties, keep_local ) " \ + "SELECT wc_id, ?1, ?2 AS parent_relpath, " \ + " presence, kind, checksum, " \ + " translated_size, changed_rev, changed_date, changed_author, depth, " \ + " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \ + " moved_here, moved_to, last_mod_time, properties, keep_local " \ + "FROM WORKING_NODE WHERE local_relpath = ''; " + +STMT_COPY_WORKING_NODE_TABLE_TO_WCROOT_DB2 = \ + "INSERT INTO root.WORKING_NODE ( " \ + " wc_id, local_relpath, parent_relpath, presence, kind, checksum, " \ + " translated_size, changed_rev, changed_date, changed_author, depth, " \ + " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \ + " moved_here, moved_to, last_mod_time, properties, keep_local ) " \ + "SELECT wc_id, ?1 || '/' || local_relpath, ?1 AS parent_relpath, " \ + " presence, kind, checksum, " \ + " translated_size, changed_rev, changed_date, changed_author, depth, " \ + " symlink_target, copyfrom_repos_id, copyfrom_repos_path, copyfrom_revnum, " \ + " moved_here, moved_to, last_mod_time, properties, keep_local " \ + "FROM WORKING_NODE WHERE local_relpath != ''; " + +STMT_COPY_ACTUAL_NODE_TABLE_TO_WCROOT_DB1 = \ + "INSERT OR REPLACE INTO root.ACTUAL_NODE ( " \ + " wc_id, local_relpath, parent_relpath, properties, " \ + " conflict_old, conflict_new, conflict_working, " \ + " prop_reject, changelist, text_mod, tree_conflict_data, " \ + 
" conflict_data, older_checksum, left_checksum, right_checksum ) " \ + "SELECT wc_id, ?1, ?2 AS parent_relpath, properties, " \ + " conflict_old, conflict_new, conflict_working, " \ + " prop_reject, changelist, text_mod, tree_conflict_data, " \ + " conflict_data, older_checksum, left_checksum, right_checksum " \ + "FROM ACTUAL_NODE WHERE local_relpath = ''; " + +STMT_COPY_ACTUAL_NODE_TABLE_TO_WCROOT_DB2 = \ + "INSERT INTO root.ACTUAL_NODE ( " \ + " wc_id, local_relpath, parent_relpath, properties, " \ + " conflict_old, conflict_new, conflict_working, " \ + " prop_reject, changelist, text_mod, tree_conflict_data, " \ + " conflict_data, older_checksum, left_checksum, right_checksum ) " \ + "SELECT wc_id, ?1 || '/' || local_relpath, ?1 AS parent_relpath, properties, " \ + " conflict_old, conflict_new, conflict_working, " \ + " prop_reject, changelist, text_mod, tree_conflict_data, " \ + " conflict_data, older_checksum, left_checksum, right_checksum " \ + "FROM ACTUAL_NODE WHERE local_relpath != ''; " + +STMT_COPY_LOCK_TABLE_TO_WCROOT_DB = \ + "INSERT INTO root.LOCK " \ + "SELECT * FROM LOCK; " + +STMT_COPY_PRISTINE_TABLE_TO_WCROOT_DB = \ + "INSERT OR REPLACE INTO root.PRISTINE " \ + "SELECT * FROM PRISTINE; " + +STMT_SELECT_SUBDIR = \ + "SELECT 1 FROM BASE_NODE WHERE local_relpath=?1 AND kind='subdir'" \ + "UNION " \ + "SELECT 0 FROM WORKING_NODE WHERE local_relpath=?1 AND kind='subdir';" + +def copy_db_rows_to_wcroot(wc_subdir_relpath): + """Copy all relevant table rows from the $PWD/WC_SUBDIR_RELPATH/.svn/wc.db + into $PWD/.svn/wc.db.""" + + wc_root_path = '' + wc_subdir_path = wc_subdir_relpath + wc_subdir_parent_relpath = os.path.dirname(wc_subdir_relpath) + + try: + db = sqlite3.connect(db_path(wc_subdir_path)) + except: + raise NotASubversionWC(wc_subdir_path) + c = db.cursor() + + c.execute("ATTACH '" + db_path(wc_root_path) + "' AS 'root'") + + ### TODO: the REPOSITORY table. 
def move_and_shard_pristine_files(old_wc_path, new_wc_path):
  """Move all pristine text files from 'OLD_WC_PATH/.svn/pristine/'
  into 'NEW_WC_PATH/.svn/pristine/??/', creating shard dirs where
  necessary."""

  src_dir = pristine_path(old_wc_path)
  dst_dir = pristine_path(new_wc_path)

  if not os.path.exists(src_dir):
    # That's fine, assuming there are no pristine texts.
    return

  for name in os.listdir(src_dir):
    shard = name[:2]
    if shard == name:
      # A two-character name is a shard directory: already converted.
      continue
    # os.renames() creates the shard directory as needed.
    os.renames(os.path.join(src_dir, name),
               os.path.join(dst_dir, shard, name))
def select_subdir(wc_subdir_path):
  """Return True if WC_SUBDIR_PATH is known (to the wc-root database)
  to be a versioned subdir, False otherwise."""

  try:
    conn = sqlite3.connect(db_path(''))
  except:
    raise NotASubversionWC(wc_subdir_path)
  cursor = conn.cursor()
  cursor.execute(STMT_SELECT_SUBDIR, (wc_subdir_path,))
  # Any row at all means the path is recorded as a 'subdir' node.
  return cursor.fetchone() is not None
def check_wc_format_number(wc_path):
  """Check that the WC format of the WC dir WC_PATH is 18.

  Raise WrongFormatException if it is some other format, or
  NotASubversionWC if there is no wc.db to read."""

  try:
    db = sqlite3.connect(db_path(wc_path))
  except sqlite3.OperationalError:
    raise NotASubversionWC(wc_path)
  c = db.cursor()
  c.execute("PRAGMA user_version;")
  formatno = c.fetchone()[0]  # renamed: 'format' shadowed the builtin
  # Read-only query: the original's db.commit() here was a no-op.
  db.close()

  if formatno != 18:
    raise WrongFormatException(wc_path, formatno)
def bump_wc_format_number(wc_path):
  """Bump the WC format number of the WC dir WC_PATH to 19."""

  try:
    conn = sqlite3.connect(db_path(wc_path))
  except sqlite3.OperationalError:
    raise NotASubversionWC(wc_path)
  cursor = conn.cursor()
  # The format lives in SQLite's per-database user_version pragma.
  cursor.execute("PRAGMA user_version = 19;")
  conn.commit()
  conn.close()
# Files to ignore: they hold only deprecated or legacy-format code.
SKIP = ['deprecated.c',
        'entries.c',
        'entries.h',
        'old-and-busted.c']

# Symbols whose remaining uses measure outstanding WC-NG conversion work.
TERMS = ['svn_wc_adm_access_t',
         'svn_wc_entry_t',
         'svn_wc__node_',
         'svn_wc__db_temp_',
         'svn_wc__db_node_hidden',
         'svn_wc__loggy',
         'svn_wc__db_wq_add',
         ]


def get_files_in(path):
  """Return the .c and .h files directly inside directory PATH,
  excluding the names in SKIP."""
  names = os.listdir(path)
  for skip in SKIP:
    try:
      names.remove(skip)
    except ValueError:
      pass
  return [os.path.join(path, fname) for fname in names
          if fname.endswith('.c') or fname.endswith('.h')]


def count_terms_in(path):
  """Return a dict mapping each of TERMS to its total occurrence count
  in the sources directly under PATH."""
  counts = dict.fromkeys(TERMS, 0)
  # The original called get_files_in() twice -- once into an unused
  # local and once in the loop -- listing the directory needlessly.
  for filepath in get_files_in(path):
    f = open(filepath)
    try:
      contents = f.read()
    finally:
      f.close()  # the original leaked the file handle
    for term in TERMS:
      counts[term] += contents.count(term)
  return counts


def print_report(wcroot):
  """Print a table of TERMS counts for libsvn_client and libsvn_wc
  under the working-copy root WCROOT."""
  client = count_terms_in(os.path.join(wcroot, 'subversion', 'libsvn_client'))
  wc = count_terms_in(os.path.join(wcroot, 'subversion', 'libsvn_wc'))

  client_total = 0
  wc_total = 0

  FMT = '%22s |%14s |%10s |%6s'
  SEP = '%s+%s+%s+%s' % (23*'-', 15*'-', 11*'-', 7*'-')

  print(FMT % ('', 'libsvn_client', 'libsvn_wc', 'Total'))
  print(SEP)
  for term in TERMS:
    print(FMT % (term, client[term], wc[term], client[term] + wc[term]))
    client_total += client[term]
    wc_total += wc[term]
  print(SEP)
  print(FMT % ('Total', client_total, wc_total, client_total + wc_total))
#!/usr/bin/env sh
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
# Gather per-revision WC-NG conversion statistics into data.csv by
# checking out each trunk revision and running count-progress.py on it.
# NOTE: the original shebang was '#/usr/bin/env sh' (missing '!'), so the
# interpreter line was silently ignored.
#
# Trap Ctrl-C
trap 'exit 1' 2

# Some useful variables
REPOS=file:///home/hwright/dev/test/svn-mirror
WC=blech
REV_LIST=revs_list
SCRIPT=count-progress.py
DATA=data.csv

# Sync up the local repo
svnsync sync $REPOS

# Grab the list of revisions of interest on trunk
svn log -q -r0:HEAD $REPOS/trunk \
    | grep -v '^----' \
    | cut -f1 -d '|' \
    | cut -b2- > $REV_LIST

# Export the counting script
if [ -e $SCRIPT ]; then
  rm $SCRIPT
fi
svn export $REPOS/trunk/tools/dev/wc-ng/$SCRIPT $SCRIPT

# Checkout a working copy
if [ ! -d "$WC" ]; then
  svn co $REPOS/trunk $WC -r1
fi

# Get all the symbols of interest from the counting script and write
# them out at the headers in our csv file
LINE=""
for l in `./$SCRIPT $WC | tail -n +3 | grep -v '^----' | cut -f 1 -d '|'`; do
  LINE="$LINE,$l"
done
echo "Revision$LINE" > $DATA

# Iterate over all the revisions of interest
export SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS='yes'
for r in `cat $REV_LIST`; do
  svn up -r$r $WC -q

  # Do the count for that rev, and put the data in our data file
  LINE=""
  for l in `./$SCRIPT $WC | tail -n +3 | grep -v '^----' | cut -f 4 -d '|'`; do
    LINE="$LINE,$l"
  done
  echo "$r$LINE" >> $DATA

  echo "Done with revision $r"
done
unset SVN_I_LOVE_CORRUPTED_WORKING_COPIES_SO_DISABLE_SLEEP_FOR_TIMESTAMPS

# Cleanup
rm -rf $WC
rm $REV_LIST
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +import matplotlib.mlab as mlab +import matplotlib.pyplot as plt +from matplotlib import pylab +import numpy as np + +import csv +import sys + +min_rev = 35000 + +data_reader = csv.reader(open('data.csv')) + +data = [] +for row in data_reader: + row = row[:-1] + if row[0] == 'Revision': + data.append(row) + continue + + if int(row[0]) < min_rev: + continue + + for i, x in enumerate(row): + if i <= 1: + row[i] = int(row[i]) + else: + row[i] = int(row[i-1]) + int(row[i]) + data.append(row) + +x = [d[0] for d in data[1:]] +data = [d[1:] for d in data] +y = zip(*data) + +l = [] +for i, foo in enumerate(y): + ln = plt.plot(x, foo[1:], linewidth=1) + l.append(ln) + +plt.figlegend(l, data[0], 'lower left') +plt.fill_between(x, 0, y[0][1:], facecolor=l[0].color) +#for i in range(0, len(y)-1): +# plt.fill_between(x, y[i][1:], y[i+1][1:]) +plt.xlabel('Revision') +plt.ylabel('Symbol Count') +plt.show() + +png = open('chart2.png', 'w') +plt.savefig(png) diff --git a/tools/dev/wc-ng/populate-pristine.py b/tools/dev/wc-ng/populate-pristine.py new file mode 100755 index 0000000..8857371 --- /dev/null +++ b/tools/dev/wc-ng/populate-pristine.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +""" +A script that takes a .svn/pristine/ hierarchy, with its existing +.svn/wc.db database, and populates the database's PRISTINE table +accordingly. (Use 'svn cleanup' to remove unreferenced pristines.) + +Usage: + + %s /path/to/wc [...] +""" + +# TODO: resolve the NotImplemented() in __main__ + +# TODO: increment refcount upon collision +# TODO: add <given file>, not just argv[1]/.svn/pristine/??/* + +import hashlib +import os +import re +import sqlite3 +import sys + +# ### This could require any other format that has the same PRISTINE schema +# ### and semantics. +FORMAT = 22 +BUFFER_SIZE = 4 * 1024 + +class UnknownFormat(Exception): + def __init__(self, formatno): + self.formatno = formatno + +def open_db(wc_path): + wc_db = os.path.join(wc_path, '.svn', 'wc.db') + conn = sqlite3.connect(wc_db) + curs = conn.cursor() + curs.execute('pragma user_version;') + formatno = int(curs.fetchone()[0]) + if formatno > FORMAT: + raise UnknownFormat(formatno) + return conn + +_sha1_re = re.compile(r'^[0-9a-f]{40}$') + +def md5_of(path): + fd = os.open(path, os.O_RDONLY) + ctx = hashlib.md5() + while True: + s = os.read(fd, BUFFER_SIZE) + if len(s): + ctx.update(s) + else: + os.close(fd) + return ctx.hexdigest() + +INSERT_QUERY = """ + INSERT OR REPLACE + INTO pristine(checksum,compression,size,refcount,md5_checksum) + VALUES (?,?,?,?,?) 
+""" + +def populate(wc_path): + conn = open_db(wc_path) + sys.stdout.write("Updating '%s': " % wc_path) + for dirname, dirs, files in os.walk(os.path.join(wc_path, '.svn/pristine/')): + # skip everything but .svn/pristine/xx/ + if os.path.basename(os.path.dirname(dirname)) == 'pristine': + sys.stdout.write("'%s', " % os.path.basename(dirname)) + for f in filter(lambda x: _sha1_re.match(x), files): + fullpath = os.path.join(dirname, f) + conn.execute(INSERT_QUERY, + ('$sha1$'+f, None, os.stat(fullpath).st_size, 1, + '$md5 $'+md5_of(fullpath))) + # periodic transaction commits, for efficiency + conn.commit() + else: + sys.stdout.write(".\n") + +if __name__ == '__main__': + raise NotImplemented("""Subversion does not know yet to avoid fetching + a file when a file with matching sha1 appears in the PRISTINE table.""") + + paths = sys.argv[1:] + if not paths: + paths = ['.'] + for wc_path in paths: + try: + populate(wc_path) + except UnknownFormat, e: + sys.stderr.write("Don't know how to handle '%s' (format %d)'\n" + % (wc_path, e.formatno)) diff --git a/tools/dev/which-error.py b/tools/dev/which-error.py new file mode 100755 index 0000000..55abba7 --- /dev/null +++ b/tools/dev/which-error.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# which-error.py: Print semantic Subversion error code names mapped from +# their numeric error code values +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== +# +# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.7.x/tools/dev/which-error.py $ +# $LastChangedDate: 2011-07-08 13:53:27 +0000 (Fri, 08 Jul 2011) $ +# $LastChangedBy: philip $ +# $LastChangedRevision: 1144315 $ +# + +import sys +import os.path +import re + +try: + from svn import core +except ImportError, e: + sys.stderr.write("ERROR: Unable to import Subversion's Python bindings: '%s'\n" \ + "Hint: Set your PYTHONPATH environment variable, or adjust your " \ + "PYTHONSTARTUP\nfile to point to your Subversion install " \ + "location's svn-python directory.\n" % e) + sys.stderr.flush() + sys.exit(1) + + +def usage_and_exit(): + progname = os.path.basename(sys.argv[0]) + sys.stderr.write("""Usage: 1. %s ERRNUM [...] + 2. %s parse + 3. %s list + +Print numeric and semantic error code information for Subversion error +codes. This can be done in variety of ways: + + 1. For each ERRNUM, list the error code information. + + 2. Parse standard input as if it was error stream from a debug-mode + Subversion command-line client, echoing that input to stdout, + followed by the error code information for codes found in use in + that error stream. + + 3. Simply list the error code information for all known such + mappings. 
+ +""" % (progname, progname, progname)) + sys.exit(1) + +def get_errors(): + errs = {} + for key in vars(core): + if key.find('SVN_ERR_') == 0: + try: + val = int(vars(core)[key]) + errs[val] = key + except: + pass + return errs + +def print_error(code): + try: + print('%08d %s' % (code, __svn_error_codes[code])) + except KeyError: + print('%08d *** UNKNOWN ERROR CODE ***' % (code)) + +if __name__ == "__main__": + global __svn_error_codes + __svn_error_codes = get_errors() + codes = [] + if len(sys.argv) < 2: + usage_and_exit() + + # Get a list of known codes + if sys.argv[1] == 'list': + if len(sys.argv) > 2: + usage_and_exit() + codes = sorted(__svn_error_codes.keys()) + + # Get a list of code by parsing stdin for apr_err=CODE instances + elif sys.argv[1] == 'parse': + if len(sys.argv) > 2: + usage_and_exit() + while True: + line = sys.stdin.readline() + if not line: + break + sys.stdout.write(line) + match = re.match(r'^.*apr_err=([0-9]+)[^0-9].*$', line) + if match: + codes.append(int(match.group(1))) + + # Get the list of requested codes + else: + for code in sys.argv[1:]: + try: + code = code.lstrip('EW') + codes.append(int(code)) + except ValueError: + usage_and_exit() + + # Print the harvest codes + for code in codes: + print_error(code) + + diff --git a/tools/dev/windows-build/Makefile b/tools/dev/windows-build/Makefile new file mode 100644 index 0000000..fbf7b22 --- /dev/null +++ b/tools/dev/windows-build/Makefile @@ -0,0 +1,156 @@ +# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
+
+# ---------------------------------------------------------------------
+# Build configuration: 'debug' or 'release'.  Selects both the MSBuild
+# configuration and the gen-make.py flavour used by 'config'.
+CONFIG=debug
+#CONFIG=release
+# will be appended to 'svn --version --quiet' output; set to zero to suppress
+BUILDDESCR=0
+
+MSBUILD=msbuild subversion_vcnet.sln /nologo /v:q /p:Configuration=$(CONFIG)
+CP=cp
+TEEPATH=C:\Path\To\Parent\Of\tee\dot\exe\and\other\utils\as\needed\see\below.80
+
+# Local paths: dependency sources, the Subversion source tree, the dist
+# area where built binaries are collected, and the 'install' destination.
+SOURCESDIR=C:\Path\To\Dependency\Sources
+SVNDIR=C:\Path\To\Subversion\SourceTree
+TARGETDIR=$(SVNDIR)\dist
+INSTALLDIR=E:\svn
+
+# Dependency versions; used to derive the *DIR paths below.
+EXPATVER=2.0.0
+HTTPDVER=2.2.13
+NEONVER=0.28.2
+OPENSSLVER=0.9.8k
+SQLITEVER=3.6.3
+ZLIBVER=1.2.3
+#ENABLE_ML=--enable-ml
+
+PATCHESDIR=$(HOME)\mydiffs\svn
+OPENSSLDIR=$(SOURCESDIR)\openssl-$(OPENSSLVER)
+EXPATDIR=$(SOURCESDIR)\expat-$(EXPATVER)
+NEONDIR=$(SOURCESDIR)\neon-$(NEONVER)
+HTTPDDIR=$(SOURCESDIR)\httpd-$(HTTPDVER)
+# APR can come either from standalone checkouts or (default) from the
+# copies bundled inside the httpd source tree.
+#APRDIR=$(SOURCESDIR)\apr
+#APRUTILDIR=$(SOURCESDIR)\apr-util
+#APRICONVDIR=$(SOURCESDIR)\apr-iconv
+APRDIR=$(HTTPDDIR)\srclib\apr
+APRUTILDIR=$(HTTPDDIR)\srclib\apr-util
+APRICONVDIR=$(HTTPDDIR)\srclib\apr-iconv
+SQLITEDIR=$(SOURCESDIR)\sqlite-amalgamation
+ZLIBDIR=$(SOURCESDIR)\zlib-$(ZLIBVER)
+SERFDIR=$(SOURCESDIR)\serf
+
+# Default target: just list what's available.
+all:
+ @echo Available targets: newfiles versionstamp
+ @echo Available targets: config
+ @echo Available targets: progname testname
+ @echo Available targets: all1 all2
+ @echo Available targets: buildlog package
+ @echo Available targets: check checklog
+# Guard pseudo-targets: abort early when the required path variables
+# are unset, before any destructive command can run with an empty path.
+TARGETDIRset: SVNDIRset
+ if X$(TARGETDIR) == X exit 1
+SVNDIRset:
+ if X$(SVNDIR) == X exit 1
+
+removealllocalmods:
+ svn revert -R .
+ svn status --no-ignore | xargs rm -rf --
+ svn status --no-ignore
+ # last, in case of wc format bump
+ rmdir /s /q dist local Release Debug
+
+newfiles: SVNDIRset
+ xcopy /s /y $(PATCHESDIR)\newfiles $(SVNDIR)
+versionstamp:
+ perl tools\dev\windows-build\document-version.pl subversion\include\svn_version.h $(TARGETDIR) $(SVNDIR) $(BUILDDESCR)
+ svn diff subversion\include\svn_version.h
+
+# NOTE(fix): this rule previously depended on 'TARGETDIR', a target that
+# does not exist (the guard is named 'TARGETDIRset', as used by 'install'
+# and 'targetdir' below), so 'cleanup1' always failed and the guard on
+# the destructive rmdir never ran.
+cleanup1: TARGETDIRset
+ del log.all-tests log.gen-make.py log.devenv log.win-tests
+ rmdir /s /q $(TARGETDIR)\bin
+
+clean:
+ @echo "Sorry, '$@' target not yet implemented" >&2
+# TODO also copy sqlite3.dll if it's used
+install: TARGETDIRset
+ test ! -d $(INSTALLDIR)
+ mkdir $(INSTALLDIR)\bin
+ pushd $(TARGETDIR)\bin &&\
+ $(CP) *.exe $(INSTALLDIR)/bin &&\
+ $(CP) libapr*.dll $(INSTALLDIR)/bin &&\
+ $(CP) libeay32.dll $(INSTALLDIR)/bin &&\
+ $(CP) ssleay32.dll $(INSTALLDIR)/bin &&\
+ $(CP) libsvn*.dll $(INSTALLDIR)/bin &&\
+ $(CP) ..\*.diff $(INSTALLDIR) &&\
+ popd
+
+targetdir: TARGETDIRset
+ test -d $(TARGETDIR)\bin || mkdir $(TARGETDIR)\bin
+
+# TODO: pass --with-apr-* if you don't have httpd; make --with-* args optional
+config: targetdir
+ python gen-make.py --$(CONFIG) --with-httpd=$(HTTPDDIR) --with-neon=$(NEONDIR) --with-serf=$(SERFDIR) --with-openssl=$(OPENSSLDIR) --with-sqlite=$(SQLITEDIR) --with-zlib=$(ZLIBDIR) $(ENABLE_ML) --vsnet-version=2008 -t vcproj 2>&1 | tee log.gen-make
+
+# Visual Studio 2008
+# Per-project convenience targets: build one library/program/test via
+# MSBuild, then re-run 'package' to refresh the dist area.
+libsvn_auth_gnome_keyring libsvn_auth_kwallet libsvn_client libsvn_delta libsvn_diff libsvn_fs libsvn_fs_base libsvn_fs_fs libsvn_fs_util libsvn_ra libsvn_ra_local libsvn_ra_neon libsvn_ra_serf libsvn_ra_svn libsvn_repos libsvn_subr libsvn_wc: targetdir
+ $(MSBUILD) /t:Libraries\$@
+ $(MAKE) package
+svn svnadmin svndumpfilter svnlook svnmucc svnserve svnsync svnversion entries-dump: targetdir
+ $(MSBUILD) /t:Programs\$@
+ $(MAKE) package
+auth-test cache-test changes-test checksum-test client-test compat-test config-test db-test diff-diff3-test dir-delta-editor dirent_uri-test error-test fs-base-test fs-pack-test fs-test hashdump-test key-test locks-test mergeinfo-test opt-test path-test ra-local-test random-test repos-test revision-test skel-test stream-test string-test strings-reps-test svn_test_fs svn_test_main svndiff-test target-test time-test translate-test tree-conflict-data-test utf-test vdelta-test window-test: targetdir
+ $(MSBUILD) /t:Tests\$@
+ $(MAKE) package
+
+__ALL__ __ALL_TESTS__: targetdir
+ $(MSBUILD) /t:$@
+ $(MAKE) package
+all1: targetdir
+ $(MSBUILD) /t:__ALL__
+ $(MAKE) package
+ @echo TODO entries-test
+all2: targetdir
+ $(MSBUILD) /t:__ALL_TESTS__
+ $(MAKE) package
+
+# Collect freshly-built exes/dlls (svn, tools, APR, OpenSSL) into
+# $(TARGETDIR)\bin so 'check' and 'install' have one place to look.
+package:
+ test -d $(SVNDIR)\$(CONFIG)\Subversion\tests\cmdline || mkdir $(SVNDIR)\$(CONFIG)\Subversion\tests\cmdline
+ test -d $(TARGETDIR)\bin || mkdir $(TARGETDIR)\bin
+ for %%i in (svn svnadmin svndumpfilter svnlook svnserve svnsync svnversion) do @$(CP) $(CONFIG)\subversion\%%i\%%i.exe $(TARGETDIR)\bin
+ for %%i in (diff diff3 diff4) do @if exist $(CONFIG)\tools\diff\%%i.exe $(CP) $(CONFIG)\tools\diff\%%i.exe $(TARGETDIR)\bin
+ $(CP) $(APRDIR)\$(CONFIG)/*.dll $(TARGETDIR)\bin
+ $(CP) $(APRUTILDIR)\$(CONFIG)/*.dll $(TARGETDIR)\bin
+ $(CP) $(APRICONVDIR)\$(CONFIG)/*.dll $(TARGETDIR)\bin
+ $(CP) $(OPENSSLDIR)\out32dll/*.dll $(TARGETDIR)\bin
+ for %%i in (client delta diff fs ra repos subr wc) do @$(CP) $(CONFIG)\subversion\libsvn_%%i\*.dll $(TARGETDIR)\bin
+
+buildlog:
+ gvim -c "set autoread nowrap" -c "/\(\<0 \)\@<!error" log.devenv
+# 'make check'
+# TODO: also support svncheck, etc
+check:
+ echo %date% %time% :: Starting fsfs file >> log.all-tests
+ python win-tests.py --verbose --cleanup --bin=$(TARGETDIR)\bin --$(CONFIG) -f fsfs 2>&1 | %TEEPATH%\tee log.win-tests
+ echo %date% %time% :: Finished fsfs file >> log.all-tests
+
+
+# check errors
+checklog:
+ gvim -c "set autoread" -p log.win-tests *\*.log "+silent! /X\@<!FAIL\|XPASS"
+
+tags: .
+ REM vim +Ctags +quit
+ ctags -R .
+ $(CP) tags ..\svntags
diff --git a/tools/dev/windows-build/README b/tools/dev/windows-build/README new file mode 100644 index 0000000..a4d37d8 --- /dev/null +++ b/tools/dev/windows-build/README @@ -0,0 +1,13 @@ +Makefiles for automating the Windows build. + +* TODO: + - document: how to use + - known bugs/shortcomings + - separate the configurable parts to a Makefile.local.tmpl file + - allow serf,httpd,neon,etc to be optional + - auto-generate the list of individual targets from build.conf + (that list is not used by the default make targets) + - add 'make tools' to the default windows build + +See: http://svn.haxx.se/users/archive-2009-07/0764.shtml +(Message-Id: <alpine.561.2.00.0907241718550.6824@daniel2.local>) diff --git a/tools/dev/windows-build/document-version.pl b/tools/dev/windows-build/document-version.pl new file mode 100644 index 0000000..398762b --- /dev/null +++ b/tools/dev/windows-build/document-version.pl @@ -0,0 +1,48 @@ +#!/usr/local/bin/perl -w + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +use strict; + +use Tie::File; +#use Cwd 'getcwd'; + +die "Bad args '@ARGV'" unless (@ARGV >= 3 && @ARGV <= 4); + +my ($filename, $TARGETDIR, $SVNDIR, $BUILDDESCR) = (@ARGV, ""); + +my (@file, $version, $lines); + +tie (@file, 'Tie::File', $filename) + or die $!; + +$version = `svnversion -n` or die; +$version =~ tr/M//d; +$version .= '-' . $BUILDDESCR if $BUILDDESCR; + +/^#define SVN_VER_TAG/ and s/(?<=dev build).*(?=\)"$)/-r$version/ + for @file; +/^#define SVN_VER_NUMTAG/ and s/(?<=-dev).*(?="$)/-r$version/ + for @file; + +mkdir $TARGETDIR unless -d $TARGETDIR; + +chdir $SVNDIR; +system "svn diff -x-p > $TARGETDIR\\$version.diff" + and die $!; + diff --git a/tools/diff/diff.c b/tools/diff/diff.c new file mode 100644 index 0000000..d681381 --- /dev/null +++ b/tools/diff/diff.c @@ -0,0 +1,148 @@ +/* diff.c -- test driver for text diffs + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + */ + + +#include <apr.h> +#include <apr_general.h> +#include <apr_file_io.h> + +#include "svn_pools.h" +#include "svn_diff.h" +#include "svn_io.h" +#include "svn_utf.h" + +static svn_error_t * +do_diff(svn_stream_t *ostream, + const char *original, + const char *modified, + svn_boolean_t *has_changes, + svn_diff_file_options_t *options, + svn_boolean_t show_c_function, + apr_pool_t *pool) +{ + svn_diff_t *diff; + + SVN_ERR(svn_diff_file_diff_2(&diff, original, modified, options, pool)); + *has_changes = svn_diff_contains_diffs(diff); + return svn_diff_file_output_unified3(ostream, diff, original, modified, + NULL, NULL, SVN_APR_LOCALE_CHARSET, + NULL, show_c_function, pool); +} + +static void +print_usage(svn_stream_t *ostream, const char *progname, + apr_pool_t *pool) +{ + svn_error_clear(svn_stream_printf(ostream, pool, + "Usage: %s [OPTIONS] <file1> <file2>\n" + "\n" + "Display the differences between <file1> and <file2> in unified diff\n" + "format. 
OPTIONS are diff extensions as described by 'svn help diff'.\n" + "Use '--' alone to indicate that no more options follow.\n", + progname)); +} + +int main(int argc, const char *argv[]) +{ + apr_pool_t *pool; + svn_stream_t *ostream; + svn_error_t *svn_err; + svn_boolean_t has_changes; + svn_diff_file_options_t *diff_options; + apr_array_header_t *options_array; + int i; + const char *from = NULL; + const char *to = NULL; + svn_boolean_t show_c_function = FALSE; + svn_boolean_t no_more_options = FALSE; + + apr_initialize(); + atexit(apr_terminate); + + pool = svn_pool_create(NULL); + + svn_err = svn_stream_for_stdout(&ostream, pool); + if (svn_err) + { + svn_handle_error2(svn_err, stdout, FALSE, "diff: "); + return 2; + } + + options_array = apr_array_make(pool, 0, sizeof(const char *)); + + for (i = 1 ; i < argc ; i++) + { + if (!no_more_options && (argv[i][0] == '-')) + { + /* Special case: '--' means "no more options follow" */ + if (argv[i][1] == '-' && !argv[i][2]) + { + no_more_options = TRUE; + continue; + } + /* Special case: we need to detect '-p' and handle it specially */ + if (argv[i][1] == 'p' && !argv[i][2]) + { + show_c_function = TRUE; + continue; + } + APR_ARRAY_PUSH(options_array, const char *) = argv[i]; + } + else + { + if (from == NULL) + from = argv[i]; + else if (to == NULL) + to = argv[i]; + else + { + print_usage(ostream, argv[0], pool); + return 2; + } + } + } + + if (!from || !to) + { + print_usage(ostream, argv[0], pool); + return 2; + } + + diff_options = svn_diff_file_options_create(pool); + + svn_err = svn_diff_file_options_parse(diff_options, options_array, pool); + if (svn_err) + { + svn_handle_error2(svn_err, stdout, FALSE, "diff: "); + return 2; + } + + svn_err = do_diff(ostream, from, to, &has_changes, + diff_options, show_c_function, pool); + if (svn_err) + { + svn_handle_error2(svn_err, stdout, FALSE, "diff: "); + return 2; + } + + return has_changes ? 
1 : 0; +} diff --git a/tools/diff/diff3.c b/tools/diff/diff3.c new file mode 100644 index 0000000..63d7dec --- /dev/null +++ b/tools/diff/diff3.c @@ -0,0 +1,99 @@ +/* diff3.c -- test driver for 3-way text merges + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + */ + + +#include <apr.h> +#include <apr_general.h> +#include <apr_file_io.h> + +#include "svn_pools.h" +#include "svn_diff.h" +#include "svn_io.h" + + +static svn_error_t * +do_diff3(svn_stream_t *ostream, + const char *original, const char *modified, const char *latest, + svn_boolean_t *has_changes, + apr_pool_t *pool) +{ + svn_diff_t *diff; + + SVN_ERR(svn_diff_file_diff3_2(&diff, original, modified, latest, + svn_diff_file_options_create(pool), pool)); + + *has_changes = svn_diff_contains_diffs(diff); + + SVN_ERR(svn_diff_file_output_merge2(ostream, diff, + original, modified, latest, + NULL, NULL, NULL, NULL, + svn_diff_conflict_display_modified_latest, + pool)); + + return NULL; +} + +int main(int argc, char *argv[]) +{ + apr_pool_t *pool; + svn_stream_t *ostream; + int rc; + svn_error_t *svn_err; + + apr_initialize(); + + pool = svn_pool_create(NULL); + + svn_err = svn_stream_for_stdout(&ostream, pool); + if (svn_err) + { + svn_handle_error2(svn_err, stdout, FALSE, "diff3: "); + rc = 2; + } + else if (argc == 4) + { + svn_boolean_t has_changes; + + svn_err = do_diff3(ostream, argv[2], argv[1], argv[3], + &has_changes, pool); + if (svn_err == NULL) + { + rc = has_changes ? 1 : 0; + } + else + { + svn_handle_error2(svn_err, stdout, FALSE, "diff3: "); + rc = 2; + } + } + else + { + svn_error_clear(svn_stream_printf(ostream, pool, + "Usage: %s <mine> <older> <yours>\n", + argv[0])); + rc = 2; + } + + apr_terminate(); + + return rc; +} diff --git a/tools/diff/diff4.c b/tools/diff/diff4.c new file mode 100644 index 0000000..084184c --- /dev/null +++ b/tools/diff/diff4.c @@ -0,0 +1,93 @@ +/* diff4.c -- test driver for 4-way text merges + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + + +#include <apr.h> +#include <apr_general.h> +#include <apr_file_io.h> + +#include "svn_pools.h" +#include "svn_diff.h" +#include "svn_io.h" + + +static svn_error_t * +do_diff4(svn_stream_t *ostream, + const char *original, + const char *modified, + const char *latest, + const char *ancestor, + apr_pool_t *pool) +{ + svn_diff_t *diff; + + SVN_ERR(svn_diff_file_diff4_2(&diff, original, modified, latest, ancestor, + svn_diff_file_options_create(pool), pool)); + SVN_ERR(svn_diff_file_output_merge2(ostream, diff, + original, modified, latest, + NULL, NULL, NULL, NULL, + svn_diff_conflict_display_modified_latest, + pool)); + + return NULL; +} + +int main(int argc, char *argv[]) +{ + apr_pool_t *pool; + svn_stream_t *ostream; + int rc = 0; + svn_error_t *svn_err; + + apr_initialize(); + + pool = svn_pool_create(NULL); + + svn_err = svn_stream_for_stdout(&ostream, pool); + if (svn_err) + { + svn_handle_error2(svn_err, stdout, FALSE, "diff4: "); + rc = 2; + } + else if (argc == 5) + { + svn_err = do_diff4(ostream, + argv[2], argv[1], argv[3], argv[4], + pool); + if (svn_err != NULL) + { + svn_handle_error2(svn_err, stdout, FALSE, "diff4: "); + rc = 2; + } + } + else + { + 
svn_error_clear(svn_stream_printf + (ostream, pool, "Usage: %s <mine> <older> <yours> <ancestor>\n", + argv[0])); + rc = 2; + } + + apr_terminate(); + + return rc; +} diff --git a/tools/dist/backport.pl b/tools/dist/backport.pl new file mode 100755 index 0000000..5a062ba --- /dev/null +++ b/tools/dist/backport.pl @@ -0,0 +1,192 @@ +#!/usr/bin/perl -l +use warnings; +use strict; + +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +use Term::ReadKey qw/ReadMode ReadKey/; +use File::Temp qw/tempfile/; + +$/ = ""; # paragraph mode + +my $SVN = $ENV{SVN} || 'svn'; # passed unquoted to sh +my $VIM = 'vim'; +my $STATUS = './STATUS'; +my $BRANCHES = '^/subversion/branches'; + +sub usage { + my $basename = $0; + $basename =~ s#.*/##; + print <<EOF; +Run this from the root of your release branch (e.g., 1.6.x) working copy. + +For each entry in STATUS, you will be prompted whether to merge it. + +WARNING: +If you accept the prompt, $basename will revert all local changes and will +commit the merge immediately. + +The 'svn' binary defined by the environment variable \$SVN, or otherwise the +'svn' found in \$PATH, will be used to manage the working copy. +EOF +} + +sub prompt { + local $\; # disable 'perl -l' effects + print "Go ahead? 
"; + + # TODO: this part was written by trial-and-error + ReadMode 'cbreak'; + my $answer = (ReadKey 0); + print $answer, "\n"; + return ($answer =~ /^y/i) ? 1 : 0; +} + +sub merge { + my %entry = @_; + + my ($logmsg_fh, $logmsg_filename) = tempfile(); + my $mergeargs; + + my $backupfile = "backport_pl.$$.tmp"; + + if ($entry{branch}) { + $mergeargs = "--reintegrate $BRANCHES/$entry{branch}"; + print $logmsg_fh "Reintergrate the $BRANCHES/$entry{branch} branch:"; + print $logmsg_fh ""; + } else { + $mergeargs = join " ", (map { "-c$_" } @{$entry{revisions}}), '^/subversion/trunk'; + if (@{$entry{revisions}} > 1) { + print $logmsg_fh "Merge the r$entry{revisions}->[0] group from trunk:"; + print $logmsg_fh ""; + } else { + print $logmsg_fh "Merge r$entry{revisions}->[0] from trunk:"; + print $logmsg_fh ""; + } + } + print $logmsg_fh $_ for @{$entry{entry}}; + close $logmsg_fh or die "Can't close $logmsg_filename: $!"; + + my $script = <<"EOF"; +#!/bin/sh +set -e +$SVN diff > $backupfile +$SVN revert -R . +$SVN up +$SVN merge $mergeargs +$VIM -e -s -n -N -i NONE -u NONE -c '/^ [*] r$entry{revisions}->[0]/normal! dap' -c wq $STATUS +$SVN commit -F $logmsg_filename +EOF + + $script .= <<"EOF" if $entry{branch}; +reinteg_rev=\`$SVN info $STATUS | sed -ne 's/Last Changed Rev: //p'\` +$SVN rm $BRANCHES/$entry{branch}\ + -m "Remove the '$entry{branch}' branch, reintegrated in r\$reinteg_rev." +EOF + + open SHELL, '|-', qw#/bin/sh -x# or die $!; + print SHELL $script; + close SHELL or warn "$0: sh($?): $!"; + + unlink $backupfile if -z $backupfile; + unlink $logmsg_filename unless $? or $!; +} + +# TODO: may need to parse other headers too? 
+sub parse_entry { + my @lines = @_; + my (@revisions, @logsummary, $branch, @votes); + # @lines = @_; + + # strip first three spaces + $_[0] =~ s/^ \* / /; + s/^ // for @_; + + # revisions + while ($_[0] =~ /^r/) { + while ($_[0] =~ s/^r(\d+)(?:,\s*)?//) { + push @revisions, $1; + } + shift; + } + + # summary + push @logsummary, shift until $_[0] =~ /^\w+:/; + + # votes + unshift @votes, pop until $_[-1] =~ /^Votes:/; + pop; + + # branch + while (@_) { + shift and next unless $_[0] =~ s/^Branch:\s*//; + $branch = (shift || shift || die "Branch header found without value"); + $branch =~ s#.*/##; + $branch =~ s/^\s*//; + $branch =~ s/\s*$//; + } + + return ( + revisions => [@revisions], + logsummary => [@logsummary], + branch => $branch, + votes => [@votes], + entry => [@lines], + ); +} + +sub handle_entry { + my %entry = parse_entry @_; + + print ""; + print "\n>>> The r$entry{revisions}->[0] group:"; + print join ", ", map { "r$_" } @{$entry{revisions}}; + print "$BRANCHES/$entry{branch}" if $entry{branch}; + print ""; + print for @{$entry{logsummary}}; + print ""; + print for @{$entry{votes}}; + print ""; + print "Vetoes found!" if grep { /^ -1:/ } @{$entry{votes}}; + + # TODO: this changes ./STATUS, which we're reading below, but + # on my system the loop in main() doesn't seem to care. + merge %entry if prompt; + + 1; +} + +sub main { + usage, exit 0 if @ARGV; + usage, exit 1 unless -r $STATUS; + + @ARGV = $STATUS; + while (<>) { + my @lines = split /\n/; + + # Section header? + print "\n\n=== $lines[0]" and next if $lines[0] =~ /^[A-Z].*:$/i; + + # Backport entry? 
+ handle_entry @lines and next if $lines[0] =~ /^ \*/; + + warn "Unknown entry '$lines[0]' at $ARGV:$.\n"; + } +} + +&main diff --git a/tools/dist/checksums.py b/tools/dist/checksums.py new file mode 100755 index 0000000..bd6d18a --- /dev/null +++ b/tools/dist/checksums.py @@ -0,0 +1,108 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# Check MD5 and SHA1 signatures of files, using md5sums and/or +# sha1sums as manifests. Replaces the 'md5sum' and 'sha1sum' commands +# on systems that do not have them, such as Mac OS X or Windows. 
+# +# Usage: checksums.py [manifest] +# where "os.path.basename(manifest)" is either "md5sums" or "sha1sums" +# +# Tested with the following Python versions: +# 2.4 2.5 2.6 2.7 3.2 + + +import os +import shutil +import sys + +try: + from hashlib import md5 + from hashlib import sha1 +except ImportError: + from md5 import md5 + from sha import sha as sha1 + + +class Digester(object): + BUFFER_SIZE = 1024*1024 + + def __init__(self, factory): + self.factory = factory + self.digest_size = factory().digest_size + self.hashfunc = None + + def reset(self): + self.hashfunc = self.factory() + + def write(self, data): + return self.hashfunc.update(data) + + def hexdigest(self): + return self.hashfunc.hexdigest() + + +def main(manipath): + basedir, manifest = os.path.split(manipath) + + if manifest == 'md5sums': + sink = Digester(md5) + elif manifest == 'sha1sums': + sink = Digester(sha1) + else: + raise ValueError('The name of the digest manifest must be ' + "'md5sums' or 'sha1sums', not '%s'" % manifest) + + # No 'with' statement in Python 2.4 ... + stream = None + try: + stream = open(manipath, 'r') + for line in stream: + sink.reset() + parse_digest(basedir, line.rstrip(), sink) + finally: + if stream is not None: + stream.close() + + +def parse_digest(basedir, entry, sink): + length = 2 * sink.digest_size + expected = entry[:length].lower() + filename = entry[length + 2:] + + # Still no 'with' statement in Python 2.4 ... 
+ source = None + try: + source = open(os.path.join(basedir, filename), 'rb') + shutil.copyfileobj(source, sink, sink.BUFFER_SIZE) + actual = sink.hexdigest().lower() + finally: + if source is not None: + source.close() + + if expected != actual: + raise ValueError('Mismatch: expected %s, actual %s: %s' + % (expected, actual, filename)) + print('ok: %s %s' % (actual, filename)) + + +if __name__ == '__main__': + main(sys.argv[1]) diff --git a/tools/dist/collect_sigs.py b/tools/dist/collect_sigs.py new file mode 100755 index 0000000..d7204d6 --- /dev/null +++ b/tools/dist/collect_sigs.py @@ -0,0 +1,346 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# A script intended to be useful in helping to collect signatures for a +# release. This is a pretty rough, and patches are welcome to improve it. +# +# Some thoughts about future improvement: +# * Display of per-file and per-release statistics +# * Make use of the python-gpg package (http://code.google.com/p/python-gnupg/) +# * Post to IRC when a new signature is collected +# - Since we don't want to have a long running bot, perhaps we could +# also patch wayita to accept and then echo a privmsg? 
+# * Mail dev@ when somebody submits a successful signature, and include a +# comments field which could be included in the mail. +# * Use a subversion repository instead of sqlite backend +# - no need to re-invent storage and retrieval +# - perhaps we could re-use existing CIA/mailer hooks? +# + +import sys, os +import sqlite3 + +def make_config(): + 'Output a blank config file' + + if os.path.exists('config.py'): + print "'config.py' already exists!'" + sys.exit(1) + + conf = open('config.py', 'w') + conf.write("version = ''\n") + conf.write("sigdir = ''\n") + conf.write("filesdir = ''\n") + conf.close() + + print "'config.py' generated" + +def make_db(): + 'Initialize a blank database' + + db = sqlite3.connect('sigs.db') + db.execute(''' + CREATE TABLE signatures ( + keyid TEXT, filename TEXT, signature BLOB, + UNIQUE(keyid,filename) + ); +'''); + +# This function is web-facing +def generate_asc_files(target_dir='.'): + fds = {} + def _open(filename): + if not fds.has_key(filename): + fd = open(os.path.join(target_dir, filename + '.asc'), 'w') + fds[filename] = fd + return fds[filename] + + db = sqlite3.connect(os.path.join(target_dir, 'sigs.db')) + curs = db.cursor() + curs.execute('SELECT filename, signature FROM signatures;') + for filename, signature in curs: + fd = _open(filename) + fd.write(signature + "\n") + + for fd in fds.values(): + fd.flush() + fd.close() + +actions = { + 'make_config' : make_config, + 'make_db' : make_db, + 'make_asc' : generate_asc_files, +} + + +if __name__ == '__main__': + if len(sys.argv) > 1: + if sys.argv[1] in actions: + actions[sys.argv[1]]() + sys.exit(0) + + +# Stuff below this line is the web-facing side +# ====================================================================== + + +import cgi +import cgitb +cgitb.enable() + +import string, subprocess, re + +try: + sys.path.append(os.path.dirname(sys.argv[0])) + import config +except: + print 'Content-type: text/plain' + print + print 'Cannot find config file' + 
sys.exit(1) + +r = re.compile('^\[GNUPG\:\] GOODSIG (\w*) (.*)') + +def files(): + for f in os.listdir(config.filesdir): + if config.version in f and (f.endswith('.tar.gz') or f.endswith('.zip') or f.endswith('.tar.bz2')): + yield f + +def ordinal(N): + try: + return [None, 'first', 'second', 'third', 'fourth', 'fifth', 'sixth'][N] + except: + # Huh? We only have six files to sign. + return "%dth" % N + +shell_content = ''' +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" +"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html> +<head> +<title>Signature collection for Subversion $version</title> +</head> +<body style="font-size: 14pt; text-align: justify; + background-color: #f0f0f0; padding: 0 5%%"> +<p>This page is used to collect <a href="%s/list">signatures</a> for the +proposed release of Apache Subversion $version.</p> +$content +</body> +</html> +''' % os.getenv('SCRIPT_NAME') + +signature_area = ''' +<hr/> +<form method="post" action="%s"> +<p>Paste one or more signatures in the area below:<br/> +<textarea name="signatures" rows="20" cols="80"></textarea> +</p> +<input type="submit" value="Submit" /> +<p>Any text not between the <tt>BEGIN PGP SIGNATURE</tt> +and <tt>END PGP SIGNATURE</tt> lines will be ignored.</p> +</form> +<hr/> +''' % os.getenv('SCRIPT_NAME') + + + +def split(sigs): + lines = [] + for line in sigs.split('\n'): + if lines or '--BEGIN' in line: + lines.append(line) + if '--END' in line: + yield "\n".join(lines) + "\n" + lines = [] + +def list_signatures(): + db = sqlite3.connect(os.path.join(config.sigdir, 'sigs.db')) + template = ''' +<hr/> +<p>The following signature files are available:</p> +<p>%s</p> +''' + + lines = "" + curs = db.cursor() + curs.execute('''SELECT filename, COUNT(*) FROM signatures + GROUP BY filename ORDER BY filename''') + for filename, count in curs: + lines += '<a href="%s/%s.asc">%s.asc</a>: %d signature%s<br/>\n' \ + % (os.getenv('SCRIPT_NAME'), filename, filename, + count, ['s', ''][count == 1]) + 
return (template % lines) + signature_area + +def save_valid_sig(db, filename, keyid, signature): + db.execute('INSERT OR REPLACE INTO signatures VALUES (?,?,?);', + (keyid, filename, buffer(signature))) + db.commit() + + generate_asc_files(config.sigdir) + +def verify_sig_for_file(signature, filename): + args = ['gpg', '--logger-fd', '1', '--no-tty', + '--status-fd', '2', '--verify', '-', + os.path.join(config.filesdir, filename)] + + gpg = subprocess.Popen(args, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + + gpg.stdin.write(signature) + gpg.stdin.close() + + rc = gpg.wait() + output = gpg.stdout.read() + status = gpg.stderr.read() + + if rc: + return (False, status + output) + + lines = status.split('\n') + for line in lines: + match = r.search(line) + if match: + keyid = match.group(1) + user = match.group(2) + + return (True, (filename, keyid, user)) + +def verify_sig(signature): + all_failures = "" + for filename in files(): + (verified, result) = verify_sig_for_file(signature, filename) + if verified: + return (verified, result) + else: + all_failures += "%s:\n[[[\n%s]]]\n\n" % (filename, result) + return (False, all_failures) + +def process_sigs(signatures): + success = ''' + <p style="color: green;">All %d signatures verified!</p> +''' + failure = ''' + <p style="color: red;">%d of %d signatures failed to verify; details below.</p> +''' + c_verified = ''' + <p style="color: green;">The signature is verified!</p> + <p>Filename: <code>%s</code></p> + <p>Key ID: <code>%s</code></p> + <p>User: <code>%s</code></p> + <p>This signature has been saved, and will be included as part of the + release signatures.</p> +''' + c_unverified = ''' + <p style="color: red;">The signature was not able to be verified!</p> + <p>Signature: <pre>%s</pre></p> + <p>Reason:</p><pre>%s</pre> + <p>Please talk to the release manager if this is in error.</p> +''' + + outcomes = [] + N_sigs = 0 + N_verified = 0 + retval = '' + + # Verify + db = 
sqlite3.connect(os.path.join(config.sigdir, 'sigs.db')) + for signature in split(signatures): + N_sigs += 1 + (verified, result) = verify_sig(signature) + outcomes.append((verified, result)) + + if verified: + (filename, keyid, user) = result + save_valid_sig(db, filename, keyid, signature) + N_verified += 1 + + # Output header + if N_verified == N_sigs: + retval += success % N_sigs + else: + retval += failure % (N_sigs-N_verified, N_sigs) + + # Output details + N = 0 + for outcome in outcomes: + N += 1 + (verified, result) = outcome + retval += "<h1>Results for the %s signature</h1>" % ordinal(N) + if verified: + (filename, keyid, user) = result + retval += c_verified % (filename, keyid[-8:], user) + else: + retval += c_unverified % (signature, result) + + return retval + signature_area + + +def cat_signatures(basename): + # strip '.asc' extension + assert basename[:-4] in files() + + # cat + ascfile = os.path.join(config.sigdir, basename) + if os.path.exists(ascfile): + return (open(ascfile, 'r').read()) + +def print_content_type(mimetype): + print "Content-Type: " + mimetype + print + +def main(): + form = cgi.FieldStorage() + pathinfo = os.getenv('PATH_INFO') + + # default value, to be changed below + content = signature_area + + if 'signatures' in form: + content = process_sigs(form['signatures'].value) + + elif pathinfo and pathinfo[1:]: + basename = pathinfo.split('/')[-1] + + if basename == 'list': + content = list_signatures() + + elif basename[:-4] in files(): + # early exit; bypass 'content' entirely + print_content_type('text/plain') + print cat_signatures(basename) + return + + # These are "global" values, not specific to our action. 
+ mapping = { + 'version' : config.version, + 'content' : content, + } + + print_content_type('text/html') + + template = string.Template(shell_content) + print template.safe_substitute(mapping) + + +if __name__ == '__main__': + main() diff --git a/tools/dist/dist.sh b/tools/dist/dist.sh new file mode 100755 index 0000000..1770a85 --- /dev/null +++ b/tools/dist/dist.sh @@ -0,0 +1,394 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# USAGE: ./dist.sh -v VERSION -r REVISION -pr REPOS-PATH +# [-alpha ALPHA_NUM|-beta BETA_NUM|-rc RC_NUM|pre PRE_NUM] +# [-apr PATH-TO-APR ] [-apru PATH-TO-APR-UTIL] +# [-apri PATH-TO-APR-ICONV] [-neon PATH-TO-NEON] +# [-serf PATH-TO-SERF] [-zlib PATH-TO-ZLIB] +# [-sqlite PATH-TO-SQLITE] [-zip] [-sign] +# +# Create a distribution tarball, labelling it with the given VERSION. +# The tarball will be constructed from the root located at REPOS-PATH, +# in REVISION. For example, the command line: +# +# ./dist.sh -v 1.4.0 -r ????? -pr branches/1.4.x +# +# will create a 1.4.0 release tarball. Make sure you have apr, +# apr-util, neon, serf, zlib and sqlite subdirectories in your current +# working directory or specify the path to them with the -apr, -apru, +# -neon or -zlib options. 
For example: +# ./dist.sh -v 1.4.0 -r ????? -pr branches/1.4.x \ +# -apr ~/in-tree-libraries/apr-0.9.12 \ +# -apru ~/in-tree-libraries/apr-util-0.9.12 \ +# -neon ~/in-tree-libraries/neon-0.25.5 \ +# -zlib ~/in-tree-libraries/zlib-1.2.3 +# +# Note that there is _no_ need to run dist.sh from a Subversion +# working copy, so you may wish to create a dist-resources directory +# containing the apr/, apr-util/, neon/, serf/, zlib/ and sqlite/ +# dependencies, and run dist.sh from that. +# +# When building alpha, beta or rc tarballs pass the appropriate flag +# followed by a number. For example "-alpha 5", "-beta 3", "-rc 2". +# +# If neither an -alpha, -beta, -pre or -rc option is specified, a release +# tarball will be built. +# +# To build a Windows zip file package, additionally pass -zip and the +# path to apr-iconv with -apri. + + +USAGE="USAGE: ./dist.sh -v VERSION -r REVISION -pr REPOS-PATH \ +[-alpha ALPHA_NUM|-beta BETA_NUM|-rc RC_NUM|-pre PRE_NUM] \ +[-apr APR_PATH ] [-apru APR_UTIL_PATH] [-apri APR_ICONV_PATH] \ +[-neon NEON_PATH ] [-serf SERF_PATH] [-zlib ZLIB_PATH] \ +[-sqlite SQLITE_PATH] [-zip] [-sign] + EXAMPLES: ./dist.sh -v 0.36.0 -r 8278 -pr branches/foo + ./dist.sh -v 0.36.0 -r 8278 -pr trunk + ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 + ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -alpha 1 + ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -beta 1 + ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -pre 1 + ./dist.sh -v 0.36.0 -r 8282 -rs 8278 -pr tags/0.36.0 -nightly r8282" + +# Let's check and set all the arguments +ARG_PREV="" + +for ARG in $@ +do + if [ -n "$ARG_PREV" ]; then + case $ARG_PREV in + -v) VERSION="$ARG" ;; + -r) REVISION="$ARG" ;; + -pr) REPOS_PATH="$ARG" ;; + -alpha) ALPHA="$ARG" ;; + -beta) BETA="$ARG" ;; + -pre) PRE="$ARG" ;; + -nightly) NIGHTLY="$ARG" ;; + -rc) RC="$ARG" ;; + -apr) APR_PATH="$ARG" ;; + -apru) APRU_PATH="$ARG" ;; + -apri) APRI_PATH="$ARG" ;; + -zlib) ZLIB_PATH="$ARG" ;; + -sqlite) 
SQLITE_PATH="$ARG" ;; + -neon) NEON_PATH="$ARG" ;; + -serf) SERF_PATH="$ARG" ;; + esac + ARG_PREV="" + else + case $ARG in + -v|-r|-rs|-pr|-alpha|-beta|-pre|-rc|-apr|-apru|-apri|-zlib|-sqlite|-neon|-serf|-nightly) + ARG_PREV=$ARG + ;; + -zip) ZIP=1 ;; + -sign) SIGN=1 ;; + *) + echo " $USAGE" + exit 1 + ;; + esac + fi +done + +if [ -n "$ALPHA" ] && [ -n "$BETA" ] && [ -n "$NIGHTLY" ] && [ -n "$PRE" ] || + [ -n "$ALPHA" ] && [ -n "$RC" ] && [ -n "$NIGHTLY" ] && [ -n "$PRE" ] || + [ -n "$BETA" ] && [ -n "$RC" ] && [ -n "$NIGHTLY" ] && [ -n "$PRE" ] || + [ -n "$ALPHA" ] && [ -n "$BETA" ] && [ -n "$RC" ] && [ -n "$PRE" ] || + [ -n "$ALPHA" ] && [ -n "$BETA" ] && [ -n "$RC" ] && [ -n "$PRE" ]; then + echo " $USAGE" + exit 1 +elif [ -n "$ALPHA" ] ; then + VER_TAG="Alpha $ALPHA" + VER_NUMTAG="-alpha$ALPHA" +elif [ -n "$BETA" ] ; then + VER_TAG="Beta $BETA" + VER_NUMTAG="-beta$BETA" +elif [ -n "$RC" ] ; then + VER_TAG="Release Candidate $RC" + VER_NUMTAG="-rc$RC" +elif [ -n "$NIGHTLY" ] ; then + VER_TAG="Nightly Build ($NIGHTLY)" + VER_NUMTAG="-nightly-$NIGHTLY" +elif [ -n "$PRE" ] ; then + VER_TAG="Pre-release $PRE" + VER_NUMTAG="-pre$PRE" +else + VER_TAG="r$REVISION" + VER_NUMTAG="" +fi + +if [ -n "$ZIP" ] ; then + EXTRA_EXPORT_OPTIONS="--native-eol CRLF" +fi + +if [ -z "$VERSION" ] || [ -z "$REVISION" ] || [ -z "$REPOS_PATH" ]; then + echo " $USAGE" + exit 1 +fi + +if [ -z "$APR_PATH" ]; then + APR_PATH='apr' +fi + +if [ -z "$APRU_PATH" ]; then + APRU_PATH='apr-util' +fi + +if [ -z "$NEON_PATH" ]; then + NEON_PATH='neon' +fi + +if [ -z "$SERF_PATH" ]; then + SERF_PATH='serf' +fi + +if [ -z "$APRI_PATH" ]; then + APRI_PATH='apr-iconv' +fi + +if [ -z "$ZLIB_PATH" ]; then + ZLIB_PATH='zlib' +fi + +if [ -z "$SQLITE_PATH" ]; then + SQLITE_PATH='sqlite-amalgamation' +fi + +REPOS_PATH="`echo $REPOS_PATH | sed 's/^\/*//'`" + +# See comment when we 'roll' the tarballs as to why pax is required. +type pax > /dev/null 2>&1 +if [ $? 
-ne 0 ] && [ -z "$ZIP" ]; then + echo "ERROR: pax could not be found" + exit 1 +fi + +# Default to 'wget', but allow 'curl' to be used if available. +HTTP_FETCH=wget +HTTP_FETCH_OUTPUT="-O" +type wget > /dev/null 2>&1 +if [ $? -ne 0 ]; then + type curl > /dev/null 2>&1 + if [ $? -ne 0 ]; then + echo "Neither curl or wget found." + exit 2 + fi + HTTP_FETCH=curl + HTTP_FETCH_OUTPUT="-o" +fi + +DISTNAME="subversion-${VERSION}${VER_NUMTAG}" +DIST_SANDBOX=.dist_sandbox +DISTPATH="$DIST_SANDBOX/$DISTNAME" + +echo "Distribution will be named: $DISTNAME" +echo " constructed from path: /$REPOS_PATH" +echo " constructed from revision: $REVISION" + +rm -rf "$DIST_SANDBOX" +mkdir "$DIST_SANDBOX" +echo "Removed and recreated $DIST_SANDBOX" + +LC_ALL=C +LANG=C +TZ=UTC +export LC_ALL +export LANG +export TZ + +echo "Exporting $REPOS_PATH r$REVISION into sandbox..." +(cd "$DIST_SANDBOX" && \ + ${SVN:-svn} export -q $EXTRA_EXPORT_OPTIONS -r "$REVISION" \ + "http://svn.apache.org/repos/asf/subversion/$REPOS_PATH" \ + "$DISTNAME" --username none --password none) + +rm -f "$DISTPATH/STATUS" + +# Remove contrib/ from our distribution tarball. Some of it is of +# unknown license, and usefulness. +# (See http://svn.haxx.se/dev/archive-2009-04/0166.shtml for discussion.) +rm -rf "$DISTPATH/contrib" + +# Remove notes/ from our distribution tarball. It's large, but largely +# blue-sky and out-of-date, and of questionable use to end users. +rm -rf "$DISTPATH/notes" + +# Remove packages/ from the tarball. +# (See http://svn.haxx.se/dev/archive-2009-12/0205.shtml) +rm -rf "$DISTPATH/packages" + +# Check for a recent enough Python +# Instead of attempting to deal with various line ending issues, just export +# the find_python script manually. 
+${svn:-svn} export -q -r "$REVISION" \ + "http://svn.apache.org/repos/asf/subversion/$REPOS_PATH/build/find_python.sh" \ + --username none --password none "$DIST_SANDBOX/find_python.sh" +PYTHON="`$DIST_SANDBOX/find_python.sh`" +if test -z "$PYTHON"; then + echo "Python 2.4 or later is required to run dist.sh" + echo "If you have a suitable Python installed, but not on the" + echo "PATH, set the environment variable PYTHON to the full path" + echo "to the Python executable, and re-run dist.sh" + exit 1 +fi + +find "$DISTPATH" -name config.nice -print | xargs rm -f + +# Massage the new version number into svn_version.h. We need to do +# this before running autogen.sh --release on the subversion code, +# because otherwise svn_version.h's mtime makes SWIG files regenerate +# on end-user's systems, when they should just be compiled by the +# Release Manager and left at that. + +ver_major=`echo $VERSION | cut -d '.' -f 1` +ver_minor=`echo $VERSION | cut -d '.' -f 2` +ver_patch=`echo $VERSION | cut -d '.' -f 3` + +vsn_file="$DISTPATH/subversion/include/svn_version.h" + +if [ "$VERSION" != "trunk" ]; then + sed \ + -e "/#define *SVN_VER_MAJOR/s/[0-9]\+/$ver_major/" \ + -e "/#define *SVN_VER_MINOR/s/[0-9]\+/$ver_minor/" \ + -e "/#define *SVN_VER_PATCH/s/[0-9]\+/$ver_patch/" \ + -e "/#define *SVN_VER_TAG/s/\".*\"/\" ($VER_TAG)\"/" \ + -e "/#define *SVN_VER_NUMTAG/s/\".*\"/\"$VER_NUMTAG\"/" \ + -e "/#define *SVN_VER_REVISION/s/[0-9]\+/$REVISION/" \ + < "$vsn_file" > "$vsn_file.tmp" +else + # Don't munge the version number if we are creating a nightly trunk tarball + sed \ + -e "/#define *SVN_VER_TAG/s/\".*\"/\" ($VER_TAG)\"/" \ + -e "/#define *SVN_VER_NUMTAG/s/\".*\"/\"$VER_NUMTAG\"/" \ + -e "/#define *SVN_VER_REVISION/s/[0-9]\+/$REVISION/" \ + < "$vsn_file" > "$vsn_file.tmp" +fi + +mv -f "$vsn_file.tmp" "$vsn_file" + +echo "Creating svn_version.h.dist, for use in tagging matching tarball..." 
+cp "$vsn_file" "svn_version.h.dist" + +# Don't run autogen.sh when we are building the Windows zip file. +# Windows users don't need the files generated by this command, +# especially not the generated projects or SWIG files. +if [ -z "$ZIP" ] ; then + echo "Running ./autogen.sh in sandbox, to create ./configure ..." + (cd "$DISTPATH" && ./autogen.sh --release) || exit 1 +fi + +# Pre-translate the various sql-derived header files +echo "Generating SQL-derived headers..." +for f in `find "$DISTPATH/subversion" -name '*.sql'`; do + $PYTHON $DISTPATH/build/transform_sql.py $f `echo $f | sed 's/\.[^\.]*$//'`.h +done + +echo "Removing any autom4te.cache directories that might exist..." +find "$DISTPATH" -depth -type d -name 'autom4te*.cache' -exec rm -rf {} \; + +if [ -z "$ZIP" ]; then + # Do not use tar, it's probably GNU tar which produces tar files that are + # not compliant with POSIX.1 when including filenames longer than 100 chars. + # Platforms without a tar that understands the GNU tar extension will not + # be able to extract the resulting tar file. Use pax to produce POSIX.1 + # tar files. + echo "Rolling $DISTNAME.tar ..." + (cd "$DIST_SANDBOX" > /dev/null && pax -x ustar -w "$DISTNAME") > \ + "$DISTNAME.tar" + + echo "Compressing to $DISTNAME.tar.bz2 ..." + bzip2 -9fk "$DISTNAME.tar" + + # Use the gzip -n flag - this prevents it from storing the original name of + # the .tar file, and far more importantly, the mtime of the .tar file, in the + # produced .tar.gz file. This is important, because it makes the gzip + # encoding reproducable by anyone else who has an similar version of gzip, + # and also uses "gzip -9n". This means that committers who want to GPG-sign + # both the .tar.gz and the .tar.bz2 can download the .tar.bz2 (which is + # smaller), and locally generate an exact duplicate of the official .tar.gz + # file. This metadata is data on the temporary uncompressed tarball itself, + # not any of its contents, so there will be no effect on end-users. 
+ echo "Compressing to $DISTNAME.tar.gz ..." + gzip -9nf "$DISTNAME.tar" +else + echo "Rolling $DISTNAME.zip ..." + (cd "$DIST_SANDBOX" > /dev/null && zip -q -r - "$DISTNAME") > \ + "$DISTNAME.zip" +fi +echo "Removing sandbox..." +rm -rf "$DIST_SANDBOX" + +sign_file() +{ + if [ -n "$SIGN" ]; then + type gpg > /dev/null 2>&1 + if [ $? -eq 0 ]; then + if test -n "$user"; then + args="--default-key $user" + fi + for ARG in $@ + do + gpg --armor $args --detach-sign $ARG + done + else + type pgp > /dev/null 2>&1 + if [ $? -eq 0 ]; then + if test -n "$user"; then + args="-u $user" + fi + for ARG in $@ + do + pgp -sba $ARG $args + done + fi + fi + fi +} + +echo "" +echo "Done:" +if [ -z "$ZIP" ]; then + ls -l "$DISTNAME.tar.bz2" "$DISTNAME.tar.gz" + sign_file $DISTNAME.tar.gz $DISTNAME.tar.bz2 + echo "" + echo "md5sums:" + md5sum "$DISTNAME.tar.bz2" "$DISTNAME.tar.gz" + type sha1sum > /dev/null 2>&1 + if [ $? -eq 0 ]; then + echo "" + echo "sha1sums:" + sha1sum "$DISTNAME.tar.bz2" "$DISTNAME.tar.gz" + fi +else + ls -l "$DISTNAME.zip" + sign_file $DISTNAME.zip + echo "" + echo "md5sum:" + md5sum "$DISTNAME.zip" + type sha1sum > /dev/null 2>&1 + if [ $? -eq 0 ]; then + echo "" + echo "sha1sum:" + sha1sum "$DISTNAME.zip" + fi +fi diff --git a/tools/dist/download-release.sh b/tools/dist/download-release.sh new file mode 100755 index 0000000..9b0737d --- /dev/null +++ b/tools/dist/download-release.sh @@ -0,0 +1,28 @@ +#!/bin/bash +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +BASEURL=$1 +VERSION=$2 +wget -nc $BASEURL/{{md5,sha1}sums,svn_version.h.dist,subversion{-deps,}-$VERSION.{{zip,tar.bz2}{.asc,},tar.gz.asc}} +bzip2 -dk subversion{-deps,}-$VERSION.tar.bz2 +gzip -9n subversion{-deps,}-$VERSION.tar +md5sum -c md5sums +sha1sum -c sha1sums diff --git a/tools/dist/extract-for-examination.sh b/tools/dist/extract-for-examination.sh new file mode 100755 index 0000000..1dfe7df --- /dev/null +++ b/tools/dist/extract-for-examination.sh @@ -0,0 +1,37 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +for i in *.tar.bz2; do + base=${i%.tar.bz2}-tar-bz2 + mkdir $base + cd $base + tar -jxvf ../$i + cd .. +done + +for i in *.zip; do + base=${i%.zip}-zip + mkdir $base + cd $base + unzip ../$i + cd .. 
+done diff --git a/tools/dist/getsigs.py b/tools/dist/getsigs.py new file mode 100755 index 0000000..17086e1 --- /dev/null +++ b/tools/dist/getsigs.py @@ -0,0 +1,101 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# Less terrible, ugly hack of a script than getsigs.pl, but similar. Used to +# verify the signatures on the release tarballs and produce the list of who +# signed them in the format we use for the announcements. +# +# To use just run it in the directory with the signatures and tarballs and +# pass the version of subversion you want to check. It assumes gpg is on +# your path, if it isn't you should fix that. :D +# +# Script will die if any gpg process returns an error. +# +# Because I hate perl... + +import glob, subprocess, shutil, sys, re + +key_start = '-----BEGIN PGP SIGNATURE-----\n' +sig_pattern = re.compile(r'^gpg: Signature made .*? 
using \w+ key ID (\w+)') +fp_pattern = re.compile(r'^pub\s+(\w+\/\w+)[^\n]*\n\s+Key\sfingerprint\s=((\s+[0-9A-F]{4}){10})\nuid\s+([^<\(]+)\s') + + +def grab_sig_ids(): + good_sigs = {} + + for filename in glob.glob('subversion-*.asc'): + shutil.copyfile(filename, '%s.bak' % filename) + text = open(filename).read() + keys = text.split(key_start) + + for key in keys[1:]: + open(filename, 'w').write(key_start + key) + gpg = subprocess.Popen(['gpg', '--logger-fd', '1', + '--verify', filename], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + rc = gpg.wait() + output = gpg.stdout.read() + if rc: + # gpg choked, die with an error + print(output) + sys.stderr.write("BAD SIGNATURE in %s\n" % filename) + shutil.move('%s.bak' % filename, filename) + sys.exit(1) + + for line in output.split('\n'): + match = sig_pattern.match(line) + if match: + key_id = match.groups()[0] + good_sigs[key_id] = True + + shutil.move('%s.bak' % filename, filename) + + return good_sigs + + +def generate_output(good_sigs): + for id in good_sigs.keys(): + gpg = subprocess.Popen(['gpg', '--fingerprint', id], + stdout=subprocess.PIPE, stderr=subprocess.STDOUT) + rc = gpg.wait() + gpg_output = gpg.stdout.read() + if rc: + print(gpg_output) + sys.stderr.write("UNABLE TO GET FINGERPRINT FOR %s" % id) + sys.exit(1) + + gpg_output = "\n".join([ l for l in gpg_output.splitlines() + if l[0:7] != 'Warning' ]) + + fp = fp_pattern.match(gpg_output).groups() + print(" %s [%s] with fingerprint:" % (fp[3], fp[0])) + print(" %s" % fp[1]) + + +if __name__ == '__main__': + if len(sys.argv) < 2: + print("Give me a version number!") + sys.exit(1) + + generate_output(grab_sig_ids()) diff --git a/tools/dist/nightly.sh b/tools/dist/nightly.sh new file mode 100755 index 0000000..b20b641 --- /dev/null +++ b/tools/dist/nightly.sh @@ -0,0 +1,98 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +set -e + +repo=http://svn.apache.org/repos/asf/subversion +svn=svn +olds=7 + +# Parse our arguments +while getopts "cd:t:s:o:" flag; do + case $flag in + d) dir="`cd $OPTARG && pwd`" ;; # abspath + c) clean="1" ;; + t) target="$OPTARG" ;; + s) svn="$OPTARG" ;; + o) olds="$OPTARG" ;; + esac +done + +# Setup directories +if [ -n "$dir" ]; then cd $dir; else dir="."; fi +if [ -d "roll" ]; then rm -rf roll; fi +mkdir roll +if [ ! -n "$target" ]; then + if [ ! -d "target" ]; then mkdir target; fi + target="target" +fi + +abscwd=`cd $dir; pwd` + +echo "Will place results in: $target" + +# get youngest +head=`$svn info $repo/trunk | grep '^Revision' | cut -d ' ' -f 2` + +# Get the latest versions of the rolling scripts +for i in release.py dist.sh +do + $svn export -r $head $repo/trunk/tools/dist/$i@$head $dir/$i +done +# We also need ezt +$svn export -r $head $repo/trunk/build/generator/ezt.py@$head $dir/ezt.py + +# Create the environment +cd roll +echo '----------------building environment------------------' +../release.py --base-dir ${abscwd}/roll build-env + +# Roll the tarballs +echo '-------------------rolling tarball--------------------' +../release.py --base-dir ${abscwd}/roll roll --branch trunk trunk-nightly $head +cd .. 
+ +# Create the information page +echo '-------------------moving results---------------------' +./release.py --base-dir ${abscwd}/roll post-candidates trunk-nightly $head \ + --target $target +if [ ! -d "$target/dist" ]; then mkdir "$target/dist"; fi +if [ -d "$target/dist/r$head" ]; then rm -r "$target/dist/r$head"; fi +mv $target/deploy $target/dist/r$head + +# Some static links for the most recent artifacts. +ln -sf "r$head" "$target/dist/current" +ls "$target/dist/r$head" | while read fname; do + ln -sf "r$head/$fname" "$target/dist/$fname" +done + +# Clean up old results +ls -t1 "$target/dist/" | sed -e "1,${olds}d" | while read d; do + rm -rf "$target/dist/$d" +done + +# Optionally remove our working directory +if [ -n "$clean" ]; then + echo '--------------------cleaning up-----------------------' + rm -rf roll +fi + +echo '------------------------done--------------------------' diff --git a/tools/dist/rat-excludes b/tools/dist/rat-excludes new file mode 100644 index 0000000..1f817c7 --- /dev/null +++ b/tools/dist/rat-excludes @@ -0,0 +1,45 @@ +contrib/ +www/ +BUGS/ +CHANGES/ +COMMITTERS/ +HACKING/ +TRANSLATING/ +build/win32/empty.c +build/config.guess +build/config.sub +build/generator/__init__.py +build/generator/util/__init__.py +build/install-sh +doc/doxygen.conf +notes/** +packages/ +subversion/tests/cmdline/getopt_tests_data/* +subversion/bindings/swig/NOTES +subversion/libsvn_fs_base/notes/TODO +subversion/libsvn_fs_base/notes/fs-history +subversion/libsvn_fs_base/notes/structure +subversion/libsvn_fs_fs/structure +subversion/libsvn_ra_svn/protocol +subversion/bindings/javahl/doc/index.html +subversion/bindings/swig/python/tests/trac/__init__.py +subversion/bindings/swig/python/tests/trac/versioncontrol/__init__.py +subversion/bindings/ctypes-python/TODO +subversion/bindings/ctypes-python/test/test.dumpfile +subversion/bindings/ctypes-python/csvn/__init__.py +subversion/bindings/ctypes-python/csvn/ext/__init__.py 
+subversion/tests/cmdline/svntest/err.py +tools/buildbot/master/public_html/buildbot.css +tools/dist/rat-excludes +tools/dev/iz/defect.dem +tools/dev/iz/ff2csv.command +tools/hook-scripts/mailer/tests/mailer-t1.output +**/*.dump +**/*.icns +**/*.odp +**/*.pal +**/*.patch +**/*.txt +**/*.svg +**/*.rtf +**/*.example diff --git a/tools/dist/release.py b/tools/dist/release.py new file mode 100755 index 0000000..7a2acb4 --- /dev/null +++ b/tools/dist/release.py @@ -0,0 +1,649 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + + +# About this script: +# This script is intended to simplify creating Subversion releases, by +# automating as much as is possible. It works well with our Apache +# infrastructure, and should make rolling, posting, and announcing +# releases dirt simple. +# +# This script may be run on a number of platforms, but it is intended to +# be run on people.apache.org. As such, it may have dependencies (such +# as Python version) which may not be common, but are guaranteed to be +# available on people.apache.org. 
+ +# It'd be kind of nice to use the Subversion python bindings in this script, +# but people.apache.org doesn't currently have them installed + +# Stuff we need +import os +import re +import sys +import glob +import shutil +import urllib2 +import hashlib +import tarfile +import logging +import datetime +import operator +import itertools +import subprocess +import argparse # standard in Python 2.7 + +# Find ezt, using Subversion's copy, if there isn't one on the system. +try: + import ezt +except ImportError: + ezt_path = os.path.dirname(os.path.dirname(os.path.abspath(sys.path[0]))) + ezt_path = os.path.join(ezt_path, 'build', 'generator') + sys.path.append(ezt_path) + + import ezt + + +# Our required / recommended versions +autoconf_ver = '2.68' +libtool_ver = '2.4' +swig_ver = '2.0.4' + +# Some constants +repos = 'http://svn.apache.org/repos/asf/subversion' +people_host = 'minotaur.apache.org' +people_dist_dir = '/www/www.apache.org/dist/subversion' + + +#---------------------------------------------------------------------- +# Utility functions + +class Version(object): + regex = re.compile('(\d+).(\d+).(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?') + + def __init__(self, ver_str): + match = self.regex.search(ver_str) + + if not match: + raise RuntimeError("Bad version string '%s'" % ver_str) + + self.major = int(match.group(1)) + self.minor = int(match.group(2)) + self.patch = int(match.group(3)) + + if match.group(4): + self.pre = match.group(4) + self.pre_num = int(match.group(5)) + else: + self.pre = None + self.pre_num = None + + self.base = '%d.%d.%d' % (self.major, self.minor, self.patch) + + def is_prerelease(self): + return self.pre != None + + def __lt__(self, that): + if self.major < that.major: return True + if self.major > that.major: return False + + if self.minor < that.minor: return True + if self.minor > that.minor: return False + + if self.patch < that.patch: return True + if self.patch > that.patch: return False + + if not self.pre and not that.pre: 
return False + if not self.pre and that.pre: return False + if self.pre and not that.pre: return True + + # We are both pre-releases + if self.pre != that.pre: + return self.pre < that.pre + else: + return self.pre_num < that.pre_num + + def __str(self): + if self.pre: + extra = '-%s%d' % (self.pre, self.pre_num) + else: + extra = '' + + return self.base + extra + + def __repr__(self): + + return "Version('%s')" % self.__str() + + def __str__(self): + return self.__str() + + +def get_prefix(base_dir): + return os.path.join(base_dir, 'prefix') + +def get_tempdir(base_dir): + return os.path.join(base_dir, 'tempdir') + +def get_deploydir(base_dir): + return os.path.join(base_dir, 'deploy') + +def get_tmpldir(): + return os.path.join(os.path.abspath(sys.path[0]), 'templates') + +def get_tmplfile(filename): + try: + return open(os.path.join(get_tmpldir(), filename)) + except IOError: + # Hmm, we had a problem with the local version, let's try the repo + return urllib2.urlopen(repos + '/trunk/tools/dist/templates/' + filename) + +def get_nullfile(): + # This is certainly not cross platform + return open('/dev/null', 'w') + +def run_script(verbose, script): + if verbose: + stdout = None + stderr = None + else: + stdout = get_nullfile() + stderr = subprocess.STDOUT + + for l in script.split('\n'): + subprocess.check_call(l.split(), stdout=stdout, stderr=stderr) + +def download_file(url, target): + response = urllib2.urlopen(url) + target_file = open(target, 'w') + target_file.write(response.read()) + +def assert_people(): + if os.uname()[1] != people_host: + raise RuntimeError('Not running on expected host "%s"' % people_host) + +#---------------------------------------------------------------------- +# Cleaning up the environment + +def cleanup(args): + 'Remove generated files and folders.' 
+ logging.info('Cleaning') + + shutil.rmtree(get_prefix(args.base_dir), True) + shutil.rmtree(get_tempdir(args.base_dir), True) + shutil.rmtree(get_deploydir(args.base_dir), True) + + +#---------------------------------------------------------------------- +# Creating an environment to roll the release + +class RollDep(object): + 'The super class for each of the build dependencies.' + def __init__(self, base_dir, use_existing, verbose): + self._base_dir = base_dir + self._use_existing = use_existing + self._verbose = verbose + + def _test_version(self, cmd): + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + (stdout, stderr) = proc.communicate() + rc = proc.wait() + if rc: return '' + + return stdout.split('\n') + + def build(self): + if not hasattr(self, '_extra_configure_flags'): + self._extra_configure_flags = '' + cwd = os.getcwd() + tempdir = get_tempdir(self._base_dir) + tarball = os.path.join(tempdir, self._filebase + '.tar.gz') + + if os.path.exists(tarball): + if not self._use_existing: + raise RuntimeError('autoconf tarball "%s" already exists' + % tarball) + logging.info('Using existing %s.tar.gz' % self._filebase) + else: + logging.info('Fetching %s' % self._filebase) + download_file(self._url, tarball) + + # Extract tarball + tarfile.open(tarball).extractall(tempdir) + + logging.info('Building ' + self.label) + os.chdir(os.path.join(tempdir, self._filebase)) + run_script(self._verbose, + '''./configure --prefix=%s %s + make + make install''' % (get_prefix(self._base_dir), + self._extra_configure_flags)) + + os.chdir(cwd) + + +class AutoconfDep(RollDep): + def __init__(self, base_dir, use_existing, verbose): + RollDep.__init__(self, base_dir, use_existing, verbose) + self.label = 'autoconf' + self._filebase = 'autoconf-' + autoconf_ver + self._url = 'http://ftp.gnu.org/gnu/autoconf/%s.tar.gz' % self._filebase + + def have_usable(self): + output = self._test_version(['autoconf', '-V']) + if not output: return False + + 
version = output[0].split()[-1:][0] + return version == autoconf_ver + + def use_system(self): + if not self._use_existing: return False + return self.have_usable() + + +class LibtoolDep(RollDep): + def __init__(self, base_dir, use_existing, verbose): + RollDep.__init__(self, base_dir, use_existing, verbose) + self.label = 'libtool' + self._filebase = 'libtool-' + libtool_ver + self._url = 'http://ftp.gnu.org/gnu/libtool/%s.tar.gz' % self._filebase + + def have_usable(self): + output = self._test_version(['libtool', '--version']) + if not output: return False + + version = output[0].split()[-1:][0] + return version == libtool_ver + + def use_system(self): + # We unconditionally return False here, to avoid using a borked + # system libtool (I'm looking at you, Debian). + return False + + +class SwigDep(RollDep): + def __init__(self, base_dir, use_existing, verbose, sf_mirror): + RollDep.__init__(self, base_dir, use_existing, verbose) + self.label = 'swig' + self._filebase = 'swig-' + swig_ver + self._url = 'http://sourceforge.net/projects/swig/files/swig/%(swig)s/%(swig)s.tar.gz/download?use_mirror=%(sf_mirror)s' % \ + { 'swig' : self._filebase, + 'sf_mirror' : sf_mirror } + self._extra_configure_flags = '--without-pcre' + + def have_usable(self): + output = self._test_version(['swig', '-version']) + if not output: return False + + version = output[1].split()[-1:][0] + return version == swig_ver + + def use_system(self): + if not self._use_existing: return False + return self.have_usable() + + +def build_env(args): + 'Download prerequisites for a release and prepare the environment.' 
+ logging.info('Creating release environment') + + try: + os.mkdir(get_prefix(args.base_dir)) + os.mkdir(get_tempdir(args.base_dir)) + except OSError: + if not args.use_existing: + raise + + autoconf = AutoconfDep(args.base_dir, args.use_existing, args.verbose) + libtool = LibtoolDep(args.base_dir, args.use_existing, args.verbose) + swig = SwigDep(args.base_dir, args.use_existing, args.verbose, + args.sf_mirror) + + # iterate over our rolling deps, and build them if needed + for dep in [autoconf, libtool, swig]: + if dep.use_system(): + logging.info('Using system %s' % dep.label) + else: + dep.build() + + +#---------------------------------------------------------------------- +# Create release artifacts + +def roll_tarballs(args): + 'Create the release artifacts.' + extns = ['zip', 'tar.gz', 'tar.bz2'] + + if args.branch: + branch = args.branch + else: + branch = args.version.base[:-1] + 'x' + + logging.info('Rolling release %s from branch %s@%d' % (args.version, + branch, args.revnum)) + + # Ensure we've got the appropriate rolling dependencies available + autoconf = AutoconfDep(args.base_dir, False, args.verbose) + libtool = LibtoolDep(args.base_dir, False, args.verbose) + swig = SwigDep(args.base_dir, False, args.verbose, None) + + for dep in [autoconf, libtool, swig]: + if not dep.have_usable(): + raise RuntimeError('Cannot find usable %s' % dep.label) + + # Make sure CHANGES is sync'd + if branch != 'trunk': + trunk_CHANGES = '%s/trunk/CHANGES@%d' % (repos, args.revnum) + branch_CHANGES = '%s/branches/%s/CHANGES@%d' % (repos, branch, + args.revnum) + proc = subprocess.Popen(['svn', 'diff', '--summarize', branch_CHANGES, + trunk_CHANGES], + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + (stdout, stderr) = proc.communicate() + proc.wait() + + if stdout: + raise RuntimeError('CHANGES not synced between trunk and branch') + + # Create the output directory + if not os.path.exists(get_deploydir(args.base_dir)): + os.mkdir(get_deploydir(args.base_dir)) + + # 
For now, just delegate to dist.sh to create the actual artifacts + extra_args = '' + if args.version.is_prerelease(): + extra_args = '-%s %d' % (args.version.pre, args.version.pre_num) + logging.info('Building UNIX tarballs') + run_script(args.verbose, '%s/dist.sh -v %s -pr %s -r %d %s' + % (sys.path[0], args.version.base, branch, args.revnum, + extra_args) ) + logging.info('Buildling Windows tarballs') + run_script(args.verbose, '%s/dist.sh -v %s -pr %s -r %d -zip %s' + % (sys.path[0], args.version.base, branch, args.revnum, + extra_args) ) + + # Move the results to the deploy directory + logging.info('Moving artifacts and calculating checksums') + for e in extns: + if args.version.pre == 'nightly': + filename = 'subversion-trunk.%s' % e + else: + filename = 'subversion-%s.%s' % (args.version, e) + + shutil.move(filename, get_deploydir(args.base_dir)) + filename = os.path.join(get_deploydir(args.base_dir), filename) + m = hashlib.sha1() + m.update(open(filename, 'r').read()) + open(filename + '.sha1', 'w').write(m.hexdigest()) + + shutil.move('svn_version.h.dist', get_deploydir(args.base_dir)) + + # And we're done! + + +#---------------------------------------------------------------------- +# Post the candidate release artifacts + +def post_candidates(args): + 'Post the generated tarballs to web-accessible directory.' 
+ if args.target: + target = args.target + else: + target = os.path.join(os.getenv('HOME'), 'public_html', 'svn', + args.version) + + if args.code_name: + dirname = args.code_name + else: + dirname = 'deploy' + + if not os.path.exists(target): + os.makedirs(target) + + data = { 'version' : args.version, + 'revnum' : args.revnum, + 'dirname' : dirname, + } + + # Choose the right template text + if args.version.is_prerelease(): + if args.version.pre == 'nightly': + template_filename = 'nightly-candidates.ezt' + else: + template_filename = 'rc-candidates.ezt' + else: + template_filename = 'stable-candidates.ezt' + + template = ezt.Template() + template.parse(get_tmplfile(template_filename).read()) + template.generate(open(os.path.join(target, 'index.html'), 'w'), data) + + logging.info('Moving tarballs to %s' % os.path.join(target, dirname)) + if os.path.exists(os.path.join(target, dirname)): + shutil.rmtree(os.path.join(target, dirname)) + shutil.copytree(get_deploydir(args.base_dir), os.path.join(target, dirname)) + + +#---------------------------------------------------------------------- +# Clean dist + +def clean_dist(args): + 'Clean the distribution directory of all but the most recent artifacts.' 
+ + regex = re.compile('subversion-(\d+).(\d+).(\d+)(?:-(?:(rc|alpha|beta)(\d+)))?') + + if not args.dist_dir: + assert_people() + args.dist_dir = people_dist_dir + + logging.info('Cleaning dist dir \'%s\'' % args.dist_dir) + + filenames = glob.glob(os.path.join(args.dist_dir, 'subversion-*.tar.gz')) + versions = [] + for filename in filenames: + versions.append(Version(filename)) + + for k, g in itertools.groupby(sorted(versions), + lambda x: (x.major, x.minor)): + releases = list(g) + logging.info("Saving release '%s'", releases[-1]) + + for r in releases[:-1]: + for filename in glob.glob(os.path.join(args.dist_dir, + 'subversion-%s.*' % r)): + logging.info("Removing '%s'" % filename) + os.remove(filename) + + +#---------------------------------------------------------------------- +# Write announcements + +def write_news(args): + 'Write text for the Subversion website.' + data = { 'date' : datetime.date.today().strftime('%Y%m%d'), + 'date_pres' : datetime.date.today().strftime('%Y-%m-%d'), + 'version' : str(args.version), + 'version_base' : args.version.base, + } + + if args.version.is_prerelease(): + template_filename = 'rc-news.ezt' + else: + template_filename = 'stable-news.ezt' + + template = ezt.Template() + template.parse(get_tmplfile(template_filename).read()) + template.generate(sys.stdout, data) + + +def get_sha1info(args): + 'Return a list of sha1 info for the release' + sha1s = glob.glob(os.path.join(get_deploydir(args.base_dir), '*.sha1')) + + class info(object): + pass + + sha1info = [] + for s in sha1s: + i = info() + i.filename = os.path.basename(s)[:-5] + i.sha1 = open(s, 'r').read() + sha1info.append(i) + + return sha1info + + +def write_announcement(args): + 'Write the release announcement.' 
+ sha1info = get_sha1info(args) + + data = { 'version' : args.version, + 'sha1info' : sha1info, + 'siginfo' : open('getsigs-output', 'r').read(), + 'major-minor' : args.version.base[:3], + 'major-minor-patch' : args.version.base, + } + + if args.version.is_prerelease(): + template_filename = 'rc-release-ann.ezt' + else: + template_filename = 'stable-release-ann.ezt' + + template = ezt.Template(compress_whitespace = False) + template.parse(get_tmplfile(template_filename).read()) + template.generate(sys.stdout, data) + + +#---------------------------------------------------------------------- +# Main entry point for argument parsing and handling + +def main(): + 'Parse arguments, and drive the appropriate subcommand.' + + # Setup our main parser + parser = argparse.ArgumentParser( + description='Create an Apache Subversion release.') + parser.add_argument('--clean', action='store_true', default=False, + help='Remove any directories previously created by %(prog)s') + parser.add_argument('--verbose', action='store_true', default=False, + help='Increase output verbosity') + parser.add_argument('--base-dir', default=os.getcwd(), + help='''The directory in which to create needed files and + folders. The default is the current working + directory.''') + subparsers = parser.add_subparsers(title='subcommands') + + # Setup the parser for the build-env subcommand + subparser = subparsers.add_parser('build-env', + help='''Download release prerequisistes, including autoconf, + libtool, and swig.''') + subparser.set_defaults(func=build_env) + subparser.add_argument('--sf-mirror', default='softlayer', + help='''The mirror to use for downloading files from + SourceForge. 
If in the EU, you may want to use + 'kent' for this value.''') + subparser.add_argument('--use-existing', action='store_true', default=False, + help='''Attempt to use existing build dependencies before + downloading and building a private set.''') + + # Setup the parser for the roll subcommand + subparser = subparsers.add_parser('roll', + help='''Create the release artifacts.''') + subparser.set_defaults(func=roll_tarballs) + subparser.add_argument('version', type=Version, + help='''The release label, such as '1.7.0-alpha1'.''') + subparser.add_argument('revnum', type=int, + help='''The revision number to base the release on.''') + subparser.add_argument('--branch', + help='''The branch to base the release on.''') + + # Setup the parser for the post-candidates subcommand + subparser = subparsers.add_parser('post-candidates', + help='''Build the website to host the candidate tarballs. + The default location is somewhere in ~/public_html. + ''') + subparser.set_defaults(func=post_candidates) + subparser.add_argument('version', type=Version, + help='''The release label, such as '1.7.0-alpha1'.''') + subparser.add_argument('revnum', type=int, + help='''The revision number to base the release on.''') + subparser.add_argument('--target', + help='''The full path to the destination.''') + subparser.add_argument('--code-name', + help='''A whimsical name for the release, used only for + naming the download directory.''') + + # The clean-dist subcommand + subparser = subparsers.add_parser('clean-dist', + help='''Clean the distribution directory (and mirrors) of + all but the most recent MAJOR.MINOR release. 
If no + dist-dir is given, this command will assume it is + running on people.apache.org.''') + subparser.set_defaults(func=clean_dist) + subparser.add_argument('--dist-dir', + help='''The directory to clean.''') + + # The write-news subcommand + subparser = subparsers.add_parser('write-news', + help='''Output to stdout template text for use in the news + section of the Subversion website.''') + subparser.set_defaults(func=write_news) + subparser.add_argument('version', type=Version, + help='''The release label, such as '1.7.0-alpha1'.''') + + subparser = subparsers.add_parser('write-announcement', + help='''Output to stdout template text for the emailed + release announcement.''') + subparser.set_defaults(func=write_announcement) + subparser.add_argument('version', type=Version, + help='''The release label, such as '1.7.0-alpha1'.''') + + # A meta-target + subparser = subparsers.add_parser('clean', + help='''The same as the '--clean' switch, but as a + separate subcommand.''') + subparser.set_defaults(func=cleanup) + + # Parse the arguments + args = parser.parse_args() + + # first, process any global operations + if args.clean: + cleanup(args) + + # Set up logging + logger = logging.getLogger() + if args.verbose: + logger.setLevel(logging.DEBUG) + else: + logger.setLevel(logging.INFO) + + # Fix up our path so we can use our installed versions + os.environ['PATH'] = os.path.join(get_prefix(args.base_dir), 'bin') + ':' \ + + os.environ['PATH'] + + # finally, run the subcommand, and give it the parsed arguments + args.func(args) + + +if __name__ == '__main__': + main() diff --git a/tools/dist/templates/nightly-candidates.ezt b/tools/dist/templates/nightly-candidates.ezt new file mode 100644 index 0000000..c2c6fcd --- /dev/null +++ b/tools/dist/templates/nightly-candidates.ezt @@ -0,0 +1,65 @@ +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. 
See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. + +--> + +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" +"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html> +<head> +<title>Subversion nightly tarballs (r[revnum])</title> +</head> +<body style="font-size: 14pt; text-align: justify; + background-color: #f0f0f0; padding: 0 5%%"> +<h1 style="font-size: 30pt; text-align: center; + text-decoration: underline">WARNING</h1> + +<p>The code you are about to download is an <i>automatically generated</i> +nightly release of Subversion (r[revnum]).</p> + +<p>This distribution is automatically generated from the latest sources from +<a href="http://svn.apache.org/repos/asf/subversion/trunk/">Subversion trunk</a>. +It may not even compile, and is certainly <i>not</i> suitable for any sort of +production use. This distribution has not been tested, and may cause any +number of problems, up to and including death and bodily injury. Only use this +distribution on data you aren't afraid to lose. You have been warned.</p> + +<p>We provide these for testing by those members of the community who +are interested in testing it. As such, if you are interested in helping +us test this code, you're very welcome to download and test these packages. 
+If you are looking for a copy of Subversion for production use, this +is <i>not it</i>; you should instead grab the latest stable release +from the <a +href="http://subversion.apache.org/packages.html">Download +area</a>.</p> + +<h2 style="font-size: 18pt">Note to operating system distro package +maintainers</h2> + +<p>As stated above, this is <i>not</i> an official, end-user release +of Subversion. It is a distribution intended for testing only. Please +do <i>not</i> package this distribution in any way. It should not be +made available to users who rely on their operating system distro's +packages.</p> + +<p>If you want to help us test this distribution of Subversion, you +can find the files <a href="[dirname]/">here</a>.</p> + +</body> +</html> diff --git a/tools/dist/templates/rc-candidates.ezt b/tools/dist/templates/rc-candidates.ezt new file mode 100644 index 0000000..0f46de8 --- /dev/null +++ b/tools/dist/templates/rc-candidates.ezt @@ -0,0 +1,63 @@ +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ +--> + +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" +"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html> +<head> +<title>Apache Subversion [version] tarballs</title> +</head> +<body style="font-size: 14pt; text-align: justify; + background-color: #f0f0f0; padding: 0 5%"> +<h1 style="font-size: 30pt; text-align: center; + text-decoration: underline">WARNING</h1> + +<p>The code you are about to download is a <i>Release Candidate</i> +for Apache Subversion [version] (r[revnum]).</p> + +<p>These are candidate tarballs for a pre-release version of Subversion. As +such, they are likely to contain bugs, some known, others unknown. You +are welcome and encouraged to test this release, and +<a href="http://subversion.apache.org/docs/community-guide/issues.html">report +bugs</a> back to the developers, but please keep in mind that this is +not a final release of Apache Subversion.</p> + +<p>If you are looking for a copy of Subversion for production use, this +is <i>not it</i>; you should instead grab the latest stable release +from the <a +href="http://subversion.apache.org/download/">Download area</a>.</p> + +<h2 style="font-size: 18pt">Note to operating system distro package +maintainers</h2> + +<p>As stated above, this is <i>not</i> an official, end-user release +of Subversion. It is a distribution intended for testing only, and has not +been publicly announced. When it has been announced, it still won't be +suitable for production use. If you chose to package this pre-release for +your operating system distro's management system, you must do so in a way which +clearly denotes that this is not the final release, and is only for testing +purposes. 
And please don't do so until it has been publicly announced.</p> + +<p>If you want to help us test this distribution of Subversion, you +can find the files <a href="[dirname]/">here</a>.</p> + +</body> +</html> diff --git a/tools/dist/templates/rc-news.ezt b/tools/dist/templates/rc-news.ezt new file mode 100644 index 0000000..b0468e4 --- /dev/null +++ b/tools/dist/templates/rc-news.ezt @@ -0,0 +1,22 @@ +<div class="h3" id="news-[date]"> +<h3>[date_pres] — Apache Subversion [version] Released + <a class="sectionlink" href="#news-[date]" + title="Link to this section">¶</a> +</h3> + +<p>We are please to announce to release of Apache Subversion [version]. This + release is not intended for production use, but is provided as a milestone + to encourage wider testing and feedback from intrepid users and maintainers. + Please see the + <a href="">release + announcement</a> for more information about this release, and the + <a href="/docs/release-notes/[version_base].html">release notes</a> and + <a href="http://svn.apache.org/repos/asf/subversion/tags/[version]/CHANGES"> + change log</a> for information about what will eventually be + in the [version_base].0 release.</p> + +<p>To get this release from the nearest mirror, please visit our + <a href="/download/#pre-releases">download page</a>.</p> + +</div> <!-- #news-[date] --> + diff --git a/tools/dist/templates/rc-release-ann.ezt b/tools/dist/templates/rc-release-ann.ezt new file mode 100644 index 0000000..f9af5c1 --- /dev/null +++ b/tools/dist/templates/rc-release-ann.ezt @@ -0,0 +1,58 @@ +I'm happy to announce the release of Apache Subversion [version]. 
+Please choose the mirror closest to you by visiting: + + http://subversion.apache.org/download/#pre-releases + +The SHA1 checksums are: + +[for sha1info] [sha1info.sha1] [sha1info.filename] +[end] +PGP Signatures are available at: + + http://www.apache.org/dist/subversion/subversion-[version].tar.bz2.asc + http://www.apache.org/dist/subversion/subversion-[version].tar.gz.asc + http://www.apache.org/dist/subversion/subversion-[version].zip.asc + +For this release, the following people have provided PGP signatures: + +[siginfo] +This is a pre-release for what will eventually become Apache Subversion +[major-minor-patch]. It may contain known issues, a complete list of +[major-minor-patch]-blocking issues can be found here: + + http://subversion.tigris.org/issues/buglist.cgi?component=subversion&issue_status=NEW&issue_status=STARTED&issue_status=REOPENED&target_milestone=[major-minor-patch] + +A pre-release means the Subversion developers feel that this release +is ready for widespread testing by the community. There are known issues +(and unknown ones!), so please use it at your own risk, though we do +encourage people to test this release thoroughly. Of particular note, please +remember that persistent data, such as the working copy or repository +formats may change before the final release, and there may not be an +upgrade path from the pre-releases to the final. + +As a note to operating system distro packagers: while we wish to have this +release candidate widely tested, we do not feel that it is ready for packaging +and providing to end-users through a distro package system. Packaging a +release candidate poses many problems, the biggest being that our policy lets +us break compatibility between the release candidate and the final release, if +we find something serious enough. Having many users depending on a release +candidate through their distro would cause no end of pain and frustration that +we do not want to have to deal with. 
However, if your distro has a branch that +is clearly labeled as containing experimental and often broken software, and +explicitly destined to consenting developers and integrators only, then we're +okay with packaging the release candidate there. Just don't let it near the +end users please. + + +Release notes for the [major-minor].x release series may be found at: + + http://subversion.apache.org/docs/release-notes/[major-minor].html + +You can find the list of changes between [version] and earlier versions at: + + http://svn.apache.org/repos/asf/subversion/tags/[version]/CHANGES + +Questions, comments, and bug reports to users@subversion.apache.org. + +Thanks, +- The Subversion Team diff --git a/tools/dist/templates/stable-candidates.ezt b/tools/dist/templates/stable-candidates.ezt new file mode 100644 index 0000000..3320bbb --- /dev/null +++ b/tools/dist/templates/stable-candidates.ezt @@ -0,0 +1,97 @@ +<!-- + + Licensed to the Apache Software Foundation (ASF) under one + or more contributor license agreements. See the NOTICE file + distributed with this work for additional information + regarding copyright ownership. The ASF licenses this file + to you under the Apache License, Version 2.0 (the + "License"); you may not use this file except in compliance + with the License. You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, + software distributed under the License is distributed on an + "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + KIND, either express or implied. See the License for the + specific language governing permissions and limitations + under the License. 
+ +--> + +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" +"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html> +<head> +<title>Apache Subversion [version] tarballs</title> +</head> +<body style="font-size: 14pt; text-align: justify; + background-color: #f0f0f0; padding: 0 5%"> +<h1 style="font-size: 30pt; text-align: center; + text-decoration: underline">WARNING</h1> + +<p>The code you are about to download is a <i>Release Candidate</i> +for Apache Subversion [version] (r[revnum]).</p> + + +<p>A <i>Release Candidate</i> is exactly what it sounds like: a +distribution of Subversion that may become an official release later, +<i>if and only if</i> it passes preliminary testing by those members +of the community who are interested in testing it.</p> + +<!-- , which means it is considered <strong + style="text-decoration: underline">UNRELEASED</strong> code. The + term 'release candidate' means the code works to the best knowledge + of the Subversion developers, but that it still requires testing by a + larger number of people to root out bugs.</p> --> + +<p>As such, if you are interested in helping us test this Release +Candidate, you're very welcome to download and test these packages. +If you are looking for a copy of Subversion for production use, this +is <i>not it</i>; you should instead grab the latest stable release +from the <a +href="http://subversion.tigris.org/project_packages.html">Download +area</a>.</p> + +<h2 style="font-size: 18pt">Note to operating system distro package +maintainers</h2> + +<p>As stated above, this is <i>not</i> an official, end-user release +of Subversion. It is a distribution intended for testing only. Please +do <i>not</i> package this distribution in any way. 
It should not be +made available to users who rely on their operating system distro's +packages.</p> + +<h2 style="font-size: 14pt">Why shouldn't I set up/make available a +Release Candidate for production use?</h2> + +<p style="font-size: 11pt">(Taken from a <a + href="http://svn.haxx.se/dev/archive-2005-11/1295.shtml" +>mail by Karl Fogel</a> on the subject)</p> + +<p style="font-size: 11pt">Subversion release candidates are for +testing only. We might have to withdraw one to fix bugs, and fixing +those bugs might involve changing APIs, or changing a soft-upgrade +strategy in the repository or working copy formats. If some production +users had begun depending on the new API, or had unknowingly +soft-upgraded their repository or working copy, then they'd be in for +a very unpleasant surprise when the real release comes out and doesn't +have the same API anymore, or doesn't use the same formats. Not only +would Subversion suddenly "stop working" for them, but there wouldn't +be any convenient path to get it working again, since no blessed +Subversion release would have the code needed to interpret their +legacy data.</p> + +<p style="font-size: 11pt">We encourage RC testing by users who know +how to install from a tarball independently of their OS's packaging +system. Users who install only packaged releases, however, should wait +for and use only officially released Subversions. Anything else is +playing with fire. 
When the inevitable blowup happens, both your +reputation as a packager and Subversion's reputation will suffer -- +but only one will deserve it.</p> + +<p>If you want to help us test this distribution of Subversion, you +can find the files <a href="[dirname]/">here</a>.</p> + +</body> +</html> diff --git a/tools/dist/templates/stable-release-ann.ezt b/tools/dist/templates/stable-release-ann.ezt new file mode 100644 index 0000000..c865a84 --- /dev/null +++ b/tools/dist/templates/stable-release-ann.ezt @@ -0,0 +1,30 @@ +I'm happy to announce the release of Apache Subversion [version]. +Please choose the mirror closest to you by visiting: + + http://subversion.apache.org/download/#recommended-release + +The SHA1 checksums are: + +[for sha1info] [sha1info.sha1] [sha1info.filename] +[end] +PGP Signatures are available at: + + http://www.apache.org/dist/subversion/subversion-[version].tar.bz2.asc + http://www.apache.org/dist/subversion/subversion-[version].tar.gz.asc + http://www.apache.org/dist/subversion/subversion-[version].zip.asc + +For this release, the following people have provided PGP signatures: + +[siginfo] +Release notes for the [major-minor].x release series may be found at: + + http://subversion.apache.org/docs/release-notes/[major-minor].html + +You can find the list of changes between [version] and earlier versions at: + + http://svn.apache.org/repos/asf/subversion/tags/[version]/CHANGES + +Questions, comments, and bug reports to users@subversion.apache.org. + +Thanks, +- The Subversion Team diff --git a/tools/dist/test.sh b/tools/dist/test.sh new file mode 100755 index 0000000..7a1be8a --- /dev/null +++ b/tools/dist/test.sh @@ -0,0 +1,62 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +set -e + +[ -e $HOME/.svndistrc ] && . $HOME/.svndistrc + +set -x + +[ ! -e Makefile ] && ./configure $TEST_CONFIGURE_OPTIONS +make +make swig-py +make swig-pl +make swig-rb + +make check-swig-py 2>&1 | tee tests-py.log +make check-swig-pl 2>&1 | tee tests-pl.log +make check-swig-rb SWIG_RB_TEST_VERBOSE=verbose 2>&1 | tee tests-rb.log + +TEST_DIR=`pwd`/subversion/tests/cmdline/svn-test-work +rm -rf "$TEST_DIR" +mkdir "$TEST_DIR" +sudo umount "$TEST_DIR" || true +sudo mount -t tmpfs tmpfs "$TEST_DIR" -o uid=`id -u`,mode=700,size=32M + +time make check CLEANUP=1 FS_TYPE=fsfs +mv tests.log tests-local-fsfs.log +time make check CLEANUP=1 FS_TYPE=bdb +mv tests.log tests-local-bdb.log + +./subversion/svnserve/svnserve -d -r `pwd`/subversion/tests/cmdline \ + --listen-host 127.0.0.1 --listen-port 33690 +time make check CLEANUP=1 FS_TYPE=fsfs BASE_URL=svn://localhost:33690 +mv tests.log tests-svn-fsfs.log +time make check CLEANUP=1 FS_TYPE=bdb BASE_URL=svn://localhost:33690 +mv tests.log tests-svn-bdb.log +pkill lt-svnserve + +time CLEANUP=1 FS_TYPE=fsfs ./subversion/tests/cmdline/davautocheck.sh +mv tests.log tests-dav-fsfs.log +time CLEANUP=1 FS_TYPE=bdb ./subversion/tests/cmdline/davautocheck.sh +mv tests.log tests-dav-bdb.log + +sudo umount "$TEST_DIR" diff --git a/tools/examples/SvnCLBrowse b/tools/examples/SvnCLBrowse new file mode 100755 index 0000000..43e16ef --- /dev/null +++ 
b/tools/examples/SvnCLBrowse @@ -0,0 +1,489 @@ +#!/usr/bin/python +# +# SvnCLBrowse -- graphical Subversion changelist browser +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +# This script requires Python 2.5 + +import sys +import os +import getopt + +# Try to import the wxWidgets modules. +try: + import wx + import wx.xrc +except ImportError: + sys.stderr.write(""" +ERROR: This program requires the wxWidgets Python bindings, which you + do not appear to have installed. + +""") + raise + +# Try to import the Subversion modules. +try: + import svn.client, svn.wc, svn.core +except ImportError: + sys.stderr.write(""" +ERROR: This program requires the Subversion Python bindings, which you + do not appear to have installed. 
+ +""") + raise + +status_code_map = { + svn.wc.status_none : ' ', + svn.wc.status_normal : ' ', + svn.wc.status_added : 'A', + svn.wc.status_missing : '!', + svn.wc.status_incomplete : '!', + svn.wc.status_deleted : 'D', + svn.wc.status_replaced : 'R', + svn.wc.status_modified : 'M', + svn.wc.status_merged : 'G', + svn.wc.status_conflicted : 'C', + svn.wc.status_obstructed : '~', + svn.wc.status_ignored : 'I', + svn.wc.status_external : 'X', + svn.wc.status_unversioned : '?', + } + +def output_info(path, info, window): + window.AppendText("Path: %s\n" % os.path.normpath(path)) + if info.kind != svn.core.svn_node_dir: + window.AppendText("Name: %s\n" % os.path.basename(path)) + if info.URL: + window.AppendText("URL: %s\n" % info.URL) + if info.repos_root_URL: + window.AppendText("Repository Root: %s\n" % info.repos_root_URL) + if info.repos_UUID: + window.AppendText("Repository UUID: %s\n" % info.repos_UUID) + if info.rev >= 0: + window.AppendText("Revision: %ld\n" % info.rev) + if info.kind == svn.core.svn_node_file: + window.AppendText("Node Kind: file\n") + elif info.kind == svn.core.svn_node_dir: + window.AppendText("Node Kind: directory\n") + elif info.kind == svn.core.svn_node_none: + window.AppendText("Node Kind: none\n") + else: + window.AppendText("Node Kind: unknown\n") + if info.has_wc_info: + if info.schedule == svn.wc.schedule_normal: + window.AppendText("Schedule: normal\n") + elif info.schedule == svn.wc.schedule_add: + window.AppendText("Schedule: add\n") + elif info.schedule == svn.wc.schedule_delete: + window.AppendText("Schedule: delete\n") + elif info.schedule == svn.wc.schedule_replace: + window.AppendText("Schedule: replace\n") + if info.depth == svn.core.svn_depth_unknown: + pass + elif info.depth == svn.core.svn_depth_empty: + window.AppendText("Depth: empty\n") + elif info.depth == svn.core.svn_depth_files: + window.AppendText("Depth: files\n") + elif info.depth == svn.core.svn_depth_immediates: + window.AppendText("Depth: immediates\n") + 
elif info.depth == svn.core.svn_depth_infinity: + pass + else: + window.AppendText("Depth: INVALID\n") + if info.copyfrom_url: + window.AppendText("Copied From URL: %s\n" % info.copyfrom_url) + if info.copyfrom_rev >= 0: + window.AppendText("Copied From Rev: %ld\n" % info.copyfrom_rev) + if info.last_changed_author: + window.AppendText("Last Changed Author: %s\n" % info.last_changed_author) + if info.last_changed_rev >= 0: + window.AppendText("Last Changed Rev: %ld\n" % info.last_changed_rev) + if info.last_changed_date: + window.AppendText("Last Changed Date: %s\n" % + svn.core.svn_time_to_human_cstring(info.last_changed_date)) + if info.has_wc_info: + if info.text_time: + window.AppendText("Text Last Updated: %s\n" % + svn.core.svn_time_to_human_cstring(info.text_time)) + if info.prop_time: + window.AppendText("Properties Last Updated: %s\n" % + svn.core.svn_time_to_human_cstring(info.prop_time)) + if info.checksum: + window.AppendText("Checksum: %s\n" % info.checksum) + if info.conflict_old: + window.AppendText("Conflict Previous Base File: %s\n" % info.conflict_old) + if info.conflict_wrk: + window.AppendText("Conflict Previous Working File: %s\n" % info.conflict_wrk) + if info.conflict_new: + window.AppendText("Conflict Current Base File: %s\n" % info.conflict_new) + if info.prejfile: + window.AppendText("Conflict Properties File: %s\n" % info.prejfile) + if info.lock: + if info.lock.token: + window.AppendText("Lock Token: %s\n" % info.lock.token) + if info.lock.owner: + window.AppendText("Lock Owner: %s\n" % info.lock.owner) + if info.lock.creation_date: + window.AppendText("Lock Created: %s\n" % + svn.core.svn_time_to_human_cstring(info.lock.creation_date)) + if info.lock.expiration_date: + window.AppendText("Lock Expires: %s\n" % + svn.core.svn_time_to_human_cstring(info.lock.expiration_date)) + if info.lock.comment: + num_lines = len(info.lock.comment.split("\n")) + window.AppendText("Lock Comment (%d line%s): %s\n" + % (num_lines, num_lines > 1 and "s" or 
"", info.lock.comment)) + if info.changelist: + window.AppendText("Changelist: %s\n" % info.changelist) + window.AppendText("\n") + +class _item: + pass + +class SvnCLBrowse(wx.App): + def __init__(self, wc_dir): + svn.core.svn_config_ensure(None) + self.svn_ctx = svn.client.ctx_t() + self.svn_ctx.config = svn.core.svn_config_get_config(None) + if wc_dir is not None: + self.wc_dir = svn.core.svn_path_canonicalize(wc_dir) + else: + self.wc_dir = wc_dir + wx.App.__init__(self) + + def OnInit(self): + self.SetAppName("SvnCLBrowse") + + self.xrc = wx.xrc.EmptyXmlResource() + wx.FileSystem.AddHandler(wx.MemoryFSHandler()) + wx.MemoryFSHandler.AddFile('XRC/SvnCLBrowse.xrc', _XML_RESOURCE) + self.xrc.Load('memory:XRC/SvnCLBrowse.xrc') + + # XML Resource stuff. + self.resources = _item() + self.resources.CLBFrame = self.xrc.LoadFrame(None, 'CLBFrame') + self.resources.CLBMenuBar = self.xrc.LoadMenuBar('CLBMenuBar') + self.resources.CLBMenuFileQuit = self.xrc.GetXRCID('CLBMenuFileQuit') + self.resources.CLBMenuOpsInfo = self.xrc.GetXRCID('CLBMenuOpsInfo') + self.resources.CLBMenuOpsMembers = self.xrc.GetXRCID('CLBMenuOpsMembers') + self.resources.CLBMenuHelpAbout = self.xrc.GetXRCID('CLBMenuHelpAbout') + self.resources.CLBDirNav = self.resources.CLBFrame.FindWindowById( + self.xrc.GetXRCID('CLBDirNav')) + self.resources.CLBChangelists = self.resources.CLBFrame.FindWindowById( + self.xrc.GetXRCID('CLBChangelists')) + self.resources.CLBVertSplitter = self.resources.CLBFrame.FindWindowById( + self.xrc.GetXRCID('CLBVertSplitter')) + self.resources.CLBHorzSplitter = self.resources.CLBFrame.FindWindowById( + self.xrc.GetXRCID('CLBHorzSplitter')) + self.resources.CLBOutput = self.resources.CLBFrame.FindWindowById( + self.xrc.GetXRCID('CLBOutput')) + self.resources.CLBStatusBar = self.resources.CLBFrame.CreateStatusBar(2) + + # Glue some of our extra stuff onto the main frame. 
+ self.resources.CLBFrame.SetMenuBar(self.resources.CLBMenuBar) + self.resources.CLBStatusBar.SetStatusWidths([-1, 100]) + + # Event handlers. They are the key to the world. + wx.EVT_CLOSE(self.resources.CLBFrame, self._FrameClosure) + wx.EVT_MENU(self, self.resources.CLBMenuFileQuit, self._FileQuitMenu) + wx.EVT_MENU(self, self.resources.CLBMenuOpsInfo, self._OpsInfoMenu) + wx.EVT_MENU(self, self.resources.CLBMenuOpsMembers, self._OpsMembersMenu) + wx.EVT_MENU(self, self.resources.CLBMenuHelpAbout, self._HelpAboutMenu) + wx.EVT_TREE_ITEM_ACTIVATED(self, self.resources.CLBDirNav.GetTreeCtrl().Id, + self._DirNavSelChanged) + + # Reset our working directory + self._SetWorkingDirectory(self.wc_dir) + + # Resize and display our frame. + self.resources.CLBFrame.SetSize(wx.Size(600, 400)) + self.resources.CLBFrame.Center() + self.resources.CLBFrame.Show(True) + self.resources.CLBVertSplitter.SetSashPosition( + self.resources.CLBVertSplitter.GetSize()[0] / 2) + self.resources.CLBHorzSplitter.SetSashPosition( + self.resources.CLBHorzSplitter.GetSize()[1] / 2) + + # Tell wxWidgets that this is our main window + self.SetTopWindow(self.resources.CLBFrame) + + # Return a success flag + return True + + def _SetWorkingDirectory(self, wc_dir): + if wc_dir is None: + return + if not os.path.isdir(wc_dir): + wc_dir = os.path.abspath('/') + self.wc_dir = os.path.abspath(wc_dir) + self.resources.CLBChangelists.Clear() + self.resources.CLBDirNav.SetPath(self.wc_dir) + self.resources.CLBFrame.SetTitle("SvnCLBrowse - %s" % (self.wc_dir)) + changelists = {} + self.resources.CLBFrame.SetStatusText("Checking '%s' for status..." \ + % (self.wc_dir)) + wx.BeginBusyCursor() + + def _status_callback(path, status, clists=changelists): + if status.entry and status.entry.changelist: + clists[status.entry.changelist] = None + + # Do the status crawl, using _status_callback() as our callback function. 
+ revision = svn.core.svn_opt_revision_t() + revision.type = svn.core.svn_opt_revision_head + try: + svn.client.status2(self.wc_dir, revision, _status_callback, + svn.core.svn_depth_infinity, + False, False, False, True, self.svn_ctx) + except svn.core.SubversionException: + self.resources.CLBStatusBar.SetStatusText("UNVERSIONED", 2) + else: + changelist_names = changelists.keys() + changelist_names.sort() + for changelist in changelist_names: + self.resources.CLBChangelists.Append(changelist) + finally: + wx.EndBusyCursor() + self.resources.CLBFrame.SetStatusText("") + + def _Destroy(self): + self.resources.CLBFrame.Destroy() + + def _DirNavSelChanged(self, event): + self._SetWorkingDirectory(self.resources.CLBDirNav.GetPath()) + + def _GetSelectedChangelists(self): + changelists = [] + items = self.resources.CLBChangelists.GetSelections() + for item in items: + changelists.append(str(self.resources.CLBChangelists.GetString(item))) + return changelists + + def _OpsMembersMenu(self, event): + self.resources.CLBOutput.Clear() + changelists = self._GetSelectedChangelists() + if not changelists: + return + + def _info_receiver(path, info, pool): + self.resources.CLBOutput.AppendText(" %s\n" % (path)) + + for changelist in changelists: + self.resources.CLBOutput.AppendText("Changelist: %s\n" % (changelist)) + revision = svn.core.svn_opt_revision_t() + revision.type = svn.core.svn_opt_revision_working + svn.client.info2(self.wc_dir, revision, revision, + _info_receiver, svn.core.svn_depth_infinity, + [changelist], self.svn_ctx) + self.resources.CLBOutput.AppendText("\n") + + def _OpsInfoMenu(self, event): + self.resources.CLBOutput.Clear() + changelists = self._GetSelectedChangelists() + if not changelists: + return + + def _info_receiver(path, info, pool): + output_info(path, info, self.resources.CLBOutput) + + revision = svn.core.svn_opt_revision_t() + revision.type = svn.core.svn_opt_revision_working + svn.client.info2(self.wc_dir, revision, revision, + 
_info_receiver, svn.core.svn_depth_infinity, + changelists, self.svn_ctx) + + def _FrameClosure(self, event): + self._Destroy() + + def _FileQuitMenu(self, event): + self._Destroy() + + def _HelpAboutMenu(self, event): + wx.MessageBox("SvnCLBrowse" + " -- graphical Subversion changelist browser.\n\n", + "About SvnCLBrowse", + wx.OK | wx.CENTER, + self.resources.CLBFrame) + + def OnExit(self): + pass + + +_XML_RESOURCE = """<?xml version="1.0" ?> +<resource> + <object class="wxMenuBar" name="CLBMenuBar"> + <object class="wxMenu"> + <label>&File</label> + <object class="wxMenuItem" name="CLBMenuFileQuit"> + <label>&Quit</label> + <accel>CTRL+Q</accel> + <help>Quit SvnCLBrowse.</help> + </object> + </object> + <object class="wxMenu"> + <label>&Subversion</label> + <object class="wxMenuItem" name="CLBMenuOpsInfo"> + <label>&Info</label> + <help>Show information about members of the selected changelist(s).</help> + </object> + <object class="wxMenuItem" name="CLBMenuOpsMembers"> + <label>&Members</label> + <help>List the members of the selected changelist(s).</help> + </object> + </object> + <object class="wxMenu"> + <label>&Help</label> + <object class="wxMenuItem" name="CLBMenuHelpAbout"> + <label>&About...</label> + <help>About SvnCLBrowse.</help> + </object> + </object> + </object> + <object class="wxFrame" name="CLBFrame"> + <title>SvnCLBrowse -- graphical Subversion changelist browser</title> + <centered>1</centered> + <style>wxDEFAULT_FRAME_STYLE|wxCAPTION|wxSYSTEM_MENU|wxRESIZE_BORDER|wxRESIZE_BOX|wxMAXIMIZE_BOX|wxMINIMIZE_BOX|wxTAB_TRAVERSAL</style> + <object class="wxFlexGridSizer"> + <cols>1</cols> + <rows>1</rows> + <object class="sizeritem"> + <object class="wxSplitterWindow" name="CLBVertSplitter"> + <object class="wxPanel"> + <object class="wxFlexGridSizer"> + <cols>1</cols> + <rows>3</rows> + <growablecols>0</growablecols> + <growablerows>0</growablerows> + <growablerows>1</growablerows> + <growablerows>2</growablerows> + <object class="sizeritem"> + 
<object class="wxSplitterWindow" name="CLBHorzSplitter"> + <orientation>horizontal</orientation> + <sashpos>200</sashpos> + <minsize>50</minsize> + <style>wxSP_NOBORDER|wxSP_LIVE_UPDATE</style> + <object class="wxPanel"> + <object class="wxStaticBoxSizer"> + <label>Local Modifications</label> + <orient>wxHORIZONTAL</orient> + <object class="sizeritem"> + <object class="wxGenericDirCtrl" name="CLBDirNav"> + <style>wxDIRCTRL_DIR_ONLY</style> + </object> + <flag>wxEXPAND</flag> + <option>1</option> + </object> + </object> + </object> + <object class="wxPanel"> + <object class="wxStaticBoxSizer"> + <label>Changelists</label> + <orient>wxHORIZONTAL</orient> + <object class="sizeritem"> + <object class="wxListBox" name="CLBChangelists"> + <content> + <item/></content> + <style>wxLB_MULTIPLE</style> + </object> + <option>1</option> + <flag>wxALL|wxEXPAND</flag> + </object> + </object> + </object> + </object> + <flag>wxEXPAND</flag> + <option>1</option> + </object> + </object> + </object> + <object class="wxPanel"> + <object class="wxFlexGridSizer"> + <cols>1</cols> + <object class="sizeritem"> + <object class="wxStaticBoxSizer"> + <label>Output</label> + <orient>wxVERTICAL</orient> + <object class="sizeritem"> + <object class="wxTextCtrl" name="CLBOutput"> + <style>wxTE_MULTILINE|wxTE_READONLY|wxTE_LEFT|wxTE_DONTWRAP</style> + </object> + <option>1</option> + <flag>wxEXPAND</flag> + </object> + </object> + <option>1</option> + <flag>wxALL|wxEXPAND</flag> + <border>5</border> + </object> + <rows>1</rows> + <growablecols>0</growablecols> + <growablerows>0</growablerows> + </object> + </object> + <orientation>vertical</orientation> + <sashpos>130</sashpos> + <minsize>50</minsize> + <style>wxSP_NOBORDER|wxSP_LIVE_UPDATE</style> + </object> + <option>1</option> + <flag>wxEXPAND</flag> + </object> + <growablecols>0</growablecols> + <growablerows>0</growablerows> + </object> + </object> +</resource> +""" + +def usage_and_exit(errmsg=None): + stream = errmsg and sys.stderr or 
sys.stdout + progname = os.path.basename(sys.argv[0]) + stream.write("""%s -- graphical Subversion changelist browser + +Usage: %s [DIRECTORY] + +Launch the SvnCLBrowse graphical changelist browser, using DIRECTORY +(or the current working directory, if DIRECTORY is not provided) as +the initial browse location. + +""" % (progname, progname)) + if errmsg: + stream.write("ERROR: %s\n" % (errmsg)) + sys.exit(errmsg and 1 or 0) + +def main(): + opts, args = getopt.gnu_getopt(sys.argv[1:], 'h?', ['help']) + for name, value in opts: + if name == '-h' or name == '-?' or name == '--help': + usage_and_exit() + argc = len(args) + if argc == 0: + wc_dir = '.' + elif argc == 1: + wc_dir = sys.argv[1] + else: + usage_and_exit("Too many arguments") + app = SvnCLBrowse(wc_dir) + app.MainLoop() + app.OnExit() + +if __name__ == "__main__": + main() diff --git a/tools/examples/blame.py b/tools/examples/blame.py new file mode 100755 index 0000000..87d33b3 --- /dev/null +++ b/tools/examples/blame.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +# +# USAGE: blame.py [-r REV] repos-path file +# + +import sys +import os +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt +import difflib +from svn import fs, core, repos + +CHUNK_SIZE = 100000 + +def blame(path, filename, rev=None): + + annotresult = {} + path = core.svn_path_canonicalize(path) + + repos_ptr = repos.open(path) + fsob = repos.fs(repos_ptr) + + if rev is None: + rev = fs.youngest_rev(fsob) + filedata = '' + for i in range(0, rev+1): + root = fs.revision_root(fsob, i) + if fs.check_path(root, filename) != core.svn_node_none: + first = i + break + print("First revision is %d" % first) + print("Last revision is %d" % rev) + for i in range(first, rev+1): + previousroot = root + root = fs.revision_root(fsob, i) + if i != first: + if not fs.contents_changed(root, filename, previousroot, filename): + continue + + file = fs.file_contents(root, filename) + previousdata = filedata + filedata = '' + while True: + data = core.svn_stream_read(file, CHUNK_SIZE) + if not data: + break + filedata = filedata + data + + print("Current revision is %d" % i) + diffresult = difflib.ndiff(previousdata.splitlines(1), + filedata.splitlines(1)) + # print ''.join(diffresult) + k = 0 + for j in diffresult: + if j[0] == ' ': + if k in annotresult: + k = k + 1 + continue + else: + annotresult[k] = (i, j[2:]) + k = k + 1 + continue + elif j[0] == '?': + continue + annotresult[k] = (i, j[2:]) + if j[0] != '-': + k = k + 1 +# print ''.join(diffresult) +# print annotresult + for x in range(len(annotresult.keys())): + sys.stdout.write("Line %d (rev %d):%s" % (x, + annotresult[x][0], + annotresult[x][1])) + +def usage(): + print("USAGE: blame.py [-r REV] repos-path file") + sys.exit(1) + +def main(): + opts, args = getopt.getopt(sys.argv[1:], 'r:') + if len(args) != 2: + usage() + rev = None + for name, value in opts: + if name == '-r': + rev = int(value) + blame(args[0], args[1], rev) + +if __name__ == '__main__': + 
main() diff --git a/tools/examples/check-modified.py b/tools/examples/check-modified.py new file mode 100755 index 0000000..dff3fa1 --- /dev/null +++ b/tools/examples/check-modified.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# USAGE: check-modified.py FILE_OR_DIR1 FILE_OR_DIR2 ... +# +# prints out the URL associated with each item +# + +import sys +import os +import os.path +import svn.core +import svn.client +import svn.wc + +FORCE_COMPARISON = 0 + +def usage(): + print("Usage: " + sys.argv[0] + " FILE_OR_DIR1 FILE_OR_DIR2\n") + sys.exit(0) + +def run(files): + + for f in files: + dirpath = fullpath = os.path.abspath(f) + if not os.path.isdir(dirpath): + dirpath = os.path.dirname(dirpath) + + adm_baton = svn.wc.adm_open(None, dirpath, False, True) + + try: + entry = svn.wc.entry(fullpath, adm_baton, 0) + + if svn.wc.text_modified_p(fullpath, FORCE_COMPARISON, + adm_baton): + print("M %s" % f) + else: + print(" %s" % f) + except: + print("? 
%s" % f) + + svn.wc.adm_close(adm_baton) + +if __name__ == '__main__': + run(sys.argv[1:]) + diff --git a/tools/examples/dumpprops.py b/tools/examples/dumpprops.py new file mode 100755 index 0000000..09c5e6b --- /dev/null +++ b/tools/examples/dumpprops.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +# +# USAGE: dumprops.py [-r REV] repos-path [file] +# +# dump out the properties on a given path (recursively if given a dir) +# + +import sys +import os +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt +import pprint + +from svn import fs, core, repos + + +def dumpprops(path, filename='', rev=None): + path = core.svn_path_canonicalize(path) + repos_ptr = repos.open(path) + fsob = repos.fs(repos_ptr) + + if rev is None: + rev = fs.youngest_rev(fsob) + + root = fs.revision_root(fsob, rev) + print_props(root, filename) + if fs.is_dir(root, filename): + walk_tree(root, filename) + +def print_props(root, path): + raw_props = fs.node_proplist(root, path) + # need to massage some buffers into strings for printing + props = { } + for key, value in raw_props.items(): + props[key] = str(value) + + print('--- %s' % path) + pprint.pprint(props) + +def walk_tree(root, path): + for name in fs.dir_entries(root, path).keys(): + full = path + '/' + name + print_props(root, full) + if fs.is_dir(root, full): + walk_tree(root, full) + +def usage(): + print("USAGE: dumpprops.py [-r REV] repos-path [file]") + sys.exit(1) + +def main(): + opts, args = my_getopt(sys.argv[1:], 'r:') + rev = None + for name, value in opts: + if name == '-r': + rev = int(value) + if len(args) == 2: + dumpprops(args[0], args[1], rev) + elif len(args) == 1: + dumpprops(args[0], "", rev) + else: + usage() + +if __name__ == '__main__': + main() diff --git a/tools/examples/get-location-segments.py b/tools/examples/get-location-segments.py new file mode 100755 index 0000000..c084dae --- /dev/null +++ b/tools/examples/get-location-segments.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +import sys +import os +from svn import client, ra, core + +def printer(segment, pool): + path = segment.path is not None and segment.path or "(null)" + print("r%d-r%d: %s" % (segment.range_start, segment.range_end, path)) + + +def parse_args(args): + argc = len(sys.argv) + + # parse the target URL and optional peg revision + path_pieces = args[0].split('@') + if len(path_pieces) > 1: + peg_revision = int(path_pieces[-1]) + assert peg_revision >= 0 + url = '@'.join(path_pieces[:-1]) + else: + peg_revision = core.SVN_INVALID_REVNUM + url = path_pieces[0] + url = core.svn_path_canonicalize(url) + + # parse the revision range, if any + if argc > 2: + rev_pieces = args[1].split(':') + num_revs = len(rev_pieces) + assert num_revs < 3 + if num_revs == 2: + start_revision = int(rev_pieces[0]) + end_revision = int(rev_pieces[1]) + else: + start_revision = end_revision = int(rev_pieces[0]) + assert(start_revision >= 0) + assert(end_revision >= 0) + else: + start_revision = peg_revision + end_revision = 0 + + # validate + if start_revision >= 0 \ + and end_revision >= 0 \ + and end_revision > start_revision: + raise Exception("End revision must not be younger than start revision") + if peg_revision >= 0 \ + and start_revision >= 0 \ + and start_revision > peg_revision: + raise Exception("Start revision must not be younger than peg revision") + + return url, peg_revision, start_revision, end_revision + 
def main():
  """Entry point: parse arguments, build an authenticated RA session, and
  print every location segment of the target's history."""
  try:
    url, peg_revision, start_revision, end_revision = parse_args(sys.argv[1:])
  except Exception as e:
    sys.stderr.write("""Usage: %s URL[@PEG-REV] [START-REV[:END-REV]]

Trace the history of URL@PEG-REV, printing the location(s) of its
existence between START-REV and END-REV. If START-REV is not
provided, the entire history of URL@PEG-REV back to its origin will be
displayed. If provided, START-REV must not be younger than PEG-REV.
If END-REV is provided, it must not be younger than START-REV.

(This is a wrapper around Subversion's svn_ra_get_location_segments() API.)

ERROR: %s
""" % (os.path.basename(sys.argv[0]), str(e)))
    sys.exit(1)

  core.svn_config_ensure(None)
  ctx = client.ctx_t()

  # Platform-specific providers (gnome-keyring / kwallet) must come first so
  # stored passwords are consulted before the user is prompted for new ones.
  # We cannot pass our own config here: ctx.config is opaque to the bindings.
  auth_providers = core.svn_auth_get_platform_specific_client_providers(None,
                                                                        None)
  auth_providers.extend([
      client.get_simple_provider(),
      client.get_username_provider(),
      client.get_ssl_server_trust_file_provider(),
      client.get_ssl_client_cert_file_provider(),
      client.get_ssl_client_cert_pw_file_provider(),
      ])

  ctx.auth_baton = core.svn_auth_open(auth_providers)
  ctx.config = core.svn_config_get_config(None)

  callbacks = ra.callbacks_t()
  callbacks.auth_baton = ctx.auth_baton
  session = ra.open(url, callbacks, None, ctx.config)
  ra.get_location_segments(session, "", peg_revision,
                           start_revision, end_revision, printer)

if __name__ == "__main__":
  main()
+# +# +# +# USAGE: getfile.py [-r REV] repos-path file +# +# gets a file from an SVN repository, puts it to sys.stdout +# + +import sys +import os +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt + +from svn import fs, core, repos + +CHUNK_SIZE = 16384 + +def getfile(path, filename, rev=None): + path = core.svn_path_canonicalize(path) + repos_ptr = repos.open(path) + fsob = repos.fs(repos_ptr) + + if rev is None: + rev = fs.youngest_rev(fsob) + print("Using youngest revision %s" % rev) + + root = fs.revision_root(fsob, rev) + file = fs.file_contents(root, filename) + while True: + data = core.svn_stream_read(file, CHUNK_SIZE) + if not data: + break + sys.stdout.write(data) + +def usage(): + print("USAGE: getfile.py [-r REV] repos-path file") + sys.exit(1) + +def main(): + opts, args = my_getopt(sys.argv[1:], 'r:') + if len(args) != 2: + usage() + rev = None + for name, value in opts: + if name == '-r': + rev = int(value) + getfile(args[0], args[1], rev) + +if __name__ == '__main__': + main() diff --git a/tools/examples/getlocks_test.c b/tools/examples/getlocks_test.c new file mode 100644 index 0000000..1051063 --- /dev/null +++ b/tools/examples/getlocks_test.c @@ -0,0 +1,271 @@ +/* + * getlocks_test.c : show all repository locks living below a URL + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + * + * To compile on unix against Subversion and APR libraries, try + * something like: + * + * cc getlocks_test.c -o getlocks_test \ + * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \ + * -L/usr/local/apache2/lib -L/usr/local/lib \ + * -lsvn_client-1 -lsvn_ra-1 -lsvn_subr-1 -lapr-0 -laprutil-0 + * + */ + +#include "svn_client.h" +#include "svn_pools.h" +#include "svn_config.h" +#include "svn_cmdline.h" +#include "svn_time.h" +#include "svn_fs.h" +#include "svn_path.h" + +/* Display a prompt and read a one-line response into the provided buffer, + removing a trailing newline if present. */ +static svn_error_t * +prompt_and_read_line(const char *prompt, + char *buffer, + size_t max) +{ + int len; + printf("%s: ", prompt); + if (fgets(buffer, max, stdin) == NULL) + return svn_error_create(0, NULL, "error reading stdin"); + len = strlen(buffer); + if (len > 0 && buffer[len-1] == '\n') + buffer[len-1] = 0; + return SVN_NO_ERROR; +} + +/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. For + a much better example, see svn_cl__auth_simple_prompt in the official + svn cmdline client. 
*/ +static svn_error_t * +my_simple_prompt_callback (svn_auth_cred_simple_t **cred, + void *baton, + const char *realm, + const char *username, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + if (username) + ret->username = apr_pstrdup (pool, username); + else + { + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + } + + SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf))); + ret->password = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + + +/* A tiny callback function of type 'svn_auth_username_prompt_func_t'. For + a much better example, see svn_cl__auth_username_prompt in the official + svn cmdline client. */ +static svn_error_t * +my_username_prompt_callback (svn_auth_cred_username_t **cred, + void *baton, + const char *realm, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + + +/* A callback function used when the RA layer needs a handle to a + temporary file. This is a reduced version of the callback used in + the official svn cmdline client. */ +static svn_error_t * +open_tmp_file (apr_file_t **fp, + void *callback_baton, + apr_pool_t *pool) +{ + const char *path; + const char *ignored_filename; + + SVN_ERR (svn_io_temp_dir (&path, pool)); + path = svn_path_join (path, "tempfile", pool); + + /* Open a unique file, with delete-on-close set. 
*/ + SVN_ERR (svn_io_open_unique_file2 (fp, &ignored_filename, + path, ".tmp", + svn_io_file_del_on_close, pool)); + + return SVN_NO_ERROR; +} + + + +int +main (int argc, const char **argv) +{ + apr_pool_t *pool; + svn_error_t *err; + apr_hash_t *locks; + apr_hash_index_t *hi; + const char *URL; + svn_ra_session_t *session; + svn_ra_callbacks_t *cbtable; + apr_hash_t *cfg_hash; + svn_auth_baton_t *auth_baton; + + if (argc <= 1) + { + printf ("Usage: %s URL\n", argv[0]); + printf (" Print all locks at or below URL.\n"); + return EXIT_FAILURE; + } + URL = argv[1]; + + /* Initialize the app. Send all error messages to 'stderr'. */ + if (svn_cmdline_init ("ra_test", stderr) != EXIT_SUCCESS) + return EXIT_FAILURE; + + /* Create top-level memory pool. Be sure to read the HACKING file to + understand how to properly use/free subpools. */ + pool = svn_pool_create (NULL); + + /* Initialize the FS library. */ + err = svn_fs_initialize (pool); + if (err) goto hit_error; + + /* Make sure the ~/.subversion run-time config files exist, and load. */ + err = svn_config_ensure (NULL, pool); + if (err) goto hit_error; + + err = svn_config_get_config (&cfg_hash, NULL, pool); + if (err) goto hit_error; + + /* Build an authentication baton. */ + { + /* There are many different kinds of authentication back-end + "providers". See svn_auth.h for a full overview. */ + svn_auth_provider_object_t *provider; + apr_array_header_t *providers + = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *)); + + svn_client_get_simple_prompt_provider (&provider, + my_simple_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + svn_client_get_username_prompt_provider (&provider, + my_username_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + /* Register the auth-providers into the context's auth_baton. 
*/ + svn_auth_open (&auth_baton, providers, pool); + } + + /* Create a table of callbacks for the RA session, mostly nonexistent. */ + cbtable = apr_pcalloc (pool, sizeof(*cbtable)); + cbtable->auth_baton = auth_baton; + cbtable->open_tmp_file = open_tmp_file; + + /* Now do the real work. */ + + err = svn_ra_open (&session, URL, cbtable, NULL, cfg_hash, pool); + if (err) goto hit_error; + + err = svn_ra_get_locks (session, &locks, "", pool); + if (err) goto hit_error; + + err = svn_cmdline_printf (pool, "\n"); + if (err) goto hit_error; + + for (hi = apr_hash_first (pool, locks); hi; hi = apr_hash_next (hi)) + { + const void *key; + void *val; + const char *path, *cr_date, *exp_date; + svn_lock_t *lock; + + apr_hash_this (hi, &key, NULL, &val); + path = key; + lock = val; + + cr_date = svn_time_to_human_cstring (lock->creation_date, pool); + + if (lock->expiration_date) + exp_date = svn_time_to_human_cstring (lock->expiration_date, pool); + else + exp_date = "never"; + + err = svn_cmdline_printf (pool, "%s\n", path); + if (err) goto hit_error; + + err = svn_cmdline_printf (pool, + " UUID Token: %s\n", lock->token); + if (err) goto hit_error; + + err = svn_cmdline_printf (pool, + " Owner: %s\n", lock->owner); + if (err) goto hit_error; + + err = svn_cmdline_printf (pool, + " Comment: %s\n", + lock->comment ? 
lock->comment : "none"); + if (err) goto hit_error; + + err = svn_cmdline_printf (pool, + " Created: %s\n", cr_date); + if (err) goto hit_error; + + err = svn_cmdline_printf (pool, + " Expires: %s\n\n", exp_date); + if (err) goto hit_error; + } + + return EXIT_SUCCESS; + + hit_error: + svn_handle_error2 (err, stderr, FALSE, "getlocks_test: "); + return EXIT_FAILURE; +} diff --git a/tools/examples/geturl.py b/tools/examples/geturl.py new file mode 100755 index 0000000..d50e22e --- /dev/null +++ b/tools/examples/geturl.py @@ -0,0 +1,47 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# USAGE: geturl.py FILE_OR_DIR1 FILE_OR_DIR2 ... 
+# +# prints out the URL associated with each item +# + +import os +import sys + +import svn.wc +import svn.core + +def main(files): + for f in files: + dirpath = fullpath = os.path.abspath(f) + if not os.path.isdir(dirpath): + dirpath = os.path.dirname(dirpath) + adm_baton = svn.wc.adm_open(None, dirpath, 1, 1) + try: + entry = svn.wc.entry(fullpath, adm_baton, 0) + print(entry.url) + finally: + svn.wc.adm_close(adm_baton) + +if __name__ == '__main__': + main(sys.argv[1:]) diff --git a/tools/examples/headrev.c b/tools/examples/headrev.c new file mode 100644 index 0000000..1f71d76 --- /dev/null +++ b/tools/examples/headrev.c @@ -0,0 +1,226 @@ +/* + * headrev.c : print out the HEAD revision of a repository. + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + * + * To compile on unix against Subversion and APR libraries, try + * something like: + * + * cc headrev.c -o headrev \ + * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \ + * -L/usr/local/apache2/lib -L/usr/local/lib \ + * -lsvn_client-1 -lsvn_ra-1 -lsvn_subr-1 -lapr-0 -laprutil-0 + * + */ + +#include "svn_client.h" +#include "svn_pools.h" +#include "svn_config.h" +#include "svn_fs.h" +#include "svn_path.h" +#include "svn_cmdline.h" + + +/* Display a prompt and read a one-line response into the provided buffer, + removing a trailing newline if present. */ +static svn_error_t * +prompt_and_read_line(const char *prompt, + char *buffer, + size_t max) +{ + int len; + printf("%s: ", prompt); + if (fgets(buffer, max, stdin) == NULL) + return svn_error_create(0, NULL, "error reading stdin"); + len = strlen(buffer); + if (len > 0 && buffer[len-1] == '\n') + buffer[len-1] = 0; + return SVN_NO_ERROR; +} + +/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. For + a much better example, see svn_cl__auth_simple_prompt in the official + svn cmdline client. */ +static svn_error_t * +my_simple_prompt_callback (svn_auth_cred_simple_t **cred, + void *baton, + const char *realm, + const char *username, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + if (username) + ret->username = apr_pstrdup (pool, username); + else + { + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + } + + SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf))); + ret->password = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + + +/* A tiny callback function of type 'svn_auth_username_prompt_func_t'. 
For + a much better example, see svn_cl__auth_username_prompt in the official + svn cmdline client. */ +static svn_error_t * +my_username_prompt_callback (svn_auth_cred_username_t **cred, + void *baton, + const char *realm, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + + +/* A callback function used when the RA layer needs a handle to a + temporary file. This is a reduced version of the callback used in + the official svn cmdline client. */ +static svn_error_t * +open_tmp_file (apr_file_t **fp, + void *callback_baton, + apr_pool_t *pool) +{ + const char *path; + const char *ignored_filename; + + SVN_ERR (svn_io_temp_dir (&path, pool)); + path = svn_path_join (path, "tempfile", pool); + + /* Open a unique file, with delete-on-close set. */ + SVN_ERR (svn_io_open_unique_file2 (fp, &ignored_filename, + path, ".tmp", + svn_io_file_del_on_close, pool)); + + return SVN_NO_ERROR; +} + + +int +main (int argc, const char **argv) +{ + apr_pool_t *pool; + svn_error_t *err; + const char *URL; + svn_ra_session_t *session; + svn_ra_callbacks2_t *cbtable; + svn_revnum_t rev; + apr_hash_t *cfg_hash; + svn_auth_baton_t *auth_baton; + + if (argc <= 1) + { + printf ("Usage: %s URL\n", argv[0]); + printf (" Print HEAD revision of URL's repository.\n"); + return EXIT_FAILURE; + } + else + URL = argv[1]; + + /* Initialize the app. Send all error messages to 'stderr'. */ + if (svn_cmdline_init ("headrev", stderr) != EXIT_SUCCESS) + return EXIT_FAILURE; + + /* Create top-level memory pool. Be sure to read the HACKING file to + understand how to properly use/free subpools. */ + pool = svn_pool_create (NULL); + + /* Initialize the FS library. 
*/ + err = svn_fs_initialize (pool); + if (err) goto hit_error; + + /* Make sure the ~/.subversion run-time config files exist, and load. */ + err = svn_config_ensure (NULL, pool); + if (err) goto hit_error; + + err = svn_config_get_config (&cfg_hash, NULL, pool); + if (err) goto hit_error; + + /* Build an authentication baton. */ + { + /* There are many different kinds of authentication back-end + "providers". See svn_auth.h for a full overview. */ + svn_auth_provider_object_t *provider; + apr_array_header_t *providers + = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *)); + + svn_client_get_simple_prompt_provider (&provider, + my_simple_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + svn_client_get_username_prompt_provider (&provider, + my_username_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + /* Register the auth-providers into the context's auth_baton. */ + svn_auth_open (&auth_baton, providers, pool); + } + + /* Create a table of callbacks for the RA session, mostly nonexistent. */ + cbtable = apr_pcalloc (pool, sizeof(*cbtable)); + cbtable->auth_baton = auth_baton; + cbtable->open_tmp_file = open_tmp_file; + + /* Now do the real work. 
*/ + + err = svn_ra_open2(&session, URL, cbtable, NULL, cfg_hash, pool); + if (err) goto hit_error; + + err = svn_ra_get_latest_revnum(session, &rev, pool); + if (err) goto hit_error; + + printf ("The latest revision is %ld.\n", rev); + + return EXIT_SUCCESS; + + hit_error: + svn_handle_error2 (err, stderr, FALSE, "headrev: "); + return EXIT_FAILURE; +} diff --git a/tools/examples/info.rb b/tools/examples/info.rb new file mode 100644 index 0000000..fad6837 --- /dev/null +++ b/tools/examples/info.rb @@ -0,0 +1,78 @@ +#!/usr/bin/env ruby +# +# info.rb : output some info about a subversion url +# +# Example based on a blogpost by Mark Deepwell +# http://www.markdeepwell.com/2010/06/ruby-subversion-bindings/ +# +###################################################################### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+###################################################################### +# + +require "svn/core" +require "svn/client" +require "svn/wc" +require "svn/repos" + +# Prompt function mimicking svn's own prompt +simple_prompt = Proc.new do + |result, realm, username, default, may_save, pool| + + puts "Authentication realm: #{realm}" + if username != nil + result.username = username + else + print "Username: " + result.username = STDIN.gets.strip + end + print "Password for '#{result.username}': " + result.password = STDIN.gets.strip +end + + +if ARGV.length != 1 + puts "Usage: info.rb URL[@REV]" +else + ctx = Svn::Client::Context.new() + ctx.add_platform_specific_client_providers + ctx.add_simple_provider + ctx.add_simple_prompt_provider(2, simple_prompt) + ctx.add_username_provider + ctx.add_ssl_server_trust_file_provider + ctx.add_ssl_client_cert_file_provider + ctx.add_ssl_client_cert_pw_file_provider + + repos_uri, revision = ARGV[0].split("@", 2) + if revision + revision = Integer(revision) + end + + begin + ctx.info(repos_uri, revision) do |path, info| + puts("Url: #{info.url}") + puts("Last changed rev: #{info.last_changed_rev}") + puts("Last changed author: #{info.last_changed_author}") + puts("Last changed date: #{info.last_changed_date}") + puts("Kind: #{info.kind}") + end + rescue Svn::Error => e + # catch a generic svn error + raise "Failed to retrieve SVN info at revision " + revision.to_s + end +end diff --git a/tools/examples/minimal_client.c b/tools/examples/minimal_client.c new file mode 100644 index 0000000..967ce5b --- /dev/null +++ b/tools/examples/minimal_client.c @@ -0,0 +1,285 @@ +/* + * minimal_client.c - a minimal Subversion client application ("hello world") + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + * + * This app demonstrates how to use the svn_client.h API. + * + * It reads a directory URL from the commandline, runs + * svn_client_list() and prints the list of directory-entries. It + * also knows how to deal with basic username/password authentication + * challenges. + * + * For a much more complex example, the svn cmdline client might be + * considered the 'reference implementation'. + * + * From a Linux system, a typical commandline compile might look like: + * + * cc minimal_client.c -o minimal_client \ + * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \ + * -L/usr/local/apache2/lib -L/usr/local/lib \ + * -lsvn_client-1 -lapr-0 -laprutil-0 + * + */ + + +#include "svn_client.h" +#include "svn_cmdline.h" +#include "svn_pools.h" +#include "svn_config.h" +#include "svn_fs.h" + + +/* Display a prompt and read a one-line response into the provided buffer, + removing a trailing newline if present. 
*/ +static svn_error_t * +prompt_and_read_line(const char *prompt, + char *buffer, + size_t max) +{ + int len; + printf("%s: ", prompt); + if (fgets(buffer, max, stdin) == NULL) + return svn_error_create(0, NULL, "error reading stdin"); + len = strlen(buffer); + if (len > 0 && buffer[len-1] == '\n') + buffer[len-1] = 0; + return SVN_NO_ERROR; +} + +/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. For + a much better example, see svn_cl__auth_simple_prompt in the official + svn cmdline client. */ +static svn_error_t * +my_simple_prompt_callback (svn_auth_cred_simple_t **cred, + void *baton, + const char *realm, + const char *username, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + if (username) + ret->username = apr_pstrdup (pool, username); + else + { + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + } + + SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf))); + ret->password = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + + +/* A tiny callback function of type 'svn_auth_username_prompt_func_t'. For + a much better example, see svn_cl__auth_username_prompt in the official + svn cmdline client. 
*/ +static svn_error_t * +my_username_prompt_callback (svn_auth_cred_username_t **cred, + void *baton, + const char *realm, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + + + +int +main (int argc, const char **argv) +{ + apr_pool_t *pool; + svn_error_t *err; + svn_opt_revision_t revision; + apr_hash_t *dirents; + apr_hash_index_t *hi; + svn_client_ctx_t *ctx; + const char *URL; + + if (argc <= 1) + { + printf ("Usage: %s URL\n", argv[0]); + return EXIT_FAILURE; + } + else + URL = argv[1]; + + /* Initialize the app. Send all error messages to 'stderr'. */ + if (svn_cmdline_init ("minimal_client", stderr) != EXIT_SUCCESS) + return EXIT_FAILURE; + + /* Create top-level memory pool. Be sure to read the HACKING file to + understand how to properly use/free subpools. */ + pool = svn_pool_create (NULL); + + /* Initialize the FS library. */ + err = svn_fs_initialize (pool); + if (err) + { + /* For functions deeper in the stack, we usually use the + SVN_ERR() exception-throwing macro (see svn_error.h). At the + top level, we catch & print the error with svn_handle_error2(). */ + svn_handle_error2 (err, stderr, FALSE, "minimal_client: "); + return EXIT_FAILURE; + } + + /* Make sure the ~/.subversion run-time config files exist */ + err = svn_config_ensure (NULL, pool); + if (err) + { + svn_handle_error2 (err, stderr, FALSE, "minimal_client: "); + return EXIT_FAILURE; + } + + /* All clients need to fill out a client_ctx object. */ + { + /* Initialize and allocate the client_ctx object. 
*/ + if ((err = svn_client_create_context (&ctx, pool))) + { + svn_handle_error2 (err, stderr, FALSE, "minimal_client: "); + return EXIT_FAILURE; + } + + /* Load the run-time config file into a hash */ + if ((err = svn_config_get_config (&(ctx->config), NULL, pool))) + { + svn_handle_error2 (err, stderr, FALSE, "minimal_client: "); + return EXIT_FAILURE; + } + +#ifdef WIN32 + /* Set the working copy administrative directory name. */ + if (getenv ("SVN_ASP_DOT_NET_HACK")) + { + err = svn_wc_set_adm_dir ("_svn", pool); + if (err) + { + svn_handle_error2 (err, stderr, FALSE, "minimal_client: "); + return EXIT_FAILURE; + } + } +#endif + + /* Depending on what your client does, you'll want to read about + (and implement) the various callback function types below. */ + + /* A func (& context) which receives event signals during + checkouts, updates, commits, etc. */ + /* ctx->notify_func = my_notification_func; + ctx->notify_baton = NULL; */ + + /* A func (& context) which can receive log messages */ + /* ctx->log_msg_func = my_log_msg_receiver_func; + ctx->log_msg_baton = NULL; */ + + /* A func (& context) which checks whether the user cancelled */ + /* ctx->cancel_func = my_cancel_checking_func; + ctx->cancel_baton = NULL; */ + + /* Make the client_ctx capable of authenticating users */ + { + /* There are many different kinds of authentication back-end + "providers". See svn_auth.h for a full overview. + + If you want to get the auth behavior of the 'svn' program, + you can use svn_cmdline_setup_auth_baton, which will give + you the exact set of auth providers it uses. This program + doesn't use it because it's only appropriate for a command + line program, and this is supposed to be a general purpose + example. 
*/ + + svn_auth_provider_object_t *provider; + apr_array_header_t *providers + = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *)); + + svn_auth_get_simple_prompt_provider (&provider, + my_simple_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + svn_auth_get_username_prompt_provider (&provider, + my_username_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + /* Register the auth-providers into the context's auth_baton. */ + svn_auth_open (&ctx->auth_baton, providers, pool); + } + } /* end of client_ctx setup */ + + + /* Now do the real work. */ + + /* Set revision to always be the HEAD revision. It could, however, + be set to a specific revision number, date, or other values. */ + revision.kind = svn_opt_revision_head; + + /* Main call into libsvn_client does all the work. */ + err = svn_client_ls (&dirents, + URL, &revision, + FALSE, /* no recursion */ + ctx, pool); + if (err) + { + svn_handle_error2 (err, stderr, FALSE, "minimal_client: "); + return EXIT_FAILURE; + } + + /* Print the dir entries in the hash. */ + for (hi = apr_hash_first (pool, dirents); hi; hi = apr_hash_next (hi)) + { + const char *entryname; + svn_dirent_t *val; + + apr_hash_this (hi, (void *) &entryname, NULL, (void *) &val); + printf (" %s\n", entryname); + + /* 'val' is actually an svn_dirent_t structure; a more complex + program would mine it for extra printable information. */ + } + + return EXIT_SUCCESS; +} diff --git a/tools/examples/putfile.py b/tools/examples/putfile.py new file mode 100755 index 0000000..4b028aa --- /dev/null +++ b/tools/examples/putfile.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# USAGE: putfile.py [-m commitmsg] [-u username] file repos-path +# +# put a file into an SVN repository +# + +import sys +import os +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt + +from svn import fs, core, repos, delta + +def putfile(fname, rpath, uname="", commitmsg=""): + rpath = core.svn_path_canonicalize(rpath) + repos_ptr = repos.open(rpath) + fsob = repos.fs(repos_ptr) + + # open a transaction against HEAD + rev = fs.youngest_rev(fsob) + + txn = repos.fs_begin_txn_for_commit(repos_ptr, rev, uname, commitmsg) + + root = fs.txn_root(txn) + rev_root = fs.revision_root(fsob, rev) + + kind = fs.check_path(root, fname) + if kind == core.svn_node_none: + print("file '%s' does not exist, creating..." % fname) + fs.make_file(root, fname) + elif kind == core.svn_node_dir: + print("File '%s' is a dir." % fname) + return + else: + print("Updating file '%s'" % fname) + + handler, baton = fs.apply_textdelta(root, fname, None, None) + + ### it would be nice to get an svn_stream_t. for now, just load in the + ### whole file and shove it into the FS. 
+ delta.svn_txdelta_send_string(open(fname, 'rb').read(), + handler, baton) + + newrev = repos.fs_commit_txn(repos_ptr, txn) + print("revision: %s" % newrev) + +def usage(): + print("USAGE: putfile.py [-m commitmsg] [-u username] file repos-path") + sys.exit(1) + +def main(): + opts, args = my_getopt(sys.argv[1:], 'm:u:') + if len(args) != 2: + usage() + + uname = commitmsg = "" + + for name, value in opts: + if name == '-u': + uname = value + if name == '-m': + commitmsg = value + putfile(args[0], args[1], uname, commitmsg) + +if __name__ == '__main__': + main() diff --git a/tools/examples/revplist.py b/tools/examples/revplist.py new file mode 100755 index 0000000..5e22928 --- /dev/null +++ b/tools/examples/revplist.py @@ -0,0 +1,78 @@ +#!/usr/bin/env python +# +# revplist.py : display revision properties +# +###################################################################### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+###################################################################### +# + +import sys +import os +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt + +from svn import fs, core + +def plist(rev=None, home='.', *props): + + db_path = os.path.join(home, 'db') + if not os.path.exists(db_path): + db_path = home + + fs_ptr = fs.new(None) + fs.open_berkeley(fs_ptr, db_path) + + if rev is None: + rev = fs.youngest_rev(fs_ptr) + + print('Properties for revision: %s' % rev) + if props: + for propname in props: + value = fs.revision_prop(fs_ptr, rev, propname) + if value is None: + print('%s: <not present>' % propname) + else: + print('%s: %s' % (propname, value)) + else: + proplist = fs.revision_proplist(fs_ptr, rev) + for propname, value in proplist.items(): + print('%s: %s' % (propname, value)) + +def usage(): + print("USAGE: %s [-r REV] [-h DBHOME] [PROP1 [PROP2 ...]]" % sys.argv[0]) + sys.exit(1) + +def main(): + ### how to invoke usage() ? + opts, args = my_getopt(sys.argv[1:], 'r:h:') + rev = None + home = '.' + for name, value in opts: + if name == '-r': + rev = int(value) + elif name == '-h': + home = value + + plist(rev, home, *args) + +if __name__ == '__main__': + main() diff --git a/tools/examples/svnlog2html.rb b/tools/examples/svnlog2html.rb new file mode 100755 index 0000000..a7571cc --- /dev/null +++ b/tools/examples/svnlog2html.rb @@ -0,0 +1,139 @@ +#!/usr/bin/env ruby + +# +###################################################################### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +###################################################################### +# + +require "erb" +require "svn/client" + +include ERB::Util + +path = File.expand_path(ARGV.shift || Dir.pwd) + +html = <<-HEADER +<?xml version="1.0" encoding="utf-8"?> +<!DOCTYPE html + PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" + "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html xmlns="http://www.w3.org/1999/xhtml"> +<head> + <style type="text/css"> +div.entry +{ + border: 1px solid red; + border-width: 1px 0 0 1px; + margin: 2em 2em 2em 3em; + padding: 0 2em; +} + +pre.message +{ + border-left: 1px solid red; + margin: 1em 2em; + padding-left: 1em; +} + +div.info +{ + text-align: right; +} + +span.info +{ + border-bottom: 1px solid red; + padding: 0 5px 1px 1em; +} + +span.author +{ + font-style: italic; +} + +span.date +{ + color: #999; +} + +li.action-A +{ + color: blue; +} + +li.action-M +{ + color: green; +} + +li.action-D +{ + color: red; + text-decoration: line-through; +} + </style> + <title>#{h path}</title> +</head> +<body> +<h1>#{h path}</h1> +HEADER + +ctx = Svn::Client::Context.new +ctx.log(path, "HEAD", 0, 40, true, true) do + |changed_paths, rev, author, date, message| + + html << <<-ENTRY_HEADER + +<div class="entry"> + <h2>r#{h rev}</h2> + <pre class="message">#{h message}</pre> + <div class="info"> + <span class="info"> + by <span class="author">#{h author}</span> + at <span class="date">#{date}</span> + </span> + </div> + <div class="changed-path"> +ENTRY_HEADER + + changed_paths.sort.each do |path, changed_path| + action = changed_path.action + html << 
<<-ENTRY_PATH + <ul> + <li class="action-#{h action}"> + <span class="action">#{h action}</span>: + <span class="changed-path">#{h path}</span> + </li> + </ul> +ENTRY_PATH + end + + html << <<-ENTRY_FOOTER + </div> +</div> + +ENTRY_FOOTER +end + +html << <<-FOOTER +</body> +</html> +FOOTER + +puts html diff --git a/tools/examples/svnlook.py b/tools/examples/svnlook.py new file mode 100755 index 0000000..72043e6 --- /dev/null +++ b/tools/examples/svnlook.py @@ -0,0 +1,441 @@ +#!/usr/bin/env python +# +# svnlook.py : a Python-based replacement for svnlook +# +###################################################################### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+###################################################################### +# + +import sys +import time +import os + +from svn import core, fs, delta, repos + +class SVNLook: + def __init__(self, path, cmd, rev, txn): + path = core.svn_path_canonicalize(path) + repos_ptr = repos.open(path) + self.fs_ptr = repos.fs(repos_ptr) + + if txn: + self.txn_ptr = fs.open_txn(self.fs_ptr, txn) + else: + self.txn_ptr = None + if rev is None: + rev = fs.youngest_rev(self.fs_ptr) + self.rev = rev + + getattr(self, 'cmd_' + cmd)() + + def cmd_default(self): + self.cmd_info() + self.cmd_tree() + + def cmd_author(self): + # get the author property, or empty string if the property is not present + author = self._get_property(core.SVN_PROP_REVISION_AUTHOR) or '' + print(author) + + def cmd_changed(self): + self._print_tree(ChangedEditor, pass_root=1) + + def cmd_date(self): + if self.txn_ptr: + print("") + else: + date = self._get_property(core.SVN_PROP_REVISION_DATE) + if date: + aprtime = core.svn_time_from_cstring(date) + # ### convert to a time_t; this requires intimate knowledge of + # ### the apr_time_t type + secs = aprtime / 1000000 # aprtime is microseconds; make seconds + + # assume secs in local TZ, convert to tuple, and format + ### we don't really know the TZ, do we? 
+ print(time.strftime('%Y-%m-%d %H:%M', time.localtime(secs))) + else: + print("") + + def cmd_diff(self): + self._print_tree(DiffEditor, pass_root=1) + + def cmd_dirs_changed(self): + self._print_tree(DirsChangedEditor) + + def cmd_ids(self): + self._print_tree(Editor, base_rev=0, pass_root=1) + + def cmd_info(self): + self.cmd_author() + self.cmd_date() + self.cmd_log(1) + + def cmd_log(self, print_size=0): + # get the log property, or empty string if the property is not present + log = self._get_property(core.SVN_PROP_REVISION_LOG) or '' + if print_size: + print(len(log)) + print(log) + + def cmd_tree(self): + self._print_tree(Editor, base_rev=0) + + def _get_property(self, name): + if self.txn_ptr: + return fs.txn_prop(self.txn_ptr, name) + return fs.revision_prop(self.fs_ptr, self.rev, name) + + def _print_tree(self, e_factory, base_rev=None, pass_root=0): + if base_rev is None: + # a specific base rev was not provided. use the transaction base, + # or the previous revision + if self.txn_ptr: + base_rev = fs.txn_base_revision(self.txn_ptr) + else: + base_rev = self.rev - 1 + + # get the current root + if self.txn_ptr: + root = fs.txn_root(self.txn_ptr) + else: + root = fs.revision_root(self.fs_ptr, self.rev) + + # the base of the comparison + base_root = fs.revision_root(self.fs_ptr, base_rev) + + if pass_root: + editor = e_factory(root, base_root) + else: + editor = e_factory() + + # construct the editor for printing these things out + e_ptr, e_baton = delta.make_editor(editor) + + # compute the delta, printing as we go + def authz_cb(root, path, pool): + return 1 + repos.dir_delta(base_root, '', '', root, '', + e_ptr, e_baton, authz_cb, 0, 1, 0, 0) + + +class Editor(delta.Editor): + def __init__(self, root=None, base_root=None): + self.root = root + # base_root ignored + + self.indent = '' + + def open_root(self, base_revision, dir_pool): + print('/' + self._get_id('/')) + self.indent = self.indent + ' ' # indent one space + + def add_directory(self, path, 
*args): + id = self._get_id(path) + print(self.indent + _basename(path) + '/' + id) + self.indent = self.indent + ' ' # indent one space + + # we cheat. one method implementation for two entry points. + open_directory = add_directory + + def close_directory(self, baton): + # note: if indents are being performed, this slice just returns + # another empty string. + self.indent = self.indent[:-1] + + def add_file(self, path, *args): + id = self._get_id(path) + print(self.indent + _basename(path) + id) + + # we cheat. one method implementation for two entry points. + open_file = add_file + + def _get_id(self, path): + if self.root: + id = fs.node_id(self.root, path) + return ' <%s>' % fs.unparse_id(id) + return '' + +class DirsChangedEditor(delta.Editor): + def open_root(self, base_revision, dir_pool): + return [ 1, '' ] + + def delete_entry(self, path, revision, parent_baton, pool): + self._dir_changed(parent_baton) + + def add_directory(self, path, parent_baton, + copyfrom_path, copyfrom_revision, dir_pool): + self._dir_changed(parent_baton) + return [ 1, path ] + + def open_directory(self, path, parent_baton, base_revision, dir_pool): + return [ 1, path ] + + def change_dir_prop(self, dir_baton, name, value, pool): + self._dir_changed(dir_baton) + + def add_file(self, path, parent_baton, + copyfrom_path, copyfrom_revision, file_pool): + self._dir_changed(parent_baton) + + def open_file(self, path, parent_baton, base_revision, file_pool): + # some kind of change is going to happen + self._dir_changed(parent_baton) + + def _dir_changed(self, baton): + if baton[0]: + # the directory hasn't been printed yet. do it. 
+ print(baton[1] + '/') + baton[0] = 0 + +class ChangedEditor(delta.Editor): + def __init__(self, root, base_root): + self.root = root + self.base_root = base_root + + def open_root(self, base_revision, dir_pool): + return [ 1, '' ] + + def delete_entry(self, path, revision, parent_baton, pool): + ### need more logic to detect 'replace' + if fs.is_dir(self.base_root, '/' + path): + print('D ' + path + '/') + else: + print('D ' + path) + + def add_directory(self, path, parent_baton, + copyfrom_path, copyfrom_revision, dir_pool): + print('A ' + path + '/') + return [ 0, path ] + + def open_directory(self, path, parent_baton, base_revision, dir_pool): + return [ 1, path ] + + def change_dir_prop(self, dir_baton, name, value, pool): + if dir_baton[0]: + # the directory hasn't been printed yet. do it. + print('_U ' + dir_baton[1] + '/') + dir_baton[0] = 0 + + def add_file(self, path, parent_baton, + copyfrom_path, copyfrom_revision, file_pool): + print('A ' + path) + return [ '_', ' ', None ] + + def open_file(self, path, parent_baton, base_revision, file_pool): + return [ '_', ' ', path ] + + def apply_textdelta(self, file_baton, base_checksum): + file_baton[0] = 'U' + + # no handler + return None + + def change_file_prop(self, file_baton, name, value, pool): + file_baton[1] = 'U' + + def close_file(self, file_baton, text_checksum): + text_mod, prop_mod, path = file_baton + # test the path. it will be None if we added this file. + if path: + status = text_mod + prop_mod + # was there some kind of change? 
+ if status != '_ ': + print(status + ' ' + path) + + +class DiffEditor(delta.Editor): + def __init__(self, root, base_root): + self.root = root + self.base_root = base_root + self.target_revision = 0 + + def _do_diff(self, base_path, path): + if base_path is None: + print("Added: " + path) + label = path + elif path is None: + print("Removed: " + base_path) + label = base_path + else: + print("Modified: " + path) + label = path + print("===============================================================" + \ + "===============") + args = [] + args.append("-L") + args.append(label + "\t(original)") + args.append("-L") + args.append(label + "\t(new)") + args.append("-u") + differ = fs.FileDiff(self.base_root, base_path, self.root, + path, diffoptions=args) + pobj = differ.get_pipe() + while True: + line = pobj.readline() + if not line: + break + sys.stdout.write("%s " % line) + print("") + + def _do_prop_diff(self, path, prop_name, prop_val, pool): + print("Property changes on: " + path) + print("_______________________________________________________________" + \ + "_______________") + + old_prop_val = None + + try: + old_prop_val = fs.node_prop(self.base_root, path, prop_name, pool) + except core.SubversionException: + pass # Must be a new path + + if old_prop_val: + if prop_val: + print("Modified: " + prop_name) + print(" - " + str(old_prop_val)) + print(" + " + str(prop_val)) + else: + print("Deleted: " + prop_name) + print(" - " + str(old_prop_val)) + else: + print("Added: " + prop_name) + print(" + " + str(prop_val)) + + print("") + + def delete_entry(self, path, revision, parent_baton, pool): + ### need more logic to detect 'replace' + if not fs.is_dir(self.base_root, '/' + path): + self._do_diff(path, None) + + def add_directory(self, path, parent_baton, copyfrom_path, + copyfrom_revision, dir_pool): + return [ 1, path ] + + def add_file(self, path, parent_baton, + copyfrom_path, copyfrom_revision, file_pool): + self._do_diff(None, path) + return [ '_', ' ', 
None ] + + def open_root(self, base_revision, dir_pool): + return [ 1, '' ] + + def open_directory(self, path, parent_baton, base_revision, dir_pool): + return [ 1, path ] + + def open_file(self, path, parent_baton, base_revision, file_pool): + return [ '_', ' ', path ] + + def apply_textdelta(self, file_baton, base_checksum): + if file_baton[2] is not None: + self._do_diff(file_baton[2], file_baton[2]) + return None + + def change_file_prop(self, file_baton, name, value, pool): + if file_baton[2] is not None: + self._do_prop_diff(file_baton[2], name, value, pool) + return None + + def change_dir_prop(self, dir_baton, name, value, pool): + if dir_baton[1] is not None: + self._do_prop_diff(dir_baton[1], name, value, pool) + return None + + def set_target_revision(self, target_revision): + self.target_revision = target_revision + +def _basename(path): + "Return the basename for a '/'-separated path." + idx = path.rfind('/') + if idx == -1: + return path + return path[idx+1:] + + +def usage(exit): + if exit: + output = sys.stderr + else: + output = sys.stdout + + output.write( + "usage: %s REPOS_PATH rev REV [COMMAND] - inspect revision REV\n" + " %s REPOS_PATH txn TXN [COMMAND] - inspect transaction TXN\n" + " %s REPOS_PATH [COMMAND] - inspect the youngest revision\n" + "\n" + "REV is a revision number > 0.\n" + "TXN is a transaction name.\n" + "\n" + "If no command is given, the default output (which is the same as\n" + "running the subcommands `info' then `tree') will be printed.\n" + "\n" + "COMMAND can be one of: \n" + "\n" + " author: print author.\n" + " changed: print full change summary: all dirs & files changed.\n" + " date: print the timestamp (revisions only).\n" + " diff: print GNU-style diffs of changed files and props.\n" + " dirs-changed: print changed directories.\n" + " ids: print the tree, with nodes ids.\n" + " info: print the author, data, log_size, and log message.\n" + " log: print log message.\n" + " tree: print the tree.\n" + "\n" + % 
(sys.argv[0], sys.argv[0], sys.argv[0])) + + sys.exit(exit) + +def main(): + if len(sys.argv) < 2: + usage(1) + + rev = txn = None + + args = sys.argv[2:] + if args: + cmd = args[0] + if cmd == 'rev': + if len(args) == 1: + usage(1) + try: + rev = int(args[1]) + except ValueError: + usage(1) + del args[:2] + elif cmd == 'txn': + if len(args) == 1: + usage(1) + txn = args[1] + del args[:2] + + if args: + if len(args) > 1: + usage(1) + cmd = args[0].replace('-', '_') + else: + cmd = 'default' + + if not hasattr(SVNLook, 'cmd_' + cmd): + usage(1) + + SVNLook(sys.argv[1], cmd, rev, txn) + +if __name__ == '__main__': + main() diff --git a/tools/examples/svnlook.rb b/tools/examples/svnlook.rb new file mode 100755 index 0000000..a48dcca --- /dev/null +++ b/tools/examples/svnlook.rb @@ -0,0 +1,516 @@ +#!/usr/bin/env ruby +# +# svnlook.rb : a Ruby-based replacement for svnlook +# +###################################################################### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+###################################################################### +# + +require "svn/core" +require "svn/fs" +require "svn/delta" +require "svn/repos" + +# Chomp off trailing slashes +def basename(path) + path.chomp("/") +end + +# SvnLook: a Ruby-based replacement for svnlook +class SvnLook + + # Initialize the SvnLook application + def initialize(path, rev, txn) + # Open a repository + @fs = Svn::Repos.open(basename(path)).fs + + # If a transaction was specified, open it + if txn + @txn = @fs.open_txn(txn) + else + # Use the latest revision from the repo, + # if they haven't specified a revision + @txn = nil + rev ||= @fs.youngest_rev + end + + @rev = rev + end + + # Dispatch all commands to appropriate subroutines + def run(cmd, *args) + dispatch(cmd, *args) + end + + private + + # Dispatch all commands to appropriate subroutines + def dispatch(cmd, *args) + if respond_to?("cmd_#{cmd}", true) + begin + __send__("cmd_#{cmd}", *args) + rescue ArgumentError + puts $!.message + puts $@ + puts("invalid argument for #{cmd}: #{args.join(' ')}") + end + else + puts("unknown command: #{cmd}") + end + end + + # Default command: Run the 'info' and 'tree' commands + def cmd_default + cmd_info + cmd_tree + end + + # Print the 'author' of the specified revision or transaction + def cmd_author + puts(property(Svn::Core::PROP_REVISION_AUTHOR) || "") + end + + # Not implemented yet + def cmd_cat + end + + # Find out what has changed in the specified revision or transaction + def cmd_changed + print_tree(ChangedEditor, nil, true) + end + + # Output the date that the current revision was committed. + def cmd_date + if @txn + # It's not committed yet, so output nothing + puts + else + # Get the time the revision was committed + date = property(Svn::Core::PROP_REVISION_DATE) + + if date + # Print out the date in a nice format + puts date.strftime('%Y-%m-%d %H:%M(%Z)') + else + # The specified revision doesn't have an associated date. + # Output just a blank line. 
+ puts + end + end + end + + # Output what changed in the specified revision / transaction + def cmd_diff + print_tree(DiffEditor, nil, true) + end + + # Output what directories changed in the specified revision / transaction + def cmd_dirs_changed + print_tree(DirsChangedEditor) + end + + # Output the tree, with node ids + def cmd_ids + print_tree(Editor, 0, true) + end + + # Output the author, date, and the log associated with the specified + # revision / transaction + def cmd_info + cmd_author + cmd_date + cmd_log(true) + end + + # Output the log message associated with the specified revision / transaction + def cmd_log(print_size=false) + log = property(Svn::Core::PROP_REVISION_LOG) || '' + puts log.length if print_size + puts log + end + + # Output the tree associated with the provided tree + def cmd_tree + print_tree(Editor, 0) + end + + # Output the repository's UUID. + def cmd_uuid + puts @fs.uuid + end + + # Output the repository's youngest revision. + def cmd_youngest + puts @fs.youngest_rev + end + + # Return a property of the specified revision or transaction. + # Name: the ID of the property you want to retrieve. + # E.g. Svn::Core::PROP_REVISION_LOG + def property(name) + if @txn + @txn.prop(name) + else + @fs.prop(name, @rev) + end + end + + # Print a tree of differences between two revisions + def print_tree(editor_class, base_rev=nil, pass_root=false) + if base_rev.nil? + if @txn + # Output changes since the base revision of the transaction + base_rev = @txn.base_revision + else + # Output changes since the previous revision + base_rev = @rev - 1 + end + end + + # Get the root of the specified transaction or revision + if @txn + root = @txn.root + else + root = @fs.root(@rev) + end + + # Get the root of the base revision + base_root = @fs.root(base_rev) + + # Does the provided editor need to know + # the revision and base revision we're working with? 
+ if pass_root + # Create a new editor with the provided root and base_root + editor = editor_class.new(root, base_root) + else + # Create a new editor with nil root and base_roots + editor = editor_class.new + end + + # Do a directory delta between the two roots with + # the specified editor + base_root.dir_delta('', '', root, '', editor) + end + + # Output the current tree for a specified revision + class Editor < Svn::Delta::BaseEditor + + # Initialize the Editor object + def initialize(root=nil, base_root=nil) + @root = root + # base_root ignored + + @indent = "" + end + + # Recurse through the root (and increase the indent level) + def open_root(base_revision) + puts "/#{id('/')}" + @indent << ' ' + end + + # If a directory is added, output this and increase + # the indent level + def add_directory(path, *args) + puts "#{@indent}#{basename(path)}/#{id(path)}" + @indent << ' ' + end + + alias open_directory add_directory + + # If a directory is closed, reduce the ident level + def close_directory(baton) + @indent.chop! + end + + # If a file is added, output that it has been changed + def add_file(path, *args) + puts "#{@indent}#{basename(path)}#{id(path)}" + end + + alias open_file add_file + + # Private methods + private + + # Get the node id of a particular path + def id(path) + if @root + fs_id = @root.node_id(path) + " <#{fs_id.unparse}>" + else + "" + end + end + end + + + # Output directories that have been changed. + # In this class, methods such as open_root and add_file + # are inherited from Svn::Delta::ChangedDirsEditor. + class DirsChangedEditor < Svn::Delta::ChangedDirsEditor + + # Private functions + private + + # Print out the name of a directory if it has been changed. + # But only do so once. + # This behaves in a way like a callback function does. + def dir_changed(baton) + if baton[0] + # The directory hasn't been printed yet, + # so print it out. 
+ puts baton[1] + '/' + + # Make sure we don't print this directory out twice + baton[0] = nil + end + end + end + + # Output files that have been changed between two roots + class ChangedEditor < Svn::Delta::BaseEditor + + # Constructor + def initialize(root, base_root) + @root = root + @base_root = base_root + end + + # Look at the root node + def open_root(base_revision) + # Nothing has been printed out yet, so return 'true'. + [true, ''] + end + + # Output deleted files + def delete_entry(path, revision, parent_baton) + # Output deleted paths with a D in front of them + print "D #{path}" + + # If we're deleting a directory, + # indicate this with a trailing slash + if @base_root.dir?('/' + path) + puts "/" + else + puts + end + end + + # Output that a directory has been added + def add_directory(path, parent_baton, + copyfrom_path, copyfrom_revision) + # Output 'A' to indicate that the directory was added. + # Also put a trailing slash since it's a directory. + puts "A #{path}/" + + # The directory has been printed -- don't print it again. + [false, path] + end + + # Recurse inside directories + def open_directory(path, parent_baton, base_revision) + # Nothing has been printed out yet, so return true. + [true, path] + end + + def change_dir_prop(dir_baton, name, value) + # Has the directory been printed yet? + if dir_baton[0] + # Print the directory + puts "_U #{dir_baton[1]}/" + + # Don't let this directory get printed again. 
+ dir_baton[0] = false + end + end + + def add_file(path, parent_baton, + copyfrom_path, copyfrom_revision) + # Output that a directory has been added + puts "A #{path}" + + # We've already printed out this entry, so return '_' + # to prevent it from being printed again + ['_', ' ', nil] + end + + + def open_file(path, parent_baton, base_revision) + # Changes have been made -- return '_' to indicate as such + ['_', ' ', path] + end + + def apply_textdelta(file_baton, base_checksum) + # The file has been changed -- we'll print that out later. + file_baton[0] = 'U' + nil + end + + def change_file_prop(file_baton, name, value) + # The file has been changed -- we'll print that out later. + file_baton[1] = 'U' + end + + def close_file(file_baton, text_checksum) + text_mod, prop_mod, path = file_baton + # Test the path. It will be nil if we added this file. + if path + status = text_mod + prop_mod + # Was there some kind of change? + if status != '_ ' + puts "#{status} #{path}" + end + end + end + end + + # Output diffs of files that have been changed + class DiffEditor < Svn::Delta::BaseEditor + + # Constructor + def initialize(root, base_root) + @root = root + @base_root = base_root + end + + # Handle deleted files and directories + def delete_entry(path, revision, parent_baton) + # Print out diffs of deleted files, but not + # deleted directories + unless @base_root.dir?('/' + path) + do_diff(path, nil) + end + end + + # Handle added files + def add_file(path, parent_baton, + copyfrom_path, copyfrom_revision) + # If a file has been added, print out the diff. + do_diff(nil, path) + + ['_', ' ', nil] + end + + # Handle files + def open_file(path, parent_baton, base_revision) + ['_', ' ', path] + end + + # If a file is changed, print out the diff + def apply_textdelta(file_baton, base_checksum) + if file_baton[2].nil? 
# Print a usage message for this script and exit with a failure status.
def usage
  messages = [
    "usage: #{$0} REPOS_PATH rev REV [COMMAND] - inspect revision REV",
    "       #{$0} REPOS_PATH txn TXN [COMMAND] - inspect transaction TXN",
    "       #{$0} REPOS_PATH [COMMAND] - inspect the youngest revision",
    "",
    "REV is a revision number > 0.",
    "TXN is a transaction name.",
    "",
    "If no command is given, the default output (which is the same as",
    "running the subcommands `info' then `tree') will be printed.",
    "",
    "COMMAND can be one of: ",
    "",
    "   author:        print author.",
    "   changed:       print full change summary: all dirs & files changed.",
    "   date:          print the timestamp (revisions only).",
    "   diff:          print GNU-style diffs of changed files and props.",
    "   dirs-changed:  print changed directories.",
    "   ids:           print the tree, with nodes ids.",
    "   info:          print the author, data, log_size, and log message.",
    "   log:           print log message.",
    "   tree:          print the tree.",
    "   uuid:          print the repository's UUID (REV and TXN ignored).",
    "   youngest:      print the youngest revision number (REV and TXN ignored).",
  ]
  puts(messages.join("\n"))
  exit(1)
end

# Output usage if no arguments were given at all
if ARGV.empty?
  usage
end

# Process arguments: REPOS_PATH first, then an optional 'rev REV' or
# 'txn TXN' pair, then the optional command name
path = ARGV.shift
cmd = ARGV.shift
rev = nil
txn = nil

case cmd
when "rev"
  rev = Integer(ARGV.shift)
  cmd = ARGV.shift
when "txn"
  txn = ARGV.shift
  cmd = ARGV.shift
end

# If no command is specified, use the default
cmd ||= "default"

# Replace dashes in the command with underscores
# (presumably so the dispatcher can use the command as a method-name
# fragment, e.g. 'dirs-changed' -> 'dirs_changed' -- confirm in SvnLook#run)
cmd = cmd.gsub(/-/, '_')

# Start SvnLook with the specified command
SvnLook.new(path, rev, txn).run(cmd)
See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + * + * To compile on unix against Subversion and APR libraries, try + * something like: + * + * cc svnput.c -o svnput \ + * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \ + * -L/usr/local/apache2/lib -L/usr/local/lib \ + * -lsvn_client-1 -lapr-0 -laprutil-0 + * + */ + +#include "svn_client.h" +#include "svn_pools.h" +#include "svn_config.h" +#include "svn_fs.h" +#include "svn_cmdline.h" +#include "svn_path.h" +#include "svn_time.h" + + +/* Display a prompt and read a one-line response into the provided buffer, + removing a trailing newline if present. */ +static svn_error_t * +prompt_and_read_line(const char *prompt, + char *buffer, + size_t max) +{ + int len; + printf("%s: ", prompt); + if (fgets(buffer, max, stdin) == NULL) + return svn_error_create(0, NULL, "error reading stdin"); + len = strlen(buffer); + if (len > 0 && buffer[len-1] == '\n') + buffer[len-1] = 0; + return SVN_NO_ERROR; +} + +/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. For + a much better example, see svn_cl__auth_simple_prompt in the official + svn cmdline client. 
/* A tiny callback function of type 'svn_auth_simple_prompt_func_t':
   collect a username (unless the RA layer already supplied one) and a
   password from the terminal.  For a much better example, see
   svn_cl__auth_simple_prompt in the official svn cmdline client. */
static svn_error_t *
my_simple_prompt_callback (svn_auth_cred_simple_t **cred,
                           void *baton,
                           const char *realm,
                           const char *username,
                           svn_boolean_t may_save,
                           apr_pool_t *pool)
{
  /* apr_pcalloc zero-fills the credential struct, so fields we do not
     set (e.g. may_save) default to 0/FALSE. */
  svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret));
  char answerbuf[100];

  if (realm)
    {
      printf ("Authentication realm: %s\n", realm);
    }

  /* Reuse the username handed to us, if any; otherwise ask for one. */
  if (username)
    ret->username = apr_pstrdup (pool, username);
  else
    {
      SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf)));
      ret->username = apr_pstrdup (pool, answerbuf);
    }

  /* NOTE(review): the password is echoed to the terminal here; a real
     client would use a no-echo prompt. */
  SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf)));
  ret->password = apr_pstrdup (pool, answerbuf);

  *cred = ret;
  return SVN_NO_ERROR;
}
/* Entry point: upload the file at argv[1] to the repository URL argv[2],
   overwriting any existing file there (after an interactive warning).
   On any Subversion error we jump to hit_error, print the error chain,
   and exit non-zero. */
int
main (int argc, const char **argv)
{
  apr_pool_t *pool;
  svn_error_t *err;
  apr_hash_t *dirents;
  const char *upload_file, *URL;
  /* NOTE: 'basename' shadows basename(3) from <libgen.h>/<string.h> on
     some platforms; harmless here but worth knowing. */
  const char *parent_URL, *basename;
  svn_ra_plugin_t *ra_lib;
  void *session, *ra_baton;
  svn_revnum_t rev;
  const svn_delta_editor_t *editor;
  void *edit_baton;
  svn_dirent_t *dirent;
  svn_ra_callbacks_t *cbtable;
  apr_hash_t *cfg_hash;
  svn_auth_baton_t *auth_baton;

  if (argc <= 2)
    {
      printf ("Usage:  %s PATH URL\n", argv[0]);
      printf ("    Uploads file at PATH to Subversion repository URL.\n");
      return EXIT_FAILURE;
    }
  upload_file = argv[1];
  URL = argv[2];

  /* Initialize the app.  Send all error messages to 'stderr'.  */
  if (svn_cmdline_init ("minimal_client", stderr) != EXIT_SUCCESS)
    return EXIT_FAILURE;

  /* Create top-level memory pool. Be sure to read the HACKING file to
     understand how to properly use/free subpools. */
  pool = svn_pool_create (NULL);

  /* Initialize the FS library. */
  err = svn_fs_initialize (pool);
  if (err) goto hit_error;

  /* Make sure the ~/.subversion run-time config files exist, and load. */
  err = svn_config_ensure (NULL, pool);
  if (err) goto hit_error;

  err = svn_config_get_config (&cfg_hash, NULL, pool);
  if (err) goto hit_error;

  /* Build an authentication baton. */
  {
    /* There are many different kinds of authentication back-end
       "providers".  See svn_auth.h for a full overview.  Here we only
       register the two interactive prompt providers defined above. */
    svn_auth_provider_object_t *provider;
    apr_array_header_t *providers
      = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *));

    svn_client_get_simple_prompt_provider (&provider,
                                           my_simple_prompt_callback,
                                           NULL, /* baton */
                                           2, /* retry limit */ pool);
    APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;

    svn_client_get_username_prompt_provider (&provider,
                                             my_username_prompt_callback,
                                             NULL, /* baton */
                                             2, /* retry limit */ pool);
    APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider;

    /* Register the auth-providers into the context's auth_baton. */
    svn_auth_open (&auth_baton, providers, pool);
  }

  /* Create a table of callbacks for the RA session, mostly nonexistent. */
  cbtable = apr_pcalloc (pool, sizeof(*cbtable));
  cbtable->auth_baton = auth_baton;
  cbtable->open_tmp_file = open_tmp_file;

  /* Now do the real work. */

  /* Open an RA session to the parent URL, fetch current HEAD rev and
     "lock" onto that revnum for the remainder of the session. */
  svn_path_split (URL, &parent_URL, &basename, pool);

  err = svn_ra_init_ra_libs (&ra_baton, pool);
  if (err) goto hit_error;

  err = svn_ra_get_ra_library (&ra_lib, ra_baton, parent_URL, pool);
  if (err) goto hit_error;

  err = ra_lib->open (&session, parent_URL, cbtable, NULL, cfg_hash, pool);
  if (err) goto hit_error;

  err = ra_lib->get_latest_revnum (session, &rev, pool);
  if (err) goto hit_error;

  /* Examine contents of parent dir in the rev. */
  err = ra_lib->get_dir (session, "", rev, &dirents, NULL, NULL, pool);
  if (err) goto hit_error;

  /* Sanity checks.  Don't let the user shoot himself *too* much. */
  dirent = apr_hash_get (dirents, basename, APR_HASH_KEY_STRING);
  if (dirent && dirent->kind == svn_node_dir)
    {
      printf ("Sorry, a directory already exists at that URL.\n");
      return EXIT_FAILURE;
    }
  if (dirent && dirent->kind == svn_node_file)
    {
      /* The target already exists as a file: warn and require an
         explicit 'y' before overwriting. */
      char answer[5];

      printf ("\n*** WARNING ***\n\n");
      printf ("You're about to overwrite r%ld of this file.\n", rev);
      printf ("It was last changed by user '%s',\n",
              dirent->last_author ? dirent->last_author : "?");
      printf ("on %s.\n", svn_time_to_human_cstring (dirent->time, pool));
      printf ("\nSomebody *might* have just changed the file seconds ago,\n"
              "and your upload would be overwriting their changes!\n\n");

      err = prompt_and_read_line("Are you SURE you want to upload? [y/n]",
                                 answer, sizeof(answer));
      if (err) goto hit_error;

      /* apr_strnatcasecmp returns 0 on a (case-insensitive) match, so a
         nonzero result means the answer was not 'y' -- abort. */
      if (apr_strnatcasecmp (answer, "y"))
        {
          printf ("Operation aborted.\n");
          return EXIT_SUCCESS;
        }
    }

  /* Fetch a commit editor (it's anchored on the parent URL, because
     the session is too.) */
  /* ### someday add an option for a user-written commit message?  */
  err = ra_lib->get_commit_editor (session, &editor, &edit_baton,
                                   "File upload from 'svnput' program.",
                                   my_commit_callback, NULL, pool);
  if (err) goto hit_error;

  /* Drive the editor: open the root, add or open the target file,
     stream the local file's contents as a text delta, then close the
     file and the edit (which performs the commit). */
  {
    void *root_baton, *file_baton, *handler_baton;
    svn_txdelta_window_handler_t handler;
    svn_stream_t *contents;
    apr_file_t *f = NULL;

    err = editor->open_root (edit_baton, rev, pool, &root_baton);
    if (err) goto hit_error;

    if (! dirent)
      {
        err = editor->add_file (basename, root_baton, NULL, SVN_INVALID_REVNUM,
                                pool, &file_baton);
      }
    else
      {
        err = editor->open_file (basename, root_baton, rev, pool,
                                 &file_baton);
      }
    if (err) goto hit_error;

    err = editor->apply_textdelta (file_baton, NULL, pool,
                                   &handler, &handler_baton);
    if (err) goto hit_error;

    err = svn_io_file_open (&f, upload_file, APR_READ, APR_OS_DEFAULT, pool);
    if (err) goto hit_error;

    contents = svn_stream_from_aprfile (f, pool);
    err = svn_txdelta_send_stream (contents, handler, handler_baton,
                                   NULL, pool);
    if (err) goto hit_error;

    err = svn_io_file_close (f, pool);
    if (err) goto hit_error;

    err = editor->close_file (file_baton, NULL, pool);
    if (err) goto hit_error;

    err = editor->close_edit (edit_baton, pool);
    if (err) goto hit_error;
  }

  return EXIT_SUCCESS;

 hit_error:
  svn_handle_error2 (err, stderr, FALSE, "svnput: ");
  return EXIT_FAILURE;
}
int
main(int argc, char **argv)
{
    /* Make the real gid equal the effective (setgid) gid.  This must be
       done first, while the process still has enough privilege for
       setgid() to change the real gid; it defeats interpreters (e.g.
       bash) that reset egid := rgid in hook scripts. */
    if (setgid(getegid()) == -1) {
        perror("setgid(getegid())");
        return 1;
    }

    /* Drop any effective-uid privilege; only the gid was wanted. */
    if (seteuid(getuid()) == -1) {
        perror("seteuid(getuid())");
        return 1;
    }

    /* Re-exec the real svnserve with the caller's argv but a fixed,
       known-safe environment (see newenv above) so PATH/IFS games
       cannot subvert the hook scripts. */
    execve(REAL_PATH, argv, newenv);
    perror("attempting to exec " REAL_PATH " failed");
    return 1;
}
class SVNShell(Cmd):
  """Interactive shell for browsing a Subversion repository filesystem.

  Built on cmd.Cmd: each do_* method is a shell command, complete_*
  methods provide tab completion.  The shell views the filesystem rooted
  either at a revision (is_rev truthy) or at an open transaction.
  """

  def __init__(self, path):
    """Open the repository at PATH, root at HEAD, and run the loop."""
    Cmd.__init__(self)
    path = core.svn_path_canonicalize(path)
    self.fs_ptr = repos.fs(repos.open(path))
    self.is_rev = 1        # 1: viewing a revision; 0: viewing a transaction
    self.rev = fs.youngest_rev(self.fs_ptr)
    self.txn = None        # transaction name when is_rev is 0
    self.root = fs.revision_root(self.fs_ptr, self.rev)
    self.path = "/"        # current directory within the root
    self._setup_prompt()
    self.cmdloop()

  def precmd(self, line):
    """Translate Ctrl-D (EOF) into the 'exit' command."""
    if line == "EOF":
      # Ctrl-D is a command without a newline.  Print a newline, so the next
      # shell prompt is not on the same line as the last svnshell prompt.
      print("")
      return "exit"
    return line

  def postcmd(self, stop, line):
    """Rebuild the prompt; rev/txn/path may have changed."""
    self._setup_prompt()

  # Snarky replies for unrecognized commands (picked at random).
  _errors = ["Huh?",
             "Whatchoo talkin' 'bout, Willis?",
             "Say what?",
             "Nope.  Not gonna do it.",
             "Ehh...I don't think so, chief."]

  def default(self, line):
    print(self._errors[randint(0, len(self._errors) - 1)])

  def do_cat(self, arg):
    """dump the contents of a file"""
    if not arg:
      print("You must supply a file path.")
      return
    catpath = self._parse_path(arg)
    kind = fs.check_path(self.root, catpath)
    if kind == core.svn_node_none:
      print("Path '%s' does not exist." % catpath)
      return
    if kind == core.svn_node_dir:
      print("Path '%s' is not a file." % catpath)
      return
    ### be nice to get some paging in here.
    # NOTE(review): svn_stream_read may return bytes under Python 3, in
    # which case sys.stdout.write would need sys.stdout.buffer -- confirm
    # against the bindings in use.
    stream = fs.file_contents(self.root, catpath)
    while True:
      data = core.svn_stream_read(stream, core.SVN_STREAM_CHUNK_SIZE)
      sys.stdout.write(data)
      # A short read means we've drained the stream.
      if len(data) < core.SVN_STREAM_CHUNK_SIZE:
        break

  def do_cd(self, arg):
    """change directory"""
    newpath = self._parse_path(arg)

    # make sure that path actually exists in the filesystem as a directory
    kind = fs.check_path(self.root, newpath)
    if kind != core.svn_node_dir:
      print("Path '%s' is not a valid filesystem directory." % newpath)
      return
    self.path = newpath

  def do_ls(self, arg):
    """list the contents of the current directory or provided path"""
    parent = self.path
    if not arg:
      # no arg -- show a listing for the current directory.
      entries = fs.dir_entries(self.root, self.path)
    else:
      # arg? show a listing of that path.
      newpath = self._parse_path(arg)
      kind = fs.check_path(self.root, newpath)
      if kind == core.svn_node_dir:
        parent = newpath
        entries = fs.dir_entries(self.root, parent)
      elif kind == core.svn_node_file:
        # A single file: fake a one-entry listing from its parent dir.
        parts = self._path_to_parts(newpath)
        name = parts.pop(-1)
        parent = self._parts_to_path(parts)
        print(parent + ':' + name)
        tmpentries = fs.dir_entries(self.root, parent)
        if not tmpentries.get(name, None):
          return
        entries = {}
        entries[name] = tmpentries[name]
      else:
        print("Path '%s' not found." % newpath)
        return

    keys = sorted(entries.keys())

    print("   REV   AUTHOR  NODE-REV-ID     SIZE         DATE NAME")
    print("----------------------------------------------------------------------------")

    for entry in keys:
      fullpath = parent + '/' + entry
      size = ''
      is_dir = fs.is_dir(self.root, fullpath)
      if is_dir:
        name = entry + '/'
      else:
        size = str(fs.file_length(self.root, fullpath))
        name = entry
      node_id = fs.unparse_id(entries[entry].id)
      created_rev = fs.node_created_rev(self.root, fullpath)
      author = fs.revision_prop(self.fs_ptr, created_rev,
                                core.SVN_PROP_REVISION_AUTHOR)
      if not author:
        author = ""
      date = fs.revision_prop(self.fs_ptr, created_rev,
                              core.SVN_PROP_REVISION_DATE)
      if not date:
        date = ""
      else:
        date = self._format_date(date)

      print("%6s %8s %12s %8s %12s %s" % (created_rev, author[:8],
                                          node_id, size, date, name))

  def do_lstxns(self, arg):
    """list the transactions available for browsing"""
    txns = sorted(fs.list_transactions(self.fs_ptr))
    counter = 0
    for txn in txns:
      counter = counter + 1
      sys.stdout.write("%8s " % txn)
      # Wrap to a new row every six transactions.
      if counter == 6:
        print("")
        counter = 0
    print("")

  def do_pcat(self, arg):
    """list the properties of a path"""
    catpath = self.path
    if arg:
      catpath = self._parse_path(arg)
    kind = fs.check_path(self.root, catpath)
    if kind == core.svn_node_none:
      print("Path '%s' does not exist." % catpath)
      return
    plist = fs.node_proplist(self.root, catpath)
    if not plist:
      return
    # Dump in the svnadmin-dump-style K/P length-prefixed format.
    for pkey, pval in plist.items():
      print('K ' + str(len(pkey)))
      print(pkey)
      print('P ' + str(len(pval)))
      print(pval)
    print('PROPS-END')

  def do_setrev(self, arg):
    """set the current revision to view"""
    try:
      if arg.lower() == 'head':
        rev = fs.youngest_rev(self.fs_ptr)
      else:
        rev = int(arg)
      newroot = fs.revision_root(self.fs_ptr, rev)
    except Exception:
      # Covers both a malformed number and a nonexistent revision.
      print("Error setting the revision to '" + arg + "'.")
      return
    fs.close_root(self.root)
    self.root = newroot
    self.rev = rev
    self.is_rev = 1
    self._do_path_landing()

  def do_settxn(self, arg):
    """set the current transaction to view"""
    try:
      txnobj = fs.open_txn(self.fs_ptr, arg)
      newroot = fs.txn_root(txnobj)
    except Exception:
      print("Error setting the transaction to '" + arg + "'.")
      return
    fs.close_root(self.root)
    self.root = newroot
    self.txn = arg
    self.is_rev = 0
    self._do_path_landing()

  def do_youngest(self, arg):
    """list the youngest revision available for browsing"""
    rev = fs.youngest_rev(self.fs_ptr)
    print(rev)

  def do_exit(self, arg):
    """leave the shell"""
    sys.exit(0)

  def _path_to_parts(self, path):
    """Split PATH on '/', dropping empty components."""
    return [_f for _f in path.split('/') if _f]

  def _parts_to_path(self, parts):
    """Join PARTS into an absolute '/'-separated path."""
    return '/' + '/'.join(parts)

  def _parse_path(self, path):
    """Resolve PATH (relative to self.path unless absolute) to a
    normalized absolute path, handling '.' and '..' components."""
    # cleanup leading, trailing, and duplicate '/' characters
    newpath = self._parts_to_path(self._path_to_parts(path))

    # if PATH is absolute, use it, else append it to the existing path.
    # (any doubled '/' introduced here is filtered out again below)
    if path.startswith('/') or self.path == '/':
      newpath = '/' + newpath
    else:
      newpath = self.path + '/' + newpath

    # cleanup '.' and '..'
    parts = self._path_to_parts(newpath)
    finalparts = []
    for part in parts:
      if part == '.':
        pass
      elif part == '..':
        # '..' above the root is silently ignored
        if len(finalparts) != 0:
          finalparts.pop(-1)
      else:
        finalparts.append(part)

    # finally, return the calculated path
    return self._parts_to_path(finalparts)

  def _format_date(self, date):
    """Convert an svn date property into a short human-readable form."""
    date = core.svn_time_from_cstring(date)
    date = time.asctime(time.localtime(date / 1000000))
    return date[4:-8]

  def _do_path_landing(self):
    """try to land on self.path as a directory in root, failing up to '/'"""
    not_found = True
    newpath = self.path
    while not_found:
      kind = fs.check_path(self.root, newpath)
      if kind == core.svn_node_dir:
        not_found = False
      else:
        # Walk up one component and try again; terminates at '/'.
        parts = self._path_to_parts(newpath)
        parts.pop(-1)
        newpath = self._parts_to_path(parts)
    self.path = newpath

  def _setup_prompt(self):
    """build the prompt string from the current rev/txn and path"""
    if self.is_rev:
      self.prompt = "<rev: " + str(self.rev)
    else:
      self.prompt = "<txn: " + self.txn
    self.prompt += " " + self.path + ">$ "

  def _complete(self, text, line, begidx, endidx, limit_node_kind=None):
    """Generic tab completer.  Takes the 4 standard parameters passed to a
    cmd.Cmd completer function, plus LIMIT_NODE_KIND, which should be a
    svn.core.svn_node_foo constant to restrict the returned completions to, or
    None for no limit.  Catches and displays exceptions, because otherwise
    they are silently ignored - which is quite frustrating when debugging!"""
    try:
      args = line.split()
      if len(args) > 1:
        arg = args[1]
      else:
        arg = ""
      dirs = arg.split('/')
      user_elem = dirs[-1]
      user_dir = "/".join(dirs[:-1] + [''])

      canon_dir = self._parse_path(user_dir)

      entries = fs.dir_entries(self.root, canon_dir)
      acceptable_completions = []
      for name, dirent_t in entries.items():
        if not name.startswith(user_elem):
          continue
        if limit_node_kind and dirent_t.kind != limit_node_kind:
          continue
        if dirent_t.kind == core.svn_node_dir:
          name += '/'
        acceptable_completions.append(name)
      # '.' and '..' are valid directory completions too
      if limit_node_kind == core.svn_node_dir or not limit_node_kind:
        if user_elem in ('.', '..'):
          for extraname in ('.', '..'):
            if extraname.startswith(user_elem):
              acceptable_completions.append(extraname + '/')
      return acceptable_completions
    except Exception:
      # Display the traceback before re-raising; cmd.Cmd would otherwise
      # swallow the exception silently.
      ei = sys.exc_info()
      sys.stderr.write("EXCEPTION WHILST COMPLETING\n")
      import traceback
      traceback.print_tb(ei[2])
      sys.stderr.write("%s: %s\n" % (ei[0], ei[1]))
      raise

  def complete_cd(self, text, line, begidx, endidx):
    return self._complete(text, line, begidx, endidx, core.svn_node_dir)

  def complete_cat(self, text, line, begidx, endidx):
    return self._complete(text, line, begidx, endidx, core.svn_node_file)

  def complete_ls(self, text, line, begidx, endidx):
    return self._complete(text, line, begidx, endidx)

  def complete_pcat(self, text, line, begidx, endidx):
    return self._complete(text, line, begidx, endidx)
def usage(exit):
  """Write a usage message and terminate with status EXIT.

  A nonzero EXIT sends the message to stderr; zero sends it to stdout.
  """
  output = sys.stderr if exit else sys.stdout
  output.write(
    "usage: %s REPOS_PATH\n"
    "\n"
    "Once the program has started, type 'help' at the prompt for hints on\n"
    "using the shell.\n" % sys.argv[0])
  sys.exit(exit)

def main():
  """Entry point: require exactly one argument (the repository path)."""
  if len(sys.argv) != 2:
    usage(1)
  # The SVNShell constructor enters the command loop itself.
  SVNShell(sys.argv[1])

if __name__ == '__main__':
  main()
# SvnShell: a Ruby-based shell interface for cruising 'round in
# the filesystem.
class SvnShell

  # A list of potential commands, harvested automatically from the
  # do_* methods as they are defined (see method_added below).
  WORDS = []

  # Check for methods that start with "do_"
  # and list them as potential commands
  class << self
    def method_added(name)
      if /^do_(.*)$/ =~ name.to_s
        WORDS << $1
      end
    end
  end

  # Constructor for SvnShell
  #
  # path: The path to a Subversion repository
  def initialize(path)
    @repos_path = path
    @path = "/"
    self.rev = youngest_rev
    @exited = false
  end

  # Run the shell's read-eval-print loop
  def run

    # While the user hasn't typed 'exit' and there is still input to be read
    while !@exited and buf = Readline.readline(prompt, true)

      # Parse the command line into a single command and arguments
      cmd, *args = Shellwords.shellwords(buf)

      # Skip empty lines
      next if /\A\s*\z/ =~ cmd.to_s

      # Open a new connection to the repo
      @fs = Svn::Repos.open(@repos_path).fs
      setup_root

      # Execute the specified command
      dispatch(cmd, *args)

      # Find a path that exists in the current revision
      @path = find_available_path

      # Close the connection to the repo
      @root.close

    end
  end

  # Private functions
  private

  # Get the current prompt string
  def prompt

    # Gather data for the prompt string
    if rev_mode?
      mode = "rev"
      info = @rev
    else
      mode = "txn"
      info = @txn
    end

    # Return the prompt string
    "<#{mode}: #{info} #{@path}>$ "
  end

  # Dispatch a command to the appropriate do_* subroutine
  def dispatch(cmd, *args)

    # Dispatch cmd to the appropriate do_* function
    if respond_to?("do_#{cmd}", true)
      begin
        __send__("do_#{cmd}", *args)
      rescue ArgumentError
        puts("Invalid argument for #{cmd}: #{args.join(' ')}")
      end
    else
      puts("Unknown command: #{cmd}")
      puts("Try one of these commands: ", WORDS.sort.join(" "))
    end
  end

  # Output the contents of a file from the repository
  def do_cat(path)

    # Normalize the path to an absolute path
    normalized_path = normalize_path(path)

    # Check what type of node exists at the specified path
    case @root.check_path(normalized_path)
    when Svn::Core::NODE_NONE
      puts "Path '#{normalized_path}' does not exist."
    when Svn::Core::NODE_DIR
      puts "Path '#{normalized_path}' is not a file."
    else
      # Output the file to standard out
      @root.file_contents(normalized_path) do |stream|
        puts stream.read(@root.file_length(normalized_path))
      end
    end
  end

  # Set the current directory
  def do_cd(path="/")

    # Normalize the path to an absolute path
    normalized_path = normalize_path(path)

    # If it's a valid directory, then set the directory
    if @root.check_path(normalized_path) == Svn::Core::NODE_DIR
      @path = normalized_path
    else
      puts "Path '#{normalized_path}' is not a valid filesystem directory."
    end
  end

  # List the contents of the current directory or provided paths
  def do_ls(*paths)

    # Default to listing the contents of the current directory
    paths << @path if paths.empty?

    # Foreach path
    paths.each do |path|

      # Normalize the path to an absolute path
      normalized_path = normalize_path(path)

      # Is it a directory or file?
      case @root.check_path(normalized_path)
      when Svn::Core::NODE_DIR

        # Output the contents of the directory
        parent = normalized_path
        entries = @root.dir_entries(parent)

      when Svn::Core::NODE_FILE

        # Split the path into directory and filename components
        parts = path_to_parts(normalized_path)
        name = parts.pop
        parent = parts_to_path(parts)

        # Output the filename
        puts "#{parent}:#{name}"

        # Double check that the file exists
        # inside the parent directory
        parent_entries = @root.dir_entries(parent)
        if parent_entries[name].nil?
          # Hmm. We found the file, but it doesn't exist inside
          # the parent directory. That's a bit unusual.
          puts "No directory entry found for '#{normalized_path}'"
          next
        else
          # Save the path so it can be output in detail
          entries = {name => parent_entries[name]}
        end
      else
        # Path is not a directory or a file,
        # so it must not exist
        puts "Path '#{normalized_path}' not found."
        next
      end

      # Output a detailed listing of the files we found
      puts "   REV   AUTHOR  NODE-REV-ID     SIZE         DATE NAME"
      puts "-" * 76

      # For each entry we found...
      entries.keys.sort.each do |entry|

        # Calculate the full path to the directory entry
        fullpath = parent + '/' + entry
        if @root.dir?(fullpath)
          # If it's a directory, output an extra slash
          size = ''
          name = entry + '/'
        else
          # If it's a file, output the size of the file
          size = @root.file_length(fullpath).to_i.to_s
          name = entry
        end

        # Output the entry
        node_id = entries[entry].id.to_s
        created_rev = @root.node_created_rev(fullpath)
        author = @fs.prop(Svn::Core::PROP_REVISION_AUTHOR, created_rev).to_s
        date = @fs.prop(Svn::Core::PROP_REVISION_DATE, created_rev)
        args = [
          created_rev, author[0,8],
          node_id, size, date.strftime("%b %d %H:%M(%Z)"), name
        ]
        puts "%6s %8s <%10s> %8s %17s %s" % args

      end
    end
  end

  # List all currently open transactions available for browsing
  def do_lstxns

    # Get a sorted list of open transactions.
    # BUG FIX: the result of txns.sort was previously discarded, so the
    # listing came out unsorted; sort in place instead (matching the
    # behavior of svnshell.py's do_lstxns).
    txns = @fs.transactions
    txns.sort!
    counter = 0

    # Output the open transactions, six per row.
    # BUG FIX: 'puts' emitted a newline after every name, which made the
    # six-per-row counter logic a no-op; use 'print' so names share a row.
    txns.each do |txn|
      counter = counter + 1
      print "%8s " % txn

      # Every six transactions, output an extra newline
      if counter == 6
        puts
        counter = 0
      end
    end
    puts
  end

  # Output the properties of a particular path
  def do_pcat(path=nil)

    # Default to the current directory
    catpath = path ? normalize_path(path) : @path

    # Make sure that the specified path exists
    if @root.check_path(catpath) == Svn::Core::NODE_NONE
      puts "Path '#{catpath}' does not exist."
      return
    end

    # Get the list of properties
    plist = @root.node_proplist(catpath)
    return if plist.nil?

    # Output each property in the dump-style K/P length-prefixed format
    plist.each do |key, value|
      puts "K #{key.size}"
      puts key
      puts "P #{value.size}"
      puts value
    end

    # That's all folks!
    puts 'PROPS-END'

  end

  # Set the current revision to view
  def do_setrev(rev)

    # Make sure the specified revision exists
    begin
      @fs.root(Integer(rev)).close
    rescue Svn::Error
      puts "Error setting the revision to '#{rev}': #{$!.message}"
      return
    end

    # Set the revision
    self.rev = Integer(rev)

  end

  # Open an existing transaction to view
  def do_settxn(name)

    # Make sure the specified transaction exists
    begin
      txn = @fs.open_txn(name)
      txn.root.close
    rescue Svn::Error
      puts "Error setting the transaction to '#{name}': #{$!.message}"
      return
    end

    # Set the transaction
    self.txn = name

  end

  # List the youngest revision available for browsing
  def do_youngest
    rev = @fs.youngest_rev
    puts rev
  end

  # Exit this program
  def do_exit
    @exited = true
  end

  # Find the youngest revision
  def youngest_rev
    Svn::Repos.open(@repos_path).fs.youngest_rev
  end

  # Set the current revision (clears any transaction)
  def rev=(new_value)
    @rev = new_value
    @txn = nil
    reset_root
  end

  # Set the current transaction
  def txn=(new_value)
    @txn = new_value
    reset_root
  end

  # Check whether we are in 'revision-mode'
  def rev_mode?
    @txn.nil?
  end

  # Close the current root and setup a new one
  def reset_root
    if @root
      @root.close
      setup_root
    end
  end

  # Setup a new root from the current revision or transaction
  def setup_root
    if rev_mode?
      @root = @fs.root(@rev)
    else
      # BUG FIX: this previously read @fs.open_txn(name), but 'name' is
      # not defined in this scope (it was the parameter of do_settxn),
      # raising NameError whenever a transaction was being browsed.
      # The transaction name is stored in @txn.
      @root = @fs.open_txn(@txn).root
    end
  end

  # Convert a path into its component parts
  def path_to_parts(path)
    path.split(/\/+/)
  end

  # Join the component parts of a path into a string
  def parts_to_path(parts)
    normalized_parts = parts.reject{|part| part.empty?}
    "/#{normalized_parts.join('/')}"
  end

  # Convert a path to a normalized, absolute path
  def normalize_path(path)

    # Convert the path to an absolute path
    if path[0,1] != "/" and @path != "/"
      path = "#{@path}/#{path}"
    end

    # Split the path into its component parts
    parts = path_to_parts(path)

    # Build a list of the normalized parts of the path,
    # resolving '.' and '..' as we go
    normalized_parts = []
    parts.each do |part|
      case part
      when "."
        # ignore
      when ".."
        normalized_parts.pop
      else
        normalized_parts << part
      end
    end

    # Join the normalized parts together into a string
    parts_to_path(normalized_parts)

  end

  # Find the parent directory of a specified path
  def parent_dir(path)
    normalize_path("#{path}/..")
  end

  # Try to land on the specified path as a directory.
  # If the specified path does not exist, look for
  # an ancestor path that does exist.
  def find_available_path(path=@path)
    if @root.check_path(path) == Svn::Core::NODE_DIR
      path
    else
      find_available_path(parent_dir(path))
    end
  end

end
+ * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + * + * To compile on unix against Subversion and APR libraries, try + * something like: + * + * cc testwrite.c -o testwrite \ + * -I/usr/local/include/subversion-1 -I/usr/local/apache2/include \ + * -L/usr/local/apache2/lib -L/usr/local/lib \ + * -lsvn_client-1 -lsvn_ra-1 -lsvn_subr-1 -lsvn-fs-1 -lapr-0 -laprutil-0 + * + */ + +#include "svn_client.h" +#include "svn_pools.h" +#include "svn_config.h" +#include "svn_fs.h" +#include "svn_cmdline.h" +#include "svn_path.h" +#include "svn_time.h" + + +/* Display a prompt and read a one-line response into the provided buffer, + removing a trailing newline if present. */ +static svn_error_t * +prompt_and_read_line(const char *prompt, + char *buffer, + size_t max) +{ + int len; + printf("%s: ", prompt); + if (fgets(buffer, max, stdin) == NULL) + return svn_error_create(0, NULL, "error reading stdin"); + len = strlen(buffer); + if (len > 0 && buffer[len-1] == '\n') + buffer[len-1] = 0; + return SVN_NO_ERROR; +} + +/* A tiny callback function of type 'svn_auth_simple_prompt_func_t'. 
For + a much better example, see svn_cl__auth_simple_prompt in the official + svn cmdline client. */ +static svn_error_t * +my_simple_prompt_callback (svn_auth_cred_simple_t **cred, + void *baton, + const char *realm, + const char *username, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_simple_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + if (username) + ret->username = apr_pstrdup (pool, username); + else + { + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + } + + SVN_ERR (prompt_and_read_line("Password", answerbuf, sizeof(answerbuf))); + ret->password = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + + +/* A tiny callback function of type 'svn_auth_username_prompt_func_t'. For + a much better example, see svn_cl__auth_username_prompt in the official + svn cmdline client. */ +static svn_error_t * +my_username_prompt_callback (svn_auth_cred_username_t **cred, + void *baton, + const char *realm, + svn_boolean_t may_save, + apr_pool_t *pool) +{ + svn_auth_cred_username_t *ret = apr_pcalloc (pool, sizeof (*ret)); + char answerbuf[100]; + + if (realm) + { + printf ("Authentication realm: %s\n", realm); + } + + SVN_ERR (prompt_and_read_line("Username", answerbuf, sizeof(answerbuf))); + ret->username = apr_pstrdup (pool, answerbuf); + + *cred = ret; + return SVN_NO_ERROR; +} + +/* A callback function used when the RA layer needs a handle to a + temporary file. This is a reduced version of the callback used in + the official svn cmdline client. */ +static svn_error_t * +open_tmp_file (apr_file_t **fp, + void *callback_baton, + apr_pool_t *pool) +{ + const char *path; + const char *ignored_filename; + + SVN_ERR (svn_io_temp_dir (&path, pool)); + path = svn_path_join (path, "tempfile", pool); + + /* Open a unique file, with delete-on-close set. 
*/ + SVN_ERR (svn_io_open_unique_file2 (fp, &ignored_filename, + path, ".tmp", + svn_io_file_del_on_close, pool)); + + return SVN_NO_ERROR; +} + + +/* Called when a commit is successful. */ +static svn_error_t * +my_commit_callback (svn_revnum_t new_revision, + const char *date, + const char *author, + void *baton) +{ + printf ("Upload complete. Committed revision %ld.\n", new_revision); + return SVN_NO_ERROR; +} + + + +int +main (int argc, const char **argv) +{ + apr_pool_t *pool; + svn_error_t *err; + apr_hash_t *dirents; + const char *upload_file, *URL; + const char *parent_URL, *basename; + svn_ra_plugin_t *ra_lib; + void *session, *ra_baton; + svn_revnum_t rev; + const svn_delta_editor_t *editor; + void *edit_baton; + svn_dirent_t *dirent; + svn_ra_callbacks_t *cbtable; + apr_hash_t *cfg_hash; + svn_auth_baton_t *auth_baton; + + if (argc <= 1) + { + printf ("Usage: %s URL\n", argv[0]); + printf (" Tries to create an svn commit-transaction at URL.\n"); + return EXIT_FAILURE; + } + URL = argv[1]; + + /* Initialize the app. Send all error messages to 'stderr'. */ + if (svn_cmdline_init ("minimal_client", stderr) != EXIT_SUCCESS) + return EXIT_FAILURE; + + /* Create top-level memory pool. Be sure to read the HACKING file to + understand how to properly use/free subpools. */ + pool = svn_pool_create (NULL); + + /* Initialize the FS library. */ + err = svn_fs_initialize (pool); + if (err) goto hit_error; + + /* Make sure the ~/.subversion run-time config files exist, and load. */ + err = svn_config_ensure (NULL, pool); + if (err) goto hit_error; + + err = svn_config_get_config (&cfg_hash, NULL, pool); + if (err) goto hit_error; + + /* Build an authentication baton. */ + { + /* There are many different kinds of authentication back-end + "providers". See svn_auth.h for a full overview. 
*/ + svn_auth_provider_object_t *provider; + apr_array_header_t *providers + = apr_array_make (pool, 4, sizeof (svn_auth_provider_object_t *)); + + svn_client_get_simple_prompt_provider (&provider, + my_simple_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + svn_client_get_username_prompt_provider (&provider, + my_username_prompt_callback, + NULL, /* baton */ + 2, /* retry limit */ pool); + APR_ARRAY_PUSH (providers, svn_auth_provider_object_t *) = provider; + + /* Register the auth-providers into the context's auth_baton. */ + svn_auth_open (&auth_baton, providers, pool); + } + + /* Create a table of callbacks for the RA session, mostly nonexistent. */ + cbtable = apr_pcalloc (pool, sizeof(*cbtable)); + cbtable->auth_baton = auth_baton; + cbtable->open_tmp_file = open_tmp_file; + + /* Now do the real work. */ + + /* Open an RA session to the parent URL, fetch current HEAD rev and + "lock" onto that revnum for the remainder of the session. */ + svn_path_split (URL, &parent_URL, &basename, pool); + + err = svn_ra_init_ra_libs (&ra_baton, pool); + if (err) goto hit_error; + + err = svn_ra_get_ra_library (&ra_lib, ra_baton, parent_URL, pool); + if (err) goto hit_error; + + err = ra_lib->open (&session, parent_URL, cbtable, NULL, cfg_hash, pool); + if (err) goto hit_error; + + /* Fetch a commit editor (it's anchored on the parent URL, because + the session is too.) */ + /* ### someday add an option for a user-written commit message? 
*/ + err = ra_lib->get_commit_editor (session, &editor, &edit_baton, + "File upload from 'svnput' program.", + my_commit_callback, NULL, pool); + if (err) goto hit_error; + + /* Drive the editor */ + { + void *root_baton, *file_baton, *handler_baton; + svn_txdelta_window_handler_t handler; + svn_stream_t *contents; + apr_file_t *f = NULL; + + err = editor->open_root (edit_baton, rev, pool, &root_baton); + if (err) goto hit_error; + + err = editor->abort_edit (edit_baton, pool); + if (err) goto hit_error; + } + + printf ("No problems creating commit transaction.\n"); + return EXIT_SUCCESS; + + hit_error: + { + printf("Could not open a commit transaction.\n"); + svn_handle_error2 (err, stderr, FALSE, "testwrite: "); + return EXIT_FAILURE; + } + +} diff --git a/tools/hook-scripts/commit-access-control.cfg.example b/tools/hook-scripts/commit-access-control.cfg.example new file mode 100644 index 0000000..674c190 --- /dev/null +++ b/tools/hook-scripts/commit-access-control.cfg.example @@ -0,0 +1,74 @@ +# This is a sample configuration file for commit-access-control.pl. +# +# $Id: commit-access-control.cfg.example 845362 2003-03-12 05:56:21Z kfogel $ +# +# This file uses the Windows ini style, where the file consists of a +# number of sections, each section starts with a unique section name +# in square brackets. Parameters in each section are specified as +# Name = Value. Any spaces around the equal sign will be ignored. If +# there are multiple sections with exactly the same section name, then +# the parameters in those sections will be added together to produce +# one section with cumulative parameters. +# +# The commit-access-control.pl script reads these sections in order, +# so later sections may overwrite permissions granted or removed in +# previous sections. +# +# Each section has three valid parameters. Any other parameters are +# ignored. +# access = (read-only|read-write) +# +# This parameter is a required parameter. 
Valid values are +# `read-only' and `read-write'. +# +# The access rights to apply to modified files and directories +# that match the `match' regular expression described later on. +# +# match = PERL_REGEX +# +# This parameter is a required parameter and its value is a Perl +# regular expression. +# +# To help users that automatically write regular expressions that +# match the beginning of absolute paths using ^/, the script +# removes the / character because subversion paths, while they +# start at the root level, do not begin with a /. +# +# users = username1 [username2 [username3 [username4 ...]]] +# or +# users = username1 [username2] +# users = username3 username4 +# +# This parameter is optional. The usernames listed here must be +# exact usernames. There is no regular expression matching for +# usernames. You may specify all the usernames that apply on one +# line or split the names up on multiple lines. +# +# The access rights from `access' are applied to ALL modified +# paths that match the `match' regular expression only if NO +# usernames are specified in the section or if one of the listed +# usernames matches the author of the commit. +# +# By default, because you're using commit-access-control.pl in the +# first place to protect your repository, the script sets the +# permissions to all files and directories in the repository to +# read-only, so if you want to open up portions of the repository, +# you'll need to edit this file. +# +# NOTE: NEVER GIVE DIFFERENT SECTIONS THE SAME SECTION NAME, OTHERWISE +# THE PARAMETERS FOR THOSE SECTIONS WILL BE MERGED TOGETHER INTO ONE +# SECTION AND YOUR SECURITY MAY BE COMPROMISED. 
+ +[Make everything read-only for all users] +match = .* +access = read-only + +[Make project1 read-write for users Jane and Joe] +match = ^(branches|tags|trunk)/project1 +users = jane joe +access = read-write + +[However, we don't trust Joe with project1's Makefile] +match = ^(branches|tags|trunk)/project1/Makefile +users = joe +access = read-only diff --git a/tools/hook-scripts/commit-access-control.pl.in b/tools/hook-scripts/commit-access-control.pl.in new file mode 100755 index 0000000..0e5fade --- /dev/null +++ b/tools/hook-scripts/commit-access-control.pl.in @@ -0,0 +1,411 @@ +#!/usr/bin/env perl + +# ==================================================================== +# commit-access-control.pl: check if the user that submitted the +# transaction TXN-NAME has the appropriate rights to perform the +# commit in repository REPOS using the permissions listed in the +# configuration file CONF_FILE. +# +# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.7.x/tools/hook-scripts/commit-access-control.pl.in $ +# $LastChangedDate: 2009-11-16 19:07:17 +0000 (Mon, 16 Nov 2009) $ +# $LastChangedBy: hwright $ +# $LastChangedRevision: 880911 $ +# +# Usage: commit-access-control.pl REPOS TXN-NAME CONF_FILE +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +# Turn on warnings the best way depending on the Perl version. +BEGIN { + if ( $] >= 5.006_000) + { require warnings; import warnings; } + else + { $^W = 1; } +} + +use strict; +use Carp; +use Config::IniFiles 2.27; + +###################################################################### +# Configuration section. + +# Svnlook path. +my $svnlook = "@SVN_BINDIR@/svnlook"; + +# Since the path to svnlook depends upon the local installation +# preferences, check that the required program exists to insure that +# the administrator has set up the script properly. +{ + my $ok = 1; + foreach my $program ($svnlook) + { + if (-e $program) + { + unless (-x $program) + { + warn "$0: required program `$program' is not executable, ", + "edit $0.\n"; + $ok = 0; + } + } + else + { + warn "$0: required program `$program' does not exist, edit $0.\n"; + $ok = 0; + } + } + exit 1 unless $ok; +} + +###################################################################### +# Initial setup/command-line handling. + +&usage unless @ARGV == 3; + +my $repos = shift; +my $txn = shift; +my $cfg_filename = shift; + +unless (-e $repos) + { + &usage("$0: repository directory `$repos' does not exist."); + } +unless (-d $repos) + { + &usage("$0: repository directory `$repos' is not a directory."); + } +unless (-e $cfg_filename) + { + &usage("$0: configuration file `$cfg_filename' does not exist."); + } +unless (-r $cfg_filename) + { + &usage("$0: configuration file `$cfg_filename' is not readable."); + } + +# Define two constant subroutines to stand for read-only or read-write +# access to the repository. +sub ACCESS_READ_ONLY () { 'read-only' } +sub ACCESS_READ_WRITE () { 'read-write' } + +###################################################################### +# Load the configuration file and validate it. 
+my $cfg = Config::IniFiles->new(-file => $cfg_filename); +unless ($cfg) + { + die "$0: error in loading configuration file `$cfg_filename'", + @Config::IniFiles::errors ? ":\n@Config::IniFiles::errors\n" + : ".\n"; + } + +# Go through each section of the configuration file, validate that +# each section has the required parameters and complain about unknown +# parameters. Compile any regular expressions. +my @sections = $cfg->Sections; +{ + my $ok = 1; + foreach my $section (@sections) + { + # First check for any unknown parameters. + foreach my $param ($cfg->Parameters($section)) + { + next if $param eq 'match'; + next if $param eq 'users'; + next if $param eq 'access'; + warn "$0: config file `$cfg_filename' section `$section' parameter ", + "`$param' is being ignored.\n"; + $cfg->delval($section, $param); + } + + my $access = $cfg->val($section, 'access'); + if (defined $access) + { + unless ($access eq ACCESS_READ_ONLY or $access eq ACCESS_READ_WRITE) + { + warn "$0: config file `$cfg_filename' section `$section' sets ", + "`access' to illegal value `$access'.\n"; + $ok = 0; + } + } + else + { + warn "$0: config file `$cfg_filename' section `$section' does ", + "not set `access' parameter.\n"; + $ok = 0; + } + + my $match_regex = $cfg->val($section, 'match'); + if (defined $match_regex) + { + # To help users that automatically write regular expressions + # that match the beginning of absolute paths using ^/, + # remove the / character because subversion paths, while + # they start at the root level, do not begin with a /. 
+ $match_regex =~ s#^\^/#^#; + + my $match_re; + eval { $match_re = qr/$match_regex/ }; + if ($@) + { + warn "$0: config file `$cfg_filename' section `$section' ", + "`match' regex `$match_regex' does not compile:\n$@\n"; + $ok = 0; + } + else + { + $cfg->newval($section, 'match_re', $match_re); + } + } + else + { + warn "$0: config file `$cfg_filename' section `$section' does ", + "not set `match' parameter.\n"; + $ok = 0; + } + } + exit 1 unless $ok; +} + +###################################################################### +# Harvest data using svnlook. + +# Change into /tmp so that svnlook diff can create its .svnlook +# directory. +my $tmp_dir = '/tmp'; +chdir($tmp_dir) + or die "$0: cannot chdir `$tmp_dir': $!\n"; + +# Get the author from svnlook. +my @svnlooklines = &read_from_process($svnlook, 'author', $repos, '-t', $txn); +my $author = shift @svnlooklines; +unless (length $author) + { + die "$0: txn `$txn' has no author.\n"; + } + +# Figure out what directories have changed using svnlook.. +my @dirs_changed = &read_from_process($svnlook, 'dirs-changed', $repos, + '-t', $txn); + +# Lose the trailing slash in the directory names if one exists, except +# in the case of '/'. +my $rootchanged = 0; +for (my $i=0; $i<@dirs_changed; ++$i) + { + if ($dirs_changed[$i] eq '/') + { + $rootchanged = 1; + } + else + { + $dirs_changed[$i] =~ s#^(.+)[/\\]$#$1#; + } + } + +# Figure out what files have changed using svnlook. +my @files_changed; +foreach my $line (&read_from_process($svnlook, 'changed', $repos, '-t', $txn)) + { + # Split the line up into the modification code and path, ignoring + # property modifications. + if ($line =~ /^.. (.*)$/) + { + push(@files_changed, $1); + } + } + +# Create the list of all modified paths. +my @changed = (@dirs_changed, @files_changed); + +# There should always be at least one changed path. If there are +# none, then there maybe something fishy going on, so just exit now +# indicating that the commit should not proceed. 
+unless (@changed) + { + die "$0: no changed paths found in txn `$txn'.\n"; + } + +###################################################################### +# Populate the permissions table. + +# Set a hash keeping track of the access rights to each path. Because +# this is an access control script, set the default permissions to +# read-only. +my %permissions; +foreach my $path (@changed) + { + $permissions{$path} = ACCESS_READ_ONLY; + } + +foreach my $section (@sections) + { + # Decide if this section should be used. It should be used if + # there are no users listed at all for this section, or if there + # are users listed and the author is one of them. + my $use_this_section; + + # If there are any users listed, then check if the author of this + # commit is listed in the list. If not, then delete the section, + # because it won't apply. + # + # The configuration file can list users like this on multiple + # lines: + # users = joe@mysite.com betty@mysite.com + # users = bob@yoursite.com + + # Because of the way Config::IniFiles works, check if there are + # any users at all with the scalar return from val() and if there, + # then get the array value to get all users. + my $users = $cfg->val($section, 'users'); + if (defined $users and length $users) + { + my $match_user = 0; + foreach my $entry ($cfg->val($section, 'users')) + { + unless ($match_user) + { + foreach my $user (split(' ', $entry)) + { + if ($author eq $user) + { + $match_user = 1; + last; + } + } + } + } + + $use_this_section = $match_user; + } + else + { + $use_this_section = 1; + } + + next unless $use_this_section; + + # Go through each modified path and match it to the regular + # expression and set the access right if the regular expression + # matches. 
+ my $access = $cfg->val($section, 'access'); + my $match_re = $cfg->val($section, 'match_re'); + foreach my $path (@changed) + { + $permissions{$path} = $access if $path =~ $match_re; + } + } + +# Go through all the modified paths and see if any permissions are +# read-only. If so, then fail the commit. +my @failed_paths; +foreach my $path (@changed) + { + if ($permissions{$path} ne ACCESS_READ_WRITE) + { + push(@failed_paths, $path); + } + } + +if (@failed_paths) + { + warn "$0: user `$author' does not have permission to commit to ", + @failed_paths > 1 ? "these paths:\n " : "this path:\n ", + join("\n ", @failed_paths), "\n"; + exit 1; + } +else + { + exit 0; + } + +sub usage +{ + warn "@_\n" if @_; + die "usage: $0 REPOS TXN-NAME CONF_FILE\n"; +} + +sub safe_read_from_pipe +{ + unless (@_) + { + croak "$0: safe_read_from_pipe passed no arguments.\n"; + } + print "Running @_\n"; + my $pid = open(SAFE_READ, '-|'); + unless (defined $pid) + { + die "$0: cannot fork: $!\n"; + } + unless ($pid) + { + open(STDERR, ">&STDOUT") + or die "$0: cannot dup STDOUT: $!\n"; + exec(@_) + or die "$0: cannot exec `@_': $!\n"; + } + my @output; + while (<SAFE_READ>) + { + chomp; + push(@output, $_); + } + close(SAFE_READ); + my $result = $?; + my $exit = $result >> 8; + my $signal = $result & 127; + my $cd = $result & 128 ? 
"with core dump" : ""; + if ($signal or $cd) + { + warn "$0: pipe from `@_' failed $cd: exit=$exit signal=$signal\n"; + } + if (wantarray) + { + return ($result, @output); + } + else + { + return $result; + } +} + +sub read_from_process + { + unless (@_) + { + croak "$0: read_from_process passed no arguments.\n"; + } + my ($status, @output) = &safe_read_from_pipe(@_); + if ($status) + { + if (@output) + { + die "$0: `@_' failed with this output:\n", join("\n", @output), "\n"; + } + else + { + die "$0: `@_' failed with no output.\n"; + } + } + else + { + return @output; + } +} diff --git a/tools/hook-scripts/commit-email.rb b/tools/hook-scripts/commit-email.rb new file mode 100755 index 0000000..2fa61a0 --- /dev/null +++ b/tools/hook-scripts/commit-email.rb @@ -0,0 +1,122 @@ +#!/usr/bin/env ruby + +# +###################################################################### +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+###################################################################### +# + +require 'English' + +original_argv = ARGV.dup +argv = [] + +found_include_option = false +while (arg = original_argv.shift) + if found_include_option + $LOAD_PATH.unshift(arg) + found_include_option = false + else + case arg + when "-I", "--include" + found_include_option = true + when /\A-I/, /\A--include=?/ + path = $POSTMATCH + $LOAD_PATH.unshift(path) unless path.empty? + else + argv << arg + end + end +end + +def extract_email_address(address) + if /<(.+?)>/ =~ address + $1 + else + address + end +end + +def sendmail(to, from, mail, server=nil, port=nil) + server ||= "localhost" + from = extract_email_address(from) + to = to.collect {|address| extract_email_address(address)} + Net::SMTP.start(server, port) do |smtp| + smtp.open_message_stream(from, to) do |f| + f.print(mail) + end + end +end + +begin + require 'svn/commit-mailer' + Svn::Locale.set + Svn::CommitMailer.run(argv) +rescue Exception => error + require 'net/smtp' + require 'socket' + + to = [] + subject = "Error" + from = "#{ENV['USER']}@#{Socket.gethostname}" + server = nil + port = nil + begin + begin + Svn::CommitMailer + rescue NameError + raise OptionParser::ParseError + end + _, _, _to, options = Svn::CommitMailer.parse(argv) + to = [_to] + to = options.error_to unless options.error_to.empty? + from = options.from || from + subject = "#{options.name}: #{subject}" if options.name + server = options.server + port = options.port + rescue OptionParser::MissingArgument + argv.delete_if {|arg| $!.args.include?(arg)} + retry + rescue OptionParser::ParseError + if to.empty? + _, _, _to, *_ = ARGV.reject {|arg| /^-/.match(arg)} + to = [_to] + end + end + + detail = <<-EOM +#{error.class}: #{error.message} +#{error.backtrace.join("\n")} +EOM + to = to.compact + if to.empty? 
+ STDERR.puts detail + else + sendmail(to, from, <<-MAIL, server, port) +MIME-Version: 1.0 +Content-Type: text/plain; charset=us-ascii +Content-Transfer-Encoding: 7bit +From: #{from} +To: #{to.join(', ')} +Subject: #{subject} +Date: #{Time.now.rfc2822} + +#{detail} +MAIL + end +end diff --git a/tools/hook-scripts/log-police.py b/tools/hook-scripts/log-police.py new file mode 100755 index 0000000..4d0ff0f --- /dev/null +++ b/tools/hook-scripts/log-police.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +# log-police.py: Ensure that log messages end with a single newline. +# See usage() function for details, or just run with no arguments. + +import os +import sys +import getopt +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt + +import svn +import svn.fs +import svn.repos +import svn.core + + +def fix_log_message(log_message): + """Return a fixed version of LOG_MESSAGE. By default, this just + means ensuring that the result ends with exactly one newline and no + other whitespace. 
But if you want to do other kinds of fixups, this + function is the place to implement them -- all log message fixing in + this script happens here.""" + return log_message.rstrip() + "\n" + + +def fix_txn(fs, txn_name): + "Fix up the log message for txn TXN_NAME in FS. See fix_log_message()." + txn = svn.fs.svn_fs_open_txn(fs, txn_name) + log_message = svn.fs.svn_fs_txn_prop(txn, "svn:log") + if log_message is not None: + new_message = fix_log_message(log_message) + if new_message != log_message: + svn.fs.svn_fs_change_txn_prop(txn, "svn:log", new_message) + + +def fix_rev(fs, revnum): + "Fix up the log message for revision REVNUM in FS. See fix_log_message()." + log_message = svn.fs.svn_fs_revision_prop(fs, revnum, 'svn:log') + if log_message is not None: + new_message = fix_log_message(log_message) + if new_message != log_message: + svn.fs.svn_fs_change_rev_prop(fs, revnum, "svn:log", new_message) + + +def usage_and_exit(error_msg=None): + """Write usage information and exit. If ERROR_MSG is provide, that + error message is printed first (to stderr), the usage info goes to + stderr, and the script exits with a non-zero status. Otherwise, + usage info goes to stdout and the script exits with a zero status.""" + import os.path + stream = error_msg and sys.stderr or sys.stdout + if error_msg: + stream.write("ERROR: %s\n\n" % error_msg) + stream.write("USAGE: %s [-t TXN_NAME | -r REV_NUM | --all-revs] REPOS\n" + % (os.path.basename(sys.argv[0]))) + stream.write(""" +Ensure that log messages end with exactly one newline and no other +whitespace characters. Use as a pre-commit hook by passing '-t TXN_NAME'; +fix up a single revision by passing '-r REV_NUM'; fix up all revisions by +passing '--all-revs'. (When used as a pre-commit hook, may modify the +svn:log property on the txn.) 
+""") + sys.exit(error_msg and 1 or 0) + + +def main(ignored_pool, argv): + repos_path = None + txn_name = None + rev_name = None + all_revs = False + + try: + opts, args = my_getopt(argv[1:], 't:r:h?', ["help", "all-revs"]) + except: + usage_and_exit("problem processing arguments / options.") + for opt, value in opts: + if opt == '--help' or opt == '-h' or opt == '-?': + usage_and_exit() + elif opt == '-t': + txn_name = value + elif opt == '-r': + rev_name = value + elif opt == '--all-revs': + all_revs = True + else: + usage_and_exit("unknown option '%s'." % opt) + + if txn_name is not None and rev_name is not None: + usage_and_exit("cannot pass both -t and -r.") + if txn_name is not None and all_revs: + usage_and_exit("cannot pass --all-revs with -t.") + if rev_name is not None and all_revs: + usage_and_exit("cannot pass --all-revs with -r.") + if rev_name is None and txn_name is None and not all_revs: + usage_and_exit("must provide exactly one of -r, -t, or --all-revs.") + if len(args) != 1: + usage_and_exit("only one argument allowed (the repository).") + + repos_path = svn.core.svn_path_canonicalize(args[0]) + + # A non-bindings version of this could be implemented by calling out + # to 'svnlook getlog' and 'svnadmin setlog'. However, using the + # bindings results in much simpler code. + + fs = svn.repos.svn_repos_fs(svn.repos.svn_repos_open(repos_path)) + if txn_name is not None: + fix_txn(fs, txn_name) + elif rev_name is not None: + fix_rev(fs, int(rev_name)) + elif all_revs: + # Do it such that if we're running on a live repository, we'll + # catch up even with commits that came in after we started. 
+ last_youngest = 0 + while True: + youngest = svn.fs.svn_fs_youngest_rev(fs) + if youngest >= last_youngest: + for this_rev in range(last_youngest, youngest + 1): + fix_rev(fs, this_rev) + last_youngest = youngest + 1 + else: + break + + +if __name__ == '__main__': + sys.exit(svn.core.run_app(main, sys.argv)) diff --git a/tools/hook-scripts/mailer/mailer.conf.example b/tools/hook-scripts/mailer/mailer.conf.example new file mode 100644 index 0000000..be04877 --- /dev/null +++ b/tools/hook-scripts/mailer/mailer.conf.example @@ -0,0 +1,361 @@ +# +# mailer.conf: example configuration file for mailer.py +# +# $Id: mailer.conf.example 885511 2009-11-30 17:30:17Z julianfoad $ + +[general] + +# The [general].diff option is now DEPRECATED. +# Instead use [defaults].diff . + +# +# One delivery method must be chosen. mailer.py will prefer using the +# "mail_command" option. If that option is empty or commented out, +# then it checks whether the "smtp_hostname" option has been +# specified. If neither option is set, then the commit message is +# delivered to stdout. +# + +# This command will be invoked with destination addresses on the command +# line, and the message piped into it. +#mail_command = /usr/sbin/sendmail + +# This option specifies the hostname for delivery via SMTP. +#smtp_hostname = localhost + +# Username and password for SMTP servers requiring authorisation. +#smtp_username = example +#smtp_password = example + +# -------------------------------------------------------------------------- + +# +# CONFIGURATION GROUPS +# +# Any sections other than [general], [defaults], [maps] and sections +# referred to within [maps] are considered to be user-defined groups +# which override values in the [defaults] section. +# These groups are selected using the following three options: +# +# for_repos +# for_paths +# search_logmsg +# +# Each option specifies a regular expression. 
for_repos is matched +# against the absolute path to the repository the mailer is operating +# against. for_paths is matched against *every* path (files and +# dirs) that was modified during the commit. +# +# The options specified in the [defaults] section are always selected. The +# presence of a non-matching for_repos has no relevance. Note that you may +# still use a for_repos value to extract useful information (more on this +# later). Any user-defined groups without a for_repos, or which contains +# a matching for_repos, will be selected for potential use. +# +# The subset of user-defined groups identified by the repository are further +# refined based on the for_paths option. A group is selected if at least +# one path(*) in the commit matches the for_paths regular expression. Note +# that the paths are relative to the root of the repository and do not +# have a leading slash. +# +# (*) Actually, each path will select just one group. Thus, it is possible +# that one group will match against all paths, while another group matches +# none of the paths, even though its for_paths would have selected some of +# the paths in the commit. +# +# search_logmsg specifies a regular expression to match against the +# log message. If the regular expression does not match the log +# message, the group is not matched; if the regular expression matches +# once, the group is used. If there are multiple matches, each +# successful match generates another group-match (this is useful if +# "named groups" are used). If search_logmsg is not used, no log +# message filtering is performed. +# +# Groups are matched in no particular order. Do not depend upon their +# order within this configuration file. The values from [defaults] will +# be used if no group is matched or an option in a group does not override +# the corresponding value from [defaults]. +# +# Generally, a commit email is generated for each group that has been +# selected. 
The script will try to minimize mails, so it may be possible +# that a single message will be generated to multiple recipients. In +# addition, it is possible for multiple messages per group to be generated, +# based on the various substitutions that are performed (see the following +# section). +# +# +# SUBSTITUTIONS +# +# The regular expressions can use the "named group" syntax to extract +# interesting pieces of the repository or commit path. These named values +# can then be substituted in the option values during mail generation. +# +# For example, let's say that you have a repository with a top-level +# directory named "clients", with several client projects underneath: +# +# REPOS/ +# clients/ +# gsvn/ +# rapidsvn/ +# winsvn/ +# +# The client name can be extracted with a regular expression like: +# +# for_paths = clients/(?P<client>[^/]*)($|/) +# +# The substitution is performed using Python's dict-based string +# interpolation syntax: +# +# to_addr = commits@%(client)s.tigris.org +# +# The %(NAME)s syntax will substitute whatever value for NAME was captured +# in the for_repos and for_paths regular expressions. The set of names +# available is obtained from the following set of regular expressions: +# +# [defaults].for_repos (if present) +# [GROUP].for_repos (if present in the user-defined group "GROUP") +# [GROUP].for_paths (if present in the user-defined group "GROUP") +# +# The names from the regexes later in the list override the earlier names. +# If none of the groups match, but a for_paths is present in [defaults], +# then its extracted names will be available. +# +# Further suppose you want to match bug-ids in log messages: +# +# search_logmsg = (?P<bugid>(ProjA|ProjB)#\d) +# +# The bugids would be of the form ProjA#123 and ProjB#456. In this +# case, each time the regular expression matches, another match group +# will be generated. 
Thus, if you use: +# +# commit_subject_prefix = %(bugid)s: +# +# Then, a log message such as "Fixes ProjA#123 and ProjB#234" would +# match both bug-ids, and two emails would be generated - one with +# subject "ProjA#123: <...>" and "ProjB#234: <...>". +# +# Note that each unique set of names for substitution will generate an +# email. In the above example, if a commit modified files in all three +# client subdirectories, then an email will be sent to all three commits@ +# mailing lists on tigris.org. +# +# The substitution variable "author" is provided by default, and is set +# to the author name passed to mailer.py for revprop changes or the +# author defined for a revision; if neither is available, then it is +# set to "no_author". Thus, you might define a line like: +# +# from_addr = %(author)s@example.com +# +# +# SUMMARY +# +# While mailer.py will work to minimize the number of mail messages +# generated, a single commit can potentially generate a large number +# of variants of a commit message. The criteria for generating messages +# is based on: +# +# groups selected by for_repos +# groups selected by for_paths +# unique sets of parameters extracted by the above regular expressions +# + +[defaults] + +# This is not passed to the shell, so do not use shell metacharacters. +# The command is split around whitespace, so if you want to include +# whitespace in the command, then ### something ###. +diff = /usr/bin/diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s + +# The default prefix for the Subject: header for commits. +commit_subject_prefix = + +# The default prefix for the Subject: header for propchanges. +propchange_subject_prefix = + +# The default prefix for the Subject: header for locks. +lock_subject_prefix = + +# The default prefix for the Subject: header for unlocks. +unlock_subject_prefix = + + +# The default From: address for messages. 
If the from_addr is not +# specified or it is specified but there is no text after the `=', +# then the revision's author is used as the from address. If the +# revision author is not specified, such as when a commit is done +# without requiring authentication and authorization, then the string +# 'no_author' is used. You can specify a default from_addr here and +# if you want to have a particular for_repos group use the author as +# the from address, you can use "from_addr =". +from_addr = invalid@example.com + +# The default To: addresses for message. One or more addresses, +# separated by whitespace (no commas). +# NOTE: If you want to use a different character for separating the +# addresses put it in front of the addresses included in square +# brackets '[ ]'. +to_addr = invalid@example.com + +# If this is set, then a Reply-To: will be inserted into the message. +reply_to = + +# Specify which types of repository changes mailer.py will create +# diffs for. Valid options are any combination of +# 'add copy modify delete', or 'none' to never create diffs. +# If the generate_diffs option is empty, the selection is controlled +# by the deprecated options suppress_deletes and suppress_adds. +# Note that this only affects the display of diffs - all changes are +# mentioned in the summary of changed paths at the top of the message, +# regardless of this option's value. +# Meaning of the possible values: +# add: generates diffs for all added paths +# copy: generates diffs for all copied paths +# which were not changed after copying +# modify: generates diffs for all modified paths, including paths that were +# copied and modified afterwards (within the same commit) +# delete: generates diffs for all removed paths +generate_diffs = add copy modify + +# Commit URL construction. This adds a URL to the top of the message +# that can lead the reader to a Trac, ViewVC or other view of the +# commit as a whole. 
+# +# The available substitution variable is: rev +#commit_url = http://diffs.server.com/trac/software/changeset/%(rev)s + +# Diff URL construction. For the configured diff URL types, the diff +# section (which follows the message header) will include the URL +# relevant to the change type, even if actual diff generation for that +# change type is disabled (per the generate_diffs option). +# +# Available substitution variables are: path, base_path, rev, base_rev +#diff_add_url = +#diff_copy_url = +#diff_modify_url = http://diffs.server.com/?p1=%(base_path)s&p2=%(path)s +#diff_delete_url = + +# When set to "yes", the mailer will suppress the creation of a diff which +# deletes all the lines in the file. If this is set to anything else, or +# is simply commented out, then the diff will be inserted. Note that the +# deletion is always mentioned in the message header, regardless of this +# option's value. +### DEPRECATED (if generate_diffs is not empty, this option is ignored) +#suppress_deletes = yes + +# When set to "yes", the mailer will suppress the creation of a diff which +# adds all the lines in the file. If this is set to anything else, or +# is simply commented out, then the diff will be inserted. Note that the +# addition is always mentioned in the message header, regardless of this +# option's value. +### DEPRECATED (if generate_diffs is not empty, this option is ignored) +#suppress_adds = yes + +# A revision is reported on if any of its changed paths match the +# for_paths option. If only some of the changed paths of a revision +# match, this variable controls the behaviour for the non-matching +# paths. Possible values are: +# +# yes: (Default) Show in both summary and diffs. +# summary: Show the changed paths in the summary, but omit the diffs. +# no: Show nothing more than a note saying "and changes in other areas" +# +show_nonmatching_paths = yes + +# Subject line length limit. 
The generated subject line will be truncated +# and terminated with "...", to remain within the specified maximum length. +# Set to 0 to turn off. +#truncate_subject = 200 + +# -------------------------------------------------------------------------- + +[maps] + +# +# This section can be used define rewrite mappings for option values. It +# is typically used for computing from/to addresses, but can actually be +# used to remap values for any option in this file. +# +# The mappings are global for the entire configuration file. There is +# no group-specific mapping capability. For each mapping that you want +# to perform, you will provide the name of the option (e.g. from_addr) +# and a specification of how to perform those mappings. These declarations +# are made here in the [maps] section. +# +# When an option is accessed, the value is loaded from the configuration +# file and all %(NAME)s substitutions are performed. The resulting value +# is then passed through the map. If a map entry is not available for +# the value, then it will be used unchanged. +# +# NOTES: - Avoid using map substitution names which differ only in case. +# Unexpected results may occur. +# - A colon ':' is also considered as separator between option and +# value (keep this in mind when trying to map a file path under +# windows). +# +# The format to declare a map is: +# +# option_name_to_remap = mapping_specification +# +# At the moment, there is only one type of mapping specification: +# +# mapping_specification = '[' sectionname ']' +# +# This will use the given section to map values. The option names in +# the section are the input values, and the option values are the result. +# + +# +# EXAMPLE: +# +# We have two projects using two repositories. The name of the repos +# does not easily map to their commit mailing lists, so we will use +# a mapping to go from a project name (extracted from the repository +# path) to their commit list. 
The committers also need a special +# mapping to derive their email address from their repository username. +# +# [projects] +# for_repos = .*/(?P<project>.*) +# from_addr = %(author)s +# to_addr = %(project)s +# +# [maps] +# from_addr = [authors] +# to_addr = [mailing-lists] +# +# [authors] +# john = jconnor@example.com +# sarah = sconnor@example.com +# +# [mailing-lists] +# t600 = spottable-commits@example.com +# tx = hotness-commits@example.com +# + +# -------------------------------------------------------------------------- + +# +# [example-group] +# # send notifications if any web pages are changed +# for_paths = .*\.html +# # set a custom prefix +# commit_subject_prefix = [commit] +# propchange_subject_prefix = [propchange] +# # override the default, sending these elsewhere +# to_addr = www-commits@example.com +# # use the revision author as the from address +# from_addr = +# # use a custom diff program for this group +# diff = /usr/bin/my-diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s +# +# [another-example] +# # commits to personal repositories should go to that person +# for_repos = /home/(?P<who>[^/]*)/repos +# to_addr = %(who)s@example.com +# +# [issuetracker] +# search_logmsg = (?P<bugid>(?P<project>projecta|projectb|projectc)#\d+) +# # (or, use a mapping if the bug-id to email address is not this trivial) +# to_addr = %(project)s-tracker@example.com +# commit_subject_prefix = %(bugid)s: +# propchange_subject_prefix = %(bugid)s: + diff --git a/tools/hook-scripts/mailer/mailer.py b/tools/hook-scripts/mailer/mailer.py new file mode 100755 index 0000000..08079fe --- /dev/null +++ b/tools/hook-scripts/mailer/mailer.py @@ -0,0 +1,1453 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. 
The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# mailer.py: send email describing a commit +# +# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.7.x/tools/hook-scripts/mailer/mailer.py $ +# $LastChangedDate: 2010-12-30 20:46:50 +0000 (Thu, 30 Dec 2010) $ +# $LastChangedBy: hwright $ +# $LastChangedRevision: 1053998 $ +# +# USAGE: mailer.py commit REPOS REVISION [CONFIG-FILE] +# mailer.py propchange REPOS REVISION AUTHOR REVPROPNAME [CONFIG-FILE] +# mailer.py propchange2 REPOS REVISION AUTHOR REVPROPNAME ACTION \ +# [CONFIG-FILE] +# mailer.py lock REPOS AUTHOR [CONFIG-FILE] +# mailer.py unlock REPOS AUTHOR [CONFIG-FILE] +# +# Using CONFIG-FILE, deliver an email describing the changes between +# REV and REV-1 for the repository REPOS. +# +# ACTION was added as a fifth argument to the post-revprop-change hook +# in Subversion 1.2.0. Its value is one of 'A', 'M' or 'D' to indicate +# if the property was added, modified or deleted, respectively. +# +# See _MIN_SVN_VERSION below for which version of Subversion's Python +# bindings are required by this version of mailer.py. 
+ +import os +import sys +try: + # Python >=3.0 + import configparser + from urllib.parse import quote as urllib_parse_quote +except ImportError: + # Python <3.0 + import ConfigParser as configparser + from urllib import quote as urllib_parse_quote +import time +import subprocess +if sys.version_info[0] >= 3: + # Python >=3.0 + from io import StringIO +else: + # Python <3.0 + from cStringIO import StringIO +import smtplib +import re +import tempfile + +# Minimal version of Subversion's bindings required +_MIN_SVN_VERSION = [1, 5, 0] + +# Import the Subversion Python bindings, making sure they meet our +# minimum version requirements. +try: + import svn.fs + import svn.delta + import svn.repos + import svn.core +except ImportError: + sys.stderr.write( + "You need version %s or better of the Subversion Python bindings.\n" \ + % ".".join([str(x) for x in _MIN_SVN_VERSION])) + sys.exit(1) +if _MIN_SVN_VERSION > [svn.core.SVN_VER_MAJOR, + svn.core.SVN_VER_MINOR, + svn.core.SVN_VER_PATCH]: + sys.stderr.write( + "You need version %s or better of the Subversion Python bindings.\n" \ + % ".".join([str(x) for x in _MIN_SVN_VERSION])) + sys.exit(1) + + +SEPARATOR = '=' * 78 + +def main(pool, cmd, config_fname, repos_dir, cmd_args): + ### TODO: Sanity check the incoming args + + if cmd == 'commit': + revision = int(cmd_args[0]) + repos = Repository(repos_dir, revision, pool) + cfg = Config(config_fname, repos, { 'author' : repos.author }) + messenger = Commit(pool, cfg, repos) + elif cmd == 'propchange' or cmd == 'propchange2': + revision = int(cmd_args[0]) + author = cmd_args[1] + propname = cmd_args[2] + action = (cmd == 'propchange2' and cmd_args[3] or 'A') + repos = Repository(repos_dir, revision, pool) + # Override the repos revision author with the author of the propchange + repos.author = author + cfg = Config(config_fname, repos, { 'author' : author }) + messenger = PropChange(pool, cfg, repos, author, propname, action) + elif cmd == 'lock' or cmd == 'unlock': + author 
= cmd_args[0] + repos = Repository(repos_dir, 0, pool) ### any old revision will do + # Override the repos revision author with the author of the lock/unlock + repos.author = author + cfg = Config(config_fname, repos, { 'author' : author }) + messenger = Lock(pool, cfg, repos, author, cmd == 'lock') + else: + raise UnknownSubcommand(cmd) + + messenger.generate() + + +def remove_leading_slashes(path): + while path and path[0] == '/': + path = path[1:] + return path + + +class OutputBase: + "Abstract base class to formalize the interface of output methods" + + def __init__(self, cfg, repos, prefix_param): + self.cfg = cfg + self.repos = repos + self.prefix_param = prefix_param + self._CHUNKSIZE = 128 * 1024 + + # This is a public member variable. This must be assigned a suitable + # piece of descriptive text before make_subject() is called. + self.subject = "" + + def make_subject(self, group, params): + prefix = self.cfg.get(self.prefix_param, group, params) + if prefix: + subject = prefix + ' ' + self.subject + else: + subject = self.subject + + try: + truncate_subject = int( + self.cfg.get('truncate_subject', group, params)) + except ValueError: + truncate_subject = 0 + + if truncate_subject and len(subject) > truncate_subject: + subject = subject[:(truncate_subject - 3)] + "..." + return subject + + def start(self, group, params): + """Override this method. + Begin writing an output representation. GROUP is the name of the + configuration file group which is causing this output to be produced. + PARAMS is a dictionary of any named subexpressions of regular expressions + defined in the configuration file, plus the key 'author' contains the + author of the action being reported.""" + raise NotImplementedError + + def finish(self): + """Override this method. + Flush any cached information and finish writing the output + representation.""" + raise NotImplementedError + + def write(self, output): + """Override this method. 
+ Append the literal text string OUTPUT to the output representation.""" + raise NotImplementedError + + def run(self, cmd): + """Override this method, if the default implementation is not sufficient. + Execute CMD, writing the stdout produced to the output representation.""" + # By default we choose to incorporate child stderr into the output + pipe_ob = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + close_fds=sys.platform != "win32") + + buf = pipe_ob.stdout.read(self._CHUNKSIZE) + while buf: + self.write(buf) + buf = pipe_ob.stdout.read(self._CHUNKSIZE) + + # wait on the child so we don't end up with a billion zombies + pipe_ob.wait() + + +class MailedOutput(OutputBase): + def __init__(self, cfg, repos, prefix_param): + OutputBase.__init__(self, cfg, repos, prefix_param) + + def start(self, group, params): + # whitespace (or another character) separated list of addresses + # which must be split into a clean list + to_addr_in = self.cfg.get('to_addr', group, params) + # if list of addresses starts with '[.]' + # use the character between the square brackets as split char + # else use whitespaces + if len(to_addr_in) >= 3 and to_addr_in[0] == '[' \ + and to_addr_in[2] == ']': + self.to_addrs = \ + [_f for _f in to_addr_in[3:].split(to_addr_in[1]) if _f] + else: + self.to_addrs = [_f for _f in to_addr_in.split() if _f] + self.from_addr = self.cfg.get('from_addr', group, params) \ + or self.repos.author or 'no_author' + # if the from_addr (also) starts with '[.]' (may happen if one + # map is used for both to_addr and from_addr) remove '[.]' + if len(self.from_addr) >= 3 and self.from_addr[0] == '[' \ + and self.from_addr[2] == ']': + self.from_addr = self.from_addr[3:] + self.reply_to = self.cfg.get('reply_to', group, params) + # if the reply_to (also) starts with '[.]' (may happen if one + # map is used for both to_addr and reply_to) remove '[.]' + if len(self.reply_to) >= 3 and self.reply_to[0] == '[' \ + and self.reply_to[2] == ']': 
+ self.reply_to = self.reply_to[3:] + + def mail_headers(self, group, params): + subject = self.make_subject(group, params) + try: + subject.encode('ascii') + except UnicodeError: + from email.Header import Header + subject = Header(subject, 'utf-8').encode() + hdrs = 'From: %s\n' \ + 'To: %s\n' \ + 'Subject: %s\n' \ + 'MIME-Version: 1.0\n' \ + 'Content-Type: text/plain; charset=UTF-8\n' \ + 'Content-Transfer-Encoding: 8bit\n' \ + 'X-Svn-Commit-Project: %s\n' \ + 'X-Svn-Commit-Author: %s\n' \ + 'X-Svn-Commit-Revision: %d\n' \ + 'X-Svn-Commit-Repository: %s\n' \ + % (self.from_addr, ', '.join(self.to_addrs), subject, + group, self.repos.author or 'no_author', self.repos.rev, + os.path.basename(self.repos.repos_dir)) + if self.reply_to: + hdrs = '%sReply-To: %s\n' % (hdrs, self.reply_to) + return hdrs + '\n' + + +class SMTPOutput(MailedOutput): + "Deliver a mail message to an MTA using SMTP." + + def start(self, group, params): + MailedOutput.start(self, group, params) + + self.buffer = StringIO() + self.write = self.buffer.write + + self.write(self.mail_headers(group, params)) + + def finish(self): + server = smtplib.SMTP(self.cfg.general.smtp_hostname) + if self.cfg.is_set('general.smtp_username'): + server.login(self.cfg.general.smtp_username, + self.cfg.general.smtp_password) + server.sendmail(self.from_addr, self.to_addrs, self.buffer.getvalue()) + server.quit() + + +class StandardOutput(OutputBase): + "Print the commit message to stdout." + + def __init__(self, cfg, repos, prefix_param): + OutputBase.__init__(self, cfg, repos, prefix_param) + self.write = sys.stdout.write + + def start(self, group, params): + self.write("Group: " + (group or "defaults") + "\n") + self.write("Subject: " + self.make_subject(group, params) + "\n\n") + + def finish(self): + pass + + +class PipeOutput(MailedOutput): + "Deliver a mail message to an MTA via a pipe." 
+ + def __init__(self, cfg, repos, prefix_param): + MailedOutput.__init__(self, cfg, repos, prefix_param) + + # figure out the command for delivery + self.cmd = cfg.general.mail_command.split() + + def start(self, group, params): + MailedOutput.start(self, group, params) + + ### gotta fix this. this is pretty specific to sendmail and qmail's + ### mailwrapper program. should be able to use option param substitution + cmd = self.cmd + [ '-f', self.from_addr ] + self.to_addrs + + # construct the pipe for talking to the mailer + self.pipe = subprocess.Popen(cmd, stdin=subprocess.PIPE, + close_fds=sys.platform != "win32") + self.write = self.pipe.stdin.write + + # start writing out the mail message + self.write(self.mail_headers(group, params)) + + def finish(self): + # signal that we're done sending content + self.pipe.stdin.close() + + # wait to avoid zombies + self.pipe.wait() + + +class Messenger: + def __init__(self, pool, cfg, repos, prefix_param): + self.pool = pool + self.cfg = cfg + self.repos = repos + + if cfg.is_set('general.mail_command'): + cls = PipeOutput + elif cfg.is_set('general.smtp_hostname'): + cls = SMTPOutput + else: + cls = StandardOutput + + self.output = cls(cfg, repos, prefix_param) + + +class Commit(Messenger): + def __init__(self, pool, cfg, repos): + Messenger.__init__(self, pool, cfg, repos, 'commit_subject_prefix') + + # get all the changes and sort by path + editor = svn.repos.ChangeCollector(repos.fs_ptr, repos.root_this, \ + self.pool) + e_ptr, e_baton = svn.delta.make_editor(editor, self.pool) + svn.repos.replay(repos.root_this, e_ptr, e_baton, self.pool) + + self.changelist = sorted(editor.get_changes().items()) + + log = repos.get_rev_prop(svn.core.SVN_PROP_REVISION_LOG) or '' + + # collect the set of groups and the unique sets of params for the options + self.groups = { } + for path, change in self.changelist: + for (group, params) in self.cfg.which_groups(path, log): + # turn the params into a hashable object and stash it away + 
param_list = sorted(params.items()) + # collect the set of paths belonging to this group + if (group, tuple(param_list)) in self.groups: + old_param, paths = self.groups[group, tuple(param_list)] + else: + paths = { } + paths[path] = None + self.groups[group, tuple(param_list)] = (params, paths) + + # figure out the changed directories + dirs = { } + for path, change in self.changelist: + if change.item_kind == svn.core.svn_node_dir: + dirs[path] = None + else: + idx = path.rfind('/') + if idx == -1: + dirs[''] = None + else: + dirs[path[:idx]] = None + + dirlist = list(dirs.keys()) + + commondir, dirlist = get_commondir(dirlist) + + # compose the basic subject line. later, we can prefix it. + dirlist.sort() + dirlist = ' '.join(dirlist) + if commondir: + self.output.subject = 'r%d - in %s: %s' % (repos.rev, commondir, dirlist) + else: + self.output.subject = 'r%d - %s' % (repos.rev, dirlist) + + def generate(self): + "Generate email for the various groups and option-params." + + ### the groups need to be further compressed. if the headers and + ### body are the same across groups, then we can have multiple To: + ### addresses. SMTPOutput holds the entire message body in memory, + ### so if the body doesn't change, then it can be sent N times + ### rather than rebuilding it each time. 
+ + subpool = svn.core.svn_pool_create(self.pool) + + # build a renderer, tied to our output stream + renderer = TextCommitRenderer(self.output) + + for (group, param_tuple), (params, paths) in self.groups.items(): + self.output.start(group, params) + + # generate the content for this group and set of params + generate_content(renderer, self.cfg, self.repos, self.changelist, + group, params, paths, subpool) + + self.output.finish() + svn.core.svn_pool_clear(subpool) + + svn.core.svn_pool_destroy(subpool) + + +class PropChange(Messenger): + def __init__(self, pool, cfg, repos, author, propname, action): + Messenger.__init__(self, pool, cfg, repos, 'propchange_subject_prefix') + self.author = author + self.propname = propname + self.action = action + + # collect the set of groups and the unique sets of params for the options + self.groups = { } + for (group, params) in self.cfg.which_groups('', None): + # turn the params into a hashable object and stash it away + param_list = sorted(params.items()) + self.groups[group, tuple(param_list)] = params + + self.output.subject = 'r%d - %s' % (repos.rev, propname) + + def generate(self): + actions = { 'A': 'added', 'M': 'modified', 'D': 'deleted' } + for (group, param_tuple), params in self.groups.items(): + self.output.start(group, params) + self.output.write('Author: %s\n' + 'Revision: %s\n' + 'Property Name: %s\n' + 'Action: %s\n' + '\n' + % (self.author, self.repos.rev, self.propname, + actions.get(self.action, 'Unknown (\'%s\')' \ + % self.action))) + if self.action == 'A' or self.action not in actions: + self.output.write('Property value:\n') + propvalue = self.repos.get_rev_prop(self.propname) + self.output.write(propvalue) + elif self.action == 'M': + self.output.write('Property diff:\n') + tempfile1 = tempfile.NamedTemporaryFile() + tempfile1.write(sys.stdin.read()) + tempfile1.flush() + tempfile2 = tempfile.NamedTemporaryFile() + tempfile2.write(self.repos.get_rev_prop(self.propname)) + tempfile2.flush() + 
self.output.run(self.cfg.get_diff_cmd(group, { + 'label_from' : 'old property value', + 'label_to' : 'new property value', + 'from' : tempfile1.name, + 'to' : tempfile2.name, + })) + self.output.finish() + + +def get_commondir(dirlist): + """Figure out the common portion/parent (commondir) of all the paths + in DIRLIST and return a tuple consisting of commondir, dirlist. If + a commondir is found, the dirlist returned is rooted in that + commondir. If no commondir is found, dirlist is returned unchanged, + and commondir is the empty string.""" + if len(dirlist) < 2 or '/' in dirlist: + commondir = '' + newdirs = dirlist + else: + common = dirlist[0].split('/') + for j in range(1, len(dirlist)): + d = dirlist[j] + parts = d.split('/') + for i in range(len(common)): + if i == len(parts) or common[i] != parts[i]: + del common[i:] + break + commondir = '/'.join(common) + if commondir: + # strip the common portion from each directory + l = len(commondir) + 1 + newdirs = [ ] + for d in dirlist: + if d == commondir: + newdirs.append('.') + else: + newdirs.append(d[l:]) + else: + # nothing in common, so reset the list of directories + newdirs = dirlist + + return commondir, newdirs + + +class Lock(Messenger): + def __init__(self, pool, cfg, repos, author, do_lock): + self.author = author + self.do_lock = do_lock + + Messenger.__init__(self, pool, cfg, repos, + (do_lock and 'lock_subject_prefix' + or 'unlock_subject_prefix')) + + # read all the locked paths from STDIN and strip off the trailing newlines + self.dirlist = [x.rstrip() for x in sys.stdin.readlines()] + + # collect the set of groups and the unique sets of params for the options + self.groups = { } + for path in self.dirlist: + for (group, params) in self.cfg.which_groups(path, None): + # turn the params into a hashable object and stash it away + param_list = sorted(params.items()) + # collect the set of paths belonging to this group + if (group, tuple(param_list)) in self.groups: + old_param, paths = 
self.groups[group, tuple(param_list)] + else: + paths = { } + paths[path] = None + self.groups[group, tuple(param_list)] = (params, paths) + + commondir, dirlist = get_commondir(self.dirlist) + + # compose the basic subject line. later, we can prefix it. + dirlist.sort() + dirlist = ' '.join(dirlist) + if commondir: + self.output.subject = '%s: %s' % (commondir, dirlist) + else: + self.output.subject = '%s' % (dirlist) + + # The lock comment is the same for all paths, so we can just pull + # the comment for the first path in the dirlist and cache it. + self.lock = svn.fs.svn_fs_get_lock(self.repos.fs_ptr, + self.dirlist[0], self.pool) + + def generate(self): + for (group, param_tuple), (params, paths) in self.groups.items(): + self.output.start(group, params) + + self.output.write('Author: %s\n' + '%s paths:\n' % + (self.author, self.do_lock and 'Locked' or 'Unlocked')) + + self.dirlist.sort() + for dir in self.dirlist: + self.output.write(' %s\n\n' % dir) + + if self.do_lock: + self.output.write('Comment:\n%s\n' % (self.lock.comment or '')) + + self.output.finish() + + +class DiffSelections: + def __init__(self, cfg, group, params): + self.add = False + self.copy = False + self.delete = False + self.modify = False + + gen_diffs = cfg.get('generate_diffs', group, params) + + ### Do a little dance for deprecated options. Note that even if you + ### don't have an option anywhere in your configuration file, it + ### still gets returned as non-None. 
+ if len(gen_diffs): + list = gen_diffs.split(" ") + for item in list: + if item == 'add': + self.add = True + if item == 'copy': + self.copy = True + if item == 'delete': + self.delete = True + if item == 'modify': + self.modify = True + else: + self.add = True + self.copy = True + self.delete = True + self.modify = True + ### These options are deprecated + suppress = cfg.get('suppress_deletes', group, params) + if suppress == 'yes': + self.delete = False + suppress = cfg.get('suppress_adds', group, params) + if suppress == 'yes': + self.add = False + + +class DiffURLSelections: + def __init__(self, cfg, group, params): + self.cfg = cfg + self.group = group + self.params = params + + def _get_url(self, action, repos_rev, change): + # The parameters for the URLs generation need to be placed in the + # parameters for the configuration module, otherwise we may get + # KeyError exceptions. + params = self.params.copy() + params['path'] = change.path and urllib_parse_quote(change.path) or None + params['base_path'] = change.base_path and urllib_parse_quote(change.base_path) \ + or None + params['rev'] = repos_rev + params['base_rev'] = change.base_rev + + return self.cfg.get("diff_%s_url" % action, self.group, params) + + def get_add_url(self, repos_rev, change): + return self._get_url('add', repos_rev, change) + + def get_copy_url(self, repos_rev, change): + return self._get_url('copy', repos_rev, change) + + def get_delete_url(self, repos_rev, change): + return self._get_url('delete', repos_rev, change) + + def get_modify_url(self, repos_rev, change): + return self._get_url('modify', repos_rev, change) + +def generate_content(renderer, cfg, repos, changelist, group, params, paths, + pool): + + svndate = repos.get_rev_prop(svn.core.SVN_PROP_REVISION_DATE) + ### pick a different date format? 
+ date = time.ctime(svn.core.secs_from_timestr(svndate, pool)) + + diffsels = DiffSelections(cfg, group, params) + diffurls = DiffURLSelections(cfg, group, params) + + show_nonmatching_paths = cfg.get('show_nonmatching_paths', group, params) \ + or 'yes' + + params_with_rev = params.copy() + params_with_rev['rev'] = repos.rev + commit_url = cfg.get('commit_url', group, params_with_rev) + + # figure out the lists of changes outside the selected path-space + other_added_data = other_replaced_data = other_deleted_data = \ + other_modified_data = [ ] + if len(paths) != len(changelist) and show_nonmatching_paths != 'no': + other_added_data = generate_list('A', changelist, paths, False) + other_replaced_data = generate_list('R', changelist, paths, False) + other_deleted_data = generate_list('D', changelist, paths, False) + other_modified_data = generate_list('M', changelist, paths, False) + + if len(paths) != len(changelist) and show_nonmatching_paths == 'yes': + other_diffs = DiffGenerator(changelist, paths, False, cfg, repos, date, + group, params, diffsels, diffurls, pool) + else: + other_diffs = None + + data = _data( + author=repos.author, + date=date, + rev=repos.rev, + log=repos.get_rev_prop(svn.core.SVN_PROP_REVISION_LOG) or '', + commit_url=commit_url, + added_data=generate_list('A', changelist, paths, True), + replaced_data=generate_list('R', changelist, paths, True), + deleted_data=generate_list('D', changelist, paths, True), + modified_data=generate_list('M', changelist, paths, True), + show_nonmatching_paths=show_nonmatching_paths, + other_added_data=other_added_data, + other_replaced_data=other_replaced_data, + other_deleted_data=other_deleted_data, + other_modified_data=other_modified_data, + diffs=DiffGenerator(changelist, paths, True, cfg, repos, date, group, + params, diffsels, diffurls, pool), + other_diffs=other_diffs, + ) + renderer.render(data) + + +def generate_list(changekind, changelist, paths, in_paths): + if changekind == 'A': + selection = 
lambda change: change.action == svn.repos.CHANGE_ACTION_ADD + elif changekind == 'R': + selection = lambda change: change.action == svn.repos.CHANGE_ACTION_REPLACE + elif changekind == 'D': + selection = lambda change: change.action == svn.repos.CHANGE_ACTION_DELETE + elif changekind == 'M': + selection = lambda change: change.action == svn.repos.CHANGE_ACTION_MODIFY + + items = [ ] + for path, change in changelist: + if selection(change) and (path in paths) == in_paths: + item = _data( + path=path, + is_dir=change.item_kind == svn.core.svn_node_dir, + props_changed=change.prop_changes, + text_changed=change.text_changed, + copied=(change.action == svn.repos.CHANGE_ACTION_ADD \ + or change.action == svn.repos.CHANGE_ACTION_REPLACE) \ + and change.base_path, + base_path=remove_leading_slashes(change.base_path), + base_rev=change.base_rev, + ) + items.append(item) + + return items + + +class DiffGenerator: + "This is a generator-like object returning DiffContent objects." + + def __init__(self, changelist, paths, in_paths, cfg, repos, date, group, + params, diffsels, diffurls, pool): + self.changelist = changelist + self.paths = paths + self.in_paths = in_paths + self.cfg = cfg + self.repos = repos + self.date = date + self.group = group + self.params = params + self.diffsels = diffsels + self.diffurls = diffurls + self.pool = pool + + self.diff = self.diff_url = None + + self.idx = 0 + + def __nonzero__(self): + # we always have some items + return True + + def __getitem__(self, idx): + while True: + if self.idx == len(self.changelist): + raise IndexError + + path, change = self.changelist[self.idx] + self.idx = self.idx + 1 + + diff = diff_url = None + kind = None + label1 = None + label2 = None + src_fname = None + dst_fname = None + binary = None + singular = None + content = None + + # just skip directories. they have no diffs. + if change.item_kind == svn.core.svn_node_dir: + continue + + # is this change in (or out of) the set of matched paths? 
+ if (path in self.paths) != self.in_paths: + continue + + if change.base_rev != -1: + svndate = self.repos.get_rev_prop(svn.core.SVN_PROP_REVISION_DATE, + change.base_rev) + ### pick a different date format? + base_date = time.ctime(svn.core.secs_from_timestr(svndate, self.pool)) + else: + base_date = '' + + # figure out if/how to generate a diff + + base_path = remove_leading_slashes(change.base_path) + if change.action == svn.repos.CHANGE_ACTION_DELETE: + # it was delete. + kind = 'D' + + # get the diff url, if any is specified + diff_url = self.diffurls.get_delete_url(self.repos.rev, change) + + # show the diff? + if self.diffsels.delete: + diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev), + base_path, None, None, self.pool) + + label1 = '%s\t%s\t(r%s)' % (base_path, self.date, change.base_rev) + label2 = '/dev/null\t00:00:00 1970\t(deleted)' + singular = True + + elif change.action == svn.repos.CHANGE_ACTION_ADD \ + or change.action == svn.repos.CHANGE_ACTION_REPLACE: + if base_path and (change.base_rev != -1): + + # any diff of interest? + if change.text_changed: + # this file was copied and modified. + kind = 'W' + + # get the diff url, if any is specified + diff_url = self.diffurls.get_copy_url(self.repos.rev, change) + + # show the diff? + if self.diffsels.modify: + diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev), + base_path, + self.repos.root_this, change.path, + self.pool) + label1 = '%s\t%s\t(r%s, copy source)' \ + % (base_path, base_date, change.base_rev) + label2 = '%s\t%s\t(r%s)' \ + % (change.path, self.date, self.repos.rev) + singular = False + else: + # this file was copied. 
+ kind = 'C' + if self.diffsels.copy: + diff = svn.fs.FileDiff(None, None, self.repos.root_this, + change.path, self.pool) + label1 = '/dev/null\t00:00:00 1970\t' \ + '(empty, because file is newly added)' + label2 = '%s\t%s\t(r%s, copy of r%s, %s)' \ + % (change.path, self.date, self.repos.rev, \ + change.base_rev, base_path) + singular = False + else: + # the file was added. + kind = 'A' + + # get the diff url, if any is specified + diff_url = self.diffurls.get_add_url(self.repos.rev, change) + + # show the diff? + if self.diffsels.add: + diff = svn.fs.FileDiff(None, None, self.repos.root_this, + change.path, self.pool) + label1 = '/dev/null\t00:00:00 1970\t' \ + '(empty, because file is newly added)' + label2 = '%s\t%s\t(r%s)' \ + % (change.path, self.date, self.repos.rev) + singular = True + + elif not change.text_changed: + # the text didn't change, so nothing to show. + continue + else: + # a simple modification. + kind = 'M' + + # get the diff url, if any is specified + diff_url = self.diffurls.get_modify_url(self.repos.rev, change) + + # show the diff? 
+ if self.diffsels.modify: + diff = svn.fs.FileDiff(self.repos.get_root(change.base_rev), + base_path, + self.repos.root_this, change.path, + self.pool) + label1 = '%s\t%s\t(r%s)' \ + % (base_path, base_date, change.base_rev) + label2 = '%s\t%s\t(r%s)' \ + % (change.path, self.date, self.repos.rev) + singular = False + + if diff: + binary = diff.either_binary() + if binary: + content = src_fname = dst_fname = None + else: + src_fname, dst_fname = diff.get_files() + try: + content = DiffContent(self.cfg.get_diff_cmd(self.group, { + 'label_from' : label1, + 'label_to' : label2, + 'from' : src_fname, + 'to' : dst_fname, + })) + except OSError: + # diff command does not exist, try difflib.unified_diff() + content = DifflibDiffContent(label1, label2, src_fname, dst_fname) + + # return a data item for this diff + return _data( + path=change.path, + base_path=base_path, + base_rev=change.base_rev, + diff=diff, + diff_url=diff_url, + kind=kind, + label_from=label1, + label_to=label2, + from_fname=src_fname, + to_fname=dst_fname, + binary=binary, + singular=singular, + content=content, + ) + +def _classify_diff_line(line, seen_change): + # classify the type of line. + first = line[:1] + ltype = '' + if first == '@': + seen_change = True + ltype = 'H' + elif first == '-': + if seen_change: + ltype = 'D' + else: + ltype = 'F' + elif first == '+': + if seen_change: + ltype = 'A' + else: + ltype = 'T' + elif first == ' ': + ltype = 'C' + else: + ltype = 'U' + + if line[-2] == '\r': + line=line[0:-2] + '\n' # remove carriage return + + return line, ltype, seen_change + + +class DiffContent: + "This is a generator-like object returning annotated lines of a diff." 
+ + def __init__(self, cmd): + self.seen_change = False + + # By default we choose to incorporate child stderr into the output + self.pipe = subprocess.Popen(cmd, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + close_fds=sys.platform != "win32") + + def __nonzero__(self): + # we always have some items + return True + + def __getitem__(self, idx): + if self.pipe is None: + raise IndexError + + line = self.pipe.stdout.readline() + if not line: + # wait on the child so we don't end up with a billion zombies + self.pipe.wait() + self.pipe = None + raise IndexError + + line, ltype, self.seen_change = _classify_diff_line(line, self.seen_change) + return _data( + raw=line, + text=line[1:-1], # remove indicator and newline + type=ltype, + ) + +class DifflibDiffContent(): + "This is a generator-like object returning annotated lines of a diff." + + def __init__(self, label_from, label_to, from_file, to_file): + import difflib + self.seen_change = False + fromlines = open(from_file, 'U').readlines() + tolines = open(to_file, 'U').readlines() + self.diff = difflib.unified_diff(fromlines, tolines, + label_from, label_to) + + def __nonzero__(self): + # we always have some items + return True + + def __getitem__(self, idx): + + try: + line = self.diff.next() + except StopIteration: + raise IndexError + + line, ltype, self.seen_change = _classify_diff_line(line, self.seen_change) + return _data( + raw=line, + text=line[1:-1], # remove indicator and newline + type=ltype, + ) + +class TextCommitRenderer: + "This class will render the commit mail in plain text." + + def __init__(self, output): + self.output = output + + def render(self, data): + "Render the commit defined by 'data'." 
+ + w = self.output.write + + w('Author: %s\nDate: %s\nNew Revision: %s\n' % (data.author, + data.date, + data.rev)) + + if data.commit_url: + w('URL: %s\n\n' % data.commit_url) + else: + w('\n') + + w('Log:\n%s\n\n' % data.log.strip()) + + # print summary sections + self._render_list('Added', data.added_data) + self._render_list('Replaced', data.replaced_data) + self._render_list('Deleted', data.deleted_data) + self._render_list('Modified', data.modified_data) + + if data.other_added_data or data.other_replaced_data \ + or data.other_deleted_data or data.other_modified_data: + if data.show_nonmatching_paths: + w('\nChanges in other areas also in this revision:\n') + self._render_list('Added', data.other_added_data) + self._render_list('Replaced', data.other_replaced_data) + self._render_list('Deleted', data.other_deleted_data) + self._render_list('Modified', data.other_modified_data) + else: + w('and changes in other areas\n') + + self._render_diffs(data.diffs, '') + if data.other_diffs: + self._render_diffs(data.other_diffs, + '\nDiffs of changes in other areas also' + ' in this revision:\n') + + def _render_list(self, header, data_list): + if not data_list: + return + + w = self.output.write + w(header + ':\n') + for d in data_list: + if d.is_dir: + is_dir = '/' + else: + is_dir = '' + if d.props_changed: + if d.text_changed: + props = ' (contents, props changed)' + else: + props = ' (props changed)' + else: + props = '' + w(' %s%s%s\n' % (d.path, is_dir, props)) + if d.copied: + if is_dir: + text = '' + elif d.text_changed: + text = ', changed' + else: + text = ' unchanged' + w(' - copied%s from r%d, %s%s\n' + % (text, d.base_rev, d.base_path, is_dir)) + + def _render_diffs(self, diffs, section_header): + """Render diffs. 
Write the SECTION_HEADER if there are actually + any diffs to render.""" + if not diffs: + return + w = self.output.write + section_header_printed = False + + for diff in diffs: + if not diff.diff and not diff.diff_url: + continue + if not section_header_printed: + w(section_header) + section_header_printed = True + if diff.kind == 'D': + w('\nDeleted: %s\n' % diff.base_path) + elif diff.kind == 'A': + w('\nAdded: %s\n' % diff.path) + elif diff.kind == 'C': + w('\nCopied: %s (from r%d, %s)\n' + % (diff.path, diff.base_rev, diff.base_path)) + elif diff.kind == 'W': + w('\nCopied and modified: %s (from r%d, %s)\n' + % (diff.path, diff.base_rev, diff.base_path)) + else: + # kind == 'M' + w('\nModified: %s\n' % diff.path) + + if diff.diff_url: + w('URL: %s\n' % diff.diff_url) + + if not diff.diff: + continue + + w(SEPARATOR + '\n') + + if diff.binary: + if diff.singular: + w('Binary file. No diff available.\n') + else: + w('Binary file (source and/or target). No diff available.\n') + continue + + for line in diff.content: + w(line.raw) + + +class Repository: + "Hold roots and other information about the repository." + + def __init__(self, repos_dir, rev, pool): + self.repos_dir = repos_dir + self.rev = rev + self.pool = pool + + self.repos_ptr = svn.repos.open(repos_dir, pool) + self.fs_ptr = svn.repos.fs(self.repos_ptr) + + self.roots = { } + + self.root_this = self.get_root(rev) + + self.author = self.get_rev_prop(svn.core.SVN_PROP_REVISION_AUTHOR) + + def get_rev_prop(self, propname, rev = None): + if not rev: + rev = self.rev + return svn.fs.revision_prop(self.fs_ptr, rev, propname, self.pool) + + def get_root(self, rev): + try: + return self.roots[rev] + except KeyError: + pass + root = self.roots[rev] = svn.fs.revision_root(self.fs_ptr, rev, self.pool) + return root + + +class Config: + + # The predefined configuration sections. These are omitted from the + # set of groups. 
+ _predefined = ('general', 'defaults', 'maps') + + def __init__(self, fname, repos, global_params): + cp = configparser.ConfigParser() + cp.read(fname) + + # record the (non-default) groups that we find + self._groups = [ ] + + for section in cp.sections(): + if not hasattr(self, section): + section_ob = _sub_section() + setattr(self, section, section_ob) + if section not in self._predefined: + self._groups.append(section) + else: + section_ob = getattr(self, section) + for option in cp.options(section): + # get the raw value -- we use the same format for *our* interpolation + value = cp.get(section, option, raw=1) + setattr(section_ob, option, value) + + # be compatible with old format config files + if hasattr(self.general, 'diff') and not hasattr(self.defaults, 'diff'): + self.defaults.diff = self.general.diff + if not hasattr(self, 'maps'): + self.maps = _sub_section() + + # these params are always available, although they may be overridden + self._global_params = global_params.copy() + + # prepare maps. this may remove sections from consideration as a group. + self._prep_maps() + + # process all the group sections. + self._prep_groups(repos) + + def is_set(self, option): + """Return None if the option is not set; otherwise, its value is returned. + + The option is specified as a dotted symbol, such as 'general.mail_command' + """ + ob = self + for part in option.split('.'): + if not hasattr(ob, part): + return None + ob = getattr(ob, part) + return ob + + def get(self, option, group, params): + "Get a config value with appropriate substitutions and value mapping." 
+ + # find the right value + value = None + if group: + sub = getattr(self, group) + value = getattr(sub, option, None) + if value is None: + value = getattr(self.defaults, option, '') + + # parameterize it + if params is not None: + value = value % params + + # apply any mapper + mapper = getattr(self.maps, option, None) + if mapper is not None: + value = mapper(value) + + # Apply any parameters that may now be available for + # substitution that were not before the mapping. + if value is not None and params is not None: + value = value % params + + return value + + def get_diff_cmd(self, group, args): + "Get a diff command as a list of argv elements." + ### do some better splitting to enable quoting of spaces + diff_cmd = self.get('diff', group, None).split() + + cmd = [ ] + for part in diff_cmd: + cmd.append(part % args) + return cmd + + def _prep_maps(self): + "Rewrite the [maps] options into callables that look up values." + + mapsections = [] + + for optname, mapvalue in vars(self.maps).items(): + if mapvalue[:1] == '[': + # a section is acting as a mapping + sectname = mapvalue[1:-1] + if not hasattr(self, sectname): + raise UnknownMappingSection(sectname) + # construct a lambda to look up the given value as an option name, + # and return the option's value. if the option is not present, + # then just return the value unchanged. + setattr(self.maps, optname, + lambda value, + sect=getattr(self, sectname): getattr(sect, + value.lower(), + value)) + # mark for removal when all optnames are done + if sectname not in mapsections: + mapsections.append(sectname) + + # elif test for other mapper types. 
possible examples: + # dbm:filename.db + # file:two-column-file.txt + # ldap:some-query-spec + # just craft a mapper function and insert it appropriately + + else: + raise UnknownMappingSpec(mapvalue) + + # remove each mapping section from consideration as a group + for sectname in mapsections: + self._groups.remove(sectname) + + + def _prep_groups(self, repos): + self._group_re = [ ] + + repos_dir = os.path.abspath(repos.repos_dir) + + # compute the default repository-based parameters. start with some + # basic parameters, then bring in the regex-based params. + self._default_params = self._global_params + + try: + match = re.match(self.defaults.for_repos, repos_dir) + if match: + self._default_params = self._default_params.copy() + self._default_params.update(match.groupdict()) + except AttributeError: + # there is no self.defaults.for_repos + pass + + # select the groups that apply to this repository + for group in self._groups: + sub = getattr(self, group) + params = self._default_params + if hasattr(sub, 'for_repos'): + match = re.match(sub.for_repos, repos_dir) + if not match: + continue + params = params.copy() + params.update(match.groupdict()) + + # if a matching rule hasn't been given, then use the empty string + # as it will match all paths + for_paths = getattr(sub, 'for_paths', '') + exclude_paths = getattr(sub, 'exclude_paths', None) + if exclude_paths: + exclude_paths_re = re.compile(exclude_paths) + else: + exclude_paths_re = None + + # check search_logmsg re + search_logmsg = getattr(sub, 'search_logmsg', None) + if search_logmsg is not None: + search_logmsg_re = re.compile(search_logmsg) + else: + search_logmsg_re = None + + self._group_re.append((group, + re.compile(for_paths), + exclude_paths_re, + params, + search_logmsg_re)) + + # after all the groups are done, add in the default group + try: + self._group_re.append((None, + re.compile(self.defaults.for_paths), + None, + self._default_params, + None)) + except AttributeError: + # there is no 
self.defaults.for_paths + pass + + def which_groups(self, path, logmsg): + "Return the path's associated groups." + groups = [] + for group, pattern, exclude_pattern, repos_params, search_logmsg_re in self._group_re: + match = pattern.match(path) + if match: + if exclude_pattern and exclude_pattern.match(path): + continue + params = repos_params.copy() + params.update(match.groupdict()) + + if search_logmsg_re is None: + groups.append((group, params)) + else: + if logmsg is None: + logmsg = '' + + for match in search_logmsg_re.finditer(logmsg): + # Add captured variables to (a copy of) params + msg_params = params.copy() + msg_params.update(match.groupdict()) + groups.append((group, msg_params)) + + if not groups: + groups.append((None, self._default_params)) + + return groups + + +class _sub_section: + pass + +class _data: + "Helper class to define an attribute-based hunk o' data." + def __init__(self, **kw): + vars(self).update(kw) + +class MissingConfig(Exception): + pass +class UnknownMappingSection(Exception): + pass +class UnknownMappingSpec(Exception): + pass +class UnknownSubcommand(Exception): + pass + + +if __name__ == '__main__': + def usage(): + scriptname = os.path.basename(sys.argv[0]) + sys.stderr.write( +"""USAGE: %s commit REPOS REVISION [CONFIG-FILE] + %s propchange REPOS REVISION AUTHOR REVPROPNAME [CONFIG-FILE] + %s propchange2 REPOS REVISION AUTHOR REVPROPNAME ACTION [CONFIG-FILE] + %s lock REPOS AUTHOR [CONFIG-FILE] + %s unlock REPOS AUTHOR [CONFIG-FILE] + +If no CONFIG-FILE is provided, the script will first search for a mailer.conf +file in REPOS/conf/. Failing that, it will search the directory in which +the script itself resides. + +ACTION was added as a fifth argument to the post-revprop-change hook +in Subversion 1.2.0. Its value is one of 'A', 'M' or 'D' to indicate +if the property was added, modified or deleted, respectively. 
+ +""" % (scriptname, scriptname, scriptname, scriptname, scriptname)) + sys.exit(1) + + # Command list: subcommand -> number of arguments expected (not including + # the repository directory and config-file) + cmd_list = {'commit' : 1, + 'propchange' : 3, + 'propchange2': 4, + 'lock' : 1, + 'unlock' : 1, + } + + config_fname = None + argc = len(sys.argv) + if argc < 3: + usage() + + cmd = sys.argv[1] + repos_dir = svn.core.svn_path_canonicalize(sys.argv[2]) + try: + expected_args = cmd_list[cmd] + except KeyError: + usage() + + if argc < (expected_args + 3): + usage() + elif argc > expected_args + 4: + usage() + elif argc == (expected_args + 4): + config_fname = sys.argv[expected_args + 3] + + # Settle on a config file location, and open it. + if config_fname is None: + # Default to REPOS-DIR/conf/mailer.conf. + config_fname = os.path.join(repos_dir, 'conf', 'mailer.conf') + if not os.path.exists(config_fname): + # Okay. Look for 'mailer.conf' as a sibling of this script. + config_fname = os.path.join(os.path.dirname(sys.argv[0]), 'mailer.conf') + if not os.path.exists(config_fname): + raise MissingConfig(config_fname) + + svn.core.run_app(main, cmd, config_fname, repos_dir, + sys.argv[3:3+expected_args]) + +# ------------------------------------------------------------------------ +# TODO +# +# * add configuration options +# - each group defines delivery info: +# o whether to set Reply-To and/or Mail-Followup-To +# (btw: it is legal do set Reply-To since this is the originator of the +# mail; i.e. 
different from MLMs that munge it) +# - each group defines content construction: +# o max size of diff before trimming +# o max size of entire commit message before truncation +# - per-repository configuration +# o extra config living in repos +# o optional, non-mail log file +# o look up authors (username -> email; for the From: header) in a +# file(s) or DBM +# * get rid of global functions that should properly be class methods diff --git a/tools/hook-scripts/mailer/tests/mailer-init.sh b/tools/hook-scripts/mailer/tests/mailer-init.sh new file mode 100755 index 0000000..f51b4c5 --- /dev/null +++ b/tools/hook-scripts/mailer/tests/mailer-init.sh @@ -0,0 +1,115 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# +# +# mailer-init.sh: create and initialize a repository for the mailer tests +# +# USAGE: ./mailer-init.sh +# + +scripts="`dirname $0`" +scripts="`cd $scripts && pwd`" + +d=$scripts/mailer-init.$$ +mkdir $d +cd $d +echo "test directory is: $d" + +svnadmin create repos +svn co file://`pwd`/repos wc +cd wc + +# create a bunch of dirs and files +mkdir dir1 dir2 +echo file1 > file1 +echo file2 > file2 +echo file3 > dir1/file3 +echo file4 > dir1/file4 +echo file5 > dir2/file5 +echo file6 > dir2/file6 +svn add * +svn commit -m "initial load" + +# make some changes and set some properties +svn ps prop1 propval1 file1 +echo change C1 >> file2 +svn ps svn:keywords Id file2 +svn ps svn:new_svn_prop val file2 +svn ps prop1 propval1 file2 +svn ps prop3 propval3 dir1 +echo change C2 >> dir2/file5 +svn commit -m "two file changes. Fixes Blah#123" + +# copy a file and a dir and change property +svn cp file1 dir2/file7 +svn cp dir1 dir3 +svn ps prop3 propval4 dir3 +svn commit -m "two copies" + +# copy and modify a file +svn cp file1 dir3/file8 +echo change C3 >> dir3/file8 +svn commit -m "copied and changed" + +# change and delete properties +svn ps svn:keywords Date file2 +svn ps prop2 propval2 file2 +svn pd prop1 file2 +svn pd svn:new_svn_prop file2 +svn ps prop3 propval4 dir1 +svn pd prop3 dir3 +svn up # make sure our dirs are up to date +svn commit -m "changes and deletes of properties" + +# add a file, add a dir, and make a change +echo file9 > file9 +svn add file9 +svn mkdir dir4 +echo change C4 >> dir1/file3 +svn commit -m "mixed addition and change. 
Fixes Blaz#456 Blah#987" + +# add a file, add a dir, delete a file, delete a dir, and make a change +echo file10 > dir1/file10 +svn add dir1/file10 +svn mkdir dir3/dir5 +svn rm file2 dir2 +echo change C5 >> dir3/file3 +svn up # make sure our dirs are up to date +svn commit -m "adds, deletes, and a change" + +# copy a dir and change a file in it +svn cp dir3 dir6 +echo change C6 >> dir6/file4 +svn commit -m "copy dir, then make a change" + +# add a binary file and set property to binary value +echo -e "\x00\x01\x02\x03\x04" > file11 +svn add file11 +svn ps prop2 -F file11 file9 +svn commit -m "add binary file" + +# change the binary file and set property to non binary value +echo -e "\x20\x01\x02\x20" > file11 +svn ps prop2 propval2 file9 +svn commit -m "change binary file" + +# tweak the commit dates to known quantities +$scripts/mailer-tweak.py ../repos diff --git a/tools/hook-scripts/mailer/tests/mailer-t1.output b/tools/hook-scripts/mailer/tests/mailer-t1.output new file mode 100644 index 0000000..c8f8701 --- /dev/null +++ b/tools/hook-scripts/mailer/tests/mailer-t1.output @@ -0,0 +1,751 @@ +Group: file plus other areas +Subject: r1 - dir1 dir2 + +Author: mailer test +Date: Sun Sep 9 01:46:40 2001 +New Revision: 1 + +Log: +initial load + +Added: + file1 + file2 + +Changes in other areas also in this revision: +Added: + dir1/ + dir1/file3 + dir1/file4 + dir2/ + dir2/file5 + dir2/file6 + +Added: file1 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file1 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file1 + +Added: file2 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file2 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file2 + +Diffs of changes in other areas also in this revision: + +Added: dir1/file3 
+============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir1/file3 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file3 + +Added: dir1/file4 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir1/file4 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file4 + +Added: dir2/file5 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir2/file5 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file5 + +Added: dir2/file6 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir2/file6 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file6 +Group: All +Subject: r1 - dir1 dir2 + +Author: mailer test +Date: Sun Sep 9 01:46:40 2001 +New Revision: 1 + +Log: +initial load + +Added: + dir1/ + dir1/file3 + dir1/file4 + dir2/ + dir2/file5 + dir2/file6 + file1 + file2 + +Added: dir1/file3 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir1/file3 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file3 + +Added: dir1/file4 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir1/file4 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file4 + +Added: dir2/file5 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir2/file5 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file5 + +Added: dir2/file6 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file 
is newly added) ++++ dir2/file6 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file6 + +Added: file1 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file1 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file1 + +Added: file2 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file2 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file2 +Group: file +Subject: r1 - dir1 dir2 + +Author: mailer test +Date: Sun Sep 9 01:46:40 2001 +New Revision: 1 + +Log: +initial load + +Added: + file1 + file2 + +Added: file1 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file1 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file1 + +Added: file2 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file2 Sun Sep 9 01:46:40 2001 (r1) +@@ -0,0 +1 @@ ++file2 +Group: bugtracker +Subject: Fix for Blah#123: r2 - dir1 dir2 + +Author: mailer test +Date: Sun Sep 9 04:33:20 2001 +New Revision: 2 + +Log: +two file changes. 
Fixes Blah#123 + +Modified: + dir1/ (props changed) + dir2/file5 + file1 (props changed) + file2 (contents, props changed) + +Modified: dir2/file5 +============================================================================== +--- dir2/file5 Sun Sep 9 01:46:40 2001 (r1) ++++ dir2/file5 Sun Sep 9 04:33:20 2001 (r2) +@@ -1 +1,2 @@ + file5 ++change C2 + +Modified: file2 +============================================================================== +--- file2 Sun Sep 9 01:46:40 2001 (r1) ++++ file2 Sun Sep 9 04:33:20 2001 (r2) +@@ -1 +1,2 @@ + file2 ++change C1 +Group: All +Subject: r2 - dir1 dir2 + +Author: mailer test +Date: Sun Sep 9 04:33:20 2001 +New Revision: 2 + +Log: +two file changes. Fixes Blah#123 + +Modified: + dir1/ (props changed) + dir2/file5 + file1 (props changed) + file2 (contents, props changed) + +Modified: dir2/file5 +============================================================================== +--- dir2/file5 Sun Sep 9 01:46:40 2001 (r1) ++++ dir2/file5 Sun Sep 9 04:33:20 2001 (r2) +@@ -1 +1,2 @@ + file5 ++change C2 + +Modified: file2 +============================================================================== +--- file2 Sun Sep 9 01:46:40 2001 (r1) ++++ file2 Sun Sep 9 04:33:20 2001 (r2) +@@ -1 +1,2 @@ + file2 ++change C1 +Group: file plus other areas +Subject: r2 - dir1 dir2 + +Author: mailer test +Date: Sun Sep 9 04:33:20 2001 +New Revision: 2 + +Log: +two file changes. 
Fixes Blah#123 + +Modified: + file1 (props changed) + file2 (contents, props changed) + +Changes in other areas also in this revision: +Modified: + dir1/ (props changed) + dir2/file5 + +Modified: file2 +============================================================================== +--- file2 Sun Sep 9 01:46:40 2001 (r1) ++++ file2 Sun Sep 9 04:33:20 2001 (r2) +@@ -1 +1,2 @@ + file2 ++change C1 + +Diffs of changes in other areas also in this revision: + +Modified: dir2/file5 +============================================================================== +--- dir2/file5 Sun Sep 9 01:46:40 2001 (r1) ++++ dir2/file5 Sun Sep 9 04:33:20 2001 (r2) +@@ -1 +1,2 @@ + file5 ++change C2 +Group: file +Subject: r2 - dir1 dir2 + +Author: mailer test +Date: Sun Sep 9 04:33:20 2001 +New Revision: 2 + +Log: +two file changes. Fixes Blah#123 + +Modified: + file1 (props changed) + file2 (contents, props changed) + +Modified: file2 +============================================================================== +--- file2 Sun Sep 9 01:46:40 2001 (r1) ++++ file2 Sun Sep 9 04:33:20 2001 (r2) +@@ -1 +1,2 @@ + file2 ++change C1 +Group: All +Subject: r3 - dir2 dir3 + +Author: mailer test +Date: Sun Sep 9 07:20:00 2001 +New Revision: 3 + +Log: +two copies + +Added: + dir2/file7 + - copied unchanged from r2, file1 + dir3/ (props changed) + - copied from r2, dir1/ + +Copied: dir2/file7 (from r2, file1) +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir2/file7 Sun Sep 9 07:20:00 2001 (r3, copy of r2, file1) +@@ -0,0 +1 @@ ++file1 +Group: All +Subject: r4 - dir3 + +Author: mailer test +Date: Sun Sep 9 10:06:40 2001 +New Revision: 4 + +Log: +copied and changed + +Added: + dir3/file8 + - copied, changed from r2, file1 + +Copied and modified: dir3/file8 (from r2, file1) +============================================================================== +--- file1 Sun Sep 9 04:33:20 2001 (r2, copy 
source) ++++ dir3/file8 Sun Sep 9 10:06:40 2001 (r4) +@@ -1 +1,2 @@ + file1 ++change C3 +Group: file plus other areas +Subject: r5 - dir1 dir3 + +Author: mailer test +Date: Sun Sep 9 12:53:20 2001 +New Revision: 5 + +Log: +changes and deletes of properties + +Modified: + file2 (props changed) + +Changes in other areas also in this revision: +Modified: + dir1/ (props changed) + dir3/ (props changed) +Group: All +Subject: r5 - dir1 dir3 + +Author: mailer test +Date: Sun Sep 9 12:53:20 2001 +New Revision: 5 + +Log: +changes and deletes of properties + +Modified: + dir1/ (props changed) + dir3/ (props changed) + file2 (props changed) +Group: file +Subject: r5 - dir1 dir3 + +Author: mailer test +Date: Sun Sep 9 12:53:20 2001 +New Revision: 5 + +Log: +changes and deletes of properties + +Modified: + file2 (props changed) +Group: file +Subject: r6 - dir1 dir4 + +Author: mailer test +Date: Sun Sep 9 15:40:00 2001 +New Revision: 6 + +Log: +mixed addition and change. Fixes Blaz#456 Blah#987 + +Added: + file9 + +Added: file9 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file9 Sun Sep 9 15:40:00 2001 (r6) +@@ -0,0 +1 @@ ++file9 +Group: file plus other areas +Subject: r6 - dir1 dir4 + +Author: mailer test +Date: Sun Sep 9 15:40:00 2001 +New Revision: 6 + +Log: +mixed addition and change. 
Fixes Blaz#456 Blah#987 + +Added: + file9 + +Changes in other areas also in this revision: +Added: + dir4/ +Modified: + dir1/file3 + +Added: file9 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file9 Sun Sep 9 15:40:00 2001 (r6) +@@ -0,0 +1 @@ ++file9 + +Diffs of changes in other areas also in this revision: + +Modified: dir1/file3 +============================================================================== +--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5) ++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6) +@@ -1 +1,2 @@ + file3 ++change C4 +Group: All +Subject: r6 - dir1 dir4 + +Author: mailer test +Date: Sun Sep 9 15:40:00 2001 +New Revision: 6 + +Log: +mixed addition and change. Fixes Blaz#456 Blah#987 + +Added: + dir4/ + file9 +Modified: + dir1/file3 + +Modified: dir1/file3 +============================================================================== +--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5) ++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6) +@@ -1 +1,2 @@ + file3 ++change C4 + +Added: file9 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file9 Sun Sep 9 15:40:00 2001 (r6) +@@ -0,0 +1 @@ ++file9 +Group: bugtracker +Subject: Fix for Blah#987: r6 - dir1 dir4 + +Author: mailer test +Date: Sun Sep 9 15:40:00 2001 +New Revision: 6 + +Log: +mixed addition and change. 
Fixes Blaz#456 Blah#987 + +Added: + dir4/ + file9 +Modified: + dir1/file3 + +Modified: dir1/file3 +============================================================================== +--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5) ++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6) +@@ -1 +1,2 @@ + file3 ++change C4 + +Added: file9 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file9 Sun Sep 9 15:40:00 2001 (r6) +@@ -0,0 +1 @@ ++file9 +Group: bugtracker +Subject: Fix for Blaz#456: r6 - dir1 dir4 + +Author: mailer test +Date: Sun Sep 9 15:40:00 2001 +New Revision: 6 + +Log: +mixed addition and change. Fixes Blaz#456 Blah#987 + +Added: + dir4/ + file9 +Modified: + dir1/file3 + +Modified: dir1/file3 +============================================================================== +--- dir1/file3 Sun Sep 9 12:53:20 2001 (r5) ++++ dir1/file3 Sun Sep 9 15:40:00 2001 (r6) +@@ -1 +1,2 @@ + file3 ++change C4 + +Added: file9 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ file9 Sun Sep 9 15:40:00 2001 (r6) +@@ -0,0 +1 @@ ++file9 +Group: file plus other areas +Subject: r7 - dir1 dir2 dir3 dir3/dir5 + +Author: mailer test +Date: Sun Sep 9 18:26:40 2001 +New Revision: 7 + +Log: +adds, deletes, and a change + +Deleted: + file2 + +Changes in other areas also in this revision: +Added: + dir1/file10 + dir3/dir5/ +Deleted: + dir2/ +Modified: + dir3/file3 + +Deleted: file2 +============================================================================== +--- file2 Sun Sep 9 18:26:40 2001 (r6) ++++ /dev/null 00:00:00 1970 (deleted) +@@ -1,2 +0,0 @@ +-file2 +-change C1 + +Diffs of changes in other areas also in this revision: + +Added: dir1/file10 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ 
dir1/file10 Sun Sep 9 18:26:40 2001 (r7) +@@ -0,0 +1 @@ ++file10 + +Modified: dir3/file3 +============================================================================== +--- dir3/file3 Sun Sep 9 15:40:00 2001 (r6) ++++ dir3/file3 Sun Sep 9 18:26:40 2001 (r7) +@@ -1 +1,2 @@ + file3 ++change C5 +Group: All +Subject: r7 - dir1 dir2 dir3 dir3/dir5 + +Author: mailer test +Date: Sun Sep 9 18:26:40 2001 +New Revision: 7 + +Log: +adds, deletes, and a change + +Added: + dir1/file10 + dir3/dir5/ +Deleted: + dir2/ + file2 +Modified: + dir3/file3 + +Added: dir1/file10 +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir1/file10 Sun Sep 9 18:26:40 2001 (r7) +@@ -0,0 +1 @@ ++file10 + +Modified: dir3/file3 +============================================================================== +--- dir3/file3 Sun Sep 9 15:40:00 2001 (r6) ++++ dir3/file3 Sun Sep 9 18:26:40 2001 (r7) +@@ -1 +1,2 @@ + file3 ++change C5 + +Deleted: file2 +============================================================================== +--- file2 Sun Sep 9 18:26:40 2001 (r6) ++++ /dev/null 00:00:00 1970 (deleted) +@@ -1,2 +0,0 @@ +-file2 +-change C1 +Group: file +Subject: r7 - dir1 dir2 dir3 dir3/dir5 + +Author: mailer test +Date: Sun Sep 9 18:26:40 2001 +New Revision: 7 + +Log: +adds, deletes, and a change + +Deleted: + file2 + +Deleted: file2 +============================================================================== +--- file2 Sun Sep 9 18:26:40 2001 (r6) ++++ /dev/null 00:00:00 1970 (deleted) +@@ -1,2 +0,0 @@ +-file2 +-change C1 +Group: All +Subject: r8 - in dir6: . 
dir5 + +Author: mailer test +Date: Sun Sep 9 21:13:20 2001 +New Revision: 8 + +Log: +copy dir, then make a change + +Added: + dir6/ + - copied from r6, dir3/ + dir6/dir5/ + - copied from r7, dir3/dir5/ +Replaced: + dir6/file3 + - copied unchanged from r7, dir3/file3 +Modified: + dir6/file4 + +Copied: dir6/file3 (from r7, dir3/file3) +============================================================================== +--- /dev/null 00:00:00 1970 (empty, because file is newly added) ++++ dir6/file3 Sun Sep 9 21:13:20 2001 (r8, copy of r7, dir3/file3) +@@ -0,0 +1,2 @@ ++file3 ++change C5 + +Modified: dir6/file4 +============================================================================== +--- dir3/file4 Sun Sep 9 15:40:00 2001 (r6) ++++ dir6/file4 Sun Sep 9 21:13:20 2001 (r8) +@@ -1 +1,2 @@ + file4 ++change C6 +Group: file plus other areas +Subject: r9 - + +Author: mailer test +Date: Mon Sep 10 00:00:00 2001 +New Revision: 9 + +Log: +add binary file + +Added: + file11 (contents, props changed) +Modified: + file9 (props changed) + +Added: file11 +============================================================================== +Binary file. No diff available. +Group: All +Subject: r9 - + +Author: mailer test +Date: Mon Sep 10 00:00:00 2001 +New Revision: 9 + +Log: +add binary file + +Added: + file11 (contents, props changed) +Modified: + file9 (props changed) + +Added: file11 +============================================================================== +Binary file. No diff available. +Group: file +Subject: r9 - + +Author: mailer test +Date: Mon Sep 10 00:00:00 2001 +New Revision: 9 + +Log: +add binary file + +Added: + file11 (contents, props changed) +Modified: + file9 (props changed) + +Added: file11 +============================================================================== +Binary file. No diff available. 
+Group: file plus other areas +Subject: r10 - + +Author: mailer test +Date: Mon Sep 10 02:46:40 2001 +New Revision: 10 + +Log: +change binary file + +Modified: + file11 + file9 (props changed) + +Modified: file11 +============================================================================== +Binary file (source and/or target). No diff available. +Group: All +Subject: r10 - + +Author: mailer test +Date: Mon Sep 10 02:46:40 2001 +New Revision: 10 + +Log: +change binary file + +Modified: + file11 + file9 (props changed) + +Modified: file11 +============================================================================== +Binary file (source and/or target). No diff available. +Group: file +Subject: r10 - + +Author: mailer test +Date: Mon Sep 10 02:46:40 2001 +New Revision: 10 + +Log: +change binary file + +Modified: + file11 + file9 (props changed) + +Modified: file11 +============================================================================== +Binary file (source and/or target). No diff available. diff --git a/tools/hook-scripts/mailer/tests/mailer-t1.sh b/tools/hook-scripts/mailer/tests/mailer-t1.sh new file mode 100755 index 0000000..ff4b6ca --- /dev/null +++ b/tools/hook-scripts/mailer/tests/mailer-t1.sh @@ -0,0 +1,60 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# mailer-t1.sh: test #1 for the mailer.py script +# +# This test generates "email" for each revision in the repository, +# concatenating them into one big blob, which is then compared against +# a known output. +# +# Note: mailer-tweak.py must have been run to make the test outputs +# consistent and reproducible +# +# USAGE: ./mailer-t1.sh REPOS MAILER-SCRIPT +# + +if test "$#" != 2; then + echo "USAGE: ./mailer-t1.sh REPOS MAILER-SCRIPT" + exit 1 +fi + +scripts="`dirname $0`" +scripts="`cd $scripts && pwd`" + +glom=$scripts/mailer-t1.current +orig=$scripts/mailer-t1.output +conf=$scripts/mailer.conf +rm -f $glom + +export TZ=GST + +youngest="`svnlook youngest $1`" +for rev in `python -c "print(\" \".join(map(str, range(1,$youngest+1))))"`; do + $2 commit $1 $rev $conf >> $glom +done + +echo "current mailer.py output in: $glom" + +dos2unix $glom + +echo diff -q $orig $glom +diff -q $orig $glom && echo "SUCCESS: no differences detected" diff --git a/tools/hook-scripts/mailer/tests/mailer-tweak.py b/tools/hook-scripts/mailer/tests/mailer-tweak.py new file mode 100755 index 0000000..0805980 --- /dev/null +++ b/tools/hook-scripts/mailer/tests/mailer-tweak.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +# +# mailer-tweak.py: tweak the svn:date and svn:author properties +# on all revisions +# +# We need constant dates and authors for the revisions so that we can +# consistently compare an output against a known quantity. +# +# USAGE: ./mailer-tweak.py REPOS +# + + +import sys +import os +import getopt + +from svn import fs, core + +DATE_BASE = 1000000000 +DATE_INCR = 10000 + + +def tweak_dates(pool, home='.'): + db_path = os.path.join(home, 'db') + if not os.path.exists(db_path): + db_path = home + + fsob = fs.new(None, pool) + fs.open_berkeley(fsob, db_path) + + for i in range(fs.youngest_rev(fsob, pool)): + # convert secs into microseconds, then a string + date = core.svn_time_to_cstring((DATE_BASE+i*DATE_INCR) * 1000000L, pool) + #print date + fs.change_rev_prop(fsob, i+1, core.SVN_PROP_REVISION_DATE, date, pool) + fs.change_rev_prop(fsob, i+1, core.SVN_PROP_REVISION_AUTHOR, 'mailer test', pool) + +def main(): + if len(sys.argv) != 2: + print('USAGE: %s REPOS' % sys.argv[0]) + sys.exit(1) + + core.run_app(tweak_dates, sys.argv[1]) + +if __name__ == '__main__': + main() diff --git a/tools/hook-scripts/mailer/tests/mailer.conf b/tools/hook-scripts/mailer/tests/mailer.conf new file mode 100644 index 0000000..ac25f24 --- /dev/null +++ b/tools/hook-scripts/mailer/tests/mailer.conf @@ -0,0 +1,365 @@ +# +# mailer.conf: example configuration file for mailer.py +# +# $Id: mailer.conf 1086097 2011-03-28 02:14:33Z gmcdonald $ + +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# + +[general] + +# The [general].diff option is now DEPRECATED. +# Instead use [defaults].diff . + +# +# One delivery method must be chosen. mailer.py will prefer using the +# "mail_command" option. If that option is empty or commented out, +# then it checks whether the "smtp_hostname" option has been +# specified. If neither option is set, then the commit message is +# delivered to stdout. +# + +# This command will be invoked with destination addresses on the command +# line, and the message piped into it. +#mail_command = /usr/sbin/sendmail + +# This option specifies the hostname for delivery via SMTP. +#smtp_hostname = localhost + +# Username and password for SMTP servers requiring authorisation. +#smtp_username = example +#smtp_password = example + +# -------------------------------------------------------------------------- + +# +# CONFIGURATION GROUPS +# +# Any sections other than [general], [defaults], [maps] and sections +# referred to within [maps] are considered to be user-defined groups +# which override values in the [defaults] section. +# These groups are selected using the following two options: +# +# for_repos +# for_paths +# +# Both options specify a regular expression. The former is matched against +# the absolute path to the repository the mailer is operating against. 
The +# second is matched against *every* path (files and dirs) that was modified +# during the commit. +# +# The options specified in the [defaults] section are always selected. The +# presence of a non-matching for_repos has no relevance. Note that you may +# still use a for_repos value to extract useful information (more on this +# later). Any user-defined groups without a for_repos, or which contains +# a matching for_repos, will be selected for potential use. +# +# The subset of user-defined groups identified by the repository are further +# refined based on the for_paths option. A group is selected if at least +# one path(*) in the commit matches the for_paths regular expression. Note +# that the paths are relative to the root of the repository and do not +# have a leading slash. +# +# (*) Actually, each path will select just one group. Thus, it is possible +# that one group will match against all paths, while another group matches +# none of the paths, even though its for_paths would have selected some of +# the paths in the commit. +# +# Groups are matched in no particular order. Do not depend upon their +# order within this configuration file. The values from [defaults] will +# be used if no group is matched or an option in a group does not override +# the corresponding value from [defaults]. +# +# Generally, a commit email is generated for each group that has been +# selected. The script will try to minimize mails, so it may be possible +# that a single message will be generated to multiple recipients. In +# addition, it is possible for multiple messages per group to be generated, +# based on the various substitutions that are performed (see the following +# section). +# +# +# SUBSTITUTIONS +# +# The regular expressions can use the "named group" syntax to extract +# interesting pieces of the repository or commit path. These named values +# can then be substituted in the option values during mail generation. 
+# +# For example, let's say that you have a repository with a top-level +# directory named "clients", with several client projects underneath: +# +# REPOS/ +# clients/ +# gsvn/ +# rapidsvn/ +# winsvn/ +# +# The client name can be extracted with a regular expression like: +# +# for_paths = clients/(?P<client>[^/]*)($|/) +# +# The substitution is performed using Python's dict-based string +# interpolation syntax: +# +# to_addr = commits@%(client)s.tigris.org +# +# The %(NAME)s syntax will substitute whatever value for NAME was captured +# in the for_repos and for_paths regular expressions. The set of names +# available is obtained from the following set of regular expressions: +# +# [defaults].for_repos (if present) +# [GROUP].for_repos (if present in the user-defined group "GROUP") +# [GROUP].for_paths (if present in the user-defined group "GROUP") +# +# The names from the regexes later in the list override the earlier names. +# If none of the groups match, but a for_paths is present in [defaults], +# then its extracted names will be available. +# +# Note that each unique set of names for substitution will generate an +# email. In the above example, if a commit modified files in all three +# client subdirectories, then an email will be sent to all three commits@ +# mailing lists on tigris.org. +# +# The substitution variable "author" is provided by default, and is set +# to the author name passed to mailer.py for revprop changes or the +# author defined for a revision; if neither is available, then it is +# set to "no_author". Thus, you might define a line like: +# +# from_addr = %(author)s@example.com +# +# +# SUMMARY +# +# While mailer.py will work to minimize the number of mail messages +# generated, a single commit can potentially generate a large number +# of variants of a commit message. 
The criteria for generating messages +# is based on: +# +# groups selected by for_repos +# groups selected by for_paths +# unique sets of parameters extracted by the above regular expressions +# + +[defaults] + +# This is not passed to the shell, so do not use shell metacharacters. +# The command is split around whitespace, so if you want to include +# whitespace in the command, then ### something ###. +diff = /usr/bin/diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s + +# The default prefix for the Subject: header for commits. +commit_subject_prefix = + +# The default prefix for the Subject: header for propchanges. +propchange_subject_prefix = + +# The default prefix for the Subject: header for locks. +lock_subject_prefix = + +# The default prefix for the Subject: header for unlocks. +unlock_subject_prefix = + + +# The default From: address for messages. If the from_addr is not +# specified or it is specified but there is no text after the `=', +# then the revision's author is used as the from address. If the +# revision author is not specified, such as when a commit is done +# without requiring authentication and authorization, then the string +# 'no_author' is used. You can specify a default from_addr here and +# if you want to have a particular for_repos group use the author as +# the from address, you can use "from_addr =". +from_addr = invalid@example.com + +# The default To: addresses for message. One or more addresses, +# separated by whitespace (no commas). +# NOTE: If you want to use a different character for separating the +# addresses put it in front of the addresses included in square +# brackets '[ ]'. +to_addr = invalid@example.com + +# If this is set, then a Reply-To: will be inserted into the message. +reply_to = + +# Specify which types of repository changes mailer.py will create +# diffs for. Valid options are any combination of +# 'add copy modify delete', or 'none' to never create diffs. 
+# If the generate_diffs option is empty, the selection is controlled +# by the deprecated options suppress_deletes and suppress_adds. +# Note that this only affects the display of diffs - all changes are +# mentioned in the summary of changed paths at the top of the message, +# regardless of this option's value. +# Meaning of the possible values: +# add: generates diffs for all added paths +# copy: generates diffs for all copied paths +# which were not changed after copying +# modify: generates diffs for all modified paths, including paths that were +# copied and modified afterwards (within the same commit) +# delete: generates diffs for all removed paths +generate_diffs = add copy modify delete + +# Commit URL construction. This adds a URL to the top of the message +# that can lead the reader to a Trac, ViewVC or other view of the +# commit as a whole. +# +# The available substitution variable is: rev +#commit_url = http://diffs.server.com/trac/software/changeset/%(rev)s + +# Diff URL construction. For the configured diff URL types, the diff +# section (which follows the message header) will include the URL +# relevant to the change type, even if actual diff generation for that +# change type is disabled (per the generate_diffs option). +# +# Available substitution variables are: path, base_path, rev, base_rev +#diff_add_url = +#diff_copy_url = +#diff_modify_url = http://diffs.server.com/?p1=%(base_path)s&p2=%(path)s +#diff_delete_url = + +# When set to "yes", the mailer will suppress the creation of a diff which +# deletes all the lines in the file. If this is set to anything else, or +# is simply commented out, then the diff will be inserted. Note that the +# deletion is always mentioned in the message header, regardless of this +# option's value. +### DEPRECATED (if generate_diffs is not empty, this option is ignored) +#suppress_deletes = yes + +# When set to "yes", the mailer will suppress the creation of a diff which +# adds all the lines in the file. 
If this is set to anything else, or +# is simply commented out, then the diff will be inserted. Note that the +# addition is always mentioned in the message header, regardless of this +# option's value. +### DEPRECATED (if generate_diffs is not empty, this option is ignored) +#suppress_adds = yes + +# A revision is reported on if any of its changed paths match the +# for_paths option. If only some of the changed paths of a revision +# match, this variable controls the behaviour for the non-matching +# paths. Possible values are: +# +# yes: (Default) Show in both summary and diffs. +# summary: Show the changed paths in the summary, but omit the diffs. +# no: Show nothing more than a note saying "and changes in other areas" +# +show_nonmatching_paths = yes + +# Subject line length limit. The generated subject line will be truncated +# and terminated with "...", to remain within the specified maximum length. +# Set to 0 to turn off. +#truncate_subject = 200 + +# -------------------------------------------------------------------------- + +[maps] + +# +# This section can be used define rewrite mappings for option values. It +# is typically used for computing from/to addresses, but can actually be +# used to remap values for any option in this file. +# +# The mappings are global for the entire configuration file. There is +# no group-specific mapping capability. For each mapping that you want +# to perform, you will provide the name of the option (e.g. from_addr) +# and a specification of how to perform those mappings. These declarations +# are made here in the [maps] section. +# +# When an option is accessed, the value is loaded from the configuration +# file and all %(NAME)s substitutions are performed. The resulting value +# is then passed through the map. If a map entry is not available for +# the value, then it will be used unchanged. +# +# NOTES: - Avoid using map substitution names which differ only in case. +# Unexpected results may occur. 
+# - A colon ':' is also considered as separator between option and +# value (keep this in mind when trying to map a file path under +# windows). +# +# The format to declare a map is: +# +# option_name_to_remap = mapping_specification +# +# At the moment, there is only one type of mapping specification: +# +# mapping_specification = '[' sectionname ']' +# +# This will use the given section to map values. The option names in +# the section are the input values, and the option values are the result. +# + +# +# EXAMPLE: +# +# We have two projects using two repositories. The name of the repos +# does not easily map to their commit mailing lists, so we will use +# a mapping to go from a project name (extracted from the repository +# path) to their commit list. The committers also need a special +# mapping to derive their email address from their repository username. +# +# [projects] +# for_repos = .*/(?P<project>.*) +# from_addr = %(author)s +# to_addr = %(project)s +# +# [maps] +# from_addr = [authors] +# to_addr = [mailing-lists] +# +# [authors] +# john = jconnor@example.com +# sarah = sconnor@example.com +# +# [mailing-lists] +# t600 = spottable-commits@example.com +# tx = hotness-commits@example.com +# + +# -------------------------------------------------------------------------- + +# +# [example-group] +# # send notifications if any web pages are changed +# for_paths = .*\.html +# # set a custom prefix +# commit_subject_prefix = [commit] +# propchange_subject_prefix = [propchange] +# # override the default, sending these elsewhere +# to_addr = www-commits@example.com +# # use the revision author as the from address +# from_addr = +# # use a custom diff program for this group +# diff = /usr/bin/my-diff -u -L %(label_from)s -L %(label_to)s %(from)s %(to)s +# +# [another-example] +# # commits to personal repositories should go to that person +# for_repos = /home/(?P<who>[^/]*)/repos +# to_addr = %(who)s@example.com +# + +[All] + +[file plus other areas] +for_paths = 
file.* + +[file] +for_paths = file.* +show_nonmatching_paths = no + +[bugtracker] +search_logmsg = (?P<bugid>(Blaz|Blah)#\d+) +to_addr = issue-tracker@example.com +commit_subject_prefix = Fix for %(bugid)s: + diff --git a/tools/hook-scripts/svn2feed.py b/tools/hook-scripts/svn2feed.py new file mode 100755 index 0000000..0075cfc --- /dev/null +++ b/tools/hook-scripts/svn2feed.py @@ -0,0 +1,466 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +"""Usage: svn2feed.py [OPTION...] REPOS-PATH + +Generate an RSS 2.0 or Atom 1.0 feed file containing commit +information for the Subversion repository located at REPOS-PATH. Once +the maximum number of items is reached, older elements are removed. +The item title is the revision number, and the item description +contains the author, date, log messages and changed paths. + +Options: + + -h, --help Show this help message. + + -F, --format=FORMAT Required option. FORMAT must be one of: + 'rss' (RSS 2.0) + 'atom' (Atom 1.0) + to select the appropriate feed format. 
+ + -f, --feed-file=PATH Store the feed in the file located at PATH, which will + be created if it does not exist, or overwritten if it + does. If not provided, the script will store the feed + in the current working directory, in a file named + REPOS_NAME.rss or REPOS_NAME.atom (where REPOS_NAME is + the basename of the REPOS_PATH command-line argument, + and the file extension depends on the selected + format). + + -r, --revision=X[:Y] Subversion revision (or revision range) to generate + info for. If not provided, info for the single + youngest revision in the repository will be generated. + + -m, --max-items=N Keep only N items in the feed file. By default, + 20 items are kept. + + -u, --item-url=URL Use URL as the basis for generating feed item links. + This value is appended with '?rev=REV_NUMBER' to form + the actual item links. + + -U, --feed-url=URL Use URL as the global link associated with the feed. + + -P, --svn-path=DIR Look in DIR for the svnlook binary. If not provided, + svnlook must be on the PATH. +""" + +# TODO: +# --item-url should support arbitrary formatting of the revision number, +# to be useful with web viewers other than ViewVC. +# Rather more than intended is being cached in the pickle file. Instead of +# only old items being drawn from the pickle, all the global feed metadata +# is actually set only on initial feed creation, and thereafter simply +# re-used from the pickle each time. 
+ +# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.7.x/tools/hook-scripts/svn2feed.py $ +# $LastChangedDate: 2009-11-16 19:07:17 +0000 (Mon, 16 Nov 2009) $ +# $LastChangedBy: hwright $ +# $LastChangedRevision: 880911 $ + +import sys + +# Python 2.4 is required for subprocess +if sys.version_info < (2, 4): + sys.stderr.write("Error: Python 2.4 or higher required.\n") + sys.stderr.flush() + sys.exit(1) + +import getopt +import os +import subprocess +try: + # Python <3.0 + import cPickle as pickle +except ImportError: + # Python >=3.0 + import pickle +import datetime +import time + +def usage_and_exit(errmsg=None): + """Print a usage message, plus an ERRMSG (if provided), then exit. + If ERRMSG is provided, the usage message is printed to stderr and + the script exits with a non-zero error code. Otherwise, the usage + message goes to stdout, and the script exits with a zero + errorcode.""" + if errmsg is None: + stream = sys.stdout + else: + stream = sys.stderr + stream.write("%s\n" % __doc__) + stream.flush() + if errmsg: + stream.write("\nError: %s\n" % errmsg) + stream.flush() + sys.exit(2) + sys.exit(0) + +def check_url(url, opt): + """Verify that URL looks like a valid URL or option OPT.""" + if not (url.startswith('https://') \ + or url.startswith('http://') \ + or url.startswith('file://')): + usage_and_exit("svn2feed.py: Invalid url '%s' is specified for " \ + "'%s' option" % (url, opt)) + + +class Svn2Feed: + def __init__(self, svn_path, repos_path, item_url, feed_file, + max_items, feed_url): + self.repos_path = repos_path + self.item_url = item_url + self.feed_file = feed_file + self.max_items = max_items + self.feed_url = feed_url + self.svnlook_cmd = 'svnlook' + if svn_path is not None: + self.svnlook_cmd = os.path.join(svn_path, 'svnlook') + self.feed_title = ("%s's Subversion Commits Feed" + % (os.path.basename(os.path.abspath(self.repos_path)))) + self.feed_desc = "The latest Subversion commits" + + def _get_item_dict(self, revision): 
+ revision = str(revision) + + cmd = [self.svnlook_cmd, 'info', '-r', revision, self.repos_path] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) + proc.wait() + info_lines = proc.stdout.readlines() + + cmd = [self.svnlook_cmd, 'changed', '-r', revision, self.repos_path] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) + proc.wait() + changed_data = proc.stdout.readlines() + + desc = ("\nRevision: %s\nLog: %sModified: \n%s" + % (revision, info_lines[3], changed_data)) + + item_dict = { + 'author': info_lines[0].strip('\n'), + 'title': "Revision %s" % revision, + 'link': self.item_url and "%s?rev=%s" % (self.item_url, revision), + 'date': self._format_updated_ts(info_lines[1]), + 'description': "<pre>" + desc + "</pre>", + } + + return item_dict + + def _format_updated_ts(self, revision_ts): + + # Get "2006-08-10 20:17:08" from + # "2006-07-28 20:17:18 +0530 (Fri, 28 Jul 2006) + date = revision_ts[0:19] + epoch = time.mktime(time.strptime(date, "%Y-%m-%d %H:%M:%S")) + return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime(epoch)) + + +class Svn2RSS(Svn2Feed): + def __init__(self, svn_path, repos_path, item_url, feed_file, + max_items, feed_url): + Svn2Feed.__init__(self, svn_path, repos_path, item_url, feed_file, + max_items, feed_url) + try: + import PyRSS2Gen + except ImportError: + sys.stderr.write(""" +Error: Required PyRSS2Gen module not found. 
You can download the PyRSS2Gen +module from: + + http://www.dalkescientific.com/Python/PyRSS2Gen.html + +""") + sys.exit(1) + self.PyRSS2Gen = PyRSS2Gen + + (file, ext) = os.path.splitext(self.feed_file) + self.pickle_file = file + ".pickle" + if os.path.exists(self.pickle_file): + self.rss = pickle.load(open(self.pickle_file, "r")) + else: + self.rss = self.PyRSS2Gen.RSS2( + title = self.feed_title, + link = self.feed_url, + description = self.feed_desc, + lastBuildDate = datetime.datetime.now(), + items = []) + + @staticmethod + def get_default_file_extension(): + return ".rss" + + def add_revision_item(self, revision): + rss_item = self._make_rss_item(revision) + self.rss.items.insert(0, rss_item) + if len(self.rss.items) > self.max_items: + del self.rss.items[self.max_items:] + + def write_output(self): + s = pickle.dumps(self.rss) + f = open(self.pickle_file, "w") + f.write(s) + f.close() + + f = open(self.feed_file, "w") + self.rss.write_xml(f) + f.close() + + def _make_rss_item(self, revision): + info = self._get_item_dict(revision) + + rss_item = self.PyRSS2Gen.RSSItem( + author = info['author'], + title = info['title'], + link = info['link'], + description = info['description'], + guid = self.PyRSS2Gen.Guid(info['link']), + pubDate = info['date']) + return rss_item + + +class Svn2Atom(Svn2Feed): + def __init__(self, svn_path, repos_path, item_url, feed_file, + max_items, feed_url): + Svn2Feed.__init__(self, svn_path, repos_path, item_url, feed_file, + max_items, feed_url) + from xml.dom import getDOMImplementation + self.dom_impl = getDOMImplementation() + + self.pickle_file = self.feed_file + ".pickle" + if os.path.exists(self.pickle_file): + self.document = pickle.load(open(self.pickle_file, "r")) + self.feed = self.document.getElementsByTagName('feed')[0] + else: + self._init_atom_document() + + @staticmethod + def get_default_file_extension(): + return ".atom" + + def add_revision_item(self, revision): + item = self._make_atom_item(revision) + + total 
= 0 + for childNode in self.feed.childNodes: + if childNode.nodeName == 'entry': + if total == 0: + self.feed.insertBefore(item, childNode) + total += 1 + total += 1 + if total > self.max_items: + self.feed.removeChild(childNode) + if total == 0: + self.feed.appendChild(item) + + def write_output(self): + s = pickle.dumps(self.document) + f = open(self.pickle_file, "w") + f.write(s) + f.close() + + f = open(self.feed_file, "w") + f.write(self.document.toxml()) + f.close() + + def _make_atom_item(self, revision): + info = self._get_item_dict(revision) + + doc = self.document + entry = doc.createElement("entry") + + id = doc.createElement("id") + entry.appendChild(id) + id.appendChild(doc.createTextNode(info['link'])) + + title = doc.createElement("title") + entry.appendChild(title) + title.appendChild(doc.createTextNode(info['title'])) + + updated = doc.createElement("updated") + entry.appendChild(updated) + updated.appendChild(doc.createTextNode(info['date'])) + + link = doc.createElement("link") + entry.appendChild(link) + link.setAttribute("href", info['link']) + + summary = doc.createElement("summary") + entry.appendChild(summary) + summary.appendChild(doc.createTextNode(info['description'])) + + author = doc.createElement("author") + entry.appendChild(author) + aname = doc.createElement("name") + author.appendChild(aname) + aname.appendChild(doc.createTextNode(info['author'])) + + return entry + + def _init_atom_document(self): + doc = self.document = self.dom_impl.createDocument(None, None, None) + feed = self.feed = doc.createElement("feed") + doc.appendChild(feed) + + feed.setAttribute("xmlns", "http://www.w3.org/2005/Atom") + + title = doc.createElement("title") + feed.appendChild(title) + title.appendChild(doc.createTextNode(self.feed_title)) + + id = doc.createElement("id") + feed.appendChild(id) + id.appendChild(doc.createTextNode(self.feed_url)) + + updated = doc.createElement("updated") + feed.appendChild(updated) + now = datetime.datetime.now() + 
updated.appendChild(doc.createTextNode(self._format_date(now))) + + link = doc.createElement("link") + feed.appendChild(link) + link.setAttribute("href", self.feed_url) + + author = doc.createElement("author") + feed.appendChild(author) + aname = doc.createElement("name") + author.appendChild(aname) + aname.appendChild(doc.createTextNode("subversion")) + + def _format_date(self, dt): + """ input date must be in GMT """ + return ("%04d-%02d-%02dT%02d:%02d:%02d.%02dZ" + % (dt.year, dt.month, dt.day, dt.hour, dt.minute, + dt.second, dt.microsecond)) + + +def main(): + # Parse the command-line options and arguments. + try: + opts, args = getopt.gnu_getopt(sys.argv[1:], "hP:r:u:f:m:U:F:", + ["help", + "svn-path=", + "revision=", + "item-url=", + "feed-file=", + "max-items=", + "feed-url=", + "format=", + ]) + except getopt.GetoptError, msg: + usage_and_exit(msg) + + # Make sure required arguments are present. + if len(args) != 1: + usage_and_exit("You must specify a repository path.") + repos_path = os.path.abspath(args[0]) + + # Now deal with the options. + max_items = 20 + commit_rev = svn_path = None + item_url = feed_url = None + feed_file = None + feedcls = None + feed_classes = { 'rss': Svn2RSS, 'atom': Svn2Atom } + + for opt, arg in opts: + if opt in ("-h", "--help"): + usage_and_exit() + elif opt in ("-P", "--svn-path"): + svn_path = arg + elif opt in ("-r", "--revision"): + commit_rev = arg + elif opt in ("-u", "--item-url"): + item_url = arg + check_url(item_url, opt) + elif opt in ("-f", "--feed-file"): + feed_file = arg + elif opt in ("-m", "--max-items"): + try: + max_items = int(arg) + except ValueError, msg: + usage_and_exit("Invalid value '%s' for --max-items." 
% (arg)) + if max_items < 1: + usage_and_exit("Value for --max-items must be a positive " + "integer.") + elif opt in ("-U", "--feed-url"): + feed_url = arg + check_url(feed_url, opt) + elif opt in ("-F", "--format"): + try: + feedcls = feed_classes[arg] + except KeyError: + usage_and_exit("Invalid value '%s' for --format." % arg) + + if feedcls is None: + usage_and_exit("Option -F [--format] is required.") + + if item_url is None: + usage_and_exit("Option -u [--item-url] is required.") + + if feed_url is None: + usage_and_exit("Option -U [--feed-url] is required.") + + if commit_rev is None: + svnlook_cmd = 'svnlook' + if svn_path is not None: + svnlook_cmd = os.path.join(svn_path, 'svnlook') + cmd = [svnlook_cmd, 'youngest', repos_path] + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE) + proc.wait() + cmd_out = proc.stdout.readlines() + try: + revisions = [int(cmd_out[0])] + except IndexError, msg: + usage_and_exit("svn2feed.py: Invalid value '%s' for " \ + "REPOS-PATH" % (repos_path)) + else: + try: + rev_range = commit_rev.split(':') + len_rev_range = len(rev_range) + if len_rev_range == 1: + revisions = [int(commit_rev)] + elif len_rev_range == 2: + start, end = rev_range + start = int(start) + end = int(end) + if (start > end): + tmp = start + start = end + end = tmp + revisions = list(range(start, end + 1)[-max_items:]) + else: + raise ValueError() + except ValueError, msg: + usage_and_exit("svn2feed.py: Invalid value '%s' for --revision." 
\ + % (commit_rev)) + + if feed_file is None: + feed_file = (os.path.basename(repos_path) + + feedcls.get_default_file_extension()) + + feed = feedcls(svn_path, repos_path, item_url, feed_file, max_items, + feed_url) + for revision in revisions: + feed.add_revision_item(revision) + feed.write_output() + + +if __name__ == "__main__": + main() diff --git a/tools/hook-scripts/svnperms.conf.example b/tools/hook-scripts/svnperms.conf.example new file mode 100644 index 0000000..29c895a --- /dev/null +++ b/tools/hook-scripts/svnperms.conf.example @@ -0,0 +1,98 @@ +## Example configuration file for svnperms.py. ## + +# $Id: svnperms.conf.example 1028289 2010-10-28 13:20:45Z cmpilato $ +# +# Multiple global [groups] sections are accepted, but be aware +# that it's the same as concatenating them all in a single entry. +# You can also create section specific groups, using a syntax +# like [sectionname groups]. +# +[groups] +group1 = user1 user2 user3 +group2 = user4 user5 +supergroup = @group1 @group2 user6 + +# +# Example repository control, showing allowed syntax. +# +# - the latest match is what counts +# - groups are prefixed by "@" +# - you can use groups and users in the same definition +# - all permissions may be revoked with () +# - line breaks are accepted +# +[example1 groups] +group3 = user9 user10 + +[example1] +trunk/.* = *(add,remove,update) @group1,user4,user5(update) + user6,user7() +trunk/.* = user8(add,update) +tags/[^/]+/ = @group3(add) +branches/[^/]+/.* = *(add,remove,update) + +# +# One of the most used repository structures, for a single project. +# +[example2] +trunk/.* = *(add,remove,update) +tags/[^/]+/ = *(add) +branches/[^/]+/.* = *(add,remove,update) + +# +# Another common structure, expecting a project name inside the repository +# (like trunk/myproject/ and tags/myproject/). In this example, only admins +# are allowed to create projects, and there are project specific access +# lists. 
+# +[example3 groups] +admins = john +project1 = user1 user2 +project2 = user3 user4 + +[example3] +trunk/[^/]+/ = @admins(add,remove) +trunk/project1/.+ = @project1(add,remove,update) +trunk/project2/.+ = @project2(add,remove,update) +tags/[^/]+/ = @admins(add,remove) +tags/project1/[^/]+/ = @project1(add,remove) +tags/project2/[^/]+/ = @project2(add,remove) +branches/[^/]+/ = @admins(add,remove) +branches/project1/[^/]+/.* = @project1(add,remove,update) +branches/project2/[^/]+/.* = @project2(add,remove,update) + +# +# A more complex structure, as defined in the following URL: +# +# http://moin.conectiva.com.br/RepositorySystem +# +[example4 groups] +admins = user1 user2 +updaters = user3 + +[example4] +snapshot/[^/]+/(current/(SPECS/|SOURCES/)?)? = *(add) +snapshot/[^/]+/ = @admins(add,remove) +snapshot/[^/]+/current/SPECS/[^/]+\.spec = *(add,remove,update) +snapshot/[^/]+/current/SOURCES/[^/]+ = *(add,remove,update) +snapshot/[^/]+/releases/[^/]+/([^/+]/)? = autouser(add) +snapshot/[^/]+/pristine/ = autouser(add,remove) +branches/[^/]+/.* = *(add,remove,update) +releases/[^/]+/ = @admins(add) +tags/[^/]+/ = *(add,remove) +updates/[^/]+/[^/]+/(current/(SPECS/|SOURCES/)?)? = @updaters,autouser(add) +updates/[^/]+/[^/]+/current/SPECS/[^/]+\.spec = @updaters,autouser(add,update) +updates/[^/]+/[^/]+/current/SOURCES/[^/]+ = @updaters,autouser(add,remove,update) +updates/[^/]+/[^/]+/releases/.* = autouser(add) +updates/[^/]+/[^/]+/pristine/ = autouser(add,remove) + +# +# Sections can inherit settings from previously defined sections, using +# the "extends" keyword in the section declaration. In this example, +# the [example5] section inherits all the settings from [example2], and +# adds a new setting for a releases directory which behaves like the +# tags directory. 
+# +[example5 extends example2] +releases/[^/]+/ = *(add) + diff --git a/tools/hook-scripts/svnperms.py b/tools/hook-scripts/svnperms.py new file mode 100755 index 0000000..519e64e --- /dev/null +++ b/tools/hook-scripts/svnperms.py @@ -0,0 +1,363 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# +# + +# $HeadURL: http://svn.apache.org/repos/asf/subversion/branches/1.7.x/tools/hook-scripts/svnperms.py $ +# $LastChangedDate: 2011-07-12 18:37:44 +0000 (Tue, 12 Jul 2011) $ +# $LastChangedBy: blair $ +# $LastChangedRevision: 1145712 $ + +import sys, os +import getopt +import shlex + +try: + # Python >=3.0 + from subprocess import getstatusoutput as subprocess_getstatusoutput +except ImportError: + # Python <3.0 + from commands import getstatusoutput as subprocess_getstatusoutput +try: + my_getopt = getopt.gnu_getopt +except AttributeError: + my_getopt = getopt.getopt +import re + +__author__ = "Gustavo Niemeyer <gustavo@niemeyer.net>" + +class Error(Exception): pass + +SECTION = re.compile(r'\[([^]]+?)(?:\s+extends\s+([^]]+))?\]') +OPTION = re.compile(r'(\S+)\s*=\s*(.*)$') + +class Config: + def __init__(self, filename): + # Options are stored in __sections_list like this: + # [(sectname, [(optname, optval), ...]), ...] + self._sections_list = [] + self._sections_dict = {} + self._read(filename) + + def _read(self, filename): + # Use the same logic as in ConfigParser.__read() + file = open(filename) + cursectdict = None + optname = None + lineno = 0 + for line in file: + lineno = lineno + 1 + if line.isspace() or line[0] == '#': + continue + if line[0].isspace() and cursectdict is not None and optname: + value = line.strip() + cursectdict[optname] = "%s %s" % (cursectdict[optname], value) + cursectlist[-1][1] = "%s %s" % (cursectlist[-1][1], value) + else: + m = SECTION.match(line) + if m: + sectname = m.group(1) + parentsectname = m.group(2) + if parentsectname is None: + # No parent section defined, so start a new section + cursectdict = self._sections_dict.setdefault \ + (sectname, {}) + cursectlist = [] + else: + # Copy the parent section into the new section + parentsectdict = self._sections_dict.get \ + (parentsectname, {}) + cursectdict = self._sections_dict.setdefault \ + (sectname, parentsectdict.copy()) + cursectlist = self.walk(parentsectname) + 
self._sections_list.append((sectname, cursectlist)) + optname = None + elif cursectdict is None: + raise Error("%s:%d: no section header" % \ + (filename, lineno)) + else: + m = OPTION.match(line) + if m: + optname, optval = m.groups() + optval = optval.strip() + cursectdict[optname] = optval + cursectlist.append([optname, optval]) + else: + raise Error("%s:%d: parsing error" % \ + (filename, lineno)) + + def sections(self): + return list(self._sections_dict.keys()) + + def options(self, section): + return list(self._sections_dict.get(section, {}).keys()) + + def get(self, section, option, default=None): + return self._sections_dict.get(option, default) + + def walk(self, section, option=None): + ret = [] + for sectname, options in self._sections_list: + if sectname == section: + for optname, value in options: + if not option or optname == option: + ret.append((optname, value)) + return ret + + +class Permission: + def __init__(self): + self._group = {} + self._permlist = [] + + def parse_groups(self, groupsiter): + for option, value in groupsiter: + groupusers = [] + for token in shlex.split(value): + # expand nested groups in place; no forward decls + if token[0] == "@": + try: + groupusers.extend(self._group[token[1:]]) + except KeyError: + raise Error, "group '%s' not found" % token[1:] + else: + groupusers.append(token) + self._group[option] = groupusers + + def parse_perms(self, permsiter): + for option, value in permsiter: + # Paths never start with /, so remove it if provided + if option[0] == "/": + option = option[1:] + pattern = re.compile("^%s$" % option) + for entry in value.split(): + openpar, closepar = entry.find("("), entry.find(")") + groupsusers = entry[:openpar].split(",") + perms = entry[openpar+1:closepar].split(",") + users = [] + for groupuser in groupsusers: + if groupuser[0] == "@": + try: + users.extend(self._group[groupuser[1:]]) + except KeyError: + raise Error("group '%s' not found" % \ + groupuser[1:]) + else: + 
users.append(groupuser) + self._permlist.append((pattern, users, perms)) + + def get(self, user, path): + ret = [] + for pattern, users, perms in self._permlist: + if pattern.match(path) and (user in users or "*" in users): + ret = perms + return ret + +class SVNLook: + def __init__(self, repospath, txn=None, rev=None): + self.repospath = repospath + self.txn = txn + self.rev = rev + + def _execcmd(self, *cmd, **kwargs): + cmdstr = " ".join(cmd) + status, output = subprocess_getstatusoutput(cmdstr) + if status != 0: + sys.stderr.write(cmdstr) + sys.stderr.write("\n") + sys.stderr.write(output) + raise Error("command failed: %s\n%s" % (cmdstr, output)) + return status, output + + def _execsvnlook(self, cmd, *args, **kwargs): + execcmd_args = ["svnlook", cmd, self.repospath] + self._add_txnrev(execcmd_args, kwargs) + execcmd_args += args + execcmd_kwargs = {} + keywords = ["show", "noerror"] + for key in keywords: + if key in kwargs: + execcmd_kwargs[key] = kwargs[key] + return self._execcmd(*execcmd_args, **execcmd_kwargs) + + def _add_txnrev(self, cmd_args, received_kwargs): + if "txn" in received_kwargs: + txn = received_kwargs.get("txn") + if txn is not None: + cmd_args += ["-t", txn] + elif self.txn is not None: + cmd_args += ["-t", self.txn] + if "rev" in received_kwargs: + rev = received_kwargs.get("rev") + if rev is not None: + cmd_args += ["-r", rev] + elif self.rev is not None: + cmd_args += ["-r", self.rev] + + def changed(self, **kwargs): + status, output = self._execsvnlook("changed", **kwargs) + if status != 0: + return None + changes = [] + for line in output.splitlines(): + line = line.rstrip() + if not line: continue + entry = [None, None, None] + changedata, changeprop, path = None, None, None + if line[0] != "_": + changedata = line[0] + if line[1] != " ": + changeprop = line[1] + path = line[4:] + changes.append((changedata, changeprop, path)) + return changes + + def author(self, **kwargs): + status, output = self._execsvnlook("author", **kwargs) 
+ if status != 0: + return None + return output.strip() + + +def check_perms(filename, section, repos, txn=None, rev=None, author=None): + svnlook = SVNLook(repos, txn=txn, rev=rev) + if author is None: + author = svnlook.author() + changes = svnlook.changed() + try: + config = Config(filename) + except IOError: + raise Error("can't read config file "+filename) + if not section in config.sections(): + raise Error("section '%s' not found in config file" % section) + perm = Permission() + perm.parse_groups(config.walk("groups")) + perm.parse_groups(config.walk(section+" groups")) + perm.parse_perms(config.walk(section)) + permerrors = [] + for changedata, changeprop, path in changes: + pathperms = perm.get(author, path) + if changedata == "A" and "add" not in pathperms: + permerrors.append("you can't add "+path) + elif changedata == "U" and "update" not in pathperms: + permerrors.append("you can't update "+path) + elif changedata == "D" and "remove" not in pathperms: + permerrors.append("you can't remove "+path) + elif changeprop == "U" and "update" not in pathperms: + permerrors.append("you can't update properties of "+path) + #else: + # print "cdata=%s cprop=%s path=%s perms=%s" % \ + # (str(changedata), str(changeprop), path, str(pathperms)) + if permerrors: + permerrors.insert(0, "you don't have enough permissions for " + "this transaction:") + raise Error("\n".join(permerrors)) + + +# Command: + +USAGE = """\ +Usage: svnperms.py OPTIONS + +Options: + -r PATH Use repository at PATH to check transactions + -t TXN Query transaction TXN for commit information + -f PATH Use PATH as configuration file (default is repository + path + /conf/svnperms.conf) + -s NAME Use section NAME as permission section (default is + repository name, extracted from repository path) + -R REV Query revision REV for commit information (for tests) + -A AUTHOR Check commit as if AUTHOR had committed it (for tests) + -h Show this message +""" + +class MissingArgumentsException(Exception): + 
"Thrown when required arguments are missing." + pass + +def parse_options(): + try: + opts, args = my_getopt(sys.argv[1:], "f:s:r:t:R:A:h", ["help"]) + except getopt.GetoptError, e: + raise Error(e.msg) + class Options: pass + obj = Options() + obj.filename = None + obj.section = None + obj.repository = None + obj.transaction = None + obj.revision = None + obj.author = None + for opt, val in opts: + if opt == "-f": + obj.filename = val + elif opt == "-s": + obj.section = val + elif opt == "-r": + obj.repository = val + elif opt == "-t": + obj.transaction = val + elif opt == "-R": + obj.revision = val + elif opt == "-A": + obj.author = val + elif opt in ["-h", "--help"]: + sys.stdout.write(USAGE) + sys.exit(0) + missingopts = [] + if not obj.repository: + missingopts.append("repository") + if not (obj.transaction or obj.revision): + missingopts.append("either transaction or a revision") + if missingopts: + raise MissingArgumentsException("missing required option(s): " + ", ".join(missingopts)) + obj.repository = os.path.abspath(obj.repository) + if obj.filename is None: + obj.filename = os.path.join(obj.repository, "conf", "svnperms.conf") + if obj.section is None: + obj.section = os.path.basename(obj.repository) + if not (os.path.isdir(obj.repository) and + os.path.isdir(os.path.join(obj.repository, "db")) and + os.path.isdir(os.path.join(obj.repository, "hooks")) and + os.path.isfile(os.path.join(obj.repository, "format"))): + raise Error("path '%s' doesn't look like a repository" % \ + obj.repository) + + return obj + +def main(): + try: + opts = parse_options() + check_perms(opts.filename, opts.section, + opts.repository, opts.transaction, opts.revision, + opts.author) + except MissingArgumentsException, e: + sys.stderr.write("%s\n" % str(e)) + sys.stderr.write(USAGE) + sys.exit(1) + except Error, e: + sys.stderr.write("error: %s\n" % str(e)) + sys.exit(1) + +if __name__ == "__main__": + main() + +# vim:et:ts=4:sw=4 diff --git 
a/tools/hook-scripts/validate-extensions.py b/tools/hook-scripts/validate-extensions.py new file mode 100755 index 0000000..ed0283d --- /dev/null +++ b/tools/hook-scripts/validate-extensions.py @@ -0,0 +1,110 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# + +"""\ +Check that any files pending commit into a Subversion repository have +suitable file extensions, printing an error and exiting with an +errorful value if any files fail validation. This is intended to be +used as a Subversion pre-commit hook script. + +Syntax 1: + + validate-extensions.py REPOS_PATH TXN_NAME deny EXT [...] + + Ensure that any newly added files do *not* have one of the provided + file extensions. + + +Syntax 2: + + validate-extensions.py REPOS_PATH TXN_NAME allow EXT [...] + + Ensure that any newly added files *do* have one of the provided + file extensions. (Extension-less files are disallowed.) + +""" + +import sys +import os +from svn import repos, fs, core + +def validate_added_extensions(repos_path, txn_name, extensions, action): + # Open the repository and transaction. 
+ fs_ptr = repos.fs(repos.open(repos_path)) + txn_t = fs.open_txn(fs_ptr, txn_name) + txn_root = fs.txn_root(txn_t) + + # Fetch the changes made in this transaction. + changes = fs.svn_fs_paths_changed(txn_root) + paths = changes.keys() + + # Check the changes. + for path in paths: + change = changes[path] + + # Always allow deletions. + if change.change_kind == fs.path_change_delete: + continue + + # Always allow non-files. + kind = fs.check_path(txn_root, path) + if kind != core.svn_node_file: + continue + + # If this was a newly added (without history) file ... + if ((change.change_kind == fs.path_change_replace) \ + or (change.change_kind == fs.path_change_add)): + copyfrom_rev, copyfrom_path = fs.copied_from(txn_root, path) + if copyfrom_rev == core.SVN_INVALID_REVNUM: + + # ... then check it for a valid extension. + base, ext = os.path.splitext(path) + if ext: + ext = ext[1:].lower() + if ((ext in extensions) and (action == 'deny')) \ + or ((ext not in extensions) and (action == 'allow')): + sys.stderr.write("Path '%s' has an extension disallowed by server " + "configuration.\n" % (path)) + sys.exit(1) + +def usage_and_exit(errmsg=None): + stream = errmsg and sys.stderr or sys.stdout + stream.write(__doc__) + if errmsg: + stream.write("ERROR: " + errmsg + "\n") + sys.exit(errmsg and 1 or 0) + +def main(): + argc = len(sys.argv) + if argc < 5: + usage_and_exit("Not enough arguments.") + repos_path = sys.argv[1] + txn_name = sys.argv[2] + action = sys.argv[3] + if action not in ("allow", "deny"): + usage_and_exit("Invalid action '%s'. Expected either 'allow' or 'deny'." 
+ % (action)) + extensions = [x.lower() for x in sys.argv[4:]] + validate_added_extensions(repos_path, txn_name, extensions, action) + +if __name__ == "__main__": + main() diff --git a/tools/hook-scripts/verify-po.py b/tools/hook-scripts/verify-po.py new file mode 100755 index 0000000..b860901 --- /dev/null +++ b/tools/hook-scripts/verify-po.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# +# +"""This is a pre-commit hook that checks whether the contents of PO files +committed to the repository are encoded in UTF-8. 
+""" + +import codecs +import string +import sys +import subprocess +from svn import core, fs, delta, repos + +# Set to the path of the 'msgfmt' executable to use msgfmt to check +# the syntax of the po file + +USE_MSGFMT = None + +if USE_MSGFMT is not None: + class MsgFmtChecker: + def __init__(self): + self.pipe = subprocess.Popen([USE_MSGFMT, "-c", "-o", "/dev/null", "-"], + stdin=subprocess.PIPE, + close_fds=sys.platform != "win32") + self.io_error = 0 + + def write(self, data): + if self.io_error: + return + try: + self.pipe.stdin.write(data) + except IOError: + self.io_error = 1 + + def close(self): + try: + self.pipe.stdin.close() + except IOError: + self.io_error = 1 + return self.pipe.wait() == 0 and not self.io_error +else: + class MsgFmtChecker: + def write(self, data): + pass + def close(self): + return 1 + + +class ChangeReceiver(delta.Editor): + def __init__(self, txn_root, base_root, pool): + self.txn_root = txn_root + self.base_root = base_root + self.pool = pool + + def add_file(self, path, parent_baton, + copyfrom_path, copyfrom_revision, file_pool): + return [0, path] + + def open_file(self, path, parent_baton, base_revision, file_pool): + return [0, path] + + def apply_textdelta(self, file_baton, base_checksum): + file_baton[0] = 1 + # no handler + return None + + def close_file(self, file_baton, text_checksum): + changed, path = file_baton + if len(path) < 3 or path[-3:] != '.po' or not changed: + # This is not a .po file, or it hasn't changed + return + + try: + # Read the file contents through a validating UTF-8 decoder + subpool = core.svn_pool_create(self.pool) + checker = MsgFmtChecker() + try: + stream = core.Stream(fs.file_contents(self.txn_root, path, subpool)) + reader = codecs.getreader('UTF-8')(stream, 'strict') + writer = codecs.getwriter('UTF-8')(checker, 'strict') + while True: + data = reader.read(core.SVN_STREAM_CHUNK_SIZE) + if not data: + break + writer.write(data) + if not checker.close(): + sys.exit("PO format check failed 
def check_po(pool, repos_path, txn):
    """Drive a dir_delta between the base revision of transaction TXN and
    the transaction root, feeding every change through a ChangeReceiver,
    which validates any changed .po files."""
    repository = repos.open(repos_path, pool)
    fs_ptr = repos.fs(repository)
    txn_ptr = fs.open_txn(fs_ptr, txn, pool)
    txn_root = fs.txn_root(txn_ptr, pool)
    base_root = fs.revision_root(fs_ptr, fs.txn_base_revision(txn_ptr),
                                 pool)

    # Authorize every path: this hook only validates file contents.
    def authz_cb(root, path, pool):
        return 1

    editor = ChangeReceiver(txn_root, base_root, pool)
    e_ptr, e_baton = delta.make_editor(editor, pool)
    repos.dir_delta(base_root, '', '', txn_root, '',
                    e_ptr, e_baton, authz_cb, 0, 1, 0, 0, pool)
def usage_and_exit(errmsg=None):
    """Print a usage message, plus an ERRMSG (if provided), then exit.
    If ERRMSG is provided, the usage message is printed to stderr and
    the script exits with a non-zero error code.  Otherwise, the usage
    message goes to stdout, and the script exits with a zero
    errorcode."""
    if errmsg is None:
        stream = sys.stdout
    else:
        stream = sys.stderr
    stream.write("%s\n" % __doc__)
    stream.flush()
    if errmsg:
        stream.write("\nError: %s\n" % errmsg)
        stream.flush()
        # Bug fix: the original unconditionally exited with status 2,
        # which made the documented zero-status path below unreachable
        # (e.g. '--help' wrongly exited non-zero).  Exit 2 only on error.
        sys.exit(2)
    sys.exit(0)
def bar_graph(nominal_length, trans, untrans, fuzzy, obsolete):
    """Format the given four counts into a bar graph string in which the
    total length of the bars representing the TRANS, UNTRANS and FUZZY
    counts is NOMINAL_LENGTH characters, and the bar representing the
    OBSOLETE count extends beyond that.

    Returns the empty string when all of TRANS, UNTRANS and FUZZY are
    zero (an empty .po file)."""

    total_count = trans + untrans + fuzzy  # don't include 'obsolete'
    # Robustness fix: avoid ZeroDivisionError for a file with no messages.
    if total_count == 0:
        return ''
    accum_bar = 0
    accum_count = 0
    s = ''
    for count, letter in [(trans, '+'), (untrans, 'U'), (fuzzy, '~'),
                          (obsolete, 'o')]:
        accum_count += count
        # Use floor division so the bar length stays an integer under
        # both Python 2 and Python 3 (plain '/' yields a float on 3.x,
        # which would break the string-repetition below).
        new_bar_end = nominal_length * accum_count // total_count
        s += letter * (new_bar_end - accum_bar)
        accum_bar = new_bar_end
    return s
main(): + # Parse the command-line options and arguments. + try: + opts, args = getopt.gnu_getopt(sys.argv[1:], "hm:", + ["help", + "to-email-id=", + ]) + except getopt.GetoptError, msg: + usage_and_exit(msg) + + to_email_id = None + for opt, arg in opts: + if opt in ("-h", "--help"): + usage_and_exit() + elif opt in ("-m", "--to-email-id"): + to_email_id = arg + + l10n = l10nReport() + os.chdir("%s/../.." % os.path.dirname(os.path.abspath(sys.argv[0]))) + l10n.pre_l10n_report() + [info_out, info_err] = l10n.safe_command(['svn', 'info']) + if info_err: + sys.stderr.write("\nError: %s\n" % info_err) + sys.stderr.flush() + sys.exit(0) + + po_dir = 'subversion/po' + branch_name = l10n.match('URL:.*/asf/subversion/(\S+)', info_out) + [info_out, info_err] = l10n.safe_command(['svnversion', po_dir]) + if info_err: + sys.stderr.write("\nError: %s\n" % info_err) + sys.stderr.flush() + sys.exit(0) + + wc_version = re.sub('[MS]', '', info_out.strip()) + title = "Translation status report for %s@r%s" % \ + (branch_name, wc_version) + + os.chdir(po_dir) + files = sorted(os.listdir('.')) + format_head = "\n%6s %7s %7s %7s %7s" % ("lang", "trans", "untrans", + "fuzzy", "obs") + format_line = "--------------------------------------" + print("\n%s\n%s\n%s" % (title, format_head, format_line)) + + body = "" + po_pattern = re.compile('(.*).po$') + for file in files: + lang = l10n.match(po_pattern, file) + if not lang: + continue + [trans, untrans, fuzzy, obsolete] = l10n.get_msgattribs(file) + po_format = "%6s %7d %7d %7d %7d" %\ + (lang, trans, untrans, fuzzy, obsolete) + po_format += " " + bar_graph(30, trans, untrans, fuzzy, obsolete) + body += "%s\n" % po_format + print(po_format) + + if to_email_id: + import smtplib + # Ensure compatibility of the email module all the way to Python 2.3 + try: + from email.message import Message + except ImportError: + from email.Message import Message + + msg = Message() + msg["From"] = FROM_ADDRESS + msg["To"] = to_email_id + msg["Subject"] = 
SUBJECT_TEMPLATE % (branch_name, wc_version) + msg["X-Mailer"] = "l10n-report.py r%s" % _rev() + msg["Reply-To"] = LIST_ADDRESS + msg["Mail-Followup-To"] = LIST_ADDRESS + msg["In-Reply-To"] = MAIL_THREAD_ID % (branch_name.replace('/', '_')) + msg["References"] = msg["In-Reply-To"] + + # http://www.iana.org/assignments/auto-submitted-keywords/auto-submitted-keywords.xhtml + msg["Auto-Submitted"] = 'auto-generated' + + msg.set_type("text/plain") + msg.set_payload("\n".join((title, format_head, format_line, body))) + + server = smtplib.SMTP('localhost') + server.sendmail("From: " + FROM_ADDRESS, + "To: " + to_email_id, + msg.as_string()) + print("The report is sent to '%s' email id." % to_email_id) + else: + print("\nYou have not passed '-m' option, so email is not sent.") + +if __name__ == "__main__": + main() diff --git a/tools/po/po-update.sh b/tools/po/po-update.sh new file mode 100755 index 0000000..9891531 --- /dev/null +++ b/tools/po/po-update.sh @@ -0,0 +1,123 @@ +#!/bin/sh +# +# +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# - to update only the LL locale
Therefore, run it twice, to invert and + # then re-invert, to minimize spurious diffs. + $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot + $MSGMERGE --sort-by-file --no-wrap --update $i subversion.pot + done ) +} + +if [ $# -eq 0 ]; then + make_pot + update_po \* +else + langs= + while [ $# -ge 1 ]; do + case $1 in + pot) ;; + *) + if [ -e $svn_base/subversion/po/$1.po ]; then + langs="$langs $1" + else + echo "E: No such .po file '$1.po'" >&2 + exit 1 + fi + esac + shift + done + make_pot + for lang in $langs; do + update_po $lang + done +fi diff --git a/tools/server-side/fsfs-reshard.py b/tools/server-side/fsfs-reshard.py new file mode 100755 index 0000000..d039885 --- /dev/null +++ b/tools/server-side/fsfs-reshard.py @@ -0,0 +1,399 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# fsfs-reshard.py REPOS_PATH MAX_FILES_PER_SHARD +# +# Perform an offline conversion of an FSFS repository between linear (format +# 2, usable by Subversion 1.4+) and sharded (format 3, usable by Subversion +# 1.5+) layouts. +# +# The MAX_FILES_PER_SHARD argument specifies the maximum number of files +# that will be stored in each shard (directory), or zero to specify a linear +# layout. Subversion 1.5 uses a default value of 1000 files per shard. +# +# As the repository will not be valid while the conversion is in progress, +# the repository administrator must ensure that access to the repository is +# blocked for the duration of the conversion. +# +# In the event that the conversion is interrupted, the repository will be in +# an inconsistent state. The repository administrator should then re-run +# this tool to completion. +# +# +# Note that, currently, resharding from one sharded layout to another is +# likely to be an extremely slow process. To reshard, we convert from a +# sharded to linear layout and then to the new sharded layout. 
def usage():
  """Print a usage message and exit."""
  message = """usage: %s REPOS_PATH MAX_FILES_PER_SHARD [START END]

Perform an offline conversion of an FSFS repository between linear
(readable by Subversion 1.4 or later) and sharded (readable by
Subversion 1.5 or later) layouts.

The MAX_FILES_PER_SHARD argument specifies the maximum number of
files that will be stored in each shard (directory), or zero to
specify a linear layout.  Subversion 1.5 uses a default value of
1000 files per shard.

Convert revisions START through END inclusive if specified, or all
revisions if unspecified.
""" % sys.argv[0]
  print(message)
  # Exit non-zero: reaching here means the arguments were unusable.
  sys.exit(1)
def check_repos_format(repos_path):
  """Check that REPOS_PATH contains a repository with a suitable format;
  print a message and exit if not.  Formats '3' and '5' are accepted."""
  format_path = os.path.join(repos_path, 'format')
  try:
    format_file = open(format_path)
    try:
      format = format_file.readline()
    finally:
      # Bug fix: the original never closed the format file.
      format_file.close()
  except IOError:
    # In all likelihood, the file doesn't exist.
    incompatible_repos_format(repos_path, '<unreadable>')
    return
  if not format.endswith('\n'):
    incompatible_repos_format(repos_path, format + ' <missing newline>')
  format = format.rstrip('\n')
  if format not in ('3', '5'):
    incompatible_repos_format(repos_path, format)
def current_file(repos_path):
  """Return triple of (revision, next_node_id, next_copy_id) from
  REPOS_PATH/db/current ."""
  # Bug fix: the original leaked the open file handle; close it even if
  # readline() raises.
  current_fp = open(os.path.join(repos_path, 'db', 'current'))
  try:
    # Only the first line is meaningful; it holds the three fields.
    return current_fp.readline().split()
  finally:
    current_fp.close()
def write_fs_format(repos_path, contents):
  """Write a new filesystem format file for repository REPOS_PATH containing
  CONTENTS, and leave it read-only (owner and group readable)."""
  format_path = os.path.join(repos_path, 'db', 'format')
  f = open(format_path, 'wb')
  try:
    f.write(contents)
  finally:
    # Robustness fix: close the file even when the write fails, so a
    # partially-written format file is not also left open.
    f.close()
  os.chmod(format_path, stat.S_IRUSR | stat.S_IRGRP)
def main():
  """Parse and validate the command-line arguments, then drive the
  linearise/shard conversion of the repository given on the command line.
  Exits non-zero on any validation failure."""
  if len(sys.argv) < 3:
    usage()

  repos_path = sys.argv[1]
  max_files_per_shard = sys.argv[2]
  try:
    start = int(sys.argv[3])
    end = int(sys.argv[4])
  except IndexError:
    # START/END unspecified: convert every revision in the repository.
    start = 0
    end = int(current_file(repos_path)[0])

  # Validate the command-line arguments.
  db_path = os.path.join(repos_path, 'db')
  current_path = os.path.join(db_path, 'current')
  if not os.path.exists(current_path):
    sys.stderr.write("error: '%s' doesn't appear to be a Subversion FSFS repository.\n" \
      % repos_path)
    sys.stderr.flush()
    sys.exit(1)

  try:
    max_files_per_shard = int(max_files_per_shard)
  except (ValueError, OverflowError):
    # Bug fix: the original read 'except ValueError, OverflowError:',
    # which (in Python 2) binds OverflowError as the exception variable
    # instead of catching it; the tuple form catches both.
    sys.stderr.write("error: maximum files per shard ('%s') is not a valid number.\n" \
      % max_files_per_shard)
    sys.stderr.flush()
    sys.exit(1)

  if max_files_per_shard < 0:
    sys.stderr.write("error: maximum files per shard ('%d') must not be negative.\n" \
      % max_files_per_shard)
    sys.stderr.flush()
    sys.exit(1)

  # Check the format of the repository.
  check_repos_format(repos_path)
  sharded = check_fs_format(repos_path)

  # Let the user know what's going on.
  if max_files_per_shard > 0:
    print("Converting '%s' to a sharded structure with %d files per directory" \
      % (repos_path, max_files_per_shard))
    if sharded:
      print('(will convert to a linear structure first)')
  else:
    print("Converting '%s' to a linear structure" % repos_path)

  # Prevent access to the repository for the duration of the conversion.
  # There's no clean way to do this, but since the format of the repository
  # is indeterminate, let's remove the format file while we're converting.
  print('- marking the repository as invalid')
  remove_fs_format(repos_path)

  # First, convert to a linear scheme (this makes recovery easier because
  # it's easier to reason about the behaviour on restart).
  if sharded:
    print('- linearising db/revs')
    linearise(os.path.join(repos_path, 'db', 'revs'))
    print('- linearising db/revprops')
    linearise(os.path.join(repos_path, 'db', 'revprops'))

  if max_files_per_shard == 0:
    # We're done.  Stamp the filesystem with a format 2 db/format file.
    print('- marking the repository as a valid linear repository')
    write_fs_format(repos_path, '2\n')
  else:
    print('- sharding db/revs')
    shard(os.path.join(repos_path, 'db', 'revs'), max_files_per_shard,
          start, end)
    print('- sharding db/revprops')
    shard(os.path.join(repos_path, 'db', 'revprops'), max_files_per_shard,
          start, end)

    # We're done.  Stamp the filesystem with a format 3 db/format file.
    print('- marking the repository as a valid sharded repository')
    write_fs_format(repos_path, '3\nlayout sharded %d\n' % max_files_per_shard)

  print('- done.')
  sys.exit(0)
+ +[recursive-actions] +/*/trunk = allow +/ = deny +/* = deny +/*/tags = deny +/*/branches = deny +/*/* = deny +/*/*/tags = deny +/*/*/branches = deny + +As you might guess, this defines a set of patterns that control what the +user is not allowed to do. Anything with a 'deny' after it is denied, and +as a fallback mechanism anything with an 'allow' after it is special cased +to be allowed, even if it matches something that is denied. + +Note that the wildcard portions of a rule only swallow a single directory, +so /* will match /foo, but not /foo/bar. They also must be at the end of +a directory segment, so /foo* or /* are valid, but /*foo is not. + +These rules are applied to any recursive action, which basically means any +Subversion command that goes through the update-report, like update, diff, +checkout, merge, etc. + +The DontDoThatDisallowReplay option makes mod_dontdothat disallow +replay requests, which is on by default. diff --git a/tools/server-side/mod_dontdothat/mod_dontdothat.c b/tools/server-side/mod_dontdothat/mod_dontdothat.c new file mode 100644 index 0000000..c7c6613 --- /dev/null +++ b/tools/server-side/mod_dontdothat/mod_dontdothat.c @@ -0,0 +1,661 @@ +/* + * mod_dontdothat.c: an Apache filter that allows you to return arbitrary + * errors for various types of Subversion requests. + * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include <httpd.h> +#include <http_config.h> +#include <http_protocol.h> +#include <http_request.h> +#include <http_log.h> +#include <util_filter.h> +#include <ap_config.h> +#include <apr_strings.h> + +#include <expat.h> + +#include "mod_dav_svn.h" +#include "svn_string.h" +#include "svn_config.h" + +module AP_MODULE_DECLARE_DATA dontdothat_module; + +typedef struct dontdothat_config_rec { + const char *config_file; + const char *base_path; + int no_replay; +} dontdothat_config_rec; + +static void *create_dontdothat_dir_config(apr_pool_t *pool, char *dir) +{ + dontdothat_config_rec *cfg = apr_pcalloc(pool, sizeof(*cfg)); + + cfg->base_path = dir; + cfg->no_replay = 1; + + return cfg; +} + +static const command_rec dontdothat_cmds[] = +{ + AP_INIT_TAKE1("DontDoThatConfigFile", ap_set_file_slot, + (void *) APR_OFFSETOF(dontdothat_config_rec, config_file), + OR_ALL, + "Text file containing actions to take for specific requests"), + AP_INIT_FLAG("DontDoThatDisallowReplay", ap_set_flag_slot, + (void *) APR_OFFSETOF(dontdothat_config_rec, no_replay), + OR_ALL, "Disallow replay requests as if they are other recursive requests."), + { NULL } +}; + +typedef enum parse_state_t { + STATE_BEGINNING, + STATE_IN_UPDATE, + STATE_IN_SRC_PATH, + STATE_IN_DST_PATH, + STATE_IN_RECURSIVE +} parse_state_t; + +typedef struct dontdothat_filter_ctx { + /* Set to TRUE when we determine that the request is safe and should be + * allowed to continue. 
*/ + svn_boolean_t let_it_go; + + /* Set to TRUE when we determine that the request is unsafe and should be + * stopped in its tracks. */ + svn_boolean_t no_soup_for_you; + + XML_Parser xmlp; + + /* The current location in the REPORT body. */ + parse_state_t state; + + /* A buffer to hold CDATA we encounter. */ + svn_stringbuf_t *buffer; + + dontdothat_config_rec *cfg; + + /* An array of wildcards that are special cased to be allowed. */ + apr_array_header_t *allow_recursive_ops; + + /* An array of wildcards where recursive operations are not allowed. */ + apr_array_header_t *no_recursive_ops; + + /* TRUE if a path has failed a test already. */ + svn_boolean_t path_failed; + + /* An error for when we're using this as a baton while parsing config + * files. */ + svn_error_t *err; + + /* The current request. */ + request_rec *r; +} dontdothat_filter_ctx; + +/* Return TRUE if wildcard WC matches path P, FALSE otherwise. */ +static svn_boolean_t +matches(const char *wc, const char *p) +{ + for (;;) + { + switch (*wc) + { + case '*': + if (wc[1] != '/' && wc[1] != '\0') + abort(); /* This was checked for during parsing of the config. */ + + /* It's a wild card, so eat up until the next / in p. */ + while (*p && p[1] != '/') + ++p; + + /* If we ran out of p and we're out of wc then it matched. */ + if (! *p) + { + if (wc[1] == '\0') + return TRUE; + else + return FALSE; + } + break; + + case '\0': + if (*p != '\0') + /* This means we hit the end of wc without running out of p. */ + return FALSE; + else + /* Or they were exactly the same length, so it's not lower. */ + return TRUE; + + default: + if (*wc != *p) + return FALSE; /* If we don't match, then move on to the next + * case. */ + else + break; + } + + ++wc; + ++p; + + if (! 
*p && *wc) + return FALSE; + } +} + +static svn_boolean_t +is_this_legal(dontdothat_filter_ctx *ctx, const char *uri) +{ + const char *relative_path; + const char *cleaned_uri; + const char *repos_name; + int trailing_slash; + dav_error *derr; + + /* Ok, so we need to skip past the scheme, host, etc. */ + uri = ap_strstr_c(uri, "://"); + if (uri) + uri = ap_strchr_c(uri + 3, '/'); + + if (uri) + { + const char *repos_path; + + derr = dav_svn_split_uri(ctx->r, + uri, + ctx->cfg->base_path, + &cleaned_uri, + &trailing_slash, + &repos_name, + &relative_path, + &repos_path); + if (! derr) + { + int idx; + + if (! repos_path) + repos_path = ""; + + repos_path = apr_psprintf(ctx->r->pool, "/%s", repos_path); + + /* First check the special cases that are always legal... */ + for (idx = 0; idx < ctx->allow_recursive_ops->nelts; ++idx) + { + const char *wc = APR_ARRAY_IDX(ctx->allow_recursive_ops, + idx, + const char *); + + if (matches(wc, repos_path)) + { + ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, ctx->r, + "mod_dontdothat: rule %s allows %s", + wc, repos_path); + return TRUE; + } + } + + /* Then look for stuff we explicitly don't allow. 
*/ + for (idx = 0; idx < ctx->no_recursive_ops->nelts; ++idx) + { + const char *wc = APR_ARRAY_IDX(ctx->no_recursive_ops, + idx, + const char *); + + if (matches(wc, repos_path)) + { + ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, ctx->r, + "mod_dontdothat: rule %s forbids %s", + wc, repos_path); + return FALSE; + } + } + } + } + + return TRUE; +} + +static apr_status_t +dontdothat_filter(ap_filter_t *f, + apr_bucket_brigade *bb, + ap_input_mode_t mode, + apr_read_type_e block, + apr_off_t readbytes) +{ + dontdothat_filter_ctx *ctx = f->ctx; + apr_status_t rv; + apr_bucket *e; + + if (mode != AP_MODE_READBYTES) + return ap_get_brigade(f->next, bb, mode, block, readbytes); + + rv = ap_get_brigade(f->next, bb, mode, block, readbytes); + if (rv) + return rv; + + for (e = APR_BRIGADE_FIRST(bb); + e != APR_BRIGADE_SENTINEL(bb); + e = APR_BUCKET_NEXT(e)) + { + svn_boolean_t last = APR_BUCKET_IS_EOS(e); + const char *str; + apr_size_t len; + + if (last) + { + str = ""; + len = 0; + } + else + { + rv = apr_bucket_read(e, &str, &len, APR_BLOCK_READ); + if (rv) + return rv; + } + + if (! XML_Parse(ctx->xmlp, str, len, last)) + { + /* let_it_go so we clean up our parser, no_soup_for_you so that we + * bail out before bothering to parse this stuff a second time. */ + ctx->let_it_go = TRUE; + ctx->no_soup_for_you = TRUE; + } + + /* If we found something that isn't allowed, set the correct status + * and return an error so it'll bail out before it gets anywhere it + * can do real damage. */ + if (ctx->no_soup_for_you) + { + /* XXX maybe set up the SVN-ACTION env var so that it'll show up + * in the Subversion operational logs? */ + + ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r, + "mod_dontdothat: client broke the rules, " + "returning error"); + + /* Ok, pass an error bucket and an eos bucket back to the client. + * + * NOTE: The custom error string passed here doesn't seem to be + * used anywhere by httpd. This is quite possibly a bug. 
+ * + * TODO: Try and pass back a custom document body containing a + * serialized svn_error_t so the client displays a better + * error message. */ + bb = apr_brigade_create(f->r->pool, f->c->bucket_alloc); + e = ap_bucket_error_create(403, "No Soup For You!", + f->r->pool, f->c->bucket_alloc); + APR_BRIGADE_INSERT_TAIL(bb, e); + e = apr_bucket_eos_create(f->c->bucket_alloc); + APR_BRIGADE_INSERT_TAIL(bb, e); + + /* Don't forget to remove us, otherwise recursion blows the stack. */ + ap_remove_input_filter(f); + + return ap_pass_brigade(f->r->output_filters, bb); + } + else if (ctx->let_it_go || last) + { + ap_remove_input_filter(f); + + ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, f->r, + "mod_dontdothat: letting request go through"); + + return rv; + } + } + + return rv; +} + +static void +cdata(void *baton, const char *data, int len) +{ + dontdothat_filter_ctx *ctx = baton; + + if (ctx->no_soup_for_you || ctx->let_it_go) + return; + + switch (ctx->state) + { + case STATE_IN_SRC_PATH: + /* FALLTHROUGH */ + + case STATE_IN_DST_PATH: + /* FALLTHROUGH */ + + case STATE_IN_RECURSIVE: + if (! ctx->buffer) + ctx->buffer = svn_stringbuf_ncreate(data, len, ctx->r->pool); + else + svn_stringbuf_appendbytes(ctx->buffer, data, len); + break; + + default: + break; + } +} + +static void +start_element(void *baton, const char *name, const char **attrs) +{ + dontdothat_filter_ctx *ctx = baton; + const char *sep; + + if (ctx->no_soup_for_you || ctx->let_it_go) + return; + + /* XXX Hack. We should be doing real namespace support, but for now we + * just skip ahead of any namespace prefix. If someone's sending us + * an update-report element outside of the SVN namespace they'll get + * what they deserve... 
*/ + sep = ap_strchr_c(name, ':'); + if (sep) + name = sep + 1; + + switch (ctx->state) + { + case STATE_BEGINNING: + if (strcmp(name, "update-report") == 0) + ctx->state = STATE_IN_UPDATE; + else if (strcmp(name, "replay-report") == 0 && ctx->cfg->no_replay) + { + /* XXX it would be useful if there was a way to override this + * on a per-user basis... */ + if (! is_this_legal(ctx, ctx->r->unparsed_uri)) + ctx->no_soup_for_you = TRUE; + else + ctx->let_it_go = TRUE; + } + else + ctx->let_it_go = TRUE; + break; + + case STATE_IN_UPDATE: + if (strcmp(name, "src-path") == 0) + { + ctx->state = STATE_IN_SRC_PATH; + if (ctx->buffer) + ctx->buffer->len = 0; + } + else if (strcmp(name, "dst-path") == 0) + { + ctx->state = STATE_IN_DST_PATH; + if (ctx->buffer) + ctx->buffer->len = 0; + } + else if (strcmp(name, "recursive") == 0) + { + ctx->state = STATE_IN_RECURSIVE; + if (ctx->buffer) + ctx->buffer->len = 0; + } + else + ; /* XXX Figure out what else we need to deal with... Switch + * has that link-path thing we probably need to look out + * for... */ + break; + + default: + break; + } +} + +static void +end_element(void *baton, const char *name) +{ + dontdothat_filter_ctx *ctx = baton; + const char *sep; + + if (ctx->no_soup_for_you || ctx->let_it_go) + return; + + /* XXX Hack. We should be doing real namespace support, but for now we + * just skip ahead of any namespace prefix. If someone's sending us + * an update-report element outside of the SVN namespace they'll get + * what they deserve... */ + sep = ap_strchr_c(name, ':'); + if (sep) + name = sep + 1; + + switch (ctx->state) + { + case STATE_IN_SRC_PATH: + ctx->state = STATE_IN_UPDATE; + + svn_stringbuf_strip_whitespace(ctx->buffer); + + if (! ctx->path_failed && ! is_this_legal(ctx, ctx->buffer->data)) + ctx->path_failed = TRUE; + break; + + case STATE_IN_DST_PATH: + ctx->state = STATE_IN_UPDATE; + + svn_stringbuf_strip_whitespace(ctx->buffer); + + if (! ctx->path_failed && ! 
is_this_legal(ctx, ctx->buffer->data)) + ctx->path_failed = TRUE; + break; + + case STATE_IN_RECURSIVE: + ctx->state = STATE_IN_UPDATE; + + svn_stringbuf_strip_whitespace(ctx->buffer); + + /* If this isn't recursive we let it go. */ + if (strcmp(ctx->buffer->data, "no") == 0) + { + ap_log_rerror(APLOG_MARK, APLOG_DEBUG, 0, ctx->r, + "mod_dontdothat: letting nonrecursive request go"); + ctx->let_it_go = TRUE; + } + break; + + case STATE_IN_UPDATE: + if (strcmp(name, "update-report") == 0) + { + /* If we made it here without figuring out that this is + * nonrecursive, then the path check is our final word + * on the subject. */ + + if (ctx->path_failed) + ctx->no_soup_for_you = TRUE; + else + ctx->let_it_go = TRUE; + } + else + ; /* XXX Is there other stuff we care about? */ + break; + + default: + abort(); + } +} + +static svn_boolean_t +is_valid_wildcard(const char *wc) +{ + while (*wc) + { + if (*wc == '*') + { + if (wc[1] && wc[1] != '/') + return FALSE; + } + + ++wc; + } + + return TRUE; +} + +static svn_boolean_t +config_enumerator(const char *wildcard, + const char *action, + void *baton, + apr_pool_t *pool) +{ + dontdothat_filter_ctx *ctx = baton; + + if (strcmp(action, "deny") == 0) + { + if (is_valid_wildcard(wildcard)) + APR_ARRAY_PUSH(ctx->no_recursive_ops, const char *) = wildcard; + else + ctx->err = svn_error_createf(APR_EINVAL, + NULL, + "'%s' is an invalid wildcard", + wildcard); + } + else if (strcmp(action, "allow") == 0) + { + if (is_valid_wildcard(wildcard)) + APR_ARRAY_PUSH(ctx->allow_recursive_ops, const char *) = wildcard; + else + ctx->err = svn_error_createf(APR_EINVAL, + NULL, + "'%s' is an invalid wildcard", + wildcard); + } + else + { + ctx->err = svn_error_createf(APR_EINVAL, + NULL, + "'%s' is not a valid action", + action); + } + + if (ctx->err) + return FALSE; + else + return TRUE; +} + +static apr_status_t +clean_up_parser(void *baton) +{ + XML_Parser xmlp = baton; + + XML_ParserFree(xmlp); + + return APR_SUCCESS; +} + +static void 
+dontdothat_insert_filters(request_rec *r) +{ + dontdothat_config_rec *cfg = ap_get_module_config(r->per_dir_config, + &dontdothat_module); + + if (! cfg->config_file) + return; + + if (strcmp("REPORT", r->method) == 0) + { + dontdothat_filter_ctx *ctx = apr_pcalloc(r->pool, sizeof(*ctx)); + svn_config_t *config; + svn_error_t *err; + + ctx->r = r; + + ctx->cfg = cfg; + + ctx->allow_recursive_ops = apr_array_make(r->pool, 5, sizeof(char *)); + + ctx->no_recursive_ops = apr_array_make(r->pool, 5, sizeof(char *)); + + /* XXX is there a way to error out from this point? Would be nice... */ + + err = svn_config_read(&config, cfg->config_file, TRUE, r->pool); + if (err) + { + char buff[256]; + + ap_log_rerror(APLOG_MARK, APLOG_ERR, + ((err->apr_err >= APR_OS_START_USERERR && + err->apr_err < APR_OS_START_CANONERR) ? + 0 : err->apr_err), + r, "Failed to load DontDoThatConfigFile: %s", + svn_err_best_message(err, buff, sizeof(buff))); + + svn_error_clear(err); + + return; + } + + svn_config_enumerate2(config, + "recursive-actions", + config_enumerator, + ctx, + r->pool); + if (ctx->err) + { + char buff[256]; + + ap_log_rerror(APLOG_MARK, APLOG_ERR, + ((ctx->err->apr_err >= APR_OS_START_USERERR && + ctx->err->apr_err < APR_OS_START_CANONERR) ? 
+ 0 : ctx->err->apr_err), + r, "Failed to parse DontDoThatConfigFile: %s", + svn_err_best_message(ctx->err, buff, sizeof(buff))); + + svn_error_clear(ctx->err); + + return; + } + + ctx->state = STATE_BEGINNING; + + ctx->xmlp = XML_ParserCreate(NULL); + + apr_pool_cleanup_register(r->pool, ctx->xmlp, + clean_up_parser, + apr_pool_cleanup_null); + + XML_SetUserData(ctx->xmlp, ctx); + XML_SetElementHandler(ctx->xmlp, start_element, end_element); + XML_SetCharacterDataHandler(ctx->xmlp, cdata); + + ap_add_input_filter("DONTDOTHAT_FILTER", ctx, r, r->connection); + } +} + +static void +dontdothat_register_hooks(apr_pool_t *pool) +{ + ap_hook_insert_filter(dontdothat_insert_filters, NULL, NULL, APR_HOOK_FIRST); + + ap_register_input_filter("DONTDOTHAT_FILTER", + dontdothat_filter, + NULL, + AP_FTYPE_RESOURCE); +} + +module AP_MODULE_DECLARE_DATA dontdothat_module = +{ + STANDARD20_MODULE_STUFF, + create_dontdothat_dir_config, + NULL, + NULL, + NULL, + dontdothat_cmds, + dontdothat_register_hooks +}; diff --git a/tools/server-side/svn-backup-dumps.py b/tools/server-side/svn-backup-dumps.py new file mode 100755 index 0000000..bb6b235 --- /dev/null +++ b/tools/server-side/svn-backup-dumps.py @@ -0,0 +1,692 @@ +#!/usr/bin/env python +# +# svn-backup-dumps.py -- Create dumpfiles to backup a subversion repository. +# +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== +# +# This script creates dump files from a subversion repository. +# It is intended for use in cron jobs and post-commit hooks. +# +# The basic operation modes are: +# 1. Create a full dump (revisions 0 to HEAD). +# 2. Create incremental dumps containing at most N revisions. +# 3. Create incremental single revision dumps (for use in post-commit). +# 4. Create incremental dumps containing everything since last dump. +# +# All dump files are prefixed with the basename of the repository. All +# examples below assume that the repository '/srv/svn/repos/src' is +# dumped so all dumpfiles start with 'src'. +# +# Optional functionality: +# 5. Create gzipped dump files. +# 6. Create bzipped dump files. +# 7. Transfer the dumpfile to another host using ftp. +# 8. Transfer the dumpfile to another host using smb. +# +# See also 'svn-backup-dumps.py -h'. +# +# +# 1. Create a full dump (revisions 0 to HEAD). +# +# svn-backup-dumps.py <repos> <dumpdir> +# +# <repos> Path to the repository. +# <dumpdir> Directory for storing the dump file. +# +# This creates a dump file named 'src.000000-NNNNNN.svndmp.gz' +# where NNNNNN is the revision number of HEAD. +# +# +# 2. Create incremental dumps containing at most N revisions. +# +# svn-backup-dumps.py -c <count> <repos> <dumpdir> +# +# <count> Count of revisions per dump file. +# <repos> Path to the repository. +# <dumpdir> Directory for storing the dump file. 
+# +# When started the first time with a count of 1000 and if HEAD is +# at 2923 it creates the following files: +# +# src.000000-000999.svndmp.gz +# src.001000-001999.svndmp.gz +# src.002000-002923.svndmp.gz +# +# Say the next time HEAD is at 3045 it creates these two files: +# +# src.002000-002999.svndmp.gz +# src.003000-003045.svndmp.gz +# +# +# 3. Create incremental single revision dumps (for use in post-commit). +# +# svn-backup-dumps.py -r <revnr> <repos> <dumpdir> +# +# <revnr> A revision number. +# <repos> Path to the repository. +# <dumpdir> Directory for storing the dump file. +# +# This creates a dump file named 'src.NNNNNN.svndmp.gz' where +# NNNNNN is the revision number of HEAD. +# +# +# 4. Create incremental dumps relative to last dump +# +# svn-backup-dumps.py -i <repos> <dumpdir> +# +# <repos> Path to the repository. +# <dumpdir> Directory for storing the dump file. +# +# When if dumps are performed when HEAD is 2923, +# then when HEAD is 3045, is creates these files: +# +# src.000000-002923.svndmp.gz +# src.002924-003045.svndmp.gz +# +# +# 5. Create gzipped dump files. +# +# svn-backup-dumps.py -z ... +# +# ... More options, see 1-4, 7, 8. +# +# +# 6. Create bzipped dump files. +# +# svn-backup-dumps.py -b ... +# +# ... More options, see 1-4, 7, 8. +# +# +# 7. Transfer the dumpfile to another host using ftp. +# +# svn-backup-dumps.py -t ftp:<host>:<user>:<password>:<path> ... +# +# <host> Name of the FTP host. +# <user> Username on the remote host. +# <password> Password for the user. +# <path> Subdirectory on the remote host. +# ... More options, see 1-6. +# +# If <path> contains the string '%r' it is replaced by the +# repository name (basename of the repository path). +# +# +# 8. Transfer the dumpfile to another host using smb. +# +# svn-backup-dumps.py -t smb:<share>:<user>:<password>:<path> ... +# +# <share> Name of an SMB share in the form '//host/share'. +# <user> Username on the remote host. +# <password> Password for the user. 
+# <path> Subdirectory of the share. +# ... More options, see 1-6. +# +# If <path> contains the string '%r' it is replaced by the +# repository name (basename of the repository path). +# +# +# +# TODO: +# - find out how to report smbclient errors +# - improve documentation +# + +__version = "0.6" + +import sys +import os +if os.name != "nt": + import fcntl + import select +import gzip +import os.path +import re +from optparse import OptionParser +from ftplib import FTP +from subprocess import Popen, PIPE + +try: + import bz2 + have_bz2 = True +except ImportError: + have_bz2 = False + + +class SvnBackupOutput: + + def __init__(self, abspath, filename): + self.__filename = filename + self.__absfilename = os.path.join(abspath, filename) + + def open(self): + pass + + def write(self, data): + pass + + def close(self): + pass + + def get_filename(self): + return self.__filename + + def get_absfilename(self): + return self.__absfilename + + +class SvnBackupOutputPlain(SvnBackupOutput): + + def __init__(self, abspath, filename): + SvnBackupOutput.__init__(self, abspath, filename) + + def open(self): + self.__ofd = open(self.get_absfilename(), "wb") + + def write(self, data): + self.__ofd.write(data) + + def close(self): + self.__ofd.close() + + +class SvnBackupOutputGzip(SvnBackupOutput): + + def __init__(self, abspath, filename): + SvnBackupOutput.__init__(self, abspath, filename + ".gz") + + def open(self): + self.__compressor = gzip.GzipFile(filename=self.get_absfilename(), + mode="wb") + + def write(self, data): + self.__compressor.write(data) + + def close(self): + self.__compressor.flush() + self.__compressor.close() + + +class SvnBackupOutputBzip2(SvnBackupOutput): + + def __init__(self, abspath, filename): + SvnBackupOutput.__init__(self, abspath, filename + ".bz2") + + def open(self): + self.__compressor = bz2.BZ2Compressor() + self.__ofd = open(self.get_absfilename(), "wb") + + def write(self, data): + self.__ofd.write(self.__compressor.compress(data)) + + def 
close(self): + self.__ofd.write(self.__compressor.flush()) + self.__ofd.close() + +class SvnBackupOutputCommand(SvnBackupOutput): + + def __init__(self, abspath, filename, file_extension, cmd_path, + cmd_options): + SvnBackupOutput.__init__(self, abspath, filename + file_extension) + self.__cmd_path = cmd_path + self.__cmd_options = cmd_options + + def open(self): + cmd = [ self.__cmd_path, self.__cmd_options ] + + self.__ofd = open(self.get_absfilename(), "wb") + try: + proc = Popen(cmd, stdin=PIPE, stdout=self.__ofd, shell=False) + except: + print (256, "", "Popen failed (%s ...):\n %s" % (cmd[0], + str(sys.exc_info()[1]))) + sys.exit(256) + self.__proc = proc + self.__stdin = proc.stdin + + def write(self, data): + self.__stdin.write(data) + + def close(self): + self.__stdin.close() + rc = self.__proc.wait() + self.__ofd.close() + +class SvnBackupException(Exception): + + def __init__(self, errortext): + self.errortext = errortext + + def __str__(self): + return self.errortext + +class SvnBackup: + + def __init__(self, options, args): + # need 3 args: progname, reposname, dumpdir + if len(args) != 3: + if len(args) < 3: + raise SvnBackupException("too few arguments, specify" + " repospath and dumpdir.\nuse -h or" + " --help option to see help.") + else: + raise SvnBackupException("too many arguments, specify" + " repospath and dumpdir only.\nuse" + " -h or --help option to see help.") + self.__repospath = args[1] + self.__dumpdir = args[2] + # check repospath + rpathparts = os.path.split(self.__repospath) + if len(rpathparts[1]) == 0: + # repospath without trailing slash + self.__repospath = rpathparts[0] + if not os.path.exists(self.__repospath): + raise SvnBackupException("repos '%s' does not exist." % self.__repospath) + if not os.path.isdir(self.__repospath): + raise SvnBackupException("repos '%s' is not a directory." 
% self.__repospath) + for subdir in [ "db", "conf", "hooks" ]: + dir = os.path.join(self.__repospath, subdir) + if not os.path.isdir(dir): + raise SvnBackupException("repos '%s' is not a repository." % self.__repospath) + rpathparts = os.path.split(self.__repospath) + self.__reposname = rpathparts[1] + if self.__reposname in [ "", ".", ".." ]: + raise SvnBackupException("couldn't extract repos name from '%s'." % self.__repospath) + # check dumpdir + if not os.path.exists(self.__dumpdir): + raise SvnBackupException("dumpdir '%s' does not exist." % self.__dumpdir) + elif not os.path.isdir(self.__dumpdir): + raise SvnBackupException("dumpdir '%s' is not a directory." % self.__dumpdir) + # set options + self.__rev_nr = options.rev + self.__count = options.cnt + self.__quiet = options.quiet + self.__deltas = options.deltas + self.__relative_incremental = options.relative_incremental + + # svnadmin/svnlook path + self.__svnadmin_path = "svnadmin" + if options.svnadmin_path: + self.__svnadmin_path = options.svnadmin_path + self.__svnlook_path = "svnlook" + if options.svnlook_path: + self.__svnlook_path = options.svnlook_path + + # check compress option + self.__gzip_path = options.gzip_path + self.__bzip2_path = options.bzip2_path + self.__zip = None + compress_options = 0 + if options.gzip_path != None: + compress_options = compress_options + 1 + if options.bzip2_path != None: + compress_options = compress_options + 1 + if options.bzip2: + compress_options = compress_options + 1 + self.__zip = "bzip2" + if options.gzip: + compress_options = compress_options + 1 + self.__zip = "gzip" + if compress_options > 1: + raise SvnBackupException("--bzip2-path, --gzip-path, -b, -z are " + "mutually exclusive.") + + self.__overwrite = False + self.__overwrite_all = False + if options.overwrite > 0: + self.__overwrite = True + if options.overwrite > 1: + self.__overwrite_all = True + self.__transfer = None + if options.transfer != None: + self.__transfer = options.transfer.split(":") 
+ if len(self.__transfer) != 5: + if len(self.__transfer) < 5: + raise SvnBackupException("too few fields for transfer '%s'." % self.__transfer) + else: + raise SvnBackupException("too many fields for transfer '%s'." % self.__transfer) + if self.__transfer[0] not in [ "ftp", "smb" ]: + raise SvnBackupException("unknown transfer method '%s'." % self.__transfer[0]) + + def set_nonblock(self, fileobj): + fd = fileobj.fileno() + n = fcntl.fcntl(fd, fcntl.F_GETFL) + fcntl.fcntl(fd, fcntl.F_SETFL, n|os.O_NONBLOCK) + + def exec_cmd(self, cmd, output=None, printerr=False): + if os.name == "nt": + return self.exec_cmd_nt(cmd, output, printerr) + else: + return self.exec_cmd_unix(cmd, output, printerr) + + def exec_cmd_unix(self, cmd, output=None, printerr=False): + try: + proc = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=False) + except: + return (256, "", "Popen failed (%s ...):\n %s" % (cmd[0], + str(sys.exc_info()[1]))) + stdout = proc.stdout + stderr = proc.stderr + self.set_nonblock(stdout) + self.set_nonblock(stderr) + readfds = [ stdout, stderr ] + selres = select.select(readfds, [], []) + bufout = "" + buferr = "" + while len(selres[0]) > 0: + for fd in selres[0]: + buf = fd.read(16384) + if len(buf) == 0: + readfds.remove(fd) + elif fd == stdout: + if output: + output.write(buf) + else: + bufout += buf + else: + if printerr: + sys.stdout.write("%s " % buf) + else: + buferr += buf + if len(readfds) == 0: + break + selres = select.select(readfds, [], []) + rc = proc.wait() + if printerr: + print("") + return (rc, bufout, buferr) + + def exec_cmd_nt(self, cmd, output=None, printerr=False): + try: + proc = Popen(cmd, stdout=PIPE, stderr=None, shell=False) + except: + return (256, "", "Popen failed (%s ...):\n %s" % (cmd[0], + str(sys.exc_info()[1]))) + stdout = proc.stdout + bufout = "" + buferr = "" + buf = stdout.read(16384) + while len(buf) > 0: + if output: + output.write(buf) + else: + bufout += buf + buf = stdout.read(16384) + rc = proc.wait() + return (rc, 
bufout, buferr) + + def get_head_rev(self): + cmd = [ self.__svnlook_path, "youngest", self.__repospath ] + r = self.exec_cmd(cmd) + if r[0] == 0 and len(r[2]) == 0: + return int(r[1].strip()) + else: + print(r[2]) + return -1 + + def get_last_dumped_rev(self): + filename_regex = re.compile("(.+)\.\d+-(\d+)\.svndmp.*") + # start with -1 so the next one will be rev 0 + highest_rev = -1 + + for filename in os.listdir(self.__dumpdir): + m = filename_regex.match( filename ) + if m and (m.group(1) == self.__reposname): + rev_end = int(m.group(2)) + + if rev_end > highest_rev: + # determine the latest revision dumped + highest_rev = rev_end + + return highest_rev + + def transfer_ftp(self, absfilename, filename): + rc = False + try: + host = self.__transfer[1] + user = self.__transfer[2] + passwd = self.__transfer[3] + destdir = self.__transfer[4].replace("%r", self.__reposname) + ftp = FTP(host, user, passwd) + ftp.cwd(destdir) + ifd = open(absfilename, "rb") + ftp.storbinary("STOR %s" % filename, ifd) + ftp.quit() + rc = len(ifd.read(1)) == 0 + ifd.close() + except Exception, e: + raise SvnBackupException("ftp transfer failed:\n file: '%s'\n error: %s" % \ + (absfilename, str(e))) + return rc + + def transfer_smb(self, absfilename, filename): + share = self.__transfer[1] + user = self.__transfer[2] + passwd = self.__transfer[3] + if passwd == "": + passwd = "-N" + destdir = self.__transfer[4].replace("%r", self.__reposname) + cmd = ("smbclient", share, "-U", user, passwd, "-D", destdir, + "-c", "put %s %s" % (absfilename, filename)) + r = self.exec_cmd(cmd) + rc = r[0] == 0 + if not rc: + print(r[2]) + return rc + + def transfer(self, absfilename, filename): + if self.__transfer == None: + return + elif self.__transfer[0] == "ftp": + self.transfer_ftp(absfilename, filename) + elif self.__transfer[0] == "smb": + self.transfer_smb(absfilename, filename) + else: + print("unknown transfer method '%s'." 
% self.__transfer[0]) + + def create_dump(self, checkonly, overwrite, fromrev, torev=None): + revparam = "%d" % fromrev + r = "%06d" % fromrev + if torev != None: + revparam += ":%d" % torev + r += "-%06d" % torev + filename = "%s.%s.svndmp" % (self.__reposname, r) + output = None + if self.__bzip2_path: + output = SvnBackupOutputCommand(self.__dumpdir, filename, ".bz2", + self.__bzip2_path, "-cz" ) + elif self.__gzip_path: + output = SvnBackupOutputCommand(self.__dumpdir, filename, ".gz", + self.__gzip_path, "-cf" ) + elif self.__zip: + if self.__zip == "gzip": + output = SvnBackupOutputGzip(self.__dumpdir, filename) + else: + output = SvnBackupOutputBzip2(self.__dumpdir, filename) + else: + output = SvnBackupOutputPlain(self.__dumpdir, filename) + absfilename = output.get_absfilename() + realfilename = output.get_filename() + if checkonly: + return os.path.exists(absfilename) + elif os.path.exists(absfilename): + if overwrite: + print("overwriting " + absfilename) + else: + print("%s already exists." 
% absfilename) + return True + else: + print("writing " + absfilename) + cmd = [ self.__svnadmin_path, "dump", + "--incremental", "-r", revparam, self.__repospath ] + if self.__quiet: + cmd[2:2] = [ "-q" ] + if self.__deltas: + cmd[2:2] = [ "--deltas" ] + output.open() + r = self.exec_cmd(cmd, output, True) + output.close() + rc = r[0] == 0 + if rc: + self.transfer(absfilename, realfilename) + return rc + + def export_single_rev(self): + return self.create_dump(False, self.__overwrite, self.__rev_nr) + + def export(self): + headrev = self.get_head_rev() + if headrev == -1: + return False + if self.__count is None: + return self.create_dump(False, self.__overwrite, 0, headrev) + baserev = headrev - (headrev % self.__count) + rc = True + cnt = self.__count + fromrev = baserev - cnt + torev = baserev - 1 + while fromrev >= 0 and rc: + if self.__overwrite_all or \ + not self.create_dump(True, False, fromrev, torev): + rc = self.create_dump(False, self.__overwrite_all, + fromrev, torev) + fromrev -= cnt + torev -= cnt + else: + fromrev = -1 + if rc: + rc = self.create_dump(False, self.__overwrite, baserev, headrev) + return rc + + def export_relative_incremental(self): + headrev = self.get_head_rev() + if headrev == -1: + return False + + last_dumped_rev = self.get_last_dumped_rev(); + if headrev < last_dumped_rev: + # that should not happen... 
+ return False + + if headrev == last_dumped_rev: + # already up-to-date + return True + + return self.create_dump(False, False, last_dumped_rev + 1, headrev) + + def execute(self): + if self.__rev_nr != None: + return self.export_single_rev() + elif self.__relative_incremental: + return self.export_relative_incremental() + else: + return self.export() + + +if __name__ == "__main__": + usage = "usage: svn-backup-dumps.py [options] repospath dumpdir" + parser = OptionParser(usage=usage, version="%prog "+__version) + if have_bz2: + parser.add_option("-b", + action="store_true", + dest="bzip2", default=False, + help="compress the dump using python bzip2 library.") + parser.add_option("-i", + action="store_true", + dest="relative_incremental", default=False, + help="perform incremental relative to last dump.") + parser.add_option("--deltas", + action="store_true", + dest="deltas", default=False, + help="pass --deltas to svnadmin dump.") + parser.add_option("-c", + action="store", type="int", + dest="cnt", default=None, + help="count of revisions per dumpfile.") + parser.add_option("-o", + action="store_const", const=1, + dest="overwrite", default=0, + help="overwrite files.") + parser.add_option("-O", + action="store_const", const=2, + dest="overwrite", default=0, + help="overwrite all files.") + parser.add_option("-q", + action="store_true", + dest="quiet", default=False, + help="quiet.") + parser.add_option("-r", + action="store", type="int", + dest="rev", default=None, + help="revision number for single rev dump.") + parser.add_option("-t", + action="store", type="string", + dest="transfer", default=None, + help="transfer dumps to another machine "+ + "(s.a. 
--help-transfer).") + parser.add_option("-z", + action="store_true", + dest="gzip", default=False, + help="compress the dump using python gzip library.") + parser.add_option("--bzip2-path", + action="store", type="string", + dest="bzip2_path", default=None, + help="compress the dump using bzip2 custom command.") + parser.add_option("--gzip-path", + action="store", type="string", + dest="gzip_path", default=None, + help="compress the dump using gzip custom command.") + parser.add_option("--svnadmin-path", + action="store", type="string", + dest="svnadmin_path", default=None, + help="svnadmin command path.") + parser.add_option("--svnlook-path", + action="store", type="string", + dest="svnlook_path", default=None, + help="svnlook command path.") + parser.add_option("--help-transfer", + action="store_true", + dest="help_transfer", default=False, + help="shows detailed help for the transfer option.") + (options, args) = parser.parse_args(sys.argv) + if options.help_transfer: + print("Transfer help:") + print("") + print(" FTP:") + print(" -t ftp:<host>:<user>:<password>:<dest-path>") + print("") + print(" SMB (using smbclient):") + print(" -t smb:<share>:<user>:<password>:<dest-path>") + print("") + sys.exit(0) + rc = False + try: + backup = SvnBackup(options, args) + rc = backup.execute() + except SvnBackupException, e: + print("svn-backup-dumps.py: %s" % e) + if rc: + print("Everything OK.") + sys.exit(0) + else: + print("An error occured!") + sys.exit(1) + +# vim:et:ts=4:sw=4 diff --git a/tools/server-side/svn-populate-node-origins-index.c b/tools/server-side/svn-populate-node-origins-index.c new file mode 100644 index 0000000..b9762c4 --- /dev/null +++ b/tools/server-side/svn-populate-node-origins-index.c @@ -0,0 +1,193 @@ +/* + * svn-populate-node-origins-index.c : Populate the repository's node + * origins index. 
+ * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + */ + +#include "svn_cmdline.h" +#include "svn_error.h" +#include "svn_fs.h" +#include "svn_path.h" +#include "svn_pools.h" +#include "svn_repos.h" +#include "svn_utf.h" + +/* Used to terminate lines in large multi-line string literals. */ +#define NL APR_EOL_STR + +static const char *usage_summary = + "Crawl the Subversion repository located at REPOS-PATH in an attempt to" NL + "populate that repository's index of node origins. " NL + "" NL + "The node origins index is new as of Subversion 1.5, and behaves as a" NL + "cache to vastly speed up certain history-querying operations. For" NL + "compatibility with repositories created with pre-1.5 versions of" NL + "Subversion, Subversion will gracefully handle cache misses by doing a" NL + "brute-force calculation of the query answer and lazily populating the" NL + "index with answers it calculates. Unfortunately, calculating that" NL + "information using the brute-force method (instead of having the" NL + "information appear in the index organically) can be very costly." 
NL + "" NL + "This tool triggers the lazy index population logic built into" NL + "Subversion in a fashion far more efficient than is likely to happen" NL + "during typical repository usage. It can be run while the repository" NL + "is online, too, without interrupting normal Subversion activities." NL; + +/* Print a usage message for this program (PROGNAME), possibly with an + error message ERR_MSG, if not NULL. */ +static void +usage_maybe_with_err(const char *progname, const char *err_msg) +{ + FILE *out; + + out = err_msg ? stderr : stdout; + fprintf(out, "Usage: %s REPOS-PATH\n\n%s", progname, usage_summary); + if (err_msg) + fprintf(out, "\nERROR: %s\n", err_msg); +} + +/* Build the node-origins index any newly added items introduced in + REVISION in FS. Set *COUNT to the number of new items found. */ +static svn_error_t * +index_revision_adds(int *count, svn_fs_t *fs, + svn_revnum_t revision, apr_pool_t *pool) +{ + svn_fs_root_t *root; + apr_hash_t *changes; + apr_hash_index_t *hi; + apr_pool_t *subpool; + + *count = 0; + SVN_ERR(svn_fs_revision_root(&root, fs, revision, pool)); + SVN_ERR(svn_fs_paths_changed2(&changes, root, pool)); + + /* No paths changed in this revision? Nothing to do. */ + if (apr_hash_count(changes) == 0) + return SVN_NO_ERROR; + + subpool = svn_pool_create(pool); + for (hi = apr_hash_first(pool, changes); hi; hi = apr_hash_next(hi)) + { + const void *path; + void *val; + svn_fs_path_change2_t *change; + + svn_pool_clear(subpool); + apr_hash_this(hi, &path, NULL, &val); + change = val; + if ((change->change_kind == svn_fs_path_change_add) + || (change->change_kind == svn_fs_path_change_replace)) + { + if (! (change->copyfrom_path + && SVN_IS_VALID_REVNUM(change->copyfrom_rev))) + { + svn_revnum_t origin; + SVN_ERR(svn_fs_node_origin_rev(&origin, root, path, subpool)); + (*count)++; + } + } + } + svn_pool_destroy(subpool); + + return SVN_NO_ERROR; +} + +/* Build the node-origins index for the repository located at REPOS_PATH. 
*/ +static svn_error_t * +build_index(const char *repos_path, apr_pool_t *pool) +{ + svn_repos_t *repos; + svn_fs_t *fs; + svn_revnum_t youngest_rev, i; + size_t slotsize; + const char *progress_fmt; + apr_pool_t *subpool; + + /* Open the repository. */ + SVN_ERR(svn_repos_open2(&repos, repos_path, NULL, pool)); + + /* Get a filesystem object. */ + fs = svn_repos_fs(repos); + + /* Fetch the youngest revision of the repository. */ + SVN_ERR(svn_fs_youngest_rev(&youngest_rev, fs, pool)); + slotsize = strlen(apr_ltoa(pool, youngest_rev)); + progress_fmt = apr_psprintf + (pool, + "[%%%" APR_SIZE_T_FMT "ld" + "/%%%" APR_SIZE_T_FMT "ld] " + "Found %%d new lines of history." + "\n", slotsize, slotsize); + + /* Now, iterate over all the revisions, calling index_revision_adds(). */ + subpool = svn_pool_create(pool); + for (i = 0; i < youngest_rev; i++) + { + int count; + svn_pool_clear(subpool); + SVN_ERR(index_revision_adds(&count, fs, i + 1, subpool)); + printf(progress_fmt, i + 1, youngest_rev, count); + } + svn_pool_destroy(subpool); + + return SVN_NO_ERROR; +} + + +int +main(int argc, const char **argv) +{ + apr_pool_t *pool; + svn_error_t *err = SVN_NO_ERROR; + const char *repos_path; + + /* Initialize the app. Send all error messages to 'stderr'. */ + if (svn_cmdline_init(argv[0], stderr) == EXIT_FAILURE) + return EXIT_FAILURE; + + pool = svn_pool_create(NULL); + + if (argc <= 1) + { + usage_maybe_with_err(argv[0], "Not enough arguments."); + goto cleanup; + } + + /* Convert argv[1] into a UTF8, internal-format, canonicalized path. 
*/ + if ((err = svn_utf_cstring_to_utf8(&repos_path, argv[1], pool))) + goto cleanup; + repos_path = svn_dirent_internal_style(repos_path, pool); + repos_path = svn_dirent_canonicalize(repos_path, pool); + + if ((err = build_index(repos_path, pool))) + goto cleanup; + + cleanup: + svn_pool_destroy(pool); + + if (err) + { + svn_handle_error2(err, stderr, FALSE, + "svn-populate-node-origins-index: "); + return EXIT_FAILURE; + } + return EXIT_SUCCESS; +} diff --git a/tools/server-side/svn-rep-sharing-stats.c b/tools/server-side/svn-rep-sharing-stats.c new file mode 100644 index 0000000..e57ff91 --- /dev/null +++ b/tools/server-side/svn-rep-sharing-stats.c @@ -0,0 +1,535 @@ +/* + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ * ==================================================================== + */ + +#include <apr_signal.h> + +#include "svn_cmdline.h" +#include "svn_dirent_uri.h" +#include "svn_pools.h" +#include "svn_repos.h" +#include "svn_opt.h" +#include "svn_utf.h" +#include "svn_version.h" + +#include "../../subversion/libsvn_fs_fs/fs.h" +#include "../../subversion/libsvn_fs_fs/fs_fs.h" +/* for svn_fs_fs__id_* (used in assertions only) */ +#include "../../subversion/libsvn_fs_fs/id.h" + +#include "svn_private_config.h" + + +/** Help messages and version checking. **/ + +static svn_error_t * +version(apr_pool_t *pool) +{ + return svn_opt_print_help3(NULL, "svn-rep-sharing-stats", TRUE, FALSE, NULL, + NULL, NULL, NULL, NULL, NULL, pool); +} + +static void +usage(apr_pool_t *pool) +{ + svn_error_clear(svn_cmdline_fprintf + (stderr, pool, + _("Type 'svn-rep-sharing-stats --help' for usage.\n"))); +} + + +static void +help(const apr_getopt_option_t *options, apr_pool_t *pool) +{ + svn_error_clear + (svn_cmdline_fprintf + (stdout, pool, + _("usage: svn-rep-sharing-stats [OPTIONS] REPOS_PATH\n\n" + " Prints the reference count statistics for representations\n" + " in an FSFS repository.\n" + "\n" + " At least one of the options --data/--prop/--both must be specified.\n" + "\n" + "Valid options:\n"))); + while (options->description) + { + const char *optstr; + svn_opt_format_option(&optstr, options, TRUE, pool); + svn_error_clear(svn_cmdline_fprintf(stdout, pool, " %s\n", optstr)); + ++options; + } + svn_error_clear(svn_cmdline_fprintf(stdout, pool, "\n")); + exit(0); +} + + +/* Version compatibility check */ +static svn_error_t * +check_lib_versions(void) +{ + static const svn_version_checklist_t checklist[] = + { + /* ### check FSFS version */ + { "svn_subr", svn_subr_version }, + { "svn_fs", svn_fs_version }, + { NULL, NULL } + }; + + SVN_VERSION_DEFINE(my_version); + return svn_error_trace(svn_ver_check_list(&my_version, checklist)); +} + + + +/** Cancellation stuff, ### copied 
from subversion/svn/main.c */ + +/* A flag to see if we've been cancelled by the client or not. */ +static volatile sig_atomic_t cancelled = FALSE; + +/* A signal handler to support cancellation. */ +static void +signal_handler(int signum) +{ + apr_signal(signum, SIG_IGN); + cancelled = TRUE; +} + +/* Our cancellation callback. */ +static svn_error_t * +svn_cl__check_cancel(void *baton) +{ + if (cancelled) + return svn_error_create(SVN_ERR_CANCELLED, NULL, _("Caught signal")); + else + return SVN_NO_ERROR; +} + +static svn_cancel_func_t cancel_func = svn_cl__check_cancel; + +static void set_up_cancellation(void) +{ + /* Set up our cancellation support. */ + apr_signal(SIGINT, signal_handler); +#ifdef SIGBREAK + /* SIGBREAK is a Win32 specific signal generated by ctrl-break. */ + apr_signal(SIGBREAK, signal_handler); +#endif +#ifdef SIGHUP + apr_signal(SIGHUP, signal_handler); +#endif +#ifdef SIGTERM + apr_signal(SIGTERM, signal_handler); +#endif + +#ifdef SIGPIPE + /* Disable SIGPIPE generation for the platforms that have it. */ + apr_signal(SIGPIPE, SIG_IGN); +#endif + +#ifdef SIGXFSZ + /* Disable SIGXFSZ generation for the platforms that have it, otherwise + * working with large files when compiled against an APR that doesn't have + * large file support will crash the program, which is uncool. */ + apr_signal(SIGXFSZ, SIG_IGN); +#endif +} + + +/** Program-specific code. **/ +enum { + OPT_VERSION = SVN_OPT_FIRST_LONGOPT_ID, + OPT_DATA, + OPT_PROP, + OPT_BOTH +}; + +static svn_error_t *check_experimental(void) +{ + if (getenv("SVN_REP_SHARING_STATS_IS_EXPERIMENTAL")) + return SVN_NO_ERROR; + + return svn_error_create(APR_EGENERAL, NULL, + "This code is experimental and should not " + "be used on live data."); +} + +/* The parts of a rep that determine whether it's being shared. */ +struct key_t +{ + svn_revnum_t revision; + apr_off_t offset; +}; + +/* What we need to know about a rep. 
*/ +struct value_t +{ + svn_checksum_t *sha1_checksum; + apr_uint64_t refcount; +}; + +/* Increment records[rep] if both are non-NULL and REP contains a sha1. + * Allocate keys and values in RESULT_POOL. + */ +static svn_error_t *record(apr_hash_t *records, + representation_t *rep, + apr_pool_t *result_pool) +{ + struct key_t *key; + struct value_t *value; + + /* Skip if we ignore this particular kind of reps, or if the rep doesn't + * exist or doesn't have the checksum we are after. (The latter case + * often corresponds to node_rev->kind == svn_node_dir.) + */ + if (records == NULL || rep == NULL || rep->sha1_checksum == NULL) + return SVN_NO_ERROR; + + /* Construct the key. + * + * Must use calloc() because apr_hash_* pay attention to padding bytes too. + */ + key = apr_pcalloc(result_pool, sizeof(*key)); + key->revision = rep->revision; + key->offset = rep->offset; + + /* Update or create the value. */ + if ((value = apr_hash_get(records, key, sizeof(*key)))) + { + /* Paranoia. */ + SVN_ERR_ASSERT(value->sha1_checksum != NULL); + SVN_ERR_ASSERT(svn_checksum_match(value->sha1_checksum, + rep->sha1_checksum)); + /* Real work. */ + value->refcount++; + } + else + { + value = apr_palloc(result_pool, sizeof(*value)); + value->sha1_checksum = svn_checksum_dup(rep->sha1_checksum, result_pool); + value->refcount = 1; + } + + /* Store them. */ + apr_hash_set(records, key, sizeof(*key), value); + + return SVN_NO_ERROR; +} + +/* Inspect the data and/or prop reps of revision REVNUM in FS. Store + * reference count tallies in passed hashes (allocated in RESULT_POOL). + * + * If PROP_REPS or DATA_REPS is NULL, the respective kind of reps are not + * tallied. + * + * Print progress report to STDERR unless QUIET is true. + * + * Use SCRATCH_POOL for temporary allocations. 
+ */ +static svn_error_t * +process_one_revision(svn_fs_t *fs, + svn_revnum_t revnum, + svn_boolean_t quiet, + apr_hash_t *prop_reps, + apr_hash_t *data_reps, + apr_hash_t *both_reps, + apr_pool_t *result_pool, + apr_pool_t *scratch_pool) +{ + svn_fs_root_t *rev_root; + apr_hash_t *paths_changed; + apr_hash_index_t *hi; + + if (! quiet) + SVN_ERR(svn_cmdline_fprintf(stderr, scratch_pool, + "processing r%ld\n", revnum)); + + /* Get the changed paths. */ + SVN_ERR(svn_fs_revision_root(&rev_root, fs, revnum, scratch_pool)); + SVN_ERR(svn_fs_paths_changed2(&paths_changed, rev_root, scratch_pool)); + + /* Iterate them. */ + /* ### use iterpool? */ + for (hi = apr_hash_first(scratch_pool, paths_changed); + hi; hi = apr_hash_next(hi)) + { + const char *path; + const svn_fs_path_change2_t *change; + const svn_fs_id_t *node_rev_id1, *node_rev_id2; + const svn_fs_id_t *the_id; + + node_revision_t *node_rev; + + path = svn__apr_hash_index_key(hi); + change = svn__apr_hash_index_val(hi); + if (! quiet) + SVN_ERR(svn_cmdline_fprintf(stderr, scratch_pool, + "processing r%ld:%s\n", revnum, path)); + + if (change->change_kind == svn_fs_path_change_delete) + /* Can't ask for reps of PATH at REVNUM if the path no longer exists + * at that revision! */ + continue; + + /* Okay, we have two node_rev id's for this change: the txn one and + * the revision one. We'll use the latter. */ + node_rev_id1 = change->node_rev_id; + SVN_ERR(svn_fs_node_id(&node_rev_id2, rev_root, path, scratch_pool)); + + SVN_ERR_ASSERT(svn_fs_fs__id_txn_id(node_rev_id1) != NULL); + SVN_ERR_ASSERT(svn_fs_fs__id_rev(node_rev_id2) != SVN_INVALID_REVNUM); + + the_id = node_rev_id2; + + /* Get the node_rev using the chosen node_rev_id. */ + SVN_ERR(svn_fs_fs__get_node_revision(&node_rev, fs, the_id, scratch_pool)); + + /* Maybe record the sha1's. 
*/ + SVN_ERR(record(prop_reps, node_rev->prop_rep, result_pool)); + SVN_ERR(record(data_reps, node_rev->data_rep, result_pool)); + SVN_ERR(record(both_reps, node_rev->prop_rep, result_pool)); + SVN_ERR(record(both_reps, node_rev->data_rep, result_pool)); + } + + return SVN_NO_ERROR; +} + +/* Print REPS_REF_COUNT (a hash as for process_one_revision()) + * to stdout in "refcount => sha1" format. A sha1 may appear + * more than once if not all its instances are shared. Prepend + * each line by NAME. + * + * Use SCRATCH_POOL for temporary allocations. + */ +static svn_error_t * +pretty_print(const char *name, + apr_hash_t *reps_ref_counts, + apr_pool_t *scratch_pool) +{ + apr_hash_index_t *hi; + + if (reps_ref_counts == NULL) + return SVN_NO_ERROR; + + for (hi = apr_hash_first(scratch_pool, reps_ref_counts); + hi; hi = apr_hash_next(hi)) + { + struct value_t *value; + + SVN_ERR(cancel_func(NULL)); + + value = svn__apr_hash_index_val(hi); + SVN_ERR(svn_cmdline_printf(scratch_pool, "%s %" APR_UINT64_T_FMT " %s\n", + name, value->refcount, + svn_checksum_to_cstring_display( + value->sha1_checksum, + scratch_pool))); + } + + return SVN_NO_ERROR; +} + +/* Return an error unless FS is an fsfs fs. */ +static svn_error_t *is_fs_fsfs(svn_fs_t *fs, apr_pool_t *scratch_pool) +{ + const char *actual, *expected, *path; + + path = svn_fs_path(fs, scratch_pool); + + expected = SVN_FS_TYPE_FSFS; + SVN_ERR(svn_fs_type(&actual, path, scratch_pool)); + + if (strcmp(actual, expected) != 0) + return svn_error_createf(SVN_ERR_FS_UNKNOWN_FS_TYPE, NULL, + "Filesystem '%s' is not of type '%s'", + svn_dirent_local_style(path, scratch_pool), + actual); + + return SVN_NO_ERROR; +} + +/* The core logic. This function iterates the repository REPOS_PATH + * and sends all the (DATA and/or PROP) reps in each revision for counting + * by process_one_revision(). QUIET is passed to process_one_revision(). 
+ */ +static svn_error_t *process(const char *repos_path, + svn_boolean_t prop, + svn_boolean_t data, + svn_boolean_t quiet, + apr_pool_t *scratch_pool) +{ + apr_hash_t *prop_reps = NULL; + apr_hash_t *data_reps = NULL; + apr_hash_t *both_reps = NULL; + svn_revnum_t rev, youngest; + apr_pool_t *iterpool; + svn_repos_t *repos; + svn_fs_t *fs; + + if (prop) + prop_reps = apr_hash_make(scratch_pool); + if (data) + data_reps = apr_hash_make(scratch_pool); + if (prop && data) + both_reps = apr_hash_make(scratch_pool); + + /* Open the FS. */ + SVN_ERR(svn_repos_open2(&repos, repos_path, NULL, scratch_pool)); + fs = svn_repos_fs(repos); + + SVN_ERR(is_fs_fsfs(fs, scratch_pool)); + + SVN_ERR(svn_fs_youngest_rev(&youngest, fs, scratch_pool)); + + /* Iterate the revisions. */ + iterpool = svn_pool_create(scratch_pool); + for (rev = 0; rev <= youngest; rev++) + { + svn_pool_clear(iterpool); + SVN_ERR(cancel_func(NULL)); + SVN_ERR(process_one_revision(fs, rev, quiet, + prop_reps, data_reps, both_reps, + scratch_pool, iterpool)); + } + svn_pool_destroy(iterpool); + + /* Print stats. */ + SVN_ERR(pretty_print("prop", prop_reps, scratch_pool)); + SVN_ERR(pretty_print("data", data_reps, scratch_pool)); + SVN_ERR(pretty_print("both", both_reps, scratch_pool)); + + return SVN_NO_ERROR; +} + +int +main(int argc, const char *argv[]) +{ + const char *repos_path; + apr_allocator_t *allocator; + apr_pool_t *pool; + svn_boolean_t prop = FALSE, data = FALSE; + svn_boolean_t quiet = FALSE; + svn_error_t *err; + apr_getopt_t *os; + const apr_getopt_option_t options[] = + { + {"data", OPT_DATA, 0, N_("display data reps stats")}, + {"prop", OPT_PROP, 0, N_("display prop reps stats")}, + {"both", OPT_BOTH, 0, N_("display combined (data+prop) reps stats")}, + {"quiet", 'q', 0, N_("no progress (only errors) to stderr")}, + {"help", 'h', 0, N_("display this help")}, + {"version", OPT_VERSION, 0, + N_("show program version information")}, + {0, 0, 0, 0} + }; + + /* Initialize the app. 
*/ + if (svn_cmdline_init("svn-rep-sharing-stats", stderr) != EXIT_SUCCESS) + return EXIT_FAILURE; + + /* Create our top-level pool. Use a separate mutexless allocator, + * given this application is single threaded. + */ + if (apr_allocator_create(&allocator)) + return EXIT_FAILURE; + + apr_allocator_max_free_set(allocator, SVN_ALLOCATOR_RECOMMENDED_MAX_FREE); + + pool = svn_pool_create_ex(NULL, allocator); + apr_allocator_owner_set(allocator, pool); + + /* Check library versions */ + err = check_lib_versions(); + if (err) + return svn_cmdline_handle_exit_error(err, pool, "svn-rep-sharing-stats: "); + + err = svn_cmdline__getopt_init(&os, argc, argv, pool); + if (err) + return svn_cmdline_handle_exit_error(err, pool, "svn-rep-sharing-stats: "); + + SVN_INT_ERR(check_experimental()); + + os->interleave = 1; + while (1) + { + int opt; + const char *arg; + apr_status_t status = apr_getopt_long(os, options, &opt, &arg); + if (APR_STATUS_IS_EOF(status)) + break; + if (status != APR_SUCCESS) + { + usage(pool); + return EXIT_FAILURE; + } + switch (opt) + { + case OPT_DATA: + data = TRUE; + break; + /* It seems we don't actually rep-share props yet. */ + case OPT_PROP: + prop = TRUE; + break; + case OPT_BOTH: + data = TRUE; + prop = TRUE; + break; + case 'q': + quiet = TRUE; + break; + case 'h': + help(options, pool); + break; + case OPT_VERSION: + SVN_INT_ERR(version(pool)); + exit(0); + break; + default: + usage(pool); + return EXIT_FAILURE; + } + } + + /* Exactly 1 non-option argument, + * and at least one of "--data"/"--prop"/"--both". + */ + if (os->ind + 1 != argc || (!data && !prop)) + { + usage(pool); + return EXIT_FAILURE; + } + + /* Grab REPOS_PATH from argv. */ + SVN_INT_ERR(svn_utf_cstring_to_utf8(&repos_path, os->argv[os->ind], pool)); + repos_path = svn_dirent_internal_style(repos_path, pool); + + set_up_cancellation(); + + /* Do something. */ + SVN_INT_ERR(process(repos_path, prop, data, quiet, pool)); + + /* We're done. 
*/ + + svn_pool_destroy(pool); + /* Flush stdout to make sure that the user will see any printing errors. */ + SVN_INT_ERR(svn_cmdline_fflush(stdout)); + + return EXIT_SUCCESS; +} diff --git a/tools/server-side/svn_server_log_parse.py b/tools/server-side/svn_server_log_parse.py new file mode 100755 index 0000000..5ecb104 --- /dev/null +++ b/tools/server-side/svn_server_log_parse.py @@ -0,0 +1,460 @@ +#!/usr/bin/python + +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +# TODO: Teach parse_open about capabilities, rather than allowing any +# words at all. + +"""Parse subversion server operational logs. + +SVN-ACTION strings +------------------ + +Angle brackets denote a variable, e.g. 'commit r<N>' means you'll see +lines like 'commit r17' for this action. + +<N> and <M> are revision numbers. + +<PATH>, <FROM-PATH>, and <TO-PATH> mean a URI-encoded path relative to +the repository root, including a leading '/'. + +<REVPROP> means a revision property, e.g. 'svn:log'. + +<I> represents a svn_mergeinfo_inheritance_t value and is one of these +words: explicit inherited nearest-ancestor. 
+ +<D> represents a svn_depth_t value and is one of these words: empty +files immediates infinity. If the depth value for the operation was +svn_depth_unknown, the depth= portion is absent entirely. + +The get-mergeinfo and log actions use lists for paths and revprops. +The lists are enclosed in parentheses and each item is separated by a +space (spaces in paths are encoded as %20). + +The words will *always* be in this order, though some may be absent. + +General:: + + change-rev-prop r<N> <REVPROP> + commit r<N> + get-dir <PATH> r<N> text? props? + get-file <PATH> r<N> text? props? + lock (<PATH> ...) steal? + rev-proplist r<N> + unlock (<PATH> ...) break? + +Reports:: + + get-file-revs <PATH> r<N>:<M> include-merged-revisions? + get-mergeinfo (<PATH> ...) <I> include-descendants? + log (<PATH> ...) r<N>:<M> limit=<N>? discover-changed-paths? strict? include-merged-revisions? revprops=all|(<REVPROP> ...)? + replay <PATH> r<N> + +The update report:: + + checkout-or-export <PATH> r<N> depth=<D>? + diff <FROM-PATH>@<N> <TO-PATH>@<M> depth=<D>? ignore-ancestry? + diff <PATH> r<N>:<M> depth=<D>? ignore-ancestry? + status <PATH> r<N> depth=<D>? + switch <FROM-PATH> <TO-PATH>@<N> depth=<D>? + update <PATH> r<N> depth=<D>? send-copyfrom-args? +""" + + +import re +try: + # Python >=3.0 + from urllib.parse import unquote as urllib_parse_unquote +except ImportError: + # Python <3.0 + from urllib import unquote as urllib_parse_unquote + +import svn.core + +# +# Valid words for _parse_depth and _parse_mergeinfo_inheritance +# + +DEPTH_WORDS = ['empty', 'files', 'immediates', 'infinity'] +INHERITANCE_WORDS = { + 'explicit': svn.core.svn_mergeinfo_explicit, + 'inherited': svn.core.svn_mergeinfo_inherited, + 'nearest-ancestor': svn.core.svn_mergeinfo_nearest_ancestor, +} + +# +# Patterns for _match +# + +# <PATH> +pPATH = r'(/\S*)' +# (<PATH> ...) +pPATHS = r'\(([^)]*)\)' +# r<N> +pREVNUM = r'r(\d+)' +# (<N> ...) 
+pREVNUMS = r'\(((\d+\s*)*)\)' +# r<N>:<M> +pREVRANGE = r'r(-?\d+):(-?\d+)' +# <PATH>@<N> +pPATHREV = pPATH + r'@(\d+)' +pWORD = r'(\S+)' +pPROPERTY = pWORD +# depth=<D>? +pDEPTH = 'depth=' + pWORD + +# +# Exceptions +# + +class Error(Exception): pass +class BadDepthError(Error): + def __init__(self, value): + Error.__init__(self, 'bad svn_depth_t value ' + value) +class BadMergeinfoInheritanceError(Error): + def __init__(self, value): + Error.__init__(self, 'bad svn_mergeinfo_inheritance_t value ' + value) +class MatchError(Error): + def __init__(self, pattern, line): + Error.__init__(self, '/%s/ does not match log line:\n%s' + % (pattern, line)) + + +# +# Helper functions +# + +# TODO: Move to kitchensink.c like svn_depth_from_word? +try: + from svn.core import svn_inheritance_from_word +except ImportError: + def svn_inheritance_from_word(word): + try: + return INHERITANCE_WORDS[word] + except KeyError: + # XXX svn_inheritance_to_word uses explicit as default so... + return svn.core.svn_mergeinfo_explicit + +def _parse_depth(word): + if word is None: + return svn.core.svn_depth_unknown + if word not in DEPTH_WORDS: + raise BadDepthError(word) + return svn.core.svn_depth_from_word(word) + +def _parse_mergeinfo_inheritance(word): + if word not in INHERITANCE_WORDS: + raise BadMergeinfoInheritanceError(word) + return svn_inheritance_from_word(word) + +def _match(line, *patterns): + """Return a re.match object from matching patterns against line. + + All optional arguments must be strings suitable for ''.join()ing + into a single pattern string for re.match. The last optional + argument may instead be a list of such strings, which will be + joined into the final pattern as *optional* matches. + + Raises: + Error -- if re.match returns None (i.e. no match) + """ + if isinstance(patterns[-1], list): + optional = patterns[-1] + patterns = patterns[:-1] + else: + optional = [] + pattern = r'\s+'.join(patterns) + pattern += ''.join([r'(\s+' + x + ')?' 
for x in optional]) + m = re.match(pattern, line) + if m is None: + raise MatchError(pattern, line) + return m + + +class Parser(object): + """Subclass this and define the handle_ methods according to the + "SVN-ACTION strings" section of this module's documentation. For + example, "lock <PATH> steal?" => def handle_lock(self, path, steal) + where steal will be True if "steal" was present. + + See the end of test_svn_server_log_parse.py for a complete example. + """ + def parse(self, line): + """Parse line and call appropriate handle_ method. + + Returns one of: + - line remaining after the svn action, if one was parsed + - whatever your handle_unknown implementation returns + + Raises: + BadDepthError -- for bad svn_depth_t values + BadMergeinfoInheritanceError -- for bad svn_mergeinfo_inheritance_t + values + Error -- any other parse error + """ + self.line = line + words = self.split_line = line.split(' ') + try: + method = getattr(self, '_parse_' + words[0].replace('-', '_')) + except AttributeError: + return self.handle_unknown(self.line) + return method(' '.join(words[1:])) + + def _parse_commit(self, line): + m = _match(line, pREVNUM) + self.handle_commit(int(m.group(1))) + return line[m.end():] + + def _parse_open(self, line): + pINT = r'(\d+)' + pCAP = r'cap=\(([^)]*)\)' + pCLIENT = pWORD + m = _match(line, pINT, pCAP, pPATH, pCLIENT, pCLIENT) + protocol = int(m.group(1)) + if m.group(2) is None: + capabilities = [] + else: + capabilities = m.group(2).split() + path = m.group(3) + ra_client = urllib_parse_unquote(m.group(4)) + client = urllib_parse_unquote(m.group(5)) + self.handle_open(protocol, capabilities, path, ra_client, client) + return line[m.end():] + + def _parse_reparent(self, line): + m = _match(line, pPATH) + self.handle_reparent(urllib_parse_unquote(m.group(1))) + return line[m.end():] + + def _parse_get_latest_rev(self, line): + self.handle_get_latest_rev() + return line + + def _parse_get_dated_rev(self, line): + m = _match(line, pWORD) + 
self.handle_get_dated_rev(m.group(1)) + return line[m.end():] + + def _parse_get_dir(self, line): + m = _match(line, pPATH, pREVNUM, ['text', 'props']) + self.handle_get_dir(urllib_parse_unquote(m.group(1)), int(m.group(2)), + m.group(3) is not None, + m.group(4) is not None) + return line[m.end():] + + def _parse_get_file(self, line): + m = _match(line, pPATH, pREVNUM, ['text', 'props']) + self.handle_get_file(urllib_parse_unquote(m.group(1)), int(m.group(2)), + m.group(3) is not None, + m.group(4) is not None) + return line[m.end():] + + def _parse_lock(self, line): + m = _match(line, pPATHS, ['steal']) + paths = [urllib_parse_unquote(x) for x in m.group(1).split()] + self.handle_lock(paths, m.group(2) is not None) + return line[m.end():] + + def _parse_change_rev_prop(self, line): + m = _match(line, pREVNUM, pPROPERTY) + self.handle_change_rev_prop(int(m.group(1)), + urllib_parse_unquote(m.group(2))) + return line[m.end():] + + def _parse_rev_proplist(self, line): + m = _match(line, pREVNUM) + self.handle_rev_proplist(int(m.group(1))) + return line[m.end():] + + def _parse_rev_prop(self, line): + m = _match(line, pREVNUM, pPROPERTY) + self.handle_rev_prop(int(m.group(1)), urllib_parse_unquote(m.group(2))) + return line[m.end():] + + def _parse_unlock(self, line): + m = _match(line, pPATHS, ['break']) + paths = [urllib_parse_unquote(x) for x in m.group(1).split()] + self.handle_unlock(paths, m.group(2) is not None) + return line[m.end():] + + def _parse_get_lock(self, line): + m = _match(line, pPATH) + self.handle_get_lock(urllib_parse_unquote(m.group(1))) + return line[m.end():] + + def _parse_get_locks(self, line): + m = _match(line, pPATH) + self.handle_get_locks(urllib_parse_unquote(m.group(1))) + return line[m.end():] + + def _parse_get_locations(self, line): + m = _match(line, pPATH, pREVNUMS) + path = urllib_parse_unquote(m.group(1)) + revnums = [int(x) for x in m.group(2).split()] + self.handle_get_locations(path, revnums) + return line[m.end():] + + def 
_parse_get_location_segments(self, line): + m = _match(line, pPATHREV, pREVRANGE) + path = urllib_parse_unquote(m.group(1)) + peg = int(m.group(2)) + left = int(m.group(3)) + right = int(m.group(4)) + self.handle_get_location_segments(path, peg, left, right) + return line[m.end():] + + def _parse_get_file_revs(self, line): + m = _match(line, pPATH, pREVRANGE, ['include-merged-revisions']) + path = urllib_parse_unquote(m.group(1)) + left = int(m.group(2)) + right = int(m.group(3)) + include_merged_revisions = m.group(4) is not None + self.handle_get_file_revs(path, left, right, include_merged_revisions) + return line[m.end():] + + def _parse_get_mergeinfo(self, line): + # <I> + pMERGEINFO_INHERITANCE = pWORD + pINCLUDE_DESCENDANTS = pWORD + m = _match(line, + pPATHS, pMERGEINFO_INHERITANCE, ['include-descendants']) + paths = [urllib_parse_unquote(x) for x in m.group(1).split()] + inheritance = _parse_mergeinfo_inheritance(m.group(2)) + include_descendants = m.group(3) is not None + self.handle_get_mergeinfo(paths, inheritance, include_descendants) + return line[m.end():] + + def _parse_log(self, line): + # limit=<N>? + pLIMIT = r'limit=(\d+)' + # revprops=all|(<REVPROP> ...)? 
+ pREVPROPS = r'revprops=(all|\(([^)]+)\))' + m = _match(line, pPATHS, pREVRANGE, + [pLIMIT, 'discover-changed-paths', 'strict', + 'include-merged-revisions', pREVPROPS]) + paths = [urllib_parse_unquote(x) for x in m.group(1).split()] + left = int(m.group(2)) + right = int(m.group(3)) + if m.group(5) is None: + limit = 0 + else: + limit = int(m.group(5)) + discover_changed_paths = m.group(6) is not None + strict = m.group(7) is not None + include_merged_revisions = m.group(8) is not None + if m.group(10) == 'all': + revprops = None + else: + if m.group(11) is None: + revprops = [] + else: + revprops = [urllib_parse_unquote(x) for x in m.group(11).split()] + self.handle_log(paths, left, right, limit, discover_changed_paths, + strict, include_merged_revisions, revprops) + return line[m.end():] + + def _parse_check_path(self, line): + m = _match(line, pPATHREV) + path = urllib_parse_unquote(m.group(1)) + revnum = int(m.group(2)) + self.handle_check_path(path, revnum) + return line[m.end():] + + def _parse_stat(self, line): + m = _match(line, pPATHREV) + path = urllib_parse_unquote(m.group(1)) + revnum = int(m.group(2)) + self.handle_stat(path, revnum) + return line[m.end():] + + def _parse_replay(self, line): + m = _match(line, pPATH, pREVNUM) + path = urllib_parse_unquote(m.group(1)) + revision = int(m.group(2)) + self.handle_replay(path, revision) + return line[m.end():] + + # the update report + + def _parse_checkout_or_export(self, line): + m = _match(line, pPATH, pREVNUM, [pDEPTH]) + path = urllib_parse_unquote(m.group(1)) + revision = int(m.group(2)) + depth = _parse_depth(m.group(4)) + self.handle_checkout_or_export(path, revision, depth) + return line[m.end():] + + def _parse_diff(self, line): + # First, try 1-path form. + try: + m = _match(line, pPATH, pREVRANGE, [pDEPTH, 'ignore-ancestry']) + f = self._parse_diff_1path + except Error: + # OK, how about 2-path form? 
+ m = _match(line, pPATHREV, pPATHREV, [pDEPTH, 'ignore-ancestry']) + f = self._parse_diff_2paths + return f(line, m) + + def _parse_diff_1path(self, line, m): + path = urllib_parse_unquote(m.group(1)) + left = int(m.group(2)) + right = int(m.group(3)) + depth = _parse_depth(m.group(5)) + ignore_ancestry = m.group(6) is not None + self.handle_diff_1path(path, left, right, + depth, ignore_ancestry) + return line[m.end():] + + def _parse_diff_2paths(self, line, m): + from_path = urllib_parse_unquote(m.group(1)) + from_rev = int(m.group(2)) + to_path = urllib_parse_unquote(m.group(3)) + to_rev = int(m.group(4)) + depth = _parse_depth(m.group(6)) + ignore_ancestry = m.group(7) is not None + self.handle_diff_2paths(from_path, from_rev, to_path, to_rev, + depth, ignore_ancestry) + return line[m.end():] + + def _parse_status(self, line): + m = _match(line, pPATH, pREVNUM, [pDEPTH]) + path = urllib_parse_unquote(m.group(1)) + revision = int(m.group(2)) + depth = _parse_depth(m.group(4)) + self.handle_status(path, revision, depth) + return line[m.end():] + + def _parse_switch(self, line): + m = _match(line, pPATH, pPATHREV, [pDEPTH]) + from_path = urllib_parse_unquote(m.group(1)) + to_path = urllib_parse_unquote(m.group(2)) + to_rev = int(m.group(3)) + depth = _parse_depth(m.group(5)) + self.handle_switch(from_path, to_path, to_rev, depth) + return line[m.end():] + + def _parse_update(self, line): + m = _match(line, pPATH, pREVNUM, [pDEPTH, 'send-copyfrom-args']) + path = urllib_parse_unquote(m.group(1)) + revision = int(m.group(2)) + depth = _parse_depth(m.group(4)) + send_copyfrom_args = m.group(5) is not None + self.handle_update(path, revision, depth, send_copyfrom_args) + return line[m.end():] diff --git a/tools/server-side/svnauthz-validate.c b/tools/server-side/svnauthz-validate.c new file mode 100644 index 0000000..df7d541 --- /dev/null +++ b/tools/server-side/svnauthz-validate.c @@ -0,0 +1,76 @@ +/* + * svnauthz-validate.c : Load and validate an authz file. 
+ * + * ==================================================================== + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + * ==================================================================== + * + * + * svnauthz-validate.c : load and validate an authz file, returns + * value == 0 if syntax of authz file is correct + * value == 1 if syntax of authz file is invalid or file not found + * value == 2 in case of general error + * + */ + +#include "svn_pools.h" +#include "svn_repos.h" +#include "svn_cmdline.h" + +int +main(int argc, const char **argv) +{ + apr_pool_t *pool; + svn_error_t *err; + svn_authz_t *authz; + const char *authz_file; + + if (argc <= 1) + { + printf("Usage: %s PATH \n\n", argv[0]); + printf("Loads the authz file at PATH and validates its syntax. \n" + "Returns:\n" + " 0 when syntax is OK.\n" + " 1 when syntax is invalid.\n" + " 2 operational error\n"); + return 2; + } + + authz_file = argv[1]; + + /* Initialize the app. Send all error messages to 'stderr'. */ + if (svn_cmdline_init(argv[0], stderr) != EXIT_SUCCESS) + return 2; + + pool = svn_pool_create(NULL); + + /* Read the access file and validate it. 
*/ + err = svn_repos_authz_read(&authz, authz_file, TRUE, pool); + + svn_pool_destroy(pool); + + if (err) + { + svn_handle_error2(err, stderr, FALSE, "svnauthz-validate: "); + return 1; + } + else + { + return 0; + } +} diff --git a/tools/server-side/test_svn_server_log_parse.py b/tools/server-side/test_svn_server_log_parse.py new file mode 100755 index 0000000..2fa8759 --- /dev/null +++ b/tools/server-side/test_svn_server_log_parse.py @@ -0,0 +1,611 @@ +#!/usr/bin/python + +# ==================================================================== +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. +# ==================================================================== + +# Run this without arguments to run unit tests. +# Run with a path to a davautocheck ops log to test that it can parse that. + +import os +import re +import sys +import tempfile +try: + # Python >=3.0 + from urllib.parse import quote as urllib_parse_quote +except ImportError: + # Python <3.0 + from urllib import quote as urllib_parse_quote +import unittest + +import svn.core + +import svn_server_log_parse + +class TestCase(unittest.TestCase): + def setUp(self): + # Define a class to stuff everything passed to any handle_ + # method into self.result. 
+ class cls(svn_server_log_parse.Parser): + def __getattr__(cls_self, attr): + if attr.startswith('handle_'): + return lambda *a: setattr(self, 'result', a) + raise AttributeError + self.parse = cls().parse + + def test_unknown(self): + line = 'unknown log line' + self.parse(line) + self.assertEqual(self.result, (line,)) + + def test_open(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'open') + self.assertRaises(svn_server_log_parse.Error, self.parse, 'open 2 cap / SVN/1.60. fooclient') + self.assertRaises(svn_server_log_parse.Error, self.parse, 'open a cap=() / SVN/1.60. fooclient') + self.assertEqual(self.parse('open 2 cap=() / SVN fooclient'), '') + self.assertEqual(self.result, (2, [], '/', 'SVN', 'fooclient')) + # TODO: Teach it about the capabilities, rather than allowing + # any words at all. + self.assertEqual(self.parse('open 2 cap=(foo) / SVN foo%20client'), '') + self.assertEqual(self.result, (2, ['foo'], '/', 'SVN', 'foo client')) + + def test_reparent(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'reparent') + self.assertEqual(self.parse('reparent /'), '') + self.assertEqual(self.result, ('/',)) + + def test_get_latest_rev(self): + self.assertEqual(self.parse('get-latest-rev'), '') + self.assertEqual(self.result, ()) + self.assertEqual(self.parse('get-latest-rev r3'), 'r3') + self.assertEqual(self.result, ()) + + def test_get_dated_rev(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, + 'get-dated-rev') + self.assertEqual(self.parse('get-dated-rev 2008-04-15T20:41:24.000000Z'), '') + self.assertEqual(self.result, ('2008-04-15T20:41:24.000000Z',)) + + def test_commit(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'commit') + self.assertRaises(svn_server_log_parse.Error, self.parse, 'commit 3') + self.assertEqual(self.parse('commit r3'), '') + self.assertEqual(self.result, (3,)) + self.assertEqual(self.parse('commit r3 leftover'), ' leftover') + self.assertEqual(self.result, 
(3,)) + + def test_get_dir(self): + self.get_dir_or_file('get-dir') + + def test_get_file(self): + self.get_dir_or_file('get-file') + + def get_dir_or_file(self, c): + self.assertRaises(svn_server_log_parse.Error, self.parse, c) + self.assertRaises(svn_server_log_parse.Error, self.parse, c + ' foo') + self.assertRaises(svn_server_log_parse.Error, self.parse, c + ' foo 3') + self.assertEqual(self.parse(c + ' /a/b/c r3 ...'), ' ...') + self.assertEqual(self.result, ('/a/b/c', 3, False, False)) + self.assertEqual(self.parse(c + ' / r3'), '') + self.assertEqual(self.result, ('/', 3, False, False)) + # path must be absolute + self.assertRaises(svn_server_log_parse.Error, + self.parse, c + ' a/b/c r3') + self.assertEqual(self.parse(c + ' /k r27 text'), '') + self.assertEqual(self.result, ('/k', 27, True, False)) + self.assertEqual(self.parse(c + ' /k r27 props'), '') + self.assertEqual(self.result, ('/k', 27, False, True)) + self.assertEqual(self.parse(c + ' /k r27 text props'), '') + self.assertEqual(self.result, ('/k', 27, True, True)) + # out of order not accepted + self.assertEqual(self.parse(c + ' /k r27 props text'), ' text') + self.assertEqual(self.result, ('/k', 27, False, True)) + + def test_lock(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'lock') + self.parse('lock (/foo)') + self.assertEqual(self.result, (['/foo'], False)) + self.assertEqual(self.parse('lock (/foo) steal ...'), ' ...') + self.assertEqual(self.result, (['/foo'], True)) + self.assertEqual(self.parse('lock (/foo) stear'), ' stear') + + def test_change_rev_prop(self): + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'change-rev-prop r3') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'change-rev-prop r svn:log') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'change-rev-prop rX svn:log') + self.assertEqual(self.parse('change-rev-prop r3 svn:log ...'), ' ...') + self.assertEqual(self.result, (3, 'svn:log')) + + def 
test_rev_proplist(self): + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'rev-proplist') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'rev-proplist r') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'rev-proplist rX') + self.assertEqual(self.parse('rev-proplist r3 ...'), ' ...') + self.assertEqual(self.result, (3,)) + + def test_rev_prop(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'rev-prop') + self.assertRaises(svn_server_log_parse.Error, self.parse, 'rev-prop r') + self.assertRaises(svn_server_log_parse.Error, self.parse, 'rev-prop rX') + self.assertEqual(self.parse('rev-prop r3 foo ...'), ' ...') + self.assertEqual(self.result, (3, 'foo')) + + def test_unlock(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'unlock') + self.parse('unlock (/foo)') + self.assertEqual(self.result, (['/foo'], False)) + self.assertEqual(self.parse('unlock (/foo) break ...'), ' ...') + self.assertEqual(self.result, (['/foo'], True)) + self.assertEqual(self.parse('unlock (/foo) bear'), ' bear') + + def test_get_lock(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'get-lock') + self.parse('get-lock /foo') + self.assertEqual(self.result, ('/foo',)) + + def test_get_locks(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'get-locks') + self.parse('get-locks /foo') + self.assertEqual(self.result, ('/foo',)) + + def test_get_locations(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, + 'get-locations') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-locations /foo 3') + self.assertEqual(self.parse('get-locations /foo (3 4) ...'), ' ...') + self.assertEqual(self.result, ('/foo', [3, 4])) + self.assertEqual(self.parse('get-locations /foo (3)'), '') + self.assertEqual(self.result, ('/foo', [3])) + + def test_get_location_segments(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, + 'get-location-segments') + 
self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-location-segments /foo 3') + self.assertEqual(self.parse('get-location-segments /foo@2 r3:4'), '') + self.assertEqual(self.result, ('/foo', 2, 3, 4)) + + def test_get_file_revs(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'get-file-revs') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-file-revs /foo 3') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-file-revs /foo 3:a') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-file-revs /foo r3:a') + self.assertEqual(self.parse('get-file-revs /foo r3:4 ...'), ' ...') + self.assertEqual(self.result, ('/foo', 3, 4, False)) + self.assertEqual(self.parse('get-file-revs /foo r3:4' + ' include-merged-revisions ...'), ' ...') + self.assertEqual(self.result, ('/foo', 3, 4, True)) + + def test_get_mergeinfo(self): + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-mergeinfo') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-mergeinfo /foo') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-mergeinfo (/foo') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-mergeinfo (/foo /bar') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'get-mergeinfo (/foo)') + self.assertRaises(svn_server_log_parse.BadMergeinfoInheritanceError, + self.parse, 'get-mergeinfo (/foo) bork') + self.assertEqual(self.parse('get-mergeinfo (/foo) explicit'), '') + self.assertEqual(self.result, (['/foo'], + svn.core.svn_mergeinfo_explicit, False)) + self.assertEqual(self.parse('get-mergeinfo (/foo /bar) inherited ...'), + ' ...') + self.assertEqual(self.result, (['/foo', '/bar'], + svn.core.svn_mergeinfo_inherited, False)) + self.assertEqual(self.result, (['/foo', '/bar'], + svn.core.svn_mergeinfo_inherited, False)) + + def test_log(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'log') + 
self.assertRaises(svn_server_log_parse.Error, + self.parse, 'log /foo') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'log (/foo)') + self.assertEqual(self.parse('log (/foo) r3:4' + ' include-merged-revisions'), '') + self.assertEqual(self.result, + (['/foo'], 3, 4, 0, False, False, True, [])) + self.assertEqual(self.parse('log (/foo /bar) r3:4 revprops=all ...'), + ' ...') + self.assertEqual(self.result, + (['/foo', '/bar'], 3, 4, 0, False, False, False, None)) + self.assertEqual(self.parse('log (/foo) r3:4 revprops=(a b) ...'), + ' ...') + self.assertEqual(self.result, + (['/foo'], 3, 4, 0, False, False, False, ['a', 'b'])) + self.assertEqual(self.parse('log (/foo) r8:1 limit=3'), '') + self.assertEqual(self.result, + (['/foo'], 8, 1, 3, False, False, False, [])) + + def test_check_path(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'check-path') + self.assertEqual(self.parse('check-path /foo@9'), '') + self.assertEqual(self.result, ('/foo', 9)) + + def test_stat(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'stat') + self.assertEqual(self.parse('stat /foo@9'), '') + self.assertEqual(self.result, ('/foo', 9)) + + def test_replay(self): + self.assertRaises(svn_server_log_parse.Error, self.parse, 'replay') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'replay /foo') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'replay (/foo) r9') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'replay (/foo) r9:10') + self.assertEqual(self.parse('replay /foo r9'), '') + self.assertEqual(self.result, ('/foo', 9)) + + def test_checkout_or_export(self): + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'checkout-or-export') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'checkout-or-export /foo') + self.assertEqual(self.parse('checkout-or-export /foo r9'), '') + self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown)) + 
self.assertRaises(svn_server_log_parse.BadDepthError, self.parse, + 'checkout-or-export /foo r9 depth=INVALID-DEPTH') + self.assertRaises(svn_server_log_parse.BadDepthError, self.parse, + 'checkout-or-export /foo r9 depth=bork') + self.assertEqual(self.parse('checkout-or-export /foo r9 depth=files .'), + ' .') + self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_files)) + + def test_diff_1path(self): + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'diff') + self.assertEqual(self.parse('diff /foo r9:10'), '') + self.assertEqual(self.result, ('/foo', 9, 10, + svn.core.svn_depth_unknown, False)) + self.assertEqual(self.parse('diff /foo r9:10' + ' ignore-ancestry ...'), ' ...') + self.assertEqual(self.result, ('/foo', 9, 10, + svn.core.svn_depth_unknown, True)) + self.assertEqual(self.parse('diff /foo r9:10 depth=files'), '') + self.assertEqual(self.result, ('/foo', 9, 10, + svn.core.svn_depth_files, False)) + + def test_diff_2paths(self): + self.assertEqual(self.parse('diff /foo@9 /bar@10'), '') + self.assertEqual(self.result, ('/foo', 9, '/bar', 10, + svn.core.svn_depth_unknown, False)) + self.assertEqual(self.parse('diff /foo@9 /bar@10' + ' ignore-ancestry ...'), ' ...') + self.assertEqual(self.result, ('/foo', 9, '/bar', 10, + svn.core.svn_depth_unknown, True)) + self.assertEqual(self.parse('diff /foo@9 /bar@10' + ' depth=files ignore-ancestry'), '') + self.assertEqual(self.result, ('/foo', 9, '/bar', 10, + svn.core.svn_depth_files, True)) + + def test_status(self): + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'status') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'status /foo') + self.assertEqual(self.parse('status /foo r9'), '') + self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown)) + self.assertRaises(svn_server_log_parse.BadDepthError, self.parse, + 'status /foo r9 depth=INVALID-DEPTH') + self.assertRaises(svn_server_log_parse.BadDepthError, self.parse, + 'status /foo r9 depth=bork') + 
self.assertEqual(self.parse('status /foo r9 depth=files .'), + ' .') + self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_files)) + + def test_switch(self): + self.assertEqual(self.parse('switch /foo /bar@10 ...'), ' ...') + self.assertEqual(self.result, ('/foo', '/bar', 10, + svn.core.svn_depth_unknown)) + self.assertEqual(self.parse('switch /foo /bar@10' + ' depth=files'), '') + self.assertEqual(self.result, ('/foo', '/bar', 10, + svn.core.svn_depth_files)) + + def test_update(self): + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'update') + self.assertRaises(svn_server_log_parse.Error, + self.parse, 'update /foo') + self.assertEqual(self.parse('update /foo r9'), '') + self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown, + False)) + self.assertRaises(svn_server_log_parse.BadDepthError, self.parse, + 'update /foo r9 depth=INVALID-DEPTH') + self.assertRaises(svn_server_log_parse.BadDepthError, self.parse, + 'update /foo r9 depth=bork') + self.assertEqual(self.parse('update /foo r9 depth=files .'), ' .') + self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_files, + False)) + self.assertEqual(self.parse('update /foo r9 send-copyfrom-args .'), + ' .') + self.assertEqual(self.result, ('/foo', 9, svn.core.svn_depth_unknown, + True)) + +if __name__ == '__main__': + if len(sys.argv) == 1: + # No arguments so run the unit tests. + unittest.main() + sys.stderr.write('unittest.main failed to exit\n') + sys.exit(2) + + # Use the argument as the path to a log file to test against. + + def uri_encode(s): + # urllib.parse.quote encodes :&@ characters, svn does not. + return urllib_parse_quote(s, safe='/:&@') + + # Define a class to reconstruct the SVN-ACTION string. 
+ class Test(svn_server_log_parse.Parser): + def handle_unknown(self, line): + sys.stderr.write('unknown log line at %d:\n%s\n' % (self.linenum, + line)) + sys.exit(2) + + def handle_open(self, protocol, capabilities, path, ra_client, client): + capabilities = ' '.join(capabilities) + if ra_client is None: + ra_client = '-' + if client is None: + client = '-' + path = uri_encode(path) + self.action = ('open %d cap=(%s) %s %s %s' + % (protocol, capabilities, path, ra_client, client)) + + def handle_reparent(self, path): + path = uri_encode(path) + self.action = 'reparent ' + path + + def handle_get_latest_rev(self): + self.action = 'get-latest-rev' + + def handle_get_dated_rev(self, date): + self.action = 'get-dated-rev ' + date + + def handle_commit(self, revision): + self.action = 'commit r%d' % (revision,) + + def handle_get_dir(self, path, revision, text, props): + path = uri_encode(path) + self.action = 'get-dir %s r%d' % (path, revision) + if text: + self.action += ' text' + if props: + self.action += ' props' + + def handle_get_file(self, path, revision, text, props): + path = uri_encode(path) + self.action = 'get-file %s r%d' % (path, revision) + if text: + self.action += ' text' + if props: + self.action += ' props' + + def handle_lock(self, paths, steal): + paths = [uri_encode(x) for x in paths] + self.action = 'lock (%s)' % (' '.join(paths),) + if steal: + self.action += ' steal' + + def handle_change_rev_prop(self, revision, revprop): + revprop = uri_encode(revprop) + self.action = 'change-rev-prop r%d %s' % (revision, revprop) + + def handle_rev_prop(self, revision, revprop): + revprop = uri_encode(revprop) + self.action = 'rev-prop r%d %s' % (revision, revprop) + + def handle_rev_proplist(self, revision): + self.action = 'rev-proplist r%d' % (revision,) + + def handle_unlock(self, paths, break_lock): + paths = [uri_encode(x) for x in paths] + self.action = 'unlock (%s)' % (' '.join(paths),) + if break_lock: + self.action += ' break' + + def 
handle_get_lock(self, path): + path = uri_encode(path) + self.action = 'get-lock ' + path + + def handle_get_locks(self, path): + self.action = 'get-locks ' + path + path = uri_encode(path) + + def handle_get_locations(self, path, revisions): + path = uri_encode(path) + self.action = ('get-locations %s (%s)' + % (path, ' '.join([str(x) for x in revisions]))) + + def handle_get_location_segments(self, path, peg, left, right): + path = uri_encode(path) + self.action = 'get-location-segments %s@%d r%d:%d' % (path, peg, + left, right) + + def handle_get_file_revs(self, path, left, right, + include_merged_revisions): + path = uri_encode(path) + self.action = 'get-file-revs %s r%d:%d' % (path, left, right) + if include_merged_revisions: + self.action += ' include-merged-revisions' + + def handle_get_mergeinfo(self, paths, inheritance, include_descendants): + paths = [uri_encode(x) for x in paths] + self.action = ('get-mergeinfo (%s) %s' + % (' '.join(paths), + svn.core.svn_inheritance_to_word(inheritance))) + if include_descendants: + self.action += ' include-descendants' + + def handle_log(self, paths, left, right, limit, discover_changed_paths, + strict, include_merged_revisions, revprops): + paths = [uri_encode(x) for x in paths] + self.action = 'log (%s) r%d:%d' % (' '.join(paths), + left, right) + if limit != 0: + self.action += ' limit=%d' % (limit,) + if discover_changed_paths: + self.action += ' discover-changed-paths' + if strict: + self.action += ' strict' + if include_merged_revisions: + self.action += ' include-merged-revisions' + if revprops is None: + self.action += ' revprops=all' + elif len(revprops) > 0: + revprops = [uri_encode(x) for x in revprops] + self.action += ' revprops=(%s)' % (' '.join(revprops),) + + def handle_check_path(self, path, revision): + path = uri_encode(path) + self.action = 'check-path %s@%d' % (path, revision) + + def handle_stat(self, path, revision): + path = uri_encode(path) + self.action = 'stat %s@%d' % (path, revision) + + 
def handle_replay(self, path, revision): + path = uri_encode(path) + self.action = 'replay %s r%d' % (path, revision) + + def maybe_depth(self, depth): + if depth != svn.core.svn_depth_unknown: + self.action += ' depth=%s' % ( + svn.core.svn_depth_to_word(depth),) + + def handle_checkout_or_export(self, path, revision, depth): + path = uri_encode(path) + self.action = 'checkout-or-export %s r%d' % (path, revision) + self.maybe_depth(depth) + + def handle_diff_1path(self, path, left, right, + depth, ignore_ancestry): + path = uri_encode(path) + self.action = 'diff %s r%d:%d' % (path, left, right) + self.maybe_depth(depth) + if ignore_ancestry: + self.action += ' ignore-ancestry' + + def handle_diff_2paths(self, from_path, from_rev, + to_path, to_rev, + depth, ignore_ancestry): + from_path = uri_encode(from_path) + to_path = uri_encode(to_path) + self.action = ('diff %s@%d %s@%d' + % (from_path, from_rev, to_path, to_rev)) + self.maybe_depth(depth) + if ignore_ancestry: + self.action += ' ignore-ancestry' + + def handle_status(self, path, revision, depth): + path = uri_encode(path) + self.action = 'status %s r%d' % (path, revision) + self.maybe_depth(depth) + + def handle_switch(self, from_path, to_path, to_rev, depth): + from_path = uri_encode(from_path) + to_path = uri_encode(to_path) + self.action = ('switch %s %s@%d' + % (from_path, to_path, to_rev)) + self.maybe_depth(depth) + + def handle_update(self, path, revision, depth, send_copyfrom_args): + path = uri_encode(path) + self.action = 'update %s r%d' % (path, revision) + self.maybe_depth(depth) + if send_copyfrom_args: + self.action += ' send-copyfrom-args' + + tmp = tempfile.mktemp() + try: + fp = open(tmp, 'w') + parser = Test() + parser.linenum = 0 + log_file = sys.argv[1] + log_type = None + for line in open(log_file): + if log_type is None: + # Figure out which log type we have. 
+ if re.match(r'\d+ \d\d\d\d-', line): + log_type = 'svnserve' + elif re.match(r'\[\d\d/', line): + log_type = 'mod_dav_svn' + else: + sys.stderr.write("unknown log format in '%s'" + % (log_file,)) + sys.exit(3) + sys.stderr.write('parsing %s log...\n' % (log_type,)) + sys.stderr.flush() + + words = line.split() + if log_type == 'svnserve': + # Skip over PID, date, client address, username, and repos. + if words[5].startswith('ERR'): + # Skip error lines. + fp.write(line) + continue + leading = ' '.join(words[:5]) + action = ' '.join(words[5:]) + else: + # Find the SVN-ACTION string from the CustomLog format + # davautocheck.sh uses. If that changes, this will need + # to as well. Currently it's + # %t %u %{SVN-REPOS-NAME}e %{SVN-ACTION}e + leading = ' '.join(words[:4]) + action = ' '.join(words[4:]) + + # Parse the action and write the reconstructed action to + # the temporary file. Ignore the returned trailing text, + # as we have none in the davautocheck ops log. + parser.linenum += 1 + try: + parser.parse(action) + except svn_server_log_parse.Error: + sys.stderr.write('error at line %d: %s\n' + % (parser.linenum, action)) + raise + fp.write(leading + ' ' + parser.action + '\n') + fp.close() + # Check differences between original and reconstructed files + # (should be identical). + result = os.spawnlp(os.P_WAIT, 'diff', 'diff', '-u', log_file, tmp) + if result == 0: + sys.stderr.write('OK\n') + sys.exit(result) + finally: + try: + os.unlink(tmp) + except Exception, e: + sys.stderr.write('os.unlink(tmp): %s\n' % (e,)) diff --git a/tools/xslt/svnindex.css b/tools/xslt/svnindex.css new file mode 100644 index 0000000..09752c9 --- /dev/null +++ b/tools/xslt/svnindex.css @@ -0,0 +1,108 @@ +/* + * + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 */

/* A sample style sheet for displaying the Subversion directory listing
   that is generated by mod_dav_svn and "svnindex.xsl". */

/* Page reset: let the listing manage its own spacing. */
body{
  margin: 0;
  padding: 0;
}

a {
  color: navy;
}

/* "Powered by Subversion" footer bar. */
.footer {
  margin-top: 8em;
  padding: 0.5em 1em 0.5em;
  border: 1px solid;
  border-width: 1px 0;
  clear: both;
  border-color: rgb(30%,30%,50%) navy rgb(75%,80%,85%) navy;
  background: rgb(88%,90%,92%);
  font-size: 80%;
}

/* Container for the whole generated listing. */
.svn {
  margin: 3em;
}

/* Repository name / revision header line. */
.rev {
  margin-right: 3px;
  padding-left: 3px;
  text-align: left;
  font-size: 120%;
}

/* Entry links render as plain black text; row background carries
   the visual distinction instead. */
.dir a {
  text-decoration: none;
  color: black;
}

.file a {
  text-decoration: none;
  color: black;
}

/* Current repository path banner. */
.path {
  margin: 3px;
  padding: 3px;
  background: #FFCC66;
  font-size: 120%;
}

/* "[Parent Directory]" row. */
.updir {
  margin: 3px;
  padding: 3px;
  margin-left: 3em;
  background: #FFEEAA;
}

.file {
  margin: 3px;
  padding: 3px;
  margin-left: 3em;
  background: rgb(95%,95%,95%);
}

/* Hover variants only lighten the background; margins/padding are
   restated so the rules stand alone. */
.file:hover {
  margin: 3px;
  padding: 3px;
  margin-left: 3em;
  background: rgb(100%,100%,90%);
/*  border: 1px black solid; */
}

.dir {
  margin: 3px;
  padding: 3px;
  margin-left: 3em;
  background: rgb(90%,90%,90%);
}

.dir:hover {
  margin: 3px;
  padding: 3px;
  margin-left: 3em;
  background: rgb(100%,100%,80%);
/*  border: 1px black solid; */
}
diff --git a/tools/xslt/svnindex.xsl b/tools/xslt/svnindex.xsl
new file mode 100644
index
0000000..83454f7
--- /dev/null
+++ b/tools/xslt/svnindex.xsl
@@ -0,0 +1,123 @@
<?xml version="1.0"?>
<!--

 Licensed to the Apache Software Foundation (ASF) under one
 or more contributor license agreements.  See the NOTICE file
 distributed with this work for additional information
 regarding copyright ownership.  The ASF licenses this file
 to you under the Apache License, Version 2.0 (the
 "License"); you may not use this file except in compliance
 with the License.  You may obtain a copy of the License at

   http://www.apache.org/licenses/LICENSE-2.0

 Unless required by applicable law or agreed to in writing,
 software distributed under the License is distributed on an
 "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 KIND, either express or implied.  See the License for the
 specific language governing permissions and limitations
 under the License.

-->


<!-- A sample XML transformation style sheet for displaying the Subversion
     directory listing that is generated by mod_dav_svn when the
     "SVNIndexXSLT" directive is used. -->
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">

  <xsl:output method="html"/>

  <!-- Default rule: silently ignore any element without its own template. -->
  <xsl:template match="*"/>

  <!-- Root element: emit the HTML page skeleton (title, stylesheet link,
       listing container, and "Powered by Subversion" footer). -->
  <xsl:template match="svn">
    <html>
      <head>
        <title>
          <xsl:if test="string-length(index/@name) != 0">
            <xsl:value-of select="index/@name"/>
            <xsl:text>: </xsl:text>
          </xsl:if>
          <xsl:value-of select="index/@path"/>
        </title>
        <link rel="stylesheet" type="text/css" href="/svnindex.css"/>
      </head>
      <body>
        <div class="svn">
          <xsl:apply-templates/>
        </div>
        <div class="footer">
          <xsl:text>Powered by </xsl:text>
          <!-- Link text/target come from the svn element's own
               href/version attributes. -->
          <xsl:element name="a">
            <xsl:attribute name="href">
              <xsl:value-of select="@href"/>
            </xsl:attribute>
            <xsl:text>Subversion</xsl:text>
          </xsl:element>
          <xsl:text> </xsl:text>
          <xsl:value-of select="@version"/>
        </div>
      </body>
    </html>
  </xsl:template>

  <!-- Listing header (name/base/revision), current path banner, then the
       entries in fixed order: updir, directories, files. -->
  <xsl:template match="index">
    <div class="rev">
      <xsl:value-of select="@name"/>
      <xsl:if test="@base">
        <xsl:if test="@name">
          <xsl:text>:  </xsl:text>
        </xsl:if>
        <xsl:value-of select="@base" />
      </xsl:if>
      <xsl:if test="@rev">
        <xsl:if test="@base | @name">
          <!-- em-dash separator between name/base and the revision -->
          <xsl:text> — </xsl:text>
        </xsl:if>
        <xsl:text>Revision </xsl:text>
        <xsl:value-of select="@rev"/>
      </xsl:if>
    </div>
    <div class="path">
      <xsl:value-of select="@path"/>
    </div>
    <xsl:apply-templates select="updir"/>
    <xsl:apply-templates select="dir"/>
    <xsl:apply-templates select="file"/>
  </xsl:template>

  <!-- "[Parent Directory]" row; always links to ".." -->
  <xsl:template match="updir">
    <div class="updir">
      <xsl:text>[</xsl:text>
      <xsl:element name="a">
        <xsl:attribute name="href">..</xsl:attribute>
        <xsl:text>Parent Directory</xsl:text>
      </xsl:element>
      <xsl:text>]</xsl:text>
    </div>
  </xsl:template>

  <!-- Directory entry: name with a trailing slash. -->
  <xsl:template match="dir">
    <div class="dir">
      <xsl:element name="a">
        <xsl:attribute name="href">
          <xsl:value-of select="@href"/>
        </xsl:attribute>
        <xsl:value-of select="@name"/>
        <xsl:text>/</xsl:text>
      </xsl:element>
    </div>
  </xsl:template>

  <!-- File entry: plain linked name. -->
  <xsl:template match="file">
    <div class="file">
      <xsl:element name="a">
        <xsl:attribute name="href">
          <xsl:value-of select="@href"/>
        </xsl:attribute>
        <xsl:value-of select="@name"/>
      </xsl:element>
    </div>
  </xsl:template>

</xsl:stylesheet>
|