summaryrefslogtreecommitdiff
path: root/mercurial
diff options
context:
space:
mode:
Diffstat (limited to 'mercurial')
-rw-r--r--mercurial/__version__.py2
-rw-r--r--mercurial/archival.py21
-rw-r--r--mercurial/base85.c15
-rw-r--r--mercurial/bdiff.c110
-rw-r--r--mercurial/bookmarks.py92
-rw-r--r--mercurial/bundlerepo.py56
-rw-r--r--mercurial/byterange.py16
-rw-r--r--mercurial/changegroup.py2
-rw-r--r--mercurial/changelog.py38
-rw-r--r--mercurial/cmdutil.py789
-rw-r--r--mercurial/commands.py2435
-rw-r--r--mercurial/commandserver.py8
-rw-r--r--mercurial/config.py50
-rw-r--r--mercurial/context.py330
-rw-r--r--mercurial/copies.py147
-rw-r--r--mercurial/dagparser.py11
-rw-r--r--mercurial/dagutil.py12
-rw-r--r--mercurial/demandimport.py30
-rw-r--r--mercurial/diffhelpers.c31
-rw-r--r--mercurial/dirstate.py192
-rw-r--r--mercurial/discovery.py448
-rw-r--r--mercurial/dispatch.py244
-rw-r--r--mercurial/encoding.py150
-rw-r--r--mercurial/error.py3
-rw-r--r--mercurial/exewrapper.c101
-rw-r--r--mercurial/extensions.py38
-rw-r--r--mercurial/fancyopts.py2
-rw-r--r--mercurial/filemerge.py255
-rw-r--r--mercurial/fileset.py37
-rw-r--r--mercurial/formatter.py71
-rw-r--r--mercurial/graphmod.py235
-rw-r--r--mercurial/hbisect.py123
-rw-r--r--mercurial/help.py154
-rw-r--r--mercurial/help/config.txt567
-rw-r--r--mercurial/help/filesets.txt2
-rw-r--r--mercurial/help/glossary.txt23
-rw-r--r--mercurial/help/hgignore.txt17
-rw-r--r--mercurial/help/hgweb.txt71
-rw-r--r--mercurial/help/merge-tools.txt32
-rw-r--r--mercurial/help/patterns.txt4
-rw-r--r--mercurial/help/phases.txt84
-rw-r--r--mercurial/help/revisions.txt12
-rw-r--r--mercurial/help/revsets.txt7
-rw-r--r--mercurial/help/subrepos.txt68
-rw-r--r--mercurial/hg.py189
-rw-r--r--mercurial/hgweb/common.py2
-rw-r--r--mercurial/hgweb/hgweb_mod.py11
-rw-r--r--mercurial/hgweb/hgwebdir_mod.py115
-rw-r--r--mercurial/hgweb/protocol.py5
-rw-r--r--mercurial/hgweb/request.py2
-rw-r--r--mercurial/hgweb/server.py3
-rw-r--r--mercurial/hgweb/webcommands.py232
-rw-r--r--mercurial/hgweb/webutil.py86
-rw-r--r--mercurial/hgweb/wsgicgi.py3
-rw-r--r--mercurial/hook.py32
-rw-r--r--mercurial/httpclient/__init__.py214
-rw-r--r--mercurial/httpclient/_readers.py195
-rw-r--r--mercurial/httpclient/tests/__init__.py1
-rw-r--r--mercurial/httpclient/tests/simple_http_test.py386
-rw-r--r--mercurial/httpclient/tests/test_bogus_responses.py68
-rw-r--r--mercurial/httpclient/tests/test_chunked_transfer.py137
-rw-r--r--mercurial/httpclient/tests/test_proxy_support.py135
-rw-r--r--mercurial/httpclient/tests/test_ssl.py93
-rw-r--r--mercurial/httpclient/tests/util.py195
-rw-r--r--mercurial/httpconnection.py10
-rw-r--r--mercurial/httprepo.py (renamed from mercurial/httppeer.py)30
-rw-r--r--mercurial/i18n.py3
-rw-r--r--mercurial/ignore.py1
-rw-r--r--mercurial/keepalive.py23
-rw-r--r--mercurial/localrepo.py1250
-rw-r--r--mercurial/lock.py8
-rw-r--r--mercurial/lsprof.py10
-rw-r--r--mercurial/mail.py52
-rw-r--r--mercurial/manifest.py12
-rw-r--r--mercurial/match.py26
-rw-r--r--mercurial/mdiff.py175
-rw-r--r--mercurial/merge.py124
-rw-r--r--mercurial/minirst.py331
-rw-r--r--mercurial/mpatch.c75
-rw-r--r--mercurial/obsolete.py331
-rw-r--r--mercurial/osutil.c7
-rw-r--r--mercurial/parsers.c1402
-rw-r--r--mercurial/patch.py280
-rw-r--r--mercurial/phases.py387
-rw-r--r--mercurial/posix.py155
-rw-r--r--mercurial/pure/__init__.py0
-rw-r--r--mercurial/pure/base85.py5
-rw-r--r--mercurial/pure/bdiff.py9
-rw-r--r--mercurial/pure/mpatch.py4
-rw-r--r--mercurial/pure/osutil.py21
-rw-r--r--mercurial/pure/parsers.py4
-rw-r--r--mercurial/pushkey.py9
-rw-r--r--mercurial/pvec.py210
-rw-r--r--mercurial/repair.py73
-rw-r--r--mercurial/repo.py (renamed from mercurial/peer.py)20
-rw-r--r--mercurial/revlog.py210
-rw-r--r--mercurial/revset.py1123
-rw-r--r--mercurial/scmutil.py318
-rw-r--r--mercurial/setdiscovery.py21
-rw-r--r--mercurial/similar.py3
-rw-r--r--mercurial/simplemerge.py10
-rw-r--r--mercurial/sshrepo.py (renamed from mercurial/sshpeer.py)59
-rw-r--r--mercurial/sshserver.py11
-rw-r--r--mercurial/sslutil.py75
-rw-r--r--mercurial/statichttprepo.py35
-rw-r--r--mercurial/store.py51
-rw-r--r--mercurial/subrepo.py316
-rw-r--r--mercurial/tags.py11
-rw-r--r--mercurial/templatefilters.py47
-rw-r--r--mercurial/templatekw.py29
-rw-r--r--mercurial/templater.py19
-rw-r--r--mercurial/templates/coal/map13
-rw-r--r--mercurial/templates/gitweb/changelogentry.tmpl2
-rw-r--r--mercurial/templates/gitweb/changeset.tmpl2
-rw-r--r--mercurial/templates/gitweb/fileannotate.tmpl3
-rw-r--r--mercurial/templates/gitweb/filecomparison.tmpl71
-rw-r--r--mercurial/templates/gitweb/filediff.tmpl4
-rw-r--r--mercurial/templates/gitweb/filelog.tmpl1
-rw-r--r--mercurial/templates/gitweb/filerevision.tmpl3
-rw-r--r--mercurial/templates/gitweb/graph.tmpl10
-rw-r--r--mercurial/templates/gitweb/map42
-rw-r--r--mercurial/templates/gitweb/summary.tmpl4
-rw-r--r--mercurial/templates/map-cmdline.bisect25
-rw-r--r--mercurial/templates/map-cmdline.default2
-rw-r--r--mercurial/templates/monoblue/bookmarks.tmpl2
-rw-r--r--mercurial/templates/monoblue/branches.tmpl2
-rw-r--r--mercurial/templates/monoblue/changelogentry.tmpl2
-rw-r--r--mercurial/templates/monoblue/changeset.tmpl6
-rw-r--r--mercurial/templates/monoblue/error.tmpl2
-rw-r--r--mercurial/templates/monoblue/fileannotate.tmpl5
-rw-r--r--mercurial/templates/monoblue/filecomparison.tmpl72
-rw-r--r--mercurial/templates/monoblue/filediff.tmpl1
-rw-r--r--mercurial/templates/monoblue/filelog.tmpl1
-rw-r--r--mercurial/templates/monoblue/filerevision.tmpl5
-rw-r--r--mercurial/templates/monoblue/footer.tmpl2
-rw-r--r--mercurial/templates/monoblue/graph.tmpl10
-rw-r--r--mercurial/templates/monoblue/help.tmpl2
-rw-r--r--mercurial/templates/monoblue/helptopics.tmpl2
-rw-r--r--mercurial/templates/monoblue/index.tmpl2
-rw-r--r--mercurial/templates/monoblue/map34
-rw-r--r--mercurial/templates/monoblue/notfound.tmpl2
-rw-r--r--mercurial/templates/monoblue/tags.tmpl2
-rw-r--r--mercurial/templates/paper/bookmarks.tmpl2
-rw-r--r--mercurial/templates/paper/branches.tmpl2
-rw-r--r--mercurial/templates/paper/changeset.tmpl4
-rw-r--r--mercurial/templates/paper/error.tmpl4
-rw-r--r--mercurial/templates/paper/fileannotate.tmpl5
-rw-r--r--mercurial/templates/paper/filecomparison.tmpl93
-rw-r--r--mercurial/templates/paper/filediff.tmpl5
-rw-r--r--mercurial/templates/paper/filelog.tmpl3
-rw-r--r--mercurial/templates/paper/filelogentry.tmpl2
-rw-r--r--mercurial/templates/paper/filerevision.tmpl5
-rw-r--r--mercurial/templates/paper/graph.tmpl12
-rw-r--r--mercurial/templates/paper/help.tmpl4
-rw-r--r--mercurial/templates/paper/helptopics.tmpl2
-rw-r--r--mercurial/templates/paper/index.tmpl2
-rw-r--r--mercurial/templates/paper/manifest.tmpl2
-rw-r--r--mercurial/templates/paper/map15
-rw-r--r--mercurial/templates/paper/search.tmpl2
-rw-r--r--mercurial/templates/paper/shortlog.tmpl2
-rw-r--r--mercurial/templates/paper/shortlogentry.tmpl2
-rw-r--r--mercurial/templates/paper/tags.tmpl2
-rw-r--r--mercurial/templates/raw/graph.tmpl6
-rw-r--r--mercurial/templates/raw/graphedge.tmpl1
-rw-r--r--mercurial/templates/raw/graphnode.tmpl7
-rw-r--r--mercurial/templates/raw/map8
-rw-r--r--mercurial/templates/spartan/changelogentry.tmpl4
-rw-r--r--mercurial/templates/spartan/changeset.tmpl2
-rw-r--r--mercurial/templates/spartan/fileannotate.tmpl2
-rw-r--r--mercurial/templates/spartan/filelogentry.tmpl4
-rw-r--r--mercurial/templates/spartan/filerevision.tmpl2
-rw-r--r--mercurial/templates/spartan/footer.tmpl2
-rw-r--r--mercurial/templates/spartan/graph.tmpl10
-rw-r--r--mercurial/templates/spartan/map2
-rw-r--r--mercurial/templates/spartan/shortlogentry.tmpl2
-rw-r--r--mercurial/templates/static/mercurial.js57
-rw-r--r--mercurial/templates/static/style-coal.css37
-rw-r--r--mercurial/templates/static/style-gitweb.css41
-rw-r--r--mercurial/templates/static/style-monoblue.css47
-rw-r--r--mercurial/templates/static/style-paper.css37
-rw-r--r--mercurial/transaction.py2
-rw-r--r--mercurial/treediscovery.py4
-rw-r--r--mercurial/ui.py76
-rw-r--r--mercurial/url.py9
-rw-r--r--mercurial/util.h63
-rw-r--r--mercurial/util.py400
-rw-r--r--mercurial/verify.py4
-rw-r--r--mercurial/win32.py95
-rw-r--r--mercurial/windows.py81
-rw-r--r--mercurial/wireproto.py69
190 files changed, 5028 insertions, 13176 deletions
diff --git a/mercurial/__version__.py b/mercurial/__version__.py
index 74c95c5..a0ed182 100644
--- a/mercurial/__version__.py
+++ b/mercurial/__version__.py
@@ -1,2 +1,2 @@
# this file is autogenerated by setup.py
-version = "2.3"
+version = "1.9.3"
diff --git a/mercurial/archival.py b/mercurial/archival.py
index 8eddf29..3f92f4c 100644
--- a/mercurial/archival.py
+++ b/mercurial/archival.py
@@ -7,7 +7,6 @@
from i18n import _
from node import hex
-import match as matchmod
import cmdutil
import scmutil, util, encoding
import cStringIO, os, tarfile, time, zipfile
@@ -196,7 +195,7 @@ class fileit(object):
return
f = self.opener(name, "w", atomictemp=True)
f.write(data)
- f.close()
+ f.rename()
destfile = os.path.join(self.basedir, name)
os.chmod(destfile, mode)
@@ -235,6 +234,8 @@ def archive(repo, dest, node, kind, decode=True, matchfn=None,
prefix = tidyprefix(dest, kind, prefix)
def write(name, mode, islink, getdata):
+ if matchfn and not matchfn(name):
+ return
data = getdata()
if decode:
data = repo.wwritedata(name, data)
@@ -264,18 +265,11 @@ def archive(repo, dest, node, kind, decode=True, matchfn=None,
return base + tags
- name = '.hg_archival.txt'
- if not matchfn or matchfn(name):
- write(name, 0644, False, metadata)
+ write('.hg_archival.txt', 0644, False, metadata)
- if matchfn:
- files = [f for f in ctx.manifest().keys() if matchfn(f)]
- else:
- files = ctx.manifest().keys()
- files.sort()
- total = len(files)
+ total = len(ctx.manifest())
repo.ui.progress(_('archiving'), 0, unit=_('files'), total=total)
- for i, f in enumerate(files):
+ for i, f in enumerate(ctx):
ff = ctx.flags(f)
write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data)
repo.ui.progress(_('archiving'), i + 1, item=f,
@@ -285,7 +279,6 @@ def archive(repo, dest, node, kind, decode=True, matchfn=None,
if subrepos:
for subpath in ctx.substate:
sub = ctx.sub(subpath)
- submatch = matchmod.narrowmatcher(subpath, matchfn)
- sub.archive(repo.ui, archiver, prefix, submatch)
+ sub.archive(repo.ui, archiver, prefix)
archiver.done()
diff --git a/mercurial/base85.c b/mercurial/base85.c
index 0d45da2..df96629 100644
--- a/mercurial/base85.c
+++ b/mercurial/base85.c
@@ -9,7 +9,6 @@
Largely based on git's implementation
*/
-#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include "util.h"
@@ -34,7 +33,7 @@ b85encode(PyObject *self, PyObject *args)
const unsigned char *text;
PyObject *out;
char *dst;
- Py_ssize_t len, olen, i;
+ int len, olen, i;
unsigned int acc, val, ch;
int pad = 0;
@@ -82,8 +81,7 @@ b85decode(PyObject *self, PyObject *args)
PyObject *out;
const char *text;
char *dst;
- Py_ssize_t len, i, j, olen, cap;
- int c;
+ int len, i, j, olen, c, cap;
unsigned int acc;
if (!PyArg_ParseTuple(args, "s#", &text, &len))
@@ -111,8 +109,7 @@ b85decode(PyObject *self, PyObject *args)
if (c < 0)
return PyErr_Format(
PyExc_ValueError,
- "bad base85 character at position %d",
- (int)i);
+ "Bad base85 character at position %d", i);
acc = acc * 85 + c;
}
if (i++ < len)
@@ -121,15 +118,13 @@ b85decode(PyObject *self, PyObject *args)
if (c < 0)
return PyErr_Format(
PyExc_ValueError,
- "bad base85 character at position %d",
- (int)i);
+ "Bad base85 character at position %d", i);
/* overflow detection: 0xffffffff == "|NsC0",
* "|NsC" == 0x03030303 */
if (acc > 0x03030303 || (acc *= 85) > 0xffffffff - c)
return PyErr_Format(
PyExc_ValueError,
- "bad base85 sequence at position %d",
- (int)i);
+ "Bad base85 sequence at position %d", i);
acc += c;
}
diff --git a/mercurial/bdiff.c b/mercurial/bdiff.c
index be38b53..9d39d7e 100644
--- a/mercurial/bdiff.c
+++ b/mercurial/bdiff.c
@@ -9,17 +9,47 @@
Based roughly on Python difflib
*/
-#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>
+#if defined __hpux || defined __SUNPRO_C || defined _AIX
+#define inline
+#endif
+
+#ifdef __linux
+#define inline __inline
+#endif
+
+#ifdef _WIN32
+#ifdef _MSC_VER
+#define inline __inline
+typedef unsigned long uint32_t;
+#else
+#include <stdint.h>
+#endif
+static uint32_t htonl(uint32_t x)
+{
+ return ((x & 0x000000ffUL) << 24) |
+ ((x & 0x0000ff00UL) << 8) |
+ ((x & 0x00ff0000UL) >> 8) |
+ ((x & 0xff000000UL) >> 24);
+}
+#else
+#include <sys/types.h>
+#if defined __BEOS__ && !defined __HAIKU__
+#include <ByteOrder.h>
+#else
+#include <arpa/inet.h>
+#endif
+#include <inttypes.h>
+#endif
+
#include "util.h"
struct line {
- int hash, n, e;
- Py_ssize_t len;
+ int hash, len, n, e;
const char *l;
};
@@ -33,7 +63,7 @@ struct hunk {
struct hunk *next;
};
-static int splitlines(const char *a, Py_ssize_t len, struct line **lr)
+static int splitlines(const char *a, int len, struct line **lr)
{
unsigned hash;
int i;
@@ -336,18 +366,16 @@ nomem:
static PyObject *bdiff(PyObject *self, PyObject *args)
{
- char *sa, *sb, *rb;
+ char *sa, *sb;
PyObject *result = NULL;
struct line *al, *bl;
struct hunk l, *h;
- int an, bn, count;
- Py_ssize_t len = 0, la, lb;
- PyThreadState *_save;
+ char encode[12], *rb;
+ int an, bn, len = 0, la, lb, count;
if (!PyArg_ParseTuple(args, "s#s#:bdiff", &sa, &la, &sb, &lb))
return NULL;
- _save = PyEval_SaveThread();
an = splitlines(sa, la, &al);
bn = splitlines(sb, lb, &bl);
if (!al || !bl)
@@ -366,8 +394,6 @@ static PyObject *bdiff(PyObject *self, PyObject *args)
la = h->a2;
lb = h->b2;
}
- PyEval_RestoreThread(_save);
- _save = NULL;
result = PyBytes_FromStringAndSize(NULL, len);
@@ -381,18 +407,10 @@ static PyObject *bdiff(PyObject *self, PyObject *args)
for (h = l.next; h; h = h->next) {
if (h->a1 != la || h->b1 != lb) {
len = bl[h->b1].l - bl[lb].l;
-
-#define checkputbe32(__x, __c) \
- if (__x > UINT_MAX) { \
- PyErr_SetString(PyExc_ValueError, \
- "bdiff: value too large for putbe32"); \
- goto nomem; \
- } \
- putbe32((uint32_t)(__x), __c);
-
- checkputbe32(al[la].l - al->l, rb);
- checkputbe32(al[h->a1].l - al->l, rb + 4);
- checkputbe32(len, rb + 8);
+ *(uint32_t *)(encode) = htonl(al[la].l - al->l);
+ *(uint32_t *)(encode + 4) = htonl(al[h->a1].l - al->l);
+ *(uint32_t *)(encode + 8) = htonl(len);
+ memcpy(rb, encode, 12);
memcpy(rb + 12, bl[lb].l, len);
rb += 12 + len;
}
@@ -401,63 +419,17 @@ static PyObject *bdiff(PyObject *self, PyObject *args)
}
nomem:
- if (_save)
- PyEval_RestoreThread(_save);
free(al);
free(bl);
freehunks(l.next);
return result ? result : PyErr_NoMemory();
}
-/*
- * If allws != 0, remove all whitespace (' ', \t and \r). Otherwise,
- * reduce whitespace sequences to a single space and trim remaining whitespace
- * from end of lines.
- */
-static PyObject *fixws(PyObject *self, PyObject *args)
-{
- PyObject *s, *result = NULL;
- char allws, c;
- const char *r;
- Py_ssize_t i, rlen, wlen = 0;
- char *w;
-
- if (!PyArg_ParseTuple(args, "Sb:fixws", &s, &allws))
- return NULL;
- r = PyBytes_AsString(s);
- rlen = PyBytes_Size(s);
-
- w = (char *)malloc(rlen ? rlen : 1);
- if (!w)
- goto nomem;
-
- for (i = 0; i != rlen; i++) {
- c = r[i];
- if (c == ' ' || c == '\t' || c == '\r') {
- if (!allws && (wlen == 0 || w[wlen - 1] != ' '))
- w[wlen++] = ' ';
- } else if (c == '\n' && !allws
- && wlen > 0 && w[wlen - 1] == ' ') {
- w[wlen - 1] = '\n';
- } else {
- w[wlen++] = c;
- }
- }
-
- result = PyBytes_FromStringAndSize(w, wlen);
-
-nomem:
- free(w);
- return result ? result : PyErr_NoMemory();
-}
-
-
static char mdiff_doc[] = "Efficient binary diff.";
static PyMethodDef methods[] = {
{"bdiff", bdiff, METH_VARARGS, "calculate a binary diff\n"},
{"blocks", blocks, METH_VARARGS, "find a list of matching lines\n"},
- {"fixws", fixws, METH_VARARGS, "normalize diff whitespaces\n"},
{NULL, NULL}
};
diff --git a/mercurial/bookmarks.py b/mercurial/bookmarks.py
index a7cd854..a017678 100644
--- a/mercurial/bookmarks.py
+++ b/mercurial/bookmarks.py
@@ -26,17 +26,11 @@ def read(repo):
bookmarks = {}
try:
for line in repo.opener('bookmarks'):
- line = line.strip()
- if not line:
- continue
- if ' ' not in line:
- repo.ui.warn(_('malformed line in .hg/bookmarks: %r\n') % line)
- continue
- sha, refspec = line.split(' ', 1)
+ sha, refspec = line.strip().split(' ', 1)
refspec = encoding.tolocal(refspec)
try:
bookmarks[refspec] = repo.changelog.lookup(sha)
- except LookupError:
+ except error.RepoLookupError:
pass
except IOError, inst:
if inst.errno != errno.ENOENT:
@@ -90,7 +84,7 @@ def write(repo):
file = repo.opener('bookmarks', 'w', atomictemp=True)
for refspec, node in refs.iteritems():
file.write("%s %s\n" % (hex(node), encoding.fromlocal(refspec)))
- file.close()
+ file.rename()
# touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
try:
@@ -121,63 +115,42 @@ def setcurrent(repo, mark):
try:
file = repo.opener('bookmarks.current', 'w', atomictemp=True)
file.write(encoding.fromlocal(mark))
- file.close()
+ file.rename()
finally:
wlock.release()
repo._bookmarkcurrent = mark
-def unsetcurrent(repo):
- wlock = repo.wlock()
- try:
- try:
- util.unlink(repo.join('bookmarks.current'))
- repo._bookmarkcurrent = None
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- finally:
- wlock.release()
-
def updatecurrentbookmark(repo, oldnode, curbranch):
try:
- return update(repo, oldnode, repo.branchtip(curbranch))
- except error.RepoLookupError:
+ update(repo, oldnode, repo.branchtags()[curbranch])
+ except KeyError:
if curbranch == "default": # no default branch!
- return update(repo, oldnode, repo.lookup("tip"))
+ update(repo, oldnode, repo.lookup("tip"))
else:
raise util.Abort(_("branch %s not found") % curbranch)
def update(repo, parents, node):
marks = repo._bookmarks
update = False
- cur = repo._bookmarkcurrent
- if not cur:
- return False
-
- toupdate = [b for b in marks if b.split('@', 1)[0] == cur.split('@', 1)[0]]
- for mark in toupdate:
- if mark and marks[mark] in parents:
- old = repo[marks[mark]]
- new = repo[node]
- if new in old.descendants() and mark == cur:
- marks[cur] = new.node()
- update = True
- if mark != cur:
- del marks[mark]
+ mark = repo._bookmarkcurrent
+ if mark and marks[mark] in parents:
+ old = repo[marks[mark]]
+ new = repo[node]
+ if new in old.descendants():
+ marks[mark] = new.node()
+ update = True
if update:
- repo._writebookmarks(marks)
- return update
+ write(repo)
def listbookmarks(repo):
# We may try to list bookmarks on a repo type that does not
# support it (e.g., statichttprepository).
- marks = getattr(repo, '_bookmarks', {})
+ if not hasattr(repo, '_bookmarks'):
+ return {}
d = {}
- for k, v in marks.iteritems():
- # don't expose local divergent bookmarks
- if '@' not in k or k.endswith('@'):
- d[k] = hex(v)
+ for k, v in repo._bookmarks.iteritems():
+ d[k] = hex(v)
return d
def pushbookmark(repo, key, old, new):
@@ -197,7 +170,7 @@ def pushbookmark(repo, key, old, new):
finally:
w.release()
-def updatefromremote(ui, repo, remote, path):
+def updatefromremote(ui, repo, remote):
ui.debug("checking for updated bookmarks\n")
rb = remote.listkeys('bookmarks')
changed = False
@@ -214,26 +187,8 @@ def updatefromremote(ui, repo, remote, path):
changed = True
ui.status(_("updating bookmark %s\n") % k)
else:
- # find a unique @ suffix
- for x in range(1, 100):
- n = '%s@%d' % (k, x)
- if n not in repo._bookmarks:
- break
- # try to use an @pathalias suffix
- # if an @pathalias already exists, we overwrite (update) it
- for p, u in ui.configitems("paths"):
- if path == u:
- n = '%s@%s' % (k, p)
-
- repo._bookmarks[n] = cr.node()
- changed = True
- ui.warn(_("divergent bookmark %s stored as %s\n") % (k, n))
- elif rb[k] in repo:
- # add remote bookmarks for changes we already have
- repo._bookmarks[k] = repo[rb[k]].node()
- changed = True
- ui.status(_("adding remote bookmark %s\n") % k)
-
+ ui.warn(_("not updating divergent"
+ " bookmark %s\n") % k)
if changed:
write(repo)
@@ -245,8 +200,7 @@ def diff(ui, repo, remote):
diff = sorted(set(rmarks) - set(lmarks))
for k in diff:
- mark = ui.debugflag and rmarks[k] or rmarks[k][:12]
- ui.write(" %-25s %s\n" % (k, mark))
+ ui.write(" %-25s %s\n" % (k, rmarks[k][:12]))
if len(diff) <= 0:
ui.status(_("no changed bookmarks found\n"))
diff --git a/mercurial/bundlerepo.py b/mercurial/bundlerepo.py
index fc78b7a..9769fcb 100644
--- a/mercurial/bundlerepo.py
+++ b/mercurial/bundlerepo.py
@@ -14,7 +14,7 @@ were part of the actual repository.
from node import nullid
from i18n import _
import os, tempfile, shutil
-import changegroup, util, mdiff, discovery, cmdutil
+import changegroup, util, mdiff, discovery
import localrepo, changelog, manifest, filelog, revlog, error
class bundlerevlog(revlog.revlog):
@@ -54,7 +54,7 @@ class bundlerevlog(revlog.revlog):
continue
for p in (p1, p2):
- if p not in self.nodemap:
+ if not p in self.nodemap:
raise error.LookupError(p, self.indexfile,
_("unknown parent"))
# start, size, full unc. size, base (unused), link, p1, p2, node
@@ -95,23 +95,15 @@ class bundlerevlog(revlog.revlog):
return mdiff.textdiff(self.revision(self.node(rev1)),
self.revision(self.node(rev2)))
- def revision(self, nodeorrev):
- """return an uncompressed revision of a given node or revision
- number.
- """
- if isinstance(nodeorrev, int):
- rev = nodeorrev
- node = self.node(rev)
- else:
- node = nodeorrev
- rev = self.rev(node)
-
+ def revision(self, node):
+ """return an uncompressed revision of a given"""
if node == nullid:
return ""
text = None
chain = []
iter_node = node
+ rev = self.rev(iter_node)
# reconstruct the revision if it is from a changegroup
while self.inbundle(rev):
if self._cache and self._cache[0] == iter_node:
@@ -167,10 +159,6 @@ class bundlefilelog(bundlerevlog, filelog.filelog):
def _file(self, f):
self._repo.file(f)
-class bundlepeer(localrepo.localpeer):
- def canpush(self):
- return False
-
class bundlerepository(localrepo.localrepository):
def __init__(self, ui, path, bundlename):
self._tempparent = None
@@ -180,7 +168,6 @@ class bundlerepository(localrepo.localrepository):
self._tempparent = tempfile.mkdtemp()
localrepo.instance(ui, self._tempparent, 1)
localrepo.localrepository.__init__(self, ui, self._tempparent)
- self.ui.setconfig('phases', 'publish', False)
if path:
self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename
@@ -276,25 +263,13 @@ class bundlerepository(localrepo.localrepository):
def cancopy(self):
return False
- def peer(self):
- return bundlepeer(self)
-
def getcwd(self):
return os.getcwd() # always outside the repo
- def _writebranchcache(self, branches, tip, tiprev):
- # don't overwrite the disk cache with bundle-augmented data
- pass
-
def instance(ui, path, create):
if create:
raise util.Abort(_('cannot create new bundle repository'))
parentpath = ui.config("bundle", "mainreporoot", "")
- if not parentpath:
- # try to find the correct path to the working directory repo
- parentpath = cmdutil.findrepo(os.getcwd())
- if parentpath is None:
- parentpath = ''
if parentpath:
# Try to make the full path relative so we get a nice, short URL.
# In particular, we don't want temp dir names in test outputs.
@@ -330,16 +305,13 @@ def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None,
Returns a tuple (local, csets, cleanupfn):
- "local" is a local repo from which to obtain the actual incoming
- changesets; it is a bundlerepo for the obtained bundle when the
- original "other" is remote.
+ "local" is a local repo from which to obtain the actual incoming changesets; it
+ is a bundlerepo for the obtained bundle when the original "other" is remote.
"csets" lists the incoming changeset node ids.
- "cleanupfn" must be called without arguments when you're done processing
- the changes; it closes both the original "other" and the one returned
- here.
+ "cleanupfn" must be called without arguments when you're done processing the
+ changes; it closes both the original "other" and the one returned here.
'''
- tmp = discovery.findcommonincoming(repo, other, heads=onlyheads,
- force=force)
+ tmp = discovery.findcommonincoming(repo, other, heads=onlyheads, force=force)
common, incoming, rheads = tmp
if not incoming:
try:
@@ -351,8 +323,8 @@ def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None,
bundle = None
bundlerepo = None
- localrepo = other.local()
- if bundlename or not localrepo:
+ localrepo = other
+ if bundlename or not other.local():
# create a bundle (uncompressed if other repo is not local)
if other.capable('getbundle'):
@@ -363,12 +335,12 @@ def getremotechanges(ui, repo, other, onlyheads=None, bundlename=None,
rheads = None
else:
cg = other.changegroupsubset(incoming, rheads, 'incoming')
- bundletype = localrepo and "HG10BZ" or "HG10UN"
+ bundletype = other.local() and "HG10BZ" or "HG10UN"
fname = bundle = changegroup.writebundle(cg, bundlename, bundletype)
# keep written bundle?
if bundlename:
bundle = None
- if not localrepo:
+ if not other.local():
# use the created uncompressed bundlerepo
localrepo = bundlerepo = bundlerepository(ui, repo.root, fname)
# this repo contains local and other now, so filter out local again
diff --git a/mercurial/byterange.py b/mercurial/byterange.py
index f4f5f53..cc8f893 100644
--- a/mercurial/byterange.py
+++ b/mercurial/byterange.py
@@ -9,8 +9,10 @@
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, see
-# <http://www.gnu.org/licenses/>.
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc.,
+# 59 Temple Place, Suite 330,
+# Boston, MA 02111-1307 USA
# This file is part of urlgrabber, a high-level cross-protocol url-grabber
# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
@@ -101,7 +103,9 @@ class RangeableFileObject(object):
"""This effectively allows us to wrap at the instance level.
Any attribute not found in _this_ object will be searched for
in self.fo. This includes methods."""
- return getattr(self.fo, name)
+ if hasattr(self.fo, name):
+ return getattr(self.fo, name)
+ raise AttributeError(name)
def tell(self):
"""Return the position within the range.
@@ -166,8 +170,10 @@ class RangeableFileObject(object):
offset is relative to the current position (self.realpos).
"""
assert offset >= 0
- seek = getattr(self.fo, 'seek', self._poor_mans_seek)
- seek(self.realpos + offset)
+ if not hasattr(self.fo, 'seek'):
+ self._poor_mans_seek(offset)
+ else:
+ self.fo.seek(self.realpos + offset)
self.realpos += offset
def _poor_mans_seek(self, offset):
diff --git a/mercurial/changegroup.py b/mercurial/changegroup.py
index 236c8a3..7ee7b3f 100644
--- a/mercurial/changegroup.py
+++ b/mercurial/changegroup.py
@@ -118,7 +118,7 @@ def decompressor(fh, alg):
elif alg == 'GZ':
def generator(f):
zd = zlib.decompressobj()
- for chunk in util.filechunkiter(f):
+ for chunk in f:
yield zd.decompress(chunk)
elif alg == 'BZ':
def generator(f):
diff --git a/mercurial/changelog.py b/mercurial/changelog.py
index a3c6450..55e23c5 100644
--- a/mercurial/changelog.py
+++ b/mercurial/changelog.py
@@ -9,8 +9,6 @@ from node import bin, hex, nullid
from i18n import _
import util, error, revlog, encoding
-_defaultextra = {'branch': 'default'}
-
def _string_escape(text):
"""
>>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)}
@@ -26,20 +24,9 @@ def _string_escape(text):
return text.replace('\0', '\\0')
def decodeextra(text):
- """
- >>> decodeextra(encodeextra({'foo': 'bar', 'baz': chr(0) + '2'}))
- {'foo': 'bar', 'baz': '\\x002', 'branch': 'default'}
- >>> decodeextra(encodeextra({'foo': 'bar', 'baz': chr(92) + chr(0) + '2'}))
- {'foo': 'bar', 'baz': '\\\\\\x002', 'branch': 'default'}
- """
- extra = _defaultextra.copy()
+ extra = {}
for l in text.split('\0'):
if l:
- if '\\0' in l:
- # fix up \0 without getting into trouble with \\0
- l = l.replace('\\\\', '\\\\\n')
- l = l.replace('\\0', '\0')
- l = l.replace('\n', '')
k, v = l.decode('string_escape').split(':', 1)
extra[k] = v
return extra
@@ -120,6 +107,8 @@ class changelog(revlog.revlog):
self._realopener = opener
self._delayed = False
self._divert = False
+ # hiddenrevs: revs that should be hidden by command and tools
+ self.hiddenrevs = set()
def delayupdate(self):
"delay visibility of index updates to other readers"
@@ -151,7 +140,6 @@ class changelog(revlog.revlog):
r = revlog.revlog(self.opener, file)
self.index = r.index
self.nodemap = r.nodemap
- self._nodecache = r._nodecache
self._chunkcache = r._chunkcache
def writepending(self):
@@ -192,26 +180,28 @@ class changelog(revlog.revlog):
"""
text = self.revision(node)
if not text:
- return (nullid, "", (0, 0), [], "", _defaultextra)
+ return (nullid, "", (0, 0), [], "", {'branch': 'default'})
last = text.index("\n\n")
desc = encoding.tolocal(text[last + 2:])
l = text[:last].split('\n')
manifest = bin(l[0])
user = encoding.tolocal(l[1])
- tdata = l[2].split(' ', 2)
- if len(tdata) != 3:
- time = float(tdata[0])
+ extra_data = l[2].split(' ', 2)
+ if len(extra_data) != 3:
+ time = float(extra_data.pop(0))
try:
# various tools did silly things with the time zone field.
- timezone = int(tdata[1])
+ timezone = int(extra_data[0])
except ValueError:
timezone = 0
- extra = _defaultextra
+ extra = {}
else:
- time, timezone = float(tdata[0]), int(tdata[1])
- extra = decodeextra(tdata[2])
-
+ time, timezone, extra = extra_data
+ time, timezone = float(time), int(timezone)
+ extra = decodeextra(extra)
+ if not extra.get('branch'):
+ extra['branch'] = 'default'
files = l[3:]
return (manifest, user, (time, timezone), files, desc, extra)
diff --git a/mercurial/cmdutil.py b/mercurial/cmdutil.py
index 7ccbb62..8f9a9a7 100644
--- a/mercurial/cmdutil.py
+++ b/mercurial/cmdutil.py
@@ -8,9 +8,9 @@
from node import hex, nullid, nullrev, short
from i18n import _
import os, sys, errno, re, tempfile
-import util, scmutil, templater, patch, error, templatekw, revlog, copies
+import util, scmutil, templater, patch, error, templatekw, revlog
import match as matchmod
-import subrepo, context, repair, bookmarks, graphmod, revset
+import subrepo
def parsealiases(cmd):
return cmd.lstrip("^").split("|")
@@ -23,14 +23,7 @@ def findpossible(cmd, table, strict=False):
"""
choice = {}
debugchoice = {}
-
- if cmd in table:
- # short-circuit exact matches, "log" alias beats "^log|history"
- keys = [cmd]
- else:
- keys = table.keys()
-
- for e in keys:
+ for e in table.keys():
aliases = parsealiases(e)
found = None
if cmd in aliases:
@@ -82,10 +75,6 @@ def bailifchanged(repo):
modified, added, removed, deleted = repo.status()[:4]
if modified or added or removed or deleted:
raise util.Abort(_("outstanding uncommitted changes"))
- ctx = repo[None]
- for s in ctx.substate:
- if ctx.sub(s).dirty():
- raise util.Abort(_("uncommitted changes in subrepo %s") % s)
def logmessage(ui, opts):
""" get the log message according to -m and -l option """
@@ -120,13 +109,12 @@ def loglimit(opts):
limit = None
return limit
-def makefilename(repo, pat, node, desc=None,
+def makefilename(repo, pat, node,
total=None, seqno=None, revwidth=None, pathname=None):
node_expander = {
'H': lambda: hex(node),
'R': lambda: str(repo.changelog.rev(node)),
'h': lambda: short(node),
- 'm': lambda: re.sub('[^\w]', '_', str(desc))
}
expander = {
'%': lambda: '%',
@@ -166,14 +154,14 @@ def makefilename(repo, pat, node, desc=None,
raise util.Abort(_("invalid format spec '%%%s' in output filename") %
inst.args[0])
-def makefileobj(repo, pat, node=None, desc=None, total=None,
+def makefileobj(repo, pat, node=None, total=None,
seqno=None, revwidth=None, mode='wb', pathname=None):
writable = mode not in ('r', 'rb')
if not pat or pat == '-':
fp = writable and repo.ui.fout or repo.ui.fin
- if util.safehasattr(fp, 'fileno'):
+ if hasattr(fp, 'fileno'):
return os.fdopen(os.dup(fp.fileno()), mode)
else:
# if this fp can't be duped properly, return
@@ -189,11 +177,11 @@ def makefileobj(repo, pat, node=None, desc=None, total=None,
return getattr(self.f, attr)
return wrappedfileobj(fp)
- if util.safehasattr(pat, 'write') and writable:
+ if hasattr(pat, 'write') and writable:
return pat
- if util.safehasattr(pat, 'read') and 'r' in mode:
+ if hasattr(pat, 'read') and 'r' in mode:
return pat
- return open(makefilename(repo, pat, node, desc, total, seqno, revwidth,
+ return open(makefilename(repo, pat, node, total, seqno, revwidth,
pathname),
mode)
@@ -268,11 +256,6 @@ def copy(ui, repo, pats, opts, rename=False):
# otarget: ossep
def copyfile(abssrc, relsrc, otarget, exact):
abstarget = scmutil.canonpath(repo.root, cwd, otarget)
- if '/' in abstarget:
- # We cannot normalize abstarget itself, this would prevent
- # case only renames, like a => A.
- abspath, absname = abstarget.rsplit('/', 1)
- abstarget = repo.dirstate.normalize(abspath) + '/' + absname
reltarget = repo.pathto(abstarget, cwd)
target = repo.wjoin(abstarget)
src = repo.wjoin(abssrc)
@@ -290,16 +273,6 @@ def copy(ui, repo, pats, opts, rename=False):
# check for overwrites
exists = os.path.lexists(target)
- samefile = False
- if exists and abssrc != abstarget:
- if (repo.dirstate.normalize(abssrc) ==
- repo.dirstate.normalize(abstarget)):
- if not rename:
- ui.warn(_("%s: can't copy - same file\n") % reltarget)
- return
- exists = False
- samefile = True
-
if not after and exists or after and state in 'mn':
if not opts['force']:
ui.warn(_('%s: not overwriting - file exists\n') %
@@ -322,12 +295,7 @@ def copy(ui, repo, pats, opts, rename=False):
targetdir = os.path.dirname(target) or '.'
if not os.path.isdir(targetdir):
os.makedirs(targetdir)
- if samefile:
- tmp = target + "~hgrename"
- os.rename(src, tmp)
- os.rename(tmp, target)
- else:
- util.copyfile(src, target)
+ util.copyfile(src, target)
srcexists = True
except IOError, inst:
if inst.errno == errno.ENOENT:
@@ -350,7 +318,7 @@ def copy(ui, repo, pats, opts, rename=False):
scmutil.dirstatecopy(ui, repo, wctx, abssrc, abstarget,
dryrun=dryrun, cwd=cwd)
if rename and not dryrun:
- if not after and srcexists and not samefile:
+ if not after and srcexists:
util.unlinkpath(repo.wjoin(abssrc))
wctx.forget([abssrc])
@@ -548,13 +516,11 @@ def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False,
shouldclose = False
if not fp:
- desc_lines = ctx.description().rstrip().split('\n')
- desc = desc_lines[0] #Commit always has a first line.
- fp = makefileobj(repo, template, node, desc=desc, total=total,
- seqno=seqno, revwidth=revwidth, mode='ab')
+ fp = makefileobj(repo, template, node, total=total, seqno=seqno,
+ revwidth=revwidth, mode='ab')
if fp != template:
shouldclose = True
- if fp != sys.stdout and util.safehasattr(fp, 'name'):
+ if fp != sys.stdout and hasattr(fp, 'name'):
repo.ui.note("%s\n" % fp.name)
fp.write("# HG changeset patch\n")
@@ -608,17 +574,10 @@ def diffordiffstat(ui, repo, diffopts, node1, node2, match,
ctx1 = repo[node1]
ctx2 = repo[node2]
for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
- tempnode2 = node2
- try:
- if node2 is not None:
- tempnode2 = ctx2.substate[subpath][1]
- except KeyError:
- # A subrepo that existed in node1 was deleted between node1 and
- # node2 (inclusive). Thus, ctx2's substate won't contain that
- # subpath. The best we can do is to ignore it.
- tempnode2 = None
+ if node2 is not None:
+ node2 = ctx2.substate[subpath][1]
submatch = matchmod.narrowmatcher(subpath, match)
- sub.diff(diffopts, tempnode2, submatch, changes=changes,
+ sub.diff(diffopts, node2, submatch, changes=changes,
stat=stat, fp=fp, prefix=prefix)
class changeset_printer(object):
@@ -692,9 +651,6 @@ class changeset_printer(object):
for tag in self.repo.nodetags(changenode):
self.ui.write(_("tag: %s\n") % tag,
label='log.tag')
- if self.ui.debugflag and ctx.phase():
- self.ui.write(_("phase: %s\n") % _(ctx.phasestr()),
- label='log.phase')
for parent in parents:
self.ui.write(_("parent: %d:%s\n") % parent,
label='log.parent')
@@ -910,10 +866,7 @@ def show_changeset(ui, repo, opts, buffered=False):
if not (tmpl or style):
tmpl = ui.config('ui', 'logtemplate')
if tmpl:
- try:
- tmpl = templater.parsestring(tmpl)
- except SyntaxError:
- tmpl = templater.parsestring(tmpl, quoted=False)
+ tmpl = templater.parsestring(tmpl)
else:
style = util.expandpath(ui.config('ui', 'style', ''))
@@ -952,26 +905,12 @@ def finddate(ui, repo, date):
for ctx in walkchangerevs(repo, m, {'rev': None}, prep):
rev = ctx.rev()
if rev in results:
- ui.status(_("found revision %s from %s\n") %
+ ui.status(_("Found revision %s from %s\n") %
(rev, util.datestr(results[rev])))
return str(rev)
raise util.Abort(_("revision matching date not found"))
-def increasingwindows(start, end, windowsize=8, sizelimit=512):
- if start < end:
- while start < end:
- yield start, min(windowsize, end - start)
- start += windowsize
- if windowsize < sizelimit:
- windowsize *= 2
- else:
- while start > end:
- yield start, min(windowsize, start - end - 1)
- start -= windowsize
- if windowsize < sizelimit:
- windowsize *= 2
-
def walkchangerevs(repo, match, opts, prepare):
'''Iterate over files and the revs in which they changed.
@@ -987,6 +926,20 @@ def walkchangerevs(repo, match, opts, prepare):
yielding each context, the iterator will first call the prepare
function on each context in the window in forward order.'''
+ def increasing_windows(start, end, windowsize=8, sizelimit=512):
+ if start < end:
+ while start < end:
+ yield start, min(windowsize, end - start)
+ start += windowsize
+ if windowsize < sizelimit:
+ windowsize *= 2
+ else:
+ while start > end:
+ yield start, min(windowsize, start - end - 1)
+ start -= windowsize
+ if windowsize < sizelimit:
+ windowsize *= 2
+
follow = opts.get('follow') or opts.get('follow_first')
if not len(repo):
@@ -996,13 +949,13 @@ def walkchangerevs(repo, match, opts, prepare):
defrange = '%s:0' % repo['.'].rev()
else:
defrange = '-1:0'
- revs = scmutil.revrange(repo, opts.get('rev') or [defrange])
+ revs = scmutil.revrange(repo, opts['rev'] or [defrange])
if not revs:
return []
wanted = set()
slowpath = match.anypats() or (match.files() and opts.get('removed'))
fncache = {}
- change = repo.changectx
+ change = util.cachefunc(repo.changectx)
# First step is to fill wanted, the set of revisions that we want to yield.
# When it does not induce extra cost, we also fill fncache for revisions in
@@ -1014,7 +967,7 @@ def walkchangerevs(repo, match, opts, prepare):
wanted = set(revs)
copies = []
- if not slowpath and match.files():
+ if not slowpath:
# We only have to read through the filelog to find wanted revisions
minrev, maxrev = min(revs), max(revs)
@@ -1047,15 +1000,8 @@ def walkchangerevs(repo, match, opts, prepare):
return reversed(revs)
def iterfiles():
- pctx = repo['.']
for filename in match.files():
- if follow:
- if filename not in pctx:
- raise util.Abort(_('cannot follow file not in parent '
- 'revision: "%s"') % filename)
- yield filename, pctx[filename].filenode()
- else:
- yield filename, None
+ yield filename, None
for filename_node in copies:
yield filename_node
for file_, node in iterfiles():
@@ -1158,7 +1104,7 @@ def walkchangerevs(repo, match, opts, prepare):
# it might be worthwhile to do this in the iterator if the rev range
# is descending and the prune args are all within that range
for rev in opts.get('prune', ()):
- rev = repo[rev].rev()
+ rev = repo.changelog.rev(repo.lookup(rev))
ff = followfilter()
stop = min(revs[0], revs[-1])
for x in xrange(rev, stop - 1, -1):
@@ -1176,7 +1122,7 @@ def walkchangerevs(repo, match, opts, prepare):
def want(rev):
return rev in wanted
- for i, window in increasingwindows(0, len(revs)):
+ for i, window in increasing_windows(0, len(revs)):
nrevs = [rev for rev in revs[i:i + window] if want(rev)]
for rev in sorted(nrevs):
fns = fncache.get(rev)
@@ -1192,278 +1138,7 @@ def walkchangerevs(repo, match, opts, prepare):
yield change(rev)
return iterate()
-def _makegraphfilematcher(repo, pats, followfirst):
- # When displaying a revision with --patch --follow FILE, we have
- # to know which file of the revision must be diffed. With
- # --follow, we want the names of the ancestors of FILE in the
- # revision, stored in "fcache". "fcache" is populated by
- # reproducing the graph traversal already done by --follow revset
- # and relating linkrevs to file names (which is not "correct" but
- # good enough).
- fcache = {}
- fcacheready = [False]
- pctx = repo['.']
- wctx = repo[None]
-
- def populate():
- for fn in pats:
- for i in ((pctx[fn],), pctx[fn].ancestors(followfirst=followfirst)):
- for c in i:
- fcache.setdefault(c.linkrev(), set()).add(c.path())
-
- def filematcher(rev):
- if not fcacheready[0]:
- # Lazy initialization
- fcacheready[0] = True
- populate()
- return scmutil.match(wctx, fcache.get(rev, []), default='path')
-
- return filematcher
-
-def _makegraphlogrevset(repo, pats, opts, revs):
- """Return (expr, filematcher) where expr is a revset string built
- from log options and file patterns or None. If --stat or --patch
- are not passed filematcher is None. Otherwise it is a callable
- taking a revision number and returning a match objects filtering
- the files to be detailed when displaying the revision.
- """
- opt2revset = {
- 'no_merges': ('not merge()', None),
- 'only_merges': ('merge()', None),
- '_ancestors': ('ancestors(%(val)s)', None),
- '_fancestors': ('_firstancestors(%(val)s)', None),
- '_descendants': ('descendants(%(val)s)', None),
- '_fdescendants': ('_firstdescendants(%(val)s)', None),
- '_matchfiles': ('_matchfiles(%(val)s)', None),
- 'date': ('date(%(val)r)', None),
- 'branch': ('branch(%(val)r)', ' or '),
- '_patslog': ('filelog(%(val)r)', ' or '),
- '_patsfollow': ('follow(%(val)r)', ' or '),
- '_patsfollowfirst': ('_followfirst(%(val)r)', ' or '),
- 'keyword': ('keyword(%(val)r)', ' or '),
- 'prune': ('not (%(val)r or ancestors(%(val)r))', ' and '),
- 'user': ('user(%(val)r)', ' or '),
- }
-
- opts = dict(opts)
- # follow or not follow?
- follow = opts.get('follow') or opts.get('follow_first')
- followfirst = opts.get('follow_first') and 1 or 0
- # --follow with FILE behaviour depends on revs...
- startrev = revs[0]
- followdescendants = (len(revs) > 1 and revs[0] < revs[1]) and 1 or 0
-
- # branch and only_branch are really aliases and must be handled at
- # the same time
- opts['branch'] = opts.get('branch', []) + opts.get('only_branch', [])
- opts['branch'] = [repo.lookupbranch(b) for b in opts['branch']]
- # pats/include/exclude are passed to match.match() directly in
- # _matchfile() revset but walkchangerevs() builds its matcher with
- # scmutil.match(). The difference is input pats are globbed on
- # platforms without shell expansion (windows).
- pctx = repo[None]
- match, pats = scmutil.matchandpats(pctx, pats, opts)
- slowpath = match.anypats() or (match.files() and opts.get('removed'))
- if not slowpath:
- for f in match.files():
- if follow and f not in pctx:
- raise util.Abort(_('cannot follow file not in parent '
- 'revision: "%s"') % f)
- filelog = repo.file(f)
- if not len(filelog):
- # A zero count may be a directory or deleted file, so
- # try to find matching entries on the slow path.
- if follow:
- raise util.Abort(
- _('cannot follow nonexistent file: "%s"') % f)
- slowpath = True
- if slowpath:
- # See walkchangerevs() slow path.
- #
- if follow:
- raise util.Abort(_('can only follow copies/renames for explicit '
- 'filenames'))
- # pats/include/exclude cannot be represented as separate
- # revset expressions as their filtering logic applies at file
- # level. For instance "-I a -X a" matches a revision touching
- # "a" and "b" while "file(a) and not file(b)" does
- # not. Besides, filesets are evaluated against the working
- # directory.
- matchargs = ['r:', 'd:relpath']
- for p in pats:
- matchargs.append('p:' + p)
- for p in opts.get('include', []):
- matchargs.append('i:' + p)
- for p in opts.get('exclude', []):
- matchargs.append('x:' + p)
- matchargs = ','.join(('%r' % p) for p in matchargs)
- opts['_matchfiles'] = matchargs
- else:
- if follow:
- fpats = ('_patsfollow', '_patsfollowfirst')
- fnopats = (('_ancestors', '_fancestors'),
- ('_descendants', '_fdescendants'))
- if pats:
- # follow() revset inteprets its file argument as a
- # manifest entry, so use match.files(), not pats.
- opts[fpats[followfirst]] = list(match.files())
- else:
- opts[fnopats[followdescendants][followfirst]] = str(startrev)
- else:
- opts['_patslog'] = list(pats)
-
- filematcher = None
- if opts.get('patch') or opts.get('stat'):
- if follow:
- filematcher = _makegraphfilematcher(repo, pats, followfirst)
- else:
- filematcher = lambda rev: match
-
- expr = []
- for op, val in opts.iteritems():
- if not val:
- continue
- if op not in opt2revset:
- continue
- revop, andor = opt2revset[op]
- if '%(val)' not in revop:
- expr.append(revop)
- else:
- if not isinstance(val, list):
- e = revop % {'val': val}
- else:
- e = '(' + andor.join((revop % {'val': v}) for v in val) + ')'
- expr.append(e)
-
- if expr:
- expr = '(' + ' and '.join(expr) + ')'
- else:
- expr = None
- return expr, filematcher
-
-def getgraphlogrevs(repo, pats, opts):
- """Return (revs, expr, filematcher) where revs is an iterable of
- revision numbers, expr is a revset string built from log options
- and file patterns or None, and used to filter 'revs'. If --stat or
- --patch are not passed filematcher is None. Otherwise it is a
- callable taking a revision number and returning a match objects
- filtering the files to be detailed when displaying the revision.
- """
- def increasingrevs(repo, revs, matcher):
- # The sorted input rev sequence is chopped in sub-sequences
- # which are sorted in ascending order and passed to the
- # matcher. The filtered revs are sorted again as they were in
- # the original sub-sequence. This achieve several things:
- #
- # - getlogrevs() now returns a generator which behaviour is
- # adapted to log need. First results come fast, last ones
- # are batched for performances.
- #
- # - revset matchers often operate faster on revision in
- # changelog order, because most filters deal with the
- # changelog.
- #
- # - revset matchers can reorder revisions. "A or B" typically
- # returns returns the revision matching A then the revision
- # matching B. We want to hide this internal implementation
- # detail from the caller, and sorting the filtered revision
- # again achieves this.
- for i, window in increasingwindows(0, len(revs), windowsize=1):
- orevs = revs[i:i + window]
- nrevs = set(matcher(repo, sorted(orevs)))
- for rev in orevs:
- if rev in nrevs:
- yield rev
-
- if not len(repo):
- return iter([]), None, None
- # Default --rev value depends on --follow but --follow behaviour
- # depends on revisions resolved from --rev...
- follow = opts.get('follow') or opts.get('follow_first')
- if opts.get('rev'):
- revs = scmutil.revrange(repo, opts['rev'])
- else:
- if follow and len(repo) > 0:
- revs = scmutil.revrange(repo, ['.:0'])
- else:
- revs = range(len(repo) - 1, -1, -1)
- if not revs:
- return iter([]), None, None
- expr, filematcher = _makegraphlogrevset(repo, pats, opts, revs)
- if expr:
- matcher = revset.match(repo.ui, expr)
- revs = increasingrevs(repo, revs, matcher)
- if not opts.get('hidden'):
- # --hidden is still experimental and not worth a dedicated revset
- # yet. Fortunately, filtering revision number is fast.
- revs = (r for r in revs if r not in repo.hiddenrevs)
- else:
- revs = iter(revs)
- return revs, expr, filematcher
-
-def displaygraph(ui, dag, displayer, showparents, edgefn, getrenamed=None,
- filematcher=None):
- seen, state = [], graphmod.asciistate()
- for rev, type, ctx, parents in dag:
- char = 'o'
- if ctx.node() in showparents:
- char = '@'
- elif ctx.obsolete():
- char = 'x'
- copies = None
- if getrenamed and ctx.rev():
- copies = []
- for fn in ctx.files():
- rename = getrenamed(fn, ctx.rev())
- if rename:
- copies.append((fn, rename[0]))
- revmatchfn = None
- if filematcher is not None:
- revmatchfn = filematcher(ctx.rev())
- displayer.show(ctx, copies=copies, matchfn=revmatchfn)
- lines = displayer.hunk.pop(rev).split('\n')
- if not lines[-1]:
- del lines[-1]
- displayer.flush(rev)
- edges = edgefn(type, char, lines, seen, rev, parents)
- for type, char, lines, coldata in edges:
- graphmod.ascii(ui, state, type, char, lines, coldata)
- displayer.close()
-
-def graphlog(ui, repo, *pats, **opts):
- # Parameters are identical to log command ones
- revs, expr, filematcher = getgraphlogrevs(repo, pats, opts)
- revs = sorted(revs, reverse=1)
- limit = loglimit(opts)
- if limit is not None:
- revs = revs[:limit]
- revdag = graphmod.dagwalker(repo, revs)
-
- getrenamed = None
- if opts.get('copies'):
- endrev = None
- if opts.get('rev'):
- endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
- getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
- displayer = show_changeset(ui, repo, opts, buffered=True)
- showparents = [ctx.node() for ctx in repo[None].parents()]
- displaygraph(ui, revdag, displayer, showparents,
- graphmod.asciiedges, getrenamed, filematcher)
-
-def checkunsupportedgraphflags(pats, opts):
- for op in ["newest_first"]:
- if op in opts and opts[op]:
- raise util.Abort(_("-G/--graph option is incompatible with --%s")
- % op.replace("_", "-"))
-
-def graphrevs(repo, nodes, opts):
- limit = loglimit(opts)
- nodes.reverse()
- if limit is not None:
- nodes = nodes[:limit]
- return graphmod.nodes(repo, nodes)
-
-def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
+def add(ui, repo, match, dryrun, listsubrepos, prefix):
join = lambda f: os.path.join(prefix, f)
bad = []
oldbad = match.bad
@@ -1473,82 +1148,31 @@ def add(ui, repo, match, dryrun, listsubrepos, prefix, explicitonly):
cca = None
abort, warn = scmutil.checkportabilityalert(ui)
if abort or warn:
- cca = scmutil.casecollisionauditor(ui, abort, repo.dirstate)
+ cca = scmutil.casecollisionauditor(ui, abort, wctx)
for f in repo.walk(match):
exact = match.exact(f)
- if exact or not explicitonly and f not in repo.dirstate:
+ if exact or f not in repo.dirstate:
if cca:
cca(f)
names.append(f)
if ui.verbose or not exact:
ui.status(_('adding %s\n') % match.rel(join(f)))
- for subpath in wctx.substate:
- sub = wctx.sub(subpath)
- try:
- submatch = matchmod.narrowmatcher(subpath, match)
- if listsubrepos:
- bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
- False))
- else:
- bad.extend(sub.add(ui, submatch, dryrun, listsubrepos, prefix,
- True))
- except error.LookupError:
- ui.status(_("skipping missing subrepository: %s\n")
- % join(subpath))
+ if listsubrepos:
+ for subpath in wctx.substate:
+ sub = wctx.sub(subpath)
+ try:
+ submatch = matchmod.narrowmatcher(subpath, match)
+ bad.extend(sub.add(ui, submatch, dryrun, prefix))
+ except error.LookupError:
+ ui.status(_("skipping missing subrepository: %s\n")
+ % join(subpath))
if not dryrun:
rejected = wctx.add(names, prefix)
bad.extend(f for f in rejected if f in match.files())
return bad
-def forget(ui, repo, match, prefix, explicitonly):
- join = lambda f: os.path.join(prefix, f)
- bad = []
- oldbad = match.bad
- match.bad = lambda x, y: bad.append(x) or oldbad(x, y)
- wctx = repo[None]
- forgot = []
- s = repo.status(match=match, clean=True)
- forget = sorted(s[0] + s[1] + s[3] + s[6])
- if explicitonly:
- forget = [f for f in forget if match.exact(f)]
-
- for subpath in wctx.substate:
- sub = wctx.sub(subpath)
- try:
- submatch = matchmod.narrowmatcher(subpath, match)
- subbad, subforgot = sub.forget(ui, submatch, prefix)
- bad.extend([subpath + '/' + f for f in subbad])
- forgot.extend([subpath + '/' + f for f in subforgot])
- except error.LookupError:
- ui.status(_("skipping missing subrepository: %s\n")
- % join(subpath))
-
- if not explicitonly:
- for f in match.files():
- if f not in repo.dirstate and not os.path.isdir(match.rel(join(f))):
- if f not in forgot:
- if os.path.exists(match.rel(join(f))):
- ui.warn(_('not removing %s: '
- 'file is already untracked\n')
- % match.rel(join(f)))
- bad.append(f)
-
- for f in forget:
- if ui.verbose or not match.exact(f):
- ui.status(_('removing %s\n') % match.rel(join(f)))
-
- rejected = wctx.forget(forget, prefix)
- bad.extend(f for f in rejected if f in match.files())
- forgot.extend(forget)
- return bad, forgot
-
-def duplicatecopies(repo, rev, p1):
- "Reproduce copies found in the source revision in the dirstate for grafts"
- for dst, src in copies.pathcopies(repo[p1], repo[rev]).iteritems():
- repo.dirstate.copy(src, dst)
-
def commit(ui, repo, commitfunc, pats, opts):
'''commit the specified files or all outstanding changes'''
date = opts.get('date')
@@ -1564,136 +1188,6 @@ def commit(ui, repo, commitfunc, pats, opts):
return commitfunc(ui, repo, message,
scmutil.match(repo[None], pats, opts), opts)
-def amend(ui, repo, commitfunc, old, extra, pats, opts):
- ui.note(_('amending changeset %s\n') % old)
- base = old.p1()
-
- wlock = repo.wlock()
- try:
- # First, do a regular commit to record all changes in the working
- # directory (if there are any)
- ui.callhooks = False
- try:
- node = commit(ui, repo, commitfunc, pats, opts)
- finally:
- ui.callhooks = True
- ctx = repo[node]
-
- # Participating changesets:
- #
- # node/ctx o - new (intermediate) commit that contains changes from
- # | working dir to go into amending commit (or a workingctx
- # | if there were no changes)
- # |
- # old o - changeset to amend
- # |
- # base o - parent of amending changeset
-
- # Update extra dict from amended commit (e.g. to preserve graft source)
- extra.update(old.extra())
-
- # Also update it from the intermediate commit or from the wctx
- extra.update(ctx.extra())
-
- files = set(old.files())
-
- # Second, we use either the commit we just did, or if there were no
- # changes the parent of the working directory as the version of the
- # files in the final amend commit
- if node:
- ui.note(_('copying changeset %s to %s\n') % (ctx, base))
-
- user = ctx.user()
- date = ctx.date()
- message = ctx.description()
- # Recompute copies (avoid recording a -> b -> a)
- copied = copies.pathcopies(base, ctx)
-
- # Prune files which were reverted by the updates: if old introduced
- # file X and our intermediate commit, node, renamed that file, then
- # those two files are the same and we can discard X from our list
- # of files. Likewise if X was deleted, it's no longer relevant
- files.update(ctx.files())
-
- def samefile(f):
- if f in ctx.manifest():
- a = ctx.filectx(f)
- if f in base.manifest():
- b = base.filectx(f)
- return (not a.cmp(b)
- and a.flags() == b.flags())
- else:
- return False
- else:
- return f not in base.manifest()
- files = [f for f in files if not samefile(f)]
-
- def filectxfn(repo, ctx_, path):
- try:
- fctx = ctx[path]
- flags = fctx.flags()
- mctx = context.memfilectx(fctx.path(), fctx.data(),
- islink='l' in flags,
- isexec='x' in flags,
- copied=copied.get(path))
- return mctx
- except KeyError:
- raise IOError
- else:
- ui.note(_('copying changeset %s to %s\n') % (old, base))
-
- # Use version of files as in the old cset
- def filectxfn(repo, ctx_, path):
- try:
- return old.filectx(path)
- except KeyError:
- raise IOError
-
- # See if we got a message from -m or -l, if not, open the editor
- # with the message of the changeset to amend
- user = opts.get('user') or old.user()
- date = opts.get('date') or old.date()
- message = logmessage(ui, opts)
- if not message:
- cctx = context.workingctx(repo, old.description(), user, date,
- extra,
- repo.status(base.node(), old.node()))
- message = commitforceeditor(repo, cctx, [])
-
- new = context.memctx(repo,
- parents=[base.node(), nullid],
- text=message,
- files=files,
- filectxfn=filectxfn,
- user=user,
- date=date,
- extra=extra)
- newid = repo.commitctx(new)
- if newid != old.node():
- # Reroute the working copy parent to the new changeset
- repo.setparents(newid, nullid)
-
- # Move bookmarks from old parent to amend commit
- bms = repo.nodebookmarks(old.node())
- if bms:
- for bm in bms:
- repo._bookmarks[bm] = newid
- bookmarks.write(repo)
-
- # Strip the intermediate commit (if there was one) and the amended
- # commit
- lock = repo.lock()
- try:
- if node:
- ui.note(_('stripping intermediate changeset %s\n') % ctx)
- ui.note(_('stripping amended changeset %s\n') % old)
- repair.strip(ui, repo, old.node(), topic='amend-backup')
- finally:
- lock.release()
- finally:
- wlock.release()
- return newid
-
def commiteditor(repo, ctx, subs):
if ctx.description():
return ctx.description()
@@ -1734,187 +1228,6 @@ def commitforceeditor(repo, ctx, subs):
return text
-def revert(ui, repo, ctx, parents, *pats, **opts):
- parent, p2 = parents
- node = ctx.node()
-
- mf = ctx.manifest()
- if node == parent:
- pmf = mf
- else:
- pmf = None
-
- # need all matching names in dirstate and manifest of target rev,
- # so have to walk both. do not print errors if files exist in one
- # but not other.
-
- names = {}
-
- wlock = repo.wlock()
- try:
- # walk dirstate.
-
- m = scmutil.match(repo[None], pats, opts)
- m.bad = lambda x, y: False
- for abs in repo.walk(m):
- names[abs] = m.rel(abs), m.exact(abs)
-
- # walk target manifest.
-
- def badfn(path, msg):
- if path in names:
- return
- if path in ctx.substate:
- return
- path_ = path + '/'
- for f in names:
- if f.startswith(path_):
- return
- ui.warn("%s: %s\n" % (m.rel(path), msg))
-
- m = scmutil.match(ctx, pats, opts)
- m.bad = badfn
- for abs in ctx.walk(m):
- if abs not in names:
- names[abs] = m.rel(abs), m.exact(abs)
-
- # get the list of subrepos that must be reverted
- targetsubs = [s for s in ctx.substate if m(s)]
- m = scmutil.matchfiles(repo, names)
- changes = repo.status(match=m)[:4]
- modified, added, removed, deleted = map(set, changes)
-
- # if f is a rename, also revert the source
- cwd = repo.getcwd()
- for f in added:
- src = repo.dirstate.copied(f)
- if src and src not in names and repo.dirstate[src] == 'r':
- removed.add(src)
- names[src] = (repo.pathto(src, cwd), True)
-
- def removeforget(abs):
- if repo.dirstate[abs] == 'a':
- return _('forgetting %s\n')
- return _('removing %s\n')
-
- revert = ([], _('reverting %s\n'))
- add = ([], _('adding %s\n'))
- remove = ([], removeforget)
- undelete = ([], _('undeleting %s\n'))
-
- disptable = (
- # dispatch table:
- # file state
- # action if in target manifest
- # action if not in target manifest
- # make backup if in target manifest
- # make backup if not in target manifest
- (modified, revert, remove, True, True),
- (added, revert, remove, True, False),
- (removed, undelete, None, False, False),
- (deleted, revert, remove, False, False),
- )
-
- for abs, (rel, exact) in sorted(names.items()):
- mfentry = mf.get(abs)
- target = repo.wjoin(abs)
- def handle(xlist, dobackup):
- xlist[0].append(abs)
- if (dobackup and not opts.get('no_backup') and
- os.path.lexists(target)):
- bakname = "%s.orig" % rel
- ui.note(_('saving current version of %s as %s\n') %
- (rel, bakname))
- if not opts.get('dry_run'):
- util.rename(target, bakname)
- if ui.verbose or not exact:
- msg = xlist[1]
- if not isinstance(msg, basestring):
- msg = msg(abs)
- ui.status(msg % rel)
- for table, hitlist, misslist, backuphit, backupmiss in disptable:
- if abs not in table:
- continue
- # file has changed in dirstate
- if mfentry:
- handle(hitlist, backuphit)
- elif misslist is not None:
- handle(misslist, backupmiss)
- break
- else:
- if abs not in repo.dirstate:
- if mfentry:
- handle(add, True)
- elif exact:
- ui.warn(_('file not managed: %s\n') % rel)
- continue
- # file has not changed in dirstate
- if node == parent:
- if exact:
- ui.warn(_('no changes needed to %s\n') % rel)
- continue
- if pmf is None:
- # only need parent manifest in this unlikely case,
- # so do not read by default
- pmf = repo[parent].manifest()
- if abs in pmf and mfentry:
- # if version of file is same in parent and target
- # manifests, do nothing
- if (pmf[abs] != mfentry or
- pmf.flags(abs) != mf.flags(abs)):
- handle(revert, False)
- else:
- handle(remove, False)
-
- if not opts.get('dry_run'):
- def checkout(f):
- fc = ctx[f]
- repo.wwrite(f, fc.data(), fc.flags())
-
- audit_path = scmutil.pathauditor(repo.root)
- for f in remove[0]:
- if repo.dirstate[f] == 'a':
- repo.dirstate.drop(f)
- continue
- audit_path(f)
- try:
- util.unlinkpath(repo.wjoin(f))
- except OSError:
- pass
- repo.dirstate.remove(f)
-
- normal = None
- if node == parent:
- # We're reverting to our parent. If possible, we'd like status
- # to report the file as clean. We have to use normallookup for
- # merges to avoid losing information about merged/dirty files.
- if p2 != nullid:
- normal = repo.dirstate.normallookup
- else:
- normal = repo.dirstate.normal
- for f in revert[0]:
- checkout(f)
- if normal:
- normal(f)
-
- for f in add[0]:
- checkout(f)
- repo.dirstate.add(f)
-
- normal = repo.dirstate.normallookup
- if node == parent and p2 == nullid:
- normal = repo.dirstate.normal
- for f in undelete[0]:
- checkout(f)
- normal(f)
-
- if targetsubs:
- # Revert the subrepos on the revert list
- for sub in targetsubs:
- ctx.sub(sub).revert(ui, ctx.substate[sub], *pats, **opts)
- finally:
- wlock.release()
-
def command(table):
'''returns a function object bound to table which can be used as
a decorator for populating table as a command table'''
diff --git a/mercurial/commands.py b/mercurial/commands.py
index 092da83..35aee93 100644
--- a/mercurial/commands.py
+++ b/mercurial/commands.py
@@ -15,9 +15,8 @@ import archival, changegroup, cmdutil, hbisect
import sshserver, hgweb, hgweb.server, commandserver
import merge as mergemod
import minirst, revset, fileset
-import dagparser, context, simplemerge, graphmod
-import random, setdiscovery, treediscovery, dagutil, pvec, localrepo
-import phases, obsolete
+import dagparser, context, simplemerge
+import random, setdiscovery, treediscovery, dagutil
table = {}
@@ -98,7 +97,6 @@ logopts = [
_('limit number of changes displayed'), _('NUM')),
('M', 'no-merges', None, _('do not show merges')),
('', 'stat', None, _('output diffstat-style summary of changes')),
- ('G', 'graph', None, _("show the revision DAG")),
] + templateopts
diffopts = [
@@ -107,28 +105,20 @@ diffopts = [
('', 'nodates', None, _('omit dates from diff headers'))
]
-diffwsopts = [
+diffopts2 = [
+ ('p', 'show-function', None, _('show which function each change is in')),
+ ('', 'reverse', None, _('produce a diff that undoes the changes')),
('w', 'ignore-all-space', None,
_('ignore white space when comparing lines')),
('b', 'ignore-space-change', None,
_('ignore changes in the amount of white space')),
('B', 'ignore-blank-lines', None,
_('ignore changes whose lines are all blank')),
- ]
-
-diffopts2 = [
- ('p', 'show-function', None, _('show which function each change is in')),
- ('', 'reverse', None, _('produce a diff that undoes the changes')),
- ] + diffwsopts + [
('U', 'unified', '',
_('number of lines of context to show'), _('NUM')),
('', 'stat', None, _('output diffstat-style summary of changes')),
]
-mergetoolopts = [
- ('t', 'tool', '', _('specify merge tool')),
-]
-
similarityopts = [
('s', 'similarity', '',
_('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY'))
@@ -174,7 +164,7 @@ def add(ui, repo, *pats, **opts):
m = scmutil.match(repo[None], pats, opts)
rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'),
- opts.get('subrepos'), prefix="", explicitonly=False)
+ opts.get('subrepos'), prefix="")
return rejected and 1 or 0
@command('addremove',
@@ -190,15 +180,13 @@ def addremove(ui, repo, *pats, **opts):
``.hgignore``. As with add, these changes take effect at the next
commit.
- Use the -s/--similarity option to detect renamed files. This
+ Use the -s/--similarity option to detect renamed files. With a
+ parameter greater than 0, this compares every removed file with
+ every added file and records those similar enough as renames. This
option takes a percentage between 0 (disabled) and 100 (files must
- be identical) as its parameter. With a parameter greater than 0,
- this compares every removed file with every added file and records
- those similar enough as renames. Detecting renamed files this way
+ be identical) as its parameter. Detecting renamed files this way
can be expensive. After using this option, :hg:`status -C` can be
- used to check which files were identified as moved or renamed. If
- not specified, -s/--similarity defaults to 100 and only renames of
- identical files are detected.
+ used to check which files were identified as moved or renamed.
Returns 0 if all files are successfully added.
"""
@@ -222,7 +210,7 @@ def addremove(ui, repo, *pats, **opts):
('n', 'number', None, _('list the revision number (default)')),
('c', 'changeset', None, _('list the changeset')),
('l', 'line-number', None, _('show line number at the first appearance'))
- ] + diffwsopts + walkopts,
+ ] + walkopts,
_('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...'))
def annotate(ui, repo, *pats, **opts):
"""show changeset information by line for each file
@@ -251,11 +239,9 @@ def annotate(ui, repo, *pats, **opts):
if not pats:
raise util.Abort(_('at least one filename or pattern is required'))
- hexfn = ui.debugflag and hex or short
-
opmap = [('user', ' ', lambda x: ui.shortuser(x[0].user())),
('number', ' ', lambda x: str(x[0].rev())),
- ('changeset', ' ', lambda x: hexfn(x[0].node())),
+ ('changeset', ' ', lambda x: short(x[0].node())),
('date', ' ', getdate),
('file', ' ', lambda x: x[0].path()),
('line_number', ':', lambda x: str(x[1])),
@@ -279,15 +265,13 @@ def annotate(ui, repo, *pats, **opts):
m = scmutil.match(ctx, pats, opts)
m.bad = bad
follow = not opts.get('no_follow')
- diffopts = patch.diffopts(ui, opts, section='annotate')
for abs in ctx.walk(m):
fctx = ctx[abs]
if not opts.get('text') and util.binary(fctx.data()):
ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs))
continue
- lines = fctx.annotate(follow=follow, linenumber=linenumber,
- diffopts=diffopts)
+ lines = fctx.annotate(follow=follow, linenumber=linenumber)
pieces = []
for f, sep in funcmap:
@@ -302,9 +286,6 @@ def annotate(ui, repo, *pats, **opts):
for p, l in zip(zip(*pieces), lines):
ui.write("%s: %s" % ("".join(p), l[1]))
- if lines and not lines[-1][1].endswith('\n'):
- ui.write('\n')
-
@command('archive',
[('', 'no-decode', None, _('do not pass files through decoders')),
('p', 'prefix', '', _('directory prefix for files in archive'),
@@ -322,18 +303,6 @@ def archive(ui, repo, dest, **opts):
The archive type is automatically detected based on file
extension (or override using -t/--type).
- .. container:: verbose
-
- Examples:
-
- - create a zip file containing the 1.0 release::
-
- hg archive -r 1.0 project-1.0.zip
-
- - create a tarball excluding .hg files::
-
- hg archive project.tar.gz -X ".hg*"
-
Valid types are:
:``files``: a directory full of files (default)
@@ -379,10 +348,10 @@ def archive(ui, repo, dest, **opts):
@command('backout',
[('', 'merge', None, _('merge with old dirstate parent after backout')),
- ('', 'parent', '',
- _('parent to choose when backing out merge (DEPRECATED)'), _('REV')),
+ ('', 'parent', '', _('parent to choose when backing out merge'), _('REV')),
+ ('t', 'tool', '', _('specify merge tool')),
('r', 'rev', '', _('revision to backout'), _('REV')),
- ] + mergetoolopts + walkopts + commitopts + commitopts2,
+ ] + walkopts + commitopts + commitopts2,
_('[OPTION]... [-r] REV'))
def backout(ui, repo, node=None, rev=None, **opts):
'''reverse effect of earlier changeset
@@ -394,21 +363,15 @@ def backout(ui, repo, node=None, rev=None, **opts):
is committed automatically. Otherwise, hg needs to merge the
changes and the merged result is left uncommitted.
- .. note::
- backout cannot be used to fix either an unwanted or
- incorrect merge.
-
- .. container:: verbose
-
- By default, the pending changeset will have one parent,
- maintaining a linear history. With --merge, the pending
- changeset will instead have two parents: the old parent of the
- working directory and a new child of REV that simply undoes REV.
+ By default, the pending changeset will have one parent,
+ maintaining a linear history. With --merge, the pending changeset
+ will instead have two parents: the old parent of the working
+ directory and a new child of REV that simply undoes REV.
- Before version 1.7, the behavior without --merge was equivalent
- to specifying --merge followed by :hg:`update --clean .` to
- cancel the merge and leave the child of REV as a head to be
- merged separately.
+ Before version 1.7, the behavior without --merge was equivalent to
+ specifying --merge followed by :hg:`update --clean .` to cancel
+ the merge and leave the child of REV as a head to be merged
+ separately.
See :hg:`help dates` for a list of formats valid for -d/--date.
@@ -440,7 +403,8 @@ def backout(ui, repo, node=None, rev=None, **opts):
raise util.Abort(_('cannot backout a change with no parents'))
if p2 != nullid:
if not opts.get('parent'):
- raise util.Abort(_('cannot backout a merge changeset'))
+ raise util.Abort(_('cannot backout a merge changeset without '
+ '--parent'))
p = repo.lookup(opts['parent'])
if p not in (p1, p2):
raise util.Abort(_('%s is not a parent of %s') %
@@ -452,46 +416,42 @@ def backout(ui, repo, node=None, rev=None, **opts):
parent = p1
# the backout should appear on the same branch
- wlock = repo.wlock()
- try:
- branch = repo.dirstate.branch()
- hg.clean(repo, node, show_stats=False)
- repo.dirstate.setbranch(branch)
- revert_opts = opts.copy()
- revert_opts['date'] = None
- revert_opts['all'] = True
- revert_opts['rev'] = hex(parent)
- revert_opts['no_backup'] = None
- revert(ui, repo, **revert_opts)
- if not opts.get('merge') and op1 != node:
- try:
- ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
- return hg.update(repo, op1)
- finally:
- ui.setconfig('ui', 'forcemerge', '')
-
- commit_opts = opts.copy()
- commit_opts['addremove'] = False
- if not commit_opts['message'] and not commit_opts['logfile']:
- # we don't translate commit messages
- commit_opts['message'] = "Backed out changeset %s" % short(node)
- commit_opts['force_editor'] = True
- commit(ui, repo, **commit_opts)
- def nice(node):
- return '%d:%s' % (repo.changelog.rev(node), short(node))
- ui.status(_('changeset %s backs out changeset %s\n') %
- (nice(repo.changelog.tip()), nice(node)))
- if opts.get('merge') and op1 != node:
- hg.clean(repo, op1, show_stats=False)
- ui.status(_('merging with changeset %s\n')
- % nice(repo.changelog.tip()))
- try:
- ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
- return hg.merge(repo, hex(repo.changelog.tip()))
- finally:
- ui.setconfig('ui', 'forcemerge', '')
- finally:
- wlock.release()
+ branch = repo.dirstate.branch()
+ hg.clean(repo, node, show_stats=False)
+ repo.dirstate.setbranch(branch)
+ revert_opts = opts.copy()
+ revert_opts['date'] = None
+ revert_opts['all'] = True
+ revert_opts['rev'] = hex(parent)
+ revert_opts['no_backup'] = None
+ revert(ui, repo, **revert_opts)
+ if not opts.get('merge') and op1 != node:
+ try:
+ ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
+ return hg.update(repo, op1)
+ finally:
+ ui.setconfig('ui', 'forcemerge', '')
+
+ commit_opts = opts.copy()
+ commit_opts['addremove'] = False
+ if not commit_opts['message'] and not commit_opts['logfile']:
+ # we don't translate commit messages
+ commit_opts['message'] = "Backed out changeset %s" % short(node)
+ commit_opts['force_editor'] = True
+ commit(ui, repo, **commit_opts)
+ def nice(node):
+ return '%d:%s' % (repo.changelog.rev(node), short(node))
+ ui.status(_('changeset %s backs out changeset %s\n') %
+ (nice(repo.changelog.tip()), nice(node)))
+ if opts.get('merge') and op1 != node:
+ hg.clean(repo, op1, show_stats=False)
+ ui.status(_('merging with changeset %s\n')
+ % nice(repo.changelog.tip()))
+ try:
+ ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
+ return hg.merge(repo, hex(repo.changelog.tip()))
+ finally:
+ ui.setconfig('ui', 'forcemerge', '')
return 0
@command('bisect',
@@ -521,65 +481,10 @@ def bisect(ui, repo, rev=None, extra=None, command=None,
revision as good or bad without checking it out first.
If you supply a command, it will be used for automatic bisection.
- The environment variable HG_NODE will contain the ID of the
- changeset being tested. The exit status of the command will be
- used to mark revisions as good or bad: status 0 means good, 125
- means to skip the revision, 127 (command not found) will abort the
- bisection, and any other non-zero exit status means the revision
- is bad.
-
- .. container:: verbose
-
- Some examples:
-
- - start a bisection with known bad revision 12, and good revision 34::
-
- hg bisect --bad 34
- hg bisect --good 12
-
- - advance the current bisection by marking current revision as good or
- bad::
-
- hg bisect --good
- hg bisect --bad
-
- - mark the current revision, or a known revision, to be skipped (eg. if
- that revision is not usable because of another issue)::
-
- hg bisect --skip
- hg bisect --skip 23
-
- - forget the current bisection::
-
- hg bisect --reset
-
- - use 'make && make tests' to automatically find the first broken
- revision::
-
- hg bisect --reset
- hg bisect --bad 34
- hg bisect --good 12
- hg bisect --command 'make && make tests'
-
- - see all changesets whose states are already known in the current
- bisection::
-
- hg log -r "bisect(pruned)"
-
- - see the changeset currently being bisected (especially useful
- if running with -U/--noupdate)::
-
- hg log -r "bisect(current)"
-
- - see all changesets that took part in the current bisection::
-
- hg log -r "bisect(range)"
-
- - with the graphlog extension, you can even get a nice graph::
-
- hg log --graph -r "bisect(range)"
-
- See :hg:`help revsets` for more about the `bisect()` keyword.
+ Its exit status will be used to mark revisions as good or bad:
+ status 0 means good, 125 means to skip the revision, 127
+ (command not found) will abort the bisection, and any other
+ non-zero exit status means the revision is bad.
Returns 0 on success.
"""
@@ -655,22 +560,9 @@ def bisect(ui, repo, rev=None, extra=None, command=None,
if command:
changesets = 1
try:
- node = state['current'][0]
- except LookupError:
- if noupdate:
- raise util.Abort(_('current bisect revision is unknown - '
- 'start a new bisect to fix'))
- node, p2 = repo.dirstate.parents()
- if p2 != nullid:
- raise util.Abort(_('current bisect revision is a merge'))
- try:
while changesets:
# update state
- state['current'] = [node]
- hbisect.save_state(repo, state)
- status = util.system(command,
- environ={'HG_NODE': hex(node)},
- out=ui.fout)
+ status = util.system(command, out=ui.fout)
if status == 125:
transition = "skip"
elif status == 0:
@@ -682,20 +574,17 @@ def bisect(ui, repo, rev=None, extra=None, command=None,
raise util.Abort(_("%s killed") % command)
else:
transition = "bad"
- ctx = scmutil.revsingle(repo, rev, node)
+ ctx = scmutil.revsingle(repo, rev)
rev = None # clear for future iterations
state[transition].append(ctx.node())
- ui.status(_('changeset %d:%s: %s\n') % (ctx, ctx, transition))
+ ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition))
check_state(state, interactive=False)
# bisect
nodes, changesets, good = hbisect.bisect(repo.changelog, state)
# update to next check
- node = nodes[0]
- if not noupdate:
- cmdutil.bailifchanged(repo)
- hg.clean(repo, node, show_stats=False)
+ cmdutil.bailifchanged(repo)
+ hg.clean(repo, nodes[0], show_stats=False)
finally:
- state['current'] = [node]
hbisect.save_state(repo, state)
print_result(nodes, good)
return
@@ -727,8 +616,6 @@ def bisect(ui, repo, rev=None, extra=None, command=None,
if extendnode is not None:
ui.write(_("Extending search to changeset %d:%s\n"
% (extendnode.rev(), extendnode)))
- state['current'] = [extendnode.node()]
- hbisect.save_state(repo, state)
if noupdate:
return
cmdutil.bailifchanged(repo)
@@ -748,8 +635,6 @@ def bisect(ui, repo, rev=None, extra=None, command=None,
ui.write(_("Testing changeset %d:%s "
"(%d changesets remaining, ~%d tests)\n")
% (rev, short(node), changesets, tests))
- state['current'] = [node]
- hbisect.save_state(repo, state)
if not noupdate:
cmdutil.bailifchanged(repo)
return hg.clean(repo, node)
@@ -759,16 +644,16 @@ def bisect(ui, repo, rev=None, extra=None, command=None,
('r', 'rev', '', _('revision'), _('REV')),
('d', 'delete', False, _('delete a given bookmark')),
('m', 'rename', '', _('rename a given bookmark'), _('NAME')),
- ('i', 'inactive', False, _('mark a bookmark inactive'))],
+ ('i', 'inactive', False, _('do not mark a new bookmark active'))],
_('hg bookmarks [-f] [-d] [-i] [-m NAME] [-r REV] [NAME]'))
def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
rename=None, inactive=False):
'''track a line of development with movable markers
- Bookmarks are pointers to certain commits that move when committing.
- Bookmarks are local. They can be renamed, copied and deleted. It is
- possible to use :hg:`merge NAME` to merge from a given bookmark, and
- :hg:`update NAME` to update to a given bookmark.
+ Bookmarks are pointers to certain commits that move when
+ committing. Bookmarks are local. They can be renamed, copied and
+ deleted. It is possible to use bookmark names in :hg:`merge` and
+ :hg:`update` to merge and update respectively to a given bookmark.
You can use :hg:`bookmark NAME` to set a bookmark on the working
directory's parent revision with the given name. If you specify
@@ -779,27 +664,11 @@ def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
push` and :hg:`help pull`). This requires both the local and remote
repositories to support bookmarks. For versions prior to 1.8, this means
the bookmarks extension must be enabled.
-
- With -i/--inactive, the new bookmark will not be made the active
- bookmark. If -r/--rev is given, the new bookmark will not be made
- active even if -i/--inactive is not given. If no NAME is given, the
- current active bookmark will be marked inactive.
'''
hexfn = ui.debugflag and hex or short
marks = repo._bookmarks
cur = repo.changectx('.').node()
- if delete:
- if mark is None:
- raise util.Abort(_("bookmark name required"))
- if mark not in marks:
- raise util.Abort(_("bookmark '%s' does not exist") % mark)
- if mark == repo._bookmarkcurrent:
- bookmarks.setcurrent(repo, None)
- del marks[mark]
- bookmarks.write(repo)
- return
-
if rename:
if rename not in marks:
raise util.Abort(_("bookmark '%s' does not exist") % rename)
@@ -815,6 +684,17 @@ def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
bookmarks.write(repo)
return
+ if delete:
+ if mark is None:
+ raise util.Abort(_("bookmark name required"))
+ if mark not in marks:
+ raise util.Abort(_("bookmark '%s' does not exist") % mark)
+ if mark == repo._bookmarkcurrent:
+ bookmarks.setcurrent(repo, None)
+ del marks[mark]
+ bookmarks.write(repo)
+ return
+
if mark is not None:
if "\n" in mark:
raise util.Abort(_("bookmark name cannot contain newlines"))
@@ -828,15 +708,15 @@ def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
if mark in marks and not force:
raise util.Abort(_("bookmark '%s' already exists "
"(use -f to force)") % mark)
- if ((mark in repo.branchmap() or mark == repo.dirstate.branch())
+ if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
and not force):
raise util.Abort(
_("a bookmark cannot have the name of an existing branch"))
if rev:
marks[mark] = repo.lookup(rev)
else:
- marks[mark] = cur
- if not inactive and cur == marks[mark]:
+ marks[mark] = repo.changectx('.').node()
+ if not inactive and repo.changectx('.').node() == marks[mark]:
bookmarks.setcurrent(repo, mark)
bookmarks.write(repo)
return
@@ -870,11 +750,6 @@ def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False,
def branch(ui, repo, label=None, **opts):
"""set or show the current branch name
- .. note::
- Branch names are permanent and global. Use :hg:`bookmark` to create a
- light-weight bookmark instead. See :hg:`help glossary` for more
- information about named branches and bookmarks.
-
With no argument, show the current branch name. With one argument,
set the working directory branch name (the branch will not exist
in the repository until the next commit). Standard practice
@@ -891,31 +766,29 @@ def branch(ui, repo, label=None, **opts):
Use the command :hg:`update` to switch to an existing branch. Use
:hg:`commit --close-branch` to mark this branch as closed.
+ .. note::
+
+ Branch names are permanent. Use :hg:`bookmark` to create a
+ light-weight bookmark instead. See :hg:`help glossary` for more
+ information about named branches and bookmarks.
+
Returns 0 on success.
"""
- if not opts.get('clean') and not label:
- ui.write("%s\n" % repo.dirstate.branch())
- return
- wlock = repo.wlock()
- try:
- if opts.get('clean'):
- label = repo[None].p1().branch()
- repo.dirstate.setbranch(label)
- ui.status(_('reset working directory to branch %s\n') % label)
- elif label:
- if not opts.get('force') and label in repo.branchmap():
- if label not in [p.branch() for p in repo.parents()]:
- raise util.Abort(_('a branch of the same name already'
- ' exists'),
- # i18n: "it" refers to an existing branch
- hint=_("use 'hg update' to switch to it"))
- repo.dirstate.setbranch(label)
- ui.status(_('marked working directory as branch %s\n') % label)
- ui.status(_('(branches are permanent and global, '
- 'did you want a bookmark?)\n'))
- finally:
- wlock.release()
+ if opts.get('clean'):
+ label = repo[None].p1().branch()
+ repo.dirstate.setbranch(label)
+ ui.status(_('reset working directory to branch %s\n') % label)
+ elif label:
+ if not opts.get('force') and label in repo.branchtags():
+ if label not in [p.branch() for p in repo.parents()]:
+ raise util.Abort(_('a branch of the same name already exists'),
+ # i18n: "it" refers to an existing branch
+ hint=_("use 'hg update' to switch to it"))
+ repo.dirstate.setbranch(label)
+ ui.status(_('marked working directory as branch %s\n') % label)
+ else:
+ ui.write("%s\n" % repo.dirstate.branch())
@command('branches',
[('a', 'active', False, _('show only branches that have unmerged heads')),
@@ -937,45 +810,37 @@ def branches(ui, repo, active=False, closed=False):
"""
hexfunc = ui.debugflag and hex or short
-
- activebranches = set([repo[n].branch() for n in repo.heads()])
- branches = []
- for tag, heads in repo.branchmap().iteritems():
- for h in reversed(heads):
- ctx = repo[h]
- isopen = not ctx.closesbranch()
- if isopen:
- tip = ctx
- break
- else:
- tip = repo[heads[-1]]
- isactive = tag in activebranches and isopen
- branches.append((tip, isactive, isopen))
- branches.sort(key=lambda i: (i[1], i[0].rev(), i[0].branch(), i[2]),
- reverse=True)
-
- for ctx, isactive, isopen in branches:
+ activebranches = [repo[n].branch() for n in repo.heads()]
+ def testactive(tag, node):
+ realhead = tag in activebranches
+ open = node in repo.branchheads(tag, closed=False)
+ return realhead and open
+ branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag)
+ for tag, node in repo.branchtags().items()],
+ reverse=True)
+
+ for isactive, node, tag in branches:
if (not active) or isactive:
- if isactive:
- label = 'branches.active'
- notice = ''
- elif not isopen:
- if not closed:
- continue
- label = 'branches.closed'
- notice = _(' (closed)')
- else:
- label = 'branches.inactive'
- notice = _(' (inactive)')
- if ctx.branch() == repo.dirstate.branch():
- label = 'branches.current'
- rev = str(ctx.rev()).rjust(31 - encoding.colwidth(ctx.branch()))
- rev = ui.label('%s:%s' % (rev, hexfunc(ctx.node())),
- 'log.changeset')
- tag = ui.label(ctx.branch(), label)
if ui.quiet:
ui.write("%s\n" % tag)
else:
+ hn = repo.lookup(node)
+ if isactive:
+ label = 'branches.active'
+ notice = ''
+ elif hn not in repo.branchheads(tag, closed=False):
+ if not closed:
+ continue
+ label = 'branches.closed'
+ notice = _(' (closed)')
+ else:
+ label = 'branches.inactive'
+ notice = _(' (inactive)')
+ if tag == repo.dirstate.branch():
+ label = 'branches.current'
+ rev = str(node).rjust(31 - encoding.colwidth(tag))
+ rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset')
+ tag = ui.label(tag, label)
ui.write("%s %s%s\n" % (tag, rev, notice))
@command('bundle',
@@ -1020,12 +885,6 @@ def bundle(ui, repo, fname, dest=None, **opts):
if 'rev' in opts:
revs = scmutil.revrange(repo, opts['rev'])
- bundletype = opts.get('type', 'bzip2').lower()
- btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
- bundletype = btypes.get(bundletype)
- if bundletype not in changegroup.bundletypes:
- raise util.Abort(_('unknown bundle type specified with --type'))
-
if opts.get('all'):
base = ['null']
else:
@@ -1036,23 +895,27 @@ def bundle(ui, repo, fname, dest=None, **opts):
"a destination"))
common = [repo.lookup(rev) for rev in base]
heads = revs and map(repo.lookup, revs) or revs
- cg = repo.getbundle('bundle', heads=heads, common=common)
- outgoing = None
else:
dest = ui.expandpath(dest or 'default-push', dest or 'default')
dest, branches = hg.parseurl(dest, opts.get('branch'))
other = hg.peer(repo, opts, dest)
revs, checkout = hg.addbranchrevs(repo, other, branches, revs)
heads = revs and map(repo.lookup, revs) or revs
- outgoing = discovery.findcommonoutgoing(repo, other,
- onlyheads=heads,
- force=opts.get('force'),
- portable=True)
- cg = repo.getlocalbundle('bundle', outgoing)
+ common, outheads = discovery.findcommonoutgoing(repo, other,
+ onlyheads=heads,
+ force=opts.get('force'))
+
+ cg = repo.getbundle('bundle', common=common, heads=heads)
if not cg:
- scmutil.nochangesfound(ui, repo, outgoing and outgoing.excluded)
+ ui.status(_("no changes found\n"))
return 1
+ bundletype = opts.get('type', 'bzip2').lower()
+ btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'}
+ bundletype = btypes.get(bundletype)
+ if bundletype not in changegroup.bundletypes:
+ raise util.Abort(_('unknown bundle type specified with --type'))
+
changegroup.writebundle(cg, fname, bundletype)
@command('cat',
@@ -1114,84 +977,56 @@ def clone(ui, source, dest=None, **opts):
The location of the source is added to the new repository's
``.hg/hgrc`` file, as the default to be used for future pulls.
- Only local paths and ``ssh://`` URLs are supported as
- destinations. For ``ssh://`` destinations, no working directory or
- ``.hg/hgrc`` will be created on the remote side.
-
- To pull only a subset of changesets, specify one or more revisions
- identifiers with -r/--rev or branches with -b/--branch. The
- resulting clone will contain only the specified changesets and
- their ancestors. These options (or 'clone src#rev dest') imply
- --pull, even for local source repositories. Note that specifying a
- tag will include the tagged changeset but not the changeset
- containing the tag.
-
- To check out a particular version, use -u/--update, or
- -U/--noupdate to create a clone with no working directory.
-
- .. container:: verbose
-
- For efficiency, hardlinks are used for cloning whenever the
- source and destination are on the same filesystem (note this
- applies only to the repository data, not to the working
- directory). Some filesystems, such as AFS, implement hardlinking
- incorrectly, but do not report errors. In these cases, use the
- --pull option to avoid hardlinking.
-
- In some cases, you can clone repositories and the working
- directory using full hardlinks with ::
-
- $ cp -al REPO REPOCLONE
-
- This is the fastest way to clone, but it is not always safe. The
- operation is not atomic (making sure REPO is not modified during
- the operation is up to you) and you have to make sure your
- editor breaks hardlinks (Emacs and most Linux Kernel tools do
- so). Also, this is not compatible with certain extensions that
- place their metadata under the .hg directory, such as mq.
-
- Mercurial will update the working directory to the first applicable
- revision from this list:
-
- a) null if -U or the source repository has no changesets
- b) if -u . and the source repository is local, the first parent of
- the source repository's working directory
- c) the changeset specified with -u (if a branch name, this means the
- latest head of that branch)
- d) the changeset specified with -r
- e) the tipmost head specified with -b
- f) the tipmost head specified with the url#branch source syntax
- g) the tipmost head of the default branch
- h) tip
-
- Examples:
-
- - clone a remote repository to a new directory named hg/::
-
- hg clone http://selenic.com/hg
-
- - create a lightweight local clone::
-
- hg clone project/ project-feature/
-
- - clone from an absolute path on an ssh server (note double-slash)::
-
- hg clone ssh://user@server//home/projects/alpha/
-
- - do a high-speed clone over a LAN while checking out a
- specified version::
-
- hg clone --uncompressed http://server/repo -u 1.5
-
- - create a repository without changesets after a particular revision::
-
- hg clone -r 04e544 experimental/ good/
-
- - clone (and track) a particular named branch::
-
- hg clone http://selenic.com/hg#stable
-
- See :hg:`help urls` for details on specifying URLs.
+ See :hg:`help urls` for valid source format details.
+
+ It is possible to specify an ``ssh://`` URL as the destination, but no
+ ``.hg/hgrc`` and working directory will be created on the remote side.
+ Please see :hg:`help urls` for important details about ``ssh://`` URLs.
+
+ A set of changesets (tags, or branch names) to pull may be specified
+ by listing each changeset (tag, or branch name) with -r/--rev.
+ If -r/--rev is used, the cloned repository will contain only a subset
+ of the changesets of the source repository. Only the set of changesets
+ defined by all -r/--rev options (including all their ancestors)
+ will be pulled into the destination repository.
+ No subsequent changesets (including subsequent tags) will be present
+ in the destination.
+
+ Using -r/--rev (or 'clone src#rev dest') implies --pull, even for
+ local source repositories.
+
+ For efficiency, hardlinks are used for cloning whenever the source
+ and destination are on the same filesystem (note this applies only
+ to the repository data, not to the working directory). Some
+ filesystems, such as AFS, implement hardlinking incorrectly, but
+ do not report errors. In these cases, use the --pull option to
+ avoid hardlinking.
+
+ In some cases, you can clone repositories and the working directory
+ using full hardlinks with ::
+
+ $ cp -al REPO REPOCLONE
+
+ This is the fastest way to clone, but it is not always safe. The
+ operation is not atomic (making sure REPO is not modified during
+ the operation is up to you) and you have to make sure your editor
+ breaks hardlinks (Emacs and most Linux Kernel tools do so). Also,
+ this is not compatible with certain extensions that place their
+ metadata under the .hg directory, such as mq.
+
+ Mercurial will update the working directory to the first applicable
+ revision from this list:
+
+ a) null if -U or the source repository has no changesets
+ b) if -u . and the source repository is local, the first parent of
+ the source repository's working directory
+ c) the changeset specified with -u (if a branch name, this means the
+ latest head of that branch)
+ d) the changeset specified with -r
+ e) the tipmost head specified with -b
+ f) the tipmost head specified with the url#branch source syntax
+ g) the tipmost head of the default branch
+ h) tip
Returns 0 on success.
"""
@@ -1212,8 +1047,7 @@ def clone(ui, source, dest=None, **opts):
_('mark new/missing files as added/removed before committing')),
('', 'close-branch', None,
_('mark a branch as closed, hiding it from the branch list')),
- ('', 'amend', None, _('amend the parent of the working dir')),
- ] + walkopts + commitopts + commitopts2 + subrepoopts,
+ ] + walkopts + commitopts + commitopts2,
_('[OPTION]... [FILE]...'))
def commit(ui, repo, *pats, **opts):
"""commit the specified files or all outstanding changes
@@ -1233,28 +1067,10 @@ def commit(ui, repo, *pats, **opts):
commit fails, you will find a backup of your message in
``.hg/last-message.txt``.
- The --amend flag can be used to amend the parent of the
- working directory with a new commit that contains the changes
- in the parent in addition to those currently reported by :hg:`status`,
- if there are any. The old commit is stored in a backup bundle in
- ``.hg/strip-backup`` (see :hg:`help bundle` and :hg:`help unbundle`
- on how to restore it).
-
- Message, user and date are taken from the amended commit unless
- specified. When a message isn't specified on the command line,
- the editor will open with the message of the amended commit.
-
- It is not possible to amend public changesets (see :hg:`help phases`)
- or changesets that have children.
-
See :hg:`help dates` for a list of formats valid for -d/--date.
Returns 0 on success, 1 if nothing changed.
"""
- if opts.get('subrepos'):
- # Let --subrepos on the command line overide config setting.
- ui.setconfig('ui', 'commitsubrepos', True)
-
extra = {}
if opts.get('close_branch'):
if repo['.'].node() not in repo.branchheads():
@@ -1262,81 +1078,32 @@ def commit(ui, repo, *pats, **opts):
# current branch, so it's sufficient to test branchheads
raise util.Abort(_('can only close branch heads'))
extra['close'] = 1
+ e = cmdutil.commiteditor
+ if opts.get('force_editor'):
+ e = cmdutil.commitforceeditor
+
+ def commitfunc(ui, repo, message, match, opts):
+ return repo.commit(message, opts.get('user'), opts.get('date'), match,
+ editor=e, extra=extra)
branch = repo[None].branch()
bheads = repo.branchheads(branch)
- if opts.get('amend'):
- if ui.configbool('ui', 'commitsubrepos'):
- raise util.Abort(_('cannot amend recursively'))
-
- old = repo['.']
- if old.phase() == phases.public:
- raise util.Abort(_('cannot amend public changesets'))
- if len(old.parents()) > 1:
- raise util.Abort(_('cannot amend merge changesets'))
- if len(repo[None].parents()) > 1:
- raise util.Abort(_('cannot amend while merging'))
- if old.children():
- raise util.Abort(_('cannot amend changeset with children'))
-
- e = cmdutil.commiteditor
- if opts.get('force_editor'):
- e = cmdutil.commitforceeditor
-
- def commitfunc(ui, repo, message, match, opts):
- editor = e
- # message contains text from -m or -l, if it's empty,
- # open the editor with the old message
- if not message:
- message = old.description()
- editor = cmdutil.commitforceeditor
- return repo.commit(message,
- opts.get('user') or old.user(),
- opts.get('date') or old.date(),
- match,
- editor=editor,
- extra=extra)
-
- current = repo._bookmarkcurrent
- marks = old.bookmarks()
- node = cmdutil.amend(ui, repo, commitfunc, old, extra, pats, opts)
- if node == old.node():
+ node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
+ if not node:
+ stat = repo.status(match=scmutil.match(repo[None], pats, opts))
+ if stat[3]:
+ ui.status(_("nothing changed (%d missing files, see 'hg status')\n")
+ % len(stat[3]))
+ else:
ui.status(_("nothing changed\n"))
- return 1
- elif marks:
- ui.debug('moving bookmarks %r from %s to %s\n' %
- (marks, old.hex(), hex(node)))
- for bm in marks:
- repo._bookmarks[bm] = node
- if bm == current:
- bookmarks.setcurrent(repo, bm)
- bookmarks.write(repo)
- else:
- e = cmdutil.commiteditor
- if opts.get('force_editor'):
- e = cmdutil.commitforceeditor
-
- def commitfunc(ui, repo, message, match, opts):
- return repo.commit(message, opts.get('user'), opts.get('date'),
- match, editor=e, extra=extra)
-
- node = cmdutil.commit(ui, repo, commitfunc, pats, opts)
-
- if not node:
- stat = repo.status(match=scmutil.match(repo[None], pats, opts))
- if stat[3]:
- ui.status(_("nothing changed (%d missing files, see "
- "'hg status')\n") % len(stat[3]))
- else:
- ui.status(_("nothing changed\n"))
- return 1
+ return 1
ctx = repo[node]
parents = ctx.parents()
- if (not opts.get('amend') and bheads and node not in bheads and not
- [x for x in parents if x.node() in bheads and x.branch() == branch]):
+ if bheads and not [x for x in parents
+ if x.node() in bheads and x.branch() == branch]:
ui.status(_('created new head\n'))
# The message is not printed for initial roots. For the other
# changesets, it is printed in the following situations:
@@ -1369,7 +1136,7 @@ def commit(ui, repo, *pats, **opts):
if not opts.get('close_branch'):
for r in parents:
- if r.closesbranch() and r.branch() == branch:
+ if r.extra().get('close') and r.branch() == branch:
ui.status(_('reopening closed branch head %d\n') % r)
if ui.debugflag:
@@ -1486,16 +1253,13 @@ def debugbuilddag(ui, repo, text=None,
tags = []
- lock = tr = None
+ tr = repo.transaction("builddag")
try:
- lock = repo.lock()
- tr = repo.transaction("builddag")
at = -1
atbranch = 'default'
nodeids = []
- id = 0
- ui.progress(_('building'), id, unit=_('revisions'), total=total)
+ ui.progress(_('building'), 0, unit=_('revisions'), total=total)
for type, data in dagparser.parsedag(text):
if type == 'n':
ui.note('node %s\n' % str(data))
@@ -1566,12 +1330,12 @@ def debugbuilddag(ui, repo, text=None,
atbranch = data
ui.progress(_('building'), id, unit=_('revisions'), total=total)
tr.close()
-
- if tags:
- repo.opener.write("localtags", "".join(tags))
finally:
ui.progress(_('building'), None)
- release(tr, lock)
+ tr.release()
+
+ if tags:
+ repo.opener.write("localtags", "".join(tags))
@command('debugbundle', [('a', 'all', None, _('show all details'))], _('FILE'))
def debugbundle(ui, bundlepath, all=None, **opts):
@@ -1707,8 +1471,7 @@ def debugdag(ui, repo, file_=None, *revs, **opts):
revs = set((int(r) for r in revs))
def events():
for r in rlog:
- yield 'n', (r, list(set(p for p in rlog.parentrevs(r)
- if p != -1)))
+ yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1)))
if r in revs:
yield 'l', (r, "r%i" % r)
elif repo:
@@ -1727,8 +1490,7 @@ def debugdag(ui, repo, file_=None, *revs, **opts):
if newb != b:
yield 'a', newb
b = newb
- yield 'n', (r, list(set(p for p in cl.parentrevs(r)
- if p != -1)))
+ yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1)))
if tags:
ls = labels.get(r)
if ls:
@@ -1786,22 +1548,17 @@ def debugdate(ui, date, range=None, **opts):
_('[-l REV] [-r REV] [-b BRANCH]... [OTHER]'))
def debugdiscovery(ui, repo, remoteurl="default", **opts):
"""runs the changeset discovery protocol in isolation"""
- remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl),
- opts.get('branch'))
+ remoteurl, branches = hg.parseurl(ui.expandpath(remoteurl), opts.get('branch'))
remote = hg.peer(repo, opts, remoteurl)
ui.status(_('comparing with %s\n') % util.hidepassword(remoteurl))
# make sure tests are repeatable
random.seed(12323)
- def doit(localheads, remoteheads, remote=remote):
+ def doit(localheads, remoteheads):
if opts.get('old'):
if localheads:
- raise util.Abort('cannot use localheads with old style '
- 'discovery')
- if not util.safehasattr(remote, 'branches'):
- # enable in-client legacy support
- remote = localrepo.locallegacypeer(remote.local())
+ raise util.Abort('cannot use localheads with old style discovery')
common, _in, hds = treediscovery.findcommonincoming(repo, remote,
force=True)
common = set(common)
@@ -1899,9 +1656,8 @@ def debuggetbundle(ui, repopath, bundlepath, head=None, common=None, **opts):
def debugignore(ui, repo, *values, **opts):
"""display the combined ignore pattern"""
ignore = repo.dirstate._ignore
- includepat = getattr(ignore, 'includepat', None)
- if includepat is not None:
- ui.write("%s\n" % includepat)
+ if hasattr(ignore, 'includepat'):
+ ui.write("%s\n" % ignore.includepat)
else:
raise util.Abort(_("no ignore patterns found"))
@@ -1928,8 +1684,7 @@ def debugindex(ui, repo, file_ = None, **opts):
" nodeid p1 p2\n")
elif format == 1:
ui.write(" rev flag offset length"
- " size " + basehdr + " link p1 p2"
- " nodeid\n")
+ " size " + basehdr + " link p1 p2 nodeid\n")
for i in r:
node = r.node(i)
@@ -1940,7 +1695,7 @@ def debugindex(ui, repo, file_ = None, **opts):
if format == 0:
try:
pp = r.parents(node)
- except Exception:
+ except:
pp = [nullid, nullid]
ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % (
i, r.start(i), r.length(i), base, r.linkrev(i),
@@ -1987,7 +1742,7 @@ def debuginstall(ui):
problems = 0
# encoding
- ui.status(_("checking encoding (%s)...\n") % encoding.encoding)
+ ui.status(_("Checking encoding (%s)...\n") % encoding.encoding)
try:
encoding.fromlocal("test")
except util.Abort, inst:
@@ -1996,11 +1751,10 @@ def debuginstall(ui):
problems += 1
# compiled modules
- ui.status(_("checking installed modules (%s)...\n")
+ ui.status(_("Checking installed modules (%s)...\n")
% os.path.dirname(__file__))
try:
import bdiff, mpatch, base85, osutil
- dir(bdiff), dir(mpatch), dir(base85), dir(osutil) # quiet pyflakes
except Exception, inst:
ui.write(" %s\n" % inst)
ui.write(_(" One or more extensions could not be found"))
@@ -2008,10 +1762,9 @@ def debuginstall(ui):
problems += 1
# templates
- import templater
- p = templater.templatepath()
- ui.status(_("checking templates (%s)...\n") % ' '.join(p))
+ ui.status(_("Checking templates...\n"))
try:
+ import templater
templater.templater(templater.templatepath("map-cmdline.default"))
except Exception, inst:
ui.write(" %s\n" % inst)
@@ -2019,7 +1772,7 @@ def debuginstall(ui):
problems += 1
# editor
- ui.status(_("checking commit editor...\n"))
+ ui.status(_("Checking commit editor...\n"))
editor = ui.geteditor()
cmdpath = util.findexe(editor) or util.findexe(editor.split()[0])
if not cmdpath:
@@ -2034,7 +1787,7 @@ def debuginstall(ui):
problems += 1
# check username
- ui.status(_("checking username...\n"))
+ ui.status(_("Checking username...\n"))
try:
ui.username()
except util.Abort, e:
@@ -2043,7 +1796,7 @@ def debuginstall(ui):
problems += 1
if not problems:
- ui.status(_("no problems detected\n"))
+ ui.status(_("No problems detected\n"))
else:
ui.write(_("%s problems detected,"
" please check your install!\n") % problems)
@@ -2054,8 +1807,8 @@ def debuginstall(ui):
def debugknown(ui, repopath, *ids, **opts):
"""test whether node ids are known to a repo
- Every ID must be a full-length hex node id string. Returns a list of 0s
- and 1s indicating unknown/known.
+ Every ID must be a full-length hex node id string. Returns a list of 0s and 1s
+ indicating unknown/known.
"""
repo = hg.peer(ui, opts, repopath)
if not repo.capable('known'):
@@ -2063,50 +1816,6 @@ def debugknown(ui, repopath, *ids, **opts):
flags = repo.known([bin(s) for s in ids])
ui.write("%s\n" % ("".join([f and "1" or "0" for f in flags])))
-@command('debugobsolete', [] + commitopts2,
- _('[OBSOLETED [REPLACEMENT] [REPL... ]'))
-def debugobsolete(ui, repo, precursor=None, *successors, **opts):
- """create arbitrary obsolete marker"""
- def parsenodeid(s):
- try:
- # We do not use revsingle/revrange functions here to accept
- # arbitrary node identifiers, possibly not present in the
- # local repository.
- n = bin(s)
- if len(n) != len(nullid):
- raise TypeError()
- return n
- except TypeError:
- raise util.Abort('changeset references must be full hexadecimal '
- 'node identifiers')
-
- if precursor is not None:
- metadata = {}
- if 'date' in opts:
- metadata['date'] = opts['date']
- metadata['user'] = opts['user'] or ui.username()
- succs = tuple(parsenodeid(succ) for succ in successors)
- l = repo.lock()
- try:
- tr = repo.transaction('debugobsolete')
- try:
- repo.obsstore.create(tr, parsenodeid(precursor), succs, 0,
- metadata)
- tr.close()
- finally:
- tr.release()
- finally:
- l.release()
- else:
- for m in obsolete.allmarkers(repo):
- ui.write(hex(m.precnode()))
- for repl in m.succnodes():
- ui.write(' ')
- ui.write(hex(repl))
- ui.write(' %X ' % m._data[2])
- ui.write(m.metadata())
- ui.write('\n')
-
@command('debugpushkey', [], _('REPO NAMESPACE [KEY OLD NEW]'))
def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
'''access the pushkey key/value protocol
@@ -2128,27 +1837,6 @@ def debugpushkey(ui, repopath, namespace, *keyinfo, **opts):
ui.write("%s\t%s\n" % (k.encode('string-escape'),
v.encode('string-escape')))
-@command('debugpvec', [], _('A B'))
-def debugpvec(ui, repo, a, b=None):
- ca = scmutil.revsingle(repo, a)
- cb = scmutil.revsingle(repo, b)
- pa = pvec.ctxpvec(ca)
- pb = pvec.ctxpvec(cb)
- if pa == pb:
- rel = "="
- elif pa > pb:
- rel = ">"
- elif pa < pb:
- rel = "<"
- elif pa | pb:
- rel = "|"
- ui.write(_("a: %s\n") % pa)
- ui.write(_("b: %s\n") % pb)
- ui.write(_("depth(a): %d depth(b): %d\n") % (pa._depth, pb._depth))
- ui.write(_("delta: %d hdist: %d distance: %d relation: %s\n") %
- (abs(pa._depth - pb._depth), pvec._hamming(pa._vec, pb._vec),
- pa.distance(pb), rel))
-
@command('debugrebuildstate',
[('r', 'rev', '', _('revision to rebuild to'), _('REV'))],
_('[-r REV] [REV]'))
@@ -2272,11 +1960,6 @@ def debugrevlog(ui, repo, file_ = None, **opts):
elif delta != nullrev:
numother += 1
- # Adjust size min value for empty cases
- for size in (datasize, fullsize, deltasize):
- if size[0] is None:
- size[0] = 0
-
numdeltas = numrevs - numfull
numoprev = numprev - nump1prev - nump2prev
totalrawsize = datasize[2]
@@ -2284,8 +1967,7 @@ def debugrevlog(ui, repo, file_ = None, **opts):
fulltotal = fullsize[2]
fullsize[2] /= numfull
deltatotal = deltasize[2]
- if numrevs - numfull > 0:
- deltasize[2] /= numrevs - numfull
+ deltasize[2] /= numrevs - numfull
totalsize = fulltotal + deltatotal
avgchainlen = sum(chainlengths) / numrevs
compratio = totalrawsize / totalsize
@@ -2337,31 +2019,23 @@ def debugrevlog(ui, repo, file_ = None, **opts):
fmt2 = pcfmtstr(numdeltas, 4)
ui.write('deltas against prev : ' + fmt % pcfmt(numprev, numdeltas))
if numprev > 0:
- ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev,
- numprev))
- ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev,
- numprev))
- ui.write(' other : ' + fmt2 % pcfmt(numoprev,
- numprev))
+ ui.write(' where prev = p1 : ' + fmt2 % pcfmt(nump1prev, numprev))
+ ui.write(' where prev = p2 : ' + fmt2 % pcfmt(nump2prev, numprev))
+ ui.write(' other : ' + fmt2 % pcfmt(numoprev, numprev))
if gdelta:
ui.write('deltas against p1 : ' + fmt % pcfmt(nump1, numdeltas))
ui.write('deltas against p2 : ' + fmt % pcfmt(nump2, numdeltas))
- ui.write('deltas against other : ' + fmt % pcfmt(numother,
- numdeltas))
+ ui.write('deltas against other : ' + fmt % pcfmt(numother, numdeltas))
@command('debugrevspec', [], ('REVSPEC'))
def debugrevspec(ui, repo, expr):
- """parse and apply a revision specification
-
- Use --verbose to print the parsed tree before and after aliases
- expansion.
- """
+ '''parse and apply a revision specification'''
if ui.verbose:
tree = revset.parse(expr)[0]
- ui.note(revset.prettyformat(tree), "\n")
+ ui.note(tree, "\n")
newtree = revset.findaliases(ui, tree)
if newtree != tree:
- ui.note(revset.prettyformat(newtree), "\n")
+ ui.note(newtree, "\n")
func = revset.match(ui, expr)
for c in func(repo, range(len(repo))):
ui.write("%s\n" % c)
@@ -2381,7 +2055,7 @@ def debugsetparents(ui, repo, rev1, rev2=None):
wlock = repo.wlock()
try:
- repo.setparents(r1, r2)
+ repo.dirstate.setparents(r1, r2)
finally:
wlock.release()
@@ -2412,7 +2086,7 @@ def debugstate(ui, repo, nodates=None, datesort=None):
if ent[1] & 020000:
mode = 'lnk'
else:
- mode = '%3o' % (ent[1] & 0777 & ~util.umask)
+ mode = '%3o' % (ent[1] & 0777)
ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_))
for f in repo.dirstate.copies():
ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f))
@@ -2435,14 +2109,11 @@ def debugwalk(ui, repo, *pats, **opts):
items = list(repo.walk(m))
if not items:
return
- f = lambda fn: fn
- if ui.configbool('ui', 'slash') and os.sep != '/':
- f = lambda fn: util.normpath(fn)
fmt = 'f %%-%ds %%-%ds %%s' % (
max([len(abs) for abs in items]),
max([len(m.rel(abs)) for abs in items]))
for abs in items:
- line = fmt % (abs, f(m.rel(abs)), m.exact(abs) and 'exact' or '')
+ line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '')
ui.write("%s\n" % line.rstrip())
@command('debugwireargs',
@@ -2499,32 +2170,6 @@ def diff(ui, repo, *pats, **opts):
Use the -g/--git option to generate diffs in the git extended diff
format. For more information, read :hg:`help diffs`.
- .. container:: verbose
-
- Examples:
-
- - compare a file in the current working directory to its parent::
-
- hg diff foo.c
-
- - compare two historical versions of a directory, with rename info::
-
- hg diff --git -r 1.0:1.2 lib/
-
- - get change stats relative to the last change on some date::
-
- hg diff --stat -r "date('may 2')"
-
- - diff all newly-added files that contain a keyword::
-
- hg diff "set:added() and grep(GNU)"
-
- - compare a revision and its parents::
-
- hg diff -c 9353 # compare against first parent
- hg diff -r 9353^:9353 # same using revset syntax
- hg diff -r 9353^2:9353 # compare against the second parent
-
Returns 0 on success.
"""
@@ -2556,7 +2201,7 @@ def diff(ui, repo, *pats, **opts):
('', 'switch-parent', None, _('diff against the second parent')),
('r', 'rev', [], _('revisions to export'), _('REV')),
] + diffopts,
- _('[OPTION]... [-o OUTFILESPEC] [-r] REV...'))
+ _('[OPTION]... [-o OUTFILESPEC] REV...'))
def export(ui, repo, *changesets, **opts):
"""dump the header and diffs for one or more changesets
@@ -2580,7 +2225,6 @@ def export(ui, repo, *changesets, **opts):
:``%R``: changeset revision number
:``%b``: basename of the exporting repository
:``%h``: short-form changeset hash (12 hexadecimal digits)
- :``%m``: first line of the commit message (only alphanumeric characters)
:``%n``: zero-padded sequence number, starting at 1
:``%r``: zero-padded changeset revision number
@@ -2594,31 +2238,12 @@ def export(ui, repo, *changesets, **opts):
With the --switch-parent option, the diff will be against the
second parent. It can be useful to review a merge.
- .. container:: verbose
-
- Examples:
-
- - use export and import to transplant a bugfix to the current
- branch::
-
- hg export -r 9353 | hg import -
-
- - export all the changesets between two revisions to a file with
- rename information::
-
- hg export --git -r 123:150 > changes.txt
-
- - split outgoing changes into a series of patches with
- descriptive names::
-
- hg export -r "outgoing()" -o "%n-%m.patch"
-
Returns 0 on success.
"""
changesets += tuple(opts.get('rev', []))
- revs = scmutil.revrange(repo, changesets)
- if not revs:
+ if not changesets:
raise util.Abort(_("export requires at least one changeset"))
+ revs = scmutil.revrange(repo, changesets)
if len(revs) > 1:
ui.note(_('exporting patches:\n'))
else:
@@ -2640,18 +2265,6 @@ def forget(ui, repo, *pats, **opts):
To undo a forget before the next commit, see :hg:`add`.
- .. container:: verbose
-
- Examples:
-
- - forget newly-added binary files::
-
- hg forget "set:added() and binary()"
-
- - forget files that would be excluded by .hgignore::
-
- hg forget "set:hgignore()"
-
Returns 0 on success.
"""
@@ -2659,206 +2272,23 @@ def forget(ui, repo, *pats, **opts):
raise util.Abort(_('no files specified'))
m = scmutil.match(repo[None], pats, opts)
- rejected = cmdutil.forget(ui, repo, m, prefix="", explicitonly=False)[0]
- return rejected and 1 or 0
-
-@command(
- 'graft',
- [('r', 'rev', [], _('revisions to graft'), _('REV')),
- ('c', 'continue', False, _('resume interrupted graft')),
- ('e', 'edit', False, _('invoke editor on commit messages')),
- ('', 'log', None, _('append graft info to log message')),
- ('D', 'currentdate', False,
- _('record the current date as commit date')),
- ('U', 'currentuser', False,
- _('record the current user as committer'), _('DATE'))]
- + commitopts2 + mergetoolopts + dryrunopts,
- _('[OPTION]... [-r] REV...'))
-def graft(ui, repo, *revs, **opts):
- '''copy changes from other branches onto the current branch
-
- This command uses Mercurial's merge logic to copy individual
- changes from other branches without merging branches in the
- history graph. This is sometimes known as 'backporting' or
- 'cherry-picking'. By default, graft will copy user, date, and
- description from the source changesets.
-
- Changesets that are ancestors of the current revision, that have
- already been grafted, or that are merges will be skipped.
-
- If --log is specified, log messages will have a comment appended
- of the form::
-
- (grafted from CHANGESETHASH)
-
- If a graft merge results in conflicts, the graft process is
- interrupted so that the current merge can be manually resolved.
- Once all conflicts are addressed, the graft process can be
- continued with the -c/--continue option.
-
- .. note::
- The -c/--continue option does not reapply earlier options.
-
- .. container:: verbose
-
- Examples:
-
- - copy a single change to the stable branch and edit its description::
-
- hg update stable
- hg graft --edit 9393
-
- - graft a range of changesets with one exception, updating dates::
-
- hg graft -D "2085::2093 and not 2091"
-
- - continue a graft after resolving conflicts::
-
- hg graft -c
-
- - show the source of a grafted changeset::
-
- hg log --debug -r tip
-
- Returns 0 on successful completion.
- '''
-
- revs = list(revs)
- revs.extend(opts['rev'])
-
- if not opts.get('user') and opts.get('currentuser'):
- opts['user'] = ui.username()
- if not opts.get('date') and opts.get('currentdate'):
- opts['date'] = "%d %d" % util.makedate()
-
- editor = None
- if opts.get('edit'):
- editor = cmdutil.commitforceeditor
-
- cont = False
- if opts['continue']:
- cont = True
- if revs:
- raise util.Abort(_("can't specify --continue and revisions"))
- # read in unfinished revisions
- try:
- nodes = repo.opener.read('graftstate').splitlines()
- revs = [repo[node].rev() for node in nodes]
- except IOError, inst:
- if inst.errno != errno.ENOENT:
- raise
- raise util.Abort(_("no graft state found, can't continue"))
- else:
- cmdutil.bailifchanged(repo)
- if not revs:
- raise util.Abort(_('no revisions specified'))
- revs = scmutil.revrange(repo, revs)
-
- # check for merges
- for rev in repo.revs('%ld and merge()', revs):
- ui.warn(_('skipping ungraftable merge revision %s\n') % rev)
- revs.remove(rev)
- if not revs:
- return -1
-
- # check for ancestors of dest branch
- for rev in repo.revs('::. and %ld', revs):
- ui.warn(_('skipping ancestor revision %s\n') % rev)
- revs.remove(rev)
- if not revs:
- return -1
-
- # analyze revs for earlier grafts
- ids = {}
- for ctx in repo.set("%ld", revs):
- ids[ctx.hex()] = ctx.rev()
- n = ctx.extra().get('source')
- if n:
- ids[n] = ctx.rev()
-
- # check ancestors for earlier grafts
- ui.debug('scanning for duplicate grafts\n')
- for ctx in repo.set("::. - ::%ld", revs):
- n = ctx.extra().get('source')
- if n in ids:
- r = repo[n].rev()
- if r in revs:
- ui.warn(_('skipping already grafted revision %s\n') % r)
- revs.remove(r)
- elif ids[n] in revs:
- ui.warn(_('skipping already grafted revision %s '
- '(same origin %d)\n') % (ids[n], r))
- revs.remove(ids[n])
- elif ctx.hex() in ids:
- r = ids[ctx.hex()]
- ui.warn(_('skipping already grafted revision %s '
- '(was grafted from %d)\n') % (r, ctx.rev()))
- revs.remove(r)
- if not revs:
- return -1
-
- wlock = repo.wlock()
- try:
- for pos, ctx in enumerate(repo.set("%ld", revs)):
- current = repo['.']
-
- ui.status(_('grafting revision %s\n') % ctx.rev())
- if opts.get('dry_run'):
- continue
+ s = repo.status(match=m, clean=True)
+ forget = sorted(s[0] + s[1] + s[3] + s[6])
+ errs = 0
- # we don't merge the first commit when continuing
- if not cont:
- # perform the graft merge with p1(rev) as 'ancestor'
- try:
- # ui.forcemerge is an internal variable, do not document
- repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
- stats = mergemod.update(repo, ctx.node(), True, True, False,
- ctx.p1().node())
- finally:
- repo.ui.setconfig('ui', 'forcemerge', '')
- # report any conflicts
- if stats and stats[3] > 0:
- # write out state for --continue
- nodelines = [repo[rev].hex() + "\n" for rev in revs[pos:]]
- repo.opener.write('graftstate', ''.join(nodelines))
- raise util.Abort(
- _("unresolved conflicts, can't continue"),
- hint=_('use hg resolve and hg graft --continue'))
- else:
- cont = False
-
- # drop the second merge parent
- repo.setparents(current.node(), nullid)
- repo.dirstate.write()
- # fix up dirstate for copies and renames
- cmdutil.duplicatecopies(repo, ctx.rev(), ctx.p1().rev())
-
- # commit
- source = ctx.extra().get('source')
- if not source:
- source = ctx.hex()
- extra = {'source': source}
- user = ctx.user()
- if opts.get('user'):
- user = opts['user']
- date = ctx.date()
- if opts.get('date'):
- date = opts['date']
- message = ctx.description()
- if opts.get('log'):
- message += '\n(grafted from %s)' % ctx.hex()
- node = repo.commit(text=message, user=user,
- date=date, extra=extra, editor=editor)
- if node is None:
- ui.status(_('graft for revision %s is empty\n') % ctx.rev())
- finally:
- wlock.release()
+ for f in m.files():
+ if f not in repo.dirstate and not os.path.isdir(m.rel(f)):
+ if os.path.exists(m.rel(f)):
+ ui.warn(_('not removing %s: file is already untracked\n')
+ % m.rel(f))
+ errs = 1
- # remove state when we complete successfully
- if not opts.get('dry_run') and os.path.exists(repo.join('graftstate')):
- util.unlinkpath(repo.join('graftstate'))
+ for f in forget:
+ if ui.verbose or not m.exact(f):
+ ui.status(_('removing %s\n') % m.rel(f))
- return 0
+ repo[None].forget(forget)
+ return errs
@command('grep',
[('0', 'print0', None, _('end fields with NUL')),
@@ -2895,7 +2325,7 @@ def grep(ui, repo, pattern, *pats, **opts):
Returns 0 if a match is found, 1 otherwise.
"""
- reflags = re.M
+ reflags = 0
if opts.get('ignore_case'):
reflags |= re.I
try:
@@ -2919,7 +2349,7 @@ def grep(ui, repo, pattern, *pats, **opts):
mstart, mend = match.span()
linenum += body.count('\n', begin, mstart) + 1
lstart = body.rfind('\n', begin, mstart) + 1 or begin
- begin = body.find('\n', mend) + 1 or len(body) + 1
+ begin = body.find('\n', mend) + 1 or len(body)
lend = begin - 1
yield linenum, mstart - lstart, mend - lstart, body[lstart:lend]
@@ -3076,7 +2506,7 @@ def grep(ui, repo, pattern, *pats, **opts):
('a', 'active', False, _('show active branchheads only (DEPRECATED)')),
('c', 'closed', False, _('show normal and closed branch heads')),
] + templateopts,
- _('[-ct] [-r STARTREV] [REV]...'))
+ _('[-ac] [-r STARTREV] [REV]...'))
def heads(ui, repo, *branchrevs, **opts):
"""show current repository heads or show branch heads
@@ -3130,7 +2560,7 @@ def heads(ui, repo, *branchrevs, **opts):
headless = ', '.join(b for b in branches - haveheads)
msg = _('no open branch heads found on branches %s')
if opts.get('rev'):
- msg += _(' (started at %s)') % opts['rev']
+ msg += _(' (started at %s)' % opts['rev'])
ui.warn((msg + '\n') % headless)
if not heads:
@@ -3144,11 +2574,9 @@ def heads(ui, repo, *branchrevs, **opts):
@command('help',
[('e', 'extension', None, _('show only help for extensions')),
- ('c', 'command', None, _('show only help for commands')),
- ('k', 'keyword', '', _('show topics matching keyword')),
- ],
+ ('c', 'command', None, _('show only help for commands'))],
_('[-ec] [TOPIC]'))
-def help_(ui, name=None, unknowncmd=False, full=True, **opts):
+def help_(ui, name=None, with_version=False, unknowncmd=False, full=True, **opts):
"""show help for a given topic or a help overview
With no arguments, print a list of commands with short help messages.
@@ -3158,10 +2586,33 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
Returns 0 if successful.
"""
-
+ option_lists = []
textwidth = min(ui.termwidth(), 80) - 2
+ def addglobalopts(aliases):
+ if ui.verbose:
+ option_lists.append((_("global options:"), globalopts))
+ if name == 'shortlist':
+ option_lists.append((_('use "hg help" for the full list '
+ 'of commands'), ()))
+ else:
+ if name == 'shortlist':
+ msg = _('use "hg help" for the full list of commands '
+ 'or "hg -v" for details')
+ elif name and not full:
+ msg = _('use "hg help %s" to show the full help text' % name)
+ elif aliases:
+ msg = _('use "hg -v help%s" to show builtin aliases and '
+ 'global options') % (name and " " + name or "")
+ else:
+ msg = _('use "hg -v help %s" to show global options') % name
+ option_lists.append((msg, ()))
+
def helpcmd(name):
+ if with_version:
+ version_(ui)
+ ui.write('\n')
+
try:
aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd)
except error.AmbiguousCommand, inst:
@@ -3169,47 +2620,51 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
# except block, nor can be used inside a lambda. python issue4617
prefix = inst.args[0]
select = lambda c: c.lstrip('^').startswith(prefix)
- rst = helplist(select)
- return rst
-
- rst = []
+ helplist(_('list of commands:\n\n'), select)
+ return
# check if it's an invalid alias and display its error if it is
if getattr(entry[0], 'badalias', False):
if not unknowncmd:
- ui.pushbuffer()
entry[0](ui)
- rst.append(ui.popbuffer())
- return rst
+ return
# synopsis
if len(entry) > 2:
if entry[2].startswith('hg'):
- rst.append("%s\n" % entry[2])
+ ui.write("%s\n" % entry[2])
else:
- rst.append('hg %s %s\n' % (aliases[0], entry[2]))
+ ui.write('hg %s %s\n' % (aliases[0], entry[2]))
else:
- rst.append('hg %s\n' % aliases[0])
+ ui.write('hg %s\n' % aliases[0])
+
# aliases
if full and not ui.quiet and len(aliases) > 1:
- rst.append(_("\naliases: %s\n") % ', '.join(aliases[1:]))
- rst.append('\n')
+ ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:]))
# description
doc = gettext(entry[0].__doc__)
if not doc:
doc = _("(no help text available)")
- if util.safehasattr(entry[0], 'definition'): # aliased command
+ if hasattr(entry[0], 'definition'): # aliased command
if entry[0].definition.startswith('!'): # shell alias
doc = _('shell alias for::\n\n %s') % entry[0].definition[1:]
else:
doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc)
- doc = doc.splitlines(True)
if ui.quiet or not full:
- rst.append(doc[0])
- else:
- rst.extend(doc)
- rst.append('\n')
+ doc = doc.splitlines()[0]
+ keep = ui.verbose and ['verbose'] or []
+ formatted, pruned = minirst.format(doc, textwidth, keep=keep)
+ ui.write("\n%s\n" % formatted)
+ if pruned:
+ ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name)
+
+ if not ui.quiet:
+ # options
+ if entry[1]:
+ option_lists.append((_("options:\n"), entry[1]))
+
+ addglobalopts(False)
# check if this command shadows a non-trivial (multi-line)
# extension help text
@@ -3219,36 +2674,11 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
if '\n' in doc.strip():
msg = _('use "hg help -e %s" to show help for '
'the %s extension') % (name, name)
- rst.append('\n%s\n' % msg)
+ ui.write('\n%s\n' % msg)
except KeyError:
pass
- # options
- if not ui.quiet and entry[1]:
- rst.append('\n%s\n\n' % _("options:"))
- rst.append(help.optrst(entry[1], ui.verbose))
-
- if ui.verbose:
- rst.append('\n%s\n\n' % _("global options:"))
- rst.append(help.optrst(globalopts, ui.verbose))
-
- if not ui.verbose:
- if not full:
- rst.append(_('\nuse "hg help %s" to show the full help text\n')
- % name)
- elif not ui.quiet:
- rst.append(_('\nuse "hg -v help %s" to show more info\n')
- % name)
- return rst
-
-
- def helplist(select=None):
- # list of commands
- if name == "shortlist":
- header = _('basic commands:\n\n')
- else:
- header = _('list of commands:\n\n')
-
+ def helplist(header, select=None):
h = {}
cmds = {}
for c, e in table.iteritems():
@@ -3272,60 +2702,24 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
h[f] = doc.splitlines()[0].rstrip()
cmds[f] = c.lstrip("^")
- rst = []
if not h:
- if not ui.quiet:
- rst.append(_('no commands defined\n'))
- return rst
+ ui.status(_('no commands defined\n'))
+ return
- if not ui.quiet:
- rst.append(header)
+ ui.status(header)
fns = sorted(h)
+ m = max(map(len, fns))
for f in fns:
if ui.verbose:
commands = cmds[f].replace("|",", ")
- rst.append(" :%s: %s\n" % (commands, h[f]))
+ ui.write(" %s:\n %s\n"%(commands, h[f]))
else:
- rst.append(' :%s: %s\n' % (f, h[f]))
-
- if not name:
- exts = help.listexts(_('enabled extensions:'), extensions.enabled())
- if exts:
- rst.append('\n')
- rst.extend(exts)
-
- rst.append(_("\nadditional help topics:\n\n"))
- topics = []
- for names, header, doc in help.helptable:
- topics.append((names[0], header))
- for t, desc in topics:
- rst.append(" :%s: %s\n" % (t, desc))
-
- optlist = []
- if not ui.quiet:
- if ui.verbose:
- optlist.append((_("global options:"), globalopts))
- if name == 'shortlist':
- optlist.append((_('use "hg help" for the full list '
- 'of commands'), ()))
- else:
- if name == 'shortlist':
- msg = _('use "hg help" for the full list of commands '
- 'or "hg -v" for details')
- elif name and not full:
- msg = _('use "hg help %s" to show the full help '
- 'text') % name
- else:
- msg = _('use "hg -v help%s" to show builtin aliases and '
- 'global options') % (name and " " + name or "")
- optlist.append((msg, ()))
+ ui.write('%s\n' % (util.wrap(h[f], textwidth,
+ initindent=' %-*s ' % (m, f),
+ hangindent=' ' * (m + 4))))
- if optlist:
- for title, options in optlist:
- rst.append('\n%s\n' % title)
- if options:
- rst.append('\n%s\n' % help.optrst(options, ui.verbose))
- return rst
+ if not ui.quiet:
+ addglobalopts(True)
def helptopic(name):
for names, header, doc in help.helptable:
@@ -3334,20 +2728,20 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
else:
raise error.UnknownCommand(name)
- rst = ["%s\n\n" % header]
# description
if not doc:
- rst.append(" %s\n" % _("(no help text available)"))
- if util.safehasattr(doc, '__call__'):
- rst += [" %s\n" % l for l in doc().splitlines()]
+ doc = _("(no help text available)")
+ if hasattr(doc, '__call__'):
+ doc = doc()
+ ui.write("%s\n\n" % header)
+ ui.write("%s\n" % minirst.format(doc, textwidth, indent=4))
try:
cmdutil.findcmd(name, table)
- rst.append(_('\nuse "hg help -c %s" to see help for '
+ ui.write(_('\nuse "hg help -c %s" to see help for '
'the %s command\n') % (name, name))
except error.UnknownCommand:
pass
- return rst
def helpext(name):
try:
@@ -3363,10 +2757,10 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
head, tail = doc, ""
else:
head, tail = doc.split('\n', 1)
- rst = [_('%s extension - %s\n\n') % (name.split('.')[-1], head)]
+ ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head))
if tail:
- rst.extend(tail.splitlines(True))
- rst.append('\n')
+ ui.write(minirst.format(tail, textwidth))
+ ui.status('\n\n')
if mod:
try:
@@ -3374,38 +2768,23 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
except AttributeError:
ct = {}
modcmds = set([c.split('|', 1)[0] for c in ct])
- rst.extend(helplist(modcmds.__contains__))
+ helplist(_('list of commands:\n\n'), modcmds.__contains__)
else:
- rst.append(_('use "hg help extensions" for information on enabling '
+ ui.write(_('use "hg help extensions" for information on enabling '
'extensions\n'))
- return rst
def helpextcmd(name):
- cmd, ext, mod = extensions.disabledcmd(ui, name,
- ui.configbool('ui', 'strict'))
+ cmd, ext, mod = extensions.disabledcmd(ui, name, ui.config('ui', 'strict'))
doc = gettext(mod.__doc__).splitlines()[0]
- rst = help.listexts(_("'%s' is provided by the following "
+ msg = help.listexts(_("'%s' is provided by the following "
"extension:") % cmd, {ext: doc}, indent=4)
- rst.append('\n')
- rst.append(_('use "hg help extensions" for information on enabling '
+ ui.write(minirst.format(msg, textwidth))
+ ui.write('\n\n')
+ ui.write(_('use "hg help extensions" for information on enabling '
'extensions\n'))
- return rst
-
-
- rst = []
- kw = opts.get('keyword')
- if kw:
- matches = help.topicmatch(kw)
- for t, title in (('topics', _('Topics')),
- ('commands', _('Commands')),
- ('extensions', _('Extensions')),
- ('extensioncommands', _('Extension Commands'))):
- if matches[t]:
- rst.append('%s:\n\n' % title)
- rst.extend(minirst.maketable(sorted(matches[t]), 1))
- rst.append('\n')
- elif name and name != 'shortlist':
+
+ if name and name != 'shortlist':
i = None
if unknowncmd:
queries = (helpextcmd,)
@@ -3417,23 +2796,94 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
queries = (helptopic, helpcmd, helpext, helpextcmd)
for f in queries:
try:
- rst = f(name)
+ f(name)
i = None
break
except error.UnknownCommand, inst:
i = inst
if i:
raise i
+
else:
# program name
- if not ui.quiet:
- rst = [_("Mercurial Distributed SCM\n"), '\n']
- rst.extend(helplist())
+ if ui.verbose or with_version:
+ version_(ui)
+ else:
+ ui.status(_("Mercurial Distributed SCM\n"))
+ ui.status('\n')
+
+ # list of commands
+ if name == "shortlist":
+ header = _('basic commands:\n\n')
+ else:
+ header = _('list of commands:\n\n')
- keep = ui.verbose and ['verbose'] or []
- formatted, pruned = minirst.format(''.join(rst), textwidth, keep=keep)
- ui.write(formatted)
+ helplist(header)
+ if name != 'shortlist':
+ text = help.listexts(_('enabled extensions:'), extensions.enabled())
+ if text:
+ ui.write("\n%s\n" % minirst.format(text, textwidth))
+
+ # list all option lists
+ opt_output = []
+ multioccur = False
+ for title, options in option_lists:
+ opt_output.append(("\n%s" % title, None))
+ for option in options:
+ if len(option) == 5:
+ shortopt, longopt, default, desc, optlabel = option
+ else:
+ shortopt, longopt, default, desc = option
+ optlabel = _("VALUE") # default label
+
+ if _("DEPRECATED") in desc and not ui.verbose:
+ continue
+ if isinstance(default, list):
+ numqualifier = " %s [+]" % optlabel
+ multioccur = True
+ elif (default is not None) and not isinstance(default, bool):
+ numqualifier = " %s" % optlabel
+ else:
+ numqualifier = ""
+ opt_output.append(("%2s%s" %
+ (shortopt and "-%s" % shortopt,
+ longopt and " --%s%s" %
+ (longopt, numqualifier)),
+ "%s%s" % (desc,
+ default
+ and _(" (default: %s)") % default
+ or "")))
+ if multioccur:
+ msg = _("\n[+] marked option can be specified multiple times")
+ if ui.verbose and name != 'shortlist':
+ opt_output.append((msg, None))
+ else:
+ opt_output.insert(-1, (msg, None))
+ if not name:
+ ui.write(_("\nadditional help topics:\n\n"))
+ topics = []
+ for names, header, doc in help.helptable:
+ topics.append((sorted(names, key=len, reverse=True)[0], header))
+ topics_len = max([len(s[0]) for s in topics])
+ for t, desc in topics:
+ ui.write(" %-*s %s\n" % (topics_len, t, desc))
+
+ if opt_output:
+ colwidth = encoding.colwidth
+ # normalize: (opt or message, desc or None, width of opt)
+ entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0)
+ for opt, desc in opt_output]
+ hanging = max([e[2] for e in entries])
+ for opt, desc, width in entries:
+ if desc:
+ initindent = ' %s%s ' % (opt, ' ' * (hanging - width))
+ hangindent = ' ' * (hanging + 3)
+ ui.write('%s\n' % (util.wrap(desc, textwidth,
+ initindent=initindent,
+ hangindent=hangindent)))
+ else:
+ ui.write("%s\n" % opt)
@command('identify|id',
[('r', 'rev', '',
@@ -3442,11 +2892,10 @@ def help_(ui, name=None, unknowncmd=False, full=True, **opts):
('i', 'id', None, _('show global revision id')),
('b', 'branch', None, _('show branch')),
('t', 'tags', None, _('show tags')),
- ('B', 'bookmarks', None, _('show bookmarks')),
- ] + remoteopts,
+ ('B', 'bookmarks', None, _('show bookmarks'))],
_('[-nibtB] [-r REV] [SOURCE]'))
def identify(ui, repo, source=None, rev=None,
- num=None, id=None, branch=None, tags=None, bookmarks=None, **opts):
+ num=None, id=None, branch=None, tags=None, bookmarks=None):
"""identify the working copy or specified revision
Print a summary identifying the repository state at REV using one or
@@ -3460,22 +2909,6 @@ def identify(ui, repo, source=None, rev=None,
Specifying a path to a repository root or Mercurial bundle will
cause lookup to operate on that repository/bundle.
- .. container:: verbose
-
- Examples:
-
- - generate a build identifier for the working directory::
-
- hg id --id > build-id.dat
-
- - find the revision corresponding to a tag::
-
- hg id -n -r 1.3
-
- - check the most recent revision of a remote repository::
-
- hg id -r tip http://selenic.com/hg/
-
Returns 0 if successful.
"""
@@ -3490,11 +2923,10 @@ def identify(ui, repo, source=None, rev=None,
if source:
source, branches = hg.parseurl(ui.expandpath(source))
- peer = hg.peer(ui, opts, source)
- repo = peer.local()
- revs, checkout = hg.addbranchrevs(repo, peer, branches, None)
+ repo = hg.peer(ui, {}, source)
+ revs, checkout = hg.addbranchrevs(repo, repo, branches, None)
- if not repo:
+ if not repo.local():
if num or branch or tags:
raise util.Abort(
_("can't query remote revision number, branch, or tags"))
@@ -3503,16 +2935,16 @@ def identify(ui, repo, source=None, rev=None,
if not rev:
rev = "tip"
- remoterev = peer.lookup(rev)
+ remoterev = repo.lookup(rev)
if default or id:
output = [hexfunc(remoterev)]
def getbms():
bms = []
- if 'bookmarks' in peer.listkeys('namespaces'):
+ if 'bookmarks' in repo.listkeys('namespaces'):
hexremoterev = hex(remoterev)
- bms = [bm for bm, bmr in peer.listkeys('bookmarks').iteritems()
+ bms = [bm for bm, bmr in repo.listkeys('bookmarks').iteritems()
if bmr == hexremoterev]
return bms
@@ -3530,9 +2962,7 @@ def identify(ui, repo, source=None, rev=None,
parents = ctx.parents()
changed = ""
if default or id or num:
- if (util.any(repo.status())
- or util.any(ctx.sub(s).dirty() for s in ctx.substate)):
- changed = '+'
+ changed = util.any(repo.status()) and "+" or ""
if default or id:
output = ["%s%s" %
('+'.join([hexfunc(p.node()) for p in parents]), changed)]
@@ -3577,7 +3007,6 @@ def identify(ui, repo, source=None, rev=None,
_('directory strip option for patch. This has the same '
'meaning as the corresponding patch option'), _('NUM')),
('b', 'base', '', _('base path (DEPRECATED)'), _('PATH')),
- ('e', 'edit', False, _('invoke editor on commit messages')),
('f', 'force', None, _('skip check for outstanding uncommitted changes')),
('', 'no-commit', None,
_("don't commit, just update the working directory")),
@@ -3589,7 +3018,7 @@ def identify(ui, repo, source=None, rev=None,
_('use any branch information in patch (implied by --exact)'))] +
commitopts + commitopts2 + similarityopts,
_('[OPTION]... PATCH...'))
-def import_(ui, repo, patch1=None, *patches, **opts):
+def import_(ui, repo, patch1, *patches, **opts):
"""import an ordered set of patches
Import a list of patches and commit them individually (unless
@@ -3622,49 +3051,20 @@ def import_(ui, repo, patch1=None, *patches, **opts):
revision.
With -s/--similarity, hg will attempt to discover renames and
- copies in the patch in the same way as :hg:`addremove`.
+ copies in the patch in the same way as 'addremove'.
To read a patch from standard input, use "-" as the patch name. If
a URL is specified, the patch will be downloaded from it.
See :hg:`help dates` for a list of formats valid for -d/--date.
- .. container:: verbose
-
- Examples:
-
- - import a traditional patch from a website and detect renames::
-
- hg import -s 80 http://example.com/bugfix.patch
-
- - import a changeset from an hgweb server::
-
- hg import http://www.selenic.com/hg/rev/5ca8c111e9aa
-
- - import all the patches in an Unix-style mbox::
-
- hg import incoming-patches.mbox
-
- - attempt to exactly restore an exported changeset (not always
- possible)::
-
- hg import --exact proposed-fix.patch
-
Returns 0 on success.
"""
-
- if not patch1:
- raise util.Abort(_('need at least one patch to import'))
-
patches = (patch1,) + patches
date = opts.get('date')
if date:
opts['date'] = util.parsedate(date)
- editor = cmdutil.commiteditor
- if opts.get('edit'):
- editor = cmdutil.commitforceeditor
-
update = not opts.get('bypass')
if not update and opts.get('no_commit'):
raise util.Abort(_('cannot use --no-commit with --bypass'))
@@ -3680,9 +3080,9 @@ def import_(ui, repo, patch1=None, *patches, **opts):
if (opts.get('exact') or not opts.get('force')) and update:
cmdutil.bailifchanged(repo)
- base = opts["base"]
+ d = opts["base"]
strip = opts["strip"]
- wlock = lock = tr = None
+ wlock = lock = None
msgs = []
def checkexact(repo, n, nodeid):
@@ -3695,8 +3095,8 @@ def import_(ui, repo, patch1=None, *patches, **opts):
patch.extract(ui, hunk)
if not tmpname:
- return (None, None)
- msg = _('applied to working directory')
+ return None
+ commitid = _('to working directory')
try:
cmdline_message = cmdutil.logmessage(ui, opts)
@@ -3722,12 +3122,6 @@ def import_(ui, repo, patch1=None, *patches, **opts):
try:
p1 = repo[p1]
p2 = repo[p2]
- # Without any options, consider p2 only if the
- # patch is being applied on top of the recorded
- # first parent.
- if p1 != parents[0]:
- p1 = parents[0]
- p2 = repo[nullid]
except error.RepoError:
p1, p2 = parents
else:
@@ -3735,10 +3129,10 @@ def import_(ui, repo, patch1=None, *patches, **opts):
n = None
if update:
- if p1 != parents[0]:
+ if opts.get('exact') and p1 != parents[0]:
hg.clean(repo, p1.node())
- if p2 != parents[1]:
- repo.setparents(p1.node(), p2.node())
+ if p1 != parents[0] and p2 != parents[1]:
+ repo.dirstate.setparents(p1.node(), p2.node())
if opts.get('exact') or opts.get('import_branch'):
repo.dirstate.setbranch(branch or 'default')
@@ -3751,17 +3145,17 @@ def import_(ui, repo, patch1=None, *patches, **opts):
if message:
msgs.append(message)
else:
- if opts.get('exact') or p2:
- # If you got here, you either use --force and know what
- # you are doing or used --exact or a merge patch while
- # being updated to its first parent.
+ if opts.get('exact'):
m = None
else:
m = scmutil.matchfiles(repo, files or [])
n = repo.commit(message, opts.get('user') or user,
opts.get('date') or date, match=m,
- editor=editor)
+ editor=cmdutil.commiteditor)
checkexact(repo, n, nodeid)
+ # Force a dirstate write so that the next transaction
+ # backups an up-to-date file.
+ repo.dirstate.write()
else:
if opts.get('exact') or opts.get('import_branch'):
branch = branch or 'default'
@@ -3787,56 +3181,45 @@ def import_(ui, repo, patch1=None, *patches, **opts):
finally:
store.close()
if n:
- # i18n: refers to a short changeset id
- msg = _('created %s') % short(n)
- return (msg, n)
+ commitid = short(n)
+ return commitid
finally:
os.unlink(tmpname)
try:
- try:
- wlock = repo.wlock()
- if not opts.get('no_commit'):
- lock = repo.lock()
- tr = repo.transaction('import')
- parents = repo.parents()
- for patchurl in patches:
- if patchurl == '-':
- ui.status(_('applying patch from stdin\n'))
- patchfile = ui.fin
- patchurl = 'stdin' # for error message
+ wlock = repo.wlock()
+ lock = repo.lock()
+ parents = repo.parents()
+ lastcommit = None
+ for p in patches:
+ pf = os.path.join(d, p)
+
+ if pf == '-':
+ ui.status(_("applying patch from stdin\n"))
+ pf = ui.fin
+ else:
+ ui.status(_("applying %s\n") % p)
+ pf = url.open(ui, pf)
+
+ haspatch = False
+ for hunk in patch.split(pf):
+ commitid = tryone(ui, hunk, parents)
+ if commitid:
+ haspatch = True
+ if lastcommit:
+ ui.status(_('applied %s\n') % lastcommit)
+ lastcommit = commitid
+ if update or opts.get('exact'):
+ parents = repo.parents()
else:
- patchurl = os.path.join(base, patchurl)
- ui.status(_('applying %s\n') % patchurl)
- patchfile = url.open(ui, patchurl)
-
- haspatch = False
- for hunk in patch.split(patchfile):
- (msg, node) = tryone(ui, hunk, parents)
- if msg:
- haspatch = True
- ui.note(msg + '\n')
- if update or opts.get('exact'):
- parents = repo.parents()
- else:
- parents = [repo[node]]
-
- if not haspatch:
- raise util.Abort(_('%s: no diffs found') % patchurl)
-
- if tr:
- tr.close()
- if msgs:
- repo.savecommitmessage('\n* * *\n'.join(msgs))
- except: # re-raises
- # wlock.release() indirectly calls dirstate.write(): since
- # we're crashing, we do not want to change the working dir
- # parent after all, so make sure it writes nothing
- repo.dirstate.invalidate()
- raise
+ parents = [repo[commitid]]
+
+ if not haspatch:
+ raise util.Abort(_('no diffs found'))
+
+ if msgs:
+ repo.savecommitmessage('\n* * *\n'.join(msgs))
finally:
- if tr:
- tr.release()
release(lock, wlock)
@command('incoming|in',
@@ -3865,17 +3248,6 @@ def incoming(ui, repo, source="default", **opts):
Returns 0 if there are incoming changes, 1 otherwise.
"""
- if opts.get('graph'):
- cmdutil.checkunsupportedgraphflags([], opts)
- def display(other, chlist, displayer):
- revdag = cmdutil.graphrevs(other, chlist, opts)
- showparents = [ctx.node() for ctx in repo[None].parents()]
- cmdutil.displaygraph(ui, revdag, displayer, showparents,
- graphmod.asciiedges)
-
- hg._incoming(display, lambda: 1, ui, repo, source, opts, buffered=True)
- return 0
-
if opts.get('bundle') and opts.get('subrepos'):
raise util.Abort(_('cannot combine --bundle and --subrepos'))
@@ -3959,14 +3331,14 @@ def locate(ui, repo, *pats, **opts):
[('f', 'follow', None,
_('follow changeset history, or file history across copies and renames')),
('', 'follow-first', None,
- _('only follow the first parent of merge changesets (DEPRECATED)')),
+ _('only follow the first parent of merge changesets')),
('d', 'date', '', _('show revisions matching date spec'), _('DATE')),
('C', 'copies', None, _('show copied files')),
('k', 'keyword', [],
_('do case-insensitive search for a given text'), _('TEXT')),
('r', 'rev', [], _('show the specified revision or range'), _('REV')),
('', 'removed', None, _('include revisions where files were removed')),
- ('m', 'only-merges', None, _('show only merges (DEPRECATED)')),
+ ('m', 'only-merges', None, _('show only merges')),
('u', 'user', [], _('revisions committed by user'), _('USER')),
('', 'only-branch', [],
_('show only changesets within the given named branch (DEPRECATED)'),
@@ -3975,7 +3347,7 @@ def locate(ui, repo, *pats, **opts):
_('show changesets within the given named branch'), _('BRANCH')),
('P', 'prune', [],
_('do not display revision or any of its ancestors'), _('REV')),
- ('', 'hidden', False, _('show hidden changesets (DEPRECATED)')),
+ ('', 'hidden', False, _('show hidden changesets')),
] + logopts + walkopts,
_('[OPTION]... [FILE]'))
def log(ui, repo, *pats, **opts):
@@ -3984,14 +3356,18 @@ def log(ui, repo, *pats, **opts):
Print the revision history of the specified files or the entire
project.
- If no revision range is specified, the default is ``tip:0`` unless
- --follow is set, in which case the working directory parent is
- used as the starting revision.
-
File history is shown without following rename or copy history of
files. Use -f/--follow with a filename to follow history across
renames and copies. --follow without a filename will only show
- ancestors or descendants of the starting revision.
+ ancestors or descendants of the starting revision. --follow-first
+ only follows the first parent of merge revisions.
+
+ If no revision range is specified, the default is ``tip:0`` unless
+ --follow is set, in which case the working directory parent is
+ used as the starting revision. You can specify a revision set for
+ log, see :hg:`help revsets` for more information.
+
+ See :hg:`help dates` for a list of formats valid for -d/--date.
By default this command prints revision number and changeset id,
tags, non-trivial parents, user, date and time, and a summary for
@@ -4004,77 +3380,19 @@ def log(ui, repo, *pats, **opts):
its first parent. Also, only files different from BOTH parents
will appear in files:.
- .. note::
- for performance reasons, log FILE may omit duplicate changes
- made on branches and will not show deletions. To see all
- changes including duplicates and deletions, use the --removed
- switch.
-
- .. container:: verbose
-
- Some examples:
-
- - changesets with full descriptions and file lists::
-
- hg log -v
-
- - changesets ancestral to the working directory::
-
- hg log -f
-
- - last 10 commits on the current branch::
-
- hg log -l 10 -b .
-
- - changesets showing all modifications of a file, including removals::
-
- hg log --removed file.c
-
- - all changesets that touch a directory, with diffs, excluding merges::
-
- hg log -Mp lib/
-
- - all revision numbers that match a keyword::
-
- hg log -k bug --template "{rev}\\n"
-
- - check if a given changeset is included is a tagged release::
-
- hg log -r "a21ccf and ancestor(1.9)"
-
- - find all changesets by some user in a date range::
-
- hg log -k alice -d "may 2008 to jul 2008"
-
- - summary of all changesets after the last tag::
-
- hg log -r "last(tagged())::" --template "{desc|firstline}\\n"
-
- See :hg:`help dates` for a list of formats valid for -d/--date.
-
- See :hg:`help revisions` and :hg:`help revsets` for more about
- specifying revisions.
-
- See :hg:`help templates` for more about pre-packaged styles and
- specifying custom templates.
-
Returns 0 on success.
"""
- if opts.get('graph'):
- return cmdutil.graphlog(ui, repo, *pats, **opts)
matchfn = scmutil.match(repo[None], pats, opts)
limit = cmdutil.loglimit(opts)
count = 0
- getrenamed, endrev = None, None
- if opts.get('copies'):
- if opts.get('rev'):
- endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
- getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
+ endrev = None
+ if opts.get('copies') and opts.get('rev'):
+ endrev = max(scmutil.revrange(repo, opts.get('rev'))) + 1
df = False
- if opts.get("date"):
+ if opts["date"]:
df = util.matchdate(opts["date"])
branches = opts.get('branch', []) + opts.get('only_branch', [])
@@ -4095,28 +3413,22 @@ def log(ui, repo, *pats, **opts):
return
if df and not df(ctx.date()[0]):
return
-
- lower = encoding.lower
- if opts.get('user'):
- luser = lower(ctx.user())
- for k in [lower(x) for x in opts['user']]:
- if (k in luser):
- break
- else:
- return
+ if opts['user'] and not [k for k in opts['user']
+ if k.lower() in ctx.user().lower()]:
+ return
if opts.get('keyword'):
- luser = lower(ctx.user())
- ldesc = lower(ctx.description())
- lfiles = lower(" ".join(ctx.files()))
- for k in [lower(x) for x in opts['keyword']]:
- if (k in luser or k in ldesc or k in lfiles):
+ for k in [kw.lower() for kw in opts['keyword']]:
+ if (k in ctx.user().lower() or
+ k in ctx.description().lower() or
+ k in " ".join(ctx.files()).lower()):
break
else:
return
copies = None
- if getrenamed is not None and rev:
+ if opts.get('copies') and rev:
copies = []
+ getrenamed = templatekw.getrenamedfn(repo, endrev=endrev)
for fn in ctx.files():
rename = getrenamed(fn, rev)
if rename:
@@ -4195,10 +3507,10 @@ def manifest(ui, repo, node=None, rev=None, **opts):
@command('^merge',
[('f', 'force', None, _('force a merge with outstanding changes')),
+ ('t', 'tool', '', _('specify merge tool')),
('r', 'rev', '', _('revision to merge'), _('REV')),
('P', 'preview', None,
- _('review revisions to merge (no merge is performed)'))
- ] + mergetoolopts,
+ _('review revisions to merge (no merge is performed)'))],
_('[-P] [-f] [[-r] REV]'))
def merge(ui, repo, node=None, **opts):
"""merge working directory with another revision
@@ -4234,61 +3546,34 @@ def merge(ui, repo, node=None, **opts):
if not node:
node = opts.get('rev')
- if node:
- node = scmutil.revsingle(repo, node).node()
-
- if not node and repo._bookmarkcurrent:
- bmheads = repo.bookmarkheads(repo._bookmarkcurrent)
- curhead = repo[repo._bookmarkcurrent]
- if len(bmheads) == 2:
- if curhead == bmheads[0]:
- node = bmheads[1]
- else:
- node = bmheads[0]
- elif len(bmheads) > 2:
- raise util.Abort(_("multiple matching bookmarks to merge - "
- "please merge with an explicit rev or bookmark"),
- hint=_("run 'hg heads' to see all heads"))
- elif len(bmheads) <= 1:
- raise util.Abort(_("no matching bookmark to merge - "
- "please merge with an explicit rev or bookmark"),
- hint=_("run 'hg heads' to see all heads"))
-
- if not node and not repo._bookmarkcurrent:
+ if not node:
branch = repo[None].branch()
bheads = repo.branchheads(branch)
- nbhs = [bh for bh in bheads if not repo[bh].bookmarks()]
-
- if len(nbhs) > 2:
+ if len(bheads) > 2:
raise util.Abort(_("branch '%s' has %d heads - "
"please merge with an explicit rev")
% (branch, len(bheads)),
hint=_("run 'hg heads .' to see heads"))
parent = repo.dirstate.p1()
- if len(nbhs) == 1:
- if len(bheads) > 1:
- raise util.Abort(_("heads are bookmarked - "
- "please merge with an explicit rev"),
- hint=_("run 'hg heads' to see all heads"))
+ if len(bheads) == 1:
if len(repo.heads()) > 1:
raise util.Abort(_("branch '%s' has one head - "
"please merge with an explicit rev")
% branch,
hint=_("run 'hg heads' to see all heads"))
- msg, hint = _('nothing to merge'), None
- if parent != repo.lookup(branch):
- hint = _("use 'hg update' instead")
- raise util.Abort(msg, hint=hint)
+ msg = _('there is nothing to merge')
+ if parent != repo.lookup(repo[None].branch()):
+ msg = _('%s - use "hg update" instead') % msg
+ raise util.Abort(msg)
if parent not in bheads:
raise util.Abort(_('working directory not at a head revision'),
hint=_("use 'hg update' or merge with an "
"explicit revision"))
- if parent == nbhs[0]:
- node = nbhs[-1]
- else:
- node = nbhs[0]
+ node = parent == bheads[0] and bheads[-1] or bheads[0]
+ else:
+ node = scmutil.revsingle(repo, node).node()
if opts.get('preview'):
# find nodes that are ancestors of p2 but not of p1
@@ -4304,7 +3589,7 @@ def merge(ui, repo, node=None, **opts):
try:
# ui.forcemerge is an internal variable, do not document
- repo.ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
+ ui.setconfig('ui', 'forcemerge', opts.get('tool', ''))
return hg.merge(repo, node, force=opts.get('force'))
finally:
ui.setconfig('ui', 'forcemerge', '')
@@ -4330,18 +3615,6 @@ def outgoing(ui, repo, dest=None, **opts):
Returns 0 if there are outgoing changes, 1 otherwise.
"""
- if opts.get('graph'):
- cmdutil.checkunsupportedgraphflags([], opts)
- o = hg._outgoing(ui, repo, dest, opts)
- if o is None:
- return
-
- revdag = cmdutil.graphrevs(repo, o, opts)
- displayer = cmdutil.show_changeset(ui, repo, opts, buffered=True)
- showparents = [ctx.node() for ctx in repo[None].parents()]
- cmdutil.displaygraph(ui, revdag, displayer, showparents,
- graphmod.asciiedges)
- return 0
if opts.get('bookmarks'):
dest = ui.expandpath(dest or 'default-push', dest or 'default')
@@ -4447,106 +3720,21 @@ def paths(ui, repo, search=None):
else:
ui.write("%s = %s\n" % (name, util.hidepassword(path)))
-@command('^phase',
- [('p', 'public', False, _('set changeset phase to public')),
- ('d', 'draft', False, _('set changeset phase to draft')),
- ('s', 'secret', False, _('set changeset phase to secret')),
- ('f', 'force', False, _('allow to move boundary backward')),
- ('r', 'rev', [], _('target revision'), _('REV')),
- ],
- _('[-p|-d|-s] [-f] [-r] REV...'))
-def phase(ui, repo, *revs, **opts):
- """set or show the current phase name
-
- With no argument, show the phase name of specified revisions.
-
- With one of -p/--public, -d/--draft or -s/--secret, change the
- phase value of the specified revisions.
-
- Unless -f/--force is specified, :hg:`phase` won't move changeset from a
- lower phase to an higher phase. Phases are ordered as follows::
-
- public < draft < secret
-
- Return 0 on success, 1 if no phases were changed or some could not
- be changed.
- """
- # search for a unique phase argument
- targetphase = None
- for idx, name in enumerate(phases.phasenames):
- if opts[name]:
- if targetphase is not None:
- raise util.Abort(_('only one phase can be specified'))
- targetphase = idx
-
- # look for specified revision
- revs = list(revs)
- revs.extend(opts['rev'])
- if not revs:
- raise util.Abort(_('no revisions specified'))
-
- revs = scmutil.revrange(repo, revs)
-
- lock = None
- ret = 0
- if targetphase is None:
- # display
- for r in revs:
- ctx = repo[r]
- ui.write('%i: %s\n' % (ctx.rev(), ctx.phasestr()))
- else:
- lock = repo.lock()
- try:
- # set phase
- if not revs:
- raise util.Abort(_('empty revision set'))
- nodes = [repo[r].node() for r in revs]
- olddata = repo._phasecache.getphaserevs(repo)[:]
- phases.advanceboundary(repo, targetphase, nodes)
- if opts['force']:
- phases.retractboundary(repo, targetphase, nodes)
- finally:
- lock.release()
- newdata = repo._phasecache.getphaserevs(repo)
- changes = sum(o != newdata[i] for i, o in enumerate(olddata))
- rejected = [n for n in nodes
- if newdata[repo[n].rev()] < targetphase]
- if rejected:
- ui.warn(_('cannot move %i changesets to a more permissive '
- 'phase, use --force\n') % len(rejected))
- ret = 1
- if changes:
- msg = _('phase changed for %i changesets\n') % changes
- if ret:
- ui.status(msg)
- else:
- ui.note(msg)
- else:
- ui.warn(_('no phases changed\n'))
- ret = 1
- return ret
-
def postincoming(ui, repo, modheads, optupdate, checkout):
if modheads == 0:
return
if optupdate:
- movemarkfrom = repo['.'].node()
try:
- ret = hg.update(repo, checkout)
+ return hg.update(repo, checkout)
except util.Abort, inst:
- ui.warn(_("not updating: %s\n") % str(inst))
+ ui.warn(_("not updating: %s\n" % str(inst)))
return 0
- if not ret and not checkout:
- if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
- ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
- return ret
if modheads > 1:
currentbranchheads = len(repo.branchheads())
if currentbranchheads == modheads:
ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n"))
elif currentbranchheads > 1:
- ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to "
- "merge)\n"))
+ ui.status(_("(run 'hg heads .' to see heads, 'hg merge' to merge)\n"))
else:
ui.status(_("(run 'hg heads' to see heads)\n"))
else:
@@ -4605,7 +3793,7 @@ def pull(ui, repo, source="default", **opts):
raise util.Abort(err)
modheads = repo.pull(other, heads=revs, force=opts.get('force'))
- bookmarks.updatefromremote(ui, repo, other, source)
+ bookmarks.updatefromremote(ui, repo, other)
if checkout:
checkout = str(repo.changelog.rev(other.lookup(checkout)))
repo._subtoppath = source
@@ -4660,10 +3848,6 @@ def push(ui, repo, dest=None, **opts):
If -r/--rev is used, the specified revision and all its ancestors
will be pushed to the remote repository.
- If -B/--bookmark is used, the specified bookmarked revision, its
- ancestors, and the bookmark will be pushed to the remote
- repository.
-
Please see :hg:`help urls` for important details about ``ssh://``
URLs. If DESTINATION is omitted, a default path will be used.
@@ -4686,7 +3870,7 @@ def push(ui, repo, dest=None, **opts):
revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev'))
other = hg.peer(repo, opts, dest)
if revs:
- revs = [repo.lookup(r) for r in scmutil.revrange(repo, revs)]
+ revs = [repo.lookup(rev) for rev in revs]
repo._subtoppath = dest
try:
@@ -4694,14 +3878,14 @@ def push(ui, repo, dest=None, **opts):
c = repo['']
subs = c.substate # only repos that are committed
for s in sorted(subs):
- if c.sub(s).push(opts) == 0:
+ if not c.sub(s).push(opts.get('force')):
return False
finally:
del repo._subtoppath
result = repo.push(other, opts.get('force'), revs=revs,
newbranch=opts.get('new_branch'))
- result = not result
+ result = (result == 0)
if opts.get('bookmark'):
rb = other.listkeys('bookmarks')
@@ -4751,36 +3935,31 @@ def recover(ui, repo):
def remove(ui, repo, *pats, **opts):
"""remove the specified files on the next commit
- Schedule the indicated files for removal from the current branch.
-
- This command schedules the files to be removed at the next commit.
- To undo a remove before that, see :hg:`revert`. To undo added
- files, see :hg:`forget`.
+ Schedule the indicated files for removal from the repository.
- .. container:: verbose
+ This only removes files from the current branch, not from the
+ entire project history. -A/--after can be used to remove only
+ files that have already been deleted, -f/--force can be used to
+ force deletion, and -Af can be used to remove files from the next
+ revision without deleting them from the working directory.
+
+ The following table details the behavior of remove for different
+ file states (columns) and option combinations (rows). The file
+ states are Added [A], Clean [C], Modified [M] and Missing [!] (as
+ reported by :hg:`status`). The actions are Warn, Remove (from
+ branch) and Delete (from disk)::
+
+ A C M !
+ none W RD W R
+ -f R RD RD R
+ -A W W W R
+ -Af R R R R
+
+ Note that remove never deletes files in Added [A] state from the
+ working directory, not even if option --force is specified.
- -A/--after can be used to remove only files that have already
- been deleted, -f/--force can be used to force deletion, and -Af
- can be used to remove files from the next revision without
- deleting them from the working directory.
-
- The following table details the behavior of remove for different
- file states (columns) and option combinations (rows). The file
- states are Added [A], Clean [C], Modified [M] and Missing [!]
- (as reported by :hg:`status`). The actions are Warn, Remove
- (from branch) and Delete (from disk):
-
- ======= == == == ==
- A C M !
- ======= == == == ==
- none W RD W R
- -f R RD RD R
- -A W W W R
- -Af R R R R
- ======= == == == ==
-
- Note that remove never deletes files in Added [A] state from the
- working directory, not even if option --force is specified.
+ This command schedules the files to be removed at the next commit.
+ To undo a remove before that, see :hg:`revert`.
Returns 0 on success, 1 if any warnings encountered.
"""
@@ -4815,8 +3994,8 @@ def remove(ui, repo, *pats, **opts):
' to force removal)\n') % m.rel(f))
ret = 1
for f in added:
- ui.warn(_('not removing %s: file has been marked for add'
- ' (use forget to undo)\n') % m.rel(f))
+ ui.warn(_('not removing %s: file has been marked for add (use -f'
+ ' to force removal)\n') % m.rel(f))
ret = 1
for f in sorted(list):
@@ -4872,8 +4051,9 @@ def rename(ui, repo, *pats, **opts):
('l', 'list', None, _('list state of files needing merge')),
('m', 'mark', None, _('mark files as resolved')),
('u', 'unmark', None, _('mark files as unresolved')),
+ ('t', 'tool', '', _('specify merge tool')),
('n', 'no-status', None, _('hide status prefix'))]
- + mergetoolopts + walkopts,
+ + walkopts,
_('[OPTION]... [FILE]...'))
def resolve(ui, repo, *pats, **opts):
"""redo merges or set/view the merge status of files
@@ -4883,8 +4063,7 @@ def resolve(ui, repo, *pats, **opts):
setting, or a command-line merge tool like ``diff3``. The resolve
command is used to manage the files involved in a merge, after
:hg:`merge` has been run, and before :hg:`commit` is run (i.e. the
- working directory must have two parents). See :hg:`help
- merge-tools` for information on configuring merge tools.
+ working directory must have two parents).
The resolve command can be used in the following ways:
@@ -4893,8 +4072,7 @@ def resolve(ui, repo, *pats, **opts):
performed for files already marked as resolved. Use ``--all/-a``
to select all unresolved files. ``--tool`` can be used to specify
the merge tool used for the given files. It overrides the HGMERGE
- environment variable and your configuration files. Previous file
- contents are saved with a ``.orig`` suffix.
+ environment variable and your configuration files.
- :hg:`resolve -m [FILE]`: mark a file as having been resolved
(e.g. after having manually fixed-up the files). The default is
@@ -4967,17 +4145,15 @@ def resolve(ui, repo, *pats, **opts):
[('a', 'all', None, _('revert all changes when no arguments given')),
('d', 'date', '', _('tipmost revision matching date'), _('DATE')),
('r', 'rev', '', _('revert to the specified revision'), _('REV')),
- ('C', 'no-backup', None, _('do not save backup copies of files')),
+ ('', 'no-backup', None, _('do not save backup copies of files')),
] + walkopts + dryrunopts,
_('[OPTION]... [-r REV] [NAME]...'))
def revert(ui, repo, *pats, **opts):
"""restore files to their checkout state
.. note::
-
To check out earlier revisions, you should use :hg:`update REV`.
- To cancel an uncommitted merge (and lose your changes), use
- :hg:`update --clean .`.
+ To cancel a merge (and lose your changes), use :hg:`update --clean .`.
With no revision specified, revert the specified files or directories
to the contents they had in the parent of the working directory.
@@ -5013,6 +4189,7 @@ def revert(ui, repo, *pats, **opts):
hint=_('use "hg update" or see "hg help revert"'))
ctx = scmutil.revsingle(repo, opts.get('rev'))
+ node = ctx.node()
if not pats and not opts.get('all'):
msg = _("no files or directories specified")
@@ -5021,7 +4198,6 @@ def revert(ui, repo, *pats, **opts):
" or 'hg update -C .' to abort the merge")
raise util.Abort(msg, hint=hint)
dirty = util.any(repo.status())
- node = ctx.node()
if node != parent:
if dirty:
hint = _("uncommitted changes, use --all to discard all"
@@ -5035,10 +4211,177 @@ def revert(ui, repo, *pats, **opts):
hint = _("use --all to revert all files")
raise util.Abort(msg, hint=hint)
- return cmdutil.revert(ui, repo, ctx, (parent, p2), *pats, **opts)
+ mf = ctx.manifest()
+ if node == parent:
+ pmf = mf
+ else:
+ pmf = None
+
+ # need all matching names in dirstate and manifest of target rev,
+ # so have to walk both. do not print errors if files exist in one
+ # but not other.
+
+ names = {}
+
+ wlock = repo.wlock()
+ try:
+ # walk dirstate.
+
+ m = scmutil.match(repo[None], pats, opts)
+ m.bad = lambda x, y: False
+ for abs in repo.walk(m):
+ names[abs] = m.rel(abs), m.exact(abs)
+
+ # walk target manifest.
+
+ def badfn(path, msg):
+ if path in names:
+ return
+ path_ = path + '/'
+ for f in names:
+ if f.startswith(path_):
+ return
+ ui.warn("%s: %s\n" % (m.rel(path), msg))
+
+ m = scmutil.match(repo[node], pats, opts)
+ m.bad = badfn
+ for abs in repo[node].walk(m):
+ if abs not in names:
+ names[abs] = m.rel(abs), m.exact(abs)
+
+ m = scmutil.matchfiles(repo, names)
+ changes = repo.status(match=m)[:4]
+ modified, added, removed, deleted = map(set, changes)
+
+ # if f is a rename, also revert the source
+ cwd = repo.getcwd()
+ for f in added:
+ src = repo.dirstate.copied(f)
+ if src and src not in names and repo.dirstate[src] == 'r':
+ removed.add(src)
+ names[src] = (repo.pathto(src, cwd), True)
+
+ def removeforget(abs):
+ if repo.dirstate[abs] == 'a':
+ return _('forgetting %s\n')
+ return _('removing %s\n')
+
+ revert = ([], _('reverting %s\n'))
+ add = ([], _('adding %s\n'))
+ remove = ([], removeforget)
+ undelete = ([], _('undeleting %s\n'))
+
+ disptable = (
+ # dispatch table:
+ # file state
+ # action if in target manifest
+ # action if not in target manifest
+ # make backup if in target manifest
+ # make backup if not in target manifest
+ (modified, revert, remove, True, True),
+ (added, revert, remove, True, False),
+ (removed, undelete, None, False, False),
+ (deleted, revert, remove, False, False),
+ )
+
+ for abs, (rel, exact) in sorted(names.items()):
+ mfentry = mf.get(abs)
+ target = repo.wjoin(abs)
+ def handle(xlist, dobackup):
+ xlist[0].append(abs)
+ if (dobackup and not opts.get('no_backup') and
+ os.path.lexists(target)):
+ bakname = "%s.orig" % rel
+ ui.note(_('saving current version of %s as %s\n') %
+ (rel, bakname))
+ if not opts.get('dry_run'):
+ util.rename(target, bakname)
+ if ui.verbose or not exact:
+ msg = xlist[1]
+ if not isinstance(msg, basestring):
+ msg = msg(abs)
+ ui.status(msg % rel)
+ for table, hitlist, misslist, backuphit, backupmiss in disptable:
+ if abs not in table:
+ continue
+ # file has changed in dirstate
+ if mfentry:
+ handle(hitlist, backuphit)
+ elif misslist is not None:
+ handle(misslist, backupmiss)
+ break
+ else:
+ if abs not in repo.dirstate:
+ if mfentry:
+ handle(add, True)
+ elif exact:
+ ui.warn(_('file not managed: %s\n') % rel)
+ continue
+ # file has not changed in dirstate
+ if node == parent:
+ if exact:
+ ui.warn(_('no changes needed to %s\n') % rel)
+ continue
+ if pmf is None:
+ # only need parent manifest in this unlikely case,
+ # so do not read by default
+ pmf = repo[parent].manifest()
+ if abs in pmf:
+ if mfentry:
+ # if version of file is same in parent and target
+ # manifests, do nothing
+ if (pmf[abs] != mfentry or
+ pmf.flags(abs) != mf.flags(abs)):
+ handle(revert, False)
+ else:
+ handle(remove, False)
+
+ if not opts.get('dry_run'):
+ def checkout(f):
+ fc = ctx[f]
+ repo.wwrite(f, fc.data(), fc.flags())
+
+ audit_path = scmutil.pathauditor(repo.root)
+ for f in remove[0]:
+ if repo.dirstate[f] == 'a':
+ repo.dirstate.drop(f)
+ continue
+ audit_path(f)
+ try:
+ util.unlinkpath(repo.wjoin(f))
+ except OSError:
+ pass
+ repo.dirstate.remove(f)
+
+ normal = None
+ if node == parent:
+ # We're reverting to our parent. If possible, we'd like status
+ # to report the file as clean. We have to use normallookup for
+ # merges to avoid losing information about merged/dirty files.
+ if p2 != nullid:
+ normal = repo.dirstate.normallookup
+ else:
+ normal = repo.dirstate.normal
+ for f in revert[0]:
+ checkout(f)
+ if normal:
+ normal(f)
+
+ for f in add[0]:
+ checkout(f)
+ repo.dirstate.add(f)
+
+ normal = repo.dirstate.normallookup
+ if node == parent and p2 == nullid:
+ normal = repo.dirstate.normal
+ for f in undelete[0]:
+ checkout(f)
+ normal(f)
+
+ finally:
+ wlock.release()
-@command('rollback', dryrunopts +
- [('f', 'force', False, _('ignore safety measures'))])
+@command('rollback', dryrunopts)
def rollback(ui, repo, **opts):
"""roll back the last transaction (dangerous)
@@ -5050,22 +4393,14 @@ def rollback(ui, repo, **opts):
Transactions are used to encapsulate the effects of all commands
that create new changesets or propagate existing changesets into a
- repository.
-
- .. container:: verbose
-
- For example, the following commands are transactional, and their
- effects can be rolled back:
+ repository. For example, the following commands are transactional,
+ and their effects can be rolled back:
- - commit
- - import
- - pull
- - push (with this repository as the destination)
- - unbundle
-
- To avoid permanent data loss, rollback will refuse to rollback a
- commit transaction if it isn't checked out. Use --force to
- override this protection.
+ - commit
+ - import
+ - pull
+ - push (with this repository as the destination)
+ - unbundle
This command is not intended for use on public repositories. Once
changes are visible for pull by other users, rolling a transaction
@@ -5076,8 +4411,7 @@ def rollback(ui, repo, **opts):
Returns 0 on success, 1 if no rollback data is available.
"""
- return repo.rollback(dryrun=opts.get('dry_run'),
- force=opts.get('force'))
+ return repo.rollback(opts.get('dry_run'))
@command('root', [])
def root(ui, repo):
@@ -5145,7 +4479,7 @@ def serve(ui, repo, **opts):
def checkrepo():
if repo is None:
- raise error.RepoError(_("there is no Mercurial repository here"
+ raise error.RepoError(_("There is no Mercurial repository here"
" (.hg not found)"))
if opts["stdio"]:
@@ -5176,7 +4510,7 @@ def serve(ui, repo, **opts):
o = opts.get('web_conf') or opts.get('webdir_conf')
if not o:
if not repo:
- raise error.RepoError(_("there is no Mercurial repository"
+ raise error.RepoError(_("There is no Mercurial repository"
" here (.hg not found)"))
o = repo.root
@@ -5319,23 +4653,6 @@ def status(ui, repo, *pats, **opts):
I = ignored
= origin of the previous file listed as A (added)
- .. container:: verbose
-
- Examples:
-
- - show changes in the working directory relative to a
- changeset::
-
- hg status --rev 9353
-
- - show all changes including copies in an existing changeset::
-
- hg status --copies --change 9353
-
- - get a NUL separated list of added files, suitable for xargs::
-
- hg status -an0
-
Returns 0 on success.
"""
@@ -5346,7 +4663,7 @@ def status(ui, repo, *pats, **opts):
msg = _('cannot specify --rev and --change at the same time')
raise util.Abort(msg)
elif change:
- node2 = scmutil.revsingle(repo, change, None).node()
+ node2 = repo.lookup(change)
node1 = repo[node2].p1().node()
else:
node1, node2 = scmutil.revpair(repo, revs)
@@ -5367,24 +4684,31 @@ def status(ui, repo, *pats, **opts):
changestates = zip(states, 'MAR!?IC', stat)
if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'):
- copy = copies.pathcopies(repo[node1], repo[node2])
-
- fm = ui.formatter('status', opts)
- format = '%s %s' + end
- if opts.get('no_status'):
- format = '%.0s%s' + end
+ ctxn = repo[nullid]
+ ctx1 = repo[node1]
+ ctx2 = repo[node2]
+ added = stat[1]
+ if node2 is None:
+ added = stat[0] + stat[1] # merged?
+
+ for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems():
+ if k in added:
+ copy[k] = v
+ elif v in added:
+ copy[v] = k
for state, char, files in changestates:
if state in show:
- label = 'status.' + state
+ format = "%s %%s%s" % (char, end)
+ if opts.get('no_status'):
+ format = "%%s%s" % end
+
for f in files:
- fm.startitem()
- fm.write("status path", format, char,
- repo.pathto(f, cwd), label=label)
+ ui.write(format % repo.pathto(f, cwd),
+ label='status.' + state)
if f in copy:
- fm.write("copy", ' %s' + end, repo.pathto(copy[f], cwd),
+ ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end),
label='status.copied')
- fm.end()
@command('^summary|sum',
[('', 'remote', None, _('check for push and pull'))], '[--remote]')
@@ -5403,7 +4727,6 @@ def summary(ui, repo, **opts):
ctx = repo[None]
parents = ctx.parents()
pnode = parents[0].node()
- marks = []
for p in parents:
# label with log.changeset (instead of log.parent) since this
@@ -5412,7 +4735,7 @@ def summary(ui, repo, **opts):
label='log.changeset')
ui.write(' '.join(p.tags()), label='log.tag')
if p.bookmarks():
- marks.extend(p.bookmarks())
+ ui.write(' ' + ' '.join(p.bookmarks()), label='log.bookmark')
if p.rev() == -1:
if not len(repo):
ui.write(_(' (empty repository)'))
@@ -5431,20 +4754,6 @@ def summary(ui, repo, **opts):
else:
ui.status(m, label='log.branch')
- if marks:
- current = repo._bookmarkcurrent
- ui.write(_('bookmarks:'), label='log.bookmark')
- if current is not None:
- try:
- marks.remove(current)
- ui.write(' *' + current, label='bookmarks.current')
- except ValueError:
- # current bookmark not in parent ctx marks
- pass
- for m in marks:
- ui.write(' ' + m, label='log.bookmark')
- ui.write('\n', label='log.bookmark')
-
st = list(repo.status(unknown=True))[:6]
c = repo.dirstate.copies()
@@ -5488,7 +4797,7 @@ def summary(ui, repo, **opts):
t += _(' (merge)')
elif branch != parents[0].branch():
t += _(' (new branch)')
- elif (parents[0].closesbranch() and
+ elif (parents[0].extra().get('close') and
pnode in repo.branchheads(branch, closed=True)):
t += _(' (head closed)')
elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]):
@@ -5507,12 +4816,12 @@ def summary(ui, repo, **opts):
cl = repo.changelog
for a in [cl.rev(n) for n in bheads]:
new[a] = 1
- for a in cl.ancestors([cl.rev(n) for n in bheads]):
+ for a in cl.ancestors(*[cl.rev(n) for n in bheads]):
new[a] = 1
for a in [p.rev() for p in parents]:
if a >= 0:
new[a] = 0
- for a in cl.ancestors([p.rev() for p in parents]):
+ for a in cl.ancestors(*[p.rev() for p in parents]):
new[a] = 0
new = sum(new)
@@ -5528,8 +4837,7 @@ def summary(ui, repo, **opts):
t = []
source, branches = hg.parseurl(ui.expandpath('default'))
other = hg.peer(repo, {}, source)
- revs, checkout = hg.addbranchrevs(repo, other, branches,
- opts.get('rev'))
+ revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev'))
ui.debug('comparing with %s\n' % util.hidepassword(source))
repo.ui.pushbuffer()
commoninc = discovery.findcommonincoming(repo, other)
@@ -5545,10 +4853,10 @@ def summary(ui, repo, **opts):
commoninc = None
ui.debug('comparing with %s\n' % util.hidepassword(dest))
repo.ui.pushbuffer()
- outgoing = discovery.findcommonoutgoing(repo, other,
- commoninc=commoninc)
+ common, outheads = discovery.findcommonoutgoing(repo, other,
+ commoninc=commoninc)
repo.ui.popbuffer()
- o = outgoing.missing
+ o = repo.changelog.findmissing(common=common, heads=outheads)
if o:
t.append(_('%d outgoing') % len(o))
if 'bookmarks' in other.listkeys('namespaces'):
@@ -5608,73 +4916,62 @@ def tag(ui, repo, name1, *names, **opts):
Returns 0 on success.
"""
- wlock = lock = None
- try:
- wlock = repo.wlock()
- lock = repo.lock()
- rev_ = "."
- names = [t.strip() for t in (name1,) + names]
- if len(names) != len(set(names)):
- raise util.Abort(_('tag names must be unique'))
- for n in names:
- if n in ['tip', '.', 'null']:
- raise util.Abort(_("the name '%s' is reserved") % n)
- if not n:
- raise util.Abort(_('tag names cannot consist entirely of '
- 'whitespace'))
- if opts.get('rev') and opts.get('remove'):
- raise util.Abort(_("--rev and --remove are incompatible"))
- if opts.get('rev'):
- rev_ = opts['rev']
- message = opts.get('message')
- if opts.get('remove'):
- expectedtype = opts.get('local') and 'local' or 'global'
- for n in names:
- if not repo.tagtype(n):
- raise util.Abort(_("tag '%s' does not exist") % n)
- if repo.tagtype(n) != expectedtype:
- if expectedtype == 'global':
- raise util.Abort(_("tag '%s' is not a global tag") % n)
- else:
- raise util.Abort(_("tag '%s' is not a local tag") % n)
- rev_ = nullid
- if not message:
- # we don't translate commit messages
- message = 'Removed tag %s' % ', '.join(names)
- elif not opts.get('force'):
- for n in names:
- if n in repo.tags():
- raise util.Abort(_("tag '%s' already exists "
- "(use -f to force)") % n)
- if not opts.get('local'):
- p1, p2 = repo.dirstate.parents()
- if p2 != nullid:
- raise util.Abort(_('uncommitted merge'))
- bheads = repo.branchheads()
- if not opts.get('force') and bheads and p1 not in bheads:
- raise util.Abort(_('not at a branch head (use -f to force)'))
- r = scmutil.revsingle(repo, rev_).node()
+ rev_ = "."
+ names = [t.strip() for t in (name1,) + names]
+ if len(names) != len(set(names)):
+ raise util.Abort(_('tag names must be unique'))
+ for n in names:
+ if n in ['tip', '.', 'null']:
+ raise util.Abort(_("the name '%s' is reserved") % n)
+ if not n:
+ raise util.Abort(_('tag names cannot consist entirely of whitespace'))
+ if opts.get('rev') and opts.get('remove'):
+ raise util.Abort(_("--rev and --remove are incompatible"))
+ if opts.get('rev'):
+ rev_ = opts['rev']
+ message = opts.get('message')
+ if opts.get('remove'):
+ expectedtype = opts.get('local') and 'local' or 'global'
+ for n in names:
+ if not repo.tagtype(n):
+ raise util.Abort(_("tag '%s' does not exist") % n)
+ if repo.tagtype(n) != expectedtype:
+ if expectedtype == 'global':
+ raise util.Abort(_("tag '%s' is not a global tag") % n)
+ else:
+ raise util.Abort(_("tag '%s' is not a local tag") % n)
+ rev_ = nullid
if not message:
# we don't translate commit messages
- message = ('Added tag %s for changeset %s' %
- (', '.join(names), short(r)))
+ message = 'Removed tag %s' % ', '.join(names)
+ elif not opts.get('force'):
+ for n in names:
+ if n in repo.tags():
+ raise util.Abort(_("tag '%s' already exists "
+ "(use -f to force)") % n)
+ if not opts.get('local'):
+ p1, p2 = repo.dirstate.parents()
+ if p2 != nullid:
+ raise util.Abort(_('uncommitted merge'))
+ bheads = repo.branchheads()
+ if not opts.get('force') and bheads and p1 not in bheads:
+ raise util.Abort(_('not at a branch head (use -f to force)'))
+ r = scmutil.revsingle(repo, rev_).node()
- date = opts.get('date')
- if date:
- date = util.parsedate(date)
+ if not message:
+ # we don't translate commit messages
+ message = ('Added tag %s for changeset %s' %
+ (', '.join(names), short(r)))
- if opts.get('edit'):
- message = ui.edit(message, ui.username())
+ date = opts.get('date')
+ if date:
+ date = util.parsedate(date)
- # don't allow tagging the null rev
- if (not opts.get('remove') and
- scmutil.revsingle(repo, rev_).rev() == nullrev):
- raise util.Abort(_("null revision specified"))
+ if opts.get('edit'):
+ message = ui.edit(message, ui.username())
- repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
- finally:
- release(lock, wlock)
+ repo.tag(names, r, message, opts.get('local'), opts.get('user'), date)
@command('tags', [], '')
def tags(ui, repo):
@@ -5691,22 +4988,19 @@ def tags(ui, repo):
for t, n in reversed(repo.tagslist()):
if ui.quiet:
- ui.write("%s\n" % t, label='tags.normal')
+ ui.write("%s\n" % t)
continue
hn = hexfunc(n)
r = "%5d:%s" % (repo.changelog.rev(n), hn)
- rev = ui.label(r, 'log.changeset')
spaces = " " * (30 - encoding.colwidth(t))
- tag = ui.label(t, 'tags.normal')
if ui.verbose:
if repo.tagtype(t) == 'local':
tagtype = " local"
- tag = ui.label(t, 'tags.local')
else:
tagtype = ""
- ui.write("%s%s %s%s\n" % (tag, spaces, rev, tagtype))
+ ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype))
@command('tip',
[('p', 'patch', None, _('show patch')),
@@ -5751,10 +5045,11 @@ def unbundle(ui, repo, fname1, *fnames, **opts):
for fname in fnames:
f = url.open(ui, fname)
gen = changegroup.readbundle(f, fname)
- modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname)
+ modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname,
+ lock=lock)
+ bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
finally:
lock.release()
- bookmarks.updatecurrentbookmark(repo, wc.node(), wc.branch())
return postincoming(ui, repo, modheads, opts.get('update'), None)
@command('^update|up|checkout|co',
@@ -5769,40 +5064,34 @@ def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
Update the repository's working directory to the specified
changeset. If no changeset is specified, update to the tip of the
- current named branch and move the current bookmark (see :hg:`help
- bookmarks`).
+ current named branch.
- Update sets the working directory's parent revison to the specified
- changeset (see :hg:`help parents`).
-
- If the changeset is not a descendant or ancestor of the working
- directory's parent, the update is aborted. With the -c/--check
- option, the working directory is checked for uncommitted changes; if
- none are found, the working directory is updated to the specified
+ If the changeset is not a descendant of the working directory's
+ parent, the update is aborted. With the -c/--check option, the
+ working directory is checked for uncommitted changes; if none are
+ found, the working directory is updated to the specified
changeset.
- .. container:: verbose
-
- The following rules apply when the working directory contains
- uncommitted changes:
+ Update sets the working directory's parent revison to the specified
+ changeset (see :hg:`help parents`).
- 1. If neither -c/--check nor -C/--clean is specified, and if
- the requested changeset is an ancestor or descendant of
- the working directory's parent, the uncommitted changes
- are merged into the requested changeset and the merged
- result is left uncommitted. If the requested changeset is
- not an ancestor or descendant (that is, it is on another
- branch), the update is aborted and the uncommitted changes
- are preserved.
+ The following rules apply when the working directory contains
+ uncommitted changes:
- 2. With the -c/--check option, the update is aborted and the
- uncommitted changes are preserved.
+ 1. If neither -c/--check nor -C/--clean is specified, and if
+ the requested changeset is an ancestor or descendant of
+ the working directory's parent, the uncommitted changes
+ are merged into the requested changeset and the merged
+ result is left uncommitted. If the requested changeset is
+ not an ancestor or descendant (that is, it is on another
+ branch), the update is aborted and the uncommitted changes
+ are preserved.
- 3. With the -C/--clean option, uncommitted changes are discarded and
- the working directory is updated to the requested changeset.
+ 2. With the -c/--check option, the update is aborted and the
+ uncommitted changes are preserved.
- To cancel an uncommitted merge (and lose your changes), use
- :hg:`update --clean .`.
+ 3. With the -C/--clean option, uncommitted changes are discarded and
+ the working directory is updated to the requested changeset.
Use null as the changeset to remove the working directory (like
:hg:`clone -U`).
@@ -5820,11 +5109,6 @@ def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
if rev is None or rev == '':
rev = node
- # with no argument, we also move the current bookmark, if any
- movemarkfrom = None
- if rev is None or node == '':
- movemarkfrom = repo['.'].node()
-
# if we defined a bookmark, we have to remember the original bookmark name
brev = rev
rev = scmutil.revsingle(repo, rev, rev).rev()
@@ -5832,31 +5116,24 @@ def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False):
if check and clean:
raise util.Abort(_("cannot specify both -c/--check and -C/--clean"))
+ if check:
+ # we could use dirty() but we can ignore merge and branch trivia
+ c = repo[None]
+ if c.modified() or c.added() or c.removed():
+ raise util.Abort(_("uncommitted local changes"))
+
if date:
if rev is not None:
raise util.Abort(_("you can't specify a revision and a date"))
rev = cmdutil.finddate(ui, repo, date)
- if check:
- c = repo[None]
- if c.dirty(merge=False, branch=False):
- raise util.Abort(_("uncommitted local changes"))
- if rev is None:
- rev = repo[repo[None].branch()].rev()
- mergemod._checkunknown(repo, repo[None], repo[rev])
-
- if clean:
+ if clean or check:
ret = hg.clean(repo, rev)
else:
ret = hg.update(repo, rev)
- if not ret and movemarkfrom:
- if bookmarks.update(repo, [movemarkfrom], repo['.'].node()):
- ui.status(_("updating bookmark %s\n") % repo._bookmarkcurrent)
- elif brev in repo._bookmarks:
+ if brev in repo._bookmarks:
bookmarks.setcurrent(repo, brev)
- elif brev:
- bookmarks.unsetcurrent(repo)
return ret
@@ -5882,7 +5159,7 @@ def version_(ui):
% util.version())
ui.status(_(
"(see http://mercurial.selenic.com for more information)\n"
- "\nCopyright (C) 2005-2012 Matt Mackall and others\n"
+ "\nCopyright (C) 2005-2011 Matt Mackall and others\n"
"This is free software; see the source for copying conditions. "
"There is NO\nwarranty; "
"not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n"
diff --git a/mercurial/commandserver.py b/mercurial/commandserver.py
index 4cd1583..25b1f5e 100644
--- a/mercurial/commandserver.py
+++ b/mercurial/commandserver.py
@@ -142,8 +142,8 @@ class server(object):
else:
logfile = open(logpath, 'a')
- # the ui here is really the repo ui so take its baseui so we don't end
- # up with its local configuration
+ # the ui here is really the repo ui so take its baseui so we don't end up
+ # with its local configuration
self.ui = repo.baseui
self.repo = repo
self.repoui = repo.ui
@@ -166,7 +166,7 @@ class server(object):
# is the other end closed?
if not data:
- raise EOFError
+ raise EOFError()
return data
@@ -185,8 +185,6 @@ class server(object):
copiedui = self.ui.copy()
self.repo.baseui = copiedui
self.repo.ui = self.repo.dirstate._ui = self.repoui.copy()
- self.repo.invalidate()
- self.repo.invalidatedirstate()
req = dispatch.request(args[:], copiedui, self.repo, self.cin,
self.cout, self.cerr)
diff --git a/mercurial/config.py b/mercurial/config.py
index 9906d6e..2556ef1 100644
--- a/mercurial/config.py
+++ b/mercurial/config.py
@@ -7,7 +7,7 @@
from i18n import _
import error, util
-import os, errno
+import re, os, errno
class sortdict(dict):
'a simple sorted dictionary'
@@ -35,10 +35,6 @@ class sortdict(dict):
def __delitem__(self, key):
dict.__delitem__(self, key)
self._list.remove(key)
- def keys(self):
- return self._list
- def iterkeys(self):
- return self._list.__iter__()
class config(object):
def __init__(self, data=None):
@@ -65,19 +61,6 @@ class config(object):
self._source.update(src._source)
def get(self, section, item, default=None):
return self._data.get(section, {}).get(item, default)
-
- def backup(self, section, item):
- """return a tuple allowing restore to reinstall a previous valuesi
-
- The main reason we need it is because it handle the "no data" case.
- """
- try:
- value = self._data[section][item]
- source = self.source(section, item)
- return (section, item, value, source)
- except KeyError:
- return (section, item)
-
def source(self, section, item):
return self._source.get((section, item), "")
def sections(self):
@@ -90,28 +73,14 @@ class config(object):
self._data[section][item] = value
self._source[(section, item)] = source
- def restore(self, data):
- """restore data returned by self.backup"""
- if len(data) == 4:
- # restore old data
- section, item, value, source = data
- self._data[section][item] = value
- self._source[(section, item)] = source
- else:
- # no data before, remove everything
- section, item = data
- if section in self._data:
- del self._data[section][item]
- self._source.pop((section, item), None)
-
def parse(self, src, data, sections=None, remap=None, include=None):
- sectionre = util.compilere(r'\[([^\[]+)\]')
- itemre = util.compilere(r'([^=\s][^=]*?)\s*=\s*(.*\S|)')
- contre = util.compilere(r'\s+(\S|\S.*\S)\s*$')
- emptyre = util.compilere(r'(;|#|\s*$)')
- commentre = util.compilere(r'(;|#)')
- unsetre = util.compilere(r'%unset\s+(\S+)')
- includere = util.compilere(r'%include\s+(\S|\S.*\S)\s*$')
+ sectionre = re.compile(r'\[([^\[]+)\]')
+ itemre = re.compile(r'([^=\s][^=]*?)\s*=\s*(.*\S|)')
+ contre = re.compile(r'\s+(\S|\S.*\S)\s*$')
+ emptyre = re.compile(r'(;|#|\s*$)')
+ commentre = re.compile(r'(;|#)')
+ unsetre = re.compile(r'%unset\s+(\S+)')
+ includere = re.compile(r'%include\s+(\S|\S.*\S)\s*$')
section = ""
item = None
line = 0
@@ -119,9 +88,6 @@ class config(object):
for l in data.splitlines(True):
line += 1
- if line == 1 and l.startswith('\xef\xbb\xbf'):
- # Someone set us up the BOM
- l = l[3:]
if cont:
if commentre.match(l):
continue
diff --git a/mercurial/context.py b/mercurial/context.py
index 88ea3e4..d1c195b 100644
--- a/mercurial/context.py
+++ b/mercurial/context.py
@@ -5,10 +5,9 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-from node import nullid, nullrev, short, hex, bin
+from node import nullid, nullrev, short, hex
from i18n import _
-import ancestor, mdiff, error, util, scmutil, subrepo, patch, encoding, phases
-import copies
+import ancestor, bdiff, error, util, scmutil, subrepo, patch, encoding
import match as matchmod
import os, errno, stat
@@ -22,88 +21,12 @@ class changectx(object):
if changeid == '':
changeid = '.'
self._repo = repo
-
- if isinstance(changeid, int):
+ if isinstance(changeid, (long, int)):
self._rev = changeid
- self._node = repo.changelog.node(changeid)
- return
- if isinstance(changeid, long):
- changeid = str(changeid)
- if changeid == '.':
- self._node = repo.dirstate.p1()
- self._rev = repo.changelog.rev(self._node)
- return
- if changeid == 'null':
- self._node = nullid
- self._rev = nullrev
- return
- if changeid == 'tip':
- self._rev = len(repo.changelog) - 1
- self._node = repo.changelog.node(self._rev)
- return
- if len(changeid) == 20:
- try:
- self._node = changeid
- self._rev = repo.changelog.rev(changeid)
- return
- except LookupError:
- pass
-
- try:
- r = int(changeid)
- if str(r) != changeid:
- raise ValueError
- l = len(repo.changelog)
- if r < 0:
- r += l
- if r < 0 or r >= l:
- raise ValueError
- self._rev = r
- self._node = repo.changelog.node(r)
- return
- except (ValueError, OverflowError):
- pass
-
- if len(changeid) == 40:
- try:
- self._node = bin(changeid)
- self._rev = repo.changelog.rev(self._node)
- return
- except (TypeError, LookupError):
- pass
-
- if changeid in repo._bookmarks:
- self._node = repo._bookmarks[changeid]
- self._rev = repo.changelog.rev(self._node)
- return
- if changeid in repo._tagscache.tags:
- self._node = repo._tagscache.tags[changeid]
- self._rev = repo.changelog.rev(self._node)
- return
- try:
- self._node = repo.branchtip(changeid)
- self._rev = repo.changelog.rev(self._node)
- return
- except error.RepoLookupError:
- pass
-
- self._node = repo.changelog._partialmatch(changeid)
- if self._node is not None:
- self._rev = repo.changelog.rev(self._node)
- return
-
- # lookup failed
- # check if it might have come from damaged dirstate
- if changeid in repo.dirstate.parents():
- raise error.Abort(_("working directory has unknown parent '%s'!")
- % short(changeid))
- try:
- if len(changeid) == 20:
- changeid = hex(changeid)
- except TypeError:
- pass
- raise error.RepoLookupError(
- _("unknown revision '%s'") % changeid)
+ self._node = self._repo.changelog.node(changeid)
+ else:
+ self._node = self._repo.lookup(changeid)
+ self._rev = self._repo.changelog.rev(self._node)
def __str__(self):
return short(self.node())
@@ -134,7 +57,7 @@ class changectx(object):
@propertycache
def _changeset(self):
- return self._repo.changelog.read(self.rev())
+ return self._repo.changelog.read(self.node())
@propertycache
def _manifest(self):
@@ -188,22 +111,14 @@ class changectx(object):
return self._changeset[4]
def branch(self):
return encoding.tolocal(self._changeset[5].get("branch"))
- def closesbranch(self):
- return 'close' in self._changeset[5]
def extra(self):
return self._changeset[5]
def tags(self):
return self._repo.nodetags(self._node)
def bookmarks(self):
return self._repo.nodebookmarks(self._node)
- def phase(self):
- return self._repo._phasecache.phase(self._repo, self._rev)
- def phasestr(self):
- return phases.phasenames[self.phase()]
- def mutable(self):
- return self.phase() > phases.public
def hidden(self):
- return self._rev in self._repo.hiddenrevs
+ return self._rev in self._repo.changelog.hiddenrevs
def parents(self):
"""return contexts for each parent changeset"""
@@ -223,48 +138,13 @@ class changectx(object):
return [changectx(self._repo, x) for x in c]
def ancestors(self):
- for a in self._repo.changelog.ancestors([self._rev]):
+ for a in self._repo.changelog.ancestors(self._rev):
yield changectx(self._repo, a)
def descendants(self):
- for d in self._repo.changelog.descendants([self._rev]):
+ for d in self._repo.changelog.descendants(self._rev):
yield changectx(self._repo, d)
- def obsolete(self):
- """True if the changeset is obsolete"""
- return (self.node() in self._repo.obsstore.precursors
- and self.phase() > phases.public)
-
- def extinct(self):
- """True if the changeset is extinct"""
- # We should just compute a cache a check againts it.
- # see revset implementation for details
- #
- # But this naive implementation does not require cache
- if self.phase() <= phases.public:
- return False
- if not self.obsolete():
- return False
- for desc in self.descendants():
- if not desc.obsolete():
- return False
- return True
-
- def unstable(self):
- """True if the changeset is not obsolete but it's ancestor are"""
- # We should just compute /(obsolete()::) - obsolete()/
- # and keep it in a cache.
- #
- # But this naive implementation does not require cache
- if self.phase() <= phases.public:
- return False
- if self.obsolete():
- return False
- for anc in self.ancestors():
- if anc.obsolete():
- return True
- return False
-
def _fileinfo(self, path):
if '_manifest' in self.__dict__:
try:
@@ -274,8 +154,7 @@ class changectx(object):
_('not found in manifest'))
if '_manifestdelta' in self.__dict__ or path in self.files():
if path in self._manifestdelta:
- return (self._manifestdelta[path],
- self._manifestdelta.flags(path))
+ return self._manifestdelta[path], self._manifestdelta.flags(path)
node, flag = self._repo.manifest.find(self._changeset[0], path)
if not node:
raise error.LookupError(self._node, path,
@@ -316,15 +195,14 @@ class changectx(object):
# follow that here, too
fset.discard('.')
for fn in self:
- if fn in fset:
- # specified pattern is the exact name
- fset.remove(fn)
+ for ffn in fset:
+ # match if the file is the exact name or a directory
+ if ffn == fn or fn.startswith("%s/" % ffn):
+ fset.remove(ffn)
+ break
if match(fn):
yield fn
for fn in sorted(fset):
- if fn in self._dirs:
- # specified pattern is a directory
- continue
if match.bad(fn, _('no such file in rev %s') % self) and match(fn):
yield fn
@@ -347,22 +225,6 @@ class changectx(object):
return patch.diff(self._repo, ctx2.node(), self.node(),
match=match, opts=diffopts)
- @propertycache
- def _dirs(self):
- dirs = set()
- for f in self._manifest:
- pos = f.rfind('/')
- while pos != -1:
- f = f[:pos]
- if f in dirs:
- break # dirs already contains this and above
- dirs.add(f)
- pos = f.rfind('/')
- return dirs
-
- def dirs(self):
- return self._dirs
-
class filectx(object):
"""A filecontext object makes access to data related to a particular
filerevision convenient."""
@@ -501,22 +363,12 @@ class filectx(object):
def size(self):
return self._filelog.size(self._filerev)
- def isbinary(self):
- try:
- return util.binary(self.data())
- except IOError:
- return False
-
def cmp(self, fctx):
"""compare with other file context
returns True if different than fctx.
"""
- if (fctx._filerev is None
- and (self._repo._encodefilterpats
- # if file data starts with '\1\n', empty metadata block is
- # prepended, which adds 4 bytes to filelog.size().
- or self.size() - 4 == fctx.size())
+ if (fctx._filerev is None and self._repo._encodefilterpats
or self.size() == fctx.size()):
return self._filelog.cmp(self._filenode, fctx.data())
@@ -574,7 +426,7 @@ class filectx(object):
return [filectx(self._repo, self._path, fileid=x,
filelog=self._filelog) for x in c]
- def annotate(self, follow=False, linenumber=None, diffopts=None):
+ def annotate(self, follow=False, linenumber=None):
'''returns a list of tuples of (ctx, line) for each line
in the file, where ctx is the filectx of the node where
that line was last changed.
@@ -601,13 +453,8 @@ class filectx(object):
without_linenumber)
def pair(parent, child):
- blocks = mdiff.allblocks(parent[1], child[1], opts=diffopts,
- refine=True)
- for (a1, a2, b1, b2), t in blocks:
- # Changed blocks ('!') or blocks made only of blank lines ('~')
- # belong to the child.
- if t == '=':
- child[0][b1:b2] = parent[0][a1:a2]
+ for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]):
+ child[0][b1:b2] = parent[0][a1:a2]
return child
getlog = util.lrucachefunc(lambda x: self._repo.file(x))
@@ -672,27 +519,27 @@ class filectx(object):
return zip(hist[base][0], hist[base][1].splitlines(True))
- def ancestor(self, fc2, actx):
+ def ancestor(self, fc2, actx=None):
"""
find the common ancestor file context, if any, of self, and fc2
- actx must be the changectx of the common ancestor
+ If actx is given, it must be the changectx of the common ancestor
of self's and fc2's respective changesets.
"""
+ if actx is None:
+ actx = self.changectx().ancestor(fc2.changectx())
+
+ # the trivial case: changesets are unrelated, files must be too
+ if not actx:
+ return None
+
# the easy case: no (relevant) renames
if fc2.path() == self.path() and self.path() in actx:
return actx[self.path()]
-
- # the next easiest cases: unambiguous predecessor (name trumps
- # history)
- if self.path() in actx and fc2.path() not in actx:
- return actx[self.path()]
- if fc2.path() in actx and self.path() not in actx:
- return actx[fc2.path()]
+ acache = {}
# prime the ancestor cache for the working directory
- acache = {}
for c in (self, fc2):
if c._filerev is None:
pl = [(n.path(), n.filenode()) for n in c.parents()]
@@ -721,26 +568,17 @@ class filectx(object):
return None
- def ancestors(self, followfirst=False):
+ def ancestors(self):
visit = {}
c = self
- cut = followfirst and 1 or None
while True:
- for parent in c.parents()[:cut]:
+ for parent in c.parents():
visit[(parent.rev(), parent.node())] = parent
if not visit:
break
c = visit.pop(max(visit))
yield c
- def copies(self, c2):
- if not util.safehasattr(self, "_copycache"):
- self._copycache = {}
- sc2 = str(c2)
- if sc2 not in self._copycache:
- self._copycache[sc2] = copies.pathcopies(c2)
- return self._copycache[sc2]
-
class workingctx(changectx):
"""A workingctx object makes access to data related to
the current working directory convenient.
@@ -794,47 +632,15 @@ class workingctx(changectx):
def __contains__(self, key):
return self._repo.dirstate[key] not in "?r"
- def _buildflagfunc(self):
- # Create a fallback function for getting file flags when the
- # filesystem doesn't support them
-
- copiesget = self._repo.dirstate.copies().get
-
- if len(self._parents) < 2:
- # when we have one parent, it's easy: copy from parent
- man = self._parents[0].manifest()
- def func(f):
- f = copiesget(f, f)
- return man.flags(f)
- else:
- # merges are tricky: we try to reconstruct the unstored
- # result from the merge (issue1802)
- p1, p2 = self._parents
- pa = p1.ancestor(p2)
- m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
-
- def func(f):
- f = copiesget(f, f) # may be wrong for merges with copies
- fl1, fl2, fla = m1.flags(f), m2.flags(f), ma.flags(f)
- if fl1 == fl2:
- return fl1
- if fl1 == fla:
- return fl2
- if fl2 == fla:
- return fl1
- return '' # punt for conflicts
-
- return func
-
- @propertycache
- def _flagfunc(self):
- return self._repo.dirstate.flagfunc(self._buildflagfunc)
-
@propertycache
def _manifest(self):
"""generate a manifest corresponding to the working directory"""
+ if self._unknown is None:
+ self.status(unknown=True)
+
man = self._parents[0].manifest().copy()
+ copied = self._repo.dirstate.copies()
if len(self._parents) > 1:
man2 = self.p2().manifest()
def getman(f):
@@ -843,11 +649,13 @@ class workingctx(changectx):
return man2
else:
getman = lambda f: man
-
- copied = self._repo.dirstate.copies()
- ff = self._flagfunc
+ def cf(f):
+ f = copied.get(f, f)
+ return getman(f).flags(f)
+ ff = self._repo.dirstate.flagfunc(cf)
modified, added, removed, deleted = self._status
- for i, l in (("a", added), ("m", modified)):
+ unknown = self._unknown
+ for i, l in (("a", added), ("m", modified), ("u", unknown)):
for f in l:
orig = copied.get(f, f)
man[f] = getman(orig).get(orig, nullid) + i
@@ -934,8 +742,6 @@ class workingctx(changectx):
return self._clean
def branch(self):
return encoding.tolocal(self._extra['branch'])
- def closesbranch(self):
- return 'close' in self._extra
def extra(self):
return self._extra
@@ -951,15 +757,6 @@ class workingctx(changectx):
b.extend(p.bookmarks())
return b
- def phase(self):
- phase = phases.draft # default phase to draft
- for p in self.parents():
- phase = max(phase, p.phase())
- return phase
-
- def hidden(self):
- return False
-
def children(self):
return []
@@ -970,10 +767,23 @@ class workingctx(changectx):
except KeyError:
return ''
- try:
- return self._flagfunc(path)
- except OSError:
+ orig = self._repo.dirstate.copies().get(path, path)
+
+ def findflag(ctx):
+ mnode = ctx.changeset()[0]
+ node, flag = self._repo.manifest.find(mnode, orig)
+ ff = self._repo.dirstate.flagfunc(lambda x: flag or '')
+ try:
+ return ff(path)
+ except OSError:
+ pass
+
+ flag = findflag(self._parents[0])
+ if flag is None and len(self.parents()) > 1:
+ flag = findflag(self._parents[1])
+ if flag is None or self._repo.dirstate[path] == 'r':
return ''
+ return flag
def filectx(self, path, filelog=None):
"""get a file context from the working directory"""
@@ -988,15 +798,14 @@ class workingctx(changectx):
return sorted(self._repo.dirstate.walk(match, self.substate.keys(),
True, False))
- def dirty(self, missing=False, merge=True, branch=True):
+ def dirty(self, missing=False):
"check whether a working directory is modified"
# check subrepos first
for s in self.substate:
if self.sub(s).dirty():
return True
# check current working dir
- return ((merge and self.p2()) or
- (branch and self.branch() != self.p1().branch()) or
+ return (self.p2() or self.branch() != self.p1().branch() or
self.modified() or self.added() or self.removed() or
(missing and self.deleted()))
@@ -1035,26 +844,22 @@ class workingctx(changectx):
finally:
wlock.release()
- def forget(self, files, prefix=""):
- join = lambda f: os.path.join(prefix, f)
+ def forget(self, files):
wlock = self._repo.wlock()
try:
- rejected = []
for f in files:
- if f not in self._repo.dirstate:
- self._repo.ui.warn(_("%s not tracked!\n") % join(f))
- rejected.append(f)
- elif self._repo.dirstate[f] != 'a':
+ if self._repo.dirstate[f] != 'a':
self._repo.dirstate.remove(f)
+ elif f not in self._repo.dirstate:
+ self._repo.ui.warn(_("%s not tracked!\n") % f)
else:
self._repo.dirstate.drop(f)
- return rejected
finally:
wlock.release()
def ancestors(self):
for a in self._repo.changelog.ancestors(
- [p.rev() for p in self._parents]):
+ *[p.rev() for p in self._parents]):
yield changectx(self._repo, a)
def undelete(self, list):
@@ -1088,9 +893,6 @@ class workingctx(changectx):
finally:
wlock.release()
- def dirs(self):
- return set(self._repo.dirstate.dirs())
-
class workingfilectx(filectx):
"""A workingfilectx object makes access to data related to a particular
file in the working directory convenient."""
diff --git a/mercurial/copies.py b/mercurial/copies.py
index 90aa036..abd16fa 100644
--- a/mercurial/copies.py
+++ b/mercurial/copies.py
@@ -18,6 +18,15 @@ def _dirname(f):
return ""
return f[:s]
+def _dirs(files):
+ d = set()
+ for f in files:
+ f = _dirname(f)
+ while f not in d:
+ d.add(f)
+ f = _dirname(f)
+ return d
+
def _findlimit(repo, a, b):
"""Find the earliest revision that's an ancestor of a or b but not both,
None if no such revision exists.
@@ -75,124 +84,22 @@ def _findlimit(repo, a, b):
return None
return limit
-def _chain(src, dst, a, b):
- '''chain two sets of copies a->b'''
- t = a.copy()
- for k, v in b.iteritems():
- if v in t:
- # found a chain
- if t[v] != k:
- # file wasn't renamed back to itself
- t[k] = t[v]
- if v not in dst:
- # chain was a rename, not a copy
- del t[v]
- if v in src:
- # file is a copy of an existing file
- t[k] = v
-
- # remove criss-crossed copies
- for k, v in t.items():
- if k in src and v in dst:
- del t[k]
-
- return t
-
-def _tracefile(fctx, actx):
- '''return file context that is the ancestor of fctx present in actx'''
- stop = actx.rev()
- am = actx.manifest()
-
- for f in fctx.ancestors():
- if am.get(f.path(), None) == f.filenode():
- return f
- if f.rev() < stop:
- return None
-
-def _dirstatecopies(d):
- ds = d._repo.dirstate
- c = ds.copies().copy()
- for k in c.keys():
- if ds[k] not in 'anm':
- del c[k]
- return c
-
-def _forwardcopies(a, b):
- '''find {dst@b: src@a} copy mapping where a is an ancestor of b'''
-
- # check for working copy
- w = None
- if b.rev() is None:
- w = b
- b = w.p1()
- if a == b:
- # short-circuit to avoid issues with merge states
- return _dirstatecopies(w)
-
- # find where new files came from
- # we currently don't try to find where old files went, too expensive
- # this means we can miss a case like 'hg rm b; hg cp a b'
- cm = {}
- for f in b:
- if f not in a:
- ofctx = _tracefile(b[f], a)
- if ofctx:
- cm[f] = ofctx.path()
-
- # combine copies from dirstate if necessary
- if w is not None:
- cm = _chain(a, w, cm, _dirstatecopies(w))
-
- return cm
-
-def _backwardcopies(a, b):
- # because the forward mapping is 1:n, we can lose renames here
- # in particular, we find renames better than copies
- f = _forwardcopies(b, a)
- r = {}
- for k, v in f.iteritems():
- r[v] = k
- return r
-
-def pathcopies(x, y):
- '''find {dst@y: src@x} copy mapping for directed compare'''
- if x == y or not x or not y:
- return {}
- a = y.ancestor(x)
- if a == x:
- return _forwardcopies(x, y)
- if a == y:
- return _backwardcopies(x, y)
- return _chain(x, y, _backwardcopies(x, a), _forwardcopies(a, y))
-
-def mergecopies(repo, c1, c2, ca):
+def copies(repo, c1, c2, ca, checkdirs=False):
"""
- Find moves and copies between context c1 and c2 that are relevant
- for merging.
-
- Returns two dicts, "copy" and "diverge".
-
- "copy" is a mapping from destination name -> source name,
- where source is in c1 and destination is in c2 or vice-versa.
-
- "diverge" is a mapping of source name -> list of destination names
- for divergent renames.
-
- "renamedelete" is a mapping of source name -> list of destination
- names for files deleted in c1 that were renamed in c2 or vice-versa.
+ Find moves and copies between context c1 and c2
"""
# avoid silly behavior for update from empty dir
if not c1 or not c2 or c1 == c2:
- return {}, {}, {}
+ return {}, {}
# avoid silly behavior for parent -> working dir
if c2.node() is None and c1.node() == repo.dirstate.p1():
- return repo.dirstate.copies(), {}, {}
+ return repo.dirstate.copies(), {}
limit = _findlimit(repo, c1.rev(), c2.rev())
if limit is None:
# no common ancestor, no copies
- return {}, {}, {}
+ return {}, {}
m1 = c1.manifest()
m2 = c2.manifest()
ma = ca.manifest()
@@ -286,43 +193,31 @@ def mergecopies(repo, c1, c2, ca):
for f in u2:
checkcopies(f, m2, m1)
- renamedelete = {}
- renamedelete2 = set()
diverge2 = set()
for of, fl in diverge.items():
- if len(fl) == 1 or of in c1 or of in c2:
+ if len(fl) == 1 or of in c2:
del diverge[of] # not actually divergent, or not a rename
- if of not in c1 and of not in c2:
- # renamed on one side, deleted on the other side, but filter
- # out files that have been renamed and then deleted
- renamedelete[of] = [f for f in fl if f in c1 or f in c2]
- renamedelete2.update(fl) # reverse map for below
else:
diverge2.update(fl) # reverse map for below
if fullcopy:
- repo.ui.debug(" all copies found (* = to merge, ! = divergent, "
- "% = renamed and deleted):\n")
+ repo.ui.debug(" all copies found (* = to merge, ! = divergent):\n")
for f in fullcopy:
note = ""
if f in copy:
note += "*"
if f in diverge2:
note += "!"
- if f in renamedelete2:
- note += "%"
repo.ui.debug(" %s -> %s %s\n" % (f, fullcopy[f], note))
del diverge2
- if not fullcopy:
- return copy, diverge, renamedelete
+ if not fullcopy or not checkdirs:
+ return copy, diverge
repo.ui.debug(" checking for directory renames\n")
# generate a directory move map
- d1, d2 = c1.dirs(), c2.dirs()
- d1.add('')
- d2.add('')
+ d1, d2 = _dirs(m1), _dirs(m2)
invalid = set()
dirmove = {}
@@ -352,7 +247,7 @@ def mergecopies(repo, c1, c2, ca):
del d1, d2, invalid
if not dirmove:
- return copy, diverge, renamedelete
+ return copy, diverge
for d in dirmove:
repo.ui.debug(" dir %s -> %s\n" % (d, dirmove[d]))
@@ -369,4 +264,4 @@ def mergecopies(repo, c1, c2, ca):
repo.ui.debug(" file %s -> %s\n" % (f, copy[f]))
break
- return copy, diverge, renamedelete
+ return copy, diverge
diff --git a/mercurial/dagparser.py b/mercurial/dagparser.py
index 92bd4f0..e02faa5 100644
--- a/mercurial/dagparser.py
+++ b/mercurial/dagparser.py
@@ -268,8 +268,7 @@ def parsedag(desc):
s += c
i += 1
c = nextch()
- raise util.Abort(_('invalid character in dag description: '
- '%s...') % s)
+ raise util.Abort(_("invalid character in dag description: %s...") % s)
def dagtextlines(events,
addspaces=True,
@@ -437,9 +436,7 @@ def dagtext(dag,
>>> dagtext([('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))])
'+1 @ann +1'
- >>> dagtext([('n', (0, [-1])),
- ... ('a', 'my annotation'),
- ... ('n', (1, [0]))])
+ >>> dagtext([('n', (0, [-1])), ('a', 'my annotation'), ('n', (1, [0]))])
'+1 @"my annotation" +1'
Commands:
@@ -450,9 +447,7 @@ def dagtext(dag,
>>> dagtext([('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))])
'+1 !"my command" +1'
- >>> dagtext([('n', (0, [-1])),
- ... ('C', 'my command line'),
- ... ('n', (1, [0]))])
+ >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))])
'+1 !!my command line\\n+1'
Comments:
diff --git a/mercurial/dagutil.py b/mercurial/dagutil.py
index a9d8fb8..7908e63 100644
--- a/mercurial/dagutil.py
+++ b/mercurial/dagutil.py
@@ -26,25 +26,25 @@ class basedag(object):
def nodeset(self):
'''set of all node idxs'''
- raise NotImplementedError
+ raise NotImplementedError()
def heads(self):
'''list of head ixs'''
- raise NotImplementedError
+ raise NotImplementedError()
def parents(self, ix):
'''list of parents ixs of ix'''
- raise NotImplementedError
+ raise NotImplementedError()
def inverse(self):
'''inverse DAG, where parents becomes children, etc.'''
- raise NotImplementedError
+ raise NotImplementedError()
def ancestorset(self, starts, stops=None):
'''
set of all ancestors of starts (incl), but stop walk at stops (excl)
'''
- raise NotImplementedError
+ raise NotImplementedError()
def descendantset(self, starts, stops=None):
'''
@@ -59,7 +59,7 @@ class basedag(object):
By "connected list" we mean that if an ancestor and a descendant are in
the list, then so is at least one path connecting them.
'''
- raise NotImplementedError
+ raise NotImplementedError()
def externalize(self, ix):
'''return a list of (or set if given a set) of node ids'''
diff --git a/mercurial/demandimport.py b/mercurial/demandimport.py
index e439487..4f5d71e 100644
--- a/mercurial/demandimport.py
+++ b/mercurial/demandimport.py
@@ -27,17 +27,6 @@ These imports will not be delayed:
import __builtin__
_origimport = __import__
-nothing = object()
-
-try:
- _origimport(__builtin__.__name__, {}, {}, None, -1)
-except TypeError: # no level argument
- def _import(name, globals, locals, fromlist, level):
- "call _origimport with no level argument"
- return _origimport(name, globals, locals, fromlist)
-else:
- _import = _origimport
-
class _demandmod(object):
"""module demand-loader and proxy"""
def __init__(self, name, globals, locals):
@@ -61,7 +50,7 @@ class _demandmod(object):
h, t = p, None
if '.' in p:
h, t = p.split('.', 1)
- if getattr(mod, h, nothing) is nothing:
+ if not hasattr(mod, h):
setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__))
elif t:
subload(getattr(mod, h), t)
@@ -92,14 +81,20 @@ class _demandmod(object):
def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
if not locals or name in ignore or fromlist == ('*',):
# these cases we can't really delay
- return _import(name, globals, locals, fromlist, level)
+ if level == -1:
+ return _origimport(name, globals, locals, fromlist)
+ else:
+ return _origimport(name, globals, locals, fromlist, level)
elif not fromlist:
# import a [as b]
if '.' in name: # a.b
base, rest = name.split('.', 1)
# email.__init__ loading email.mime
if globals and globals.get('__name__', None) == base:
- return _import(name, globals, locals, fromlist, level)
+ if level != -1:
+ return _origimport(name, globals, locals, fromlist, level)
+ else:
+ return _origimport(name, globals, locals, fromlist)
# if a is already demand-loaded, add b to its submodule list
if base in locals:
if isinstance(locals[base], _demandmod):
@@ -114,12 +109,12 @@ def _demandimport(name, globals=None, locals=None, fromlist=None, level=-1):
mod = _origimport(name, globals, locals)
# recurse down the module chain
for comp in name.split('.')[1:]:
- if getattr(mod, comp, nothing) is nothing:
+ if not hasattr(mod, comp):
setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__))
mod = getattr(mod, comp)
for x in fromlist:
# set requested submodules for demand load
- if getattr(mod, x, nothing) is nothing:
+ if not hasattr(mod, x):
setattr(mod, x, _demandmod(x, mod.__dict__, locals))
return mod
@@ -142,8 +137,6 @@ ignore = [
# raise ImportError if x not defined
'__main__',
'_ssl', # conditional imports in the stdlib, issue1964
- 'rfc822',
- 'mimetools',
]
def enable():
@@ -153,3 +146,4 @@ def enable():
def disable():
"disable global demand-loading of modules"
__builtin__.__import__ = _origimport
+
diff --git a/mercurial/diffhelpers.c b/mercurial/diffhelpers.c
index b0fd766..aa2a275 100644
--- a/mercurial/diffhelpers.c
+++ b/mercurial/diffhelpers.c
@@ -20,14 +20,14 @@ static PyObject *diffhelpers_Error;
/* fixup the last lines of a and b when the patch has no newline at eof */
static void _fix_newline(PyObject *hunk, PyObject *a, PyObject *b)
{
- Py_ssize_t hunksz = PyList_Size(hunk);
+ int hunksz = PyList_Size(hunk);
PyObject *s = PyList_GET_ITEM(hunk, hunksz-1);
char *l = PyBytes_AsString(s);
- Py_ssize_t alen = PyList_Size(a);
- Py_ssize_t blen = PyList_Size(b);
+ int alen = PyList_Size(a);
+ int blen = PyList_Size(b);
char c = l[0];
PyObject *hline;
- Py_ssize_t sz = PyBytes_GET_SIZE(s);
+ int sz = PyBytes_GET_SIZE(s);
if (sz > 1 && l[sz-2] == '\r')
/* tolerate CRLF in last line */
@@ -57,12 +57,6 @@ fix_newline(PyObject *self, PyObject *args)
return Py_BuildValue("l", 0);
}
-#if (PY_VERSION_HEX < 0x02050000)
-static const char *addlines_format = "OOiiOO";
-#else
-static const char *addlines_format = "OOnnOO";
-#endif
-
/*
* read lines from fp into the hunk. The hunk is parsed into two arrays
* a and b. a gets the old state of the text, b gets the new state
@@ -74,14 +68,13 @@ addlines(PyObject *self, PyObject *args)
{
PyObject *fp, *hunk, *a, *b, *x;
- Py_ssize_t i;
- Py_ssize_t lena, lenb;
- Py_ssize_t num;
- Py_ssize_t todoa, todob;
+ int i;
+ int lena, lenb;
+ int num;
+ int todoa, todob;
char *s, c;
PyObject *l;
- if (!PyArg_ParseTuple(args, addlines_format,
- &fp, &hunk, &lena, &lenb, &a, &b))
+ if (!PyArg_ParseTuple(args, "OOiiOO", &fp, &hunk, &lena, &lenb, &a, &b))
return NULL;
while (1) {
@@ -134,15 +127,15 @@ testhunk(PyObject *self, PyObject *args)
PyObject *a, *b;
long bstart;
- Py_ssize_t alen, blen;
- Py_ssize_t i;
+ int alen, blen;
+ int i;
char *sa, *sb;
if (!PyArg_ParseTuple(args, "OOl", &a, &b, &bstart))
return NULL;
alen = PyList_Size(a);
blen = PyList_Size(b);
- if (alen > blen - bstart || bstart < 0) {
+ if (alen > blen - bstart) {
return Py_BuildValue("l", -1);
}
for (i = 0; i < alen; i++) {
diff --git a/mercurial/dirstate.py b/mercurial/dirstate.py
index e516d93..c8219b5 100644
--- a/mercurial/dirstate.py
+++ b/mercurial/dirstate.py
@@ -4,7 +4,6 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import errno
from node import nullid
from i18n import _
@@ -14,17 +13,6 @@ import cStringIO
_format = ">cllll"
propertycache = util.propertycache
-filecache = scmutil.filecache
-
-class repocache(filecache):
- """filecache for files in .hg/"""
- def join(self, obj, fname):
- return obj._opener.join(fname)
-
-class rootcache(filecache):
- """filecache for files in the repository root"""
- def join(self, obj, fname):
- return obj._join(fname)
def _finddirs(path):
pos = path.rfind('/')
@@ -61,9 +49,8 @@ class dirstate(object):
self._rootdir = os.path.join(root, '')
self._dirty = False
self._dirtypl = False
- self._lastnormaltime = 0
+ self._lastnormaltime = None
self._ui = ui
- self._filecache = {}
@propertycache
def _map(self):
@@ -81,19 +68,14 @@ class dirstate(object):
def _foldmap(self):
f = {}
for name in self._map:
- f[util.normcase(name)] = name
- for name in self._dirs:
- f[util.normcase(name)] = name
- f['.'] = '.' # prevents useless util.fspath() invocation
+ f[os.path.normcase(name)] = name
return f
- @repocache('branch')
+ @propertycache
def _branch(self):
try:
return self._opener.read("branch").strip() or "default"
- except IOError, inst:
- if inst.errno != errno.ENOENT:
- raise
+ except IOError:
return "default"
@propertycache
@@ -120,10 +102,7 @@ class dirstate(object):
_incdirs(dirs, f)
return dirs
- def dirs(self):
- return self._dirs
-
- @rootcache('.hgignore')
+ @propertycache
def _ignore(self):
files = [self._join('.hgignore')]
for name, path in self._ui.configitems("ui"):
@@ -152,19 +131,17 @@ class dirstate(object):
# it's safe because f is always a relative path
return self._rootdir + f
- def flagfunc(self, buildfallback):
- if self._checklink and self._checkexec:
- def f(x):
- p = self._join(x)
- if os.path.islink(p):
- return 'l'
- if util.isexec(p):
- return 'x'
- return ''
- return f
-
- fallback = buildfallback()
+ def flagfunc(self, fallback):
if self._checklink:
+ if self._checkexec:
+ def f(x):
+ p = self._join(x)
+ if os.path.islink(p):
+ return 'l'
+ if util.isexec(p):
+ return 'x'
+ return ''
+ return f
def f(x):
if os.path.islink(self._join(x)):
return 'l'
@@ -180,8 +157,7 @@ class dirstate(object):
return 'x'
return ''
return f
- else:
- return fallback
+ return fallback
def getcwd(self):
cwd = os.getcwd()
@@ -237,36 +213,14 @@ class dirstate(object):
return encoding.tolocal(self._branch)
def setparents(self, p1, p2=nullid):
- """Set dirstate parents to p1 and p2.
-
- When moving from two parents to one, 'm' merged entries a
- adjusted to normal and previous copy records discarded and
- returned by the call.
-
- See localrepo.setparents()
- """
self._dirty = self._dirtypl = True
- oldp2 = self._pl[1]
self._pl = p1, p2
- copies = {}
- if oldp2 != nullid and p2 == nullid:
- # Discard 'm' markers when moving away from a merge state
- for f, s in self._map.iteritems():
- if s[0] == 'm':
- if f in self._copymap:
- copies[f] = self._copymap[f]
- self.normallookup(f)
- return copies
def setbranch(self, branch):
if branch in ['tip', '.', 'null']:
raise util.Abort(_('the name \'%s\' is reserved') % branch)
self._branch = encoding.fromlocal(branch)
- f = self._opener('branch', 'w', atomictemp=True)
- try:
- f.write(self._branch + '\n')
- finally:
- f.close()
+ self._opener.write("branch", self._branch + '\n')
def _read(self):
self._map = {}
@@ -289,7 +243,7 @@ class dirstate(object):
"_ignore"):
if a in self.__dict__:
delattr(self, a)
- self._lastnormaltime = 0
+ self._lastnormaltime = None
self._dirty = False
def copy(self, source, dest):
@@ -312,9 +266,9 @@ class dirstate(object):
if self[f] not in "?r" and "_dirs" in self.__dict__:
_decdirs(self._dirs, f)
- def _addpath(self, f, state, mode, size, mtime):
+ def _addpath(self, f, check=False):
oldstate = self[f]
- if state == 'a' or oldstate == 'r':
+ if check or oldstate == "r":
scmutil.checkfilename(f)
if f in self._dirs:
raise util.Abort(_('directory %r already in dirstate') % f)
@@ -327,14 +281,14 @@ class dirstate(object):
_('file %r in dirstate clashes with %r') % (d, f))
if oldstate in "?r" and "_dirs" in self.__dict__:
_incdirs(self._dirs, f)
- self._dirty = True
- self._map[f] = (state, mode, size, mtime)
def normal(self, f):
'''Mark a file normal and clean.'''
+ self._dirty = True
+ self._addpath(f)
s = os.lstat(self._join(f))
mtime = int(s.st_mtime)
- self._addpath(f, 'n', s.st_mode, s.st_size, mtime)
+ self._map[f] = ('n', s.st_mode, s.st_size, mtime)
if f in self._copymap:
del self._copymap[f]
if mtime > self._lastnormaltime:
@@ -361,7 +315,9 @@ class dirstate(object):
return
if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2:
return
- self._addpath(f, 'n', 0, -1, -1)
+ self._dirty = True
+ self._addpath(f)
+ self._map[f] = ('n', 0, -1, -1)
if f in self._copymap:
del self._copymap[f]
@@ -370,13 +326,17 @@ class dirstate(object):
if self._pl[1] == nullid:
raise util.Abort(_("setting %r to other parent "
"only allowed in merges") % f)
- self._addpath(f, 'n', 0, -2, -1)
+ self._dirty = True
+ self._addpath(f)
+ self._map[f] = ('n', 0, -2, -1)
if f in self._copymap:
del self._copymap[f]
def add(self, f):
'''Mark a file added.'''
- self._addpath(f, 'a', 0, -1, -1)
+ self._dirty = True
+ self._addpath(f, True)
+ self._map[f] = ('a', 0, -1, -1)
if f in self._copymap:
del self._copymap[f]
@@ -398,62 +358,36 @@ class dirstate(object):
def merge(self, f):
'''Mark a file merged.'''
- if self._pl[1] == nullid:
- return self.normallookup(f)
+ self._dirty = True
s = os.lstat(self._join(f))
- self._addpath(f, 'm', s.st_mode, s.st_size, int(s.st_mtime))
+ self._addpath(f)
+ self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime))
if f in self._copymap:
del self._copymap[f]
def drop(self, f):
'''Drop a file from the dirstate'''
- if f in self._map:
- self._dirty = True
- self._droppath(f)
- del self._map[f]
+ self._dirty = True
+ self._droppath(f)
+ del self._map[f]
- def _normalize(self, path, isknown, ignoremissing=False, exists=None):
- normed = util.normcase(path)
+ def _normalize(self, path, isknown):
+ normed = os.path.normcase(path)
folded = self._foldmap.get(normed, None)
if folded is None:
- if isknown:
+ if isknown or not os.path.lexists(os.path.join(self._root, path)):
folded = path
else:
- if exists is None:
- exists = os.path.lexists(os.path.join(self._root, path))
- if not exists:
- # Maybe a path component exists
- if not ignoremissing and '/' in path:
- d, f = path.rsplit('/', 1)
- d = self._normalize(d, isknown, ignoremissing, None)
- folded = d + "/" + f
- else:
- # No path components, preserve original case
- folded = path
- else:
- # recursively normalize leading directory components
- # against dirstate
- if '/' in normed:
- d, f = normed.rsplit('/', 1)
- d = self._normalize(d, isknown, ignoremissing, True)
- r = self._root + "/" + d
- folded = d + "/" + util.fspath(f, r)
- else:
- folded = util.fspath(normed, self._root)
- self._foldmap[normed] = folded
-
+ folded = self._foldmap.setdefault(normed,
+ util.fspath(path, self._root))
return folded
- def normalize(self, path, isknown=False, ignoremissing=False):
+ def normalize(self, path, isknown=False):
'''
normalize the case of a pathname when on a casefolding filesystem
isknown specifies whether the filename came from walking the
- disk, to avoid extra filesystem access.
-
- If ignoremissing is True, missing path are returned
- unchanged. Otherwise, we try harder to normalize possibly
- existing path components.
+ disk, to avoid extra filesystem access
The normalized case is determined based on the following precedence:
@@ -463,7 +397,7 @@ class dirstate(object):
'''
if self._checkcase:
- return self._normalize(path, isknown, ignoremissing)
+ return self._normalize(path, isknown)
return path
def clear(self):
@@ -472,7 +406,7 @@ class dirstate(object):
delattr(self, "_dirs")
self._copymap = {}
self._pl = [nullid, nullid]
- self._lastnormaltime = 0
+ self._lastnormaltime = None
self._dirty = True
def rebuild(self, parent, files):
@@ -490,24 +424,12 @@ class dirstate(object):
return
st = self._opener("dirstate", "w", atomictemp=True)
- def finish(s):
- st.write(s)
- st.close()
- self._lastnormaltime = 0
- self._dirty = self._dirtypl = False
-
# use the modification time of the newly created temporary file as the
# filesystem's notion of 'now'
- now = util.fstat(st).st_mtime
- copymap = self._copymap
- try:
- finish(parsers.pack_dirstate(self._map, copymap, self._pl, now))
- return
- except AttributeError:
- pass
+ now = int(util.fstat(st).st_mtime)
- now = int(now)
cs = cStringIO.StringIO()
+ copymap = self._copymap
pack = struct.pack
write = cs.write
write("".join(self._pl))
@@ -530,7 +452,10 @@ class dirstate(object):
e = pack(_format, e[0], e[1], e[2], e[3], len(f))
write(e)
write(f)
- finish(cs.getvalue())
+ st.write(cs.getvalue())
+ st.rename()
+ self._lastnormaltime = None
+ self._dirty = self._dirtypl = False
def _dirignore(self, f):
if f == '.':
@@ -600,11 +525,11 @@ class dirstate(object):
elif match.files() and not match.anypats(): # match.match, no patterns
skipstep3 = True
- if not exact and self._checkcase:
+ if self._checkcase:
normalize = self._normalize
skipstep3 = False
else:
- normalize = lambda x, y, z: x
+ normalize = lambda x, y: x
files = sorted(match.files())
subrepos.sort()
@@ -625,7 +550,7 @@ class dirstate(object):
# step 1: find all explicit files
for ff in files:
- nf = normalize(normpath(ff), False, True)
+ nf = normalize(normpath(ff), False)
if nf in results:
continue
@@ -675,7 +600,7 @@ class dirstate(object):
continue
raise
for f, kind, st in entries:
- nf = normalize(nd and (nd + "/" + f) or f, True, True)
+ nf = normalize(nd and (nd + "/" + f) or f, True)
if nf not in results:
if kind == dirkind:
if not ignore(nf):
@@ -696,8 +621,7 @@ class dirstate(object):
if not skipstep3 and not exact:
visit = sorted([f for f in dmap if f not in results and matchfn(f)])
for nf, st in zip(visit, util.statfiles([join(i) for i in visit])):
- if (not st is None and
- getkind(st.st_mode) not in (regkind, lnkkind)):
+ if not st is None and not getkind(st.st_mode) in (regkind, lnkkind):
st = None
results[nf] = st
for s in subrepos:
diff --git a/mercurial/discovery.py b/mercurial/discovery.py
index 0eb27a8..88a7475 100644
--- a/mercurial/discovery.py
+++ b/mercurial/discovery.py
@@ -7,7 +7,7 @@
from node import nullid, short
from i18n import _
-import util, setdiscovery, treediscovery, phases, obsolete
+import util, setdiscovery, treediscovery
def findcommonincoming(repo, remote, heads=None, force=False):
"""Return a tuple (common, anyincoming, heads) used to identify the common
@@ -46,324 +46,146 @@ def findcommonincoming(repo, remote, heads=None, force=False):
common, anyinc, srvheads = res
return (list(common), anyinc, heads or list(srvheads))
-class outgoing(object):
- '''Represents the set of nodes present in a local repo but not in a
- (possibly) remote one.
+def findcommonoutgoing(repo, other, onlyheads=None, force=False, commoninc=None):
+ '''Return a tuple (common, anyoutgoing, heads) used to identify the set
+ of nodes present in repo but not in other.
- Members:
+ If onlyheads is given, only nodes ancestral to nodes in onlyheads (inclusive)
+ are included. If you already know the local repo's heads, passing them in
+ onlyheads is faster than letting them be recomputed here.
- missing is a list of all nodes present in local but not in remote.
- common is a list of all nodes shared between the two repos.
- excluded is the list of missing changeset that shouldn't be sent remotely.
- missingheads is the list of heads of missing.
- commonheads is the list of heads of common.
-
- The sets are computed on demand from the heads, unless provided upfront
- by discovery.'''
-
- def __init__(self, revlog, commonheads, missingheads):
- self.commonheads = commonheads
- self.missingheads = missingheads
- self._revlog = revlog
- self._common = None
- self._missing = None
- self.excluded = []
-
- def _computecommonmissing(self):
- sets = self._revlog.findcommonmissing(self.commonheads,
- self.missingheads)
- self._common, self._missing = sets
-
- @util.propertycache
- def common(self):
- if self._common is None:
- self._computecommonmissing()
- return self._common
-
- @util.propertycache
- def missing(self):
- if self._missing is None:
- self._computecommonmissing()
- return self._missing
-
-def findcommonoutgoing(repo, other, onlyheads=None, force=False,
- commoninc=None, portable=False):
- '''Return an outgoing instance to identify the nodes present in repo but
- not in other.
-
- If onlyheads is given, only nodes ancestral to nodes in onlyheads
- (inclusive) are included. If you already know the local repo's heads,
- passing them in onlyheads is faster than letting them be recomputed here.
-
- If commoninc is given, it must be the result of a prior call to
+ If commoninc is given, it must the the result of a prior call to
findcommonincoming(repo, other, force) to avoid recomputing it here.
- If portable is given, compute more conservative common and missingheads,
- to make bundles created from the instance more portable.'''
- # declare an empty outgoing object to be filled later
- og = outgoing(repo.changelog, None, None)
-
- # get common set if not provided
- if commoninc is None:
- commoninc = findcommonincoming(repo, other, force=force)
- og.commonheads, _any, _hds = commoninc
-
- # compute outgoing
- mayexclude = (repo._phasecache.phaseroots[phases.secret] or repo.obsstore)
- if not mayexclude:
- og.missingheads = onlyheads or repo.heads()
- elif onlyheads is None:
- # use visible heads as it should be cached
- og.missingheads = visibleheads(repo)
- og.excluded = [ctx.node() for ctx in repo.set('secret() or extinct()')]
- else:
- # compute common, missing and exclude secret stuff
- sets = repo.changelog.findcommonmissing(og.commonheads, onlyheads)
- og._common, allmissing = sets
- og._missing = missing = []
- og.excluded = excluded = []
- for node in allmissing:
- ctx = repo[node]
- if ctx.phase() >= phases.secret or ctx.extinct():
- excluded.append(node)
- else:
- missing.append(node)
- if len(missing) == len(allmissing):
- missingheads = onlyheads
- else: # update missing heads
- missingheads = phases.newheads(repo, onlyheads, excluded)
- og.missingheads = missingheads
- if portable:
- # recompute common and missingheads as if -r<rev> had been given for
- # each head of missing, and --base <rev> for each head of the proper
- # ancestors of missing
- og._computecommonmissing()
- cl = repo.changelog
- missingrevs = set(cl.rev(n) for n in og._missing)
- og._common = set(cl.ancestors(missingrevs)) - missingrevs
- commonheads = set(og.commonheads)
- og.missingheads = [h for h in og.missingheads if h not in commonheads]
-
- return og
-
-def _headssummary(repo, remote, outgoing):
- """compute a summary of branch and heads status before and after push
-
- return {'branch': ([remoteheads], [newheads], [unsyncedheads])} mapping
-
- - branch: the branch name
- - remoteheads: the list of remote heads known locally
- None is the branch is new
- - newheads: the new remote heads (known locally) with outgoing pushed
- - unsyncedheads: the list of remote heads unknown locally.
- """
- cl = repo.changelog
- headssum = {}
- # A. Create set of branches involved in the push.
- branches = set(repo[n].branch() for n in outgoing.missing)
- remotemap = remote.branchmap()
- newbranches = branches - set(remotemap)
- branches.difference_update(newbranches)
-
- # A. register remote heads
- remotebranches = set()
- for branch, heads in remote.branchmap().iteritems():
- remotebranches.add(branch)
- known = []
- unsynced = []
- for h in heads:
- if h in cl.nodemap:
- known.append(h)
- else:
- unsynced.append(h)
- headssum[branch] = (known, list(known), unsynced)
- # B. add new branch data
- missingctx = list(repo[n] for n in outgoing.missing)
- touchedbranches = set()
- for ctx in missingctx:
- branch = ctx.branch()
- touchedbranches.add(branch)
- if branch not in headssum:
- headssum[branch] = (None, [], [])
-
- # C drop data about untouched branches:
- for branch in remotebranches - touchedbranches:
- del headssum[branch]
-
- # D. Update newmap with outgoing changes.
- # This will possibly add new heads and remove existing ones.
- newmap = dict((branch, heads[1]) for branch, heads in headssum.iteritems()
- if heads[0] is not None)
- repo._updatebranchcache(newmap, missingctx)
- for branch, newheads in newmap.iteritems():
- headssum[branch][1][:] = newheads
- return headssum
-
-def _oldheadssummary(repo, remoteheads, outgoing, inc=False):
- """Compute branchmapsummary for repo without branchmap support"""
+ The returned tuple is meant to be passed to changelog.findmissing.'''
+ common, _any, _hds = commoninc or findcommonincoming(repo, other, force=force)
+ return (common, onlyheads or repo.heads())
+
+def prepush(repo, remote, force, revs, newbranch):
+ '''Analyze the local and remote repositories and determine which
+ changesets need to be pushed to the remote. Return value depends
+ on circumstances:
+
+ If we are not going to push anything, return a tuple (None,
+ outgoing) where outgoing is 0 if there are no outgoing
+ changesets and 1 if there are, but we refuse to push them
+ (e.g. would create new remote heads).
+
+ Otherwise, return a tuple (changegroup, remoteheads), where
+ changegroup is a readable file-like object whose read() returns
+ successive changegroup chunks ready to be sent over the wire and
+ remoteheads is the list of remote heads.'''
+ commoninc = findcommonincoming(repo, remote, force=force)
+ common, revs = findcommonoutgoing(repo, remote, onlyheads=revs,
+ commoninc=commoninc, force=force)
+ _common, inc, remoteheads = commoninc
cl = repo.changelog
- # 1-4b. old servers: Check for new topological heads.
- # Construct {old,new}map with branch = None (topological branch).
- # (code based on _updatebranchcache)
- oldheads = set(h for h in remoteheads if h in cl.nodemap)
- # all nodes in outgoing.missing are children of either:
- # - an element of oldheads
- # - another element of outgoing.missing
- # - nullrev
- # This explains why the new head are very simple to compute.
- r = repo.set('heads(%ln + %ln)', oldheads, outgoing.missing)
- newheads = list(c.node() for c in r)
- unsynced = inc and set([None]) or set()
- return {None: (oldheads, newheads, unsynced)}
+ outg = cl.findmissing(common, revs)
+
+ if not outg:
+ repo.ui.status(_("no changes found\n"))
+ return None, 1
+
+ if not force and remoteheads != [nullid]:
+ if remote.capable('branchmap'):
+ # Check for each named branch if we're creating new remote heads.
+ # To be a remote head after push, node must be either:
+ # - unknown locally
+ # - a local outgoing head descended from update
+ # - a remote head that's known locally and not
+ # ancestral to an outgoing head
+
+ # 1. Create set of branches involved in the push.
+ branches = set(repo[n].branch() for n in outg)
+
+ # 2. Check for new branches on the remote.
+ remotemap = remote.branchmap()
+ newbranches = branches - set(remotemap)
+ if newbranches and not newbranch: # new branch requires --new-branch
+ branchnames = ', '.join(sorted(newbranches))
+ raise util.Abort(_("push creates new remote branches: %s!")
+ % branchnames,
+ hint=_("use 'hg push --new-branch' to create"
+ " new remote branches"))
+ branches.difference_update(newbranches)
+
+ # 3. Construct the initial oldmap and newmap dicts.
+ # They contain information about the remote heads before and
+ # after the push, respectively.
+ # Heads not found locally are not included in either dict,
+ # since they won't be affected by the push.
+ # unsynced contains all branches with incoming changesets.
+ oldmap = {}
+ newmap = {}
+ unsynced = set()
+ for branch in branches:
+ remotebrheads = remotemap[branch]
+ prunedbrheads = [h for h in remotebrheads if h in cl.nodemap]
+ oldmap[branch] = prunedbrheads
+ newmap[branch] = list(prunedbrheads)
+ if len(remotebrheads) > len(prunedbrheads):
+ unsynced.add(branch)
+
+ # 4. Update newmap with outgoing changes.
+ # This will possibly add new heads and remove existing ones.
+ ctxgen = (repo[n] for n in outg)
+ repo._updatebranchcache(newmap, ctxgen)
-def checkheads(repo, remote, outgoing, remoteheads, newbranch=False, inc=False):
- """Check that a push won't add any outgoing head
-
- raise Abort error and display ui message as needed.
- """
- # Check for each named branch if we're creating new remote heads.
- # To be a remote head after push, node must be either:
- # - unknown locally
- # - a local outgoing head descended from update
- # - a remote head that's known locally and not
- # ancestral to an outgoing head
- if remoteheads == [nullid]:
- # remote is empty, nothing to check.
- return
-
- if remote.capable('branchmap'):
- headssum = _headssummary(repo, remote, outgoing)
- else:
- headssum = _oldheadssummary(repo, remoteheads, outgoing, inc)
- newbranches = [branch for branch, heads in headssum.iteritems()
- if heads[0] is None]
- # 1. Check for new branches on the remote.
- if newbranches and not newbranch: # new branch requires --new-branch
- branchnames = ', '.join(sorted(newbranches))
- raise util.Abort(_("push creates new remote branches: %s!")
- % branchnames,
- hint=_("use 'hg push --new-branch' to create"
- " new remote branches"))
-
- # 2 compute newly pushed bookmarks. We
- # we don't warned about bookmarked heads.
- localbookmarks = repo._bookmarks
- remotebookmarks = remote.listkeys('bookmarks')
- bookmarkedheads = set()
- for bm in localbookmarks:
- rnode = remotebookmarks.get(bm)
- if rnode and rnode in repo:
- lctx, rctx = repo[bm], repo[rnode]
- if rctx == lctx.ancestor(rctx):
- bookmarkedheads.add(lctx.node())
-
- # 3. Check for new heads.
- # If there are more heads after the push than before, a suitable
- # error message, depending on unsynced status, is displayed.
- error = None
- unsynced = False
- allmissing = set(outgoing.missing)
- for branch, heads in headssum.iteritems():
- if heads[0] is None:
- # Maybe we should abort if we push more that one head
- # for new branches ?
- continue
- if heads[2]:
- unsynced = True
- oldhs = set(heads[0])
- candidate_newhs = set(heads[1])
- # add unsynced data
- oldhs.update(heads[2])
- candidate_newhs.update(heads[2])
- dhs = None
- if repo.obsstore:
- # remove future heads which are actually obsolete by another
- # pushed element:
- #
- # XXX There is several case this case does not handle properly
- #
- # (1) if <nh> is public, it won't be affected by obsolete marker
- # and a new is created
- #
- # (2) if the new heads have ancestors which are not obsolete and
- # not ancestors of any other heads we will have a new head too.
- #
- # This two case will be easy to handle for know changeset but much
- # more tricky for unsynced changes.
- newhs = set()
- for nh in candidate_newhs:
- for suc in obsolete.anysuccessors(repo.obsstore, nh):
- if suc != nh and suc in allmissing:
- break
- else:
- newhs.add(nh)
else:
- newhs = candidate_newhs
- if len(newhs) > len(oldhs):
- # strip updates to existing remote heads from the new heads list
- dhs = list(newhs - bookmarkedheads - oldhs)
- if dhs:
- if error is None:
- if branch not in ('default', None):
- error = _("push creates new remote head %s "
- "on branch '%s'!") % (short(dhs[0]), branch)
- else:
- error = _("push creates new remote head %s!"
- ) % short(dhs[0])
- if heads[2]: # unsynced
- hint = _("you should pull and merge or "
- "use push -f to force")
- else:
- hint = _("did you forget to merge? "
- "use push -f to force")
- if branch is not None:
- repo.ui.note(_("new remote heads on branch '%s'\n") % branch)
- for h in dhs:
- repo.ui.note(_("new remote head %s\n") % short(h))
- if error:
- raise util.Abort(error, hint=hint)
-
- # 6. Check for unsynced changes on involved branches.
- if unsynced:
- repo.ui.warn(_("note: unsynced remote changes!\n"))
-
-def visibleheads(repo):
- """return the set of visible head of this repo"""
- # XXX we want a cache on this
- sroots = repo._phasecache.phaseroots[phases.secret]
- if sroots or repo.obsstore:
- # XXX very slow revset. storing heads or secret "boundary"
- # would help.
- revset = repo.set('heads(not (%ln:: + extinct()))', sroots)
-
- vheads = [ctx.node() for ctx in revset]
- if not vheads:
- vheads.append(nullid)
- else:
- vheads = repo.heads()
- return vheads
-
-
-def visiblebranchmap(repo):
- """return a branchmap for the visible set"""
- # XXX Recomputing this data on the fly is very slow. We should build a
- # XXX cached version while computin the standard branchmap version.
- sroots = repo._phasecache.phaseroots[phases.secret]
- if sroots or repo.obsstore:
- vbranchmap = {}
- for branch, nodes in repo.branchmap().iteritems():
- # search for secret heads.
- for n in nodes:
- if repo[n].phase() >= phases.secret:
- nodes = None
- break
- # if secret heads were found we must compute them again
- if nodes is None:
- s = repo.set('heads(branch(%s) - secret() - extinct())',
- branch)
- nodes = [c.node() for c in s]
- vbranchmap[branch] = nodes
+ # 1-4b. old servers: Check for new topological heads.
+ # Construct {old,new}map with branch = None (topological branch).
+ # (code based on _updatebranchcache)
+ oldheads = set(h for h in remoteheads if h in cl.nodemap)
+ newheads = oldheads.union(outg)
+ if len(newheads) > 1:
+ for latest in reversed(outg):
+ if latest not in newheads:
+ continue
+ minhrev = min(cl.rev(h) for h in newheads)
+ reachable = cl.reachable(latest, cl.node(minhrev))
+ reachable.remove(latest)
+ newheads.difference_update(reachable)
+ branches = set([None])
+ newmap = {None: newheads}
+ oldmap = {None: oldheads}
+ unsynced = inc and branches or set()
+
+ # 5. Check for new heads.
+ # If there are more heads after the push than before, a suitable
+ # error message, depending on unsynced status, is displayed.
+ error = None
+ for branch in branches:
+ newhs = set(newmap[branch])
+ oldhs = set(oldmap[branch])
+ if len(newhs) > len(oldhs):
+ dhs = list(newhs - oldhs)
+ if error is None:
+ if branch != 'default':
+ error = _("push creates new remote head %s "
+ "on branch '%s'!") % (short(dhs[0]), branch)
+ else:
+ error = _("push creates new remote head %s!"
+ ) % short(dhs[0])
+ if branch in unsynced:
+ hint = _("you should pull and merge or "
+ "use push -f to force")
+ else:
+ hint = _("did you forget to merge? "
+ "use push -f to force")
+ repo.ui.note("new remote heads on branch '%s'\n" % branch)
+ for h in dhs:
+ repo.ui.note("new remote head %s\n" % short(h))
+ if error:
+ raise util.Abort(error, hint=hint)
+
+ # 6. Check for unsynced changes on involved branches.
+ if unsynced:
+ repo.ui.warn(_("note: unsynced remote changes!\n"))
+
+ if revs is None:
+ # use the fast path, no race possible on push
+ cg = repo._changegroup(outg, 'push')
else:
- vbranchmap = repo.branchmap()
- return vbranchmap
+ cg = repo.getbundle('push', heads=revs, common=common)
+ return cg, remoteheads
diff --git a/mercurial/dispatch.py b/mercurial/dispatch.py
index 520fe59..a1f8335 100644
--- a/mercurial/dispatch.py
+++ b/mercurial/dispatch.py
@@ -12,8 +12,7 @@ import cmdutil, encoding
import ui as uimod
class request(object):
- def __init__(self, args, ui=None, repo=None, fin=None, fout=None,
- ferr=None):
+ def __init__(self, args, ui=None, repo=None, fin=None, fout=None, ferr=None):
self.args = args
self.ui = ui
self.repo = repo
@@ -25,7 +24,7 @@ class request(object):
def run():
"run the command in sys.argv"
- sys.exit((dispatch(request(sys.argv[1:])) or 0) & 255)
+ sys.exit(dispatch(request(sys.argv[1:])))
def dispatch(req):
"run the command specified in req.args"
@@ -88,7 +87,7 @@ def _runcatch(req):
return _dispatch(req)
finally:
ui.flush()
- except: # re-raises
+ except:
# enter the debugger when we hit an exception
if '--debugger' in req.args:
traceback.print_exc()
@@ -124,9 +123,6 @@ def _runcatch(req):
else:
ui.warn(_("hg: %s\n") % inst.args[1])
commands.help_(ui, 'shortlist')
- except error.OutOfBandError, inst:
- ui.warn(_("abort: remote error:\n"))
- ui.warn(''.join(inst.args))
except error.RepoError, inst:
ui.warn(_("abort: %s!\n") % inst)
if inst.hint:
@@ -163,16 +159,16 @@ def _runcatch(req):
elif m in "zlib".split():
ui.warn(_("(is your Python install correct?)\n"))
except IOError, inst:
- if util.safehasattr(inst, "code"):
+ if hasattr(inst, "code"):
ui.warn(_("abort: %s\n") % inst)
- elif util.safehasattr(inst, "reason"):
+ elif hasattr(inst, "reason"):
try: # usually it is in the form (errno, strerror)
reason = inst.reason.args[1]
except (AttributeError, IndexError):
- # it might be anything, for example a string
+ # it might be anything, for example a string
reason = inst.reason
ui.warn(_("abort: error: %s\n") % reason)
- elif util.safehasattr(inst, "args") and inst.args[0] == errno.EPIPE:
+ elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE:
if ui.debugflag:
ui.warn(_("broken pipe\n"))
elif getattr(inst, "strerror", None):
@@ -204,72 +200,29 @@ def _runcatch(req):
return inst.code
except socket.error, inst:
ui.warn(_("abort: %s\n") % inst.args[-1])
- except: # re-raises
- myver = util.version()
- # For compatibility checking, we discard the portion of the hg
- # version after the + on the assumption that if a "normal
- # user" is running a build with a + in it the packager
- # probably built from fairly close to a tag and anyone with a
- # 'make local' copy of hg (where the version number can be out
- # of date) will be clueful enough to notice the implausible
- # version number and try updating.
- compare = myver.split('+')[0]
- ct = tuplever(compare)
- worst = None, ct, ''
- for name, mod in extensions.extensions():
- testedwith = getattr(mod, 'testedwith', 'unknown')
- report = getattr(mod, 'buglink', _('the extension author.'))
- if testedwith == 'unknown':
- # We found an untested extension. It's likely the culprit.
- worst = name, testedwith, report
- break
- if compare not in testedwith.split() and testedwith != 'internal':
- tested = [tuplever(v) for v in testedwith.split()]
- lower = [t for t in tested if t < ct]
- nearest = max(lower or tested)
- if worst[0] is None or nearest < worst[1]:
- worst = name, nearest, report
- if worst[0] is not None:
- name, testedwith, report = worst
- if not isinstance(testedwith, str):
- testedwith = '.'.join([str(c) for c in testedwith])
- warning = (_('** Unknown exception encountered with '
- 'possibly-broken third-party extension %s\n'
- '** which supports versions %s of Mercurial.\n'
- '** Please disable %s and try your action again.\n'
- '** If that fixes the bug please report it to %s\n')
- % (name, testedwith, name, report))
- else:
- warning = (_("** unknown exception encountered, "
- "please report by visiting\n") +
- _("** http://mercurial.selenic.com/wiki/BugTracker\n"))
- warning += ((_("** Python %s\n") % sys.version.replace('\n', '')) +
- (_("** Mercurial Distributed SCM (version %s)\n") % myver) +
- (_("** Extensions loaded: %s\n") %
- ", ".join([x[0] for x in extensions.extensions()])))
- ui.warn(warning)
+ except:
+ ui.warn(_("** unknown exception encountered,"
+ " please report by visiting\n"))
+ ui.warn(_("** http://mercurial.selenic.com/wiki/BugTracker\n"))
+ ui.warn(_("** Python %s\n") % sys.version.replace('\n', ''))
+ ui.warn(_("** Mercurial Distributed SCM (version %s)\n")
+ % util.version())
+ ui.warn(_("** Extensions loaded: %s\n")
+ % ", ".join([x[0] for x in extensions.extensions()]))
raise
return -1
-def tuplever(v):
- try:
- return tuple([int(i) for i in v.split('.')])
- except ValueError:
- return tuple()
-
def aliasargs(fn, givenargs):
args = getattr(fn, 'args', [])
- if args:
+ if args and givenargs:
cmd = ' '.join(map(util.shellquote, args))
nums = []
def replacer(m):
num = int(m.group(1)) - 1
nums.append(num)
- if num < len(givenargs):
- return givenargs[num]
- raise util.Abort(_('too few arguments for command alias'))
+ return givenargs[num]
cmd = re.sub(r'\$(\d+|\$)', replacer, cmd)
givenargs = [x for i, x in enumerate(givenargs)
if i not in nums]
@@ -285,7 +238,6 @@ class cmdalias(object):
self.opts = []
self.help = ''
self.norepo = True
- self.optionalrepo = False
self.badalias = False
try:
@@ -304,6 +256,7 @@ class cmdalias(object):
return 1
self.fn = fn
self.badalias = True
+
return
if self.definition.startswith('!'):
@@ -355,8 +308,6 @@ class cmdalias(object):
self.args = aliasargs(self.fn, args)
if cmd not in commands.norepo.split(' '):
self.norepo = False
- if cmd in commands.optionalrepo.split(' '):
- self.optionalrepo = True
if self.help.startswith("hg " + cmd):
# drop prefix in old-style help lines so hg shows the alias
self.help = self.help[4 + len(cmd):]
@@ -387,7 +338,7 @@ class cmdalias(object):
ui.debug("alias '%s' shadows command '%s'\n" %
(self.name, self.cmdname))
- if util.safehasattr(self, 'shell'):
+ if hasattr(self, 'shell'):
return self.fn(ui, *args, **opts)
else:
try:
@@ -412,11 +363,9 @@ def addaliases(ui, cmdtable):
# definition might not exist or it might not be a cmdalias
pass
- cmdtable[aliasdef.name] = (aliasdef, aliasdef.opts, aliasdef.help)
+ cmdtable[aliasdef.cmd] = (aliasdef, aliasdef.opts, aliasdef.help)
if aliasdef.norepo:
commands.norepo += ' %s' % alias
- if aliasdef.optionalrepo:
- commands.optionalrepo += ' %s' % alias
def _parse(ui, args):
options = {}
@@ -430,7 +379,7 @@ def _parse(ui, args):
if args:
cmd, args = args[0], args[1:]
aliases, entry = cmdutil.findcmd(cmd, commands.table,
- ui.configbool("ui", "strict"))
+ ui.config("ui", "strict"))
cmd = aliases[0]
args = aliasargs(entry[0], args)
defaults = ui.config("defaults", cmd)
@@ -534,14 +483,16 @@ def _getlocal(ui, rpath):
lui = ui.copy()
lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
- if rpath and rpath[-1]:
+ if rpath:
path = lui.expandpath(rpath[-1])
lui = ui.copy()
lui.readconfig(os.path.join(path, ".hg", "hgrc"), path)
return path, lui
-def _checkshellalias(lui, ui, args):
+def _checkshellalias(ui, args):
+ cwd = os.getcwd()
+ norepo = commands.norepo
options = {}
try:
@@ -552,38 +503,42 @@ def _checkshellalias(lui, ui, args):
if not args:
return
- norepo = commands.norepo
- optionalrepo = commands.optionalrepo
- def restorecommands():
- commands.norepo = norepo
- commands.optionalrepo = optionalrepo
+ _parseconfig(ui, options['config'])
+ if options['cwd']:
+ os.chdir(options['cwd'])
+
+ path, lui = _getlocal(ui, [options['repository']])
cmdtable = commands.table.copy()
addaliases(lui, cmdtable)
cmd = args[0]
try:
- aliases, entry = cmdutil.findcmd(cmd, cmdtable,
- lui.configbool("ui", "strict"))
+ aliases, entry = cmdutil.findcmd(cmd, cmdtable, lui.config("ui", "strict"))
except (error.AmbiguousCommand, error.UnknownCommand):
- restorecommands()
+ commands.norepo = norepo
+ os.chdir(cwd)
return
cmd = aliases[0]
fn = entry[0]
- if cmd and util.safehasattr(fn, 'shell'):
+ if cmd and hasattr(fn, 'shell'):
d = lambda: fn(ui, *args[1:])
- return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d,
- [], {})
+ return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {})
- restorecommands()
+ commands.norepo = norepo
+ os.chdir(cwd)
_loaded = set()
def _dispatch(req):
args = req.args
ui = req.ui
+ shellaliasfn = _checkshellalias(ui, args)
+ if shellaliasfn:
+ return shellaliasfn()
+
# read --config before doing anything else
# (e.g. to change trust settings for reading .hg/hgrc)
cfgs = _parseconfig(ui, _earlygetopt(['--config'], args))
@@ -596,12 +551,6 @@ def _dispatch(req):
rpath = _earlygetopt(["-R", "--repository", "--repo"], args)
path, lui = _getlocal(ui, rpath)
- # Now that we're operating in the right directory/repository with
- # the right config settings, check for shell aliases
- shellaliasfn = _checkshellalias(lui, ui, args)
- if shellaliasfn:
- return shellaliasfn()
-
# Configure extensions in phases: uisetup, extsetup, cmdtable, and
# reposetup. Programs like TortoiseHg will call _dispatch several
# times so we keep track of configured extensions in _loaded.
@@ -639,7 +588,7 @@ def _dispatch(req):
raise util.Abort(_("option --cwd may not be abbreviated!"))
if options["repository"]:
raise util.Abort(_(
- "option -R has to be separated from other options (e.g. not -qR) "
+ "Option -R has to be separated from other options (e.g. not -qR) "
"and --repository may only be abbreviated as --repo!"))
if options["encoding"]:
@@ -655,7 +604,7 @@ def _dispatch(req):
s = get_times()
def print_time():
t = get_times()
- ui.warn(_("time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
+ ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") %
(t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3]))
atexit.register(print_time)
@@ -686,10 +635,10 @@ def _dispatch(req):
for ui_ in uis:
ui_.setconfig('web', 'cacerts', '')
- if options['version']:
- return commands.version_(ui)
if options['help']:
- return commands.help_(ui, cmd)
+ return commands.help_(ui, cmd, options['version'])
+ elif options['version']:
+ return commands.version_(ui)
elif not cmd:
return commands.help_(ui, 'shortlist')
@@ -723,8 +672,7 @@ def _dispatch(req):
return _dispatch(req)
if not path:
raise error.RepoError(_("no repository found in '%s'"
- " (.hg not found)")
- % os.getcwd())
+ " (.hg not found)") % os.getcwd())
raise
if repo:
ui = repo.ui
@@ -742,59 +690,6 @@ def _dispatch(req):
if repo and repo != req.repo:
repo.close()
-def lsprofile(ui, func, fp):
- format = ui.config('profiling', 'format', default='text')
- field = ui.config('profiling', 'sort', default='inlinetime')
- climit = ui.configint('profiling', 'nested', default=5)
-
- if format not in ['text', 'kcachegrind']:
- ui.warn(_("unrecognized profiling format '%s'"
- " - Ignored\n") % format)
- format = 'text'
-
- try:
- from mercurial import lsprof
- except ImportError:
- raise util.Abort(_(
- 'lsprof not available - install from '
- 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
- p = lsprof.Profiler()
- p.enable(subcalls=True)
- try:
- return func()
- finally:
- p.disable()
-
- if format == 'kcachegrind':
- import lsprofcalltree
- calltree = lsprofcalltree.KCacheGrind(p)
- calltree.output(fp)
- else:
- # format == 'text'
- stats = lsprof.Stats(p.getstats())
- stats.sort(field)
- stats.pprint(limit=30, file=fp, climit=climit)
-
-def statprofile(ui, func, fp):
- try:
- import statprof
- except ImportError:
- raise util.Abort(_(
- 'statprof not available - install using "easy_install statprof"'))
-
- freq = ui.configint('profiling', 'freq', default=1000)
- if freq > 0:
- statprof.reset(freq)
- else:
- ui.warn(_("invalid sampling frequency '%s' - ignoring\n") % freq)
-
- statprof.start()
- try:
- return func()
- finally:
- statprof.stop()
- statprof.display(fp)
-
def _runcommand(ui, options, cmd, cmdfunc):
def checkargs():
try:
@@ -803,28 +698,45 @@ def _runcommand(ui, options, cmd, cmdfunc):
raise error.CommandError(cmd, _("invalid arguments"))
if options['profile']:
- profiler = os.getenv('HGPROF')
- if profiler is None:
- profiler = ui.config('profiling', 'type', default='ls')
- if profiler not in ('ls', 'stat'):
- ui.warn(_("unrecognized profiler '%s' - ignored\n") % profiler)
- profiler = 'ls'
+ format = ui.config('profiling', 'format', default='text')
+
+ if not format in ['text', 'kcachegrind']:
+ ui.warn(_("unrecognized profiling format '%s'"
+ " - Ignored\n") % format)
+ format = 'text'
output = ui.config('profiling', 'output')
if output:
path = ui.expandpath(output)
- fp = open(path, 'wb')
+ ostream = open(path, 'wb')
else:
- fp = sys.stderr
+ ostream = sys.stderr
try:
- if profiler == 'ls':
- return lsprofile(ui, checkargs, fp)
- else:
- return statprofile(ui, checkargs, fp)
+ from mercurial import lsprof
+ except ImportError:
+ raise util.Abort(_(
+ 'lsprof not available - install from '
+ 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/'))
+ p = lsprof.Profiler()
+ p.enable(subcalls=True)
+ try:
+ return checkargs()
finally:
+ p.disable()
+
+ if format == 'kcachegrind':
+ import lsprofcalltree
+ calltree = lsprofcalltree.KCacheGrind(p)
+ calltree.output(ostream)
+ else:
+ # format == 'text'
+ stats = lsprof.Stats(p.getstats())
+ stats.sort()
+ stats.pprint(top=10, file=ostream, climit=5)
+
if output:
- fp.close()
+ ostream.close()
else:
return checkargs()
diff --git a/mercurial/encoding.py b/mercurial/encoding.py
index 781d03b..3005752 100644
--- a/mercurial/encoding.py
+++ b/mercurial/encoding.py
@@ -92,32 +92,24 @@ def tolocal(s):
'foo: \\xc3\\xa4'
"""
- try:
+ for e in ('UTF-8', fallbackencoding):
try:
- # make sure string is actually stored in UTF-8
- u = s.decode('UTF-8')
- if encoding == 'UTF-8':
- # fast path
- return s
+ u = s.decode(e) # attempt strict decoding
r = u.encode(encoding, "replace")
if u == r.decode(encoding):
# r is a safe, non-lossy encoding of s
return r
- return localstr(s, r)
- except UnicodeDecodeError:
- # we should only get here if we're looking at an ancient changeset
- try:
- u = s.decode(fallbackencoding)
- r = u.encode(encoding, "replace")
- if u == r.decode(encoding):
- # r is a safe, non-lossy encoding of s
- return r
+ elif e == 'UTF-8':
+ return localstr(s, r)
+ else:
return localstr(u.encode('UTF-8'), r)
- except UnicodeDecodeError:
- u = s.decode("utf-8", "replace") # last ditch
- return u.encode(encoding, "replace") # can't round-trip
- except LookupError, k:
- raise error.Abort(k, hint="please check your locale settings")
+
+ except LookupError, k:
+ raise error.Abort("%s, please check your locale settings" % k)
+ except UnicodeDecodeError:
+ pass
+ u = s.decode("utf-8", "replace") # last ditch
+ return u.encode(encoding, "replace") # can't round-trip
def fromlocal(s):
"""
@@ -140,14 +132,14 @@ def fromlocal(s):
sub = s[max(0, inst.start - 10):inst.start + 10]
raise error.Abort("decoding near '%s': %s!" % (sub, inst))
except LookupError, k:
- raise error.Abort(k, hint="please check your locale settings")
+ raise error.Abort("%s, please check your locale settings" % k)
# How to treat ambiguous-width characters. Set to 'wide' to treat as wide.
wide = (os.environ.get("HGENCODINGAMBIGUOUS", "narrow") == "wide"
and "WFA" or "WF")
def colwidth(s):
- "Find the column width of a string for display in the local encoding"
+ "Find the column width of a UTF-8 string for display"
return ucolwidth(s.decode(encoding, 'replace'))
def ucolwidth(d):
@@ -157,22 +149,9 @@ def ucolwidth(d):
return sum([eaw(c) in wide and 2 or 1 for c in d])
return len(d)
-def getcols(s, start, c):
- '''Use colwidth to find a c-column substring of s starting at byte
- index start'''
- for x in xrange(start + c, len(s)):
- t = s[start:x]
- if colwidth(t) == c:
- return t
-
def lower(s):
"best-effort encoding-aware case-folding of local string s"
try:
- s.decode('ascii') # throw exception for non-ASCII character
- return s.lower()
- except UnicodeDecodeError:
- pass
- try:
if isinstance(s, localstr):
u = s._utf8.decode("utf-8")
else:
@@ -184,104 +163,3 @@ def lower(s):
return lu.encode(encoding)
except UnicodeError:
return s.lower() # we don't know how to fold this except in ASCII
- except LookupError, k:
- raise error.Abort(k, hint="please check your locale settings")
-
-def upper(s):
- "best-effort encoding-aware case-folding of local string s"
- try:
- s.decode('ascii') # throw exception for non-ASCII character
- return s.upper()
- except UnicodeDecodeError:
- pass
- try:
- if isinstance(s, localstr):
- u = s._utf8.decode("utf-8")
- else:
- u = s.decode(encoding, encodingmode)
-
- uu = u.upper()
- if u == uu:
- return s # preserve localstring
- return uu.encode(encoding)
- except UnicodeError:
- return s.upper() # we don't know how to fold this except in ASCII
- except LookupError, k:
- raise error.Abort(k, hint="please check your locale settings")
-
-def toutf8b(s):
- '''convert a local, possibly-binary string into UTF-8b
-
- This is intended as a generic method to preserve data when working
- with schemes like JSON and XML that have no provision for
- arbitrary byte strings. As Mercurial often doesn't know
- what encoding data is in, we use so-called UTF-8b.
-
- If a string is already valid UTF-8 (or ASCII), it passes unmodified.
- Otherwise, unsupported bytes are mapped to UTF-16 surrogate range,
- uDC00-uDCFF.
-
- Principles of operation:
-
- - ASCII and UTF-8 data sucessfully round-trips and is understood
- by Unicode-oriented clients
- - filenames and file contents in arbitrary other encodings can have
- be round-tripped or recovered by clueful clients
- - local strings that have a cached known UTF-8 encoding (aka
- localstr) get sent as UTF-8 so Unicode-oriented clients get the
- Unicode data they want
- - because we must preserve UTF-8 bytestring in places such as
- filenames, metadata can't be roundtripped without help
-
- (Note: "UTF-8b" often refers to decoding a mix of valid UTF-8 and
- arbitrary bytes into an internal Unicode format that can be
- re-encoded back into the original. Here we are exposing the
- internal surrogate encoding as a UTF-8 string.)
- '''
-
- if isinstance(s, localstr):
- return s._utf8
-
- try:
- if s.decode('utf-8'):
- return s
- except UnicodeDecodeError:
- # surrogate-encode any characters that don't round-trip
- s2 = s.decode('utf-8', 'ignore').encode('utf-8')
- r = ""
- pos = 0
- for c in s:
- if s2[pos:pos + 1] == c:
- r += c
- pos += 1
- else:
- r += unichr(0xdc00 + ord(c)).encode('utf-8')
- return r
-
-def fromutf8b(s):
- '''Given a UTF-8b string, return a local, possibly-binary string.
-
- return the original binary string. This
- is a round-trip process for strings like filenames, but metadata
- that's was passed through tolocal will remain in UTF-8.
-
- >>> m = "\\xc3\\xa9\\x99abcd"
- >>> n = toutf8b(m)
- >>> n
- '\\xc3\\xa9\\xed\\xb2\\x99abcd'
- >>> fromutf8b(n) == m
- True
- '''
-
- # fast path - look for uDxxx prefixes in s
- if "\xed" not in s:
- return s
-
- u = s.decode("utf-8")
- r = ""
- for c in u:
- if ord(c) & 0xff00 == 0xdc00:
- r += chr(ord(c) & 0xff)
- else:
- r += c.encode("utf-8")
- return r
diff --git a/mercurial/error.py b/mercurial/error.py
index 462b971..f68fbda 100644
--- a/mercurial/error.py
+++ b/mercurial/error.py
@@ -39,9 +39,6 @@ class Abort(Exception):
class ConfigError(Abort):
'Exception raised when parsing config files'
-class OutOfBandError(Exception):
- 'Exception raised when a remote repo reports failure'
-
class ParseError(Exception):
'Exception raised when parsing config files (msg[, pos])'
diff --git a/mercurial/exewrapper.c b/mercurial/exewrapper.c
deleted file mode 100644
index 882e7b8..0000000
--- a/mercurial/exewrapper.c
+++ /dev/null
@@ -1,101 +0,0 @@
-/*
- exewrapper.c - wrapper for calling a python script on Windows
-
- Copyright 2012 Adrian Buehlmann <adrian@cadifra.com> and others
-
- This software may be used and distributed according to the terms of the
- GNU General Public License version 2 or any later version.
-*/
-
-#include <Python.h>
-#include <windows.h>
-
-
-#ifdef __GNUC__
-int strcat_s(char *d, size_t n, const char *s)
-{
- return !strncat(d, s, n);
-}
-#endif
-
-
-static char pyscript[MAX_PATH + 10];
-
-int main(int argc, char *argv[])
-{
- char *dot;
- int ret;
- int i;
- int n;
- char **pyargv;
- WIN32_FIND_DATA fdata;
- HANDLE hfind;
- const char *err;
-
- if (GetModuleFileName(NULL, pyscript, sizeof(pyscript)) == 0)
- {
- err = "GetModuleFileName failed";
- goto bail;
- }
-
- dot = strrchr(pyscript, '.');
- if (dot == NULL) {
- err = "malformed module filename";
- goto bail;
- }
- *dot = 0; /* cut trailing ".exe" */
-
- hfind = FindFirstFile(pyscript, &fdata);
- if (hfind != INVALID_HANDLE_VALUE) {
- /* pyscript exists, close handle */
- FindClose(hfind);
- } else {
- /* file pyscript isn't there, take <pyscript>exe.py */
- strcat_s(pyscript, sizeof(pyscript), "exe.py");
- }
-
- /*
- Only add the pyscript to the args, if it's not already there. It may
- already be there, if the script spawned a child process of itself, in
- the same way as it got called, that is, with the pyscript already in
- place. So we optionally accept the pyscript as the first argument
- (argv[1]), letting our exe taking the role of the python interpreter.
- */
- if (argc >= 2 && strcmp(argv[1], pyscript) == 0) {
- /*
- pyscript is already in the args, so there is no need to copy
- the args and we can directly call the python interpreter with
- the original args.
- */
- return Py_Main(argc, argv);
- }
-
- /*
- Start assembling the args for the Python interpreter call. We put the
- name of our exe (argv[0]) in the position where the python.exe
- canonically is, and insert the pyscript next.
- */
- pyargv = malloc((argc + 5) * sizeof(char*));
- if (pyargv == NULL) {
- err = "not enough memory";
- goto bail;
- }
- n = 0;
- pyargv[n++] = argv[0];
- pyargv[n++] = pyscript;
-
- /* copy remaining args from the command line */
- for (i = 1; i < argc; i++)
- pyargv[n++] = argv[i];
- /* argv[argc] is guaranteed to be NULL, so we forward that guarantee */
- pyargv[n] = NULL;
-
- ret = Py_Main(n, pyargv); /* The Python interpreter call */
-
- free(pyargv);
- return ret;
-
-bail:
- fprintf(stderr, "abort: %s\n", err);
- return 255;
-}
diff --git a/mercurial/extensions.py b/mercurial/extensions.py
index 3f74d4e..ee9fd32 100644
--- a/mercurial/extensions.py
+++ b/mercurial/extensions.py
@@ -42,12 +42,7 @@ def loadpath(path, module_name):
fd, fpath, desc = imp.find_module(f, [d])
return imp.load_module(module_name, fd, fpath, desc)
else:
- try:
- return imp.load_source(module_name, path)
- except IOError, exc:
- if not exc.filename:
- exc.filename = path # python does not fill this
- raise
+ return imp.load_source(module_name, path)
def load(ui, name, path):
# unused ui argument kept for backwards compatibility
@@ -74,9 +69,7 @@ def load(ui, name, path):
return mod
try:
mod = importh("hgext.%s" % name)
- except ImportError, err:
- ui.debug('could not import hgext.%s (%s): trying %s\n'
- % (name, err, name))
+ except ImportError:
mod = importh(name)
_extensions[shortname] = mod
_order.append(shortname)
@@ -131,7 +124,7 @@ def wrapcommand(table, command, wrapper):
where orig is the original (wrapped) function, and *args, **kwargs
are the arguments passed to it.
'''
- assert util.safehasattr(wrapper, '__call__')
+ assert hasattr(wrapper, '__call__')
aliases, entry = cmdutil.findcmd(command, table)
for alias, e in table.iteritems():
if e is entry:
@@ -184,12 +177,12 @@ def wrapfunction(container, funcname, wrapper):
your end users, you should play nicely with others by using the
subclass trick.
'''
- assert util.safehasattr(wrapper, '__call__')
+ assert hasattr(wrapper, '__call__')
def wrap(*args, **kwargs):
return wrapper(origfn, *args, **kwargs)
origfn = getattr(container, funcname)
- assert util.safehasattr(origfn, '__call__')
+ assert hasattr(origfn, '__call__')
setattr(container, funcname, wrap)
return origfn
@@ -279,7 +272,7 @@ def disabled():
paths = _disabledpaths()
if not paths:
- return {}
+ return None
exts = {}
for name, path in paths.iteritems():
@@ -306,7 +299,7 @@ def disabledext(name):
def disabledcmd(ui, cmd, strict=False):
'''import disabled extensions until cmd is found.
- returns (cmdname, extname, module)'''
+ returns (cmdname, extname, doc)'''
paths = _disabledpaths(strip_init=True)
if not paths:
@@ -334,19 +327,18 @@ def disabledcmd(ui, cmd, strict=False):
cmd = aliases[0]
return (cmd, name, mod)
- ext = None
# first, search for an extension with the same name as the command
path = paths.pop(cmd, None)
if path:
ext = findcmd(cmd, cmd, path)
- if not ext:
- # otherwise, interrogate each extension until there's a match
- for name, path in paths.iteritems():
- ext = findcmd(cmd, name, path)
- if ext:
- break
- if ext and 'DEPRECATED' not in ext.__doc__:
- return ext
+ if ext:
+ return ext
+
+ # otherwise, interrogate each extension until there's a match
+ for name, path in paths.iteritems():
+ ext = findcmd(cmd, name, path)
+ if ext:
+ return ext
raise error.UnknownCommand(cmd)
diff --git a/mercurial/fancyopts.py b/mercurial/fancyopts.py
index ae18083..7c9e07f 100644
--- a/mercurial/fancyopts.py
+++ b/mercurial/fancyopts.py
@@ -75,7 +75,7 @@ def fancyopts(args, options, state, gnu=False):
# copy defaults to state
if isinstance(default, list):
state[name] = default[:]
- elif getattr(default, '__call__', False):
+ elif hasattr(default, '__call__'):
state[name] = None
else:
state[name] = default
diff --git a/mercurial/filemerge.py b/mercurial/filemerge.py
index d51f076..b13afe3 100644
--- a/mercurial/filemerge.py
+++ b/mercurial/filemerge.py
@@ -19,21 +19,11 @@ def _toolbool(ui, tool, part, default=False):
def _toollist(ui, tool, part, default=[]):
return ui.configlist("merge-tools", tool + "." + part, default)
-internals = {}
-
-def internaltool(name, trymerge, onfailure=None):
- '''return a decorator for populating internal merge tool table'''
- def decorator(func):
- fullname = 'internal:' + name
- func.__doc__ = "``%s``\n" % fullname + func.__doc__.strip()
- internals[fullname] = func
- func.trymerge = trymerge
- func.onfailure = onfailure
- return func
- return decorator
+_internal = ['internal:' + s
+ for s in 'fail local other merge prompt dump'.split()]
def _findtool(ui, tool):
- if tool in internals:
+ if tool in _internal:
return tool
for kn in ("regkey", "regkeyalt"):
k = _toolstr(ui, tool, kn)
@@ -44,8 +34,7 @@ def _findtool(ui, tool):
p = util.findexe(p + _toolstr(ui, tool, "regappend"))
if p:
return p
- exe = _toolstr(ui, tool, "executable", tool)
- return util.findexe(util.expandpath(exe))
+ return util.findexe(_toolstr(ui, tool, "executable", tool))
def _picktool(repo, ui, path, binary, symlink):
def check(tool, pat, symlink, binary):
@@ -107,11 +96,8 @@ def _picktool(repo, ui, path, binary, symlink):
if check(t, None, symlink, binary):
toolpath = _findtool(ui, t)
return (t, '"' + toolpath + '"')
-
- # internal merge or prompt as last resort
- if symlink or binary:
- return "internal:prompt", None
- return "internal:merge", None
+ # internal merge as last resort
+ return (not (symlink or binary) and "internal:merge" or None, None)
def _eoltype(data):
"Guess the EOL type of a file"
@@ -136,131 +122,6 @@ def _matcheol(file, origfile):
if newdata != data:
util.writefile(file, newdata)
-@internaltool('prompt', False)
-def _iprompt(repo, mynode, orig, fcd, fco, fca, toolconf):
- """Asks the user which of the local or the other version to keep as
- the merged version."""
- ui = repo.ui
- fd = fcd.path()
-
- if ui.promptchoice(_(" no tool found to merge %s\n"
- "keep (l)ocal or take (o)ther?") % fd,
- (_("&Local"), _("&Other")), 0):
- return _iother(repo, mynode, orig, fcd, fco, fca, toolconf)
- else:
- return _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf)
-
-@internaltool('local', False)
-def _ilocal(repo, mynode, orig, fcd, fco, fca, toolconf):
- """Uses the local version of files as the merged version."""
- return 0
-
-@internaltool('other', False)
-def _iother(repo, mynode, orig, fcd, fco, fca, toolconf):
- """Uses the other version of files as the merged version."""
- repo.wwrite(fcd.path(), fco.data(), fco.flags())
- return 0
-
-@internaltool('fail', False)
-def _ifail(repo, mynode, orig, fcd, fco, fca, toolconf):
- """
- Rather than attempting to merge files that were modified on both
- branches, it marks them as unresolved. The resolve command must be
- used to resolve these conflicts."""
- return 1
-
-def _premerge(repo, toolconf, files):
- tool, toolpath, binary, symlink = toolconf
- a, b, c, back = files
-
- ui = repo.ui
-
- # do we attempt to simplemerge first?
- try:
- premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
- except error.ConfigError:
- premerge = _toolstr(ui, tool, "premerge").lower()
- valid = 'keep'.split()
- if premerge not in valid:
- _valid = ', '.join(["'" + v + "'" for v in valid])
- raise error.ConfigError(_("%s.premerge not valid "
- "('%s' is neither boolean nor %s)") %
- (tool, premerge, _valid))
-
- if premerge:
- r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
- if not r:
- ui.debug(" premerge successful\n")
- return 0
- if premerge != 'keep':
- util.copyfile(back, a) # restore from backup and try again
- return 1 # continue merging
-
-@internaltool('merge', True,
- _("merging %s incomplete! "
- "(edit conflicts, then use 'hg resolve --mark')\n"))
-def _imerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
- """
- Uses the internal non-interactive simple merge algorithm for merging
- files. It will fail if there are any conflicts and leave markers in
- the partially merged file."""
- r = _premerge(repo, toolconf, files)
- if r:
- a, b, c, back = files
-
- ui = repo.ui
-
- r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
- return True, r
- return False, 0
-
-@internaltool('dump', True)
-def _idump(repo, mynode, orig, fcd, fco, fca, toolconf, files):
- """
- Creates three versions of the files to merge, containing the
- contents of local, other and base. These files can then be used to
- perform a merge manually. If the file to be merged is named
- ``a.txt``, these files will accordingly be named ``a.txt.local``,
- ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
- same directory as ``a.txt``."""
- r = _premerge(repo, toolconf, files)
- if r:
- a, b, c, back = files
-
- fd = fcd.path()
-
- util.copyfile(a, a + ".local")
- repo.wwrite(fd + ".other", fco.data(), fco.flags())
- repo.wwrite(fd + ".base", fca.data(), fca.flags())
- return False, r
-
-def _xmerge(repo, mynode, orig, fcd, fco, fca, toolconf, files):
- r = _premerge(repo, toolconf, files)
- if r:
- tool, toolpath, binary, symlink = toolconf
- a, b, c, back = files
- out = ""
- env = dict(HG_FILE=fcd.path(),
- HG_MY_NODE=short(mynode),
- HG_OTHER_NODE=str(fco.changectx()),
- HG_BASE_NODE=str(fca.changectx()),
- HG_MY_ISLINK='l' in fcd.flags(),
- HG_OTHER_ISLINK='l' in fco.flags(),
- HG_BASE_ISLINK='l' in fca.flags())
-
- ui = repo.ui
-
- args = _toolstr(ui, tool, "args", '$local $base $other')
- if "$output" in args:
- out, a = a, back # read input from backup, write to original
- replace = dict(local=a, base=b, other=c, output=out)
- args = util.interpolate(r'\$', replace, args,
- lambda s: '"%s"' % util.localpath(s))
- r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
- out=ui.fout)
- return True, r
- return False, 0
-
def filemerge(repo, mynode, orig, fcd, fco, fca):
"""perform a 3-way merge in the working directory
@@ -280,34 +141,42 @@ def filemerge(repo, mynode, orig, fcd, fco, fca):
f.close()
return name
+ def isbin(ctx):
+ try:
+ return util.binary(ctx.data())
+ except IOError:
+ return False
+
if not fco.cmp(fcd): # files identical?
return None
ui = repo.ui
fd = fcd.path()
- binary = fcd.isbinary() or fco.isbinary() or fca.isbinary()
+ binary = isbin(fcd) or isbin(fco) or isbin(fca)
symlink = 'l' in fcd.flags() + fco.flags()
tool, toolpath = _picktool(repo, ui, fd, binary, symlink)
ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" %
(tool, fd, binary, symlink))
- if tool in internals:
- func = internals[tool]
- trymerge = func.trymerge
- onfailure = func.onfailure
- else:
- func = _xmerge
- trymerge = True
- onfailure = _("merging %s failed!\n")
-
- toolconf = tool, toolpath, binary, symlink
-
- if not trymerge:
- return func(repo, mynode, orig, fcd, fco, fca, toolconf)
-
+ if not tool or tool == 'internal:prompt':
+ tool = "internal:local"
+ if ui.promptchoice(_(" no tool found to merge %s\n"
+ "keep (l)ocal or take (o)ther?") % fd,
+ (_("&Local"), _("&Other")), 0):
+ tool = "internal:other"
+ if tool == "internal:local":
+ return 0
+ if tool == "internal:other":
+ repo.wwrite(fd, fco.data(), fco.flags())
+ return 0
+ if tool == "internal:fail":
+ return 1
+
+ # do the actual merge
a = repo.wjoin(fd)
b = temp("base", fca)
c = temp("other", fco)
+ out = ""
back = a + ".orig"
util.copyfile(a, back)
@@ -318,18 +187,54 @@ def filemerge(repo, mynode, orig, fcd, fco, fca):
ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca))
- needcheck, r = func(repo, mynode, orig, fcd, fco, fca, toolconf,
- (a, b, c, back))
- if not needcheck:
- if r:
- if onfailure:
- ui.warn(onfailure % fd)
- else:
+ # do we attempt to simplemerge first?
+ try:
+ premerge = _toolbool(ui, tool, "premerge", not (binary or symlink))
+ except error.ConfigError:
+ premerge = _toolstr(ui, tool, "premerge").lower()
+ valid = 'keep'.split()
+ if premerge not in valid:
+ _valid = ', '.join(["'" + v + "'" for v in valid])
+ raise error.ConfigError(_("%s.premerge not valid "
+ "('%s' is neither boolean nor %s)") %
+ (tool, premerge, _valid))
+
+ if premerge:
+ r = simplemerge.simplemerge(ui, a, b, c, quiet=True)
+ if not r:
+ ui.debug(" premerge successful\n")
os.unlink(back)
+ os.unlink(b)
+ os.unlink(c)
+ return 0
+ if premerge != 'keep':
+ util.copyfile(back, a) # restore from backup and try again
- os.unlink(b)
- os.unlink(c)
- return r
+ env = dict(HG_FILE=fd,
+ HG_MY_NODE=short(mynode),
+ HG_OTHER_NODE=str(fco.changectx()),
+ HG_BASE_NODE=str(fca.changectx()),
+ HG_MY_ISLINK='l' in fcd.flags(),
+ HG_OTHER_ISLINK='l' in fco.flags(),
+ HG_BASE_ISLINK='l' in fca.flags())
+
+ if tool == "internal:merge":
+ r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other'])
+ elif tool == 'internal:dump':
+ a = repo.wjoin(fd)
+ util.copyfile(a, a + ".local")
+ repo.wwrite(fd + ".other", fco.data(), fco.flags())
+ repo.wwrite(fd + ".base", fca.data(), fca.flags())
+ return 1 # unresolved
+ else:
+ args = _toolstr(ui, tool, "args", '$local $base $other')
+ if "$output" in args:
+ out, a = a, back # read input from backup, write to original
+ replace = dict(local=a, base=b, other=c, output=out)
+ args = util.interpolate(r'\$', replace, args,
+ lambda s: '"%s"' % util.localpath(s))
+ r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env,
+ out=ui.fout)
if not r and (_toolbool(ui, tool, "checkconflicts") or
'conflicts' in _toollist(ui, tool, "check")):
@@ -346,24 +251,20 @@ def filemerge(repo, mynode, orig, fcd, fco, fca):
if not r and not checked and (_toolbool(ui, tool, "checkchanged") or
'changed' in _toollist(ui, tool, "check")):
- if filecmp.cmp(a, back):
+ if filecmp.cmp(repo.wjoin(fd), back):
if ui.promptchoice(_(" output file %s appears unchanged\n"
"was merge successful (yn)?") % fd,
(_("&Yes"), _("&No")), 1):
r = 1
if _toolbool(ui, tool, "fixeol"):
- _matcheol(a, back)
+ _matcheol(repo.wjoin(fd), back)
if r:
- if onfailure:
- ui.warn(onfailure % fd)
+ ui.warn(_("merging %s failed!\n") % fd)
else:
os.unlink(back)
os.unlink(b)
os.unlink(c)
return r
-
-# tell hggettext to extract docstrings from these functions:
-i18nfunctions = internals.values()
diff --git a/mercurial/fileset.py b/mercurial/fileset.py
index 5c1f7eb..95a2c17 100644
--- a/mercurial/fileset.py
+++ b/mercurial/fileset.py
@@ -199,7 +199,7 @@ def binary(mctx, x):
"""
# i18n: "binary" is a keyword
getargs(x, 0, 0, _("binary takes no arguments"))
- return [f for f in mctx.existing() if util.binary(mctx.ctx[f].data())]
+ return [f for f in mctx.subset if util.binary(mctx.ctx[f].data())]
def exec_(mctx, x):
"""``exec()``
@@ -207,7 +207,7 @@ def exec_(mctx, x):
"""
# i18n: "exec" is a keyword
getargs(x, 0, 0, _("exec takes no arguments"))
- return [f for f in mctx.existing() if mctx.ctx.flags(f) == 'x']
+ return [f for f in mctx.subset if mctx.ctx.flags(f) == 'x']
def symlink(mctx, x):
"""``symlink()``
@@ -215,7 +215,7 @@ def symlink(mctx, x):
"""
# i18n: "symlink" is a keyword
getargs(x, 0, 0, _("symlink takes no arguments"))
- return [f for f in mctx.existing() if mctx.ctx.flags(f) == 'l']
+ return [f for f in mctx.subset if mctx.ctx.flags(f) == 'l']
def resolved(mctx, x):
"""``resolved()``
@@ -253,7 +253,7 @@ def grep(mctx, x):
"""
pat = getstring(x, _("grep requires a pattern"))
r = re.compile(pat)
- return [f for f in mctx.existing() if r.search(mctx.ctx[f].data())]
+ return [f for f in mctx.subset if r.search(mctx.ctx[f].data())]
_units = dict(k=2**10, K=2**10, kB=2**10, KB=2**10,
M=2**20, MB=2**20, G=2**30, GB=2**30)
@@ -320,7 +320,7 @@ def size(mctx, x):
else:
raise error.ParseError(_("couldn't parse size: %s") % expr)
- return [f for f in mctx.existing() if m(mctx.ctx[f].size())]
+ return [f for f in mctx.subset if m(mctx.ctx[f].size())]
def encoding(mctx, x):
"""``encoding(name)``
@@ -333,7 +333,7 @@ def encoding(mctx, x):
enc = getstring(x, _("encoding requires an encoding name"))
s = []
- for f in mctx.existing():
+ for f in mctx.subset:
d = mctx.ctx[f].data()
try:
d.decode(enc)
@@ -358,28 +358,6 @@ def copied(mctx, x):
s.append(f)
return s
-def subrepo(mctx, x):
- """``subrepo([pattern])``
- Subrepositories whose paths match the given pattern.
- """
- # i18n: "subrepo" is a keyword
- getargs(x, 0, 1, _("subrepo takes at most one argument"))
- ctx = mctx.ctx
- sstate = ctx.substate
- if x:
- pat = getstring(x, _("subrepo requires a pattern or no arguments"))
-
- import match as matchmod # avoid circular import issues
- fast = not matchmod.patkind(pat)
- if fast:
- def m(s):
- return (s == pat)
- else:
- m = matchmod.match(ctx._repo.root, '', [pat], ctx=ctx)
- return [sub for sub in sstate if m(sub)]
- else:
- return [sub for sub in sstate]
-
symbols = {
'added': added,
'binary': binary,
@@ -398,7 +376,6 @@ symbols = {
'symlink': symlink,
'unknown': unknown,
'unresolved': unresolved,
- 'subrepo': subrepo,
}
methods = {
@@ -423,8 +400,6 @@ class matchctx(object):
return self.ctx.match(patterns)
def filter(self, files):
return [f for f in files if f in self.subset]
- def existing(self):
- return (f for f in self.subset if f in self.ctx)
def narrow(self, files):
return matchctx(self.ctx, self.filter(files), self._status)
diff --git a/mercurial/formatter.py b/mercurial/formatter.py
deleted file mode 100644
index 53a2022..0000000
--- a/mercurial/formatter.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# formatter.py - generic output formatting for mercurial
-#
-# Copyright 2012 Matt Mackall <mpm@selenic.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-class baseformatter(object):
- def __init__(self, ui, topic, opts):
- self._ui = ui
- self._topic = topic
- self._style = opts.get("style")
- self._template = opts.get("template")
- self._item = None
- def __bool__(self):
- '''return False if we're not doing real templating so we can
- skip extra work'''
- return True
- def _showitem(self):
- '''show a formatted item once all data is collected'''
- pass
- def startitem(self):
- '''begin an item in the format list'''
- if self._item is not None:
- self._showitem()
- self._item = {}
- def data(self, **data):
- '''insert data into item that's not shown in default output'''
- def write(self, fields, deftext, *fielddata, **opts):
- '''do default text output while assigning data to item'''
- for k, v in zip(fields.split(), fielddata):
- self._item[k] = v
- def plain(self, text, **opts):
- '''show raw text for non-templated mode'''
- pass
- def end(self):
- '''end output for the formatter'''
- if self._item is not None:
- self._showitem()
-
-class plainformatter(baseformatter):
- '''the default text output scheme'''
- def __init__(self, ui, topic, opts):
- baseformatter.__init__(self, ui, topic, opts)
- def __bool__(self):
- return False
- def startitem(self):
- pass
- def data(self, **data):
- pass
- def write(self, fields, deftext, *fielddata, **opts):
- self._ui.write(deftext % fielddata, **opts)
- def plain(self, text, **opts):
- self._ui.write(text, **opts)
- def end(self):
- pass
-
-class debugformatter(baseformatter):
- def __init__(self, ui, topic, opts):
- baseformatter.__init__(self, ui, topic, opts)
- self._ui.write("%s = {\n" % self._topic)
- def _showitem(self):
- self._ui.write(" " + repr(self._item) + ",\n")
- def end(self):
- baseformatter.end(self)
- self._ui.write("}\n")
-
-def formatter(ui, topic, opts):
- if ui.configbool('ui', 'formatdebug'):
- return debugformatter(ui, topic, opts)
- return plainformatter(ui, topic, opts)
diff --git a/mercurial/graphmod.py b/mercurial/graphmod.py
index 84be437..314f2b8 100644
--- a/mercurial/graphmod.py
+++ b/mercurial/graphmod.py
@@ -18,7 +18,6 @@ Data depends on type.
"""
from mercurial.node import nullrev
-import util
CHANGESET = 'C'
@@ -68,7 +67,7 @@ def nodes(repo, nodes):
parents = set([p.rev() for p in ctx.parents() if p.node() in include])
yield (ctx.rev(), CHANGESET, ctx, sorted(parents))
-def colored(dag, repo):
+def colored(dag):
"""annotates a DAG with colored edge information
For each DAG node this function emits tuples::
@@ -84,23 +83,6 @@ def colored(dag, repo):
seen = []
colors = {}
newcolor = 1
- config = {}
-
- for key, val in repo.ui.configitems('graph'):
- if '.' in key:
- branch, setting = key.rsplit('.', 1)
- # Validation
- if setting == "width" and val.isdigit():
- config.setdefault(branch, {})[setting] = int(val)
- elif setting == "color" and val.isalnum():
- config.setdefault(branch, {})[setting] = val
-
- if config:
- getconf = util.lrucachefunc(
- lambda rev: config.get(repo[rev].branch(), {}))
- else:
- getconf = lambda rev: {}
-
for (cur, type, data, parents) in dag:
# Compute seen and next
@@ -129,18 +111,10 @@ def colored(dag, repo):
edges = []
for ecol, eid in enumerate(seen):
if eid in next:
- bconf = getconf(eid)
- edges.append((
- ecol, next.index(eid), colors[eid],
- bconf.get('width', -1),
- bconf.get('color', '')))
+ edges.append((ecol, next.index(eid), colors[eid]))
elif eid == cur:
for p in parents:
- bconf = getconf(p)
- edges.append((
- ecol, next.index(p), color,
- bconf.get('width', -1),
- bconf.get('color', '')))
+ edges.append((ecol, next.index(p), color))
# Yield and move on
yield (cur, type, data, (col, color), edges)
@@ -163,206 +137,3 @@ def grandparent(cl, lowestrev, roots, head):
pending.update([p for p in cl.parentrevs(r)])
seen.add(r)
return sorted(kept)
-
-def asciiedges(type, char, lines, seen, rev, parents):
- """adds edge info to changelog DAG walk suitable for ascii()"""
- if rev not in seen:
- seen.append(rev)
- nodeidx = seen.index(rev)
-
- knownparents = []
- newparents = []
- for parent in parents:
- if parent in seen:
- knownparents.append(parent)
- else:
- newparents.append(parent)
-
- ncols = len(seen)
- nextseen = seen[:]
- nextseen[nodeidx:nodeidx + 1] = newparents
- edges = [(nodeidx, nextseen.index(p)) for p in knownparents]
-
- while len(newparents) > 2:
- # ascii() only knows how to add or remove a single column between two
- # calls. Nodes with more than two parents break this constraint so we
- # introduce intermediate expansion lines to grow the active node list
- # slowly.
- edges.append((nodeidx, nodeidx))
- edges.append((nodeidx, nodeidx + 1))
- nmorecols = 1
- yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
- char = '\\'
- lines = []
- nodeidx += 1
- ncols += 1
- edges = []
- del newparents[0]
-
- if len(newparents) > 0:
- edges.append((nodeidx, nodeidx))
- if len(newparents) > 1:
- edges.append((nodeidx, nodeidx + 1))
- nmorecols = len(nextseen) - ncols
- seen[:] = nextseen
- yield (type, char, lines, (nodeidx, edges, ncols, nmorecols))
-
-def _fixlongrightedges(edges):
- for (i, (start, end)) in enumerate(edges):
- if end > start:
- edges[i] = (start, end + 1)
-
-def _getnodelineedgestail(
- node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
- if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
- # Still going in the same non-vertical direction.
- if n_columns_diff == -1:
- start = max(node_index + 1, p_node_index)
- tail = ["|", " "] * (start - node_index - 1)
- tail.extend(["/", " "] * (n_columns - start))
- return tail
- else:
- return ["\\", " "] * (n_columns - node_index - 1)
- else:
- return ["|", " "] * (n_columns - node_index - 1)
-
-def _drawedges(edges, nodeline, interline):
- for (start, end) in edges:
- if start == end + 1:
- interline[2 * end + 1] = "/"
- elif start == end - 1:
- interline[2 * start + 1] = "\\"
- elif start == end:
- interline[2 * start] = "|"
- else:
- if 2 * end >= len(nodeline):
- continue
- nodeline[2 * end] = "+"
- if start > end:
- (start, end) = (end, start)
- for i in range(2 * start + 1, 2 * end):
- if nodeline[i] != "+":
- nodeline[i] = "-"
-
-def _getpaddingline(ni, n_columns, edges):
- line = []
- line.extend(["|", " "] * ni)
- if (ni, ni - 1) in edges or (ni, ni) in edges:
- # (ni, ni - 1) (ni, ni)
- # | | | | | | | |
- # +---o | | o---+
- # | | c | | c | |
- # | |/ / | |/ /
- # | | | | | |
- c = "|"
- else:
- c = " "
- line.extend([c, " "])
- line.extend(["|", " "] * (n_columns - ni - 1))
- return line
-
-def asciistate():
- """returns the initial value for the "state" argument to ascii()"""
- return [0, 0]
-
-def ascii(ui, state, type, char, text, coldata):
- """prints an ASCII graph of the DAG
-
- takes the following arguments (one call per node in the graph):
-
- - ui to write to
- - Somewhere to keep the needed state in (init to asciistate())
- - Column of the current node in the set of ongoing edges.
- - Type indicator of node data, usually 'C' for changesets.
- - Payload: (char, lines):
- - Character to use as node's symbol.
- - List of lines to display as the node's text.
- - Edges; a list of (col, next_col) indicating the edges between
- the current node and its parents.
- - Number of columns (ongoing edges) in the current revision.
- - The difference between the number of columns (ongoing edges)
- in the next revision and the number of columns (ongoing edges)
- in the current revision. That is: -1 means one column removed;
- 0 means no columns added or removed; 1 means one column added.
- """
-
- idx, edges, ncols, coldiff = coldata
- assert -2 < coldiff < 2
- if coldiff == -1:
- # Transform
- #
- # | | | | | |
- # o | | into o---+
- # |X / |/ /
- # | | | |
- _fixlongrightedges(edges)
-
- # add_padding_line says whether to rewrite
- #
- # | | | | | | | |
- # | o---+ into | o---+
- # | / / | | | # <--- padding line
- # o | | | / /
- # o | |
- add_padding_line = (len(text) > 2 and coldiff == -1 and
- [x for (x, y) in edges if x + 1 < y])
-
- # fix_nodeline_tail says whether to rewrite
- #
- # | | o | | | | o | |
- # | | |/ / | | |/ /
- # | o | | into | o / / # <--- fixed nodeline tail
- # | |/ / | |/ /
- # o | | o | |
- fix_nodeline_tail = len(text) <= 2 and not add_padding_line
-
- # nodeline is the line containing the node character (typically o)
- nodeline = ["|", " "] * idx
- nodeline.extend([char, " "])
-
- nodeline.extend(
- _getnodelineedgestail(idx, state[1], ncols, coldiff,
- state[0], fix_nodeline_tail))
-
- # shift_interline is the line containing the non-vertical
- # edges between this entry and the next
- shift_interline = ["|", " "] * idx
- if coldiff == -1:
- n_spaces = 1
- edge_ch = "/"
- elif coldiff == 0:
- n_spaces = 2
- edge_ch = "|"
- else:
- n_spaces = 3
- edge_ch = "\\"
- shift_interline.extend(n_spaces * [" "])
- shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
-
- # draw edges from the current node to its parents
- _drawedges(edges, nodeline, shift_interline)
-
- # lines is the list of all graph lines to print
- lines = [nodeline]
- if add_padding_line:
- lines.append(_getpaddingline(idx, ncols, edges))
- lines.append(shift_interline)
-
- # make sure that there are as many graph lines as there are
- # log strings
- while len(text) < len(lines):
- text.append("")
- if len(lines) < len(text):
- extra_interline = ["|", " "] * (ncols + coldiff)
- while len(lines) < len(text):
- lines.append(extra_interline)
-
- # print lines
- indentation_level = max(ncols, ncols + coldiff)
- for (line, logstr) in zip(lines, text):
- ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
- ui.write(ln.rstrip() + '\n')
-
- # ... and start over
- state[0] = coldiff
- state[1] = idx
diff --git a/mercurial/hbisect.py b/mercurial/hbisect.py
index 0ce8182..38ed976 100644
--- a/mercurial/hbisect.py
+++ b/mercurial/hbisect.py
@@ -8,7 +8,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import os, error
+import os
from i18n import _
from node import short, hex
import util
@@ -35,18 +35,17 @@ def bisect(changelog, state):
# build visit array
ancestors = [None] * (len(changelog) + 1) # an extra for [-1]
- # set nodes descended from goodrevs
- for rev in goodrevs:
- ancestors[rev] = []
+ # set nodes descended from goodrev
+ ancestors[goodrev] = []
for rev in xrange(goodrev + 1, len(changelog)):
for prev in clparents(rev):
if ancestors[prev] == []:
ancestors[rev] = []
# clear good revs from array
- for rev in goodrevs:
- ancestors[rev] = None
- for rev in xrange(len(changelog), goodrev, -1):
+ for node in goodrevs:
+ ancestors[node] = None
+ for rev in xrange(len(changelog), -1, -1):
if ancestors[rev] is None:
for prev in clparents(rev):
ancestors[prev] = None
@@ -69,10 +68,10 @@ def bisect(changelog, state):
# build children dict
children = {}
- visit = util.deque([badrev])
+ visit = [badrev]
candidates = []
while visit:
- rev = visit.popleft()
+ rev = visit.pop(0)
if ancestors[rev] == []:
candidates.append(rev)
for prev in clparents(rev):
@@ -132,7 +131,7 @@ def bisect(changelog, state):
def load_state(repo):
- state = {'current': [], 'good': [], 'bad': [], 'skip': []}
+ state = {'good': [], 'bad': [], 'skip': []}
if os.path.exists(repo.join("bisect.state")):
for l in repo.opener("bisect.state"):
kind, node = l[:-1].split()
@@ -150,109 +149,7 @@ def save_state(repo, state):
for kind in state:
for node in state[kind]:
f.write("%s %s\n" % (kind, hex(node)))
- f.close()
+ f.rename()
finally:
wlock.release()
-def get(repo, status):
- """
- Return a list of revision(s) that match the given status:
-
- - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
- - ``goods``, ``bads`` : csets topologicaly good/bad
- - ``range`` : csets taking part in the bisection
- - ``pruned`` : csets that are goods, bads or skipped
- - ``untested`` : csets whose fate is yet unknown
- - ``ignored`` : csets ignored due to DAG topology
- - ``current`` : the cset currently being bisected
- """
- state = load_state(repo)
- if status in ('good', 'bad', 'skip', 'current'):
- return map(repo.changelog.rev, state[status])
- else:
- # In the floowing sets, we do *not* call 'bisect()' with more
- # than one level of recusrsion, because that can be very, very
- # time consuming. Instead, we always develop the expression as
- # much as possible.
-
- # 'range' is all csets that make the bisection:
- # - have a good ancestor and a bad descendant, or conversely
- # that's because the bisection can go either way
- range = '( bisect(bad)::bisect(good) | bisect(good)::bisect(bad) )'
-
- _t = repo.revs('bisect(good)::bisect(bad)')
- # The sets of topologically good or bad csets
- if len(_t) == 0:
- # Goods are topologically after bads
- goods = 'bisect(good)::' # Pruned good csets
- bads = '::bisect(bad)' # Pruned bad csets
- else:
- # Goods are topologically before bads
- goods = '::bisect(good)' # Pruned good csets
- bads = 'bisect(bad)::' # Pruned bad csets
-
- # 'pruned' is all csets whose fate is already known: good, bad, skip
- skips = 'bisect(skip)' # Pruned skipped csets
- pruned = '( (%s) | (%s) | (%s) )' % (goods, bads, skips)
-
- # 'untested' is all cset that are- in 'range', but not in 'pruned'
- untested = '( (%s) - (%s) )' % (range, pruned)
-
- # 'ignored' is all csets that were not used during the bisection
- # due to DAG topology, but may however have had an impact.
- # Eg., a branch merged between bads and goods, but whose branch-
- # point is out-side of the range.
- iba = '::bisect(bad) - ::bisect(good)' # Ignored bads' ancestors
- iga = '::bisect(good) - ::bisect(bad)' # Ignored goods' ancestors
- ignored = '( ( (%s) | (%s) ) - (%s) )' % (iba, iga, range)
-
- if status == 'range':
- return repo.revs(range)
- elif status == 'pruned':
- return repo.revs(pruned)
- elif status == 'untested':
- return repo.revs(untested)
- elif status == 'ignored':
- return repo.revs(ignored)
- elif status == "goods":
- return repo.revs(goods)
- elif status == "bads":
- return repo.revs(bads)
- else:
- raise error.ParseError(_('invalid bisect state'))
-
-def label(repo, node):
- rev = repo.changelog.rev(node)
-
- # Try explicit sets
- if rev in get(repo, 'good'):
- # i18n: bisect changeset status
- return _('good')
- if rev in get(repo, 'bad'):
- # i18n: bisect changeset status
- return _('bad')
- if rev in get(repo, 'skip'):
- # i18n: bisect changeset status
- return _('skipped')
- if rev in get(repo, 'untested') or rev in get(repo, 'current'):
- # i18n: bisect changeset status
- return _('untested')
- if rev in get(repo, 'ignored'):
- # i18n: bisect changeset status
- return _('ignored')
-
- # Try implicit sets
- if rev in get(repo, 'goods'):
- # i18n: bisect changeset status
- return _('good (implicit)')
- if rev in get(repo, 'bads'):
- # i18n: bisect changeset status
- return _('bad (implicit)')
-
- return None
-
-def shortlabel(label):
- if label:
- return label[0].upper()
-
- return None
diff --git a/mercurial/help.py b/mercurial/help.py
index 79d9966..bebf9df 100644
--- a/mercurial/help.py
+++ b/mercurial/help.py
@@ -6,119 +6,32 @@
# GNU General Public License version 2 or any later version.
from i18n import gettext, _
-import itertools, sys, os
-import extensions, revset, fileset, templatekw, templatefilters, filemerge
-import encoding, util, minirst
+import sys, os
+import extensions, revset, fileset, templatekw, templatefilters
+import util
def listexts(header, exts, indent=1):
'''return a text listing of the given extensions'''
- rst = []
- if exts:
- rst.append('\n%s\n\n' % header)
- for name, desc in sorted(exts.iteritems()):
- rst.append('%s:%s: %s\n' % (' ' * indent, name, desc))
- return rst
+ if not exts:
+ return ''
+ maxlength = max(len(e) for e in exts)
+ result = '\n%s\n\n' % header
+ for name, desc in sorted(exts.iteritems()):
+ result += '%s%-*s %s\n' % (' ' * indent, maxlength + 2,
+ ':%s:' % name, desc)
+ return result
def extshelp():
- rst = loaddoc('extensions')().splitlines(True)
- rst.extend(listexts(_('enabled extensions:'), extensions.enabled()))
- rst.extend(listexts(_('disabled extensions:'), extensions.disabled()))
- doc = ''.join(rst)
+ doc = loaddoc('extensions')()
+ doc += listexts(_('enabled extensions:'), extensions.enabled())
+ doc += listexts(_('disabled extensions:'), extensions.disabled())
return doc
-def optrst(options, verbose):
- data = []
- multioccur = False
- for option in options:
- if len(option) == 5:
- shortopt, longopt, default, desc, optlabel = option
- else:
- shortopt, longopt, default, desc = option
- optlabel = _("VALUE") # default label
-
- if _("DEPRECATED") in desc and not verbose:
- continue
-
- so = ''
- if shortopt:
- so = '-' + shortopt
- lo = '--' + longopt
- if default:
- desc += _(" (default: %s)") % default
-
- if isinstance(default, list):
- lo += " %s [+]" % optlabel
- multioccur = True
- elif (default is not None) and not isinstance(default, bool):
- lo += " %s" % optlabel
-
- data.append((so, lo, desc))
-
- rst = minirst.maketable(data, 1)
-
- if multioccur:
- rst.append(_("\n[+] marked option can be specified multiple times\n"))
-
- return ''.join(rst)
-
-def topicmatch(kw):
- """Return help topics matching kw.
-
- Returns {'section': [(name, summary), ...], ...} where section is
- one of topics, commands, extensions, or extensioncommands.
- """
- kw = encoding.lower(kw)
- def lowercontains(container):
- return kw in encoding.lower(container) # translated in helptable
- results = {'topics': [],
- 'commands': [],
- 'extensions': [],
- 'extensioncommands': [],
- }
- for names, header, doc in helptable:
- if (sum(map(lowercontains, names))
- or lowercontains(header)
- or lowercontains(doc())):
- results['topics'].append((names[0], header))
- import commands # avoid cycle
- for cmd, entry in commands.table.iteritems():
- if cmd.startswith('debug'):
- continue
- if len(entry) == 3:
- summary = entry[2]
- else:
- summary = ''
- # translate docs *before* searching there
- docs = _(getattr(entry[0], '__doc__', None)) or ''
- if kw in cmd or lowercontains(summary) or lowercontains(docs):
- doclines = docs.splitlines()
- if doclines:
- summary = doclines[0]
- cmdname = cmd.split('|')[0].lstrip('^')
- results['commands'].append((cmdname, summary))
- for name, docs in itertools.chain(
- extensions.enabled().iteritems(),
- extensions.disabled().iteritems()):
- # extensions.load ignores the UI argument
- mod = extensions.load(None, name, '')
- if lowercontains(name) or lowercontains(docs):
- # extension docs are already translated
- results['extensions'].append((name, docs.splitlines()[0]))
- for cmd, entry in getattr(mod, 'cmdtable', {}).iteritems():
- if kw in cmd or (len(entry) > 2 and lowercontains(entry[2])):
- cmdname = cmd.split('|')[0].lstrip('^')
- if entry[0].__doc__:
- cmddoc = gettext(entry[0].__doc__).splitlines()[0]
- else:
- cmddoc = _('(no help text available)')
- results['extensioncommands'].append((cmdname, cmddoc))
- return results
-
def loaddoc(topic):
"""Return a delayed loader for help/topic.txt."""
def loader():
- if util.mainfrozen():
+ if hasattr(sys, 'frozen'):
module = sys.executable
else:
module = __file__
@@ -143,24 +56,23 @@ helptable = sorted([
(["patterns"], _("File Name Patterns"), loaddoc('patterns')),
(['environment', 'env'], _('Environment Variables'),
loaddoc('environment')),
- (['revisions', 'revs'], _('Specifying Single Revisions'),
+ (['revs', 'revisions'], _('Specifying Single Revisions'),
loaddoc('revisions')),
- (['multirevs', 'mrevs'], _('Specifying Multiple Revisions'),
+ (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'),
loaddoc('multirevs')),
- (['revsets', 'revset'], _("Specifying Revision Sets"), loaddoc('revsets')),
- (['filesets', 'fileset'], _("Specifying File Sets"), loaddoc('filesets')),
+ (['revset', 'revsets'], _("Specifying Revision Sets"), loaddoc('revsets')),
+ (['fileset', 'filesets'], _("Specifying File Sets"), loaddoc('filesets')),
(['diffs'], _('Diff Formats'), loaddoc('diffs')),
- (['merge-tools', 'mergetools'], _('Merge Tools'), loaddoc('merge-tools')),
- (['templating', 'templates', 'template', 'style'], _('Template Usage'),
+ (['merge-tools'], _('Merge Tools'), loaddoc('merge-tools')),
+ (['templating', 'templates'], _('Template Usage'),
loaddoc('templates')),
(['urls'], _('URL Paths'), loaddoc('urls')),
- (["extensions"], _("Using Additional Features"), extshelp),
- (["subrepos", "subrepo"], _("Subrepositories"), loaddoc('subrepos')),
- (["hgweb"], _("Configuring hgweb"), loaddoc('hgweb')),
- (["glossary"], _("Glossary"), loaddoc('glossary')),
- (["hgignore", "ignore"], _("Syntax for Mercurial Ignore Files"),
- loaddoc('hgignore')),
- (["phases"], _("Working with Phases"), loaddoc('phases')),
+ (["extensions"], _("Using additional features"), extshelp),
+ (["subrepo", "subrepos"], _("Subrepositories"), loaddoc('subrepos')),
+ (["hgweb"], _("Configuring hgweb"), loaddoc('hgweb')),
+ (["glossary"], _("Glossary"), loaddoc('glossary')),
+ (["hgignore", "ignore"], _("syntax for Mercurial ignore files"),
+ loaddoc('hgignore')),
])
# Map topics to lists of callable taking the current topic help and
@@ -181,13 +93,8 @@ def makeitemsdoc(topic, doc, marker, items):
continue
text = gettext(text)
lines = text.splitlines()
- doclines = [(lines[0])]
- for l in lines[1:]:
- # Stop once we find some Python doctest
- if l.strip().startswith('>>>'):
- break
- doclines.append(' ' + l.strip())
- entries.append('\n'.join(doclines))
+ lines[1:] = [(' ' + l.strip()) for l in lines[1:]]
+ entries.append('\n'.join(lines))
entries = '\n\n'.join(entries)
return doc.replace(marker, entries)
@@ -197,7 +104,6 @@ def addtopicsymbols(topic, marker, symbols):
addtopichook(topic, add)
addtopicsymbols('filesets', '.. predicatesmarker', fileset.symbols)
-addtopicsymbols('merge-tools', '.. internaltoolsmarker', filemerge.internals)
addtopicsymbols('revsets', '.. predicatesmarker', revset.symbols)
-addtopicsymbols('templates', '.. keywordsmarker', templatekw.dockeywords)
+addtopicsymbols('templates', '.. keywordsmarker', templatekw.keywords)
addtopicsymbols('templates', '.. filtersmarker', templatefilters.filters)
diff --git a/mercurial/help/config.txt b/mercurial/help/config.txt
index 557948b..52c2fee 100644
--- a/mercurial/help/config.txt
+++ b/mercurial/help/config.txt
@@ -13,7 +13,7 @@ The above entries will be referred to as ``ui.username`` and
``ui.verbose``, respectively. See the Syntax section below.
Files
-=====
+-----
Mercurial reads configuration data from several files, if they exist.
These files do not exist by default and you will have to create the
@@ -28,17 +28,16 @@ alphabetical order, later ones overriding earlier ones. Where multiple
paths are given below, settings from earlier paths override later
ones.
-| (All) ``<repo>/.hg/hgrc``
+| (Unix, Windows) ``<repo>/.hg/hgrc``
Per-repository configuration options that only apply in a
particular repository. This file is not version-controlled, and
will not get transferred during a "clone" operation. Options in
this file override options in all other configuration files. On
- Plan 9 and Unix, most of this file will be ignored if it doesn't
- belong to a trusted user or to a trusted group. See the documentation
- for the ``[trusted]`` section below for more details.
+ Unix, most of this file will be ignored if it doesn't belong to a
+ trusted user or to a trusted group. See the documentation for the
+ ``[trusted]`` section below for more details.
-| (Plan 9) ``$home/lib/hgrc``
| (Unix) ``$HOME/.hgrc``
| (Windows) ``%USERPROFILE%\.hgrc``
| (Windows) ``%USERPROFILE%\Mercurial.ini``
@@ -51,8 +50,6 @@ ones.
directory. Options in these files override per-system and per-installation
options.
-| (Plan 9) ``/lib/mercurial/hgrc``
-| (Plan 9) ``/lib/mercurial/hgrc.d/*.rc``
| (Unix) ``/etc/mercurial/hgrc``
| (Unix) ``/etc/mercurial/hgrc.d/*.rc``
@@ -61,8 +58,6 @@ ones.
executed by any user in any directory. Options in these files
override per-installation options.
-| (Plan 9) ``<install-root>/lib/mercurial/hgrc``
-| (Plan 9) ``<install-root>/lib/mercurial/hgrc.d/*.rc``
| (Unix) ``<install-root>/etc/mercurial/hgrc``
| (Unix) ``<install-root>/etc/mercurial/hgrc.d/*.rc``
@@ -83,10 +78,12 @@ ones.
keys contain PATH-like strings, every part of which must reference
a ``Mercurial.ini`` file or be a directory where ``*.rc`` files will
be read. Mercurial checks each of these locations in the specified
- order until one or more configuration files are detected.
+ order until one or more configuration files are detected. If the
+ pywin32 extensions are not installed, Mercurial will only look for
+ site-wide configuration in ``C:\Mercurial\Mercurial.ini``.
Syntax
-======
+------
A configuration file consists of sections, led by a ``[section]`` header
and followed by ``name = value`` entries (sometimes called
@@ -171,14 +168,14 @@ quotation marks at the beginning of a word is counted as a quotation
(e.g., ``foo"bar baz`` is the list of ``foo"bar`` and ``baz``).
Sections
-========
+--------
This section describes the different sections that may appear in a
Mercurial configuration file, the purpose of each section, its possible
keys, and their possible values.
``alias``
----------
+"""""""""
Defines command aliases.
Aliases allow you to define your own commands in terms of other
@@ -209,7 +206,7 @@ An alias can start with an exclamation point (``!``) to make it a
shell alias. A shell alias is executed with the shell and will let you
run arbitrary commands. As an example, ::
- echo = !echo $@
+ echo = !echo
will let you do ``hg echo foo`` to have ``foo`` printed in your
terminal. A better example might be::
@@ -219,43 +216,15 @@ terminal. A better example might be::
which will make ``hg purge`` delete all unknown files in the
repository in the same manner as the purge extension.
-Positional arguments like ``$1``, ``$2``, etc. in the alias definition
-expand to the command arguments. Unmatched arguments are
-removed. ``$0`` expands to the alias name and ``$@`` expands to all
-arguments separated by a space. These expansions happen before the
-command is passed to the shell.
-
-Shell aliases are executed in an environment where ``$HG`` expands to
+Shell aliases are executed in an environment where ``$HG`` expands to
the path of the Mercurial that was used to execute the alias. This is
useful when you want to call further Mercurial commands in a shell
alias, as was done above for the purge alias. In addition,
-``$HG_ARGS`` expands to the arguments given to Mercurial. In the ``hg
+``$HG_ARGS`` expands to the arguments given to Mercurial. In the ``hg
echo foo`` call above, ``$HG_ARGS`` would expand to ``echo foo``.
-.. note:: Some global configuration options such as ``-R`` are
- processed before shell aliases and will thus not be passed to
- aliases.
-
-
-``annotate``
-------------
-
-Settings used when displaying file annotations. All values are
-Booleans and default to False. See ``diff`` section for related
-options for the diff command.
-
-``ignorews``
- Ignore white space when comparing lines.
-
-``ignorewsamount``
- Ignore changes in the amount of white space.
-
-``ignoreblanklines``
- Ignore changes whose lines are all blank.
-
-
``auth``
---------
+""""""""
Authentication credentials for HTTP authentication. This section
allows you to store usernames and passwords for use when logging
@@ -322,7 +291,7 @@ for credentials as usual if required by the remote.
``decode/encode``
------------------
+"""""""""""""""""
Filters for transforming files on checkout/checkin. This would
typically be used for newline processing or other
@@ -369,7 +338,7 @@ format. We suggest you use the ``eol`` extension for convenience.
``defaults``
-------------
+""""""""""""
(defaults are deprecated. Don't use them. Use aliases instead)
@@ -389,11 +358,10 @@ to the aliases of the commands defined.
``diff``
---------
+""""""""
-Settings used when displaying diffs. Everything except for ``unified``
-is a Boolean and defaults to False. See ``annotate`` section for
-related options for the annotate command.
+Settings used when displaying diffs. Everything except for ``unified`` is a
+Boolean and defaults to False.
``git``
Use git extended diff format.
@@ -417,7 +385,7 @@ related options for the annotate command.
Number of lines of context to show.
``email``
----------
+"""""""""
Settings for extensions that send email messages.
@@ -472,7 +440,7 @@ Email example::
``extensions``
---------------
+""""""""""""""
Mercurial has an extension mechanism for adding new features. To
enable an extension, create an entry for it in this section.
@@ -498,8 +466,26 @@ Example for ``~/.hgrc``::
myfeature = ~/.hgext/myfeature.py
+``hostfingerprints``
+""""""""""""""""""""
+
+Fingerprints of the certificates of known HTTPS servers.
+An HTTPS connection to a server with a fingerprint configured here will
+only succeed if the server's certificate matches the fingerprint.
+This is very similar to how ssh known hosts works.
+The fingerprint is the SHA-1 hash value of the DER encoded certificate.
+The CA chain and web.cacerts are not used for servers with a fingerprint.
+
+For example::
+
+ [hostfingerprints]
+ hg.intevation.org = 38:76:52:7c:87:26:9a:8f:4a:f8:d3:de:08:45:3b:ea:d6:4b:ee:cc
+
+This feature is only supported when using Python 2.6 or later.
+
+
``format``
-----------
+""""""""""
``usestore``
Enable or disable the "store" repository format which improves
@@ -525,45 +511,128 @@ Example for ``~/.hgrc``::
option ensures that the on-disk format of newly created
repositories will be compatible with Mercurial before version 1.7.
-``graph``
----------
+``merge-patterns``
+""""""""""""""""""
-Web graph view configuration. This section let you change graph
-elements display properties by branches, for instance to make the
-``default`` branch stand out.
+This section specifies merge tools to associate with particular file
+patterns. Tools matched here will take precedence over the default
+merge tool. Patterns are globs by default, rooted at the repository
+root.
-Each line has the following format::
+Example::
+
+ [merge-patterns]
+ **.c = kdiff3
+ **.jpg = myimgmerge
- <branch>.<argument> = <value>
+``merge-tools``
+"""""""""""""""
-where ``<branch>`` is the name of the branch being
-customized. Example::
+This section configures external merge tools to use for file-level
+merges.
- [graph]
- # 2px width
- default.width = 2
- # red color
- default.color = FF0000
+Example ``~/.hgrc``::
+
+ [merge-tools]
+ # Override stock tool location
+ kdiff3.executable = ~/bin/kdiff3
+ # Specify command line
+ kdiff3.args = $base $local $other -o $output
+ # Give higher priority
+ kdiff3.priority = 1
+
+ # Define new tool
+ myHtmlTool.args = -m $local $other $base $output
+ myHtmlTool.regkey = Software\FooSoftware\HtmlMerge
+ myHtmlTool.priority = 1
Supported arguments:
-``width``
- Set branch edges width in pixels.
+``priority``
+ The priority in which to evaluate this tool.
+ Default: 0.
+
+``executable``
+ Either just the name of the executable or its pathname. On Windows,
+ the path can use environment variables with ${ProgramFiles} syntax.
+ Default: the tool name.
+
+``args``
+ The arguments to pass to the tool executable. You can refer to the
+ files being merged as well as the output file through these
+ variables: ``$base``, ``$local``, ``$other``, ``$output``.
+ Default: ``$local $base $other``
+
+``premerge``
+ Attempt to run internal non-interactive 3-way merge tool before
+ launching external tool. Options are ``true``, ``false``, or ``keep``
+ to leave markers in the file if the premerge fails.
+ Default: True
+
+``binary``
+ This tool can merge binary files. Defaults to False, unless tool
+ was selected by file pattern match.
+
+``symlink``
+ This tool can merge symlinks. Defaults to False, even if tool was
+ selected by file pattern match.
+
+``check``
+ A list of merge success-checking options:
+
+ ``changed``
+ Ask whether merge was successful when the merged file shows no changes.
+ ``conflicts``
+ Check whether there are conflicts even though the tool reported success.
+ ``prompt``
+ Always prompt for merge success, regardless of success reported by tool.
+
+``checkchanged``
+ True is equivalent to ``check = changed``.
+ Default: False
+
+``checkconflicts``
+ True is equivalent to ``check = conflicts``.
+ Default: False
+
+``fixeol``
+ Attempt to fix up EOL changes caused by the merge tool.
+ Default: False
+
+``gui``
+ This tool requires a graphical interface to run. Default: False
+
+``regkey``
+ Windows registry key which describes install location of this
+ tool. Mercurial will search for this key first under
+ ``HKEY_CURRENT_USER`` and then under ``HKEY_LOCAL_MACHINE``.
+ Default: None
+
+``regkeyalt``
+ An alternate Windows registry key to try if the first key is not
+ found. The alternate key uses the same ``regname`` and ``regappend``
+ semantics of the primary key. The most common use for this key
+ is to search for 32bit applications on 64bit operating systems.
+ Default: None
+
+``regname``
+ Name of value to read from specified registry key. Defaults to the
+ unnamed (default) value.
+
+``regappend``
+ String to append to the value read from the registry, typically
+ the executable name of the tool.
+ Default: None
-``color``
- Set branch edges color in hexadecimal RGB notation.
``hooks``
----------
+"""""""""
Commands or Python functions that get automatically executed by
various actions such as starting or finishing a commit. Multiple
hooks can be run for the same action by appending a suffix to the
action. Overriding a site-wide hook can be done by changing its
-value or setting it to an empty string. Hooks can be prioritized
-by adding a prefix of ``priority`` to the hook name on a new line
-and setting the priority. The default priority is 0 if
-not specified.
+value or setting it to an empty string.
Example ``.hg/hgrc``::
@@ -574,8 +643,6 @@ Example ``.hg/hgrc``::
incoming =
incoming.email = /my/email/hook
incoming.autobuild = /my/build/hook
- # force autobuild hook to run before other incoming hooks
- priority.incoming.autobuild = 1
Most hooks are run with environment variables set that give useful
additional information. For each hook below, the environment
@@ -732,26 +799,8 @@ If a Python hook returns a "true" value or raises an exception, this
is treated as a failure.
-``hostfingerprints``
---------------------
-
-Fingerprints of the certificates of known HTTPS servers.
-A HTTPS connection to a server with a fingerprint configured here will
-only succeed if the servers certificate matches the fingerprint.
-This is very similar to how ssh known hosts works.
-The fingerprint is the SHA-1 hash value of the DER encoded certificate.
-The CA chain and web.cacerts is not used for servers with a fingerprint.
-
-For example::
-
- [hostfingerprints]
- hg.intevation.org = 38:76:52:7c:87:26:9a:8f:4a:f8:d3:de:08:45:3b:ea:d6:4b:ee:cc
-
-This feature is only supported when using Python 2.6 or later.
-
-
``http_proxy``
---------------
+""""""""""""""
Used to access web-based Mercurial repositories through a HTTP
proxy.
@@ -774,122 +823,37 @@ proxy.
Optional. Always use the proxy, even for localhost and any entries
in ``http_proxy.no``. True or False. Default: False.
-``merge-patterns``
-------------------
-
-This section specifies merge tools to associate with particular file
-patterns. Tools matched here will take precedence over the default
-merge tool. Patterns are globs by default, rooted at the repository
-root.
-
-Example::
-
- [merge-patterns]
- **.c = kdiff3
- **.jpg = myimgmerge
-
-``merge-tools``
----------------
-
-This section configures external merge tools to use for file-level
-merges.
-
-Example ``~/.hgrc``::
-
- [merge-tools]
- # Override stock tool location
- kdiff3.executable = ~/bin/kdiff3
- # Specify command line
- kdiff3.args = $base $local $other -o $output
- # Give higher priority
- kdiff3.priority = 1
-
- # Define new tool
- myHtmlTool.args = -m $local $other $base $output
- myHtmlTool.regkey = Software\FooSoftware\HtmlMerge
- myHtmlTool.priority = 1
-
-Supported arguments:
-
-``priority``
- The priority in which to evaluate this tool.
- Default: 0.
-
-``executable``
- Either just the name of the executable or its pathname. On Windows,
- the path can use environment variables with ${ProgramFiles} syntax.
- Default: the tool name.
-
-``args``
- The arguments to pass to the tool executable. You can refer to the
- files being merged as well as the output file through these
- variables: ``$base``, ``$local``, ``$other``, ``$output``.
- Default: ``$local $base $other``
-
-``premerge``
- Attempt to run internal non-interactive 3-way merge tool before
- launching external tool. Options are ``true``, ``false``, or ``keep``
- to leave markers in the file if the premerge fails.
- Default: True
-
-``binary``
- This tool can merge binary files. Defaults to False, unless tool
- was selected by file pattern match.
-
-``symlink``
- This tool can merge symlinks. Defaults to False, even if tool was
- selected by file pattern match.
-
-``check``
- A list of merge success-checking options:
-
- ``changed``
- Ask whether merge was successful when the merged file shows no changes.
- ``conflicts``
- Check whether there are conflicts even though the tool reported success.
- ``prompt``
- Always prompt for merge success, regardless of success reported by tool.
-
-``checkchanged``
- True is equivalent to ``check = changed``.
- Default: False
+``smtp``
+""""""""
-``checkconflicts``
- True is equivalent to ``check = conflicts``.
- Default: False
+Configuration for extensions that need to send email messages.
-``fixeol``
- Attempt to fix up EOL changes caused by the merge tool.
- Default: False
+``host``
+ Host name of mail server, e.g. "mail.example.com".
-``gui``
- This tool requires a graphical interface to run. Default: False
+``port``
+ Optional. Port to connect to on mail server. Default: 25.
-``regkey``
- Windows registry key which describes install location of this
- tool. Mercurial will search for this key first under
- ``HKEY_CURRENT_USER`` and then under ``HKEY_LOCAL_MACHINE``.
- Default: None
+``tls``
+ Optional. Method to enable TLS when connecting to mail server: starttls,
+ smtps or none. Default: none.
-``regkeyalt``
- An alternate Windows registry key to try if the first key is not
- found. The alternate key uses the same ``regname`` and ``regappend``
- semantics of the primary key. The most common use for this key
- is to search for 32bit applications on 64bit operating systems.
- Default: None
+``username``
+ Optional. User name for authenticating with the SMTP server.
+ Default: none.
-``regname``
- Name of value to read from specified registry key. Defaults to the
- unnamed (default) value.
+``password``
+ Optional. Password for authenticating with the SMTP server. If not
+ specified, interactive sessions will prompt the user for a
+ password; non-interactive sessions will fail. Default: none.
-``regappend``
- String to append to the value read from the registry, typically
- the executable name of the tool.
- Default: None
+``local_hostname``
+ Optional. It's the hostname that the sender can use to identify
+ itself to the MTA.
``patch``
----------
+"""""""""
Settings used when applying patches, for instance through the 'import'
command or with Mercurial Queues extension.
@@ -907,7 +871,7 @@ command or with Mercurial Queues extension.
``paths``
----------
+"""""""""
Assigns symbolic names to repositories. The left side is the
symbolic name, and the right gives the directory or URL that is the
@@ -923,50 +887,18 @@ the following entries.
Optional. Directory or URL to use when pushing if no destination
is specified.
-``phases``
-----------
-
-Specifies default handling of phases. See :hg:`help phases` for more
-information about working with phases.
-
-``publish``
- Controls draft phase behavior when working as a server. When true,
- pushed changesets are set to public in both client and server and
- pulled or cloned changesets are set to public in the client.
- Default: True
-
-``new-commit``
- Phase of newly-created commits.
- Default: draft
``profiling``
--------------
-
-Specifies profiling type, format, and file output. Two profilers are
-supported: an instrumenting profiler (named ``ls``), and a sampling
-profiler (named ``stat``).
-
-In this section description, 'profiling data' stands for the raw data
-collected during profiling, while 'profiling report' stands for a
-statistical text report generated from the profiling data. The
-profiling is done using lsprof.
-
-``type``
- The type of profiler to use.
- Default: ls.
-
- ``ls``
- Use Python's built-in instrumenting profiler. This profiler
- works on all platforms, but each line number it reports is the
- first line of a function. This restriction makes it difficult to
- identify the expensive parts of a non-trivial function.
- ``stat``
- Use a third-party statistical profiler, statprof. This profiler
- currently runs only on Unix systems, and is most useful for
- profiling commands that run for longer than about 0.1 seconds.
+"""""""""""""
+
+Specifies profiling format and file output. In this section
+description, 'profiling data' stands for the raw data collected
+during profiling, while 'profiling report' stands for a statistical
+text report generated from the profiling data. The profiling is done
+using lsprof.
``format``
- Profiling format. Specific to the ``ls`` instrumenting profiler.
+ Profiling format.
Default: text.
``text``
@@ -978,22 +910,18 @@ profiling is done using lsprof.
file, the generated file can directly be loaded into
kcachegrind.
-``frequency``
- Sampling frequency. Specific to the ``stat`` sampling profiler.
- Default: 1000.
-
``output``
File path where profiling data or report should be saved. If the
file exists, it is replaced. Default: None, data is printed on
stderr
``revsetalias``
----------------
+"""""""""""""""
Alias definitions for revsets. See :hg:`help revsets` for details.
``server``
-----------
+""""""""""
Controls generic server settings.
@@ -1009,68 +937,31 @@ Controls generic server settings.
the write lock while determining what data to transfer.
Default is True.
-``preferuncompressed``
- When set, clients will try to use the uncompressed streaming
- protocol. Default is False.
-
``validate``
Whether to validate the completeness of pushed changesets by
checking that all new file revisions specified in manifests are
present. Default is False.
-``smtp``
---------
-
-Configuration for extensions that need to send email messages.
-
-``host``
- Host name of mail server, e.g. "mail.example.com".
-
-``port``
- Optional. Port to connect to on mail server. Default: 25.
-
-``tls``
- Optional. Method to enable TLS when connecting to mail server: starttls,
- smtps or none. Default: none.
-
-``username``
- Optional. User name for authenticating with the SMTP server.
- Default: none.
-
-``password``
- Optional. Password for authenticating with the SMTP server. If not
- specified, interactive sessions will prompt the user for a
- password; non-interactive sessions will fail. Default: none.
-
-``local_hostname``
- Optional. It's the hostname that the sender can use to identify
- itself to the MTA.
-
-
``subpaths``
-------------
+""""""""""""
-Subrepository source URLs can go stale if a remote server changes name
-or becomes temporarily unavailable. This section lets you define
-rewrite rules of the form::
+Defines subrepositories source locations rewriting rules of the form::
<pattern> = <replacement>
-where ``pattern`` is a regular expression matching a subrepository
-source URL and ``replacement`` is the replacement string used to
-rewrite it. Groups can be matched in ``pattern`` and referenced in
-``replacements``. For instance::
+Where ``pattern`` is a regular expression matching the source and
+``replacement`` is the replacement string used to rewrite it. Groups
+can be matched in ``pattern`` and referenced in ``replacements``. For
+instance::
http://server/(.*)-hg/ = http://hg.server/\1/
rewrites ``http://server/foo-hg/`` into ``http://hg.server/foo/``.
-Relative subrepository paths are first made absolute, and the
-rewrite rules are then applied on the full (absolute) path. The rules
-are applied in definition order.
+All patterns are applied in definition order.
``trusted``
------------
+"""""""""""
Mercurial will not use the settings in the
``.hg/hgrc`` file from a repository if it doesn't belong to a trusted
@@ -1094,7 +985,7 @@ user or service running Mercurial.
``ui``
-------
+""""""
User interface controls.
@@ -1115,7 +1006,7 @@ User interface controls.
Whether to commit modified subrepositories when committing the
parent repository. If False and one subrepository has uncommitted
changes, abort the commit.
- Default is False.
+ Default is True.
``debug``
Print debugging information. True or False. Default is False.
@@ -1164,10 +1055,6 @@ User interface controls.
``remotecmd``
remote command to use for clone/push/pull operations. Default is ``hg``.
-``reportoldssl``
-  Warn if an SSL certificate is unable to be used due to using Python
- 2.5 or earlier. True or False. Default is True.
-
``report_untrusted``
Warn if a ``.hg/hgrc`` file is ignored due to not being owned by a
trusted user or group. True or False. Default is True.
@@ -1213,7 +1100,7 @@ User interface controls.
``web``
--------
+"""""""
Web interface configuration. The settings in this section apply to
both the builtin webserver (started by :hg:`serve`) and the script you
@@ -1269,6 +1156,13 @@ The full set of options is:
be present in this list. The contents of the allow_push list are
examined after the deny_push list.
+``guessmime``
+ Control MIME types for raw download of file content.
+ Set to True to let hgweb guess the content type from the file
+ extension. This will serve HTML files as ``text/html`` and might
+ allow cross-site scripting attacks when serving untrusted
+ repositories. Default is False.
+
``allow_read``
If the user has not already been denied repository access due to
the contents of deny_read, this list determines whether to grant
@@ -1294,46 +1188,30 @@ The full set of options is:
authority certificates. Environment variables and ``~user``
constructs are expanded in the filename. If specified on the
client, then it will verify the identity of remote HTTPS servers
- with these certificates.
+ with these certificates. The form must be as follows::
+
+ -----BEGIN CERTIFICATE-----
+ ... (certificate in base64 PEM encoding) ...
+ -----END CERTIFICATE-----
+ -----BEGIN CERTIFICATE-----
+ ... (certificate in base64 PEM encoding) ...
+ -----END CERTIFICATE-----
This feature is only supported when using Python 2.6 or later. If you wish
to use it with earlier versions of Python, install the backported
version of the ssl library that is available from
``http://pypi.python.org``.
+ You can use OpenSSL's CA certificate file if your platform has one.
+ On most Linux systems this will be ``/etc/ssl/certs/ca-certificates.crt``.
+ Otherwise you will have to generate this file manually.
+
To disable SSL verification temporarily, specify ``--insecure`` from
command line.
- You can use OpenSSL's CA certificate file if your platform has
- one. On most Linux systems this will be
- ``/etc/ssl/certs/ca-certificates.crt``. Otherwise you will have to
- generate this file manually. The form must be as follows::
-
- -----BEGIN CERTIFICATE-----
- ... (certificate in base64 PEM encoding) ...
- -----END CERTIFICATE-----
- -----BEGIN CERTIFICATE-----
- ... (certificate in base64 PEM encoding) ...
- -----END CERTIFICATE-----
-
``cache``
Whether to support caching in hgweb. Defaults to True.
-``collapse``
- With ``descend`` enabled, repositories in subdirectories are shown at
- a single level alongside repositories in the current path. With
- ``collapse`` also enabled, repositories residing at a deeper level than
- the current path are grouped behind navigable directory entries that
- lead to the locations of these repositories. In effect, this setting
- collapses each collection of repositories found within a subdirectory
- into a single entry for that subdirectory. Default is False.
-
-``comparisoncontext``
- Number of lines of context to show in side-by-side file comparison. If
- negative or the value ``full``, whole files are shown. Default is 5.
- This setting can be overridden by a ``context`` request parameter to the
- ``comparison`` command, taking the same values.
-
``contact``
Name or email address of the person in charge of the repository.
Defaults to ui.username or ``$EMAIL`` or "unknown" if unset or empty.
@@ -1376,13 +1254,6 @@ The full set of options is:
``errorlog``
Where to output the error log. Default is stderr.
-``guessmime``
- Control MIME types for raw download of file content.
- Set to True to let hgweb guess the content type from the file
- extension. This will serve HTML files as ``text/html`` and might
- allow cross-site scripting attacks when serving untrusted
- repositories. Default is False.
-
``hidden``
Whether to hide the repository in the hgwebdir index.
Default is False.
@@ -1390,30 +1261,20 @@ The full set of options is:
``ipv6``
Whether to use IPv6. Default is False.
-``logoimg``
- File name of the logo image that some templates display on each page.
- The file name is relative to ``staticurl``. That is, the full path to
- the logo image is "staticurl/logoimg".
- If unset, ``hglogo.png`` will be used.
-
``logourl``
Base URL to use for logos. If unset, ``http://mercurial.selenic.com/``
will be used.
+``name``
+ Repository name to use in the web interface. Default is current
+ working directory.
+
``maxchanges``
Maximum number of changes to list on the changelog. Default is 10.
``maxfiles``
Maximum number of files to list per changeset. Default is 10.
-``maxshortchanges``
- Maximum number of changes to list on the shortlog, graph or filelog
- pages. Default is 60.
-
-``name``
- Repository name to use in the web interface. Default is current
- working directory.
-
``port``
Port to listen on. Default is 8000.
diff --git a/mercurial/help/filesets.txt b/mercurial/help/filesets.txt
index afad752..c929d79 100644
--- a/mercurial/help/filesets.txt
+++ b/mercurial/help/filesets.txt
@@ -52,7 +52,7 @@ Some sample queries:
- Find C files in a non-standard encoding::
- hg locate "set:**.c and not encoding('UTF-8')"
+ hg locate "set:**.c and not encoding(ascii)"
- Revert copies of large binary files::
diff --git a/mercurial/help/glossary.txt b/mercurial/help/glossary.txt
index 59a9d03..074a31d 100644
--- a/mercurial/help/glossary.txt
+++ b/mercurial/help/glossary.txt
@@ -135,7 +135,7 @@ Child changeset
See 'Changeset, child'.
Close changeset
- See 'Head, closed branch'
+ See 'Changeset, close'.
Closed branch
See 'Branch, closed'.
@@ -212,11 +212,6 @@ Directory, working
to the files introduced manually or by a merge. The repository
metadata exists in the .hg directory inside the working directory.
-Draft
- Changesets in the draft phase have not been shared with publishing
- repositories and may thus be safely changed by history-modifying
- extensions. See :hg:`help phases`.
-
Graph
See DAG and :hg:`help graphlog`.
@@ -237,9 +232,6 @@ Head, closed branch
closed when all its heads are closed and consequently is not
listed by :hg:`branches`.
- Closed heads can be re-opened by committing new changeset as the
- child of the changeset that marks a head as closed.
-
Head, repository
A topological head which has not been closed.
@@ -308,15 +300,6 @@ Patch
Example: "You will need to patch that revision."
-Phase
- A per-changeset state tracking how the changeset has been or
- should be shared. See :hg:`help phases`.
-
-Public
- Changesets in the public phase have been shared with publishing
- repositories and are therefore considered immutable. See :hg:`help
- phases`.
-
Pull
An operation in which changesets in a remote repository which are
not in the local repository are brought into the local
@@ -368,10 +351,6 @@ Root
A changeset that has only the null changeset as its parent. Most
repositories have only a single root changeset.
-Secret
- Changesets in the secret phase may not be shared via push, pull,
- or clone. See :hg:`help phases`.
-
Tag
An alternative name given to a changeset. Tags can be used in all
places where Mercurial expects a changeset ID, e.g., with
diff --git a/mercurial/help/hgignore.txt b/mercurial/help/hgignore.txt
index 3f82177..538f4b5 100644
--- a/mercurial/help/hgignore.txt
+++ b/mercurial/help/hgignore.txt
@@ -1,12 +1,12 @@
Synopsis
-========
+--------
The Mercurial system uses a file called ``.hgignore`` in the root
directory of a repository to control its behavior when it searches
for files that it is not currently tracking.
Description
-===========
+-----------
The working directory of a Mercurial repository will often contain
files that should not be tracked by Mercurial. These include backup
@@ -33,13 +33,8 @@ To control Mercurial's handling of files that it manages, many
commands support the ``-I`` and ``-X`` options; see
:hg:`help <command>` and :hg:`help patterns` for details.
-Files that are already tracked are not affected by .hgignore, even
-if they appear in .hgignore. An untracked file X can be explicitly
-added with :hg:`add X`, even if X would be excluded by a pattern
-in .hgignore.
-
Syntax
-======
+------
An ignore file is a plain text file consisting of a list of patterns,
with one pattern per line. Empty lines are skipped. The ``#``
@@ -68,12 +63,8 @@ the form ``*.c`` will match a file ending in ``.c`` in any directory,
and a regexp pattern of the form ``\.c$`` will do the same. To root a
regexp pattern, start it with ``^``.
-.. note::
- Patterns specified in other than ``.hgignore`` are always rooted.
- Please see :hg:`help patterns` for details.
-
Example
-=======
+-------
Here is an example ignore file. ::
diff --git a/mercurial/help/hgweb.txt b/mercurial/help/hgweb.txt
index 9ac557e..fa901e6 100644
--- a/mercurial/help/hgweb.txt
+++ b/mercurial/help/hgweb.txt
@@ -1,50 +1,47 @@
Mercurial's internal web server, hgweb, can serve either a single
-repository, or a tree of repositories. In the second case, repository
-paths and global options can be defined using a dedicated
-configuration file common to :hg:`serve`, ``hgweb.wsgi``,
-``hgweb.cgi`` and ``hgweb.fcgi``.
+repository, or a collection of them. In the latter case, a special
+configuration file can be used to specify the repository paths to use
+and global web configuration options.
-This file uses the same syntax as other Mercurial configuration files
-but recognizes only the following sections:
+This file uses the same syntax as other Mercurial configuration files,
+but only the following sections are recognized:
- web
- paths
- collections
-The ``web`` options are thorougly described in :hg:`help config`.
+The ``web`` section can specify all the settings described in the web
+section of the hgrc(5) documentation. See :hg:`help config` for
+information on where to find the manual page.
-The ``paths`` section maps URL paths to paths of repositories in the
-filesystem. hgweb will not expose the filesystem directly - only
-Mercurial repositories can be published and only according to the
-configuration.
-
-The left hand side is the path in the URL. Note that hgweb reserves
-subpaths like ``rev`` or ``file``, try using different names for
-nested repositories to avoid confusing effects.
-
-The right hand side is the path in the filesystem. If the specified
-path ends with ``*`` or ``**`` the filesystem will be searched
-recursively for repositories below that point.
-With ``*`` it will not recurse into the repositories it finds (except for
-``.hg/patches``).
-With ``**`` it will also search inside repository working directories
-and possibly find subrepositories.
-
-In this example::
+The ``paths`` section provides mappings of physical repository
+paths to virtual ones. For instance::
[paths]
- /projects/a = /srv/tmprepos/a
- /projects/b = c:/repos/b
- / = /srv/repos/*
- /user/bob = /home/bob/repos/**
+ projects/a = /foo/bar
+ projects/b = /baz/quux
+ web/root = /real/root/*
+ / = /real/root2/*
+ virtual/root2 = /real/root2/**
- The first two entries make two repositories in different directories
appear under the same directory in the web interface
-- The third entry will publish every Mercurial repository found in
- ``/srv/repos/``, for instance the repository ``/srv/repos/quux/``
- will appear as ``http://server/quux/``
-- The fourth entry will publish both ``http://server/user/bob/quux/``
- and ``http://server/user/bob/quux/testsubrepo/``
-
-The ``collections`` section is deprecated and has been superseded by
-``paths``.
+- The third entry maps every Mercurial repository found in '/real/root'
+ into 'web/root'. This format is preferred over the [collections] one,
+ since using absolute paths as configuration keys is not supported on every
+ platform (especially on Windows).
+- The fourth entry is a special case mapping all repositories in
+ '/real/root2' in the root of the virtual directory.
+- The fifth entry recursively finds all repositories under the real
+ root, and maps their relative paths under the virtual root.
+
+The ``collections`` section provides mappings of trees of physical
+repositories paths to virtual ones, though the paths syntax is generally
+preferred. For instance::
+
+ [collections]
+ /foo = /foo
+
+Here, the left side will be stripped off all repositories found in the
+right side. Thus ``/foo/bar`` and ``/foo/quux/baz`` will be listed as
+``bar`` and ``quux/baz`` respectively.
diff --git a/mercurial/help/merge-tools.txt b/mercurial/help/merge-tools.txt
index b7ed1ea..7324fe4 100644
--- a/mercurial/help/merge-tools.txt
+++ b/mercurial/help/merge-tools.txt
@@ -17,7 +17,7 @@ conflict markers. Mercurial does not include any interactive merge
programs but relies on external tools for that.
Available merge tools
-=====================
+"""""""""""""""""""""
External merge tools and their properties are configured in the
merge-tools configuration section - see hgrc(5) - but they can often just
@@ -34,13 +34,39 @@ GUI is available if the tool requires a GUI.
There are some internal merge tools which can be used. The internal
merge tools are:
-.. internaltoolsmarker
+``internal:merge``
+ Uses the internal non-interactive simple merge algorithm for merging
+ files. It will fail if there are any conflicts and leave markers in
+ the partially merged file.
+
+``internal:fail``
+ Rather than attempting to merge files that were modified on both
+ branches, it marks them as unresolved. The resolve command must be
+ used to resolve these conflicts.
+
+``internal:local``
+ Uses the local version of files as the merged version.
+
+``internal:other``
+ Uses the other version of files as the merged version.
+
+``internal:prompt``
+ Asks the user which of the local or the other version to keep as
+ the merged version.
+
+``internal:dump``
+ Creates three versions of the files to merge, containing the
+ contents of local, other and base. These files can then be used to
+ perform a merge manually. If the file to be merged is named
+ ``a.txt``, these files will accordingly be named ``a.txt.local``,
+ ``a.txt.other`` and ``a.txt.base`` and they will be placed in the
+ same directory as ``a.txt``.
Internal tools are always available and do not require a GUI but will by default
not handle symlinks or binary files.
Choosing a merge tool
-=====================
+"""""""""""""""""""""
Mercurial uses these rules when deciding which merge tool to use:
diff --git a/mercurial/help/patterns.txt b/mercurial/help/patterns.txt
index 30133a3..c13453e 100644
--- a/mercurial/help/patterns.txt
+++ b/mercurial/help/patterns.txt
@@ -6,10 +6,6 @@ patterns.
Alternate pattern notations must be specified explicitly.
-.. note::
- Patterns specified in ``.hgignore`` are not rooted.
- Please see :hg:`help hgignore` for details.
-
To use a plain path name without any pattern matching, start it with
``path:``. These path names must completely match starting at the
current repository root.
diff --git a/mercurial/help/phases.txt b/mercurial/help/phases.txt
deleted file mode 100644
index 19023dc..0000000
--- a/mercurial/help/phases.txt
+++ /dev/null
@@ -1,84 +0,0 @@
-What are phases?
-================
-
-Phases are a system for tracking which changesets have been or should
-be shared. This helps prevent common mistakes when modifying history
-(for instance, with the mq or rebase extensions).
-
-Each changeset in a repository is in one of the following phases:
-
- - public : changeset is visible on a public server
- - draft : changeset is not yet published
- - secret : changeset should not be pushed, pulled, or cloned
-
-These phases are ordered (public < draft < secret) and no changeset
-can be in a lower phase than its ancestors. For instance, if a
-changeset is public, all its ancestors are also public. Lastly,
-changeset phases should only be changed towards the public phase.
-
-How are phases managed?
-=======================
-
-For the most part, phases should work transparently. By default, a
-changeset is created in the draft phase and is moved into the public
-phase when it is pushed to another repository.
-
-Once changesets become public, extensions like mq and rebase will
-refuse to operate on them to prevent creating duplicate changesets.
-Phases can also be manually manipulated with the :hg:`phase` command
-if needed. See :hg:`help -v phase` for examples.
-
-Phases and servers
-==================
-
-Normally, all servers are ``publishing`` by default. This means::
-
- - all draft changesets that are pulled or cloned appear in phase
- public on the client
-
- - all draft changesets that are pushed appear as public on both
- client and server
-
- - secret changesets are neither pushed, pulled, or cloned
-
-.. note::
- Pulling a draft changeset from a publishing server does not mark it
- as public on the server side due to the read-only nature of pull.
-
-Sometimes it may be desirable to push and pull changesets in the draft
-phase to share unfinished work. This can be done by setting a
-repository to disable publishing in its configuration file::
-
- [phases]
- publish = False
-
-See :hg:`help config` for more information on config files.
-
-.. note::
- Servers running older versions of Mercurial are treated as
- publishing.
-
-Examples
-========
-
- - list changesets in draft or secret phase::
-
- hg log -r "not public()"
-
- - change all secret changesets to draft::
-
- hg phase --draft "secret()"
-
- - forcibly move the current changeset and descendants from public to draft::
-
- hg phase --force --draft .
-
- - show a list of changeset revision and phase::
-
- hg log --template "{rev} {phase}\n"
-
- - resynchronize draft changesets relative to a remote repository::
-
- hg phase -fd 'outgoing(URL)'
-
-See :hg:`help phase` for more information on manually manipulating phases.
diff --git a/mercurial/help/revisions.txt b/mercurial/help/revisions.txt
index f0c2cb2..309f8e2 100644
--- a/mercurial/help/revisions.txt
+++ b/mercurial/help/revisions.txt
@@ -12,13 +12,13 @@ unique revision identifier and is referred to as a short-form
identifier. A short-form identifier is only valid if it is the prefix
of exactly one full-length identifier.
-Any other string is treated as a bookmark, tag, or branch name. A
-bookmark is a movable pointer to a revision. A tag is a permanent name
-associated with a revision. A branch name denotes the tipmost revision
-of that branch. Bookmark, tag, and branch names must not contain the ":"
-character.
+Any other string is treated as a tag or branch name. A tag name is a
+symbolic name associated with a revision identifier. A branch name
+denotes the tipmost revision of that branch. Tag and branch names must
+not contain the ":" character.
-The reserved name "tip" always identifies the most recent revision.
+The reserved name "tip" is a special tag that always identifies the
+most recent revision.
The reserved name "null" indicates the null revision. This is the
revision of an empty repository, and the parent of revision 0.
diff --git a/mercurial/help/revsets.txt b/mercurial/help/revsets.txt
index 6511668..48838c3 100644
--- a/mercurial/help/revsets.txt
+++ b/mercurial/help/revsets.txt
@@ -4,9 +4,10 @@ revisions.
The language supports a number of predicates which are joined by infix
operators. Parenthesis can be used for grouping.
-Identifiers such as branch names may need quoting with single or
-double quotes if they contain characters like ``-`` or if they match
-one of the predefined predicates.
+Identifiers such as branch names must be quoted with single or double
+quotes if they contain characters outside of
+``[._a-zA-Z0-9\x80-\xff]`` or if they match one of the predefined
+predicates.
Special characters can be used in quoted identifiers by escaping them,
e.g., ``\n`` is interpreted as a newline. To prevent them from being
diff --git a/mercurial/help/subrepos.txt b/mercurial/help/subrepos.txt
index bf5925a..fb3217d 100644
--- a/mercurial/help/subrepos.txt
+++ b/mercurial/help/subrepos.txt
@@ -1,38 +1,32 @@
Subrepositories let you nest external repositories or projects into a
parent Mercurial repository, and make commands operate on them as a
-group.
-
-Mercurial currently supports Mercurial, Git, and Subversion
-subrepositories.
+group. External Mercurial and Subversion projects are currently
+supported.
Subrepositories are made of three components:
1. Nested repository checkouts. They can appear anywhere in the
- parent working directory.
+ parent working directory, and are Mercurial clones or Subversion
+ checkouts.
-2. Nested repository references. They are defined in ``.hgsub``, which
- should be placed in the root of working directory, and
+2. Nested repository references. They are defined in ``.hgsub`` and
tell where the subrepository checkouts come from. Mercurial
subrepositories are referenced like:
path/to/nested = https://example.com/nested/repo/path
- Git and Subversion subrepos are also supported:
-
- path/to/nested = [git]git://example.com/nested/repo/path
- path/to/nested = [svn]https://example.com/nested/trunk/path
-
where ``path/to/nested`` is the checkout location relatively to the
parent Mercurial root, and ``https://example.com/nested/repo/path``
is the source repository path. The source can also reference a
- filesystem path.
+ filesystem path. Subversion repositories are defined with:
+
+ path/to/nested = [svn]https://example.com/nested/trunk/path
Note that ``.hgsub`` does not exist by default in Mercurial
repositories, you have to create and add it to the parent
repository before using subrepositories.
-3. Nested repository states. They are defined in ``.hgsubstate``, which
- is placed in the root of working directory, and
+3. Nested repository states. They are defined in ``.hgsubstate`` and
capture whatever information is required to restore the
subrepositories to the state they were committed in a parent
   repository changeset. Mercurial automatically records the nested
@@ -43,7 +37,7 @@ Subrepositories are made of three components:
Adding a Subrepository
-======================
+----------------------
If ``.hgsub`` does not exist, create it and add it to the parent
repository. Clone or checkout the external projects where you want it
@@ -53,7 +47,7 @@ subrepository is tracked and the next commit will record its state in
``.hgsubstate`` and bind it to the committed changeset.
Synchronizing a Subrepository
-=============================
+-----------------------------
Subrepos do not automatically track the latest changeset of their
sources. Instead, they are updated to the changeset that corresponds
@@ -66,47 +60,41 @@ subrepo at the desired revision, test in the top-level repo, then
commit in the parent repository to record the new combination.
Deleting a Subrepository
-========================
+------------------------
To remove a subrepository from the parent repository, delete its
reference from ``.hgsub``, then remove its files.
Interaction with Mercurial Commands
-===================================
+-----------------------------------
:add: add does not recurse in subrepos unless -S/--subrepos is
- specified. However, if you specify the full path of a file in a
- subrepo, it will be added even without -S/--subrepos specified.
- Git and Subversion subrepositories are currently silently
+ specified. Subversion subrepositories are currently silently
ignored.
:archive: archive does not recurse in subrepositories unless
-S/--subrepos is specified.
:commit: commit creates a consistent snapshot of the state of the
- entire project and its subrepositories. If any subrepositories
- have been modified, Mercurial will abort. Mercurial can be made
- to instead commit all modified subrepositories by specifying
- -S/--subrepos, or setting "ui.commitsubrepos=True" in a
- configuration file (see :hg:`help config`). After there are no
- longer any modified subrepositories, it records their state and
- finally commits it in the parent repository.
+ entire project and its subrepositories. It does this by first
+ attempting to commit all modified subrepositories, then recording
+ their state and finally committing it in the parent
+ repository. Mercurial can be made to abort if any subrepository
+ content is modified by setting "ui.commitsubrepos=no" in a
+ configuration file (see :hg:`help config`).
:diff: diff does not recurse in subrepos unless -S/--subrepos is
specified. Changes are displayed as usual, on the subrepositories
- elements. Git and Subversion subrepositories are currently
- silently ignored.
-
-:forget: forget currently only handles exact file matches in subrepos.
- Git and Subversion subrepositories are currently silently ignored.
+ elements. Subversion subrepositories are currently silently
+ ignored.
:incoming: incoming does not recurse in subrepos unless -S/--subrepos
- is specified. Git and Subversion subrepositories are currently
- silently ignored.
+ is specified. Subversion subrepositories are currently silently
+ ignored.
:outgoing: outgoing does not recurse in subrepos unless -S/--subrepos
- is specified. Git and Subversion subrepositories are currently
- silently ignored.
+ is specified. Subversion subrepositories are currently silently
+ ignored.
:pull: pull is not recursive since it is not clear what to pull prior
to running :hg:`update`. Listing and retrieving all
@@ -117,7 +105,7 @@ Interaction with Mercurial Commands
:push: Mercurial will automatically push all subrepositories first
when the parent repository is being pushed. This ensures new
subrepository changes are available when referenced by top-level
- repositories. Push is a no-op for Subversion subrepositories.
+ repositories.
:status: status does not recurse into subrepositories unless
-S/--subrepos is specified. Subrepository changes are displayed as
@@ -132,7 +120,7 @@ Interaction with Mercurial Commands
can require network access when using subrepositories.
Remapping Subrepositories Sources
-=================================
+---------------------------------
A subrepository source location may change during a project life,
invalidating references stored in the parent repository history. To
diff --git a/mercurial/hg.py b/mercurial/hg.py
index 7d452df..0f37d26 100644
--- a/mercurial/hg.py
+++ b/mercurial/hg.py
@@ -9,8 +9,8 @@
from i18n import _
from lock import release
from node import hex, nullid
-import localrepo, bundlerepo, httppeer, sshpeer, statichttprepo, bookmarks
-import lock, util, extensions, error, node, scmutil
+import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo, bookmarks
+import lock, util, extensions, error, node
import cmdutil, discovery
import merge as mergemod
import verify as verifymod
@@ -20,22 +20,21 @@ def _local(path):
path = util.expandpath(util.urllocalpath(path))
return (os.path.isfile(path) and bundlerepo or localrepo)
-def addbranchrevs(lrepo, other, branches, revs):
- peer = other.peer() # a courtesy to callers using a localrepo for other
+def addbranchrevs(lrepo, repo, branches, revs):
hashbranch, branches = branches
if not hashbranch and not branches:
return revs or None, revs and revs[0] or None
revs = revs and list(revs) or []
- if not peer.capable('branchmap'):
+ if not repo.capable('branchmap'):
if branches:
raise util.Abort(_("remote branch lookup not supported"))
revs.append(hashbranch)
return revs, revs[0]
- branchmap = peer.branchmap()
+ branchmap = repo.branchmap()
def primary(branch):
if branch == '.':
- if not lrepo:
+ if not lrepo or not lrepo.local():
raise util.Abort(_("dirstate branch not accessible"))
branch = lrepo.dirstate.branch()
if branch in branchmap:
@@ -65,9 +64,9 @@ def parseurl(path, branches=None):
schemes = {
'bundle': bundlerepo,
'file': _local,
- 'http': httppeer,
- 'https': httppeer,
- 'ssh': sshpeer,
+ 'http': httprepo,
+ 'https': httprepo,
+ 'ssh': sshrepo,
'static-http': statichttprepo,
}
@@ -89,29 +88,20 @@ def islocal(repo):
return False
return repo.local()
-def _peerorrepo(ui, path, create=False):
+def repository(ui, path='', create=False):
"""return a repository object for the specified path"""
- obj = _peerlookup(path).instance(ui, path, create)
- ui = getattr(obj, "ui", ui)
+ repo = _peerlookup(path).instance(ui, path, create)
+ ui = getattr(repo, "ui", ui)
for name, module in extensions.extensions():
hook = getattr(module, 'reposetup', None)
if hook:
- hook(ui, obj)
- return obj
-
-def repository(ui, path='', create=False):
- """return a repository object for the specified path"""
- peer = _peerorrepo(ui, path, create)
- repo = peer.local()
- if not repo:
- raise util.Abort(_("repository '%s' is not local") %
- (path or peer.url()))
+ hook(ui, repo)
return repo
-def peer(uiorrepo, opts, path, create=False):
+def peer(ui, opts, path, create=False):
'''return a repository peer for the specified path'''
- rui = remoteui(uiorrepo, opts)
- return _peerorrepo(rui, path, create).peer()
+ rui = remoteui(ui, opts)
+ return repository(rui, path, create)
def defaultdest(source):
'''return default destination of clone if none is given'''
@@ -134,7 +124,7 @@ def share(ui, source, dest=None, update=True):
srcrepo = repository(ui, source)
rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None)
else:
- srcrepo = source.local()
+ srcrepo = source
origsource = source = srcrepo.url()
checkout = None
@@ -184,46 +174,13 @@ def share(ui, source, dest=None, update=True):
continue
_update(r, uprev)
-def copystore(ui, srcrepo, destpath):
- '''copy files from store of srcrepo in destpath
-
- returns destlock
- '''
- destlock = None
- try:
- hardlink = None
- num = 0
- srcpublishing = srcrepo.ui.configbool('phases', 'publish', True)
- for f in srcrepo.store.copylist():
- if srcpublishing and f.endswith('phaseroots'):
- continue
- src = os.path.join(srcrepo.sharedpath, f)
- dst = os.path.join(destpath, f)
- dstbase = os.path.dirname(dst)
- if dstbase and not os.path.exists(dstbase):
- os.mkdir(dstbase)
- if os.path.exists(src):
- if dst.endswith('data'):
- # lock to avoid premature writing to the target
- destlock = lock.lock(os.path.join(dstbase, "lock"))
- hardlink, n = util.copyfiles(src, dst, hardlink)
- num += n
- if hardlink:
- ui.debug("linked %d files\n" % num)
- else:
- ui.debug("copied %d files\n" % num)
- return destlock
- except: # re-raises
- release(destlock)
- raise
-
def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
update=True, stream=False, branch=None):
"""Make a copy of an existing repository.
Create a copy of an existing repository in a new directory. The
source and destination are URLs, as passed to the repository
- function. Returns a pair of repository peers, the source and
+ function. Returns a pair of repository objects, the source and
newly created destination.
The location of the source is added to the new repository's
@@ -257,12 +214,12 @@ def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
if isinstance(source, str):
origsource = ui.expandpath(source)
source, branch = parseurl(origsource, branch)
- srcpeer = peer(ui, peeropts, source)
+ srcrepo = repository(remoteui(ui, peeropts), source)
else:
- srcpeer = source.peer() # in case we were called with a localrepo
+ srcrepo = source
branch = (None, branch or [])
- origsource = source = srcpeer.url()
- rev, checkout = addbranchrevs(srcpeer, srcpeer, branch, rev)
+ origsource = source = srcrepo.url()
+ rev, checkout = addbranchrevs(srcrepo, srcrepo, branch, rev)
if dest is None:
dest = defaultdest(source)
@@ -273,8 +230,6 @@ def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
dest = util.urllocalpath(dest)
source = util.urllocalpath(source)
- if not dest:
- raise util.Abort(_("empty destination path is not valid"))
if os.path.exists(dest):
if not os.path.isdir(dest):
raise util.Abort(_("destination '%s' already exists") % dest)
@@ -292,7 +247,6 @@ def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
self.rmtree(self.dir_, True)
srclock = destlock = dircleanup = None
- srcrepo = srcpeer.local()
try:
abspath = origsource
if islocal(origsource):
@@ -302,8 +256,7 @@ def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
dircleanup = DirCleanup(dest)
copy = False
- if (srcrepo and srcrepo.cancopy() and islocal(dest)
- and not srcrepo.revs("secret()")):
+ if srcrepo.cancopy() and islocal(dest):
copy = not pull and not rev
if copy:
@@ -334,16 +287,34 @@ def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
% dest)
raise
- destlock = copystore(ui, srcrepo, destpath)
+ hardlink = None
+ num = 0
+ for f in srcrepo.store.copylist():
+ src = os.path.join(srcrepo.sharedpath, f)
+ dst = os.path.join(destpath, f)
+ dstbase = os.path.dirname(dst)
+ if dstbase and not os.path.exists(dstbase):
+ os.mkdir(dstbase)
+ if os.path.exists(src):
+ if dst.endswith('data'):
+ # lock to avoid premature writing to the target
+ destlock = lock.lock(os.path.join(dstbase, "lock"))
+ hardlink, n = util.copyfiles(src, dst, hardlink)
+ num += n
+ if hardlink:
+ ui.debug("linked %d files\n" % num)
+ else:
+ ui.debug("copied %d files\n" % num)
# we need to re-init the repo after manually copying the data
# into it
- destpeer = peer(ui, peeropts, dest)
+ destrepo = repository(remoteui(ui, peeropts), dest)
srcrepo.hook('outgoing', source='clone',
node=node.hex(node.nullid))
else:
try:
- destpeer = peer(ui, peeropts, dest, create=True)
+ destrepo = repository(remoteui(ui, peeropts), dest,
+ create=True)
except OSError, inst:
if inst.errno == errno.EEXIST:
dircleanup.close()
@@ -353,52 +324,35 @@ def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
revs = None
if rev:
- if not srcpeer.capable('lookup'):
+ if not srcrepo.capable('lookup'):
raise util.Abort(_("src repository does not support "
"revision lookup and so doesn't "
"support clone by revision"))
- revs = [srcpeer.lookup(r) for r in rev]
+ revs = [srcrepo.lookup(r) for r in rev]
checkout = revs[0]
- if destpeer.local():
- destpeer.local().clone(srcpeer, heads=revs, stream=stream)
- elif srcrepo:
- srcrepo.push(destpeer, revs=revs)
+ if destrepo.local():
+ destrepo.clone(srcrepo, heads=revs, stream=stream)
+ elif srcrepo.local():
+ srcrepo.push(destrepo, revs=revs)
else:
raise util.Abort(_("clone from remote to remote not supported"))
if dircleanup:
dircleanup.close()
- # clone all bookmarks except divergent ones
- destrepo = destpeer.local()
- if destrepo and srcpeer.capable("pushkey"):
- rb = srcpeer.listkeys('bookmarks')
- for k, n in rb.iteritems():
- try:
- m = destrepo.lookup(n)
- destrepo._bookmarks[k] = m
- except error.RepoLookupError:
- pass
- if rb:
- bookmarks.write(destrepo)
- elif srcrepo and destpeer.capable("pushkey"):
- for k, n in srcrepo._bookmarks.iteritems():
- destpeer.pushkey('bookmarks', k, '', hex(n))
-
- if destrepo:
+ if destrepo.local():
fp = destrepo.opener("hgrc", "w", text=True)
fp.write("[paths]\n")
- u = util.url(abspath)
- u.passwd = None
- defaulturl = str(u)
- fp.write("default = %s\n" % defaulturl)
+ fp.write("default = %s\n" % abspath)
fp.close()
- destrepo.ui.setconfig('paths', 'default', defaulturl)
+ destrepo.ui.setconfig('paths', 'default', abspath)
if update:
if update is not True:
- checkout = srcrepo.lookup(update)
+ checkout = update
+ if srcrepo.local():
+ checkout = srcrepo.lookup(update)
for test in (checkout, 'default', 'tip'):
if test is None:
continue
@@ -411,13 +365,26 @@ def clone(ui, peeropts, source, dest=None, pull=False, rev=None,
destrepo.ui.status(_("updating to branch %s\n") % bn)
_update(destrepo, uprev)
- return srcpeer, destpeer
+ # clone all bookmarks
+ if destrepo.local() and srcrepo.capable("pushkey"):
+ rb = srcrepo.listkeys('bookmarks')
+ for k, n in rb.iteritems():
+ try:
+ m = destrepo.lookup(n)
+ destrepo._bookmarks[k] = m
+ except error.RepoLookupError:
+ pass
+ if rb:
+ bookmarks.write(destrepo)
+ elif srcrepo.local() and destrepo.capable("pushkey"):
+ for k, n in srcrepo._bookmarks.iteritems():
+ destrepo.pushkey('bookmarks', k, '', hex(n))
+
+ return srcrepo, destrepo
finally:
release(srclock, destlock)
if dircleanup is not None:
dircleanup.cleanup()
- if srcpeer is not None:
- srcpeer.close()
def _showstats(repo, stats):
repo.ui.status(_("%d files updated, %d files merged, "
@@ -518,14 +485,14 @@ def _outgoing(ui, repo, dest, opts):
ui.status(_('comparing with %s\n') % util.hidepassword(dest))
revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev'))
if revs:
- revs = [repo.lookup(rev) for rev in scmutil.revrange(repo, revs)]
+ revs = [repo.lookup(rev) for rev in revs]
other = peer(repo, opts, dest)
- outgoing = discovery.findcommonoutgoing(repo, other, revs,
- force=opts.get('force'))
- o = outgoing.missing
+ common, outheads = discovery.findcommonoutgoing(repo, other, revs,
+ force=opts.get('force'))
+ o = repo.changelog.findmissing(common, outheads)
if not o:
- scmutil.nochangesfound(repo.ui, repo, outgoing.excluded)
+ ui.status(_("no changes found\n"))
return None
return o
@@ -570,7 +537,7 @@ def verify(repo):
def remoteui(src, opts):
'build a remote ui from ui or repo and opts'
- if util.safehasattr(src, 'baseui'): # looks like a repository
+ if hasattr(src, 'baseui'): # looks like a repository
dst = src.baseui.copy() # drop repo-specific config
src = src.ui # copy target options from repo
else: # assume it's a global ui object
diff --git a/mercurial/hgweb/common.py b/mercurial/hgweb/common.py
index 3628c36..2c2c76c 100644
--- a/mercurial/hgweb/common.py
+++ b/mercurial/hgweb/common.py
@@ -95,7 +95,7 @@ class continuereader(object):
def __getattr__(self, attr):
if attr in ('close', 'readline', 'readlines', '__iter__'):
return getattr(self.f, attr)
- raise AttributeError
+ raise AttributeError()
def _statusmessage(code):
from BaseHTTPServer import BaseHTTPRequestHandler
diff --git a/mercurial/hgweb/hgweb_mod.py b/mercurial/hgweb/hgweb_mod.py
index 63a9c6c..bf24d0c 100644
--- a/mercurial/hgweb/hgweb_mod.py
+++ b/mercurial/hgweb/hgweb_mod.py
@@ -7,7 +7,7 @@
# GNU General Public License version 2 or any later version.
import os
-from mercurial import ui, hg, hook, error, encoding, templater, util
+from mercurial import ui, hg, hook, error, encoding, templater
from common import get_stat, ErrorResponse, permhooks, caching
from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST
from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR
@@ -36,7 +36,7 @@ class hgweb(object):
self.repo = repo
self.repo.ui.setconfig('ui', 'report_untrusted', 'off')
- self.repo.ui.setconfig('ui', 'nontty', 'true')
+ self.repo.ui.setconfig('ui', 'interactive', 'off')
hook.redirect(True)
self.mtime = -1
self.size = -1
@@ -73,8 +73,7 @@ class hgweb(object):
self.repo = hg.repository(self.repo.ui, self.repo.root)
self.maxchanges = int(self.config("web", "maxchanges", 10))
self.stripecount = int(self.config("web", "stripes", 1))
- self.maxshortchanges = int(self.config("web", "maxshortchanges",
- 60))
+ self.maxshortchanges = int(self.config("web", "maxshortchanges", 60))
self.maxfiles = int(self.config("web", "maxfiles", 10))
self.allowpull = self.configbool("web", "allowpull", True)
encoding.encoding = self.config("web", "encoding",
@@ -149,7 +148,7 @@ class hgweb(object):
cmd = cmd[style + 1:]
# avoid accepting e.g. style parameter as command
- if util.safehasattr(webcommands, cmd):
+ if hasattr(webcommands, cmd):
req.form['cmd'] = [cmd]
else:
cmd = ''
@@ -237,7 +236,6 @@ class hgweb(object):
port = port != default_port and (":" + port) or ""
urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port)
logourl = self.config("web", "logourl", "http://mercurial.selenic.com/")
- logoimg = self.config("web", "logoimg", "hglogo.png")
staticurl = self.config("web", "staticurl") or req.url + 'static/'
if not staticurl.endswith('/'):
staticurl += '/'
@@ -278,7 +276,6 @@ class hgweb(object):
tmpl = templater.templater(mapfile,
defaults={"url": req.url,
"logourl": logourl,
- "logoimg": logoimg,
"staticurl": staticurl,
"urlbase": urlbase,
"repo": self.reponame,
diff --git a/mercurial/hgweb/hgwebdir_mod.py b/mercurial/hgweb/hgwebdir_mod.py
index 7cb083e..c5db7ff 100644
--- a/mercurial/hgweb/hgwebdir_mod.py
+++ b/mercurial/hgweb/hgwebdir_mod.py
@@ -23,10 +23,10 @@ def findrepos(paths):
repos = []
for prefix, root in cleannames(paths):
roothead, roottail = os.path.split(root)
- # "foo = /bar/*" or "foo = /bar/**" lets every repo /bar/N in or below
- # /bar/ be served as foo/N.
- # '*' will not search inside dirs with .hg (except .hg/patches),
- # '**' will search inside dirs with .hg (and thus also find subrepos).
+ # "foo = /bar/*" makes every subrepo of /bar/ be
+ # mounted as foo/subrepo
+ # and "foo = /bar/**" also recurses into the subdirectories,
+ # remember to use it without working dir.
try:
recurse = {'*': False, '**': True}[roottail]
except KeyError:
@@ -51,33 +51,6 @@ def urlrepos(prefix, roothead, paths):
yield (prefix + '/' +
util.pconvert(path[len(roothead):]).lstrip('/')).strip('/'), path
-def geturlcgivars(baseurl, port):
- """
- Extract CGI variables from baseurl
-
- >>> geturlcgivars("http://host.org/base", "80")
- ('host.org', '80', '/base')
- >>> geturlcgivars("http://host.org:8000/base", "80")
- ('host.org', '8000', '/base')
- >>> geturlcgivars('/base', 8000)
- ('', '8000', '/base')
- >>> geturlcgivars("base", '8000')
- ('', '8000', '/base')
- >>> geturlcgivars("http://host", '8000')
- ('host', '8000', '/')
- >>> geturlcgivars("http://host/", '8000')
- ('host', '8000', '/')
- """
- u = util.url(baseurl)
- name = u.host or ''
- if u.port:
- port = u.port
- path = u.path or ""
- if not path.startswith('/'):
- path = '/' + path
-
- return name, str(port), path
-
class hgwebdir(object):
refreshinterval = 20
@@ -97,7 +70,7 @@ class hgwebdir(object):
else:
u = ui.ui()
u.setconfig('ui', 'report_untrusted', 'off')
- u.setconfig('ui', 'nontty', 'true')
+ u.setconfig('ui', 'interactive', 'off')
if not isinstance(self.conf, (dict, list, tuple)):
map = {'paths': 'hgweb-paths'}
@@ -245,67 +218,12 @@ class hgwebdir(object):
def rawentries(subdir="", **map):
descend = self.ui.configbool('web', 'descend', True)
- collapse = self.ui.configbool('web', 'collapse', False)
- seenrepos = set()
- seendirs = set()
for name, path in self.repos:
if not name.startswith(subdir):
continue
name = name[len(subdir):]
- directory = False
-
- if '/' in name:
- if not descend:
- continue
-
- nameparts = name.split('/')
- rootname = nameparts[0]
-
- if not collapse:
- pass
- elif rootname in seendirs:
- continue
- elif rootname in seenrepos:
- pass
- else:
- directory = True
- name = rootname
-
- # redefine the path to refer to the directory
- discarded = '/'.join(nameparts[1:])
-
- # remove name parts plus accompanying slash
- path = path[:-len(discarded) - 1]
-
- parts = [name]
- if 'PATH_INFO' in req.env:
- parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
- if req.env['SCRIPT_NAME']:
- parts.insert(0, req.env['SCRIPT_NAME'])
- url = re.sub(r'/+', '/', '/'.join(parts) + '/')
-
- # show either a directory entry or a repository
- if directory:
- # get the directory's time information
- try:
- d = (get_mtime(path), util.makedate()[1])
- except OSError:
- continue
-
- row = dict(contact="",
- contact_sort="",
- name=name,
- name_sort=name,
- url=url,
- description="",
- description_sort="",
- lastchange=d,
- lastchange_sort=d[1]-d[0],
- archives=[])
-
- seendirs.add(name)
- yield row
+ if not descend and '/' in name:
continue
u = self.ui.copy()
@@ -323,6 +241,13 @@ class hgwebdir(object):
if not self.read_allowed(u, req):
continue
+ parts = [name]
+ if 'PATH_INFO' in req.env:
+ parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
+ if req.env['SCRIPT_NAME']:
+ parts.insert(0, req.env['SCRIPT_NAME'])
+ url = re.sub(r'/+', '/', '/'.join(parts) + '/')
+
# update time with local timezone
try:
r = hg.repository(self.ui, path)
@@ -350,8 +275,6 @@ class hgwebdir(object):
lastchange=d,
lastchange_sort=d[1]-d[0],
archives=archivelist(u, "tip", url))
-
- seenrepos.add(name)
yield row
sortdefault = None, False
@@ -425,7 +348,6 @@ class hgwebdir(object):
start = url[-1] == '?' and '&' or '?'
sessionvars = webutil.sessionvars(vars, start)
logourl = config('web', 'logourl', 'http://mercurial.selenic.com/')
- logoimg = config('web', 'logoimg', 'hglogo.png')
staticurl = config('web', 'staticurl') or url + 'static/'
if not staticurl.endswith('/'):
staticurl += '/'
@@ -436,14 +358,17 @@ class hgwebdir(object):
"motd": motd,
"url": url,
"logourl": logourl,
- "logoimg": logoimg,
"staticurl": staticurl,
"sessionvars": sessionvars})
return tmpl
def updatereqenv(self, env):
if self._baseurl is not None:
- name, port, path = geturlcgivars(self._baseurl, env['SERVER_PORT'])
- env['SERVER_NAME'] = name
- env['SERVER_PORT'] = port
+ u = util.url(self._baseurl)
+ env['SERVER_NAME'] = u.host
+ if u.port:
+ env['SERVER_PORT'] = u.port
+ path = u.path or ""
+ if not path.startswith('/'):
+ path = '/' + path
env['SCRIPT_NAME'] = path
diff --git a/mercurial/hgweb/protocol.py b/mercurial/hgweb/protocol.py
index 84aa6b1..dcceae9 100644
--- a/mercurial/hgweb/protocol.py
+++ b/mercurial/hgweb/protocol.py
@@ -10,7 +10,6 @@ from mercurial import util, wireproto
from common import HTTP_OK
HGTYPE = 'application/mercurial-0.1'
-HGERRTYPE = 'application/hg-error'
class webproto(object):
def __init__(self, req, ui):
@@ -91,7 +90,3 @@ def call(repo, req, cmd):
rsp = '0\n%s\n' % rsp.res
req.respond(HTTP_OK, HGTYPE, length=len(rsp))
return [rsp]
- elif isinstance(rsp, wireproto.ooberror):
- rsp = rsp.message
- req.respond(HTTP_OK, HGERRTYPE, length=len(rsp))
- return [rsp]
diff --git a/mercurial/hgweb/request.py b/mercurial/hgweb/request.py
index 1fddc95..8d2c11c 100644
--- a/mercurial/hgweb/request.py
+++ b/mercurial/hgweb/request.py
@@ -101,7 +101,7 @@ class wsgirequest(object):
self.headers = []
def write(self, thing):
- if util.safehasattr(thing, "__iter__"):
+ if hasattr(thing, "__iter__"):
for part in thing:
self.write(part)
else:
diff --git a/mercurial/hgweb/server.py b/mercurial/hgweb/server.py
index 3ad99f4..9d363f0 100644
--- a/mercurial/hgweb/server.py
+++ b/mercurial/hgweb/server.py
@@ -246,10 +246,9 @@ class _httprequesthandlerssl(_httprequesthandler):
try:
from threading import activeCount
- activeCount() # silence pyflakes
_mixin = SocketServer.ThreadingMixIn
except ImportError:
- if util.safehasattr(os, "fork"):
+ if hasattr(os, "fork"):
_mixin = SocketServer.ForkingMixIn
else:
class _mixin(object):
diff --git a/mercurial/hgweb/webcommands.py b/mercurial/hgweb/webcommands.py
index 9cd5c0a..fc43ca3 100644
--- a/mercurial/hgweb/webcommands.py
+++ b/mercurial/hgweb/webcommands.py
@@ -8,11 +8,11 @@
import os, mimetypes, re, cgi, copy
import webutil
from mercurial import error, encoding, archival, templater, templatefilters
-from mercurial.node import short, hex, nullid
+from mercurial.node import short, hex
from mercurial.util import binary
from common import paritygen, staticfile, get_contact, ErrorResponse
from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
-from mercurial import graphmod, patch
+from mercurial import graphmod
from mercurial import help as helpmod
from mercurial.i18n import _
@@ -22,7 +22,7 @@ from mercurial.i18n import _
__all__ = [
'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
'manifest', 'tags', 'bookmarks', 'branches', 'summary', 'filediff', 'diff',
- 'comparison', 'annotate', 'filelog', 'archive', 'static', 'graph', 'help',
+ 'annotate', 'filelog', 'archive', 'static', 'graph', 'help',
]
def log(web, req, tmpl):
@@ -124,8 +124,7 @@ def _search(web, req, tmpl):
def changelist(**map):
count = 0
- lower = encoding.lower
- qw = lower(query).split()
+ qw = query.lower().split()
def revgen():
for i in xrange(len(web.repo) - 1, 0, -100):
@@ -140,9 +139,9 @@ def _search(web, req, tmpl):
for ctx in revgen():
miss = 0
for q in qw:
- if not (q in lower(ctx.user()) or
- q in lower(ctx.description()) or
- q in lower(" ".join(ctx.files()))):
+ if not (q in ctx.user().lower() or
+ q in ctx.description().lower() or
+ q in " ".join(ctx.files()).lower()):
miss = 1
break
if miss:
@@ -262,10 +261,10 @@ def changeset(web, req, tmpl):
files = []
parity = paritygen(web.stripecount)
- for blockno, f in enumerate(ctx.files()):
+ for f in ctx.files():
template = f in ctx and 'filenodelink' or 'filenolink'
files.append(tmpl(template,
- node=ctx.hex(), file=f, blockno=blockno + 1,
+ node=ctx.hex(), file=f,
parity=parity.next()))
style = web.config('web', 'style', 'paper')
@@ -303,14 +302,6 @@ def changeset(web, req, tmpl):
rev = changeset
-def decodepath(path):
- """Hook for mapping a path in the repository to a path in the
- working copy.
-
- Extensions (e.g., largefiles) can override this to remap files in
- the virtual file system presented by the manifest command below."""
- return path
-
def manifest(web, req, tmpl):
ctx = webutil.changectx(web.repo, req)
path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
@@ -326,17 +317,13 @@ def manifest(web, req, tmpl):
l = len(path)
abspath = "/" + path
- for full, n in mf.iteritems():
- # the virtual path (working copy path) used for the full
- # (repository) path
- f = decodepath(full)
-
+ for f, n in mf.iteritems():
if f[:l] != path:
continue
remain = f[l:]
elements = remain.split('/')
if len(elements) == 1:
- files[remain] = full
+ files[remain] = f
else:
h = dirs # need to retain ref to dirs (root)
for elem in elements[0:-1]:
@@ -394,7 +381,8 @@ def manifest(web, req, tmpl):
branches=webutil.nodebranchdict(web.repo, ctx))
def tags(web, req, tmpl):
- i = reversed(web.repo.tagslist())
+ i = web.repo.tagslist()
+ i.reverse()
parity = paritygen(web.stripecount)
def entries(notip=False, limit=0, **map):
@@ -440,7 +428,7 @@ def branches(web, req, tmpl):
tips = (web.repo[n] for t, n in web.repo.branchtags().iteritems())
heads = web.repo.heads()
parity = paritygen(web.stripecount)
- sortkey = lambda ctx: (not ctx.closesbranch(), ctx.rev())
+ sortkey = lambda ctx: ('close' not in ctx.extra(), ctx.rev())
def entries(limit, **map):
count = 0
@@ -465,7 +453,8 @@ def branches(web, req, tmpl):
latestentry=lambda **x: entries(1, **x))
def summary(web, req, tmpl):
- i = reversed(web.repo.tagslist())
+ i = web.repo.tagslist()
+ i.reverse()
def tagentries(**map):
parity = paritygen(web.stripecount)
@@ -556,7 +545,6 @@ def filediff(web, req, tmpl):
if fctx is not None:
n = fctx.node()
path = fctx.path()
- ctx = fctx.changectx()
else:
n = ctx.node()
# path already defined in except clause
@@ -566,7 +554,7 @@ def filediff(web, req, tmpl):
if 'style' in req.form:
style = req.form['style'][0]
- diffs = webutil.diffs(web.repo, tmpl, ctx, [path], parity, style)
+ diffs = webutil.diffs(web.repo, tmpl, fctx or ctx, [path], parity, style)
rename = fctx and webutil.renamelink(fctx) or []
ctx = fctx and fctx or ctx
return tmpl("filediff",
@@ -584,74 +572,10 @@ def filediff(web, req, tmpl):
diff = filediff
-def comparison(web, req, tmpl):
- ctx = webutil.changectx(web.repo, req)
- if 'file' not in req.form:
- raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
- path = webutil.cleanpath(web.repo, req.form['file'][0])
- rename = path in ctx and webutil.renamelink(ctx[path]) or []
-
- parsecontext = lambda v: v == 'full' and -1 or int(v)
- if 'context' in req.form:
- context = parsecontext(req.form['context'][0])
- else:
- context = parsecontext(web.config('web', 'comparisoncontext', '5'))
-
- def filelines(f):
- if binary(f.data()):
- mt = mimetypes.guess_type(f.path())[0]
- if not mt:
- mt = 'application/octet-stream'
- return [_('(binary file %s, hash: %s)') % (mt, hex(f.filenode()))]
- return f.data().splitlines()
-
- if path in ctx:
- fctx = ctx[path]
- rightrev = fctx.filerev()
- rightnode = fctx.filenode()
- rightlines = filelines(fctx)
- parents = fctx.parents()
- if not parents:
- leftrev = -1
- leftnode = nullid
- leftlines = ()
- else:
- pfctx = parents[0]
- leftrev = pfctx.filerev()
- leftnode = pfctx.filenode()
- leftlines = filelines(pfctx)
- else:
- rightrev = -1
- rightnode = nullid
- rightlines = ()
- fctx = ctx.parents()[0][path]
- leftrev = fctx.filerev()
- leftnode = fctx.filenode()
- leftlines = filelines(fctx)
-
- comparison = webutil.compare(tmpl, context, leftlines, rightlines)
- return tmpl('filecomparison',
- file=path,
- node=hex(ctx.node()),
- rev=ctx.rev(),
- date=ctx.date(),
- desc=ctx.description(),
- author=ctx.user(),
- rename=rename,
- branch=webutil.nodebranchnodefault(ctx),
- parent=webutil.parents(fctx),
- child=webutil.children(fctx),
- leftrev=leftrev,
- leftnode=hex(leftnode),
- rightrev=rightrev,
- rightnode=hex(rightnode),
- comparison=comparison)
-
def annotate(web, req, tmpl):
fctx = webutil.filectx(web.repo, req)
f = fctx.path()
parity = paritygen(web.stripecount)
- diffopts = patch.diffopts(web.repo.ui, untrusted=True, section='annotate')
def annotate(**map):
last = None
@@ -661,8 +585,7 @@ def annotate(web, req, tmpl):
lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
'(binary:%s)' % mt)])
else:
- lines = enumerate(fctx.annotate(follow=True, linenumber=True,
- diffopts=diffopts))
+ lines = enumerate(fctx.annotate(follow=True, linenumber=True))
for lineno, ((f, targetline), l) in lines:
fnode = f.filenode()
@@ -817,9 +740,7 @@ def static(web, req, tmpl):
def graph(web, req, tmpl):
- ctx = webutil.changectx(web.repo, req)
- rev = ctx.rev()
-
+ rev = webutil.changectx(web.repo, req).rev()
bg_height = 39
revcount = web.maxshortchanges
if 'revcount' in req.form:
@@ -832,94 +753,45 @@ def graph(web, req, tmpl):
morevars = copy.copy(tmpl.defaults['sessionvars'])
morevars['revcount'] = revcount * 2
- count = len(web.repo)
- pos = rev
- start = max(0, pos - revcount + 1)
- end = min(count, start + revcount)
- pos = end - 1
-
- uprev = min(max(0, count - 1), rev + revcount)
+ max_rev = len(web.repo) - 1
+ revcount = min(max_rev, revcount)
+ revnode = web.repo.changelog.node(rev)
+ revnode_hex = hex(revnode)
+ uprev = min(max_rev, rev + revcount)
downrev = max(0, rev - revcount)
- changenav = webutil.revnavgen(pos, revcount, count, web.repo.changectx)
-
- dag = graphmod.dagwalker(web.repo, range(start, end)[::-1])
- tree = list(graphmod.colored(dag, web.repo))
-
- def getcolumns(tree):
- cols = 0
- for (id, type, ctx, vtx, edges) in tree:
- if type != graphmod.CHANGESET:
- continue
- cols = max(cols, max([edge[0] for edge in edges] or [0]),
- max([edge[1] for edge in edges] or [0]))
- return cols
-
- def graphdata(usetuples, **map):
- data = []
-
- row = 0
- for (id, type, ctx, vtx, edges) in tree:
- if type != graphmod.CHANGESET:
- continue
- node = str(ctx)
- age = templatefilters.age(ctx.date())
- desc = templatefilters.firstline(ctx.description())
- desc = cgi.escape(templatefilters.nonempty(desc))
- user = cgi.escape(templatefilters.person(ctx.user()))
- branch = ctx.branch()
- try:
- branchnode = web.repo.branchtip(branch)
- except error.RepoLookupError:
- branchnode = None
- branch = branch, branchnode == ctx.node()
-
- if usetuples:
- data.append((node, vtx, edges, desc, user, age, branch,
- ctx.tags(), ctx.bookmarks()))
- else:
- edgedata = [dict(col=edge[0], nextcol=edge[1],
- color=(edge[2] - 1) % 6 + 1,
- width=edge[3], bcolor=edge[4])
- for edge in edges]
-
- data.append(
- dict(node=node,
- col=vtx[0],
- color=(vtx[1] - 1) % 6 + 1,
- edges=edgedata,
- row=row,
- nextrow=row + 1,
- desc=desc,
- user=user,
- age=age,
- bookmarks=webutil.nodebookmarksdict(
- web.repo, ctx.node()),
- branches=webutil.nodebranchdict(web.repo, ctx),
- inbranch=webutil.nodeinbranch(web.repo, ctx),
- tags=webutil.nodetagsdict(web.repo, ctx.node())))
-
- row += 1
-
- return data
-
- cols = getcolumns(tree)
- rows = len(tree)
- canvasheight = (rows + 1) * bg_height - 27
+ count = len(web.repo)
+ changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx)
+ startrev = rev
+ # if starting revision is less than 60 set it to uprev
+ if rev < web.maxshortchanges:
+ startrev = uprev
+
+ dag = graphmod.dagwalker(web.repo, range(startrev, downrev - 1, -1))
+ tree = list(graphmod.colored(dag))
+ canvasheight = (len(tree) + 1) * bg_height - 27
+ data = []
+ for (id, type, ctx, vtx, edges) in tree:
+ if type != graphmod.CHANGESET:
+ continue
+ node = str(ctx)
+ age = templatefilters.age(ctx.date())
+ desc = templatefilters.firstline(ctx.description())
+ desc = cgi.escape(templatefilters.nonempty(desc))
+ user = cgi.escape(templatefilters.person(ctx.user()))
+ branch = ctx.branch()
+ branch = branch, web.repo.branchtags().get(branch) == ctx.node()
+ data.append((node, vtx, edges, desc, user, age, branch, ctx.tags(),
+ ctx.bookmarks()))
return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev,
lessvars=lessvars, morevars=morevars, downrev=downrev,
- cols=cols, rows=rows,
- canvaswidth=(cols + 1) * bg_height,
- truecanvasheight=rows * bg_height,
- canvasheight=canvasheight, bg_height=bg_height,
- jsdata=lambda **x: graphdata(True, **x),
- nodes=lambda **x: graphdata(False, **x),
- node=ctx.hex(), changenav=changenav)
+ canvasheight=canvasheight, jsdata=data, bg_height=bg_height,
+ node=revnode_hex, changenav=changenav)
def _getdoc(e):
doc = e[0].__doc__
if doc:
- doc = _(doc).split('\n')[0]
+ doc = doc.split('\n')[0]
else:
doc = _('(no help text available)')
return doc
@@ -931,7 +803,8 @@ def help(web, req, tmpl):
if not topicname:
def topics(**map):
for entries, summary, _ in helpmod.helptable:
- yield {'topic': entries[0], 'summary': summary}
+ entries = sorted(entries, key=len)
+ yield {'topic': entries[-1], 'summary': summary}
early, other = [], []
primary = lambda s: s.split('|')[0]
@@ -961,7 +834,6 @@ def help(web, req, tmpl):
u = webutil.wsgiui()
u.pushbuffer()
- u.verbose = True
try:
commands.help_(u, topicname)
except error.UnknownCommand:
diff --git a/mercurial/hgweb/webutil.py b/mercurial/hgweb/webutil.py
index ca5e7d8..967b6e8 100644
--- a/mercurial/hgweb/webutil.py
+++ b/mercurial/hgweb/webutil.py
@@ -10,9 +10,6 @@ import os, copy
from mercurial import match, patch, scmutil, error, ui, util
from mercurial.i18n import _
from mercurial.node import hex, nullid
-from common import ErrorResponse
-from common import HTTP_NOT_FOUND
-import difflib
def up(p):
if p[0] != "/":
@@ -75,7 +72,7 @@ def _siblings(siblings=[], hiderev=None):
d['date'] = s.date()
d['description'] = s.description()
d['branch'] = s.branch()
- if util.safehasattr(s, 'path'):
+ if hasattr(s, 'path'):
d['file'] = s.path()
yield d
@@ -101,23 +98,16 @@ def nodebranchdict(repo, ctx):
branches = []
branch = ctx.branch()
# If this is an empty repo, ctx.node() == nullid,
- # ctx.branch() == 'default'.
- try:
- branchnode = repo.branchtip(branch)
- except error.RepoLookupError:
- branchnode = None
- if branchnode == ctx.node():
+ # ctx.branch() == 'default', but branchtags() is
+ # an empty dict. Using dict.get avoids a traceback.
+ if repo.branchtags().get(branch) == ctx.node():
branches.append({"name": branch})
return branches
def nodeinbranch(repo, ctx):
branches = []
branch = ctx.branch()
- try:
- branchnode = repo.branchtip(branch)
- except error.RepoLookupError:
- branchnode = None
- if branch != 'default' and branchnode != ctx.node():
+ if branch != 'default' and repo.branchtags().get(branch) != ctx.node():
branches.append({"name": branch})
return branches
@@ -156,15 +146,11 @@ def changectx(repo, req):
return ctx
def filectx(repo, req):
- if 'file' not in req.form:
- raise ErrorResponse(HTTP_NOT_FOUND, 'file not given')
path = cleanpath(repo, req.form['file'][0])
if 'node' in req.form:
changeid = req.form['node'][0]
- elif 'filenode' in req.form:
- changeid = req.form['filenode'][0]
else:
- raise ErrorResponse(HTTP_NOT_FOUND, 'node or filenode not given')
+ changeid = req.form['filenode'][0]
try:
fctx = repo[changeid][path]
except error.RepoError:
@@ -187,7 +173,8 @@ def diffs(repo, tmpl, ctx, files, parity, style):
start += 1
blockcount = countgen()
- def prettyprintlines(diff, blockno):
+ def prettyprintlines(diff):
+ blockno = blockcount.next()
for lineno, l in enumerate(diff.splitlines(True)):
lineno = "%d.%d" % (blockno, lineno + 1)
if l.startswith('+'):
@@ -216,63 +203,14 @@ def diffs(repo, tmpl, ctx, files, parity, style):
block = []
for chunk in patch.diff(repo, node1, node2, m, opts=diffopts):
if chunk.startswith('diff') and block:
- blockno = blockcount.next()
- yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
- lines=prettyprintlines(''.join(block), blockno))
+ yield tmpl('diffblock', parity=parity.next(),
+ lines=prettyprintlines(''.join(block)))
block = []
if chunk.startswith('diff') and style != 'raw':
chunk = ''.join(chunk.splitlines(True)[1:])
block.append(chunk)
- blockno = blockcount.next()
- yield tmpl('diffblock', parity=parity.next(), blockno=blockno,
- lines=prettyprintlines(''.join(block), blockno))
-
-def compare(tmpl, context, leftlines, rightlines):
- '''Generator function that provides side-by-side comparison data.'''
-
- def compline(type, leftlineno, leftline, rightlineno, rightline):
- lineid = leftlineno and ("l%s" % leftlineno) or ''
- lineid += rightlineno and ("r%s" % rightlineno) or ''
- return tmpl('comparisonline',
- type=type,
- lineid=lineid,
- leftlinenumber="% 6s" % (leftlineno or ''),
- leftline=leftline or '',
- rightlinenumber="% 6s" % (rightlineno or ''),
- rightline=rightline or '')
-
- def getblock(opcodes):
- for type, llo, lhi, rlo, rhi in opcodes:
- len1 = lhi - llo
- len2 = rhi - rlo
- count = min(len1, len2)
- for i in xrange(count):
- yield compline(type=type,
- leftlineno=llo + i + 1,
- leftline=leftlines[llo + i],
- rightlineno=rlo + i + 1,
- rightline=rightlines[rlo + i])
- if len1 > len2:
- for i in xrange(llo + count, lhi):
- yield compline(type=type,
- leftlineno=i + 1,
- leftline=leftlines[i],
- rightlineno=None,
- rightline=None)
- elif len2 > len1:
- for i in xrange(rlo + count, rhi):
- yield compline(type=type,
- leftlineno=None,
- leftline=None,
- rightlineno=i + 1,
- rightline=rightlines[i])
-
- s = difflib.SequenceMatcher(None, leftlines, rightlines)
- if context < 0:
- yield tmpl('comparisonblock', lines=getblock(s.get_opcodes()))
- else:
- for oc in s.get_grouped_opcodes(n=context):
- yield tmpl('comparisonblock', lines=getblock(oc))
+ yield tmpl('diffblock', parity=parity.next(),
+ lines=prettyprintlines(''.join(block)))
def diffstatgen(ctx):
'''Generator function that provides the diffstat data.'''
diff --git a/mercurial/hgweb/wsgicgi.py b/mercurial/hgweb/wsgicgi.py
index 5d18d8e..6de394c 100644
--- a/mercurial/hgweb/wsgicgi.py
+++ b/mercurial/hgweb/wsgicgi.py
@@ -78,4 +78,5 @@ def launch(application):
for chunk in content:
write(chunk)
finally:
- getattr(content, 'close', lambda : None)()
+ if hasattr(content, 'close'):
+ content.close()
diff --git a/mercurial/hook.py b/mercurial/hook.py
index 9831353..0d92e91 100644
--- a/mercurial/hook.py
+++ b/mercurial/hook.py
@@ -21,14 +21,14 @@ def _pythonhook(ui, repo, name, hname, funcname, args, throw):
ui.note(_("calling hook %s: %s\n") % (hname, funcname))
obj = funcname
- if not util.safehasattr(obj, '__call__'):
+ if not hasattr(obj, '__call__'):
d = funcname.rfind('.')
if d == -1:
raise util.Abort(_('%s hook is invalid ("%s" not in '
'a module)') % (hname, funcname))
modname = funcname[:d]
oldpaths = sys.path
- if util.mainfrozen():
+ if hasattr(sys, "frozen"):
# binary installs require sys.path manipulation
modpath, modfile = os.path.split(modname)
if modpath and modfile:
@@ -60,13 +60,13 @@ def _pythonhook(ui, repo, name, hname, funcname, args, throw):
raise util.Abort(_('%s hook is invalid '
'("%s" is not defined)') %
(hname, funcname))
- if not util.safehasattr(obj, '__call__'):
+ if not hasattr(obj, '__call__'):
raise util.Abort(_('%s hook is invalid '
'("%s" is not callable)') %
(hname, funcname))
try:
try:
- # redirect IO descriptors to the ui descriptors so hooks
+ # redirect IO descriptors the the ui descriptors so hooks
# that write directly to these don't mess up the command
# protocol when running through the command server
old = sys.stdout, sys.stderr, sys.stdin
@@ -99,7 +99,7 @@ def _exthook(ui, repo, name, cmd, args, throw):
env = {}
for k, v in args.iteritems():
- if util.safehasattr(v, '__call__'):
+ if hasattr(v, '__call__'):
v = v()
if isinstance(v, dict):
# make the dictionary element order stable across Python
@@ -124,23 +124,12 @@ def _exthook(ui, repo, name, cmd, args, throw):
ui.warn(_('warning: %s hook %s\n') % (name, desc))
return r
-def _allhooks(ui):
- hooks = []
- for name, cmd in ui.configitems('hooks'):
- if not name.startswith('priority'):
- priority = ui.configint('hooks', 'priority.%s' % name, 0)
- hooks.append((-priority, len(hooks), name, cmd))
- return [(k, v) for p, o, k, v in sorted(hooks)]
-
_redirect = False
def redirect(state):
global _redirect
_redirect = state
def hook(ui, repo, name, throw=False, **args):
- if not ui.callhooks:
- return False
-
r = False
oldstdout = -1
@@ -150,7 +139,6 @@ def hook(ui, repo, name, throw=False, **args):
stderrno = sys.__stderr__.fileno()
# temporarily redirect stdout to stderr, if possible
if stdoutno >= 0 and stderrno >= 0:
- sys.__stdout__.flush()
oldstdout = os.dup(stdoutno)
os.dup2(stderrno, stdoutno)
except AttributeError:
@@ -158,10 +146,10 @@ def hook(ui, repo, name, throw=False, **args):
pass
try:
- for hname, cmd in _allhooks(ui):
+ for hname, cmd in ui.configitems('hooks'):
if hname.split('.')[0] != name or not cmd:
continue
- if util.safehasattr(cmd, '__call__'):
+ if hasattr(cmd, '__call__'):
r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r
elif cmd.startswith('python:'):
if cmd.count(':') >= 2:
@@ -169,11 +157,7 @@ def hook(ui, repo, name, throw=False, **args):
path = util.expandpath(path)
if repo:
path = os.path.join(repo.root, path)
- try:
- mod = extensions.loadpath(path, 'hghook.%s' % hname)
- except Exception:
- ui.write(_("loading %s hook failed:\n") % hname)
- raise
+ mod = extensions.loadpath(path, 'hghook.%s' % hname)
hookfn = getattr(mod, cmd)
else:
hookfn = cmd[7:].strip()
diff --git a/mercurial/httpclient/__init__.py b/mercurial/httpclient/__init__.py
index f5c3baf..227d60b 100644
--- a/mercurial/httpclient/__init__.py
+++ b/mercurial/httpclient/__init__.py
@@ -45,7 +45,6 @@ import rfc822
import select
import socket
-import _readers
import socketutil
logger = logging.getLogger(__name__)
@@ -55,6 +54,8 @@ __all__ = ['HTTPConnection', 'HTTPResponse']
HTTP_VER_1_0 = 'HTTP/1.0'
HTTP_VER_1_1 = 'HTTP/1.1'
+_LEN_CLOSE_IS_END = -1
+
OUTGOING_BUFFER_SIZE = 1 << 15
INCOMING_BUFFER_SIZE = 1 << 20
@@ -82,19 +83,23 @@ class HTTPResponse(object):
The response will continue to load as available. If you need the
complete response before continuing, check the .complete() method.
"""
- def __init__(self, sock, timeout, method):
+ def __init__(self, sock, timeout):
self.sock = sock
- self.method = method
self.raw_response = ''
+ self._body = None
self._headers_len = 0
+ self._content_len = 0
self.headers = None
self.will_close = False
self.status_line = ''
self.status = None
- self.continued = False
self.http_version = None
self.reason = None
- self._reader = None
+ self._chunked = False
+ self._chunked_done = False
+ self._chunked_until_next = 0
+ self._chunked_skip_bytes = 0
+ self._chunked_preloaded_block = None
self._read_location = 0
self._eol = EOL
@@ -112,12 +117,11 @@ class HTTPResponse(object):
socket is closed, this will nearly always return False, even
in cases where all the data has actually been loaded.
"""
- if self._reader:
- return self._reader.done()
-
- def _close(self):
- if self._reader is not None:
- self._reader._close()
+ if self._chunked:
+ return self._chunked_done
+ if self._content_len == _LEN_CLOSE_IS_END:
+ return False
+ return self._body is not None and len(self._body) >= self._content_len
def readline(self):
"""Read a single line from the response body.
@@ -125,34 +129,30 @@ class HTTPResponse(object):
This may block until either a line ending is found or the
response is complete.
"""
- # TODO: move this into the reader interface where it can be
- # smarter (and probably avoid copies)
- bytes = []
- while not bytes:
- try:
- bytes = [self._reader.read(1)]
- except _readers.ReadNotReady:
- self._select()
- while bytes[-1] != '\n' and not self.complete():
+ eol = self._body.find('\n', self._read_location)
+ while eol == -1 and not self.complete():
self._select()
- bytes.append(self._reader.read(1))
- if bytes[-1] != '\n':
- next = self._reader.read(1)
- while next and next != '\n':
- bytes.append(next)
- next = self._reader.read(1)
- bytes.append(next)
- return ''.join(bytes)
+ eol = self._body.find('\n', self._read_location)
+ if eol != -1:
+ eol += 1
+ else:
+ eol = len(self._body)
+ data = self._body[self._read_location:eol]
+ self._read_location = eol
+ return data
def read(self, length=None):
# if length is None, unbounded read
while (not self.complete() # never select on a finished read
and (not length # unbounded, so we wait for complete()
- or length > self._reader.available_data)):
+ or (self._read_location + length) > len(self._body))):
self._select()
if not length:
- length = self._reader.available_data
- r = self._reader.read(length)
+ length = len(self._body) - self._read_location
+ elif len(self._body) < (self._read_location + length):
+ length = len(self._body) - self._read_location
+ r = self._body[self._read_location:self._read_location + length]
+ self._read_location += len(r)
if self.complete() and self.will_close:
self.sock.close()
return r
@@ -160,11 +160,15 @@ class HTTPResponse(object):
def _select(self):
r, _, _ = select.select([self.sock], [], [], self._timeout)
if not r:
- # socket was not readable. If the response is not
- # complete, raise a timeout.
- if not self.complete():
+ # socket was not readable. If the response is not complete
+ # and we're not a _LEN_CLOSE_IS_END response, raise a timeout.
+ # If we are a _LEN_CLOSE_IS_END response and we have no data,
+ # raise a timeout.
+ if not (self.complete() or
+ (self._content_len == _LEN_CLOSE_IS_END and self._body)):
logger.info('timed out with timeout of %s', self._timeout)
raise HTTPTimeoutException('timeout reading data')
+ logger.info('cl: %r body: %r', self._content_len, self._body)
try:
data = self.sock.recv(INCOMING_BUFFER_SIZE)
except socket.sslerror, e:
@@ -173,22 +177,68 @@ class HTTPResponse(object):
logger.debug('SSL_WANT_READ in _select, should retry later')
return True
logger.debug('response read %d data during _select', len(data))
- # If the socket was readable and no data was read, that means
- # the socket was closed. Inform the reader (if any) so it can
- # raise an exception if this is an invalid situation.
if not data:
- if self._reader:
- self._reader._close()
+ if self.headers and self._content_len == _LEN_CLOSE_IS_END:
+ self._content_len = len(self._body)
return False
else:
self._load_response(data)
return True
+ def _chunked_parsedata(self, data):
+ if self._chunked_preloaded_block:
+ data = self._chunked_preloaded_block + data
+ self._chunked_preloaded_block = None
+ while data:
+ logger.debug('looping with %d data remaining', len(data))
+ # Slice out anything we should skip
+ if self._chunked_skip_bytes:
+ if len(data) <= self._chunked_skip_bytes:
+ self._chunked_skip_bytes -= len(data)
+ data = ''
+ break
+ else:
+ data = data[self._chunked_skip_bytes:]
+ self._chunked_skip_bytes = 0
+
+ # determine how much is until the next chunk
+ if self._chunked_until_next:
+ amt = self._chunked_until_next
+ logger.debug('reading remaining %d of existing chunk', amt)
+ self._chunked_until_next = 0
+ body = data
+ else:
+ try:
+ amt, body = data.split(self._eol, 1)
+ except ValueError:
+ self._chunked_preloaded_block = data
+ logger.debug('saving %r as a preloaded block for chunked',
+ self._chunked_preloaded_block)
+ return
+ amt = int(amt, base=16)
+ logger.debug('reading chunk of length %d', amt)
+ if amt == 0:
+ self._chunked_done = True
+
+ # read through end of what we have or the chunk
+ self._body += body[:amt]
+ if len(body) >= amt:
+ data = body[amt:]
+ self._chunked_skip_bytes = len(self._eol)
+ else:
+ self._chunked_until_next = amt - len(body)
+ self._chunked_skip_bytes = 0
+ data = ''
+
def _load_response(self, data):
- # Being here implies we're not at the end of the headers yet,
- # since at the end of this method if headers were completely
- # loaded we replace this method with the load() method of the
- # reader we created.
+ if self._chunked:
+ self._chunked_parsedata(data)
+ return
+ elif self._body is not None:
+ self._body += data
+ return
+
+ # We haven't seen end of headers yet
self.raw_response += data
# This is a bogus server with bad line endings
if self._eol not in self.raw_response:
@@ -212,7 +262,6 @@ class HTTPResponse(object):
http_ver, status = hdrs.split(' ', 1)
if status.startswith('100'):
self.raw_response = body
- self.continued = True
logger.debug('continue seen, setting body to %r', body)
return
@@ -232,46 +281,23 @@ class HTTPResponse(object):
if self._eol != EOL:
hdrs = hdrs.replace(self._eol, '\r\n')
headers = rfc822.Message(cStringIO.StringIO(hdrs))
- content_len = None
if HDR_CONTENT_LENGTH in headers:
- content_len = int(headers[HDR_CONTENT_LENGTH])
+ self._content_len = int(headers[HDR_CONTENT_LENGTH])
if self.http_version == HTTP_VER_1_0:
self.will_close = True
elif HDR_CONNECTION_CTRL in headers:
self.will_close = (
headers[HDR_CONNECTION_CTRL].lower() == CONNECTION_CLOSE)
+ if self._content_len == 0:
+ self._content_len = _LEN_CLOSE_IS_END
if (HDR_XFER_ENCODING in headers
and headers[HDR_XFER_ENCODING].lower() == XFER_ENCODING_CHUNKED):
- self._reader = _readers.ChunkedReader(self._eol)
- logger.debug('using a chunked reader')
- else:
- # HEAD responses are forbidden from returning a body, and
- # it's implausible for a CONNECT response to use
- # close-is-end logic for an OK response.
- if (self.method == 'HEAD' or
- (self.method == 'CONNECT' and content_len is None)):
- content_len = 0
- if content_len is not None:
- logger.debug('using a content-length reader with length %d',
- content_len)
- self._reader = _readers.ContentLengthReader(content_len)
- else:
- # Response body had no length specified and is not
- # chunked, so the end of the body will only be
- # identifiable by the termination of the socket by the
- # server. My interpretation of the spec means that we
- # are correct in hitting this case if
- # transfer-encoding, content-length, and
- # connection-control were left unspecified.
- self._reader = _readers.CloseIsEndReader()
- logger.debug('using a close-is-end reader')
- self.will_close = True
-
- if body:
- self._reader._load(body)
- logger.debug('headers complete')
+ self._body = ''
+ self._chunked_parsedata(body)
+ self._chunked = True
+ if self._body is None:
+ self._body = body
self.headers = headers
- self._load_response = self._reader._load
class HTTPConnection(object):
@@ -348,14 +374,13 @@ class HTTPConnection(object):
{}, HTTP_VER_1_0)
sock.send(data)
sock.setblocking(0)
- r = self.response_class(sock, self.timeout, 'CONNECT')
+ r = self.response_class(sock, self.timeout)
timeout_exc = HTTPTimeoutException(
'Timed out waiting for CONNECT response from proxy')
while not r.complete():
try:
if not r._select():
- if not r.complete():
- raise timeout_exc
+ raise timeout_exc
except HTTPTimeoutException:
# This raise/except pattern looks goofy, but
# _select can raise the timeout as well as the
@@ -372,10 +397,6 @@ class HTTPConnection(object):
else:
sock = socketutil.create_connection((self.host, self.port))
if self.ssl:
- # This is the default, but in the case of proxied SSL
- # requests the proxy logic above will have cleared
- # blocking mode, so reenable it just to be safe.
- sock.setblocking(1)
logger.debug('wrapping socket for ssl with options %r',
self.ssl_opts)
sock = socketutil.wrap_socket(sock, **self.ssl_opts)
@@ -498,7 +519,7 @@ class HTTPConnection(object):
out = outgoing_headers or body
blocking_on_continue = False
if expect_continue and not outgoing_headers and not (
- response and (response.headers or response.continued)):
+ response and response.headers):
logger.info(
'waiting up to %s seconds for'
' continue response from server',
@@ -521,6 +542,11 @@ class HTTPConnection(object):
'server, optimistically sending request body')
else:
raise HTTPTimeoutException('timeout sending data')
+ # TODO exceptional conditions with select? (what are those be?)
+ # TODO if the response is loading, must we finish sending at all?
+ #
+ # Certainly not if it's going to close the connection and/or
+ # the response is already done...I think.
was_first = first
# incoming data
@@ -538,11 +564,11 @@ class HTTPConnection(object):
logger.info('socket appears closed in read')
self.sock = None
self._current_response = None
- if response is not None:
- response._close()
# This if/elif ladder is a bit subtle,
# comments in each branch should help.
- if response is not None and response.complete():
+ if response is not None and (
+ response.complete() or
+ response._content_len == _LEN_CLOSE_IS_END):
# Server responded completely and then
# closed the socket. We should just shut
# things down and let the caller get their
@@ -571,7 +597,7 @@ class HTTPConnection(object):
'response was missing or incomplete!')
logger.debug('read %d bytes in request()', len(data))
if response is None:
- response = self.response_class(r[0], self.timeout, method)
+ response = self.response_class(r[0], self.timeout)
response._load_response(data)
# Jump to the next select() call so we load more
# data if the server is still sending us content.
@@ -579,6 +605,10 @@ class HTTPConnection(object):
except socket.error, e:
if e[0] != errno.EPIPE and not was_first:
raise
+ if (response._content_len
+ and response._content_len != _LEN_CLOSE_IS_END):
+ outgoing_headers = sent_data + outgoing_headers
+ reconnect('read')
# outgoing data
if w and out:
@@ -623,7 +653,7 @@ class HTTPConnection(object):
# close if the server response said to or responded before eating
# the whole request
if response is None:
- response = self.response_class(self.sock, self.timeout, method)
+ response = self.response_class(self.sock, self.timeout)
complete = response.complete()
data_left = bool(outgoing_headers or body)
if data_left:
@@ -641,8 +671,7 @@ class HTTPConnection(object):
raise httplib.ResponseNotReady()
r = self._current_response
while r.headers is None:
- if not r._select() and not r.complete():
- raise _readers.HTTPRemoteClosedError()
+ r._select()
if r.will_close:
self.sock = None
self._current_response = None
@@ -664,11 +693,6 @@ class BadRequestData(httplib.HTTPException):
class HTTPProxyConnectFailedException(httplib.HTTPException):
"""Connecting to the HTTP proxy failed."""
-
class HTTPStateError(httplib.HTTPException):
"""Invalid internal state encountered."""
-
-# Forward this exception type from _readers since it needs to be part
-# of the public API.
-HTTPRemoteClosedError = _readers.HTTPRemoteClosedError
# no-check-code
diff --git a/mercurial/httpclient/_readers.py b/mercurial/httpclient/_readers.py
deleted file mode 100644
index 0beb551..0000000
--- a/mercurial/httpclient/_readers.py
+++ /dev/null
@@ -1,195 +0,0 @@
-# Copyright 2011, Google Inc.
-# All rights reserved.
-#
-# Redistribution and use in source and binary forms, with or without
-# modification, are permitted provided that the following conditions are
-# met:
-#
-# * Redistributions of source code must retain the above copyright
-# notice, this list of conditions and the following disclaimer.
-# * Redistributions in binary form must reproduce the above
-# copyright notice, this list of conditions and the following disclaimer
-# in the documentation and/or other materials provided with the
-# distribution.
-# * Neither the name of Google Inc. nor the names of its
-# contributors may be used to endorse or promote products derived from
-# this software without specific prior written permission.
-
-# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
-# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
-# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
-# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
-# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
-# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-"""Reader objects to abstract out different body response types.
-
-This module is package-private. It is not expected that these will
-have any clients outside of httpplus.
-"""
-
-import httplib
-import itertools
-import logging
-
-logger = logging.getLogger(__name__)
-
-
-class ReadNotReady(Exception):
- """Raised when read() is attempted but not enough data is loaded."""
-
-
-class HTTPRemoteClosedError(httplib.HTTPException):
- """The server closed the remote socket in the middle of a response."""
-
-
-class AbstractReader(object):
- """Abstract base class for response readers.
-
- Subclasses must implement _load, and should implement _close if
- it's not an error for the server to close their socket without
- some termination condition being detected during _load.
- """
- def __init__(self):
- self._finished = False
- self._done_chunks = []
-
- @property
- def available_data(self):
- return sum(map(len, self._done_chunks))
-
- def done(self):
- return self._finished
-
- def read(self, amt):
- if self.available_data < amt and not self._finished:
- raise ReadNotReady()
- need = [amt]
- def pred(s):
- needed = need[0] > 0
- need[0] -= len(s)
- return needed
- blocks = list(itertools.takewhile(pred, self._done_chunks))
- self._done_chunks = self._done_chunks[len(blocks):]
- over_read = sum(map(len, blocks)) - amt
- if over_read > 0 and blocks:
- logger.debug('need to reinsert %d data into done chunks', over_read)
- last = blocks[-1]
- blocks[-1], reinsert = last[:-over_read], last[-over_read:]
- self._done_chunks.insert(0, reinsert)
- result = ''.join(blocks)
- assert len(result) == amt or (self._finished and len(result) < amt)
- return result
-
- def _load(self, data): # pragma: no cover
- """Subclasses must implement this.
-
- As data is available to be read out of this object, it should
- be placed into the _done_chunks list. Subclasses should not
- rely on data remaining in _done_chunks forever, as it may be
- reaped if the client is parsing data as it comes in.
- """
- raise NotImplementedError
-
- def _close(self):
- """Default implementation of close.
-
- The default implementation assumes that the reader will mark
- the response as finished on the _finished attribute once the
- entire response body has been read. In the event that this is
- not true, the subclass should override the implementation of
- close (for example, close-is-end responses have to set
- self._finished in the close handler.)
- """
- if not self._finished:
- raise HTTPRemoteClosedError(
- 'server appears to have closed the socket mid-response')
-
-
-class AbstractSimpleReader(AbstractReader):
- """Abstract base class for simple readers that require no response decoding.
-
- Examples of such responses are Connection: Close (close-is-end)
- and responses that specify a content length.
- """
- def _load(self, data):
- if data:
- assert not self._finished, (
- 'tried to add data (%r) to a closed reader!' % data)
- logger.debug('%s read an addtional %d data', self.name, len(data))
- self._done_chunks.append(data)
-
-
-class CloseIsEndReader(AbstractSimpleReader):
- """Reader for responses that specify Connection: Close for length."""
- name = 'close-is-end'
-
- def _close(self):
- logger.info('Marking close-is-end reader as closed.')
- self._finished = True
-
-
-class ContentLengthReader(AbstractSimpleReader):
- """Reader for responses that specify an exact content length."""
- name = 'content-length'
-
- def __init__(self, amount):
- AbstractReader.__init__(self)
- self._amount = amount
- if amount == 0:
- self._finished = True
- self._amount_seen = 0
-
- def _load(self, data):
- AbstractSimpleReader._load(self, data)
- self._amount_seen += len(data)
- if self._amount_seen >= self._amount:
- self._finished = True
- logger.debug('content-length read complete')
-
-
-class ChunkedReader(AbstractReader):
- """Reader for chunked transfer encoding responses."""
- def __init__(self, eol):
- AbstractReader.__init__(self)
- self._eol = eol
- self._leftover_skip_amt = 0
- self._leftover_data = ''
-
- def _load(self, data):
- assert not self._finished, 'tried to add data to a closed reader!'
- logger.debug('chunked read an addtional %d data', len(data))
- position = 0
- if self._leftover_data:
- logger.debug('chunked reader trying to finish block from leftover data')
- # TODO: avoid this string concatenation if possible
- data = self._leftover_data + data
- position = self._leftover_skip_amt
- self._leftover_data = ''
- self._leftover_skip_amt = 0
- datalen = len(data)
- while position < datalen:
- split = data.find(self._eol, position)
- if split == -1:
- self._leftover_data = data
- self._leftover_skip_amt = position
- return
- amt = int(data[position:split], base=16)
- block_start = split + len(self._eol)
- # If the whole data chunk plus the eol trailer hasn't
- # loaded, we'll wait for the next load.
- if block_start + amt + len(self._eol) > len(data):
- self._leftover_data = data
- self._leftover_skip_amt = position
- return
- if amt == 0:
- self._finished = True
- logger.debug('closing chunked redaer due to chunk of length 0')
- return
- self._done_chunks.append(data[block_start:block_start + amt])
- position = block_start + amt + len(self._eol)
-# no-check-code
diff --git a/mercurial/httpclient/tests/__init__.py b/mercurial/httpclient/tests/__init__.py
new file mode 100644
index 0000000..84b3a07
--- /dev/null
+++ b/mercurial/httpclient/tests/__init__.py
@@ -0,0 +1 @@
+# no-check-code
diff --git a/mercurial/httpclient/tests/simple_http_test.py b/mercurial/httpclient/tests/simple_http_test.py
new file mode 100644
index 0000000..dba0188
--- /dev/null
+++ b/mercurial/httpclient/tests/simple_http_test.py
@@ -0,0 +1,386 @@
+# Copyright 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import socket
+import unittest
+
+import http
+
+# relative import to ease embedding the library
+import util
+
+
+class SimpleHttpTest(util.HttpTestBase, unittest.TestCase):
+
+ def _run_simple_test(self, host, server_data, expected_req, expected_data):
+ con = http.HTTPConnection(host)
+ con._connect()
+ con.sock.data = server_data
+ con.request('GET', '/')
+
+ self.assertStringEqual(expected_req, con.sock.sent)
+ self.assertEqual(expected_data, con.getresponse().read())
+
+ def test_broken_data_obj(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ self.assertRaises(http.BadRequestData,
+ con.request, 'POST', '/', body=1)
+
+ def test_no_keepalive_http_1_0(self):
+ expected_request_one = """GET /remote/.hg/requires HTTP/1.1
+Host: localhost:9999
+range: bytes=0-
+accept-encoding: identity
+accept: application/mercurial-0.1
+user-agent: mercurial/proto-1.0
+
+""".replace('\n', '\r\n')
+ expected_response_headers = """HTTP/1.0 200 OK
+Server: SimpleHTTP/0.6 Python/2.6.1
+Date: Sun, 01 May 2011 13:56:57 GMT
+Content-type: application/octet-stream
+Content-Length: 33
+Last-Modified: Sun, 01 May 2011 13:56:56 GMT
+
+""".replace('\n', '\r\n')
+ expected_response_body = """revlogv1
+store
+fncache
+dotencode
+"""
+ con = http.HTTPConnection('localhost:9999')
+ con._connect()
+ con.sock.data = [expected_response_headers, expected_response_body]
+ con.request('GET', '/remote/.hg/requires',
+ headers={'accept-encoding': 'identity',
+ 'range': 'bytes=0-',
+ 'accept': 'application/mercurial-0.1',
+ 'user-agent': 'mercurial/proto-1.0',
+ })
+ self.assertStringEqual(expected_request_one, con.sock.sent)
+ self.assertEqual(con.sock.closed, False)
+ self.assertNotEqual(con.sock.data, [])
+ self.assert_(con.busy())
+ resp = con.getresponse()
+ self.assertStringEqual(resp.read(), expected_response_body)
+ self.failIf(con.busy())
+ self.assertEqual(con.sock, None)
+ self.assertEqual(resp.sock.data, [])
+ self.assert_(resp.sock.closed)
+
+ def test_multiline_header(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ con.sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Multiline: Value\r\n',
+ ' Rest of value\r\n',
+ 'Content-Length: 10\r\n',
+ '\r\n'
+ '1234567890'
+ ]
+ con.request('GET', '/')
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+ resp = con.getresponse()
+ self.assertEqual('1234567890', resp.read())
+ self.assertEqual(['Value\n Rest of value'],
+ resp.headers.getheaders('multiline'))
+ # Socket should not be closed
+ self.assertEqual(resp.sock.closed, False)
+ self.assertEqual(con.sock.closed, False)
+
+ def testSimpleRequest(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ con.sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'MultiHeader: Value\r\n'
+ 'MultiHeader: Other Value\r\n'
+ 'MultiHeader: One More!\r\n'
+ 'Content-Length: 10\r\n',
+ '\r\n'
+ '1234567890'
+ ]
+ con.request('GET', '/')
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+ resp = con.getresponse()
+ self.assertEqual('1234567890', resp.read())
+ self.assertEqual(['Value', 'Other Value', 'One More!'],
+ resp.headers.getheaders('multiheader'))
+ self.assertEqual(['BogusServer 1.0'],
+ resp.headers.getheaders('server'))
+
+ def testHeaderlessResponse(self):
+ con = http.HTTPConnection('1.2.3.4', use_ssl=False)
+ con._connect()
+ con.sock.data = ['HTTP/1.1 200 OK\r\n',
+ '\r\n'
+ '1234567890'
+ ]
+ con.request('GET', '/')
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+ resp = con.getresponse()
+ self.assertEqual('1234567890', resp.read())
+ self.assertEqual({}, dict(resp.headers))
+ self.assertEqual(resp.status, 200)
+
+ def testReadline(self):
+ con = http.HTTPConnection('1.2.3.4')
+ con._connect()
+ # make sure it trickles in one byte at a time
+ # so that we touch all the cases in readline
+ con.sock.data = list(''.join(
+ ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Connection: Close\r\n',
+ '\r\n'
+ '1\n2\nabcdefg\n4\n5']))
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ con.request('GET', '/')
+ self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+ r = con.getresponse()
+ for expected in ['1\n', '2\n', 'abcdefg\n', '4\n', '5']:
+ actual = r.readline()
+ self.assertEqual(expected, actual,
+ 'Expected %r, got %r' % (expected, actual))
+
+ def testIPv6(self):
+ self._run_simple_test('[::1]:8221',
+ ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 10',
+ '\r\n\r\n'
+ '1234567890'],
+ ('GET / HTTP/1.1\r\n'
+ 'Host: [::1]:8221\r\n'
+ 'accept-encoding: identity\r\n\r\n'),
+ '1234567890')
+ self._run_simple_test('::2',
+ ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 10',
+ '\r\n\r\n'
+ '1234567890'],
+ ('GET / HTTP/1.1\r\n'
+ 'Host: ::2\r\n'
+ 'accept-encoding: identity\r\n\r\n'),
+ '1234567890')
+ self._run_simple_test('[::3]:443',
+ ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 10',
+ '\r\n\r\n'
+ '1234567890'],
+ ('GET / HTTP/1.1\r\n'
+ 'Host: ::3\r\n'
+ 'accept-encoding: identity\r\n\r\n'),
+ '1234567890')
+
+ def testEarlyContinueResponse(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.data = ['HTTP/1.1 403 Forbidden\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 18',
+ '\r\n\r\n'
+ "You can't do that."]
+ expected_req = self.doPost(con, expect_body=False)
+ self.assertEqual(('1.2.3.4', 80), sock.sa)
+ self.assertStringEqual(expected_req, sock.sent)
+ self.assertEqual("You can't do that.", con.getresponse().read())
+ self.assertEqual(sock.closed, True)
+
+ def testDeniedAfterContinueTimeoutExpires(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.data = ['HTTP/1.1 403 Forbidden\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 18\r\n',
+ 'Connection: close',
+ '\r\n\r\n'
+ "You can't do that."]
+ sock.read_wait_sentinel = 'Dear server, send response!'
+ sock.close_on_empty = True
+ # send enough data out that we'll chunk it into multiple
+ # blocks and the socket will close before we can send the
+ # whole request.
+ post_body = ('This is some POST data\n' * 1024 * 32 +
+ 'Dear server, send response!\n' +
+ 'This is some POST data\n' * 1024 * 32)
+ expected_req = self.doPost(con, expect_body=False,
+ body_to_send=post_body)
+ self.assertEqual(('1.2.3.4', 80), sock.sa)
+ self.assert_('POST data\n' in sock.sent)
+ self.assert_('Dear server, send response!\n' in sock.sent)
+ # We expect not all of our data was sent.
+ self.assertNotEqual(sock.sent, expected_req)
+ self.assertEqual("You can't do that.", con.getresponse().read())
+ self.assertEqual(sock.closed, True)
+
+ def testPostData(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.read_wait_sentinel = 'POST data'
+ sock.early_data = ['HTTP/1.1 100 Co', 'ntinue\r\n\r\n']
+ sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 16',
+ '\r\n\r\n',
+ "You can do that."]
+ expected_req = self.doPost(con, expect_body=True)
+ self.assertEqual(('1.2.3.4', 80), sock.sa)
+ self.assertEqual(expected_req, sock.sent)
+ self.assertEqual("You can do that.", con.getresponse().read())
+ self.assertEqual(sock.closed, False)
+
+ def testServerWithoutContinue(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.read_wait_sentinel = 'POST data'
+ sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 16',
+ '\r\n\r\n',
+ "You can do that."]
+ expected_req = self.doPost(con, expect_body=True)
+ self.assertEqual(('1.2.3.4', 80), sock.sa)
+ self.assertEqual(expected_req, sock.sent)
+ self.assertEqual("You can do that.", con.getresponse().read())
+ self.assertEqual(sock.closed, False)
+
+ def testServerWithSlowContinue(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.read_wait_sentinel = 'POST data'
+ sock.data = ['HTTP/1.1 100 ', 'Continue\r\n\r\n',
+ 'HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 16',
+ '\r\n\r\n',
+ "You can do that."]
+ expected_req = self.doPost(con, expect_body=True)
+ self.assertEqual(('1.2.3.4', 80), sock.sa)
+ self.assertEqual(expected_req, sock.sent)
+ resp = con.getresponse()
+ self.assertEqual("You can do that.", resp.read())
+ self.assertEqual(200, resp.status)
+ self.assertEqual(sock.closed, False)
+
+ def testSlowConnection(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ # simulate one byte arriving at a time, to check for various
+ # corner cases
+ con.sock.data = list('HTTP/1.1 200 OK\r\n'
+ 'Server: BogusServer 1.0\r\n'
+ 'Content-Length: 10'
+ '\r\n\r\n'
+ '1234567890')
+ con.request('GET', '/')
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+ self.assertEqual('1234567890', con.getresponse().read())
+
+ def testTimeout(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ con.sock.data = []
+ con.request('GET', '/')
+ self.assertRaises(http.HTTPTimeoutException,
+ con.getresponse)
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+
+ def test_conn_keep_alive_but_server_close_anyway(self):
+ sockets = []
+ def closingsocket(*args, **kwargs):
+ s = util.MockSocket(*args, **kwargs)
+ sockets.append(s)
+ s.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Connection: Keep-Alive\r\n',
+ 'Content-Length: 16',
+ '\r\n\r\n',
+ 'You can do that.']
+ s.close_on_empty = True
+ return s
+
+ socket.socket = closingsocket
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ con.request('GET', '/')
+ r1 = con.getresponse()
+ r1.read()
+ self.assertFalse(con.sock.closed)
+ self.assert_(con.sock.remote_closed)
+ con.request('GET', '/')
+ self.assertEqual(2, len(sockets))
+
+ def test_no_response_raises_response_not_ready(self):
+ con = http.HTTPConnection('foo')
+ self.assertRaises(http.httplib.ResponseNotReady, con.getresponse)
+# no-check-code
diff --git a/mercurial/httpclient/tests/test_bogus_responses.py b/mercurial/httpclient/tests/test_bogus_responses.py
new file mode 100644
index 0000000..486e770
--- /dev/null
+++ b/mercurial/httpclient/tests/test_bogus_responses.py
@@ -0,0 +1,68 @@
+# Copyright 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+"""Tests against malformed responses.
+
+Server implementations that respond with only LF instead of CRLF have
+been observed. Checking against ones that use only CR is a hedge
+against that potential insanity.
+"""
+import unittest
+
+import http
+
+# relative import to ease embedding the library
+import util
+
+
+class SimpleHttpTest(util.HttpTestBase, unittest.TestCase):
+
+ def bogusEOL(self, eol):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ con.sock.data = ['HTTP/1.1 200 OK%s' % eol,
+ 'Server: BogusServer 1.0%s' % eol,
+ 'Content-Length: 10',
+ eol * 2,
+ '1234567890']
+ con.request('GET', '/')
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('1.2.3.4', 80), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+ self.assertEqual('1234567890', con.getresponse().read())
+
+ def testOnlyLinefeed(self):
+ self.bogusEOL('\n')
+
+ def testOnlyCarriageReturn(self):
+ self.bogusEOL('\r')
+# no-check-code
diff --git a/mercurial/httpclient/tests/test_chunked_transfer.py b/mercurial/httpclient/tests/test_chunked_transfer.py
new file mode 100644
index 0000000..87153e3
--- /dev/null
+++ b/mercurial/httpclient/tests/test_chunked_transfer.py
@@ -0,0 +1,137 @@
+# Copyright 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import cStringIO
+import unittest
+
+import http
+
+# relative import to ease embedding the library
+import util
+
+
+def chunkedblock(x, eol='\r\n'):
+ r"""Make a chunked transfer-encoding block.
+
+ >>> chunkedblock('hi')
+ '2\r\nhi\r\n'
+ >>> chunkedblock('hi' * 10)
+ '14\r\nhihihihihihihihihihi\r\n'
+ >>> chunkedblock('hi', eol='\n')
+ '2\nhi\n'
+ """
+ return ''.join((hex(len(x))[2:], eol, x, eol))
+
+
+class ChunkedTransferTest(util.HttpTestBase, unittest.TestCase):
+ def testChunkedUpload(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.read_wait_sentinel = '0\r\n\r\n'
+ sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 6',
+ '\r\n\r\n',
+ "Thanks"]
+
+ zz = 'zz\n'
+ con.request('POST', '/', body=cStringIO.StringIO(
+ (zz * (0x8010 / 3)) + 'end-of-body'))
+ expected_req = ('POST / HTTP/1.1\r\n'
+ 'transfer-encoding: chunked\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+ expected_req += chunkedblock('zz\n' * (0x8000 / 3) + 'zz')
+ expected_req += chunkedblock(
+ '\n' + 'zz\n' * ((0x1b - len('end-of-body')) / 3) + 'end-of-body')
+ expected_req += '0\r\n\r\n'
+ self.assertEqual(('1.2.3.4', 80), sock.sa)
+ self.assertStringEqual(expected_req, sock.sent)
+ self.assertEqual("Thanks", con.getresponse().read())
+ self.assertEqual(sock.closed, False)
+
+ def testChunkedDownload(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'transfer-encoding: chunked',
+ '\r\n\r\n',
+ chunkedblock('hi '),
+ chunkedblock('there'),
+ chunkedblock(''),
+ ]
+ con.request('GET', '/')
+ self.assertStringEqual('hi there', con.getresponse().read())
+
+ def testChunkedDownloadBadEOL(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.data = ['HTTP/1.1 200 OK\n',
+ 'Server: BogusServer 1.0\n',
+ 'transfer-encoding: chunked',
+ '\n\n',
+ chunkedblock('hi ', eol='\n'),
+ chunkedblock('there', eol='\n'),
+ chunkedblock('', eol='\n'),
+ ]
+ con.request('GET', '/')
+ self.assertStringEqual('hi there', con.getresponse().read())
+
+ def testChunkedDownloadPartialChunkBadEOL(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.data = ['HTTP/1.1 200 OK\n',
+ 'Server: BogusServer 1.0\n',
+ 'transfer-encoding: chunked',
+ '\n\n',
+ chunkedblock('hi ', eol='\n'),
+ ] + list(chunkedblock('there\n' * 5, eol='\n')) + [
+ chunkedblock('', eol='\n')]
+ con.request('GET', '/')
+ self.assertStringEqual('hi there\nthere\nthere\nthere\nthere\n',
+ con.getresponse().read())
+
+ def testChunkedDownloadPartialChunk(self):
+ con = http.HTTPConnection('1.2.3.4:80')
+ con._connect()
+ sock = con.sock
+ sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'transfer-encoding: chunked',
+ '\r\n\r\n',
+ chunkedblock('hi '),
+ ] + list(chunkedblock('there\n' * 5)) + [chunkedblock('')]
+ con.request('GET', '/')
+ self.assertStringEqual('hi there\nthere\nthere\nthere\nthere\n',
+ con.getresponse().read())
+# no-check-code
diff --git a/mercurial/httpclient/tests/test_proxy_support.py b/mercurial/httpclient/tests/test_proxy_support.py
new file mode 100644
index 0000000..1526a9a
--- /dev/null
+++ b/mercurial/httpclient/tests/test_proxy_support.py
@@ -0,0 +1,135 @@
+# Copyright 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import unittest
+import socket
+
+import http
+
+# relative import to ease embedding the library
+import util
+
+
+def make_preloaded_socket(data):
+ """Make a socket pre-loaded with data so it can be read during connect.
+
+ Useful for https proxy tests because we have to read from the
+ socket during _connect rather than later on.
+ """
+ def s(*args, **kwargs):
+ sock = util.MockSocket(*args, **kwargs)
+ sock.early_data = data[:]
+ return sock
+ return s
+
+
+class ProxyHttpTest(util.HttpTestBase, unittest.TestCase):
+
+ def _run_simple_test(self, host, server_data, expected_req, expected_data):
+ con = http.HTTPConnection(host)
+ con._connect()
+ con.sock.data = server_data
+ con.request('GET', '/')
+
+ self.assertEqual(expected_req, con.sock.sent)
+ self.assertEqual(expected_data, con.getresponse().read())
+
+ def testSimpleRequest(self):
+ con = http.HTTPConnection('1.2.3.4:80',
+ proxy_hostport=('magicproxy', 4242))
+ con._connect()
+ con.sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'MultiHeader: Value\r\n'
+ 'MultiHeader: Other Value\r\n'
+ 'MultiHeader: One More!\r\n'
+ 'Content-Length: 10\r\n',
+ '\r\n'
+ '1234567890'
+ ]
+ con.request('GET', '/')
+
+ expected_req = ('GET http://1.2.3.4/ HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('127.0.0.42', 4242), con.sock.sa)
+ self.assertStringEqual(expected_req, con.sock.sent)
+ resp = con.getresponse()
+ self.assertEqual('1234567890', resp.read())
+ self.assertEqual(['Value', 'Other Value', 'One More!'],
+ resp.headers.getheaders('multiheader'))
+ self.assertEqual(['BogusServer 1.0'],
+ resp.headers.getheaders('server'))
+
+ def testSSLRequest(self):
+ con = http.HTTPConnection('1.2.3.4:443',
+ proxy_hostport=('magicproxy', 4242))
+ socket.socket = make_preloaded_socket(
+ ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 10\r\n',
+ '\r\n'
+ '1234567890'])
+ con._connect()
+ con.sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'Content-Length: 10\r\n',
+ '\r\n'
+ '1234567890'
+ ]
+ connect_sent = con.sock.sent
+ con.sock.sent = ''
+ con.request('GET', '/')
+
+ expected_connect = ('CONNECT 1.2.3.4:443 HTTP/1.0\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n'
+ '\r\n')
+ expected_request = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('127.0.0.42', 4242), con.sock.sa)
+ self.assertStringEqual(expected_connect, connect_sent)
+ self.assertStringEqual(expected_request, con.sock.sent)
+ resp = con.getresponse()
+ self.assertEqual(resp.status, 200)
+ self.assertEqual('1234567890', resp.read())
+ self.assertEqual(['BogusServer 1.0'],
+ resp.headers.getheaders('server'))
+
+ def testSSLProxyFailure(self):
+ con = http.HTTPConnection('1.2.3.4:443',
+ proxy_hostport=('magicproxy', 4242))
+ socket.socket = make_preloaded_socket(
+ ['HTTP/1.1 407 Proxy Authentication Required\r\n\r\n'])
+ self.assertRaises(http.HTTPProxyConnectFailedException, con._connect)
+ self.assertRaises(http.HTTPProxyConnectFailedException,
+ con.request, 'GET', '/')
+# no-check-code
diff --git a/mercurial/httpclient/tests/test_ssl.py b/mercurial/httpclient/tests/test_ssl.py
new file mode 100644
index 0000000..5799a8f
--- /dev/null
+++ b/mercurial/httpclient/tests/test_ssl.py
@@ -0,0 +1,93 @@
+# Copyright 2011, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import unittest
+
+import http
+
+# relative import to ease embedding the library
+import util
+
+
+
+class HttpSslTest(util.HttpTestBase, unittest.TestCase):
+ def testSslRereadRequired(self):
+ con = http.HTTPConnection('1.2.3.4:443')
+ con._connect()
+ # extend the list instead of assign because of how
+ # MockSSLSocket works.
+ con.sock.data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'MultiHeader: Value\r\n'
+ 'MultiHeader: Other Value\r\n'
+ 'MultiHeader: One More!\r\n'
+ 'Content-Length: 10\r\n',
+ '\r\n'
+ '1234567890'
+ ]
+ con.request('GET', '/')
+
+ expected_req = ('GET / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'accept-encoding: identity\r\n\r\n')
+
+ self.assertEqual(('1.2.3.4', 443), con.sock.sa)
+ self.assertEqual(expected_req, con.sock.sent)
+ resp = con.getresponse()
+ self.assertEqual('1234567890', resp.read())
+ self.assertEqual(['Value', 'Other Value', 'One More!'],
+ resp.headers.getheaders('multiheader'))
+ self.assertEqual(['BogusServer 1.0'],
+ resp.headers.getheaders('server'))
+
+ def testSslRereadInEarlyResponse(self):
+ con = http.HTTPConnection('1.2.3.4:443')
+ con._connect()
+ con.sock.early_data = ['HTTP/1.1 200 OK\r\n',
+ 'Server: BogusServer 1.0\r\n',
+ 'MultiHeader: Value\r\n'
+ 'MultiHeader: Other Value\r\n'
+ 'MultiHeader: One More!\r\n'
+ 'Content-Length: 10\r\n',
+ '\r\n'
+ '1234567890'
+ ]
+
+ expected_req = self.doPost(con, False)
+ self.assertEqual(None, con.sock,
+ 'Connection should have disowned socket')
+
+ resp = con.getresponse()
+ self.assertEqual(('1.2.3.4', 443), resp.sock.sa)
+ self.assertEqual(expected_req, resp.sock.sent)
+ self.assertEqual('1234567890', resp.read())
+ self.assertEqual(['Value', 'Other Value', 'One More!'],
+ resp.headers.getheaders('multiheader'))
+ self.assertEqual(['BogusServer 1.0'],
+ resp.headers.getheaders('server'))
+# no-check-code
diff --git a/mercurial/httpclient/tests/util.py b/mercurial/httpclient/tests/util.py
new file mode 100644
index 0000000..bbc3d87
--- /dev/null
+++ b/mercurial/httpclient/tests/util.py
@@ -0,0 +1,195 @@
+# Copyright 2010, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+import difflib
+import socket
+
+import http
+
+
+class MockSocket(object):
+ """Mock non-blocking socket object.
+
+ This is ONLY capable of mocking a nonblocking socket.
+
+ Attributes:
+ early_data: data to always send as soon as end of headers is seen
+ data: a list of strings to return on recv(), with the
+ assumption that the socket would block between each
+ string in the list.
+ read_wait_sentinel: data that must be written to the socket before
+ beginning the response.
+ close_on_empty: If true, close the socket when it runs out of data
+ for the client.
+ """
+ def __init__(self, af, socktype, proto):
+ self.af = af
+ self.socktype = socktype
+ self.proto = proto
+
+ self.early_data = []
+ self.data = []
+ self.remote_closed = self.closed = False
+ self.close_on_empty = False
+ self.sent = ''
+ self.read_wait_sentinel = http._END_HEADERS
+
+ def close(self):
+ self.closed = True
+
+ def connect(self, sa):
+ self.sa = sa
+
+ def setblocking(self, timeout):
+ assert timeout == 0
+
+ def recv(self, amt=-1):
+ if self.early_data:
+ datalist = self.early_data
+ elif not self.data:
+ return ''
+ else:
+ datalist = self.data
+ if amt == -1:
+ return datalist.pop(0)
+ data = datalist.pop(0)
+ if len(data) > amt:
+ datalist.insert(0, data[amt:])
+ if not self.data and not self.early_data and self.close_on_empty:
+ self.remote_closed = True
+ return data[:amt]
+
+ @property
+ def ready_for_read(self):
+ return ((self.early_data and http._END_HEADERS in self.sent)
+ or (self.read_wait_sentinel in self.sent and self.data)
+ or self.closed or self.remote_closed)
+
+ def send(self, data):
+ # this is a horrible mock, but nothing needs us to raise the
+ # correct exception yet
+ assert not self.closed, 'attempted to write to a closed socket'
+ assert not self.remote_closed, ('attempted to write to a'
+ ' socket closed by the server')
+ if len(data) > 8192:
+ data = data[:8192]
+ self.sent += data
+ return len(data)
+
+
+def mockselect(r, w, x, timeout=0):
+ """Simple mock for select()
+ """
+ readable = filter(lambda s: s.ready_for_read, r)
+ return readable, w[:], []
+
+
+class MockSSLSocket(object):
+ def __init__(self, sock):
+ self._sock = sock
+ self._fail_recv = True
+
+ def __getattr__(self, key):
+ return getattr(self._sock, key)
+
+ def __setattr__(self, key, value):
+ if key not in ('_sock', '_fail_recv'):
+ return setattr(self._sock, key, value)
+ return object.__setattr__(self, key, value)
+
+ def recv(self, amt=-1):
+ try:
+ if self._fail_recv:
+ raise socket.sslerror(socket.SSL_ERROR_WANT_READ)
+ return self._sock.recv(amt=amt)
+ finally:
+ self._fail_recv = not self._fail_recv
+
+
+def mocksslwrap(sock, keyfile=None, certfile=None,
+ server_side=False, cert_reqs=http.socketutil.CERT_NONE,
+ ssl_version=None, ca_certs=None,
+ do_handshake_on_connect=True,
+ suppress_ragged_eofs=True):
+ return MockSSLSocket(sock)
+
+
+def mockgetaddrinfo(host, port, unused, streamtype):
+ assert unused == 0
+ assert streamtype == socket.SOCK_STREAM
+ if host.count('.') != 3:
+ host = '127.0.0.42'
+ return [(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP, '',
+ (host, port))]
+
+
+class HttpTestBase(object):
+ def setUp(self):
+ self.orig_socket = socket.socket
+ socket.socket = MockSocket
+
+ self.orig_getaddrinfo = socket.getaddrinfo
+ socket.getaddrinfo = mockgetaddrinfo
+
+ self.orig_select = http.select.select
+ http.select.select = mockselect
+
+ self.orig_sslwrap = http.socketutil.wrap_socket
+ http.socketutil.wrap_socket = mocksslwrap
+
+ def tearDown(self):
+ socket.socket = self.orig_socket
+ http.select.select = self.orig_select
+ http.socketutil.wrap_socket = self.orig_sslwrap
+ socket.getaddrinfo = self.orig_getaddrinfo
+
+ def assertStringEqual(self, l, r):
+ try:
+ self.assertEqual(l, r, ('failed string equality check, '
+ 'see stdout for details'))
+ except:
+ add_nl = lambda li: map(lambda x: x + '\n', li)
+ print 'failed expectation:'
+ print ''.join(difflib.unified_diff(
+ add_nl(l.splitlines()), add_nl(r.splitlines()),
+ fromfile='expected', tofile='got'))
+ raise
+
+ def doPost(self, con, expect_body, body_to_send='This is some POST data'):
+ con.request('POST', '/', body=body_to_send,
+ expect_continue=True)
+ expected_req = ('POST / HTTP/1.1\r\n'
+ 'Host: 1.2.3.4\r\n'
+ 'content-length: %d\r\n'
+ 'Expect: 100-Continue\r\n'
+ 'accept-encoding: identity\r\n\r\n' %
+ len(body_to_send))
+ if expect_body:
+ expected_req += body_to_send
+ return expected_req
+# no-check-code
diff --git a/mercurial/httpconnection.py b/mercurial/httpconnection.py
index ce316d9..91745b9 100644
--- a/mercurial/httpconnection.py
+++ b/mercurial/httpconnection.py
@@ -38,7 +38,7 @@ class httpsendfile(object):
self.write = self._data.write
self.length = os.fstat(self._data.fileno()).st_size
self._pos = 0
- self._total = self.length // 1024 * 2
+ self._total = self.length / 1024 * 2
def read(self, *args, **kwargs):
try:
@@ -51,7 +51,7 @@ class httpsendfile(object):
# requires authentication. Since we can't know until we try
# once whether authentication will be required, just lie to
# the user and maybe the push succeeds suddenly at 50%.
- self.ui.progress(_('sending'), self._pos // 1024,
+ self.ui.progress(_('sending'), self._pos / 1024,
unit=_('kb'), total=self._total)
return ret
@@ -70,11 +70,7 @@ def readauthforuri(ui, uri, user):
gdict[setting] = val
# Find the best match
- if '://' in uri:
- scheme, hostpath = uri.split('://', 1)
- else:
- # py2.4.1 doesn't provide the full URI
- scheme, hostpath = 'http', uri
+ scheme, hostpath = uri.split('://', 1)
bestuser = None
bestlen = 0
bestauth = None
diff --git a/mercurial/httppeer.py b/mercurial/httprepo.py
index 9b84d32..d5fd03f 100644
--- a/mercurial/httppeer.py
+++ b/mercurial/httprepo.py
@@ -1,4 +1,4 @@
-# httppeer.py - HTTP repository proxy classes for mercurial
+# httprepo.py - HTTP repository proxy classes for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
@@ -23,12 +23,11 @@ def zgenerator(f):
raise IOError(None, _('connection ended unexpectedly'))
yield zd.flush()
-class httppeer(wireproto.wirepeer):
+class httprepository(wireproto.wirerepository):
def __init__(self, ui, path):
self.path = path
self.caps = None
self.handler = None
- self.urlopener = None
u = util.url(path)
if u.query or u.fragment:
raise util.Abort(_('unsupported URL component: "%s"') %
@@ -43,10 +42,10 @@ class httppeer(wireproto.wirepeer):
self.urlopener = url.opener(ui, authinfo)
def __del__(self):
- if self.urlopener:
- for h in self.urlopener.handlers:
- h.close()
- getattr(h, "close_all", lambda : None)()
+ for h in self.urlopener.handlers:
+ h.close()
+ if hasattr(h, "close_all"):
+ h.close_all()
def url(self):
return self.path
@@ -56,7 +55,7 @@ class httppeer(wireproto.wirepeer):
def _fetchcaps(self):
self.caps = set(self._call('capabilities').split())
- def _capabilities(self):
+ def get_caps(self):
if self.caps is None:
try:
self._fetchcaps()
@@ -66,6 +65,8 @@ class httppeer(wireproto.wirepeer):
(' '.join(self.caps or ['none'])))
return self.caps
+ capabilities = property(get_caps)
+
def lock(self):
raise util.Abort(_('operation not supported over http'))
@@ -79,9 +80,6 @@ class httppeer(wireproto.wirepeer):
elif data is not None:
size = len(data)
headers = args.pop('headers', {})
- if data is not None and 'Content-Type' not in headers:
- headers['Content-Type'] = 'application/mercurial-0.1'
-
if size and self.ui.configbool('ui', 'usehttp2', False):
headers['Expect'] = '100-Continue'
@@ -141,8 +139,6 @@ class httppeer(wireproto.wirepeer):
proto = resp.headers.get('content-type', '')
safeurl = util.hidepassword(self._url)
- if proto.startswith('application/hg-error'):
- raise error.OutOfBandError(resp.read())
# accept old "text/plain" and "application/hg-changegroup" for now
if not (proto.startswith('application/mercurial-') or
proto.startswith('text/plain') or
@@ -216,21 +212,21 @@ class httppeer(wireproto.wirepeer):
def _decompress(self, stream):
return util.chunkbuffer(zgenerator(stream))
-class httpspeer(httppeer):
+class httpsrepository(httprepository):
def __init__(self, ui, path):
if not url.has_https:
raise util.Abort(_('Python support for SSL and HTTPS '
'is not installed'))
- httppeer.__init__(self, ui, path)
+ httprepository.__init__(self, ui, path)
def instance(ui, path, create):
if create:
raise util.Abort(_('cannot create new http repository'))
try:
if path.startswith('https:'):
- inst = httpspeer(ui, path)
+ inst = httpsrepository(ui, path)
else:
- inst = httppeer(ui, path)
+ inst = httprepository(ui, path)
try:
# Try to do useful work when checking compatibility.
# Usually saves a roundtrip since we want the caps anyway.
diff --git a/mercurial/i18n.py b/mercurial/i18n.py
index b06a02e..f6370e0 100644
--- a/mercurial/i18n.py
+++ b/mercurial/i18n.py
@@ -9,7 +9,7 @@ import encoding
import gettext, sys, os
# modelled after templater.templatepath:
-if getattr(sys, 'frozen', None) is not None:
+if hasattr(sys, 'frozen'):
module = sys.executable
else:
module = __file__
@@ -61,3 +61,4 @@ if _plain():
_ = lambda message: message
else:
_ = gettext
+
diff --git a/mercurial/ignore.py b/mercurial/ignore.py
index 9c2e9cf..c43cd05 100644
--- a/mercurial/ignore.py
+++ b/mercurial/ignore.py
@@ -78,7 +78,6 @@ def ignore(root, files, warn):
pats[f] = []
fp = open(f)
pats[f], warnings = ignorepats(fp)
- fp.close()
for warning in warnings:
warn("%s: %s\n" % (f, warning))
except IOError, inst:
diff --git a/mercurial/keepalive.py b/mercurial/keepalive.py
index 13243ba..205a9e6 100644
--- a/mercurial/keepalive.py
+++ b/mercurial/keepalive.py
@@ -9,8 +9,10 @@
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
-# License along with this library; if not, see
-# <http://www.gnu.org/licenses/>.
+# License along with this library; if not, write to the
+# Free Software Foundation, Inc.,
+# 59 Temple Place, Suite 330,
+# Boston, MA 02111-1307 USA
# This file is part of urlgrabber, a high-level cross-protocol url-grabber
# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko
@@ -136,7 +138,7 @@ class ConnectionManager(object):
def add(self, host, connection, ready):
self._lock.acquire()
try:
- if host not in self._hostmap:
+ if not host in self._hostmap:
self._hostmap[host] = []
self._hostmap[host].append(connection)
self._connmap[connection] = host
@@ -211,7 +213,7 @@ class KeepAliveHandler(object):
h.close()
def _request_closed(self, request, host, connection):
- """tells us that this request is now closed and that the
+ """tells us that this request is now closed and the the
connection is ready for another request"""
self._cm.set_ready(connection, 1)
@@ -290,7 +292,7 @@ class KeepAliveHandler(object):
# worked. We'll check the version below, too.
except (socket.error, httplib.HTTPException):
r = None
- except: # re-raises
+ except:
# adding this block just in case we've missed
# something we will still raise the exception, but
# lets try and close the connection and remove it
@@ -534,7 +536,7 @@ def safesend(self, str):
if self.auto_open:
self.connect()
else:
- raise httplib.NotConnected
+ raise httplib.NotConnected()
# send the data to the server. if we get a broken pipe, then close
# the socket. we want to reconnect when somebody tries to send again.
@@ -545,14 +547,13 @@ def safesend(self, str):
print "send:", repr(str)
try:
blocksize = 8192
- read = getattr(str, 'read', None)
- if read is not None:
+ if hasattr(str,'read') :
if self.debuglevel > 0:
print "sendIng a read()able"
- data = read(blocksize)
+ data = str.read(blocksize)
while data:
self.sock.sendall(data)
- data = read(blocksize)
+ data = str.read(blocksize)
else:
self.sock.sendall(str)
except socket.error, v:
@@ -758,7 +759,7 @@ if __name__ == '__main__':
try:
N = int(sys.argv[1])
url = sys.argv[2]
- except (IndexError, ValueError):
+ except:
print "%s <integer> <url>" % sys.argv[0]
else:
test(url, N)
diff --git a/mercurial/localrepo.py b/mercurial/localrepo.py
index 89c1edd..6c1f6b7 100644
--- a/mercurial/localrepo.py
+++ b/mercurial/localrepo.py
@@ -4,168 +4,72 @@
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
+
from node import bin, hex, nullid, nullrev, short
from i18n import _
-import peer, changegroup, subrepo, discovery, pushkey, obsolete
-import changelog, dirstate, filelog, manifest, context, bookmarks, phases
-import lock, transaction, store, encoding, base85
-import scmutil, util, extensions, hook, error, revset
+import repo, changegroup, subrepo, discovery, pushkey
+import changelog, dirstate, filelog, manifest, context, bookmarks
+import lock, transaction, store, encoding
+import scmutil, util, extensions, hook, error
import match as matchmod
import merge as mergemod
import tags as tagsmod
from lock import release
import weakref, errno, os, time, inspect
propertycache = util.propertycache
-filecache = scmutil.filecache
-
-class storecache(filecache):
- """filecache for files in the store"""
- def join(self, obj, fname):
- return obj.sjoin(fname)
-
-MODERNCAPS = set(('lookup', 'branchmap', 'pushkey', 'known', 'getbundle'))
-LEGACYCAPS = MODERNCAPS.union(set(['changegroupsubset']))
-
-class localpeer(peer.peerrepository):
- '''peer for a local repo; reflects only the most recent API'''
-
- def __init__(self, repo, caps=MODERNCAPS):
- peer.peerrepository.__init__(self)
- self._repo = repo
- self.ui = repo.ui
- self._caps = repo._restrictcapabilities(caps)
- self.requirements = repo.requirements
- self.supportedformats = repo.supportedformats
-
- def close(self):
- self._repo.close()
-
- def _capabilities(self):
- return self._caps
-
- def local(self):
- return self._repo
-
- def canpush(self):
- return True
-
- def url(self):
- return self._repo.url()
-
- def lookup(self, key):
- return self._repo.lookup(key)
-
- def branchmap(self):
- return discovery.visiblebranchmap(self._repo)
-
- def heads(self):
- return discovery.visibleheads(self._repo)
-
- def known(self, nodes):
- return self._repo.known(nodes)
-
- def getbundle(self, source, heads=None, common=None):
- return self._repo.getbundle(source, heads=heads, common=common)
-
- # TODO We might want to move the next two calls into legacypeer and add
- # unbundle instead.
-
- def lock(self):
- return self._repo.lock()
-
- def addchangegroup(self, cg, source, url):
- return self._repo.addchangegroup(cg, source, url)
-
- def pushkey(self, namespace, key, old, new):
- return self._repo.pushkey(namespace, key, old, new)
-
- def listkeys(self, namespace):
- return self._repo.listkeys(namespace)
-
- def debugwireargs(self, one, two, three=None, four=None, five=None):
- '''used to test argument passing over the wire'''
- return "%s %s %s %s %s" % (one, two, three, four, five)
-
-class locallegacypeer(localpeer):
- '''peer extension which implements legacy methods too; used for tests with
- restricted capabilities'''
-
- def __init__(self, repo):
- localpeer.__init__(self, repo, caps=LEGACYCAPS)
-
- def branches(self, nodes):
- return self._repo.branches(nodes)
-
- def between(self, pairs):
- return self._repo.between(pairs)
-
- def changegroup(self, basenodes, source):
- return self._repo.changegroup(basenodes, source)
-
- def changegroupsubset(self, bases, heads, source):
- return self._repo.changegroupsubset(bases, heads, source)
-
-class localrepository(object):
+class localrepository(repo.repository):
+ capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey',
+ 'known', 'getbundle'))
supportedformats = set(('revlogv1', 'generaldelta'))
supported = supportedformats | set(('store', 'fncache', 'shared',
'dotencode'))
- openerreqs = set(('revlogv1', 'generaldelta'))
- requirements = ['revlogv1']
-
- def _baserequirements(self, create):
- return self.requirements[:]
def __init__(self, baseui, path=None, create=False):
- self.wopener = scmutil.opener(path, expand=True)
- self.wvfs = self.wopener
- self.root = self.wvfs.base
- self.path = self.wvfs.join(".hg")
+ repo.repository.__init__(self)
+ self.root = os.path.realpath(util.expandpath(path))
+ self.path = os.path.join(self.root, ".hg")
self.origroot = path
self.auditor = scmutil.pathauditor(self.root, self._checknested)
self.opener = scmutil.opener(self.path)
- self.vfs = self.opener
+ self.wopener = scmutil.opener(self.root)
self.baseui = baseui
self.ui = baseui.copy()
- # A list of callback to shape the phase if no data were found.
- # Callback are in the form: func(repo, roots) --> processed root.
- # This list it to be filled by extension during repo setup
- self._phasedefaults = []
+
try:
self.ui.readconfig(self.join("hgrc"), self.root)
extensions.loadall(self.ui)
except IOError:
pass
- if not self.vfs.isdir():
+ if not os.path.isdir(self.path):
if create:
- if not self.wvfs.exists():
- self.wvfs.makedirs()
- self.vfs.makedir(notindexed=True)
- requirements = self._baserequirements(create)
+ if not os.path.exists(path):
+ util.makedirs(path)
+ util.makedir(self.path, notindexed=True)
+ requirements = ["revlogv1"]
if self.ui.configbool('format', 'usestore', True):
- self.vfs.mkdir("store")
+ os.mkdir(os.path.join(self.path, "store"))
requirements.append("store")
if self.ui.configbool('format', 'usefncache', True):
requirements.append("fncache")
if self.ui.configbool('format', 'dotencode', True):
requirements.append('dotencode')
# create an invalid changelog
- self.vfs.append(
+ self.opener.append(
"00changelog.i",
'\0\0\0\2' # represents revlogv2
' dummy changelog to prevent using the old repo layout'
)
if self.ui.configbool('format', 'generaldelta', False):
requirements.append("generaldelta")
- requirements = set(requirements)
else:
raise error.RepoError(_("repository %s not found") % path)
elif create:
raise error.RepoError(_("repository %s already exists") % path)
else:
try:
- requirements = scmutil.readrequires(self.vfs, self.supported)
+ requirements = scmutil.readrequires(self.opener, self.supported)
except IOError, inst:
if inst.errno != errno.ENOENT:
raise
@@ -173,7 +77,7 @@ class localrepository(object):
self.sharedpath = self.path
try:
- s = os.path.realpath(self.opener.read("sharedpath").rstrip('\n'))
+ s = os.path.realpath(self.opener.read("sharedpath"))
if not os.path.exists(s):
raise error.RepoError(
_('.hg/sharedpath points to nonexistent directory %s') % s)
@@ -185,36 +89,32 @@ class localrepository(object):
self.store = store.store(requirements, self.sharedpath, scmutil.opener)
self.spath = self.store.path
self.sopener = self.store.opener
- self.svfs = self.sopener
self.sjoin = self.store.join
self.opener.createmode = self.store.createmode
self._applyrequirements(requirements)
if create:
self._writerequirements()
+ # These two define the set of tags for this repository. _tags
+ # maps tag name to node; _tagtypes maps tag name to 'global' or
+ # 'local'. (Global tags are defined by .hgtags across all
+ # heads, and local tags are defined in .hg/localtags.) They
+ # constitute the in-memory cache of tags.
+ self._tags = None
+ self._tagtypes = None
self._branchcache = None
self._branchcachetip = None
+ self.nodetagscache = None
self.filterpats = {}
self._datafilters = {}
self._transref = self._lockref = self._wlockref = None
- # A cache for various files under .hg/ that tracks file changes,
- # (used by the filecache decorator)
- #
- # Maps a property name to its util.filecacheentry
- self._filecache = {}
-
- def close(self):
- pass
-
- def _restrictcapabilities(self, caps):
- return caps
-
def _applyrequirements(self, requirements):
self.requirements = requirements
+ openerreqs = set(('revlogv1', 'generaldelta'))
self.sopener.options = dict((r, 1) for r in requirements
- if r in self.openerreqs)
+ if r in openerreqs)
def _writerequirements(self):
reqfile = self.opener("requires", "w")
@@ -227,7 +127,6 @@ class localrepository(object):
if not path.startswith(self.root):
return False
subpath = path[len(self.root) + 1:]
- normsubpath = util.pconvert(subpath)
# XXX: Checking against the current working copy is wrong in
# the sense that it can reject things like
@@ -249,9 +148,9 @@ class localrepository(object):
ctx = self[None]
parts = util.splitpath(subpath)
while parts:
- prefix = '/'.join(parts)
+ prefix = os.sep.join(parts)
if prefix in ctx.substate:
- if prefix == normsubpath:
+ if prefix == subpath:
return True
else:
sub = ctx.sub(prefix)
@@ -260,61 +159,15 @@ class localrepository(object):
parts.pop()
return False
- def peer(self):
- return localpeer(self) # not cached to avoid reference cycle
-
- @filecache('bookmarks')
+ @util.propertycache
def _bookmarks(self):
return bookmarks.read(self)
- @filecache('bookmarks.current')
+ @util.propertycache
def _bookmarkcurrent(self):
return bookmarks.readcurrent(self)
- def _writebookmarks(self, marks):
- bookmarks.write(self)
-
- def bookmarkheads(self, bookmark):
- name = bookmark.split('@', 1)[0]
- heads = []
- for mark, n in self._bookmarks.iteritems():
- if mark.split('@', 1)[0] == name:
- heads.append(n)
- return heads
-
- @storecache('phaseroots')
- def _phasecache(self):
- return phases.phasecache(self, self._phasedefaults)
-
- @storecache('obsstore')
- def obsstore(self):
- store = obsolete.obsstore(self.sopener)
- if store and not obsolete._enabled:
- # message is rare enough to not be translated
- msg = 'obsolete feature not enabled but %i markers found!\n'
- self.ui.warn(msg % len(list(store)))
- return store
-
@propertycache
- def hiddenrevs(self):
- """hiddenrevs: revs that should be hidden by command and tools
-
- This set is carried on the repo to ease initialisation and lazy
- loading it'll probably move back to changelog for efficienty and
- consistency reason
-
- Note that the hiddenrevs will needs invalidations when
- - a new changesets is added (possible unstable above extinct)
- - a new obsolete marker is added (possible new extinct changeset)
- """
- hidden = set()
- if self.obsstore:
- ### hide extinct changeset that are not accessible by any mean
- hiddenquery = 'extinct() - ::(. + bookmark() + tagged())'
- hidden.update(self.revs(hiddenquery))
- return hidden
-
- @storecache('00changelog.i')
def changelog(self):
c = changelog.changelog(self.sopener)
if 'HG_PENDING' in os.environ:
@@ -323,11 +176,11 @@ class localrepository(object):
c.readpending('00changelog.i.a')
return c
- @storecache('00manifest.i')
+ @propertycache
def manifest(self):
return manifest.manifest(self.sopener)
- @filecache('dirstate')
+ @propertycache
def dirstate(self):
warned = [0]
def validate(node):
@@ -364,20 +217,6 @@ class localrepository(object):
for i in xrange(len(self)):
yield i
- def revs(self, expr, *args):
- '''Return a list of revisions matching the given revset'''
- expr = revset.formatspec(expr, *args)
- m = revset.match(None, expr)
- return [r for r in m(self, range(len(self)))]
-
- def set(self, expr, *args):
- '''
- Yield a context for each matching revision, after doing arg
- replacement via revset.formatspec
- '''
- for r in self.revs(expr, *args):
- yield self[r]
-
def url(self):
return 'file:' + self.root
@@ -410,9 +249,8 @@ class localrepository(object):
fp.write('\n')
for name in names:
m = munge and munge(name) or name
- if (self._tagscache.tagtypes and
- name in self._tagscache.tagtypes):
- old = self.tags().get(name, nullid)
+ if self._tagtypes and name in self._tagtypes:
+ old = self._tags.get(name, nullid)
fp.write('%s %s\n' % (hex(old), m))
fp.write('%s %s\n' % (hex(node), m))
fp.close()
@@ -446,8 +284,6 @@ class localrepository(object):
fp.close()
- self.invalidatecaches()
-
if '.hgtags' not in self.dirstate:
self[None].add(['.hgtags'])
@@ -489,40 +325,12 @@ class localrepository(object):
self.tags() # instantiate the cache
self._tag(names, node, message, local, user, date)
- @propertycache
- def _tagscache(self):
- '''Returns a tagscache object that contains various tags related
- caches.'''
-
- # This simplifies its cache management by having one decorated
- # function (this one) and the rest simply fetch things from it.
- class tagscache(object):
- def __init__(self):
- # These two define the set of tags for this repository. tags
- # maps tag name to node; tagtypes maps tag name to 'global' or
- # 'local'. (Global tags are defined by .hgtags across all
- # heads, and local tags are defined in .hg/localtags.)
- # They constitute the in-memory cache of tags.
- self.tags = self.tagtypes = None
-
- self.nodetagscache = self.tagslist = None
-
- cache = tagscache()
- cache.tags, cache.tagtypes = self._findtags()
-
- return cache
-
def tags(self):
'''return a mapping of tag to node'''
- t = {}
- for k, v in self._tagscache.tags.iteritems():
- try:
- # ignore tags to unknown nodes
- self.changelog.rev(v)
- t[k] = v
- except (error.LookupError, ValueError):
- pass
- return t
+ if self._tags is None:
+ (self._tags, self._tagtypes) = self._findtags()
+
+ return self._tags
def _findtags(self):
'''Do the hard work of finding tags. Return a pair of dicts
@@ -551,7 +359,12 @@ class localrepository(object):
tags = {}
for (name, (node, hist)) in alltags.iteritems():
if node != nullid:
- tags[encoding.tolocal(name)] = node
+ try:
+ # ignore tags to unknown nodes
+ self.changelog.lookup(node)
+ tags[encoding.tolocal(name)] = node
+ except error.LookupError:
+ pass
tags['tip'] = self.changelog.tip()
tagtypes = dict([(encoding.tolocal(name), value)
for (name, value) in tagtypes.iteritems()])
@@ -566,29 +379,27 @@ class localrepository(object):
None : tag does not exist
'''
- return self._tagscache.tagtypes.get(tagname)
+ self.tags()
+
+ return self._tagtypes.get(tagname)
def tagslist(self):
'''return a list of tags ordered by revision'''
- if not self._tagscache.tagslist:
- l = []
- for t, n in self.tags().iteritems():
- r = self.changelog.rev(n)
- l.append((r, t, n))
- self._tagscache.tagslist = [(t, n) for r, t, n in sorted(l)]
-
- return self._tagscache.tagslist
+ l = []
+ for t, n in self.tags().iteritems():
+ r = self.changelog.rev(n)
+ l.append((r, t, n))
+ return [(t, n) for r, t, n in sorted(l)]
def nodetags(self, node):
'''return the tags associated with a node'''
- if not self._tagscache.nodetagscache:
- nodetagscache = {}
- for t, n in self._tagscache.tags.iteritems():
- nodetagscache.setdefault(n, []).append(t)
- for tags in nodetagscache.itervalues():
+ if not self.nodetagscache:
+ self.nodetagscache = {}
+ for t, n in self.tags().iteritems():
+ self.nodetagscache.setdefault(n, []).append(t)
+ for tags in self.nodetagscache.itervalues():
tags.sort()
- self._tagscache.nodetagscache = nodetagscache
- return self._tagscache.nodetagscache.get(node, [])
+ return self.nodetagscache.get(node, [])
def nodebookmarks(self, node):
marks = []
@@ -610,7 +421,7 @@ class localrepository(object):
def updatebranchcache(self):
tip = self.changelog.tip()
if self._branchcache is not None and self._branchcachetip == tip:
- return
+ return self._branchcache
oldtip = self._branchcachetip
self._branchcachetip = tip
@@ -621,7 +432,7 @@ class localrepository(object):
partial = self._branchcache
self._branchtags(partial, lrev)
- # this private cache holds all heads (not just the branch tips)
+ # this private cache holds all heads (not just tips)
self._branchcache = partial
def branchmap(self):
@@ -629,27 +440,17 @@ class localrepository(object):
self.updatebranchcache()
return self._branchcache
- def _branchtip(self, heads):
- '''return the tipmost branch head in heads'''
- tip = heads[-1]
- for h in reversed(heads):
- if not self[h].closesbranch():
- tip = h
- break
- return tip
-
- def branchtip(self, branch):
- '''return the tip node for a given branch'''
- if branch not in self.branchmap():
- raise error.RepoLookupError(_("unknown branch '%s'") % branch)
- return self._branchtip(self.branchmap()[branch])
-
def branchtags(self):
'''return a dict where branch names map to the tipmost head of
the branch, open heads come before closed'''
bt = {}
for bn, heads in self.branchmap().iteritems():
- bt[bn] = self._branchtip(heads)
+ tip = heads[-1]
+ for h in reversed(heads):
+ if 'close' not in self.changelog.read(h)[5]:
+ tip = h
+ break
+ bt[bn] = tip
return bt
def _readbranchcache(self):
@@ -672,9 +473,6 @@ class localrepository(object):
continue
node, label = l.split(" ", 1)
label = encoding.tolocal(label.strip())
- if not node in self:
- raise ValueError('invalidating branch cache because node '+
- '%s does not exist' % node)
partial.setdefault(label, []).append(bin(node))
except KeyboardInterrupt:
raise
@@ -691,15 +489,11 @@ class localrepository(object):
for label, nodes in branches.iteritems():
for node in nodes:
f.write("%s %s\n" % (hex(node), encoding.fromlocal(label)))
- f.close()
+ f.rename()
except (IOError, OSError):
pass
def _updatebranchcache(self, partial, ctxgen):
- """Given a branchhead cache, partial, that may have extra nodes or be
- missing heads, and a generator of nodes that are at least a superset of
- heads missing, this function updates partial to be correct.
- """
# collect new branch entries
newbranches = {}
for c in ctxgen:
@@ -709,54 +503,54 @@ class localrepository(object):
# 1 (branch a) -> 2 (branch b) -> 3 (branch a)
for branch, newnodes in newbranches.iteritems():
bheads = partial.setdefault(branch, [])
- # Remove candidate heads that no longer are in the repo (e.g., as
- # the result of a strip that just happened). Avoid using 'node in
- # self' here because that dives down into branchcache code somewhat
- # recrusively.
- bheadrevs = [self.changelog.rev(node) for node in bheads
- if self.changelog.hasnode(node)]
- newheadrevs = [self.changelog.rev(node) for node in newnodes
- if self.changelog.hasnode(node)]
- ctxisnew = bheadrevs and min(newheadrevs) > max(bheadrevs)
- # Remove duplicates - nodes that are in newheadrevs and are already
- # in bheadrevs. This can happen if you strip a node whose parent
- # was already a head (because they're on different branches).
- bheadrevs = sorted(set(bheadrevs).union(newheadrevs))
-
- # Starting from tip means fewer passes over reachable. If we know
- # the new candidates are not ancestors of existing heads, we don't
- # have to examine ancestors of existing heads
- if ctxisnew:
- iterrevs = sorted(newheadrevs)
- else:
- iterrevs = list(bheadrevs)
-
- # This loop prunes out two kinds of heads - heads that are
- # superceded by a head in newheadrevs, and newheadrevs that are not
- # heads because an existing head is their descendant.
- while iterrevs:
- latest = iterrevs.pop()
- if latest not in bheadrevs:
+ bheads.extend(newnodes)
+ if len(bheads) <= 1:
+ continue
+ bheads = sorted(bheads, key=lambda x: self[x].rev())
+ # starting from tip means fewer passes over reachable
+ while newnodes:
+ latest = newnodes.pop()
+ if latest not in bheads:
continue
- ancestors = set(self.changelog.ancestors([latest],
- bheadrevs[0]))
- if ancestors:
- bheadrevs = [b for b in bheadrevs if b not in ancestors]
- partial[branch] = [self.changelog.node(rev) for rev in bheadrevs]
-
- # There may be branches that cease to exist when the last commit in the
- # branch was stripped. This code filters them out. Note that the
- # branch that ceased to exist may not be in newbranches because
- # newbranches is the set of candidate heads, which when you strip the
- # last commit in a branch will be the parent branch.
- for branch in partial.keys():
- nodes = [head for head in partial[branch]
- if self.changelog.hasnode(head)]
- if not nodes:
- del partial[branch]
+ minbhrev = self[bheads[0]].node()
+ reachable = self.changelog.reachable(latest, minbhrev)
+ reachable.remove(latest)
+ if reachable:
+ bheads = [b for b in bheads if b not in reachable]
+ partial[branch] = bheads
def lookup(self, key):
- return self[key].node()
+ if isinstance(key, int):
+ return self.changelog.node(key)
+ elif key == '.':
+ return self.dirstate.p1()
+ elif key == 'null':
+ return nullid
+ elif key == 'tip':
+ return self.changelog.tip()
+ n = self.changelog._match(key)
+ if n:
+ return n
+ if key in self._bookmarks:
+ return self._bookmarks[key]
+ if key in self.tags():
+ return self.tags()[key]
+ if key in self.branchtags():
+ return self.branchtags()[key]
+ n = self.changelog._partialmatch(key)
+ if n:
+ return n
+
+ # can't find key, check if it might have come from damaged dirstate
+ if key in self.dirstate.parents():
+ raise error.Abort(_("working directory has unknown parent '%s'!")
+ % short(key))
+ try:
+ if len(key) == 20:
+ key = hex(key)
+ except TypeError:
+ pass
+ raise error.RepoLookupError(_("unknown revision '%s'") % key)
def lookupbranch(self, key, remote=None):
repo = remote or self
@@ -768,20 +562,11 @@ class localrepository(object):
def known(self, nodes):
nm = self.changelog.nodemap
- pc = self._phasecache
- result = []
- for n in nodes:
- r = nm.get(n)
- resp = not (r is None or pc.phase(self, r) >= phases.secret)
- result.append(resp)
- return result
+ return [(n in nm) for n in nodes]
def local(self):
return self
- def cancopy(self):
- return self.local() # so statichttprepo's override of local() works
-
def join(self, f):
return os.path.join(self.path, f)
@@ -800,17 +585,6 @@ class localrepository(object):
'''get list of changectxs for parents of changeid'''
return self[changeid].parents()
- def setparents(self, p1, p2=nullid):
- copies = self.dirstate.setparents(p1, p2)
- if copies:
- # Adjust copy records, the dirstate cannot do it, it
- # requires access to parents manifests. Preserve them
- # only for entries added to first parent.
- pctx = self[p1]
- for f in copies:
- if f not in pctx and copies[f] in pctx:
- self.dirstate.copy(copies[f], f)
-
def filectx(self, path, changeid=None, fileid=None):
"""changeid can be a changeset revision, node, or tag.
fileid can be a file revision or node."""
@@ -901,8 +675,8 @@ class localrepository(object):
raise error.RepoError(
_("abandoned transaction found - run hg recover"))
- self._writejournal(desc)
- renames = [(x, undoname(x)) for x in self._journalfiles()]
+ journalfiles = self._writejournal(desc)
+ renames = [(x, undoname(x)) for x in journalfiles]
tr = transaction.transaction(self.ui.warn, self.sopener,
self.sjoin("journal"),
@@ -911,26 +685,27 @@ class localrepository(object):
self._transref = weakref.ref(tr)
return tr
- def _journalfiles(self):
- return (self.sjoin('journal'), self.join('journal.dirstate'),
- self.join('journal.branch'), self.join('journal.desc'),
- self.join('journal.bookmarks'),
- self.sjoin('journal.phaseroots'))
-
- def undofiles(self):
- return [undoname(x) for x in self._journalfiles()]
-
def _writejournal(self, desc):
- self.opener.write("journal.dirstate",
- self.opener.tryread("dirstate"))
+ # save dirstate for rollback
+ try:
+ ds = self.opener.read("dirstate")
+ except IOError:
+ ds = ""
+ self.opener.write("journal.dirstate", ds)
self.opener.write("journal.branch",
encoding.fromlocal(self.dirstate.branch()))
self.opener.write("journal.desc",
"%d\n%s\n" % (len(self), desc))
- self.opener.write("journal.bookmarks",
- self.opener.tryread("bookmarks"))
- self.sopener.write("journal.phaseroots",
- self.sopener.tryread("phaseroots"))
+
+ bkname = self.join('bookmarks')
+ if os.path.exists(bkname):
+ util.copyfile(bkname, self.join('journal.bookmarks'))
+ else:
+ self.opener.write('journal.bookmarks', '')
+
+ return (self.sjoin('journal'), self.join('journal.dirstate'),
+ self.join('journal.branch'), self.join('journal.desc'),
+ self.join('journal.bookmarks'))
def recover(self):
lock = self.lock()
@@ -947,127 +722,67 @@ class localrepository(object):
finally:
lock.release()
- def rollback(self, dryrun=False, force=False):
+ def rollback(self, dryrun=False):
wlock = lock = None
try:
wlock = self.wlock()
lock = self.lock()
if os.path.exists(self.sjoin("undo")):
- return self._rollback(dryrun, force)
+ try:
+ args = self.opener.read("undo.desc").splitlines()
+ if len(args) >= 3 and self.ui.verbose:
+ desc = _("repository tip rolled back to revision %s"
+ " (undo %s: %s)\n") % (
+ int(args[0]) - 1, args[1], args[2])
+ elif len(args) >= 2:
+ desc = _("repository tip rolled back to revision %s"
+ " (undo %s)\n") % (
+ int(args[0]) - 1, args[1])
+ except IOError:
+ desc = _("rolling back unknown transaction\n")
+ self.ui.status(desc)
+ if dryrun:
+ return
+ transaction.rollback(self.sopener, self.sjoin("undo"),
+ self.ui.warn)
+ util.rename(self.join("undo.dirstate"), self.join("dirstate"))
+ if os.path.exists(self.join('undo.bookmarks')):
+ util.rename(self.join('undo.bookmarks'),
+ self.join('bookmarks'))
+ try:
+ branch = self.opener.read("undo.branch")
+ self.dirstate.setbranch(branch)
+ except IOError:
+ self.ui.warn(_("named branch could not be reset, "
+ "current branch is still: %s\n")
+ % self.dirstate.branch())
+ self.invalidate()
+ self.dirstate.invalidate()
+ self.destroyed()
+ parents = tuple([p.rev() for p in self.parents()])
+ if len(parents) > 1:
+ self.ui.status(_("working directory now based on "
+ "revisions %d and %d\n") % parents)
+ else:
+ self.ui.status(_("working directory now based on "
+ "revision %d\n") % parents)
else:
self.ui.warn(_("no rollback information available\n"))
return 1
finally:
release(lock, wlock)
- def _rollback(self, dryrun, force):
- ui = self.ui
- try:
- args = self.opener.read('undo.desc').splitlines()
- (oldlen, desc, detail) = (int(args[0]), args[1], None)
- if len(args) >= 3:
- detail = args[2]
- oldtip = oldlen - 1
-
- if detail and ui.verbose:
- msg = (_('repository tip rolled back to revision %s'
- ' (undo %s: %s)\n')
- % (oldtip, desc, detail))
- else:
- msg = (_('repository tip rolled back to revision %s'
- ' (undo %s)\n')
- % (oldtip, desc))
- except IOError:
- msg = _('rolling back unknown transaction\n')
- desc = None
-
- if not force and self['.'] != self['tip'] and desc == 'commit':
- raise util.Abort(
- _('rollback of last commit while not checked out '
- 'may lose data'), hint=_('use -f to force'))
-
- ui.status(msg)
- if dryrun:
- return 0
-
- parents = self.dirstate.parents()
- transaction.rollback(self.sopener, self.sjoin('undo'), ui.warn)
- if os.path.exists(self.join('undo.bookmarks')):
- util.rename(self.join('undo.bookmarks'),
- self.join('bookmarks'))
- if os.path.exists(self.sjoin('undo.phaseroots')):
- util.rename(self.sjoin('undo.phaseroots'),
- self.sjoin('phaseroots'))
- self.invalidate()
-
- # Discard all cache entries to force reloading everything.
- self._filecache.clear()
-
- parentgone = (parents[0] not in self.changelog.nodemap or
- parents[1] not in self.changelog.nodemap)
- if parentgone:
- util.rename(self.join('undo.dirstate'), self.join('dirstate'))
- try:
- branch = self.opener.read('undo.branch')
- self.dirstate.setbranch(branch)
- except IOError:
- ui.warn(_('named branch could not be reset: '
- 'current branch is still \'%s\'\n')
- % self.dirstate.branch())
-
- self.dirstate.invalidate()
- parents = tuple([p.rev() for p in self.parents()])
- if len(parents) > 1:
- ui.status(_('working directory now based on '
- 'revisions %d and %d\n') % parents)
- else:
- ui.status(_('working directory now based on '
- 'revision %d\n') % parents)
- # TODO: if we know which new heads may result from this rollback, pass
- # them to destroy(), which will prevent the branchhead cache from being
- # invalidated.
- self.destroyed()
- return 0
-
def invalidatecaches(self):
- def delcache(name):
- try:
- delattr(self, name)
- except AttributeError:
- pass
-
- delcache('_tagscache')
-
+ self._tags = None
+ self._tagtypes = None
+ self.nodetagscache = None
self._branchcache = None # in UTF-8
self._branchcachetip = None
- def invalidatedirstate(self):
- '''Invalidates the dirstate, causing the next call to dirstate
- to check if it was modified since the last time it was read,
- rereading it if it has.
-
- This is different to dirstate.invalidate() that it doesn't always
- rereads the dirstate. Use dirstate.invalidate() if you want to
- explicitly read the dirstate again (i.e. restoring it to a previous
- known good state).'''
- if 'dirstate' in self.__dict__:
- for k in self.dirstate._filecache:
- try:
- delattr(self.dirstate, k)
- except AttributeError:
- pass
- delattr(self, 'dirstate')
-
def invalidate(self):
- for k in self._filecache:
- # dirstate is invalidated separately in invalidatedirstate()
- if k == 'dirstate':
- continue
-
- try:
- delattr(self, k)
- except AttributeError:
- pass
+ for a in ("changelog", "manifest", "_bookmarks", "_bookmarkcurrent"):
+ if a in self.__dict__:
+ delattr(self, a)
self.invalidatecaches()
def _lock(self, lockname, wait, releasefn, acquirefn, desc):
@@ -1085,16 +800,6 @@ class localrepository(object):
acquirefn()
return l
- def _afterlock(self, callback):
- """add a callback to the current repository lock.
-
- The callback will be executed on lock release."""
- l = self._lockref and self._lockref()
- if l:
- l.postrelease.append(callback)
- else:
- callback()
-
def lock(self, wait=True):
'''Lock the repository store (.hg/store) and return a weak reference
to the lock. Use this before modifying the store (e.g. committing or
@@ -1104,16 +809,7 @@ class localrepository(object):
l.lock()
return l
- def unlock():
- self.store.write()
- if '_phasecache' in vars(self):
- self._phasecache.write()
- for k, ce in self._filecache.items():
- if k == 'dirstate':
- continue
- ce.refresh()
-
- l = self._lock(self.sjoin("lock"), wait, unlock,
+ l = self._lock(self.sjoin("lock"), wait, self.store.write,
self.invalidate, _('repository %s') % self.origroot)
self._lockref = weakref.ref(l)
return l
@@ -1127,14 +823,8 @@ class localrepository(object):
l.lock()
return l
- def unlock():
- self.dirstate.write()
- ce = self._filecache.get('dirstate')
- if ce:
- ce.refresh()
-
- l = self._lock(self.join("wlock"), wait, unlock,
- self.invalidatedirstate, _('working directory of %s') %
+ l = self._lock(self.join("wlock"), wait, self.dirstate.write,
+ self.dirstate.invalidate, _('working directory of %s') %
self.origroot)
self._wlockref = weakref.ref(l)
return l
@@ -1255,58 +945,36 @@ class localrepository(object):
# check subrepos
subs = []
- commitsubs = set()
- newstate = wctx.substate.copy()
- # only manage subrepos and .hgsubstate if .hgsub is present
+ removedsubs = set()
if '.hgsub' in wctx:
- # we'll decide whether to track this ourselves, thanks
- if '.hgsubstate' in changes[0]:
- changes[0].remove('.hgsubstate')
- if '.hgsubstate' in changes[2]:
- changes[2].remove('.hgsubstate')
-
- # compare current state to last committed state
- # build new substate based on last committed state
- oldstate = wctx.p1().substate
- for s in sorted(newstate.keys()):
- if not match(s):
- # ignore working copy, use old state if present
- if s in oldstate:
- newstate[s] = oldstate[s]
- continue
- if not force:
- raise util.Abort(
- _("commit with new subrepo %s excluded") % s)
- if wctx.sub(s).dirty(True):
- if not self.ui.configbool('ui', 'commitsubrepos'):
- raise util.Abort(
- _("uncommitted changes in subrepo %s") % s,
- hint=_("use --subrepos for recursive commit"))
- subs.append(s)
- commitsubs.add(s)
- else:
- bs = wctx.sub(s).basestate()
- newstate[s] = (newstate[s][0], bs, newstate[s][2])
- if oldstate.get(s, (None, None, None))[1] != bs:
- subs.append(s)
-
- # check for removed subrepos
+ # only manage subrepos and .hgsubstate if .hgsub is present
for p in wctx.parents():
- r = [s for s in p.substate if s not in newstate]
- subs += [s for s in r if match(s)]
- if subs:
+ removedsubs.update(s for s in p.substate if match(s))
+ for s in wctx.substate:
+ removedsubs.discard(s)
+ if match(s) and wctx.sub(s).dirty():
+ subs.append(s)
+ if (subs or removedsubs):
if (not match('.hgsub') and
'.hgsub' in (wctx.modified() + wctx.added())):
raise util.Abort(
_("can't commit subrepos without .hgsub"))
- changes[0].insert(0, '.hgsubstate')
-
+ if '.hgsubstate' not in changes[0]:
+ changes[0].insert(0, '.hgsubstate')
+ if '.hgsubstate' in changes[2]:
+ changes[2].remove('.hgsubstate')
elif '.hgsub' in changes[2]:
# clean up .hgsubstate when .hgsub is removed
if ('.hgsubstate' in wctx and
'.hgsubstate' not in changes[0] + changes[1] + changes[2]):
changes[2].insert(0, '.hgsubstate')
+ if subs and not self.ui.configbool('ui', 'commitsubrepos', True):
+ changedsubs = [s for s in subs if wctx.sub(s).dirty(True)]
+ if changedsubs:
+ raise util.Abort(_("uncommitted changes in subrepo %s")
+ % changedsubs[0])
+
# make sure all explicit patterns are matched
if not force and match.files():
matched = set(changes[0] + changes[1] + changes[2])
@@ -1331,9 +999,6 @@ class localrepository(object):
and wctx.branch() == wctx.p1().branch()):
return None
- if merge and changes[3]:
- raise util.Abort(_("cannot commit merge with missing files"))
-
ms = mergemod.mergestate(self)
for f in changes[0]:
if f in ms and ms[f] == 'u':
@@ -1345,15 +1010,16 @@ class localrepository(object):
cctx._text = editor(self, cctx, subs)
edited = (text != cctx._text)
- # commit subs and write new state
- if subs:
- for s in sorted(commitsubs):
+ # commit subs
+ if subs or removedsubs:
+ state = wctx.substate.copy()
+ for s in sorted(subs):
sub = wctx.sub(s)
self.ui.status(_('committing subrepository %s\n') %
subrepo.subrelpath(sub))
sr = sub.commit(cctx._text, user, date)
- newstate[s] = (newstate[s][0], sr)
- subrepo.writestate(self, newstate)
+ state[s] = (state[s][0], sr)
+ subrepo.writestate(self, state)
# Save commit message in case this transaction gets rolled back
# (e.g. by a pretxncommit hook). Leave the content alone on
@@ -1363,17 +1029,16 @@ class localrepository(object):
p1, p2 = self.dirstate.parents()
hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
try:
- self.hook("precommit", throw=True, parent1=hookp1,
- parent2=hookp2)
+ self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
ret = self.commitctx(cctx, True)
- except: # re-raises
+ except:
if edited:
self.ui.write(
_('note: commit message saved in %s\n') % msgfn)
raise
# update bookmarks, dirstate and mergestate
- bookmarks.update(self, [p1, p2], ret)
+ bookmarks.update(self, p1, ret)
for f in changes[0] + changes[1]:
self.dirstate.normal(f)
for f in changes[2]:
@@ -1383,9 +1048,7 @@ class localrepository(object):
finally:
wlock.release()
- def commithook(node=hex(ret), parent1=hookp1, parent2=hookp2):
- self.hook("commit", node=node, parent1=parent1, parent2=parent2)
- self._afterlock(commithook)
+ self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
return ret
def commitctx(self, ctx, error=False):
@@ -1452,44 +1115,22 @@ class localrepository(object):
self.hook('pretxncommit', throw=True, node=hex(n), parent1=xp1,
parent2=xp2, pending=p)
self.changelog.finalize(trp)
- # set the new commit is proper phase
- targetphase = phases.newcommitphase(self.ui)
- if targetphase:
- # retract boundary do not alter parent changeset.
- # if a parent have higher the resulting phase will
- # be compliant anyway
- #
- # if minimal phase was 0 we don't need to retract anything
- phases.retractboundary(self, targetphase, [n])
tr.close()
- self.updatebranchcache()
+
+ if self._branchcache:
+ self.updatebranchcache()
return n
finally:
if tr:
tr.release()
lock.release()
- def destroyed(self, newheadnodes=None):
+ def destroyed(self):
'''Inform the repository that nodes have been destroyed.
Intended for use by strip and rollback, so there's a common
- place for anything that has to be done after destroying history.
-
- If you know the branchheadcache was uptodate before nodes were removed
- and you also know the set of candidate new heads that may have resulted
- from the destruction, you can set newheadnodes. This will enable the
- code to update the branchheads cache, rather than having future code
- decide it's invalid and regenrating it from scratch.
- '''
- # If we have info, newheadnodes, on how to update the branch cache, do
- # it, Otherwise, since nodes were destroyed, the cache is stale and this
- # will be caught the next time it is read.
- if newheadnodes:
- tiprev = len(self) - 1
- ctxgen = (self[node] for node in newheadnodes
- if self.changelog.hasnode(node))
- self._updatebranchcache(self._branchcache, ctxgen)
- self._writebranchcache(self._branchcache, self.changelog.tip(),
- tiprev)
+ place for anything that has to be done after destroying history.'''
+ # XXX it might be nice if we could take the list of destroyed
+ # nodes, but I don't see an easy way for rollback() to do that
# Ensure the persistent tag cache is updated. Doing it now
# means that the tag cache only has to worry about destroyed
@@ -1503,9 +1144,6 @@ class localrepository(object):
# tag cache retrieval" case to work.
self.invalidatecaches()
- # Discard all cache entries to force reloading everything.
- self._filecache.clear()
-
def walk(self, match, node=None):
'''
walk recursively through the directory tree or a given
@@ -1517,8 +1155,7 @@ class localrepository(object):
def status(self, node1='.', node2=None, match=None,
ignored=False, clean=False, unknown=False,
listsubrepos=False):
- """return status of files between two nodes or node and working
- directory.
+ """return status of files between two nodes or node and working directory
If node1 is None, use the first dirstate parent instead.
If node2 is None, compare node1 with working directory.
@@ -1526,8 +1163,6 @@ class localrepository(object):
def mfmatches(ctx):
mf = ctx.manifest().copy()
- if match.always():
- return mf
for fn in mf.keys():
if not match(fn):
del mf[fn]
@@ -1553,9 +1188,7 @@ class localrepository(object):
if not parentworking:
def bad(f, msg):
- # 'f' may be a directory pattern from 'match.files()',
- # so 'f not in ctx1' is not enough
- if f not in ctx1 and f not in ctx1.dirs():
+ if f not in ctx1:
self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
match.bad = bad
@@ -1613,11 +1246,10 @@ class localrepository(object):
mf2 = mfmatches(ctx2)
modified, added, clean = [], [], []
- withflags = mf1.withflags() | mf2.withflags()
for fn in mf2:
if fn in mf1:
if (fn not in deleted and
- ((fn in withflags and mf1.flags(fn) != mf2.flags(fn)) or
+ (mf1.flags(fn) != mf2.flags(fn) or
(mf1[fn] != mf2[fn] and
(mf2[fn] or ctx1[fn].cmp(ctx2[fn]))))):
modified.append(fn)
@@ -1628,22 +1260,6 @@ class localrepository(object):
added.append(fn)
removed = mf1.keys()
- if working and modified and not self.dirstate._checklink:
- # Symlink placeholders may get non-symlink-like contents
- # via user error or dereferencing by NFS or Samba servers,
- # so we filter out any placeholders that don't look like a
- # symlink
- sane = []
- for f in modified:
- if ctx2.flags(f) == 'l':
- d = ctx2[f].data()
- if len(d) >= 1024 or '\n' in d or util.binary(d):
- self.ui.debug('ignoring suspect symlink placeholder'
- ' "%s"\n' % f)
- continue
- sane.append(f)
- modified = sane
-
r = modified, added, removed, deleted, unknown, ignored, clean
if listsubrepos:
@@ -1692,7 +1308,8 @@ class localrepository(object):
fbheads = set(self.changelog.nodesbetween([start], bheads)[2])
bheads = [h for h in bheads if h in fbheads]
if not closed:
- bheads = [h for h in bheads if not self[h].closesbranch()]
+ bheads = [h for h in bheads if
+ ('close' not in self.changelog.read(h)[5])]
return bheads
def branches(self, nodes):
@@ -1729,10 +1346,6 @@ class localrepository(object):
return r
def pull(self, remote, heads=None, force=False):
- # don't open transaction for nothing or you break future useful
- # rollback call
- tr = None
- trname = 'pull\n' + util.hidepassword(remote.url())
lock = self.lock()
try:
tmp = discovery.findcommonincoming(self, remote, heads=heads,
@@ -1740,10 +1353,8 @@ class localrepository(object):
common, fetch, rheads = tmp
if not fetch:
self.ui.status(_("no changes found\n"))
- added = []
result = 0
else:
- tr = self.transaction(trname)
if heads is None and list(common) == [nullid]:
self.ui.status(_("requesting all changes\n"))
elif heads is None and remote.capable('changegroupsubset'):
@@ -1761,50 +1372,9 @@ class localrepository(object):
"changegroupsubset."))
else:
cg = remote.changegroupsubset(fetch, heads, 'pull')
- clstart = len(self.changelog)
- result = self.addchangegroup(cg, 'pull', remote.url())
- clend = len(self.changelog)
- added = [self.changelog.node(r) for r in xrange(clstart, clend)]
-
- # compute target subset
- if heads is None:
- # We pulled every thing possible
- # sync on everything common
- subset = common + added
- else:
- # We pulled a specific subset
- # sync on this subset
- subset = heads
-
- # Get remote phases data from remote
- remotephases = remote.listkeys('phases')
- publishing = bool(remotephases.get('publishing', False))
- if remotephases and not publishing:
- # remote is new and unpublishing
- pheads, _dr = phases.analyzeremotephases(self, subset,
- remotephases)
- phases.advanceboundary(self, phases.public, pheads)
- phases.advanceboundary(self, phases.draft, subset)
- else:
- # Remote is old or publishing all common changesets
- # should be seen as public
- phases.advanceboundary(self, phases.public, subset)
-
- if obsolete._enabled:
- self.ui.debug('fetching remote obsolete markers')
- remoteobs = remote.listkeys('obsolete')
- if 'dump0' in remoteobs:
- if tr is None:
- tr = self.transaction(trname)
- for key in sorted(remoteobs, reverse=True):
- if key.startswith('dump'):
- data = base85.b85decode(remoteobs[key])
- self.obsstore.mergemarkers(tr, data)
- if tr is not None:
- tr.close()
+ result = self.addchangegroup(cg, 'pull', remote.url(),
+ lock=lock)
finally:
- if tr is not None:
- tr.release()
lock.release()
return result
@@ -1819,8 +1389,7 @@ class localrepository(object):
def push(self, remote, force=False, revs=None, newbranch=False):
'''Push outgoing changesets (limited by revs) from the current
repository to remote. Return an integer:
- - None means nothing to push
- - 0 means HTTP error
+ - 0 means HTTP error *or* nothing to push
- 1 means we pushed and remote head count is unchanged *or*
we have outgoing changesets but refused to push
- other values as described by addchangegroup()
@@ -1833,152 +1402,33 @@ class localrepository(object):
# unbundle assumes local user cannot lock remote repo (new ssh
# servers, http servers).
- if not remote.canpush():
- raise util.Abort(_("destination does not support push"))
- # get local lock as we might write phase data
- locallock = self.lock()
+ self.checkpush(force, revs)
+ lock = None
+ unbundle = remote.capable('unbundle')
+ if not unbundle:
+ lock = remote.lock()
try:
- self.checkpush(force, revs)
- lock = None
- unbundle = remote.capable('unbundle')
- if not unbundle:
- lock = remote.lock()
- try:
- # discovery
- fci = discovery.findcommonincoming
- commoninc = fci(self, remote, force=force)
- common, inc, remoteheads = commoninc
- fco = discovery.findcommonoutgoing
- outgoing = fco(self, remote, onlyheads=revs,
- commoninc=commoninc, force=force)
-
-
- if not outgoing.missing:
- # nothing to push
- scmutil.nochangesfound(self.ui, self, outgoing.excluded)
- ret = None
- else:
- # something to push
- if not force:
- # if self.obsstore == False --> no obsolete
- # then, save the iteration
- if self.obsstore:
- # this message are here for 80 char limit reason
- mso = _("push includes an obsolete changeset: %s!")
- msu = _("push includes an unstable changeset: %s!")
- # If we are to push if there is at least one
- # obsolete or unstable changeset in missing, at
- # least one of the missinghead will be obsolete or
- # unstable. So checking heads only is ok
- for node in outgoing.missingheads:
- ctx = self[node]
- if ctx.obsolete():
- raise util.Abort(_(mso) % ctx)
- elif ctx.unstable():
- raise util.Abort(_(msu) % ctx)
- discovery.checkheads(self, remote, outgoing,
- remoteheads, newbranch,
- bool(inc))
-
- # create a changegroup from local
- if revs is None and not outgoing.excluded:
- # push everything,
- # use the fast path, no race possible on push
- cg = self._changegroup(outgoing.missing, 'push')
- else:
- cg = self.getlocalbundle('push', outgoing)
-
- # apply changegroup to remote
- if unbundle:
- # local repo finds heads on server, finds out what
- # revs it must push. once revs transferred, if server
- # finds it has different heads (someone else won
- # commit/push race), server aborts.
- if force:
- remoteheads = ['force']
- # ssh: return remote's addchangegroup()
- # http: return remote's addchangegroup() or 0 for error
- ret = remote.unbundle(cg, remoteheads, 'push')
- else:
- # we return an integer indicating remote head count
- # change
- ret = remote.addchangegroup(cg, 'push', self.url())
-
- if ret:
- # push succeed, synchonize target of the push
- cheads = outgoing.missingheads
- elif revs is None:
- # All out push fails. synchronize all common
- cheads = outgoing.commonheads
- else:
- # I want cheads = heads(::missingheads and ::commonheads)
- # (missingheads is revs with secret changeset filtered out)
- #
- # This can be expressed as:
- # cheads = ( (missingheads and ::commonheads)
- # + (commonheads and ::missingheads))"
- # )
- #
- # while trying to push we already computed the following:
- # common = (::commonheads)
- # missing = ((commonheads::missingheads) - commonheads)
- #
- # We can pick:
- # * missingheads part of comon (::commonheads)
- common = set(outgoing.common)
- cheads = [node for node in revs if node in common]
- # and
- # * commonheads parents on missing
- revset = self.set('%ln and parents(roots(%ln))',
- outgoing.commonheads,
- outgoing.missing)
- cheads.extend(c.node() for c in revset)
- # even when we don't push, exchanging phase data is useful
- remotephases = remote.listkeys('phases')
- if not remotephases: # old server or public only repo
- phases.advanceboundary(self, phases.public, cheads)
- # don't push any phase data as there is nothing to push
+ cg, remote_heads = discovery.prepush(self, remote, force, revs,
+ newbranch)
+ ret = remote_heads
+ if cg is not None:
+ if unbundle:
+ # local repo finds heads on server, finds out what
+ # revs it must push. once revs transferred, if server
+ # finds it has different heads (someone else won
+ # commit/push race), server aborts.
+ if force:
+ remote_heads = ['force']
+ # ssh: return remote's addchangegroup()
+ # http: return remote's addchangegroup() or 0 for error
+ ret = remote.unbundle(cg, remote_heads, 'push')
else:
- ana = phases.analyzeremotephases(self, cheads, remotephases)
- pheads, droots = ana
- ### Apply remote phase on local
- if remotephases.get('publishing', False):
- phases.advanceboundary(self, phases.public, cheads)
- else: # publish = False
- phases.advanceboundary(self, phases.public, pheads)
- phases.advanceboundary(self, phases.draft, cheads)
- ### Apply local phase on remote
-
- # Get the list of all revs draft on remote by public here.
- # XXX Beware that revset break if droots is not strictly
- # XXX root we may want to ensure it is but it is costly
- outdated = self.set('heads((%ln::%ln) and public())',
- droots, cheads)
- for newremotehead in outdated:
- r = remote.pushkey('phases',
- newremotehead.hex(),
- str(phases.draft),
- str(phases.public))
- if not r:
- self.ui.warn(_('updating %s to public failed!\n')
- % newremotehead)
- self.ui.debug('try to push obsolete markers to remote\n')
- if (obsolete._enabled and self.obsstore and
- 'obsolete' in remote.listkeys('namespaces')):
- rslts = []
- remotedata = self.listkeys('obsolete')
- for key in sorted(remotedata, reverse=True):
- # reverse sort to ensure we end with dump0
- data = remotedata[key]
- rslts.append(remote.pushkey('obsolete', key, '', data))
- if [r for r in rslts if not r]:
- msg = _('failed to push some obsolete markers!\n')
- self.ui.warn(msg)
- finally:
- if lock is not None:
- lock.release()
+ # we return an integer indicating remote head count change
+ ret = remote.addchangegroup(cg, 'push', self.url(),
+ lock=lock)
finally:
- locallock.release()
+ if lock is not None:
+ lock.release()
self.ui.debug("checking for updated bookmarks\n")
rb = remote.listkeys('bookmarks')
@@ -2024,21 +1474,9 @@ class localrepository(object):
bases = [nullid]
csets, bases, heads = cl.nodesbetween(bases, heads)
# We assume that all ancestors of bases are known
- common = set(cl.ancestors([cl.rev(n) for n in bases]))
+ common = set(cl.ancestors(*[cl.rev(n) for n in bases]))
return self._changegroupsubset(common, csets, heads, source)
- def getlocalbundle(self, source, outgoing):
- """Like getbundle, but taking a discovery.outgoing as an argument.
-
- This is only implemented for local repos and reuses potentially
- precomputed sets in outgoing."""
- if not outgoing.missing:
- return None
- return self._changegroupsubset(outgoing.common,
- outgoing.missing,
- outgoing.missingheads,
- source)
-
def getbundle(self, source, heads=None, common=None):
"""Like changegroupsubset, but returns the set difference between the
ancestors of heads and the ancestors common.
@@ -2056,8 +1494,10 @@ class localrepository(object):
common = [nullid]
if not heads:
heads = cl.heads()
- return self.getlocalbundle(source,
- discovery.outgoing(cl, common, heads))
+ common, missing = cl.findcommonmissing(common, heads)
+ if not missing:
+ return None
+ return self._changegroupsubset(common, missing, heads, source)
def _changegroupsubset(self, commonrevs, csets, heads, source):
@@ -2067,7 +1507,7 @@ class localrepository(object):
fnodes = {} # needed file nodes
changedfiles = set()
fstate = ['', {}]
- count = [0, 0]
+ count = [0]
# can we go through the fast path ?
heads.sort()
@@ -2080,15 +1520,8 @@ class localrepository(object):
# filter any nodes that claim to be part of the known set
def prune(revlog, missing):
- rr, rl = revlog.rev, revlog.linkrev
return [n for n in missing
- if rl(rr(n)) not in commonrevs]
-
- progress = self.ui.progress
- _bundling = _('bundling')
- _changesets = _('changesets')
- _manifests = _('manifests')
- _files = _('files')
+ if revlog.linkrev(revlog.rev(n)) not in commonrevs]
def lookup(revlog, x):
if revlog == cl:
@@ -2096,22 +1529,23 @@ class localrepository(object):
changedfiles.update(c[3])
mfs.setdefault(c[0], x)
count[0] += 1
- progress(_bundling, count[0],
- unit=_changesets, total=count[1])
+ self.ui.progress(_('bundling'), count[0],
+ unit=_('changesets'), total=len(csets))
return x
elif revlog == mf:
clnode = mfs[x]
mdata = mf.readfast(x)
- for f, n in mdata.iteritems():
- if f in changedfiles:
- fnodes[f].setdefault(n, clnode)
+ for f in changedfiles:
+ if f in mdata:
+ fnodes.setdefault(f, {}).setdefault(mdata[f], clnode)
count[0] += 1
- progress(_bundling, count[0],
- unit=_manifests, total=count[1])
- return clnode
+ self.ui.progress(_('bundling'), count[0],
+ unit=_('manifests'), total=len(mfs))
+ return mfs[x]
else:
- progress(_bundling, count[0], item=fstate[0],
- unit=_files, total=count[1])
+ self.ui.progress(
+ _('bundling'), count[0], item=fstate[0],
+ unit=_('files'), total=len(changedfiles))
return fstate[1][x]
bundler = changegroup.bundle10(lookup)
@@ -2124,29 +1558,25 @@ class localrepository(object):
def gengroup():
# Create a changenode group generator that will call our functions
# back to lookup the owning changenode and collect information.
- count[:] = [0, len(csets)]
for chunk in cl.group(csets, bundler, reorder=reorder):
yield chunk
- progress(_bundling, None)
+ self.ui.progress(_('bundling'), None)
# Create a generator for the manifestnodes that calls our lookup
# and data collection functions back.
- for f in changedfiles:
- fnodes[f] = {}
- count[:] = [0, len(mfs)]
+ count[0] = 0
for chunk in mf.group(prune(mf, mfs), bundler, reorder=reorder):
yield chunk
- progress(_bundling, None)
+ self.ui.progress(_('bundling'), None)
mfs.clear()
# Go through all our files in order sorted by name.
- count[:] = [0, len(changedfiles)]
+ count[0] = 0
for fname in sorted(changedfiles):
filerevlog = self.file(fname)
if not len(filerevlog):
- raise util.Abort(_("empty or missing revlog for %s")
- % fname)
+ raise util.Abort(_("empty or missing revlog for %s") % fname)
fstate[0] = fname
fstate[1] = fnodes.pop(fname, {})
@@ -2159,7 +1589,7 @@ class localrepository(object):
# Signal that no more groups are left.
yield bundler.close()
- progress(_bundling, None)
+ self.ui.progress(_('bundling'), None)
if csets:
self.hook('outgoing', node=hex(csets[0]), source=source)
@@ -2185,7 +1615,7 @@ class localrepository(object):
mfs = {}
changedfiles = set()
fstate = ['']
- count = [0, 0]
+ count = [0]
self.hook('preoutgoing', throw=True, source=source)
self.changegroupinfo(nodes, source)
@@ -2193,14 +1623,7 @@ class localrepository(object):
revset = set([cl.rev(n) for n in nodes])
def gennodelst(log):
- ln, llr = log.node, log.linkrev
- return [ln(r) for r in log if llr(r) in revset]
-
- progress = self.ui.progress
- _bundling = _('bundling')
- _changesets = _('changesets')
- _manifests = _('manifests')
- _files = _('files')
+ return [log.node(r) for r in log if log.linkrev(r) in revset]
def lookup(revlog, x):
if revlog == cl:
@@ -2208,17 +1631,18 @@ class localrepository(object):
changedfiles.update(c[3])
mfs.setdefault(c[0], x)
count[0] += 1
- progress(_bundling, count[0],
- unit=_changesets, total=count[1])
+ self.ui.progress(_('bundling'), count[0],
+ unit=_('changesets'), total=len(nodes))
return x
elif revlog == mf:
count[0] += 1
- progress(_bundling, count[0],
- unit=_manifests, total=count[1])
+ self.ui.progress(_('bundling'), count[0],
+ unit=_('manifests'), total=len(mfs))
return cl.node(revlog.linkrev(revlog.rev(x)))
else:
- progress(_bundling, count[0], item=fstate[0],
- total=count[1], unit=_files)
+ self.ui.progress(
+ _('bundling'), count[0], item=fstate[0],
+ total=len(changedfiles), unit=_('files'))
return cl.node(revlog.linkrev(revlog.rev(x)))
bundler = changegroup.bundle10(lookup)
@@ -2232,22 +1656,20 @@ class localrepository(object):
'''yield a sequence of changegroup chunks (strings)'''
# construct a list of all changed files
- count[:] = [0, len(nodes)]
for chunk in cl.group(nodes, bundler, reorder=reorder):
yield chunk
- progress(_bundling, None)
+ self.ui.progress(_('bundling'), None)
- count[:] = [0, len(mfs)]
+ count[0] = 0
for chunk in mf.group(gennodelst(mf), bundler, reorder=reorder):
yield chunk
- progress(_bundling, None)
+ self.ui.progress(_('bundling'), None)
- count[:] = [0, len(changedfiles)]
+ count[0] = 0
for fname in sorted(changedfiles):
filerevlog = self.file(fname)
if not len(filerevlog):
- raise util.Abort(_("empty or missing revlog for %s")
- % fname)
+ raise util.Abort(_("empty or missing revlog for %s") % fname)
fstate[0] = fname
nodelist = gennodelst(filerevlog)
if nodelist:
@@ -2256,17 +1678,19 @@ class localrepository(object):
for chunk in filerevlog.group(nodelist, bundler, reorder):
yield chunk
yield bundler.close()
- progress(_bundling, None)
+ self.ui.progress(_('bundling'), None)
if nodes:
self.hook('outgoing', node=hex(nodes[0]), source=source)
return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN')
- def addchangegroup(self, source, srctype, url, emptyok=False):
+ def addchangegroup(self, source, srctype, url, emptyok=False, lock=None):
"""Add the changegroup returned by source.read() to this repo.
srctype is a string like 'push', 'pull', or 'unbundle'. url is
the URL of the repo where this changegroup is coming from.
+ If lock is not None, the function takes ownership of the lock
+ and releases it after the changegroup is added.
Return an integer summarizing the change to this repo:
- nothing changed or no source: 0
@@ -2314,8 +1738,8 @@ class localrepository(object):
source.callback = pr
source.changelogheader()
- srccontent = cl.addgroup(source, csmap, trp)
- if not (srccontent or emptyok):
+ if (cl.addgroup(source, csmap, trp) is None
+ and not emptyok):
raise util.Abort(_("received changelog group is empty"))
clend = len(cl)
changesets = clend - clstart
@@ -2363,7 +1787,7 @@ class localrepository(object):
pr()
fl = self.file(f)
o = len(fl)
- if not fl.addgroup(source, revmap, trp):
+ if fl.addgroup(source, revmap, trp) is None:
raise util.Abort(_("received file revlog group is empty"))
revisions += len(fl) - o
files += 1
@@ -2392,7 +1816,7 @@ class localrepository(object):
heads = cl.heads()
dh = len(heads) - len(oldheads)
for h in heads:
- if h not in oldheads and self[h].closesbranch():
+ if h not in oldheads and 'close' in self[h].extra():
dh -= 1
htext = ""
if dh:
@@ -2408,46 +1832,26 @@ class localrepository(object):
node=hex(cl.node(clstart)), source=srctype,
url=url, pending=p)
- added = [cl.node(r) for r in xrange(clstart, clend)]
- publishing = self.ui.configbool('phases', 'publish', True)
- if srctype == 'push':
- # Old server can not push the boundary themself.
- # New server won't push the boundary if changeset already
- # existed locally as secrete
- #
- # We should not use added here but the list of all change in
- # the bundle
- if publishing:
- phases.advanceboundary(self, phases.public, srccontent)
- else:
- phases.advanceboundary(self, phases.draft, srccontent)
- phases.retractboundary(self, phases.draft, added)
- elif srctype != 'strip':
- # publishing only alter behavior during push
- #
- # strip should not touch boundary at all
- phases.retractboundary(self, phases.draft, added)
-
# make changelog see real files again
cl.finalize(trp)
tr.close()
-
- if changesets > 0:
- def runhooks():
- # forcefully update the on-disk branch cache
- self.ui.debug("updating the branch cache\n")
- self.updatebranchcache()
- self.hook("changegroup", node=hex(cl.node(clstart)),
- source=srctype, url=url)
-
- for n in added:
- self.hook("incoming", node=hex(n), source=srctype,
- url=url)
- self._afterlock(runhooks)
-
finally:
tr.release()
+ if lock:
+ lock.release()
+
+ if changesets > 0:
+ # forcefully update the on-disk branch cache
+ self.ui.debug("updating the branch cache\n")
+ self.updatebranchcache()
+ self.hook("changegroup", node=hex(cl.node(clstart)),
+ source=srctype, url=url)
+
+ for i in xrange(clstart, clend):
+ self.hook("incoming", node=hex(cl.node(i)),
+ source=srctype, url=url)
+
# never return 0 here:
if dh < 0:
return dh - 1
@@ -2463,7 +1867,7 @@ class localrepository(object):
resp = int(l)
except ValueError:
raise error.ResponseError(
- _('unexpected response from remote server:'), l)
+ _('Unexpected response from remote server:'), l)
if resp == 1:
raise util.Abort(_('operation forbidden by server'))
elif resp == 2:
@@ -2476,11 +1880,9 @@ class localrepository(object):
total_files, total_bytes = map(int, l.split(' ', 1))
except (ValueError, TypeError):
raise error.ResponseError(
- _('unexpected response from remote server:'), l)
+ _('Unexpected response from remote server:'), l)
self.ui.status(_('%d files to transfer, %s of data\n') %
(total_files, util.bytecount(total_bytes)))
- handled_bytes = 0
- self.ui.progress(_('clone'), 0, total=total_bytes)
start = time.time()
for i in xrange(total_files):
# XXX doesn't support '\n' or '\r' in filenames
@@ -2490,28 +1892,21 @@ class localrepository(object):
size = int(size)
except (ValueError, TypeError):
raise error.ResponseError(
- _('unexpected response from remote server:'), l)
- if self.ui.debugflag:
- self.ui.debug('adding %s (%s)\n' %
- (name, util.bytecount(size)))
+ _('Unexpected response from remote server:'), l)
+ self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size)))
# for backwards compat, name was partially encoded
ofp = self.sopener(store.decodedir(name), 'w')
for chunk in util.filechunkiter(fp, limit=size):
- handled_bytes += len(chunk)
- self.ui.progress(_('clone'), handled_bytes,
- total=total_bytes)
ofp.write(chunk)
ofp.close()
elapsed = time.time() - start
if elapsed <= 0:
elapsed = 0.001
- self.ui.progress(_('clone'), None)
self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') %
(util.bytecount(total_bytes), elapsed,
util.bytecount(total_bytes / elapsed)))
- # new requirements = old non-format requirements +
- # new format-related
+ # new requirements = old non-format requirements + new format-related
# requirements from the streamed-in repository
requirements.update(set(self.requirements) - self.supportedformats)
self._applyrequirements(requirements)
@@ -2537,10 +1932,6 @@ class localrepository(object):
# and format flags on "stream" capability, and use
# uncompressed only if compatible.
- if not stream:
- # if the server explicitely prefer to stream (for fast LANs)
- stream = remote.capable('stream-preferred')
-
if stream and not heads:
# 'stream' means remote revlog format is revlogv1 only
if remote.capable('stream'):
@@ -2557,7 +1948,6 @@ class localrepository(object):
def pushkey(self, namespace, key, old, new):
self.hook('prepushkey', throw=True, namespace=namespace, key=key,
old=old, new=new)
- self.ui.debug('pushing key for "%s:%s"\n' % (namespace, key))
ret = pushkey.push(self, namespace, key, old, new)
self.hook('pushkey', namespace=namespace, key=key, old=old, new=new,
ret=ret)
@@ -2565,7 +1955,6 @@ class localrepository(object):
def listkeys(self, namespace):
self.hook('prelistkeys', throw=True, namespace=namespace)
- self.ui.debug('listing keys for "%s"\n' % namespace)
values = pushkey.list(self, namespace)
self.hook('listkeys', namespace=namespace, values=values)
return values
@@ -2587,10 +1976,7 @@ def aftertrans(files):
renamefiles = [tuple(t) for t in files]
def a():
for src, dest in renamefiles:
- try:
- util.rename(src, dest)
- except OSError: # journal file does not yet exist
- pass
+ util.rename(src, dest)
return a
def undoname(fn):
diff --git a/mercurial/lock.py b/mercurial/lock.py
index cc2c533..bf33f02 100644
--- a/mercurial/lock.py
+++ b/mercurial/lock.py
@@ -35,7 +35,6 @@ class lock(object):
self.timeout = timeout
self.releasefn = releasefn
self.desc = desc
- self.postrelease = []
self.lock()
def __del__(self):
@@ -120,10 +119,6 @@ class lock(object):
return locker
def release(self):
- """release the lock and execute callback function if any
-
- If the lock have been aquired multiple time, the actual release is
- delayed to the last relase call."""
if self.held > 1:
self.held -= 1
elif self.held == 1:
@@ -134,10 +129,9 @@ class lock(object):
util.unlink(self.f)
except OSError:
pass
- for callback in self.postrelease:
- callback()
def release(*locks):
for lock in locks:
if lock is not None:
lock.release()
+
diff --git a/mercurial/lsprof.py b/mercurial/lsprof.py
index 4970f06..e9b185b 100644
--- a/mercurial/lsprof.py
+++ b/mercurial/lsprof.py
@@ -38,8 +38,8 @@ class Stats(object):
d = d[:top]
cols = "% 12s %12s %11.4f %11.4f %s\n"
hcols = "% 12s %12s %12s %12s %s\n"
- file.write(hcols % ("CallCount", "Recursive", "Total(s)",
- "Inline(s)", "module:lineno(function)"))
+ file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
+ "Inline(ms)", "module:lineno(function)"))
count = 0
for e in d:
file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
@@ -48,7 +48,7 @@ class Stats(object):
if limit is not None and count == limit:
return
ccount = 0
- if climit and e.calls:
+ if e.calls:
for se in e.calls:
file.write(cols % ("+%s" % se.callcount, se.reccallcount,
se.totaltime, se.inlinetime,
@@ -86,7 +86,9 @@ def label(code):
for k, v in list(sys.modules.iteritems()):
if v is None:
continue
- if not isinstance(getattr(v, '__file__', None), str):
+ if not hasattr(v, '__file__'):
+ continue
+ if not isinstance(v.__file__, str):
continue
if v.__file__.startswith(code.co_filename):
mname = _fn2mod[code.co_filename] = k
diff --git a/mercurial/mail.py b/mercurial/mail.py
index 1154a4a..6f11ead 100644
--- a/mercurial/mail.py
+++ b/mercurial/mail.py
@@ -7,7 +7,7 @@
from i18n import _
import util, encoding
-import os, smtplib, socket, quopri, time
+import os, smtplib, socket, quopri
import email.Header, email.MIMEText, email.Utils
_oldheaderinit = email.Header.Header.__init__
@@ -37,7 +37,7 @@ def _smtp(ui):
# backward compatible: when tls = true, we use starttls.
starttls = tls == 'starttls' or util.parsebool(tls)
smtps = tls == 'smtps'
- if (starttls or smtps) and not util.safehasattr(socket, 'ssl'):
+ if (starttls or smtps) and not hasattr(socket, 'ssl'):
raise util.Abort(_("can't use TLS: Python SSL support not installed"))
if smtps:
ui.note(_('(using smtps)\n'))
@@ -93,29 +93,15 @@ def _sendmail(ui, sender, recipients, msg):
os.path.basename(program.split(None, 1)[0]),
util.explainexit(ret)[0]))
-def _mbox(mbox, sender, recipients, msg):
- '''write mails to mbox'''
- fp = open(mbox, 'ab+')
- # Should be time.asctime(), but Windows prints 2-characters day
- # of month instead of one. Make them print the same thing.
- date = time.strftime('%a %b %d %H:%M:%S %Y', time.localtime())
- fp.write('From %s %s\n' % (sender, date))
- fp.write(msg)
- fp.write('\n\n')
- fp.close()
-
-def connect(ui, mbox=None):
+def connect(ui):
'''make a mail connection. return a function to send mail.
call as sendmail(sender, list-of-recipients, msg).'''
- if mbox:
- open(mbox, 'wb').close()
- return lambda s, r, m: _mbox(mbox, s, r, m)
if ui.config('email', 'method', 'smtp') == 'smtp':
return _smtp(ui)
return lambda s, r, m: _sendmail(ui, s, r, m)
-def sendmail(ui, sender, recipients, msg, mbox=None):
- send = connect(ui, mbox=mbox)
+def sendmail(ui, sender, recipients, msg):
+ send = connect(ui)
return send(sender, recipients, msg)
def validateconfig(ui):
@@ -131,9 +117,14 @@ def validateconfig(ui):
'but not in PATH') % method)
def mimetextpatch(s, subtype='plain', display=False):
- '''Return MIME message suitable for a patch.
- Charset will be detected as utf-8 or (possibly fake) us-ascii.
- Transfer encodings will be used if necessary.'''
+ '''If patch in utf-8 transfer-encode it.'''
+
+ enc = None
+ for line in s.splitlines():
+ if len(line) > 950:
+ s = quopri.encodestring(s)
+ enc = "quoted-printable"
+ break
cs = 'us-ascii'
if not display:
@@ -147,20 +138,7 @@ def mimetextpatch(s, subtype='plain', display=False):
# We'll go with us-ascii as a fallback.
pass
- return mimetextqp(s, subtype, cs)
-
-def mimetextqp(body, subtype, charset):
- '''Return MIME message.
- Qouted-printable transfer encoding will be used if necessary.
- '''
- enc = None
- for line in body.splitlines():
- if len(line) > 950:
- body = quopri.encodestring(body)
- enc = "quoted-printable"
- break
-
- msg = email.MIMEText.MIMEText(body, subtype, charset)
+ msg = email.MIMEText.MIMEText(s, subtype, cs)
if enc:
del msg['Content-Transfer-Encoding']
msg['Content-Transfer-Encoding'] = enc
@@ -252,4 +230,4 @@ def mimeencode(ui, s, charsets=None, display=False):
cs = 'us-ascii'
if not display:
s, cs = _encode(ui, s, charsets)
- return mimetextqp(s, 'plain', cs)
+ return email.MIMEText.MIMEText(s, 'plain', cs)
diff --git a/mercurial/manifest.py b/mercurial/manifest.py
index 07f0b6d..7c3781c 100644
--- a/mercurial/manifest.py
+++ b/mercurial/manifest.py
@@ -6,7 +6,7 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-import mdiff, parsers, error, revlog, util
+import mdiff, parsers, error, revlog
import array, struct
class manifestdict(dict):
@@ -19,8 +19,6 @@ class manifestdict(dict):
self._flags = flags
def flags(self, f):
return self._flags.get(f, "")
- def withflags(self):
- return set(self._flags.keys())
def set(self, f, flags):
self._flags[f] = flags
def copy(self):
@@ -126,8 +124,8 @@ class manifest(revlog.revlog):
addlist[start:end] = array.array('c', content)
else:
del addlist[start:end]
- return "".join(struct.pack(">lll", start, end, len(content))
- + content for start, end, content in x)
+ return "".join(struct.pack(">lll", start, end, len(content)) + content
+ for start, end, content in x)
def checkforbidden(l):
for f in l:
@@ -166,7 +164,7 @@ class manifest(revlog.revlog):
dline = [""]
start = 0
# zero copy representation of addlist as a buffer
- addbuf = util.buffer(addlist)
+ addbuf = buffer(addlist)
# start with a readonly loop that finds the offset of
# each line and creates the deltas
@@ -198,7 +196,7 @@ class manifest(revlog.revlog):
# apply the delta to the addlist, and get a delta for addrevision
cachedelta = (self.rev(p1), addlistdelta(addlist, delta))
arraytext = addlist
- text = util.buffer(arraytext)
+ text = buffer(arraytext)
n = self.addrevision(text, transaction, link, p1, p2, cachedelta)
self._mancache = (n, map, arraytext)
diff --git a/mercurial/match.py b/mercurial/match.py
index 927cd59..e01b835 100644
--- a/mercurial/match.py
+++ b/mercurial/match.py
@@ -9,14 +9,6 @@ import re
import scmutil, util, fileset
from i18n import _
-def _rematcher(pat):
- m = util.compilere(pat)
- try:
- # slightly faster, provided by facebook's re2 bindings
- return m.test_match
- except AttributeError:
- return m.match
-
def _expandsets(pats, ctx):
'''convert set: patterns into a list of files in the given context'''
fset = set()
@@ -57,6 +49,7 @@ class match(object):
'<something>' - a pattern of the specified default type
"""
+ self._ctx = None
self._root = root
self._cwd = cwd
self._files = []
@@ -70,10 +63,7 @@ class match(object):
pats = _normalize(exclude, 'glob', root, cwd, auditor)
self.excludepat, em = _buildmatch(ctx, pats, '(?:/|$)')
if exact:
- if isinstance(patterns, list):
- self._files = patterns
- else:
- self._files = list(patterns)
+ self._files = patterns
pm = self.exact
elif patterns:
pats = _normalize(patterns, default, root, cwd, auditor)
@@ -129,8 +119,6 @@ class match(object):
return self._files
def anypats(self):
return self._anypats
- def always(self):
- return False
class exact(match):
def __init__(self, root, cwd, files):
@@ -139,8 +127,6 @@ class exact(match):
class always(match):
def __init__(self, root, cwd):
match.__init__(self, root, cwd, [])
- def always(self):
- return True
class narrowmatcher(match):
"""Adapt a matcher to work on a subdirectory only.
@@ -287,8 +273,8 @@ def _buildregexmatch(pats, tail):
try:
pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats])
if len(pat) > 20000:
- raise OverflowError
- return pat, _rematcher(pat)
+ raise OverflowError()
+ return pat, re.compile(pat).match
except OverflowError:
# We're using a Python with a tiny regex engine and we
# made it explode, so we'll divide the pattern list in two
@@ -302,7 +288,7 @@ def _buildregexmatch(pats, tail):
except re.error:
for k, p in pats:
try:
- _rematcher('(?:%s)' % _regex(k, p, tail))
+ re.compile('(?:%s)' % _regex(k, p, tail))
except re.error:
raise util.Abort(_("invalid pattern (%s): %s") % (k, p))
raise util.Abort(_("invalid pattern"))
@@ -348,5 +334,5 @@ def _roots(patterns):
def _anypats(patterns):
for kind, name in patterns:
- if kind in ('glob', 're', 'relglob', 'relre', 'set'):
+ if kind in ('glob', 're', 'relglob', 'relre'):
return True
diff --git a/mercurial/mdiff.py b/mercurial/mdiff.py
index f93a8a6..4d1e760 100644
--- a/mercurial/mdiff.py
+++ b/mercurial/mdiff.py
@@ -67,81 +67,14 @@ defaultopts = diffopts()
def wsclean(opts, text, blank=True):
if opts.ignorews:
- text = bdiff.fixws(text, 1)
+ text = re.sub('[ \t\r]+', '', text)
elif opts.ignorewsamount:
- text = bdiff.fixws(text, 0)
+ text = re.sub('[ \t\r]+', ' ', text)
+ text = text.replace(' \n', '\n')
if blank and opts.ignoreblanklines:
- text = re.sub('\n+', '\n', text).strip('\n')
+ text = re.sub('\n+', '', text)
return text
-def splitblock(base1, lines1, base2, lines2, opts):
- # The input lines matches except for interwoven blank lines. We
- # transform it into a sequence of matching blocks and blank blocks.
- lines1 = [(wsclean(opts, l) and 1 or 0) for l in lines1]
- lines2 = [(wsclean(opts, l) and 1 or 0) for l in lines2]
- s1, e1 = 0, len(lines1)
- s2, e2 = 0, len(lines2)
- while s1 < e1 or s2 < e2:
- i1, i2, btype = s1, s2, '='
- if (i1 >= e1 or lines1[i1] == 0
- or i2 >= e2 or lines2[i2] == 0):
- # Consume the block of blank lines
- btype = '~'
- while i1 < e1 and lines1[i1] == 0:
- i1 += 1
- while i2 < e2 and lines2[i2] == 0:
- i2 += 1
- else:
- # Consume the matching lines
- while i1 < e1 and lines1[i1] == 1 and lines2[i2] == 1:
- i1 += 1
- i2 += 1
- yield [base1 + s1, base1 + i1, base2 + s2, base2 + i2], btype
- s1 = i1
- s2 = i2
-
-def allblocks(text1, text2, opts=None, lines1=None, lines2=None, refine=False):
- """Return (block, type) tuples, where block is an mdiff.blocks
- line entry. type is '=' for blocks matching exactly one another
- (bdiff blocks), '!' for non-matching blocks and '~' for blocks
- matching only after having filtered blank lines. If refine is True,
- then '~' blocks are refined and are only made of blank lines.
- line1 and line2 are text1 and text2 split with splitnewlines() if
- they are already available.
- """
- if opts is None:
- opts = defaultopts
- if opts.ignorews or opts.ignorewsamount:
- text1 = wsclean(opts, text1, False)
- text2 = wsclean(opts, text2, False)
- diff = bdiff.blocks(text1, text2)
- for i, s1 in enumerate(diff):
- # The first match is special.
- # we've either found a match starting at line 0 or a match later
- # in the file. If it starts later, old and new below will both be
- # empty and we'll continue to the next match.
- if i > 0:
- s = diff[i - 1]
- else:
- s = [0, 0, 0, 0]
- s = [s[1], s1[0], s[3], s1[2]]
-
- # bdiff sometimes gives huge matches past eof, this check eats them,
- # and deals with the special first match case described above
- if s[0] != s[1] or s[2] != s[3]:
- type = '!'
- if opts.ignoreblanklines:
- if lines1 is None:
- lines1 = splitnewlines(text1)
- if lines2 is None:
- lines2 = splitnewlines(text2)
- old = wsclean(opts, "".join(lines1[s[0]:s[1]]))
- new = wsclean(opts, "".join(lines2[s[2]:s[3]]))
- if old == new:
- type = '~'
- yield s, type
- yield s1, '='
-
def diffline(revs, a, b, opts):
parts = ['diff']
if opts.git:
@@ -156,10 +89,10 @@ def diffline(revs, a, b, opts):
return ' '.join(parts) + '\n'
def unidiff(a, ad, b, bd, fn1, fn2, r=None, opts=defaultopts):
- def datetag(date, fn=None):
+ def datetag(date, addtab=True):
if not opts.git and not opts.nodates:
return '\t%s\n' % date
- if fn and ' ' in fn:
+ if addtab and ' ' in fn1:
return '\t\n'
return '\n'
@@ -167,9 +100,6 @@ def unidiff(a, ad, b, bd, fn1, fn2, r=None, opts=defaultopts):
return ""
epoch = util.datestr((0, 0))
- fn1 = util.pconvert(fn1)
- fn2 = util.pconvert(fn2)
-
if not opts.text and (util.binary(a) or util.binary(b)):
if a and b and len(a) == len(b) and a == b:
return ""
@@ -177,19 +107,19 @@ def unidiff(a, ad, b, bd, fn1, fn2, r=None, opts=defaultopts):
elif not a:
b = splitnewlines(b)
if a is None:
- l1 = '--- /dev/null%s' % datetag(epoch)
+ l1 = '--- /dev/null%s' % datetag(epoch, False)
else:
- l1 = "--- %s%s" % ("a/" + fn1, datetag(ad, fn1))
- l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd, fn2))
+ l1 = "--- %s%s" % ("a/" + fn1, datetag(ad))
+ l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd))
l3 = "@@ -0,0 +1,%d @@\n" % len(b)
l = [l1, l2, l3] + ["+" + e for e in b]
elif not b:
a = splitnewlines(a)
- l1 = "--- %s%s" % ("a/" + fn1, datetag(ad, fn1))
+ l1 = "--- %s%s" % ("a/" + fn1, datetag(ad))
if b is None:
- l2 = '+++ /dev/null%s' % datetag(epoch)
+ l2 = '+++ /dev/null%s' % datetag(epoch, False)
else:
- l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd, fn2))
+ l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd))
l3 = "@@ -1,%d +0,0 @@\n" % len(a)
l = [l1, l2, l3] + ["-" + e for e in a]
else:
@@ -199,8 +129,8 @@ def unidiff(a, ad, b, bd, fn1, fn2, r=None, opts=defaultopts):
if not l:
return ""
- l.insert(0, "--- a/%s%s" % (fn1, datetag(ad, fn1)))
- l.insert(1, "+++ b/%s%s" % (fn2, datetag(bd, fn2)))
+ l.insert(0, "--- a/%s%s" % (fn1, datetag(ad)))
+ l.insert(1, "+++ b/%s%s" % (fn2, datetag(bd)))
for ln in xrange(len(l)):
if l[ln][-1] != '\n':
@@ -227,7 +157,6 @@ def _unidiff(t1, t2, l1, l2, opts=defaultopts):
return 0
return ret
- lastfunc = [0, '']
def yieldhunk(hunk):
(astart, a2, bstart, b2, delta) = hunk
aend = contextend(a2, len(l1))
@@ -236,55 +165,61 @@ def _unidiff(t1, t2, l1, l2, opts=defaultopts):
func = ""
if opts.showfunc:
- lastpos, func = lastfunc
- # walk backwards from the start of the context up to the start of
- # the previous hunk context until we find a line starting with an
- # alphanumeric char.
- for i in xrange(astart - 1, lastpos - 1, -1):
- if l1[i][0].isalnum():
- func = ' ' + l1[i].rstrip()[:40]
- lastfunc[1] = func
+ # walk backwards from the start of the context
+ # to find a line starting with an alphanumeric char.
+ for x in xrange(astart - 1, -1, -1):
+ t = l1[x].rstrip()
+ if funcre.match(t):
+ func = ' ' + t[:40]
break
- # by recording this hunk's starting point as the next place to
- # start looking for function lines, we avoid reading any line in
- # the file more than once.
- lastfunc[0] = astart
-
- # zero-length hunk ranges report their start line as one less
- if alen:
- astart += 1
- if blen:
- bstart += 1
-
- yield "@@ -%d,%d +%d,%d @@%s\n" % (astart, alen,
- bstart, blen, func)
+
+ yield "@@ -%d,%d +%d,%d @@%s\n" % (astart + 1, alen,
+ bstart + 1, blen, func)
for x in delta:
yield x
for x in xrange(a2, aend):
yield ' ' + l1[x]
+ if opts.showfunc:
+ funcre = re.compile('\w')
+
# bdiff.blocks gives us the matching sequences in the files. The loop
# below finds the spaces between those matching sequences and translates
# them into diff output.
#
+ if opts.ignorews or opts.ignorewsamount:
+ t1 = wsclean(opts, t1, False)
+ t2 = wsclean(opts, t2, False)
+
+ diff = bdiff.blocks(t1, t2)
hunk = None
- ignoredlines = 0
- for s, stype in allblocks(t1, t2, opts, l1, l2):
- a1, a2, b1, b2 = s
- if stype != '!':
- if stype == '~':
- # The diff context lines are based on t1 content. When
- # blank lines are ignored, the new lines offsets must
- # be adjusted as if equivalent blocks ('~') had the
- # same sizes on both sides.
- ignoredlines += (b2 - b1) - (a2 - a1)
- continue
+ for i, s1 in enumerate(diff):
+ # The first match is special.
+ # we've either found a match starting at line 0 or a match later
+ # in the file. If it starts later, old and new below will both be
+ # empty and we'll continue to the next match.
+ if i > 0:
+ s = diff[i - 1]
+ else:
+ s = [0, 0, 0, 0]
delta = []
+ a1 = s[1]
+ a2 = s1[0]
+ b1 = s[3]
+ b2 = s1[2]
+
old = l1[a1:a2]
new = l2[b1:b2]
- b1 -= ignoredlines
- b2 -= ignoredlines
+ # bdiff sometimes gives huge matches past eof, this check eats them,
+ # and deals with the special first match case described above
+ if not old and not new:
+ continue
+
+ if opts.ignoreblanklines:
+ if wsclean(opts, "".join(old)) == wsclean(opts, "".join(new)):
+ continue
+
astart = contextstart(a1)
bstart = contextstart(b1)
prev = None
@@ -327,7 +262,7 @@ def patchtext(bin):
def patch(a, bin):
if len(a) == 0:
# skip over trivial delta header
- return util.buffer(bin, 12)
+ return buffer(bin, 12)
return mpatch.patches(a, [bin])
# similar to difflib.SequenceMatcher.get_matching_blocks
diff --git a/mercurial/merge.py b/mercurial/merge.py
index 19ffb28..240081f 100644
--- a/mercurial/merge.py
+++ b/mercurial/merge.py
@@ -7,7 +7,7 @@
from node import nullid, nullrev, hex, bin
from i18n import _
-import error, scmutil, util, filemerge, copies, subrepo
+import scmutil, util, filemerge, copies, subrepo, encoding
import errno, os, shutil
class mergestate(object):
@@ -81,50 +81,23 @@ class mergestate(object):
self.mark(dfile, 'r')
return r
-def _checkunknownfile(repo, wctx, mctx, f):
- return (not repo.dirstate._ignore(f)
- and os.path.isfile(repo.wjoin(f))
- and repo.dirstate.normalize(f) not in repo.dirstate
- and mctx[f].cmp(wctx[f]))
-
-def _checkunknown(repo, wctx, mctx):
+def _checkunknown(wctx, mctx):
"check for collisions between unknown files and files in mctx"
+ for f in wctx.unknown():
+ if f in mctx and mctx[f].cmp(wctx[f]):
+ raise util.Abort(_("untracked file in working directory differs"
+ " from file in requested revision: '%s'") % f)
- error = False
- for f in mctx:
- if f not in wctx and _checkunknownfile(repo, wctx, mctx, f):
- error = True
- wctx._repo.ui.warn(_("%s: untracked file differs\n") % f)
- if error:
- raise util.Abort(_("untracked files in working directory differ "
- "from files in requested revision"))
-
-def _checkcollision(mctx, wctx):
+def _checkcollision(mctx):
"check for case folding collisions in the destination context"
folded = {}
for fn in mctx:
- fold = util.normcase(fn)
+ fold = encoding.lower(fn)
if fold in folded:
raise util.Abort(_("case-folding collision between %s and %s")
% (fn, folded[fold]))
folded[fold] = fn
- if wctx:
- # class to delay looking up copy mapping
- class pathcopies(object):
- @util.propertycache
- def map(self):
- # {dst@mctx: src@wctx} copy mapping
- return copies.pathcopies(wctx, mctx)
- pc = pathcopies()
-
- for fn in wctx:
- fold = util.normcase(fn)
- mfn = folded.get(fold, None)
- if mfn and mfn != fn and pc.map.get(mfn) != fn:
- raise util.Abort(_("case-folding collision between %s and %s")
- % (mfn, fn))
-
def _forgetremoved(wctx, mctx, branchmerge):
"""
Forget removed files
@@ -179,11 +152,6 @@ def manifestmerge(repo, p1, p2, pa, overwrite, partial):
if m and m != a: # changed from a to m
return m
if n and n != a: # changed from a to n
- if (n == 'l' or a == 'l') and m1.get(f) != ma.get(f):
- # can't automatically merge symlink flag when there
- # are file-level conflicts here, let filemerge take
- # care of it
- return m
return n
return '' # flag was cleared
@@ -198,16 +166,14 @@ def manifestmerge(repo, p1, p2, pa, overwrite, partial):
elif pa == p2: # backwards
pa = p1.p1()
elif pa and repo.ui.configbool("merge", "followcopies", True):
- copy, diverge, renamedelete = copies.mergecopies(repo, p1, p2, pa)
+ dirs = repo.ui.configbool("merge", "followdirs", True)
+ copy, diverge = copies.copies(repo, p1, p2, pa, dirs)
for of, fl in diverge.iteritems():
act("divergent renames", "dr", of, fl)
- for of, fl in renamedelete.iteritems():
- act("rename and delete", "rd", of, fl)
repo.ui.note(_("resolving manifests\n"))
- repo.ui.debug(" overwrite: %s, partial: %s\n"
- % (bool(overwrite), bool(partial)))
- repo.ui.debug(" ancestor: %s, local: %s, remote: %s\n" % (pa, p1, p2))
+ repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial)))
+ repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2))
m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest()
copied = set(copy.values())
@@ -256,7 +222,7 @@ def manifestmerge(repo, p1, p2, pa, overwrite, partial):
act("prompt keep", "a", f)
elif n[20:] == "a": # added, no remote
act("remote deleted", "f", f)
- else:
+ elif n[20:] != "u":
act("other deleted", "r", f)
for f, n in m2.iteritems():
@@ -276,13 +242,7 @@ def manifestmerge(repo, p1, p2, pa, overwrite, partial):
act("remote moved to " + f, "m",
f2, f, f, fmerge(f2, f, f2), True)
elif f not in ma:
- if (not overwrite
- and _checkunknownfile(repo, p1, p2, f)):
- rflags = fmerge(f, f, f)
- act("remote differs from untracked local",
- "m", f, f, f, rflags, False)
- else:
- act("remote created", "g", f, m2.flags(f))
+ act("remote created", "g", f, m2.flags(f))
elif n != ma[f]:
if repo.ui.promptchoice(
_("remote changed %s which local deleted\n"
@@ -313,6 +273,7 @@ def applyupdates(repo, action, wctx, mctx, actx, overwrite):
action.sort(key=actionkey)
# prescan for merges
+ u = repo.ui
for a in action:
f, m = a[:2]
if m == 'm': # merge
@@ -347,8 +308,8 @@ def applyupdates(repo, action, wctx, mctx, actx, overwrite):
numupdates = len(action)
for i, a in enumerate(action):
f, m = a[:2]
- repo.ui.progress(_('updating'), i + 1, item=f, total=numupdates,
- unit=_('files'))
+ u.progress(_('updating'), i + 1, item=f, total=numupdates,
+ unit=_('files'))
if f and f[0] == "/":
continue
if m == "r": # remove
@@ -365,8 +326,7 @@ def applyupdates(repo, action, wctx, mctx, actx, overwrite):
removed += 1
elif m == "m": # merge
if f == '.hgsubstate': # subrepo states need updating
- subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx),
- overwrite)
+ subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx), overwrite)
continue
f2, fd, flags, move = a[2:]
repo.wopener.audit(fd)
@@ -378,6 +338,7 @@ def applyupdates(repo, action, wctx, mctx, actx, overwrite):
updated += 1
else:
merged += 1
+ util.setflags(repo.wjoin(fd), 'l' in flags, 'x' in flags)
if (move and repo.dirstate.normalize(fd) != f
and os.path.lexists(repo.wjoin(f))):
repo.ui.debug("removing %s\n" % f)
@@ -411,18 +372,12 @@ def applyupdates(repo, action, wctx, mctx, actx, overwrite):
"multiple times to:\n") % f)
for nf in fl:
repo.ui.warn(" %s\n" % nf)
- elif m == "rd": # rename and delete
- fl = a[2]
- repo.ui.warn(_("note: possible conflict - %s was deleted "
- "and renamed to:\n") % f)
- for nf in fl:
- repo.ui.warn(" %s\n" % nf)
elif m == "e": # exec
flags = a[2]
repo.wopener.audit(f)
util.setflags(repo.wjoin(f), 'l' in flags, 'x' in flags)
ms.commit()
- repo.ui.progress(_('updating'), None, total=numupdates, unit=_('files'))
+ u.progress(_('updating'), None, total=numupdates, unit=_('files'))
return updated, merged, removed, unresolved
@@ -488,8 +443,7 @@ def recordupdates(repo, action, branchmerge):
if f:
repo.dirstate.drop(f)
-def update(repo, node, branchmerge, force, partial, ancestor=None,
- mergeancestor=False):
+def update(repo, node, branchmerge, force, partial, ancestor=None):
"""
Perform a merge between the working directory and the given node
@@ -497,10 +451,6 @@ def update(repo, node, branchmerge, force, partial, ancestor=None,
branchmerge = whether to merge between branches
force = whether to force branch merging or file overwriting
partial = a function to filter file lists (dirstate not updated)
- mergeancestor = if false, merging with an ancestor (fast-forward)
- is only allowed between different named branches. This flag
- is used by rebase extension as a temporary fix and should be
- avoided in general.
The table below shows all the behaviors of the update command
given the -c and -C or no options, whether the working directory
@@ -537,8 +487,8 @@ def update(repo, node, branchmerge, force, partial, ancestor=None,
if node is None:
# tip of current branch
try:
- node = repo.branchtip(wc.branch())
- except error.RepoLookupError:
+ node = repo.branchtags()[wc.branch()]
+ except KeyError:
if wc.branch() == "default": # no default branch!
node = repo.lookup("tip") # update to tip
else:
@@ -561,13 +511,12 @@ def update(repo, node, branchmerge, force, partial, ancestor=None,
raise util.Abort(_("merging with a working directory ancestor"
" has no effect"))
elif pa == p1:
- if not mergeancestor and p1.branch() == p2.branch():
- raise util.Abort(_("nothing to merge"),
- hint=_("use 'hg update' "
- "or check 'hg heads'"))
+ if p1.branch() == p2.branch():
+ raise util.Abort(_("nothing to merge (use 'hg update'"
+ " or check 'hg heads')"))
if not force and (wc.files() or wc.deleted()):
- raise util.Abort(_("outstanding uncommitted changes"),
- hint=_("use 'hg status' to list changes"))
+ raise util.Abort(_("outstanding uncommitted changes "
+ "(use 'hg status' to list changes)"))
for s in wc.substate:
if wc.sub(s).dirty():
raise util.Abort(_("outstanding uncommitted changes in "
@@ -584,20 +533,15 @@ def update(repo, node, branchmerge, force, partial, ancestor=None,
" --check to force update)"))
else:
# Allow jumping branches if clean and specific rev given
- pa = p1
+ overwrite = True
### calculate phase
action = []
- folding = not util.checkcase(repo.path)
- if folding:
- # collision check is not needed for clean update
- if (not branchmerge and
- (force or not wc.dirty(missing=True, branch=False))):
- _checkcollision(p2, None)
- else:
- _checkcollision(p2, wc)
+ wc.status(unknown=True) # prime cache
if not force:
- _checkunknown(repo, wc, p2)
+ _checkunknown(wc, p2)
+ if not util.checkcase(repo.path):
+ _checkcollision(p2)
action += _forgetremoved(wc, p2, branchmerge)
action += manifestmerge(repo, wc, p2, pa, overwrite, partial)
@@ -610,7 +554,7 @@ def update(repo, node, branchmerge, force, partial, ancestor=None,
stats = applyupdates(repo, action, wc, p2, pa, overwrite)
if not partial:
- repo.setparents(fp1, fp2)
+ repo.dirstate.setparents(fp1, fp2)
recordupdates(repo, action, branchmerge)
if not branchmerge:
repo.dirstate.setbranch(p2.branch())
diff --git a/mercurial/minirst.py b/mercurial/minirst.py
index 0586213..01e6528 100644
--- a/mercurial/minirst.py
+++ b/mercurial/minirst.py
@@ -18,33 +18,18 @@ Remember to update http://mercurial.selenic.com/wiki/HelpStyleGuide
when adding support for new constructs.
"""
-import re
+import re, sys
import util, encoding
from i18n import _
+
def replace(text, substs):
- '''
- Apply a list of (find, replace) pairs to a text.
-
- >>> replace("foo bar", [('f', 'F'), ('b', 'B')])
- 'Foo Bar'
- >>> encoding.encoding = 'latin1'
- >>> replace('\\x81\\\\', [('\\\\', '/')])
- '\\x81/'
- >>> encoding.encoding = 'shiftjis'
- >>> replace('\\x81\\\\', [('\\\\', '/')])
- '\\x81\\\\'
- '''
-
- # some character encodings (cp932 for Japanese, at least) use
- # ASCII characters other than control/alphabet/digit as a part of
- # multi-bytes characters, so direct replacing with such characters
- # on strings in local encoding causes invalid byte sequences.
utext = text.decode(encoding.encoding)
for f, t in substs:
utext = utext.replace(f, t)
return utext.encode(encoding.encoding)
+
_blockre = re.compile(r"\n(?:\s*\n)+")
def findblocks(text):
@@ -54,14 +39,14 @@ def findblocks(text):
has an 'indent' field and a 'lines' field.
"""
blocks = []
- for b in _blockre.split(text.lstrip('\n').rstrip()):
+ for b in _blockre.split(text.strip()):
lines = b.splitlines()
- if lines:
- indent = min((len(l) - len(l.lstrip())) for l in lines)
- lines = [l[indent:] for l in lines]
- blocks.append(dict(indent=indent, lines=lines))
+ indent = min((len(l) - len(l.lstrip())) for l in lines)
+ lines = [l[indent:] for l in lines]
+ blocks.append(dict(indent=indent, lines=lines))
return blocks
+
def findliteralblocks(blocks):
"""Finds literal blocks and adds a 'type' field to the blocks.
@@ -118,7 +103,6 @@ _optionre = re.compile(r'^(-([a-zA-Z0-9]), )?(--[a-z0-9-]+)'
r'((.*) +)(.*)$')
_fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)')
_definitionre = re.compile(r'[^ ]')
-_tablere = re.compile(r'(=+\s+)*=+')
def splitparagraphs(blocks):
"""Split paragraphs into lists."""
@@ -162,28 +146,34 @@ def splitparagraphs(blocks):
i += 1
return blocks
-_fieldwidth = 14
+
+_fieldwidth = 12
def updatefieldlists(blocks):
- """Find key for field lists."""
+ """Find key and maximum key width for field lists."""
i = 0
while i < len(blocks):
if blocks[i]['type'] != 'field':
i += 1
continue
+ keywidth = 0
j = i
while j < len(blocks) and blocks[j]['type'] == 'field':
m = _fieldre.match(blocks[j]['lines'][0])
key, rest = m.groups()
blocks[j]['lines'][0] = rest
blocks[j]['key'] = key
+ keywidth = max(keywidth, len(key))
j += 1
+ for block in blocks[i:j]:
+ block['keywidth'] = keywidth
i = j + 1
return blocks
+
def updateoptionlists(blocks):
i = 0
while i < len(blocks):
@@ -248,67 +238,18 @@ def prunecontainers(blocks, keep):
# Always delete "..container:: type" block
del blocks[i]
j = i
- i -= 1
while j < len(blocks) and blocks[j]['indent'] > indent:
if prune:
del blocks[j]
+ i -= 1 # adjust outer index
else:
blocks[j]['indent'] -= adjustment
j += 1
i += 1
return blocks, pruned
-_sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""")
-
-def findtables(blocks):
- '''Find simple tables
-
- Only simple one-line table elements are supported
- '''
- for block in blocks:
- # Searching for a block that looks like this:
- #
- # === ==== ===
- # A B C
- # === ==== === <- optional
- # 1 2 3
- # x y z
- # === ==== ===
- if (block['type'] == 'paragraph' and
- len(block['lines']) > 2 and
- _tablere.match(block['lines'][0]) and
- block['lines'][0] == block['lines'][-1]):
- block['type'] = 'table'
- block['header'] = False
- div = block['lines'][0]
-
- # column markers are ASCII so we can calculate column
- # position in bytes
- columns = [x for x in xrange(len(div))
- if div[x] == '=' and (x == 0 or div[x - 1] == ' ')]
- rows = []
- for l in block['lines'][1:-1]:
- if l == div:
- block['header'] = True
- continue
- row = []
- # we measure columns not in bytes or characters but in
- # colwidth which makes things tricky
- pos = columns[0] # leading whitespace is bytes
- for n, start in enumerate(columns):
- if n + 1 < len(columns):
- width = columns[n + 1] - start
- v = encoding.getcols(l, pos, width) # gather columns
- pos += len(v) # calculate byte position of end
- row.append(v.strip())
- else:
- row.append(l[pos:].strip())
- rows.append(row)
-
- block['table'] = rows
-
- return blocks
+_sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""")
def findsections(blocks):
"""Finds sections.
@@ -332,6 +273,7 @@ def findsections(blocks):
del block['lines'][1]
return blocks
+
def inlineliterals(blocks):
substs = [('``', '"')]
for b in blocks:
@@ -339,6 +281,7 @@ def inlineliterals(blocks):
b['lines'] = [replace(l, substs) for l in b['lines']]
return blocks
+
def hgrole(blocks):
substs = [(':hg:`', '"hg '), ('`', '"')]
for b in blocks:
@@ -350,6 +293,7 @@ def hgrole(blocks):
b['lines'] = [replace(l, substs) for l in b['lines']]
return blocks
+
def addmargins(blocks):
"""Adds empty blocks for vertical spacing.
@@ -422,7 +366,7 @@ def formatoption(block, width):
hanging = block['optstrwidth']
initindent = '%s%s ' % (block['optstr'], ' ' * ((hanging - colwidth)))
hangindent = ' ' * (encoding.colwidth(initindent) + 1)
- return ' %s\n' % (util.wrap(desc, usablewidth,
+ return ' %s' % (util.wrap(desc, usablewidth,
initindent=initindent,
hangindent=hangindent))
@@ -437,47 +381,25 @@ def formatblock(block, width):
defindent = indent + hang * ' '
text = ' '.join(map(str.strip, block['lines']))
- return '%s\n%s\n' % (indent + admonition,
- util.wrap(text, width=width,
- initindent=defindent,
- hangindent=defindent))
+ return '%s\n%s' % (indent + admonition, util.wrap(text, width=width,
+ initindent=defindent,
+ hangindent=defindent))
if block['type'] == 'margin':
- return '\n'
+ return ''
if block['type'] == 'literal':
indent += ' '
- return indent + ('\n' + indent).join(block['lines']) + '\n'
+ return indent + ('\n' + indent).join(block['lines'])
if block['type'] == 'section':
underline = encoding.colwidth(block['lines'][0]) * block['underline']
- return "%s%s\n%s%s\n" % (indent, block['lines'][0],indent, underline)
- if block['type'] == 'table':
- table = block['table']
- # compute column widths
- widths = [max([encoding.colwidth(e) for e in c]) for c in zip(*table)]
- text = ''
- span = sum(widths) + len(widths) - 1
- indent = ' ' * block['indent']
- hang = ' ' * (len(indent) + span - widths[-1])
-
- for row in table:
- l = []
- for w, v in zip(widths, row):
- pad = ' ' * (w - encoding.colwidth(v))
- l.append(v + pad)
- l = ' '.join(l)
- l = util.wrap(l, width=width, initindent=indent, hangindent=hang)
- if not text and block['header']:
- text = l + '\n' + indent + '-' * (min(width, span)) + '\n'
- else:
- text += l + "\n"
- return text
+ return "%s%s\n%s%s" % (indent, block['lines'][0],indent, underline)
if block['type'] == 'definition':
term = indent + block['lines'][0]
hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip())
defindent = indent + hang * ' '
text = ' '.join(map(str.strip, block['lines'][1:]))
- return '%s\n%s\n' % (term, util.wrap(text, width=width,
- initindent=defindent,
- hangindent=defindent))
+ return '%s\n%s' % (term, util.wrap(text, width=width,
+ initindent=defindent,
+ hangindent=defindent))
subindent = indent
if block['type'] == 'bullet':
if block['lines'][0].startswith('| '):
@@ -488,13 +410,19 @@ def formatblock(block, width):
m = _bulletre.match(block['lines'][0])
subindent = indent + m.end() * ' '
elif block['type'] == 'field':
+ keywidth = block['keywidth']
key = block['key']
+
subindent = indent + _fieldwidth * ' '
if len(key) + 2 > _fieldwidth:
# key too large, use full line width
key = key.ljust(width)
+ elif keywidth + 2 < _fieldwidth:
+ # all keys are small, add only two spaces
+ key = key.ljust(keywidth + 2)
+ subindent = indent + (keywidth + 2) * ' '
else:
- # key fits within field width
+ # mixed sizes, use fieldwidth for this one
key = key.ljust(_fieldwidth)
block['lines'][0] = key + block['lines'][0]
elif block['type'] == 'option':
@@ -503,103 +431,15 @@ def formatblock(block, width):
text = ' '.join(map(str.strip, block['lines']))
return util.wrap(text, width=width,
initindent=indent,
- hangindent=subindent) + '\n'
-
-def formathtml(blocks):
- """Format RST blocks as HTML"""
-
- out = []
- headernest = ''
- listnest = []
-
- def openlist(start, level):
- if not listnest or listnest[-1][0] != start:
- listnest.append((start, level))
- out.append('<%s>\n' % start)
-
- blocks = [b for b in blocks if b['type'] != 'margin']
-
- for pos, b in enumerate(blocks):
- btype = b['type']
- level = b['indent']
- lines = b['lines']
-
- if btype == 'admonition':
- admonition = _admonitiontitles[b['admonitiontitle']]
- text = ' '.join(map(str.strip, lines))
- out.append('<p>\n<b>%s</b> %s\n</p>\n' % (admonition, text))
- elif btype == 'paragraph':
- out.append('<p>\n%s\n</p>\n' % '\n'.join(lines))
- elif btype == 'margin':
- pass
- elif btype == 'literal':
- out.append('<pre>\n%s\n</pre>\n' % '\n'.join(lines))
- elif btype == 'section':
- i = b['underline']
- if i not in headernest:
- headernest += i
- level = headernest.index(i) + 1
- out.append('<h%d>%s</h%d>\n' % (level, lines[0], level))
- elif btype == 'table':
- table = b['table']
- t = []
- for row in table:
- l = []
- for v in zip(row):
- if not t:
- l.append('<th>%s</th>' % v)
- else:
- l.append('<td>%s</td>' % v)
- t.append(' <tr>%s</tr>\n' % ''.join(l))
- out.append('<table>\n%s</table>\n' % ''.join(t))
- elif btype == 'definition':
- openlist('dl', level)
- term = lines[0]
- text = ' '.join(map(str.strip, lines[1:]))
- out.append(' <dt>%s\n <dd>%s\n' % (term, text))
- elif btype == 'bullet':
- bullet, head = lines[0].split(' ', 1)
- if bullet == '-':
- openlist('ul', level)
- else:
- openlist('ol', level)
- out.append(' <li> %s\n' % ' '.join([head] + lines[1:]))
- elif btype == 'field':
- openlist('dl', level)
- key = b['key']
- text = ' '.join(map(str.strip, lines))
- out.append(' <dt>%s\n <dd>%s\n' % (key, text))
- elif btype == 'option':
- openlist('dl', level)
- opt = b['optstr']
- desc = ' '.join(map(str.strip, lines))
- out.append(' <dt>%s\n <dd>%s\n' % (opt, desc))
-
- # close lists if indent level of next block is lower
- if listnest:
- start, level = listnest[-1]
- if pos == len(blocks) - 1:
- out.append('</%s>\n' % start)
- listnest.pop()
- else:
- nb = blocks[pos + 1]
- ni = nb['indent']
- if (ni < level or
- (ni == level and
- nb['type'] not in 'definition bullet field option')):
- out.append('</%s>\n' % start)
- listnest.pop()
-
- return ''.join(out)
-
-def parse(text, indent=0, keep=None):
- """Parse text into a list of blocks"""
- pruned = []
+ hangindent=subindent)
+
+
+def format(text, width, indent=0, keep=None):
+ """Parse and format the text according to width."""
blocks = findblocks(text)
for b in blocks:
b['indent'] += indent
blocks = findliteralblocks(blocks)
- blocks = findtables(blocks)
blocks, pruned = prunecontainers(blocks, keep or [])
blocks = findsections(blocks)
blocks = inlineliterals(blocks)
@@ -610,68 +450,33 @@ def parse(text, indent=0, keep=None):
blocks = addmargins(blocks)
blocks = prunecomments(blocks)
blocks = findadmonitions(blocks)
- return blocks, pruned
-
-def formatblocks(blocks, width):
- text = ''.join(formatblock(b, width) for b in blocks)
- return text
-
-def format(text, width=80, indent=0, keep=None, style='plain'):
- """Parse and format the text according to width."""
- blocks, pruned = parse(text, indent, keep or [])
- if style == 'html':
- text = formathtml(blocks)
- else:
- text = ''.join(formatblock(b, width) for b in blocks)
+ text = '\n'.join(formatblock(b, width) for b in blocks)
if keep is None:
return text
else:
return text, pruned
-def getsections(blocks):
- '''return a list of (section name, nesting level, blocks) tuples'''
- nest = ""
- level = 0
- secs = []
- for b in blocks:
- if b['type'] == 'section':
- i = b['underline']
- if i not in nest:
- nest += i
- level = nest.index(i) + 1
- nest = nest[:level]
- secs.append((b['lines'][0], level, [b]))
- else:
- if not secs:
- # add an initial empty section
- secs = [('', 0, [])]
- secs[-1][2].append(b)
- return secs
-
-def decorateblocks(blocks, width):
- '''generate a list of (section name, line text) pairs for search'''
- lines = []
- for s in getsections(blocks):
- section = s[0]
- text = formatblocks(s[2], width)
- lines.append([(section, l) for l in text.splitlines(True)])
- return lines
-
-def maketable(data, indent=0, header=False):
- '''Generate an RST table for the given table data as a list of lines'''
-
- widths = [max(encoding.colwidth(e) for e in c) for c in zip(*data)]
- indent = ' ' * indent
- div = indent + ' '.join('=' * w for w in widths) + '\n'
-
- out = [div]
- for row in data:
- l = []
- for w, v in zip(widths, row):
- pad = ' ' * (w - encoding.colwidth(v))
- l.append(v + pad)
- out.append(indent + ' '.join(l) + "\n")
- if header and len(data) > 1:
- out.insert(2, div)
- out.append(div)
- return out
+
+if __name__ == "__main__":
+ from pprint import pprint
+
+ def debug(func, *args):
+ blocks = func(*args)
+ print "*** after %s:" % func.__name__
+ pprint(blocks)
+ print
+ return blocks
+
+ text = sys.stdin.read()
+ blocks = debug(findblocks, text)
+ blocks = debug(findliteralblocks, blocks)
+ blocks, pruned = debug(prunecontainers, blocks, sys.argv[1:])
+ blocks = debug(inlineliterals, blocks)
+ blocks = debug(splitparagraphs, blocks)
+ blocks = debug(updatefieldlists, blocks)
+ blocks = debug(updateoptionlists, blocks)
+ blocks = debug(findsections, blocks)
+ blocks = debug(addmargins, blocks)
+ blocks = debug(prunecomments, blocks)
+ blocks = debug(findadmonitions, blocks)
+ print '\n'.join(formatblock(b, 30) for b in blocks)
diff --git a/mercurial/mpatch.c b/mercurial/mpatch.c
index ab429b5..e85d20d 100644
--- a/mercurial/mpatch.c
+++ b/mercurial/mpatch.c
@@ -20,13 +20,48 @@
of the GNU General Public License, incorporated herein by reference.
*/
-#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <stdlib.h>
#include <string.h>
#include "util.h"
+/* Definitions to get compatibility with python 2.4 and earlier which
+ does not have Py_ssize_t. See also PEP 353.
+ Note: msvc (8 or earlier) does not have ssize_t, so we use Py_ssize_t.
+*/
+#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
+typedef int Py_ssize_t;
+#define PY_SSIZE_T_MAX INT_MAX
+#define PY_SSIZE_T_MIN INT_MIN
+#endif
+
+#ifdef _WIN32
+#ifdef _MSC_VER
+/* msvc 6.0 has problems */
+#define inline __inline
+typedef unsigned long uint32_t;
+#else
+#include <stdint.h>
+#endif
+static uint32_t ntohl(uint32_t x)
+{
+ return ((x & 0x000000ffUL) << 24) |
+ ((x & 0x0000ff00UL) << 8) |
+ ((x & 0x00ff0000UL) >> 8) |
+ ((x & 0xff000000UL) >> 24);
+}
+#else
+/* not windows */
+#include <sys/types.h>
+#if defined __BEOS__ && !defined __HAIKU__
+#include <ByteOrder.h>
+#else
+#include <arpa/inet.h>
+#endif
+#include <inttypes.h>
+#endif
+
static char mpatch_doc[] = "Efficient binary patching.";
static PyObject *mpatch_Error;
@@ -39,7 +74,7 @@ struct flist {
struct frag *base, *head, *tail;
};
-static struct flist *lalloc(Py_ssize_t size)
+static struct flist *lalloc(int size)
{
struct flist *a = NULL;
@@ -69,7 +104,7 @@ static void lfree(struct flist *a)
}
}
-static Py_ssize_t lsize(struct flist *a)
+static int lsize(struct flist *a)
{
return a->tail - a->head;
}
@@ -198,11 +233,12 @@ static struct flist *combine(struct flist *a, struct flist *b)
}
/* decode a binary patch into a hunk list */
-static struct flist *decode(const char *bin, Py_ssize_t len)
+static struct flist *decode(const char *bin, int len)
{
struct flist *l;
struct frag *lt;
const char *data = bin + 12, *end = bin + len;
+ uint32_t decode[3]; /* for dealing with alignment issues */
/* assume worst case size, we won't have many of these lists */
l = lalloc(len / 12);
@@ -212,9 +248,10 @@ static struct flist *decode(const char *bin, Py_ssize_t len)
lt = l->tail;
while (data <= end) {
- lt->start = getbe32(bin);
- lt->end = getbe32(bin + 4);
- lt->len = getbe32(bin + 8);
+ memcpy(decode, bin, 12);
+ lt->start = ntohl(decode[0]);
+ lt->end = ntohl(decode[1]);
+ lt->len = ntohl(decode[2]);
if (lt->start > lt->end)
break; /* sanity check */
bin = data + lt->len;
@@ -237,9 +274,9 @@ static struct flist *decode(const char *bin, Py_ssize_t len)
}
/* calculate the size of resultant text */
-static Py_ssize_t calcsize(Py_ssize_t len, struct flist *l)
+static int calcsize(int len, struct flist *l)
{
- Py_ssize_t outlen = 0, last = 0;
+ int outlen = 0, last = 0;
struct frag *f = l->head;
while (f != l->tail) {
@@ -259,7 +296,7 @@ static Py_ssize_t calcsize(Py_ssize_t len, struct flist *l)
return outlen;
}
-static int apply(char *buf, const char *orig, Py_ssize_t len, struct flist *l)
+static int apply(char *buf, const char *orig, int len, struct flist *l)
{
struct frag *f = l->head;
int last = 0;
@@ -284,9 +321,10 @@ static int apply(char *buf, const char *orig, Py_ssize_t len, struct flist *l)
}
/* recursively generate a patch of all bins between start and end */
-static struct flist *fold(PyObject *bins, Py_ssize_t start, Py_ssize_t end)
+static struct flist *fold(PyObject *bins, int start, int end)
{
- Py_ssize_t len, blen;
+ int len;
+ Py_ssize_t blen;
const char *buffer;
if (start + 1 == end) {
@@ -312,7 +350,8 @@ patches(PyObject *self, PyObject *args)
struct flist *patch;
const char *in;
char *out;
- Py_ssize_t len, outlen, inlen;
+ int len, outlen;
+ Py_ssize_t inlen;
if (!PyArg_ParseTuple(args, "OO:mpatch", &text, &bins))
return NULL;
@@ -356,8 +395,9 @@ static PyObject *
patchedsize(PyObject *self, PyObject *args)
{
long orig, start, end, len, outlen = 0, last = 0;
- Py_ssize_t patchlen;
+ int patchlen;
char *bin, *binend, *data;
+ uint32_t decode[3]; /* for dealing with alignment issues */
if (!PyArg_ParseTuple(args, "ls#", &orig, &bin, &patchlen))
return NULL;
@@ -366,9 +406,10 @@ patchedsize(PyObject *self, PyObject *args)
data = bin + 12;
while (data <= binend) {
- start = getbe32(bin);
- end = getbe32(bin + 4);
- len = getbe32(bin + 8);
+ memcpy(decode, bin, 12);
+ start = ntohl(decode[0]);
+ end = ntohl(decode[1]);
+ len = ntohl(decode[2]);
if (start > end)
break; /* sanity check */
bin = data + len;
diff --git a/mercurial/obsolete.py b/mercurial/obsolete.py
deleted file mode 100644
index aea116d..0000000
--- a/mercurial/obsolete.py
+++ /dev/null
@@ -1,331 +0,0 @@
-# obsolete.py - obsolete markers handling
-#
-# Copyright 2012 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
-# Logilab SA <contact@logilab.fr>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-"""Obsolete markers handling
-
-An obsolete marker maps an old changeset to a list of new
-changesets. If the list of new changesets is empty, the old changeset
-is said to be "killed". Otherwise, the old changeset is being
-"replaced" by the new changesets.
-
-Obsolete markers can be used to record and distribute changeset graph
-transformations performed by history rewriting operations, and help
-building new tools to reconciliate conflicting rewriting actions. To
-facilitate conflicts resolution, markers include various annotations
-besides old and news changeset identifiers, such as creation date or
-author name.
-
-
-Format
-------
-
-Markers are stored in an append-only file stored in
-'.hg/store/obsstore'.
-
-The file starts with a version header:
-
-- 1 unsigned byte: version number, starting at zero.
-
-
-The header is followed by the markers. Each marker is made of:
-
-- 1 unsigned byte: number of new changesets "R", could be zero.
-
-- 1 unsigned 32-bits integer: metadata size "M" in bytes.
-
-- 1 byte: a bit field. It is reserved for flags used in obsolete
- markers common operations, to avoid repeated decoding of metadata
- entries.
-
-- 20 bytes: obsoleted changeset identifier.
-
-- N*20 bytes: new changesets identifiers.
-
-- M bytes: metadata as a sequence of nul-terminated strings. Each
- string contains a key and a value, separated by a color ':', without
- additional encoding. Keys cannot contain '\0' or ':' and values
- cannot contain '\0'.
-"""
-import struct
-from mercurial import util, base85
-from i18n import _
-
-# the obsolete feature is not mature enought to be enabled by default.
-# you have to rely on third party extension extension to enable this.
-_enabled = False
-
-_pack = struct.pack
-_unpack = struct.unpack
-
-# the obsolete feature is not mature enought to be enabled by default.
-# you have to rely on third party extension extension to enable this.
-_enabled = False
-
-# data used for parsing and writing
-_fmversion = 0
-_fmfixed = '>BIB20s'
-_fmnode = '20s'
-_fmfsize = struct.calcsize(_fmfixed)
-_fnodesize = struct.calcsize(_fmnode)
-
-def _readmarkers(data):
- """Read and enumerate markers from raw data"""
- off = 0
- diskversion = _unpack('>B', data[off:off + 1])[0]
- off += 1
- if diskversion != _fmversion:
- raise util.Abort(_('parsing obsolete marker: unknown version %r')
- % diskversion)
-
- # Loop on markers
- l = len(data)
- while off + _fmfsize <= l:
- # read fixed part
- cur = data[off:off + _fmfsize]
- off += _fmfsize
- nbsuc, mdsize, flags, pre = _unpack(_fmfixed, cur)
- # read replacement
- sucs = ()
- if nbsuc:
- s = (_fnodesize * nbsuc)
- cur = data[off:off + s]
- sucs = _unpack(_fmnode * nbsuc, cur)
- off += s
- # read metadata
- # (metadata will be decoded on demand)
- metadata = data[off:off + mdsize]
- if len(metadata) != mdsize:
- raise util.Abort(_('parsing obsolete marker: metadata is too '
- 'short, %d bytes expected, got %d')
- % (mdsize, len(metadata)))
- off += mdsize
- yield (pre, sucs, flags, metadata)
-
-def encodemeta(meta):
- """Return encoded metadata string to string mapping.
-
- Assume no ':' in key and no '\0' in both key and value."""
- for key, value in meta.iteritems():
- if ':' in key or '\0' in key:
- raise ValueError("':' and '\0' are forbidden in metadata key'")
- if '\0' in value:
- raise ValueError("':' are forbidden in metadata value'")
- return '\0'.join(['%s:%s' % (k, meta[k]) for k in sorted(meta)])
-
-def decodemeta(data):
- """Return string to string dictionary from encoded version."""
- d = {}
- for l in data.split('\0'):
- if l:
- key, value = l.split(':')
- d[key] = value
- return d
-
-class marker(object):
- """Wrap obsolete marker raw data"""
-
- def __init__(self, repo, data):
- # the repo argument will be used to create changectx in later version
- self._repo = repo
- self._data = data
- self._decodedmeta = None
-
- def precnode(self):
- """Precursor changeset node identifier"""
- return self._data[0]
-
- def succnodes(self):
- """List of successor changesets node identifiers"""
- return self._data[1]
-
- def metadata(self):
- """Decoded metadata dictionary"""
- if self._decodedmeta is None:
- self._decodedmeta = decodemeta(self._data[3])
- return self._decodedmeta
-
- def date(self):
- """Creation date as (unixtime, offset)"""
- parts = self.metadata()['date'].split(' ')
- return (float(parts[0]), int(parts[1]))
-
-class obsstore(object):
- """Store obsolete markers
-
- Markers can be accessed with two mappings:
- - precursors: old -> set(new)
- - successors: new -> set(old)
- """
-
- def __init__(self, sopener):
- self._all = []
- # new markers to serialize
- self.precursors = {}
- self.successors = {}
- self.sopener = sopener
- data = sopener.tryread('obsstore')
- if data:
- self._load(_readmarkers(data))
-
- def __iter__(self):
- return iter(self._all)
-
- def __nonzero__(self):
- return bool(self._all)
-
- def create(self, transaction, prec, succs=(), flag=0, metadata=None):
- """obsolete: add a new obsolete marker
-
- * ensuring it is hashable
- * check mandatory metadata
- * encode metadata
- """
- if metadata is None:
- metadata = {}
- if len(prec) != 20:
- raise ValueError(prec)
- for succ in succs:
- if len(succ) != 20:
- raise ValueError(succ)
- marker = (str(prec), tuple(succs), int(flag), encodemeta(metadata))
- self.add(transaction, [marker])
-
- def add(self, transaction, markers):
- """Add new markers to the store
-
- Take care of filtering duplicate.
- Return the number of new marker."""
- if not _enabled:
- raise util.Abort('obsolete feature is not enabled on this repo')
- new = [m for m in markers if m not in self._all]
- if new:
- f = self.sopener('obsstore', 'ab')
- try:
- # Whether the file's current position is at the begin or at
- # the end after opening a file for appending is implementation
- # defined. So we must seek to the end before calling tell(),
- # or we may get a zero offset for non-zero sized files on
- # some platforms (issue3543).
- f.seek(0, 2) # os.SEEK_END
- offset = f.tell()
- transaction.add('obsstore', offset)
- # offset == 0: new file - add the version header
- for bytes in _encodemarkers(new, offset == 0):
- f.write(bytes)
- finally:
- # XXX: f.close() == filecache invalidation == obsstore rebuilt.
- # call 'filecacheentry.refresh()' here
- f.close()
- self._load(new)
- return len(new)
-
- def mergemarkers(self, transation, data):
- markers = _readmarkers(data)
- self.add(transation, markers)
-
- def _load(self, markers):
- for mark in markers:
- self._all.append(mark)
- pre, sucs = mark[:2]
- self.precursors.setdefault(pre, set()).add(mark)
- for suc in sucs:
- self.successors.setdefault(suc, set()).add(mark)
-
-def _encodemarkers(markers, addheader=False):
- # Kept separate from flushmarkers(), it will be reused for
- # markers exchange.
- if addheader:
- yield _pack('>B', _fmversion)
- for marker in markers:
- yield _encodeonemarker(marker)
-
-
-def _encodeonemarker(marker):
- pre, sucs, flags, metadata = marker
- nbsuc = len(sucs)
- format = _fmfixed + (_fmnode * nbsuc)
- data = [nbsuc, len(metadata), flags, pre]
- data.extend(sucs)
- return _pack(format, *data) + metadata
-
-# arbitrary picked to fit into 8K limit from HTTP server
-# you have to take in account:
-# - the version header
-# - the base85 encoding
-_maxpayload = 5300
-
-def listmarkers(repo):
- """List markers over pushkey"""
- if not repo.obsstore:
- return {}
- keys = {}
- parts = []
- currentlen = _maxpayload * 2 # ensure we create a new part
- for marker in repo.obsstore:
- nextdata = _encodeonemarker(marker)
- if (len(nextdata) + currentlen > _maxpayload):
- currentpart = []
- currentlen = 0
- parts.append(currentpart)
- currentpart.append(nextdata)
- currentlen += len(nextdata)
- for idx, part in enumerate(reversed(parts)):
- data = ''.join([_pack('>B', _fmversion)] + part)
- keys['dump%i' % idx] = base85.b85encode(data)
- return keys
-
-def pushmarker(repo, key, old, new):
- """Push markers over pushkey"""
- if not key.startswith('dump'):
- repo.ui.warn(_('unknown key: %r') % key)
- return 0
- if old:
- repo.ui.warn(_('unexpected old value') % key)
- return 0
- data = base85.b85decode(new)
- lock = repo.lock()
- try:
- tr = repo.transaction('pushkey: obsolete markers')
- try:
- repo.obsstore.mergemarkers(tr, data)
- tr.close()
- return 1
- finally:
- tr.release()
- finally:
- lock.release()
-
-def allmarkers(repo):
- """all obsolete markers known in a repository"""
- for markerdata in repo.obsstore:
- yield marker(repo, markerdata)
-
-def precursormarkers(ctx):
- """obsolete marker making this changeset obsolete"""
- for data in ctx._repo.obsstore.precursors.get(ctx.node(), ()):
- yield marker(ctx._repo, data)
-
-def successormarkers(ctx):
- """obsolete marker marking this changeset as a successors"""
- for data in ctx._repo.obsstore.successors.get(ctx.node(), ()):
- yield marker(ctx._repo, data)
-
-def anysuccessors(obsstore, node):
- """Yield every successor of <node>
-
- This this a linear yield unsuitable to detect splitted changeset."""
- remaining = set([node])
- seen = set(remaining)
- while remaining:
- current = remaining.pop()
- yield current
- for mark in obsstore.precursors.get(current, ()):
- for suc in mark[1]:
- if suc not in seen:
- seen.add(suc)
- remaining.add(suc)
diff --git a/mercurial/osutil.c b/mercurial/osutil.c
index 5c3efb7..196d5fe 100644
--- a/mercurial/osutil.c
+++ b/mercurial/osutil.c
@@ -12,7 +12,6 @@
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
-#include <errno.h>
#ifdef _WIN32
#include <windows.h>
@@ -289,8 +288,7 @@ static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
#endif
if (pathlen >= PATH_MAX) {
- errno = ENAMETOOLONG;
- PyErr_SetFromErrnoWithFilename(PyExc_OSError, path);
+ PyErr_SetString(PyExc_ValueError, "path too long");
goto error_value;
}
strncpy(fullpath, path, PATH_MAX);
@@ -331,9 +329,6 @@ static PyObject *_listdir(char *path, int pathlen, int keepstat, char *skip)
err = lstat(fullpath, &st);
#endif
if (err == -1) {
- /* race with file deletion? */
- if (errno == ENOENT)
- continue;
strncpy(fullpath + pathlen + 1, ent->d_name,
PATH_MAX - pathlen);
fullpath[PATH_MAX] = 0;
diff --git a/mercurial/parsers.c b/mercurial/parsers.c
index 6710ea6..66060d1 100644
--- a/mercurial/parsers.c
+++ b/mercurial/parsers.c
@@ -13,10 +13,8 @@
#include "util.h"
-static inline int hexdigit(const char *p, Py_ssize_t off)
+static int hexdigit(char c)
{
- char c = p[off];
-
if (c >= '0' && c <= '9')
return c - '0';
if (c >= 'a' && c <= 'f')
@@ -34,8 +32,8 @@ static inline int hexdigit(const char *p, Py_ssize_t off)
static PyObject *unhexlify(const char *str, int len)
{
PyObject *ret;
+ const char *c;
char *d;
- int i;
ret = PyBytes_FromStringAndSize(NULL, len / 2);
@@ -44,9 +42,9 @@ static PyObject *unhexlify(const char *str, int len)
d = PyBytes_AsString(ret);
- for (i = 0; i < len;) {
- int hi = hexdigit(str, i++);
- int lo = hexdigit(str, i++);
+ for (c = str; c < str + len;) {
+ int hi = hexdigit(*c++);
+ int lo = hexdigit(*c++);
*d++ = (hi << 4) | lo;
}
@@ -137,6 +135,33 @@ quit:
return NULL;
}
+#ifdef _WIN32
+#ifdef _MSC_VER
+/* msvc 6.0 has problems */
+#define inline __inline
+typedef unsigned long uint32_t;
+typedef unsigned __int64 uint64_t;
+#else
+#include <stdint.h>
+#endif
+static uint32_t ntohl(uint32_t x)
+{
+ return ((x & 0x000000ffUL) << 24) |
+ ((x & 0x0000ff00UL) << 8) |
+ ((x & 0x00ff0000UL) >> 8) |
+ ((x & 0xff000000UL) >> 24);
+}
+#else
+/* not windows */
+#include <sys/types.h>
+#if defined __BEOS__ && !defined __HAIKU__
+#include <ByteOrder.h>
+#else
+#include <arpa/inet.h>
+#endif
+#include <inttypes.h>
+#endif
+
static PyObject *parse_dirstate(PyObject *self, PyObject *args)
{
PyObject *dmap, *cmap, *parents = NULL, *ret = NULL;
@@ -145,6 +170,7 @@ static PyObject *parse_dirstate(PyObject *self, PyObject *args)
int state, mode, size, mtime;
unsigned int flen;
int len;
+ uint32_t decode[4]; /* for alignment */
if (!PyArg_ParseTuple(args, "O!O!s#:parse_dirstate",
&PyDict_Type, &dmap,
@@ -167,10 +193,11 @@ static PyObject *parse_dirstate(PyObject *self, PyObject *args)
while (cur < end - 17) {
/* unpack header */
state = *cur;
- mode = getbe32(cur + 1);
- size = getbe32(cur + 5);
- mtime = getbe32(cur + 9);
- flen = getbe32(cur + 13);
+ memcpy(decode, cur + 1, 16);
+ mode = ntohl(decode[0]);
+ size = ntohl(decode[1]);
+ mtime = ntohl(decode[2]);
+ flen = ntohl(decode[3]);
cur += 17;
if (cur + flen > end || cur + flen < cur) {
PyErr_SetString(PyExc_ValueError, "overflow in dirstate");
@@ -214,240 +241,10 @@ quit:
return ret;
}
-static inline int getintat(PyObject *tuple, int off, uint32_t *v)
-{
- PyObject *o = PyTuple_GET_ITEM(tuple, off);
- long val;
-
- if (PyInt_Check(o))
- val = PyInt_AS_LONG(o);
- else if (PyLong_Check(o)) {
- val = PyLong_AsLong(o);
- if (val == -1 && PyErr_Occurred())
- return -1;
- } else {
- PyErr_SetString(PyExc_TypeError, "expected an int or long");
- return -1;
- }
- if (LONG_MAX > INT_MAX && (val > INT_MAX || val < INT_MIN)) {
- PyErr_SetString(PyExc_OverflowError,
- "Python value to large to convert to uint32_t");
- return -1;
- }
- *v = (uint32_t)val;
- return 0;
-}
-
-static PyObject *dirstate_unset;
-
-/*
- * Efficiently pack a dirstate object into its on-disk format.
- */
-static PyObject *pack_dirstate(PyObject *self, PyObject *args)
-{
- PyObject *packobj = NULL;
- PyObject *map, *copymap, *pl;
- Py_ssize_t nbytes, pos, l;
- PyObject *k, *v, *pn;
- char *p, *s;
- double now;
-
- if (!PyArg_ParseTuple(args, "O!O!Od:pack_dirstate",
- &PyDict_Type, &map, &PyDict_Type, &copymap,
- &pl, &now))
- return NULL;
-
- if (!PySequence_Check(pl) || PySequence_Size(pl) != 2) {
- PyErr_SetString(PyExc_TypeError, "expected 2-element sequence");
- return NULL;
- }
-
- /* Figure out how much we need to allocate. */
- for (nbytes = 40, pos = 0; PyDict_Next(map, &pos, &k, &v);) {
- PyObject *c;
- if (!PyString_Check(k)) {
- PyErr_SetString(PyExc_TypeError, "expected string key");
- goto bail;
- }
- nbytes += PyString_GET_SIZE(k) + 17;
- c = PyDict_GetItem(copymap, k);
- if (c) {
- if (!PyString_Check(c)) {
- PyErr_SetString(PyExc_TypeError,
- "expected string key");
- goto bail;
- }
- nbytes += PyString_GET_SIZE(c) + 1;
- }
- }
-
- packobj = PyString_FromStringAndSize(NULL, nbytes);
- if (packobj == NULL)
- goto bail;
-
- p = PyString_AS_STRING(packobj);
-
- pn = PySequence_ITEM(pl, 0);
- if (PyString_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
- PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
- goto bail;
- }
- memcpy(p, s, l);
- p += 20;
- pn = PySequence_ITEM(pl, 1);
- if (PyString_AsStringAndSize(pn, &s, &l) == -1 || l != 20) {
- PyErr_SetString(PyExc_TypeError, "expected a 20-byte hash");
- goto bail;
- }
- memcpy(p, s, l);
- p += 20;
-
- for (pos = 0; PyDict_Next(map, &pos, &k, &v); ) {
- uint32_t mode, size, mtime;
- Py_ssize_t len, l;
- PyObject *o;
- char *s, *t;
-
- if (!PyTuple_Check(v) || PyTuple_GET_SIZE(v) != 4) {
- PyErr_SetString(PyExc_TypeError, "expected a 4-tuple");
- goto bail;
- }
- o = PyTuple_GET_ITEM(v, 0);
- if (PyString_AsStringAndSize(o, &s, &l) == -1 || l != 1) {
- PyErr_SetString(PyExc_TypeError, "expected one byte");
- goto bail;
- }
- *p++ = *s;
- if (getintat(v, 1, &mode) == -1)
- goto bail;
- if (getintat(v, 2, &size) == -1)
- goto bail;
- if (getintat(v, 3, &mtime) == -1)
- goto bail;
- if (*s == 'n' && mtime == (uint32_t)now) {
- /* See dirstate.py:write for why we do this. */
- if (PyDict_SetItem(map, k, dirstate_unset) == -1)
- goto bail;
- mode = 0, size = -1, mtime = -1;
- }
- putbe32(mode, p);
- putbe32(size, p + 4);
- putbe32(mtime, p + 8);
- t = p + 12;
- p += 16;
- len = PyString_GET_SIZE(k);
- memcpy(p, PyString_AS_STRING(k), len);
- p += len;
- o = PyDict_GetItem(copymap, k);
- if (o) {
- *p++ = '\0';
- l = PyString_GET_SIZE(o);
- memcpy(p, PyString_AS_STRING(o), l);
- p += l;
- len += l + 1;
- }
- putbe32((uint32_t)len, t);
- }
-
- pos = p - PyString_AS_STRING(packobj);
- if (pos != nbytes) {
- PyErr_Format(PyExc_SystemError, "bad dirstate size: %ld != %ld",
- (long)pos, (long)nbytes);
- goto bail;
- }
+const char nullid[20];
+const int nullrev = -1;
- return packobj;
-bail:
- Py_XDECREF(packobj);
- return NULL;
-}
-
-/*
- * A base-16 trie for fast node->rev mapping.
- *
- * Positive value is index of the next node in the trie
- * Negative value is a leaf: -(rev + 1)
- * Zero is empty
- */
-typedef struct {
- int children[16];
-} nodetree;
-
-/*
- * This class has two behaviours.
- *
- * When used in a list-like way (with integer keys), we decode an
- * entry in a RevlogNG index file on demand. Our last entry is a
- * sentinel, always a nullid. We have limited support for
- * integer-keyed insert and delete, only at elements right before the
- * sentinel.
- *
- * With string keys, we lazily perform a reverse mapping from node to
- * rev, using a base-16 trie.
- */
-typedef struct {
- PyObject_HEAD
- /* Type-specific fields go here. */
- PyObject *data; /* raw bytes of index */
- PyObject **cache; /* cached tuples */
- const char **offsets; /* populated on demand */
- Py_ssize_t raw_length; /* original number of elements */
- Py_ssize_t length; /* current number of elements */
- PyObject *added; /* populated on demand */
- PyObject *headrevs; /* cache, invalidated on changes */
- nodetree *nt; /* base-16 trie */
- int ntlength; /* # nodes in use */
- int ntcapacity; /* # nodes allocated */
- int ntdepth; /* maximum depth of tree */
- int ntsplits; /* # splits performed */
- int ntrev; /* last rev scanned */
- int ntlookups; /* # lookups */
- int ntmisses; /* # lookups that miss the cache */
- int inlined;
-} indexObject;
-
-static Py_ssize_t index_length(const indexObject *self)
-{
- if (self->added == NULL)
- return self->length;
- return self->length + PyList_GET_SIZE(self->added);
-}
-
-static PyObject *nullentry;
-static const char nullid[20];
-
-static long inline_scan(indexObject *self, const char **offsets);
-
-#if LONG_MAX == 0x7fffffffL
-static char *tuple_format = "Kiiiiiis#";
-#else
-static char *tuple_format = "kiiiiiis#";
-#endif
-
-/* A RevlogNG v1 index entry is 64 bytes long. */
-static const long v1_hdrsize = 64;
-
-/*
- * Return a pointer to the beginning of a RevlogNG record.
- */
-static const char *index_deref(indexObject *self, Py_ssize_t pos)
-{
- if (self->inlined && pos > 0) {
- if (self->offsets == NULL) {
- self->offsets = malloc(self->raw_length *
- sizeof(*self->offsets));
- if (self->offsets == NULL)
- return (const char *)PyErr_NoMemory();
- inline_scan(self, self->offsets);
- }
- return self->offsets[pos];
- }
-
- return PyString_AS_STRING(self->data) + pos * v1_hdrsize;
-}
-
-/*
- * RevlogNG format (all in big endian, data may be inlined):
+/* RevlogNG format (all in big endian, data may be inlined):
* 6 bytes: offset
* 2 bytes: flags
* 4 bytes: compressed length
@@ -458,1076 +255,147 @@ static const char *index_deref(indexObject *self, Py_ssize_t pos)
* 4 bytes: parent 2 revision
* 32 bytes: nodeid (only 20 bytes used)
*/
-static PyObject *index_get(indexObject *self, Py_ssize_t pos)
+static int _parse_index_ng(const char *data, int size, int inlined,
+ PyObject *index)
{
+ PyObject *entry;
+ int n = 0, err;
uint64_t offset_flags;
int comp_len, uncomp_len, base_rev, link_rev, parent_1, parent_2;
const char *c_node_id;
- const char *data;
- Py_ssize_t length = index_length(self);
- PyObject *entry;
-
- if (pos < 0)
- pos += length;
-
- if (pos < 0 || pos >= length) {
- PyErr_SetString(PyExc_IndexError, "revlog index out of range");
- return NULL;
- }
-
- if (pos == length - 1) {
- Py_INCREF(nullentry);
- return nullentry;
- }
-
- if (pos >= self->length - 1) {
- PyObject *obj;
- obj = PyList_GET_ITEM(self->added, pos - self->length + 1);
- Py_INCREF(obj);
- return obj;
- }
-
- if (self->cache) {
- if (self->cache[pos]) {
- Py_INCREF(self->cache[pos]);
- return self->cache[pos];
+ const char *end = data + size;
+ uint32_t decode[8]; /* to enforce alignment with inline data */
+
+ while (data < end) {
+ unsigned int step;
+
+ memcpy(decode, data, 32);
+ offset_flags = ntohl(decode[1]);
+ if (n == 0) /* mask out version number for the first entry */
+ offset_flags &= 0xFFFF;
+ else {
+ uint32_t offset_high = ntohl(decode[0]);
+ offset_flags |= ((uint64_t)offset_high) << 32;
}
- } else {
- self->cache = calloc(self->raw_length, sizeof(PyObject *));
- if (self->cache == NULL)
- return PyErr_NoMemory();
- }
-
- data = index_deref(self, pos);
- if (data == NULL)
- return NULL;
-
- offset_flags = getbe32(data + 4);
- if (pos == 0) /* mask out version number for the first entry */
- offset_flags &= 0xFFFF;
- else {
- uint32_t offset_high = getbe32(data);
- offset_flags |= ((uint64_t)offset_high) << 32;
- }
- comp_len = getbe32(data + 8);
- uncomp_len = getbe32(data + 12);
- base_rev = getbe32(data + 16);
- link_rev = getbe32(data + 20);
- parent_1 = getbe32(data + 24);
- parent_2 = getbe32(data + 28);
- c_node_id = data + 32;
+ comp_len = ntohl(decode[2]);
+ uncomp_len = ntohl(decode[3]);
+ base_rev = ntohl(decode[4]);
+ link_rev = ntohl(decode[5]);
+ parent_1 = ntohl(decode[6]);
+ parent_2 = ntohl(decode[7]);
+ c_node_id = data + 32;
- entry = Py_BuildValue(tuple_format, offset_flags, comp_len,
+ entry = Py_BuildValue("Liiiiiis#", offset_flags, comp_len,
uncomp_len, base_rev, link_rev,
parent_1, parent_2, c_node_id, 20);
- if (entry)
- PyObject_GC_UnTrack(entry);
-
- self->cache[pos] = entry;
- Py_INCREF(entry);
-
- return entry;
-}
-
-/*
- * Return the 20-byte SHA of the node corresponding to the given rev.
- */
-static const char *index_node(indexObject *self, Py_ssize_t pos)
-{
- Py_ssize_t length = index_length(self);
- const char *data;
-
- if (pos == length - 1 || pos == INT_MAX)
- return nullid;
-
- if (pos >= length)
- return NULL;
-
- if (pos >= self->length - 1) {
- PyObject *tuple, *str;
- tuple = PyList_GET_ITEM(self->added, pos - self->length + 1);
- str = PyTuple_GetItem(tuple, 7);
- return str ? PyString_AS_STRING(str) : NULL;
- }
-
- data = index_deref(self, pos);
- return data ? data + 32 : NULL;
-}
-
-static int nt_insert(indexObject *self, const char *node, int rev);
-
-static int node_check(PyObject *obj, char **node, Py_ssize_t *nodelen)
-{
- if (PyString_AsStringAndSize(obj, node, nodelen) == -1)
- return -1;
- if (*nodelen == 20)
- return 0;
- PyErr_SetString(PyExc_ValueError, "20-byte hash required");
- return -1;
-}
-
-static PyObject *index_insert(indexObject *self, PyObject *args)
-{
- PyObject *obj;
- char *node;
- long offset;
- Py_ssize_t len, nodelen;
-
- if (!PyArg_ParseTuple(args, "lO", &offset, &obj))
- return NULL;
-
- if (!PyTuple_Check(obj) || PyTuple_GET_SIZE(obj) != 8) {
- PyErr_SetString(PyExc_TypeError, "8-tuple required");
- return NULL;
- }
-
- if (node_check(PyTuple_GET_ITEM(obj, 7), &node, &nodelen) == -1)
- return NULL;
-
- len = index_length(self);
-
- if (offset < 0)
- offset += len;
-
- if (offset != len - 1) {
- PyErr_SetString(PyExc_IndexError,
- "insert only supported at index -1");
- return NULL;
- }
-
- if (offset > INT_MAX) {
- PyErr_SetString(PyExc_ValueError,
- "currently only 2**31 revs supported");
- return NULL;
- }
-
- if (self->added == NULL) {
- self->added = PyList_New(0);
- if (self->added == NULL)
- return NULL;
- }
-
- if (PyList_Append(self->added, obj) == -1)
- return NULL;
-
- if (self->nt)
- nt_insert(self, node, (int)offset);
-
- Py_CLEAR(self->headrevs);
- Py_RETURN_NONE;
-}
-
-static void _index_clearcaches(indexObject *self)
-{
- if (self->cache) {
- Py_ssize_t i;
-
- for (i = 0; i < self->raw_length; i++)
- Py_CLEAR(self->cache[i]);
- free(self->cache);
- self->cache = NULL;
- }
- if (self->offsets) {
- free(self->offsets);
- self->offsets = NULL;
- }
- if (self->nt) {
- free(self->nt);
- self->nt = NULL;
- }
- Py_CLEAR(self->headrevs);
-}
-
-static PyObject *index_clearcaches(indexObject *self)
-{
- _index_clearcaches(self);
- self->ntlength = self->ntcapacity = 0;
- self->ntdepth = self->ntsplits = 0;
- self->ntrev = -1;
- self->ntlookups = self->ntmisses = 0;
- Py_RETURN_NONE;
-}
-
-static PyObject *index_stats(indexObject *self)
-{
- PyObject *obj = PyDict_New();
-
- if (obj == NULL)
- return NULL;
-
-#define istat(__n, __d) \
- if (PyDict_SetItemString(obj, __d, PyInt_FromSsize_t(self->__n)) == -1) \
- goto bail;
-
- if (self->added) {
- Py_ssize_t len = PyList_GET_SIZE(self->added);
- if (PyDict_SetItemString(obj, "index entries added",
- PyInt_FromSsize_t(len)) == -1)
- goto bail;
- }
-
- if (self->raw_length != self->length - 1)
- istat(raw_length, "revs on disk");
- istat(length, "revs in memory");
- istat(ntcapacity, "node trie capacity");
- istat(ntdepth, "node trie depth");
- istat(ntlength, "node trie count");
- istat(ntlookups, "node trie lookups");
- istat(ntmisses, "node trie misses");
- istat(ntrev, "node trie last rev scanned");
- istat(ntsplits, "node trie splits");
-
-#undef istat
-
- return obj;
-
-bail:
- Py_XDECREF(obj);
- return NULL;
-}
-
-/*
- * When we cache a list, we want to be sure the caller can't mutate
- * the cached copy.
- */
-static PyObject *list_copy(PyObject *list)
-{
- Py_ssize_t len = PyList_GET_SIZE(list);
- PyObject *newlist = PyList_New(len);
- Py_ssize_t i;
-
- if (newlist == NULL)
- return NULL;
-
- for (i = 0; i < len; i++) {
- PyObject *obj = PyList_GET_ITEM(list, i);
- Py_INCREF(obj);
- PyList_SET_ITEM(newlist, i, obj);
- }
-
- return newlist;
-}
-
-static PyObject *index_headrevs(indexObject *self)
-{
- Py_ssize_t i, len, addlen;
- char *nothead = NULL;
- PyObject *heads;
-
- if (self->headrevs)
- return list_copy(self->headrevs);
-
- len = index_length(self) - 1;
- heads = PyList_New(0);
- if (heads == NULL)
- goto bail;
- if (len == 0) {
- PyObject *nullid = PyInt_FromLong(-1);
- if (nullid == NULL || PyList_Append(heads, nullid) == -1) {
- Py_XDECREF(nullid);
- goto bail;
- }
- goto done;
- }
-
- nothead = calloc(len, 1);
- if (nothead == NULL)
- goto bail;
-
- for (i = 0; i < self->raw_length; i++) {
- const char *data = index_deref(self, i);
- int parent_1 = getbe32(data + 24);
- int parent_2 = getbe32(data + 28);
- if (parent_1 >= 0)
- nothead[parent_1] = 1;
- if (parent_2 >= 0)
- nothead[parent_2] = 1;
- }
-
- addlen = self->added ? PyList_GET_SIZE(self->added) : 0;
-
- for (i = 0; i < addlen; i++) {
- PyObject *rev = PyList_GET_ITEM(self->added, i);
- PyObject *p1 = PyTuple_GET_ITEM(rev, 5);
- PyObject *p2 = PyTuple_GET_ITEM(rev, 6);
- long parent_1, parent_2;
-
- if (!PyInt_Check(p1) || !PyInt_Check(p2)) {
- PyErr_SetString(PyExc_TypeError,
- "revlog parents are invalid");
- goto bail;
- }
- parent_1 = PyInt_AS_LONG(p1);
- parent_2 = PyInt_AS_LONG(p2);
- if (parent_1 >= 0)
- nothead[parent_1] = 1;
- if (parent_2 >= 0)
- nothead[parent_2] = 1;
- }
-
- for (i = 0; i < len; i++) {
- PyObject *head;
-
- if (nothead[i])
- continue;
- head = PyInt_FromLong(i);
- if (head == NULL || PyList_Append(heads, head) == -1) {
- Py_XDECREF(head);
- goto bail;
- }
- }
-
-done:
- self->headrevs = heads;
- free(nothead);
- return list_copy(self->headrevs);
-bail:
- Py_XDECREF(heads);
- free(nothead);
- return NULL;
-}
-
-static inline int nt_level(const char *node, Py_ssize_t level)
-{
- int v = node[level>>1];
- if (!(level & 1))
- v >>= 4;
- return v & 0xf;
-}
-
-/*
- * Return values:
- *
- * -4: match is ambiguous (multiple candidates)
- * -2: not found
- * rest: valid rev
- */
-static int nt_find(indexObject *self, const char *node, Py_ssize_t nodelen,
- int hex)
-{
- int (*getnybble)(const char *, Py_ssize_t) = hex ? hexdigit : nt_level;
- int level, maxlevel, off;
-
- if (nodelen == 20 && node[0] == '\0' && memcmp(node, nullid, 20) == 0)
- return -1;
-
- if (self->nt == NULL)
- return -2;
-
- if (hex)
- maxlevel = nodelen > 40 ? 40 : (int)nodelen;
- else
- maxlevel = nodelen > 20 ? 40 : ((int)nodelen * 2);
-
- for (level = off = 0; level < maxlevel; level++) {
- int k = getnybble(node, level);
- nodetree *n = &self->nt[off];
- int v = n->children[k];
-
- if (v < 0) {
- const char *n;
- Py_ssize_t i;
-
- v = -v - 1;
- n = index_node(self, v);
- if (n == NULL)
- return -2;
- for (i = level; i < maxlevel; i++)
- if (getnybble(node, i) != nt_level(n, i))
- return -2;
- return v;
- }
- if (v == 0)
- return -2;
- off = v;
- }
- /* multiple matches against an ambiguous prefix */
- return -4;
-}
-
-static int nt_new(indexObject *self)
-{
- if (self->ntlength == self->ntcapacity) {
- self->ntcapacity *= 2;
- self->nt = realloc(self->nt,
- self->ntcapacity * sizeof(nodetree));
- if (self->nt == NULL) {
- PyErr_SetString(PyExc_MemoryError, "out of memory");
- return -1;
- }
- memset(&self->nt[self->ntlength], 0,
- sizeof(nodetree) * (self->ntcapacity - self->ntlength));
- }
- return self->ntlength++;
-}
-
-static int nt_insert(indexObject *self, const char *node, int rev)
-{
- int level = 0;
- int off = 0;
-
- while (level < 40) {
- int k = nt_level(node, level);
- nodetree *n;
- int v;
-
- n = &self->nt[off];
- v = n->children[k];
-
- if (v == 0) {
- n->children[k] = -rev - 1;
+ if (!entry)
return 0;
- }
- if (v < 0) {
- const char *oldnode = index_node(self, -v - 1);
- int noff;
-
- if (!oldnode || !memcmp(oldnode, node, 20)) {
- n->children[k] = -rev - 1;
- return 0;
- }
- noff = nt_new(self);
- if (noff == -1)
- return -1;
- /* self->nt may have been changed by realloc */
- self->nt[off].children[k] = noff;
- off = noff;
- n = &self->nt[off];
- n->children[nt_level(oldnode, ++level)] = v;
- if (level > self->ntdepth)
- self->ntdepth = level;
- self->ntsplits += 1;
- } else {
- level += 1;
- off = v;
- }
- }
-
- return -1;
-}
-
-static int nt_init(indexObject *self)
-{
- if (self->nt == NULL) {
- self->ntcapacity = self->raw_length < 4
- ? 4 : self->raw_length / 2;
- self->nt = calloc(self->ntcapacity, sizeof(nodetree));
- if (self->nt == NULL) {
- PyErr_NoMemory();
- return -1;
- }
- self->ntlength = 1;
- self->ntrev = (int)index_length(self) - 1;
- self->ntlookups = 1;
- self->ntmisses = 0;
- if (nt_insert(self, nullid, INT_MAX) == -1)
- return -1;
- }
- return 0;
-}
-
-/*
- * Return values:
- *
- * -3: error (exception set)
- * -2: not found (no exception set)
- * rest: valid rev
- */
-static int index_find_node(indexObject *self,
- const char *node, Py_ssize_t nodelen)
-{
- int rev;
-
- self->ntlookups++;
- rev = nt_find(self, node, nodelen, 0);
- if (rev >= -1)
- return rev;
-
- if (nt_init(self) == -1)
- return -3;
-
- /*
- * For the first handful of lookups, we scan the entire index,
- * and cache only the matching nodes. This optimizes for cases
- * like "hg tip", where only a few nodes are accessed.
- *
- * After that, we cache every node we visit, using a single
- * scan amortized over multiple lookups. This gives the best
- * bulk performance, e.g. for "hg log".
- */
- if (self->ntmisses++ < 4) {
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL)
- return -2;
- if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
- if (nt_insert(self, n, rev) == -1)
- return -3;
- break;
- }
- }
- } else {
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL) {
- self->ntrev = rev + 1;
- return -2;
- }
- if (nt_insert(self, n, rev) == -1) {
- self->ntrev = rev + 1;
- return -3;
- }
- if (memcmp(node, n, nodelen > 20 ? 20 : nodelen) == 0) {
- break;
- }
- }
- self->ntrev = rev;
- }
-
- if (rev >= 0)
- return rev;
- return -2;
-}
-
-static PyObject *raise_revlog_error(void)
-{
- static PyObject *errclass;
- PyObject *mod = NULL, *errobj;
-
- if (errclass == NULL) {
- PyObject *dict;
-
- mod = PyImport_ImportModule("mercurial.error");
- if (mod == NULL)
- goto classfail;
-
- dict = PyModule_GetDict(mod);
- if (dict == NULL)
- goto classfail;
-
- errclass = PyDict_GetItemString(dict, "RevlogError");
- if (errclass == NULL) {
- PyErr_SetString(PyExc_SystemError,
- "could not find RevlogError");
- goto classfail;
- }
- Py_INCREF(errclass);
- }
-
- errobj = PyObject_CallFunction(errclass, NULL);
- if (errobj == NULL)
- return NULL;
- PyErr_SetObject(errclass, errobj);
- return errobj;
-
-classfail:
- Py_XDECREF(mod);
- return NULL;
-}
-
-static PyObject *index_getitem(indexObject *self, PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
- int rev;
-
- if (PyInt_Check(value))
- return index_get(self, PyInt_AS_LONG(value));
-
- if (node_check(value, &node, &nodelen) == -1)
- return NULL;
- rev = index_find_node(self, node, nodelen);
- if (rev >= -1)
- return PyInt_FromLong(rev);
- if (rev == -2)
- raise_revlog_error();
- return NULL;
-}
-
-static int nt_partialmatch(indexObject *self, const char *node,
- Py_ssize_t nodelen)
-{
- int rev;
-
- if (nt_init(self) == -1)
- return -3;
-
- if (self->ntrev > 0) {
- /* ensure that the radix tree is fully populated */
- for (rev = self->ntrev - 1; rev >= 0; rev--) {
- const char *n = index_node(self, rev);
- if (n == NULL)
- return -2;
- if (nt_insert(self, n, rev) == -1)
- return -3;
- }
- self->ntrev = rev;
- }
-
- return nt_find(self, node, nodelen, 1);
-}
-
-static PyObject *index_partialmatch(indexObject *self, PyObject *args)
-{
- const char *fullnode;
- int nodelen;
- char *node;
- int rev, i;
-
- if (!PyArg_ParseTuple(args, "s#", &node, &nodelen))
- return NULL;
-
- if (nodelen < 4) {
- PyErr_SetString(PyExc_ValueError, "key too short");
- return NULL;
- }
-
- if (nodelen > 40)
- nodelen = 40;
-
- for (i = 0; i < nodelen; i++)
- hexdigit(node, i);
- if (PyErr_Occurred()) {
- /* input contains non-hex characters */
- PyErr_Clear();
- Py_RETURN_NONE;
- }
-
- rev = nt_partialmatch(self, node, nodelen);
-
- switch (rev) {
- case -4:
- raise_revlog_error();
- case -3:
- return NULL;
- case -2:
- Py_RETURN_NONE;
- case -1:
- return PyString_FromStringAndSize(nullid, 20);
- }
-
- fullnode = index_node(self, rev);
- if (fullnode == NULL) {
- PyErr_Format(PyExc_IndexError,
- "could not access rev %d", rev);
- return NULL;
- }
- return PyString_FromStringAndSize(fullnode, 20);
-}
-
-static PyObject *index_m_get(indexObject *self, PyObject *args)
-{
- Py_ssize_t nodelen;
- PyObject *val;
- char *node;
- int rev;
-
- if (!PyArg_ParseTuple(args, "O", &val))
- return NULL;
- if (node_check(val, &node, &nodelen) == -1)
- return NULL;
- rev = index_find_node(self, node, nodelen);
- if (rev == -3)
- return NULL;
- if (rev == -2)
- Py_RETURN_NONE;
- return PyInt_FromLong(rev);
-}
-
-static int index_contains(indexObject *self, PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
-
- if (PyInt_Check(value)) {
- long rev = PyInt_AS_LONG(value);
- return rev >= -1 && rev < index_length(self);
- }
-
- if (node_check(value, &node, &nodelen) == -1)
- return -1;
- switch (index_find_node(self, node, nodelen)) {
- case -3:
- return -1;
- case -2:
- return 0;
- default:
- return 1;
- }
-}
-
-/*
- * Invalidate any trie entries introduced by added revs.
- */
-static void nt_invalidate_added(indexObject *self, Py_ssize_t start)
-{
- Py_ssize_t i, len = PyList_GET_SIZE(self->added);
-
- for (i = start; i < len; i++) {
- PyObject *tuple = PyList_GET_ITEM(self->added, i);
- PyObject *node = PyTuple_GET_ITEM(tuple, 7);
-
- nt_insert(self, PyString_AS_STRING(node), -1);
- }
-
- if (start == 0)
- Py_CLEAR(self->added);
-}
-
-/*
- * Delete a numeric range of revs, which must be at the end of the
- * range, but exclude the sentinel nullid entry.
- */
-static int index_slice_del(indexObject *self, PyObject *item)
-{
- Py_ssize_t start, stop, step, slicelength;
- Py_ssize_t length = index_length(self);
- int ret = 0;
-
- if (PySlice_GetIndicesEx((PySliceObject*)item, length,
- &start, &stop, &step, &slicelength) < 0)
- return -1;
-
- if (slicelength <= 0)
- return 0;
-
- if ((step < 0 && start < stop) || (step > 0 && start > stop))
- stop = start;
-
- if (step < 0) {
- stop = start + 1;
- start = stop + step*(slicelength - 1) - 1;
- step = -step;
- }
-
- if (step != 1) {
- PyErr_SetString(PyExc_ValueError,
- "revlog index delete requires step size of 1");
- return -1;
- }
-
- if (stop != length - 1) {
- PyErr_SetString(PyExc_IndexError,
- "revlog index deletion indices are invalid");
- return -1;
- }
-
- if (start < self->length - 1) {
- if (self->nt) {
- Py_ssize_t i;
-
- for (i = start + 1; i < self->length - 1; i++) {
- const char *node = index_node(self, i);
-
- if (node)
- nt_insert(self, node, -1);
- }
- if (self->added)
- nt_invalidate_added(self, 0);
- if (self->ntrev > start)
- self->ntrev = (int)start;
- }
- self->length = start + 1;
- if (start < self->raw_length)
- self->raw_length = start;
- goto done;
- }
-
- if (self->nt) {
- nt_invalidate_added(self, start - self->length + 1);
- if (self->ntrev > start)
- self->ntrev = (int)start;
- }
- if (self->added)
- ret = PyList_SetSlice(self->added, start - self->length + 1,
- PyList_GET_SIZE(self->added), NULL);
-done:
- Py_CLEAR(self->headrevs);
- return ret;
-}
-
-/*
- * Supported ops:
- *
- * slice deletion
- * string assignment (extend node->rev mapping)
- * string deletion (shrink node->rev mapping)
- */
-static int index_assign_subscript(indexObject *self, PyObject *item,
- PyObject *value)
-{
- char *node;
- Py_ssize_t nodelen;
- long rev;
-
- if (PySlice_Check(item) && value == NULL)
- return index_slice_del(self, item);
-
- if (node_check(item, &node, &nodelen) == -1)
- return -1;
+ PyObject_GC_UnTrack(entry); /* don't waste time with this */
- if (value == NULL)
- return self->nt ? nt_insert(self, node, -1) : 0;
- rev = PyInt_AsLong(value);
- if (rev > INT_MAX || rev < 0) {
- if (!PyErr_Occurred())
- PyErr_SetString(PyExc_ValueError, "rev out of range");
- return -1;
- }
- return nt_insert(self, node, (int)rev);
-}
+ if (inlined) {
+ err = PyList_Append(index, entry);
+ Py_DECREF(entry);
+ if (err)
+ return 0;
+ } else
+ PyList_SET_ITEM(index, n, entry); /* steals reference */
-/*
- * Find all RevlogNG entries in an index that has inline data. Update
- * the optional "offsets" table with those entries.
- */
-static long inline_scan(indexObject *self, const char **offsets)
-{
- const char *data = PyString_AS_STRING(self->data);
- const char *end = data + PyString_GET_SIZE(self->data);
- long incr = v1_hdrsize;
- Py_ssize_t len = 0;
-
- while (data + v1_hdrsize <= end) {
- uint32_t comp_len;
- const char *old_data;
- /* 3rd element of header is length of compressed inline data */
- comp_len = getbe32(data + 8);
- incr = v1_hdrsize + comp_len;
- if (incr < v1_hdrsize)
- break;
- if (offsets)
- offsets[len] = data;
- len++;
- old_data = data;
- data += incr;
- if (data <= old_data)
+ n++;
+ step = 64 + (inlined ? comp_len : 0);
+ if (data + step > end || data + step < data)
break;
+ data += step;
}
-
- if (data != end && data + v1_hdrsize != end) {
+ if (data != end) {
if (!PyErr_Occurred())
PyErr_SetString(PyExc_ValueError, "corrupt index file");
- return -1;
+ return 0;
}
- return len;
-}
+ /* create the magic nullid entry in the index at [-1] */
+ entry = Py_BuildValue("Liiiiiis#", (uint64_t)0, 0, 0, -1, -1, -1, -1, nullid, 20);
-static int index_init(indexObject *self, PyObject *args)
-{
- PyObject *data_obj, *inlined_obj;
- Py_ssize_t size;
-
- if (!PyArg_ParseTuple(args, "OO", &data_obj, &inlined_obj))
- return -1;
- if (!PyString_Check(data_obj)) {
- PyErr_SetString(PyExc_TypeError, "data is not a string");
- return -1;
- }
- size = PyString_GET_SIZE(data_obj);
-
- self->inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
- self->data = data_obj;
- self->cache = NULL;
-
- self->added = NULL;
- self->headrevs = NULL;
- self->offsets = NULL;
- self->nt = NULL;
- self->ntlength = self->ntcapacity = 0;
- self->ntdepth = self->ntsplits = 0;
- self->ntlookups = self->ntmisses = 0;
- self->ntrev = -1;
- Py_INCREF(self->data);
-
- if (self->inlined) {
- long len = inline_scan(self, NULL);
- if (len == -1)
- goto bail;
- self->raw_length = len;
- self->length = len + 1;
- } else {
- if (size % v1_hdrsize) {
- PyErr_SetString(PyExc_ValueError, "corrupt index file");
- goto bail;
- }
- self->raw_length = size / v1_hdrsize;
- self->length = self->raw_length + 1;
- }
+ if (!entry)
+ return 0;
- return 0;
-bail:
- return -1;
-}
+ PyObject_GC_UnTrack(entry); /* don't waste time with this */
-static PyObject *index_nodemap(indexObject *self)
-{
- Py_INCREF(self);
- return (PyObject *)self;
-}
+ if (inlined) {
+ err = PyList_Append(index, entry);
+ Py_DECREF(entry);
+ if (err)
+ return 0;
+ } else
+ PyList_SET_ITEM(index, n, entry); /* steals reference */
-static void index_dealloc(indexObject *self)
-{
- _index_clearcaches(self);
- Py_DECREF(self->data);
- Py_XDECREF(self->added);
- PyObject_Del(self);
+ return 1;
}
-static PySequenceMethods index_sequence_methods = {
- (lenfunc)index_length, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- (ssizeargfunc)index_get, /* sq_item */
- 0, /* sq_slice */
- 0, /* sq_ass_item */
- 0, /* sq_ass_slice */
- (objobjproc)index_contains, /* sq_contains */
-};
-
-static PyMappingMethods index_mapping_methods = {
- (lenfunc)index_length, /* mp_length */
- (binaryfunc)index_getitem, /* mp_subscript */
- (objobjargproc)index_assign_subscript, /* mp_ass_subscript */
-};
-
-static PyMethodDef index_methods[] = {
- {"clearcaches", (PyCFunction)index_clearcaches, METH_NOARGS,
- "clear the index caches"},
- {"get", (PyCFunction)index_m_get, METH_VARARGS,
- "get an index entry"},
- {"headrevs", (PyCFunction)index_headrevs, METH_NOARGS,
- "get head revisions"},
- {"insert", (PyCFunction)index_insert, METH_VARARGS,
- "insert an index entry"},
- {"partialmatch", (PyCFunction)index_partialmatch, METH_VARARGS,
- "match a potentially ambiguous node ID"},
- {"stats", (PyCFunction)index_stats, METH_NOARGS,
- "stats for the index"},
- {NULL} /* Sentinel */
-};
-
-static PyGetSetDef index_getset[] = {
- {"nodemap", (getter)index_nodemap, NULL, "nodemap", NULL},
- {NULL} /* Sentinel */
-};
-
-static PyTypeObject indexType = {
- PyObject_HEAD_INIT(NULL)
- 0, /* ob_size */
- "parsers.index", /* tp_name */
- sizeof(indexObject), /* tp_basicsize */
- 0, /* tp_itemsize */
- (destructor)index_dealloc, /* tp_dealloc */
- 0, /* tp_print */
- 0, /* tp_getattr */
- 0, /* tp_setattr */
- 0, /* tp_compare */
- 0, /* tp_repr */
- 0, /* tp_as_number */
- &index_sequence_methods, /* tp_as_sequence */
- &index_mapping_methods, /* tp_as_mapping */
- 0, /* tp_hash */
- 0, /* tp_call */
- 0, /* tp_str */
- 0, /* tp_getattro */
- 0, /* tp_setattro */
- 0, /* tp_as_buffer */
- Py_TPFLAGS_DEFAULT, /* tp_flags */
- "revlog index", /* tp_doc */
- 0, /* tp_traverse */
- 0, /* tp_clear */
- 0, /* tp_richcompare */
- 0, /* tp_weaklistoffset */
- 0, /* tp_iter */
- 0, /* tp_iternext */
- index_methods, /* tp_methods */
- 0, /* tp_members */
- index_getset, /* tp_getset */
- 0, /* tp_base */
- 0, /* tp_dict */
- 0, /* tp_descr_get */
- 0, /* tp_descr_set */
- 0, /* tp_dictoffset */
- (initproc)index_init, /* tp_init */
- 0, /* tp_alloc */
-};
-
-/*
- * returns a tuple of the form (index, index, cache) with elements as
- * follows:
+/* This function parses a index file and returns a Python tuple of the
+ * following format: (index, cache)
*
- * index: an index object that lazily parses RevlogNG records
- * cache: if data is inlined, a tuple (index_file_content, 0), else None
- *
- * added complications are for backwards compatibility
+ * index: a list of tuples containing the RevlogNG records
+ * cache: if data is inlined, a tuple (index_file_content, 0) else None
*/
static PyObject *parse_index2(PyObject *self, PyObject *args)
{
- PyObject *tuple = NULL, *cache = NULL;
- indexObject *idx;
- int ret;
+ const char *data;
+ int size, inlined;
+ PyObject *rval = NULL, *index = NULL, *cache = NULL;
+ PyObject *data_obj = NULL, *inlined_obj;
- idx = PyObject_New(indexObject, &indexType);
- if (idx == NULL)
- goto bail;
+ if (!PyArg_ParseTuple(args, "s#O", &data, &size, &inlined_obj))
+ return NULL;
+ inlined = inlined_obj && PyObject_IsTrue(inlined_obj);
- ret = index_init(idx, args);
- if (ret == -1)
- goto bail;
+ /* If no data is inlined, we know the size of the index list in
+ * advance: size divided by the size of one revlog record (64 bytes)
+ * plus one for nullid */
+ index = inlined ? PyList_New(0) : PyList_New(size / 64 + 1);
+ if (!index)
+ goto quit;
- if (idx->inlined) {
- cache = Py_BuildValue("iO", 0, idx->data);
- if (cache == NULL)
- goto bail;
+ /* set up the cache return value */
+ if (inlined) {
+ /* Note that the reference to data_obj is only borrowed */
+ data_obj = PyTuple_GET_ITEM(args, 0);
+ cache = Py_BuildValue("iO", 0, data_obj);
+ if (!cache)
+ goto quit;
} else {
cache = Py_None;
- Py_INCREF(cache);
+ Py_INCREF(Py_None);
}
- tuple = Py_BuildValue("NN", idx, cache);
- if (!tuple)
- goto bail;
- return tuple;
+ /* actually populate the index with data */
+ if (!_parse_index_ng(data, size, inlined, index))
+ goto quit;
+
+ rval = Py_BuildValue("NN", index, cache);
+ if (!rval)
+ goto quit;
+ return rval;
-bail:
- Py_XDECREF(idx);
+quit:
+ Py_XDECREF(index);
Py_XDECREF(cache);
- Py_XDECREF(tuple);
+ Py_XDECREF(rval);
return NULL;
}
+
static char parsers_doc[] = "Efficient content parsing.";
static PyMethodDef methods[] = {
- {"pack_dirstate", pack_dirstate, METH_VARARGS, "pack a dirstate\n"},
{"parse_manifest", parse_manifest, METH_VARARGS, "parse a manifest\n"},
{"parse_dirstate", parse_dirstate, METH_VARARGS, "parse a dirstate\n"},
{"parse_index2", parse_index2, METH_VARARGS, "parse a revlog index\n"},
{NULL, NULL}
};
-static void module_init(PyObject *mod)
-{
- indexType.tp_new = PyType_GenericNew;
- if (PyType_Ready(&indexType) < 0)
- return;
- Py_INCREF(&indexType);
-
- PyModule_AddObject(mod, "index", (PyObject *)&indexType);
-
- nullentry = Py_BuildValue("iiiiiiis#", 0, 0, 0,
- -1, -1, -1, -1, nullid, 20);
- if (nullentry)
- PyObject_GC_UnTrack(nullentry);
-
- dirstate_unset = Py_BuildValue("ciii", 'n', 0, -1, -1);
-}
-
#ifdef IS_PY3K
static struct PyModuleDef parsers_module = {
PyModuleDef_HEAD_INIT,
@@ -1539,14 +407,12 @@ static struct PyModuleDef parsers_module = {
PyMODINIT_FUNC PyInit_parsers(void)
{
- PyObject *mod = PyModule_Create(&parsers_module);
- module_init(mod);
- return mod;
+ return PyModule_Create(&parsers_module);
}
#else
PyMODINIT_FUNC initparsers(void)
{
- PyObject *mod = Py_InitModule3("parsers", methods, parsers_doc);
- module_init(mod);
+ Py_InitModule3("parsers", methods, parsers_doc);
}
#endif
+
diff --git a/mercurial/patch.py b/mercurial/patch.py
index b216734..6c224ee 100644
--- a/mercurial/patch.py
+++ b/mercurial/patch.py
@@ -126,7 +126,7 @@ def split(stream):
mimeheaders = ['content-type']
- if not util.safehasattr(stream, 'next'):
+ if not hasattr(stream, 'next'):
# http responses, for example, have readline but not next
stream = fiter(stream)
@@ -230,7 +230,7 @@ def extract(ui, fileobj):
elif line.startswith("# Node ID "):
nodeid = line[10:]
elif line.startswith("# Parent "):
- parents.append(line[9:].lstrip())
+ parents.append(line[10:])
elif not line.startswith("# "):
hgpatchheader = False
elif line == '---' and gitsendmail:
@@ -245,7 +245,7 @@ def extract(ui, fileobj):
tmpfp.write('\n')
elif not diffs_seen and message and content_type == 'text/plain':
message += '\n' + payload
- except: # re-raises
+ except:
tmpfp.close()
os.unlink(tmpname)
raise
@@ -290,19 +290,6 @@ class patchmeta(object):
other.binary = self.binary
return other
- def _ispatchinga(self, afile):
- if afile == '/dev/null':
- return self.op == 'ADD'
- return afile == 'a/' + (self.oldpath or self.path)
-
- def _ispatchingb(self, bfile):
- if bfile == '/dev/null':
- return self.op == 'DELETE'
- return bfile == 'b/' + self.path
-
- def ispatching(self, afile, bfile):
- return self._ispatchinga(afile) and self._ispatchingb(bfile)
-
def __repr__(self):
return "<patchmeta %s %r>" % (self.op, self.path)
@@ -488,15 +475,9 @@ class workingbackend(fsbackend):
addremoved = set(self.changed)
for src, dst in self.copied:
scmutil.dirstatecopy(self.ui, self.repo, wctx, src, dst)
- if self.removed:
+ addremoved.discard(src)
+ if (not self.similarity) and self.removed:
wctx.forget(sorted(self.removed))
- for f in self.removed:
- if f not in self.repo.dirstate:
- # File was deleted and no longer belongs to the
- # dirstate, it was probably marked added then
- # deleted, and should not be considered by
- # addremove().
- addremoved.discard(f)
if addremoved:
cwd = self.repo.getcwd()
if cwd:
@@ -534,7 +515,7 @@ class filestore(object):
if fname in self.data:
return self.data[fname]
if not self.opener or fname not in self.files:
- raise IOError
+ raise IOError()
fn, mode, copied = self.files[fname]
return self.opener.read(fn), mode, copied
@@ -560,7 +541,7 @@ class repobackend(abstractbackend):
try:
fctx = self.ctx[fname]
except error.LookupError:
- raise IOError
+ raise IOError()
flags = fctx.flags()
return fctx.data(), ('l' in flags, 'x' in flags)
@@ -585,8 +566,8 @@ class repobackend(abstractbackend):
return self.changed | self.removed
# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1
-unidesc = re.compile('@@ -(\d+)(?:,(\d+))? \+(\d+)(?:,(\d+))? @@')
-contextdesc = re.compile('(?:---|\*\*\*) (\d+)(?:,(\d+))? (?:---|\*\*\*)')
+unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@')
+contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)')
eolmodes = ['strict', 'crlf', 'lf', 'auto']
class patchfile(object):
@@ -634,7 +615,7 @@ class patchfile(object):
if self.mode is None:
self.mode = (False, False)
if self.missing:
- self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
+ self.ui.warn(_("unable to find '%s' for patching\n") % self.fname)
self.hash = {}
self.dirty = 0
@@ -741,19 +722,22 @@ class patchfile(object):
h = h.getnormalized()
# fast case first, no offsets, no fuzz
- old, oldstart, new, newstart = h.fuzzit(0, False)
- oldstart += self.offset
- orig_start = oldstart
+ old = h.old()
+ # patch starts counting at 1 unless we are adding the file
+ if h.starta == 0:
+ start = 0
+ else:
+ start = h.starta + self.offset - 1
+ orig_start = start
# if there's skew we want to emit the "(offset %d lines)" even
# when the hunk cleanly applies at start + skew, so skip the
# fast case code
- if (self.skew == 0 and
- diffhelpers.testhunk(old, self.lines, oldstart) == 0):
+ if self.skew == 0 and diffhelpers.testhunk(old, self.lines, start) == 0:
if self.remove:
self.backend.unlink(self.fname)
else:
- self.lines[oldstart:oldstart + len(old)] = new
- self.offset += len(new) - len(old)
+ self.lines[start : start + h.lena] = h.new()
+ self.offset += h.lenb - h.lena
self.dirty = True
return 0
@@ -761,23 +745,23 @@ class patchfile(object):
self.hash = {}
for x, s in enumerate(self.lines):
self.hash.setdefault(s, []).append(x)
+ if h.hunk[-1][0] != ' ':
+ # if the hunk tried to put something at the bottom of the file
+ # override the start line and use eof here
+ search_start = len(self.lines)
+ else:
+ search_start = orig_start + self.skew
for fuzzlen in xrange(3):
for toponly in [True, False]:
- old, oldstart, new, newstart = h.fuzzit(fuzzlen, toponly)
- oldstart = oldstart + self.offset + self.skew
- oldstart = min(oldstart, len(self.lines))
- if old:
- cand = self.findlines(old[0][1:], oldstart)
- else:
- # Only adding lines with no or fuzzed context, just
- # take the skew in account
- cand = [oldstart]
+ old = h.old(fuzzlen, toponly)
+ cand = self.findlines(old[0][1:], search_start)
for l in cand:
- if not old or diffhelpers.testhunk(old, self.lines, l) == 0:
- self.lines[l : l + len(old)] = new
- self.offset += len(new) - len(old)
+ if diffhelpers.testhunk(old, self.lines, l) == 0:
+ newlines = h.new(fuzzlen, toponly)
+ self.lines[l : l + len(old)] = newlines
+ self.offset += len(newlines) - len(old)
self.skew = l - orig_start
self.dirty = True
offset = l - orig_start - fuzzlen
@@ -847,7 +831,7 @@ class hunk(object):
m = unidesc.match(self.desc)
if not m:
raise PatchError(_("bad hunk #%d") % self.number)
- self.starta, self.lena, self.startb, self.lenb = m.groups()
+ self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups()
if self.lena is None:
self.lena = 1
else:
@@ -858,8 +842,7 @@ class hunk(object):
self.lenb = int(self.lenb)
self.starta = int(self.starta)
self.startb = int(self.startb)
- diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a,
- self.b)
+ diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b)
# if we hit eof before finishing out the hunk, the last line will
# be zero length. Lets try to fix it up.
while len(self.hunk[-1]) == 0:
@@ -875,7 +858,7 @@ class hunk(object):
m = contextdesc.match(self.desc)
if not m:
raise PatchError(_("bad hunk #%d") % self.number)
- self.starta, aend = m.groups()
+ foo, self.starta, foo2, aend, foo3 = m.groups()
self.starta = int(self.starta)
if aend is None:
aend = self.starta
@@ -908,7 +891,7 @@ class hunk(object):
m = contextdesc.match(l)
if not m:
raise PatchError(_("bad hunk #%d") % self.number)
- self.startb, bend = m.groups()
+ foo, self.startb, foo2, bend, foo3 = m.groups()
self.startb = int(self.startb)
if bend is None:
bend = self.startb
@@ -983,11 +966,11 @@ class hunk(object):
def complete(self):
return len(self.a) == self.lena and len(self.b) == self.lenb
- def _fuzzit(self, old, new, fuzz, toponly):
+ def fuzzit(self, l, fuzz, toponly):
# this removes context lines from the top and bottom of list 'l'. It
# checks the hunk to make sure only context lines are removed, and then
# returns a new shortened list of lines.
- fuzz = min(fuzz, len(old))
+ fuzz = min(fuzz, len(l)-1)
if fuzz:
top = 0
bot = 0
@@ -1005,28 +988,32 @@ class hunk(object):
else:
break
- bot = min(fuzz, bot)
- top = min(fuzz, top)
- return old[top:len(old)-bot], new[top:len(new)-bot], top
- return old, new, 0
-
- def fuzzit(self, fuzz, toponly):
- old, new, top = self._fuzzit(self.a, self.b, fuzz, toponly)
- oldstart = self.starta + top
- newstart = self.startb + top
- # zero length hunk ranges already have their start decremented
- if self.lena and oldstart > 0:
- oldstart -= 1
- if self.lenb and newstart > 0:
- newstart -= 1
- return old, oldstart, new, newstart
+ # top and bot now count context in the hunk
+ # adjust them if either one is short
+ context = max(top, bot, 3)
+ if bot < context:
+ bot = max(0, fuzz - (context - bot))
+ else:
+ bot = min(fuzz, bot)
+ if top < context:
+ top = max(0, fuzz - (context - top))
+ else:
+ top = min(fuzz, top)
+
+ return l[top:len(l)-bot]
+ return l
+
+ def old(self, fuzz=0, toponly=False):
+ return self.fuzzit(self.a, fuzz, toponly)
+
+ def new(self, fuzz=0, toponly=False):
+ return self.fuzzit(self.b, fuzz, toponly)
class binhunk(object):
'A binary patch file. Only understands literals so far.'
- def __init__(self, lr, fname):
+ def __init__(self, lr):
self.text = None
self.hunk = ['GIT binary patch\n']
- self._fname = fname
self._read(lr)
def complete(self):
@@ -1036,37 +1023,30 @@ class binhunk(object):
return [self.text]
def _read(self, lr):
- def getline(lr, hunk):
- l = lr.readline()
- hunk.append(l)
- return l.rstrip('\r\n')
-
- while True:
- line = getline(lr, self.hunk)
- if not line:
- raise PatchError(_('could not extract "%s" binary data')
- % self._fname)
- if line.startswith('literal '):
- break
+ line = lr.readline()
+ self.hunk.append(line)
+ while line and not line.startswith('literal '):
+ line = lr.readline()
+ self.hunk.append(line)
+ if not line:
+ raise PatchError(_('could not extract binary patch'))
size = int(line[8:].rstrip())
dec = []
- line = getline(lr, self.hunk)
+ line = lr.readline()
+ self.hunk.append(line)
while len(line) > 1:
l = line[0]
if l <= 'Z' and l >= 'A':
l = ord(l) - ord('A') + 1
else:
l = ord(l) - ord('a') + 27
- try:
- dec.append(base85.b85decode(line[1:])[:l])
- except ValueError, e:
- raise PatchError(_('could not decode "%s" binary patch: %s')
- % (self._fname, str(e)))
- line = getline(lr, self.hunk)
+ dec.append(base85.b85decode(line[1:-1])[:l])
+ line = lr.readline()
+ self.hunk.append(line)
text = zlib.decompress(''.join(dec))
if len(text) != size:
- raise PatchError(_('"%s" length is %d bytes, should be %d')
- % (self._fname, len(text), size))
+ raise PatchError(_('binary patch is %d bytes, not %d') %
+ len(text), size)
self.text = text
def parsefilename(str):
@@ -1202,10 +1182,10 @@ def iterhunks(fp):
or x.startswith('GIT binary patch')):
gp = None
if (gitpatches and
- gitpatches[-1].ispatching(afile, bfile)):
- gp = gitpatches.pop()
+ (gitpatches[-1][0] == afile or gitpatches[-1][1] == bfile)):
+ gp = gitpatches.pop()[2]
if x.startswith('GIT binary patch'):
- h = binhunk(lr, gp.path)
+ h = binhunk(lr)
else:
if context is None and x.startswith('***************'):
context = True
@@ -1216,24 +1196,25 @@ def iterhunks(fp):
yield 'file', (afile, bfile, h, gp and gp.copy() or None)
yield 'hunk', h
elif x.startswith('diff --git'):
- m = gitre.match(x.rstrip(' \r\n'))
+ m = gitre.match(x)
if not m:
continue
- if gitpatches is None:
+ if not gitpatches:
# scan whole input for git metadata
- gitpatches = scangitpatch(lr, x)
- yield 'git', [g.copy() for g in gitpatches
- if g.op in ('COPY', 'RENAME')]
+ gitpatches = [('a/' + gp.path, 'b/' + gp.path, gp) for gp
+ in scangitpatch(lr, x)]
+ yield 'git', [g[2].copy() for g in gitpatches
+ if g[2].op in ('COPY', 'RENAME')]
gitpatches.reverse()
afile = 'a/' + m.group(1)
bfile = 'b/' + m.group(2)
- while gitpatches and not gitpatches[-1].ispatching(afile, bfile):
- gp = gitpatches.pop()
+ while afile != gitpatches[-1][0] and bfile != gitpatches[-1][1]:
+ gp = gitpatches.pop()[2]
yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
- if not gitpatches:
- raise PatchError(_('failed to synchronize metadata for "%s"')
- % afile[2:])
- gp = gitpatches[-1]
+ gp = gitpatches[-1][2]
+ # copy/rename + modify should modify target, not source
+ if gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD') or gp.mode:
+ afile = bfile
newfile = True
elif x.startswith('---'):
# check for a unified diff
@@ -1268,7 +1249,7 @@ def iterhunks(fp):
hunknum = 0
while gitpatches:
- gp = gitpatches.pop()
+ gp = gitpatches.pop()[2]
yield 'file', ('a/' + gp.path, 'b/' + gp.path, None, gp.copy())
def applydiff(ui, fp, backend, store, strip=1, eolmode='strict'):
@@ -1307,6 +1288,7 @@ def _applydiff(ui, fp, patcher, backend, store, strip=1,
current_file = None
afile, bfile, first_hunk, gp = values
if gp:
+ path = pstrip(gp.path)
gp.path = pstrip(gp.path)
if gp.oldpath:
gp.oldpath = pstrip(gp.oldpath)
@@ -1345,17 +1327,8 @@ def _applydiff(ui, fp, patcher, backend, store, strip=1,
elif state == 'git':
for gp in values:
path = pstrip(gp.oldpath)
- try:
- data, mode = backend.getfile(path)
- except IOError, e:
- if e.errno != errno.ENOENT:
- raise
- # The error ignored here will trigger a getfile()
- # error in a place more appropriate for error
- # handling, and will not interrupt the patching
- # process.
- else:
- store.setfile(path, data, mode)
+ data, mode = backend.getfile(path)
+ store.setfile(path, data, mode)
else:
raise util.Abort(_('unsupported parser state: %s') % state)
@@ -1555,10 +1528,10 @@ def b85diff(to, tn):
class GitDiffRequired(Exception):
pass
-def diffopts(ui, opts=None, untrusted=False, section='diff'):
+def diffopts(ui, opts=None, untrusted=False):
def get(key, name=None, getter=ui.configbool):
return ((opts and opts.get(key)) or
- getter(section, name or key, None, untrusted=untrusted))
+ getter('diff', name or key, None, untrusted=untrusted))
return mdiff.diffopts(
text=opts and opts.get('text'),
git=get('git'),
@@ -1597,12 +1570,12 @@ def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
def lrugetfilectx():
cache = {}
- order = util.deque()
+ order = []
def getfilectx(f, ctx):
fctx = ctx.filectx(f, filelog=cache.get(f))
if f not in cache:
if len(cache) > 20:
- del cache[order.popleft()]
+ del cache[order.pop(0)]
cache[f] = fctx.filelog()
else:
order.remove(f)
@@ -1628,16 +1601,15 @@ def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
copy = {}
if opts.git or opts.upgrade:
- copy = copies.pathcopies(ctx1, ctx2)
+ copy = copies.copies(repo, ctx1, ctx2, repo[nullid])[0]
- def difffn(opts, losedata):
- return trydiff(repo, revs, ctx1, ctx2, modified, added, removed,
- copy, getfilectx, opts, losedata, prefix)
+ difffn = lambda opts, losedata: trydiff(repo, revs, ctx1, ctx2,
+ modified, added, removed, copy, getfilectx, opts, losedata, prefix)
if opts.upgrade and not opts.git:
try:
def losedata(fn):
if not losedatafn or not losedatafn(fn=fn):
- raise GitDiffRequired
+ raise GitDiffRequired()
# Buffer the whole output until we are sure it can be generated
return list(difffn(opts.copy(git=False), losedata))
except GitDiffRequired:
@@ -1647,36 +1619,27 @@ def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None,
def difflabel(func, *args, **kw):
'''yields 2-tuples of (output, label) based on the output of func()'''
- headprefixes = [('diff', 'diff.diffline'),
- ('copy', 'diff.extended'),
- ('rename', 'diff.extended'),
- ('old', 'diff.extended'),
- ('new', 'diff.extended'),
- ('deleted', 'diff.extended'),
- ('---', 'diff.file_a'),
- ('+++', 'diff.file_b')]
- textprefixes = [('@', 'diff.hunk'),
- ('-', 'diff.deleted'),
- ('+', 'diff.inserted')]
- head = False
+ prefixes = [('diff', 'diff.diffline'),
+ ('copy', 'diff.extended'),
+ ('rename', 'diff.extended'),
+ ('old', 'diff.extended'),
+ ('new', 'diff.extended'),
+ ('deleted', 'diff.extended'),
+ ('---', 'diff.file_a'),
+ ('+++', 'diff.file_b'),
+ ('@@', 'diff.hunk'),
+ ('-', 'diff.deleted'),
+ ('+', 'diff.inserted')]
+
for chunk in func(*args, **kw):
lines = chunk.split('\n')
for i, line in enumerate(lines):
if i != 0:
yield ('\n', '')
- if head:
- if line.startswith('@'):
- head = False
- else:
- if line and line[0] not in ' +-@\\':
- head = True
stripline = line
- if not head and line and line[0] in '+-':
+ if line and line[0] in '+-':
# highlight trailing whitespace, but only in changed lines
stripline = line.rstrip()
- prefixes = textprefixes
- if head:
- prefixes = headprefixes
for prefix, label in prefixes:
if stripline.startswith(prefix):
yield (stripline, label)
@@ -1815,29 +1778,27 @@ def diffstatdata(lines):
diffre = re.compile('^diff .*-r [a-z0-9]+\s(.*)$')
results = []
- filename, adds, removes, isbinary = None, 0, 0, False
+ filename, adds, removes = None, 0, 0
def addresult():
if filename:
+ isbinary = adds == 0 and removes == 0
results.append((filename, adds, removes, isbinary))
for line in lines:
if line.startswith('diff'):
addresult()
# set numbers to 0 anyway when starting new file
- adds, removes, isbinary = 0, 0, False
+ adds, removes = 0, 0
if line.startswith('diff --git'):
filename = gitre.search(line).group(1)
elif line.startswith('diff -r'):
# format: "diff -r ... -r ... filename"
filename = diffre.search(line).group(1)
- elif line.startswith('+') and not line.startswith('+++ '):
+ elif line.startswith('+') and not line.startswith('+++'):
adds += 1
- elif line.startswith('-') and not line.startswith('--- '):
+ elif line.startswith('-') and not line.startswith('---'):
removes += 1
- elif (line.startswith('GIT binary patch') or
- line.startswith('Binary file')):
- isbinary = True
addresult()
return results
@@ -1862,7 +1823,7 @@ def diffstat(lines, width=80, git=False):
return max(i * graphwidth // maxtotal, int(bool(i)))
for filename, adds, removes, isbinary in stats:
- if isbinary:
+ if git and isbinary:
count = 'Bin'
else:
count = adds + removes
@@ -1873,8 +1834,7 @@ def diffstat(lines, width=80, git=False):
countwidth, count, pluses, minuses))
if stats:
- output.append(_(' %d files changed, %d insertions(+), '
- '%d deletions(-)\n')
+ output.append(_(' %d files changed, %d insertions(+), %d deletions(-)\n')
% (len(stats), totaladds, totalremoves))
return ''.join(output)
diff --git a/mercurial/phases.py b/mercurial/phases.py
deleted file mode 100644
index 614bcbb..0000000
--- a/mercurial/phases.py
+++ /dev/null
@@ -1,387 +0,0 @@
-""" Mercurial phases support code
-
- ---
-
- Copyright 2011 Pierre-Yves David <pierre-yves.david@ens-lyon.org>
- Logilab SA <contact@logilab.fr>
- Augie Fackler <durin42@gmail.com>
-
- This software may be used and distributed according to the terms
- of the GNU General Public License version 2 or any later version.
-
- ---
-
-This module implements most phase logic in mercurial.
-
-
-Basic Concept
-=============
-
-A 'changeset phase' is an indicator that tells us how a changeset is
-manipulated and communicated. The details of each phase is described
-below, here we describe the properties they have in common.
-
-Like bookmarks, phases are not stored in history and thus are not
-permanent and leave no audit trail.
-
-First, no changeset can be in two phases at once. Phases are ordered,
-so they can be considered from lowest to highest. The default, lowest
-phase is 'public' - this is the normal phase of existing changesets. A
-child changeset can not be in a lower phase than its parents.
-
-These phases share a hierarchy of traits:
-
- immutable shared
- public: X X
- draft: X
- secret:
-
-Local commits are draft by default.
-
-Phase Movement and Exchange
-===========================
-
-Phase data is exchanged by pushkey on pull and push. Some servers have
-a publish option set, we call such a server a "publishing server".
-Pushing a draft changeset to a publishing server changes the phase to
-public.
-
-A small list of fact/rules define the exchange of phase:
-
-* old client never changes server states
-* pull never changes server states
-* publish and old server changesets are seen as public by client
-* any secret changeset seen in another repository is lowered to at
- least draft
-
-Here is the final table summing up the 49 possible use cases of phase
-exchange:
-
- server
- old publish non-publish
- N X N D P N D P
- old client
- pull
- N - X/X - X/D X/P - X/D X/P
- X - X/X - X/D X/P - X/D X/P
- push
- X X/X X/X X/P X/P X/P X/D X/D X/P
- new client
- pull
- N - P/X - P/D P/P - D/D P/P
- D - P/X - P/D P/P - D/D P/P
- P - P/X - P/D P/P - P/D P/P
- push
- D P/X P/X P/P P/P P/P D/D D/D P/P
- P P/X P/X P/P P/P P/P P/P P/P P/P
-
-Legend:
-
- A/B = final state on client / state on server
-
- * N = new/not present,
- * P = public,
- * D = draft,
- * X = not tracked (i.e., the old client or server has no internal
- way of recording the phase.)
-
- passive = only pushes
-
-
- A cell here can be read like this:
-
- "When a new client pushes a draft changeset (D) to a publishing
- server where it's not present (N), it's marked public on both
- sides (P/P)."
-
-Note: old client behave as a publishing server with draft only content
-- other people see it as public
-- content is pushed as draft
-
-"""
-
-import errno
-from node import nullid, nullrev, bin, hex, short
-from i18n import _
-import util
-
-allphases = public, draft, secret = range(3)
-trackedphases = allphases[1:]
-phasenames = ['public', 'draft', 'secret']
-
-def _filterunknown(ui, changelog, phaseroots):
- """remove unknown nodes from the phase boundary
-
- Nothing is lost as unknown nodes only hold data for their descendants.
- """
- updated = False
- nodemap = changelog.nodemap # to filter unknown nodes
- for phase, nodes in enumerate(phaseroots):
- missing = [node for node in nodes if node not in nodemap]
- if missing:
- for mnode in missing:
- ui.debug(
- 'removing unknown node %s from %i-phase boundary\n'
- % (short(mnode), phase))
- nodes.symmetric_difference_update(missing)
- updated = True
- return updated
-
-def _readroots(repo, phasedefaults=None):
- """Read phase roots from disk
-
- phasedefaults is a list of fn(repo, roots) callable, which are
- executed if the phase roots file does not exist. When phases are
- being initialized on an existing repository, this could be used to
- set selected changesets phase to something else than public.
-
- Return (roots, dirty) where dirty is true if roots differ from
- what is being stored.
- """
- dirty = False
- roots = [set() for i in allphases]
- try:
- f = repo.sopener('phaseroots')
- try:
- for line in f:
- phase, nh = line.split()
- roots[int(phase)].add(bin(nh))
- finally:
- f.close()
- except IOError, inst:
- if inst.errno != errno.ENOENT:
- raise
- if phasedefaults:
- for f in phasedefaults:
- roots = f(repo, roots)
- dirty = True
- if _filterunknown(repo.ui, repo.changelog, roots):
- dirty = True
- return roots, dirty
-
-class phasecache(object):
- def __init__(self, repo, phasedefaults, _load=True):
- if _load:
- # Cheap trick to allow shallow-copy without copy module
- self.phaseroots, self.dirty = _readroots(repo, phasedefaults)
- self.opener = repo.sopener
- self._phaserevs = None
-
- def copy(self):
- # Shallow copy meant to ensure isolation in
- # advance/retractboundary(), nothing more.
- ph = phasecache(None, None, _load=False)
- ph.phaseroots = self.phaseroots[:]
- ph.dirty = self.dirty
- ph.opener = self.opener
- ph._phaserevs = self._phaserevs
- return ph
-
- def replace(self, phcache):
- for a in 'phaseroots dirty opener _phaserevs'.split():
- setattr(self, a, getattr(phcache, a))
-
- def getphaserevs(self, repo, rebuild=False):
- if rebuild or self._phaserevs is None:
- revs = [public] * len(repo.changelog)
- for phase in trackedphases:
- roots = map(repo.changelog.rev, self.phaseroots[phase])
- if roots:
- for rev in roots:
- revs[rev] = phase
- for rev in repo.changelog.descendants(roots):
- revs[rev] = phase
- self._phaserevs = revs
- return self._phaserevs
-
- def phase(self, repo, rev):
- # We need a repo argument here to be able to build _phaserev
- # if necessary. The repository instance is not stored in
- # phasecache to avoid reference cycles. The changelog instance
- # is not stored because it is a filecache() property and can
- # be replaced without us being notified.
- if rev == nullrev:
- return public
- if self._phaserevs is None or rev >= len(self._phaserevs):
- self._phaserevs = self.getphaserevs(repo, rebuild=True)
- return self._phaserevs[rev]
-
- def write(self):
- if not self.dirty:
- return
- f = self.opener('phaseroots', 'w', atomictemp=True)
- try:
- for phase, roots in enumerate(self.phaseroots):
- for h in roots:
- f.write('%i %s\n' % (phase, hex(h)))
- finally:
- f.close()
- self.dirty = False
-
- def _updateroots(self, phase, newroots):
- self.phaseroots[phase] = newroots
- self._phaserevs = None
- self.dirty = True
-
- def advanceboundary(self, repo, targetphase, nodes):
- # Be careful to preserve shallow-copied values: do not update
- # phaseroots values, replace them.
-
- delroots = [] # set of root deleted by this path
- for phase in xrange(targetphase + 1, len(allphases)):
- # filter nodes that are not in a compatible phase already
- nodes = [n for n in nodes
- if self.phase(repo, repo[n].rev()) >= phase]
- if not nodes:
- break # no roots to move anymore
- olds = self.phaseroots[phase]
- roots = set(ctx.node() for ctx in repo.set(
- 'roots((%ln::) - (%ln::%ln))', olds, olds, nodes))
- if olds != roots:
- self._updateroots(phase, roots)
- # some roots may need to be declared for lower phases
- delroots.extend(olds - roots)
- # declare deleted root in the target phase
- if targetphase != 0:
- self.retractboundary(repo, targetphase, delroots)
-
- def retractboundary(self, repo, targetphase, nodes):
- # Be careful to preserve shallow-copied values: do not update
- # phaseroots values, replace them.
-
- currentroots = self.phaseroots[targetphase]
- newroots = [n for n in nodes
- if self.phase(repo, repo[n].rev()) < targetphase]
- if newroots:
- if nullid in newroots:
- raise util.Abort(_('cannot change null revision phase'))
- currentroots = currentroots.copy()
- currentroots.update(newroots)
- ctxs = repo.set('roots(%ln::)', currentroots)
- currentroots.intersection_update(ctx.node() for ctx in ctxs)
- self._updateroots(targetphase, currentroots)
-
-def advanceboundary(repo, targetphase, nodes):
- """Add nodes to a phase changing other nodes phases if necessary.
-
- This function move boundary *forward* this means that all nodes
- are set in the target phase or kept in a *lower* phase.
-
- Simplify boundary to contains phase roots only."""
- phcache = repo._phasecache.copy()
- phcache.advanceboundary(repo, targetphase, nodes)
- repo._phasecache.replace(phcache)
-
-def retractboundary(repo, targetphase, nodes):
- """Set nodes back to a phase changing other nodes phases if
- necessary.
-
- This function move boundary *backward* this means that all nodes
- are set in the target phase or kept in a *higher* phase.
-
- Simplify boundary to contains phase roots only."""
- phcache = repo._phasecache.copy()
- phcache.retractboundary(repo, targetphase, nodes)
- repo._phasecache.replace(phcache)
-
-def listphases(repo):
- """List phases root for serialization over pushkey"""
- keys = {}
- value = '%i' % draft
- for root in repo._phasecache.phaseroots[draft]:
- keys[hex(root)] = value
-
- if repo.ui.configbool('phases', 'publish', True):
- # Add an extra data to let remote know we are a publishing
- # repo. Publishing repo can't just pretend they are old repo.
- # When pushing to a publishing repo, the client still need to
- # push phase boundary
- #
- # Push do not only push changeset. It also push phase data.
- # New phase data may apply to common changeset which won't be
- # push (as they are common). Here is a very simple example:
- #
- # 1) repo A push changeset X as draft to repo B
- # 2) repo B make changeset X public
- # 3) repo B push to repo A. X is not pushed but the data that
- # X as now public should
- #
- # The server can't handle it on it's own as it has no idea of
- # client phase data.
- keys['publishing'] = 'True'
- return keys
-
-def pushphase(repo, nhex, oldphasestr, newphasestr):
- """List phases root for serialisation over pushkey"""
- lock = repo.lock()
- try:
- currentphase = repo[nhex].phase()
- newphase = abs(int(newphasestr)) # let's avoid negative index surprise
- oldphase = abs(int(oldphasestr)) # let's avoid negative index surprise
- if currentphase == oldphase and newphase < oldphase:
- advanceboundary(repo, newphase, [bin(nhex)])
- return 1
- elif currentphase == newphase:
- # raced, but got correct result
- return 1
- else:
- return 0
- finally:
- lock.release()
-
-def analyzeremotephases(repo, subset, roots):
- """Compute phases heads and root in a subset of node from root dict
-
- * subset is heads of the subset
- * roots is {<nodeid> => phase} mapping. key and value are string.
-
- Accept unknown element input
- """
- # build list from dictionary
- draftroots = []
- nodemap = repo.changelog.nodemap # to filter unknown nodes
- for nhex, phase in roots.iteritems():
- if nhex == 'publishing': # ignore data related to publish option
- continue
- node = bin(nhex)
- phase = int(phase)
- if phase == 0:
- if node != nullid:
- repo.ui.warn(_('ignoring inconsistent public root'
- ' from remote: %s\n') % nhex)
- elif phase == 1:
- if node in nodemap:
- draftroots.append(node)
- else:
- repo.ui.warn(_('ignoring unexpected root from remote: %i %s\n')
- % (phase, nhex))
- # compute heads
- publicheads = newheads(repo, subset, draftroots)
- return publicheads, draftroots
-
-def newheads(repo, heads, roots):
- """compute new head of a subset minus another
-
- * `heads`: define the first subset
- * `rroots`: define the second we substract to the first"""
- revset = repo.set('heads((%ln + parents(%ln)) - (%ln::%ln))',
- heads, roots, roots, heads)
- return [c.node() for c in revset]
-
-
-def newcommitphase(ui):
- """helper to get the target phase of new commit
-
- Handle all possible values for the phases.new-commit options.
-
- """
- v = ui.config('phases', 'new-commit', draft)
- try:
- return phasenames.index(v)
- except ValueError:
- try:
- return int(v)
- except ValueError:
- msg = _("phases.new-commit: not a valid phase name ('%s')")
- raise error.ConfigError(msg % v)
-
diff --git a/mercurial/posix.py b/mercurial/posix.py
index a97c076..367d7a4 100644
--- a/mercurial/posix.py
+++ b/mercurial/posix.py
@@ -6,8 +6,7 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-import encoding
-import os, sys, errno, stat, getpass, pwd, grp, tempfile, unicodedata
+import os, sys, errno, stat, getpass, pwd, grp, tempfile
posixfile = open
nulldev = '/dev/null'
@@ -85,21 +84,6 @@ def setflags(f, l, x):
# Turn off all +x bits
os.chmod(f, s & 0666)
-def copymode(src, dst, mode=None):
- '''Copy the file mode from the file at path src to dst.
- If src doesn't exist, we're using mode instead. If mode is None, we're
- using umask.'''
- try:
- st_mode = os.lstat(src).st_mode & 0777
- except OSError, inst:
- if inst.errno != errno.ENOENT:
- raise
- st_mode = mode
- if st_mode is None:
- st_mode = ~umask
- st_mode &= 0666
- os.chmod(dst, st_mode)
-
def checkexec(path):
"""
Check whether the given path is on a filesystem with UNIX-like exec flags
@@ -165,32 +149,8 @@ def samedevice(fpath1, fpath2):
st2 = os.lstat(fpath2)
return st1.st_dev == st2.st_dev
-# os.path.normcase is a no-op, which doesn't help us on non-native filesystems
-def normcase(path):
- return path.lower()
-
if sys.platform == 'darwin':
import fcntl # only needed on darwin, missing on jython
-
- def normcase(path):
- try:
- u = path.decode('utf-8')
- except UnicodeDecodeError:
- # percent-encode any characters that don't round-trip
- p2 = path.decode('utf-8', 'ignore').encode('utf-8')
- s = ""
- pos = 0
- for c in path:
- if p2[pos:pos + 1] == c:
- s += c
- pos += 1
- else:
- s += "%%%02X" % ord(c)
- u = s.decode('utf-8')
-
- # Decompose then lowercase (HFS+ technote specifies lower)
- return unicodedata.normalize('NFD', u).lower().encode('utf-8')
-
def realpath(path):
'''
Returns the true, canonical file system path equivalent to the given
@@ -224,63 +184,10 @@ if sys.platform == 'darwin':
return fcntl.fcntl(fd, F_GETPATH, '\0' * 1024).rstrip('\0')
finally:
os.close(fd)
-elif sys.version_info < (2, 4, 2, 'final'):
- # Workaround for http://bugs.python.org/issue1213894 (os.path.realpath
- # didn't resolve symlinks that were the first component of the path.)
- def realpath(path):
- if os.path.isabs(path):
- return os.path.realpath(path)
- else:
- return os.path.realpath('./' + path)
else:
# Fallback to the likely inadequate Python builtin function.
realpath = os.path.realpath
-if sys.platform == 'cygwin':
- # workaround for cygwin, in which mount point part of path is
- # treated as case sensitive, even though underlying NTFS is case
- # insensitive.
-
- # default mount points
- cygwinmountpoints = sorted([
- "/usr/bin",
- "/usr/lib",
- "/cygdrive",
- ], reverse=True)
-
- # use upper-ing as normcase as same as NTFS workaround
- def normcase(path):
- pathlen = len(path)
- if (pathlen == 0) or (path[0] != os.sep):
- # treat as relative
- return encoding.upper(path)
-
- # to preserve case of mountpoint part
- for mp in cygwinmountpoints:
- if not path.startswith(mp):
- continue
-
- mplen = len(mp)
- if mplen == pathlen: # mount point itself
- return mp
- if path[mplen] == os.sep:
- return mp + encoding.upper(path[mplen:])
-
- return encoding.upper(path)
-
- # Cygwin translates native ACLs to POSIX permissions,
- # but these translations are not supported by native
- # tools, so the exec bit tends to be set erroneously.
- # Therefore, disable executable bit access on Cygwin.
- def checkexec(path):
- return False
-
- # Similarly, Cygwin's symlink emulation is likely to create
- # problems when Mercurial is used from both Cygwin and native
- # Windows, with other native tools, or on shared volumes
- def checklink(path):
- return False
-
def shellquote(s):
if os.sys.platform == 'OpenVMS':
return '"%s"' % s
@@ -324,16 +231,13 @@ def findexe(command):
def findexisting(executable):
'Will return executable if existing file'
- if os.path.isfile(executable) and os.access(executable, os.X_OK):
+ if os.path.exists(executable):
return executable
return None
if os.sep in command:
return findexisting(command)
- if sys.platform == 'plan9':
- return findexisting(os.path.join('/bin', command))
-
for path in os.environ.get('PATH', '').split(os.pathsep):
executable = findexisting(os.path.join(path, command))
if executable is not None:
@@ -407,13 +311,10 @@ def termwidth():
continue
if not os.isatty(fd):
continue
- try:
- arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
- width = array.array('h', arri)[1]
- if width > 0:
- return width
- except AttributeError:
- pass
+ arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8)
+ width = array.array('h', arri)[1]
+ if width > 0:
+ return width
except ValueError:
pass
except IOError, e:
@@ -424,47 +325,3 @@ def termwidth():
except ImportError:
pass
return 80
-
-def makedir(path, notindexed):
- os.mkdir(path)
-
-def unlinkpath(f):
- """unlink and remove the directory if it is empty"""
- os.unlink(f)
- # try removing directories that might now be empty
- try:
- os.removedirs(os.path.dirname(f))
- except OSError:
- pass
-
-def lookupreg(key, name=None, scope=None):
- return None
-
-def hidewindow():
- """Hide current shell window.
-
- Used to hide the window opened when starting asynchronous
- child process under Windows, unneeded on other systems.
- """
- pass
-
-class cachestat(object):
- def __init__(self, path):
- self.stat = os.stat(path)
-
- def cacheable(self):
- return bool(self.stat.st_ino)
-
- __hash__ = object.__hash__
-
- def __eq__(self, other):
- try:
- return self.stat == other.stat
- except AttributeError:
- return False
-
- def __ne__(self, other):
- return not self == other
-
-def executablepath():
- return None # available on Windows only
diff --git a/mercurial/pure/__init__.py b/mercurial/pure/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/mercurial/pure/__init__.py
+++ /dev/null
diff --git a/mercurial/pure/base85.py b/mercurial/pure/base85.py
index 8b93192..930d251 100644
--- a/mercurial/pure/base85.py
+++ b/mercurial/pure/base85.py
@@ -54,10 +54,9 @@ def b85decode(text):
try:
acc = acc * 85 + _b85dec[c]
except KeyError:
- raise ValueError('bad base85 character at position %d'
- % (i + j))
+ raise TypeError('Bad base85 character at byte %d' % (i + j))
if acc > 4294967295:
- raise ValueError('Base85 overflow in hunk starting at byte %d' % i)
+ raise OverflowError('Base85 overflow in hunk starting at byte %d' % i)
out.append(acc)
# Pad final chunk if necessary
diff --git a/mercurial/pure/bdiff.py b/mercurial/pure/bdiff.py
index 06f0bd3..0e457d3 100644
--- a/mercurial/pure/bdiff.py
+++ b/mercurial/pure/bdiff.py
@@ -5,7 +5,7 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import struct, difflib, re
+import struct, difflib
def splitnewlines(text):
'''like str.splitlines, but only split on newlines.'''
@@ -78,10 +78,3 @@ def blocks(a, b):
d = _normalizeblocks(an, bn, d)
return [(i, i + n, j, j + n) for (i, j, n) in d]
-def fixws(text, allws):
- if allws:
- text = re.sub('[ \t\r]+', '', text)
- else:
- text = re.sub('[ \t\r]+', ' ', text)
- text = text.replace(' \n', '\n')
- return text
diff --git a/mercurial/pure/mpatch.py b/mercurial/pure/mpatch.py
index 1738d97..760740d 100644
--- a/mercurial/pure/mpatch.py
+++ b/mercurial/pure/mpatch.py
@@ -85,10 +85,10 @@ def patches(a, bins):
p1, p2, l = struct.unpack(">lll", m.read(12))
pull(new, frags, p1 - last) # what didn't change
pull([], frags, p2 - p1) # what got deleted
- new.append((l, pos + 12)) # what got added
+ new.append((l, pos + 12)) # what got added
pos += l + 12
last = p2
- frags.extend(reversed(new)) # what was left at the end
+ frags.extend(reversed(new)) # what was left at the end
t = collect(b2, frags)
diff --git a/mercurial/pure/osutil.py b/mercurial/pure/osutil.py
index 2476bd6..28bbbc5 100644
--- a/mercurial/pure/osutil.py
+++ b/mercurial/pure/osutil.py
@@ -58,7 +58,7 @@ def listdir(path, stat=False, skip=None):
if os.name != 'nt':
posixfile = open
else:
- import ctypes, msvcrt
+ import ctypes, ctypes.util
_kernel32 = ctypes.windll.kernel32
@@ -68,6 +68,15 @@ else:
_INVALID_HANDLE_VALUE = _HANDLE(-1).value
+ def _crtname():
+ try:
+ # find_msvcrt was introduced in Python 2.6
+ return ctypes.util.find_msvcrt()
+ except AttributeError:
+ return 'msvcr71.dll' # CPython 2.5 and 2.4
+
+ _crt = ctypes.PyDLL(_crtname())
+
# CreateFile
_FILE_SHARE_READ = 0x00000001
_FILE_SHARE_WRITE = 0x00000002
@@ -96,6 +105,9 @@ else:
_DWORD, _DWORD, _HANDLE]
_kernel32.CreateFileA.restype = _HANDLE
+ _crt._open_osfhandle.argtypes = [_HANDLE, ctypes.c_int]
+ _crt._open_osfhandle.restype = ctypes.c_int
+
def _raiseioerror(name):
err = ctypes.WinError()
raise IOError(err.errno, '%s: %s' % (name, err.strerror))
@@ -119,7 +131,7 @@ else:
flags = _O_TEXT
m0 = mode[0]
- if m0 == 'r' and '+' not in mode:
+ if m0 == 'r' and not '+' in mode:
flags |= _O_RDONLY
access = _GENERIC_READ
else:
@@ -144,7 +156,10 @@ else:
if fh == _INVALID_HANDLE_VALUE:
_raiseioerror(name)
- fd = msvcrt.open_osfhandle(fh, flags)
+ # for CPython we must use the same CRT as Python uses,
+ # or the os.fdopen call below will abort with
+ # "OSError: [Errno 9] Bad file descriptor"
+ fd = _crt._open_osfhandle(fh, flags)
if fd == -1:
_kernel32.CloseHandle(fh)
_raiseioerror(name)
diff --git a/mercurial/pure/parsers.py b/mercurial/pure/parsers.py
index c4fe285..868dba5 100644
--- a/mercurial/pure/parsers.py
+++ b/mercurial/pure/parsers.py
@@ -36,7 +36,7 @@ def parse_index2(data, inline):
s = struct.calcsize(indexformatng)
index = []
cache = None
- off = 0
+ n = off = 0
l = len(data) - s
append = index.append
@@ -45,6 +45,7 @@ def parse_index2(data, inline):
while off <= l:
e = _unpack(indexformatng, data[off:off + s])
append(e)
+ n += 1
if e[1] < 0:
break
off += e[1] + s
@@ -52,6 +53,7 @@ def parse_index2(data, inline):
while off <= l:
e = _unpack(indexformatng, data[off:off + s])
append(e)
+ n += 1
off += s
if off != len(data):
diff --git a/mercurial/pushkey.py b/mercurial/pushkey.py
index 7d692d0..2343319 100644
--- a/mercurial/pushkey.py
+++ b/mercurial/pushkey.py
@@ -5,21 +5,16 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import bookmarks, phases, obsolete
+import bookmarks
def _nslist(repo):
n = {}
for k in _namespaces:
n[k] = ""
- if not obsolete._enabled:
- n.pop('obsolete')
return n
_namespaces = {"namespaces": (lambda *x: False, _nslist),
- "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks),
- "phases": (phases.pushphase, phases.listphases),
- "obsolete": (obsolete.pushmarker, obsolete.listmarkers),
- }
+ "bookmarks": (bookmarks.pushbookmark, bookmarks.listbookmarks)}
def register(namespace, pushkey, listkeys):
_namespaces[namespace] = (pushkey, listkeys)
diff --git a/mercurial/pvec.py b/mercurial/pvec.py
deleted file mode 100644
index d29bbbc..0000000
--- a/mercurial/pvec.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# pvec.py - probabilistic vector clocks for Mercurial
-#
-# Copyright 2012 Matt Mackall <mpm@selenic.com>
-#
-# This software may be used and distributed according to the terms of the
-# GNU General Public License version 2 or any later version.
-
-'''
-A "pvec" is a changeset property based on the theory of vector clocks
-that can be compared to discover relatedness without consulting a
-graph. This can be useful for tasks like determining how a
-disconnected patch relates to a repository.
-
-Currently a pvec consist of 448 bits, of which 24 are 'depth' and the
-remainder are a bit vector. It is represented as a 70-character base85
-string.
-
-Construction:
-
-- a root changeset has a depth of 0 and a bit vector based on its hash
-- a normal commit has a changeset where depth is increased by one and
- one bit vector bit is flipped based on its hash
-- a merge changeset pvec is constructed by copying changes from one pvec into
- the other to balance its depth
-
-Properties:
-
-- for linear changes, difference in depth is always <= hamming distance
-- otherwise, changes are probably divergent
-- when hamming distance is < 200, we can reliably detect when pvecs are near
-
-Issues:
-
-- hamming distance ceases to work over distances of ~ 200
-- detecting divergence is less accurate when the common ancestor is very close
- to either revision or total distance is high
-- this could probably be improved by modeling the relation between
- delta and hdist
-
-Uses:
-
-- a patch pvec can be used to locate the nearest available common ancestor for
- resolving conflicts
-- ordering of patches can be established without a DAG
-- two head pvecs can be compared to determine whether push/pull/merge is needed
- and approximately how many changesets are involved
-- can be used to find a heuristic divergence measure between changesets on
- different branches
-'''
-
-import base85, util
-from node import nullrev
-
-_size = 448 # 70 chars b85-encoded
-_bytes = _size / 8
-_depthbits = 24
-_depthbytes = _depthbits / 8
-_vecbytes = _bytes - _depthbytes
-_vecbits = _vecbytes * 8
-_radius = (_vecbits - 30) / 2 # high probability vecs are related
-
-def _bin(bs):
- '''convert a bytestring to a long'''
- v = 0
- for b in bs:
- v = v * 256 + ord(b)
- return v
-
-def _str(v, l):
- bs = ""
- for p in xrange(l):
- bs = chr(v & 255) + bs
- v >>= 8
- return bs
-
-def _split(b):
- '''depth and bitvec'''
- return _bin(b[:_depthbytes]), _bin(b[_depthbytes:])
-
-def _join(depth, bitvec):
- return _str(depth, _depthbytes) + _str(bitvec, _vecbytes)
-
-def _hweight(x):
- c = 0
- while x:
- if x & 1:
- c += 1
- x >>= 1
- return c
-_htab = [_hweight(x) for x in xrange(256)]
-
-def _hamming(a, b):
- '''find the hamming distance between two longs'''
- d = a ^ b
- c = 0
- while d:
- c += _htab[d & 0xff]
- d >>= 8
- return c
-
-def _mergevec(x, y, c):
- # Ideally, this function would be x ^ y ^ ancestor, but finding
- # ancestors is a nuisance. So instead we find the minimal number
- # of changes to balance the depth and hamming distance
-
- d1, v1 = x
- d2, v2 = y
- if d1 < d2:
- d1, d2, v1, v2 = d2, d1, v2, v1
-
- hdist = _hamming(v1, v2)
- ddist = d1 - d2
- v = v1
- m = v1 ^ v2 # mask of different bits
- i = 1
-
- if hdist > ddist:
- # if delta = 10 and hdist = 100, then we need to go up 55 steps
- # to the ancestor and down 45
- changes = (hdist - ddist + 1) / 2
- else:
- # must make at least one change
- changes = 1
- depth = d1 + changes
-
- # copy changes from v2
- if m:
- while changes:
- if m & i:
- v ^= i
- changes -= 1
- i <<= 1
- else:
- v = _flipbit(v, c)
-
- return depth, v
-
-def _flipbit(v, node):
- # converting bit strings to longs is slow
- bit = (hash(node) & 0xffffffff) % _vecbits
- return v ^ (1<<bit)
-
-def ctxpvec(ctx):
- '''construct a pvec for ctx while filling in the cache'''
- r = ctx._repo
- if not util.safehasattr(r, "_pveccache"):
- r._pveccache = {}
- pvc = r._pveccache
- if ctx.rev() not in pvc:
- cl = r.changelog
- for n in xrange(ctx.rev() + 1):
- if n not in pvc:
- node = cl.node(n)
- p1, p2 = cl.parentrevs(n)
- if p1 == nullrev:
- # start with a 'random' vector at root
- pvc[n] = (0, _bin((node * 3)[:_vecbytes]))
- elif p2 == nullrev:
- d, v = pvc[p1]
- pvc[n] = (d + 1, _flipbit(v, node))
- else:
- pvc[n] = _mergevec(pvc[p1], pvc[p2], node)
- bs = _join(*pvc[ctx.rev()])
- return pvec(base85.b85encode(bs))
-
-class pvec(object):
- def __init__(self, hashorctx):
- if isinstance(hashorctx, str):
- self._bs = hashorctx
- self._depth, self._vec = _split(base85.b85decode(hashorctx))
- else:
- self._vec = ctxpvec(ctx)
-
- def __str__(self):
- return self._bs
-
- def __eq__(self, b):
- return self._vec == b._vec and self._depth == b._depth
-
- def __lt__(self, b):
- delta = b._depth - self._depth
- if delta < 0:
- return False # always correct
- if _hamming(self._vec, b._vec) > delta:
- return False
- return True
-
- def __gt__(self, b):
- return b < self
-
- def __or__(self, b):
- delta = abs(b._depth - self._depth)
- if _hamming(self._vec, b._vec) <= delta:
- return False
- return True
-
- def __sub__(self, b):
- if self | b:
- raise ValueError("concurrent pvecs")
- return self._depth - b._depth
-
- def distance(self, b):
- d = abs(b._depth - self._depth)
- h = _hamming(self._vec, b._vec)
- return max(d, h)
-
- def near(self, b):
- dist = abs(b.depth - self._depth)
- if dist > _radius or _hamming(self._vec, b._vec) > _radius:
- return False
diff --git a/mercurial/repair.py b/mercurial/repair.py
index 9ccaa34..c95dff1 100644
--- a/mercurial/repair.py
+++ b/mercurial/repair.py
@@ -10,7 +10,6 @@ from mercurial import changegroup, bookmarks
from mercurial.node import short
from mercurial.i18n import _
import os
-import errno
def _bundle(repo, bases, heads, node, suffix, compress=True):
"""create a bundle with the specified revisions as a backup"""
@@ -38,14 +37,14 @@ def _collectbrokencsets(repo, files, striprev):
"""return the changesets which will be broken by the truncation"""
s = set()
def collectone(revlog):
- linkgen = (revlog.linkrev(i) for i in revlog)
+ links = (revlog.linkrev(i) for i in revlog)
# find the truncation point of the revlog
- for lrev in linkgen:
+ for lrev in links:
if lrev >= striprev:
break
# see if any revision after this point has a linkrev
# less than striprev (those will be broken by strip)
- for lrev in linkgen:
+ for lrev in links:
if lrev < striprev:
s.add(lrev)
@@ -55,29 +54,10 @@ def _collectbrokencsets(repo, files, striprev):
return s
-def strip(ui, repo, nodelist, backup="all", topic='backup'):
- # It simplifies the logic around updating the branchheads cache if we only
- # have to consider the effect of the stripped revisions and not revisions
- # missing because the cache is out-of-date.
- repo.updatebranchcache()
-
+def strip(ui, repo, node, backup="all"):
cl = repo.changelog
- # TODO handle undo of merge sets
- if isinstance(nodelist, str):
- nodelist = [nodelist]
- striplist = [cl.rev(node) for node in nodelist]
- striprev = min(striplist)
-
- # Generate set of branches who will have nodes stripped.
- striprevs = repo.revs("%ld::", striplist)
- stripbranches = set([repo[rev].branch() for rev in striprevs])
-
- # Set of potential new heads resulting from the strip. The parents of any
- # node removed could be a new head because the node to be removed could have
- # been the only child of the parent.
- newheadrevs = repo.revs("parents(%ld::) - %ld::", striprevs, striprevs)
- newheadnodes = set([cl.node(rev) for rev in newheadrevs])
- newheadbranches = set([repo[rev].branch() for rev in newheadrevs])
+ # TODO delete the undo files, and handle undo of merge sets
+ striprev = cl.rev(node)
keeppartialbundle = backup == 'strip'
@@ -88,10 +68,8 @@ def strip(ui, repo, nodelist, backup="all", topic='backup'):
# the list of heads and bases of the set of interesting revisions.
# (head = revision in the set that has no descendant in the set;
# base = revision in the set that has no ancestor in the set)
- tostrip = set(striplist)
- for rev in striplist:
- for desc in cl.descendants([rev]):
- tostrip.add(desc)
+ tostrip = set(cl.descendants(striprev))
+ tostrip.add(striprev)
files = _collectfiles(repo, striprev)
saverevs = _collectbrokencsets(repo, files, striprev)
@@ -107,17 +85,9 @@ def strip(ui, repo, nodelist, backup="all", topic='backup'):
# compute base nodes
if saverevs:
- descendants = set(cl.descendants(saverevs))
+ descendants = set(cl.descendants(*saverevs))
saverevs.difference_update(descendants)
savebases = [cl.node(r) for r in saverevs]
- stripbases = [cl.node(r) for r in tostrip]
- rset = ' or '.join([str(r) for r in tostrip])
- newbmtarget = repo.revs('sort(heads(ancestors(%r) - (%r)), -rev)',
- rset, rset)
- if newbmtarget:
- newbmtarget = newbmtarget[0]
- else:
- newbmtarget = '.'
bm = repo._bookmarks
updatebm = []
@@ -129,7 +99,7 @@ def strip(ui, repo, nodelist, backup="all", topic='backup'):
# create a changegroup for all the branches we need to keep
backupfile = None
if backup == "all":
- backupfile = _bundle(repo, stripbases, cl.heads(), node, topic)
+ backupfile = _bundle(repo, [node], cl.heads(), node, 'backup')
repo.ui.status(_("saved backup bundle to %s\n") % backupfile)
if saveheads or savebases:
# do not compress partial bundle if we remove it from disk later
@@ -154,7 +124,7 @@ def strip(ui, repo, nodelist, backup="all", topic='backup'):
file, troffset, ignore = tr.entries[i]
repo.sopener(file, 'a').truncate(troffset)
tr.close()
- except: # re-raises
+ except:
tr.abort()
raise
@@ -172,18 +142,11 @@ def strip(ui, repo, nodelist, backup="all", topic='backup'):
if not keeppartialbundle:
os.unlink(chgrpfile)
- # remove undo files
- for undofile in repo.undofiles():
- try:
- os.unlink(undofile)
- except OSError, e:
- if e.errno != errno.ENOENT:
- ui.warn(_('error removing %s: %s\n') % (undofile, str(e)))
-
for m in updatebm:
- bm[m] = repo[newbmtarget].node()
+ bm[m] = repo['.'].node()
bookmarks.write(repo)
- except: # re-raises
+
+ except:
if backupfile:
ui.warn(_("strip failed, full bundle stored in '%s'\n")
% backupfile)
@@ -192,10 +155,4 @@ def strip(ui, repo, nodelist, backup="all", topic='backup'):
% chgrpfile)
raise
- if len(stripbranches) == 1 and len(newheadbranches) == 1 \
- and stripbranches == newheadbranches:
- repo.destroyed(newheadnodes)
- else:
- # Multiple branches involved in strip. Will allow branchcache to become
- # invalid and later on rebuilt from scratch
- repo.destroyed()
+ repo.destroyed()
diff --git a/mercurial/peer.py b/mercurial/repo.py
index 40841ca..60de133 100644
--- a/mercurial/peer.py
+++ b/mercurial/repo.py
@@ -1,4 +1,4 @@
-# peer.py - repository base classes for mercurial
+# repo.py - repository base classes for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
@@ -9,18 +9,16 @@
from i18n import _
import error
-class peerrepository(object):
-
+class repository(object):
def capable(self, name):
'''tell whether repo supports named capability.
return False if not supported.
if boolean capability, return True.
if string capability, return string.'''
- caps = self._capabilities()
- if name in caps:
+ if name in self.capabilities:
return True
name_eq = name + '='
- for cap in caps:
+ for cap in self.capabilities:
if cap.startswith(name_eq):
return cap[len(name_eq):]
return False
@@ -33,14 +31,10 @@ class peerrepository(object):
'support the %r capability') % (purpose, name))
def local(self):
- '''return peer as a localrepo, or None'''
- return None
-
- def peer(self):
- return self
+ return False
- def canpush(self):
- return True
+ def cancopy(self):
+ return self.local()
def close(self):
pass
diff --git a/mercurial/revlog.py b/mercurial/revlog.py
index 8ed1d82..97151aa 100644
--- a/mercurial/revlog.py
+++ b/mercurial/revlog.py
@@ -75,6 +75,35 @@ def hash(text, p1, p2):
s.update(text)
return s.digest()
+def compress(text):
+ """ generate a possibly-compressed representation of text """
+ if not text:
+ return ("", text)
+ l = len(text)
+ bin = None
+ if l < 44:
+ pass
+ elif l > 1000000:
+ # zlib makes an internal copy, thus doubling memory usage for
+ # large files, so lets do this in pieces
+ z = zlib.compressobj()
+ p = []
+ pos = 0
+ while pos < l:
+ pos2 = pos + 2**20
+ p.append(z.compress(text[pos:pos2]))
+ pos = pos2
+ p.append(z.flush())
+ if sum(map(len, p)) < l:
+ bin = "".join(p)
+ else:
+ bin = _compress(text)
+ if bin is None or len(bin) > l:
+ if text[0] == '\0':
+ return ("", text)
+ return ('u', text)
+ return ("", bin)
+
def decompress(bin):
""" decompress the given input """
if not bin:
@@ -83,10 +112,7 @@ def decompress(bin):
if t == '\0':
return bin
if t == 'x':
- try:
- return _decompress(bin)
- except zlib.error, e:
- raise RevlogError(_("revlog decompress error: %s") % str(e))
+ return _decompress(bin)
if t == 'u':
return bin[1:]
raise RevlogError(_("unknown compression type %r") % t)
@@ -148,7 +174,7 @@ class revlogio(object):
def parseindex(self, data, inline):
# call the C implementation to parse the index data
index, cache = parsers.parse_index2(data, inline)
- return index, getattr(index, 'nodemap', None), cache
+ return index, None, cache
def packentry(self, entry, node, version, rev):
p = _pack(indexformatng, *entry)
@@ -200,10 +226,9 @@ class revlog(object):
self._nodepos = None
v = REVLOG_DEFAULT_VERSION
- opts = getattr(opener, 'options', None)
- if opts is not None:
- if 'revlogv1' in opts:
- if 'generaldelta' in opts:
+ if hasattr(opener, 'options'):
+ if 'revlogv1' in opener.options:
+ if 'generaldelta' in opener.options:
v |= REVLOGGENERALDELTA
else:
v = 0
@@ -262,28 +287,10 @@ class revlog(object):
self.rev(self.node(0))
return self._nodecache
- def hasnode(self, node):
- try:
- self.rev(node)
- return True
- except KeyError:
- return False
-
- def clearcaches(self):
- try:
- self._nodecache.clearcaches()
- except AttributeError:
- self._nodecache = {nullid: nullrev}
- self._nodepos = None
-
def rev(self, node):
try:
return self._nodecache[node]
- except RevlogError:
- # parsers.c radix tree lookup failed
- raise LookupError(node, self.indexfile, _('no node'))
except KeyError:
- # pure python cache lookup failed
n = self._nodecache
i = self.index
p = self._nodepos
@@ -332,35 +339,47 @@ class revlog(object):
return len(t)
size = rawsize
- def ancestors(self, revs, stoprev=0):
+ def reachable(self, node, stop=None):
+ """return the set of all nodes ancestral to a given node, including
+ the node itself, stopping when stop is matched"""
+ reachable = set((node,))
+ visit = [node]
+ if stop:
+ stopn = self.rev(stop)
+ else:
+ stopn = 0
+ while visit:
+ n = visit.pop(0)
+ if n == stop:
+ continue
+ if n == nullid:
+ continue
+ for p in self.parents(n):
+ if self.rev(p) < stopn:
+ continue
+ if p not in reachable:
+ reachable.add(p)
+ visit.append(p)
+ return reachable
+
+ def ancestors(self, *revs):
"""Generate the ancestors of 'revs' in reverse topological order.
- Does not generate revs lower than stoprev.
Yield a sequence of revision numbers starting with the parents
of each revision in revs, i.e., each revision is *not* considered
an ancestor of itself. Results are in breadth-first order:
parents of each rev in revs, then parents of those, etc. Result
does not include the null revision."""
- visit = util.deque(revs)
+ visit = list(revs)
seen = set([nullrev])
while visit:
- for parent in self.parentrevs(visit.popleft()):
- if parent < stoprev:
- continue
+ for parent in self.parentrevs(visit.pop(0)):
if parent not in seen:
visit.append(parent)
seen.add(parent)
yield parent
- def incancestors(self, revs, stoprev=0):
- """Identical to ancestors() except it also generates the
- revisions, 'revs'"""
- for rev in revs:
- yield rev
- for rev in self.ancestors(revs, stoprev):
- yield rev
-
- def descendants(self, revs):
+ def descendants(self, *revs):
"""Generate the descendants of 'revs' in revision order.
Yield a sequence of revision numbers starting with a child of
@@ -383,10 +402,13 @@ class revlog(object):
def findcommonmissing(self, common=None, heads=None):
"""Return a tuple of the ancestors of common and the ancestors of heads
- that are not ancestors of common. In revset terminology, we return the
- tuple:
+ that are not ancestors of common.
- ::common, (::heads) - (::common)
+ More specifically, the second element is a list of nodes N such that
+ every N satisfies the following constraints:
+
+ 1. N is an ancestor of some node in 'heads'
+ 2. N is not an ancestor of any node in 'common'
The list is sorted by revision number, meaning it is
topologically sorted.
@@ -403,15 +425,15 @@ class revlog(object):
heads = [self.rev(n) for n in heads]
# we want the ancestors, but inclusive
- has = set(self.ancestors(common))
+ has = set(self.ancestors(*common))
has.add(nullrev)
has.update(common)
# take all ancestors from heads that aren't in has
missing = set()
- visit = util.deque(r for r in heads if r not in has)
+ visit = [r for r in heads if r not in has]
while visit:
- r = visit.popleft()
+ r = visit.pop(0)
if r in missing:
continue
else:
@@ -597,10 +619,6 @@ class revlog(object):
return (orderedout, roots, heads)
def headrevs(self):
- try:
- return self.index.headrevs()
- except AttributeError:
- pass
count = len(self)
if not count:
return [nullrev]
@@ -662,7 +680,7 @@ class revlog(object):
def descendant(self, start, end):
if start == nullrev:
return True
- for i in self.descendants([start]):
+ for i in self.descendants(start):
if i == end:
return True
elif i > end:
@@ -688,7 +706,7 @@ class revlog(object):
return self.node(c)
def _match(self, id):
- if isinstance(id, int):
+ if isinstance(id, (long, int)):
# rev
return self.node(id)
if len(id) == 20:
@@ -722,15 +740,6 @@ class revlog(object):
pass
def _partialmatch(self, id):
- try:
- return self.index.partialmatch(id)
- except RevlogError:
- # parsers.c radix tree lookup gave multiple matches
- raise LookupError(id, self.indexfile, _("ambiguous identifier"))
- except (AttributeError, ValueError):
- # we are pure python, or key was too short to search radix tree
- pass
-
if id in self._pcache:
return self._pcache[id]
@@ -790,10 +799,9 @@ class revlog(object):
readahead = max(65536, length)
df.seek(offset)
d = df.read(readahead)
- df.close()
self._addchunk(offset, d)
if readahead > length:
- return util.buffer(d, 0, length)
+ return d[:length]
return d
def _getchunk(self, offset, length):
@@ -806,7 +814,7 @@ class revlog(object):
if cachestart >= 0 and cacheend <= l:
if cachestart == 0 and cacheend == l:
return d # avoid a copy
- return util.buffer(d, cachestart, cacheend - cachestart)
+ return d[cachestart:cacheend]
return self._loadchunk(offset, length)
@@ -839,22 +847,13 @@ class revlog(object):
def revdiff(self, rev1, rev2):
"""return or calculate a delta between two revisions"""
if rev1 != nullrev and self.deltaparent(rev2) == rev1:
- return str(self._chunk(rev2))
+ return self._chunk(rev2)
- return mdiff.textdiff(self.revision(rev1),
- self.revision(rev2))
-
- def revision(self, nodeorrev):
- """return an uncompressed revision of a given node or revision
- number.
- """
- if isinstance(nodeorrev, int):
- rev = nodeorrev
- node = self.node(rev)
- else:
- node = nodeorrev
- rev = None
+ return mdiff.textdiff(self.revision(self.node(rev1)),
+ self.revision(self.node(rev2)))
+ def revision(self, node):
+ """return an uncompressed revision of a given node"""
cachedrev = None
if node == nullid:
return ""
@@ -865,8 +864,7 @@ class revlog(object):
# look up what we need to read
text = None
- if rev is None:
- rev = self.rev(node)
+ rev = self.rev(node)
# check rev flags
if self.flags(rev) & ~REVIDX_KNOWN_FLAGS:
@@ -898,7 +896,7 @@ class revlog(object):
self._chunkraw(base, rev)
if text is None:
- text = str(self._chunkbase(base))
+ text = self._chunkbase(base)
bins = [self._chunk(r) for r in chain]
text = mdiff.patches(text, bins)
@@ -947,9 +945,9 @@ class revlog(object):
e = self._io.packentry(self.index[i], self.node, self.version, i)
fp.write(e)
- # if we don't call close, the temp file will never replace the
+ # if we don't call rename, the temp file will never replace the
# real index
- fp.close()
+ fp.rename()
tr.replace(self.indexfile, trindex * self._io.size)
self._chunkclear()
@@ -979,35 +977,6 @@ class revlog(object):
dfh.close()
ifh.close()
- def compress(self, text):
- """ generate a possibly-compressed representation of text """
- if not text:
- return ("", text)
- l = len(text)
- bin = None
- if l < 44:
- pass
- elif l > 1000000:
- # zlib makes an internal copy, thus doubling memory usage for
- # large files, so lets do this in pieces
- z = zlib.compressobj()
- p = []
- pos = 0
- while pos < l:
- pos2 = pos + 2**20
- p.append(z.compress(text[pos:pos2]))
- pos = pos2
- p.append(z.flush())
- if sum(map(len, p)) < l:
- bin = "".join(p)
- else:
- bin = _compress(text)
- if bin is None or len(bin) > l:
- if text[0] == '\0':
- return ("", text)
- return ('u', text)
- return ("", bin)
-
def _addrevision(self, node, text, transaction, link, p1, p2,
cachedelta, ifh, dfh):
"""internal function to add revisions to the log
@@ -1040,7 +1009,7 @@ class revlog(object):
t = buildtext()
ptext = self.revision(self.node(rev))
delta = mdiff.textdiff(ptext, t)
- data = self.compress(delta)
+ data = compress(delta)
l = len(data[1]) + len(data[0])
if basecache[0] == rev:
chainbase = basecache[1]
@@ -1084,7 +1053,7 @@ class revlog(object):
textlen = len(text)
if d is None or dist > textlen * 2:
text = buildtext()
- data = self.compress(text)
+ data = compress(text)
l = len(data[1]) + len(data[0])
base = chainbase = curr
@@ -1163,7 +1132,6 @@ class revlog(object):
"""
# track the base of the current delta log
- content = []
node = None
r = len(self)
@@ -1194,8 +1162,6 @@ class revlog(object):
deltabase = chunkdata['deltabase']
delta = chunkdata['delta']
- content.append(node)
-
link = linkmapper(cs)
if node in self.nodemap:
# this can happen if two branches make the same change
@@ -1203,7 +1169,7 @@ class revlog(object):
continue
for p in (p1, p2):
- if p not in self.nodemap:
+ if not p in self.nodemap:
raise LookupError(p, self.indexfile,
_('unknown parent'))
@@ -1225,7 +1191,7 @@ class revlog(object):
dfh.close()
ifh.close()
- return content
+ return node
def strip(self, minlink, transaction):
"""truncate the revlog on the first revision with a linkrev >= minlink
@@ -1239,7 +1205,7 @@ class revlog(object):
So we truncate the revlog on the first of these revisions, and
trust that the caller has saved the revisions that shouldn't be
- removed and that it'll re-add them after this truncation.
+ removed and that it'll readd them after this truncation.
"""
if len(self) == 0:
return
diff --git a/mercurial/revset.py b/mercurial/revset.py
index a7e9d07..cb089d7 100644
--- a/mercurial/revset.py
+++ b/mercurial/revset.py
@@ -6,76 +6,10 @@
# GNU General Public License version 2 or any later version.
import re
-import parser, util, error, discovery, hbisect, phases
-import node
+import parser, util, error, discovery, hbisect
import bookmarks as bookmarksmod
import match as matchmod
from i18n import _
-import encoding
-
-def _revancestors(repo, revs, followfirst):
- """Like revlog.ancestors(), but supports followfirst."""
- cut = followfirst and 1 or None
- cl = repo.changelog
- visit = util.deque(revs)
- seen = set([node.nullrev])
- while visit:
- for parent in cl.parentrevs(visit.popleft())[:cut]:
- if parent not in seen:
- visit.append(parent)
- seen.add(parent)
- yield parent
-
-def _revdescendants(repo, revs, followfirst):
- """Like revlog.descendants() but supports followfirst."""
- cut = followfirst and 1 or None
- cl = repo.changelog
- first = min(revs)
- nullrev = node.nullrev
- if first == nullrev:
- # Are there nodes with a null first parent and a non-null
- # second one? Maybe. Do we care? Probably not.
- for i in cl:
- yield i
- return
-
- seen = set(revs)
- for i in xrange(first + 1, len(cl)):
- for x in cl.parentrevs(i)[:cut]:
- if x != nullrev and x in seen:
- seen.add(i)
- yield i
- break
-
-def _revsbetween(repo, roots, heads):
- """Return all paths between roots and heads, inclusive of both endpoint
- sets."""
- if not roots:
- return []
- parentrevs = repo.changelog.parentrevs
- visit = heads[:]
- reachable = set()
- seen = {}
- minroot = min(roots)
- roots = set(roots)
- # open-code the post-order traversal due to the tiny size of
- # sys.getrecursionlimit()
- while visit:
- rev = visit.pop()
- if rev in roots:
- reachable.add(rev)
- parents = parentrevs(rev)
- seen[rev] = parents
- for parent in parents:
- if parent >= minroot and parent not in seen:
- visit.append(parent)
- if not reachable:
- return []
- for rev in sorted(seen):
- for parent in seen[rev]:
- if parent in reachable:
- reachable.add(rev)
- return sorted(reachable)
elements = {
"(": (20, ("group", 1, ")"), ("func", 1, ")")),
@@ -138,13 +72,12 @@ def tokenize(program):
pos += 1
else:
raise error.ParseError(_("unterminated string"), s)
- # gather up a symbol/keyword
- elif c.isalnum() or c in '._' or ord(c) > 127:
+ elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword
s = pos
pos += 1
while pos < l: # find end of symbol
d = program[pos]
- if not (d.isalnum() or d in "._/" or ord(d) > 127):
+ if not (d.isalnum() or d in "._" or ord(d) > 127):
break
if d == '.' and program[pos - 1] == '.': # special case for ..
pos -= 1
@@ -177,7 +110,7 @@ def getlist(x):
def getargs(x, min, max, err):
l = getlist(x)
- if len(l) < min or (max >= 0 and len(l) > max):
+ if len(l) < min or len(l) > max:
raise error.ParseError(err)
return l
@@ -186,16 +119,6 @@ def getset(repo, subset, x):
raise error.ParseError(_("missing argument"))
return methods[x[0]](repo, subset, *x[1:])
-def _getrevsource(repo, r):
- extra = repo[r].extra()
- for label in ('source', 'transplant_source', 'rebase_source'):
- if label in extra:
- try:
- return repo[extra[label]].rev()
- except error.RepoLookupError:
- pass
- return None
-
# operator methods
def stringset(repo, subset, x):
@@ -231,14 +154,6 @@ def rangeset(repo, subset, x, y):
s = set(subset)
return [x for x in r if x in s]
-def dagrange(repo, subset, x, y):
- if subset:
- r = range(len(repo))
- xs = _revsbetween(repo, getset(repo, r, x), getset(repo, r, y))
- s = set(subset)
- return [r for r in xs if r in s]
- return []
-
def andset(repo, subset, x, y):
return getset(repo, getset(repo, subset, x), y)
@@ -286,28 +201,19 @@ def ancestor(repo, subset, x):
return [r for r in an if r in subset]
-def _ancestors(repo, subset, x, followfirst=False):
- args = getset(repo, range(len(repo)), x)
- if not args:
- return []
- s = set(_revancestors(repo, args, followfirst)) | set(args)
- return [r for r in subset if r in s]
-
def ancestors(repo, subset, x):
"""``ancestors(set)``
Changesets that are ancestors of a changeset in set.
"""
- return _ancestors(repo, subset, x)
-
-def _firstancestors(repo, subset, x):
- # ``_firstancestors(set)``
- # Like ``ancestors(set)`` but follows only the first parents.
- return _ancestors(repo, subset, x, followfirst=True)
+ args = getset(repo, range(len(repo)), x)
+ if not args:
+ return []
+ s = set(repo.changelog.ancestors(*args)) | set(args)
+ return [r for r in subset if r in s]
def ancestorspec(repo, subset, x, n):
"""``set~n``
- Changesets that are the Nth ancestor (first parents only) of a changeset
- in set.
+ Changesets that are the Nth ancestor (first parents only) of a changeset in set.
"""
try:
n = int(n[1])
@@ -326,39 +232,22 @@ def author(repo, subset, x):
Alias for ``user(string)``.
"""
# i18n: "author" is a keyword
- n = encoding.lower(getstring(x, _("author requires a string")))
- kind, pattern, matcher = _substringmatcher(n)
- return [r for r in subset if matcher(encoding.lower(repo[r].user()))]
-
-def bisect(repo, subset, x):
- """``bisect(string)``
- Changesets marked in the specified bisect status:
-
- - ``good``, ``bad``, ``skip``: csets explicitly marked as good/bad/skip
- - ``goods``, ``bads`` : csets topologicaly good/bad
- - ``range`` : csets taking part in the bisection
- - ``pruned`` : csets that are goods, bads or skipped
- - ``untested`` : csets whose fate is yet unknown
- - ``ignored`` : csets ignored due to DAG topology
- - ``current`` : the cset currently being bisected
- """
- # i18n: "bisect" is a keyword
- status = getstring(x, _("bisect requires a string")).lower()
- state = set(hbisect.get(repo, status))
- return [r for r in subset if r in state]
+ n = getstring(x, _("author requires a string")).lower()
+ return [r for r in subset if n in repo[r].user().lower()]
-# Backward-compatibility
-# - no help entry so that we do not advertise it any more
def bisected(repo, subset, x):
- return bisect(repo, subset, x)
+ """``bisected(string)``
+ Changesets marked in the specified bisect state (good, bad, skip).
+ """
+ state = getstring(x, _("bisect requires a string")).lower()
+ if state not in ('good', 'bad', 'skip', 'unknown'):
+ raise error.ParseError(_('invalid bisect state'))
+ marked = set(repo.changelog.rev(n) for n in hbisect.load_state(repo)[state])
+ return [r for r in subset if r in marked]
def bookmark(repo, subset, x):
"""``bookmark([name])``
The named bookmark or all bookmarks.
-
- If `name` starts with `re:`, the remainder of the name is treated as
- a regular expression. To match a bookmark that actually starts with `re:`,
- use the prefix `literal:`.
"""
# i18n: "bookmark" is a keyword
args = getargs(x, 0, 1, _('bookmark takes one or no arguments'))
@@ -366,26 +255,11 @@ def bookmark(repo, subset, x):
bm = getstring(args[0],
# i18n: "bookmark" is a keyword
_('the argument to bookmark must be a string'))
- kind, pattern, matcher = _stringmatcher(bm)
- if kind == 'literal':
- bmrev = bookmarksmod.listbookmarks(repo).get(bm, None)
- if not bmrev:
- raise util.Abort(_("bookmark '%s' does not exist") % bm)
- bmrev = repo[bmrev].rev()
- return [r for r in subset if r == bmrev]
- else:
- matchrevs = set()
- for name, bmrev in bookmarksmod.listbookmarks(repo).iteritems():
- if matcher(name):
- matchrevs.add(bmrev)
- if not matchrevs:
- raise util.Abort(_("no bookmarks exist that match '%s'")
- % pattern)
- bmrevs = set()
- for bmrev in matchrevs:
- bmrevs.add(repo[bmrev].rev())
- return [r for r in subset if r in bmrevs]
-
+ bmrev = bookmarksmod.listbookmarks(repo).get(bm, None)
+ if not bmrev:
+ raise util.Abort(_("bookmark '%s' does not exist") % bm)
+ bmrev = repo[bmrev].rev()
+ return [r for r in subset if r == bmrev]
bms = set([repo[r].rev()
for r in bookmarksmod.listbookmarks(repo).values()])
return [r for r in subset if r in bms]
@@ -394,25 +268,14 @@ def branch(repo, subset, x):
"""``branch(string or set)``
All changesets belonging to the given branch or the branches of the given
changesets.
-
- If `string` starts with `re:`, the remainder of the name is treated as
- a regular expression. To match a branch that actually starts with `re:`,
- use the prefix `literal:`.
"""
try:
b = getstring(x, '')
+ if b in repo.branchmap():
+ return [r for r in subset if repo[r].branch() == b]
except error.ParseError:
# not a string, but another revspec, e.g. tip()
pass
- else:
- kind, pattern, matcher = _stringmatcher(b)
- if kind == 'literal':
- # note: falls through to the revspec case if no branch with
- # this name exists
- if pattern in repo.branchmap():
- return [r for r in subset if matcher(repo[r].branch())]
- else:
- return [r for r in subset if matcher(repo[r].branch())]
s = getset(repo, range(len(repo)), x)
b = set()
@@ -422,18 +285,13 @@ def branch(repo, subset, x):
return [r for r in subset if r in s or repo[r].branch() in b]
def checkstatus(repo, subset, pat, field):
- m = None
+ m = matchmod.match(repo.root, repo.getcwd(), [pat])
s = []
- hasset = matchmod.patkind(pat) == 'set'
- fname = None
+ fast = (m.files() == [pat])
for r in subset:
c = repo[r]
- if not m or hasset:
- m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
- if not m.anypats() and len(m.files()) == 1:
- fname = m.files()[0]
- if fname is not None:
- if fname not in c.files():
+ if fast:
+ if pat not in c.files():
continue
else:
for f in c.files():
@@ -442,8 +300,8 @@ def checkstatus(repo, subset, pat, field):
else:
continue
files = repo.status(c.p1().node(), c.node())[field]
- if fname is not None:
- if fname in files:
+ if fast:
+ if pat in files:
s.append(r)
else:
for f in files:
@@ -452,21 +310,17 @@ def checkstatus(repo, subset, pat, field):
break
return s
-def _children(repo, narrow, parentset):
- cs = set()
- pr = repo.changelog.parentrevs
- for r in narrow:
- for p in pr(r):
- if p in parentset:
- cs.add(r)
- return cs
-
def children(repo, subset, x):
"""``children(set)``
Child changesets of changesets in set.
"""
+ cs = set()
+ cl = repo.changelog
s = set(getset(repo, range(len(repo)), x))
- cs = _children(repo, subset, s)
+ for r in xrange(0, len(repo)):
+ for p in cl.parentrevs(r):
+ if p in s:
+ cs.add(r)
return [r for r in subset if r in cs]
def closed(repo, subset, x):
@@ -475,7 +329,7 @@ def closed(repo, subset, x):
"""
# i18n: "closed" is a keyword
getargs(x, 0, 0, _("closed takes no arguments"))
- return [r for r in subset if repo[r].closesbranch()]
+ return [r for r in subset if repo[r].extra().get('close')]
def contains(repo, subset, x):
"""``contains(pattern)``
@@ -484,45 +338,20 @@ def contains(repo, subset, x):
"""
# i18n: "contains" is a keyword
pat = getstring(x, _("contains requires a pattern"))
- m = None
+ m = matchmod.match(repo.root, repo.getcwd(), [pat])
s = []
- if not matchmod.patkind(pat):
+ if m.files() == [pat]:
for r in subset:
if pat in repo[r]:
s.append(r)
else:
for r in subset:
- c = repo[r]
- if not m or matchmod.patkind(pat) == 'set':
- m = matchmod.match(repo.root, repo.getcwd(), [pat], ctx=c)
- for f in c.manifest():
+ for f in repo[r].manifest():
if m(f):
s.append(r)
break
return s
-def converted(repo, subset, x):
- """``converted([id])``
- Changesets converted from the given identifier in the old repository if
- present, or all converted changesets if no identifier is specified.
- """
-
- # There is exactly no chance of resolving the revision, so do a simple
- # string compare and hope for the best
-
- rev = None
- # i18n: "converted" is a keyword
- l = getargs(x, 0, 1, _('converted takes one or no arguments'))
- if l:
- # i18n: "converted" is a keyword
- rev = getstring(l[0], _('converted requires a revision'))
-
- def _matchvalue(r):
- source = repo[r].extra().get('convert_revision', None)
- return source is not None and (rev is None or source.startswith(rev))
-
- return [r for r in subset if _matchvalue(r)]
-
def date(repo, subset, x):
"""``date(interval)``
Changesets within the interval, see :hg:`help dates`.
@@ -537,136 +366,34 @@ def desc(repo, subset, x):
Search commit message for string. The match is case-insensitive.
"""
# i18n: "desc" is a keyword
- ds = encoding.lower(getstring(x, _("desc requires a string")))
+ ds = getstring(x, _("desc requires a string")).lower()
l = []
for r in subset:
c = repo[r]
- if ds in encoding.lower(c.description()):
+ if ds in c.description().lower():
l.append(r)
return l
-def _descendants(repo, subset, x, followfirst=False):
- args = getset(repo, range(len(repo)), x)
- if not args:
- return []
- s = set(_revdescendants(repo, args, followfirst)) | set(args)
- return [r for r in subset if r in s]
-
def descendants(repo, subset, x):
"""``descendants(set)``
Changesets which are descendants of changesets in set.
"""
- return _descendants(repo, subset, x)
-
-def _firstdescendants(repo, subset, x):
- # ``_firstdescendants(set)``
- # Like ``descendants(set)`` but follows only the first parents.
- return _descendants(repo, subset, x, followfirst=True)
-
-def destination(repo, subset, x):
- """``destination([set])``
- Changesets that were created by a graft, transplant or rebase operation,
- with the given revisions specified as the source. Omitting the optional set
- is the same as passing all().
- """
- if x is not None:
- args = set(getset(repo, range(len(repo)), x))
- else:
- args = set(getall(repo, range(len(repo)), x))
-
- dests = set()
-
- # subset contains all of the possible destinations that can be returned, so
- # iterate over them and see if their source(s) were provided in the args.
- # Even if the immediate src of r is not in the args, src's source (or
- # further back) may be. Scanning back further than the immediate src allows
- # transitive transplants and rebases to yield the same results as transitive
- # grafts.
- for r in subset:
- src = _getrevsource(repo, r)
- lineage = None
-
- while src is not None:
- if lineage is None:
- lineage = list()
-
- lineage.append(r)
-
- # The visited lineage is a match if the current source is in the arg
- # set. Since every candidate dest is visited by way of iterating
- # subset, any dests futher back in the lineage will be tested by a
- # different iteration over subset. Likewise, if the src was already
- # selected, the current lineage can be selected without going back
- # further.
- if src in args or src in dests:
- dests.update(lineage)
- break
-
- r = src
- src = _getrevsource(repo, r)
-
- return [r for r in subset if r in dests]
-
-def draft(repo, subset, x):
- """``draft()``
- Changeset in draft phase."""
- # i18n: "draft" is a keyword
- getargs(x, 0, 0, _("draft takes no arguments"))
- pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.draft]
-
-def extinct(repo, subset, x):
- """``extinct()``
- Obsolete changesets with obsolete descendants only.
- """
- # i18n: "extinct" is a keyword
- getargs(x, 0, 0, _("extinct takes no arguments"))
- extinctset = set(repo.revs('(obsolete()::) - (::(not obsolete()))'))
- return [r for r in subset if r in extinctset]
-
-def extra(repo, subset, x):
- """``extra(label, [value])``
- Changesets with the given label in the extra metadata, with the given
- optional value.
-
- If `value` starts with `re:`, the remainder of the value is treated as
- a regular expression. To match a value that actually starts with `re:`,
- use the prefix `literal:`.
- """
-
- # i18n: "extra" is a keyword
- l = getargs(x, 1, 2, _('extra takes at least 1 and at most 2 arguments'))
- # i18n: "extra" is a keyword
- label = getstring(l[0], _('first argument to extra must be a string'))
- value = None
-
- if len(l) > 1:
- # i18n: "extra" is a keyword
- value = getstring(l[1], _('second argument to extra must be a string'))
- kind, value, matcher = _stringmatcher(value)
-
- def _matchvalue(r):
- extra = repo[r].extra()
- return label in extra and (value is None or matcher(extra[label]))
-
- return [r for r in subset if _matchvalue(r)]
+ args = getset(repo, range(len(repo)), x)
+ if not args:
+ return []
+ s = set(repo.changelog.descendants(*args)) | set(args)
+ return [r for r in subset if r in s]
def filelog(repo, subset, x):
"""``filelog(pattern)``
Changesets connected to the specified filelog.
-
- For performance reasons, ``filelog()`` does not show every changeset
- that affects the requested file(s). See :hg:`help log` for details. For
- a slower, more accurate result, use ``file()``.
"""
- # i18n: "filelog" is a keyword
pat = getstring(x, _("filelog requires a pattern"))
- m = matchmod.match(repo.root, repo.getcwd(), [pat], default='relpath',
- ctx=repo[None])
+ m = matchmod.match(repo.root, repo.getcwd(), [pat], default='relpath')
s = set()
- if not matchmod.patkind(pat):
+ if not m.anypats():
for f in m.files():
fl = repo.file(f)
for fr in fl:
@@ -680,42 +407,33 @@ def filelog(repo, subset, x):
return [r for r in subset if r in s]
-def first(repo, subset, x):
- """``first(set, [n])``
- An alias for limit().
+def follow(repo, subset, x):
+ """``follow([file])``
+ An alias for ``::.`` (ancestors of the working copy's first parent).
+ If a filename is specified, the history of the given file is followed,
+ including copies.
"""
- return limit(repo, subset, x)
-
-def _follow(repo, subset, x, name, followfirst=False):
- l = getargs(x, 0, 1, _("%s takes no arguments or a filename") % name)
- c = repo['.']
+ # i18n: "follow" is a keyword
+ l = getargs(x, 0, 1, _("follow takes no arguments or a filename"))
+ p = repo['.'].rev()
if l:
- x = getstring(l[0], _("%s expected a filename") % name)
- if x in c:
- cx = c[x]
- s = set(ctx.rev() for ctx in cx.ancestors(followfirst=followfirst))
- # include the revision responsible for the most recent version
- s.add(cx.linkrev())
- else:
- return []
+ x = getstring(l[0], _("follow expected a filename"))
+ s = set(ctx.rev() for ctx in repo['.'][x].ancestors())
else:
- s = set(_revancestors(repo, [c.rev()], followfirst)) | set([c.rev()])
+ s = set(repo.changelog.ancestors(p))
+ s |= set([p])
return [r for r in subset if r in s]
-def follow(repo, subset, x):
- """``follow([file])``
+def followfile(repo, subset, x):
+ """``follow()``
An alias for ``::.`` (ancestors of the working copy's first parent).
- If a filename is specified, the history of the given file is followed,
- including copies.
"""
- return _follow(repo, subset, x, 'follow')
-
-def _followfirst(repo, subset, x):
- # ``followfirst([file])``
- # Like ``follow([file])`` but follows only the first parent of
- # every revision or file revision.
- return _follow(repo, subset, x, '_followfirst', followfirst=True)
+ # i18n: "follow" is a keyword
+ getargs(x, 0, 0, _("follow takes no arguments"))
+ p = repo['.'].rev()
+ s = set(repo.changelog.ancestors(p)) | set([p])
+ return [r for r in subset if r in s]
def getall(repo, subset, x):
"""``all()``
@@ -745,79 +463,20 @@ def grep(repo, subset, x):
break
return l
-def _matchfiles(repo, subset, x):
- # _matchfiles takes a revset list of prefixed arguments:
- #
- # [p:foo, i:bar, x:baz]
- #
- # builds a match object from them and filters subset. Allowed
- # prefixes are 'p:' for regular patterns, 'i:' for include
- # patterns and 'x:' for exclude patterns. Use 'r:' prefix to pass
- # a revision identifier, or the empty string to reference the
- # working directory, from which the match object is
- # initialized. Use 'd:' to set the default matching mode, default
- # to 'glob'. At most one 'r:' and 'd:' argument can be passed.
-
- # i18n: "_matchfiles" is a keyword
- l = getargs(x, 1, -1, _("_matchfiles requires at least one argument"))
- pats, inc, exc = [], [], []
- hasset = False
- rev, default = None, None
- for arg in l:
- # i18n: "_matchfiles" is a keyword
- s = getstring(arg, _("_matchfiles requires string arguments"))
- prefix, value = s[:2], s[2:]
- if prefix == 'p:':
- pats.append(value)
- elif prefix == 'i:':
- inc.append(value)
- elif prefix == 'x:':
- exc.append(value)
- elif prefix == 'r:':
- if rev is not None:
- # i18n: "_matchfiles" is a keyword
- raise error.ParseError(_('_matchfiles expected at most one '
- 'revision'))
- rev = value
- elif prefix == 'd:':
- if default is not None:
- # i18n: "_matchfiles" is a keyword
- raise error.ParseError(_('_matchfiles expected at most one '
- 'default mode'))
- default = value
- else:
- # i18n: "_matchfiles" is a keyword
- raise error.ParseError(_('invalid _matchfiles prefix: %s') % prefix)
- if not hasset and matchmod.patkind(value) == 'set':
- hasset = True
- if not default:
- default = 'glob'
- m = None
- s = []
- for r in subset:
- c = repo[r]
- if not m or (hasset and rev is None):
- ctx = c
- if rev is not None:
- ctx = repo[rev or None]
- m = matchmod.match(repo.root, repo.getcwd(), pats, include=inc,
- exclude=exc, ctx=ctx, default=default)
- for f in c.files():
- if m(f):
- s.append(r)
- break
- return s
-
def hasfile(repo, subset, x):
"""``file(pattern)``
Changesets affecting files matched by pattern.
-
- For a faster but less accurate result, consider using ``filelog()``
- instead.
"""
# i18n: "file" is a keyword
pat = getstring(x, _("file requires a pattern"))
- return _matchfiles(repo, subset, ('string', 'p:' + pat))
+ m = matchmod.match(repo.root, repo.getcwd(), [pat])
+ s = []
+ for r in subset:
+ for f in repo[r].files():
+ if m(f):
+ s.append(r)
+ break
+ return s
def head(repo, subset, x):
"""``head()``
@@ -844,26 +503,24 @@ def keyword(repo, subset, x):
string. The match is case-insensitive.
"""
# i18n: "keyword" is a keyword
- kw = encoding.lower(getstring(x, _("keyword requires a string")))
+ kw = getstring(x, _("keyword requires a string")).lower()
l = []
for r in subset:
c = repo[r]
t = " ".join(c.files() + [c.user(), c.description()])
- if kw in encoding.lower(t):
+ if kw in t.lower():
l.append(r)
return l
def limit(repo, subset, x):
- """``limit(set, [n])``
- First n members of set, defaulting to 1.
+ """``limit(set, n)``
+ First n members of set.
"""
# i18n: "limit" is a keyword
- l = getargs(x, 1, 2, _("limit requires one or two arguments"))
+ l = getargs(x, 2, 2, _("limit requires two arguments"))
try:
- lim = 1
- if len(l) == 2:
- # i18n: "limit" is a keyword
- lim = int(getstring(l[1], _("limit requires a number")))
+ # i18n: "limit" is a keyword
+ lim = int(getstring(l[1], _("limit requires a number")))
except (TypeError, ValueError):
# i18n: "limit" is a keyword
raise error.ParseError(_("limit expects a number"))
@@ -872,16 +529,14 @@ def limit(repo, subset, x):
return [r for r in os if r in ss]
def last(repo, subset, x):
- """``last(set, [n])``
- Last n members of set, defaulting to 1.
+ """``last(set, n)``
+ Last n members of set.
"""
# i18n: "last" is a keyword
- l = getargs(x, 1, 2, _("last requires one or two arguments"))
+ l = getargs(x, 2, 2, _("last requires two arguments"))
try:
- lim = 1
- if len(l) == 2:
- # i18n: "last" is a keyword
- lim = int(getstring(l[1], _("last requires a number")))
+ # i18n: "last" is a keyword
+ lim = int(getstring(l[1], _("last requires a number")))
except (TypeError, ValueError):
# i18n: "last" is a keyword
raise error.ParseError(_("last expects a number"))
@@ -928,7 +583,7 @@ def modifies(repo, subset, x):
pat = getstring(x, _("modifies requires a pattern"))
return checkstatus(repo, subset, pat, 0)
-def node_(repo, subset, x):
+def node(repo, subset, x):
"""``id(string)``
Revision non-ambiguously specified by the given hex string prefix.
"""
@@ -939,48 +594,9 @@ def node_(repo, subset, x):
if len(n) == 40:
rn = repo[n].rev()
else:
- rn = None
- pm = repo.changelog._partialmatch(n)
- if pm is not None:
- rn = repo.changelog.rev(pm)
-
+ rn = repo.changelog.rev(repo.changelog._partialmatch(n))
return [r for r in subset if r == rn]
-def obsolete(repo, subset, x):
- """``obsolete()``
- Mutable changeset with a newer version."""
- # i18n: "obsolete" is a keyword
- getargs(x, 0, 0, _("obsolete takes no arguments"))
- return [r for r in subset if repo[r].obsolete()]
-
-def origin(repo, subset, x):
- """``origin([set])``
- Changesets that were specified as a source for the grafts, transplants or
- rebases that created the given revisions. Omitting the optional set is the
- same as passing all(). If a changeset created by these operations is itself
- specified as a source for one of these operations, only the source changeset
- for the first operation is selected.
- """
- if x is not None:
- args = set(getset(repo, range(len(repo)), x))
- else:
- args = set(getall(repo, range(len(repo)), x))
-
- def _firstsrc(rev):
- src = _getrevsource(repo, rev)
- if src is None:
- return None
-
- while True:
- prev = _getrevsource(repo, src)
-
- if prev is None:
- return src
- src = prev
-
- o = set([_firstsrc(r) for r in args])
- return [r for r in subset if r in o]
-
def outgoing(repo, subset, x):
"""``outgoing([path])``
Changesets not found in the specified destination repository, or the
@@ -998,10 +614,10 @@ def outgoing(repo, subset, x):
revs = [repo.lookup(rev) for rev in revs]
other = hg.peer(repo, {}, dest)
repo.ui.pushbuffer()
- outgoing = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
+ common, outheads = discovery.findcommonoutgoing(repo, other, onlyheads=revs)
repo.ui.popbuffer()
cl = repo.changelog
- o = set([cl.rev(r) for r in outgoing.missing])
+ o = set([cl.rev(r) for r in repo.changelog.findmissing(common, outheads)])
return [r for r in subset if r in o]
def p1(repo, subset, x):
@@ -1079,59 +695,12 @@ def present(repo, subset, x):
"""``present(set)``
An empty set, if any revision in set isn't found; otherwise,
all revisions in set.
-
- If any of specified revisions is not present in the local repository,
- the query is normally aborted. But this predicate allows the query
- to continue even in such cases.
"""
try:
return getset(repo, subset, x)
except error.RepoLookupError:
return []
-def public(repo, subset, x):
- """``public()``
- Changeset in public phase."""
- # i18n: "public" is a keyword
- getargs(x, 0, 0, _("public takes no arguments"))
- pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.public]
-
-def remote(repo, subset, x):
- """``remote([id [,path]])``
- Local revision that corresponds to the given identifier in a
- remote repository, if present. Here, the '.' identifier is a
- synonym for the current local branch.
- """
-
- import hg # avoid start-up nasties
- # i18n: "remote" is a keyword
- l = getargs(x, 0, 2, _("remote takes one, two or no arguments"))
-
- q = '.'
- if len(l) > 0:
- # i18n: "remote" is a keyword
- q = getstring(l[0], _("remote requires a string id"))
- if q == '.':
- q = repo['.'].branch()
-
- dest = ''
- if len(l) > 1:
- # i18n: "remote" is a keyword
- dest = getstring(l[1], _("remote requires a repository path"))
- dest = repo.ui.expandpath(dest or 'default')
- dest, branches = hg.parseurl(dest)
- revs, checkout = hg.addbranchrevs(repo, repo, branches, [])
- if revs:
- revs = [repo.lookup(rev) for rev in revs]
- other = hg.peer(repo, {}, dest)
- n = other.lookup(q)
- if n in repo:
- r = repo[n].rev()
- if r in subset:
- return [r]
- return []
-
def removes(repo, subset, x):
"""``removes(pattern)``
Changesets which remove files matching pattern.
@@ -1154,144 +723,21 @@ def rev(repo, subset, x):
raise error.ParseError(_("rev expects a number"))
return [r for r in subset if r == l]
-def matching(repo, subset, x):
- """``matching(revision [, field])``
- Changesets in which a given set of fields match the set of fields in the
- selected revision or set.
-
- To match more than one field pass the list of fields to match separated
- by spaces (e.g. ``author description``).
-
- Valid fields are most regular revision fields and some special fields.
-
- Regular revision fields are ``description``, ``author``, ``branch``,
- ``date``, ``files``, ``phase``, ``parents``, ``substate``, ``user``
- and ``diff``.
- Note that ``author`` and ``user`` are synonyms. ``diff`` refers to the
- contents of the revision. Two revisions matching their ``diff`` will
- also match their ``files``.
-
- Special fields are ``summary`` and ``metadata``:
- ``summary`` matches the first line of the description.
- ``metadata`` is equivalent to matching ``description user date``
- (i.e. it matches the main metadata fields).
-
- ``metadata`` is the default field which is used when no fields are
- specified. You can match more than one field at a time.
- """
- # i18n: "matching" is a keyword
- l = getargs(x, 1, 2, _("matching takes 1 or 2 arguments"))
-
- revs = getset(repo, xrange(len(repo)), l[0])
-
- fieldlist = ['metadata']
- if len(l) > 1:
- fieldlist = getstring(l[1],
- # i18n: "matching" is a keyword
- _("matching requires a string "
- "as its second argument")).split()
-
- # Make sure that there are no repeated fields,
- # expand the 'special' 'metadata' field type
- # and check the 'files' whenever we check the 'diff'
- fields = []
- for field in fieldlist:
- if field == 'metadata':
- fields += ['user', 'description', 'date']
- elif field == 'diff':
- # a revision matching the diff must also match the files
- # since matching the diff is very costly, make sure to
- # also match the files first
- fields += ['files', 'diff']
- else:
- if field == 'author':
- field = 'user'
- fields.append(field)
- fields = set(fields)
- if 'summary' in fields and 'description' in fields:
- # If a revision matches its description it also matches its summary
- fields.discard('summary')
-
- # We may want to match more than one field
- # Not all fields take the same amount of time to be matched
- # Sort the selected fields in order of increasing matching cost
- fieldorder = ['phase', 'parents', 'user', 'date', 'branch', 'summary',
- 'files', 'description', 'substate', 'diff']
- def fieldkeyfunc(f):
- try:
- return fieldorder.index(f)
- except ValueError:
- # assume an unknown field is very costly
- return len(fieldorder)
- fields = list(fields)
- fields.sort(key=fieldkeyfunc)
-
- # Each field will be matched with its own "getfield" function
- # which will be added to the getfieldfuncs array of functions
- getfieldfuncs = []
- _funcs = {
- 'user': lambda r: repo[r].user(),
- 'branch': lambda r: repo[r].branch(),
- 'date': lambda r: repo[r].date(),
- 'description': lambda r: repo[r].description(),
- 'files': lambda r: repo[r].files(),
- 'parents': lambda r: repo[r].parents(),
- 'phase': lambda r: repo[r].phase(),
- 'substate': lambda r: repo[r].substate,
- 'summary': lambda r: repo[r].description().splitlines()[0],
- 'diff': lambda r: list(repo[r].diff(git=True),)
- }
- for info in fields:
- getfield = _funcs.get(info, None)
- if getfield is None:
- raise error.ParseError(
- # i18n: "matching" is a keyword
- _("unexpected field name passed to matching: %s") % info)
- getfieldfuncs.append(getfield)
- # convert the getfield array of functions into a "getinfo" function
- # which returns an array of field values (or a single value if there
- # is only one field to match)
- getinfo = lambda r: [f(r) for f in getfieldfuncs]
-
- matches = set()
- for rev in revs:
- target = getinfo(rev)
- for r in subset:
- match = True
- for n, f in enumerate(getfieldfuncs):
- if target[n] != f(r):
- match = False
- break
- if match:
- matches.add(r)
- return [r for r in subset if r in matches]
-
def reverse(repo, subset, x):
"""``reverse(set)``
Reverse order of set.
"""
l = getset(repo, subset, x)
- if not isinstance(l, list):
- l = list(l)
l.reverse()
return l
def roots(repo, subset, x):
"""``roots(set)``
- Changesets in set with no parent changeset in set.
+ Changesets with no parent changeset in set.
"""
- s = set(getset(repo, xrange(len(repo)), x))
- subset = [r for r in subset if r in s]
- cs = _children(repo, subset, s)
- return [r for r in subset if r not in cs]
-
-def secret(repo, subset, x):
- """``secret()``
- Changeset in secret phase."""
- # i18n: "secret" is a keyword
- getargs(x, 0, 0, _("secret takes no arguments"))
- pc = repo._phasecache
- return [r for r in subset if pc.phase(repo, r) == phases.secret]
+ s = getset(repo, subset, x)
+ cs = set(children(repo, subset, x))
+ return [r for r in s if r not in cs]
def sort(repo, subset, x):
"""``sort(set[, [-]key...])``
@@ -1310,7 +756,6 @@ def sort(repo, subset, x):
l = getargs(x, 1, 2, _("sort requires one or two arguments"))
keys = "rev"
if len(l) == 2:
- # i18n: "sort" is a keyword
keys = getstring(l[1], _("sort spec must be a string"))
s = l[0]
@@ -1349,51 +794,6 @@ def sort(repo, subset, x):
l.sort()
return [e[-1] for e in l]
-def _stringmatcher(pattern):
- """
- accepts a string, possibly starting with 're:' or 'literal:' prefix.
- returns the matcher name, pattern, and matcher function.
- missing or unknown prefixes are treated as literal matches.
-
- helper for tests:
- >>> def test(pattern, *tests):
- ... kind, pattern, matcher = _stringmatcher(pattern)
- ... return (kind, pattern, [bool(matcher(t)) for t in tests])
-
- exact matching (no prefix):
- >>> test('abcdefg', 'abc', 'def', 'abcdefg')
- ('literal', 'abcdefg', [False, False, True])
-
- regex matching ('re:' prefix)
- >>> test('re:a.+b', 'nomatch', 'fooadef', 'fooadefbar')
- ('re', 'a.+b', [False, False, True])
-
- force exact matches ('literal:' prefix)
- >>> test('literal:re:foobar', 'foobar', 're:foobar')
- ('literal', 're:foobar', [False, True])
-
- unknown prefixes are ignored and treated as literals
- >>> test('foo:bar', 'foo', 'bar', 'foo:bar')
- ('literal', 'foo:bar', [False, False, True])
- """
- if pattern.startswith('re:'):
- pattern = pattern[3:]
- try:
- regex = re.compile(pattern)
- except re.error, e:
- raise error.ParseError(_('invalid regular expression: %s')
- % e)
- return 're', pattern, regex.search
- elif pattern.startswith('literal:'):
- pattern = pattern[8:]
- return 'literal', pattern, pattern.__eq__
-
-def _substringmatcher(pattern):
- kind, pattern, matcher = _stringmatcher(pattern)
- if kind == 'literal':
- matcher = lambda s: pattern in s
- return kind, pattern, matcher
-
def tag(repo, subset, x):
"""``tag([name])``
The specified tag by name, or all tagged revisions if no name is given.
@@ -1402,20 +802,12 @@ def tag(repo, subset, x):
args = getargs(x, 0, 1, _("tag takes one or no arguments"))
cl = repo.changelog
if args:
- pattern = getstring(args[0],
- # i18n: "tag" is a keyword
- _('the argument to tag must be a string'))
- kind, pattern, matcher = _stringmatcher(pattern)
- if kind == 'literal':
- # avoid resolving all tags
- tn = repo._tagscache.tags.get(pattern, None)
- if tn is None:
- raise util.Abort(_("tag '%s' does not exist") % pattern)
- s = set([repo[tn].rev()])
- else:
- s = set([cl.rev(n) for t, n in repo.tagslist() if matcher(t)])
- if not s:
- raise util.Abort(_("no tags exist that match '%s'") % pattern)
+ tn = getstring(args[0],
+ # i18n: "tag" is a keyword
+ _('the argument to tag must be a string'))
+ if not repo.tags().get(tn, None):
+ raise util.Abort(_("tag '%s' does not exist") % tn)
+ s = set([cl.rev(n) for t, n in repo.tagslist() if t == tn])
else:
s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip'])
return [r for r in subset if r in s]
@@ -1423,102 +815,58 @@ def tag(repo, subset, x):
def tagged(repo, subset, x):
return tag(repo, subset, x)
-def unstable(repo, subset, x):
- """``unstable()``
- Non-obsolete changesets with obsolete ancestors.
- """
- # i18n: "unstable" is a keyword
- getargs(x, 0, 0, _("unstable takes no arguments"))
- unstableset = set(repo.revs('(obsolete()::) - obsolete()'))
- return [r for r in subset if r in unstableset]
-
-
def user(repo, subset, x):
"""``user(string)``
User name contains string. The match is case-insensitive.
-
- If `string` starts with `re:`, the remainder of the string is treated as
- a regular expression. To match a user that actually contains `re:`, use
- the prefix `literal:`.
"""
return author(repo, subset, x)
-# for internal use
-def _list(repo, subset, x):
- s = getstring(x, "internal error")
- if not s:
- return []
- if not isinstance(subset, set):
- subset = set(subset)
- ls = [repo[r].rev() for r in s.split('\0')]
- return [r for r in ls if r in subset]
-
symbols = {
"adds": adds,
"all": getall,
"ancestor": ancestor,
"ancestors": ancestors,
- "_firstancestors": _firstancestors,
"author": author,
- "bisect": bisect,
"bisected": bisected,
"bookmark": bookmark,
"branch": branch,
"children": children,
"closed": closed,
"contains": contains,
- "converted": converted,
"date": date,
"desc": desc,
"descendants": descendants,
- "_firstdescendants": _firstdescendants,
- "destination": destination,
- "draft": draft,
- "extinct": extinct,
- "extra": extra,
"file": hasfile,
"filelog": filelog,
- "first": first,
"follow": follow,
- "_followfirst": _followfirst,
"grep": grep,
"head": head,
"heads": heads,
- "id": node_,
+ "id": node,
"keyword": keyword,
"last": last,
"limit": limit,
- "_matchfiles": _matchfiles,
"max": maxrev,
"merge": merge,
"min": minrev,
"modifies": modifies,
- "obsolete": obsolete,
- "origin": origin,
"outgoing": outgoing,
"p1": p1,
"p2": p2,
"parents": parents,
"present": present,
- "public": public,
- "remote": remote,
"removes": removes,
"rev": rev,
"reverse": reverse,
"roots": roots,
"sort": sort,
- "secret": secret,
- "matching": matching,
"tag": tag,
"tagged": tagged,
"user": user,
- "unstable": unstable,
- "_list": _list,
}
methods = {
"range": rangeset,
- "dagrange": dagrange,
"string": stringset,
"symbol": symbolset,
"and": andset,
@@ -1542,6 +890,9 @@ def optimize(x, small):
op = x[0]
if op == 'minus':
return optimize(('and', x[1], ('not', x[2])), small)
+ elif op == 'dagrange':
+ return optimize(('and', ('func', ('symbol', 'descendants'), x[1]),
+ ('func', ('symbol', 'ancestors'), x[2])), small)
elif op == 'dagrangepre':
return optimize(('func', ('symbol', 'ancestors'), x[1]), small)
elif op == 'dagrangepost':
@@ -1555,7 +906,7 @@ def optimize(x, small):
'-' + getstring(x[1], _("can't negate that"))), small)
elif op in 'string symbol negate':
return smallbonus, x # single revisions are small
- elif op == 'and':
+ elif op == 'and' or op == 'dagrange':
wa, ta = optimize(x[1], True)
wb, tb = optimize(x[2], True)
w = min(wa, wb)
@@ -1576,7 +927,7 @@ def optimize(x, small):
return o[0], (op, o[1])
elif op == 'group':
return optimize(x[1], small)
- elif op in 'dagrange range list parent ancestorspec':
+ elif op in 'range list parent ancestorspec':
if op == 'parent':
# x^:y means (x^) : y, not x ^ (:y)
post = ('parentpost', x[1])
@@ -1600,7 +951,7 @@ def optimize(x, small):
w = 100 # very slow
elif f == "ancestor":
w = 1 * smallbonus
- elif f in "reverse limit first":
+ elif f in "reverse limit":
w = 0
elif f in "sort":
w = 10 # assume most sorts look at changelog
@@ -1609,27 +960,6 @@ def optimize(x, small):
return w + wa, (op, x[1], ta)
return 1, x
-_aliasarg = ('func', ('symbol', '_aliasarg'))
-def _getaliasarg(tree):
- """If tree matches ('func', ('symbol', '_aliasarg'), ('string', X))
- return X, None otherwise.
- """
- if (len(tree) == 3 and tree[:2] == _aliasarg
- and tree[2][0] == 'string'):
- return tree[2][1]
- return None
-
-def _checkaliasarg(tree, known=None):
- """Check tree contains no _aliasarg construct or only ones which
- value is in known. Used to avoid alias placeholders injection.
- """
- if isinstance(tree, tuple):
- arg = _getaliasarg(tree)
- if arg is not None and (not known or arg not in known):
- raise error.ParseError(_("not a function: %s") % '_aliasarg')
- for t in tree:
- _checkaliasarg(t, known)
-
class revsetalias(object):
funcre = re.compile('^([^(]+)\(([^)]+)\)$')
args = None
@@ -1640,93 +970,46 @@ class revsetalias(object):
h = heads(default)
b($1) = ancestors($1) - ancestors(default)
'''
- m = self.funcre.search(name)
- if m:
- self.name = m.group(1)
- self.tree = ('func', ('symbol', m.group(1)))
- self.args = [x.strip() for x in m.group(2).split(',')]
- for arg in self.args:
- # _aliasarg() is an unknown symbol only used separate
- # alias argument placeholders from regular strings.
- value = value.replace(arg, '_aliasarg(%r)' % (arg,))
- else:
- self.name = name
- self.tree = ('symbol', name)
-
- self.replacement, pos = parse(value)
- if pos != len(value):
- raise error.ParseError(_('invalid token'), pos)
- # Check for placeholder injection
- _checkaliasarg(self.replacement, self.args)
-
-def _getalias(aliases, tree):
- """If tree looks like an unexpanded alias, return it. Return None
- otherwise.
- """
- if isinstance(tree, tuple) and tree:
- if tree[0] == 'symbol' and len(tree) == 2:
- name = tree[1]
- alias = aliases.get(name)
- if alias and alias.args is None and alias.tree == tree:
- return alias
- if tree[0] == 'func' and len(tree) > 1:
- if tree[1][0] == 'symbol' and len(tree[1]) == 2:
- name = tree[1][1]
- alias = aliases.get(name)
- if alias and alias.args is not None and alias.tree == tree[:2]:
- return alias
- return None
-
-def _expandargs(tree, args):
- """Replace _aliasarg instances with the substitution value of the
- same name in args, recursively.
- """
- if not tree or not isinstance(tree, tuple):
- return tree
- arg = _getaliasarg(tree)
- if arg is not None:
- return args[arg]
- return tuple(_expandargs(t, args) for t in tree)
-
-def _expandaliases(aliases, tree, expanding, cache):
- """Expand aliases in tree, recursively.
-
- 'aliases' is a dictionary mapping user defined aliases to
- revsetalias objects.
- """
- if not isinstance(tree, tuple):
- # Do not expand raw strings
+ if isinstance(name, tuple): # parameter substitution
+ self.tree = name
+ self.replacement = value
+ else: # alias definition
+ m = self.funcre.search(name)
+ if m:
+ self.tree = ('func', ('symbol', m.group(1)))
+ self.args = [x.strip() for x in m.group(2).split(',')]
+ for arg in self.args:
+ value = value.replace(arg, repr(arg))
+ else:
+ self.tree = ('symbol', name)
+
+ self.replacement, pos = parse(value)
+ if pos != len(value):
+ raise error.ParseError(_('invalid token'), pos)
+
+ def process(self, tree):
+ if isinstance(tree, tuple):
+ if self.args is None:
+ if tree == self.tree:
+ return self.replacement
+ elif tree[:2] == self.tree:
+ l = getlist(tree[2])
+ if len(l) != len(self.args):
+ raise error.ParseError(
+ _('invalid number of arguments: %s') % len(l))
+ result = self.replacement
+ for a, v in zip(self.args, l):
+ valalias = revsetalias(('string', a), v)
+ result = valalias.process(result)
+ return result
+ return tuple(map(self.process, tree))
return tree
- alias = _getalias(aliases, tree)
- if alias is not None:
- if alias in expanding:
- raise error.ParseError(_('infinite expansion of revset alias "%s" '
- 'detected') % alias.name)
- expanding.append(alias)
- if alias.name not in cache:
- cache[alias.name] = _expandaliases(aliases, alias.replacement,
- expanding, cache)
- result = cache[alias.name]
- expanding.pop()
- if alias.args is not None:
- l = getlist(tree[2])
- if len(l) != len(alias.args):
- raise error.ParseError(
- _('invalid number of arguments: %s') % len(l))
- l = [_expandaliases(aliases, a, [], cache) for a in l]
- result = _expandargs(result, dict(zip(alias.args, l)))
- else:
- result = tuple(_expandaliases(aliases, t, expanding, cache)
- for t in tree)
- return result
def findaliases(ui, tree):
- _checkaliasarg(tree)
- aliases = {}
for k, v in ui.configitems('revsetalias'):
alias = revsetalias(k, v)
- aliases[alias.name] = alias
- return _expandaliases(aliases, tree, [], {})
+ tree = alias.process(tree)
+ return tree
parse = parser.parser(tokenize, elements).parse
@@ -1736,121 +1019,11 @@ def match(ui, spec):
tree, pos = parse(spec)
if (pos != len(spec)):
raise error.ParseError(_("invalid token"), pos)
- if ui:
- tree = findaliases(ui, tree)
+ tree = findaliases(ui, tree)
weight, tree = optimize(tree, True)
def mfunc(repo, subset):
return getset(repo, subset, tree)
return mfunc
-def formatspec(expr, *args):
- '''
- This is a convenience function for using revsets internally, and
- escapes arguments appropriately. Aliases are intentionally ignored
- so that intended expression behavior isn't accidentally subverted.
-
- Supported arguments:
-
- %r = revset expression, parenthesized
- %d = int(arg), no quoting
- %s = string(arg), escaped and single-quoted
- %b = arg.branch(), escaped and single-quoted
- %n = hex(arg), single-quoted
- %% = a literal '%'
-
- Prefixing the type with 'l' specifies a parenthesized list of that type.
-
- >>> formatspec('%r:: and %lr', '10 or 11', ("this()", "that()"))
- '(10 or 11):: and ((this()) or (that()))'
- >>> formatspec('%d:: and not %d::', 10, 20)
- '10:: and not 20::'
- >>> formatspec('%ld or %ld', [], [1])
- "_list('') or 1"
- >>> formatspec('keyword(%s)', 'foo\\xe9')
- "keyword('foo\\\\xe9')"
- >>> b = lambda: 'default'
- >>> b.branch = b
- >>> formatspec('branch(%b)', b)
- "branch('default')"
- >>> formatspec('root(%ls)', ['a', 'b', 'c', 'd'])
- "root(_list('a\\x00b\\x00c\\x00d'))"
- '''
-
- def quote(s):
- return repr(str(s))
-
- def argtype(c, arg):
- if c == 'd':
- return str(int(arg))
- elif c == 's':
- return quote(arg)
- elif c == 'r':
- parse(arg) # make sure syntax errors are confined
- return '(%s)' % arg
- elif c == 'n':
- return quote(node.hex(arg))
- elif c == 'b':
- return quote(arg.branch())
-
- def listexp(s, t):
- l = len(s)
- if l == 0:
- return "_list('')"
- elif l == 1:
- return argtype(t, s[0])
- elif t == 'd':
- return "_list('%s')" % "\0".join(str(int(a)) for a in s)
- elif t == 's':
- return "_list('%s')" % "\0".join(s)
- elif t == 'n':
- return "_list('%s')" % "\0".join(node.hex(a) for a in s)
- elif t == 'b':
- return "_list('%s')" % "\0".join(a.branch() for a in s)
-
- m = l // 2
- return '(%s or %s)' % (listexp(s[:m], t), listexp(s[m:], t))
-
- ret = ''
- pos = 0
- arg = 0
- while pos < len(expr):
- c = expr[pos]
- if c == '%':
- pos += 1
- d = expr[pos]
- if d == '%':
- ret += d
- elif d in 'dsnbr':
- ret += argtype(d, args[arg])
- arg += 1
- elif d == 'l':
- # a list of some type
- pos += 1
- d = expr[pos]
- ret += listexp(list(args[arg]), d)
- arg += 1
- else:
- raise util.Abort('unexpected revspec format character %s' % d)
- else:
- ret += c
- pos += 1
-
- return ret
-
-def prettyformat(tree):
- def _prettyformat(tree, level, lines):
- if not isinstance(tree, tuple) or tree[0] in ('string', 'symbol'):
- lines.append((level, str(tree)))
- else:
- lines.append((level, '(%s' % tree[0]))
- for s in tree[1:]:
- _prettyformat(s, level + 1, lines)
- lines[-1:] = [(lines[-1][0], lines[-1][1] + ')')]
-
- lines = []
- _prettyformat(tree, 0, lines)
- output = '\n'.join((' '*l + s) for l, s in lines)
- return output
-
# tell hggettext to extract docstrings from these functions:
i18nfunctions = symbols.values()
diff --git a/mercurial/scmutil.py b/mercurial/scmutil.py
index 96acfff..fcb89e6 100644
--- a/mercurial/scmutil.py
+++ b/mercurial/scmutil.py
@@ -6,27 +6,10 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-import util, error, osutil, revset, similar, encoding, phases
+import util, error, osutil, revset, similar, encoding
import match as matchmod
import os, errno, re, stat, sys, glob
-def nochangesfound(ui, repo, excluded=None):
- '''Report no changes for push/pull, excluded is None or a list of
- nodes excluded from the push/pull.
- '''
- secretlist = []
- if excluded:
- for n in excluded:
- ctx = repo[n]
- if ctx.phase() >= phases.secret and not ctx.extinct():
- secretlist.append(n)
-
- if secretlist:
- ui.status(_("no changes found (ignored %d secret changesets)\n")
- % len(secretlist))
- else:
- ui.status(_("no changes found\n"))
-
def checkfilename(f):
'''Check that the filename f is an acceptable filename for a tracked file'''
if '\r' in f or '\n' in f:
@@ -58,27 +41,22 @@ def checkportabilityalert(ui):
return abort, warn
class casecollisionauditor(object):
- def __init__(self, ui, abort, dirstate):
+ def __init__(self, ui, abort, existingiter):
self._ui = ui
self._abort = abort
- allfiles = '\0'.join(dirstate._map)
- self._loweredfiles = set(encoding.lower(allfiles).split('\0'))
- self._dirstate = dirstate
- # The purpose of _newfiles is so that we don't complain about
- # case collisions if someone were to call this object with the
- # same filename twice.
- self._newfiles = set()
+ self._map = {}
+ for f in existingiter:
+ self._map[encoding.lower(f)] = f
def __call__(self, f):
fl = encoding.lower(f)
- if (fl in self._loweredfiles and f not in self._dirstate and
- f not in self._newfiles):
+ map = self._map
+ if fl in map and map[fl] != f:
msg = _('possible case-folding collision for %s') % f
if self._abort:
raise util.Abort(msg)
self._ui.warn(_("warning: %s\n") % msg)
- self._loweredfiles.add(fl)
- self._newfiles.add(f)
+ map[fl] = f
class pathauditor(object):
'''ensure that a filesystem path contains no banned components.
@@ -98,23 +76,18 @@ class pathauditor(object):
self.auditeddir = set()
self.root = root
self.callback = callback
- if os.path.lexists(root) and not util.checkcase(root):
- self.normcase = util.normcase
- else:
- self.normcase = lambda x: x
def __call__(self, path):
'''Check the relative path.
path may contain a pattern (e.g. foodir/**.txt)'''
- path = util.localpath(path)
- normpath = self.normcase(path)
- if normpath in self.audited:
+ if path in self.audited:
return
# AIX ignores "/" at end of path, others raise EISDIR.
if util.endswithsep(path):
raise util.Abort(_("path ends in directory separator: %s") % path)
- parts = util.splitpath(path)
+ normpath = os.path.normcase(path)
+ parts = util.splitpath(normpath)
if (os.path.splitdrive(path)[0]
or parts[0].lower() in ('.hg', '.hg.', '')
or os.pardir in parts):
@@ -125,19 +98,14 @@ class pathauditor(object):
if p in lparts[1:]:
pos = lparts.index(p)
base = os.path.join(*parts[:pos])
- raise util.Abort(_("path '%s' is inside nested repo %r")
+ raise util.Abort(_('path %r is inside nested repo %r')
% (path, base))
- normparts = util.splitpath(normpath)
- assert len(parts) == len(normparts)
-
parts.pop()
- normparts.pop()
prefixes = []
while parts:
prefix = os.sep.join(parts)
- normprefix = os.sep.join(normparts)
- if normprefix in self.auditeddir:
+ if prefix in self.auditeddir:
break
curpath = os.path.join(self.root, prefix)
try:
@@ -155,14 +123,12 @@ class pathauditor(object):
elif (stat.S_ISDIR(st.st_mode) and
os.path.isdir(os.path.join(curpath, '.hg'))):
if not self.callback or not self.callback(curpath):
- raise util.Abort(_("path '%s' is inside nested "
- "repo %r")
- % (path, prefix))
- prefixes.append(normprefix)
+ raise util.Abort(_('path %r is inside nested repo %r') %
+ (path, prefix))
+ prefixes.append(prefix)
parts.pop()
- normparts.pop()
- self.audited.add(normpath)
+ self.audited.add(path)
# only add prefixes to the cache after checking everything: we don't
# want to add "foo/bar/baz" before checking if there's a "foo/.hg"
self.auditeddir.update(prefixes)
@@ -174,15 +140,6 @@ class abstractopener(object):
'''Prevent instantiation; don't call this from subclasses.'''
raise NotImplementedError('attempted instantiating ' + str(type(self)))
- def tryread(self, path):
- '''gracefully return an empty string for missing files'''
- try:
- return self.read(path)
- except IOError, inst:
- if inst.errno != errno.ENOENT:
- raise
- return ""
-
def read(self, path):
fp = self(path, 'rb')
try:
@@ -204,30 +161,13 @@ class abstractopener(object):
finally:
fp.close()
- def mkdir(self, path=None):
- return os.mkdir(self.join(path))
-
- def exists(self, path=None):
- return os.path.exists(self.join(path))
-
- def isdir(self, path=None):
- return os.path.isdir(self.join(path))
-
- def makedir(self, path=None, notindexed=True):
- return util.makedir(self.join(path), notindexed)
-
- def makedirs(self, path=None, mode=None):
- return util.makedirs(self.join(path), mode)
-
class opener(abstractopener):
'''Open files relative to a base directory
This class is used to hide the details of COW semantics and
remote file access from higher level code.
'''
- def __init__(self, base, audit=True, expand=False):
- if expand:
- base = os.path.realpath(util.expandpath(base))
+ def __init__(self, base, audit=True):
self.base = base
self._audit = audit
if audit:
@@ -252,7 +192,7 @@ class opener(abstractopener):
if r:
raise util.Abort("%s: %r" % (r, path))
self.auditor(path)
- f = self.join(path)
+ f = os.path.join(self.base, path)
if not text and "b" not in mode:
mode += "b" # for that other OS
@@ -296,7 +236,7 @@ class opener(abstractopener):
def symlink(self, src, dst):
self.auditor(dst)
- linkname = self.join(dst)
+ linkname = os.path.join(self.base, dst)
try:
os.unlink(linkname)
except OSError:
@@ -321,12 +261,6 @@ class opener(abstractopener):
def audit(self, path):
self.auditor(path)
- def join(self, path):
- if path:
- return os.path.join(self.base, path)
- else:
- return self.base
-
class filteropener(abstractopener):
'''Wrapper opener for filtering filenames with a function.'''
@@ -358,16 +292,18 @@ def canonpath(root, cwd, myname, auditor=None):
else:
# Determine whether `name' is in the hierarchy at or beneath `root',
# by iterating name=dirname(name) until that causes no change (can't
- # check name == '/', because that doesn't work on windows). The list
- # `rel' holds the reversed list of components making up the relative
- # file name we want.
+ # check name == '/', because that doesn't work on windows). For each
+ # `name', compare dev/inode numbers. If they match, the list `rel'
+ # holds the reversed list of components making up the relative file
+ # name we want.
+ root_st = os.stat(root)
rel = []
while True:
try:
- s = util.samefile(name, root)
+ name_st = os.stat(name)
except OSError:
- s = False
- if s:
+ break
+ if util.samestat(name_st, root_st):
if not rel:
# name was actually the same as root (maybe a symlink)
return ''
@@ -384,15 +320,14 @@ def canonpath(root, cwd, myname, auditor=None):
raise util.Abort('%s not under root' % myname)
def walkrepos(path, followsym=False, seen_dirs=None, recurse=False):
- '''yield every hg repository under path, always recursively.
- The recurse flag will only control recursion into repo working dirs'''
+ '''yield every hg repository under path, recursively.'''
def errhandler(err):
if err.filename == path:
raise err
- samestat = getattr(os.path, 'samestat', None)
- if followsym and samestat is not None:
+ if followsym and hasattr(os.path, 'samestat'):
def adddir(dirlst, dirname):
match = False
+ samestat = os.path.samestat
dirstat = os.stat(dirname)
for lstdirstat in dirlst:
if samestat(dirstat, lstdirstat):
@@ -479,26 +414,19 @@ if os.name != 'nt':
def systemrcpath():
path = []
- if sys.platform == 'plan9':
- root = 'lib/mercurial'
- else:
- root = 'etc/mercurial'
# old mod_python does not set sys.argv
if len(getattr(sys, 'argv', [])) > 0:
p = os.path.dirname(os.path.dirname(sys.argv[0]))
- path.extend(rcfiles(os.path.join(p, root)))
- path.extend(rcfiles('/' + root))
+ path.extend(rcfiles(os.path.join(p, 'etc/mercurial')))
+ path.extend(rcfiles('/etc/mercurial'))
return path
def userrcpath():
- if sys.platform == 'plan9':
- return [os.environ['home'] + '/lib/hgrc']
- else:
- return [os.path.expanduser('~/.hgrc')]
+ return [os.path.expanduser('~/.hgrc')]
else:
- import _winreg
+ _HKEY_LOCAL_MACHINE = 0x80000002L
def systemrcpath():
'''return default os-specific hgrc search path'''
@@ -518,10 +446,10 @@ else:
return rcpath
# else look for a system rcpath in the registry
value = util.lookupreg('SOFTWARE\\Mercurial', None,
- _winreg.HKEY_LOCAL_MACHINE)
+ _HKEY_LOCAL_MACHINE)
if not isinstance(value, str) or not value:
return rcpath
- value = util.localpath(value)
+ value = value.replace('/', os.sep)
for p in value.split(os.pathsep):
if p.lower().endswith('mercurial.ini'):
rcpath.append(p)
@@ -558,11 +486,9 @@ def revpair(repo, revs):
l = revrange(repo, revs)
if len(l) == 0:
- if revs:
- raise util.Abort(_('empty revision range'))
return repo.dirstate.p1(), None
- if len(l) == 1 and len(revs) == 1 and _revrangesep not in revs[0]:
+ if len(l) == 1:
return repo.lookup(l[0]), None
return repo.lookup(l[0]), repo.lookup(l[-1])
@@ -575,12 +501,10 @@ def revrange(repo, revs):
def revfix(repo, val, defval):
if not val and val != 0 and defval is not None:
return defval
- return repo[val].rev()
+ return repo.changelog.rev(repo.lookup(val))
seen, l = set(), []
for spec in revs:
- if l and not seen:
- seen = set(l)
# attempt to parse old-style ranges first to deal with
# things like old-tag which contain query metacharacters
try:
@@ -594,18 +518,11 @@ def revrange(repo, revs):
start = revfix(repo, start, 0)
end = revfix(repo, end, len(repo) - 1)
step = start > end and -1 or 1
- if not seen and not l:
- # by far the most common case: revs = ["-1:0"]
- l = range(start, end + step, step)
- # defer syncing seen until next iteration
- continue
- newrevs = set(xrange(start, end + step, step))
- if seen:
- newrevs.difference_update(seen)
- seen.update(newrevs)
- else:
- seen = newrevs
- l.extend(sorted(newrevs, reverse=start > end))
+ for rev in xrange(start, end + step, step):
+ if rev in seen:
+ continue
+ seen.add(rev)
+ l.append(rev)
continue
elif spec and spec in repo: # single unquoted rev
rev = revfix(repo, spec, None)
@@ -619,9 +536,10 @@ def revrange(repo, revs):
# fall through to new-style queries if old-style fails
m = revset.match(repo.ui, spec)
- dl = [r for r in m(repo, xrange(len(repo))) if r not in seen]
- l.extend(dl)
- seen.update(dl)
+ for r in m(repo, range(len(repo))):
+ if r not in seen:
+ l.append(r)
+ seen.update(l)
return l
@@ -642,7 +560,7 @@ def expandpats(pats):
ret.append(p)
return ret
-def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
+def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
if pats == ("",):
pats = []
if not globbed and default == 'relpath':
@@ -653,10 +571,7 @@ def matchandpats(ctx, pats=[], opts={}, globbed=False, default='relpath'):
def badfn(f, msg):
ctx._repo.ui.warn("%s: %s\n" % (m.rel(f), msg))
m.bad = badfn
- return m, pats
-
-def match(ctx, pats=[], opts={}, globbed=False, default='relpath'):
- return matchandpats(ctx, pats, opts, globbed, default)[0]
+ return m
def matchall(repo):
return matchmod.always(repo.root, repo.getcwd())
@@ -673,9 +588,6 @@ def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
added, unknown, deleted, removed = [], [], [], []
audit_path = pathauditor(repo.root)
m = match(repo[None], pats, opts)
- rejected = []
- m.bad = lambda x, y: rejected.append(x)
-
for abs in repo.walk(m):
target = repo.wjoin(abs)
good = True
@@ -689,9 +601,8 @@ def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
unknown.append(abs)
if repo.ui.verbose or not exact:
repo.ui.status(_('adding %s\n') % ((pats and rel) or abs))
- elif (repo.dirstate[abs] != 'r' and
- (not good or not os.path.lexists(target) or
- (os.path.isdir(target) and not os.path.islink(target)))):
+ elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target)
+ or (os.path.isdir(target) and not os.path.islink(target))):
deleted.append(abs)
if repo.ui.verbose or not exact:
repo.ui.status(_('removing %s\n') % ((pats and rel) or abs))
@@ -721,11 +632,6 @@ def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None):
finally:
wlock.release()
- for f in rejected:
- if f in m.files():
- return 1
- return 0
-
def updatedir(ui, repo, patches, similarity=0):
'''Update dirstate after patch application according to metadata'''
if not patches:
@@ -800,122 +706,6 @@ def readrequires(opener, supported):
missings.append(r)
missings.sort()
if missings:
- raise error.RequirementError(
- _("unknown repository format: requires features '%s' (upgrade "
- "Mercurial)") % "', '".join(missings))
+ raise error.RequirementError(_("unknown repository format: "
+ "requires features '%s' (upgrade Mercurial)") % "', '".join(missings))
return requirements
-
-class filecacheentry(object):
- def __init__(self, path):
- self.path = path
- self.cachestat = filecacheentry.stat(self.path)
-
- if self.cachestat:
- self._cacheable = self.cachestat.cacheable()
- else:
- # None means we don't know yet
- self._cacheable = None
-
- def refresh(self):
- if self.cacheable():
- self.cachestat = filecacheentry.stat(self.path)
-
- def cacheable(self):
- if self._cacheable is not None:
- return self._cacheable
-
- # we don't know yet, assume it is for now
- return True
-
- def changed(self):
- # no point in going further if we can't cache it
- if not self.cacheable():
- return True
-
- newstat = filecacheentry.stat(self.path)
-
- # we may not know if it's cacheable yet, check again now
- if newstat and self._cacheable is None:
- self._cacheable = newstat.cacheable()
-
- # check again
- if not self._cacheable:
- return True
-
- if self.cachestat != newstat:
- self.cachestat = newstat
- return True
- else:
- return False
-
- @staticmethod
- def stat(path):
- try:
- return util.cachestat(path)
- except OSError, e:
- if e.errno != errno.ENOENT:
- raise
-
-class filecache(object):
- '''A property like decorator that tracks a file under .hg/ for updates.
-
- Records stat info when called in _filecache.
-
- On subsequent calls, compares old stat info with new info, and recreates
- the object when needed, updating the new stat info in _filecache.
-
- Mercurial either atomic renames or appends for files under .hg,
- so to ensure the cache is reliable we need the filesystem to be able
- to tell us if a file has been replaced. If it can't, we fallback to
- recreating the object on every call (essentially the same behaviour as
- propertycache).'''
- def __init__(self, path):
- self.path = path
-
- def join(self, obj, fname):
- """Used to compute the runtime path of the cached file.
-
- Users should subclass filecache and provide their own version of this
- function to call the appropriate join function on 'obj' (an instance
- of the class that its member function was decorated).
- """
- return obj.join(fname)
-
- def __call__(self, func):
- self.func = func
- self.name = func.__name__
- return self
-
- def __get__(self, obj, type=None):
- # do we need to check if the file changed?
- if self.name in obj.__dict__:
- return obj.__dict__[self.name]
-
- entry = obj._filecache.get(self.name)
-
- if entry:
- if entry.changed():
- entry.obj = self.func(obj)
- else:
- path = self.join(obj, self.path)
-
- # We stat -before- creating the object so our cache doesn't lie if
- # a writer modified between the time we read and stat
- entry = filecacheentry(path)
- entry.obj = self.func(obj)
-
- obj._filecache[self.name] = entry
-
- obj.__dict__[self.name] = entry.obj
- return entry.obj
-
- def __set__(self, obj, value):
- if self.name in obj._filecache:
- obj._filecache[self.name].obj = value # update cached copy
- obj.__dict__[self.name] = value # update copy returned by obj.x
-
- def __delete__(self, obj):
- try:
- del obj.__dict__[self.name]
- except KeyError:
- raise AttributeError, self.name
diff --git a/mercurial/setdiscovery.py b/mercurial/setdiscovery.py
index 94d8bb0..c737db8 100644
--- a/mercurial/setdiscovery.py
+++ b/mercurial/setdiscovery.py
@@ -8,7 +8,7 @@
from node import nullid
from i18n import _
-import random, util, dagutil
+import random, collections, util, dagutil
def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
# if nodes is empty we scan the entire graph
@@ -17,7 +17,7 @@ def _updatesample(dag, nodes, sample, always, quicksamplesize=0):
else:
heads = dag.heads()
dist = {}
- visit = util.deque(heads)
+ visit = collections.deque(heads)
seen = set()
factor = 1
while visit:
@@ -128,21 +128,16 @@ def findcommonheads(ui, local, remote,
return (srvheadhashes, False, srvheadhashes,)
if sample and util.all(yesno):
- ui.note(_("all local heads known remotely\n"))
+ ui.note("all local heads known remotely\n")
ownheadhashes = dag.externalizeall(ownheads)
return (ownheadhashes, True, srvheadhashes,)
# full blown discovery
+ undecided = dag.nodeset() # own nodes where I don't know if remote knows them
+ common = set() # own nodes I know we both know
+ missing = set() # own nodes I know remote lacks
- # own nodes where I don't know if remote knows them
- undecided = dag.nodeset()
- # own nodes I know we both know
- common = set()
- # own nodes I know remote lacks
- missing = set()
-
- # treat remote heads (and maybe own heads) as a first implicit sample
- # response
+ # treat remote heads (and maybe own heads) as a first implicit sample response
common.update(dag.ancestorset(srvheads))
undecided.difference_update(common)
@@ -163,7 +158,7 @@ def findcommonheads(ui, local, remote,
break
if full:
- ui.note(_("sampling from both directions\n"))
+ ui.note("sampling from both directions\n")
sample = _takefullsample(dag, undecided, size=fullsamplesize)
elif common:
# use cheapish initial sample
diff --git a/mercurial/similar.py b/mercurial/similar.py
index 8d4b581..b18795b 100644
--- a/mercurial/similar.py
+++ b/mercurial/similar.py
@@ -44,8 +44,7 @@ def _findsimilarmatches(repo, added, removed, threshold):
'''
copies = {}
for i, r in enumerate(removed):
- repo.ui.progress(_('searching for similar files'), i,
- total=len(removed))
+ repo.ui.progress(_('searching for similar files'), i, total=len(removed))
# lazily load text
@util.cachefunc
diff --git a/mercurial/simplemerge.py b/mercurial/simplemerge.py
index 10d7f54..7283751 100644
--- a/mercurial/simplemerge.py
+++ b/mercurial/simplemerge.py
@@ -11,7 +11,8 @@
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
-# along with this program; if not, see <http://www.gnu.org/licenses/>.
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# mbp: "you know that thing where cvs gives you conflict markers?"
# s: "i hate that."
@@ -94,7 +95,7 @@ class Merge3Text(object):
elif self.a[0].endswith('\r'):
newline = '\r'
if base_marker and reprocess:
- raise CantReprocessAndShowBase
+ raise CantReprocessAndShowBase()
if name_a:
start_marker = start_marker + ' ' + name_a
if name_b:
@@ -222,8 +223,7 @@ class Merge3Text(object):
# section a[0:ia] has been disposed of, etc
iz = ia = ib = 0
- for region in self.find_sync_regions():
- zmatch, zend, amatch, aend, bmatch, bend = region
+ for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions():
#print 'match base [%d:%d]' % (zmatch, zend)
matchlen = zend - zmatch
@@ -445,7 +445,7 @@ def simplemerge(ui, local, base, other, **opts):
out.write(line)
if not opts.get('print'):
- out.close()
+ out.rename()
if m3.conflicts:
if not opts.get('quiet'):
diff --git a/mercurial/sshpeer.py b/mercurial/sshrepo.py
index c9a48c0..342ae83 100644
--- a/mercurial/sshpeer.py
+++ b/mercurial/sshrepo.py
@@ -1,11 +1,10 @@
-# sshpeer.py - ssh repository proxy class for mercurial
+# sshrepo.py - ssh repository proxy class for mercurial
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import re
from i18n import _
import util, error, wireproto
@@ -19,17 +18,10 @@ class remotelock(object):
if self.repo:
self.release()
-def _serverquote(s):
- '''quote a string for the remote shell ... which we assume is sh'''
- if re.match('[a-zA-Z0-9@%_+=:,./-]*$', s):
- return s
- return "'%s'" % s.replace("'", "'\\''")
-
-class sshpeer(wireproto.wirepeer):
+class sshrepository(wireproto.wirerepository):
def __init__(self, ui, path, create=False):
self._url = path
self.ui = ui
- self.pipeo = self.pipei = self.pipee = None
u = util.url(path, parsequery=False, parsefragment=False)
if u.scheme != 'ssh' or not u.host or u.path is None:
@@ -48,9 +40,9 @@ class sshpeer(wireproto.wirepeer):
args = util.sshargs(sshcmd, self.host, self.user, self.port)
if create:
- cmd = '%s %s %s' % (sshcmd, args,
- util.shellquote("%s init %s" %
- (_serverquote(remotecmd), _serverquote(self.path))))
+ cmd = '%s %s "%s init %s"'
+ cmd = cmd % (sshcmd, args, remotecmd, self.path)
+
ui.note(_('running %s\n') % cmd)
res = util.system(cmd)
if res != 0:
@@ -65,11 +57,11 @@ class sshpeer(wireproto.wirepeer):
# cleanup up previous run
self.cleanup()
- cmd = '%s %s %s' % (sshcmd, args,
- util.shellquote("%s -R %s serve --stdio" %
- (_serverquote(remotecmd), _serverquote(self.path))))
- ui.note(_('running %s\n') % cmd)
+ cmd = '%s %s "%s -R %s serve --stdio"'
+ cmd = cmd % (sshcmd, args, remotecmd, self.path)
+
cmd = util.quotecommand(cmd)
+ ui.note(_('running %s\n') % cmd)
self.pipeo, self.pipei, self.pipee = util.popen3(cmd)
# skip any noise generated by remote shell
@@ -87,18 +79,14 @@ class sshpeer(wireproto.wirepeer):
lines.append(l)
max_noise -= 1
else:
- self._abort(error.RepoError(_('no suitable response from '
- 'remote hg')))
+ self._abort(error.RepoError(_("no suitable response from remote hg")))
- self._caps = set()
+ self.capabilities = set()
for l in reversed(lines):
if l.startswith("capabilities:"):
- self._caps.update(l[:-1].split(":")[1].split())
+ self.capabilities.update(l[:-1].split(":")[1].split())
break
- def _capabilities(self):
- return self._caps
-
def readerr(self):
while True:
size = util.fstat(self.pipee).st_size
@@ -115,17 +103,15 @@ class sshpeer(wireproto.wirepeer):
raise exception
def cleanup(self):
- if self.pipeo is None:
- return
- self.pipeo.close()
- self.pipei.close()
try:
+ self.pipeo.close()
+ self.pipei.close()
# read the error descriptor until EOF
for l in self.pipee:
self.ui.status(_("remote: "), l)
- except (IOError, ValueError):
+ self.pipee.close()
+ except:
pass
- self.pipee.close()
__del__ = cleanup
@@ -178,17 +164,6 @@ class sshpeer(wireproto.wirepeer):
def _recv(self):
l = self.pipei.readline()
- if l == '\n':
- err = []
- while True:
- line = self.pipee.readline()
- if line == '-\n':
- break
- err.extend([line])
- if len(err) > 0:
- # strip the trailing newline added to the last line server-side
- err[-1] = err[-1][:-1]
- self._abort(error.OutOfBandError(*err))
self.readerr()
try:
l = int(l)
@@ -236,4 +211,4 @@ class sshpeer(wireproto.wirepeer):
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), r))
-instance = sshpeer
+instance = sshrepository
diff --git a/mercurial/sshserver.py b/mercurial/sshserver.py
index 7c0a4e8..d231178 100644
--- a/mercurial/sshserver.py
+++ b/mercurial/sshserver.py
@@ -82,12 +82,6 @@ class sshserver(object):
def sendpusherror(self, rsp):
self.sendresponse(rsp.res)
- def sendooberror(self, rsp):
- self.ui.ferr.write('%s\n-\n' % rsp.message)
- self.ui.ferr.flush()
- self.fout.write('\n')
- self.fout.flush()
-
def serve_forever(self):
try:
while self.serve_one():
@@ -102,7 +96,6 @@ class sshserver(object):
wireproto.streamres: sendstream,
wireproto.pushres: sendpushresponse,
wireproto.pusherr: sendpusherror,
- wireproto.ooberror: sendooberror,
}
def serve_one(self):
@@ -142,8 +135,8 @@ class sshserver(object):
self.sendresponse("")
cg = changegroup.unbundle10(self.fin, "UN")
- r = self.repo.addchangegroup(cg, 'serve', self._client())
- self.lock.release()
+ r = self.repo.addchangegroup(cg, 'serve', self._client(),
+ lock=self.lock)
return str(r)
def _client(self):
diff --git a/mercurial/sslutil.py b/mercurial/sslutil.py
index c6fe18b..be3882c 100644
--- a/mercurial/sslutil.py
+++ b/mercurial/sslutil.py
@@ -13,16 +13,8 @@ from mercurial.i18n import _
try:
# avoid using deprecated/broken FakeSocket in python 2.6
import ssl
+ ssl_wrap_socket = ssl.wrap_socket
CERT_REQUIRED = ssl.CERT_REQUIRED
- def ssl_wrap_socket(sock, keyfile, certfile,
- cert_reqs=ssl.CERT_NONE, ca_certs=None):
- sslsocket = ssl.wrap_socket(sock, keyfile, certfile,
- cert_reqs=cert_reqs, ca_certs=ca_certs)
- # check if wrap_socket failed silently because socket had been closed
- # - see http://bugs.python.org/issue13721
- if not sslsocket.cipher():
- raise util.Abort(_('ssl connection failed'))
- return sslsocket
except ImportError:
CERT_REQUIRED = 2
@@ -30,8 +22,6 @@ except ImportError:
def ssl_wrap_socket(sock, key_file, cert_file,
cert_reqs=CERT_REQUIRED, ca_certs=None):
- if not util.safehasattr(socket, 'ssl'):
- raise util.Abort(_('Python SSL support not found'))
if ca_certs:
raise util.Abort(_(
'certificate checking requires Python 2.6'))
@@ -103,41 +93,36 @@ class validator(object):
host = self.host
cacerts = self.ui.config('web', 'cacerts')
hostfingerprint = self.ui.config('hostfingerprints', host)
- if not getattr(sock, 'getpeercert', False): # python 2.5 ?
- if hostfingerprint:
- raise util.Abort(_("host fingerprint for %s can't be "
- "verified (Python too old)") % host)
- if self.ui.configbool('ui', 'reportoldssl', True):
- self.ui.warn(_("warning: certificate for %s can't be verified "
- "(Python too old)\n") % host)
- return
- if not sock.cipher(): # work around http://bugs.python.org/issue13721
- raise util.Abort(_('%s ssl connection error') % host)
- peercert = sock.getpeercert(True)
- if not peercert:
- raise util.Abort(_('%s certificate error: '
- 'no certificate received') % host)
- peerfingerprint = util.sha1(peercert).hexdigest()
- nicefingerprint = ":".join([peerfingerprint[x:x + 2]
- for x in xrange(0, len(peerfingerprint), 2)])
- if hostfingerprint:
- if peerfingerprint.lower() != \
- hostfingerprint.replace(':', '').lower():
- raise util.Abort(_('certificate for %s has unexpected '
- 'fingerprint %s') % (host, nicefingerprint),
- hint=_('check hostfingerprint configuration'))
- self.ui.debug('%s certificate matched fingerprint %s\n' %
- (host, nicefingerprint))
- elif cacerts:
+ if cacerts and not hostfingerprint:
msg = _verifycert(sock.getpeercert(), host)
if msg:
- raise util.Abort(_('%s certificate error: %s') % (host, msg),
- hint=_('configure hostfingerprint %s or use '
- '--insecure to connect insecurely') %
- nicefingerprint)
+ raise util.Abort(_('%s certificate error: %s '
+ '(use --insecure to connect '
+ 'insecurely)') % (host, msg))
self.ui.debug('%s certificate successfully verified\n' % host)
else:
- self.ui.warn(_('warning: %s certificate with fingerprint %s not '
- 'verified (check hostfingerprints or web.cacerts '
- 'config setting)\n') %
- (host, nicefingerprint))
+ if getattr(sock, 'getpeercert', False):
+ peercert = sock.getpeercert(True)
+ peerfingerprint = util.sha1(peercert).hexdigest()
+ nicefingerprint = ":".join([peerfingerprint[x:x + 2]
+ for x in xrange(0, len(peerfingerprint), 2)])
+ if hostfingerprint:
+ if peerfingerprint.lower() != \
+ hostfingerprint.replace(':', '').lower():
+ raise util.Abort(_('invalid certificate for %s '
+ 'with fingerprint %s') %
+ (host, nicefingerprint))
+ self.ui.debug('%s certificate matched fingerprint %s\n' %
+ (host, nicefingerprint))
+ else:
+ self.ui.warn(_('warning: %s certificate '
+ 'with fingerprint %s not verified '
+ '(check hostfingerprints or web.cacerts '
+ 'config setting)\n') %
+ (host, nicefingerprint))
+ else: # python 2.5 ?
+ if hostfingerprint:
+ raise util.Abort(_("host fingerprint for %s can't be "
+ "verified (Python too old)") % host)
+ self.ui.warn(_("warning: certificate for %s can't be "
+ "verified (Python too old)\n") % host)
diff --git a/mercurial/statichttprepo.py b/mercurial/statichttprepo.py
index c9274cd..769e4e2 100644
--- a/mercurial/statichttprepo.py
+++ b/mercurial/statichttprepo.py
@@ -26,17 +26,20 @@ class httprangereader(object):
end = ''
if bytes:
end = self.pos + bytes - 1
- if self.pos or end:
- req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
+ req.add_header('Range', 'bytes=%d-%s' % (self.pos, end))
try:
f = self.opener.open(req)
data = f.read()
- # Python 2.6+ defines a getcode() function, and 2.4 and
- # 2.5 appear to always have an undocumented code attribute
- # set. If we can't read either of those, fall back to 206
- # and hope for the best.
- code = getattr(f, 'getcode', lambda : getattr(f, 'code', 206))()
+ if hasattr(f, 'getcode'):
+ # python 2.6+
+ code = f.getcode()
+ elif hasattr(f, 'code'):
+ # undocumented attribute, seems to be set in 2.4 and 2.5
+ code = f.code
+ else:
+ # Don't know how to check, hope for the best.
+ code = 206
except urllib2.HTTPError, inst:
num = inst.code == 404 and errno.ENOENT or None
raise IOError(num, inst)
@@ -76,12 +79,6 @@ def build_opener(ui, authinfo):
return statichttpopener
-class statichttppeer(localrepo.localpeer):
- def local(self):
- return None
- def canpush(self):
- return False
-
class statichttprepository(localrepo.localrepository):
def __init__(self, ui, path):
self._url = path
@@ -93,8 +90,6 @@ class statichttprepository(localrepo.localrepository):
opener = build_opener(ui, authinfo)
self.opener = opener(self.path)
- self.vfs = self.opener
- self._phasedefaults = []
try:
requirements = scmutil.readrequires(self.opener, self.supported)
@@ -119,10 +114,7 @@ class statichttprepository(localrepo.localrepository):
self.store = store.store(requirements, self.path, opener)
self.spath = self.store.path
self.sopener = self.store.opener
- self.svfs = self.sopener
self.sjoin = self.store.join
- self._filecache = {}
- self.requirements = requirements
self.manifest = manifest.manifest(self.sopener)
self.changelog = changelog.changelog(self.sopener)
@@ -132,9 +124,7 @@ class statichttprepository(localrepo.localrepository):
self._branchcachetip = None
self.encodepats = None
self.decodepats = None
-
- def _restrictcapabilities(self, caps):
- return caps.difference(["pushkey"])
+ self.capabilities.difference_update(["pushkey"])
def url(self):
return self._url
@@ -142,9 +132,6 @@ class statichttprepository(localrepo.localrepository):
def local(self):
return False
- def peer(self):
- return statichttppeer(self)
-
def lock(self, wait=True):
raise util.Abort(_('cannot lock static-http repository'))
diff --git a/mercurial/store.py b/mercurial/store.py
index b6eb8b3..961c3aa 100644
--- a/mercurial/store.py
+++ b/mercurial/store.py
@@ -232,8 +232,7 @@ def _calcmode(path):
mode = None
return mode
-_data = ('data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
- ' phaseroots obsstore')
+_data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i'
class basicstore(object):
'''base class for local repository stores'''
@@ -265,8 +264,7 @@ class basicstore(object):
l.append((decodedir(n), n, st.st_size))
elif kind == stat.S_IFDIR and recurse:
visit.append(fp)
- l.sort()
- return l
+ return sorted(l)
def datafiles(self):
return self._walk('data', True)
@@ -319,36 +317,36 @@ class fncache(object):
def _load(self):
'''fill the entries from the fncache file'''
+ self.entries = set()
self._dirty = False
try:
fp = self.opener('fncache', mode='rb')
except IOError:
# skip nonexistent file
- self.entries = set()
return
- self.entries = set(map(decodedir, fp.read().splitlines()))
- if '' in self.entries:
- fp.seek(0)
- for n, line in enumerate(fp):
- if not line.rstrip('\n'):
- t = _('invalid entry in fncache, line %s') % (n + 1)
- raise util.Abort(t)
+ for n, line in enumerate(fp):
+ if (len(line) < 2) or (line[-1] != '\n'):
+ t = _('invalid entry in fncache, line %s') % (n + 1)
+ raise util.Abort(t)
+ self.entries.add(decodedir(line[:-1]))
fp.close()
- def _write(self, files, atomictemp):
- fp = self.opener('fncache', mode='wb', atomictemp=atomictemp)
- if files:
- fp.write('\n'.join(map(encodedir, files)) + '\n')
- fp.close()
- self._dirty = False
-
def rewrite(self, files):
- self._write(files, False)
+ fp = self.opener('fncache', mode='wb')
+ for p in files:
+ fp.write(encodedir(p) + '\n')
+ fp.close()
self.entries = set(files)
+ self._dirty = False
def write(self):
- if self._dirty:
- self._write(self.entries, True)
+ if not self._dirty:
+ return
+ fp = self.opener('fncache', mode='wb', atomictemp=True)
+ for p in self.entries:
+ fp.write(encodedir(p) + '\n')
+ fp.rename()
+ self._dirty = False
def add(self, fn):
if self.entries is None:
@@ -392,16 +390,15 @@ class fncachestore(basicstore):
def join(self, f):
return self.path + '/' + self.encode(f)
- def getsize(self, path):
- return os.stat(self.path + '/' + path).st_size
-
def datafiles(self):
rewrite = False
existing = []
+ spath = self.path
for f in self.fncache:
ef = self.encode(f)
try:
- yield f, ef, self.getsize(ef)
+ st = os.stat(spath + '/' + ef)
+ yield f, ef, st.st_size
existing.append(f)
except OSError:
# nonexistent entry
@@ -412,7 +409,7 @@ class fncachestore(basicstore):
self.fncache.rewrite(existing)
def copylist(self):
- d = ('data dh fncache phaseroots obsstore'
+ d = ('data dh fncache'
' 00manifest.d 00manifest.i 00changelog.d 00changelog.i')
return (['requires', '00changelog.i'] +
['store/' + f for f in d.split()])
diff --git a/mercurial/subrepo.py b/mercurial/subrepo.py
index 437d8b9..b5068d0 100644
--- a/mercurial/subrepo.py
+++ b/mercurial/subrepo.py
@@ -8,7 +8,7 @@
import errno, os, re, xml.dom.minidom, shutil, posixpath
import stat, subprocess, tarfile
from i18n import _
-import config, scmutil, util, node, error, cmdutil, bookmarks, match as matchmod
+import config, scmutil, util, node, error, cmdutil, bookmarks
hg = None
propertycache = util.propertycache
@@ -43,22 +43,22 @@ def state(ctx, ui):
rev = {}
if '.hgsubstate' in ctx:
try:
- for i, l in enumerate(ctx['.hgsubstate'].data().splitlines()):
- l = l.lstrip()
- if not l:
- continue
- try:
- revision, path = l.split(" ", 1)
- except ValueError:
- raise util.Abort(_("invalid subrepository revision "
- "specifier in .hgsubstate line %d")
- % (i + 1))
+ for l in ctx['.hgsubstate'].data().splitlines():
+ revision, path = l.split(" ", 1)
rev[path] = revision
except IOError, err:
if err.errno != errno.ENOENT:
raise
- def remap(src):
+ state = {}
+ for path, src in p[''].items():
+ kind = 'hg'
+ if src.startswith('['):
+ if ']' not in src:
+ raise util.Abort(_('missing ] in subrepo source'))
+ kind, src = src.split(']', 1)
+ kind = kind[1:]
+
for pattern, repl in p.items('subpaths'):
# Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub
# does a string decode.
@@ -72,35 +72,8 @@ def state(ctx, ui):
except re.error, e:
raise util.Abort(_("bad subrepository pattern in %s: %s")
% (p.source('subpaths', pattern), e))
- return src
- state = {}
- for path, src in p[''].items():
- kind = 'hg'
- if src.startswith('['):
- if ']' not in src:
- raise util.Abort(_('missing ] in subrepo source'))
- kind, src = src.split(']', 1)
- kind = kind[1:]
- src = src.lstrip() # strip any extra whitespace after ']'
-
- if not util.url(src).isabs():
- parent = _abssource(ctx._repo, abort=False)
- if parent:
- parent = util.url(parent)
- parent.path = posixpath.join(parent.path or '', src)
- parent.path = posixpath.normpath(parent.path)
- joined = str(parent)
- # Remap the full joined path and use it if it changes,
- # else remap the original source.
- remapped = remap(joined)
- if remapped == joined:
- src = remap(src)
- else:
- src = remapped
-
- src = remap(src)
- state[util.pconvert(path)] = (src.strip(), rev.get(path, ''), kind)
+ state[path] = (src.strip(), rev.get(path, ''), kind)
return state
@@ -200,8 +173,7 @@ def _updateprompt(ui, sub, dirty, local, remote):
'use (l)ocal source (%s) or (r)emote source (%s)?\n')
% (subrelpath(sub), local, remote))
else:
- msg = (_(' subrepository sources for %s differ (in checked out '
- 'version)\n'
+ msg = (_(' subrepository sources for %s differ (in checked out version)\n'
'use (l)ocal source (%s) or (r)emote source (%s)?\n')
% (subrelpath(sub), local, remote))
return ui.promptchoice(msg, (_('&Local'), _('&Remote')), 0)
@@ -209,35 +181,34 @@ def _updateprompt(ui, sub, dirty, local, remote):
def reporelpath(repo):
"""return path to this (sub)repo as seen from outermost repo"""
parent = repo
- while util.safehasattr(parent, '_subparent'):
+ while hasattr(parent, '_subparent'):
parent = parent._subparent
- p = parent.root.rstrip(os.sep)
- return repo.root[len(p) + 1:]
+ return repo.root[len(parent.root)+1:]
def subrelpath(sub):
"""return path to this subrepo as seen from outermost repo"""
- if util.safehasattr(sub, '_relpath'):
+ if hasattr(sub, '_relpath'):
return sub._relpath
- if not util.safehasattr(sub, '_repo'):
+ if not hasattr(sub, '_repo'):
return sub._path
return reporelpath(sub._repo)
def _abssource(repo, push=False, abort=True):
"""return pull/push path of repo - either based on parent repo .hgsub info
or on the top repo config. Abort or return None if no source found."""
- if util.safehasattr(repo, '_subparent'):
+ if hasattr(repo, '_subparent'):
source = util.url(repo._subsource)
if source.isabs():
return str(source)
source.path = posixpath.normpath(source.path)
parent = _abssource(repo._subparent, push, abort=False)
if parent:
- parent = util.url(util.pconvert(parent))
+ parent = util.url(parent)
parent.path = posixpath.join(parent.path or '', source.path)
parent.path = posixpath.normpath(parent.path)
return str(parent)
else: # recursion reached top repo
- if util.safehasattr(repo, '_subtoppath'):
+ if hasattr(repo, '_subtoppath'):
return repo._subtoppath
if push and repo.ui.config('paths', 'default-push'):
return repo.ui.config('paths', 'default-push')
@@ -268,7 +239,7 @@ def subrepo(ctx, path):
hg = h
scmutil.pathauditor(ctx._repo.root)(path)
- state = ctx.substate[path]
+ state = ctx.substate.get(path, nullstate)
if state[2] not in types:
raise util.Abort(_('unknown subrepo type %s') % state[2])
return types[state[2]](ctx, path, state[:2])
@@ -284,11 +255,6 @@ class abstractsubrepo(object):
"""
raise NotImplementedError
- def basestate(self):
- """current working directory base state, disregarding .hgsubstate
- state and working directory modifications"""
- raise NotImplementedError
-
def checknested(self, path):
"""check if path is a subrepository within this repository"""
return False
@@ -317,14 +283,14 @@ class abstractsubrepo(object):
"""merge currently-saved state with the new state."""
raise NotImplementedError
- def push(self, opts):
+ def push(self, force):
"""perform whatever action is analogous to 'hg push'
This may be a no-op on some systems.
"""
raise NotImplementedError
- def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
+ def add(self, ui, match, dryrun, prefix):
return []
def status(self, rev2, **opts):
@@ -351,11 +317,8 @@ class abstractsubrepo(object):
"""return file flags"""
return ''
- def archive(self, ui, archiver, prefix, match=None):
- if match is not None:
- files = [f for f in self.files() if match(f)]
- else:
- files = self.files()
+ def archive(self, ui, archiver, prefix):
+ files = self.files()
total = len(files)
relpath = subrelpath(self)
ui.progress(_('archiving (%s)') % relpath, 0,
@@ -370,20 +333,6 @@ class abstractsubrepo(object):
unit=_('files'), total=total)
ui.progress(_('archiving (%s)') % relpath, None)
- def walk(self, match):
- '''
- walk recursively through the directory tree, finding all files
- matched by the match function
- '''
- pass
-
- def forget(self, ui, match, prefix):
- return ([], [])
-
- def revert(self, ui, substate, *pats, **opts):
- ui.warn('%s: reverting %s subrepos is unsupported\n' \
- % (substate[0], substate[2]))
- return []
class hgsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
@@ -418,9 +367,9 @@ class hgsubrepo(abstractsubrepo):
addpathconfig('default-push', defpushpath)
fp.close()
- def add(self, ui, match, dryrun, listsubrepos, prefix, explicitonly):
- return cmdutil.add(ui, self._repo, match, dryrun, listsubrepos,
- os.path.join(prefix, self._path), explicitonly)
+ def add(self, ui, match, dryrun, prefix):
+ return cmdutil.add(ui, self._repo, match, dryrun, True,
+ os.path.join(prefix, self._path))
def status(self, rev2, **opts):
try:
@@ -448,16 +397,14 @@ class hgsubrepo(abstractsubrepo):
self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n')
% (inst, subrelpath(self)))
- def archive(self, ui, archiver, prefix, match=None):
- self._get(self._state + ('hg',))
- abstractsubrepo.archive(self, ui, archiver, prefix, match)
+ def archive(self, ui, archiver, prefix):
+ abstractsubrepo.archive(self, ui, archiver, prefix)
rev = self._state[1]
ctx = self._repo[rev]
for subpath in ctx.substate:
s = subrepo(ctx, subpath)
- submatch = matchmod.narrowmatcher(subpath, match)
- s.archive(ui, archiver, os.path.join(prefix, self._path), submatch)
+ s.archive(ui, archiver, os.path.join(prefix, self._path))
def dirty(self, ignoreupdate=False):
r = self._state[1]
@@ -469,9 +416,6 @@ class hgsubrepo(abstractsubrepo):
return True
return w.dirty() # working directory changed
- def basestate(self):
- return self._repo['.'].hex()
-
def checknested(self, path):
return self._repo._checknested(self._repo.wjoin(path))
@@ -502,18 +446,15 @@ class hgsubrepo(abstractsubrepo):
self._repo.ui.status(_('cloning subrepo %s from %s\n')
% (subrelpath(self), srcurl))
parentrepo = self._repo._subparent
- shutil.rmtree(self._repo.path)
- other, cloned = hg.clone(self._repo._subparent.ui, {},
- other, self._repo.root,
- update=False)
- self._repo = cloned.local()
+ shutil.rmtree(self._repo.root)
+ other, self._repo = hg.clone(self._repo._subparent.ui, {}, other,
+ self._repo.root, update=False)
self._initrepo(parentrepo, source, create=True)
else:
self._repo.ui.status(_('pulling subrepo %s from %s\n')
% (subrelpath(self), srcurl))
self._repo.pull(other)
- bookmarks.updatefromremote(self._repo.ui, self._repo, other,
- srcurl)
+ bookmarks.updatefromremote(self._repo.ui, self._repo, other)
def get(self, state, overwrite=False):
self._get(state)
@@ -528,7 +469,7 @@ class hgsubrepo(abstractsubrepo):
anc = dst.ancestor(cur)
def mergefunc():
- if anc == cur and dst.branch() == cur.branch():
+ if anc == cur:
self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self))
hg.update(self._repo, state[1])
elif anc == dst:
@@ -547,23 +488,19 @@ class hgsubrepo(abstractsubrepo):
else:
mergefunc()
- def push(self, opts):
- force = opts.get('force')
- newbranch = opts.get('new_branch')
- ssh = opts.get('ssh')
-
+ def push(self, force):
# push subrepos depth-first for coherent ordering
c = self._repo['']
subs = c.substate # only repos that are committed
for s in sorted(subs):
- if c.sub(s).push(opts) == 0:
+ if not c.sub(s).push(force):
return False
dsturl = _abssource(self._repo, True)
self._repo.ui.status(_('pushing subrepo %s to %s\n') %
(subrelpath(self), dsturl))
- other = hg.peer(self._repo.ui, {'ssh': ssh}, dsturl)
- return self._repo.push(other, force, newbranch=newbranch)
+ other = hg.peer(self._repo.ui, {}, dsturl)
+ return self._repo.push(other, force)
def outgoing(self, ui, dest, opts):
return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts)
@@ -585,45 +522,6 @@ class hgsubrepo(abstractsubrepo):
ctx = self._repo[rev]
return ctx.flags(name)
- def walk(self, match):
- ctx = self._repo[None]
- return ctx.walk(match)
-
- def forget(self, ui, match, prefix):
- return cmdutil.forget(ui, self._repo, match,
- os.path.join(prefix, self._path), True)
-
- def revert(self, ui, substate, *pats, **opts):
- # reverting a subrepo is a 2 step process:
- # 1. if the no_backup is not set, revert all modified
- # files inside the subrepo
- # 2. update the subrepo to the revision specified in
- # the corresponding substate dictionary
- ui.status(_('reverting subrepo %s\n') % substate[0])
- if not opts.get('no_backup'):
- # Revert all files on the subrepo, creating backups
- # Note that this will not recursively revert subrepos
- # We could do it if there was a set:subrepos() predicate
- opts = opts.copy()
- opts['date'] = None
- opts['rev'] = substate[1]
-
- pats = []
- if not opts['all']:
- pats = ['set:modified()']
- self.filerevert(ui, *pats, **opts)
-
- # Update the repo to the revision specified in the given substate
- self.get(substate, overwrite=True)
-
- def filerevert(self, ui, *pats, **opts):
- ctx = self._repo[opts['rev']]
- parents = self._repo.dirstate.parents()
- if opts['all']:
- pats = ['set:modified()']
- else:
- pats = []
- cmdutil.revert(ui, self._repo, ctx, parents, *pats, **opts)
class svnsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
@@ -631,13 +529,9 @@ class svnsubrepo(abstractsubrepo):
self._state = state
self._ctx = ctx
self._ui = ctx._repo.ui
- self._exe = util.findexe('svn')
- if not self._exe:
- raise util.Abort(_("'svn' executable not found for subrepo '%s'")
- % self._path)
def _svncommand(self, commands, filename='', failok=False):
- cmd = [self._exe]
+ cmd = ['svn']
extrakw = {}
if not self._ui.interactive():
# Making stdin be a pipe should prevent svn from behaving
@@ -695,13 +589,12 @@ class svnsubrepo(abstractsubrepo):
return self._wcrevs()[0]
def _wcchanged(self):
- """Return (changes, extchanges, missing) where changes is True
- if the working directory was changed, extchanges is
- True if any of these changes concern an external entry and missing
- is True if any change is a missing entry.
+ """Return (changes, extchanges) where changes is True
+ if the working directory was changed, and extchanges is
+ True if any of these changes concern an external entry.
"""
output, err = self._svncommand(['status', '--xml'])
- externals, changes, missing = [], [], []
+ externals, changes = [], []
doc = xml.dom.minidom.parseString(output)
for e in doc.getElementsByTagName('entry'):
s = e.getElementsByTagName('wc-status')
@@ -712,16 +605,14 @@ class svnsubrepo(abstractsubrepo):
path = e.getAttribute('path')
if item == 'external':
externals.append(path)
- elif item == 'missing':
- missing.append(path)
if (item not in ('', 'normal', 'unversioned', 'external')
or props not in ('', 'none', 'normal')):
changes.append(path)
for path in changes:
for ext in externals:
if path == ext or path.startswith(ext + os.sep):
- return True, True, bool(missing)
- return bool(changes), False, bool(missing)
+ return True, True
+ return bool(changes), False
def dirty(self, ignoreupdate=False):
if not self._wcchanged()[0]:
@@ -729,42 +620,18 @@ class svnsubrepo(abstractsubrepo):
return False
return True
- def basestate(self):
- lastrev, rev = self._wcrevs()
- if lastrev != rev:
- # Last committed rev is not the same than rev. We would
- # like to take lastrev but we do not know if the subrepo
- # URL exists at lastrev. Test it and fallback to rev it
- # is not there.
- try:
- self._svncommand(['list', '%s@%s' % (self._state[0], lastrev)])
- return lastrev
- except error.Abort:
- pass
- return rev
-
def commit(self, text, user, date):
# user and date are out of our hands since svn is centralized
- changed, extchanged, missing = self._wcchanged()
+ changed, extchanged = self._wcchanged()
if not changed:
- return self.basestate()
+ return self._wcrev()
if extchanged:
# Do not try to commit externals
raise util.Abort(_('cannot commit svn externals'))
- if missing:
- # svn can commit with missing entries but aborting like hg
- # seems a better approach.
- raise util.Abort(_('cannot commit missing svn entries'))
commitinfo, err = self._svncommand(['commit', '-m', text])
self._ui.status(commitinfo)
newrev = re.search('Committed revision ([0-9]+).', commitinfo)
if not newrev:
- if not commitinfo.strip():
- # Sometimes, our definition of "changed" differs from
- # svn one. For instance, svn ignores missing files
- # when committing. If there are only missing files, no
- # commit is made, no output and no error code.
- raise util.Abort(_('failed to commit svn changes'))
raise util.Abort(commitinfo.splitlines()[-1])
newrev = newrev.groups()[0]
self._ui.status(self._svncommand(['update', '-r', newrev])[0])
@@ -806,7 +673,7 @@ class svnsubrepo(abstractsubrepo):
status, err = self._svncommand(args, failok=True)
if not re.search('Checked out revision [0-9]+.', status):
if ('is already a working copy for a different URL' in err
- and (self._wcchanged()[:2] == (False, False))):
+ and (self._wcchanged() == (False, False))):
# obstructed but clean working copy, so just blow it away.
self.remove()
self.get(state, overwrite=False)
@@ -817,36 +684,27 @@ class svnsubrepo(abstractsubrepo):
def merge(self, state):
old = self._state[1]
new = state[1]
- wcrev = self._wcrev()
- if new != wcrev:
- dirty = old == wcrev or self._wcchanged()[0]
- if _updateprompt(self._ui, self, dirty, wcrev, new):
+ if new != self._wcrev():
+ dirty = old == self._wcrev() or self._wcchanged()[0]
+ if _updateprompt(self._ui, self, dirty, self._wcrev(), new):
self.get(state, False)
- def push(self, opts):
+ def push(self, force):
# push is a no-op for SVN
return True
def files(self):
- output = self._svncommand(['list', '--recursive', '--xml'])[0]
- doc = xml.dom.minidom.parseString(output)
- paths = []
- for e in doc.getElementsByTagName('entry'):
- kind = str(e.getAttribute('kind'))
- if kind != 'file':
- continue
- name = ''.join(c.data for c
- in e.getElementsByTagName('name')[0].childNodes
- if c.nodeType == c.TEXT_NODE)
- paths.append(name)
- return paths
+ output = self._svncommand(['list'])
+ # This works because svn forbids \n in filenames.
+ return output.splitlines()
def filedata(self, name):
- return self._svncommand(['cat'], name)[0]
+ return self._svncommand(['cat'], name)
class gitsubrepo(abstractsubrepo):
def __init__(self, ctx, path, state):
+ # TODO add git version check.
self._state = state
self._ctx = ctx
self._path = path
@@ -854,29 +712,6 @@ class gitsubrepo(abstractsubrepo):
self._abspath = ctx._repo.wjoin(path)
self._subparent = ctx._repo
self._ui = ctx._repo.ui
- self._ensuregit()
-
- def _ensuregit(self):
- try:
- self._gitexecutable = 'git'
- out, err = self._gitnodir(['--version'])
- except OSError, e:
- if e.errno != 2 or os.name != 'nt':
- raise
- self._gitexecutable = 'git.cmd'
- out, err = self._gitnodir(['--version'])
- m = re.search(r'^git version (\d+)\.(\d+)\.(\d+)', out)
- if not m:
- self._ui.warn(_('cannot retrieve git version'))
- return
- version = (int(m.group(1)), m.group(2), m.group(3))
- # git 1.4.0 can't work at all, but 1.5.X can in at least some cases,
- # despite the docstring comment. For now, error on 1.4.0, warn on
- # 1.5.0 but attempt to continue.
- if version < (1, 5, 0):
- raise util.Abort(_('git subrepo requires at least 1.6.0 or later'))
- elif version < (1, 6, 0):
- self._ui.warn(_('git subrepo requires at least 1.6.0 or later'))
def _gitcommand(self, commands, env=None, stream=False):
return self._gitdir(commands, env=env, stream=stream)[0]
@@ -897,8 +732,8 @@ class gitsubrepo(abstractsubrepo):
errpipe = None
if self._ui.quiet:
errpipe = open(os.devnull, 'w')
- p = subprocess.Popen([self._gitexecutable] + commands, bufsize=-1,
- cwd=cwd, env=env, close_fds=util.closefds,
+ p = subprocess.Popen(['git'] + commands, bufsize=-1, cwd=cwd, env=env,
+ close_fds=util.closefds,
stdout=subprocess.PIPE, stderr=errpipe)
if stream:
return p.stdout, None
@@ -947,12 +782,6 @@ class gitsubrepo(abstractsubrepo):
def _gitisbare(self):
return self._gitcommand(['config', '--bool', 'core.bare']) == 'true'
- def _gitupdatestat(self):
- """This must be run before git diff-index.
- diff-index only looks at changes to file stat;
- this command looks at file contents and updates the stat."""
- self._gitcommand(['update-index', '-q', '--refresh'])
-
def _gitbranchmap(self):
'''returns 2 things:
a map from git branch to revision
@@ -980,10 +809,9 @@ class gitsubrepo(abstractsubrepo):
for b in branches:
if b.startswith('refs/remotes/'):
continue
- bname = b.split('/', 2)[2]
- remote = self._gitcommand(['config', 'branch.%s.remote' % bname])
+ remote = self._gitcommand(['config', 'branch.%s.remote' % b])
if remote:
- ref = self._gitcommand(['config', 'branch.%s.merge' % bname])
+ ref = self._gitcommand(['config', 'branch.%s.merge' % b])
tracking['refs/remotes/%s/%s' %
(remote, ref.split('/', 2)[2])] = b
return tracking
@@ -1022,13 +850,9 @@ class gitsubrepo(abstractsubrepo):
# different version checked out
return True
# check for staged changes or modified files; ignore untracked files
- self._gitupdatestat()
out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
return code == 1
- def basestate(self):
- return self._gitstate()
-
def get(self, state, overwrite=False):
source, revision, kind = state
if not revision:
@@ -1133,7 +957,6 @@ class gitsubrepo(abstractsubrepo):
source, revision, kind = state
self._fetch(source, revision)
base = self._gitcommand(['merge-base', revision, self._state[1]])
- self._gitupdatestat()
out, code = self._gitdir(['diff-index', '--quiet', 'HEAD'])
def mergefunc():
@@ -1151,9 +974,7 @@ class gitsubrepo(abstractsubrepo):
else:
mergefunc()
- def push(self, opts):
- force = opts.get('force')
-
+ def push(self, force):
if not self._state[1]:
return True
if self._gitmissing():
@@ -1186,7 +1007,7 @@ class gitsubrepo(abstractsubrepo):
return True
else:
self._ui.warn(_('no branch checked out in subrepo %s\n'
- 'cannot push revision %s\n') %
+ 'cannot push revision %s') %
(self._relpath, self._state[1]))
return False
@@ -1210,7 +1031,7 @@ class gitsubrepo(abstractsubrepo):
else:
os.remove(path)
- def archive(self, ui, archiver, prefix, match=None):
+ def archive(self, ui, archiver, prefix):
source, revision = self._state
if not revision:
return
@@ -1226,8 +1047,6 @@ class gitsubrepo(abstractsubrepo):
for i, info in enumerate(tar):
if info.isdir():
continue
- if match and not match(info.name):
- continue
if info.issym():
data = info.linkname
else:
@@ -1245,7 +1064,6 @@ class gitsubrepo(abstractsubrepo):
# if the repo is missing, return no results
return [], [], [], [], [], [], []
modified, added, removed = [], [], []
- self._gitupdatestat()
if rev2:
command = ['diff-tree', rev1, rev2]
else:
diff --git a/mercurial/tags.py b/mercurial/tags.py
index e4e0129..d4047bd 100644
--- a/mercurial/tags.py
+++ b/mercurial/tags.py
@@ -181,7 +181,7 @@ def _readtagcache(ui, repo):
for line in cachelines:
if line == "\n":
break
- line = line.split()
+ line = line.rstrip().split()
cacherevs.append(int(line[0]))
headnode = bin(line[1])
cacheheads.append(headnode)
@@ -228,11 +228,6 @@ def _readtagcache(ui, repo):
# N.B. in case 4 (nodes destroyed), "new head" really means "newly
# exposed".
- if not len(repo.file('.hgtags')):
- # No tags have ever been committed, so we can avoid a
- # potentially expensive search.
- return (repoheads, cachefnode, None, True)
-
newheads = [head
for head in repoheads
if head not in set(cacheheads)]
@@ -241,7 +236,7 @@ def _readtagcache(ui, repo):
# This is the most expensive part of finding tags, so performance
# depends primarily on the size of newheads. Worst case: no cache
# file, so newheads == repoheads.
- for head in reversed(newheads):
+ for head in newheads:
cctx = repo[head]
try:
fnode = cctx.filenode('.hgtags')
@@ -292,6 +287,6 @@ def _writetagcache(ui, repo, heads, tagfnode, cachetags):
cachefile.write("%s %s\n" % (hex(node), name))
try:
- cachefile.close()
+ cachefile.rename()
except (OSError, IOError):
pass
diff --git a/mercurial/templatefilters.py b/mercurial/templatefilters.py
index b5264f2..389be70 100644
--- a/mercurial/templatefilters.py
+++ b/mercurial/templatefilters.py
@@ -7,7 +7,6 @@
import cgi, re, os, time, urllib
import encoding, node, util
-import hbisect
def addbreaks(text):
""":addbreaks: Any text. Add an XHTML "<br />" tag before the end of
@@ -189,13 +188,13 @@ def json(obj):
return '"%s"' % jsonescape(u)
elif isinstance(obj, unicode):
return '"%s"' % jsonescape(obj)
- elif util.safehasattr(obj, 'keys'):
+ elif hasattr(obj, 'keys'):
out = []
for k, v in obj.iteritems():
s = '%s: %s' % (json(k), json(v))
out.append(s)
return '{' + ', '.join(out) + '}'
- elif util.safehasattr(obj, '__iter__'):
+ elif hasattr(obj, '__iter__'):
out = []
for i in obj:
out.append(json(i))
@@ -242,29 +241,12 @@ def permissions(flags):
return "-rw-r--r--"
def person(author):
- """:person: Any text. Returns the name before an email address,
- interpreting it as per RFC 5322.
-
- >>> person('foo@bar')
- 'foo'
- >>> person('Foo Bar <foo@bar>')
- 'Foo Bar'
- >>> person('"Foo Bar" <foo@bar>')
- 'Foo Bar'
- >>> person('"Foo \"buz\" Bar" <foo@bar>')
- 'Foo "buz" Bar'
- >>> # The following are invalid, but do exist in real-life
- ...
- >>> person('Foo "buz" Bar <foo@bar>')
- 'Foo "buz" Bar'
- >>> person('"Foo Bar <foo@bar>')
- 'Foo Bar'
- """
- if '@' not in author:
+ """:person: Any text. Returns the text before an email address."""
+ if not '@' in author:
return author
f = author.find('<')
if f != -1:
- return author[:f].strip(' "').replace('\\"', '"')
+ return author[:f].rstrip()
f = author.find('@')
return author[:f].replace('.', ' ')
@@ -286,14 +268,6 @@ def short(text):
"""
return text[:12]
-def shortbisect(text):
- """:shortbisect: Any text. Treats `text` as a bisection status, and
- returns a single-character representing the status (G: good, B: bad,
- S: skipped, U: untested, I: ignored). Returns single space if `text`
- is not a valid bisection status.
- """
- return hbisect.shortlabel(text) or ' '
-
def shortdate(text):
""":shortdate: Date. Returns a date like "2006-09-18"."""
return util.shortdate(text)
@@ -305,7 +279,7 @@ def stringify(thing):
""":stringify: Any type. Turns the value into text by converting values into
text and concatenating them.
"""
- if util.safehasattr(thing, '__iter__') and not isinstance(thing, str):
+ if hasattr(thing, '__iter__') and not isinstance(thing, str):
return "".join([stringify(t) for t in thing if t is not None])
return str(thing)
@@ -336,14 +310,9 @@ def urlescape(text):
return urllib.quote(text)
def userfilter(text):
- """:user: Any text. Returns a short representation of a user name or email
- address."""
+ """:user: Any text. Returns the user portion of an email address."""
return util.shortuser(text)
-def emailuser(text):
- """:emailuser: Any text. Returns the user portion of an email address."""
- return util.emailuser(text)
-
def xmlescape(text):
text = (text
.replace('&', '&amp;')
@@ -378,7 +347,6 @@ filters = {
"rfc3339date": rfc3339date,
"rfc822date": rfc822date,
"short": short,
- "shortbisect": shortbisect,
"shortdate": shortdate,
"stringescape": stringescape,
"stringify": stringify,
@@ -387,7 +355,6 @@ filters = {
"tabindent": tabindent,
"urlescape": urlescape,
"user": userfilter,
- "emailuser": emailuser,
"xmlescape": xmlescape,
}
diff --git a/mercurial/templatekw.py b/mercurial/templatekw.py
index 489dda6..b88bafa 100644
--- a/mercurial/templatekw.py
+++ b/mercurial/templatekw.py
@@ -7,7 +7,6 @@
from node import hex
import patch, util, error
-import hbisect
def showlist(name, values, plural=None, **args):
'''expand set of values.
@@ -146,10 +145,6 @@ def showauthor(repo, ctx, templ, **args):
""":author: String. The unmodified author of the changeset."""
return ctx.user()
-def showbisect(repo, ctx, templ, **args):
- """:bisect: String. The changeset bisection status."""
- return hbisect.label(repo, ctx.node())
-
def showbranch(**args):
""":branch: String. The name of the branch on which the changeset was
committed.
@@ -275,14 +270,6 @@ def shownode(repo, ctx, templ, **args):
"""
return ctx.hex()
-def showphase(repo, ctx, templ, **args):
- """:phase: String. The changeset phase name."""
- return ctx.phasestr()
-
-def showphaseidx(repo, ctx, templ, **args):
- """:phaseidx: Integer. The changeset phase index."""
- return ctx.phase()
-
def showrev(repo, ctx, templ, **args):
""":rev: Integer. The repository-local changeset revision number."""
return ctx.rev()
@@ -301,7 +288,6 @@ def showtags(**args):
# revcache - a cache dictionary for the current revision
keywords = {
'author': showauthor,
- 'bisect': showbisect,
'branch': showbranch,
'branches': showbranches,
'bookmarks': showbookmarks,
@@ -320,22 +306,9 @@ keywords = {
'latesttagdistance': showlatesttagdistance,
'manifest': showmanifest,
'node': shownode,
- 'phase': showphase,
- 'phaseidx': showphaseidx,
'rev': showrev,
'tags': showtags,
}
-def _showparents(**args):
- """:parents: List of strings. The parents of the changeset in "rev:node"
- format. If the changeset has only one "natural" parent (the predecessor
- revision) nothing is shown."""
- pass
-
-dockeywords = {
- 'parents': _showparents,
-}
-dockeywords.update(keywords)
-
# tell hggettext to extract docstrings from these functions:
-i18nfunctions = dockeywords.values()
+i18nfunctions = keywords.values()
diff --git a/mercurial/templater.py b/mercurial/templater.py
index 16558da..2d8dbdd 100644
--- a/mercurial/templater.py
+++ b/mercurial/templater.py
@@ -135,7 +135,7 @@ def runsymbol(context, mapping, key):
v = mapping.get(key)
if v is None:
v = context._defaults.get(key, '')
- if util.safehasattr(v, '__call__'):
+ if hasattr(v, '__call__'):
return v(**mapping)
return v
@@ -172,14 +172,14 @@ def runmap(context, mapping, data):
def buildfunc(exp, context):
n = getsymbol(exp[1])
args = [compileexp(x, context) for x in getlist(exp[2])]
- if n in funcs:
- f = funcs[n]
- return (f, args)
if n in context._filters:
if len(args) != 1:
raise error.ParseError(_("filter %s expects one argument") % n)
f = context._filters[n]
return (runfilter, (args[0][0], args[0][1], f))
+ elif n in context._funcs:
+ f = context._funcs[n]
+ return (f, args)
methods = {
"string": lambda e, c: (runstring, e[1]),
@@ -191,9 +191,6 @@ methods = {
"func": buildfunc,
}
-funcs = {
-}
-
# template engine
path = ['templates', '../templates']
@@ -203,14 +200,14 @@ def _flatten(thing):
'''yield a single stream from a possibly nested set of iterators'''
if isinstance(thing, str):
yield thing
- elif not util.safehasattr(thing, '__iter__'):
+ elif not hasattr(thing, '__iter__'):
if thing is not None:
yield str(thing)
else:
for i in thing:
if isinstance(i, str):
yield i
- elif not util.safehasattr(i, '__iter__'):
+ elif not hasattr(i, '__iter__'):
if i is not None:
yield str(i)
elif i is not None:
@@ -312,7 +309,7 @@ class templater(object):
def load(self, t):
'''Get the template for the given template name. Use a local cache.'''
- if t not in self.cache:
+ if not t in self.cache:
try:
self.cache[t] = util.readfile(self.map[t][1])
except KeyError, inst:
@@ -341,7 +338,7 @@ def templatepath(name=None):
normpaths = []
# executable version (py2exe) doesn't support __file__
- if util.mainfrozen():
+ if hasattr(sys, 'frozen'):
module = sys.executable
else:
module = __file__
diff --git a/mercurial/templates/coal/map b/mercurial/templates/coal/map
index 320996f..50f4937 100644
--- a/mercurial/templates/coal/map
+++ b/mercurial/templates/coal/map
@@ -63,7 +63,6 @@ fileentry = '
filerevision = ../paper/filerevision.tmpl
fileannotate = ../paper/fileannotate.tmpl
filediff = ../paper/filediff.tmpl
-filecomparison = ../paper/filecomparison.tmpl
filelog = ../paper/filelog.tmpl
fileline = '
<div class="parity{parity} source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</div>'
@@ -84,16 +83,6 @@ difflineminus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class=
difflineat = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="atline">{line|escape}</span>'
diffline = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}'
-comparisonblock ='
- <tbody class="block">
- {lines}
- </tbody>'
-comparisonline = '
- <tr>
- <td class="source {type}"><a href="#{lineid}" id="{lineid}">{leftlinenumber}</a> {leftline|escape}</td>
- <td class="source {type}"><a href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</td>
- </tr>'
-
changelogparent = '
<tr>
<th class="parent">parent {rev}:</th>
@@ -210,7 +199,7 @@ indexentry = '
<td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td>
<td>{description}</td>
<td>{contact|obfuscate}</td>
- <td class="age">{lastchange|rfc822date}</td>
+ <td class="age">{lastchange|date}</td>
<td class="indexlinks">{archives%indexarchiveentry}</td>
</tr>\n'
indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}">&nbsp;&darr;{type|escape}</a>'
diff --git a/mercurial/templates/gitweb/changelogentry.tmpl b/mercurial/templates/gitweb/changelogentry.tmpl
index 3935de6..ffe97ce 100644
--- a/mercurial/templates/gitweb/changelogentry.tmpl
+++ b/mercurial/templates/gitweb/changelogentry.tmpl
@@ -1,5 +1,5 @@
<div>
-<a class="title" href="{url}rev/{node|short}{sessionvars%urlparameter}"><span class="age">{date|rfc822date}</span>{desc|strip|firstline|escape|nonempty}<span class="logtags"> {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a>
+<a class="title" href="{url}rev/{node|short}{sessionvars%urlparameter}"><span class="age">{date|date}</span>{desc|strip|firstline|escape|nonempty}<span class="logtags"> {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a>
</div>
<div class="title_text">
<div class="log_link">
diff --git a/mercurial/templates/gitweb/changeset.tmpl b/mercurial/templates/gitweb/changeset.tmpl
index 7c8f731..31399e3 100644
--- a/mercurial/templates/gitweb/changeset.tmpl
+++ b/mercurial/templates/gitweb/changeset.tmpl
@@ -32,7 +32,7 @@ changeset |
<div class="title_text">
<table cellspacing="0">
<tr><td>author</td><td>{author|obfuscate}</td></tr>
-<tr><td></td><td class="date age">{date|rfc822date}</td></tr>
+<tr><td></td><td class="date age">{date|date}</td></tr>
{branch%changesetbranch}
<tr><td>changeset {rev}</td><td style="font-family:monospace">{node|short}</td></tr>
{parent%changesetparent}
diff --git a/mercurial/templates/gitweb/fileannotate.tmpl b/mercurial/templates/gitweb/fileannotate.tmpl
index 66b4aac..6dbe69a 100644
--- a/mercurial/templates/gitweb/fileannotate.tmpl
+++ b/mercurial/templates/gitweb/fileannotate.tmpl
@@ -26,7 +26,6 @@
<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> |
annotate |
<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
-<a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a> |
<a href="{url}help{sessionvars%urlparameter}">help</a>
<br/>
@@ -41,7 +40,7 @@ annotate |
<td>{author|obfuscate}</td></tr>
<tr>
<td></td>
- <td class="date age">{date|rfc822date}</td></tr>
+ <td class="date age">{date|date}</td></tr>
{branch%filerevbranch}
<tr>
<td>changeset {rev}</td>
diff --git a/mercurial/templates/gitweb/filecomparison.tmpl b/mercurial/templates/gitweb/filecomparison.tmpl
deleted file mode 100644
index e062486..0000000
--- a/mercurial/templates/gitweb/filecomparison.tmpl
+++ /dev/null
@@ -1,71 +0,0 @@
-{header}
-<title>{repo|escape}: comparison {file|escape}</title>
-<link rel="alternate" type="application/atom+xml"
- href="{url}atom-log" title="Atom feed for {repo|escape}"/>
-<link rel="alternate" type="application/rss+xml"
- href="{url}rss-log" title="RSS feed for {repo|escape}"/>
-</head>
-<body>
-
-<div class="page_header">
-<a href="{logourl}" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / comparison
-</div>
-
-<div class="page_nav">
-<a href="{url}summary{sessionvars%urlparameter}">summary</a> |
-<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> |
-<a href="{url}log{sessionvars%urlparameter}">changelog</a> |
-<a href="{url}graph{sessionvars%urlparameter}">graph</a> |
-<a href="{url}tags{sessionvars%urlparameter}">tags</a> |
-<a href="{url}bookmarks{sessionvars%urlparameter}">bookmarks</a> |
-<a href="{url}branches{sessionvars%urlparameter}">branches</a> |
-<a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> |
-<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
-<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> |
-<a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a> |
-<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> |
-<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
-<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
-comparison |
-<a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a> |
-<a href="{url}help{sessionvars%urlparameter}">help</a>
-<br/>
-</div>
-
-<div class="title">{file|escape}</div>
-
-<table>
-{branch%filerevbranch}
-<tr>
- <td>changeset {rev}</td>
- <td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr>
-{parent%filecompparent}
-{child%filecompchild}
-</table>
-
-<div class="list_head"></div>
-
-<div class="page_body">
-
-<div class="legend">
- <span class="legendinfo equal">equal</span>
- <span class="legendinfo delete">deleted</span>
- <span class="legendinfo insert">inserted</span>
- <span class="legendinfo replace">replaced</span>
-</div>
-
-<div class="comparison">
- <table style="border-collapse:collapse;">
- <thead class="header">
- <tr>
- <th>{leftrev}:{leftnode|short}</th>
- <th>{rightrev}:{rightnode|short}</th>
- </tr>
- </thead>
- {comparison}
- </table>
-</div>
-
-</div>
-
-{footer}
diff --git a/mercurial/templates/gitweb/filediff.tmpl b/mercurial/templates/gitweb/filediff.tmpl
index 42d0602..5450adb 100644
--- a/mercurial/templates/gitweb/filediff.tmpl
+++ b/mercurial/templates/gitweb/filediff.tmpl
@@ -26,10 +26,8 @@
<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> |
<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
diff |
-<a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
-<a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a> |
+<a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a><br/> |
<a href="{url}help{sessionvars%urlparameter}">help</a>
-<br/>
</div>
<div class="title">{file|escape}</div>
diff --git a/mercurial/templates/gitweb/filelog.tmpl b/mercurial/templates/gitweb/filelog.tmpl
index 0591726..52c042b 100644
--- a/mercurial/templates/gitweb/filelog.tmpl
+++ b/mercurial/templates/gitweb/filelog.tmpl
@@ -23,7 +23,6 @@
revisions |
<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
-<a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url}rss-log/tip/{file|urlescape}">rss</a> |
<a href="{url}help{sessionvars%urlparameter}">help</a>
<br/>
diff --git a/mercurial/templates/gitweb/filerevision.tmpl b/mercurial/templates/gitweb/filerevision.tmpl
index f47320d..9338c67 100644
--- a/mercurial/templates/gitweb/filerevision.tmpl
+++ b/mercurial/templates/gitweb/filerevision.tmpl
@@ -26,7 +26,6 @@ file |
<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> |
<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
-<a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a> |
<a href="{url}help{sessionvars%urlparameter}">help</a>
<br/>
@@ -41,7 +40,7 @@ file |
<td>{author|obfuscate}</td></tr>
<tr>
<td></td>
- <td class="date age">{date|rfc822date}</td></tr>
+ <td class="date age">{date|date}</td></tr>
{branch%filerevbranch}
<tr>
<td>changeset {rev}</td>
diff --git a/mercurial/templates/gitweb/graph.tmpl b/mercurial/templates/gitweb/graph.tmpl
index 039ef8b..0ddcf18 100644
--- a/mercurial/templates/gitweb/graph.tmpl
+++ b/mercurial/templates/gitweb/graph.tmpl
@@ -51,6 +51,16 @@ var data = {jsdata|json};
var graph = new Graph();
graph.scale({bg_height});
+graph.edge = function(x0, y0, x1, y1, color) \{
+
+ this.setColor(color, 0.0, 0.65);
+ this.ctx.beginPath();
+ this.ctx.moveTo(x0, y0);
+ this.ctx.lineTo(x1, y1);
+ this.ctx.stroke();
+
+}
+
var revlink = '<li style="_STYLE"><span class="desc">';
revlink += '<a class="list" href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID"><b>_DESC</b></a>';
revlink += '</span> _TAGS';
diff --git a/mercurial/templates/gitweb/map b/mercurial/templates/gitweb/map
index a1e93f5..3ab9be4 100644
--- a/mercurial/templates/gitweb/map
+++ b/mercurial/templates/gitweb/map
@@ -26,7 +26,6 @@ filenodelink = '
<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> |
<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
- <a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a>
</td>
</tr>'
@@ -38,7 +37,6 @@ filenolink = '
file |
annotate |
<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
- <a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a>
</td>
</tr>'
@@ -83,7 +81,6 @@ fileentry = '
filerevision = filerevision.tmpl
fileannotate = fileannotate.tmpl
filediff = filediff.tmpl
-filecomparison = filecomparison.tmpl
filelog = filelog.tmpl
fileline = '
<div style="font-family:monospace" class="parity{parity}">
@@ -102,17 +99,6 @@ difflineplus = '<span style="color:#008800;"><a class="linenr" href="#{lineid}"
difflineminus = '<span style="color:#cc0000;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
difflineat = '<span style="color:#990099;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
diffline = '<span><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-
-comparisonblock ='
- <tbody class="block">
- {lines}
- </tbody>'
-comparisonline = '
- <tr style="font-family:monospace">
- <td class="{type}"><pre><a class="linenr" href="#{lineid}" id="{lineid}">{leftlinenumber}</a> {leftline|escape}</pre></td>
- <td class="{type}"><pre><a class="linenr" href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</pre></td>
- </tr>'
-
changelogparent = '
<tr>
<th class="parent">parent {rev}:</th>
@@ -176,7 +162,7 @@ fileannotatechild = '
tags = tags.tmpl
tagentry = '
<tr class="parity{parity}">
- <td class="age"><i class="age">{date|rfc822date}</i></td>
+ <td class="age"><i class="age">{date|date}</i></td>
<td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{tag|escape}</b></a></td>
<td class="link">
<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
@@ -187,7 +173,7 @@ tagentry = '
bookmarks = bookmarks.tmpl
bookmarkentry = '
<tr class="parity{parity}">
- <td class="age"><i class="age">{date|rfc822date}</i></td>
+ <td class="age"><i class="age">{date|date}</i></td>
<td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{bookmark|escape}</b></a></td>
<td class="link">
<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
@@ -198,7 +184,7 @@ bookmarkentry = '
branches = branches.tmpl
branchentry = '
<tr class="parity{parity}">
- <td class="age"><i class="age">{date|rfc822date}</i></td>
+ <td class="age"><i class="age">{date|date}</i></td>
<td><a class="list" href="{url}shortlog/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></a></td>
<td class="{status}">{branch|escape}</td>
<td class="link">
@@ -217,15 +203,6 @@ filediffparent = '
</a>
</td>
</tr>'
-filecompparent = '
- <tr>
- <td>parent {rev}</td>
- <td style="font-family:monospace">
- <a class="list" href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">
- {node|short}
- </a>
- </td>
- </tr>'
filelogparent = '
<tr>
<td align="right">parent {rev}:&nbsp;</td>
@@ -238,13 +215,6 @@ filediffchild = '
<a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a>
</td>
</tr>'
-filecompchild = '
- <tr>
- <td>child {rev}</td>
- <td style="font-family:monospace">
- <a class="list" href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a>
- </td>
- </tr>'
filelogchild = '
<tr>
<td align="right">child {rev}:&nbsp;</td>
@@ -258,7 +228,7 @@ inbranchtag = '<span class="inbranchtag" title="{name}">{name}</span> '
bookmarktag = '<span class="bookmarktag" title="{name}">{name}</span> '
shortlogentry = '
<tr class="parity{parity}">
- <td class="age"><i class="age">{date|rfc822date}</i></td>
+ <td class="age"><i class="age">{date|date}</i></td>
<td><i>{author|person}</i></td>
<td>
<a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">
@@ -273,7 +243,7 @@ shortlogentry = '
</tr>'
filelogentry = '
<tr class="parity{parity}">
- <td class="age"><i class="age">{date|rfc822date}</i></td>
+ <td class="age"><i class="age">{date|date}</i></td>
<td>
<a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">
<b>{desc|strip|firstline|escape|nonempty}</b>
@@ -292,7 +262,7 @@ indexentry = '
</td>
<td>{description}</td>
<td>{contact|obfuscate}</td>
- <td class="age">{lastchange|rfc822date}</td>
+ <td class="age">{lastchange|date}</td>
<td class="indexlinks">{archives%indexarchiveentry}</td>
<td><div class="rss_logo"><a href="{url}rss-log">RSS</a> <a href="{url}atom-log">Atom</a></div></td>
</tr>\n'
diff --git a/mercurial/templates/gitweb/summary.tmpl b/mercurial/templates/gitweb/summary.tmpl
index 2d1aca2..79f42a1 100644
--- a/mercurial/templates/gitweb/summary.tmpl
+++ b/mercurial/templates/gitweb/summary.tmpl
@@ -56,11 +56,11 @@ summary |
<tr class="light"><td colspan="3"><a class="list" href="{url}bookmarks{sessionvars%urlparameter}">...</a></td></tr>
</table>
-<div><a class="title" href="{url}branches{sessionvars%urlparameter}">branches</a></div>
+<div><a class="title" href="#">branches</a></div>
<table cellspacing="0">
{branches%branchentry}
<tr class="light">
- <td colspan="4"><a class="list" href="{url}branches{sessionvars%urlparameter}">...</a></td>
+ <td colspan="4"><a class="list" href="#">...</a></td>
</tr>
</table>
{footer}
diff --git a/mercurial/templates/map-cmdline.bisect b/mercurial/templates/map-cmdline.bisect
deleted file mode 100644
index 37c5ffb..0000000
--- a/mercurial/templates/map-cmdline.bisect
+++ /dev/null
@@ -1,25 +0,0 @@
-changeset = 'changeset: {rev}:{node|short}\nbisect: {bisect}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n'
-changeset_quiet = '{bisect|shortbisect} {rev}:{node|short}\n'
-changeset_verbose = 'changeset: {rev}:{node|short}\nbisect: {bisect}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n'
-changeset_debug = 'changeset: {rev}:{node}\nbisect: {bisect}\n{branches}{bookmarks}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n'
-start_files = 'files: '
-file = ' {file}'
-end_files = '\n'
-start_file_mods = 'files: '
-file_mod = ' {file_mod}'
-end_file_mods = '\n'
-start_file_adds = 'files+: '
-file_add = ' {file_add}'
-end_file_adds = '\n'
-start_file_dels = 'files-: '
-file_del = ' {file_del}'
-end_file_dels = '\n'
-start_file_copies = 'copies: '
-file_copy = ' {name} ({source})'
-end_file_copies = '\n'
-parent = 'parent: {rev}:{node|formatnode}\n'
-manifest = 'manifest: {rev}:{node}\n'
-branch = 'branch: {branch}\n'
-tag = 'tag: {tag}\n'
-bookmark = 'bookmark: {bookmark}\n'
-extra = 'extra: {key}={value|stringescape}\n'
diff --git a/mercurial/templates/map-cmdline.default b/mercurial/templates/map-cmdline.default
index 41131de..aeb695b 100644
--- a/mercurial/templates/map-cmdline.default
+++ b/mercurial/templates/map-cmdline.default
@@ -1,7 +1,7 @@
changeset = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n'
changeset_quiet = '{rev}:{node|short}\n'
changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{bookmarks}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n'
-changeset_debug = 'changeset: {rev}:{node}\n{branches}{bookmarks}{tags}phase: {phase}\n{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n'
+changeset_debug = 'changeset: {rev}:{node}\n{branches}{bookmarks}{tags}{parents}{manifest}user: {author}\ndate: {date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n'
start_files = 'files: '
file = ' {file}'
end_files = '\n'
diff --git a/mercurial/templates/monoblue/bookmarks.tmpl b/mercurial/templates/monoblue/bookmarks.tmpl
index d3870da..7fb79ee 100644
--- a/mercurial/templates/monoblue/bookmarks.tmpl
+++ b/mercurial/templates/monoblue/bookmarks.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / bookmarks</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Bookmarks</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
diff --git a/mercurial/templates/monoblue/branches.tmpl b/mercurial/templates/monoblue/branches.tmpl
index 4ef9c25..969e5a1 100644
--- a/mercurial/templates/monoblue/branches.tmpl
+++ b/mercurial/templates/monoblue/branches.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / branches</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Branches</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
diff --git a/mercurial/templates/monoblue/changelogentry.tmpl b/mercurial/templates/monoblue/changelogentry.tmpl
index 1c04f07..ef7be4e 100644
--- a/mercurial/templates/monoblue/changelogentry.tmpl
+++ b/mercurial/templates/monoblue/changelogentry.tmpl
@@ -1,6 +1,6 @@
<h3 class="changelog"><a class="title" href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}<span class="logtags"> {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a></h3>
<ul class="changelog-entry">
- <li class="age">{date|rfc822date}</li>
+ <li class="age">{date|date}</li>
<li>by <span class="name">{author|obfuscate}</span> <span class="revdate">[{date|rfc822date}] rev {rev}</span></li>
<li class="description">{desc|strip|escape|addbreaks|nonempty}</li>
</ul>
diff --git a/mercurial/templates/monoblue/changeset.tmpl b/mercurial/templates/monoblue/changeset.tmpl
index 0ecbffb..47284a9 100644
--- a/mercurial/templates/monoblue/changeset.tmpl
+++ b/mercurial/templates/monoblue/changeset.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / changeset</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / files</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
@@ -38,13 +38,13 @@
<h2 class="no-link no-border">changeset</h2>
<h3 class="changeset"><a href="{url}raw-rev/{node|short}">{desc|strip|escape|firstline|nonempty} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}{bookmarks%bookmarktag}</span></a></h3>
- <p class="changeset-age age">{date|rfc822date}</p>
+ <p class="changeset-age age">{date|date}</p>
<dl class="overview">
<dt>author</dt>
<dd>{author|obfuscate}</dd>
<dt>date</dt>
- <dd>{date|rfc822date}</dd>
+ <dd>{date|date}</dd>
{branch%changesetbranch}
<dt>changeset {rev}</dt>
<dd>{node|short}</dd>
diff --git a/mercurial/templates/monoblue/error.tmpl b/mercurial/templates/monoblue/error.tmpl
index 61f3215..edbef72 100644
--- a/mercurial/templates/monoblue/error.tmpl
+++ b/mercurial/templates/monoblue/error.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / not found: {repo|escape}</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Not found: {repo|escape}</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
diff --git a/mercurial/templates/monoblue/fileannotate.tmpl b/mercurial/templates/monoblue/fileannotate.tmpl
index 38563cf..2e2349d 100644
--- a/mercurial/templates/monoblue/fileannotate.tmpl
+++ b/mercurial/templates/monoblue/fileannotate.tmpl
@@ -35,19 +35,18 @@
<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
<li class="current">annotate</li>
<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
- <li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li><a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a></li>
</ul>
<h2 class="no-link no-border">{file|escape}@{node|short} (annotated)</h2>
<h3 class="changeset">{file|escape}</h3>
- <p class="changeset-age age">{date|rfc822date}</p>
+ <p class="changeset-age age">{date|date}</p>
<dl class="overview">
<dt>author</dt>
<dd>{author|obfuscate}</dd>
<dt>date</dt>
- <dd>{date|rfc822date}</dd>
+ <dd>{date|date}</dd>
{branch%filerevbranch}
<dt>changeset {rev}</dt>
<dd><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>
diff --git a/mercurial/templates/monoblue/filecomparison.tmpl b/mercurial/templates/monoblue/filecomparison.tmpl
deleted file mode 100644
index 99204a6..0000000
--- a/mercurial/templates/monoblue/filecomparison.tmpl
+++ /dev/null
@@ -1,72 +0,0 @@
-{header}
-<title>{repo|escape}: comparison {file|escape}</title>
- <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/>
- <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/>
-</head>
-
-<body>
-<div id="container">
- <div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / file comparison</h1>
-
- <form action="{url}log">
- {sessionvars%hiddenformentry}
- <dl class="search">
- <dt><label>Search: </label></dt>
- <dd><input type="text" name="rev" /></dd>
- </dl>
- </form>
-
- <ul class="page-nav">
- <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li>
- <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li>
- <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li>
- <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
- <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
- <li><a href="{url}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
- <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li>
- <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a></li>
- <li><a href="{url}help{sessionvars%urlparameter}">help</a></li>
- </ul>
- </div>
-
- <ul class="submenu">
- <li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
- <li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
- <li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
- <li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
- <li class="current">comparison</li>
- <li><a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a></li>
- </ul>
-
- <h2 class="no-link no-border">comparison: {file|escape}</h2>
- <h3 class="changeset">{file|escape}</h3>
-
- <dl class="overview">
- {branch%filerevbranch}
- <dt>changeset {rev}</dt>
- <dd><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>
- {parent%filecompparent}
- {child%filecompchild}
- </dl>
-
- <div class="legend">
- <span class="legendinfo equal">equal</span>
- <span class="legendinfo delete">deleted</span>
- <span class="legendinfo insert">inserted</span>
- <span class="legendinfo replace">replaced</span>
- </div>
-
- <div class="comparison">
- <table class="bigtable">
- <thead class="header">
- <tr>
- <th>{leftrev}:{leftnode|short}</th>
- <th>{rightrev}:{rightnode|short}</th>
- </tr>
- </thead>
- {comparison}
- </table>
- </div>
-
-{footer}
diff --git a/mercurial/templates/monoblue/filediff.tmpl b/mercurial/templates/monoblue/filediff.tmpl
index 2c2fa70..e6298f9 100644
--- a/mercurial/templates/monoblue/filediff.tmpl
+++ b/mercurial/templates/monoblue/filediff.tmpl
@@ -35,7 +35,6 @@
<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
<li class="current">diff</li>
- <li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li><a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a></li>
</ul>
diff --git a/mercurial/templates/monoblue/filelog.tmpl b/mercurial/templates/monoblue/filelog.tmpl
index d737cd4..0e77f9f 100644
--- a/mercurial/templates/monoblue/filelog.tmpl
+++ b/mercurial/templates/monoblue/filelog.tmpl
@@ -35,7 +35,6 @@
<li class="current">revisions</li>
<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
- <li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li><a href="{url}rss-log/tip/{file|urlescape}">rss</a></li>
</ul>
diff --git a/mercurial/templates/monoblue/filerevision.tmpl b/mercurial/templates/monoblue/filerevision.tmpl
index 6d404d7..dd73037 100644
--- a/mercurial/templates/monoblue/filerevision.tmpl
+++ b/mercurial/templates/monoblue/filerevision.tmpl
@@ -35,19 +35,18 @@
<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li>
<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
- <li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
</ul>
<h2 class="no-link no-border">{file|escape}@{node|short}</h2>
<h3 class="changeset">{file|escape}</h3>
- <p class="changeset-age age">{date|rfc822date}</p>
+ <p class="changeset-age age">{date|date}</p>
<dl class="overview">
<dt>author</dt>
<dd>{author|obfuscate}</dd>
<dt>date</dt>
- <dd>{date|rfc822date}</dd>
+ <dd>{date|date}</dd>
{branch%filerevbranch}
<dt>changeset {rev}</dt>
<dd><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>
diff --git a/mercurial/templates/monoblue/footer.tmpl b/mercurial/templates/monoblue/footer.tmpl
index c19bdae..c7bdca5 100644
--- a/mercurial/templates/monoblue/footer.tmpl
+++ b/mercurial/templates/monoblue/footer.tmpl
@@ -9,7 +9,7 @@
</div>
<div id="powered-by">
- <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a></p>
+ <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a></p>
</div>
<div id="corner-top-left"></div>
diff --git a/mercurial/templates/monoblue/graph.tmpl b/mercurial/templates/monoblue/graph.tmpl
index fad0d82..08fd79d 100644
--- a/mercurial/templates/monoblue/graph.tmpl
+++ b/mercurial/templates/monoblue/graph.tmpl
@@ -49,6 +49,16 @@
var graph = new Graph();
graph.scale({bg_height});
+ graph.edge = function(x0, y0, x1, y1, color) \{
+
+ this.setColor(color, 0.0, 0.65);
+ this.ctx.beginPath();
+ this.ctx.moveTo(x0, y0);
+ this.ctx.lineTo(x1, y1);
+ this.ctx.stroke();
+
+ }
+
var revlink = '<li style="_STYLE"><span class="desc">';
revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
revlink += '</span>_TAGS<span class="info">_DATE, by _USER</span></li>';
diff --git a/mercurial/templates/monoblue/help.tmpl b/mercurial/templates/monoblue/help.tmpl
index 8cdb77b..33468f2 100644
--- a/mercurial/templates/monoblue/help.tmpl
+++ b/mercurial/templates/monoblue/help.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / help</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Help</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
diff --git a/mercurial/templates/monoblue/helptopics.tmpl b/mercurial/templates/monoblue/helptopics.tmpl
index 364dd28..ca4ca9b 100644
--- a/mercurial/templates/monoblue/helptopics.tmpl
+++ b/mercurial/templates/monoblue/helptopics.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / help</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Help</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
diff --git a/mercurial/templates/monoblue/index.tmpl b/mercurial/templates/monoblue/index.tmpl
index e5086f7..25fe333 100644
--- a/mercurial/templates/monoblue/index.tmpl
+++ b/mercurial/templates/monoblue/index.tmpl
@@ -26,7 +26,7 @@
</div>
<div id="powered-by">
- <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a></p>
+ <p><a href="{logourl}" title="Mercurial"><img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a></p>
</div>
<div id="corner-top-left"></div>
diff --git a/mercurial/templates/monoblue/map b/mercurial/templates/monoblue/map
index 209254d..b1fd123 100644
--- a/mercurial/templates/monoblue/map
+++ b/mercurial/templates/monoblue/map
@@ -26,7 +26,6 @@ filenodelink = '
<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> |
<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> |
<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
- <a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a>
</td>
</tr>'
@@ -38,7 +37,6 @@ filenolink = '
file |
annotate |
<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> |
- <a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a> |
<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a>
</td>
</tr>'
@@ -76,7 +74,6 @@ fileentry = '
filerevision = filerevision.tmpl
fileannotate = fileannotate.tmpl
filediff = filediff.tmpl
-filecomparison = filecomparison.tmpl
filelog = filelog.tmpl
fileline = '
<div style="font-family:monospace" class="parity{parity}">
@@ -85,7 +82,7 @@ fileline = '
annotateline = '
<tr class="parity{parity}">
<td class="linenr">
- <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
+ <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}"
title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
</td>
<td class="lineno">
@@ -97,17 +94,6 @@ difflineplus = '<span style="color:#008800;"><a class="linenr" href="#{lineid}"
difflineminus = '<span style="color:#cc0000;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
difflineat = '<span style="color:#990099;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
diffline = '<span><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>'
-
-comparisonblock ='
- <tbody class="block">
- {lines}
- </tbody>'
-comparisonline = '
- <tr>
- <td class="source {type}"><a class="linenr" href="#{lineid}" id="{lineid}">{leftlinenumber}</a> {leftline|escape}</td>
- <td class="source {type}"><a class="linenr" href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</td>
- </tr>'
-
changelogparent = '
<tr>
<th class="parent">parent {rev}:</th>
@@ -155,7 +141,7 @@ fileannotatechild = '
tags = tags.tmpl
tagentry = '
<tr class="parity{parity}">
- <td class="nowrap age">{date|rfc822date}</td>
+ <td class="nowrap age">{date|date}</td>
<td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a></td>
<td class="nowrap">
<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
@@ -166,7 +152,7 @@ tagentry = '
bookmarks = bookmarks.tmpl
bookmarkentry = '
<tr class="parity{parity}">
- <td class="nowrap date">{date|rfc822date}</td>
+ <td class="nowrap date">{date|date}</td>
<td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{bookmark|escape}</a></td>
<td class="nowrap">
<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> |
@@ -177,7 +163,7 @@ bookmarkentry = '
branches = branches.tmpl
branchentry = '
<tr class="parity{parity}">
- <td class="nowrap age">{date|rfc822date}</td>
+ <td class="nowrap age">{date|date}</td>
<td><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{node|short}</a></td>
<td class="{status}">{branch|escape}</td>
<td class="nowrap">
@@ -190,9 +176,6 @@ diffblock = '<pre>{lines}</pre>'
filediffparent = '
<dt>parent {rev}</dt>
<dd><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>'
-filecompparent = '
- <dt>parent {rev}</dt>
- <dd><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>'
filelogparent = '
<tr>
<td align="right">parent {rev}:&nbsp;</td>
@@ -201,9 +184,6 @@ filelogparent = '
filediffchild = '
<dt>child {rev}</dt>
<dd><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>'
-filecompchild = '
- <dt>child {rev}</dt>
- <dd><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>'
filelogchild = '
<tr>
<td align="right">child {rev}:&nbsp;</td>
@@ -216,7 +196,7 @@ inbranchtag = '<span class="inbranchtag" title="{name}">{name}</span> '
bookmarktag = '<span class="bookmarktag" title="{name}">{name}</span> '
shortlogentry = '
<tr class="parity{parity}">
- <td class="nowrap age">{date|rfc822date}</td>
+ <td class="nowrap age">{date|date}</td>
<td>{author|person}</td>
<td>
<a href="{url}rev/{node|short}{sessionvars%urlparameter}">
@@ -231,7 +211,7 @@ shortlogentry = '
</tr>'
filelogentry = '
<tr class="parity{parity}">
- <td class="nowrap age">{date|rfc822date}</td>
+ <td class="nowrap age">{date|date}</td>
<td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a></td>
<td class="nowrap">
<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a>&nbsp;|&nbsp;<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a>&nbsp;|&nbsp;<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a>
@@ -244,7 +224,7 @@ indexentry = '
<td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td>
<td>{description}</td>
<td>{contact|obfuscate}</td>
- <td class="age">{lastchange|rfc822date}</td>
+ <td class="age">{lastchange|date}</td>
<td class="indexlinks">{archives%indexarchiveentry}</td>
<td>
<div class="rss_logo">
diff --git a/mercurial/templates/monoblue/notfound.tmpl b/mercurial/templates/monoblue/notfound.tmpl
index 3410e6c..38df584 100644
--- a/mercurial/templates/monoblue/notfound.tmpl
+++ b/mercurial/templates/monoblue/notfound.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / not found: {repo|escape}</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Not found: {repo|escape}</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
diff --git a/mercurial/templates/monoblue/tags.tmpl b/mercurial/templates/monoblue/tags.tmpl
index 3e08735..0c8f93d 100644
--- a/mercurial/templates/monoblue/tags.tmpl
+++ b/mercurial/templates/monoblue/tags.tmpl
@@ -7,7 +7,7 @@
<body>
<div id="container">
<div class="page-header">
- <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / tags</h1>
+ <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Tags</h1>
<form action="{url}log">
{sessionvars%hiddenformentry}
diff --git a/mercurial/templates/paper/bookmarks.tmpl b/mercurial/templates/paper/bookmarks.tmpl
index 5989243..897d84b 100644
--- a/mercurial/templates/paper/bookmarks.tmpl
+++ b/mercurial/templates/paper/bookmarks.tmpl
@@ -11,7 +11,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
diff --git a/mercurial/templates/paper/branches.tmpl b/mercurial/templates/paper/branches.tmpl
index b26a04d..b239291 100644
--- a/mercurial/templates/paper/branches.tmpl
+++ b/mercurial/templates/paper/branches.tmpl
@@ -11,7 +11,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
diff --git a/mercurial/templates/paper/changeset.tmpl b/mercurial/templates/paper/changeset.tmpl
index b7cc62b..cca2745 100644
--- a/mercurial/templates/paper/changeset.tmpl
+++ b/mercurial/templates/paper/changeset.tmpl
@@ -6,7 +6,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -49,7 +49,7 @@ files, or words in the commit message</div>
</tr>
<tr>
<th class="date">date</th>
- <td class="date age">{date|rfc822date}</td></tr>
+ <td class="date age">{date|date}</td></tr>
<tr>
<th class="author">parents</th>
<td class="author">{parent%changesetparent}</td>
diff --git a/mercurial/templates/paper/error.tmpl b/mercurial/templates/paper/error.tmpl
index 6d17a80..1a850cb 100644
--- a/mercurial/templates/paper/error.tmpl
+++ b/mercurial/templates/paper/error.tmpl
@@ -7,7 +7,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -15,8 +15,6 @@
<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
<li><a href="{url}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li>
-</ul>
-<ul>
<li><a href="{url}help{sessionvars%urlparameter}">help</a></li>
</ul>
</div>
diff --git a/mercurial/templates/paper/fileannotate.tmpl b/mercurial/templates/paper/fileannotate.tmpl
index 1388337..121ef5f 100644
--- a/mercurial/templates/paper/fileannotate.tmpl
+++ b/mercurial/templates/paper/fileannotate.tmpl
@@ -7,7 +7,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -25,7 +25,6 @@
<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
<li><a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
-<li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li class="active">annotate</li>
<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
<li><a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a></li>
@@ -55,7 +54,7 @@ files, or words in the commit message</div>
</tr>
<tr>
<th class="date">date</th>
- <td class="date age">{date|rfc822date}</td>
+ <td class="date age">{date|date}</td>
</tr>
<tr>
<th class="author">parents</th>
diff --git a/mercurial/templates/paper/filecomparison.tmpl b/mercurial/templates/paper/filecomparison.tmpl
deleted file mode 100644
index 9d3421a..0000000
--- a/mercurial/templates/paper/filecomparison.tmpl
+++ /dev/null
@@ -1,93 +0,0 @@
-{header}
-<title>{repo|escape}: {file|escape} comparison</title>
-</head>
-<body>
-
-<div class="container">
-<div class="menu">
-<div class="logo">
-<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
-</div>
-<ul>
-<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
-<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li>
-<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li>
-<li><a href="{url}bookmarks{sessionvars%urlparameter}">bookmarks</a></li>
-<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li>
-</ul>
-<ul>
-<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li>
-<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li>
-</ul>
-<ul>
-<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
-<li><a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
-<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
-<li class="active">comparison</li>
-<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
-<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
-<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
-</ul>
-<ul>
-<li><a href="{url}help{sessionvars%urlparameter}">help</a></li>
-</ul>
-</div>
-
-<div class="main">
-<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2>
-<h3>comparison {file|escape} @ {rev}:{node|short}</h3>
-
-<form class="search" action="{url}log">
-<p>{sessionvars%hiddenformentry}</p>
-<p><input name="rev" id="search1" type="text" size="30" /></p>
-<div id="hint">find changesets by author, revision,
-files, or words in the commit message</div>
-</form>
-
-<div class="description">{desc|strip|escape|nonempty}</div>
-
-<table id="changesetEntry">
-<tr>
- <th>author</th>
- <td>{author|obfuscate}</td>
-</tr>
-<tr>
- <th>date</th>
- <td class="date age">{date|rfc822date}</td>
-</tr>
-<tr>
- <th>parents</th>
- <td>{parent%filerevparent}</td>
-</tr>
-<tr>
- <th>children</th>
- <td>{child%filerevchild}</td>
-</tr>
-{changesettag}
-</table>
-
-<div class="overflow">
-<div class="sourcefirst"> comparison</div>
-<div class="legend">
- <span class="legendinfo equal">equal</span>
- <span class="legendinfo delete">deleted</span>
- <span class="legendinfo insert">inserted</span>
- <span class="legendinfo replace">replaced</span>
-</div>
-
-<table class="bigtable">
- <thead class="header">
- <tr>
- <th>{leftrev}:{leftnode|short}</th>
- <th>{rightrev}:{rightnode|short}</th>
- </tr>
- </thead>
- {comparison}
-</table>
-
-</div>
-</div>
-</div>
-
-{footer}
diff --git a/mercurial/templates/paper/filediff.tmpl b/mercurial/templates/paper/filediff.tmpl
index b8d83de..3b63266 100644
--- a/mercurial/templates/paper/filediff.tmpl
+++ b/mercurial/templates/paper/filediff.tmpl
@@ -7,7 +7,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -24,7 +24,6 @@
<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
<li><a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
<li class="active">diff</li>
-<li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
@@ -54,7 +53,7 @@ files, or words in the commit message</div>
</tr>
<tr>
<th>date</th>
- <td class="date age">{date|rfc822date}</td>
+ <td class="date age">{date|date}</td>
</tr>
<tr>
<th>parents</th>
diff --git a/mercurial/templates/paper/filelog.tmpl b/mercurial/templates/paper/filelog.tmpl
index 8b043da..d03fcb4 100644
--- a/mercurial/templates/paper/filelog.tmpl
+++ b/mercurial/templates/paper/filelog.tmpl
@@ -11,7 +11,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -27,7 +27,6 @@
<ul>
<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li>
<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
-<li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
<li class="active">file log</li>
<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
diff --git a/mercurial/templates/paper/filelogentry.tmpl b/mercurial/templates/paper/filelogentry.tmpl
index 8b4ae84..2829500 100644
--- a/mercurial/templates/paper/filelogentry.tmpl
+++ b/mercurial/templates/paper/filelogentry.tmpl
@@ -1,5 +1,5 @@
<tr class="parity{parity}">
- <td class="age">{date|rfc822date}</td>
+ <td class="age">{date|date}</td>
<td class="author">{author|person}</td>
<td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}{rename%filelogrename}</td>
</tr>
diff --git a/mercurial/templates/paper/filerevision.tmpl b/mercurial/templates/paper/filerevision.tmpl
index c2a7468..d41356a 100644
--- a/mercurial/templates/paper/filerevision.tmpl
+++ b/mercurial/templates/paper/filerevision.tmpl
@@ -7,7 +7,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -23,7 +23,6 @@
<li class="active">file</li>
<li><a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li>
<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li>
-<li><a href="{url}comparison/{node|short}/{file|urlescape}{sessionvars%urlparameter}">comparison</a></li>
<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li>
<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li>
<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li>
@@ -53,7 +52,7 @@ files, or words in the commit message</div>
</tr>
<tr>
<th class="date">date</th>
- <td class="date age">{date|rfc822date}</td>
+ <td class="date age">{date|date}</td>
</tr>
<tr>
<th class="author">parents</th>
diff --git a/mercurial/templates/paper/graph.tmpl b/mercurial/templates/paper/graph.tmpl
index bee94ae..7a4f363 100644
--- a/mercurial/templates/paper/graph.tmpl
+++ b/mercurial/templates/paper/graph.tmpl
@@ -12,7 +12,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
@@ -62,6 +62,16 @@ var data = {jsdata|json};
var graph = new Graph();
graph.scale({bg_height});
+graph.edge = function(x0, y0, x1, y1, color) \{
+
+ this.setColor(color, 0.0, 0.65);
+ this.ctx.beginPath();
+ this.ctx.moveTo(x0, y0);
+ this.ctx.lineTo(x1, y1);
+ this.ctx.stroke();
+
+}
+
var revlink = '<li style="_STYLE"><span class="desc">';
revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
revlink += '</span>_TAGS<span class="info">_DATE, by _USER</span></li>';
diff --git a/mercurial/templates/paper/help.tmpl b/mercurial/templates/paper/help.tmpl
index f490918..eab0ae5 100644
--- a/mercurial/templates/paper/help.tmpl
+++ b/mercurial/templates/paper/help.tmpl
@@ -11,7 +11,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
@@ -21,7 +21,7 @@
<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li>
</ul>
<ul>
- <li class="active"><a href="{url}help{sessionvars%urlparameter}">help</a></li>
+ <li class="active">help</li>
</ul>
</div>
diff --git a/mercurial/templates/paper/helptopics.tmpl b/mercurial/templates/paper/helptopics.tmpl
index a723b53..537954f 100644
--- a/mercurial/templates/paper/helptopics.tmpl
+++ b/mercurial/templates/paper/helptopics.tmpl
@@ -11,7 +11,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
diff --git a/mercurial/templates/paper/index.tmpl b/mercurial/templates/paper/index.tmpl
index 04b4ffb..57968f7 100644
--- a/mercurial/templates/paper/index.tmpl
+++ b/mercurial/templates/paper/index.tmpl
@@ -6,7 +6,7 @@
<div class="container">
<div class="menu">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a>
</div>
<div class="main">
<h2>Mercurial Repositories</h2>
diff --git a/mercurial/templates/paper/manifest.tmpl b/mercurial/templates/paper/manifest.tmpl
index 3dc7e77..1537550 100644
--- a/mercurial/templates/paper/manifest.tmpl
+++ b/mercurial/templates/paper/manifest.tmpl
@@ -7,7 +7,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li>
diff --git a/mercurial/templates/paper/map b/mercurial/templates/paper/map
index d384cc6..df9242a 100644
--- a/mercurial/templates/paper/map
+++ b/mercurial/templates/paper/map
@@ -62,7 +62,6 @@ fileentry = '
filerevision = filerevision.tmpl
fileannotate = fileannotate.tmpl
filediff = filediff.tmpl
-filecomparison = filecomparison.tmpl
filelog = filelog.tmpl
fileline = '
<div class="parity{parity} source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</div>'
@@ -71,7 +70,7 @@ filelogentry = filelogentry.tmpl
annotateline = '
<tr class="parity{parity}">
<td class="annotate">
- <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}"
+ <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}"
title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a>
</td>
<td class="source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</td>
@@ -83,16 +82,6 @@ difflineminus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class=
difflineat = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="atline">{line|escape}</span>'
diffline = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}'
-comparisonblock ='
- <tbody class="block">
- {lines}
- </tbody>'
-comparisonline = '
- <tr>
- <td class="source {type}"><a href="#{lineid}" id="{lineid}">{leftlinenumber}</a> {leftline|escape}</td>
- <td class="source {type}"><a href="#{lineid}" id="{lineid}">{rightlinenumber}</a> {rightline|escape}</td>
- </tr>'
-
changelogparent = '
<tr>
<th class="parent">parent {rev}:</th>
@@ -209,7 +198,7 @@ indexentry = '
<td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td>
<td>{description}</td>
<td>{contact|obfuscate}</td>
- <td class="age">{lastchange|rfc822date}</td>
+ <td class="age">{lastchange|date}</td>
<td class="indexlinks">{archives%indexarchiveentry}</td>
</tr>\n'
indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}">&nbsp;&darr;{type|escape}</a>'
diff --git a/mercurial/templates/paper/search.tmpl b/mercurial/templates/paper/search.tmpl
index feee155..f9d5e1f 100644
--- a/mercurial/templates/paper/search.tmpl
+++ b/mercurial/templates/paper/search.tmpl
@@ -7,7 +7,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a>
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
</div>
<ul>
<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
diff --git a/mercurial/templates/paper/shortlog.tmpl b/mercurial/templates/paper/shortlog.tmpl
index b69d4b0..a2694b5 100644
--- a/mercurial/templates/paper/shortlog.tmpl
+++ b/mercurial/templates/paper/shortlog.tmpl
@@ -11,7 +11,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li class="active">log</li>
diff --git a/mercurial/templates/paper/shortlogentry.tmpl b/mercurial/templates/paper/shortlogentry.tmpl
index 09e94c0..7faf468 100644
--- a/mercurial/templates/paper/shortlogentry.tmpl
+++ b/mercurial/templates/paper/shortlogentry.tmpl
@@ -1,5 +1,5 @@
<tr class="parity{parity}">
- <td class="age">{date|rfc822date}</td>
+ <td class="age">{date|date}</td>
<td class="author">{author|person}</td>
<td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags % '<span class="tag">{name|escape}</span> '}{bookmarks % '<span class="tag">{name|escape}</span> '}</td>
</tr>
diff --git a/mercurial/templates/paper/tags.tmpl b/mercurial/templates/paper/tags.tmpl
index 9b4e623..a2321cd 100644
--- a/mercurial/templates/paper/tags.tmpl
+++ b/mercurial/templates/paper/tags.tmpl
@@ -11,7 +11,7 @@
<div class="menu">
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" alt="mercurial" /></a>
+<img src="{staticurl}hglogo.png" alt="mercurial" /></a>
</div>
<ul>
<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li>
diff --git a/mercurial/templates/raw/graph.tmpl b/mercurial/templates/raw/graph.tmpl
deleted file mode 100644
index da2a81a..0000000
--- a/mercurial/templates/raw/graph.tmpl
+++ /dev/null
@@ -1,6 +0,0 @@
-{header}
-# HG graph
-# Node ID {node}
-# Rows shown {rows}
-
-{nodes%graphnode}
diff --git a/mercurial/templates/raw/graphedge.tmpl b/mercurial/templates/raw/graphedge.tmpl
deleted file mode 100644
index cb84efa..0000000
--- a/mercurial/templates/raw/graphedge.tmpl
+++ /dev/null
@@ -1 +0,0 @@
-edge: ({col}, {row}) -> ({nextcol}, {nextrow}) (color {color})
diff --git a/mercurial/templates/raw/graphnode.tmpl b/mercurial/templates/raw/graphnode.tmpl
deleted file mode 100644
index b1c9983..0000000
--- a/mercurial/templates/raw/graphnode.tmpl
+++ /dev/null
@@ -1,7 +0,0 @@
-changeset: {node}
-user: {user}
-date: {age}
-summary: {desc}
-{branches%branchname}{tags%tagname}{bookmarks%bookmarkname}
-node: ({col}, {row}) (color {color})
-{edges%graphedge}
diff --git a/mercurial/templates/raw/map b/mercurial/templates/raw/map
index 5a682b2..ed5c35b 100644
--- a/mercurial/templates/raw/map
+++ b/mercurial/templates/raw/map
@@ -1,5 +1,3 @@
-default = 'shortlog'
-shortlog = "'raw' is not a browsable style"
mimetype = 'text/plain; charset={encoding}'
header = ''
footer = ''
@@ -30,9 +28,3 @@ bookmarks = '{entries%bookmarkentry}'
bookmarkentry = '{bookmark} {node}\n'
branches = '{entries%branchentry}'
branchentry = '{branch} {node} {status}\n'
-graph = graph.tmpl
-graphnode = graphnode.tmpl
-graphedge = graphedge.tmpl
-bookmarkname = 'bookmark: {name}\n'
-branchname = 'branch: {name}\n'
-tagname = 'tag: {name}\n'
diff --git a/mercurial/templates/spartan/changelogentry.tmpl b/mercurial/templates/spartan/changelogentry.tmpl
index f506b94..52dd3fd 100644
--- a/mercurial/templates/spartan/changelogentry.tmpl
+++ b/mercurial/templates/spartan/changelogentry.tmpl
@@ -1,6 +1,6 @@
<table class="logEntry parity{parity}">
<tr>
- <th><span class="age">{date|rfc822date}</span>:</th>
+ <th><span class="age">{date|date}</span>:</th>
<th class="firstline">{desc|strip|firstline|escape|nonempty}</th>
</tr>
<tr>
@@ -16,7 +16,7 @@
</tr>
<tr>
<th class="date">date:</th>
- <td class="date">{date|rfc822date}</td>
+ <td class="date">{date|date}</td>
</tr>
<tr>
<th class="files"><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>:</th>
diff --git a/mercurial/templates/spartan/changeset.tmpl b/mercurial/templates/spartan/changeset.tmpl
index b357126..51944ca 100644
--- a/mercurial/templates/spartan/changeset.tmpl
+++ b/mercurial/templates/spartan/changeset.tmpl
@@ -31,7 +31,7 @@
</tr>
<tr>
<th class="date">date:</th>
- <td class="date age">{date|rfc822date}</td>
+ <td class="date age">{date|date}</td>
</tr>
<tr>
<th class="files">files:</th>
diff --git a/mercurial/templates/spartan/fileannotate.tmpl b/mercurial/templates/spartan/fileannotate.tmpl
index 5d6a72b..6648103 100644
--- a/mercurial/templates/spartan/fileannotate.tmpl
+++ b/mercurial/templates/spartan/fileannotate.tmpl
@@ -30,7 +30,7 @@
<td>{author|obfuscate}</td></tr>
<tr>
<td class="metatag">date:</td>
- <td class="date age">{date|rfc822date}</td>
+ <td class="date age">{date|date}</td>
</tr>
<tr>
<td class="metatag">permissions:</td>
diff --git a/mercurial/templates/spartan/filelogentry.tmpl b/mercurial/templates/spartan/filelogentry.tmpl
index baff52d..63dd686 100644
--- a/mercurial/templates/spartan/filelogentry.tmpl
+++ b/mercurial/templates/spartan/filelogentry.tmpl
@@ -1,6 +1,6 @@
<table class="logEntry parity{parity}">
<tr>
- <th><span class="age">{date|rfc822date}</span>:</th>
+ <th><span class="age">{date|date}</span>:</th>
<th class="firstline"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a></th>
</tr>
<tr>
@@ -18,7 +18,7 @@
</tr>
<tr>
<th class="date">date:</th>
- <td class="date">{date|rfc822date}</td>
+ <td class="date">{date|date}</td>
</tr>
</table>
diff --git a/mercurial/templates/spartan/filerevision.tmpl b/mercurial/templates/spartan/filerevision.tmpl
index 548d3c6..eb46a10 100644
--- a/mercurial/templates/spartan/filerevision.tmpl
+++ b/mercurial/templates/spartan/filerevision.tmpl
@@ -30,7 +30,7 @@
<td>{author|obfuscate}</td></tr>
<tr>
<td class="metatag">date:</td>
- <td class="date age">{date|rfc822date}</td></tr>
+ <td class="date age">{date|date}</td></tr>
<tr>
<td class="metatag">permissions:</td>
<td>{permissions|permissions}</td></tr>
diff --git a/mercurial/templates/spartan/footer.tmpl b/mercurial/templates/spartan/footer.tmpl
index 665bdb2..851c758 100644
--- a/mercurial/templates/spartan/footer.tmpl
+++ b/mercurial/templates/spartan/footer.tmpl
@@ -2,7 +2,7 @@
{motd}
<div class="logo">
<a href="{logourl}">
-<img src="{staticurl}{logoimg}" width=75 height=90 border=0 alt="mercurial"></a>
+<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a>
</div>
</body>
diff --git a/mercurial/templates/spartan/graph.tmpl b/mercurial/templates/spartan/graph.tmpl
index 054bc42..67736e0 100644
--- a/mercurial/templates/spartan/graph.tmpl
+++ b/mercurial/templates/spartan/graph.tmpl
@@ -43,6 +43,16 @@ var data = {jsdata|json};
var graph = new Graph();
graph.scale({bg_height});
+graph.edge = function(x0, y0, x1, y1, color) \{
+
+ this.setColor(color, 0.0, 0.65);
+ this.ctx.beginPath();
+ this.ctx.moveTo(x0, y0);
+ this.ctx.lineTo(x1, y1);
+ this.ctx.stroke();
+
+}
+
var revlink = '<li style="_STYLE"><span class="desc">';
revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>';
revlink += '</span><span class="info">_DATE, by _USER</span></li>';
diff --git a/mercurial/templates/spartan/map b/mercurial/templates/spartan/map
index 8280723..8aaacae 100644
--- a/mercurial/templates/spartan/map
+++ b/mercurial/templates/spartan/map
@@ -168,7 +168,7 @@ indexentry = '
<td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td>
<td>{description}</td>
<td>{contact|obfuscate}</td>
- <td class="age">{lastchange|rfc822date}</td>
+ <td class="age">{lastchange|date}</td>
<td class="indexlinks">
<a href="{url}rss-log">RSS</a>
<a href="{url}atom-log">Atom</a>
diff --git a/mercurial/templates/spartan/shortlogentry.tmpl b/mercurial/templates/spartan/shortlogentry.tmpl
index d104753..670c786 100644
--- a/mercurial/templates/spartan/shortlogentry.tmpl
+++ b/mercurial/templates/spartan/shortlogentry.tmpl
@@ -1,6 +1,6 @@
<table class="slogEntry parity{parity}">
<tr>
- <td class="age">{date|rfc822date}</td>
+ <td class="age">{date|date}</td>
<td class="author">{author|person}</td>
<td class="node"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a></td>
</tr>
diff --git a/mercurial/templates/static/mercurial.js b/mercurial/templates/static/mercurial.js
index 70347d7..3bc27a4 100644
--- a/mercurial/templates/static/mercurial.js
+++ b/mercurial/templates/static/mercurial.js
@@ -58,44 +58,25 @@ function Graph() {
// Set the colour.
//
- // If color is a string, expect an hexadecimal RGB
- // value and apply it unchanged. If color is a number,
- // pick a distinct colour based on an internal wheel;
- // the bg parameter provides the value that should be
- // assigned to the 'zero' colours and the fg parameter
- // provides the multiplier that should be applied to
- // the foreground colours.
- var s;
- if(typeof color == "string") {
- s = "#" + color;
- } else { //typeof color == "number"
- color %= colors.length;
- var red = (colors[color][0] * fg) || bg;
- var green = (colors[color][1] * fg) || bg;
- var blue = (colors[color][2] * fg) || bg;
- red = Math.round(red * 255);
- green = Math.round(green * 255);
- blue = Math.round(blue * 255);
- s = 'rgb(' + red + ', ' + green + ', ' + blue + ')';
- }
+ // Picks a distinct colour based on an internal wheel; the bg
+ // parameter provides the value that should be assigned to the 'zero'
+ // colours and the fg parameter provides the multiplier that should be
+ // applied to the foreground colours.
+
+ color %= colors.length;
+ var red = (colors[color][0] * fg) || bg;
+ var green = (colors[color][1] * fg) || bg;
+ var blue = (colors[color][2] * fg) || bg;
+ red = Math.round(red * 255);
+ green = Math.round(green * 255);
+ blue = Math.round(blue * 255);
+ var s = 'rgb(' + red + ', ' + green + ', ' + blue + ')';
this.ctx.strokeStyle = s;
this.ctx.fillStyle = s;
return s;
}
- this.edge = function(x0, y0, x1, y1, color, width) {
-
- this.setColor(color, 0.0, 0.65);
- if(width >= 0)
- this.ctx.lineWidth = width;
- this.ctx.beginPath();
- this.ctx.moveTo(x0, y0);
- this.ctx.lineTo(x1, y1);
- this.ctx.stroke();
-
- }
-
this.render = function(data) {
var backgrounds = '';
@@ -112,20 +93,13 @@ function Graph() {
var edges = cur[2];
var fold = false;
- var prevWidth = this.ctx.lineWidth;
for (var j in edges) {
line = edges[j];
start = line[0];
end = line[1];
color = line[2];
- var width = line[3];
- if(width < 0)
- width = prevWidth;
- var branchcolor = line[4];
- if(branchcolor)
- color = branchcolor;
-
+
if (end > this.columns || start > this.columns) {
this.columns += 1;
}
@@ -139,10 +113,9 @@ function Graph() {
x1 = this.cell[0] + this.box_size * end + this.box_size / 2;
y1 = this.bg[1] + this.bg_height / 2;
- this.edge(x0, y0, x1, y1, color, width);
+ this.edge(x0, y0, x1, y1, color);
}
- this.ctx.lineWidth = prevWidth;
// Draw the revision node in the right column
diff --git a/mercurial/templates/static/style-coal.css b/mercurial/templates/static/style-coal.css
index 1dee828..33fb0e6 100644
--- a/mercurial/templates/static/style-coal.css
+++ b/mercurial/templates/static/style-coal.css
@@ -286,40 +286,3 @@ ul#graphnodes li .info {
position: relative;
top: -3px;
}
-
-/* Comparison */
-.legend {
- padding: 1.5% 0 1.5% 0;
-}
-
-.legendinfo {
- border: 1px solid #999;
- font-size: 80%;
- text-align: center;
- padding: 0.5%;
-}
-
-.equal {
- background-color: #ffffff;
-}
-
-.delete {
- background-color: #faa;
- color: #333;
-}
-
-.insert {
- background-color: #ffa;
-}
-
-.replace {
- background-color: #e8e8e8;
-}
-
-.header {
- text-align: center;
-}
-
-.block {
- border-top: 1px solid #999;
-}
diff --git a/mercurial/templates/static/style-gitweb.css b/mercurial/templates/static/style-gitweb.css
index c5e9514..82f5ef5 100644
--- a/mercurial/templates/static/style-gitweb.css
+++ b/mercurial/templates/static/style-gitweb.css
@@ -126,44 +126,3 @@ ul#graphnodes li .info {
top: -3px;
font-style: italic;
}
-
-/* Comparison */
-.legend {
- padding: 1.5% 0 1.5% 0;
-}
-
-.legendinfo {
- border: 1px solid #d9d8d1;
- font-size: 80%;
- text-align: center;
- padding: 0.5%;
-}
-
-.equal {
- background-color: #ffffff;
-}
-
-.delete {
- background-color: #faa;
- color: #333;
-}
-
-.insert {
- background-color: #ffa;
-}
-
-.replace {
- background-color: #e8e8e8;
-}
-
-.comparison {
- overflow-x: auto;
-}
-
-.header th {
- text-align: center;
-}
-
-.block {
- border-top: 1px solid #d9d8d1;
-}
diff --git a/mercurial/templates/static/style-monoblue.css b/mercurial/templates/static/style-monoblue.css
index b4e3579..c015061 100644
--- a/mercurial/templates/static/style-monoblue.css
+++ b/mercurial/templates/static/style-monoblue.css
@@ -477,50 +477,3 @@ ul#graphnodes li .info {
position: relative;
}
/** end of canvas **/
-
-/** comparison **/
-.legend {
- margin-left: 20px;
- padding: 1.5% 0 1.5% 0;
-}
-
-.legendinfo {
- border: 1px solid #999;
- font-size: 80%;
- text-align: center;
- padding: 0.5%;
-}
-
-.equal {
- background-color: #ffffff;
-}
-
-.delete {
- background-color: #faa;
- color: #333;
-}
-
-.insert {
- background-color: #ffa;
-}
-
-.replace {
- background-color: #e8e8e8;
-}
-
-.comparison {
- overflow-x: auto;
-}
-
-.comparison table td {
- padding: 0px 5px;
-}
-
-.header th {
- font-weight: bold;
-}
-
-.block {
- border-top: 1px solid #999;
-}
-/** end of comparison **/
diff --git a/mercurial/templates/static/style-paper.css b/mercurial/templates/static/style-paper.css
index e881c1c..6c8b1c2 100644
--- a/mercurial/templates/static/style-paper.css
+++ b/mercurial/templates/static/style-paper.css
@@ -275,40 +275,3 @@ ul#graphnodes li .info {
position: relative;
top: -3px;
}
-
-/* Comparison */
-.legend {
- padding: 1.5% 0 1.5% 0;
-}
-
-.legendinfo {
- border: 1px solid #999;
- font-size: 80%;
- text-align: center;
- padding: 0.5%;
-}
-
-.equal {
- background-color: #ffffff;
-}
-
-.delete {
- background-color: #faa;
- color: #333;
-}
-
-.insert {
- background-color: #ffa;
-}
-
-.replace {
- background-color: #e8e8e8;
-}
-
-.header {
- text-align: center;
-}
-
-.block {
- border-top: 1px solid #999;
-}
diff --git a/mercurial/transaction.py b/mercurial/transaction.py
index a7e9180..d197295 100644
--- a/mercurial/transaction.py
+++ b/mercurial/transaction.py
@@ -164,7 +164,7 @@ class transaction(object):
_playback(self.journal, self.report, self.opener,
self.entries, False)
self.report(_("rollback completed\n"))
- except Exception:
+ except:
self.report(_("rollback failed - please run hg recover\n"))
finally:
self.journal = None
diff --git a/mercurial/treediscovery.py b/mercurial/treediscovery.py
index 09a4afb..49011c9 100644
--- a/mercurial/treediscovery.py
+++ b/mercurial/treediscovery.py
@@ -56,11 +56,11 @@ def findcommonincoming(repo, remote, heads=None, force=False):
# a 'branch' here is a linear segment of history, with four parts:
# head, root, first parent, second parent
# (a branch always has two parents (or none) by definition)
- unknown = util.deque(remote.branches(unknown))
+ unknown = remote.branches(unknown)
while unknown:
r = []
while unknown:
- n = unknown.popleft()
+ n = unknown.pop(0)
if n[0] in seen:
continue
diff --git a/mercurial/ui.py b/mercurial/ui.py
index 5d80df4..8f3b1eb 100644
--- a/mercurial/ui.py
+++ b/mercurial/ui.py
@@ -7,7 +7,7 @@
from i18n import _
import errno, getpass, os, socket, sys, tempfile, traceback
-import config, scmutil, util, error, formatter
+import config, scmutil, util, error
class ui(object):
def __init__(self, src=None):
@@ -19,7 +19,6 @@ class ui(object):
self._ucfg = config.config() # untrusted
self._trustusers = set()
self._trustgroups = set()
- self.callhooks = True
if src:
self.fout = src.fout
@@ -32,7 +31,6 @@ class ui(object):
self._trustusers = src._trustusers.copy()
self._trustgroups = src._trustgroups.copy()
self.environ = src.environ
- self.callhooks = src.callhooks
self.fixconfig()
else:
self.fout = sys.stdout
@@ -48,10 +46,7 @@ class ui(object):
def copy(self):
return self.__class__(self)
- def formatter(self, topic, opts):
- return formatter.formatter(self, topic, opts)
-
- def _trusted(self, fp, f):
+ def _is_trusted(self, fp, f):
st = util.fstat(fp)
if util.isowner(st):
return True
@@ -66,7 +61,7 @@ class ui(object):
return True
if self._reportuntrusted:
- self.warn(_('not trusting file %s from untrusted '
+ self.warn(_('Not trusting file %s from untrusted '
'user %s, group %s\n') % (f, user, group))
return False
@@ -80,15 +75,14 @@ class ui(object):
raise
cfg = config.config()
- trusted = sections or trust or self._trusted(fp, filename)
+ trusted = sections or trust or self._is_trusted(fp, filename)
try:
cfg.read(filename, fp, sections=sections, remap=remap)
- fp.close()
except error.ConfigError, inst:
if trusted:
raise
- self.warn(_("ignored: %s\n") % str(inst))
+ self.warn(_("Ignored: %s\n") % str(inst))
if self.plain():
for k in ('debug', 'fallbackencoding', 'quiet', 'slash',
@@ -147,15 +141,6 @@ class ui(object):
self._trustusers.update(self.configlist('trusted', 'users'))
self._trustgroups.update(self.configlist('trusted', 'groups'))
- def backupconfig(self, section, item):
- return (self._ocfg.backup(section, item),
- self._tcfg.backup(section, item),
- self._ucfg.backup(section, item),)
- def restoreconfig(self, data):
- self._ocfg.restore(data[0])
- self._tcfg.restore(data[1])
- self._ucfg.restore(data[2])
-
def setconfig(self, section, name, value, overlay=True):
if overlay:
self._ocfg.set(section, name, value)
@@ -170,19 +155,7 @@ class ui(object):
return self._data(untrusted).source(section, name) or 'none'
def config(self, section, name, default=None, untrusted=False):
- if isinstance(name, list):
- alternates = name
- else:
- alternates = [name]
-
- for n in alternates:
- value = self._data(untrusted).get(section, name, None)
- if value is not None:
- name = n
- break
- else:
- value = default
-
+ value = self._data(untrusted).get(section, name, default)
if self.debugflag and not untrusted and self._reportuntrusted:
uvalue = self._ucfg.get(section, name)
if uvalue is not None and uvalue != value:
@@ -191,14 +164,12 @@ class ui(object):
return value
def configpath(self, section, name, default=None, untrusted=False):
- 'get a path config item, expanded relative to repo root or config file'
+ 'get a path config item, expanded relative to config file'
v = self.config(section, name, default, untrusted)
- if v is None:
- return None
if not os.path.isabs(v) or "://" not in v:
src = self.configsource(section, name, untrusted)
if ':' in src:
- base = os.path.dirname(src.rsplit(':')[0])
+ base = os.path.dirname(src.rsplit(':'))
v = os.path.join(base, os.path.expanduser(v))
return v
@@ -411,7 +382,7 @@ class ui(object):
if user is None and not self.interactive():
try:
user = '%s@%s' % (util.getuser(), socket.getfqdn())
- self.warn(_("no username found, using '%s' instead\n") % user)
+ self.warn(_("No username found, using '%s' instead\n") % user)
except KeyError:
pass
if not user:
@@ -485,19 +456,14 @@ class ui(object):
if not getattr(self.ferr, 'closed', False):
self.ferr.flush()
except IOError, inst:
- if inst.errno not in (errno.EPIPE, errno.EIO, errno.EBADF):
+ if inst.errno not in (errno.EPIPE, errno.EIO):
raise
def flush(self):
try: self.fout.flush()
- except (IOError, ValueError): pass
+ except: pass
try: self.ferr.flush()
- except (IOError, ValueError): pass
-
- def _isatty(self, fh):
- if self.configbool('ui', 'nontty', False):
- return False
- return util.isatty(fh)
+ except: pass
def interactive(self):
'''is interactive input allowed?
@@ -517,7 +483,7 @@ class ui(object):
if i is None:
# some environments replace stdin without implementing isatty
# usually those are non-interactive
- return self._isatty(self.fin)
+ return util.isatty(self.fin)
return i
@@ -555,12 +521,12 @@ class ui(object):
if i is None:
# some environments replace stdout without implementing isatty
# usually those are non-interactive
- return self._isatty(self.fout)
+ return util.isatty(self.fout)
return i
def _readline(self, prompt=''):
- if self._isatty(self.fin):
+ if util.isatty(self.fin):
try:
# magically add command line editing support, where
# available
@@ -687,25 +653,17 @@ class ui(object):
printed.'''
if self.tracebackflag:
if exc:
- traceback.print_exception(exc[0], exc[1], exc[2],
- file=self.ferr)
+ traceback.print_exception(exc[0], exc[1], exc[2], file=self.ferr)
else:
traceback.print_exc(file=self.ferr)
return self.tracebackflag
def geteditor(self):
'''return editor to use'''
- if sys.platform == 'plan9':
- # vi is the MIPS instruction simulator on Plan 9. We
- # instead default to E to plumb commit messages to
- # avoid confusion.
- editor = 'E'
- else:
- editor = 'vi'
return (os.environ.get("HGEDITOR") or
self.config("ui", "editor") or
os.environ.get("VISUAL") or
- os.environ.get("EDITOR", editor))
+ os.environ.get("EDITOR", "vi"))
def progress(self, topic, pos, item="", unit="", total=None):
'''show a progress message
diff --git a/mercurial/url.py b/mercurial/url.py
index 9cb88e0..60b1012 100644
--- a/mercurial/url.py
+++ b/mercurial/url.py
@@ -135,7 +135,7 @@ def _gen_sendfile(orgsend):
orgsend(self, data)
return _sendfile
-has_https = util.safehasattr(urllib2, 'HTTPSHandler')
+has_https = hasattr(urllib2, 'HTTPSHandler')
if has_https:
try:
_create_connection = socket.create_connection
@@ -192,8 +192,8 @@ class httpconnection(keepalive.HTTPConnection):
# general transaction handler to support different ways to handle
# HTTPS proxying before and after Python 2.6.3.
def _generic_start_transaction(handler, h, req):
- tunnel_host = getattr(req, '_tunnel_host', None)
- if tunnel_host:
+ if hasattr(req, '_tunnel_host') and req._tunnel_host:
+ tunnel_host = req._tunnel_host
if tunnel_host[:7] not in ['http://', 'https:/']:
tunnel_host = 'https://' + tunnel_host
new_tunnel = True
@@ -377,8 +377,7 @@ if has_https:
keyfile = self.auth['key']
certfile = self.auth['cert']
- conn = httpsconnection(host, port, keyfile, certfile, *args,
- **kwargs)
+ conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs)
conn.ui = self.ui
return conn
diff --git a/mercurial/util.h b/mercurial/util.h
index facfd8b..0ecc867 100644
--- a/mercurial/util.h
+++ b/mercurial/util.h
@@ -101,66 +101,5 @@
#endif /* PY_VERSION_HEX */
-#if (PY_VERSION_HEX < 0x02050000)
-/* Definitions to get compatibility with python 2.4 and earlier which
- does not have Py_ssize_t. See also PEP 353.
- Note: msvc (8 or earlier) does not have ssize_t, so we use Py_ssize_t.
-*/
-typedef int Py_ssize_t;
-typedef Py_ssize_t (*lenfunc)(PyObject *);
-typedef PyObject *(*ssizeargfunc)(PyObject *, Py_ssize_t);
-#define PyInt_FromSsize_t PyInt_FromLong
-
-#if !defined(PY_SSIZE_T_MIN)
-#define PY_SSIZE_T_MAX INT_MAX
-#define PY_SSIZE_T_MIN INT_MIN
-#endif
-#endif
-
-#ifdef _WIN32
-#ifdef _MSC_VER
-/* msvc 6.0 has problems */
-#define inline __inline
-typedef unsigned long uint32_t;
-typedef unsigned __int64 uint64_t;
-#else
-#include <stdint.h>
-#endif
-#else
-/* not windows */
-#include <sys/types.h>
-#if defined __BEOS__ && !defined __HAIKU__
-#include <ByteOrder.h>
-#else
-#include <arpa/inet.h>
-#endif
-#include <inttypes.h>
-#endif
-
-#if defined __hpux || defined __SUNPRO_C || defined _AIX
-#define inline
-#endif
-
-#ifdef __linux
-#define inline __inline
-#endif
-
-static inline uint32_t getbe32(const char *c)
-{
- const unsigned char *d = (const unsigned char *)c;
-
- return ((d[0] << 24) |
- (d[1] << 16) |
- (d[2] << 8) |
- (d[3]));
-}
-
-static inline void putbe32(uint32_t x, char *c)
-{
- c[0] = (x >> 24) & 0xff;
- c[1] = (x >> 16) & 0xff;
- c[2] = (x >> 8) & 0xff;
- c[3] = (x) & 0xff;
-}
-
#endif /* _HG_UTIL_H_ */
+
diff --git a/mercurial/util.py b/mercurial/util.py
index 4a6e215..4597ffb 100644
--- a/mercurial/util.py
+++ b/mercurial/util.py
@@ -14,85 +14,21 @@ hide platform-specific details from the core.
"""
from i18n import _
-import error, osutil, encoding, collections
+import error, osutil, encoding
import errno, re, shutil, sys, tempfile, traceback
-import os, time, datetime, calendar, textwrap, signal
+import os, time, calendar, textwrap, signal
import imp, socket, urllib
-if os.name == 'nt':
- import windows as platform
-else:
- import posix as platform
-
-cachestat = platform.cachestat
-checkexec = platform.checkexec
-checklink = platform.checklink
-copymode = platform.copymode
-executablepath = platform.executablepath
-expandglobs = platform.expandglobs
-explainexit = platform.explainexit
-findexe = platform.findexe
-gethgcmd = platform.gethgcmd
-getuser = platform.getuser
-groupmembers = platform.groupmembers
-groupname = platform.groupname
-hidewindow = platform.hidewindow
-isexec = platform.isexec
-isowner = platform.isowner
-localpath = platform.localpath
-lookupreg = platform.lookupreg
-makedir = platform.makedir
-nlinks = platform.nlinks
-normpath = platform.normpath
-normcase = platform.normcase
-nulldev = platform.nulldev
-openhardlinks = platform.openhardlinks
-oslink = platform.oslink
-parsepatchoutput = platform.parsepatchoutput
-pconvert = platform.pconvert
-popen = platform.popen
-posixfile = platform.posixfile
-quotecommand = platform.quotecommand
-realpath = platform.realpath
-rename = platform.rename
-samedevice = platform.samedevice
-samefile = platform.samefile
-samestat = platform.samestat
-setbinary = platform.setbinary
-setflags = platform.setflags
-setsignalhandler = platform.setsignalhandler
-shellquote = platform.shellquote
-spawndetached = platform.spawndetached
-sshargs = platform.sshargs
-statfiles = platform.statfiles
-termwidth = platform.termwidth
-testpid = platform.testpid
-umask = platform.umask
-unlink = platform.unlink
-unlinkpath = platform.unlinkpath
-username = platform.username
-
# Python compatibility
-_notset = object()
+def sha1(s):
+ return _fastsha1(s)
+_notset = object()
def safehasattr(thing, attr):
return getattr(thing, attr, _notset) is not _notset
-def sha1(s=''):
- '''
- Low-overhead wrapper around Python's SHA support
-
- >>> f = _fastsha1
- >>> a = sha1()
- >>> a = f()
- >>> a.hexdigest()
- 'da39a3ee5e6b4b0d3255bfef95601890afd80709'
- '''
-
- return _fastsha1(s)
-
-def _fastsha1(s=''):
+def _fastsha1(s):
# This function will import sha1 from hashlib or sha (whichever is
# available) and overwrite itself with it on the first call.
# Subsequent calls will go directly to the imported function.
@@ -104,15 +40,18 @@ def _fastsha1(s=''):
_fastsha1 = sha1 = _sha1
return _sha1(s)
+import __builtin__
+
+if sys.version_info[0] < 3:
+ def fakebuffer(sliceable, offset=0):
+ return sliceable[offset:]
+else:
+ def fakebuffer(sliceable, offset=0):
+ return memoryview(sliceable)[offset:]
try:
- buffer = buffer
+ buffer
except NameError:
- if sys.version_info[0] < 3:
- def buffer(sliceable, offset=0):
- return sliceable[offset:]
- else:
- def buffer(sliceable, offset=0):
- return memoryview(sliceable)[offset:]
+ __builtin__.buffer = fakebuffer
import subprocess
closefds = os.name == 'posix'
@@ -199,27 +138,15 @@ def cachefunc(func):
return f
-try:
- collections.deque.remove
- deque = collections.deque
-except AttributeError:
- # python 2.4 lacks deque.remove
- class deque(collections.deque):
- def remove(self, val):
- for i, v in enumerate(self):
- if v == val:
- del self[i]
- break
-
def lrucachefunc(func):
'''cache most recent results of function calls'''
cache = {}
- order = deque()
+ order = []
if func.func_code.co_argcount == 1:
def f(arg):
if arg not in cache:
if len(cache) > 20:
- del cache[order.popleft()]
+ del cache[order.pop(0)]
cache[arg] = func(arg)
else:
order.remove(arg)
@@ -229,7 +156,7 @@ def lrucachefunc(func):
def f(*args):
if args not in cache:
if len(cache) > 20:
- del cache[order.popleft()]
+ del cache[order.pop(0)]
cache[args] = func(*args)
else:
order.remove(args)
@@ -380,8 +307,8 @@ def mainfrozen():
The code supports py2exe (most common, Windows only) and tools/freeze
(portable, not much used).
"""
- return (safehasattr(sys, "frozen") or # new py2exe
- safehasattr(sys, "importers") or # old py2exe
+ return (hasattr(sys, "frozen") or # new py2exe
+ hasattr(sys, "importers") or # old py2exe
imp.is_frozen("__main__")) # tools/freeze
def hgexecutable():
@@ -391,13 +318,10 @@ def hgexecutable():
"""
if _hgexecutable is None:
hg = os.environ.get('HG')
- mainmod = sys.modules['__main__']
if hg:
_sethgexecutable(hg)
elif mainfrozen():
_sethgexecutable(sys.executable)
- elif os.path.basename(getattr(mainmod, '__file__', '')) == 'hg':
- _sethgexecutable(mainmod.__file__)
else:
exe = findexe('hg') or os.path.basename(sys.argv[0])
_sethgexecutable(exe)
@@ -431,29 +355,22 @@ def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None):
return str(val)
origcmd = cmd
cmd = quotecommand(cmd)
- if sys.platform == 'plan9':
- # subprocess kludge to work around issues in half-baked Python
- # ports, notably bichued/python:
- if not cwd is None:
- os.chdir(cwd)
- rc = os.system(cmd)
+ env = dict(os.environ)
+ env.update((k, py2shell(v)) for k, v in environ.iteritems())
+ env['HG'] = hgexecutable()
+ if out is None or out == sys.__stdout__:
+ rc = subprocess.call(cmd, shell=True, close_fds=closefds,
+ env=env, cwd=cwd)
else:
- env = dict(os.environ)
- env.update((k, py2shell(v)) for k, v in environ.iteritems())
- env['HG'] = hgexecutable()
- if out is None or out == sys.__stdout__:
- rc = subprocess.call(cmd, shell=True, close_fds=closefds,
- env=env, cwd=cwd)
- else:
- proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
- env=env, cwd=cwd, stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- for line in proc.stdout:
- out.write(line)
- proc.wait()
- rc = proc.returncode
- if sys.platform == 'OpenVMS' and rc & 1:
- rc = 0
+ proc = subprocess.Popen(cmd, shell=True, close_fds=closefds,
+ env=env, cwd=cwd, stdout=subprocess.PIPE,
+ stderr=subprocess.STDOUT)
+ for line in proc.stdout:
+ out.write(line)
+ proc.wait()
+ rc = proc.returncode
+ if sys.platform == 'OpenVMS' and rc & 1:
+ rc = 0
if rc and onerr:
errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]),
explainexit(rc)[0])
@@ -477,6 +394,18 @@ def checksignature(func):
return check
+def makedir(path, notindexed):
+ os.mkdir(path)
+
+def unlinkpath(f):
+ """unlink and remove the directory if it is empty"""
+ os.unlink(f)
+ # try removing directories that might now be empty
+ try:
+ os.removedirs(os.path.dirname(f))
+ except OSError:
+ pass
+
def copyfile(src, dest):
"copy a file, preserving mode and atime/mtime"
if os.path.islink(src):
@@ -542,7 +471,6 @@ def checkwinfilename(path):
"filename contains '\\\\x07', which is invalid on Windows"
>>> checkwinfilename("foo/bar/bla ")
"filename ends with ' ', which is not allowed on Windows"
- >>> checkwinfilename("../bar")
'''
for n in path.replace('\\', '/').split('/'):
if not n:
@@ -559,14 +487,26 @@ def checkwinfilename(path):
return _("filename contains '%s', which is reserved "
"on Windows") % base
t = n[-1]
- if t in '. ' and n not in '..':
+ if t in '. ':
return _("filename ends with '%s', which is not allowed "
"on Windows") % t
+def lookupreg(key, name=None, scope=None):
+ return None
+
+def hidewindow():
+ """Hide current shell window.
+
+ Used to hide the window opened when starting asynchronous
+ child process under Windows, unneeded on other systems.
+ """
+ pass
+
if os.name == 'nt':
checkosfilename = checkwinfilename
+ from windows import *
else:
- checkosfilename = platform.checkosfilename
+ from posix import *
def makelock(info, pathname):
try:
@@ -612,12 +552,9 @@ def checkcase(path):
"""
s1 = os.stat(path)
d, b = os.path.split(path)
- b2 = b.upper()
- if b == b2:
- b2 = b.lower()
- if b == b2:
- return True # no evidence against case sensitivity
- p2 = os.path.join(d, b2)
+ p2 = os.path.join(d, b.upper())
+ if path == p2:
+ p2 = os.path.join(d, b.lower())
try:
s2 = os.stat(p2)
if s2 == s1:
@@ -626,45 +563,22 @@ def checkcase(path):
except OSError:
return True
-try:
- import re2
- _re2 = None
-except ImportError:
- _re2 = False
-
-def compilere(pat):
- '''Compile a regular expression, using re2 if possible
-
- For best performance, use only re2-compatible regexp features.'''
- global _re2
- if _re2 is None:
- try:
- re2.compile
- _re2 = True
- except ImportError:
- _re2 = False
- if _re2:
- try:
- return re2.compile(pat)
- except re2.error:
- pass
- return re.compile(pat)
-
_fspathcache = {}
def fspath(name, root):
'''Get name in the case stored in the filesystem
- The name should be relative to root, and be normcase-ed for efficiency.
-
- Note that this function is unnecessary, and should not be
+ The name is either relative to root, or it is an absolute path starting
+ with root. Note that this function is unnecessary, and should not be
called, for case-sensitive filesystems (simply because it's expensive).
-
- The root should be normcase-ed, too.
'''
- def find(p, contents):
- for n in contents:
- if normcase(n) == p:
- return n
+ # If name is absolute, make it relative
+ if name.lower().startswith(root.lower()):
+ l = len(root)
+ if name[l] == os.sep or name[l] == os.altsep:
+ l = l + 1
+ name = name[l:]
+
+ if not os.path.lexists(os.path.join(root, name)):
return None
seps = os.sep
@@ -673,7 +587,7 @@ def fspath(name, root):
# Protect backslashes. This gets silly very quickly.
seps.replace('\\','\\\\')
pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps))
- dir = os.path.normpath(root)
+ dir = os.path.normcase(os.path.normpath(root))
result = []
for part, sep in pattern.findall(name):
if sep:
@@ -684,16 +598,16 @@ def fspath(name, root):
_fspathcache[dir] = os.listdir(dir)
contents = _fspathcache[dir]
- found = find(part, contents)
- if not found:
- # retry "once per directory" per "dirstate.walk" which
- # may take place for each patches of "hg qpush", for example
- contents = os.listdir(dir)
- _fspathcache[dir] = contents
- found = find(part, contents)
-
- result.append(found or part)
- dir = os.path.join(dir, part)
+ lpart = part.lower()
+ lenp = len(part)
+ for n in contents:
+ if lenp == len(n) and n.lower() == lpart:
+ result.append(n)
+ break
+ else:
+ # Cannot happen, as the file exists!
+ result.append(part)
+ dir = os.path.join(dir, lpart)
return ''.join(result)
@@ -776,7 +690,16 @@ def mktempcopy(name, emptyok=False, createmode=None):
# Temporary files are created with mode 0600, which is usually not
# what we want. If the original file already exists, just copy
# its mode. Otherwise, manually obey umask.
- copymode(name, temp, createmode)
+ try:
+ st_mode = os.lstat(name).st_mode & 0777
+ except OSError, inst:
+ if inst.errno != errno.ENOENT:
+ raise
+ st_mode = createmode
+ if st_mode is None:
+ st_mode = ~umask
+ st_mode &= 0666
+ os.chmod(temp, st_mode)
if emptyok:
return temp
try:
@@ -793,9 +716,9 @@ def mktempcopy(name, emptyok=False, createmode=None):
ofp.write(chunk)
ifp.close()
ofp.close()
- except: # re-raises
+ except:
try: os.unlink(temp)
- except OSError: pass
+ except: pass
raise
return temp
@@ -803,10 +726,11 @@ class atomictempfile(object):
'''writeable file object that atomically updates a file
All writes will go to a temporary copy of the original file. Call
- close() when you are done writing, and atomictempfile will rename
- the temporary copy to the original name, making the changes
- visible. If the object is destroyed without being closed, all your
- writes are discarded.
+ rename() when you are done writing, and atomictempfile will rename
+ the temporary copy to the original name, making the changes visible.
+
+ Unlike other file-like objects, close() discards your writes by
+ simply deleting the temporary file.
'''
def __init__(self, name, mode='w+b', createmode=None):
self.__name = name # permanent name
@@ -816,16 +740,14 @@ class atomictempfile(object):
# delegated methods
self.write = self._fp.write
- self.seek = self._fp.seek
- self.tell = self._fp.tell
self.fileno = self._fp.fileno
- def close(self):
+ def rename(self):
if not self._fp.closed:
self._fp.close()
rename(self._tempname, localpath(self.__name))
- def discard(self):
+ def close(self):
if not self._fp.closed:
try:
os.unlink(self._tempname)
@@ -834,25 +756,24 @@ class atomictempfile(object):
self._fp.close()
def __del__(self):
- if safehasattr(self, '_fp'): # constructor actually did something
- self.discard()
+ if hasattr(self, '_fp'): # constructor actually did something
+ self.close()
def makedirs(name, mode=None):
"""recursive directory creation with parent mode inheritance"""
+ parent = os.path.abspath(os.path.dirname(name))
try:
os.mkdir(name)
+ if mode is not None:
+ os.chmod(name, mode)
+ return
except OSError, err:
if err.errno == errno.EEXIST:
return
- if err.errno != errno.ENOENT or not name:
- raise
- parent = os.path.dirname(os.path.abspath(name))
- if parent == name:
+ if not name or parent == name or err.errno != errno.ENOENT:
raise
- makedirs(parent, mode)
- os.mkdir(name)
- if mode is not None:
- os.chmod(name, mode)
+ makedirs(parent, mode)
+ makedirs(name, mode)
def readfile(path):
fp = open(path, 'rb')
@@ -893,7 +814,7 @@ class chunkbuffer(object):
else:
yield chunk
self.iter = splitbig(in_iter)
- self._queue = deque()
+ self._queue = []
def read(self, l):
"""Read L bytes of data from the iterator of chunks of data.
@@ -913,10 +834,10 @@ class chunkbuffer(object):
if not queue:
break
- chunk = queue.popleft()
+ chunk = queue.pop(0)
left -= len(chunk)
if left < 0:
- queue.appendleft(chunk[left:])
+ queue.insert(0, chunk[left:])
buf += chunk[:left]
else:
buf += chunk
@@ -945,14 +866,16 @@ def filechunkiter(f, size=65536, limit=None):
yield s
def makedate():
- ct = time.time()
- if ct < 0:
+ lt = time.localtime()
+ if lt[8] == 1 and time.daylight:
+ tz = time.altzone
+ else:
+ tz = time.timezone
+ t = time.mktime(lt)
+ if t < 0:
hint = _("check your clock")
- raise Abort(_("negative timestamp: %d") % ct, hint=hint)
- delta = (datetime.datetime.utcfromtimestamp(ct) -
- datetime.datetime.fromtimestamp(ct))
- tz = delta.days * 86400 + delta.seconds
- return ct, tz
+ raise Abort(_("negative timestamp: %d") % t, hint=hint)
+ return t, tz
def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'):
"""represent a (unixtime, offset) tuple as a localized time.
@@ -1114,7 +1037,7 @@ def matchdate(date):
try:
d["d"] = days
return parsedate(date, extendeddateformats, d)[0]
- except Abort:
+ except:
pass
d["d"] = "28"
return parsedate(date, extendeddateformats, d)[0]
@@ -1167,16 +1090,6 @@ def shortuser(user):
user = user[:f]
return user
-def emailuser(user):
- """Return the user portion of an email address."""
- f = user.find('@')
- if f >= 0:
- user = user[:f]
- f = user.find('<')
- if f >= 0:
- user = user[f + 1:]
- return user
-
def email(author):
'''get email of author.'''
r = author.find('>')
@@ -1202,26 +1115,26 @@ def ellipsis(text, maxlength=400):
except (UnicodeDecodeError, UnicodeEncodeError):
return _ellipsis(text, maxlength)[0]
-_byteunits = (
- (100, 1 << 30, _('%.0f GB')),
- (10, 1 << 30, _('%.1f GB')),
- (1, 1 << 30, _('%.2f GB')),
- (100, 1 << 20, _('%.0f MB')),
- (10, 1 << 20, _('%.1f MB')),
- (1, 1 << 20, _('%.2f MB')),
- (100, 1 << 10, _('%.0f KB')),
- (10, 1 << 10, _('%.1f KB')),
- (1, 1 << 10, _('%.2f KB')),
- (1, 1, _('%.0f bytes')),
- )
-
def bytecount(nbytes):
'''return byte count formatted as readable string, with units'''
- for multiplier, divisor, format in _byteunits:
+ units = (
+ (100, 1 << 30, _('%.0f GB')),
+ (10, 1 << 30, _('%.1f GB')),
+ (1, 1 << 30, _('%.2f GB')),
+ (100, 1 << 20, _('%.0f MB')),
+ (10, 1 << 20, _('%.1f MB')),
+ (1, 1 << 20, _('%.2f MB')),
+ (100, 1 << 10, _('%.0f KB')),
+ (10, 1 << 10, _('%.1f KB')),
+ (1, 1 << 10, _('%.2f KB')),
+ (1, 1, _('%.0f bytes')),
+ )
+
+ for multiplier, divisor, format in units:
if nbytes >= divisor * multiplier:
return format % (nbytes / float(divisor))
- return _byteunits[-1][2] % nbytes
+ return units[-1][2] % nbytes
def uirepr(s):
# Avoid double backslash in Windows path repr()
@@ -1390,9 +1303,8 @@ def rundetached(args, condfn):
def handler(signum, frame):
terminated.add(os.wait())
prevhandler = None
- SIGCHLD = getattr(signal, 'SIGCHLD', None)
- if SIGCHLD is not None:
- prevhandler = signal.signal(SIGCHLD, handler)
+ if hasattr(signal, 'SIGCHLD'):
+ prevhandler = signal.signal(signal.SIGCHLD, handler)
try:
pid = spawndetached(args)
while not condfn():
@@ -1550,7 +1462,7 @@ class url(object):
"""
_safechars = "!~*'()+"
- _safepchars = "/!~*'()+:"
+ _safepchars = "/!~*'()+"
_matchscheme = re.compile(r'^[a-zA-Z0-9+.\-]+:').match
def __init__(self, path, parsequery=True, parsefragment=True):
@@ -1662,8 +1574,8 @@ class url(object):
Examples:
- >>> str(url('http://user:pw@host:80/c:/bob?fo:oo#ba:ar'))
- 'http://user:pw@host:80/c:/bob?fo:oo#ba:ar'
+ >>> str(url('http://user:pw@host:80/?foo#bar'))
+ 'http://user:pw@host:80/?foo#bar'
>>> str(url('http://user:pw@host:80/?foo=bar&baz=42'))
'http://user:pw@host:80/?foo=bar&baz=42'
>>> str(url('http://user:pw@host:80/?foo=bar%3dbaz'))
@@ -1684,8 +1596,6 @@ class url(object):
'path'
>>> str(url('file:///tmp/foo/bar'))
'file:///tmp/foo/bar'
- >>> str(url('file:///c:/tmp/foo/bar'))
- 'file:///c:/tmp/foo/bar'
>>> print url(r'bundle:foo\bar')
bundle:foo\bar
"""
@@ -1700,11 +1610,8 @@ class url(object):
s = self.scheme + ':'
if self.user or self.passwd or self.host:
s += '//'
- elif self.scheme and (not self.path or self.path.startswith('/')
- or hasdriveletter(self.path)):
+ elif self.scheme and (not self.path or self.path.startswith('/')):
s += '//'
- if hasdriveletter(self.path):
- s += '/'
if self.user:
s += urllib.quote(self.user, safe=self._safechars)
if self.passwd:
@@ -1741,10 +1648,8 @@ class url(object):
self.user, self.passwd = user, passwd
if not self.user:
return (s, None)
- # authinfo[1] is passed to urllib2 password manager, and its
- # URIs must not contain credentials. The host is passed in the
- # URIs list because Python < 2.4.3 uses only that to search for
- # a password.
+ # authinfo[1] is passed to urllib2 password manager, and its URIs
+ # must not contain credentials.
return (s, (None, (s, self.host),
self.user, self.passwd or ''))
@@ -1766,8 +1671,7 @@ class url(object):
# letters to paths with drive letters.
if hasdriveletter(self._hostport):
path = self._hostport + '/' + self.path
- elif (self.host is not None and self.path
- and not hasdriveletter(path)):
+ elif self.host is not None and self.path:
path = '/' + path
return path
return self._origpath
@@ -1776,7 +1680,7 @@ def hasscheme(path):
return bool(url(path).scheme)
def hasdriveletter(path):
- return path and path[1:2] == ':' and path[0:1].isalpha()
+ return path[1:2] == ':' and path[0:1].isalpha()
def urllocalpath(path):
return url(path, parsequery=False, parsefragment=False).localpath()
diff --git a/mercurial/verify.py b/mercurial/verify.py
index eb31faf..3e53862 100644
--- a/mercurial/verify.py
+++ b/mercurial/verify.py
@@ -87,7 +87,7 @@ def _verify(repo):
# attempt to filter down to real linkrevs
linkrevs = [l for l in linkrevs
if lrugetctx(l)[f].filenode() == node]
- except Exception:
+ except:
pass
warn(_(" (expected %s)") % " ".join(map(str, linkrevs)))
lr = None # can't be trusted
@@ -189,7 +189,7 @@ def _verify(repo):
try:
fl = repo.file(f)
lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]])
- except Exception:
+ except:
lr = None
err(lr, _("in manifest but not in changeset"), f)
diff --git a/mercurial/win32.py b/mercurial/win32.py
index 6b3650f..e886caf 100644
--- a/mercurial/win32.py
+++ b/mercurial/win32.py
@@ -5,7 +5,8 @@
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
-import ctypes, errno, os, subprocess, random
+import encoding
+import ctypes, errno, os, struct, subprocess, random
_kernel32 = ctypes.windll.kernel32
_advapi32 = ctypes.windll.advapi32
@@ -59,8 +60,6 @@ _FILE_SHARE_DELETE = 0x00000004
_OPEN_EXISTING = 3
-_FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
-
# SetFileAttributes
_FILE_ATTRIBUTE_NORMAL = 0x80
_FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000
@@ -71,6 +70,13 @@ _PROCESS_QUERY_INFORMATION = 0x0400
# GetExitCodeProcess
_STILL_ACTIVE = 259
+# registry
+_HKEY_CURRENT_USER = 0x80000001L
+_HKEY_LOCAL_MACHINE = 0x80000002L
+_KEY_READ = 0x20019
+_REG_SZ = 1
+_REG_DWORD = 4
+
class _STARTUPINFO(ctypes.Structure):
_fields_ = [('cb', _DWORD),
('lpReserved', _LPSTR),
@@ -97,7 +103,8 @@ class _PROCESS_INFORMATION(ctypes.Structure):
('dwProcessId', _DWORD),
('dwThreadId', _DWORD)]
-_CREATE_NO_WINDOW = 0x08000000
+_DETACHED_PROCESS = 0x00000008
+_STARTF_USESHOWWINDOW = 0x00000001
_SW_HIDE = 0
class _COORD(ctypes.Structure):
@@ -173,6 +180,17 @@ _kernel32.GetStdHandle.restype = _HANDLE
_kernel32.GetConsoleScreenBufferInfo.argtypes = [_HANDLE, ctypes.c_void_p]
_kernel32.GetConsoleScreenBufferInfo.restype = _BOOL
+_advapi32.RegOpenKeyExA.argtypes = [_HANDLE, _LPCSTR, _DWORD, _DWORD,
+ ctypes.c_void_p]
+_advapi32.RegOpenKeyExA.restype = _LONG
+
+_advapi32.RegQueryValueExA.argtypes = [_HANDLE, _LPCSTR, ctypes.c_void_p,
+ ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p]
+_advapi32.RegQueryValueExA.restype = _LONG
+
+_advapi32.RegCloseKey.argtypes = [_HANDLE]
+_advapi32.RegCloseKey.restype = _LONG
+
_advapi32.GetUserNameA.argtypes = [ctypes.c_void_p, ctypes.c_void_p]
_advapi32.GetUserNameA.restype = _BOOL
@@ -193,7 +211,7 @@ def _raiseoserror(name):
def _getfileinfo(name):
fh = _kernel32.CreateFileA(name, 0,
_FILE_SHARE_READ | _FILE_SHARE_WRITE | _FILE_SHARE_DELETE,
- None, _OPEN_EXISTING, _FILE_FLAG_BACKUP_SEMANTICS, None)
+ None, _OPEN_EXISTING, 0, None)
if fh == _INVALID_HANDLE_VALUE:
_raiseoserror(name)
try:
@@ -215,18 +233,20 @@ def nlinks(name):
'''return number of hardlinks for the given file'''
return _getfileinfo(name).nNumberOfLinks
-def samefile(path1, path2):
- '''Returns whether path1 and path2 refer to the same file or directory.'''
- res1 = _getfileinfo(path1)
- res2 = _getfileinfo(path2)
+def samefile(fpath1, fpath2):
+ '''Returns whether fpath1 and fpath2 refer to the same file. This is only
+ guaranteed to work for files, not directories.'''
+ res1 = _getfileinfo(fpath1)
+ res2 = _getfileinfo(fpath2)
return (res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber
and res1.nFileIndexHigh == res2.nFileIndexHigh
and res1.nFileIndexLow == res2.nFileIndexLow)
-def samedevice(path1, path2):
- '''Returns whether path1 and path2 are on the same device.'''
- res1 = _getfileinfo(path1)
- res2 = _getfileinfo(path2)
+def samedevice(fpath1, fpath2):
+ '''Returns whether fpath1 and fpath2 are on the same device. This is only
+ guaranteed to work for files, not directories.'''
+ res1 = _getfileinfo(fpath1)
+ res2 = _getfileinfo(fpath2)
return res1.dwVolumeSerialNumber == res2.dwVolumeSerialNumber
def testpid(pid):
@@ -242,13 +262,50 @@ def testpid(pid):
_kernel32.CloseHandle(h)
return _kernel32.GetLastError() != _ERROR_INVALID_PARAMETER
+def lookupreg(key, valname=None, scope=None):
+ ''' Look up a key/value name in the Windows registry.
+
+ valname: value name. If unspecified, the default value for the key
+ is used.
+ scope: optionally specify scope for registry lookup, this can be
+ a sequence of scopes to look up in order. Default (CURRENT_USER,
+ LOCAL_MACHINE).
+ '''
+ byref = ctypes.byref
+ if scope is None:
+ scope = (_HKEY_CURRENT_USER, _HKEY_LOCAL_MACHINE)
+ elif not isinstance(scope, (list, tuple)):
+ scope = (scope,)
+ for s in scope:
+ kh = _HANDLE()
+ res = _advapi32.RegOpenKeyExA(s, key, 0, _KEY_READ, ctypes.byref(kh))
+ if res != _ERROR_SUCCESS:
+ continue
+ try:
+ size = _DWORD(600)
+ type = _DWORD()
+ buf = ctypes.create_string_buffer(size.value + 1)
+ res = _advapi32.RegQueryValueExA(kh.value, valname, None,
+ byref(type), buf, byref(size))
+ if res != _ERROR_SUCCESS:
+ continue
+ if type.value == _REG_SZ:
+ # never let a Unicode string escape into the wild
+ return encoding.tolocal(buf.value.encode('UTF-8'))
+ elif type.value == _REG_DWORD:
+ fmt = '<L'
+ s = ctypes.string_at(byref(buf), struct.calcsize(fmt))
+ return struct.unpack(fmt, s)[0]
+ finally:
+ _advapi32.RegCloseKey(kh.value)
+
def executablepath():
'''return full path of hg.exe'''
size = 600
buf = ctypes.create_string_buffer(size + 1)
len = _kernel32.GetModuleFileNameA(None, ctypes.byref(buf), size)
if len == 0:
- raise ctypes.WinError
+ raise ctypes.WinError()
elif len == size:
raise ctypes.WinError(_ERROR_INSUFFICIENT_BUFFER)
return buf.value
@@ -258,7 +315,7 @@ def getuser():
size = _DWORD(300)
buf = ctypes.create_string_buffer(size.value + 1)
if not _advapi32.GetUserNameA(ctypes.byref(buf), ctypes.byref(size)):
- raise ctypes.WinError
+ raise ctypes.WinError()
return buf.value
_signalhandler = []
@@ -276,7 +333,7 @@ def setsignalhandler():
h = _SIGNAL_HANDLER(handler)
_signalhandler.append(h) # needed to prevent garbage collection
if not _kernel32.SetConsoleCtrlHandler(h, True):
- raise ctypes.WinError
+ raise ctypes.WinError()
def hidewindow():
@@ -317,6 +374,8 @@ def spawndetached(args):
# which makes really detached processes impossible.
si = _STARTUPINFO()
si.cb = ctypes.sizeof(_STARTUPINFO)
+ si.dwFlags = _STARTF_USESHOWWINDOW
+ si.wShowWindow = _SW_HIDE
pi = _PROCESS_INFORMATION()
@@ -334,10 +393,10 @@ def spawndetached(args):
args = comspec + " /c " + args
res = _kernel32.CreateProcessA(
- None, args, None, None, False, _CREATE_NO_WINDOW,
+ None, args, None, None, False, _DETACHED_PROCESS,
env, os.getcwd(), ctypes.byref(si), ctypes.byref(pi))
if not res:
- raise ctypes.WinError
+ raise ctypes.WinError()
return pi.dwProcessId
diff --git a/mercurial/windows.py b/mercurial/windows.py
index aade404..ba3e6d8 100644
--- a/mercurial/windows.py
+++ b/mercurial/windows.py
@@ -6,26 +6,11 @@
# GNU General Public License version 2 or any later version.
from i18n import _
-import osutil, encoding
-import errno, msvcrt, os, re, sys, _winreg
-
-import win32
-executablepath = win32.executablepath
-getuser = win32.getuser
-hidewindow = win32.hidewindow
-makedir = win32.makedir
-nlinks = win32.nlinks
-oslink = win32.oslink
-samedevice = win32.samedevice
-samefile = win32.samefile
-setsignalhandler = win32.setsignalhandler
-spawndetached = win32.spawndetached
-termwidth = win32.termwidth
-testpid = win32.testpid
-unlink = win32.unlink
+import osutil
+import errno, msvcrt, os, re, sys
nulldev = 'NUL:'
-umask = 0022
+umask = 002
# wrap osutil.posixfile to provide friendlier exceptions
def posixfile(name, mode='r', buffering=-1):
@@ -105,9 +90,6 @@ def sshargs(sshcmd, host, user, port):
def setflags(f, l, x):
pass
-def copymode(src, dst, mode=None):
- pass
-
def checkexec(path):
return False
@@ -117,12 +99,11 @@ def checklink(path):
def setbinary(fd):
# When run without console, pipes may expose invalid
# fileno(), usually set to -1.
- fno = getattr(fd, 'fileno', None)
- if fno is not None and fno() >= 0:
- msvcrt.setmode(fno(), os.O_BINARY)
+ if hasattr(fd, 'fileno') and fd.fileno() >= 0:
+ msvcrt.setmode(fd.fileno(), os.O_BINARY)
def pconvert(path):
- return path.replace(os.sep, '/')
+ return '/'.join(path.split(os.sep))
def localpath(path):
return path.replace('/', '\\')
@@ -130,9 +111,6 @@ def localpath(path):
def normpath(path):
return pconvert(os.path.normpath(path))
-def normcase(path):
- return encoding.upper(path)
-
def realpath(path):
'''
Returns the true, canonical file system path equivalent to the given
@@ -140,7 +118,7 @@ def realpath(path):
'''
# TODO: There may be a more clever way to do this that also handles other,
# less common file systems.
- return os.path.normpath(normcase(os.path.realpath(path)))
+ return os.path.normpath(os.path.normcase(os.path.realpath(path)))
def samestat(s1, s2):
return False
@@ -216,16 +194,17 @@ def findexe(command):
def statfiles(files):
'''Stat each file in files and yield stat or None if file does not exist.
Cluster and cache stat per directory to minimize number of OS stat calls.'''
+ ncase = os.path.normcase
dircache = {} # dirname -> filename -> status | None if file does not exist
for nf in files:
- nf = normcase(nf)
+ nf = ncase(nf)
dir, base = os.path.split(nf)
if not dir:
dir = '.'
cache = dircache.get(dir, None)
if cache is None:
try:
- dmap = dict([(normcase(n), s)
+ dmap = dict([(ncase(n), s)
for n, k, s in osutil.listdir(dir, True)])
except OSError, err:
# handle directory not found in Python version prior to 2.5
@@ -291,39 +270,17 @@ def rename(src, dst):
def gethgcmd():
return [sys.executable] + sys.argv[:1]
+def termwidth():
+ # cmd.exe does not handle CR like a unix console, the CR is
+ # counted in the line length. On 80 columns consoles, if 80
+ # characters are written, the following CR won't apply on the
+ # current line but on the new one. Keep room for it.
+ return 79
+
def groupmembers(name):
# Don't support groups on Windows for now
- raise KeyError
+ raise KeyError()
-def isexec(f):
- return False
-
-class cachestat(object):
- def __init__(self, path):
- pass
-
- def cacheable(self):
- return False
-
-def lookupreg(key, valname=None, scope=None):
- ''' Look up a key/value name in the Windows registry.
-
- valname: value name. If unspecified, the default value for the key
- is used.
- scope: optionally specify scope for registry lookup, this can be
- a sequence of scopes to look up in order. Default (CURRENT_USER,
- LOCAL_MACHINE).
- '''
- if scope is None:
- scope = (_winreg.HKEY_CURRENT_USER, _winreg.HKEY_LOCAL_MACHINE)
- elif not isinstance(scope, (list, tuple)):
- scope = (scope,)
- for s in scope:
- try:
- val = _winreg.QueryValueEx(_winreg.OpenKey(s, key), valname)[0]
- # never let a Unicode string escape into the wild
- return encoding.tolocal(val.encode('UTF-8'))
- except EnvironmentError:
- pass
+from win32 import *
expandglobs = True
diff --git a/mercurial/wireproto.py b/mercurial/wireproto.py
index 2b44fd6..51268fa 100644
--- a/mercurial/wireproto.py
+++ b/mercurial/wireproto.py
@@ -9,24 +9,24 @@ import urllib, tempfile, os, sys
from i18n import _
from node import bin, hex
import changegroup as changegroupmod
-import peer, error, encoding, util, store
-import discovery, phases
+import repo, error, encoding, util, store
+import pushkey as pushkeymod
# abstract batching support
class future(object):
'''placeholder for a value to be set later'''
def set(self, value):
- if util.safehasattr(self, 'value'):
+ if hasattr(self, 'value'):
raise error.RepoError("future is already set")
self.value = value
class batcher(object):
'''base class for batches of commands submittable in a single request
- All methods invoked on instances of this class are simply queued and
- return a a future for the result. Once you call submit(), all the queued
- calls are performed and the results set in their respective futures.
+ All methods invoked on instances of this class are simply queued and return a
+ a future for the result. Once you call submit(), all the queued calls are
+ performed and the results set in their respective futures.
'''
def __init__(self):
self.calls = []
@@ -51,17 +51,15 @@ class localbatch(batcher):
class remotebatch(batcher):
'''batches the queued calls; uses as few roundtrips as possible'''
def __init__(self, remote):
- '''remote must support _submitbatch(encbatch) and
- _submitone(op, encargs)'''
+ '''remote must support _submitbatch(encbatch) and _submitone(op, encargs)'''
batcher.__init__(self)
self.remote = remote
def submit(self):
req, rsp = [], []
for name, args, opts, resref in self.calls:
mtd = getattr(self.remote, name)
- batchablefn = getattr(mtd, 'batchable', None)
- if batchablefn is not None:
- batchable = batchablefn(mtd.im_self, *args, **opts)
+ if hasattr(mtd, 'batchable'):
+ batchable = getattr(mtd, 'batchable')(mtd.im_self, *args, **opts)
encargsorres, encresref = batchable.next()
if encresref:
req.append((name, encargsorres,))
@@ -98,14 +96,14 @@ def batchable(f):
encresref = future()
# Return encoded arguments and future:
yield encargs, encresref
- # Assuming the future to be filled with the result from the batched
- # request now. Decode it:
+ # Assuming the future to be filled with the result from the batched request
+ # now. Decode it:
yield decode(encresref.value)
- The decorator returns a function which wraps this coroutine as a plain
- method, but adds the original method as an attribute called "batchable",
- which is used by remotebatch to split the call into separate encoding and
- decoding phases.
+ The decorator returns a function which wraps this coroutine as a plain method,
+ but adds the original method as an attribute called "batchable", which is
+ used by remotebatch to split the call into separate encoding and decoding
+ phases.
'''
def plain(*args, **opts):
batchable = f(*args, **opts)
@@ -149,7 +147,7 @@ def unescapearg(escaped):
def todict(**args):
return args
-class wirepeer(peer.peerrepository):
+class wirerepository(repo.repository):
def batch(self):
return remotebatch(self)
@@ -236,20 +234,16 @@ class wirepeer(peer.peerrepository):
if not self.capable('pushkey'):
yield False, None
f = future()
- self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
yield todict(namespace=encoding.fromlocal(namespace),
key=encoding.fromlocal(key),
old=encoding.fromlocal(old),
new=encoding.fromlocal(new)), f
d = f.value
- d, output = d.split('\n', 1)
try:
d = bool(int(d))
except ValueError:
raise error.ResponseError(
_('push failed (unexpected response):'), d)
- for l in output.splitlines(True):
- self.ui.status(_('remote: '), l)
yield d
@batchable
@@ -257,7 +251,6 @@ class wirepeer(peer.peerrepository):
if not self.capable('pushkey'):
yield {}, None
f = future()
- self.ui.debug('preparing listkeys for "%s"\n' % namespace)
yield todict(namespace=encoding.fromlocal(namespace)), f
d = f.value
r = {}
@@ -341,10 +334,6 @@ class pusherr(object):
def __init__(self, res):
self.res = res
-class ooberror(object):
- def __init__(self, message):
- self.message = message
-
def dispatch(repo, proto, command):
func, spec = commands[command]
args = proto.getargs(spec)
@@ -386,8 +375,6 @@ def batch(repo, proto, cmds, others):
result = func(repo, proto, *[data[k] for k in keys])
else:
result = func(repo, proto)
- if isinstance(result, ooberror):
- return result
res.append(escapearg(result))
return ';'.join(res)
@@ -399,7 +386,7 @@ def between(repo, proto, pairs):
return "".join(r)
def branchmap(repo, proto):
- branchmap = discovery.visiblebranchmap(repo)
+ branchmap = repo.branchmap()
heads = []
for branch, nodes in branchmap.iteritems():
branchname = urllib.quote(encoding.fromlocal(branch))
@@ -418,8 +405,6 @@ def capabilities(repo, proto):
caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
'unbundlehash batch').split()
if _allowstream(repo.ui):
- if repo.ui.configbool('server', 'preferuncompressed', False):
- caps.append('stream-preferred')
requiredformats = repo.requirements & repo.supportedformats
# if our local revlogs are just revlogv1, add 'stream' cap
if not requiredformats - set(('revlogv1',)):
@@ -455,7 +440,7 @@ def getbundle(repo, proto, others):
return streamres(proto.groupchunks(cg))
def heads(repo, proto):
- h = discovery.visibleheads(repo)
+ h = repo.heads()
return encodelist(h) + "\n"
def hello(repo, proto):
@@ -469,18 +454,14 @@ def hello(repo, proto):
return "capabilities: %s\n" % (capabilities(repo, proto))
def listkeys(repo, proto, namespace):
- d = repo.listkeys(encoding.tolocal(namespace)).items()
+ d = pushkeymod.list(repo, encoding.tolocal(namespace)).items()
t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
for k, v in d])
return t
def lookup(repo, proto, key):
try:
- k = encoding.tolocal(key)
- c = repo[k]
- if c.phase() == phases.secret:
- raise error.RepoLookupError(_("unknown revision '%s'") % k)
- r = c.hex()
+ r = hex(repo.lookup(encoding.tolocal(key)))
success = 1
except Exception, inst:
r = str(inst)
@@ -503,8 +484,9 @@ def pushkey(repo, proto, namespace, key, old, new):
else:
new = encoding.tolocal(new) # normal path
- r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
- encoding.tolocal(old), new)
+ r = pushkeymod.push(repo,
+ encoding.tolocal(namespace), encoding.tolocal(key),
+ encoding.tolocal(old), new)
return '%s\n' % int(r)
def _allowstream(ui):
@@ -558,7 +540,7 @@ def unbundle(repo, proto, heads):
their_heads = decodelist(heads)
def check_heads():
- heads = discovery.visibleheads(repo)
+ heads = repo.heads()
heads_hash = util.sha1(''.join(sorted(heads))).digest()
return (their_heads == ['force'] or their_heads == heads or
their_heads == ['hashed', heads_hash])
@@ -587,7 +569,8 @@ def unbundle(repo, proto, heads):
gen = changegroupmod.readbundle(fp, None)
try:
- r = repo.addchangegroup(gen, 'serve', proto._client())
+ r = repo.addchangegroup(gen, 'serve', proto._client(),
+ lock=lock)
except util.Abort, inst:
sys.stderr.write("abort: %s\n" % inst)
finally: