diff options
Diffstat (limited to 'eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg')
436 files changed, 59909 insertions, 0 deletions
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/PKG-INFO b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/PKG-INFO new file mode 100644 index 0000000..18a5c7f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/PKG-INFO @@ -0,0 +1,10 @@ +Metadata-Version: 1.0 +Name: mercurial +Version: 1.7.3 +Summary: Scalable distributed SCM +Home-page: http://mercurial.selenic.com/ +Author: Matt Mackall +Author-email: mpm@selenic.com +License: GNU GPLv2+ +Description: UNKNOWN +Platform: UNKNOWN diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/SOURCES.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/SOURCES.txt new file mode 100644 index 0000000..b16bcb8 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/SOURCES.txt @@ -0,0 +1,146 @@ +README +hg +setup.py +hgext/__init__.py +hgext/acl.py +hgext/bookmarks.py +hgext/bugzilla.py +hgext/children.py +hgext/churn.py +hgext/color.py +hgext/eol.py +hgext/extdiff.py +hgext/fetch.py +hgext/gpg.py +hgext/graphlog.py +hgext/hgcia.py +hgext/hgk.py +hgext/interhg.py +hgext/keyword.py +hgext/mq.py +hgext/notify.py +hgext/pager.py +hgext/parentrevspec.py +hgext/patchbomb.py +hgext/progress.py +hgext/purge.py +hgext/rebase.py +hgext/record.py +hgext/relink.py +hgext/schemes.py +hgext/share.py +hgext/transplant.py +hgext/win32mbcs.py +hgext/win32text.py +hgext/convert/__init__.py +hgext/convert/bzr.py +hgext/convert/common.py +hgext/convert/convcmd.py +hgext/convert/cvs.py +hgext/convert/cvsps.py +hgext/convert/darcs.py +hgext/convert/filemap.py +hgext/convert/git.py +hgext/convert/gnuarch.py +hgext/convert/hg.py +hgext/convert/monotone.py +hgext/convert/p4.py +hgext/convert/subversion.py +hgext/convert/transport.py +hgext/highlight/__init__.py +hgext/highlight/highlight.py +hgext/zeroconf/Zeroconf.py +hgext/zeroconf/__init__.py +mercurial/__init__.py +mercurial/__version__.py +mercurial/ancestor.py +mercurial/archival.py +mercurial/base85.c 
+mercurial/bdiff.c +mercurial/bundlerepo.py +mercurial/byterange.py +mercurial/changegroup.py +mercurial/changelog.py +mercurial/cmdutil.py +mercurial/commands.py +mercurial/config.py +mercurial/context.py +mercurial/copies.py +mercurial/dagparser.py +mercurial/demandimport.py +mercurial/diffhelpers.c +mercurial/dirstate.py +mercurial/discovery.py +mercurial/dispatch.py +mercurial/encoding.py +mercurial/error.py +mercurial/extensions.py +mercurial/fancyopts.py +mercurial/filelog.py +mercurial/filemerge.py +mercurial/graphmod.py +mercurial/hbisect.py +mercurial/help.py +mercurial/hg.py +mercurial/hook.py +mercurial/httprepo.py +mercurial/i18n.py +mercurial/ignore.py +mercurial/keepalive.py +mercurial/localrepo.py +mercurial/lock.py +mercurial/lsprof.py +mercurial/lsprofcalltree.py +mercurial/mail.py +mercurial/manifest.py +mercurial/match.py +mercurial/mdiff.py +mercurial/merge.py +mercurial/minirst.py +mercurial/mpatch.c +mercurial/node.py +mercurial/osutil.c +mercurial/parser.py +mercurial/parsers.c +mercurial/patch.py +mercurial/posix.py +mercurial/pushkey.py +mercurial/py3kcompat.py +mercurial/repair.py +mercurial/repo.py +mercurial/revlog.py +mercurial/revset.py +mercurial/similar.py +mercurial/simplemerge.py +mercurial/sshrepo.py +mercurial/sshserver.py +mercurial/statichttprepo.py +mercurial/store.py +mercurial/strutil.py +mercurial/subrepo.py +mercurial/tags.py +mercurial/templatefilters.py +mercurial/templatekw.py +mercurial/templater.py +mercurial/transaction.py +mercurial/ui.py +mercurial/url.py +mercurial/util.py +mercurial/verify.py +mercurial/win32.py +mercurial/windows.py +mercurial/wireproto.py +mercurial.egg-info/PKG-INFO +mercurial.egg-info/SOURCES.txt +mercurial.egg-info/dependency_links.txt +mercurial.egg-info/top_level.txt +mercurial/hgweb/__init__.py +mercurial/hgweb/common.py +mercurial/hgweb/hgweb_mod.py +mercurial/hgweb/hgwebdir_mod.py +mercurial/hgweb/protocol.py +mercurial/hgweb/request.py +mercurial/hgweb/server.py 
+mercurial/hgweb/webcommands.py +mercurial/hgweb/webutil.py +mercurial/hgweb/wsgicgi.py
\ No newline at end of file diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/dependency_links.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/dependency_links.txt new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/dependency_links.txt @@ -0,0 +1 @@ + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/native_libs.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/native_libs.txt new file mode 100644 index 0000000..0621792 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/native_libs.txt @@ -0,0 +1,6 @@ +mercurial/bdiff.so +mercurial/parsers.so +mercurial/mpatch.so +mercurial/base85.so +mercurial/diffhelpers.so +mercurial/osutil.so diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/not-zip-safe b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/not-zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/not-zip-safe @@ -0,0 +1 @@ + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/scripts/hg b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/scripts/hg new file mode 100644 index 0000000..c49e107 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/scripts/hg @@ -0,0 +1,38 @@ +#!/usr/bin/python +# +# mercurial - scalable distributed SCM +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import os +import sys + +libdir = '../../' + +if libdir != '@' 'LIBDIR' '@': + if not os.path.isabs(libdir): + libdir = os.path.join(os.path.dirname(os.path.realpath(__file__)), + libdir) + libdir = os.path.abspath(libdir) + sys.path.insert(0, libdir) + +# enable importing on demand to reduce startup time +try: + from mercurial import demandimport; demandimport.enable() +except ImportError: + import sys + sys.stderr.write("abort: couldn't find mercurial libraries in [%s]\n" % + ' '.join(sys.path)) + sys.stderr.write("(check your install and PYTHONPATH)\n") + sys.exit(-1) + +import mercurial.util +import mercurial.dispatch + +for fp in (sys.stdin, sys.stdout, sys.stderr): + mercurial.util.set_binary(fp) + +mercurial.dispatch.run() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/top_level.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/top_level.txt new file mode 100644 index 0000000..99bc082 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/top_level.txt @@ -0,0 +1,2 @@ +mercurial +hgext diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py new file mode 100644 index 0000000..fdffa2a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py @@ -0,0 +1 @@ +# placeholder diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo Binary files differnew file mode 100644 index 0000000..96afc1a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py new file mode 100644 index 0000000..a50fa72 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py @@ -0,0 +1,250 @@ +# acl.py - changeset access control for mercurial +# +# Copyright 2006 Vadim Gelfer 
<vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''hooks for controlling repository access + +This hook makes it possible to allow or deny write access to given +branches and paths of a repository when receiving incoming changesets +via pretxnchangegroup and pretxncommit. + +The authorization is matched based on the local user name on the +system where the hook runs, and not the committer of the original +changeset (since the latter is merely informative). + +The acl hook is best used along with a restricted shell like hgsh, +preventing authenticating users from doing anything other than pushing +or pulling. The hook is not safe to use if users have interactive +shell access, as they can then disable the hook. Nor is it safe if +remote users share an account, because then there is no way to +distinguish them. + +The order in which access checks are performed is: + +1) Deny list for branches (section ``acl.deny.branches``) +2) Allow list for branches (section ``acl.allow.branches``) +3) Deny list for paths (section ``acl.deny``) +4) Allow list for paths (section ``acl.allow``) + +The allow and deny sections take key-value pairs. + +Branch-based Access Control +........................... + +Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to +have branch-based access control. Keys in these sections can be +either: + +- a branch name, or +- an asterisk, to match any branch; + +The corresponding values can be either: + +- a comma-separated list containing users and groups, or +- an asterisk, to match anyone; + +Path-based Access Control +......................... + +Use the ``acl.deny`` and ``acl.allow`` sections to have path-based +access control. Keys in these sections accept a subtree pattern (with +a glob syntax by default). The corresponding values follow the same +syntax as the other sections above. + +Groups +...... 
+ +Group names must be prefixed with an ``@`` symbol. Specifying a group +name has the same effect as specifying all the users in that group. + +You can define group members in the ``acl.groups`` section. +If a group name is not defined there, and Mercurial is running under +a Unix-like system, the list of users will be taken from the OS. +Otherwise, an exception will be raised. + +Example Configuration +..................... + +:: + + [hooks] + + # Use this if you want to check access restrictions at commit time + pretxncommit.acl = python:hgext.acl.hook + + # Use this if you want to check access restrictions for pull, push, + # bundle and serve. + pretxnchangegroup.acl = python:hgext.acl.hook + + [acl] + # Allow or deny access for incoming changes only if their source is + # listed here, let them pass otherwise. Source is "serve" for all + # remote access (http or ssh), "push", "pull" or "bundle" when the + # related commands are run locally. + # Default: serve + sources = serve + + [acl.deny.branches] + + # Everyone is denied to the frozen branch: + frozen-branch = * + + # A bad user is denied on all branches: + * = bad-user + + [acl.allow.branches] + + # A few users are allowed on branch-a: + branch-a = user-1, user-2, user-3 + + # Only one user is allowed on branch-b: + branch-b = user-1 + + # The super user is allowed on any branch: + * = super-user + + # Everyone is allowed on branch-for-tests: + branch-for-tests = * + + [acl.deny] + # This list is checked first. If a match is found, acl.allow is not + # checked. All users are granted access if acl.deny is not present. + # Format for both lists: glob pattern = user, ..., @group, ... 
+ + # To match everyone, use an asterisk for the user: + # my/glob/pattern = * + + # user6 will not have write access to any file: + ** = user6 + + # Group "hg-denied" will not have write access to any file: + ** = @hg-denied + + # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite + # everyone being able to change all other files. See below. + src/main/resources/DONT-TOUCH-THIS.txt = * + + [acl.allow] + # if acl.allow is not present, all users are allowed by default + # empty acl.allow = no users allowed + + # User "doc_writer" has write access to any file under the "docs" + # folder: + docs/** = doc_writer + + # User "jack" and group "designers" have write access to any file + # under the "images" folder: + images/** = jack, @designers + + # Everyone (except for "user6" - see acl.deny above) will have write + # access to any file under the "resources" folder (except for 1 + # file. See acl.deny): + src/main/resources/** = * + + .hgtags = release_engineer + +''' + +from mercurial.i18n import _ +from mercurial import util, match +import getpass, urllib + +def _getusers(ui, group): + + # First, try to use group definition from section [acl.groups] + hgrcusers = ui.configlist('acl.groups', group) + if hgrcusers: + return hgrcusers + + ui.debug('acl: "%s" not defined in [acl.groups]\n' % group) + # If no users found in group definition, get users from OS-level group + try: + return util.groupmembers(group) + except KeyError: + raise util.Abort(_("group '%s' is undefined") % group) + +def _usermatch(ui, user, usersorgroups): + + if usersorgroups == '*': + return True + + for ug in usersorgroups.replace(',', ' ').split(): + if user == ug or ug.find('@') == 0 and user in _getusers(ui, ug[1:]): + return True + + return False + +def buildmatch(ui, repo, user, key): + '''return tuple of (match function, list enabled).''' + if not ui.has_section(key): + ui.debug('acl: %s not enabled\n' % key) + return None + + pats = [pat for pat, users in ui.configitems(key) + if 
_usermatch(ui, user, users)] + ui.debug('acl: %s enabled, %d entries for user %s\n' % + (key, len(pats), user)) + + if not repo: + if pats: + return lambda b: '*' in pats or b in pats + return lambda b: False + + if pats: + return match.match(repo.root, '', pats) + return match.exact(repo.root, '', []) + + +def hook(ui, repo, hooktype, node=None, source=None, **kwargs): + if hooktype not in ['pretxnchangegroup', 'pretxncommit']: + raise util.Abort(_('config error - hook type "%s" cannot stop ' + 'incoming changesets nor commits') % hooktype) + if (hooktype == 'pretxnchangegroup' and + source not in ui.config('acl', 'sources', 'serve').split()): + ui.debug('acl: changes have source "%s" - skipping\n' % source) + return + + user = None + if source == 'serve' and 'url' in kwargs: + url = kwargs['url'].split(':') + if url[0] == 'remote' and url[1].startswith('http'): + user = urllib.unquote(url[3]) + + if user is None: + user = getpass.getuser() + + cfg = ui.config('acl', 'config') + if cfg: + ui.readconfig(cfg, sections = ['acl.groups', 'acl.allow.branches', + 'acl.deny.branches', 'acl.allow', 'acl.deny']) + + allowbranches = buildmatch(ui, None, user, 'acl.allow.branches') + denybranches = buildmatch(ui, None, user, 'acl.deny.branches') + allow = buildmatch(ui, repo, user, 'acl.allow') + deny = buildmatch(ui, repo, user, 'acl.deny') + + for rev in xrange(repo[node], len(repo)): + ctx = repo[rev] + branch = ctx.branch() + if denybranches and denybranches(branch): + raise util.Abort(_('acl: user "%s" denied on branch "%s"' + ' (changeset "%s")') + % (user, branch, ctx)) + if allowbranches and not allowbranches(branch): + raise util.Abort(_('acl: user "%s" not allowed on branch "%s"' + ' (changeset "%s")') + % (user, branch, ctx)) + ui.debug('acl: branch access granted: "%s" on branch "%s"\n' + % (ctx, branch)) + + for f in ctx.files(): + if deny and deny(f): + ui.debug('acl: user %s denied on %s\n' % (user, f)) + raise util.Abort(_('acl: access denied for changeset 
%s') % ctx) + if allow and not allow(f): + ui.debug('acl: user %s not allowed on %s\n' % (user, f)) + raise util.Abort(_('acl: access denied for changeset %s') % ctx) + ui.debug('acl: allowing changeset %s\n' % ctx) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo Binary files differnew file mode 100644 index 0000000..8001bf2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py new file mode 100644 index 0000000..1ebbc7a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py @@ -0,0 +1,579 @@ +# Mercurial extension to provide the 'hg bookmark' command +# +# Copyright 2008 David Soria Parra <dsp@php.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''track a line of development with movable markers + +Bookmarks are local movable markers to changesets. Every bookmark +points to a changeset identified by its hash. If you commit a +changeset that is based on a changeset that has a bookmark on it, the +bookmark shifts to the new changeset. + +It is possible to use bookmark names in every revision lookup (e.g. +:hg:`merge`, :hg:`update`). + +By default, when several bookmarks point to the same changeset, they +will all move forward together. It is possible to obtain a more +git-like experience by adding the following configuration option to +your configuration file:: + + [bookmarks] + track.current = True + +This will cause Mercurial to track the bookmark that you are currently +using, and only update it. This is similar to git's approach to +branching. 
+''' + +from mercurial.i18n import _ +from mercurial.node import nullid, nullrev, bin, hex, short +from mercurial import util, commands, repair, extensions, pushkey, hg, url +from mercurial import revset +import os + +def write(repo): + '''Write bookmarks + + Write the given bookmark => hash dictionary to the .hg/bookmarks file + in a format equal to those of localtags. + + We also store a backup of the previous state in undo.bookmarks that + can be copied back on rollback. + ''' + refs = repo._bookmarks + + try: + bms = repo.opener('bookmarks').read() + except IOError: + bms = None + if bms is not None: + repo.opener('undo.bookmarks', 'w').write(bms) + + if repo._bookmarkcurrent not in refs: + setcurrent(repo, None) + wlock = repo.wlock() + try: + file = repo.opener('bookmarks', 'w', atomictemp=True) + for refspec, node in refs.iteritems(): + file.write("%s %s\n" % (hex(node), refspec)) + file.rename() + + # touch 00changelog.i so hgweb reloads bookmarks (no lock needed) + try: + os.utime(repo.sjoin('00changelog.i'), None) + except OSError: + pass + + finally: + wlock.release() + +def setcurrent(repo, mark): + '''Set the name of the bookmark that we are currently on + + Set the name of the bookmark that we are on (hg update <bookmark>). 
+ The name is recorded in .hg/bookmarks.current + ''' + current = repo._bookmarkcurrent + if current == mark: + return + + refs = repo._bookmarks + + # do not update if we do update to a rev equal to the current bookmark + if (mark and mark not in refs and + current and refs[current] == repo.changectx('.').node()): + return + if mark not in refs: + mark = '' + wlock = repo.wlock() + try: + file = repo.opener('bookmarks.current', 'w', atomictemp=True) + file.write(mark) + file.rename() + finally: + wlock.release() + repo._bookmarkcurrent = mark + +def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None): + '''track a line of development with movable markers + + Bookmarks are pointers to certain commits that move when + committing. Bookmarks are local. They can be renamed, copied and + deleted. It is possible to use bookmark names in :hg:`merge` and + :hg:`update` to merge and update respectively to a given bookmark. + + You can use :hg:`bookmark NAME` to set a bookmark on the working + directory's parent revision with the given name. If you specify + a revision using -r REV (where REV may be an existing bookmark), + the bookmark is assigned to that revision. + + Bookmarks can be pushed and pulled between repositories (see :hg:`help + push` and :hg:`help pull`). This requires the bookmark extension to be + enabled for both the local and remote repositories. 
+ ''' + hexfn = ui.debugflag and hex or short + marks = repo._bookmarks + cur = repo.changectx('.').node() + + if rename: + if rename not in marks: + raise util.Abort(_("a bookmark of this name does not exist")) + if mark in marks and not force: + raise util.Abort(_("a bookmark of the same name already exists")) + if mark is None: + raise util.Abort(_("new bookmark name required")) + marks[mark] = marks[rename] + del marks[rename] + if repo._bookmarkcurrent == rename: + setcurrent(repo, mark) + write(repo) + return + + if delete: + if mark is None: + raise util.Abort(_("bookmark name required")) + if mark not in marks: + raise util.Abort(_("a bookmark of this name does not exist")) + if mark == repo._bookmarkcurrent: + setcurrent(repo, None) + del marks[mark] + write(repo) + return + + if mark != None: + if "\n" in mark: + raise util.Abort(_("bookmark name cannot contain newlines")) + mark = mark.strip() + if not mark: + raise util.Abort(_("bookmark names cannot consist entirely of " + "whitespace")) + if mark in marks and not force: + raise util.Abort(_("a bookmark of the same name already exists")) + if ((mark in repo.branchtags() or mark == repo.dirstate.branch()) + and not force): + raise util.Abort( + _("a bookmark cannot have the name of an existing branch")) + if rev: + marks[mark] = repo.lookup(rev) + else: + marks[mark] = repo.changectx('.').node() + setcurrent(repo, mark) + write(repo) + return + + if mark is None: + if rev: + raise util.Abort(_("bookmark name required")) + if len(marks) == 0: + ui.status(_("no bookmarks set\n")) + else: + for bmark, n in marks.iteritems(): + if ui.configbool('bookmarks', 'track.current'): + current = repo._bookmarkcurrent + if bmark == current and n == cur: + prefix, label = '*', 'bookmarks.current' + else: + prefix, label = ' ', '' + else: + if n == cur: + prefix, label = '*', 'bookmarks.current' + else: + prefix, label = ' ', '' + + if ui.quiet: + ui.write("%s\n" % bmark, label=label) + else: + ui.write(" %s %-25s 
%d:%s\n" % ( + prefix, bmark, repo.changelog.rev(n), hexfn(n)), + label=label) + return + +def _revstostrip(changelog, node): + srev = changelog.rev(node) + tostrip = [srev] + saveheads = [] + for r in xrange(srev, len(changelog)): + parents = changelog.parentrevs(r) + if parents[0] in tostrip or parents[1] in tostrip: + tostrip.append(r) + if parents[1] != nullrev: + for p in parents: + if p not in tostrip and p > srev: + saveheads.append(p) + return [r for r in tostrip if r not in saveheads] + +def strip(oldstrip, ui, repo, node, backup="all"): + """Strip bookmarks if revisions are stripped using + the mercurial.strip method. This usually happens during + qpush and qpop""" + revisions = _revstostrip(repo.changelog, node) + marks = repo._bookmarks + update = [] + for mark, n in marks.iteritems(): + if repo.changelog.rev(n) in revisions: + update.append(mark) + oldstrip(ui, repo, node, backup) + if len(update) > 0: + for m in update: + marks[m] = repo.changectx('.').node() + write(repo) + +def reposetup(ui, repo): + if not repo.local(): + return + + class bookmark_repo(repo.__class__): + + @util.propertycache + def _bookmarks(self): + '''Parse .hg/bookmarks file and return a dictionary + + Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values + in the .hg/bookmarks file. + Read the file and return a (name=>nodeid) dictionary + ''' + try: + bookmarks = {} + for line in self.opener('bookmarks'): + sha, refspec = line.strip().split(' ', 1) + bookmarks[refspec] = self.changelog.lookup(sha) + except: + pass + return bookmarks + + @util.propertycache + def _bookmarkcurrent(self): + '''Get the current bookmark + + If we use gittishsh branches we have a current bookmark that + we are on. This function returns the name of the bookmark. 
It + is stored in .hg/bookmarks.current + ''' + mark = None + if os.path.exists(self.join('bookmarks.current')): + file = self.opener('bookmarks.current') + # No readline() in posixfile_nt, reading everything is cheap + mark = (file.readlines() or [''])[0] + if mark == '': + mark = None + file.close() + return mark + + def rollback(self, *args): + if os.path.exists(self.join('undo.bookmarks')): + util.rename(self.join('undo.bookmarks'), self.join('bookmarks')) + return super(bookmark_repo, self).rollback(*args) + + def lookup(self, key): + if key in self._bookmarks: + key = self._bookmarks[key] + return super(bookmark_repo, self).lookup(key) + + def _bookmarksupdate(self, parents, node): + marks = self._bookmarks + update = False + if ui.configbool('bookmarks', 'track.current'): + mark = self._bookmarkcurrent + if mark and marks[mark] in parents: + marks[mark] = node + update = True + else: + for mark, n in marks.items(): + if n in parents: + marks[mark] = node + update = True + if update: + write(self) + + def commitctx(self, ctx, error=False): + """Add a revision to the repository and + move the bookmark""" + wlock = self.wlock() # do both commit and bookmark with lock held + try: + node = super(bookmark_repo, self).commitctx(ctx, error) + if node is None: + return None + parents = self.changelog.parents(node) + if parents[1] == nullid: + parents = (parents[0],) + + self._bookmarksupdate(parents, node) + return node + finally: + wlock.release() + + def pull(self, remote, heads=None, force=False): + result = super(bookmark_repo, self).pull(remote, heads, force) + + self.ui.debug("checking for updated bookmarks\n") + rb = remote.listkeys('bookmarks') + changed = False + for k in rb.keys(): + if k in self._bookmarks: + nr, nl = rb[k], self._bookmarks[k] + if nr in self: + cr = self[nr] + cl = self[nl] + if cl.rev() >= cr.rev(): + continue + if cr in cl.descendants(): + self._bookmarks[k] = cr.node() + changed = True + self.ui.status(_("updating bookmark %s\n") % k) 
+ else: + self.ui.warn(_("not updating divergent" + " bookmark %s\n") % k) + if changed: + write(repo) + + return result + + def push(self, remote, force=False, revs=None, newbranch=False): + result = super(bookmark_repo, self).push(remote, force, revs, + newbranch) + + self.ui.debug("checking for updated bookmarks\n") + rb = remote.listkeys('bookmarks') + for k in rb.keys(): + if k in self._bookmarks: + nr, nl = rb[k], self._bookmarks[k] + if nr in self: + cr = self[nr] + cl = self[nl] + if cl in cr.descendants(): + r = remote.pushkey('bookmarks', k, nr, nl) + if r: + self.ui.status(_("updating bookmark %s\n") % k) + else: + self.ui.warn(_('updating bookmark %s' + ' failed!\n') % k) + + return result + + def addchangegroup(self, *args, **kwargs): + parents = self.dirstate.parents() + + result = super(bookmark_repo, self).addchangegroup(*args, **kwargs) + if result > 1: + # We have more heads than before + return result + node = self.changelog.tip() + + self._bookmarksupdate(parents, node) + return result + + def _findtags(self): + """Merge bookmarks with normal tags""" + (tags, tagtypes) = super(bookmark_repo, self)._findtags() + tags.update(self._bookmarks) + return (tags, tagtypes) + + if hasattr(repo, 'invalidate'): + def invalidate(self): + super(bookmark_repo, self).invalidate() + for attr in ('_bookmarks', '_bookmarkcurrent'): + if attr in self.__dict__: + delattr(self, attr) + + repo.__class__ = bookmark_repo + +def listbookmarks(repo): + # We may try to list bookmarks on a repo type that does not + # support it (e.g., statichttprepository). 
+ if not hasattr(repo, '_bookmarks'): + return {} + + d = {} + for k, v in repo._bookmarks.iteritems(): + d[k] = hex(v) + return d + +def pushbookmark(repo, key, old, new): + w = repo.wlock() + try: + marks = repo._bookmarks + if hex(marks.get(key, '')) != old: + return False + if new == '': + del marks[key] + else: + if new not in repo: + return False + marks[key] = repo[new].node() + write(repo) + return True + finally: + w.release() + +def pull(oldpull, ui, repo, source="default", **opts): + # translate bookmark args to rev args for actual pull + if opts.get('bookmark'): + # this is an unpleasant hack as pull will do this internally + source, branches = hg.parseurl(ui.expandpath(source), + opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), source) + rb = other.listkeys('bookmarks') + + for b in opts['bookmark']: + if b not in rb: + raise util.Abort(_('remote bookmark %s not found!') % b) + opts.setdefault('rev', []).append(b) + + result = oldpull(ui, repo, source, **opts) + + # update specified bookmarks + if opts.get('bookmark'): + for b in opts['bookmark']: + # explicit pull overrides local bookmark if any + ui.status(_("importing bookmark %s\n") % b) + repo._bookmarks[b] = repo[rb[b]].node() + write(repo) + + return result + +def push(oldpush, ui, repo, dest=None, **opts): + dopush = True + if opts.get('bookmark'): + dopush = False + for b in opts['bookmark']: + if b in repo._bookmarks: + dopush = True + opts.setdefault('rev', []).append(b) + + result = 0 + if dopush: + result = oldpush(ui, repo, dest, **opts) + + if opts.get('bookmark'): + # this is an unpleasant hack as push will do this internally + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest, opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), dest) + rb = other.listkeys('bookmarks') + for b in opts['bookmark']: + # explicit push overrides remote bookmark if any + if b in repo._bookmarks: + ui.status(_("exporting 
bookmark %s\n") % b) + new = repo[b].hex() + elif b in rb: + ui.status(_("deleting remote bookmark %s\n") % b) + new = '' # delete + else: + ui.warn(_('bookmark %s does not exist on the local ' + 'or remote repository!\n') % b) + return 2 + old = rb.get(b, '') + r = other.pushkey('bookmarks', b, old, new) + if not r: + ui.warn(_('updating bookmark %s failed!\n') % b) + if not result: + result = 2 + + return result + +def diffbookmarks(ui, repo, remote): + ui.status(_("searching for changed bookmarks\n")) + + lmarks = repo.listkeys('bookmarks') + rmarks = remote.listkeys('bookmarks') + + diff = sorted(set(rmarks) - set(lmarks)) + for k in diff: + ui.write(" %-25s %s\n" % (k, rmarks[k][:12])) + + if len(diff) <= 0: + ui.status(_("no changed bookmarks found\n")) + return 1 + return 0 + +def incoming(oldincoming, ui, repo, source="default", **opts): + if opts.get('bookmarks'): + source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), source) + ui.status(_('comparing with %s\n') % url.hidepassword(source)) + return diffbookmarks(ui, repo, other) + else: + return oldincoming(ui, repo, source, **opts) + +def outgoing(oldoutgoing, ui, repo, dest=None, **opts): + if opts.get('bookmarks'): + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest, opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), dest) + ui.status(_('comparing with %s\n') % url.hidepassword(dest)) + return diffbookmarks(ui, other, repo) + else: + return oldoutgoing(ui, repo, dest, **opts) + +def uisetup(ui): + extensions.wrapfunction(repair, "strip", strip) + if ui.configbool('bookmarks', 'track.current'): + extensions.wrapcommand(commands.table, 'update', updatecurbookmark) + + entry = extensions.wrapcommand(commands.table, 'pull', pull) + entry[1].append(('B', 'bookmark', [], + _("bookmark to import"), + _('BOOKMARK'))) + entry = extensions.wrapcommand(commands.table, 'push', 
push) + entry[1].append(('B', 'bookmark', [], + _("bookmark to export"), + _('BOOKMARK'))) + entry = extensions.wrapcommand(commands.table, 'incoming', incoming) + entry[1].append(('B', 'bookmarks', False, + _("compare bookmark"))) + entry = extensions.wrapcommand(commands.table, 'outgoing', outgoing) + entry[1].append(('B', 'bookmarks', False, + _("compare bookmark"))) + + pushkey.register('bookmarks', pushbookmark, listbookmarks) + +def updatecurbookmark(orig, ui, repo, *args, **opts): + '''Set the current bookmark + + If the user updates to a bookmark we update the .hg/bookmarks.current + file. + ''' + res = orig(ui, repo, *args, **opts) + rev = opts['rev'] + if not rev and len(args) > 0: + rev = args[0] + setcurrent(repo, rev) + return res + +def bmrevset(repo, subset, x): + """``bookmark([name])`` + The named bookmark or all bookmarks. + """ + # i18n: "bookmark" is a keyword + args = revset.getargs(x, 0, 1, _('bookmark takes one or no arguments')) + if args: + bm = revset.getstring(args[0], + # i18n: "bookmark" is a keyword + _('the argument to bookmark must be a string')) + bmrev = listbookmarks(repo).get(bm, None) + if bmrev: + bmrev = repo.changelog.rev(bin(bmrev)) + return [r for r in subset if r == bmrev] + bms = set([repo.changelog.rev(bin(r)) for r in listbookmarks(repo).values()]) + return [r for r in subset if r in bms] + +def extsetup(ui): + revset.symbols['bookmark'] = bmrevset + +cmdtable = { + "bookmarks": + (bookmark, + [('f', 'force', False, _('force')), + ('r', 'rev', '', _('revision'), _('REV')), + ('d', 'delete', False, _('delete a given bookmark')), + ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))], + _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')), +} + +colortable = {'bookmarks.current': 'green'} + +# tell hggettext to extract docstrings from these functions: +i18nfunctions = [bmrevset] diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo Binary files differnew file mode 100644 index 0000000..9cad1f2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py new file mode 100644 index 0000000..de72e91 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py @@ -0,0 +1,441 @@ +# bugzilla.py - bugzilla integration for mercurial +# +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''hooks for integrating with the Bugzilla bug tracker + +This hook extension adds comments on bugs in Bugzilla when changesets +that refer to bugs by Bugzilla ID are seen. The hook does not change +bug status. + +The hook updates the Bugzilla database directly. Only Bugzilla +installations using MySQL are supported. + +The hook relies on a Bugzilla script to send bug change notification +emails. That script changes between Bugzilla versions; the +'processmail' script used prior to 2.18 is replaced in 2.18 and +subsequent versions by 'config/sendbugmail.pl'. Note that these will +be run by Mercurial as the user pushing the change; you will need to +ensure the Bugzilla install file permissions are set appropriately. + +The extension is configured through three different configuration +sections. These keys are recognized in the [bugzilla] section: + +host + Hostname of the MySQL server holding the Bugzilla database. + +db + Name of the Bugzilla database in MySQL. Default 'bugs'. + +user + Username to use to access MySQL server. Default 'bugs'. + +password + Password to use to access MySQL server. + +timeout + Database connection timeout (seconds). Default 5. + +version + Bugzilla version. 
Specify '3.0' for Bugzilla versions 3.0 and later, + '2.18' for Bugzilla versions from 2.18 and '2.16' for versions prior + to 2.18. + +bzuser + Fallback Bugzilla user name to record comments with, if changeset + committer cannot be found as a Bugzilla user. + +bzdir + Bugzilla install directory. Used by default notify. Default + '/var/www/html/bugzilla'. + +notify + The command to run to get Bugzilla to send bug change notification + emails. Substitutes from a map with 3 keys, 'bzdir', 'id' (bug id) + and 'user' (committer bugzilla email). Default depends on version; + from 2.18 it is "cd %(bzdir)s && perl -T contrib/sendbugmail.pl + %(id)s %(user)s". + +regexp + Regular expression to match bug IDs in changeset commit message. + Must contain one "()" group. The default expression matches 'Bug + 1234', 'Bug no. 1234', 'Bug number 1234', 'Bugs 1234,5678', 'Bug + 1234 and 5678' and variations thereof. Matching is case insensitive. + +style + The style file to use when formatting comments. + +template + Template to use when formatting comments. Overrides style if + specified. In addition to the usual Mercurial keywords, the + extension specifies:: + + {bug} The Bugzilla bug ID. + {root} The full pathname of the Mercurial repository. + {webroot} Stripped pathname of the Mercurial repository. + {hgweb} Base URL for browsing Mercurial repositories. + + Default 'changeset {node|short} in repo {root} refers ' + 'to bug {bug}.\\ndetails:\\n\\t{desc|tabindent}' + +strip + The number of slashes to strip from the front of {root} to produce + {webroot}. Default 0. + +usermap + Path of file containing Mercurial committer ID to Bugzilla user ID + mappings. If specified, the file should contain one mapping per + line, "committer"="Bugzilla user". See also the [usermap] section. + +The [usermap] section is used to specify mappings of Mercurial +committer ID to Bugzilla user ID. See also [bugzilla].usermap. 
+"committer"="Bugzilla user" + +Finally, the [web] section supports one entry: + +baseurl + Base URL for browsing Mercurial repositories. Reference from + templates as {hgweb}. + +Activating the extension:: + + [extensions] + bugzilla = + + [hooks] + # run bugzilla hook on every change pulled or pushed in here + incoming.bugzilla = python:hgext.bugzilla.hook + +Example configuration: + +This example configuration is for a collection of Mercurial +repositories in /var/local/hg/repos/ used with a local Bugzilla 3.2 +installation in /opt/bugzilla-3.2. :: + + [bugzilla] + host=localhost + password=XYZZY + version=3.0 + bzuser=unknown@domain.com + bzdir=/opt/bugzilla-3.2 + template=Changeset {node|short} in {root|basename}. + {hgweb}/{webroot}/rev/{node|short}\\n + {desc}\\n + strip=5 + + [web] + baseurl=http://dev.domain.com/hg + + [usermap] + user@emaildomain.com=user.name@bugzilladomain.com + +Commits add a comment to the Bugzilla bug record of the form:: + + Changeset 3b16791d6642 in repository-name. + http://dev.domain.com/hg/repository-name/rev/3b16791d6642 + + Changeset commit comment. Bug 1234. 
+''' + +from mercurial.i18n import _ +from mercurial.node import short +from mercurial import cmdutil, templater, util +import re, time + +MySQLdb = None + +def buglist(ids): + return '(' + ','.join(map(str, ids)) + ')' + +class bugzilla_2_16(object): + '''support for bugzilla version 2.16.''' + + def __init__(self, ui): + self.ui = ui + host = self.ui.config('bugzilla', 'host', 'localhost') + user = self.ui.config('bugzilla', 'user', 'bugs') + passwd = self.ui.config('bugzilla', 'password') + db = self.ui.config('bugzilla', 'db', 'bugs') + timeout = int(self.ui.config('bugzilla', 'timeout', 5)) + usermap = self.ui.config('bugzilla', 'usermap') + if usermap: + self.ui.readconfig(usermap, sections=['usermap']) + self.ui.note(_('connecting to %s:%s as %s, password %s\n') % + (host, db, user, '*' * len(passwd))) + self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd, + db=db, connect_timeout=timeout) + self.cursor = self.conn.cursor() + self.longdesc_id = self.get_longdesc_id() + self.user_ids = {} + self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s" + + def run(self, *args, **kwargs): + '''run a query.''' + self.ui.note(_('query: %s %s\n') % (args, kwargs)) + try: + self.cursor.execute(*args, **kwargs) + except MySQLdb.MySQLError: + self.ui.note(_('failed query: %s %s\n') % (args, kwargs)) + raise + + def get_longdesc_id(self): + '''get identity of longdesc field''' + self.run('select fieldid from fielddefs where name = "longdesc"') + ids = self.cursor.fetchall() + if len(ids) != 1: + raise util.Abort(_('unknown database schema')) + return ids[0][0] + + def filter_real_bug_ids(self, ids): + '''filter not-existing bug ids from list.''' + self.run('select bug_id from bugs where bug_id in %s' % buglist(ids)) + return sorted([c[0] for c in self.cursor.fetchall()]) + + def filter_unknown_bug_ids(self, node, ids): + '''filter bug ids from list that already refer to this changeset.''' + + self.run('''select bug_id from longdescs where + bug_id 
in %s and thetext like "%%%s%%"''' % + (buglist(ids), short(node))) + unknown = set(ids) + for (id,) in self.cursor.fetchall(): + self.ui.status(_('bug %d already knows about changeset %s\n') % + (id, short(node))) + unknown.discard(id) + return sorted(unknown) + + def notify(self, ids, committer): + '''tell bugzilla to send mail.''' + + self.ui.status(_('telling bugzilla to send mail:\n')) + (user, userid) = self.get_bugzilla_user(committer) + for id in ids: + self.ui.status(_(' bug %s\n') % id) + cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify) + bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla') + try: + # Backwards-compatible with old notify string, which + # took one string. This will throw with a new format + # string. + cmd = cmdfmt % id + except TypeError: + cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user} + self.ui.note(_('running notify command %s\n') % cmd) + fp = util.popen('(%s) 2>&1' % cmd) + out = fp.read() + ret = fp.close() + if ret: + self.ui.warn(out) + raise util.Abort(_('bugzilla notify command %s') % + util.explain_exit(ret)[0]) + self.ui.status(_('done\n')) + + def get_user_id(self, user): + '''look up numeric bugzilla user id.''' + try: + return self.user_ids[user] + except KeyError: + try: + userid = int(user) + except ValueError: + self.ui.note(_('looking up user %s\n') % user) + self.run('''select userid from profiles + where login_name like %s''', user) + all = self.cursor.fetchall() + if len(all) != 1: + raise KeyError(user) + userid = int(all[0][0]) + self.user_ids[user] = userid + return userid + + def map_committer(self, user): + '''map name of committer to bugzilla user name.''' + for committer, bzuser in self.ui.configitems('usermap'): + if committer.lower() == user.lower(): + return bzuser + return user + + def get_bugzilla_user(self, committer): + '''see if committer is a registered bugzilla user. Return + bugzilla username and userid if so. 
If not, return default + bugzilla username and userid.''' + user = self.map_committer(committer) + try: + userid = self.get_user_id(user) + except KeyError: + try: + defaultuser = self.ui.config('bugzilla', 'bzuser') + if not defaultuser: + raise util.Abort(_('cannot find bugzilla user id for %s') % + user) + userid = self.get_user_id(defaultuser) + user = defaultuser + except KeyError: + raise util.Abort(_('cannot find bugzilla user id for %s or %s') % + (user, defaultuser)) + return (user, userid) + + def add_comment(self, bugid, text, committer): + '''add comment to bug. try adding comment as committer of + changeset, otherwise as default bugzilla user.''' + (user, userid) = self.get_bugzilla_user(committer) + now = time.strftime('%Y-%m-%d %H:%M:%S') + self.run('''insert into longdescs + (bug_id, who, bug_when, thetext) + values (%s, %s, %s, %s)''', + (bugid, userid, now, text)) + self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid) + values (%s, %s, %s, %s)''', + (bugid, userid, now, self.longdesc_id)) + self.conn.commit() + +class bugzilla_2_18(bugzilla_2_16): + '''support for bugzilla 2.18 series.''' + + def __init__(self, ui): + bugzilla_2_16.__init__(self, ui) + self.default_notify = \ + "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s" + +class bugzilla_3_0(bugzilla_2_18): + '''support for bugzilla 3.0 series.''' + + def __init__(self, ui): + bugzilla_2_18.__init__(self, ui) + + def get_longdesc_id(self): + '''get identity of longdesc field''' + self.run('select id from fielddefs where name = "longdesc"') + ids = self.cursor.fetchall() + if len(ids) != 1: + raise util.Abort(_('unknown database schema')) + return ids[0][0] + +class bugzilla(object): + # supported versions of bugzilla. different versions have + # different schemas. 
+ _versions = { + '2.16': bugzilla_2_16, + '2.18': bugzilla_2_18, + '3.0': bugzilla_3_0 + } + + _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*' + r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)') + + _bz = None + + def __init__(self, ui, repo): + self.ui = ui + self.repo = repo + + def bz(self): + '''return object that knows how to talk to bugzilla version in + use.''' + + if bugzilla._bz is None: + bzversion = self.ui.config('bugzilla', 'version') + try: + bzclass = bugzilla._versions[bzversion] + except KeyError: + raise util.Abort(_('bugzilla version %s not supported') % + bzversion) + bugzilla._bz = bzclass(self.ui) + return bugzilla._bz + + def __getattr__(self, key): + return getattr(self.bz(), key) + + _bug_re = None + _split_re = None + + def find_bug_ids(self, ctx): + '''find valid bug ids that are referred to in changeset + comments and that do not already have references to this + changeset.''' + + if bugzilla._bug_re is None: + bugzilla._bug_re = re.compile( + self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re), + re.IGNORECASE) + bugzilla._split_re = re.compile(r'\D+') + start = 0 + ids = set() + while True: + m = bugzilla._bug_re.search(ctx.description(), start) + if not m: + break + start = m.end() + for id in bugzilla._split_re.split(m.group(1)): + if not id: + continue + ids.add(int(id)) + if ids: + ids = self.filter_real_bug_ids(ids) + if ids: + ids = self.filter_unknown_bug_ids(ctx.node(), ids) + return ids + + def update(self, bugid, ctx): + '''update bugzilla bug with reference to changeset.''' + + def webroot(root): + '''strip leading prefix of repo root and turn into + url-safe path.''' + count = int(self.ui.config('bugzilla', 'strip', 0)) + root = util.pconvert(root) + while count > 0: + c = root.find('/') + if c == -1: + break + root = root[c + 1:] + count -= 1 + return root + + mapfile = self.ui.config('bugzilla', 'style') + tmpl = self.ui.config('bugzilla', 'template') + t = cmdutil.changeset_templater(self.ui, 
self.repo, + False, None, mapfile, False) + if not mapfile and not tmpl: + tmpl = _('changeset {node|short} in repo {root} refers ' + 'to bug {bug}.\ndetails:\n\t{desc|tabindent}') + if tmpl: + tmpl = templater.parsestring(tmpl, quoted=False) + t.use_template(tmpl) + self.ui.pushbuffer() + t.show(ctx, changes=ctx.changeset(), + bug=str(bugid), + hgweb=self.ui.config('web', 'baseurl'), + root=self.repo.root, + webroot=webroot(self.repo.root)) + data = self.ui.popbuffer() + self.add_comment(bugid, data, util.email(ctx.user())) + +def hook(ui, repo, hooktype, node=None, **kwargs): + '''add comment to bugzilla for each changeset that refers to a + bugzilla bug id. only add a comment once per bug, so same change + seen multiple times does not fill bug with duplicate data.''' + try: + import MySQLdb as mysql + global MySQLdb + MySQLdb = mysql + except ImportError, err: + raise util.Abort(_('python mysql support not available: %s') % err) + + if node is None: + raise util.Abort(_('hook type %s does not pass a changeset id') % + hooktype) + try: + bz = bugzilla(ui, repo) + ctx = repo[node] + ids = bz.find_bug_ids(ctx) + if ids: + for id in ids: + bz.update(id, ctx) + bz.notify(ids, util.email(ctx.user())) + except MySQLdb.MySQLError, err: + raise util.Abort(_('database error: %s') % err.args[1]) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo Binary files differnew file mode 100644 index 0000000..b4bfa04 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py new file mode 100644 index 0000000..da2fe9c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py @@ -0,0 +1,45 @@ +# Mercurial extension to provide the 'hg children' command +# +# Copyright 2007 by Intevation GmbH 
<intevation@intevation.de> +# +# Author(s): +# Thomas Arendsen Hein <thomas@intevation.de> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to display child changesets''' + +from mercurial import cmdutil +from mercurial.commands import templateopts +from mercurial.i18n import _ + + +def children(ui, repo, file_=None, **opts): + """show the children of the given or working directory revision + + Print the children of the working directory's revisions. If a + revision is given via -r/--rev, the children of that revision will + be printed. If a file argument is given, revision in which the + file was last changed (after the working directory revision or the + argument to --rev if given) is printed. + """ + rev = opts.get('rev') + if file_: + ctx = repo.filectx(file_, changeid=rev) + else: + ctx = repo[rev] + + displayer = cmdutil.show_changeset(ui, repo, opts) + for cctx in ctx.children(): + displayer.show(cctx) + displayer.close() + +cmdtable = { + "children": + (children, + [('r', 'rev', '', + _('show children of the specified revision'), _('REV')), + ] + templateopts, + _('hg children [-r REV] [FILE]')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo Binary files differnew file mode 100644 index 0000000..05aecd2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py new file mode 100644 index 0000000..32e481f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py @@ -0,0 +1,198 @@ +# churn.py - create a graph of revisions count grouped by template +# +# Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net> +# Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua> +# +# This 
software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to display statistics about repository history''' + +from mercurial.i18n import _ +from mercurial import patch, cmdutil, util, templater, commands +import os +import time, datetime + +def maketemplater(ui, repo, tmpl): + tmpl = templater.parsestring(tmpl, quoted=False) + try: + t = cmdutil.changeset_templater(ui, repo, False, None, None, False) + except SyntaxError, inst: + raise util.Abort(inst.args[0]) + t.use_template(tmpl) + return t + +def changedlines(ui, repo, ctx1, ctx2, fns): + added, removed = 0, 0 + fmatch = cmdutil.matchfiles(repo, fns) + diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch)) + for l in diff.split('\n'): + if l.startswith("+") and not l.startswith("+++ "): + added += 1 + elif l.startswith("-") and not l.startswith("--- "): + removed += 1 + return (added, removed) + +def countrate(ui, repo, amap, *pats, **opts): + """Calculate stats""" + if opts.get('dateformat'): + def getkey(ctx): + t, tz = ctx.date() + date = datetime.datetime(*time.gmtime(float(t) - tz)[:6]) + return date.strftime(opts['dateformat']) + else: + tmpl = opts.get('template', '{author|email}') + tmpl = maketemplater(ui, repo, tmpl) + def getkey(ctx): + ui.pushbuffer() + tmpl.show(ctx) + return ui.popbuffer() + + state = {'count': 0} + rate = {} + df = False + if opts.get('date'): + df = util.matchdate(opts['date']) + + m = cmdutil.match(repo, pats, opts) + def prep(ctx, fns): + rev = ctx.rev() + if df and not df(ctx.date()[0]): # doesn't match date format + return + + key = getkey(ctx) + key = amap.get(key, key) # alias remap + key = key.strip() # ignore leading and trailing spaces + if opts.get('changesets'): + rate[key] = (rate.get(key, (0,))[0] + 1, 0) + else: + parents = ctx.parents() + if len(parents) > 1: + ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,)) + return + + ctx1 = parents[0] + lines = 
changedlines(ui, repo, ctx1, ctx, fns) + rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)] + + state['count'] += 1 + ui.progress(_('analyzing'), state['count'], total=len(repo)) + + for ctx in cmdutil.walkchangerevs(repo, m, opts, prep): + continue + + ui.progress(_('analyzing'), None) + + return rate + + +def churn(ui, repo, *pats, **opts): + '''histogram of changes to the repository + + This command will display a histogram representing the number + of changed lines or revisions, grouped according to the given + template. The default template will group changes by author. + The --dateformat option may be used to group the results by + date instead. + + Statistics are based on the number of changed lines, or + alternatively the number of matching revisions if the + --changesets option is specified. + + Examples:: + + # display count of changed lines for every committer + hg churn -t '{author|email}' + + # display daily activity graph + hg churn -f '%H' -s -c + + # display activity of developers by month + hg churn -f '%Y-%m' -s -c + + # display count of lines changed in every year + hg churn -f '%Y' -s + + It is possible to map alternate email addresses to a main address + by providing a file using the following format:: + + <alias email> = <actual email> + + Such a file may be specified with the --aliases option, otherwise + a .hgchurn file will be looked for in the working directory root. 
+ ''' + def pad(s, l): + return (s + " " * l)[:l] + + amap = {} + aliases = opts.get('aliases') + if not aliases and os.path.exists(repo.wjoin('.hgchurn')): + aliases = repo.wjoin('.hgchurn') + if aliases: + for l in open(aliases, "r"): + try: + alias, actual = l.split('=' in l and '=' or None, 1) + amap[alias.strip()] = actual.strip() + except ValueError: + l = l.strip() + if l: + ui.warn(_("skipping malformed alias: %s\n" % l)) + continue + + rate = countrate(ui, repo, amap, *pats, **opts).items() + if not rate: + return + + sortkey = ((not opts.get('sort')) and (lambda x: -sum(x[1])) or None) + rate.sort(key=sortkey) + + # Be careful not to have a zero maxcount (issue833) + maxcount = float(max(sum(v) for k, v in rate)) or 1.0 + maxname = max(len(k) for k, v in rate) + + ttywidth = ui.termwidth() + ui.debug("assuming %i character terminal\n" % ttywidth) + width = ttywidth - maxname - 2 - 2 - 2 + + if opts.get('diffstat'): + width -= 15 + def format(name, diffstat): + added, removed = diffstat + return "%s %15s %s%s\n" % (pad(name, maxname), + '+%d/-%d' % (added, removed), + ui.label('+' * charnum(added), + 'diffstat.inserted'), + ui.label('-' * charnum(removed), + 'diffstat.deleted')) + else: + width -= 6 + def format(name, count): + return "%s %6d %s\n" % (pad(name, maxname), sum(count), + '*' * charnum(sum(count))) + + def charnum(count): + return int(round(count * width / maxcount)) + + for name, count in rate: + ui.write(format(name, count)) + + +cmdtable = { + "churn": + (churn, + [('r', 'rev', [], + _('count rate for the specified revision or range'), _('REV')), + ('d', 'date', '', + _('count rate for revisions matching date spec'), _('DATE')), + ('t', 'template', '{author|email}', + _('template to group changesets'), _('TEMPLATE')), + ('f', 'dateformat', '', + _('strftime-compatible format for grouping by date'), _('FORMAT')), + ('c', 'changesets', False, _('count rate by number of changesets')), + ('s', 'sort', False, _('sort by key (default: sort by 
count)')), + ('', 'diffstat', False, _('display added/removed lines separately')), + ('', 'aliases', '', + _('file with email aliases'), _('FILE')), + ] + commands.walkopts, + _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]")), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo Binary files differnew file mode 100644 index 0000000..90d9a2d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py new file mode 100644 index 0000000..df78f8d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py @@ -0,0 +1,319 @@ +# color.py color output for the status and qseries commands +# +# Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com> +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 2 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +'''colorize output from some commands + +This extension modifies the status and resolve commands to add color to their +output to reflect file status, the qseries command to add color to reflect +patch status (applied, unapplied, missing), and to diff-related +commands to highlight additions, removals, diff headers, and trailing +whitespace. 
+ +Other effects in addition to color, like bold and underlined text, are +also available. Effects are rendered with the ECMA-48 SGR control +function (aka ANSI escape codes). This module also provides the +render_text function, which can be used to add effects to any text. + +Default effects may be overridden from your configuration file:: + + [color] + status.modified = blue bold underline red_background + status.added = green bold + status.removed = red bold blue_background + status.deleted = cyan bold underline + status.unknown = magenta bold underline + status.ignored = black bold + + # 'none' turns off all effects + status.clean = none + status.copied = none + + qseries.applied = blue bold underline + qseries.unapplied = black bold + qseries.missing = red bold + + diff.diffline = bold + diff.extended = cyan bold + diff.file_a = red bold + diff.file_b = green bold + diff.hunk = magenta + diff.deleted = red + diff.inserted = green + diff.changed = white + diff.trailingwhitespace = bold red_background + + resolve.unresolved = red bold + resolve.resolved = green bold + + bookmarks.current = green + + branches.active = none + branches.closed = black bold + branches.current = green + branches.inactive = none + +The color extension will try to detect whether to use ANSI codes or +Win32 console APIs, unless it is made explicit:: + + [color] + mode = ansi + +Any value other than 'ansi', 'win32', or 'auto' will disable color. 
+ +''' + +import os + +from mercurial import commands, dispatch, extensions, ui as uimod, util +from mercurial.i18n import _ + +# start and stop parameters for effects +_effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33, + 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1, + 'italic': 3, 'underline': 4, 'inverse': 7, + 'black_background': 40, 'red_background': 41, + 'green_background': 42, 'yellow_background': 43, + 'blue_background': 44, 'purple_background': 45, + 'cyan_background': 46, 'white_background': 47} + +_styles = {'grep.match': 'red bold', + 'branches.active': 'none', + 'branches.closed': 'black bold', + 'branches.current': 'green', + 'branches.inactive': 'none', + 'diff.changed': 'white', + 'diff.deleted': 'red', + 'diff.diffline': 'bold', + 'diff.extended': 'cyan bold', + 'diff.file_a': 'red bold', + 'diff.file_b': 'green bold', + 'diff.hunk': 'magenta', + 'diff.inserted': 'green', + 'diff.trailingwhitespace': 'bold red_background', + 'diffstat.deleted': 'red', + 'diffstat.inserted': 'green', + 'log.changeset': 'yellow', + 'resolve.resolved': 'green bold', + 'resolve.unresolved': 'red bold', + 'status.added': 'green bold', + 'status.clean': 'none', + 'status.copied': 'none', + 'status.deleted': 'cyan bold underline', + 'status.ignored': 'black bold', + 'status.modified': 'blue bold', + 'status.removed': 'red bold', + 'status.unknown': 'magenta bold underline'} + + +def render_effects(text, effects): + 'Wrap text in commands to turn on each effect.' + if not text: + return text + start = [str(_effects[e]) for e in ['none'] + effects.split()] + start = '\033[' + ';'.join(start) + 'm' + stop = '\033[' + str(_effects['none']) + 'm' + return ''.join([start, text, stop]) + +def extstyles(): + for name, ext in extensions.extensions(): + _styles.update(getattr(ext, 'colortable', {})) + +def configstyles(ui): + for status, cfgeffects in ui.configitems('color'): + if '.' 
not in status: + continue + cfgeffects = ui.configlist('color', status) + if cfgeffects: + good = [] + for e in cfgeffects: + if e in _effects: + good.append(e) + else: + ui.warn(_("ignoring unknown color/effect %r " + "(configured in color.%s)\n") + % (e, status)) + _styles[status] = ' '.join(good) + +class colorui(uimod.ui): + def popbuffer(self, labeled=False): + if labeled: + return ''.join(self.label(a, label) for a, label + in self._buffers.pop()) + return ''.join(a for a, label in self._buffers.pop()) + + _colormode = 'ansi' + def write(self, *args, **opts): + label = opts.get('label', '') + if self._buffers: + self._buffers[-1].extend([(str(a), label) for a in args]) + elif self._colormode == 'win32': + for a in args: + win32print(a, super(colorui, self).write, **opts) + else: + return super(colorui, self).write( + *[self.label(str(a), label) for a in args], **opts) + + def write_err(self, *args, **opts): + label = opts.get('label', '') + if self._colormode == 'win32': + for a in args: + win32print(a, super(colorui, self).write_err, **opts) + else: + return super(colorui, self).write_err( + *[self.label(str(a), label) for a in args], **opts) + + def label(self, msg, label): + effects = [] + for l in label.split(): + s = _styles.get(l, '') + if s: + effects.append(s) + effects = ''.join(effects) + if effects: + return '\n'.join([render_effects(s, effects) + for s in msg.split('\n')]) + return msg + + +def uisetup(ui): + if ui.plain(): + return + mode = ui.config('color', 'mode', 'auto') + if mode == 'auto': + if os.name == 'nt' and 'TERM' not in os.environ: + # looks line a cmd.exe console, use win32 API or nothing + mode = w32effects and 'win32' or 'none' + else: + mode = 'ansi' + if mode == 'win32': + if w32effects is None: + # only warn if color.mode is explicitly set to win32 + ui.warn(_('win32console not found, please install pywin32\n')) + return + _effects.update(w32effects) + elif mode != 'ansi': + return + def colorcmd(orig, ui_, opts, cmd, 
cmdfunc): + coloropt = opts['color'] + auto = coloropt == 'auto' + always = util.parsebool(coloropt) + if (always or + (always is None and + (auto and (os.environ.get('TERM') != 'dumb' and ui_.formatted())))): + colorui._colormode = mode + colorui.__bases__ = (ui_.__class__,) + ui_.__class__ = colorui + extstyles() + configstyles(ui_) + return orig(ui_, opts, cmd, cmdfunc) + extensions.wrapfunction(dispatch, '_runcommand', colorcmd) + +def extsetup(ui): + commands.globalopts.append( + ('', 'color', 'auto', + # i18n: 'always', 'auto', and 'never' are keywords and should + # not be translated + _("when to colorize (boolean, always, auto, or never)"), + _('TYPE'))) + +try: + import re, pywintypes, win32console as win32c + + # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx + w32effects = { + 'none': -1, + 'black': 0, + 'red': win32c.FOREGROUND_RED, + 'green': win32c.FOREGROUND_GREEN, + 'yellow': win32c.FOREGROUND_RED | win32c.FOREGROUND_GREEN, + 'blue': win32c.FOREGROUND_BLUE, + 'magenta': win32c.FOREGROUND_BLUE | win32c.FOREGROUND_RED, + 'cyan': win32c.FOREGROUND_BLUE | win32c.FOREGROUND_GREEN, + 'white': (win32c.FOREGROUND_RED | win32c.FOREGROUND_GREEN | + win32c.FOREGROUND_BLUE), + 'bold': win32c.FOREGROUND_INTENSITY, + 'black_background': 0x100, # unused value > 0x0f + 'red_background': win32c.BACKGROUND_RED, + 'green_background': win32c.BACKGROUND_GREEN, + 'yellow_background': win32c.BACKGROUND_RED | win32c.BACKGROUND_GREEN, + 'blue_background': win32c.BACKGROUND_BLUE, + 'purple_background': win32c.BACKGROUND_BLUE | win32c.BACKGROUND_RED, + 'cyan_background': win32c.BACKGROUND_BLUE | win32c.BACKGROUND_GREEN, + 'white_background': (win32c.BACKGROUND_RED | win32c.BACKGROUND_GREEN | + win32c.BACKGROUND_BLUE), + 'bold_background': win32c.BACKGROUND_INTENSITY, + 'underline': win32c.COMMON_LVB_UNDERSCORE, # double-byte charsets only + 'inverse': win32c.COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only + } + + passthrough = 
set([win32c.FOREGROUND_INTENSITY, + win32c.BACKGROUND_INTENSITY, + win32c.COMMON_LVB_UNDERSCORE, + win32c.COMMON_LVB_REVERSE_VIDEO]) + + try: + stdout = win32c.GetStdHandle(win32c.STD_OUTPUT_HANDLE) + if stdout is None: + raise ImportError() + origattr = stdout.GetConsoleScreenBufferInfo()['Attributes'] + except pywintypes.error: + # stdout may be defined but not support + # GetConsoleScreenBufferInfo(), when called from subprocess or + # redirected. + raise ImportError() + ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)', re.MULTILINE | re.DOTALL) + + def win32print(text, orig, **opts): + label = opts.get('label', '') + attr = origattr + + def mapcolor(val, attr): + if val == -1: + return origattr + elif val in passthrough: + return attr | val + elif val > 0x0f: + return (val & 0x70) | (attr & 0x8f) + else: + return (val & 0x07) | (attr & 0xf8) + + # determine console attributes based on labels + for l in label.split(): + style = _styles.get(l, '') + for effect in style.split(): + attr = mapcolor(w32effects[effect], attr) + + # hack to ensure regexp finds data + if not text.startswith('\033['): + text = '\033[m' + text + + # Look for ANSI-like codes embedded in text + m = re.match(ansire, text) + while m: + for sattr in m.group(1).split(';'): + if sattr: + attr = mapcolor(int(sattr), attr) + stdout.SetConsoleTextAttribute(attr) + orig(m.group(2), **opts) + m = re.match(ansire, m.group(3)) + + # Explicity reset original attributes + stdout.SetConsoleTextAttribute(origattr) + +except ImportError: + w32effects = None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo Binary files differnew file mode 100644 index 0000000..2f131ee --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py new file mode 100644 
index 0000000..be7aca5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py @@ -0,0 +1,321 @@ +# convert.py Foreign SCM converter +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''import revisions from foreign VCS repositories into Mercurial''' + +import convcmd +import cvsps +import subversion +from mercurial import commands +from mercurial.i18n import _ + +# Commands definition was moved elsewhere to ease demandload job. + +def convert(ui, src, dest=None, revmapfile=None, **opts): + """convert a foreign SCM repository to a Mercurial one. + + Accepted source formats [identifiers]: + + - Mercurial [hg] + - CVS [cvs] + - Darcs [darcs] + - git [git] + - Subversion [svn] + - Monotone [mtn] + - GNU Arch [gnuarch] + - Bazaar [bzr] + - Perforce [p4] + + Accepted destination formats [identifiers]: + + - Mercurial [hg] + - Subversion [svn] (history on branches is not preserved) + + If no revision is given, all revisions will be converted. + Otherwise, convert will only import up to the named revision + (given in a format understood by the source). + + If no destination directory name is specified, it defaults to the + basename of the source with ``-hg`` appended. If the destination + repository doesn't exist, it will be created. + + By default, all sources except Mercurial will use --branchsort. + Mercurial uses --sourcesort to preserve original revision numbers + order. Sort modes have the following effects: + + --branchsort convert from parent to child revision when possible, + which means branches are usually converted one after + the other. It generates more compact repositories. + + --datesort sort revisions by date. Converted repositories have + good-looking changelogs but are often an order of + magnitude larger than the same ones generated by + --branchsort. 
+ + --sourcesort try to preserve source revisions order, only + supported by Mercurial sources. + + If <REVMAP> isn't given, it will be put in a default location + (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file + that maps each source commit ID to the destination ID for that + revision, like so:: + + <source ID> <destination ID> + + If the file doesn't exist, it's automatically created. It's + updated on each commit copied, so :hg:`convert` can be interrupted + and can be run repeatedly to copy new commits. + + The authormap is a simple text file that maps each source commit + author to a destination commit author. It is handy for source SCMs + that use unix logins to identify authors (eg: CVS). One line per + author mapping and the line format is:: + + source author = destination author + + Empty lines and lines starting with a ``#`` are ignored. + + The filemap is a file that allows filtering and remapping of files + and directories. Each line can contain one of the following + directives:: + + include path/to/file-or-dir + + exclude path/to/file-or-dir + + rename path/to/source path/to/destination + + Comment lines start with ``#``. A specified path matches if it + equals the full relative name of a file or one of its parent + directories. The ``include`` or ``exclude`` directive with the + longest matching path applies, so line order does not matter. + + The ``include`` directive causes a file, or all files under a + directory, to be included in the destination repository, and the + exclusion of all other files and directories not explicitly + included. The ``exclude`` directive causes files or directories to + be omitted. The ``rename`` directive renames a file or directory if + it is converted. To rename from a subdirectory into the root of + the repository, use ``.`` as the path to rename to. + + The splicemap is a file that allows insertion of synthetic + history, letting you specify the parents of a revision. 
This is + useful if you want to e.g. give a Subversion merge two parents, or + graft two disconnected series of history together. Each entry + contains a key, followed by a space, followed by one or two + comma-separated values:: + + key parent1, parent2 + + The key is the revision ID in the source + revision control system whose parents should be modified (same + format as a key in .hg/shamap). The values are the revision IDs + (in either the source or destination revision control system) that + should be used as the new parents for that node. For example, if + you have merged "release-1.0" into "trunk", then you should + specify the revision on "trunk" as the first parent and the one on + the "release-1.0" branch as the second. + + The branchmap is a file that allows you to rename a branch when it is + being brought in from whatever external repository. When used in + conjunction with a splicemap, it allows for a powerful combination + to help fix even the most badly mismanaged repositories and turn them + into nicely structured Mercurial repositories. The branchmap contains + lines of the form:: + + original_branch_name new_branch_name + + where "original_branch_name" is the name of the branch in the + source repository, and "new_branch_name" is the name of the branch + is the destination repository. No whitespace is allowed in the + branch names. This can be used to (for instance) move code in one + repository from "default" to a named branch. + + Mercurial Source + '''''''''''''''' + + --config convert.hg.ignoreerrors=False (boolean) + ignore integrity errors when reading. Use it to fix Mercurial + repositories with missing revlogs, by converting from and to + Mercurial. + --config convert.hg.saverev=False (boolean) + store original revision ID in changeset (forces target IDs to + change) + --config convert.hg.startrev=0 (hg revision identifier) + convert start revision and its descendants + + CVS Source + '''''''''' + + CVS source will use a sandbox (i.e. 
a checked-out copy) from CVS + to indicate the starting point of what will be converted. Direct + access to the repository files is not needed, unless of course the + repository is :local:. The conversion uses the top level directory + in the sandbox to find the CVS repository, and then uses CVS rlog + commands to find files to convert. This means that unless a + filemap is given, all files under the starting directory will be + converted, and that any directory reorganization in the CVS + sandbox is ignored. + + The options shown are the defaults. + + --config convert.cvsps.cache=True (boolean) + Set to False to disable remote log caching, for testing and + debugging purposes. + --config convert.cvsps.fuzz=60 (integer) + Specify the maximum time (in seconds) that is allowed between + commits with identical user and log message in a single + changeset. When very large files were checked in as part of a + changeset then the default may not be long enough. + --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}' + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + insert a dummy revision merging the branch on which this log + message occurs to the branch indicated in the regex. + --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}' + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + add the most recent revision on the branch indicated in the + regex as the second parent of the changeset. + --config hook.cvslog + Specify a Python function to be called at the end of gathering + the CVS log. The function is passed a list with the log entries, + and can modify the entries in-place, or add or delete them. + --config hook.cvschangesets + Specify a Python function to be called after the changesets + are calculated from the the CVS log. 
The function is passed + a list with the changeset entries, and can modify the changesets + in-place, or add or delete them. + + An additional "debugcvsps" Mercurial command allows the builtin + changeset merging code to be run without doing a conversion. Its + parameters and output are similar to that of cvsps 2.1. Please see + the command help for more details. + + Subversion Source + ''''''''''''''''' + + Subversion source detects classical trunk/branches/tags layouts. + By default, the supplied "svn://repo/path/" source URL is + converted as a single branch. If "svn://repo/path/trunk" exists it + replaces the default branch. If "svn://repo/path/branches" exists, + its subdirectories are listed as possible branches. If + "svn://repo/path/tags" exists, it is looked for tags referencing + converted branches. Default "trunk", "branches" and "tags" values + can be overridden with following options. Set them to paths + relative to the source URL, or leave them blank to disable auto + detection. + + --config convert.svn.branches=branches (directory name) + specify the directory containing branches + --config convert.svn.tags=tags (directory name) + specify the directory containing tags + --config convert.svn.trunk=trunk (directory name) + specify the name of the trunk branch + + Source history can be retrieved starting at a specific revision, + instead of being integrally converted. Only single branch + conversions are supported. + + --config convert.svn.startrev=0 (svn revision number) + specify start Subversion revision. + + Perforce Source + ''''''''''''''' + + The Perforce (P4) importer can be given a p4 depot path or a + client specification as source. It will convert all files in the + source to a flat Mercurial repository, ignoring labels, branches + and integrations. Note that when a depot path is given you then + usually should specify a target directory, because otherwise the + target may be named ...-hg. 
+ + It is possible to limit the amount of source history to be + converted by specifying an initial Perforce revision. + + --config convert.p4.startrev=0 (perforce changelist number) + specify initial Perforce revision. + + Mercurial Destination + ''''''''''''''''''''' + + --config convert.hg.clonebranches=False (boolean) + dispatch source branches in separate clones. + --config convert.hg.tagsbranch=default (branch name) + tag revisions branch name + --config convert.hg.usebranchnames=True (boolean) + preserve branch names + + """ + return convcmd.convert(ui, src, dest, revmapfile, **opts) + +def debugsvnlog(ui, **opts): + return subversion.debugsvnlog(ui, **opts) + +def debugcvsps(ui, *args, **opts): + '''create changeset information from CVS + + This command is intended as a debugging tool for the CVS to + Mercurial converter, and can be used as a direct replacement for + cvsps. + + Hg debugcvsps reads the CVS rlog for current directory (or any + named directory) in the CVS repository, and converts the log to a + series of changesets based on matching commit log entries and + dates.''' + return cvsps.debugcvsps(ui, *args, **opts) + +commands.norepo += " convert debugsvnlog debugcvsps" + +cmdtable = { + "convert": + (convert, + [('', 'authors', '', + _('username mapping filename (DEPRECATED, use --authormap instead)'), + _('FILE')), + ('s', 'source-type', '', + _('source repository type'), _('TYPE')), + ('d', 'dest-type', '', + _('destination repository type'), _('TYPE')), + ('r', 'rev', '', + _('import up to target revision REV'), _('REV')), + ('A', 'authormap', '', + _('remap usernames using this file'), _('FILE')), + ('', 'filemap', '', + _('remap file names using contents of file'), _('FILE')), + ('', 'splicemap', '', + _('splice synthesized history into place'), _('FILE')), + ('', 'branchmap', '', + _('change branch names while converting'), _('FILE')), + ('', 'branchsort', None, _('try to sort changesets by branches')), + ('', 'datesort', None, _('try to 
sort changesets by date')), + ('', 'sourcesort', None, _('preserve source changesets order'))], + _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]')), + "debugsvnlog": + (debugsvnlog, + [], + 'hg debugsvnlog'), + "debugcvsps": + (debugcvsps, + [ + # Main options shared with cvsps-2.1 + ('b', 'branches', [], _('only return changes on specified branches')), + ('p', 'prefix', '', _('prefix to remove from file names')), + ('r', 'revisions', [], + _('only return changes after or between specified tags')), + ('u', 'update-cache', None, _("update cvs log cache")), + ('x', 'new-cache', None, _("create new cvs log cache")), + ('z', 'fuzz', 60, _('set commit time fuzz in seconds')), + ('', 'root', '', _('specify cvsroot')), + # Options specific to builtin cvsps + ('', 'parents', '', _('show parent changesets')), + ('', 'ancestors', '', _('show current changeset in ancestor branches')), + # Options that are ignored for compatibility with cvsps-2.1 + ('A', 'cvs-direct', None, _('ignored for compatibility')), + ], + _('hg debugcvsps [OPTION]... [PATH]...')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo Binary files differnew file mode 100644 index 0000000..892b438 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py new file mode 100644 index 0000000..cc16258 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py @@ -0,0 +1,260 @@ +# bzr.py - bzr support for the convert extension +# +# Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +# This module is for handling 'bzr', that was formerly known as Bazaar-NG; +# it cannot access 'bar' repositories, but they were never used very much + +import os +from mercurial import demandimport +# these do not work with demandimport, blacklist +demandimport.ignore.extend([ + 'bzrlib.transactions', + 'bzrlib.urlutils', + 'ElementPath', + ]) + +from mercurial.i18n import _ +from mercurial import util +from common import NoRepo, commit, converter_source + +try: + # bazaar imports + from bzrlib import branch, revision, errors + from bzrlib.revisionspec import RevisionSpec +except ImportError: + pass + +supportedkinds = ('file', 'symlink') + +class bzr_source(converter_source): + """Reads Bazaar repositories by using the Bazaar Python libraries""" + + def __init__(self, ui, path, rev=None): + super(bzr_source, self).__init__(ui, path, rev=rev) + + if not os.path.exists(os.path.join(path, '.bzr')): + raise NoRepo(_('%s does not look like a Bazaar repository') + % path) + + try: + # access bzrlib stuff + branch + except NameError: + raise NoRepo(_('Bazaar modules could not be loaded')) + + path = os.path.abspath(path) + self._checkrepotype(path) + self.branch = branch.Branch.open(path) + self.sourcerepo = self.branch.repository + self._parentids = {} + + def _checkrepotype(self, path): + # Lightweight checkouts detection is informational but probably + # fragile at API level. It should not terminate the conversion. 
+ try: + from bzrlib import bzrdir + dir = bzrdir.BzrDir.open_containing(path)[0] + try: + tree = dir.open_workingtree(recommend_upgrade=False) + branch = tree.branch + except (errors.NoWorkingTree, errors.NotLocalUrl): + tree = None + branch = dir.open_branch() + if (tree is not None and tree.bzrdir.root_transport.base != + branch.bzrdir.root_transport.base): + self.ui.warn(_('warning: lightweight checkouts may cause ' + 'conversion failures, try with a regular ' + 'branch instead.\n')) + except: + self.ui.note(_('bzr source type could not be determined\n')) + + def before(self): + """Before the conversion begins, acquire a read lock + for all the operations that might need it. Fortunately + read locks don't block other reads or writes to the + repository, so this shouldn't have any impact on the usage of + the source repository. + + The alternative would be locking on every operation that + needs locks (there are currently two: getting the file and + getting the parent map) and releasing immediately after, + but this approach can take even 40% longer.""" + self.sourcerepo.lock_read() + + def after(self): + self.sourcerepo.unlock() + + def getheads(self): + if not self.rev: + return [self.branch.last_revision()] + try: + r = RevisionSpec.from_string(self.rev) + info = r.in_history(self.branch) + except errors.BzrError: + raise util.Abort(_('%s is not a valid revision in current branch') + % self.rev) + return [info.rev_id] + + def getfile(self, name, rev): + revtree = self.sourcerepo.revision_tree(rev) + fileid = revtree.path2id(name.decode(self.encoding or 'utf-8')) + kind = None + if fileid is not None: + kind = revtree.kind(fileid) + if kind not in supportedkinds: + # the file is not available anymore - was deleted + raise IOError(_('%s is not available in %s anymore') % + (name, rev)) + mode = self._modecache[(name, rev)] + if kind == 'symlink': + target = revtree.get_symlink_target(fileid) + if target is None: + raise util.Abort(_('%s.%s symlink has no 
target') + % (name, rev)) + return target, mode + else: + sio = revtree.get_file(fileid) + return sio.read(), mode + + def getchanges(self, version): + # set up caches: modecache and revtree + self._modecache = {} + self._revtree = self.sourcerepo.revision_tree(version) + # get the parentids from the cache + parentids = self._parentids.pop(version) + # only diff against first parent id + prevtree = self.sourcerepo.revision_tree(parentids[0]) + return self._gettreechanges(self._revtree, prevtree) + + def getcommit(self, version): + rev = self.sourcerepo.get_revision(version) + # populate parent id cache + if not rev.parent_ids: + parents = [] + self._parentids[version] = (revision.NULL_REVISION,) + else: + parents = self._filterghosts(rev.parent_ids) + self._parentids[version] = parents + + return commit(parents=parents, + date='%d %d' % (rev.timestamp, -rev.timezone), + author=self.recode(rev.committer), + # bzr returns bytestrings or unicode, depending on the content + desc=self.recode(rev.message), + rev=version) + + def gettags(self): + if not self.branch.supports_tags(): + return {} + tagdict = self.branch.tags.get_tag_dict() + bytetags = {} + for name, rev in tagdict.iteritems(): + bytetags[self.recode(name)] = rev + return bytetags + + def getchangedfiles(self, rev, i): + self._modecache = {} + curtree = self.sourcerepo.revision_tree(rev) + if i is not None: + parentid = self._parentids[rev][i] + else: + # no parent id, get the empty revision + parentid = revision.NULL_REVISION + + prevtree = self.sourcerepo.revision_tree(parentid) + changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]] + return changes + + def _gettreechanges(self, current, origin): + revid = current._revision_id + changes = [] + renames = {} + for (fileid, paths, changed_content, versioned, parent, name, + kind, executable) in current.iter_changes(origin): + + if paths[0] == u'' or paths[1] == u'': + # ignore changes to tree root + continue + + # bazaar tracks directories, 
mercurial does not, so + # we have to rename the directory contents + if kind[1] == 'directory': + if kind[0] not in (None, 'directory'): + # Replacing 'something' with a directory, record it + # so it can be removed. + changes.append((self.recode(paths[0]), revid)) + + if None not in paths and paths[0] != paths[1]: + # neither an add nor an delete - a move + # rename all directory contents manually + subdir = origin.inventory.path2id(paths[0]) + # get all child-entries of the directory + for name, entry in origin.inventory.iter_entries(subdir): + # hg does not track directory renames + if entry.kind == 'directory': + continue + frompath = self.recode(paths[0] + '/' + name) + topath = self.recode(paths[1] + '/' + name) + # register the files as changed + changes.append((frompath, revid)) + changes.append((topath, revid)) + # add to mode cache + mode = ((entry.executable and 'x') + or (entry.kind == 'symlink' and 's') + or '') + self._modecache[(topath, revid)] = mode + # register the change as move + renames[topath] = frompath + + # no futher changes, go to the next change + continue + + # we got unicode paths, need to convert them + path, topath = [self.recode(part) for part in paths] + + if topath is None: + # file deleted + changes.append((path, revid)) + continue + + # renamed + if path and path != topath: + renames[topath] = path + changes.append((path, revid)) + + # populate the mode cache + kind, executable = [e[1] for e in (kind, executable)] + mode = ((executable and 'x') or (kind == 'symlink' and 'l') + or '') + self._modecache[(topath, revid)] = mode + changes.append((topath, revid)) + + return changes, renames + + def _filterghosts(self, ids): + """Filters out ghost revisions which hg does not support, see + <http://bazaar-vcs.org/GhostRevision> + """ + parentmap = self.sourcerepo.get_parent_map(ids) + parents = tuple([parent for parent in ids if parent in parentmap]) + return parents + + def recode(self, s, encoding=None): + """This version of recode 
tries to encode unicode to bytecode, + and preferably using the UTF-8 codec. + Other types than Unicode are silently returned, this is by + intention, e.g. the None-type is not going to be encoded but instead + just passed through + """ + if not encoding: + encoding = self.encoding or 'utf-8' + + if isinstance(s, unicode): + return s.encode(encoding) + else: + # leave it alone + return s diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo Binary files differnew file mode 100644 index 0000000..ab47e99 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py new file mode 100644 index 0000000..fb3865f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py @@ -0,0 +1,389 @@ +# common.py - common code for the convert extension +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import base64, errno +import os +import cPickle as pickle +from mercurial import util +from mercurial.i18n import _ + +def encodeargs(args): + def encodearg(s): + lines = base64.encodestring(s) + lines = [l.splitlines()[0] for l in lines] + return ''.join(lines) + + s = pickle.dumps(args) + return encodearg(s) + +def decodeargs(s): + s = base64.decodestring(s) + return pickle.loads(s) + +class MissingTool(Exception): + pass + +def checktool(exe, name=None, abort=True): + name = name or exe + if not util.find_exe(exe): + exc = abort and util.Abort or MissingTool + raise exc(_('cannot find required "%s" tool') % name) + +class NoRepo(Exception): + pass + +SKIPREV = 'SKIP' + +class commit(object): + def __init__(self, author, date, desc, parents, branch=None, rev=None, + extra={}, sortkey=None): + self.author = author or 'unknown' + self.date = date or '0 0' + self.desc = desc + self.parents = parents + self.branch = branch + self.rev = rev + self.extra = extra + self.sortkey = sortkey + +class converter_source(object): + """Conversion source interface""" + + def __init__(self, ui, path=None, rev=None): + """Initialize conversion source (or raise NoRepo("message") + exception if path is not a valid repository)""" + self.ui = ui + self.path = path + self.rev = rev + + self.encoding = 'utf-8' + + def before(self): + pass + + def after(self): + pass + + def setrevmap(self, revmap): + """set the map of already-converted revisions""" + pass + + def getheads(self): + """Return a list of this repository's heads""" + raise NotImplementedError() + + def getfile(self, name, rev): + """Return a pair (data, mode) where data is the file content + as a string and mode one of '', 'x' or 'l'. rev is the + identifier returned by a previous call to getchanges(). Raise + IOError to indicate that name was deleted in rev. + """ + raise NotImplementedError() + + def getchanges(self, version): + """Returns a tuple of (files, copies). 
+ + files is a sorted list of (filename, id) tuples for all files + changed between version and its first parent returned by + getcommit(). id is the source revision id of the file. + + copies is a dictionary of dest: source + """ + raise NotImplementedError() + + def getcommit(self, version): + """Return the commit object for version""" + raise NotImplementedError() + + def gettags(self): + """Return the tags as a dictionary of name: revision + + Tag names must be UTF-8 strings. + """ + raise NotImplementedError() + + def recode(self, s, encoding=None): + if not encoding: + encoding = self.encoding or 'utf-8' + + if isinstance(s, unicode): + return s.encode("utf-8") + try: + return s.decode(encoding).encode("utf-8") + except: + try: + return s.decode("latin-1").encode("utf-8") + except: + return s.decode(encoding, "replace").encode("utf-8") + + def getchangedfiles(self, rev, i): + """Return the files changed by rev compared to parent[i]. + + i is an index selecting one of the parents of rev. The return + value should be the list of files that are different in rev and + this parent. + + If rev has no parents, i is None. + + This function is only needed to support --filemap + """ + raise NotImplementedError() + + def converted(self, rev, sinkrev): + '''Notify the source that a revision has been converted.''' + pass + + def hasnativeorder(self): + """Return true if this source has a meaningful, native revision + order. For instance, Mercurial revisions are store sequentially + while there is no such global ordering with Darcs. + """ + return False + + def lookuprev(self, rev): + """If rev is a meaningful revision reference in source, return + the referenced identifier in the same format used by getcommit(). + return None otherwise. 
+ """ + return None + +class converter_sink(object): + """Conversion sink (target) interface""" + + def __init__(self, ui, path): + """Initialize conversion sink (or raise NoRepo("message") + exception if path is not a valid repository) + + created is a list of paths to remove if a fatal error occurs + later""" + self.ui = ui + self.path = path + self.created = [] + + def getheads(self): + """Return a list of this repository's heads""" + raise NotImplementedError() + + def revmapfile(self): + """Path to a file that will contain lines + source_rev_id sink_rev_id + mapping equivalent revision identifiers for each system.""" + raise NotImplementedError() + + def authorfile(self): + """Path to a file that will contain lines + srcauthor=dstauthor + mapping equivalent authors identifiers for each system.""" + return None + + def putcommit(self, files, copies, parents, commit, source, revmap): + """Create a revision with all changed files listed in 'files' + and having listed parents. 'commit' is a commit object + containing at a minimum the author, date, and message for this + changeset. 'files' is a list of (path, version) tuples, + 'copies' is a dictionary mapping destinations to sources, + 'source' is the source repository, and 'revmap' is a mapfile + of source revisions to converted revisions. Only getfile() and + lookuprev() should be called on 'source'. + + Note that the sink repository is not told to update itself to + a particular revision (or even what that revision would be) + before it receives the file data. + """ + raise NotImplementedError() + + def puttags(self, tags): + """Put tags into sink. + + tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string. + Return a pair (tag_revision, tag_parent_revision), or (None, None) + if nothing was changed. + """ + raise NotImplementedError() + + def setbranch(self, branch, pbranches): + """Set the current branch name. Called before the first putcommit + on the branch. 
+ branch: branch name for subsequent commits + pbranches: (converted parent revision, parent branch) tuples""" + pass + + def setfilemapmode(self, active): + """Tell the destination that we're using a filemap + + Some converter_sources (svn in particular) can claim that a file + was changed in a revision, even if there was no change. This method + tells the destination that we're using a filemap and that it should + filter empty revisions. + """ + pass + + def before(self): + pass + + def after(self): + pass + + +class commandline(object): + def __init__(self, ui, command): + self.ui = ui + self.command = command + + def prerun(self): + pass + + def postrun(self): + pass + + def _cmdline(self, cmd, *args, **kwargs): + cmdline = [self.command, cmd] + list(args) + for k, v in kwargs.iteritems(): + if len(k) == 1: + cmdline.append('-' + k) + else: + cmdline.append('--' + k.replace('_', '-')) + try: + if len(k) == 1: + cmdline.append('' + v) + else: + cmdline[-1] += '=' + v + except TypeError: + pass + cmdline = [util.shellquote(arg) for arg in cmdline] + if not self.ui.debugflag: + cmdline += ['2>', util.nulldev] + cmdline += ['<', util.nulldev] + cmdline = ' '.join(cmdline) + return cmdline + + def _run(self, cmd, *args, **kwargs): + cmdline = self._cmdline(cmd, *args, **kwargs) + self.ui.debug('running: %s\n' % (cmdline,)) + self.prerun() + try: + return util.popen(cmdline) + finally: + self.postrun() + + def run(self, cmd, *args, **kwargs): + fp = self._run(cmd, *args, **kwargs) + output = fp.read() + self.ui.debug(output) + return output, fp.close() + + def runlines(self, cmd, *args, **kwargs): + fp = self._run(cmd, *args, **kwargs) + output = fp.readlines() + self.ui.debug(''.join(output)) + return output, fp.close() + + def checkexit(self, status, output=''): + if status: + if output: + self.ui.warn(_('%s error:\n') % self.command) + self.ui.warn(output) + msg = util.explain_exit(status)[0] + raise util.Abort('%s %s' % (self.command, msg)) + + def run0(self, 
cmd, *args, **kwargs): + output, status = self.run(cmd, *args, **kwargs) + self.checkexit(status, output) + return output + + def runlines0(self, cmd, *args, **kwargs): + output, status = self.runlines(cmd, *args, **kwargs) + self.checkexit(status, ''.join(output)) + return output + + def getargmax(self): + if '_argmax' in self.__dict__: + return self._argmax + + # POSIX requires at least 4096 bytes for ARG_MAX + self._argmax = 4096 + try: + self._argmax = os.sysconf("SC_ARG_MAX") + except: + pass + + # Windows shells impose their own limits on command line length, + # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes + # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for + # details about cmd.exe limitations. + + # Since ARG_MAX is for command line _and_ environment, lower our limit + # (and make happy Windows shells while doing this). + + self._argmax = self._argmax / 2 - 1 + return self._argmax + + def limit_arglist(self, arglist, cmd, *args, **kwargs): + limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs)) + bytes = 0 + fl = [] + for fn in arglist: + b = len(fn) + 3 + if bytes + b < limit or len(fl) == 0: + fl.append(fn) + bytes += b + else: + yield fl + fl = [fn] + bytes = b + if fl: + yield fl + + def xargs(self, arglist, cmd, *args, **kwargs): + for l in self.limit_arglist(arglist, cmd, *args, **kwargs): + self.run0(cmd, *(list(args) + l), **kwargs) + +class mapfile(dict): + def __init__(self, ui, path): + super(mapfile, self).__init__() + self.ui = ui + self.path = path + self.fp = None + self.order = [] + self._read() + + def _read(self): + if not self.path: + return + try: + fp = open(self.path, 'r') + except IOError, err: + if err.errno != errno.ENOENT: + raise + return + for i, line in enumerate(fp): + try: + key, value = line.splitlines()[0].rsplit(' ', 1) + except ValueError: + raise util.Abort( + _('syntax error in %s(%d): key/value pair expected') + % (self.path, i + 1)) + if key not in self: + 
self.order.append(key) + super(mapfile, self).__setitem__(key, value) + fp.close() + + def __setitem__(self, key, value): + if self.fp is None: + try: + self.fp = open(self.path, 'a') + except IOError, err: + raise util.Abort(_('could not open map file %r: %s') % + (self.path, err.strerror)) + self.fp.write('%s %s\n' % (key, value)) + self.fp.flush() + super(mapfile, self).__setitem__(key, value) + + def close(self): + if self.fp: + self.fp.close() + self.fp = None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo Binary files differnew file mode 100644 index 0000000..de20000 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py new file mode 100644 index 0000000..ac91b41 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py @@ -0,0 +1,434 @@ +# convcmd - convert extension commands definition +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from common import NoRepo, MissingTool, SKIPREV, mapfile +from cvs import convert_cvs +from darcs import darcs_source +from git import convert_git +from hg import mercurial_source, mercurial_sink +from subversion import svn_source, svn_sink +from monotone import monotone_source +from gnuarch import gnuarch_source +from bzr import bzr_source +from p4 import p4_source +import filemap + +import os, shutil +from mercurial import hg, util, encoding +from mercurial.i18n import _ + +orig_encoding = 'ascii' + +def recode(s): + if isinstance(s, unicode): + return s.encode(orig_encoding, 'replace') + else: + return s.decode('utf-8').encode(orig_encoding, 'replace') + +source_converters = [ + ('cvs', convert_cvs, 'branchsort'), + ('git', convert_git, 'branchsort'), + ('svn', svn_source, 'branchsort'), + ('hg', mercurial_source, 'sourcesort'), + ('darcs', darcs_source, 'branchsort'), + ('mtn', monotone_source, 'branchsort'), + ('gnuarch', gnuarch_source, 'branchsort'), + ('bzr', bzr_source, 'branchsort'), + ('p4', p4_source, 'branchsort'), + ] + +sink_converters = [ + ('hg', mercurial_sink), + ('svn', svn_sink), + ] + +def convertsource(ui, path, type, rev): + exceptions = [] + if type and type not in [s[0] for s in source_converters]: + raise util.Abort(_('%s: invalid source repository type') % type) + for name, source, sortmode in source_converters: + try: + if not type or name == type: + return source(ui, path, rev), sortmode + except (NoRepo, MissingTool), inst: + exceptions.append(inst) + if not ui.quiet: + for inst in exceptions: + ui.write("%s\n" % inst) + raise util.Abort(_('%s: missing or unsupported repository') % path) + +def convertsink(ui, path, type): + if type and type not in [s[0] for s in sink_converters]: + raise util.Abort(_('%s: invalid destination repository type') % type) + for name, sink in sink_converters: + try: + if not type or name == type: + return sink(ui, path) + except NoRepo, inst: + ui.note(_("convert: %s\n") % inst) + raise 
util.Abort(_('%s: unknown repository type') % path) + +class progresssource(object): + def __init__(self, ui, source, filecount): + self.ui = ui + self.source = source + self.filecount = filecount + self.retrieved = 0 + + def getfile(self, file, rev): + self.retrieved += 1 + self.ui.progress(_('getting files'), self.retrieved, + item=file, total=self.filecount) + return self.source.getfile(file, rev) + + def lookuprev(self, rev): + return self.source.lookuprev(rev) + + def close(self): + self.ui.progress(_('getting files'), None) + +class converter(object): + def __init__(self, ui, source, dest, revmapfile, opts): + + self.source = source + self.dest = dest + self.ui = ui + self.opts = opts + self.commitcache = {} + self.authors = {} + self.authorfile = None + + # Record converted revisions persistently: maps source revision + # ID to target revision ID (both strings). (This is how + # incremental conversions work.) + self.map = mapfile(ui, revmapfile) + + # Read first the dst author map if any + authorfile = self.dest.authorfile() + if authorfile and os.path.exists(authorfile): + self.readauthormap(authorfile) + # Extend/Override with new author map if necessary + if opts.get('authormap'): + self.readauthormap(opts.get('authormap')) + self.authorfile = self.dest.authorfile() + + self.splicemap = mapfile(ui, opts.get('splicemap')) + self.branchmap = mapfile(ui, opts.get('branchmap')) + + def walktree(self, heads): + '''Return a mapping that identifies the uncommitted parents of every + uncommitted changeset.''' + visit = heads + known = set() + parents = {} + while visit: + n = visit.pop(0) + if n in known or n in self.map: + continue + known.add(n) + self.ui.progress(_('scanning'), len(known), unit=_('revisions')) + commit = self.cachecommit(n) + parents[n] = [] + for p in commit.parents: + parents[n].append(p) + visit.append(p) + self.ui.progress(_('scanning'), None) + + return parents + + def toposort(self, parents, sortmode): + '''Return an ordering such that 
every uncommitted changeset is + preceeded by all its uncommitted ancestors.''' + + def mapchildren(parents): + """Return a (children, roots) tuple where 'children' maps parent + revision identifiers to children ones, and 'roots' is the list of + revisions without parents. 'parents' must be a mapping of revision + identifier to its parents ones. + """ + visit = parents.keys() + seen = set() + children = {} + roots = [] + + while visit: + n = visit.pop(0) + if n in seen: + continue + seen.add(n) + # Ensure that nodes without parents are present in the + # 'children' mapping. + children.setdefault(n, []) + hasparent = False + for p in parents[n]: + if not p in self.map: + visit.append(p) + hasparent = True + children.setdefault(p, []).append(n) + if not hasparent: + roots.append(n) + + return children, roots + + # Sort functions are supposed to take a list of revisions which + # can be converted immediately and pick one + + def makebranchsorter(): + """If the previously converted revision has a child in the + eligible revisions list, pick it. Return the list head + otherwise. Branch sort attempts to minimize branch + switching, which is harmful for Mercurial backend + compression. 
+ """ + prev = [None] + def picknext(nodes): + next = nodes[0] + for n in nodes: + if prev[0] in parents[n]: + next = n + break + prev[0] = next + return next + return picknext + + def makesourcesorter(): + """Source specific sort.""" + keyfn = lambda n: self.commitcache[n].sortkey + def picknext(nodes): + return sorted(nodes, key=keyfn)[0] + return picknext + + def makedatesorter(): + """Sort revisions by date.""" + dates = {} + def getdate(n): + if n not in dates: + dates[n] = util.parsedate(self.commitcache[n].date) + return dates[n] + + def picknext(nodes): + return min([(getdate(n), n) for n in nodes])[1] + + return picknext + + if sortmode == 'branchsort': + picknext = makebranchsorter() + elif sortmode == 'datesort': + picknext = makedatesorter() + elif sortmode == 'sourcesort': + picknext = makesourcesorter() + else: + raise util.Abort(_('unknown sort mode: %s') % sortmode) + + children, actives = mapchildren(parents) + + s = [] + pendings = {} + while actives: + n = picknext(actives) + actives.remove(n) + s.append(n) + + # Update dependents list + for c in children.get(n, []): + if c not in pendings: + pendings[c] = [p for p in parents[c] if p not in self.map] + try: + pendings[c].remove(n) + except ValueError: + raise util.Abort(_('cycle detected between %s and %s') + % (recode(c), recode(n))) + if not pendings[c]: + # Parents are converted, node is eligible + actives.insert(0, c) + pendings[c] = None + + if len(s) != len(parents): + raise util.Abort(_("not all revisions were sorted")) + + return s + + def writeauthormap(self): + authorfile = self.authorfile + if authorfile: + self.ui.status(_('Writing author map file %s\n') % authorfile) + ofile = open(authorfile, 'w+') + for author in self.authors: + ofile.write("%s=%s\n" % (author, self.authors[author])) + ofile.close() + + def readauthormap(self, authorfile): + afile = open(authorfile, 'r') + for line in afile: + + line = line.strip() + if not line or line.startswith('#'): + continue + + try: + 
srcauthor, dstauthor = line.split('=', 1) + except ValueError: + msg = _('Ignoring bad line in author map file %s: %s\n') + self.ui.warn(msg % (authorfile, line.rstrip())) + continue + + srcauthor = srcauthor.strip() + dstauthor = dstauthor.strip() + if self.authors.get(srcauthor) in (None, dstauthor): + msg = _('mapping author %s to %s\n') + self.ui.debug(msg % (srcauthor, dstauthor)) + self.authors[srcauthor] = dstauthor + continue + + m = _('overriding mapping for author %s, was %s, will be %s\n') + self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor)) + + afile.close() + + def cachecommit(self, rev): + commit = self.source.getcommit(rev) + commit.author = self.authors.get(commit.author, commit.author) + commit.branch = self.branchmap.get(commit.branch, commit.branch) + self.commitcache[rev] = commit + return commit + + def copy(self, rev): + commit = self.commitcache[rev] + + changes = self.source.getchanges(rev) + if isinstance(changes, basestring): + if changes == SKIPREV: + dest = SKIPREV + else: + dest = self.map[changes] + self.map[rev] = dest + return + files, copies = changes + pbranches = [] + if commit.parents: + for prev in commit.parents: + if prev not in self.commitcache: + self.cachecommit(prev) + pbranches.append((self.map[prev], + self.commitcache[prev].branch)) + self.dest.setbranch(commit.branch, pbranches) + try: + parents = self.splicemap[rev].replace(',', ' ').split() + self.ui.status(_('spliced in %s as parents of %s\n') % + (parents, rev)) + parents = [self.map.get(p, p) for p in parents] + except KeyError: + parents = [b[0] for b in pbranches] + source = progresssource(self.ui, self.source, len(files)) + newnode = self.dest.putcommit(files, copies, parents, commit, + source, self.map) + source.close() + self.source.converted(rev, newnode) + self.map[rev] = newnode + + def convert(self, sortmode): + try: + self.source.before() + self.dest.before() + self.source.setrevmap(self.map) + self.ui.status(_("scanning source...\n")) + 
heads = self.source.getheads() + parents = self.walktree(heads) + self.ui.status(_("sorting...\n")) + t = self.toposort(parents, sortmode) + num = len(t) + c = None + + self.ui.status(_("converting...\n")) + for i, c in enumerate(t): + num -= 1 + desc = self.commitcache[c].desc + if "\n" in desc: + desc = desc.splitlines()[0] + # convert log message to local encoding without using + # tolocal() because the encoding.encoding convert() + # uses is 'utf-8' + self.ui.status("%d %s\n" % (num, recode(desc))) + self.ui.note(_("source: %s\n") % recode(c)) + self.ui.progress(_('converting'), i, unit=_('revisions'), + total=len(t)) + self.copy(c) + self.ui.progress(_('converting'), None) + + tags = self.source.gettags() + ctags = {} + for k in tags: + v = tags[k] + if self.map.get(v, SKIPREV) != SKIPREV: + ctags[k] = self.map[v] + + if c and ctags: + nrev, tagsparent = self.dest.puttags(ctags) + if nrev and tagsparent: + # write another hash correspondence to override the previous + # one so we don't end up with extra tag heads + tagsparents = [e for e in self.map.iteritems() + if e[1] == tagsparent] + if tagsparents: + self.map[tagsparents[0][0]] = nrev + + self.writeauthormap() + finally: + self.cleanup() + + def cleanup(self): + try: + self.dest.after() + finally: + self.source.after() + self.map.close() + +def convert(ui, src, dest=None, revmapfile=None, **opts): + global orig_encoding + orig_encoding = encoding.encoding + encoding.encoding = 'UTF-8' + + # support --authors as an alias for --authormap + if not opts.get('authormap'): + opts['authormap'] = opts.get('authors') + + if not dest: + dest = hg.defaultdest(src) + "-hg" + ui.status(_("assuming destination %s\n") % dest) + + destc = convertsink(ui, dest, opts.get('dest_type')) + + try: + srcc, defaultsort = convertsource(ui, src, opts.get('source_type'), + opts.get('rev')) + except Exception: + for path in destc.created: + shutil.rmtree(path, True) + raise + + sortmodes = ('branchsort', 'datesort', 'sourcesort') + 
sortmode = [m for m in sortmodes if opts.get(m)] + if len(sortmode) > 1: + raise util.Abort(_('more than one sort mode specified')) + sortmode = sortmode and sortmode[0] or defaultsort + if sortmode == 'sourcesort' and not srcc.hasnativeorder(): + raise util.Abort(_('--sourcesort is not supported by this data source')) + + fmap = opts.get('filemap') + if fmap: + srcc = filemap.filemap_source(ui, srcc, fmap) + destc.setfilemapmode(True) + + if not revmapfile: + try: + revmapfile = destc.revmapfile() + except: + revmapfile = os.path.join(destc, "map") + + c = converter(ui, srcc, destc, revmapfile, opts) + c.convert(sortmode) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo Binary files differnew file mode 100644 index 0000000..15f040a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py new file mode 100644 index 0000000..501fae2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py @@ -0,0 +1,271 @@ +# cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import os, re, socket, errno +from cStringIO import StringIO +from mercurial import encoding, util +from mercurial.i18n import _ + +from common import NoRepo, commit, converter_source, checktool +import cvsps + +class convert_cvs(converter_source): + def __init__(self, ui, path, rev=None): + super(convert_cvs, self).__init__(ui, path, rev=rev) + + cvs = os.path.join(path, "CVS") + if not os.path.exists(cvs): + raise NoRepo(_("%s does not look like a CVS checkout") % path) + + checktool('cvs') + + self.changeset = None + self.files = {} + self.tags = {} + self.lastbranch = {} + self.socket = None + self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1] + self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1] + self.encoding = encoding.encoding + + self._connect() + + def _parse(self): + if self.changeset is not None: + return + self.changeset = {} + + maxrev = 0 + if self.rev: + # TODO: handle tags + try: + # patchset number? + maxrev = int(self.rev) + except ValueError: + raise util.Abort(_('revision %s is not a patchset number') + % self.rev) + + d = os.getcwd() + try: + os.chdir(self.path) + id = None + + cache = 'update' + if not self.ui.configbool('convert', 'cvsps.cache', True): + cache = None + db = cvsps.createlog(self.ui, cache=cache) + db = cvsps.createchangeset(self.ui, db, + fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)), + mergeto=self.ui.config('convert', 'cvsps.mergeto', None), + mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None)) + + for cs in db: + if maxrev and cs.id > maxrev: + break + id = str(cs.id) + cs.author = self.recode(cs.author) + self.lastbranch[cs.branch] = id + cs.comment = self.recode(cs.comment) + date = util.datestr(cs.date) + self.tags.update(dict.fromkeys(cs.tags, id)) + + files = {} + for f in cs.entries: + files[f.file] = "%s%s" % ('.'.join([str(x) + for x in f.revision]), + ['', '(DEAD)'][f.dead]) + + # add current commit to set + c = commit(author=cs.author, date=date, + parents=[str(p.id) for 
p in cs.parents], + desc=cs.comment, branch=cs.branch or '') + self.changeset[id] = c + self.files[id] = files + + self.heads = self.lastbranch.values() + finally: + os.chdir(d) + + def _connect(self): + root = self.cvsroot + conntype = None + user, host = None, None + cmd = ['cvs', 'server'] + + self.ui.status(_("connecting to %s\n") % root) + + if root.startswith(":pserver:"): + root = root[9:] + m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)', + root) + if m: + conntype = "pserver" + user, passw, serv, port, root = m.groups() + if not user: + user = "anonymous" + if not port: + port = 2401 + else: + port = int(port) + format0 = ":pserver:%s@%s:%s" % (user, serv, root) + format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root) + + if not passw: + passw = "A" + cvspass = os.path.expanduser("~/.cvspass") + try: + pf = open(cvspass) + for line in pf.read().splitlines(): + part1, part2 = line.split(' ', 1) + if part1 == '/1': + # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z + part1, part2 = part2.split(' ', 1) + format = format1 + else: + # :pserver:user@example.com:/cvsroot/foo Ah<Z + format = format0 + if part1 == format: + passw = part2 + break + pf.close() + except IOError, inst: + if inst.errno != errno.ENOENT: + if not getattr(inst, 'filename', None): + inst.filename = cvspass + raise + + sck = socket.socket() + sck.connect((serv, port)) + sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw, + "END AUTH REQUEST", ""])) + if sck.recv(128) != "I LOVE YOU\n": + raise util.Abort(_("CVS pserver authentication failed")) + + self.writep = self.readp = sck.makefile('r+') + + if not conntype and root.startswith(":local:"): + conntype = "local" + root = root[7:] + + if not conntype: + # :ext:user@host/home/user/path/to/cvsroot + if root.startswith(":ext:"): + root = root[5:] + m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root) + # Do not take Windows path "c:\foo\bar" for a connection strings + if os.path.isdir(root) or not m: + conntype 
= "local" + else: + conntype = "rsh" + user, host, root = m.group(1), m.group(2), m.group(3) + + if conntype != "pserver": + if conntype == "rsh": + rsh = os.environ.get("CVS_RSH") or "ssh" + if user: + cmd = [rsh, '-l', user, host] + cmd + else: + cmd = [rsh, host] + cmd + + # popen2 does not support argument lists under Windows + cmd = [util.shellquote(arg) for arg in cmd] + cmd = util.quotecommand(' '.join(cmd)) + self.writep, self.readp = util.popen2(cmd) + + self.realroot = root + + self.writep.write("Root %s\n" % root) + self.writep.write("Valid-responses ok error Valid-requests Mode" + " M Mbinary E Checked-in Created Updated" + " Merged Removed\n") + self.writep.write("valid-requests\n") + self.writep.flush() + r = self.readp.readline() + if not r.startswith("Valid-requests"): + raise util.Abort(_('unexpected response from CVS server ' + '(expected "Valid-requests", but got %r)') + % r) + if "UseUnchanged" in r: + self.writep.write("UseUnchanged\n") + self.writep.flush() + r = self.readp.readline() + + def getheads(self): + self._parse() + return self.heads + + def getfile(self, name, rev): + + def chunkedread(fp, count): + # file-objects returned by socked.makefile() do not handle + # large read() requests very well. 
+ chunksize = 65536 + output = StringIO() + while count > 0: + data = fp.read(min(count, chunksize)) + if not data: + raise util.Abort(_("%d bytes missing from remote file") + % count) + count -= len(data) + output.write(data) + return output.getvalue() + + self._parse() + if rev.endswith("(DEAD)"): + raise IOError + + args = ("-N -P -kk -r %s --" % rev).split() + args.append(self.cvsrepo + '/' + name) + for x in args: + self.writep.write("Argument %s\n" % x) + self.writep.write("Directory .\n%s\nco\n" % self.realroot) + self.writep.flush() + + data = "" + mode = None + while 1: + line = self.readp.readline() + if line.startswith("Created ") or line.startswith("Updated "): + self.readp.readline() # path + self.readp.readline() # entries + mode = self.readp.readline()[:-1] + count = int(self.readp.readline()[:-1]) + data = chunkedread(self.readp, count) + elif line.startswith(" "): + data += line[1:] + elif line.startswith("M "): + pass + elif line.startswith("Mbinary "): + count = int(self.readp.readline()[:-1]) + data = chunkedread(self.readp, count) + else: + if line == "ok\n": + if mode is None: + raise util.Abort(_('malformed response from CVS')) + return (data, "x" in mode and "x" or "") + elif line.startswith("E "): + self.ui.warn(_("cvs server: %s\n") % line[2:]) + elif line.startswith("Remove"): + self.readp.readline() + else: + raise util.Abort(_("unknown CVS response: %s") % line) + + def getchanges(self, rev): + self._parse() + return sorted(self.files[rev].iteritems()), {} + + def getcommit(self, rev): + self._parse() + return self.changeset[rev] + + def gettags(self): + self._parse() + return self.tags + + def getchangedfiles(self, rev, i): + self._parse() + return sorted(self.files[rev]) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo Binary files differnew file mode 100644 index 0000000..d73fe3f --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py new file mode 100644 index 0000000..1519d41 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py @@ -0,0 +1,847 @@ +# Mercurial built-in replacement for cvsps. +# +# Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import os +import re +import cPickle as pickle +from mercurial import util +from mercurial.i18n import _ +from mercurial import hook + +class logentry(object): + '''Class logentry has the following attributes: + .author - author name as CVS knows it + .branch - name of branch this revision is on + .branches - revision tuple of branches starting at this revision + .comment - commit message + .date - the commit date as a (time, tz) tuple + .dead - true if file revision is dead + .file - Name of file + .lines - a tuple (+lines, -lines) or None + .parent - Previous revision of this entry + .rcs - name of file as returned from CVS + .revision - revision number as tuple + .tags - list of tags on the file + .synthetic - is this a synthetic "file ... added on ..." revision? + .mergepoint- the branch that has been merged from + (if present in rlog output) + .branchpoints- the branches that start at the current entry + ''' + def __init__(self, **entries): + self.synthetic = False + self.__dict__.update(entries) + + def __repr__(self): + return "<%s at 0x%x: %s %s>" % (self.__class__.__name__, + id(self), + self.file, + ".".join(map(str, self.revision))) + +class logerror(Exception): + pass + +def getrepopath(cvspath): + """Return the repository path from a CVS path. 
+ + >>> getrepopath('/foo/bar') + '/foo/bar' + >>> getrepopath('c:/foo/bar') + 'c:/foo/bar' + >>> getrepopath(':pserver:10/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:10c:/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:c:/foo/bar') + 'c:/foo/bar' + >>> getrepopath(':pserver:truc@foo.bar:/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar') + 'c:/foo/bar' + """ + # According to CVS manual, CVS paths are expressed like: + # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository + # + # Unfortunately, Windows absolute paths start with a drive letter + # like 'c:' making it harder to parse. Here we assume that drive + # letters are only one character long and any CVS component before + # the repository path is at least 2 characters long, and use this + # to disambiguate. + parts = cvspath.split(':') + if len(parts) == 1: + return parts[0] + # Here there is an ambiguous case if we have a port number + # immediately followed by a Windows driver letter. We assume this + # never happens and decide it must be CVS path component, + # therefore ignoring it. + if len(parts[-2]) > 1: + return parts[-1].lstrip('0123456789') + return parts[-2] + ':' + parts[-1] + +def createlog(ui, directory=None, root="", rlog=True, cache=None): + '''Collect the CVS rlog''' + + # Because we store many duplicate commit log messages, reusing strings + # saves a lot of memory and pickle storage space. 
+ _scache = {} + def scache(s): + "return a shared version of a string" + return _scache.setdefault(s, s) + + ui.status(_('collecting CVS rlog\n')) + + log = [] # list of logentry objects containing the CVS state + + # patterns to match in CVS (r)log output, by state of use + re_00 = re.compile('RCS file: (.+)$') + re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$') + re_02 = re.compile('cvs (r?log|server): (.+)\n$') + re_03 = re.compile("(Cannot access.+CVSROOT)|" + "(can't create temporary directory.+)$") + re_10 = re.compile('Working file: (.+)$') + re_20 = re.compile('symbolic names:') + re_30 = re.compile('\t(.+): ([\\d.]+)$') + re_31 = re.compile('----------------------------$') + re_32 = re.compile('=======================================' + '======================================$') + re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$') + re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);' + r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?' + r'(.*mergepoint:\s+([^;]+);)?') + re_70 = re.compile('branches: (.+);$') + + file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch') + + prefix = '' # leading path to strip of what we get from CVS + + if directory is None: + # Current working directory + + # Get the real directory in the repository + try: + prefix = open(os.path.join('CVS','Repository')).read().strip() + directory = prefix + if prefix == ".": + prefix = "" + except IOError: + raise logerror(_('not a CVS sandbox')) + + if prefix and not prefix.endswith(os.sep): + prefix += os.sep + + # Use the Root file in the sandbox, if it exists + try: + root = open(os.path.join('CVS','Root')).read().strip() + except IOError: + pass + + if not root: + root = os.environ.get('CVSROOT', '') + + # read log cache if one exists + oldlog = [] + date = None + + if cache: + cachedir = os.path.expanduser('~/.hg.cvsps') + if not os.path.exists(cachedir): + os.mkdir(cachedir) + + # The cvsps cache pickle needs a uniquified name, based on 
the + # repository location. The address may have all sort of nasties + # in it, slashes, colons and such. So here we take just the + # alphanumerics, concatenated in a way that does not mix up the + # various components, so that + # :pserver:user@server:/path + # and + # /pserver/user/server/path + # are mapped to different cache file names. + cachefile = root.split(":") + [directory, "cache"] + cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s] + cachefile = os.path.join(cachedir, + '.'.join([s for s in cachefile if s])) + + if cache == 'update': + try: + ui.note(_('reading cvs log cache %s\n') % cachefile) + oldlog = pickle.load(open(cachefile)) + ui.note(_('cache has %d log entries\n') % len(oldlog)) + except Exception, e: + ui.note(_('error reading cache: %r\n') % e) + + if oldlog: + date = oldlog[-1].date # last commit date as a (time,tz) tuple + date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2') + + # build the CVS commandline + cmd = ['cvs', '-q'] + if root: + cmd.append('-d%s' % root) + p = util.normpath(getrepopath(root)) + if not p.endswith('/'): + p += '/' + if prefix: + # looks like normpath replaces "" by "." 
+ prefix = p + util.normpath(prefix) + else: + prefix = p + cmd.append(['log', 'rlog'][rlog]) + if date: + # no space between option and date string + cmd.append('-d>%s' % date) + cmd.append(directory) + + # state machine begins here + tags = {} # dictionary of revisions on current file with their tags + branchmap = {} # mapping between branch names and revision numbers + state = 0 + store = False # set when a new record can be appended + + cmd = [util.shellquote(arg) for arg in cmd] + ui.note(_("running %s\n") % (' '.join(cmd))) + ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root)) + + pfp = util.popen(' '.join(cmd)) + peek = pfp.readline() + while True: + line = peek + if line == '': + break + peek = pfp.readline() + if line.endswith('\n'): + line = line[:-1] + #ui.debug('state=%d line=%r\n' % (state, line)) + + if state == 0: + # initial state, consume input until we see 'RCS file' + match = re_00.match(line) + if match: + rcs = match.group(1) + tags = {} + if rlog: + filename = util.normpath(rcs[:-2]) + if filename.startswith(prefix): + filename = filename[len(prefix):] + if filename.startswith('/'): + filename = filename[1:] + if filename.startswith('Attic/'): + filename = filename[6:] + else: + filename = filename.replace('/Attic/', '/') + state = 2 + continue + state = 1 + continue + match = re_01.match(line) + if match: + raise logerror(match.group(1)) + match = re_02.match(line) + if match: + raise logerror(match.group(2)) + if re_03.match(line): + raise logerror(line) + + elif state == 1: + # expect 'Working file' (only when using log instead of rlog) + match = re_10.match(line) + assert match, _('RCS file must be followed by working file') + filename = util.normpath(match.group(1)) + state = 2 + + elif state == 2: + # expect 'symbolic names' + if re_20.match(line): + branchmap = {} + state = 3 + + elif state == 3: + # read the symbolic names and store as tags + match = re_30.match(line) + if match: + rev = [int(x) for x in 
match.group(2).split('.')] + + # Convert magic branch number to an odd-numbered one + revn = len(rev) + if revn > 3 and (revn % 2) == 0 and rev[-2] == 0: + rev = rev[:-2] + rev[-1:] + rev = tuple(rev) + + if rev not in tags: + tags[rev] = [] + tags[rev].append(match.group(1)) + branchmap[match.group(1)] = match.group(2) + + elif re_31.match(line): + state = 5 + elif re_32.match(line): + state = 0 + + elif state == 4: + # expecting '------' separator before first revision + if re_31.match(line): + state = 5 + else: + assert not re_32.match(line), _('must have at least ' + 'some revisions') + + elif state == 5: + # expecting revision number and possibly (ignored) lock indication + # we create the logentry here from values stored in states 0 to 4, + # as this state is re-entered for subsequent revisions of a file. + match = re_50.match(line) + assert match, _('expected revision number') + e = logentry(rcs=scache(rcs), file=scache(filename), + revision=tuple([int(x) for x in match.group(1).split('.')]), + branches=[], parent=None) + state = 6 + + elif state == 6: + # expecting date, author, state, lines changed + match = re_60.match(line) + assert match, _('revision must be followed by date line') + d = match.group(1) + if d[2] == '/': + # Y2K + d = '19' + d + + if len(d.split()) != 3: + # cvs log dates always in GMT + d = d + ' UTC' + e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', + '%Y/%m/%d %H:%M:%S', + '%Y-%m-%d %H:%M:%S']) + e.author = scache(match.group(2)) + e.dead = match.group(3).lower() == 'dead' + + if match.group(5): + if match.group(6): + e.lines = (int(match.group(5)), int(match.group(6))) + else: + e.lines = (int(match.group(5)), 0) + elif match.group(6): + e.lines = (0, int(match.group(6))) + else: + e.lines = None + + if match.group(7): # cvsnt mergepoint + myrev = match.group(8).split('.') + if len(myrev) == 2: # head + e.mergepoint = 'HEAD' + else: + myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]]) + branches = [b for b in branchmap if branchmap[b] 
== myrev] + assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint + e.mergepoint = branches[0] + else: + e.mergepoint = None + e.comment = [] + state = 7 + + elif state == 7: + # read the revision numbers of branches that start at this revision + # or store the commit log message otherwise + m = re_70.match(line) + if m: + e.branches = [tuple([int(y) for y in x.strip().split('.')]) + for x in m.group(1).split(';')] + state = 8 + elif re_31.match(line) and re_50.match(peek): + state = 5 + store = True + elif re_32.match(line): + state = 0 + store = True + else: + e.comment.append(line) + + elif state == 8: + # store commit log message + if re_31.match(line): + state = 5 + store = True + elif re_32.match(line): + state = 0 + store = True + else: + e.comment.append(line) + + # When a file is added on a branch B1, CVS creates a synthetic + # dead trunk revision 1.1 so that the branch has a root. + # Likewise, if you merge such a file to a later branch B2 (one + # that already existed when the file was added on B1), CVS + # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop + # these revisions now, but mark them synthetic so + # createchangeset() can take care of them. + if (store and + e.dead and + e.revision[-1] == 1 and # 1.1 or 1.1.x.1 + len(e.comment) == 1 and + file_added_re.match(e.comment[0])): + ui.debug('found synthetic revision in %s: %r\n' + % (e.rcs, e.comment[0])) + e.synthetic = True + + if store: + # clean up the results and save in the log. 
+ store = False + e.tags = sorted([scache(x) for x in tags.get(e.revision, [])]) + e.comment = scache('\n'.join(e.comment)) + + revn = len(e.revision) + if revn > 3 and (revn % 2) == 0: + e.branch = tags.get(e.revision[:-1], [None])[0] + else: + e.branch = None + + # find the branches starting from this revision + branchpoints = set() + for branch, revision in branchmap.iteritems(): + revparts = tuple([int(i) for i in revision.split('.')]) + if len(revparts) < 2: # bad tags + continue + if revparts[-2] == 0 and revparts[-1] % 2 == 0: + # normal branch + if revparts[:-2] == e.revision: + branchpoints.add(branch) + elif revparts == (1, 1, 1): # vendor branch + if revparts in e.branches: + branchpoints.add(branch) + e.branchpoints = branchpoints + + log.append(e) + + if len(log) % 100 == 0: + ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n') + + log.sort(key=lambda x: (x.rcs, x.revision)) + + # find parent revisions of individual files + versions = {} + for e in log: + branch = e.revision[:-1] + p = versions.get((e.rcs, branch), None) + if p is None: + p = e.revision[:-2] + e.parent = p + versions[(e.rcs, branch)] = e.revision + + # update the log cache + if cache: + if log: + # join up the old and new logs + log.sort(key=lambda x: x.date) + + if oldlog and oldlog[-1].date >= log[0].date: + raise logerror(_('log cache overlaps with new log entries,' + ' re-run without cache.')) + + log = oldlog + log + + # write the new cachefile + ui.note(_('writing cvs log cache %s\n') % cachefile) + pickle.dump(log, open(cachefile, 'w')) + else: + log = oldlog + + ui.status(_('%d log entries\n') % len(log)) + + hook.hook(ui, None, "cvslog", True, log=log) + + return log + + +class changeset(object): + '''Class changeset has the following attributes: + .id - integer identifying this changeset (list index) + .author - author name as CVS knows it + .branch - name of branch this changeset is on, or None + .comment - commit message + .date - the commit date as a (time,tz) 
tuple + .entries - list of logentry objects in this changeset + .parents - list of one or two parent changesets + .tags - list of tags on this changeset + .synthetic - from synthetic revision "file ... added on branch ..." + .mergepoint- the branch that has been merged from + (if present in rlog output) + .branchpoints- the branches that start at the current entry + ''' + def __init__(self, **entries): + self.synthetic = False + self.__dict__.update(entries) + + def __repr__(self): + return "<%s at 0x%x: %s>" % (self.__class__.__name__, + id(self), + getattr(self, 'id', "(no id)")) + +def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None): + '''Convert log into changesets.''' + + ui.status(_('creating changesets\n')) + + # Merge changesets + + log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date)) + + changesets = [] + files = set() + c = None + for i, e in enumerate(log): + + # Check if log entry belongs to the current changeset or not. + + # Since CVS is file centric, two different file revisions with + # different branchpoints should be treated as belonging to two + # different changesets (and the ordering is important and not + # honoured by cvsps at this point). + # + # Consider the following case: + # foo 1.1 branchpoints: [MYBRANCH] + # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2] + # + # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a + # later version of foo may be in MYBRANCH2, so foo should be the + # first changeset and bar the next and MYBRANCH and MYBRANCH2 + # should both start off of the bar changeset. No provisions are + # made to ensure that this is, in fact, what happens. 
+ if not (c and + e.comment == c.comment and + e.author == c.author and + e.branch == c.branch and + (not hasattr(e, 'branchpoints') or + not hasattr (c, 'branchpoints') or + e.branchpoints == c.branchpoints) and + ((c.date[0] + c.date[1]) <= + (e.date[0] + e.date[1]) <= + (c.date[0] + c.date[1]) + fuzz) and + e.file not in files): + c = changeset(comment=e.comment, author=e.author, + branch=e.branch, date=e.date, entries=[], + mergepoint=getattr(e, 'mergepoint', None), + branchpoints=getattr(e, 'branchpoints', set())) + changesets.append(c) + files = set() + if len(changesets) % 100 == 0: + t = '%d %s' % (len(changesets), repr(e.comment)[1:-1]) + ui.status(util.ellipsis(t, 80) + '\n') + + c.entries.append(e) + files.add(e.file) + c.date = e.date # changeset date is date of latest commit in it + + # Mark synthetic changesets + + for c in changesets: + # Synthetic revisions always get their own changeset, because + # the log message includes the filename. E.g. if you add file3 + # and file4 on a branch, you get four log entries and three + # changesets: + # "File file3 was added on branch ..." (synthetic, 1 entry) + # "File file4 was added on branch ..." (synthetic, 1 entry) + # "Add file3 and file4 to fix ..." (real, 2 entries) + # Hence the check for 1 entry here. 
+ c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic + + # Sort files in each changeset + + for c in changesets: + def pathcompare(l, r): + 'Mimic cvsps sorting order' + l = l.split('/') + r = r.split('/') + nl = len(l) + nr = len(r) + n = min(nl, nr) + for i in range(n): + if i + 1 == nl and nl < nr: + return -1 + elif i + 1 == nr and nl > nr: + return +1 + elif l[i] < r[i]: + return -1 + elif l[i] > r[i]: + return +1 + return 0 + def entitycompare(l, r): + return pathcompare(l.file, r.file) + + c.entries.sort(entitycompare) + + # Sort changesets by date + + def cscmp(l, r): + d = sum(l.date) - sum(r.date) + if d: + return d + + # detect vendor branches and initial commits on a branch + le = {} + for e in l.entries: + le[e.rcs] = e.revision + re = {} + for e in r.entries: + re[e.rcs] = e.revision + + d = 0 + for e in l.entries: + if re.get(e.rcs, None) == e.parent: + assert not d + d = 1 + break + + for e in r.entries: + if le.get(e.rcs, None) == e.parent: + assert not d + d = -1 + break + + return d + + changesets.sort(cscmp) + + # Collect tags + + globaltags = {} + for c in changesets: + for e in c.entries: + for tag in e.tags: + # remember which is the latest changeset to have this tag + globaltags[tag] = c + + for c in changesets: + tags = set() + for e in c.entries: + tags.update(e.tags) + # remember tags only if this is the latest changeset to have it + c.tags = sorted(tag for tag in tags if globaltags[tag] is c) + + # Find parent changesets, handle {{mergetobranch BRANCHNAME}} + # by inserting dummy changesets with two parents, and handle + # {{mergefrombranch BRANCHNAME}} by setting two parents. 
+ + if mergeto is None: + mergeto = r'{{mergetobranch ([-\w]+)}}' + if mergeto: + mergeto = re.compile(mergeto) + + if mergefrom is None: + mergefrom = r'{{mergefrombranch ([-\w]+)}}' + if mergefrom: + mergefrom = re.compile(mergefrom) + + versions = {} # changeset index where we saw any particular file version + branches = {} # changeset index where we saw a branch + n = len(changesets) + i = 0 + while i < n: + c = changesets[i] + + for f in c.entries: + versions[(f.rcs, f.revision)] = i + + p = None + if c.branch in branches: + p = branches[c.branch] + else: + # first changeset on a new branch + # the parent is a changeset with the branch in its + # branchpoints such that it is the latest possible + # commit without any intervening, unrelated commits. + + for candidate in xrange(i): + if c.branch not in changesets[candidate].branchpoints: + if p is not None: + break + continue + p = candidate + + c.parents = [] + if p is not None: + p = changesets[p] + + # Ensure no changeset has a synthetic changeset as a parent. 
+ while p.synthetic: + assert len(p.parents) <= 1, \ + _('synthetic changeset cannot have multiple parents') + if p.parents: + p = p.parents[0] + else: + p = None + break + + if p is not None: + c.parents.append(p) + + if c.mergepoint: + if c.mergepoint == 'HEAD': + c.mergepoint = None + c.parents.append(changesets[branches[c.mergepoint]]) + + if mergefrom: + m = mergefrom.search(c.comment) + if m: + m = m.group(1) + if m == 'HEAD': + m = None + try: + candidate = changesets[branches[m]] + except KeyError: + ui.warn(_("warning: CVS commit message references " + "non-existent branch %r:\n%s\n") + % (m, c.comment)) + if m in branches and c.branch != m and not candidate.synthetic: + c.parents.append(candidate) + + if mergeto: + m = mergeto.search(c.comment) + if m: + try: + m = m.group(1) + if m == 'HEAD': + m = None + except: + m = None # if no group found then merge to HEAD + if m in branches and c.branch != m: + # insert empty changeset for merge + cc = changeset( + author=c.author, branch=m, date=c.date, + comment='convert-repo: CVS merge from branch %s' + % c.branch, + entries=[], tags=[], + parents=[changesets[branches[m]], c]) + changesets.insert(i + 1, cc) + branches[m] = i + 1 + + # adjust our loop counters now we have inserted a new entry + n += 1 + i += 2 + continue + + branches[c.branch] = i + i += 1 + + # Drop synthetic changesets (safe now that we have ensured no other + # changesets can have them as parents). + i = 0 + while i < len(changesets): + if changesets[i].synthetic: + del changesets[i] + else: + i += 1 + + # Number changesets + + for i, c in enumerate(changesets): + c.id = i + 1 + + ui.status(_('%d changeset entries\n') % len(changesets)) + + hook.hook(ui, None, "cvschangesets", True, changesets=changesets) + + return changesets + + +def debugcvsps(ui, *args, **opts): + '''Read CVS rlog for current directory or named path in + repository, and convert the log to changesets based on matching + commit log entries and dates. 
+ ''' + if opts["new_cache"]: + cache = "write" + elif opts["update_cache"]: + cache = "update" + else: + cache = None + + revisions = opts["revisions"] + + try: + if args: + log = [] + for d in args: + log += createlog(ui, d, root=opts["root"], cache=cache) + else: + log = createlog(ui, root=opts["root"], cache=cache) + except logerror, e: + ui.write("%r\n"%e) + return + + changesets = createchangeset(ui, log, opts["fuzz"]) + del log + + # Print changesets (optionally filtered) + + off = len(revisions) + branches = {} # latest version number in each branch + ancestors = {} # parent branch + for cs in changesets: + + if opts["ancestors"]: + if cs.branch not in branches and cs.parents and cs.parents[0].id: + ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch, + cs.parents[0].id) + branches[cs.branch] = cs.id + + # limit by branches + if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]: + continue + + if not off: + # Note: trailing spaces on several lines here are needed to have + # bug-for-bug compatibility with cvsps. 
+ ui.write('---------------------\n') + ui.write('PatchSet %d \n' % cs.id) + ui.write('Date: %s\n' % util.datestr(cs.date, + '%Y/%m/%d %H:%M:%S %1%2')) + ui.write('Author: %s\n' % cs.author) + ui.write('Branch: %s\n' % (cs.branch or 'HEAD')) + ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1], + ','.join(cs.tags) or '(none)')) + branchpoints = getattr(cs, 'branchpoints', None) + if branchpoints: + ui.write('Branchpoints: %s \n' % ', '.join(branchpoints)) + if opts["parents"] and cs.parents: + if len(cs.parents) > 1: + ui.write('Parents: %s\n' % + (','.join([str(p.id) for p in cs.parents]))) + else: + ui.write('Parent: %d\n' % cs.parents[0].id) + + if opts["ancestors"]: + b = cs.branch + r = [] + while b: + b, c = ancestors[b] + r.append('%s:%d:%d' % (b or "HEAD", c, branches[b])) + if r: + ui.write('Ancestors: %s\n' % (','.join(r))) + + ui.write('Log:\n') + ui.write('%s\n\n' % cs.comment) + ui.write('Members: \n') + for f in cs.entries: + fn = f.file + if fn.startswith(opts["prefix"]): + fn = fn[len(opts["prefix"]):] + ui.write('\t%s:%s->%s%s \n' % ( + fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL', + '.'.join([str(x) for x in f.revision]), + ['', '(DEAD)'][f.dead])) + ui.write('\n') + + # have we seen the start tag? 
+ if revisions and off: + if revisions[0] == str(cs.id) or \ + revisions[0] in cs.tags: + off = False + + # see if we reached the end tag + if len(revisions) > 1 and not off: + if revisions[1] == str(cs.id) or \ + revisions[1] in cs.tags: + break diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo Binary files differnew file mode 100644 index 0000000..fdf6d44 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py new file mode 100644 index 0000000..9863eb8 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py @@ -0,0 +1,200 @@ +# darcs.py - darcs support for the convert extension +# +# Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from common import NoRepo, checktool, commandline, commit, converter_source +from mercurial.i18n import _ +from mercurial import encoding, util +import os, shutil, tempfile, re + +# The naming drift of ElementTree is fun! 
+ +try: + from xml.etree.cElementTree import ElementTree, XMLParser +except ImportError: + try: + from xml.etree.ElementTree import ElementTree, XMLParser + except ImportError: + try: + from elementtree.cElementTree import ElementTree, XMLParser + except ImportError: + try: + from elementtree.ElementTree import ElementTree, XMLParser + except ImportError: + ElementTree = None + +class darcs_source(converter_source, commandline): + def __init__(self, ui, path, rev=None): + converter_source.__init__(self, ui, path, rev=rev) + commandline.__init__(self, ui, 'darcs') + + # check for _darcs, ElementTree so that we can easily skip + # test-convert-darcs if ElementTree is not around + if not os.path.exists(os.path.join(path, '_darcs')): + raise NoRepo(_("%s does not look like a darcs repository") % path) + + checktool('darcs') + version = self.run0('--version').splitlines()[0].strip() + if version < '2.1': + raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') % + version) + + if ElementTree is None: + raise util.Abort(_("Python ElementTree module is not available")) + + self.path = os.path.realpath(path) + + self.lastrev = None + self.changes = {} + self.parents = {} + self.tags = {} + + # Check darcs repository format + format = self.format() + if format: + if format in ('darcs-1.0', 'hashed'): + raise NoRepo(_("%s repository format is unsupported, " + "please upgrade") % format) + else: + self.ui.warn(_('failed to detect repository format!')) + + def before(self): + self.tmppath = tempfile.mkdtemp( + prefix='convert-' + os.path.basename(self.path) + '-') + output, status = self.run('init', repodir=self.tmppath) + self.checkexit(status) + + tree = self.xml('changes', xml_output=True, summary=True, + repodir=self.path) + tagname = None + child = None + for elt in tree.findall('patch'): + node = elt.get('hash') + name = elt.findtext('name', '') + if name.startswith('TAG '): + tagname = name[4:].strip() + elif tagname is not None: + self.tags[tagname] = node 
+ tagname = None + self.changes[node] = elt + self.parents[child] = [node] + child = node + self.parents[child] = [] + + def after(self): + self.ui.debug('cleaning up %s\n' % self.tmppath) + shutil.rmtree(self.tmppath, ignore_errors=True) + + def recode(self, s, encoding=None): + if isinstance(s, unicode): + # XMLParser returns unicode objects for anything it can't + # encode into ASCII. We convert them back to str to get + # recode's normal conversion behavior. + s = s.encode('latin-1') + return super(darcs_source, self).recode(s, encoding) + + def xml(self, cmd, **kwargs): + # NOTE: darcs is currently encoding agnostic and will print + # patch metadata byte-for-byte, even in the XML changelog. + etree = ElementTree() + # While we are decoding the XML as latin-1 to be as liberal as + # possible, etree will still raise an exception if any + # non-printable characters are in the XML changelog. + parser = XMLParser(encoding='latin-1') + fp = self._run(cmd, **kwargs) + etree.parse(fp, parser=parser) + self.checkexit(fp.close()) + return etree.getroot() + + def format(self): + output, status = self.run('show', 'repo', no_files=True, + repodir=self.path) + self.checkexit(status) + m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE) + if not m: + return None + return ','.join(sorted(f.strip() for f in m.group(1).split(','))) + + def manifest(self): + man = [] + output, status = self.run('show', 'files', no_directories=True, + repodir=self.tmppath) + self.checkexit(status) + for line in output.split('\n'): + path = line[2:] + if path: + man.append(path) + return man + + def getheads(self): + return self.parents[None] + + def getcommit(self, rev): + elt = self.changes[rev] + date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y') + desc = elt.findtext('name') + '\n' + elt.findtext('comment', '') + # etree can return unicode objects for name, comment, and author, + # so recode() is used to ensure str objects are emitted. 
+ return commit(author=self.recode(elt.get('author')), + date=util.datestr(date), + desc=self.recode(desc).strip(), + parents=self.parents[rev]) + + def pull(self, rev): + output, status = self.run('pull', self.path, all=True, + match='hash %s' % rev, + no_test=True, no_posthook=True, + external_merge='/bin/false', + repodir=self.tmppath) + if status: + if output.find('We have conflicts in') == -1: + self.checkexit(status, output) + output, status = self.run('revert', all=True, repodir=self.tmppath) + self.checkexit(status, output) + + def getchanges(self, rev): + copies = {} + changes = [] + man = None + for elt in self.changes[rev].find('summary').getchildren(): + if elt.tag in ('add_directory', 'remove_directory'): + continue + if elt.tag == 'move': + if man is None: + man = self.manifest() + source, dest = elt.get('from'), elt.get('to') + if source in man: + # File move + changes.append((source, rev)) + changes.append((dest, rev)) + copies[dest] = source + else: + # Directory move, deduce file moves from manifest + source = source + '/' + for f in man: + if not f.startswith(source): + continue + fdest = dest + '/' + f[len(source):] + changes.append((f, rev)) + changes.append((fdest, rev)) + copies[fdest] = f + else: + changes.append((elt.text.strip(), rev)) + self.pull(rev) + self.lastrev = rev + return sorted(changes), copies + + def getfile(self, name, rev): + if rev != self.lastrev: + raise util.Abort(_('internal calling inconsistency')) + path = os.path.join(self.tmppath, name) + data = open(path, 'rb').read() + mode = os.lstat(path).st_mode + mode = (mode & 0111) and 'x' or '' + return data, mode + + def gettags(self): + return self.tags diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo Binary files differnew file mode 100644 index 0000000..78b7568 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py new file mode 100644 index 0000000..1064642 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py @@ -0,0 +1,365 @@ +# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com> +# Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import shlex +from mercurial.i18n import _ +from mercurial import util +from common import SKIPREV, converter_source + +def rpairs(name): + e = len(name) + while e != -1: + yield name[:e], name[e + 1:] + e = name.rfind('/', 0, e) + yield '.', name + +class filemapper(object): + '''Map and filter filenames when importing. + A name can be mapped to itself, a new name, or None (omit from new + repository).''' + + def __init__(self, ui, path=None): + self.ui = ui + self.include = {} + self.exclude = {} + self.rename = {} + if path: + if self.parse(path): + raise util.Abort(_('errors in filemap')) + + def parse(self, path): + errs = 0 + def check(name, mapping, listname): + if not name: + self.ui.warn(_('%s:%d: path to %s is missing\n') % + (lex.infile, lex.lineno, listname)) + return 1 + if name in mapping: + self.ui.warn(_('%s:%d: %r already in %s list\n') % + (lex.infile, lex.lineno, name, listname)) + return 1 + if (name.startswith('/') or + name.endswith('/') or + '//' in name): + self.ui.warn(_('%s:%d: superfluous / in %s %r\n') % + (lex.infile, lex.lineno, listname, name)) + return 1 + return 0 + lex = shlex.shlex(open(path), path, True) + lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?' 
+ cmd = lex.get_token() + while cmd: + if cmd == 'include': + name = lex.get_token() + errs += check(name, self.exclude, 'exclude') + self.include[name] = name + elif cmd == 'exclude': + name = lex.get_token() + errs += check(name, self.include, 'include') + errs += check(name, self.rename, 'rename') + self.exclude[name] = name + elif cmd == 'rename': + src = lex.get_token() + dest = lex.get_token() + errs += check(src, self.exclude, 'exclude') + self.rename[src] = dest + elif cmd == 'source': + errs += self.parse(lex.get_token()) + else: + self.ui.warn(_('%s:%d: unknown directive %r\n') % + (lex.infile, lex.lineno, cmd)) + errs += 1 + cmd = lex.get_token() + return errs + + def lookup(self, name, mapping): + for pre, suf in rpairs(name): + try: + return mapping[pre], pre, suf + except KeyError: + pass + return '', name, '' + + def __call__(self, name): + if self.include: + inc = self.lookup(name, self.include)[0] + else: + inc = name + if self.exclude: + exc = self.lookup(name, self.exclude)[0] + else: + exc = '' + if (not self.include and exc) or (len(inc) <= len(exc)): + return None + newpre, pre, suf = self.lookup(name, self.rename) + if newpre: + if newpre == '.': + return suf + if suf: + return newpre + '/' + suf + return newpre + return name + + def active(self): + return bool(self.include or self.exclude or self.rename) + +# This class does two additional things compared to a regular source: +# +# - Filter and rename files. This is mostly wrapped by the filemapper +# class above. We hide the original filename in the revision that is +# returned by getchanges to be able to find things later in getfile. +# +# - Return only revisions that matter for the files we're interested in. +# This involves rewriting the parents of the original revision to +# create a graph that is restricted to those revisions. 
+# +# This set of revisions includes not only revisions that directly +# touch files we're interested in, but also merges that merge two +# or more interesting revisions. + +class filemap_source(converter_source): + def __init__(self, ui, baseconverter, filemap): + super(filemap_source, self).__init__(ui) + self.base = baseconverter + self.filemapper = filemapper(ui, filemap) + self.commits = {} + # if a revision rev has parent p in the original revision graph, then + # rev will have parent self.parentmap[p] in the restricted graph. + self.parentmap = {} + # self.wantedancestors[rev] is the set of all ancestors of rev that + # are in the restricted graph. + self.wantedancestors = {} + self.convertedorder = None + self._rebuilt = False + self.origparents = {} + self.children = {} + self.seenchildren = {} + + def before(self): + self.base.before() + + def after(self): + self.base.after() + + def setrevmap(self, revmap): + # rebuild our state to make things restartable + # + # To avoid calling getcommit for every revision that has already + # been converted, we rebuild only the parentmap, delaying the + # rebuild of wantedancestors until we need it (i.e. until a + # merge). + # + # We assume the order argument lists the revisions in + # topological order, so that we can infer which revisions were + # wanted by previous runs. 
+ self._rebuilt = not revmap + seen = {SKIPREV: SKIPREV} + dummyset = set() + converted = [] + for rev in revmap.order: + mapped = revmap[rev] + wanted = mapped not in seen + if wanted: + seen[mapped] = rev + self.parentmap[rev] = rev + else: + self.parentmap[rev] = seen[mapped] + self.wantedancestors[rev] = dummyset + arg = seen[mapped] + if arg == SKIPREV: + arg = None + converted.append((rev, wanted, arg)) + self.convertedorder = converted + return self.base.setrevmap(revmap) + + def rebuild(self): + if self._rebuilt: + return True + self._rebuilt = True + self.parentmap.clear() + self.wantedancestors.clear() + self.seenchildren.clear() + for rev, wanted, arg in self.convertedorder: + if rev not in self.origparents: + self.origparents[rev] = self.getcommit(rev).parents + if arg is not None: + self.children[arg] = self.children.get(arg, 0) + 1 + + for rev, wanted, arg in self.convertedorder: + parents = self.origparents[rev] + if wanted: + self.mark_wanted(rev, parents) + else: + self.mark_not_wanted(rev, arg) + self._discard(arg, *parents) + + return True + + def getheads(self): + return self.base.getheads() + + def getcommit(self, rev): + # We want to save a reference to the commit objects to be able + # to rewrite their parents later on. + c = self.commits[rev] = self.base.getcommit(rev) + for p in c.parents: + self.children[p] = self.children.get(p, 0) + 1 + return c + + def _discard(self, *revs): + for r in revs: + if r is None: + continue + self.seenchildren[r] = self.seenchildren.get(r, 0) + 1 + if self.seenchildren[r] == self.children[r]: + del self.wantedancestors[r] + del self.parentmap[r] + del self.seenchildren[r] + if self._rebuilt: + del self.children[r] + + def wanted(self, rev, i): + # Return True if we're directly interested in rev. + # + # i is an index selecting one of the parents of rev (if rev + # has no parents, i is None). getchangedfiles will give us + # the list of files that are different in rev and in the parent + # indicated by i. 
If we're interested in any of these files, + # we're interested in rev. + try: + files = self.base.getchangedfiles(rev, i) + except NotImplementedError: + raise util.Abort(_("source repository doesn't support --filemap")) + for f in files: + if self.filemapper(f): + return True + return False + + def mark_not_wanted(self, rev, p): + # Mark rev as not interesting and update data structures. + + if p is None: + # A root revision. Use SKIPREV to indicate that it doesn't + # map to any revision in the restricted graph. Put SKIPREV + # in the set of wanted ancestors to simplify code elsewhere + self.parentmap[rev] = SKIPREV + self.wantedancestors[rev] = set((SKIPREV,)) + return + + # Reuse the data from our parent. + self.parentmap[rev] = self.parentmap[p] + self.wantedancestors[rev] = self.wantedancestors[p] + + def mark_wanted(self, rev, parents): + # Mark rev ss wanted and update data structures. + + # rev will be in the restricted graph, so children of rev in + # the original graph should still have rev as a parent in the + # restricted graph. + self.parentmap[rev] = rev + + # The set of wanted ancestors of rev is the union of the sets + # of wanted ancestors of its parents. Plus rev itself. + wrev = set() + for p in parents: + wrev.update(self.wantedancestors[p]) + wrev.add(rev) + self.wantedancestors[rev] = wrev + + def getchanges(self, rev): + parents = self.commits[rev].parents + if len(parents) > 1: + self.rebuild() + + # To decide whether we're interested in rev we: + # + # - calculate what parents rev will have if it turns out we're + # interested in it. If it's going to have more than 1 parent, + # we're interested in it. + # + # - otherwise, we'll compare it with the single parent we found. + # If any of the files we're interested in is different in the + # the two revisions, we're interested in rev. 
+ + # A parent p is interesting if its mapped version (self.parentmap[p]): + # - is not SKIPREV + # - is still not in the list of parents (we don't want duplicates) + # - is not an ancestor of the mapped versions of the other parents + mparents = [] + wp = None + for i, p1 in enumerate(parents): + mp1 = self.parentmap[p1] + if mp1 == SKIPREV or mp1 in mparents: + continue + for p2 in parents: + if p1 == p2 or mp1 == self.parentmap[p2]: + continue + if mp1 in self.wantedancestors[p2]: + break + else: + mparents.append(mp1) + wp = i + + if wp is None and parents: + wp = 0 + + self.origparents[rev] = parents + + closed = 'close' in self.commits[rev].extra + + if len(mparents) < 2 and not closed and not self.wanted(rev, wp): + # We don't want this revision. + # Update our state and tell the convert process to map this + # revision to the same revision its parent as mapped to. + p = None + if parents: + p = parents[wp] + self.mark_not_wanted(rev, p) + self.convertedorder.append((rev, False, p)) + self._discard(*parents) + return self.parentmap[rev] + + # We want this revision. + # Rewrite the parents of the commit object + self.commits[rev].parents = mparents + self.mark_wanted(rev, parents) + self.convertedorder.append((rev, True, None)) + self._discard(*parents) + + # Get the real changes and do the filtering/mapping. To be + # able to get the files later on in getfile, we hide the + # original filename in the rev part of the return value. 
+ changes, copies = self.base.getchanges(rev) + newnames = {} + files = [] + for f, r in changes: + newf = self.filemapper(f) + if newf: + files.append((newf, (f, r))) + newnames[f] = newf + + ncopies = {} + for c in copies: + newc = self.filemapper(c) + if newc: + newsource = self.filemapper(copies[c]) + if newsource: + ncopies[newc] = newsource + + return files, ncopies + + def getfile(self, name, rev): + realname, realrev = rev + return self.base.getfile(realname, realrev) + + def gettags(self): + return self.base.gettags() + + def hasnativeorder(self): + return self.base.hasnativeorder() + + def lookuprev(self, rev): + return self.base.lookuprev(rev) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo Binary files differnew file mode 100644 index 0000000..2ece523 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py new file mode 100644 index 0000000..e973031 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py @@ -0,0 +1,170 @@ +# git.py - git support for the convert extension +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import os +from mercurial import util +from mercurial.node import hex, nullid +from mercurial.i18n import _ + +from common import NoRepo, commit, converter_source, checktool + +class convert_git(converter_source): + # Windows does not support GIT_DIR= construct while other systems + # cannot remove environment variable. Just assume none have + # both issues. 
+ if hasattr(os, 'unsetenv'): + def gitopen(self, s): + prevgitdir = os.environ.get('GIT_DIR') + os.environ['GIT_DIR'] = self.path + try: + return util.popen(s, 'rb') + finally: + if prevgitdir is None: + del os.environ['GIT_DIR'] + else: + os.environ['GIT_DIR'] = prevgitdir + else: + def gitopen(self, s): + return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb') + + def gitread(self, s): + fh = self.gitopen(s) + data = fh.read() + return data, fh.close() + + def __init__(self, ui, path, rev=None): + super(convert_git, self).__init__(ui, path, rev=rev) + + if os.path.isdir(path + "/.git"): + path += "/.git" + if not os.path.exists(path + "/objects"): + raise NoRepo(_("%s does not look like a Git repository") % path) + + checktool('git', 'git') + + self.path = path + + def getheads(self): + if not self.rev: + heads, ret = self.gitread('git rev-parse --branches --remotes') + heads = heads.splitlines() + else: + heads, ret = self.gitread("git rev-parse --verify %s" % self.rev) + heads = [heads[:-1]] + if ret: + raise util.Abort(_('cannot retrieve git heads')) + return heads + + def catfile(self, rev, type): + if rev == hex(nullid): + raise IOError() + data, ret = self.gitread("git cat-file %s %s" % (type, rev)) + if ret: + raise util.Abort(_('cannot read %r object at %s') % (type, rev)) + return data + + def getfile(self, name, rev): + data = self.catfile(rev, "blob") + mode = self.modecache[(name, rev)] + return data, mode + + def getchanges(self, version): + self.modecache = {} + fh = self.gitopen("git diff-tree -z --root -m -r %s" % version) + changes = [] + seen = set() + entry = None + for l in fh.read().split('\x00'): + if not entry: + if not l.startswith(':'): + continue + entry = l + continue + f = l + if f not in seen: + seen.add(f) + entry = entry.split() + h = entry[3] + p = (entry[1] == "100755") + s = (entry[1] == "120000") + self.modecache[(f, h)] = (p and "x") or (s and "l") or "" + changes.append((f, h)) + entry = None + if fh.close(): + raise 
util.Abort(_('cannot read changes in %s') % version) + return (changes, {}) + + def getcommit(self, version): + c = self.catfile(version, "commit") # read the commit hash + end = c.find("\n\n") + message = c[end + 2:] + message = self.recode(message) + l = c[:end].splitlines() + parents = [] + author = committer = None + for e in l[1:]: + n, v = e.split(" ", 1) + if n == "author": + p = v.split() + tm, tz = p[-2:] + author = " ".join(p[:-2]) + if author[0] == "<": author = author[1:-1] + author = self.recode(author) + if n == "committer": + p = v.split() + tm, tz = p[-2:] + committer = " ".join(p[:-2]) + if committer[0] == "<": committer = committer[1:-1] + committer = self.recode(committer) + if n == "parent": + parents.append(v) + + if committer and committer != author: + message += "\ncommitter: %s\n" % committer + tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:] + tz = -int(tzs) * (int(tzh) * 3600 + int(tzm)) + date = tm + " " + str(tz) + + c = commit(parents=parents, date=date, author=author, desc=message, + rev=version) + return c + + def gettags(self): + tags = {} + fh = self.gitopen('git ls-remote --tags "%s"' % self.path) + prefix = 'refs/tags/' + for line in fh: + line = line.strip() + if not line.endswith("^{}"): + continue + node, tag = line.split(None, 1) + if not tag.startswith(prefix): + continue + tag = tag[len(prefix):-3] + tags[tag] = node + if fh.close(): + raise util.Abort(_('cannot read tags from %s') % self.path) + + return tags + + def getchangedfiles(self, version, i): + changes = [] + if i is None: + fh = self.gitopen("git diff-tree --root -m -r %s" % version) + for l in fh: + if "\t" not in l: + continue + m, f = l[:-1].split("\t") + changes.append(f) + else: + fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --' + % (version, version, i + 1)) + changes = [f.rstrip('\n') for f in fh] + if fh.close(): + raise util.Abort(_('cannot read changes in %s') % version) + + return changes diff --git 
# gnuarch.py - GNU Arch support for the convert extension
#
# Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
# and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from common import NoRepo, commandline, commit, converter_source
from mercurial.i18n import _
from mercurial import encoding, util
import os, shutil, tempfile, stat
from email.Parser import Parser

class gnuarch_source(converter_source, commandline):
    """Convert source reading GNU Arch (baz/tla) working trees."""

    class gnuarch_rev(object):
        """One GNU Arch revision: metadata plus the file-level changes
        it introduces."""
        def __init__(self, rev):
            self.rev = rev
            self.summary = ''
            self.date = None
            self.author = ''
            # Set when this revision branches/tags another tree version.
            self.continuationof = None
            self.add_files = []
            self.mod_files = []
            self.del_files = []
            self.ren_files = {}
            self.ren_dirs = {}

    def __init__(self, ui, path, rev=None):
        super(gnuarch_source, self).__init__(ui, path, rev=rev)

        if not os.path.exists(os.path.join(path, '{arch}')):
            raise NoRepo(_("%s does not look like a GNU Arch repository")
                         % path)

        # Could use checktool, but we want to check for baz or tla.
        self.execmd = None
        if util.find_exe('baz'):
            self.execmd = 'baz'
        else:
            if util.find_exe('tla'):
                self.execmd = 'tla'
            else:
                raise util.Abort(_('cannot find a GNU Arch tool'))

        commandline.__init__(self, ui, self.execmd)

        self.path = os.path.realpath(path)
        self.tmppath = None

        self.treeversion = None
        self.lastrev = None
        self.changes = {}   # rev -> gnuarch_rev
        self.parents = {}   # rev -> [parent revs]; None maps to the heads
        self.tags = {}
        self.catlogparser = Parser()
        self.encoding = encoding.encoding
        self.archives = []

    def before(self):
        """Walk the tree-version history, filling self.changes and
        self.parents before conversion starts."""
        # Get registered archives
        self.archives = [i.rstrip('\n')
                         for i in self.runlines0('archives', '-n')]

        if self.execmd == 'tla':
            output = self.run0('tree-version', self.path)
        else:
            output = self.run0('tree-version', '-d', self.path)
        self.treeversion = output.strip()

        # Get name of temporary directory
        version = self.treeversion.split('/')
        self.tmppath = os.path.join(tempfile.gettempdir(),
                                    'hg-%s' % version[1])

        # Generate parents dictionary
        self.parents[None] = []
        treeversion = self.treeversion
        child = None
        while treeversion:
            self.ui.status(_('analyzing tree version %s...\n') % treeversion)

            archive = treeversion.split('/')[0]
            if archive not in self.archives:
                self.ui.status(_('tree analysis stopped because it points to '
                                 'an unregistered archive %s...\n') % archive)
                break

            # Get the complete list of revisions for that tree version
            output, status = self.runlines('revisions', '-r', '-f', treeversion)
            self.checkexit(status, 'failed retrieving revisions for %s'
                           % treeversion)

            # No new iteration unless a revision has a continuation-of header
            treeversion = None

            for l in output:
                rev = l.strip()
                self.changes[rev] = self.gnuarch_rev(rev)
                self.parents[rev] = []

                # Read author, date and summary
                catlog, status = self.run('cat-log', '-d', self.path, rev)
                if status:
                    catlog = self.run0('cat-archive-log', rev)
                self._parsecatlog(catlog, rev)

                # Populate the parents map
                self.parents[child].append(rev)

                # Keep track of the current revision as the child of the next
                # revision scanned
                child = rev

                # Check if we have to follow the usual incremental history
                # or if we have to 'jump' to a different treeversion given
                # by the continuation-of header.
                if self.changes[rev].continuationof:
                    treeversion = '--'.join(
                        self.changes[rev].continuationof.split('--')[:-1])
                    break

                # If we reached a base-0 revision w/o any continuation-of
                # header, it means the tree history ends here.
                if rev[-6:] == 'base-0':
                    break

    def after(self):
        self.ui.debug('cleaning up %s\n' % self.tmppath)
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def getheads(self):
        return self.parents[None]

    def getfile(self, name, rev):
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))

        # Raise IOError if necessary (i.e. deleted files).
        if not os.path.lexists(os.path.join(self.tmppath, name)):
            raise IOError

        return self._getfile(name, rev)

    def getchanges(self, rev):
        """Return (sorted changed files, copies) for 'rev', updating the
        checked-out temporary tree as a side effect."""
        self._update(rev)
        changes = []
        copies = {}

        for f in self.changes[rev].add_files:
            changes.append((f, rev))

        for f in self.changes[rev].mod_files:
            changes.append((f, rev))

        for f in self.changes[rev].del_files:
            changes.append((f, rev))

        for src in self.changes[rev].ren_files:
            to = self.changes[rev].ren_files[src]
            changes.append((src, rev))
            changes.append((to, rev))
            copies[to] = src

        for src in self.changes[rev].ren_dirs:
            to = self.changes[rev].ren_dirs[src]
            chgs, cps = self._rendirchanges(src, to)
            changes += [(f, rev) for f in chgs]
            copies.update(cps)

        self.lastrev = rev
        return sorted(set(changes)), copies

    def getcommit(self, rev):
        changes = self.changes[rev]
        return commit(author=changes.author, date=changes.date,
                      desc=changes.summary, parents=self.parents[rev], rev=rev)

    def gettags(self):
        return self.tags

    def _execute(self, cmd, *args, **kwargs):
        # Run baz/tla via the shell with output discarded; returns the
        # os.system() exit status.
        cmdline = [self.execmd, cmd]
        cmdline += args
        cmdline = [util.shellquote(arg) for arg in cmdline]
        cmdline += ['>', util.nulldev, '2>', util.nulldev]
        cmdline = util.quotecommand(' '.join(cmdline))
        self.ui.debug(cmdline, '\n')
        return os.system(cmdline)

    def _update(self, rev):
        self.ui.debug('applying revision %s...\n' % rev)
        changeset, status = self.runlines('replay', '-d', self.tmppath,
                                          rev)
        if status:
            # Something went wrong while merging (baz or tla
            # issue?), get latest revision and try from there
            shutil.rmtree(self.tmppath, ignore_errors=True)
            self._obtainrevision(rev)
        else:
            old_rev = self.parents[rev][0]
            self.ui.debug('computing changeset between %s and %s...\n'
                          % (old_rev, rev))
            self._parsechangeset(changeset, rev)

    def _getfile(self, name, rev):
        """Return (data, mode) for a file in the checked-out tree."""
        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
        if stat.S_ISLNK(mode):
            data = os.readlink(os.path.join(self.tmppath, name))
            mode = mode and 'l' or ''
        else:
            data = open(os.path.join(self.tmppath, name), 'rb').read()
            # 0o111: any executable bit set -> 'x' flag
            mode = (mode & 0o111) and 'x' or ''
        return data, mode

    def _exclude(self, name):
        # Internal GNU Arch control files that must not be converted.
        exclude = ['{arch}', '.arch-ids', '.arch-inventory']
        for exc in exclude:
            if name.find(exc) != -1:
                return True
        return False

    def _readcontents(self, path):
        files = []
        contents = os.listdir(path)
        while len(contents) > 0:
            c = contents.pop()
            p = os.path.join(path, c)
            # os.walk could be used, but here we avoid internal GNU
            # Arch files and directories, thus saving a lot time.
            if not self._exclude(p):
                if os.path.isdir(p):
                    contents += [os.path.join(c, f) for f in os.listdir(p)]
                else:
                    files.append(c)
        return files

    def _rendirchanges(self, src, dest):
        """Expand a directory rename into per-file changes and copies."""
        changes = []
        copies = {}
        files = self._readcontents(os.path.join(self.tmppath, dest))
        for f in files:
            s = os.path.join(src, f)
            d = os.path.join(dest, f)
            changes.append(s)
            changes.append(d)
            copies[d] = s
        return changes, copies

    def _obtainrevision(self, rev):
        self.ui.debug('obtaining revision %s...\n' % rev)
        output = self._execute('get', rev, self.tmppath)
        self.checkexit(output)
        self.ui.debug('analyzing revision %s...\n' % rev)
        files = self._readcontents(self.tmppath)
        self.changes[rev].add_files += files

    def _stripbasepath(self, path):
        if path.startswith('./'):
            return path[2:]
        return path

    def _parsecatlog(self, data, rev):
        """Extract author/date/summary (and continuation-of) from the
        RFC822-style cat-log output into self.changes[rev]."""
        try:
            catlog = self.catlogparser.parsestr(data)

            # Commit date
            self.changes[rev].date = util.datestr(
                util.strdate(catlog['Standard-date'],
                             '%Y-%m-%d %H:%M:%S'))

            # Commit author
            self.changes[rev].author = self.recode(catlog['Creator'])

            # Commit description
            self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
                                                     catlog.get_payload()))
            self.changes[rev].summary = self.recode(self.changes[rev].summary)

            # Commit revision origin when dealing with a branch or tag
            if 'Continuation-of' in catlog:
                self.changes[rev].continuationof = self.recode(
                    catlog['Continuation-of'])
        except Exception:
            raise util.Abort(_('could not parse cat-log of %s') % rev)

    def _parsechangeset(self, data, rev):
        """Parse 'replay' output lines into the add/mod/del/rename sets
        of self.changes[rev]."""
        for l in data:
            l = l.strip()
            # Added file (ignore added directory)
            if l.startswith('A') and not l.startswith('A/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].add_files.append(file)
            # Deleted file (ignore deleted directory)
            elif l.startswith('D') and not l.startswith('D/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].del_files.append(file)
            # Modified binary file
            elif l.startswith('Mb'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified link
            elif l.startswith('M->'):
                file = self._stripbasepath(l[3:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified file
            elif l.startswith('M'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed file (or link)
            elif l.startswith('=>'):
                files = l[2:].strip().split(' ')
                if len(files) == 1:
                    files = l[2:].strip().split('\t')
                src = self._stripbasepath(files[0])
                dst = self._stripbasepath(files[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_files[src] = dst
            # Conversion from file to link or from link to file (modified)
            elif l.startswith('ch'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed directory
            elif l.startswith('/>'):
                dirs = l[2:].strip().split(' ')
                if len(dirs) == 1:
                    dirs = l[2:].strip().split('\t')
                src = self._stripbasepath(dirs[0])
                dst = self._stripbasepath(dirs[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_dirs[src] = dst
# hg.py - hg backend for convert extension
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

# Notes for hg->hg conversion:
#
# * Old versions of Mercurial didn't trim the whitespace from the ends
#   of commit messages, but new versions do.  Changesets created by
#   those older versions, then converted, may thus have different
#   hashes for changesets that are otherwise identical.
#
# * Using "--config convert.hg.saverev=true" will make the source
#   identifier to be stored in the converted revision.  This will cause
#   the converted revision to have a different identity than the
#   source.


import os, time, cStringIO
from mercurial.i18n import _
from mercurial.node import bin, hex, nullid
from mercurial import hg, util, context, error

from common import NoRepo, commit, converter_source, converter_sink

class mercurial_sink(converter_sink):
    """Conversion sink writing into a (possibly new) hg repository."""

    def __init__(self, ui, path):
        converter_sink.__init__(self, ui, path)
        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
        self.lastbranch = None
        if os.path.isdir(path) and len(os.listdir(path)) > 0:
            try:
                self.repo = hg.repository(self.ui, path)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
            except error.RepoError as err:
                ui.traceback()
                raise NoRepo(err.args[0])
        else:
            try:
                ui.status(_('initializing destination %s repository\n') % path)
                self.repo = hg.repository(self.ui, path, create=True)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
                self.created.append(path)
            except error.RepoError:
                ui.traceback()
                raise NoRepo(_("could not create hg repository %s as sink")
                             % path)
        self.lock = None
        self.wlock = None
        self.filemapmode = False

    def before(self):
        self.ui.debug('run hg sink pre-conversion action\n')
        self.wlock = self.repo.wlock()
        self.lock = self.repo.lock()

    def after(self):
        self.ui.debug('run hg sink post-conversion action\n')
        if self.lock:
            self.lock.release()
        if self.wlock:
            self.wlock.release()

    def revmapfile(self):
        return os.path.join(self.path, ".hg", "shamap")

    def authorfile(self):
        return os.path.join(self.path, ".hg", "authormap")

    def getheads(self):
        h = self.repo.changelog.heads()
        return [hex(x) for x in h]

    def setbranch(self, branch, pbranches):
        """With hg.clonebranches, switch self.repo to the per-branch
        clone for 'branch', creating/pulling as needed."""
        if not self.clonebranches:
            return

        setbranch = (branch != self.lastbranch)
        self.lastbranch = branch
        if not branch:
            branch = 'default'
        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
        pbranch = pbranches and pbranches[0][1] or 'default'

        branchpath = os.path.join(self.path, branch)
        if setbranch:
            self.after()
            try:
                self.repo = hg.repository(self.ui, branchpath)
            except error.RepoError:
                # No clone for this branch yet; create it.
                self.repo = hg.repository(self.ui, branchpath, create=True)
            self.before()

        # pbranches may bring revisions from other branches (merge parents)
        # Make sure we have them, or pull them.
        missings = {}
        for b in pbranches:
            try:
                self.repo.lookup(b[0])
            except Exception:
                # Unknown revision in this clone; remember where to pull
                # it from.  (Deliberately broad: lookup failures come in
                # several exception types.)
                missings.setdefault(b[1], []).append(b[0])

        if missings:
            self.after()
            for pbranch, heads in missings.iteritems():
                pbranchpath = os.path.join(self.path, pbranch)
                prepo = hg.repository(self.ui, pbranchpath)
                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
                self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
            self.before()

    def _rewritetags(self, source, revmap, data):
        """Rewrite a source .hgtags file, mapping source revisions to
        their converted identifiers; unknown revisions are dropped."""
        fp = cStringIO.StringIO()
        for line in data.splitlines():
            s = line.split(' ', 1)
            if len(s) != 2:
                continue
            revid = revmap.get(source.lookuprev(s[0]))
            if not revid:
                continue
            fp.write('%s %s\n' % (revid, s[1]))
        return fp.getvalue()

    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Create one (or, for octopus merges, several) changesets from
        a source commit; return the hex node of the result."""

        files = dict(files)
        def getfilectx(repo, memctx, f):
            v = files[f]
            data, mode = source.getfile(f, v)
            if f == '.hgtags':
                data = self._rewritetags(source, revmap, data)
            return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
                                      copies.get(f))

        # Deduplicate parents while preserving order.
        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            m1node = self.repo.changelog.read(bin(parents[0]))[0]
            parent = parents[0]

        # Pad to at least two parents so the merge loop below always has
        # a p2 to pop.
        if len(parents) < 2:
            parents.append(nullid)
        if len(parents) < 2:
            parents.append(nullid)
        p2 = parents.pop(0)

        text = commit.desc
        extra = commit.extra.copy()
        if self.branchnames and commit.branch:
            extra['branch'] = commit.branch
        if commit.rev:
            extra['convert_revision'] = commit.rev

        # More than two parents (octopus merge) is turned into a chain of
        # two-parent merge commits.
        while parents:
            p1 = p2
            p2 = parents.pop(0)
            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
                                 getfilectx, commit.author, commit.date, extra)
            self.repo.commitctx(ctx)
            text = "(octopus merge fixup)\n"
            p2 = hex(self.repo.changelog.tip())

        if self.filemapmode and nparents == 1:
            man = self.repo.manifest
            mnode = self.repo.changelog.read(bin(p2))[0]
            closed = 'close' in commit.extra
            if not closed and not man.cmp(m1node, man.revision(mnode)):
                self.ui.status(_("filtering out empty revision\n"))
                self.repo.rollback()
                return parent
        return p2

    def puttags(self, tags):
        """Commit an updated .hgtags on the configured tags branch;
        return (tip hex, parent hex) or (None, None) when unchanged."""
        try:
            parentctx = self.repo[self.tagsbranch]
            tagparent = parentctx.node()
        except error.RepoError:
            parentctx = None
            tagparent = nullid

        try:
            oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
        except Exception:
            # No previous .hgtags (parentctx may be None, or the file may
            # be missing from the manifest).
            oldlines = []

        newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
        if newlines == oldlines:
            return None, None
        data = "".join(newlines)
        def getfilectx(repo, memctx, f):
            return context.memfilectx(f, data, False, False, None)

        self.ui.status(_("updating tags\n"))
        date = "%s 0" % int(time.mktime(time.gmtime()))
        extra = {'branch': self.tagsbranch}
        ctx = context.memctx(self.repo, (tagparent, None), "update tags",
                             [".hgtags"], getfilectx, "convert-repo", date,
                             extra)
        self.repo.commitctx(ctx)
        return hex(self.repo.changelog.tip()), hex(tagparent)

    def setfilemapmode(self, active):
        self.filemapmode = active

class mercurial_source(converter_source):
    """Conversion source reading from an existing hg repository."""

    def __init__(self, ui, path, rev=None):
        converter_source.__init__(self, ui, path, rev)
        self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
        self.ignored = set()
        self.saverev = ui.configbool('convert', 'hg.saverev', False)
        try:
            self.repo = hg.repository(self.ui, path)
            # try to provoke an exception if this isn't really a hg
            # repo, but some other bogus compatible-looking url
            if not self.repo.local():
                raise error.RepoError()
        except error.RepoError:
            ui.traceback()
            raise NoRepo(_("%s is not a local Mercurial repository") % path)
        self.lastrev = None
        self.lastctx = None
        self._changescache = None
        self.convertfp = None
        # Restrict converted revisions to startrev descendants
        startnode = ui.config('convert', 'hg.startrev')
        if startnode is not None:
            try:
                startnode = self.repo.lookup(startnode)
            except error.RepoError:
                raise util.Abort(_('%s is not a valid start revision')
                                 % startnode)
            startrev = self.repo.changelog.rev(startnode)
            children = {startnode: 1}
            for rev in self.repo.changelog.descendants(startrev):
                children[self.repo.changelog.node(rev)] = 1
            self.keep = children.__contains__
        else:
            self.keep = util.always

    def changectx(self, rev):
        # One-entry cache: conversion walks revisions sequentially.
        if self.lastrev != rev:
            self.lastctx = self.repo[rev]
            self.lastrev = rev
        return self.lastctx

    def parents(self, ctx):
        return [p for p in ctx.parents() if p and self.keep(p.node())]

    def getheads(self):
        if self.rev:
            heads = [self.repo[self.rev].node()]
        else:
            heads = self.repo.heads()
        return [hex(h) for h in heads if self.keep(h)]

    def getfile(self, name, rev):
        try:
            fctx = self.changectx(rev)[name]
            return fctx.data(), fctx.flags()
        except error.LookupError as err:
            raise IOError(err)

    def getchanges(self, rev):
        """Return (sorted [(file, rev)], copies) for 'rev'."""
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents:
            files = sorted(ctx.manifest())
            if self.ignoreerrors:
                # calling getcopies() is a simple way to detect missing
                # revlogs and populate self.ignored
                self.getcopies(ctx, parents, files)
            return [(f, rev) for f in files if f not in self.ignored], {}
        if self._changescache and self._changescache[0] == rev:
            m, a, r = self._changescache[1]
        else:
            m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
        # getcopies() detects missing revlogs early, run it before
        # filtering the changes.
        copies = self.getcopies(ctx, parents, m + a)
        changes = [(name, rev) for name in m + a + r
                   if name not in self.ignored]
        return sorted(changes), copies

    def getcopies(self, ctx, parents, files):
        """Return {dest: source} copies for 'files', honoring
        self.ignored and hg.ignoreerrors."""
        copies = {}
        for name in files:
            if name in self.ignored:
                continue
            try:
                copysource, copynode = ctx.filectx(name).renamed()
                if copysource in self.ignored or not self.keep(copynode):
                    continue
                # Ignore copy sources not in parent revisions
                found = False
                for p in parents:
                    if copysource in p:
                        found = True
                        break
                if not found:
                    continue
                copies[name] = copysource
            except TypeError:
                # renamed() returned None: not a copy.
                pass
            except error.LookupError as e:
                if not self.ignoreerrors:
                    raise
                self.ignored.add(name)
                self.ui.warn(_('ignoring: %s\n') % e)
        return copies

    def getcommit(self, rev):
        ctx = self.changectx(rev)
        parents = [p.hex() for p in self.parents(ctx)]
        if self.saverev:
            crev = rev
        else:
            crev = None
        return commit(author=ctx.user(), date=util.datestr(ctx.date()),
                      desc=ctx.description(), rev=crev, parents=parents,
                      branch=ctx.branch(), extra=ctx.extra(),
                      sortkey=ctx.rev())

    def gettags(self):
        tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
        return dict([(name, hex(node)) for name, node in tags
                     if self.keep(node)])

    def getchangedfiles(self, rev, i):
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents and i is None:
            i = 0
            changes = [], ctx.manifest().keys(), []
        else:
            i = i or 0
            changes = self.repo.status(parents[i].node(), ctx.node())[:3]
        changes = [[f for f in l if f not in self.ignored] for l in changes]

        if i == 0:
            self._changescache = (rev, changes)

        return changes[0] + changes[1] + changes[2]

    def converted(self, rev, destrev):
        # Record the source->dest mapping for incremental conversion.
        if self.convertfp is None:
            self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
                                  'a')
        self.convertfp.write('%s %s\n' % (destrev, rev))
        self.convertfp.flush()

    def before(self):
        self.ui.debug('run hg source pre-conversion action\n')

    def after(self):
        self.ui.debug('run hg source post-conversion action\n')

    def hasnativeorder(self):
        return True

    def lookuprev(self, rev):
        try:
            return hex(self.repo.lookup(rev))
        except error.RepoError:
            return None
# monotone.py - monotone support for the convert extension
#
# Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
# others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import os, re
from mercurial import util
from common import NoRepo, commit, converter_source, checktool
from common import commandline
from mercurial.i18n import _

class monotone_source(converter_source, commandline):
    """Conversion source reading monotone workspaces or database files
    via 'mtn automate'."""

    def __init__(self, ui, path=None, rev=None):
        converter_source.__init__(self, ui, path, rev)
        commandline.__init__(self, ui, 'mtn')

        self.ui = ui
        self.path = path

        norepo = NoRepo(_("%s does not look like a monotone repository")
                        % path)
        if not os.path.exists(os.path.join(path, '_MTN')):
            # Could be a monotone repository (SQLite db file); check the
            # SQLite magic header.
            try:
                fp = open(path, 'rb')
                try:
                    header = fp.read(16)
                finally:
                    fp.close()
            except IOError:
                header = ''
            if header != 'SQLite format 3\x00':
                raise norepo

        # regular expressions for parsing monotone output
        space = r'\s*'
        name = r'\s+"((?:\\"|[^"])*)"\s*'
        value = name
        revision = r'\s+\[(\w+)\]\s*'
        lines = r'(?:.|\n)+'

        self.dir_re = re.compile(space + "dir" + name)
        self.file_re = re.compile(space + "file" + name +
                                  "content" + revision)
        self.add_file_re = re.compile(space + "add_file" + name +
                                      "content" + revision)
        self.patch_re = re.compile(space + "patch" + name +
                                   "from" + revision + "to" + revision)
        self.rename_re = re.compile(space + "rename" + name + "to" + name)
        self.delete_re = re.compile(space + "delete" + name)
        self.tag_re = re.compile(space + "tag" + name + "revision" +
                                 revision)
        self.cert_re = re.compile(lines + space + "name" + name +
                                  "value" + value)

        attr = space + "file" + lines + space + "attr" + space
        self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
                                          space + '"true"')

        # cached data
        self.manifest_rev = None
        self.manifest = None
        self.files = None
        self.dirs = None

        checktool('mtn', abort=False)

        # test if there are any revisions
        self.rev = None
        try:
            self.getheads()
        except Exception:
            # Any failure talking to mtn means this isn't usable as a
            # monotone repository.
            raise norepo
        self.rev = rev

    def mtnrun(self, *args, **kwargs):
        kwargs['d'] = self.path
        return self.run0('automate', *args, **kwargs)

    def mtnloadmanifest(self, rev):
        """Cache the manifest of 'rev' into self.files/self.dirs."""
        if self.manifest_rev == rev:
            return
        self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
        self.manifest_rev = rev
        self.files = {}
        self.dirs = {}

        for e in self.manifest:
            m = self.file_re.match(e)
            if m:
                attr = ""
                name = m.group(1)
                node = m.group(2)
                if self.attr_execute_re.match(e):
                    attr += "x"
                self.files[name] = (node, attr)
            m = self.dir_re.match(e)
            if m:
                self.dirs[m.group(1)] = True

    def mtnisfile(self, name, rev):
        # a non-file could be a directory or a deleted or renamed file
        self.mtnloadmanifest(rev)
        return name in self.files

    def mtnisdir(self, name, rev):
        self.mtnloadmanifest(rev)
        return name in self.dirs

    def mtngetcerts(self, rev):
        """Return the certs (author/date/changelog/branch) for 'rev'."""
        certs = {"author":"<missing>", "date":"<missing>",
            "changelog":"<missing>", "branch":"<missing>"}
        certlist = self.mtnrun("certs", rev)
        # mtn < 0.45:
        #   key "test@selenic.com"
        # mtn >= 0.45:
        #   key [ff58a7ffb771907c4ff68995eada1c4da068d328]
        certlist = re.split('\n\n key ["\[]', certlist)
        for e in certlist:
            m = self.cert_re.match(e)
            if m:
                name, value = m.groups()
                value = value.replace(r'\"', '"')
                value = value.replace(r'\\', '\\')
                certs[name] = value
        # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
        # and all times are stored in UTC
        certs["date"] = certs["date"].split('.')[0] + " UTC"
        return certs

    # implement the converter_source interface:

    def getheads(self):
        if not self.rev:
            return self.mtnrun("leaves").splitlines()
        else:
            return [self.rev]

    def getchanges(self, rev):
        """Return ([(file, rev)], copies) for 'rev', expanding directory
        renames into per-file copies."""
        #revision = self.mtncmd("get_revision %s" % rev).split("\n\n")
        revision = self.mtnrun("get_revision", rev).split("\n\n")
        files = {}
        ignoremove = {}
        renameddirs = []
        copies = {}
        for e in revision:
            m = self.add_file_re.match(e)
            if m:
                files[m.group(1)] = rev
                ignoremove[m.group(1)] = rev
            m = self.patch_re.match(e)
            if m:
                files[m.group(1)] = rev
            # Delete/rename is handled later when the convert engine
            # discovers an IOError exception from getfile,
            # but only if we add the "from" file to the list of changes.
            m = self.delete_re.match(e)
            if m:
                files[m.group(1)] = rev
            m = self.rename_re.match(e)
            if m:
                toname = m.group(2)
                fromname = m.group(1)
                if self.mtnisfile(toname, rev):
                    ignoremove[toname] = 1
                    copies[toname] = fromname
                    files[toname] = rev
                    files[fromname] = rev
                elif self.mtnisdir(toname, rev):
                    renameddirs.append((fromname, toname))

        # Directory renames can be handled only once we have recorded
        # all new files
        for fromdir, todir in renameddirs:
            renamed = {}
            for tofile in self.files:
                if tofile in ignoremove:
                    continue
                if tofile.startswith(todir + '/'):
                    renamed[tofile] = fromdir + tofile[len(todir):]
                    # Avoid chained moves like:
                    #   d1(/a) => d3/d1(/a)
                    #   d2 => d3
                    ignoremove[tofile] = 1
            for tofile, fromfile in renamed.items():
                self.ui.debug(_("copying file in renamed directory "
                                "from '%s' to '%s'")
                              % (fromfile, tofile), '\n')
                files[tofile] = rev
                copies[tofile] = fromfile
            for fromfile in renamed.values():
                files[fromfile] = rev

        return (files.items(), copies)

    def getfile(self, name, rev):
        if not self.mtnisfile(name, rev):
            raise IOError() # file was deleted or renamed
        try:
            data = self.mtnrun("get_file_of", name, r=rev)
        except Exception:
            raise IOError() # file was deleted or renamed
        self.mtnloadmanifest(rev)
        node, attr = self.files.get(name, (None, ""))
        return data, attr

    def getcommit(self, rev):
        certs = self.mtngetcerts(rev)
        return commit(
            author=certs["author"],
            date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
            desc=certs["changelog"],
            rev=rev,
            parents=self.mtnrun("parents", rev).splitlines(),
            branch=certs["branch"])

    def gettags(self):
        tags = {}
        for e in self.mtnrun("tags").split("\n\n"):
            m = self.tag_re.match(e)
            if m:
                tags[m.group(1)] = m.group(2)
        return tags

    def getchangedfiles(self, rev, i):
        # This function is only needed to support --filemap
        # ... and we don't support that
        raise NotImplementedError()
def loaditer(f):
    """Yield each dictionary object that p4 -G wrote to stream *f*.

    Iteration stops at the first empty (falsy) record or when the
    stream is exhausted (EOFError from marshal).
    """
    try:
        while True:
            record = marshal.load(f)
            if not record:
                break
            yield record
    except EOFError:
        pass
def _parse_view(self, path):
    """Record in self.p4changes every submitted change affecting *path*."""
    cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
    stdout = util.popen(cmd, mode='rb')
    for d in loaditer(stdout):
        c = d.get("change", None)
        if c:
            self.p4changes[c] = True

def _parse(self, ui, path):
    """Prepare the list of P4 filenames and revisions to import.

    Fills self.p4changes, self.changeset, self.files, self.depotname
    and self.heads by querying "p4 changes"/"p4 client"/"p4 describe".
    """
    ui.status(_('reading p4 views\n'))

    # read client spec or view
    if "/" in path:
        self._parse_view(path)
        if path.startswith("//") and path.endswith("/..."):
            views = {path[:-3]:""}
        else:
            views = {"//": ""}
    else:
        cmd = 'p4 -G client -o %s' % util.shellquote(path)
        clientspec = marshal.load(util.popen(cmd, mode='rb'))

        views = {}
        for client in clientspec:
            if client.startswith("View"):
                sview, cview = clientspec[client].split()
                self._parse_view(sview)
                if sview.endswith("...") and cview.endswith("..."):
                    sview = sview[:-3]
                    cview = cview[:-3]
                cview = cview[2:]
                cview = cview[cview.find("/") + 1:]
                views[sview] = cview

    # list of changes that affect our source files
    # (sorted() instead of keys()+sort(): equivalent and py3-safe)
    self.p4changes = sorted(self.p4changes, key=int)

    # list with depot pathnames, longest first
    vieworder = sorted(views, key=len, reverse=True)

    # handle revision limiting
    startrev = self.ui.config('convert', 'p4.startrev', default=0)
    self.p4changes = [x for x in self.p4changes
                      if ((not startrev or int(x) >= int(startrev)) and
                          (not self.rev or int(x) <= int(self.rev)))]

    # now read the full changelists to get the list of file revisions
    ui.status(_('collecting p4 changelists\n'))
    lastid = None
    for change in self.p4changes:
        cmd = "p4 -G describe -s %s" % change
        stdout = util.popen(cmd, mode='rb')
        d = marshal.load(stdout)
        desc = self.recode(d["desc"])
        shortdesc = desc.split("\n", 1)[0]
        t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
        ui.status(util.ellipsis(t, 80) + '\n')

        if lastid:
            parents = [lastid]
        else:
            parents = []

        date = (int(d["time"]), 0)     # timezone not set
        c = commit(author=self.recode(d["user"]), date=util.datestr(date),
                   parents=parents, desc=desc, branch='',
                   extra={"p4": change})

        files = []
        i = 0
        while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
            oldname = d["depotFile%d" % i]
            filename = None
            for v in vieworder:
                if oldname.startswith(v):
                    filename = views[v] + oldname[len(v):]
                    break
            if filename:
                files.append((filename, d["rev%d" % i]))
                self.depotname[filename] = oldname
            i += 1
        self.changeset[change] = c
        self.files[change] = files
        lastid = change

    if lastid:
        self.heads = [lastid]

def getheads(self):
    """Return the head changelists discovered by _parse()."""
    return self.heads

def getfile(self, name, rev):
    """Return (contents, mode) of *name* at p4 revision *rev*.

    Raises IOError on a p4 error record or when no stat record was seen.
    """
    cmd = 'p4 -G print %s' \
        % util.shellquote("%s#%s" % (self.depotname[name], rev))
    stdout = util.popen(cmd, mode='rb')

    mode = None
    # collect chunks and join once at the end: the original
    # "contents += data" is quadratic on large files
    chunks = []
    keywords = None

    for d in loaditer(stdout):
        code = d["code"]
        data = d.get("data")

        if code == "error":
            raise IOError(d["generic"], data)

        elif code == "stat":
            p4type = self.re_type.match(d["type"])
            if p4type:
                mode = ""
                flags = (p4type.group(1) or "") + (p4type.group(3) or "")
                if "x" in flags:
                    mode = "x"
                if p4type.group(2) == "symlink":
                    mode = "l"
                if "ko" in flags:
                    keywords = self.re_keywords_old
                elif "k" in flags:
                    keywords = self.re_keywords

        elif code == "text" or code == "binary":
            chunks.append(data)

    if mode is None:
        raise IOError(0, "bad stat")

    contents = "".join(chunks)
    if keywords:
        # collapse expanded RCS-style keywords back to their bare form
        contents = keywords.sub("$\\1$", contents)
    if mode == "l" and contents.endswith("\n"):
        contents = contents[:-1]

    return contents, mode

def getchanges(self, rev):
    """Return (files, copies); p4 source records no copy information."""
    return self.files[rev], {}

def getcommit(self, rev):
    """Return the commit object prepared by _parse() for *rev*."""
    return self.changeset[rev]

def gettags(self):
    """Return tags (always empty for the p4 source)."""
    return self.tags

def getchangedfiles(self, rev, i):
    """Return the sorted filenames touched by changelist *rev*."""
    return sorted([x[0] for x in self.files[rev]])
class SvnPathNotFound(Exception):
    """Raised when a Subversion path cannot be found up to a revision."""
def optrev(number):
    """Wrap an integer revision *number* in an svn_opt_revision_t."""
    optrev = svn.core.svn_opt_revision_t()
    optrev.kind = svn.core.svn_opt_revision_number
    optrev.value.number = number
    return optrev

class changedpath(object):
    """Plain copy of an svn changed-path entry, detached from its pool."""
    def __init__(self, p):
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
        self.action = p.action

def get_log_child(fp, url, paths, start, end, limit=0,
                  discover_changed_paths=True, strict_node_history=False):
    """Stream svn log entries for *paths* as pickles onto *fp*, then exit.

    Runs in a child process (see debugsvnlog); a trailing pickled int is
    an svn error code, a trailing None marks normal or interrupted end.
    """
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                # detach from the svn memory pool before pickling
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    # PEP 3110 form of the old "except SubversionException, (inst, num)";
    # args are presumably (message, apr_err) — same unpacking as before
    except SubversionException as err:
        inst, num = err.args
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)

def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    util.set_binary(sys.stdin)
    util.set_binary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
+ """ + util.set_binary(sys.stdin) + util.set_binary(sys.stdout) + args = decodeargs(sys.stdin.read()) + get_log_child(sys.stdout, *args) + +class logstream(object): + """Interruptible revision log iterator.""" + def __init__(self, stdout): + self._stdout = stdout + + def __iter__(self): + while True: + try: + entry = pickle.load(self._stdout) + except EOFError: + raise util.Abort(_('Mercurial failed to run itself, check' + ' hg executable is in PATH')) + try: + orig_paths, revnum, author, date, message = entry + except: + if entry is None: + break + raise SubversionException("child raised exception", entry) + yield entry + + def close(self): + if self._stdout: + self._stdout.close() + self._stdout = None + + +# Check to see if the given path is a local Subversion repo. Verify this by +# looking for several svn-specific files and directories in the given +# directory. +def filecheck(ui, path, proto): + for x in ('locks', 'hooks', 'format', 'db'): + if not os.path.exists(os.path.join(path, x)): + return False + return True + +# Check to see if a given path is the root of an svn repo over http. We verify +# this by requesting a version-controlled URL we know can't exist and looking +# for the svn-specific "not found" XML. +def httpcheck(ui, path, proto): + try: + opener = urllib2.build_opener() + rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path)) + data = rsp.read() + except urllib2.HTTPError, inst: + if inst.code != 404: + # Except for 404 we cannot know for sure this is not an svn repo + ui.warn(_('svn: cannot probe remote repository, assume it could ' + 'be a subversion repository. Use --source-type if you ' + 'know better.\n')) + return True + data = inst.fp.read() + except: + # Could be urllib2.URLError if the URL is invalid or anything else. 
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }

def issvnurl(ui, url):
    """Return True if *url* plausibly names a Subversion repository.

    Walks the path upwards, probing each prefix with the checker
    registered for the URL scheme in protomap.
    """
    try:
        scheme, rest = url.split('://', 1)
        if scheme == 'file':
            rest = urllib.url2pathname(rest)
    except ValueError:
        # no "://" separator: treat the whole url as a local path
        scheme = 'file'
        rest = os.path.abspath(url)
    if scheme == 'file':
        rest = rest.replace(os.sep, '/')
    probe = protomap.get(scheme, lambda *args: False)
    while '/' in rest:
        if probe(ui, rest, scheme):
            return True
        rest = rest.rsplit('/', 1)[0]
    return False
+# +class svn_source(converter_source): + def __init__(self, ui, url, rev=None): + super(svn_source, self).__init__(ui, url, rev=rev) + + if not (url.startswith('svn://') or url.startswith('svn+ssh://') or + (os.path.exists(url) and + os.path.exists(os.path.join(url, '.svn'))) or + issvnurl(ui, url)): + raise NoRepo(_("%s does not look like a Subversion repository") + % url) + + try: + SubversionException + except NameError: + raise MissingTool(_('Subversion python bindings could not be loaded')) + + try: + version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR + if version < (1, 4): + raise MissingTool(_('Subversion python bindings %d.%d found, ' + '1.4 or later required') % version) + except AttributeError: + raise MissingTool(_('Subversion python bindings are too old, 1.4 ' + 'or later required')) + + self.lastrevs = {} + + latest = None + try: + # Support file://path@rev syntax. Useful e.g. to convert + # deleted branches. + at = url.rfind('@') + if at >= 0: + latest = int(url[at + 1:]) + url = url[:at] + except ValueError: + pass + self.url = geturl(url) + self.encoding = 'UTF-8' # Subversion is always nominal UTF-8 + try: + self.transport = transport.SvnRaTransport(url=self.url) + self.ra = self.transport.ra + self.ctx = self.transport.client + self.baseurl = svn.ra.get_repos_root(self.ra) + # Module is either empty or a repository path starting with + # a slash and not ending with a slash. 
+ self.module = urllib.unquote(self.url[len(self.baseurl):]) + self.prevmodule = None + self.rootmodule = self.module + self.commits = {} + self.paths = {} + self.uuid = svn.ra.get_uuid(self.ra) + except SubversionException: + ui.traceback() + raise NoRepo(_("%s does not look like a Subversion repository") + % self.url) + + if rev: + try: + latest = int(rev) + except ValueError: + raise util.Abort(_('svn: revision %s is not an integer') % rev) + + self.startrev = self.ui.config('convert', 'svn.startrev', default=0) + try: + self.startrev = int(self.startrev) + if self.startrev < 0: + self.startrev = 0 + except ValueError: + raise util.Abort(_('svn: start revision %s is not an integer') + % self.startrev) + + self.head = self.latest(self.module, latest) + if not self.head: + raise util.Abort(_('no revision found in module %s') + % self.module) + self.last_changed = self.revnum(self.head) + + self._changescache = None + + if os.path.exists(os.path.join(url, '.svn/entries')): + self.wc = url + else: + self.wc = None + self.convertfp = None + + def setrevmap(self, revmap): + lastrevs = {} + for revid in revmap.iterkeys(): + uuid, module, revnum = self.revsplit(revid) + lastrevnum = lastrevs.setdefault(module, revnum) + if revnum > lastrevnum: + lastrevs[module] = revnum + self.lastrevs = lastrevs + + def exists(self, path, optrev): + try: + svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path), + optrev, False, self.ctx) + return True + except SubversionException: + return False + + def getheads(self): + + def isdir(path, revnum): + kind = self._checkpath(path, revnum) + return kind == svn.core.svn_node_dir + + def getcfgpath(name, rev): + cfgpath = self.ui.config('convert', 'svn.' 
+ name) + if cfgpath is not None and cfgpath.strip() == '': + return None + path = (cfgpath or name).strip('/') + if not self.exists(path, rev): + if cfgpath: + raise util.Abort(_('expected %s to be at %r, but not found') + % (name, path)) + return None + self.ui.note(_('found %s at %r\n') % (name, path)) + return path + + rev = optrev(self.last_changed) + oldmodule = '' + trunk = getcfgpath('trunk', rev) + self.tags = getcfgpath('tags', rev) + branches = getcfgpath('branches', rev) + + # If the project has a trunk or branches, we will extract heads + # from them. We keep the project root otherwise. + if trunk: + oldmodule = self.module or '' + self.module += '/' + trunk + self.head = self.latest(self.module, self.last_changed) + if not self.head: + raise util.Abort(_('no revision found in module %s') + % self.module) + + # First head in the list is the module's head + self.heads = [self.head] + if self.tags is not None: + self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags')) + + # Check if branches bring a few more heads to the list + if branches: + rpath = self.url.strip('/') + branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches), + rev, False, self.ctx) + for branch in branchnames.keys(): + module = '%s/%s/%s' % (oldmodule, branches, branch) + if not isdir(module, self.last_changed): + continue + brevid = self.latest(module, self.last_changed) + if not brevid: + self.ui.note(_('ignoring empty branch %s\n') % branch) + continue + self.ui.note(_('found branch %s at %d\n') % + (branch, self.revnum(brevid))) + self.heads.append(brevid) + + if self.startrev and self.heads: + if len(self.heads) > 1: + raise util.Abort(_('svn: start revision is not supported ' + 'with more than one branch')) + revnum = self.revnum(self.heads[0]) + if revnum < self.startrev: + raise util.Abort( + _('svn: no revision found after start revision %d') + % self.startrev) + + return self.heads + + def getchanges(self, rev): + if self._changescache and self._changescache[0] == 
rev: + return self._changescache[1] + self._changescache = None + (paths, parents) = self.paths[rev] + if parents: + files, self.removed, copies = self.expandpaths(rev, paths, parents) + else: + # Perform a full checkout on roots + uuid, module, revnum = self.revsplit(rev) + entries = svn.client.ls(self.baseurl + urllib.quote(module), + optrev(revnum), True, self.ctx) + files = [n for n, e in entries.iteritems() + if e.kind == svn.core.svn_node_file] + copies = {} + self.removed = set() + + files.sort() + files = zip(files, [rev] * len(files)) + + # caller caches the result, so free it here to release memory + del self.paths[rev] + return (files, copies) + + def getchangedfiles(self, rev, i): + changes = self.getchanges(rev) + self._changescache = (rev, changes) + return [f[0] for f in changes[0]] + + def getcommit(self, rev): + if rev not in self.commits: + uuid, module, revnum = self.revsplit(rev) + self.module = module + self.reparent(module) + # We assume that: + # - requests for revisions after "stop" come from the + # revision graph backward traversal. Cache all of them + # down to stop, they will be used eventually. + # - requests for revisions before "stop" come to get + # isolated branches parents. Just fetch what is needed. + stop = self.lastrevs.get(module, 0) + if revnum < stop: + stop = revnum + 1 + self._fetch_revisions(revnum, stop) + commit = self.commits[rev] + # caller caches the result, so free it here to release memory + del self.commits[rev] + return commit + + def gettags(self): + tags = {} + if self.tags is None: + return tags + + # svn tags are just a convention, project branches left in a + # 'tags' directory. There is no other relationship than + # ancestry, which is expensive to discover and makes them hard + # to update incrementally. Worse, past revisions may be + # referenced by tags far away in the future, requiring a deep + # history traversal on every calculation. 
Current code + # performs a single backward traversal, tracking moves within + # the tags directory (tag renaming) and recording a new tag + # everytime a project is copied from outside the tags + # directory. It also lists deleted tags, this behaviour may + # change in the future. + pendings = [] + tagspath = self.tags + start = svn.ra.get_latest_revnum(self.ra) + stream = self._getlog([self.tags], start, self.startrev) + try: + for entry in stream: + origpaths, revnum, author, date, message = entry + copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e + in origpaths.iteritems() if e.copyfrom_path] + # Apply moves/copies from more specific to general + copies.sort(reverse=True) + + srctagspath = tagspath + if copies and copies[-1][2] == tagspath: + # Track tags directory moves + srctagspath = copies.pop()[0] + + for source, sourcerev, dest in copies: + if not dest.startswith(tagspath + '/'): + continue + for tag in pendings: + if tag[0].startswith(dest): + tagpath = source + tag[0][len(dest):] + tag[:2] = [tagpath, sourcerev] + break + else: + pendings.append([source, sourcerev, dest]) + + # Filter out tags with children coming from different + # parts of the repository like: + # /tags/tag.1 (from /trunk:10) + # /tags/tag.1/foo (from /branches/foo:12) + # Here/tags/tag.1 discarded as well as its children. + # It happens with tools like cvs2svn. Such tags cannot + # be represented in mercurial. 
def converted(self, rev, destrev):
    """Append the rev -> destrev mapping to the working copy's hg-shamap."""
    if not self.wc:
        return
    if self.convertfp is None:
        self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
                              'a')
    self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
    self.convertfp.flush()

def revid(self, revnum, module=None):
    """Build the 'svn:<uuid><module>@<revnum>' revision identifier."""
    return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)

def revnum(self, rev):
    """Extract the numeric revision from a revid string."""
    return int(rev.split('@')[-1])

def revsplit(self, rev):
    """Split a revid into its (uuid, module, revnum) components."""
    head, tail = rev.rsplit('@', 1)
    number = int(tail)
    pieces = head.split('/', 1)
    # strip the leading "svn:" prefix from the uuid part
    uuid = pieces.pop(0)[4:]
    module = '/' + pieces[0] if pieces else ''
    return uuid, module, number
It may return + a revision in a different module, since a branch may be moved without + a change being reported. Return None if computed module does not + belong to rootmodule subtree. + """ + if not path.startswith(self.rootmodule): + # Requests on foreign branches may be forbidden at server level + self.ui.debug('ignoring foreign branch %r\n' % path) + return None + + if not stop: + stop = svn.ra.get_latest_revnum(self.ra) + try: + prevmodule = self.reparent('') + dirent = svn.ra.stat(self.ra, path.strip('/'), stop) + self.reparent(prevmodule) + except SubversionException: + dirent = None + if not dirent: + raise SvnPathNotFound(_('%s not found up to revision %d') + % (path, stop)) + + # stat() gives us the previous revision on this line of + # development, but it might be in *another module*. Fetch the + # log and detect renames down to the latest revision. + stream = self._getlog([path], stop, dirent.created_rev) + try: + for entry in stream: + paths, revnum, author, date, message = entry + if revnum <= dirent.created_rev: + break + + for p in paths: + if not path.startswith(p) or not paths[p].copyfrom_path: + continue + newpath = paths[p].copyfrom_path + path[len(p):] + self.ui.debug("branch renamed from %s to %s at %d\n" % + (path, newpath, revnum)) + path = newpath + break + finally: + stream.close() + + if not path.startswith(self.rootmodule): + self.ui.debug('ignoring foreign branch %r\n' % path) + return None + return self.revid(dirent.created_rev, path) + + def reparent(self, module): + """Reparent the svn transport and return the previous parent.""" + if self.prevmodule == module: + return module + svnurl = self.baseurl + urllib.quote(module) + prevmodule = self.prevmodule + if prevmodule is None: + prevmodule = '' + self.ui.debug("reparent to %s\n" % svnurl) + svn.ra.reparent(self.ra, svnurl) + self.prevmodule = module + return prevmodule + + def expandpaths(self, rev, paths, parents): + changed, removed = set(), set() + copies = {} + + new_module, 
revnum = self.revsplit(rev)[1:] + if new_module != self.module: + self.module = new_module + self.reparent(self.module) + + for i, (path, ent) in enumerate(paths): + self.ui.progress(_('scanning paths'), i, item=path, + total=len(paths)) + entrypath = self.getrelpath(path) + + kind = self._checkpath(entrypath, revnum) + if kind == svn.core.svn_node_file: + changed.add(self.recode(entrypath)) + if not ent.copyfrom_path or not parents: + continue + # Copy sources not in parent revisions cannot be + # represented, ignore their origin for now + pmodule, prevnum = self.revsplit(parents[0])[1:] + if ent.copyfrom_rev < prevnum: + continue + copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule) + if not copyfrom_path: + continue + self.ui.debug("copied to %s from %s@%s\n" % + (entrypath, copyfrom_path, ent.copyfrom_rev)) + copies[self.recode(entrypath)] = self.recode(copyfrom_path) + elif kind == 0: # gone, but had better be a deleted *file* + self.ui.debug("gone from %s\n" % ent.copyfrom_rev) + pmodule, prevnum = self.revsplit(parents[0])[1:] + parentpath = pmodule + "/" + entrypath + fromkind = self._checkpath(entrypath, prevnum, pmodule) + + if fromkind == svn.core.svn_node_file: + removed.add(self.recode(entrypath)) + elif fromkind == svn.core.svn_node_dir: + oroot = parentpath.strip('/') + nroot = path.strip('/') + children = self._iterfiles(oroot, prevnum) + for childpath in children: + childpath = childpath.replace(oroot, nroot) + childpath = self.getrelpath("/" + childpath, pmodule) + if childpath: + removed.add(self.recode(childpath)) + else: + self.ui.debug('unknown path in revision %d: %s\n' % \ + (revnum, path)) + elif kind == svn.core.svn_node_dir: + if ent.action == 'M': + # If the directory just had a prop change, + # then we shouldn't need to look for its children. 
+ continue + if ent.action == 'R' and parents: + # If a directory is replacing a file, mark the previous + # file as deleted + pmodule, prevnum = self.revsplit(parents[0])[1:] + pkind = self._checkpath(entrypath, prevnum, pmodule) + if pkind == svn.core.svn_node_file: + removed.add(self.recode(entrypath)) + elif pkind == svn.core.svn_node_dir: + # We do not know what files were kept or removed, + # mark them all as changed. + for childpath in self._iterfiles(pmodule, prevnum): + childpath = self.getrelpath("/" + childpath) + if childpath: + changed.add(self.recode(childpath)) + + for childpath in self._iterfiles(path, revnum): + childpath = self.getrelpath("/" + childpath) + if childpath: + changed.add(self.recode(childpath)) + + # Handle directory copies + if not ent.copyfrom_path or not parents: + continue + # Copy sources not in parent revisions cannot be + # represented, ignore their origin for now + pmodule, prevnum = self.revsplit(parents[0])[1:] + if ent.copyfrom_rev < prevnum: + continue + copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule) + if not copyfrompath: + continue + self.ui.debug("mark %s came from %s:%d\n" + % (path, copyfrompath, ent.copyfrom_rev)) + children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev) + for childpath in children: + childpath = self.getrelpath("/" + childpath, pmodule) + if not childpath: + continue + copytopath = path + childpath[len(copyfrompath):] + copytopath = self.getrelpath(copytopath) + copies[self.recode(copytopath)] = self.recode(childpath) + + self.ui.progress(_('scanning paths'), None) + changed.update(removed) + return (list(changed), removed, copies) + + def _fetch_revisions(self, from_revnum, to_revnum): + if from_revnum < to_revnum: + from_revnum, to_revnum = to_revnum, from_revnum + + self.child_cset = None + + def parselogentry(orig_paths, revnum, author, date, message): + """Return the parsed commit object or None, and True if + the revision is a branch root. 
+ """ + self.ui.debug("parsing revision %d (%d changes)\n" % + (revnum, len(orig_paths))) + + branched = False + rev = self.revid(revnum) + # branch log might return entries for a parent we already have + + if rev in self.commits or revnum < to_revnum: + return None, branched + + parents = [] + # check whether this revision is the start of a branch or part + # of a branch renaming + orig_paths = sorted(orig_paths.iteritems()) + root_paths = [(p, e) for p, e in orig_paths + if self.module.startswith(p)] + if root_paths: + path, ent = root_paths[-1] + if ent.copyfrom_path: + branched = True + newpath = ent.copyfrom_path + self.module[len(path):] + # ent.copyfrom_rev may not be the actual last revision + previd = self.latest(newpath, ent.copyfrom_rev) + if previd is not None: + prevmodule, prevnum = self.revsplit(previd)[1:] + if prevnum >= self.startrev: + parents = [previd] + self.ui.note( + _('found parent of branch %s at %d: %s\n') % + (self.module, prevnum, prevmodule)) + else: + self.ui.debug("no copyfrom path, don't know what to do.\n") + + paths = [] + # filter out unrelated paths + for path, ent in orig_paths: + if self.getrelpath(path) is None: + continue + paths.append((path, ent)) + + # Example SVN datetime. Includes microseconds. + # ISO-8601 conformant + # '2007-01-04T17:35:00.902377Z' + date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"]) + + log = message and self.recode(message) or '' + author = author and self.recode(author) or '' + try: + branch = self.module.split("/")[-1] + if branch == 'trunk': + branch = '' + except IndexError: + branch = None + + cset = commit(author=author, + date=util.datestr(date), + desc=log, + parents=parents, + branch=branch, + rev=rev) + + self.commits[rev] = cset + # The parents list is *shared* among self.paths and the + # commit object. Both will be updated below. 
+ self.paths[rev] = (paths, cset.parents) + if self.child_cset and not self.child_cset.parents: + self.child_cset.parents[:] = [rev] + self.child_cset = cset + return cset, branched + + self.ui.note(_('fetching revision log for "%s" from %d to %d\n') % + (self.module, from_revnum, to_revnum)) + + try: + firstcset = None + lastonbranch = False + stream = self._getlog([self.module], from_revnum, to_revnum) + try: + for entry in stream: + paths, revnum, author, date, message = entry + if revnum < self.startrev: + lastonbranch = True + break + if not paths: + self.ui.debug('revision %d has no entries\n' % revnum) + # If we ever leave the loop on an empty + # revision, do not try to get a parent branch + lastonbranch = lastonbranch or revnum == 0 + continue + cset, lastonbranch = parselogentry(paths, revnum, author, + date, message) + if cset: + firstcset = cset + if lastonbranch: + break + finally: + stream.close() + + if not lastonbranch and firstcset and not firstcset.parents: + # The first revision of the sequence (the last fetched one) + # has invalid parents if not a branch root. Find the parent + # revision now, if any. + try: + firstrevnum = self.revnum(firstcset.rev) + if firstrevnum > 1: + latest = self.latest(self.module, firstrevnum - 1) + if latest: + firstcset.parents.append(latest) + except SvnPathNotFound: + pass + except SubversionException, (inst, num): + if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION: + raise util.Abort(_('svn: branch has no revision %s') % to_revnum) + raise + + def getfile(self, file, rev): + # TODO: ra.get_file transmits the whole file instead of diffs. + if file in self.removed: + raise IOError() + mode = '' + try: + new_module, revnum = self.revsplit(rev)[1:] + if self.module != new_module: + self.module = new_module + self.reparent(self.module) + io = StringIO() + info = svn.ra.get_file(self.ra, file, revnum, io) + data = io.getvalue() + # ra.get_files() seems to keep a reference on the input buffer + # preventing collection. 
Release it explicitely. + io.close() + if isinstance(info, list): + info = info[-1] + mode = ("svn:executable" in info) and 'x' or '' + mode = ("svn:special" in info) and 'l' or mode + except SubversionException, e: + notfound = (svn.core.SVN_ERR_FS_NOT_FOUND, + svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND) + if e.apr_err in notfound: # File not found + raise IOError() + raise + if mode == 'l': + link_prefix = "link " + if data.startswith(link_prefix): + data = data[len(link_prefix):] + return data, mode + + def _iterfiles(self, path, revnum): + """Enumerate all files in path at revnum, recursively.""" + path = path.strip('/') + pool = Pool() + rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/') + entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool) + return ((path + '/' + p) for p, e in entries.iteritems() + if e.kind == svn.core.svn_node_file) + + def getrelpath(self, path, module=None): + if module is None: + module = self.module + # Given the repository url of this wc, say + # "http://server/plone/CMFPlone/branches/Plone-2_0-branch" + # extract the "entry" portion (a relative path) from what + # svn log --xml says, ie + # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py" + # that is to say "tests/PloneTestCase.py" + if path.startswith(module): + relative = path.rstrip('/')[len(module):] + if relative.startswith('/'): + return relative[1:] + elif relative == '': + return relative + + # The path is outside our tracked tree... 
+ self.ui.debug('%r is not under %r, ignoring\n' % (path, module)) + return None + + def _checkpath(self, path, revnum, module=None): + if module is not None: + prevmodule = self.reparent('') + path = module + '/' + path + try: + # ra.check_path does not like leading slashes very much, it leads + # to PROPFIND subversion errors + return svn.ra.check_path(self.ra, path.strip('/'), revnum) + finally: + if module is not None: + self.reparent(prevmodule) + + def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True, + strict_node_history=False): + # Normalize path names, svn >= 1.5 only wants paths relative to + # supplied URL + relpaths = [] + for p in paths: + if not p.startswith('/'): + p = self.module + '/' + p + relpaths.append(p.strip('/')) + args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths, + strict_node_history] + arg = encodeargs(args) + hgexe = util.hgexecutable() + cmd = '%s debugsvnlog' % util.shellquote(hgexe) + stdin, stdout = util.popen2(cmd) + stdin.write(arg) + try: + stdin.close() + except IOError: + raise util.Abort(_('Mercurial failed to run itself, check' + ' hg executable is in PATH')) + return logstream(stdout) + +pre_revprop_change = '''#!/bin/sh + +REPOS="$1" +REV="$2" +USER="$3" +PROPNAME="$4" +ACTION="$5" + +if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi + +echo "Changing prohibited revision property" >&2 +exit 1 +''' + +class svn_sink(converter_sink, commandline): + commit_re = re.compile(r'Committed revision (\d+).', re.M) + + def prerun(self): + if self.wc: + os.chdir(self.wc) + + def postrun(self): + if self.wc: + os.chdir(self.cwd) + + def join(self, name): + return os.path.join(self.wc, '.svn', name) + + def revmapfile(self): + return self.join('hg-shamap') + + def authorfile(self): + return self.join('hg-authormap') + + def 
__init__(self, ui, path): + converter_sink.__init__(self, ui, path) + commandline.__init__(self, ui, 'svn') + self.delete = [] + self.setexec = [] + self.delexec = [] + self.copies = [] + self.wc = None + self.cwd = os.getcwd() + + path = os.path.realpath(path) + + created = False + if os.path.isfile(os.path.join(path, '.svn', 'entries')): + self.wc = path + self.run0('update') + else: + wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc') + + if os.path.isdir(os.path.dirname(path)): + if not os.path.exists(os.path.join(path, 'db', 'fs-type')): + ui.status(_('initializing svn repository %r\n') % + os.path.basename(path)) + commandline(ui, 'svnadmin').run0('create', path) + created = path + path = util.normpath(path) + if not path.startswith('/'): + path = '/' + path + path = 'file://' + path + + ui.status(_('initializing svn working copy %r\n') + % os.path.basename(wcpath)) + self.run0('checkout', path, wcpath) + + self.wc = wcpath + self.opener = util.opener(self.wc) + self.wopener = util.opener(self.wc) + self.childmap = mapfile(ui, self.join('hg-childmap')) + self.is_exec = util.checkexec(self.wc) and util.is_exec or None + + if created: + hook = os.path.join(created, 'hooks', 'pre-revprop-change') + fp = open(hook, 'w') + fp.write(pre_revprop_change) + fp.close() + util.set_flags(hook, False, True) + + xport = transport.SvnRaTransport(url=geturl(path)) + self.uuid = svn.ra.get_uuid(xport.ra) + + def wjoin(self, *names): + return os.path.join(self.wc, *names) + + def putfile(self, filename, flags, data): + if 'l' in flags: + self.wopener.symlink(data, filename) + else: + try: + if os.path.islink(self.wjoin(filename)): + os.unlink(filename) + except OSError: + pass + self.wopener(filename, 'w').write(data) + + if self.is_exec: + was_exec = self.is_exec(self.wjoin(filename)) + else: + # On filesystems not supporting execute-bit, there is no way + # to know if it is set but asking subversion. 
Setting it + # systematically is just as expensive and much simpler. + was_exec = 'x' not in flags + + util.set_flags(self.wjoin(filename), False, 'x' in flags) + if was_exec: + if 'x' not in flags: + self.delexec.append(filename) + else: + if 'x' in flags: + self.setexec.append(filename) + + def _copyfile(self, source, dest): + # SVN's copy command pukes if the destination file exists, but + # our copyfile method expects to record a copy that has + # already occurred. Cross the semantic gap. + wdest = self.wjoin(dest) + exists = os.path.lexists(wdest) + if exists: + fd, tempname = tempfile.mkstemp( + prefix='hg-copy-', dir=os.path.dirname(wdest)) + os.close(fd) + os.unlink(tempname) + os.rename(wdest, tempname) + try: + self.run0('copy', source, dest) + finally: + if exists: + try: + os.unlink(wdest) + except OSError: + pass + os.rename(tempname, wdest) + + def dirs_of(self, files): + dirs = set() + for f in files: + if os.path.isdir(self.wjoin(f)): + dirs.add(f) + for i in strutil.rfindall(f, '/'): + dirs.add(f[:i]) + return dirs + + def add_dirs(self, files): + add_dirs = [d for d in sorted(self.dirs_of(files)) + if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] + if add_dirs: + self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) + return add_dirs + + def add_files(self, files): + if files: + self.xargs(files, 'add', quiet=True) + return files + + def tidy_dirs(self, names): + deleted = [] + for d in sorted(self.dirs_of(names), reverse=True): + wd = self.wjoin(d) + if os.listdir(wd) == '.svn': + self.run0('delete', d) + deleted.append(d) + return deleted + + def addchild(self, parent, child): + self.childmap[parent] = child + + def revid(self, rev): + return u"svn:%s@%s" % (self.uuid, rev) + + def putcommit(self, files, copies, parents, commit, source, revmap): + # Apply changes to working copy + for f, v in files: + try: + data, mode = source.getfile(f, v) + except IOError: + self.delete.append(f) + else: + self.putfile(f, mode, data) + if f in 
copies: + self.copies.append([copies[f], f]) + files = [f[0] for f in files] + + for parent in parents: + try: + return self.revid(self.childmap[parent]) + except KeyError: + pass + entries = set(self.delete) + files = frozenset(files) + entries.update(self.add_dirs(files.difference(entries))) + if self.copies: + for s, d in self.copies: + self._copyfile(s, d) + self.copies = [] + if self.delete: + self.xargs(self.delete, 'delete') + self.delete = [] + entries.update(self.add_files(files.difference(entries))) + entries.update(self.tidy_dirs(entries)) + if self.delexec: + self.xargs(self.delexec, 'propdel', 'svn:executable') + self.delexec = [] + if self.setexec: + self.xargs(self.setexec, 'propset', 'svn:executable', '*') + self.setexec = [] + + fd, messagefile = tempfile.mkstemp(prefix='hg-convert-') + fp = os.fdopen(fd, 'w') + fp.write(commit.desc) + fp.close() + try: + output = self.run0('commit', + username=util.shortuser(commit.author), + file=messagefile, + encoding='utf-8') + try: + rev = self.commit_re.search(output).group(1) + except AttributeError: + if not files: + return parents[0] + self.ui.warn(_('unexpected svn output:\n')) + self.ui.warn(output) + raise util.Abort(_('unable to cope with svn output')) + if commit.rev: + self.run('propset', 'hg:convert-rev', commit.rev, + revprop=True, revision=rev) + if commit.branch and commit.branch != 'default': + self.run('propset', 'hg:convert-branch', commit.branch, + revprop=True, revision=rev) + for parent in parents: + self.addchild(parent, rev) + return self.revid(rev) + finally: + os.unlink(messagefile) + + def puttags(self, tags): + self.ui.warn(_('writing Subversion tags is not yet implemented\n')) + return None, None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo Binary files differnew file mode 100644 index 0000000..df675f8 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py new file mode 100644 index 0000000..db68ede --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm> +# This is a stripped-down version of the original bzr-svn transport.py, +# Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org> + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +from svn.core import SubversionException, Pool +import svn.ra +import svn.client +import svn.core + +# Some older versions of the Python bindings need to be +# explicitly initialized. But what we want to do probably +# won't work worth a darn against those libraries anyway! +svn.ra.initialize() + +svn_config = svn.core.svn_config_get_config(None) + + +def _create_auth_baton(pool): + """Create a Subversion authentication baton. 
""" + import svn.client + # Give the client context baton a suite of authentication + # providers.h + providers = [ + svn.client.get_simple_provider(pool), + svn.client.get_username_provider(pool), + svn.client.get_ssl_client_cert_file_provider(pool), + svn.client.get_ssl_client_cert_pw_file_provider(pool), + svn.client.get_ssl_server_trust_file_provider(pool), + ] + # Platform-dependant authentication methods + getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider', + None) + if getprovider: + # Available in svn >= 1.6 + for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'): + for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'): + p = getprovider(name, type, pool) + if p: + providers.append(p) + else: + if hasattr(svn.client, 'get_windows_simple_provider'): + providers.append(svn.client.get_windows_simple_provider(pool)) + + return svn.core.svn_auth_open(providers, pool) + +class NotBranchError(SubversionException): + pass + +class SvnRaTransport(object): + """ + Open an ra connection to a Subversion repository. 
+ """ + def __init__(self, url="", ra=None): + self.pool = Pool() + self.svn_url = url + self.username = '' + self.password = '' + + # Only Subversion 1.4 has reparent() + if ra is None or not hasattr(svn.ra, 'reparent'): + self.client = svn.client.create_context(self.pool) + ab = _create_auth_baton(self.pool) + if False: + svn.core.svn_auth_set_parameter( + ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username) + svn.core.svn_auth_set_parameter( + ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password) + self.client.auth_baton = ab + self.client.config = svn_config + try: + self.ra = svn.client.open_ra_session( + self.svn_url.encode('utf8'), + self.client, self.pool) + except SubversionException, (inst, num): + if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL, + svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED, + svn.core.SVN_ERR_BAD_URL): + raise NotBranchError(url) + raise + else: + self.ra = ra + svn.ra.reparent(self.ra, self.svn_url.encode('utf8')) + + class Reporter(object): + def __init__(self, reporter_data): + self._reporter, self._baton = reporter_data + + def set_path(self, path, revnum, start_empty, lock_token, pool=None): + svn.ra.reporter2_invoke_set_path(self._reporter, self._baton, + path, revnum, start_empty, lock_token, pool) + + def delete_path(self, path, pool=None): + svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton, + path, pool) + + def link_path(self, path, url, revision, start_empty, lock_token, + pool=None): + svn.ra.reporter2_invoke_link_path(self._reporter, self._baton, + path, url, revision, start_empty, lock_token, + pool) + + def finish_report(self, pool=None): + svn.ra.reporter2_invoke_finish_report(self._reporter, + self._baton, pool) + + def abort_report(self, pool=None): + svn.ra.reporter2_invoke_abort_report(self._reporter, + self._baton, pool) + + def do_update(self, revnum, path, *args, **kwargs): + return self.Reporter(svn.ra.do_update(self.ra, revnum, path, + *args, **kwargs)) diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo Binary files differnew file mode 100644 index 0000000..ee1d3d1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py new file mode 100644 index 0000000..88294a7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py @@ -0,0 +1,272 @@ +"""automatically manage newlines in repository files + +This extension allows you to manage the type of line endings (CRLF or +LF) that are used in the repository and in the local working +directory. That way you can get CRLF line endings on Windows and LF on +Unix/Mac, thereby letting everybody use their OS native line endings. + +The extension reads its configuration from a versioned ``.hgeol`` +configuration file every time you run an ``hg`` command. The +``.hgeol`` file use the same syntax as all other Mercurial +configuration files. It uses two sections, ``[patterns]`` and +``[repository]``. + +The ``[patterns]`` section specifies how line endings should be +converted between the working copy and the repository. The format is +specified by a file pattern. The first match is used, so put more +specific patterns first. The available line endings are ``LF``, +``CRLF``, and ``BIN``. + +Files with the declared format of ``CRLF`` or ``LF`` are always +checked out and stored in the repository in that format and files +declared to be binary (``BIN``) are left unchanged. Additionally, +``native`` is an alias for checking out in the platform's default line +ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on +Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's +default behaviour; it is only needed if you need to override a later, +more general pattern. 
+ +The optional ``[repository]`` section specifies the line endings to +use for files stored in the repository. It has a single setting, +``native``, which determines the storage line endings for files +declared as ``native`` in the ``[patterns]`` section. It can be set to +``LF`` or ``CRLF``. The default is ``LF``. For example, this means +that on Windows, files configured as ``native`` (``CRLF`` by default) +will be converted to ``LF`` when stored in the repository. Files +declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section +are always stored as-is in the repository. + +Example versioned ``.hgeol`` file:: + + [patterns] + **.py = native + **.vcproj = CRLF + **.txt = native + Makefile = LF + **.jpg = BIN + + [repository] + native = LF + +.. note:: + The rules will first apply when files are touched in the working + copy, e.g. by updating to null and back to tip to touch all files. + +The extension uses an optional ``[eol]`` section in your hgrc file +(not the ``.hgeol`` file) for settings that control the overall +behavior. There are two settings: + +- ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or + ``CRLF`` to override the default interpretation of ``native`` for + checkout. This can be used with :hg:`archive` on Unix, say, to + generate an archive where files have line endings for Windows. + +- ``eol.only-consistent`` (default True) can be set to False to make + the extension convert files with inconsistent EOLs. Inconsistent + means that there is both ``CRLF`` and ``LF`` present in the file. + Such files are normally not touched under the assumption that they + have mixed EOLs on purpose. + +The ``win32text.forbid*`` hooks provided by the win32text extension +have been unified into a single hook named ``eol.hook``. The hook will +lookup the expected line endings from the ``.hgeol`` file, which means +you must migrate to a ``.hgeol`` file first before using the hook. 
+ +See :hg:`help patterns` for more information about the glob patterns +used. +""" + +from mercurial.i18n import _ +from mercurial import util, config, extensions, match +import re, os + +# Matches a lone LF, i.e., one that is not part of CRLF. +singlelf = re.compile('(^|[^\r])\n') +# Matches a single EOL which can either be a CRLF where repeated CR +# are removed or a LF. We do not care about old Machintosh files, so a +# stray CR is an error. +eolre = re.compile('\r*\n') + + +def inconsistenteol(data): + return '\r\n' in data and singlelf.search(data) + +def tolf(s, params, ui, **kwargs): + """Filter to convert to LF EOLs.""" + if util.binary(s): + return s + if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s): + return s + return eolre.sub('\n', s) + +def tocrlf(s, params, ui, **kwargs): + """Filter to convert to CRLF EOLs.""" + if util.binary(s): + return s + if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s): + return s + return eolre.sub('\r\n', s) + +def isbinary(s, params): + """Filter to do nothing with the file.""" + return s + +filters = { + 'to-lf': tolf, + 'to-crlf': tocrlf, + 'is-binary': isbinary, +} + + +def hook(ui, repo, node, hooktype, **kwargs): + """verify that files have expected EOLs""" + files = set() + for rev in xrange(repo[node].rev(), len(repo)): + files.update(repo[rev].files()) + tip = repo['tip'] + for f in files: + if f not in tip: + continue + for pattern, target in ui.configitems('encode'): + if match.match(repo.root, '', [pattern])(f): + data = tip[f].data() + if target == "to-lf" and "\r\n" in data: + raise util.Abort(_("%s should not have CRLF line endings") + % f) + elif target == "to-crlf" and singlelf.search(data): + raise util.Abort(_("%s should not have LF line endings") + % f) + + +def preupdate(ui, repo, hooktype, parent1, parent2): + #print "preupdate for %s: %s -> %s" % (repo.root, parent1, parent2) + repo.readhgeol(parent1) + return False + +def uisetup(ui): + 
ui.setconfig('hooks', 'preupdate.eol', preupdate) + +def extsetup(ui): + try: + extensions.find('win32text') + raise util.Abort(_("the eol extension is incompatible with the " + "win32text extension")) + except KeyError: + pass + + +def reposetup(ui, repo): + uisetup(repo.ui) + #print "reposetup for", repo.root + + if not repo.local(): + return + for name, fn in filters.iteritems(): + repo.adddatafilter(name, fn) + + ui.setconfig('patch', 'eol', 'auto') + + class eolrepo(repo.__class__): + + _decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} + _encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} + + def readhgeol(self, node=None, data=None): + if data is None: + try: + if node is None: + data = self.wfile('.hgeol').read() + else: + data = self[node]['.hgeol'].data() + except (IOError, LookupError): + return None + + if self.ui.config('eol', 'native', os.linesep) in ('LF', '\n'): + self._decode['NATIVE'] = 'to-lf' + else: + self._decode['NATIVE'] = 'to-crlf' + + eol = config.config() + # Our files should not be touched. The pattern must be + # inserted first override a '** = native' pattern. + eol.set('patterns', '.hg*', 'BIN') + # We can then parse the user's patterns. + eol.parse('.hgeol', data) + + if eol.get('repository', 'native') == 'CRLF': + self._encode['NATIVE'] = 'to-crlf' + else: + self._encode['NATIVE'] = 'to-lf' + + for pattern, style in eol.items('patterns'): + key = style.upper() + try: + self.ui.setconfig('decode', pattern, self._decode[key]) + self.ui.setconfig('encode', pattern, self._encode[key]) + except KeyError: + self.ui.warn(_("ignoring unknown EOL style '%s' from %s\n") + % (style, eol.source('patterns', pattern))) + + include = [] + exclude = [] + for pattern, style in eol.items('patterns'): + key = style.upper() + if key == 'BIN': + exclude.append(pattern) + else: + include.append(pattern) + + # This will match the files for which we need to care + # about inconsistent newlines. 
+ return match.match(self.root, '', [], include, exclude) + + def _hgcleardirstate(self): + self._eolfile = self.readhgeol() or self.readhgeol('tip') + + if not self._eolfile: + self._eolfile = util.never + return + + try: + cachemtime = os.path.getmtime(self.join("eol.cache")) + except OSError: + cachemtime = 0 + + try: + eolmtime = os.path.getmtime(self.wjoin(".hgeol")) + except OSError: + eolmtime = 0 + + if eolmtime > cachemtime: + ui.debug("eol: detected change in .hgeol\n") + # TODO: we could introduce a method for this in dirstate. + wlock = None + try: + wlock = self.wlock() + for f, e in self.dirstate._map.iteritems(): + self.dirstate._map[f] = (e[0], e[1], -1, 0) + self.dirstate._dirty = True + # Touch the cache to update mtime. TODO: are we sure this + # always enought to update the mtime, or should we write a + # bit to the file? + self.opener("eol.cache", "w").close() + finally: + if wlock is not None: + wlock.release() + + def commitctx(self, ctx, error=False): + for f in sorted(ctx.added() + ctx.modified()): + if not self._eolfile(f): + continue + data = ctx[f].data() + if util.binary(data): + # We should not abort here, since the user should + # be able to say "** = native" to automatically + # have all non-binary files taken care of. 
+ continue + if inconsistenteol(data): + raise util.Abort(_("inconsistent newline style " + "in %s\n" % f)) + return super(eolrepo, self).commitctx(ctx, error) + repo.__class__ = eolrepo + repo._hgcleardirstate() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo Binary files differnew file mode 100644 index 0000000..cd15b4f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py new file mode 100644 index 0000000..5cf9f03 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py @@ -0,0 +1,325 @@ +# extdiff.py - external diff program support for mercurial +# +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to allow external programs to compare revisions + +The extdiff Mercurial extension allows you to use external programs +to compare revisions, or revision with working directory. The external +diff programs are called with a configurable set of options and two +non-option arguments: paths to directories containing snapshots of +files to compare. + +The extdiff extension also allows to configure new diff commands, so +you do not need to type :hg:`extdiff -p kdiff3` always. 
:: + + [extdiff] + # add new command that runs GNU diff(1) in 'context diff' mode + cdiff = gdiff -Nprc5 + ## or the old way: + #cmd.cdiff = gdiff + #opts.cdiff = -Nprc5 + + # add new command called vdiff, runs kdiff3 + vdiff = kdiff3 + + # add new command called meld, runs meld (no need to name twice) + meld = + + # add new command called vimdiff, runs gvimdiff with DirDiff plugin + # (see http://www.vim.org/scripts/script.php?script_id=102) Non + # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in + # your .vimrc + vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)' + +Tool arguments can include variables that are expanded at runtime:: + + $parent1, $plabel1 - filename, descriptive label of first parent + $child, $clabel - filename, descriptive label of child revision + $parent2, $plabel2 - filename, descriptive label of second parent + $parent is an alias for $parent1. + +The extdiff extension will look in your [diff-tools] and [merge-tools] +sections for diff tool arguments, when none are specified in [extdiff]. + +:: + + [extdiff] + kdiff3 = + + [diff-tools] + kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + +You can use -I/-X and list of file or directory names like normal +:hg:`diff` command. The extdiff extension makes snapshots of only +needed files, so running the external diff program will actually be +pretty fast (at least faster than having to compare the entire tree). 
+''' + +from mercurial.i18n import _ +from mercurial.node import short, nullid +from mercurial import cmdutil, util, commands, encoding +import os, shlex, shutil, tempfile, re + +def snapshot(ui, repo, files, node, tmproot): + '''snapshot files as of some revision + if not using snapshot, -I/-X does not work and recursive diff + in tools like kdiff3 and meld displays too many files.''' + dirname = os.path.basename(repo.root) + if dirname == "": + dirname = "root" + if node is not None: + dirname = '%s.%s' % (dirname, short(node)) + base = os.path.join(tmproot, dirname) + os.mkdir(base) + if node is not None: + ui.note(_('making snapshot of %d files from rev %s\n') % + (len(files), short(node))) + else: + ui.note(_('making snapshot of %d files from working directory\n') % + (len(files))) + wopener = util.opener(base) + fns_and_mtime = [] + ctx = repo[node] + for fn in files: + wfn = util.pconvert(fn) + if not wfn in ctx: + # File doesn't exist; could be a bogus modify + continue + ui.note(' %s\n' % wfn) + dest = os.path.join(base, wfn) + fctx = ctx[wfn] + data = repo.wwritedata(wfn, fctx.data()) + if 'l' in fctx.flags(): + wopener.symlink(data, wfn) + else: + wopener(wfn, 'w').write(data) + if 'x' in fctx.flags(): + util.set_flags(dest, False, True) + if node is None: + fns_and_mtime.append((dest, repo.wjoin(fn), os.path.getmtime(dest))) + return dirname, fns_and_mtime + +def dodiff(ui, repo, diffcmd, diffopts, pats, opts): + '''Do the actuall diff: + + - copy to a temp structure if diffing 2 internal revisions + - copy to a temp structure if diffing working revision with + another one and more than 1 file is changed + - just invoke the diff for a single file in the working dir + ''' + + revs = opts.get('rev') + change = opts.get('change') + args = ' '.join(diffopts) + do3way = '$parent2' in args + + if revs and change: + msg = _('cannot specify --rev and --change at the same time') + raise util.Abort(msg) + elif change: + node2 = repo.lookup(change) + node1a, 
node1b = repo.changelog.parents(node2) + else: + node1a, node2 = cmdutil.revpair(repo, revs) + if not revs: + node1b = repo.dirstate.parents()[1] + else: + node1b = nullid + + # Disable 3-way merge if there is only one parent + if do3way: + if node1b == nullid: + do3way = False + + matcher = cmdutil.match(repo, pats, opts) + mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3]) + if do3way: + mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3]) + else: + mod_b, add_b, rem_b = set(), set(), set() + modadd = mod_a | add_a | mod_b | add_b + common = modadd | rem_a | rem_b + if not common: + return 0 + + tmproot = tempfile.mkdtemp(prefix='extdiff.') + try: + # Always make a copy of node1a (and node1b, if applicable) + dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a) + dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0] + rev1a = '@%d' % repo[node1a].rev() + if do3way: + dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b) + dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0] + rev1b = '@%d' % repo[node1b].rev() + else: + dir1b = None + rev1b = '' + + fns_and_mtime = [] + + # If node2 in not the wc or there is >1 change, copy it + dir2root = '' + rev2 = '' + if node2: + dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0] + rev2 = '@%d' % repo[node2].rev() + elif len(common) > 1: + #we only actually need to get the files to copy back to + #the working dir in this case (because the other cases + #are: diffing 2 revisions or single file -- in which case + #the file is already directly passed to the diff tool). 
+ dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot) + else: + # This lets the diff tool open the changed file directly + dir2 = '' + dir2root = repo.root + + label1a = rev1a + label1b = rev1b + label2 = rev2 + + # If only one change, diff the files instead of the directories + # Handle bogus modifies correctly by checking if the files exist + if len(common) == 1: + common_file = util.localpath(common.pop()) + dir1a = os.path.join(dir1a, common_file) + label1a = common_file + rev1a + if not os.path.isfile(os.path.join(tmproot, dir1a)): + dir1a = os.devnull + if do3way: + dir1b = os.path.join(dir1b, common_file) + label1b = common_file + rev1b + if not os.path.isfile(os.path.join(tmproot, dir1b)): + dir1b = os.devnull + dir2 = os.path.join(dir2root, dir2, common_file) + label2 = common_file + rev2 + + # Function to quote file/dir names in the argument string. + # When not operating in 3-way mode, an empty string is + # returned for parent2 + replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b, + plabel1=label1a, plabel2=label1b, + clabel=label2, child=dir2) + def quote(match): + key = match.group()[1:] + if not do3way and key == 'parent2': + return '' + return util.shellquote(replace[key]) + + # Match parent2 first, so 'parent1?' will match both parent1 and parent + regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel)' + if not do3way and not re.search(regex, args): + args += ' $parent1 $child' + args = re.sub(regex, quote, args) + cmdline = util.shellquote(diffcmd) + ' ' + args + + ui.debug('running %r in %s\n' % (cmdline, tmproot)) + util.system(cmdline, cwd=tmproot) + + for copy_fn, working_fn, mtime in fns_and_mtime: + if os.path.getmtime(copy_fn) != mtime: + ui.debug('file changed while diffing. 
' + 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn)) + util.copyfile(copy_fn, working_fn) + + return 1 + finally: + ui.note(_('cleaning up temp directory\n')) + shutil.rmtree(tmproot) + +def extdiff(ui, repo, *pats, **opts): + '''use external program to diff repository (or selected files) + + Show differences between revisions for the specified files, using + an external program. The default program used is diff, with + default options "-Npru". + + To select a different program, use the -p/--program option. The + program will be passed the names of two directories to compare. To + pass additional options to the program, use -o/--option. These + will be passed before the names of the directories to compare. + + When two revision arguments are given, then changes are shown + between those revisions. If only one revision is specified then + that revision is compared to the working directory, and, when no + revisions are specified, the working directory files are compared + to its parent.''' + program = opts.get('program') + option = opts.get('option') + if not program: + program = 'diff' + option = option or ['-Npru'] + return dodiff(ui, repo, program, option, pats, opts) + +cmdtable = { + "extdiff": + (extdiff, + [('p', 'program', '', + _('comparison program to run'), _('CMD')), + ('o', 'option', [], + _('pass option to comparison program'), _('OPT')), + ('r', 'rev', [], + _('revision'), _('REV')), + ('c', 'change', '', + _('change made by revision'), _('REV')), + ] + commands.walkopts, + _('hg extdiff [OPT]... [FILE]...')), + } + +def uisetup(ui): + for cmd, path in ui.configitems('extdiff'): + if cmd.startswith('cmd.'): + cmd = cmd[4:] + if not path: + path = cmd + diffopts = ui.config('extdiff', 'opts.' 
+ cmd, '') + diffopts = diffopts and [diffopts] or [] + elif cmd.startswith('opts.'): + continue + else: + # command = path opts + if path: + diffopts = shlex.split(path) + path = diffopts.pop(0) + else: + path, diffopts = cmd, [] + # look for diff arguments in [diff-tools] then [merge-tools] + if diffopts == []: + args = ui.config('diff-tools', cmd+'.diffargs') or \ + ui.config('merge-tools', cmd+'.diffargs') + if args: + diffopts = shlex.split(args) + def save(cmd, path, diffopts): + '''use closure to save diff command to use''' + def mydiff(ui, repo, *pats, **opts): + return dodiff(ui, repo, path, diffopts + opts['option'], + pats, opts) + doc = _('''\ +use %(path)s to diff repository (or selected files) + + Show differences between revisions for the specified files, using + the %(path)s program. + + When two revision arguments are given, then changes are shown + between those revisions. If only one revision is specified then + that revision is compared to the working directory, and, when no + revisions are specified, the working directory files are compared + to its parent.\ +''') % dict(path=util.uirepr(path)) + + # We must translate the docstring right away since it is + # used as a format string. The string will unfortunately + # be translated again in commands.helpcmd and this will + # fail when the docstring contains non-ASCII characters. + # Decoding the string to a Unicode string here (using the + # right encoding) prevents that. + mydiff.__doc__ = doc.decode(encoding.encoding) + return mydiff + cmdtable[cmd] = (save(cmd, path, diffopts), + cmdtable['extdiff'][1][1:], + _('hg %s [OPTION]... 
[FILE]...') % cmd) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo Binary files differnew file mode 100644 index 0000000..008d690 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py new file mode 100644 index 0000000..b8e765f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py @@ -0,0 +1,152 @@ +# fetch.py - pull and merge remote changes +# +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''pull, update and merge in one command''' + +from mercurial.i18n import _ +from mercurial.node import nullid, short +from mercurial import commands, cmdutil, hg, util, url, error +from mercurial.lock import release + +def fetch(ui, repo, source='default', **opts): + '''pull changes from a remote repository, merge new changes if needed. + + This finds all changes from the repository at the specified path + or URL and adds them to the local repository. + + If the pulled changes add a new branch head, the head is + automatically merged, and the result of the merge is committed. + Otherwise, the working directory is updated to include the new + changes. + + When a merge occurs, the newly pulled changes are assumed to be + "authoritative". The head of the new changes is used as the first + parent, with local changes as the second. To switch the merge + order, use --switch-parent. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + Returns 0 on success. 
+ ''' + + date = opts.get('date') + if date: + opts['date'] = util.parsedate(date) + + parent, p2 = repo.dirstate.parents() + branch = repo.dirstate.branch() + branchnode = repo.branchtags().get(branch) + if parent != branchnode: + raise util.Abort(_('working dir not at branch tip ' + '(use "hg update" to check out branch tip)')) + + if p2 != nullid: + raise util.Abort(_('outstanding uncommitted merge')) + + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + mod, add, rem, del_ = repo.status()[:4] + + if mod or add or rem: + raise util.Abort(_('outstanding uncommitted changes')) + if del_: + raise util.Abort(_('working directory is missing some files')) + bheads = repo.branchheads(branch) + bheads = [head for head in bheads if len(repo[head].children()) == 0] + if len(bheads) > 1: + raise util.Abort(_('multiple heads in this branch ' + '(use "hg heads ." and "hg merge" to merge)')) + + other = hg.repository(hg.remoteui(repo, opts), + ui.expandpath(source)) + ui.status(_('pulling from %s\n') % + url.hidepassword(ui.expandpath(source))) + revs = None + if opts['rev']: + try: + revs = [other.lookup(rev) for rev in opts['rev']] + except error.CapabilityError: + err = _("Other repository doesn't support revision lookup, " + "so a rev cannot be specified.") + raise util.Abort(err) + + # Are there any changes at all? + modheads = repo.pull(other, heads=revs) + if modheads == 0: + return 0 + + # Is this a simple fast-forward along the current branch? + newheads = repo.branchheads(branch) + newchildren = repo.changelog.nodesbetween([parent], newheads)[2] + if len(newheads) == 1: + if newchildren[0] != parent: + return hg.clean(repo, newchildren[0]) + else: + return 0 + + # Are there more than one additional branch heads? 
+ newchildren = [n for n in newchildren if n != parent] + newparent = parent + if newchildren: + newparent = newchildren[0] + hg.clean(repo, newparent) + newheads = [n for n in newheads if n != newparent] + if len(newheads) > 1: + ui.status(_('not merging with %d other new branch heads ' + '(use "hg heads ." and "hg merge" to merge them)\n') % + (len(newheads) - 1)) + return 1 + + # Otherwise, let's merge. + err = False + if newheads: + # By default, we consider the repository we're pulling + # *from* as authoritative, so we merge our changes into + # theirs. + if opts['switch_parent']: + firstparent, secondparent = newparent, newheads[0] + else: + firstparent, secondparent = newheads[0], newparent + ui.status(_('updating to %d:%s\n') % + (repo.changelog.rev(firstparent), + short(firstparent))) + hg.clean(repo, firstparent) + ui.status(_('merging with %d:%s\n') % + (repo.changelog.rev(secondparent), short(secondparent))) + err = hg.merge(repo, secondparent, remind=False) + + if not err: + # we don't translate commit messages + message = (cmdutil.logmessage(opts) or + ('Automated merge with %s' % + url.removeauth(other.url()))) + editor = cmdutil.commiteditor + if opts.get('force_editor') or opts.get('edit'): + editor = cmdutil.commitforceeditor + n = repo.commit(message, opts['user'], opts['date'], editor=editor) + ui.status(_('new changeset %d:%s merges remote changes ' + 'with local\n') % (repo.changelog.rev(n), + short(n))) + + return err + + finally: + release(lock, wlock) + +cmdtable = { + 'fetch': + (fetch, + [('r', 'rev', [], + _('a specific revision you would like to pull'), _('REV')), + ('e', 'edit', None, _('edit commit message')), + ('', 'force-editor', None, _('edit commit message (DEPRECATED)')), + ('', 'switch-parent', None, _('switch parents when merging')), + ] + commands.commitopts + commands.commitopts2 + commands.remoteopts, + _('hg fetch [SOURCE]')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo Binary files differnew file mode 100644 index 0000000..c8529ef --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py new file mode 100644 index 0000000..b13ec1e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py @@ -0,0 +1,288 @@ +# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''commands to sign and verify changesets''' + +import os, tempfile, binascii +from mercurial import util, commands, match +from mercurial import node as hgnode +from mercurial.i18n import _ + +class gpg(object): + def __init__(self, path, key=None): + self.path = path + self.key = (key and " --local-user \"%s\"" % key) or "" + + def sign(self, data): + gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key) + return util.filter(data, gpgcmd) + + def verify(self, data, sig): + """ returns of the good and bad signatures""" + sigfile = datafile = None + try: + # create temporary files + fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig") + fp = os.fdopen(fd, 'wb') + fp.write(sig) + fp.close() + fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt") + fp = os.fdopen(fd, 'wb') + fp.write(data) + fp.close() + gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify " + "\"%s\" \"%s\"" % (self.path, sigfile, datafile)) + ret = util.filter("", gpgcmd) + finally: + for f in (sigfile, datafile): + try: + if f: + os.unlink(f) + except: + pass + keys = [] + key, fingerprint = None, None + err = "" + for l in ret.splitlines(): + # see DETAILS in the gnupg documentation + # filter the logger output + if not l.startswith("[GNUPG:]"): + continue + l = l[9:] + if 
l.startswith("ERRSIG"): + err = _("error while verifying signature") + break + elif l.startswith("VALIDSIG"): + # fingerprint of the primary key + fingerprint = l.split()[10] + elif (l.startswith("GOODSIG") or + l.startswith("EXPSIG") or + l.startswith("EXPKEYSIG") or + l.startswith("BADSIG")): + if key is not None: + keys.append(key + [fingerprint]) + key = l.split(" ", 2) + fingerprint = None + if err: + return err, [] + if key is not None: + keys.append(key + [fingerprint]) + return err, keys + +def newgpg(ui, **opts): + """create a new gpg instance""" + gpgpath = ui.config("gpg", "cmd", "gpg") + gpgkey = opts.get('key') + if not gpgkey: + gpgkey = ui.config("gpg", "key", None) + return gpg(gpgpath, gpgkey) + +def sigwalk(repo): + """ + walk over every sigs, yields a couple + ((node, version, sig), (filename, linenumber)) + """ + def parsefile(fileiter, context): + ln = 1 + for l in fileiter: + if not l: + continue + yield (l.split(" ", 2), (context, ln)) + ln += 1 + + # read the heads + fl = repo.file(".hgsigs") + for r in reversed(fl.heads()): + fn = ".hgsigs|%s" % hgnode.short(r) + for item in parsefile(fl.read(r).splitlines(), fn): + yield item + try: + # read local signatures + fn = "localsigs" + for item in parsefile(repo.opener(fn), fn): + yield item + except IOError: + pass + +def getkeys(ui, repo, mygpg, sigdata, context): + """get the keys who signed a data""" + fn, ln = context + node, version, sig = sigdata + prefix = "%s:%d" % (fn, ln) + node = hgnode.bin(node) + + data = node2txt(repo, node, version) + sig = binascii.a2b_base64(sig) + err, keys = mygpg.verify(data, sig) + if err: + ui.warn("%s:%d %s\n" % (fn, ln , err)) + return None + + validkeys = [] + # warn for expired key and/or sigs + for key in keys: + if key[0] == "BADSIG": + ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2])) + continue + if key[0] == "EXPSIG": + ui.write(_("%s Note: Signature has expired" + " (signed by: \"%s\")\n") % (prefix, key[2])) + elif key[0] == 
"EXPKEYSIG": + ui.write(_("%s Note: This key has expired" + " (signed by: \"%s\")\n") % (prefix, key[2])) + validkeys.append((key[1], key[2], key[3])) + return validkeys + +def sigs(ui, repo): + """list signed changesets""" + mygpg = newgpg(ui) + revs = {} + + for data, context in sigwalk(repo): + node, version, sig = data + fn, ln = context + try: + n = repo.lookup(node) + except KeyError: + ui.warn(_("%s:%d node does not exist\n") % (fn, ln)) + continue + r = repo.changelog.rev(n) + keys = getkeys(ui, repo, mygpg, data, context) + if not keys: + continue + revs.setdefault(r, []) + revs[r].extend(keys) + for rev in sorted(revs, reverse=True): + for k in revs[rev]: + r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev))) + ui.write("%-30s %s\n" % (keystr(ui, k), r)) + +def check(ui, repo, rev): + """verify all the signatures there may be for a particular revision""" + mygpg = newgpg(ui) + rev = repo.lookup(rev) + hexrev = hgnode.hex(rev) + keys = [] + + for data, context in sigwalk(repo): + node, version, sig = data + if node == hexrev: + k = getkeys(ui, repo, mygpg, data, context) + if k: + keys.extend(k) + + if not keys: + ui.write(_("No valid signature for %s\n") % hgnode.short(rev)) + return + + # print summary + ui.write("%s is signed by:\n" % hgnode.short(rev)) + for key in keys: + ui.write(" %s\n" % keystr(ui, key)) + +def keystr(ui, key): + """associate a string to a key (username, comment)""" + keyid, user, fingerprint = key + comment = ui.config("gpg", fingerprint, None) + if comment: + return "%s (%s)" % (user, comment) + else: + return user + +def sign(ui, repo, *revs, **opts): + """add a signature for the current or given revision + + If no revision is given, the parent of the working directory is used, + or tip if no revision is checked out. + + See :hg:`help dates` for a list of formats valid for -d/--date. 
+ """ + + mygpg = newgpg(ui, **opts) + sigver = "0" + sigmessage = "" + + date = opts.get('date') + if date: + opts['date'] = util.parsedate(date) + + if revs: + nodes = [repo.lookup(n) for n in revs] + else: + nodes = [node for node in repo.dirstate.parents() + if node != hgnode.nullid] + if len(nodes) > 1: + raise util.Abort(_('uncommitted merge - please provide a ' + 'specific revision')) + if not nodes: + nodes = [repo.changelog.tip()] + + for n in nodes: + hexnode = hgnode.hex(n) + ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n), + hgnode.short(n))) + # build data + data = node2txt(repo, n, sigver) + sig = mygpg.sign(data) + if not sig: + raise util.Abort(_("error while signing")) + sig = binascii.b2a_base64(sig) + sig = sig.replace("\n", "") + sigmessage += "%s %s %s\n" % (hexnode, sigver, sig) + + # write it + if opts['local']: + repo.opener("localsigs", "ab").write(sigmessage) + return + + msigs = match.exact(repo.root, '', ['.hgsigs']) + s = repo.status(match=msigs, unknown=True, ignored=True)[:6] + if util.any(s) and not opts["force"]: + raise util.Abort(_("working copy of .hgsigs is changed " + "(please commit .hgsigs manually " + "or use --force)")) + + repo.wfile(".hgsigs", "ab").write(sigmessage) + + if '.hgsigs' not in repo.dirstate: + repo[None].add([".hgsigs"]) + + if opts["no_commit"]: + return + + message = opts['message'] + if not message: + # we don't translate commit messages + message = "\n".join(["Added signature for changeset %s" + % hgnode.short(n) + for n in nodes]) + try: + repo.commit(message, opts['user'], opts['date'], match=msigs) + except ValueError, inst: + raise util.Abort(str(inst)) + +def node2txt(repo, node, ver): + """map a manifest into some text""" + if ver == "0": + return "%s\n" % hgnode.hex(node) + else: + raise util.Abort(_("unknown signature version")) + +cmdtable = { + "sign": + (sign, + [('l', 'local', None, _('make the signature local')), + ('f', 'force', None, _('sign even if the sigfile is modified')), + 
('', 'no-commit', None, _('do not commit the sigfile after signing')), + ('k', 'key', '', + _('the key id to sign with'), _('ID')), + ('m', 'message', '', + _('commit message'), _('TEXT')), + ] + commands.commitopts2, + _('hg sign [OPTION]... [REVISION]...')), + "sigcheck": (check, [], _('hg sigcheck REVISION')), + "sigs": (sigs, [], _('hg sigs')), +} + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo Binary files differnew file mode 100644 index 0000000..3d5d415 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py new file mode 100644 index 0000000..a8eb805 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py @@ -0,0 +1,337 @@ +# ASCII graph log extension for Mercurial +# +# Copyright 2007 Joel Rosdahl <joel@rosdahl.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to view revision graphs from a shell + +This extension adds a --graph option to the incoming, outgoing and log +commands. When this options is given, an ASCII representation of the +revision graph is also shown. 
+''' + +import os +from mercurial.cmdutil import revrange, show_changeset +from mercurial.commands import templateopts +from mercurial.i18n import _ +from mercurial.node import nullrev +from mercurial import cmdutil, commands, extensions +from mercurial import hg, util, graphmod + +ASCIIDATA = 'ASC' + +def asciiedges(seen, rev, parents): + """adds edge info to changelog DAG walk suitable for ascii()""" + if rev not in seen: + seen.append(rev) + nodeidx = seen.index(rev) + + knownparents = [] + newparents = [] + for parent in parents: + if parent in seen: + knownparents.append(parent) + else: + newparents.append(parent) + + ncols = len(seen) + seen[nodeidx:nodeidx + 1] = newparents + edges = [(nodeidx, seen.index(p)) for p in knownparents] + + if len(newparents) > 0: + edges.append((nodeidx, nodeidx)) + if len(newparents) > 1: + edges.append((nodeidx, nodeidx + 1)) + + nmorecols = len(seen) - ncols + return nodeidx, edges, ncols, nmorecols + +def fix_long_right_edges(edges): + for (i, (start, end)) in enumerate(edges): + if end > start: + edges[i] = (start, end + 1) + +def get_nodeline_edges_tail( + node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail): + if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0: + # Still going in the same non-vertical direction. 
+ if n_columns_diff == -1: + start = max(node_index + 1, p_node_index) + tail = ["|", " "] * (start - node_index - 1) + tail.extend(["/", " "] * (n_columns - start)) + return tail + else: + return ["\\", " "] * (n_columns - node_index - 1) + else: + return ["|", " "] * (n_columns - node_index - 1) + +def draw_edges(edges, nodeline, interline): + for (start, end) in edges: + if start == end + 1: + interline[2 * end + 1] = "/" + elif start == end - 1: + interline[2 * start + 1] = "\\" + elif start == end: + interline[2 * start] = "|" + else: + nodeline[2 * end] = "+" + if start > end: + (start, end) = (end, start) + for i in range(2 * start + 1, 2 * end): + if nodeline[i] != "+": + nodeline[i] = "-" + +def get_padding_line(ni, n_columns, edges): + line = [] + line.extend(["|", " "] * ni) + if (ni, ni - 1) in edges or (ni, ni) in edges: + # (ni, ni - 1) (ni, ni) + # | | | | | | | | + # +---o | | o---+ + # | | c | | c | | + # | |/ / | |/ / + # | | | | | | + c = "|" + else: + c = " " + line.extend([c, " "]) + line.extend(["|", " "] * (n_columns - ni - 1)) + return line + +def asciistate(): + """returns the initial value for the "state" argument to ascii()""" + return [0, 0] + +def ascii(ui, state, type, char, text, coldata): + """prints an ASCII graph of the DAG + + takes the following arguments (one call per node in the graph): + + - ui to write to + - Somewhere to keep the needed state in (init to asciistate()) + - Column of the current node in the set of ongoing edges. + - Type indicator of node data == ASCIIDATA. + - Payload: (char, lines): + - Character to use as node's symbol. + - List of lines to display as the node's text. + - Edges; a list of (col, next_col) indicating the edges between + the current node and its parents. + - Number of columns (ongoing edges) in the current revision. + - The difference between the number of columns (ongoing edges) + in the next revision and the number of columns (ongoing edges) + in the current revision. 
That is: -1 means one column removed; + 0 means no columns added or removed; 1 means one column added. + """ + + idx, edges, ncols, coldiff = coldata + assert -2 < coldiff < 2 + if coldiff == -1: + # Transform + # + # | | | | | | + # o | | into o---+ + # |X / |/ / + # | | | | + fix_long_right_edges(edges) + + # add_padding_line says whether to rewrite + # + # | | | | | | | | + # | o---+ into | o---+ + # | / / | | | # <--- padding line + # o | | | / / + # o | | + add_padding_line = (len(text) > 2 and coldiff == -1 and + [x for (x, y) in edges if x + 1 < y]) + + # fix_nodeline_tail says whether to rewrite + # + # | | o | | | | o | | + # | | |/ / | | |/ / + # | o | | into | o / / # <--- fixed nodeline tail + # | |/ / | |/ / + # o | | o | | + fix_nodeline_tail = len(text) <= 2 and not add_padding_line + + # nodeline is the line containing the node character (typically o) + nodeline = ["|", " "] * idx + nodeline.extend([char, " "]) + + nodeline.extend( + get_nodeline_edges_tail(idx, state[1], ncols, coldiff, + state[0], fix_nodeline_tail)) + + # shift_interline is the line containing the non-vertical + # edges between this entry and the next + shift_interline = ["|", " "] * idx + if coldiff == -1: + n_spaces = 1 + edge_ch = "/" + elif coldiff == 0: + n_spaces = 2 + edge_ch = "|" + else: + n_spaces = 3 + edge_ch = "\\" + shift_interline.extend(n_spaces * [" "]) + shift_interline.extend([edge_ch, " "] * (ncols - idx - 1)) + + # draw edges from the current node to its parents + draw_edges(edges, nodeline, shift_interline) + + # lines is the list of all graph lines to print + lines = [nodeline] + if add_padding_line: + lines.append(get_padding_line(idx, ncols, edges)) + lines.append(shift_interline) + + # make sure that there are as many graph lines as there are + # log strings + while len(text) < len(lines): + text.append("") + if len(lines) < len(text): + extra_interline = ["|", " "] * (ncols + coldiff) + while len(lines) < len(text): + lines.append(extra_interline) + + # 
print lines + indentation_level = max(ncols, ncols + coldiff) + for (line, logstr) in zip(lines, text): + ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr) + ui.write(ln.rstrip() + '\n') + + # ... and start over + state[0] = coldiff + state[1] = idx + +def get_revs(repo, rev_opt): + if rev_opt: + revs = revrange(repo, rev_opt) + if len(revs) == 0: + return (nullrev, nullrev) + return (max(revs), min(revs)) + else: + return (len(repo) - 1, 0) + +def check_unsupported_flags(opts): + for op in ["follow", "follow_first", "date", "copies", "keyword", "remove", + "only_merges", "user", "branch", "only_branch", "prune", + "newest_first", "no_merges", "include", "exclude"]: + if op in opts and opts[op]: + raise util.Abort(_("--graph option is incompatible with --%s") + % op.replace("_", "-")) + +def generate(ui, dag, displayer, showparents, edgefn): + seen, state = [], asciistate() + for rev, type, ctx, parents in dag: + char = ctx.node() in showparents and '@' or 'o' + displayer.show(ctx) + lines = displayer.hunk.pop(rev).split('\n')[:-1] + displayer.flush(rev) + ascii(ui, state, type, char, lines, edgefn(seen, rev, parents)) + displayer.close() + +def graphlog(ui, repo, path=None, **opts): + """show revision history alongside an ASCII revision graph + + Print a revision history alongside a revision graph drawn with + ASCII characters. + + Nodes printed as an @ character are parents of the working + directory. 
+ """ + + check_unsupported_flags(opts) + limit = cmdutil.loglimit(opts) + start, stop = get_revs(repo, opts["rev"]) + if start == nullrev: + return + + if path: + path = util.canonpath(repo.root, os.getcwd(), path) + if path: # could be reset in canonpath + revdag = graphmod.filerevs(repo, path, start, stop, limit) + else: + if limit is not None: + stop = max(stop, start - limit + 1) + revdag = graphmod.revisions(repo, start, stop) + + displayer = show_changeset(ui, repo, opts, buffered=True) + showparents = [ctx.node() for ctx in repo[None].parents()] + generate(ui, revdag, displayer, showparents, asciiedges) + +def graphrevs(repo, nodes, opts): + limit = cmdutil.loglimit(opts) + nodes.reverse() + if limit is not None: + nodes = nodes[:limit] + return graphmod.nodes(repo, nodes) + +def goutgoing(ui, repo, dest=None, **opts): + """show the outgoing changesets alongside an ASCII revision graph + + Print the outgoing changesets alongside a revision graph drawn with + ASCII characters. + + Nodes printed as an @ character are parents of the working + directory. + """ + + check_unsupported_flags(opts) + o = hg._outgoing(ui, repo, dest, opts) + if o is None: + return + + revdag = graphrevs(repo, o, opts) + displayer = show_changeset(ui, repo, opts, buffered=True) + showparents = [ctx.node() for ctx in repo[None].parents()] + generate(ui, revdag, displayer, showparents, asciiedges) + +def gincoming(ui, repo, source="default", **opts): + """show the incoming changesets alongside an ASCII revision graph + + Print the incoming changesets alongside a revision graph drawn with + ASCII characters. + + Nodes printed as an @ character are parents of the working + directory. 
+ """ + def subreporecurse(): + return 1 + + check_unsupported_flags(opts) + def display(other, chlist, displayer): + revdag = graphrevs(other, chlist, opts) + showparents = [ctx.node() for ctx in repo[None].parents()] + generate(ui, revdag, displayer, showparents, asciiedges) + + hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True) + +def uisetup(ui): + '''Initialize the extension.''' + _wrapcmd(ui, 'log', commands.table, graphlog) + _wrapcmd(ui, 'incoming', commands.table, gincoming) + _wrapcmd(ui, 'outgoing', commands.table, goutgoing) + +def _wrapcmd(ui, cmd, table, wrapfn): + '''wrap the command''' + def graph(orig, *args, **kwargs): + if kwargs['graph']: + return wrapfn(*args, **kwargs) + return orig(*args, **kwargs) + entry = extensions.wrapcommand(table, cmd, graph) + entry[1].append(('G', 'graph', None, _("show the revision DAG"))) + +cmdtable = { + "glog": + (graphlog, + [('l', 'limit', '', + _('limit number of changes displayed'), _('NUM')), + ('p', 'patch', False, _('show patch')), + ('r', 'rev', [], + _('show the specified revision or range'), _('REV')), + ] + templateopts, + _('hg glog [OPTION]... [FILE]')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo Binary files differnew file mode 100644 index 0000000..2edc5f3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py new file mode 100644 index 0000000..4e72680 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py @@ -0,0 +1,251 @@ +# Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +"""hooks for integrating with the CIA.vc notification service + +This is meant to be run as a changegroup or incoming hook. To +configure it, set the following options in your hgrc:: + + [cia] + # your registered CIA user name + user = foo + # the name of the project in CIA + project = foo + # the module (subproject) (optional) + #module = foo + # Append a diffstat to the log message (optional) + #diffstat = False + # Template to use for log messages (optional) + #template = {desc}\\n{baseurl}/rev/{node}-- {diffstat} + # Style to use (optional) + #style = foo + # The URL of the CIA notification service (optional) + # You can use mailto: URLs to send by email, eg + # mailto:cia@cia.vc + # Make sure to set email.from if you do this. + #url = http://cia.vc/ + # print message instead of sending it (optional) + #test = False + + [hooks] + # one of these: + changegroup.cia = python:hgcia.hook + #incoming.cia = python:hgcia.hook + + [web] + # If you want hyperlinks (optional) + baseurl = http://server/path/to/repo +""" + +from mercurial.i18n import _ +from mercurial.node import bin, short +from mercurial import cmdutil, patch, templater, util, mail +import email.Parser + +import xmlrpclib +from xml.sax import saxutils + +socket_timeout = 30 # seconds +try: + # set a timeout for the socket so you don't have to wait so looooong + # when cia.vc is having problems. 
requires python >= 2.3: + import socket + socket.setdefaulttimeout(socket_timeout) +except: + pass + +HGCIA_VERSION = '0.1' +HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia' + + +class ciamsg(object): + """ A CIA message """ + def __init__(self, cia, ctx): + self.cia = cia + self.ctx = ctx + self.url = self.cia.url + + def fileelem(self, path, uri, action): + if uri: + uri = ' uri=%s' % saxutils.quoteattr(uri) + return '<file%s action=%s>%s</file>' % ( + uri, saxutils.quoteattr(action), saxutils.escape(path)) + + def fileelems(self): + n = self.ctx.node() + f = self.cia.repo.status(self.ctx.parents()[0].node(), n) + url = self.url or '' + elems = [] + for path in f[0]: + uri = '%s/diff/%s/%s' % (url, short(n), path) + elems.append(self.fileelem(path, url and uri, 'modify')) + for path in f[1]: + # TODO: copy/rename ? + uri = '%s/file/%s/%s' % (url, short(n), path) + elems.append(self.fileelem(path, url and uri, 'add')) + for path in f[2]: + elems.append(self.fileelem(path, '', 'remove')) + + return '\n'.join(elems) + + def sourceelem(self, project, module=None, branch=None): + msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)] + if module: + msg.append('<module>%s</module>' % saxutils.escape(module)) + if branch: + msg.append('<branch>%s</branch>' % saxutils.escape(branch)) + msg.append('</source>') + + return '\n'.join(msg) + + def diffstat(self): + class patchbuf(object): + def __init__(self): + self.lines = [] + # diffstat is stupid + self.name = 'cia' + def write(self, data): + self.lines.append(data) + def close(self): + pass + + n = self.ctx.node() + pbuf = patchbuf() + cmdutil.export(self.cia.repo, [n], fp=pbuf) + return patch.diffstat(pbuf.lines) or '' + + def logmsg(self): + diffstat = self.cia.diffstat and self.diffstat() or '' + self.cia.ui.pushbuffer() + self.cia.templater.show(self.ctx, changes=self.ctx.changeset(), + url=self.cia.url, diffstat=diffstat) + return self.cia.ui.popbuffer() + + def xml(self): + n = 
short(self.ctx.node()) + src = self.sourceelem(self.cia.project, module=self.cia.module, + branch=self.ctx.branch()) + # unix timestamp + dt = self.ctx.date() + timestamp = dt[0] + + author = saxutils.escape(self.ctx.user()) + rev = '%d:%s' % (self.ctx.rev(), n) + log = saxutils.escape(self.logmsg()) + + url = self.url and '<url>%s/rev/%s</url>' % (saxutils.escape(self.url), + n) or '' + + msg = """ +<message> + <generator> + <name>Mercurial (hgcia)</name> + <version>%s</version> + <url>%s</url> + <user>%s</user> + </generator> + %s + <body> + <commit> + <author>%s</author> + <version>%s</version> + <log>%s</log> + %s + <files>%s</files> + </commit> + </body> + <timestamp>%d</timestamp> +</message> +""" % \ + (HGCIA_VERSION, saxutils.escape(HGCIA_URL), + saxutils.escape(self.cia.user), src, author, rev, log, url, + self.fileelems(), timestamp) + + return msg + + +class hgcia(object): + """ CIA notification class """ + + deftemplate = '{desc}' + dstemplate = '{desc}\n-- \n{diffstat}' + + def __init__(self, ui, repo): + self.ui = ui + self.repo = repo + + self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc') + self.user = self.ui.config('cia', 'user') + self.project = self.ui.config('cia', 'project') + self.module = self.ui.config('cia', 'module') + self.diffstat = self.ui.configbool('cia', 'diffstat') + self.emailfrom = self.ui.config('email', 'from') + self.dryrun = self.ui.configbool('cia', 'test') + self.url = self.ui.config('web', 'baseurl') + + style = self.ui.config('cia', 'style') + template = self.ui.config('cia', 'template') + if not template: + template = self.diffstat and self.dstemplate or self.deftemplate + template = templater.parsestring(template, quoted=False) + t = cmdutil.changeset_templater(self.ui, self.repo, False, None, + style, False) + t.use_template(template) + self.templater = t + + def sendrpc(self, msg): + srv = xmlrpclib.Server(self.ciaurl) + res = srv.hub.deliver(msg) + if res is not True: + raise util.Abort(_('%s returned an 
error: %s') % + (self.ciaurl, res)) + + def sendemail(self, address, data): + p = email.Parser.Parser() + msg = p.parsestr(data) + msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2") + msg['To'] = address + msg['From'] = self.emailfrom + msg['Subject'] = 'DeliverXML' + msg['Content-type'] = 'text/xml' + msgtext = msg.as_string() + + self.ui.status(_('hgcia: sending update to %s\n') % address) + mail.sendmail(self.ui, util.email(self.emailfrom), + [address], msgtext) + + +def hook(ui, repo, hooktype, node=None, url=None, **kwargs): + """ send CIA notification """ + def sendmsg(cia, ctx): + msg = ciamsg(cia, ctx).xml() + if cia.dryrun: + ui.write(msg) + elif cia.ciaurl.startswith('mailto:'): + if not cia.emailfrom: + raise util.Abort(_('email.from must be defined when ' + 'sending by email')) + cia.sendemail(cia.ciaurl[7:], msg) + else: + cia.sendrpc(msg) + + n = bin(node) + cia = hgcia(ui, repo) + if not cia.user: + ui.debug('cia: no user specified') + return + if not cia.project: + ui.debug('cia: no project specified') + return + if hooktype == 'changegroup': + start = repo.changelog.rev(n) + end = len(repo.changelog) + for rev in xrange(start, end): + n = repo.changelog.node(rev) + ctx = repo.changectx(n) + sendmsg(cia, ctx) + else: + ctx = repo.changectx(n) + sendmsg(cia, ctx) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo Binary files differnew file mode 100644 index 0000000..2c5a2ee --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py new file mode 100644 index 0000000..e8aae47 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py @@ -0,0 +1,348 @@ +# Minimal support for git commands on an hg repository +# +# Copyright 2005, 2006 Chris Mason <mason@suse.com> +# +# This software may be 
used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''browse the repository in a graphical way + +The hgk extension allows browsing the history of a repository in a +graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not +distributed with Mercurial.) + +hgk consists of two parts: a Tcl script that does the displaying and +querying of information, and an extension to Mercurial named hgk.py, +which provides hooks for hgk to get information. hgk can be found in +the contrib directory, and the extension is shipped in the hgext +repository, and needs to be enabled. + +The :hg:`view` command will launch the hgk Tcl script. For this command +to work, hgk must be in your search path. Alternately, you can specify +the path to hgk in your configuration file:: + + [hgk] + path=/location/of/hgk + +hgk can make use of the extdiff extension to visualize revisions. +Assuming you had already configured extdiff vdiff command, just add:: + + [hgk] + vdiff=vdiff + +Revisions context menu will now display additional entries to fire +vdiff on hovered and selected revisions. 
+''' + +import os +from mercurial import commands, util, patch, revlog, cmdutil +from mercurial.node import nullid, nullrev, short +from mercurial.i18n import _ + +def difftree(ui, repo, node1=None, node2=None, *files, **opts): + """diff trees from two commits""" + def __difftree(repo, node1, node2, files=[]): + assert node2 is not None + mmap = repo[node1].manifest() + mmap2 = repo[node2].manifest() + m = cmdutil.match(repo, files) + modified, added, removed = repo.status(node1, node2, m)[:3] + empty = short(nullid) + + for f in modified: + # TODO get file permissions + ui.write(":100664 100664 %s %s M\t%s\t%s\n" % + (short(mmap[f]), short(mmap2[f]), f, f)) + for f in added: + ui.write(":000000 100664 %s %s N\t%s\t%s\n" % + (empty, short(mmap2[f]), f, f)) + for f in removed: + ui.write(":100664 000000 %s %s D\t%s\t%s\n" % + (short(mmap[f]), empty, f, f)) + ## + + while True: + if opts['stdin']: + try: + line = raw_input().split(' ') + node1 = line[0] + if len(line) > 1: + node2 = line[1] + else: + node2 = None + except EOFError: + break + node1 = repo.lookup(node1) + if node2: + node2 = repo.lookup(node2) + else: + node2 = node1 + node1 = repo.changelog.parents(node1)[0] + if opts['patch']: + if opts['pretty']: + catcommit(ui, repo, node2, "") + m = cmdutil.match(repo, files) + chunks = patch.diff(repo, node1, node2, match=m, + opts=patch.diffopts(ui, {'git': True})) + for chunk in chunks: + ui.write(chunk) + else: + __difftree(repo, node1, node2, files=files) + if not opts['stdin']: + break + +def catcommit(ui, repo, n, prefix, ctx=None): + nlprefix = '\n' + prefix + if ctx is None: + ctx = repo[n] + ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ?? 
+ for p in ctx.parents(): + ui.write("parent %s\n" % p) + + date = ctx.date() + description = ctx.description().replace("\0", "") + lines = description.splitlines() + if lines and lines[-1].startswith('committer:'): + committer = lines[-1].split(': ')[1].rstrip() + else: + committer = ctx.user() + + ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])) + ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1])) + ui.write("revision %d\n" % ctx.rev()) + ui.write("branch %s\n\n" % ctx.branch()) + + if prefix != "": + ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip())) + else: + ui.write(description + "\n") + if prefix: + ui.write('\0') + +def base(ui, repo, node1, node2): + """output common ancestor information""" + node1 = repo.lookup(node1) + node2 = repo.lookup(node2) + n = repo.changelog.ancestor(node1, node2) + ui.write(short(n) + "\n") + +def catfile(ui, repo, type=None, r=None, **opts): + """cat a specific revision""" + # in stdin mode, every line except the commit is prefixed with two + # spaces. This way the our caller can find the commit without magic + # strings + # + prefix = "" + if opts['stdin']: + try: + (type, r) = raw_input().split(' ') + prefix = " " + except EOFError: + return + + else: + if not type or not r: + ui.warn(_("cat-file: type or revision not supplied\n")) + commands.help_(ui, 'cat-file') + + while r: + if type != "commit": + ui.warn(_("aborting hg cat-file only understands commits\n")) + return 1 + n = repo.lookup(r) + catcommit(ui, repo, n, prefix) + if opts['stdin']: + try: + (type, r) = raw_input().split(' ') + except EOFError: + break + else: + break + +# git rev-tree is a confusing thing. You can supply a number of +# commit sha1s on the command line, and it walks the commit history +# telling you which commits are reachable from the supplied ones via +# a bitmask based on arg position. 
+# you can specify a commit to stop at by starting the sha1 with ^ +def revtree(ui, args, repo, full="tree", maxnr=0, parents=False): + def chlogwalk(): + count = len(repo) + i = count + l = [0] * 100 + chunk = 100 + while True: + if chunk > i: + chunk = i + i = 0 + else: + i -= chunk + + for x in xrange(chunk): + if i + x >= count: + l[chunk - x:] = [0] * (chunk - x) + break + if full != None: + l[x] = repo[i + x] + l[x].changeset() # force reading + else: + l[x] = 1 + for x in xrange(chunk - 1, -1, -1): + if l[x] != 0: + yield (i + x, full != None and l[x] or None) + if i == 0: + break + + # calculate and return the reachability bitmask for sha + def is_reachable(ar, reachable, sha): + if len(ar) == 0: + return 1 + mask = 0 + for i in xrange(len(ar)): + if sha in reachable[i]: + mask |= 1 << i + + return mask + + reachable = [] + stop_sha1 = [] + want_sha1 = [] + count = 0 + + # figure out which commits they are asking for and which ones they + # want us to stop on + for i, arg in enumerate(args): + if arg.startswith('^'): + s = repo.lookup(arg[1:]) + stop_sha1.append(s) + want_sha1.append(s) + elif arg != 'HEAD': + want_sha1.append(repo.lookup(arg)) + + # calculate the graph for the supplied commits + for i, n in enumerate(want_sha1): + reachable.append(set()) + visit = [n] + reachable[i].add(n) + while visit: + n = visit.pop(0) + if n in stop_sha1: + continue + for p in repo.changelog.parents(n): + if p not in reachable[i]: + reachable[i].add(p) + visit.append(p) + if p in stop_sha1: + continue + + # walk the repository looking for commits that are in our + # reachability graph + for i, ctx in chlogwalk(): + n = repo.changelog.node(i) + mask = is_reachable(want_sha1, reachable, n) + if mask: + parentstr = "" + if parents: + pp = repo.changelog.parents(n) + if pp[0] != nullid: + parentstr += " " + short(pp[0]) + if pp[1] != nullid: + parentstr += " " + short(pp[1]) + if not full: + ui.write("%s%s\n" % (short(n), parentstr)) + elif full == "commit": + 
ui.write("%s%s\n" % (short(n), parentstr)) + catcommit(ui, repo, n, ' ', ctx) + else: + (p1, p2) = repo.changelog.parents(n) + (h, h1, h2) = map(short, (n, p1, p2)) + (i1, i2) = map(repo.changelog.rev, (p1, p2)) + + date = ctx.date()[0] + ui.write("%s %s:%s" % (date, h, mask)) + mask = is_reachable(want_sha1, reachable, p1) + if i1 != nullrev and mask > 0: + ui.write("%s:%s " % (h1, mask)), + mask = is_reachable(want_sha1, reachable, p2) + if i2 != nullrev and mask > 0: + ui.write("%s:%s " % (h2, mask)) + ui.write("\n") + if maxnr and count >= maxnr: + break + count += 1 + +def revparse(ui, repo, *revs, **opts): + """parse given revisions""" + def revstr(rev): + if rev == 'HEAD': + rev = 'tip' + return revlog.hex(repo.lookup(rev)) + + for r in revs: + revrange = r.split(':', 1) + ui.write('%s\n' % revstr(revrange[0])) + if len(revrange) == 2: + ui.write('^%s\n' % revstr(revrange[1])) + +# git rev-list tries to order things by date, and has the ability to stop +# at a given commit without walking the whole repo. 
TODO add the stop +# parameter +def revlist(ui, repo, *revs, **opts): + """print revisions""" + if opts['header']: + full = "commit" + else: + full = None + copy = [x for x in revs] + revtree(ui, copy, repo, full, opts['max_count'], opts['parents']) + +def config(ui, repo, **opts): + """print extension options""" + def writeopt(name, value): + ui.write('k=%s\nv=%s\n' % (name, value)) + + writeopt('vdiff', ui.config('hgk', 'vdiff', '')) + + +def view(ui, repo, *etc, **opts): + "start interactive history viewer" + os.chdir(repo.root) + optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v]) + cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc)) + ui.debug("running %s\n" % cmd) + util.system(cmd) + +cmdtable = { + "^view": + (view, + [('l', 'limit', '', + _('limit number of changes displayed'), _('NUM'))], + _('hg view [-l LIMIT] [REVRANGE]')), + "debug-diff-tree": + (difftree, + [('p', 'patch', None, _('generate patch')), + ('r', 'recursive', None, _('recursive')), + ('P', 'pretty', None, _('pretty')), + ('s', 'stdin', None, _('stdin')), + ('C', 'copy', None, _('detect copies')), + ('S', 'search', "", _('search'))], + _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')), + "debug-cat-file": + (catfile, + [('s', 'stdin', None, _('stdin'))], + _('hg debug-cat-file [OPTION]... TYPE FILE')), + "debug-config": + (config, [], _('hg debug-config')), + "debug-merge-base": + (base, [], _('hg debug-merge-base REV REV')), + "debug-rev-parse": + (revparse, + [('', 'default', '', _('ignored'))], + _('hg debug-rev-parse REV')), + "debug-rev-list": + (revlist, + [('H', 'header', None, _('header')), + ('t', 'topo-order', None, _('topo-order')), + ('p', 'parents', None, _('parents')), + ('n', 'max-count', 0, _('max-count'))], + _('hg debug-rev-list [OPTION]... 
REV...')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo Binary files differnew file mode 100644 index 0000000..97aa394 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py new file mode 100644 index 0000000..55e3c18 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py @@ -0,0 +1,61 @@ +# highlight - syntax highlighting in hgweb, based on Pygments +# +# Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +# +# The original module was split in an interface and an implementation +# file to defer pygments loading and speedup extension setup. + +"""syntax highlighting for hgweb (requires Pygments) + +It depends on the Pygments syntax highlighting library: +http://pygments.org/ + +There is a single configuration option:: + + [web] + pygments_style = <style> + +The default is 'colorful'. 
+""" + +import highlight +from mercurial.hgweb import webcommands, webutil, common +from mercurial import extensions, encoding + +def filerevision_highlight(orig, web, tmpl, fctx): + mt = ''.join(tmpl('mimetype', encoding=encoding.encoding)) + # only pygmentize for mimetype containing 'html' so we both match + # 'text/html' and possibly 'application/xhtml+xml' in the future + # so that we don't have to touch the extension when the mimetype + # for a template changes; also hgweb optimizes the case that a + # raw file is sent using rawfile() and doesn't call us, so we + # can't clash with the file's content-type here in case we + # pygmentize a html file + if 'html' in mt: + style = web.config('web', 'pygments_style', 'colorful') + highlight.pygmentize('fileline', fctx, style, tmpl) + return orig(web, tmpl, fctx) + +def annotate_highlight(orig, web, req, tmpl): + mt = ''.join(tmpl('mimetype', encoding=encoding.encoding)) + if 'html' in mt: + fctx = webutil.filectx(web.repo, req) + style = web.config('web', 'pygments_style', 'colorful') + highlight.pygmentize('annotateline', fctx, style, tmpl) + return orig(web, req, tmpl) + +def generate_css(web, req, tmpl): + pg_style = web.config('web', 'pygments_style', 'colorful') + fmter = highlight.HtmlFormatter(style = pg_style) + req.respond(common.HTTP_OK, 'text/css') + return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')] + +def extsetup(): + # monkeypatch in the new version + extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight) + extensions.wrapfunction(webcommands, 'annotate', annotate_highlight) + webcommands.highlightcss = generate_css + webcommands.__all__.append('highlightcss') diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyo Binary files differnew file mode 100644 index 0000000..eb0ba63 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py new file mode 100644 index 0000000..a8265cf --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py @@ -0,0 +1,61 @@ +# highlight.py - highlight extension implementation file +# +# Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +# +# The original module was split in an interface and an implementation +# file to defer pygments loading and speedup extension setup. + +from mercurial import demandimport +demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__']) +from mercurial import util, encoding + +from pygments import highlight +from pygments.util import ClassNotFound +from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer +from pygments.formatters import HtmlFormatter + +SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" ' + 'type="text/css" />') + +def pygmentize(field, fctx, style, tmpl): + + # append a <link ...> to the syntax highlighting css + old_header = tmpl.load('header') + if SYNTAX_CSS not in old_header: + new_header = old_header + SYNTAX_CSS + tmpl.cache['header'] = new_header + + text = fctx.data() + if util.binary(text): + return + + # Pygments is best used with Unicode strings: + # <http://pygments.org/docs/unicode/> + text = text.decode(encoding.encoding, 'replace') + + # To get multi-line strings right, we can't format line-by-line + try: + lexer = guess_lexer_for_filename(fctx.path(), text[:1024]) + except (ClassNotFound, ValueError): + try: + lexer = guess_lexer(text[:1024]) + except (ClassNotFound, ValueError): + lexer = TextLexer() + + formatter = HtmlFormatter(style=style) + + 
colorized = highlight(text, lexer, formatter) + # strip wrapping div + colorized = colorized[:colorized.find('\n</pre>')] + colorized = colorized[colorized.find('<pre>')+5:] + coloriter = (s.encode(encoding.encoding, 'replace') + for s in colorized.splitlines()) + + tmpl.filters['colorize'] = lambda x: coloriter.next() + + oldl = tmpl.cache[field] + newl = oldl.replace('line|escape', 'line|colorize') + tmpl.cache[field] = newl diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyo Binary files differnew file mode 100644 index 0000000..7d10f48 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py new file mode 100644 index 0000000..60c4255 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py @@ -0,0 +1,81 @@ +# interhg.py - interhg +# +# Copyright 2007 OHASHI Hideya <ohachige@gmail.com> +# +# Contributor(s): +# Edward Lee <edward.lee@engineering.uiuc.edu> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''expand expressions into changelog and summaries + +This extension allows the use of a special syntax in summaries, which +will be automatically expanded into links or any other arbitrary +expression, much like InterWiki does. + +A few example patterns (link to bug tracking, etc.) that may be used +in your hgrc:: + + [interhg] + issues = s!issue(\\d+)!<a href="http://bts/issue\\1">issue\\1</a>! + bugzilla = s!((?:bug|b=|(?=#?\\d{4,}))(?:\\s*#?)(\\d+))!<a..=\\2">\\1</a>!i + boldify = s!(^|\\s)#(\\d+)\\b! <b>#\\2</b>! 
+''' + +import re +from mercurial.hgweb import hgweb_mod +from mercurial import templatefilters, extensions +from mercurial.i18n import _ + +interhg_table = [] + +def uisetup(ui): + orig_escape = templatefilters.filters["escape"] + + def interhg_escape(x): + escstr = orig_escape(x) + for regexp, format in interhg_table: + escstr = regexp.sub(format, escstr) + return escstr + + templatefilters.filters["escape"] = interhg_escape + +def interhg_refresh(orig, self, *args, **kwargs): + interhg_table[:] = [] + for key, pattern in self.repo.ui.configitems('interhg'): + # grab the delimiter from the character after the "s" + unesc = pattern[1] + delim = re.escape(unesc) + + # identify portions of the pattern, taking care to avoid escaped + # delimiters. the replace format and flags are optional, but delimiters + # are required. + match = re.match(r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$' + % (delim, delim, delim), pattern) + if not match: + self.repo.ui.warn(_("interhg: invalid pattern for %s: %s\n") + % (key, pattern)) + continue + + # we need to unescape the delimiter for regexp and format + delim_re = re.compile(r'(?<!\\)\\%s' % delim) + regexp = delim_re.sub(unesc, match.group(1)) + format = delim_re.sub(unesc, match.group(2)) + + # the pattern allows for 6 regexp flags, so set them if necessary + flagin = match.group(3) + flags = 0 + if flagin: + for flag in flagin.upper(): + flags |= re.__dict__[flag] + + try: + regexp = re.compile(regexp, flags) + interhg_table.append((regexp, format)) + except re.error: + self.repo.ui.warn(_("interhg: invalid regexp for %s: %s\n") + % (key, regexp)) + return orig(self, *args, **kwargs) + +extensions.wrapfunction(hgweb_mod.hgweb, 'refresh', interhg_refresh) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyo Binary files differnew file mode 100644 index 0000000..c5dd4d7 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py new file mode 100644 index 0000000..9060714 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py @@ -0,0 +1,649 @@ +# keyword.py - $Keyword$ expansion for Mercurial +# +# Copyright 2007-2010 Christian Ebert <blacktrash@gmx.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +# +# $Id$ +# +# Keyword expansion hack against the grain of a DSCM +# +# There are many good reasons why this is not needed in a distributed +# SCM, still it may be useful in very small projects based on single +# files (like LaTeX packages), that are mostly addressed to an +# audience not running a version control system. +# +# For in-depth discussion refer to +# <http://mercurial.selenic.com/wiki/KeywordPlan>. +# +# Keyword expansion is based on Mercurial's changeset template mappings. +# +# Binary files are not touched. +# +# Files to act upon/ignore are specified in the [keyword] section. +# Customized keyword template mappings in the [keywordmaps] section. +# +# Run "hg help keyword" and "hg kwdemo" to get info on configuration. + +'''expand keywords in tracked files + +This extension expands RCS/CVS-like or self-customized $Keywords$ in +tracked text files selected by your configuration. + +Keywords are only expanded in local repositories and not stored in the +change history. The mechanism can be regarded as a convenience for the +current user or for archive distribution. + +Keywords expand to the changeset data pertaining to the latest change +relative to the working directory parent of each file. + +Configuration is done in the [keyword], [keywordset] and [keywordmaps] +sections of hgrc files. 
+ +Example:: + + [keyword] + # expand keywords in every python file except those matching "x*" + **.py = + x* = ignore + + [keywordset] + # prefer svn- over cvs-like default keywordmaps + svn = True + +.. note:: + The more specific you are in your filename patterns the less you + lose speed in huge repositories. + +For [keywordmaps] template mapping and expansion demonstration and +control run :hg:`kwdemo`. See :hg:`help templates` for a list of +available templates and filters. + +Three additional date template filters are provided:: + + utcdate "2006/09/18 15:13:13" + svnutcdate "2006-09-18 15:13:13Z" + svnisodate "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)" + +The default template mappings (view with :hg:`kwdemo -d`) can be +replaced with customized keywords and templates. Again, run +:hg:`kwdemo` to control the results of your configuration changes. + +Before changing/disabling active keywords, run :hg:`kwshrink` to avoid +the risk of inadvertently storing expanded keywords in the change +history. + +To force expansion after enabling it, or a configuration change, run +:hg:`kwexpand`. + +Expansions spanning more than one line and incremental expansions, +like CVS' $Log$, are not supported. A keyword template map "Log = +{desc}" expands to the first line of the changeset description. 
+''' + +from mercurial import commands, context, cmdutil, dispatch, filelog, extensions +from mercurial import localrepo, match, patch, templatefilters, templater, util +from mercurial.hgweb import webcommands +from mercurial.i18n import _ +import os, re, shutil, tempfile + +commands.optionalrepo += ' kwdemo' + +# hg commands that do not act on keywords +nokwcommands = ('add addremove annotate bundle export grep incoming init log' + ' outgoing push tip verify convert email glog') + +# hg commands that trigger expansion only when writing to working dir, +# not when reading filelog, and unexpand when reading from working dir +restricted = 'merge kwexpand kwshrink record qrecord resolve transplant' + +# names of extensions using dorecord +recordextensions = 'record' + +# date like in cvs' $Date +utcdate = lambda x: util.datestr((x[0], 0), '%Y/%m/%d %H:%M:%S') +# date like in svn's $Date +svnisodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)') +# date like in svn's $Id +svnutcdate = lambda x: util.datestr((x[0], 0), '%Y-%m-%d %H:%M:%SZ') + +# make keyword tools accessible +kwtools = {'templater': None, 'hgcmd': ''} + + +def _defaultkwmaps(ui): + '''Returns default keywordmaps according to keywordset configuration.''' + templates = { + 'Revision': '{node|short}', + 'Author': '{author|user}', + } + kwsets = ({ + 'Date': '{date|utcdate}', + 'RCSfile': '{file|basename},v', + 'RCSFile': '{file|basename},v', # kept for backwards compatibility + # with hg-keyword + 'Source': '{root}/{file},v', + 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}', + 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}', + }, { + 'Date': '{date|svnisodate}', + 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}', + 'LastChangedRevision': '{node|short}', + 'LastChangedBy': '{author|user}', + 'LastChangedDate': '{date|svnisodate}', + }) + templates.update(kwsets[ui.configbool('keywordset', 'svn')]) + return templates + 
+def _shrinktext(text, subfunc): + '''Helper for keyword expansion removal in text. + Depending on subfunc also returns number of substitutions.''' + return subfunc(r'$\1$', text) + +def _preselect(wstatus, changed): + '''Retrieves modfied and added files from a working directory state + and returns the subset of each contained in given changed files + retrieved from a change context.''' + modified, added = wstatus[:2] + modified = [f for f in modified if f in changed] + added = [f for f in added if f in changed] + return modified, added + + +class kwtemplater(object): + ''' + Sets up keyword templates, corresponding keyword regex, and + provides keyword substitution functions. + ''' + + def __init__(self, ui, repo, inc, exc): + self.ui = ui + self.repo = repo + self.match = match.match(repo.root, '', [], inc, exc) + self.restrict = kwtools['hgcmd'] in restricted.split() + self.record = False + + kwmaps = self.ui.configitems('keywordmaps') + if kwmaps: # override default templates + self.templates = dict((k, templater.parsestring(v, False)) + for k, v in kwmaps) + else: + self.templates = _defaultkwmaps(self.ui) + escaped = '|'.join(map(re.escape, self.templates.keys())) + self.re_kw = re.compile(r'\$(%s)\$' % escaped) + self.re_kwexp = re.compile(r'\$(%s): [^$\n\r]*? 
\$' % escaped) + + templatefilters.filters.update({'utcdate': utcdate, + 'svnisodate': svnisodate, + 'svnutcdate': svnutcdate}) + + def substitute(self, data, path, ctx, subfunc): + '''Replaces keywords in data with expanded template.''' + def kwsub(mobj): + kw = mobj.group(1) + ct = cmdutil.changeset_templater(self.ui, self.repo, + False, None, '', False) + ct.use_template(self.templates[kw]) + self.ui.pushbuffer() + ct.show(ctx, root=self.repo.root, file=path) + ekw = templatefilters.firstline(self.ui.popbuffer()) + return '$%s: %s $' % (kw, ekw) + return subfunc(kwsub, data) + + def expand(self, path, node, data): + '''Returns data with keywords expanded.''' + if not self.restrict and self.match(path) and not util.binary(data): + ctx = self.repo.filectx(path, fileid=node).changectx() + return self.substitute(data, path, ctx, self.re_kw.sub) + return data + + def iskwfile(self, cand, ctx): + '''Returns subset of candidates which are configured for keyword + expansion are not symbolic links.''' + return [f for f in cand if self.match(f) and not 'l' in ctx.flags(f)] + + def overwrite(self, ctx, candidates, lookup, expand, rekw=False): + '''Overwrites selected files expanding/shrinking keywords.''' + if self.restrict or lookup or self.record: # exclude kw_copy + candidates = self.iskwfile(candidates, ctx) + if not candidates: + return + kwcmd = self.restrict and lookup # kwexpand/kwshrink + if self.restrict or expand and lookup: + mf = ctx.manifest() + fctx = ctx + subn = (self.restrict or rekw) and self.re_kw.subn or self.re_kwexp.subn + msg = (expand and _('overwriting %s expanding keywords\n') + or _('overwriting %s shrinking keywords\n')) + for f in candidates: + if self.restrict: + data = self.repo.file(f).read(mf[f]) + else: + data = self.repo.wread(f) + if util.binary(data): + continue + if expand: + if lookup: + fctx = self.repo.filectx(f, fileid=mf[f]).changectx() + data, found = self.substitute(data, f, fctx, subn) + elif self.restrict: + found = 
self.re_kw.search(data) + else: + data, found = _shrinktext(data, subn) + if found: + self.ui.note(msg % f) + self.repo.wwrite(f, data, ctx.flags(f)) + if kwcmd: + self.repo.dirstate.normal(f) + elif self.record: + self.repo.dirstate.normallookup(f) + + def shrink(self, fname, text): + '''Returns text with all keyword substitutions removed.''' + if self.match(fname) and not util.binary(text): + return _shrinktext(text, self.re_kwexp.sub) + return text + + def shrinklines(self, fname, lines): + '''Returns lines with keyword substitutions removed.''' + if self.match(fname): + text = ''.join(lines) + if not util.binary(text): + return _shrinktext(text, self.re_kwexp.sub).splitlines(True) + return lines + + def wread(self, fname, data): + '''If in restricted mode returns data read from wdir with + keyword substitutions removed.''' + return self.restrict and self.shrink(fname, data) or data + +class kwfilelog(filelog.filelog): + ''' + Subclass of filelog to hook into its read, add, cmp methods. + Keywords are "stored" unexpanded, and processed on reading. + ''' + def __init__(self, opener, kwt, path): + super(kwfilelog, self).__init__(opener, path) + self.kwt = kwt + self.path = path + + def read(self, node): + '''Expands keywords when reading filelog.''' + data = super(kwfilelog, self).read(node) + if self.renamed(node): + return data + return self.kwt.expand(self.path, node, data) + + def add(self, text, meta, tr, link, p1=None, p2=None): + '''Removes keyword substitutions when adding to filelog.''' + text = self.kwt.shrink(self.path, text) + return super(kwfilelog, self).add(text, meta, tr, link, p1, p2) + + def cmp(self, node, text): + '''Removes keyword substitutions for comparison.''' + text = self.kwt.shrink(self.path, text) + return super(kwfilelog, self).cmp(node, text) + +def _status(ui, repo, kwt, *pats, **opts): + '''Bails out if [keyword] configuration is not active. 
+ Returns status of working directory.''' + if kwt: + return repo.status(match=cmdutil.match(repo, pats, opts), clean=True, + unknown=opts.get('unknown') or opts.get('all')) + if ui.configitems('keyword'): + raise util.Abort(_('[keyword] patterns cannot match')) + raise util.Abort(_('no [keyword] patterns configured')) + +def _kwfwrite(ui, repo, expand, *pats, **opts): + '''Selects files and passes them to kwtemplater.overwrite.''' + wctx = repo[None] + if len(wctx.parents()) > 1: + raise util.Abort(_('outstanding uncommitted merge')) + kwt = kwtools['templater'] + wlock = repo.wlock() + try: + status = _status(ui, repo, kwt, *pats, **opts) + modified, added, removed, deleted, unknown, ignored, clean = status + if modified or added or removed or deleted: + raise util.Abort(_('outstanding uncommitted changes')) + kwt.overwrite(wctx, clean, True, expand) + finally: + wlock.release() + +def demo(ui, repo, *args, **opts): + '''print [keywordmaps] configuration and an expansion example + + Show current, custom, or default keyword template maps and their + expansions. + + Extend the current configuration by specifying maps as arguments + and using -f/--rcfile to source an external hgrc file. + + Use -d/--default to disable current configuration. + + See :hg:`help templates` for information on templates and filters. 
+ ''' + def demoitems(section, items): + ui.write('[%s]\n' % section) + for k, v in sorted(items): + ui.write('%s = %s\n' % (k, v)) + + fn = 'demo.txt' + tmpdir = tempfile.mkdtemp('', 'kwdemo.') + ui.note(_('creating temporary repository at %s\n') % tmpdir) + repo = localrepo.localrepository(ui, tmpdir, True) + ui.setconfig('keyword', fn, '') + + uikwmaps = ui.configitems('keywordmaps') + if args or opts.get('rcfile'): + ui.status(_('\n\tconfiguration using custom keyword template maps\n')) + if uikwmaps: + ui.status(_('\textending current template maps\n')) + if opts.get('default') or not uikwmaps: + ui.status(_('\toverriding default template maps\n')) + if opts.get('rcfile'): + ui.readconfig(opts.get('rcfile')) + if args: + # simulate hgrc parsing + rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args] + fp = repo.opener('hgrc', 'w') + fp.writelines(rcmaps) + fp.close() + ui.readconfig(repo.join('hgrc')) + kwmaps = dict(ui.configitems('keywordmaps')) + elif opts.get('default'): + ui.status(_('\n\tconfiguration using default keyword template maps\n')) + kwmaps = _defaultkwmaps(ui) + if uikwmaps: + ui.status(_('\tdisabling current template maps\n')) + for k, v in kwmaps.iteritems(): + ui.setconfig('keywordmaps', k, v) + else: + ui.status(_('\n\tconfiguration using current keyword template maps\n')) + kwmaps = dict(uikwmaps) or _defaultkwmaps(ui) + + uisetup(ui) + reposetup(ui, repo) + ui.write('[extensions]\nkeyword =\n') + demoitems('keyword', ui.configitems('keyword')) + demoitems('keywordmaps', kwmaps.iteritems()) + keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n' + repo.wopener(fn, 'w').write(keywords) + repo[None].add([fn]) + ui.note(_('\nkeywords written to %s:\n') % fn) + ui.note(keywords) + repo.dirstate.setbranch('demobranch') + for name, cmd in ui.configitems('hooks'): + if name.split('.', 1)[0].find('commit') > -1: + repo.ui.setconfig('hooks', name, '') + msg = _('hg keyword configuration and expansion example') + ui.note("hg ci -m '%s'\n" % 
msg) + repo.commit(text=msg) + ui.status(_('\n\tkeywords expanded\n')) + ui.write(repo.wread(fn)) + shutil.rmtree(tmpdir, ignore_errors=True) + +def expand(ui, repo, *pats, **opts): + '''expand keywords in the working directory + + Run after (re)enabling keyword expansion. + + kwexpand refuses to run if given files contain local changes. + ''' + # 3rd argument sets expansion to True + _kwfwrite(ui, repo, True, *pats, **opts) + +def files(ui, repo, *pats, **opts): + '''show files configured for keyword expansion + + List which files in the working directory are matched by the + [keyword] configuration patterns. + + Useful to prevent inadvertent keyword expansion and to speed up + execution by including only files that are actual candidates for + expansion. + + See :hg:`help keyword` on how to construct patterns both for + inclusion and exclusion of files. + + With -A/--all and -v/--verbose the codes used to show the status + of files are:: + + K = keyword expansion candidate + k = keyword expansion candidate (not tracked) + I = ignored + i = ignored (not tracked) + ''' + kwt = kwtools['templater'] + status = _status(ui, repo, kwt, *pats, **opts) + cwd = pats and repo.getcwd() or '' + modified, added, removed, deleted, unknown, ignored, clean = status + files = [] + if not opts.get('unknown') or opts.get('all'): + files = sorted(modified + added + clean) + wctx = repo[None] + kwfiles = kwt.iskwfile(files, wctx) + kwunknown = kwt.iskwfile(unknown, wctx) + if not opts.get('ignore') or opts.get('all'): + showfiles = kwfiles, kwunknown + else: + showfiles = [], [] + if opts.get('all') or opts.get('ignore'): + showfiles += ([f for f in files if f not in kwfiles], + [f for f in unknown if f not in kwunknown]) + for char, filenames in zip('KkIi', showfiles): + fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n' + for f in filenames: + ui.write(fmt % repo.pathto(f, cwd)) + +def shrink(ui, repo, *pats, **opts): + '''revert expanded keywords in the working 
directory + + Run before changing/disabling active keywords or if you experience + problems with :hg:`import` or :hg:`merge`. + + kwshrink refuses to run if given files contain local changes. + ''' + # 3rd argument sets expansion to False + _kwfwrite(ui, repo, False, *pats, **opts) + + +def uisetup(ui): + ''' Monkeypatches dispatch._parse to retrieve user command.''' + + def kwdispatch_parse(orig, ui, args): + '''Monkeypatch dispatch._parse to obtain running hg command.''' + cmd, func, args, options, cmdoptions = orig(ui, args) + kwtools['hgcmd'] = cmd + return cmd, func, args, options, cmdoptions + + extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse) + +def reposetup(ui, repo): + '''Sets up repo as kwrepo for keyword substitution. + Overrides file method to return kwfilelog instead of filelog + if file matches user configuration. + Wraps commit to overwrite configured files with updated + keyword substitutions. + Monkeypatches patch and webcommands.''' + + try: + if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split() + or '.hg' in util.splitpath(repo.root) + or repo._url.startswith('bundle:')): + return + except AttributeError: + pass + + inc, exc = [], ['.hg*'] + for pat, opt in ui.configitems('keyword'): + if opt != 'ignore': + inc.append(pat) + else: + exc.append(pat) + if not inc: + return + + kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc) + + class kwrepo(repo.__class__): + def file(self, f): + if f[0] == '/': + f = f[1:] + return kwfilelog(self.sopener, kwt, f) + + def wread(self, filename): + data = super(kwrepo, self).wread(filename) + return kwt.wread(filename, data) + + def commit(self, *args, **opts): + # use custom commitctx for user commands + # other extensions can still wrap repo.commitctx directly + self.commitctx = self.kwcommitctx + try: + return super(kwrepo, self).commit(*args, **opts) + finally: + del self.commitctx + + def kwcommitctx(self, ctx, error=False): + n = super(kwrepo, self).commitctx(ctx, error) + 
# no lock needed, only called from repo.commit() which already locks + if not kwt.record: + restrict = kwt.restrict + kwt.restrict = True + kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()), + False, True) + kwt.restrict = restrict + return n + + def rollback(self, dryrun=False): + wlock = self.wlock() + try: + if not dryrun: + changed = self['.'].files() + ret = super(kwrepo, self).rollback(dryrun) + if not dryrun: + ctx = self['.'] + modified, added = _preselect(self[None].status(), changed) + kwt.overwrite(ctx, modified, True, True) + kwt.overwrite(ctx, added, True, False) + return ret + finally: + wlock.release() + + # monkeypatches + def kwpatchfile_init(orig, self, ui, fname, opener, + missing=False, eolmode=None): + '''Monkeypatch/wrap patch.patchfile.__init__ to avoid + rejects or conflicts due to expanded keywords in working dir.''' + orig(self, ui, fname, opener, missing, eolmode) + # shrink keywords read from working dir + self.lines = kwt.shrinklines(self.fname, self.lines) + + def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None, + opts=None, prefix=''): + '''Monkeypatch patch.diff to avoid expansion.''' + kwt.restrict = True + return orig(repo, node1, node2, match, changes, opts, prefix) + + def kwweb_skip(orig, web, req, tmpl): + '''Wraps webcommands.x turning off keyword expansion.''' + kwt.match = util.never + return orig(web, req, tmpl) + + def kw_copy(orig, ui, repo, pats, opts, rename=False): + '''Wraps cmdutil.copy so that copy/rename destinations do not + contain expanded keywords. 
+ Note that the source of a regular file destination may also be a + symlink: + hg cp sym x -> x is symlink + cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords) + For the latter we have to follow the symlink to find out whether its + target is configured for expansion and we therefore must unexpand the + keywords in the destination.''' + orig(ui, repo, pats, opts, rename) + if opts.get('dry_run'): + return + wctx = repo[None] + cwd = repo.getcwd() + + def haskwsource(dest): + '''Returns true if dest is a regular file and configured for + expansion or a symlink which points to a file configured for + expansion. ''' + source = repo.dirstate.copied(dest) + if 'l' in wctx.flags(source): + source = util.canonpath(repo.root, cwd, + os.path.realpath(source)) + return kwt.match(source) + + candidates = [f for f in repo.dirstate.copies() if + not 'l' in wctx.flags(f) and haskwsource(f)] + kwt.overwrite(wctx, candidates, False, False) + + def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts): + '''Wraps record.dorecord expanding keywords after recording.''' + wlock = repo.wlock() + try: + # record returns 0 even when nothing has changed + # therefore compare nodes before and after + kwt.record = True + ctx = repo['.'] + wstatus = repo[None].status() + ret = orig(ui, repo, commitfunc, *pats, **opts) + recctx = repo['.'] + if ctx != recctx: + modified, added = _preselect(wstatus, recctx.files()) + kwt.restrict = False + kwt.overwrite(recctx, modified, False, True) + kwt.overwrite(recctx, added, False, True, True) + kwt.restrict = True + return ret + finally: + wlock.release() + + repo.__class__ = kwrepo + + def kwfilectx_cmp(orig, self, fctx): + # keyword affects data size, comparing wdir and filelog size does + # not make sense + if (fctx._filerev is None and + (self._repo._encodefilterpats or + kwt.match(fctx.path()) and not 'l' in fctx.flags()) or + self.size() == fctx.size()): + return self._filelog.cmp(self._filenode, fctx.data()) + return True + + 
extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp) + extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init) + extensions.wrapfunction(patch, 'diff', kw_diff) + extensions.wrapfunction(cmdutil, 'copy', kw_copy) + for c in 'annotate changeset rev filediff diff'.split(): + extensions.wrapfunction(webcommands, c, kwweb_skip) + for name in recordextensions.split(): + try: + record = extensions.find(name) + extensions.wrapfunction(record, 'dorecord', kw_dorecord) + except KeyError: + pass + +cmdtable = { + 'kwdemo': + (demo, + [('d', 'default', None, _('show default keyword template maps')), + ('f', 'rcfile', '', + _('read maps from rcfile'), _('FILE'))], + _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')), + 'kwexpand': (expand, commands.walkopts, + _('hg kwexpand [OPTION]... [FILE]...')), + 'kwfiles': + (files, + [('A', 'all', None, _('show keyword status flags of all files')), + ('i', 'ignore', None, _('show files excluded from expansion')), + ('u', 'unknown', None, _('only show unknown (not tracked) files')), + ] + commands.walkopts, + _('hg kwfiles [OPTION]... [FILE]...')), + 'kwshrink': (shrink, commands.walkopts, + _('hg kwshrink [OPTION]... [FILE]...')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyo Binary files differnew file mode 100644 index 0000000..e6e44ac --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py new file mode 100644 index 0000000..5137089 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py @@ -0,0 +1,3211 @@ +# mq.py - patch queues for mercurial +# +# Copyright 2005, 2006 Chris Mason <mason@suse.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +'''manage a stack of patches + +This extension lets you work with a stack of patches in a Mercurial +repository. It manages two stacks of patches - all known patches, and +applied patches (subset of known patches). + +Known patches are represented as patch files in the .hg/patches +directory. Applied patches are both patch files and changesets. + +Common tasks (use :hg:`help command` for more details):: + + create new patch qnew + import existing patch qimport + + print patch series qseries + print applied patches qapplied + + add known patch to applied stack qpush + remove patch from applied stack qpop + refresh contents of top applied patch qrefresh + +By default, mq will automatically use git patches when required to +avoid losing file mode changes, copy records, binary files or empty +files creations or deletions. This behaviour can be configured with:: + + [mq] + git = auto/keep/yes/no + +If set to 'keep', mq will obey the [diff] section configuration while +preserving existing git patches upon qrefresh. If set to 'yes' or +'no', mq will override the [diff] section and always generate git or +regular patches, possibly losing data in the second case. + +You will by default be managing a patch queue named "patches". You can +create other, independent patch queues with the :hg:`qqueue` command. +''' + +from mercurial.i18n import _ +from mercurial.node import bin, hex, short, nullid, nullrev +from mercurial.lock import release +from mercurial import commands, cmdutil, hg, patch, util +from mercurial import repair, extensions, url, error +import os, sys, re, errno, shutil + +commands.norepo += " qclone" + +# Patch names looks like unix-file names. +# They must be joinable with queue directory and result in the patch path. 
+normname = util.normpath + +class statusentry(object): + def __init__(self, node, name): + self.node, self.name = node, name + def __repr__(self): + return hex(self.node) + ':' + self.name + +class patchheader(object): + def __init__(self, pf, plainmode=False): + def eatdiff(lines): + while lines: + l = lines[-1] + if (l.startswith("diff -") or + l.startswith("Index:") or + l.startswith("===========")): + del lines[-1] + else: + break + def eatempty(lines): + while lines: + if not lines[-1].strip(): + del lines[-1] + else: + break + + message = [] + comments = [] + user = None + date = None + parent = None + format = None + subject = None + diffstart = 0 + + for line in file(pf): + line = line.rstrip() + if (line.startswith('diff --git') + or (diffstart and line.startswith('+++ '))): + diffstart = 2 + break + diffstart = 0 # reset + if line.startswith("--- "): + diffstart = 1 + continue + elif format == "hgpatch": + # parse values when importing the result of an hg export + if line.startswith("# User "): + user = line[7:] + elif line.startswith("# Date "): + date = line[7:] + elif line.startswith("# Parent "): + parent = line[9:] + elif not line.startswith("# ") and line: + message.append(line) + format = None + elif line == '# HG changeset patch': + message = [] + format = "hgpatch" + elif (format != "tagdone" and (line.startswith("Subject: ") or + line.startswith("subject: "))): + subject = line[9:] + format = "tag" + elif (format != "tagdone" and (line.startswith("From: ") or + line.startswith("from: "))): + user = line[6:] + format = "tag" + elif (format != "tagdone" and (line.startswith("Date: ") or + line.startswith("date: "))): + date = line[6:] + format = "tag" + elif format == "tag" and line == "": + # when looking for tags (subject: from: etc) they + # end once you find a blank line in the source + format = "tagdone" + elif message or line: + message.append(line) + comments.append(line) + + eatdiff(message) + eatdiff(comments) + eatempty(message) + 
eatempty(comments) + + # make sure message isn't empty + if format and format.startswith("tag") and subject: + message.insert(0, "") + message.insert(0, subject) + + self.message = message + self.comments = comments + self.user = user + self.date = date + self.parent = parent + self.haspatch = diffstart > 1 + self.plainmode = plainmode + + def setuser(self, user): + if not self.updateheader(['From: ', '# User '], user): + try: + patchheaderat = self.comments.index('# HG changeset patch') + self.comments.insert(patchheaderat + 1, '# User ' + user) + except ValueError: + if self.plainmode or self._hasheader(['Date: ']): + self.comments = ['From: ' + user] + self.comments + else: + tmp = ['# HG changeset patch', '# User ' + user, ''] + self.comments = tmp + self.comments + self.user = user + + def setdate(self, date): + if not self.updateheader(['Date: ', '# Date '], date): + try: + patchheaderat = self.comments.index('# HG changeset patch') + self.comments.insert(patchheaderat + 1, '# Date ' + date) + except ValueError: + if self.plainmode or self._hasheader(['From: ']): + self.comments = ['Date: ' + date] + self.comments + else: + tmp = ['# HG changeset patch', '# Date ' + date, ''] + self.comments = tmp + self.comments + self.date = date + + def setparent(self, parent): + if not self.updateheader(['# Parent '], parent): + try: + patchheaderat = self.comments.index('# HG changeset patch') + self.comments.insert(patchheaderat + 1, '# Parent ' + parent) + except ValueError: + pass + self.parent = parent + + def setmessage(self, message): + if self.comments: + self._delmsg() + self.message = [message] + self.comments += self.message + + def updateheader(self, prefixes, new): + '''Update all references to a field in the patch header. 
+ Return whether the field is present.''' + res = False + for prefix in prefixes: + for i in xrange(len(self.comments)): + if self.comments[i].startswith(prefix): + self.comments[i] = prefix + new + res = True + break + return res + + def _hasheader(self, prefixes): + '''Check if a header starts with any of the given prefixes.''' + for prefix in prefixes: + for comment in self.comments: + if comment.startswith(prefix): + return True + return False + + def __str__(self): + if not self.comments: + return '' + return '\n'.join(self.comments) + '\n\n' + + def _delmsg(self): + '''Remove existing message, keeping the rest of the comments fields. + If comments contains 'subject: ', message will prepend + the field and a blank line.''' + if self.message: + subj = 'subject: ' + self.message[0].lower() + for i in xrange(len(self.comments)): + if subj == self.comments[i].lower(): + del self.comments[i] + self.message = self.message[2:] + break + ci = 0 + for mi in self.message: + while mi != self.comments[ci]: + ci += 1 + del self.comments[ci] + +class queue(object): + def __init__(self, ui, path, patchdir=None): + self.basepath = path + try: + fh = open(os.path.join(path, 'patches.queue')) + cur = fh.read().rstrip() + if not cur: + curpath = os.path.join(path, 'patches') + else: + curpath = os.path.join(path, 'patches-' + cur) + except IOError: + curpath = os.path.join(path, 'patches') + self.path = patchdir or curpath + self.opener = util.opener(self.path) + self.ui = ui + self.applied_dirty = 0 + self.series_dirty = 0 + self.added = [] + self.series_path = "series" + self.status_path = "status" + self.guards_path = "guards" + self.active_guards = None + self.guards_dirty = False + # Handle mq.git as a bool with extended values + try: + gitmode = ui.configbool('mq', 'git', None) + if gitmode is None: + raise error.ConfigError() + self.gitmode = gitmode and 'yes' or 'no' + except error.ConfigError: + self.gitmode = ui.config('mq', 'git', 'auto').lower() + self.plainmode = 
ui.configbool('mq', 'plain', False) + + @util.propertycache + def applied(self): + if os.path.exists(self.join(self.status_path)): + def parse(l): + n, name = l.split(':', 1) + return statusentry(bin(n), name) + lines = self.opener(self.status_path).read().splitlines() + return [parse(l) for l in lines] + return [] + + @util.propertycache + def full_series(self): + if os.path.exists(self.join(self.series_path)): + return self.opener(self.series_path).read().splitlines() + return [] + + @util.propertycache + def series(self): + self.parse_series() + return self.series + + @util.propertycache + def series_guards(self): + self.parse_series() + return self.series_guards + + def invalidate(self): + for a in 'applied full_series series series_guards'.split(): + if a in self.__dict__: + delattr(self, a) + self.applied_dirty = 0 + self.series_dirty = 0 + self.guards_dirty = False + self.active_guards = None + + def diffopts(self, opts={}, patchfn=None): + diffopts = patch.diffopts(self.ui, opts) + if self.gitmode == 'auto': + diffopts.upgrade = True + elif self.gitmode == 'keep': + pass + elif self.gitmode in ('yes', 'no'): + diffopts.git = self.gitmode == 'yes' + else: + raise util.Abort(_('mq.git option can be auto/keep/yes/no' + ' got %s') % self.gitmode) + if patchfn: + diffopts = self.patchopts(diffopts, patchfn) + return diffopts + + def patchopts(self, diffopts, *patches): + """Return a copy of input diff options with git set to true if + referenced patch is a git patch and should be preserved as such. 
+ """ + diffopts = diffopts.copy() + if not diffopts.git and self.gitmode == 'keep': + for patchfn in patches: + patchf = self.opener(patchfn, 'r') + # if the patch was a git patch, refresh it as a git patch + for line in patchf: + if line.startswith('diff --git'): + diffopts.git = True + break + patchf.close() + return diffopts + + def join(self, *p): + return os.path.join(self.path, *p) + + def find_series(self, patch): + def matchpatch(l): + l = l.split('#', 1)[0] + return l.strip() == patch + for index, l in enumerate(self.full_series): + if matchpatch(l): + return index + return None + + guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)') + + def parse_series(self): + self.series = [] + self.series_guards = [] + for l in self.full_series: + h = l.find('#') + if h == -1: + patch = l + comment = '' + elif h == 0: + continue + else: + patch = l[:h] + comment = l[h:] + patch = patch.strip() + if patch: + if patch in self.series: + raise util.Abort(_('%s appears more than once in %s') % + (patch, self.join(self.series_path))) + self.series.append(patch) + self.series_guards.append(self.guard_re.findall(comment)) + + def check_guard(self, guard): + if not guard: + return _('guard cannot be an empty string') + bad_chars = '# \t\r\n\f' + first = guard[0] + if first in '-+': + return (_('guard %r starts with invalid character: %r') % + (guard, first)) + for c in bad_chars: + if c in guard: + return _('invalid character in guard %r: %r') % (guard, c) + + def set_active(self, guards): + for guard in guards: + bad = self.check_guard(guard) + if bad: + raise util.Abort(bad) + guards = sorted(set(guards)) + self.ui.debug('active guards: %s\n' % ' '.join(guards)) + self.active_guards = guards + self.guards_dirty = True + + def active(self): + if self.active_guards is None: + self.active_guards = [] + try: + guards = self.opener(self.guards_path).read().split() + except IOError, err: + if err.errno != errno.ENOENT: + raise + guards = [] + for i, guard in 
enumerate(guards): + bad = self.check_guard(guard) + if bad: + self.ui.warn('%s:%d: %s\n' % + (self.join(self.guards_path), i + 1, bad)) + else: + self.active_guards.append(guard) + return self.active_guards + + def set_guards(self, idx, guards): + for g in guards: + if len(g) < 2: + raise util.Abort(_('guard %r too short') % g) + if g[0] not in '-+': + raise util.Abort(_('guard %r starts with invalid char') % g) + bad = self.check_guard(g[1:]) + if bad: + raise util.Abort(bad) + drop = self.guard_re.sub('', self.full_series[idx]) + self.full_series[idx] = drop + ''.join([' #' + g for g in guards]) + self.parse_series() + self.series_dirty = True + + def pushable(self, idx): + if isinstance(idx, str): + idx = self.series.index(idx) + patchguards = self.series_guards[idx] + if not patchguards: + return True, None + guards = self.active() + exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards] + if exactneg: + return False, exactneg[0] + pos = [g for g in patchguards if g[0] == '+'] + exactpos = [g for g in pos if g[1:] in guards] + if pos: + if exactpos: + return True, exactpos[0] + return False, pos + return True, '' + + def explain_pushable(self, idx, all_patches=False): + write = all_patches and self.ui.write or self.ui.warn + if all_patches or self.ui.verbose: + if isinstance(idx, str): + idx = self.series.index(idx) + pushable, why = self.pushable(idx) + if all_patches and pushable: + if why is None: + write(_('allowing %s - no guards in effect\n') % + self.series[idx]) + else: + if not why: + write(_('allowing %s - no matching negative guards\n') % + self.series[idx]) + else: + write(_('allowing %s - guarded by %r\n') % + (self.series[idx], why)) + if not pushable: + if why: + write(_('skipping %s - guarded by %r\n') % + (self.series[idx], why)) + else: + write(_('skipping %s - no matching guards\n') % + self.series[idx]) + + def save_dirty(self): + def write_list(items, path): + fp = self.opener(path, 'w') + for i in items: + fp.write("%s\n" 
% i) + fp.close() + if self.applied_dirty: + write_list(map(str, self.applied), self.status_path) + if self.series_dirty: + write_list(self.full_series, self.series_path) + if self.guards_dirty: + write_list(self.active_guards, self.guards_path) + if self.added: + qrepo = self.qrepo() + if qrepo: + qrepo[None].add(f for f in self.added if f not in qrepo[None]) + self.added = [] + + def removeundo(self, repo): + undo = repo.sjoin('undo') + if not os.path.exists(undo): + return + try: + os.unlink(undo) + except OSError, inst: + self.ui.warn(_('error removing undo: %s\n') % str(inst)) + + def printdiff(self, repo, diffopts, node1, node2=None, files=None, + fp=None, changes=None, opts={}): + stat = opts.get('stat') + m = cmdutil.match(repo, files, opts) + cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m, + changes, stat, fp) + + def mergeone(self, repo, mergeq, head, patch, rev, diffopts): + # first try just applying the patch + (err, n) = self.apply(repo, [patch], update_status=False, + strict=True, merge=rev) + + if err == 0: + return (err, n) + + if n is None: + raise util.Abort(_("apply failed for patch %s") % patch) + + self.ui.warn(_("patch didn't work out, merging %s\n") % patch) + + # apply failed, strip away that rev and merge. 
+ hg.clean(repo, head) + self.strip(repo, [n], update=False, backup='strip') + + ctx = repo[rev] + ret = hg.merge(repo, rev) + if ret: + raise util.Abort(_("update returned %d") % ret) + n = repo.commit(ctx.description(), ctx.user(), force=True) + if n is None: + raise util.Abort(_("repo commit failed")) + try: + ph = patchheader(mergeq.join(patch), self.plainmode) + except: + raise util.Abort(_("unable to read %s") % patch) + + diffopts = self.patchopts(diffopts, patch) + patchf = self.opener(patch, "w") + comments = str(ph) + if comments: + patchf.write(comments) + self.printdiff(repo, diffopts, head, n, fp=patchf) + patchf.close() + self.removeundo(repo) + return (0, n) + + def qparents(self, repo, rev=None): + if rev is None: + (p1, p2) = repo.dirstate.parents() + if p2 == nullid: + return p1 + if not self.applied: + return None + return self.applied[-1].node + p1, p2 = repo.changelog.parents(rev) + if p2 != nullid and p2 in [x.node for x in self.applied]: + return p2 + return p1 + + def mergepatch(self, repo, mergeq, series, diffopts): + if not self.applied: + # each of the patches merged in will have two parents. This + # can confuse the qrefresh, qdiff, and strip code because it + # needs to know which parent is actually in the patch queue. + # so, we insert a merge marker with only one parent. 
This way + # the first patch in the queue is never a merge patch + # + pname = ".hg.patches.merge.marker" + n = repo.commit('[mq]: merge marker', force=True) + self.removeundo(repo) + self.applied.append(statusentry(n, pname)) + self.applied_dirty = 1 + + head = self.qparents(repo) + + for patch in series: + patch = mergeq.lookup(patch, strict=True) + if not patch: + self.ui.warn(_("patch %s does not exist\n") % patch) + return (1, None) + pushable, reason = self.pushable(patch) + if not pushable: + self.explain_pushable(patch, all_patches=True) + continue + info = mergeq.isapplied(patch) + if not info: + self.ui.warn(_("patch %s is not applied\n") % patch) + return (1, None) + rev = info[1] + err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts) + if head: + self.applied.append(statusentry(head, patch)) + self.applied_dirty = 1 + if err: + return (err, head) + self.save_dirty() + return (0, head) + + def patch(self, repo, patchfile): + '''Apply patchfile to the working directory. 
+ patchfile: name of patch file''' + files = {} + try: + fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root, + files=files, eolmode=None) + except Exception, inst: + self.ui.note(str(inst) + '\n') + if not self.ui.verbose: + self.ui.warn(_("patch failed, unable to continue (try -v)\n")) + return (False, files, False) + + return (True, files, fuzz) + + def apply(self, repo, series, list=False, update_status=True, + strict=False, patchdir=None, merge=None, all_files=None): + wlock = lock = tr = None + try: + wlock = repo.wlock() + lock = repo.lock() + tr = repo.transaction("qpush") + try: + ret = self._apply(repo, series, list, update_status, + strict, patchdir, merge, all_files=all_files) + tr.close() + self.save_dirty() + return ret + except: + try: + tr.abort() + finally: + repo.invalidate() + repo.dirstate.invalidate() + raise + finally: + release(tr, lock, wlock) + self.removeundo(repo) + + def _apply(self, repo, series, list=False, update_status=True, + strict=False, patchdir=None, merge=None, all_files=None): + '''returns (error, hash) + error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz''' + # TODO unify with commands.py + if not patchdir: + patchdir = self.path + err = 0 + n = None + for patchname in series: + pushable, reason = self.pushable(patchname) + if not pushable: + self.explain_pushable(patchname, all_patches=True) + continue + self.ui.status(_("applying %s\n") % patchname) + pf = os.path.join(patchdir, patchname) + + try: + ph = patchheader(self.join(patchname), self.plainmode) + except: + self.ui.warn(_("unable to read %s\n") % patchname) + err = 1 + break + + message = ph.message + if not message: + # The commit message should not be translated + message = "imported patch %s\n" % patchname + else: + if list: + # The commit message should not be translated + message.append("\nimported patch %s" % patchname) + message = '\n'.join(message) + + if ph.haspatch: + (patcherr, files, fuzz) = self.patch(repo, pf) + if all_files 
is not None: + all_files.update(files) + patcherr = not patcherr + else: + self.ui.warn(_("patch %s is empty\n") % patchname) + patcherr, files, fuzz = 0, [], 0 + + if merge and files: + # Mark as removed/merged and update dirstate parent info + removed = [] + merged = [] + for f in files: + if os.path.lexists(repo.wjoin(f)): + merged.append(f) + else: + removed.append(f) + for f in removed: + repo.dirstate.remove(f) + for f in merged: + repo.dirstate.merge(f) + p1, p2 = repo.dirstate.parents() + repo.dirstate.setparents(p1, merge) + + files = cmdutil.updatedir(self.ui, repo, files) + match = cmdutil.matchfiles(repo, files or []) + n = repo.commit(message, ph.user, ph.date, match=match, force=True) + + if n is None: + raise util.Abort(_("repository commit failed")) + + if update_status: + self.applied.append(statusentry(n, patchname)) + + if patcherr: + self.ui.warn(_("patch failed, rejects left in working dir\n")) + err = 2 + break + + if fuzz and strict: + self.ui.warn(_("fuzz found when applying patch, stopping\n")) + err = 3 + break + return (err, n) + + def _cleanup(self, patches, numrevs, keep=False): + if not keep: + r = self.qrepo() + if r: + r[None].remove(patches, True) + else: + for p in patches: + os.unlink(self.join(p)) + + if numrevs: + del self.applied[:numrevs] + self.applied_dirty = 1 + + for i in sorted([self.find_series(p) for p in patches], reverse=True): + del self.full_series[i] + self.parse_series() + self.series_dirty = 1 + + def _revpatches(self, repo, revs): + firstrev = repo[self.applied[0].node].rev() + patches = [] + for i, rev in enumerate(revs): + + if rev < firstrev: + raise util.Abort(_('revision %d is not managed') % rev) + + ctx = repo[rev] + base = self.applied[i].node + if ctx.node() != base: + msg = _('cannot delete revision %d above applied patches') + raise util.Abort(msg % rev) + + patch = self.applied[i].name + for fmt in ('[mq]: %s', 'imported patch %s'): + if ctx.description() == fmt % patch: + msg = _('patch %s finalized 
without changeset message\n') + repo.ui.status(msg % patch) + break + + patches.append(patch) + return patches + + def finish(self, repo, revs): + patches = self._revpatches(repo, sorted(revs)) + self._cleanup(patches, len(patches)) + + def delete(self, repo, patches, opts): + if not patches and not opts.get('rev'): + raise util.Abort(_('qdelete requires at least one revision or ' + 'patch name')) + + realpatches = [] + for patch in patches: + patch = self.lookup(patch, strict=True) + info = self.isapplied(patch) + if info: + raise util.Abort(_("cannot delete applied patch %s") % patch) + if patch not in self.series: + raise util.Abort(_("patch %s not in series file") % patch) + if patch not in realpatches: + realpatches.append(patch) + + numrevs = 0 + if opts.get('rev'): + if not self.applied: + raise util.Abort(_('no patches applied')) + revs = cmdutil.revrange(repo, opts.get('rev')) + if len(revs) > 1 and revs[0] > revs[1]: + revs.reverse() + revpatches = self._revpatches(repo, revs) + realpatches += revpatches + numrevs = len(revpatches) + + self._cleanup(realpatches, numrevs, opts.get('keep')) + + def check_toppatch(self, repo): + if self.applied: + top = self.applied[-1].node + patch = self.applied[-1].name + pp = repo.dirstate.parents() + if top not in pp: + raise util.Abort(_("working directory revision is not qtip")) + return top, patch + return None, None + + def check_localchanges(self, repo, force=False, refresh=True): + m, a, r, d = repo.status()[:4] + if (m or a or r or d) and not force: + if refresh: + raise util.Abort(_("local changes found, refresh first")) + else: + raise util.Abort(_("local changes found")) + return m, a, r, d + + _reserved = ('series', 'status', 'guards') + def check_reserved_name(self, name): + if (name in self._reserved or name.startswith('.hg') + or name.startswith('.mq') or '#' in name or ':' in name): + raise util.Abort(_('"%s" cannot be used as the name of a patch') + % name) + + def new(self, repo, patchfn, *pats, 
**opts): + """options: + msg: a string or a no-argument function returning a string + """ + msg = opts.get('msg') + user = opts.get('user') + date = opts.get('date') + if date: + date = util.parsedate(date) + diffopts = self.diffopts({'git': opts.get('git')}) + self.check_reserved_name(patchfn) + if os.path.exists(self.join(patchfn)): + if os.path.isdir(self.join(patchfn)): + raise util.Abort(_('"%s" already exists as a directory') + % patchfn) + else: + raise util.Abort(_('patch "%s" already exists') % patchfn) + if opts.get('include') or opts.get('exclude') or pats: + match = cmdutil.match(repo, pats, opts) + # detect missing files in pats + def badfn(f, msg): + raise util.Abort('%s: %s' % (f, msg)) + match.bad = badfn + m, a, r, d = repo.status(match=match)[:4] + else: + m, a, r, d = self.check_localchanges(repo, force=True) + match = cmdutil.matchfiles(repo, m + a + r) + if len(repo[None].parents()) > 1: + raise util.Abort(_('cannot manage merge changesets')) + commitfiles = m + a + r + self.check_toppatch(repo) + insert = self.full_series_end() + wlock = repo.wlock() + try: + try: + # if patch file write fails, abort early + p = self.opener(patchfn, "w") + except IOError, e: + raise util.Abort(_('cannot write patch "%s": %s') + % (patchfn, e.strerror)) + try: + if self.plainmode: + if user: + p.write("From: " + user + "\n") + if not date: + p.write("\n") + if date: + p.write("Date: %d %d\n\n" % date) + else: + p.write("# HG changeset patch\n") + p.write("# Parent " + + hex(repo[None].parents()[0].node()) + "\n") + if user: + p.write("# User " + user + "\n") + if date: + p.write("# Date %s %s\n\n" % date) + if hasattr(msg, '__call__'): + msg = msg() + commitmsg = msg and msg or ("[mq]: %s" % patchfn) + n = repo.commit(commitmsg, user, date, match=match, force=True) + if n is None: + raise util.Abort(_("repo commit failed")) + try: + self.full_series[insert:insert] = [patchfn] + self.applied.append(statusentry(n, patchfn)) + self.parse_series() + 
self.series_dirty = 1 + self.applied_dirty = 1 + if msg: + msg = msg + "\n\n" + p.write(msg) + if commitfiles: + parent = self.qparents(repo, n) + chunks = patch.diff(repo, node1=parent, node2=n, + match=match, opts=diffopts) + for chunk in chunks: + p.write(chunk) + p.close() + wlock.release() + wlock = None + r = self.qrepo() + if r: + r[None].add([patchfn]) + except: + repo.rollback() + raise + except Exception: + patchpath = self.join(patchfn) + try: + os.unlink(patchpath) + except: + self.ui.warn(_('error unlinking %s\n') % patchpath) + raise + self.removeundo(repo) + finally: + release(wlock) + + def strip(self, repo, revs, update=True, backup="all", force=None): + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + + if update: + self.check_localchanges(repo, force=force, refresh=False) + urev = self.qparents(repo, revs[0]) + hg.clean(repo, urev) + repo.dirstate.write() + + self.removeundo(repo) + for rev in revs: + repair.strip(self.ui, repo, rev, backup) + # strip may have unbundled a set of backed up revisions after + # the actual strip + self.removeundo(repo) + finally: + release(lock, wlock) + + def isapplied(self, patch): + """returns (index, rev, patch)""" + for i, a in enumerate(self.applied): + if a.name == patch: + return (i, a.node, a.name) + return None + + # if the exact patch name does not exist, we try a few + # variations. 
If strict is passed, we try only #1 + # + # 1) a number to indicate an offset in the series file + # 2) a unique substring of the patch name was given + # 3) patchname[-+]num to indicate an offset in the series file + def lookup(self, patch, strict=False): + patch = patch and str(patch) + + def partial_name(s): + if s in self.series: + return s + matches = [x for x in self.series if s in x] + if len(matches) > 1: + self.ui.warn(_('patch name "%s" is ambiguous:\n') % s) + for m in matches: + self.ui.warn(' %s\n' % m) + return None + if matches: + return matches[0] + if self.series and self.applied: + if s == 'qtip': + return self.series[self.series_end(True)-1] + if s == 'qbase': + return self.series[0] + return None + + if patch is None: + return None + if patch in self.series: + return patch + + if not os.path.isfile(self.join(patch)): + try: + sno = int(patch) + except (ValueError, OverflowError): + pass + else: + if -len(self.series) <= sno < len(self.series): + return self.series[sno] + + if not strict: + res = partial_name(patch) + if res: + return res + minus = patch.rfind('-') + if minus >= 0: + res = partial_name(patch[:minus]) + if res: + i = self.series.index(res) + try: + off = int(patch[minus + 1:] or 1) + except (ValueError, OverflowError): + pass + else: + if i - off >= 0: + return self.series[i - off] + plus = patch.rfind('+') + if plus >= 0: + res = partial_name(patch[:plus]) + if res: + i = self.series.index(res) + try: + off = int(patch[plus + 1:] or 1) + except (ValueError, OverflowError): + pass + else: + if i + off < len(self.series): + return self.series[i + off] + raise util.Abort(_("patch %s not in series") % patch) + + def push(self, repo, patch=None, force=False, list=False, + mergeq=None, all=False, move=False): + diffopts = self.diffopts() + wlock = repo.wlock() + try: + heads = [] + for b, ls in repo.branchmap().iteritems(): + heads += ls + if not heads: + heads = [nullid] + if repo.dirstate.parents()[0] not in heads: + 
self.ui.status(_("(working directory not at a head)\n")) + + if not self.series: + self.ui.warn(_('no patches in series\n')) + return 0 + + patch = self.lookup(patch) + # Suppose our series file is: A B C and the current 'top' + # patch is B. qpush C should be performed (moving forward) + # qpush B is a NOP (no change) qpush A is an error (can't + # go backwards with qpush) + if patch: + info = self.isapplied(patch) + if info: + if info[0] < len(self.applied) - 1: + raise util.Abort( + _("cannot push to a previous patch: %s") % patch) + self.ui.warn( + _('qpush: %s is already at the top\n') % patch) + return 0 + pushable, reason = self.pushable(patch) + if not pushable: + if reason: + reason = _('guarded by %r') % reason + else: + reason = _('no matching guards') + self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason)) + return 1 + elif all: + patch = self.series[-1] + if self.isapplied(patch): + self.ui.warn(_('all patches are currently applied\n')) + return 0 + + # Following the above example, starting at 'top' of B: + # qpush should be performed (pushes C), but a subsequent + # qpush without an argument is an error (nothing to + # apply). This allows a loop of "...while hg qpush..." 
to + # work as it detects an error when done + start = self.series_end() + if start == len(self.series): + self.ui.warn(_('patch series already fully applied\n')) + return 1 + if not force: + self.check_localchanges(repo) + + if move: + if not patch: + raise util.Abort(_("please specify the patch to move")) + for i, rpn in enumerate(self.full_series[start:]): + # strip markers for patch guards + if self.guard_re.split(rpn, 1)[0] == patch: + break + index = start + i + assert index < len(self.full_series) + fullpatch = self.full_series[index] + del self.full_series[index] + self.full_series.insert(start, fullpatch) + self.parse_series() + self.series_dirty = 1 + + self.applied_dirty = 1 + if start > 0: + self.check_toppatch(repo) + if not patch: + patch = self.series[start] + end = start + 1 + else: + end = self.series.index(patch, start) + 1 + + s = self.series[start:end] + all_files = set() + try: + if mergeq: + ret = self.mergepatch(repo, mergeq, s, diffopts) + else: + ret = self.apply(repo, s, list, all_files=all_files) + except: + self.ui.warn(_('cleaning up working directory...')) + node = repo.dirstate.parents()[0] + hg.revert(repo, node, None) + # only remove unknown files that we know we touched or + # created while patching + for f in all_files: + if f not in repo.dirstate: + try: + util.unlink(repo.wjoin(f)) + except OSError, inst: + if inst.errno != errno.ENOENT: + raise + self.ui.warn(_('done\n')) + raise + + if not self.applied: + return ret[0] + top = self.applied[-1].name + if ret[0] and ret[0] > 1: + msg = _("errors during apply, please fix and refresh %s\n") + self.ui.write(msg % top) + else: + self.ui.write(_("now at: %s\n") % top) + return ret[0] + + finally: + wlock.release() + + def pop(self, repo, patch=None, force=False, update=True, all=False): + wlock = repo.wlock() + try: + if patch: + # index, rev, patch + info = self.isapplied(patch) + if not info: + patch = self.lookup(patch) + info = self.isapplied(patch) + if not info: + raise 
util.Abort(_("patch %s is not applied") % patch) + + if not self.applied: + # Allow qpop -a to work repeatedly, + # but not qpop without an argument + self.ui.warn(_("no patches applied\n")) + return not all + + if all: + start = 0 + elif patch: + start = info[0] + 1 + else: + start = len(self.applied) - 1 + + if start >= len(self.applied): + self.ui.warn(_("qpop: %s is already at the top\n") % patch) + return + + if not update: + parents = repo.dirstate.parents() + rr = [x.node for x in self.applied] + for p in parents: + if p in rr: + self.ui.warn(_("qpop: forcing dirstate update\n")) + update = True + else: + parents = [p.node() for p in repo[None].parents()] + needupdate = False + for entry in self.applied[start:]: + if entry.node in parents: + needupdate = True + break + update = needupdate + + if not force and update: + self.check_localchanges(repo) + + self.applied_dirty = 1 + end = len(self.applied) + rev = self.applied[start].node + if update: + top = self.check_toppatch(repo)[0] + + try: + heads = repo.changelog.heads(rev) + except error.LookupError: + node = short(rev) + raise util.Abort(_('trying to pop unknown node %s') % node) + + if heads != [self.applied[-1].node]: + raise util.Abort(_("popping would remove a revision not " + "managed by this patch queue")) + + # we know there are no local changes, so we can make a simplified + # form of hg.update. 
+ if update: + qp = self.qparents(repo, rev) + ctx = repo[qp] + m, a, r, d = repo.status(qp, top)[:4] + if d: + raise util.Abort(_("deletions found between repo revs")) + for f in a: + try: + util.unlink(repo.wjoin(f)) + except OSError, e: + if e.errno != errno.ENOENT: + raise + repo.dirstate.forget(f) + for f in m + r: + fctx = ctx[f] + repo.wwrite(f, fctx.data(), fctx.flags()) + repo.dirstate.normal(f) + repo.dirstate.setparents(qp, nullid) + for patch in reversed(self.applied[start:end]): + self.ui.status(_("popping %s\n") % patch.name) + del self.applied[start:end] + self.strip(repo, [rev], update=False, backup='strip') + if self.applied: + self.ui.write(_("now at: %s\n") % self.applied[-1].name) + else: + self.ui.write(_("patch queue now empty\n")) + finally: + wlock.release() + + def diff(self, repo, pats, opts): + top, patch = self.check_toppatch(repo) + if not top: + self.ui.write(_("no patches applied\n")) + return + qp = self.qparents(repo, top) + if opts.get('reverse'): + node1, node2 = None, qp + else: + node1, node2 = qp, None + diffopts = self.diffopts(opts, patch) + self.printdiff(repo, diffopts, node1, node2, files=pats, opts=opts) + + def refresh(self, repo, pats=None, **opts): + if not self.applied: + self.ui.write(_("no patches applied\n")) + return 1 + msg = opts.get('msg', '').rstrip() + newuser = opts.get('user') + newdate = opts.get('date') + if newdate: + newdate = '%d %d' % util.parsedate(newdate) + wlock = repo.wlock() + + try: + self.check_toppatch(repo) + (top, patchfn) = (self.applied[-1].node, self.applied[-1].name) + if repo.changelog.heads(top) != [top]: + raise util.Abort(_("cannot refresh a revision with children")) + + cparents = repo.changelog.parents(top) + patchparent = self.qparents(repo, top) + ph = patchheader(self.join(patchfn), self.plainmode) + diffopts = self.diffopts({'git': opts.get('git')}, patchfn) + if msg: + ph.setmessage(msg) + if newuser: + ph.setuser(newuser) + if newdate: + ph.setdate(newdate) + 
ph.setparent(hex(patchparent)) + + # only commit new patch when write is complete + patchf = self.opener(patchfn, 'w', atomictemp=True) + + comments = str(ph) + if comments: + patchf.write(comments) + + # update the dirstate in place, strip off the qtip commit + # and then commit. + # + # this should really read: + # mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4] + # but we do it backwards to take advantage of manifest/chlog + # caching against the next repo.status call + mm, aa, dd, aa2 = repo.status(patchparent, top)[:4] + changes = repo.changelog.read(top) + man = repo.manifest.read(changes[0]) + aaa = aa[:] + matchfn = cmdutil.match(repo, pats, opts) + # in short mode, we only diff the files included in the + # patch already plus specified files + if opts.get('short'): + # if amending a patch, we start with existing + # files plus specified files - unfiltered + match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files()) + # filter with inc/exl options + matchfn = cmdutil.match(repo, opts=opts) + else: + match = cmdutil.matchall(repo) + m, a, r, d = repo.status(match=match)[:4] + + # we might end up with files that were added between + # qtip and the dirstate parent, but then changed in the + # local dirstate. in this case, we want them to only + # show up in the added section + for x in m: + if x == '.hgsub' or x == '.hgsubstate': + self.ui.warn(_('warning: not refreshing %s\n') % x) + continue + if x not in aa: + mm.append(x) + # we might end up with files added by the local dirstate that + # were deleted by the patch. In this case, they should only + # show up in the changed section. 
+ for x in a: + if x == '.hgsub' or x == '.hgsubstate': + self.ui.warn(_('warning: not adding %s\n') % x) + continue + if x in dd: + del dd[dd.index(x)] + mm.append(x) + else: + aa.append(x) + # make sure any files deleted in the local dirstate + # are not in the add or change column of the patch + forget = [] + for x in d + r: + if x == '.hgsub' or x == '.hgsubstate': + self.ui.warn(_('warning: not removing %s\n') % x) + continue + if x in aa: + del aa[aa.index(x)] + forget.append(x) + continue + elif x in mm: + del mm[mm.index(x)] + dd.append(x) + + m = list(set(mm)) + r = list(set(dd)) + a = list(set(aa)) + c = [filter(matchfn, l) for l in (m, a, r)] + match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2])) + chunks = patch.diff(repo, patchparent, match=match, + changes=c, opts=diffopts) + for chunk in chunks: + patchf.write(chunk) + + try: + if diffopts.git or diffopts.upgrade: + copies = {} + for dst in a: + src = repo.dirstate.copied(dst) + # during qfold, the source file for copies may + # be removed. Treat this as a simple add. + if src is not None and src in repo.dirstate: + copies.setdefault(src, []).append(dst) + repo.dirstate.add(dst) + # remember the copies between patchparent and qtip + for dst in aaa: + f = repo.file(dst) + src = f.renamed(man[dst]) + if src: + copies.setdefault(src[0], []).extend( + copies.get(dst, [])) + if dst in a: + copies[src[0]].append(dst) + # we can't copy a file created by the patch itself + if dst in copies: + del copies[dst] + for src, dsts in copies.iteritems(): + for dst in dsts: + repo.dirstate.copy(src, dst) + else: + for dst in a: + repo.dirstate.add(dst) + # Drop useless copy information + for f in list(repo.dirstate.copies()): + repo.dirstate.copy(None, f) + for f in r: + repo.dirstate.remove(f) + # if the patch excludes a modified file, mark that + # file with mtime=0 so status can see it. 
+ mm = [] + for i in xrange(len(m)-1, -1, -1): + if not matchfn(m[i]): + mm.append(m[i]) + del m[i] + for f in m: + repo.dirstate.normal(f) + for f in mm: + repo.dirstate.normallookup(f) + for f in forget: + repo.dirstate.forget(f) + + if not msg: + if not ph.message: + message = "[mq]: %s\n" % patchfn + else: + message = "\n".join(ph.message) + else: + message = msg + + user = ph.user or changes[1] + + # assumes strip can roll itself back if interrupted + repo.dirstate.setparents(*cparents) + self.applied.pop() + self.applied_dirty = 1 + self.strip(repo, [top], update=False, + backup='strip') + except: + repo.dirstate.invalidate() + raise + + try: + # might be nice to attempt to roll back strip after this + patchf.rename() + n = repo.commit(message, user, ph.date, match=match, + force=True) + self.applied.append(statusentry(n, patchfn)) + except: + ctx = repo[cparents[0]] + repo.dirstate.rebuild(ctx.node(), ctx.manifest()) + self.save_dirty() + self.ui.warn(_('refresh interrupted while patch was popped! 
' + '(revert --all, qpush to recover)\n')) + raise + finally: + wlock.release() + self.removeundo(repo) + + def init(self, repo, create=False): + if not create and os.path.isdir(self.path): + raise util.Abort(_("patch queue directory already exists")) + try: + os.mkdir(self.path) + except OSError, inst: + if inst.errno != errno.EEXIST or not create: + raise + if create: + return self.qrepo(create=True) + + def unapplied(self, repo, patch=None): + if patch and patch not in self.series: + raise util.Abort(_("patch %s is not in series file") % patch) + if not patch: + start = self.series_end() + else: + start = self.series.index(patch) + 1 + unapplied = [] + for i in xrange(start, len(self.series)): + pushable, reason = self.pushable(i) + if pushable: + unapplied.append((i, self.series[i])) + self.explain_pushable(i) + return unapplied + + def qseries(self, repo, missing=None, start=0, length=None, status=None, + summary=False): + def displayname(pfx, patchname, state): + if pfx: + self.ui.write(pfx) + if summary: + ph = patchheader(self.join(patchname), self.plainmode) + msg = ph.message and ph.message[0] or '' + if self.ui.formatted(): + width = self.ui.termwidth() - len(pfx) - len(patchname) - 2 + if width > 0: + msg = util.ellipsis(msg, width) + else: + msg = '' + self.ui.write(patchname, label='qseries.' + state) + self.ui.write(': ') + self.ui.write(msg, label='qseries.message.' + state) + else: + self.ui.write(patchname, label='qseries.' 
+ state) + self.ui.write('\n') + + applied = set([p.name for p in self.applied]) + if length is None: + length = len(self.series) - start + if not missing: + if self.ui.verbose: + idxwidth = len(str(start + length - 1)) + for i in xrange(start, start + length): + patch = self.series[i] + if patch in applied: + char, state = 'A', 'applied' + elif self.pushable(i)[0]: + char, state = 'U', 'unapplied' + else: + char, state = 'G', 'guarded' + pfx = '' + if self.ui.verbose: + pfx = '%*d %s ' % (idxwidth, i, char) + elif status and status != char: + continue + displayname(pfx, patch, state) + else: + msng_list = [] + for root, dirs, files in os.walk(self.path): + d = root[len(self.path) + 1:] + for f in files: + fl = os.path.join(d, f) + if (fl not in self.series and + fl not in (self.status_path, self.series_path, + self.guards_path) + and not fl.startswith('.')): + msng_list.append(fl) + for x in sorted(msng_list): + pfx = self.ui.verbose and ('D ') or '' + displayname(pfx, x, 'missing') + + def issaveline(self, l): + if l.name == '.hg.patches.save.line': + return True + + def qrepo(self, create=False): + ui = self.ui.copy() + ui.setconfig('paths', 'default', '', overlay=False) + ui.setconfig('paths', 'default-push', '', overlay=False) + if create or os.path.isdir(self.join(".hg")): + return hg.repository(ui, path=self.path, create=create) + + def restore(self, repo, rev, delete=None, qupdate=None): + desc = repo[rev].description().strip() + lines = desc.splitlines() + i = 0 + datastart = None + series = [] + applied = [] + qpp = None + for i, line in enumerate(lines): + if line == 'Patch Data:': + datastart = i + 1 + elif line.startswith('Dirstate:'): + l = line.rstrip() + l = l[10:].split(' ') + qpp = [bin(x) for x in l] + elif datastart != None: + l = line.rstrip() + n, name = l.split(':', 1) + if n: + applied.append(statusentry(bin(n), name)) + else: + series.append(l) + if datastart is None: + self.ui.warn(_("No saved patch data found\n")) + return 1 + 
self.ui.warn(_("restoring status: %s\n") % lines[0]) + self.full_series = series + self.applied = applied + self.parse_series() + self.series_dirty = 1 + self.applied_dirty = 1 + heads = repo.changelog.heads() + if delete: + if rev not in heads: + self.ui.warn(_("save entry has children, leaving it alone\n")) + else: + self.ui.warn(_("removing save entry %s\n") % short(rev)) + pp = repo.dirstate.parents() + if rev in pp: + update = True + else: + update = False + self.strip(repo, [rev], update=update, backup='strip') + if qpp: + self.ui.warn(_("saved queue repository parents: %s %s\n") % + (short(qpp[0]), short(qpp[1]))) + if qupdate: + self.ui.status(_("updating queue directory\n")) + r = self.qrepo() + if not r: + self.ui.warn(_("Unable to load queue repository\n")) + return 1 + hg.clean(r, qpp[0]) + + def save(self, repo, msg=None): + if not self.applied: + self.ui.warn(_("save: no patches applied, exiting\n")) + return 1 + if self.issaveline(self.applied[-1]): + self.ui.warn(_("status is already saved\n")) + return 1 + + if not msg: + msg = _("hg patches saved state") + else: + msg = "hg patches: " + msg.rstrip('\r\n') + r = self.qrepo() + if r: + pp = r.dirstate.parents() + msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1])) + msg += "\n\nPatch Data:\n" + msg += ''.join('%s\n' % x for x in self.applied) + msg += ''.join(':%s\n' % x for x in self.full_series) + n = repo.commit(msg, force=True) + if not n: + self.ui.warn(_("repo commit failed\n")) + return 1 + self.applied.append(statusentry(n, '.hg.patches.save.line')) + self.applied_dirty = 1 + self.removeundo(repo) + + def full_series_end(self): + if self.applied: + p = self.applied[-1].name + end = self.find_series(p) + if end is None: + return len(self.full_series) + return end + 1 + return 0 + + def series_end(self, all_patches=False): + """If all_patches is False, return the index of the next pushable patch + in the series, or the series length. 
If all_patches is True, return the + index of the first patch past the last applied one. + """ + end = 0 + def next(start): + if all_patches or start >= len(self.series): + return start + for i in xrange(start, len(self.series)): + p, reason = self.pushable(i) + if p: + break + self.explain_pushable(i) + return i + if self.applied: + p = self.applied[-1].name + try: + end = self.series.index(p) + except ValueError: + return 0 + return next(end + 1) + return next(end) + + def appliedname(self, index): + pname = self.applied[index].name + if not self.ui.verbose: + p = pname + else: + p = str(self.series.index(pname)) + " " + pname + return p + + def qimport(self, repo, files, patchname=None, rev=None, existing=None, + force=None, git=False): + def checkseries(patchname): + if patchname in self.series: + raise util.Abort(_('patch %s is already in the series file') + % patchname) + def checkfile(patchname): + if not force and os.path.exists(self.join(patchname)): + raise util.Abort(_('patch "%s" already exists') + % patchname) + + if rev: + if files: + raise util.Abort(_('option "-r" not valid when importing ' + 'files')) + rev = cmdutil.revrange(repo, rev) + rev.sort(reverse=True) + if (len(files) > 1 or len(rev) > 1) and patchname: + raise util.Abort(_('option "-n" not valid when importing multiple ' + 'patches')) + if rev: + # If mq patches are applied, we can only import revisions + # that form a linear path to qbase. + # Otherwise, they should form a linear path to a head. 
+ heads = repo.changelog.heads(repo.changelog.node(rev[-1])) + if len(heads) > 1: + raise util.Abort(_('revision %d is the root of more than one ' + 'branch') % rev[-1]) + if self.applied: + base = repo.changelog.node(rev[0]) + if base in [n.node for n in self.applied]: + raise util.Abort(_('revision %d is already managed') + % rev[0]) + if heads != [self.applied[-1].node]: + raise util.Abort(_('revision %d is not the parent of ' + 'the queue') % rev[0]) + base = repo.changelog.rev(self.applied[0].node) + lastparent = repo.changelog.parentrevs(base)[0] + else: + if heads != [repo.changelog.node(rev[0])]: + raise util.Abort(_('revision %d has unmanaged children') + % rev[0]) + lastparent = None + + diffopts = self.diffopts({'git': git}) + for r in rev: + p1, p2 = repo.changelog.parentrevs(r) + n = repo.changelog.node(r) + if p2 != nullrev: + raise util.Abort(_('cannot import merge revision %d') % r) + if lastparent and lastparent != r: + raise util.Abort(_('revision %d is not the parent of %d') + % (r, lastparent)) + lastparent = p1 + + if not patchname: + patchname = normname('%d.diff' % r) + self.check_reserved_name(patchname) + checkseries(patchname) + checkfile(patchname) + self.full_series.insert(0, patchname) + + patchf = self.opener(patchname, "w") + cmdutil.export(repo, [n], fp=patchf, opts=diffopts) + patchf.close() + + se = statusentry(n, patchname) + self.applied.insert(0, se) + + self.added.append(patchname) + patchname = None + self.parse_series() + self.applied_dirty = 1 + self.series_dirty = True + + for i, filename in enumerate(files): + if existing: + if filename == '-': + raise util.Abort(_('-e is incompatible with import from -')) + filename = normname(filename) + self.check_reserved_name(filename) + originpath = self.join(filename) + if not os.path.isfile(originpath): + raise util.Abort(_("patch %s does not exist") % filename) + + if patchname: + self.check_reserved_name(patchname) + checkfile(patchname) + + self.ui.write(_('renaming %s to %s\n') 
+ % (filename, patchname)) + util.rename(originpath, self.join(patchname)) + else: + patchname = filename + + else: + try: + if filename == '-': + if not patchname: + raise util.Abort( + _('need --name to import a patch from -')) + text = sys.stdin.read() + else: + text = url.open(self.ui, filename).read() + except (OSError, IOError): + raise util.Abort(_("unable to read file %s") % filename) + if not patchname: + patchname = normname(os.path.basename(filename)) + self.check_reserved_name(patchname) + checkfile(patchname) + patchf = self.opener(patchname, "w") + patchf.write(text) + if not force: + checkseries(patchname) + if patchname not in self.series: + index = self.full_series_end() + i + self.full_series[index:index] = [patchname] + self.parse_series() + self.series_dirty = True + self.ui.warn(_("adding %s to series file\n") % patchname) + self.added.append(patchname) + patchname = None + +def delete(ui, repo, *patches, **opts): + """remove patches from queue + + The patches must not be applied, and at least one patch is required. With + -k/--keep, the patch files are preserved in the patch directory. 
+ + To stop managing a patch and move it into permanent history, + use the :hg:`qfinish` command.""" + q = repo.mq + q.delete(repo, patches, opts) + q.save_dirty() + return 0 + +def applied(ui, repo, patch=None, **opts): + """print the patches already applied + + Returns 0 on success.""" + + q = repo.mq + + if patch: + if patch not in q.series: + raise util.Abort(_("patch %s is not in series file") % patch) + end = q.series.index(patch) + 1 + else: + end = q.series_end(True) + + if opts.get('last') and not end: + ui.write(_("no patches applied\n")) + return 1 + elif opts.get('last') and end == 1: + ui.write(_("only one patch applied\n")) + return 1 + elif opts.get('last'): + start = end - 2 + end = 1 + else: + start = 0 + + q.qseries(repo, length=end, start=start, status='A', + summary=opts.get('summary')) + + +def unapplied(ui, repo, patch=None, **opts): + """print the patches not yet applied + + Returns 0 on success.""" + + q = repo.mq + if patch: + if patch not in q.series: + raise util.Abort(_("patch %s is not in series file") % patch) + start = q.series.index(patch) + 1 + else: + start = q.series_end(True) + + if start == len(q.series) and opts.get('first'): + ui.write(_("all patches applied\n")) + return 1 + + length = opts.get('first') and 1 or None + q.qseries(repo, start=start, length=length, status='U', + summary=opts.get('summary')) + +def qimport(ui, repo, *filename, **opts): + """import a patch + + The patch is inserted into the series after the last applied + patch. If no patches have been applied, qimport prepends the patch + to the series. + + The patch will have the same name as its source file unless you + give it a new one with -n/--name. + + You can register an existing patch inside the patch directory with + the -e/--existing flag. + + With -f/--force, an existing patch of the same name will be + overwritten. + + An existing changeset may be placed under mq control with -r/--rev + (e.g. 
qimport --rev tip -n patch will place tip under mq control). + With -g/--git, patches imported with --rev will use the git diff + format. See the diffs help topic for information on why this is + important for preserving rename/copy information and permission + changes. + + To import a patch from standard input, pass - as the patch file. + When importing from standard input, a patch name must be specified + using the --name flag. + + To import an existing patch while renaming it:: + + hg qimport -e existing-patch -n new-name + + Returns 0 if import succeeded. + """ + q = repo.mq + try: + q.qimport(repo, filename, patchname=opts.get('name'), + existing=opts.get('existing'), force=opts.get('force'), + rev=opts.get('rev'), git=opts.get('git')) + finally: + q.save_dirty() + + if opts.get('push') and not opts.get('rev'): + return q.push(repo, None) + return 0 + +def qinit(ui, repo, create): + """initialize a new queue repository + + This command also creates a series file for ordering patches, and + an mq-specific .hgignore file in the queue repository, to exclude + the status and guards files (these contain mostly transient state). + + Returns 0 if initialization succeeded.""" + q = repo.mq + r = q.init(repo, create) + q.save_dirty() + if r: + if not os.path.exists(r.wjoin('.hgignore')): + fp = r.wopener('.hgignore', 'w') + fp.write('^\\.hg\n') + fp.write('^\\.mq\n') + fp.write('syntax: glob\n') + fp.write('status\n') + fp.write('guards\n') + fp.close() + if not os.path.exists(r.wjoin('series')): + r.wopener('series', 'w').close() + r[None].add(['.hgignore', 'series']) + commands.add(ui, r) + return 0 + +def init(ui, repo, **opts): + """init a new queue repository (DEPRECATED) + + The queue repository is unversioned by default. If + -c/--create-repo is specified, qinit will create a separate nested + repository for patches (qinit -c may also be run later to convert + an unversioned patch repository into a versioned one). 
You can use + qcommit to commit changes to this queue repository. + + This command is deprecated. Without -c, it's implied by other relevant + commands. With -c, use :hg:`init --mq` instead.""" + return qinit(ui, repo, create=opts.get('create_repo')) + +def clone(ui, source, dest=None, **opts): + '''clone main and patch repository at same time + + If source is local, destination will have no patches applied. If + source is remote, this command can not check if patches are + applied in source, so cannot guarantee that patches are not + applied in destination. If you clone remote repository, be sure + before that it has no patches applied. + + Source patch repository is looked for in <src>/.hg/patches by + default. Use -p <url> to change. + + The patch directory must be a nested Mercurial repository, as + would be created by :hg:`init --mq`. + + Return 0 on success. + ''' + def patchdir(repo): + url = repo.url() + if url.endswith('/'): + url = url[:-1] + return url + '/.hg/patches' + if dest is None: + dest = hg.defaultdest(source) + sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source)) + if opts.get('patches'): + patchespath = ui.expandpath(opts.get('patches')) + else: + patchespath = patchdir(sr) + try: + hg.repository(ui, patchespath) + except error.RepoError: + raise util.Abort(_('versioned patch repository not found' + ' (see init --mq)')) + qbase, destrev = None, None + if sr.local(): + if sr.mq.applied: + qbase = sr.mq.applied[0].node + if not hg.islocal(dest): + heads = set(sr.heads()) + destrev = list(heads.difference(sr.heads(qbase))) + destrev.append(sr.changelog.parents(qbase)[0]) + elif sr.capable('lookup'): + try: + qbase = sr.lookup('qbase') + except error.RepoError: + pass + ui.note(_('cloning main repository\n')) + sr, dr = hg.clone(ui, sr.url(), dest, + pull=opts.get('pull'), + rev=destrev, + update=False, + stream=opts.get('uncompressed')) + ui.note(_('cloning patch repository\n')) + hg.clone(ui, opts.get('patches') or patchdir(sr), 
patchdir(dr), + pull=opts.get('pull'), update=not opts.get('noupdate'), + stream=opts.get('uncompressed')) + if dr.local(): + if qbase: + ui.note(_('stripping applied patches from destination ' + 'repository\n')) + dr.mq.strip(dr, [qbase], update=False, backup=None) + if not opts.get('noupdate'): + ui.note(_('updating destination repository\n')) + hg.update(dr, dr.changelog.tip()) + +def commit(ui, repo, *pats, **opts): + """commit changes in the queue repository (DEPRECATED) + + This command is deprecated; use :hg:`commit --mq` instead.""" + q = repo.mq + r = q.qrepo() + if not r: + raise util.Abort('no queue repository') + commands.commit(r.ui, r, *pats, **opts) + +def series(ui, repo, **opts): + """print the entire series file + + Returns 0 on success.""" + repo.mq.qseries(repo, missing=opts.get('missing'), summary=opts.get('summary')) + return 0 + +def top(ui, repo, **opts): + """print the name of the current patch + + Returns 0 on success.""" + q = repo.mq + t = q.applied and q.series_end(True) or 0 + if t: + q.qseries(repo, start=t - 1, length=1, status='A', + summary=opts.get('summary')) + else: + ui.write(_("no patches applied\n")) + return 1 + +def next(ui, repo, **opts): + """print the name of the next patch + + Returns 0 on success.""" + q = repo.mq + end = q.series_end() + if end == len(q.series): + ui.write(_("all patches applied\n")) + return 1 + q.qseries(repo, start=end, length=1, summary=opts.get('summary')) + +def prev(ui, repo, **opts): + """print the name of the previous patch + + Returns 0 on success.""" + q = repo.mq + l = len(q.applied) + if l == 1: + ui.write(_("only one patch applied\n")) + return 1 + if not l: + ui.write(_("no patches applied\n")) + return 1 + q.qseries(repo, start=l - 2, length=1, status='A', + summary=opts.get('summary')) + +def setupheaderopts(ui, opts): + if not opts.get('user') and opts.get('currentuser'): + opts['user'] = ui.username() + if not opts.get('date') and opts.get('currentdate'): + opts['date'] = "%d %d" % 
util.makedate() + +def new(ui, repo, patch, *args, **opts): + """create a new patch + + qnew creates a new patch on top of the currently-applied patch (if + any). The patch will be initialized with any outstanding changes + in the working directory. You may also use -I/--include, + -X/--exclude, and/or a list of files after the patch name to add + only changes to matching files to the new patch, leaving the rest + as uncommitted modifications. + + -u/--user and -d/--date can be used to set the (given) user and + date, respectively. -U/--currentuser and -D/--currentdate set user + to current user and date to current date. + + -e/--edit, -m/--message or -l/--logfile set the patch header as + well as the commit message. If none is specified, the header is + empty and the commit message is '[mq]: PATCH'. + + Use the -g/--git option to keep the patch in the git extended diff + format. Read the diffs help topic for more information on why this + is important for preserving permission changes and copy/rename + information. + + Returns 0 on successful creation of a new patch. + """ + msg = cmdutil.logmessage(opts) + def getmsg(): + return ui.edit(msg, opts.get('user') or ui.username()) + q = repo.mq + opts['msg'] = msg + if opts.get('edit'): + opts['msg'] = getmsg + else: + opts['msg'] = msg + setupheaderopts(ui, opts) + q.new(repo, patch, *args, **opts) + q.save_dirty() + return 0 + +def refresh(ui, repo, *pats, **opts): + """update the current patch + + If any file patterns are provided, the refreshed patch will + contain only the modifications that match those patterns; the + remaining modifications will remain in the working directory. + + If -s/--short is specified, files currently included in the patch + will be refreshed just like matched files and remain in the patch. + + If -e/--edit is specified, Mercurial will start your configured editor for + you to enter a message. In case qrefresh fails, you will find a backup of + your message in ``.hg/last-message.txt``. 
+ + hg add/remove/copy/rename work as usual, though you might want to + use git-style patches (-g/--git or [diff] git=1) to track copies + and renames. See the diffs help topic for more information on the + git diff format. + + Returns 0 on success. + """ + q = repo.mq + message = cmdutil.logmessage(opts) + if opts.get('edit'): + if not q.applied: + ui.write(_("no patches applied\n")) + return 1 + if message: + raise util.Abort(_('option "-e" incompatible with "-m" or "-l"')) + patch = q.applied[-1].name + ph = patchheader(q.join(patch), q.plainmode) + message = ui.edit('\n'.join(ph.message), ph.user or ui.username()) + # We don't want to lose the patch message if qrefresh fails (issue2062) + msgfile = repo.opener('last-message.txt', 'wb') + msgfile.write(message) + msgfile.close() + setupheaderopts(ui, opts) + ret = q.refresh(repo, pats, msg=message, **opts) + q.save_dirty() + return ret + +def diff(ui, repo, *pats, **opts): + """diff of the current patch and subsequent modifications + + Shows a diff which includes the current patch as well as any + changes which have been made in the working directory since the + last refresh (thus showing what the current patch would become + after a qrefresh). + + Use :hg:`diff` if you only want to see the changes made since the + last qrefresh, or :hg:`export qtip` if you want to see changes + made by the current patch without including changes made since the + qrefresh. + + Returns 0 on success. + """ + repo.mq.diff(repo, pats, opts) + return 0 + +def fold(ui, repo, *files, **opts): + """fold the named patches into the current patch + + Patches must not yet be applied. Each patch will be successively + applied to the current patch in the order given. If all the + patches apply successfully, the current patch will be refreshed + with the new cumulative patch, and the folded patches will be + deleted. With -k/--keep, the folded patch files will not be + removed afterwards. 
+ + The header for each folded patch will be concatenated with the + current patch header, separated by a line of ``* * *``. + + Returns 0 on success.""" + + q = repo.mq + + if not files: + raise util.Abort(_('qfold requires at least one patch name')) + if not q.check_toppatch(repo)[0]: + raise util.Abort(_('no patches applied')) + q.check_localchanges(repo) + + message = cmdutil.logmessage(opts) + if opts.get('edit'): + if message: + raise util.Abort(_('option "-e" incompatible with "-m" or "-l"')) + + parent = q.lookup('qtip') + patches = [] + messages = [] + for f in files: + p = q.lookup(f) + if p in patches or p == parent: + ui.warn(_('Skipping already folded patch %s\n') % p) + if q.isapplied(p): + raise util.Abort(_('qfold cannot fold already applied patch %s') % p) + patches.append(p) + + for p in patches: + if not message: + ph = patchheader(q.join(p), q.plainmode) + if ph.message: + messages.append(ph.message) + pf = q.join(p) + (patchsuccess, files, fuzz) = q.patch(repo, pf) + if not patchsuccess: + raise util.Abort(_('error folding patch %s') % p) + cmdutil.updatedir(ui, repo, files) + + if not message: + ph = patchheader(q.join(parent), q.plainmode) + message, user = ph.message, ph.user + for msg in messages: + message.append('* * *') + message.extend(msg) + message = '\n'.join(message) + + if opts.get('edit'): + message = ui.edit(message, user or ui.username()) + + diffopts = q.patchopts(q.diffopts(), *patches) + q.refresh(repo, msg=message, git=diffopts.git) + q.delete(repo, patches, opts) + q.save_dirty() + +def goto(ui, repo, patch, **opts): + '''push or pop patches until named patch is at top of stack + + Returns 0 on success.''' + q = repo.mq + patch = q.lookup(patch) + if q.isapplied(patch): + ret = q.pop(repo, patch, force=opts.get('force')) + else: + ret = q.push(repo, patch, force=opts.get('force')) + q.save_dirty() + return ret + +def guard(ui, repo, *args, **opts): + '''set or print guards for a patch + + Guards control whether a patch can 
be pushed. A patch with no + guards is always pushed. A patch with a positive guard ("+foo") is + pushed only if the :hg:`qselect` command has activated it. A patch with + a negative guard ("-foo") is never pushed if the :hg:`qselect` command + has activated it. + + With no arguments, print the currently active guards. + With arguments, set guards for the named patch. + + .. note:: + Specifying negative guards now requires '--'. + + To set guards on another patch:: + + hg qguard other.patch -- +2.6.17 -stable + + Returns 0 on success. + ''' + def status(idx): + guards = q.series_guards[idx] or ['unguarded'] + if q.series[idx] in applied: + state = 'applied' + elif q.pushable(idx)[0]: + state = 'unapplied' + else: + state = 'guarded' + label = 'qguard.patch qguard.%s qseries.%s' % (state, state) + ui.write('%s: ' % ui.label(q.series[idx], label)) + + for i, guard in enumerate(guards): + if guard.startswith('+'): + ui.write(guard, label='qguard.positive') + elif guard.startswith('-'): + ui.write(guard, label='qguard.negative') + else: + ui.write(guard, label='qguard.unguarded') + if i != len(guards) - 1: + ui.write(' ') + ui.write('\n') + q = repo.mq + applied = set(p.name for p in q.applied) + patch = None + args = list(args) + if opts.get('list'): + if args or opts.get('none'): + raise util.Abort(_('cannot mix -l/--list with options or arguments')) + for i in xrange(len(q.series)): + status(i) + return + if not args or args[0][0:1] in '-+': + if not q.applied: + raise util.Abort(_('no patches applied')) + patch = q.applied[-1].name + if patch is None and args[0][0:1] not in '-+': + patch = args.pop(0) + if patch is None: + raise util.Abort(_('no patch to work with')) + if args or opts.get('none'): + idx = q.find_series(patch) + if idx is None: + raise util.Abort(_('no patch named %s') % patch) + q.set_guards(idx, args) + q.save_dirty() + else: + status(q.series.index(q.lookup(patch))) + +def header(ui, repo, patch=None): + """print the header of the topmost or 
specified patch + + Returns 0 on success.""" + q = repo.mq + + if patch: + patch = q.lookup(patch) + else: + if not q.applied: + ui.write(_('no patches applied\n')) + return 1 + patch = q.lookup('qtip') + ph = patchheader(q.join(patch), q.plainmode) + + ui.write('\n'.join(ph.message) + '\n') + +def lastsavename(path): + (directory, base) = os.path.split(path) + names = os.listdir(directory) + namere = re.compile("%s.([0-9]+)" % base) + maxindex = None + maxname = None + for f in names: + m = namere.match(f) + if m: + index = int(m.group(1)) + if maxindex is None or index > maxindex: + maxindex = index + maxname = f + if maxname: + return (os.path.join(directory, maxname), maxindex) + return (None, None) + +def savename(path): + (last, index) = lastsavename(path) + if last is None: + index = 0 + newpath = path + ".%d" % (index + 1) + return newpath + +def push(ui, repo, patch=None, **opts): + """push the next patch onto the stack + + When -f/--force is applied, all local changes in patched files + will be lost. + + Return 0 on succces. + """ + q = repo.mq + mergeq = None + + if opts.get('merge'): + if opts.get('name'): + newpath = repo.join(opts.get('name')) + else: + newpath, i = lastsavename(q.path) + if not newpath: + ui.warn(_("no saved queues found, please use -n\n")) + return 1 + mergeq = queue(ui, repo.join(""), newpath) + ui.warn(_("merging with queue at: %s\n") % mergeq.path) + ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'), + mergeq=mergeq, all=opts.get('all'), move=opts.get('move')) + return ret + +def pop(ui, repo, patch=None, **opts): + """pop the current patch off the stack + + By default, pops off the top of the patch stack. If given a patch + name, keeps popping off patches until the named patch is at the + top of the stack. + + Return 0 on success. 
+ """ + localupdate = True + if opts.get('name'): + q = queue(ui, repo.join(""), repo.join(opts.get('name'))) + ui.warn(_('using patch queue: %s\n') % q.path) + localupdate = False + else: + q = repo.mq + ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate, + all=opts.get('all')) + q.save_dirty() + return ret + +def rename(ui, repo, patch, name=None, **opts): + """rename a patch + + With one argument, renames the current patch to PATCH1. + With two arguments, renames PATCH1 to PATCH2. + + Returns 0 on success.""" + + q = repo.mq + + if not name: + name = patch + patch = None + + if patch: + patch = q.lookup(patch) + else: + if not q.applied: + ui.write(_('no patches applied\n')) + return + patch = q.lookup('qtip') + absdest = q.join(name) + if os.path.isdir(absdest): + name = normname(os.path.join(name, os.path.basename(patch))) + absdest = q.join(name) + if os.path.exists(absdest): + raise util.Abort(_('%s already exists') % absdest) + + if name in q.series: + raise util.Abort( + _('A patch named %s already exists in the series file') % name) + + ui.note(_('renaming %s to %s\n') % (patch, name)) + i = q.find_series(patch) + guards = q.guard_re.findall(q.full_series[i]) + q.full_series[i] = name + ''.join([' #' + g for g in guards]) + q.parse_series() + q.series_dirty = 1 + + info = q.isapplied(patch) + if info: + q.applied[info[0]] = statusentry(info[1], name) + q.applied_dirty = 1 + + destdir = os.path.dirname(absdest) + if not os.path.isdir(destdir): + os.makedirs(destdir) + util.rename(q.join(patch), absdest) + r = q.qrepo() + if r and patch in r.dirstate: + wctx = r[None] + wlock = r.wlock() + try: + if r.dirstate[patch] == 'a': + r.dirstate.forget(patch) + r.dirstate.add(name) + else: + if r.dirstate[name] == 'r': + wctx.undelete([name]) + wctx.copy(patch, name) + wctx.remove([patch], False) + finally: + wlock.release() + + q.save_dirty() + +def restore(ui, repo, rev, **opts): + """restore the queue state saved by a revision (DEPRECATED) + + 
This command is deprecated, use :hg:`rebase` instead.""" + rev = repo.lookup(rev) + q = repo.mq + q.restore(repo, rev, delete=opts.get('delete'), + qupdate=opts.get('update')) + q.save_dirty() + return 0 + +def save(ui, repo, **opts): + """save current queue state (DEPRECATED) + + This command is deprecated, use :hg:`rebase` instead.""" + q = repo.mq + message = cmdutil.logmessage(opts) + ret = q.save(repo, msg=message) + if ret: + return ret + q.save_dirty() + if opts.get('copy'): + path = q.path + if opts.get('name'): + newpath = os.path.join(q.basepath, opts.get('name')) + if os.path.exists(newpath): + if not os.path.isdir(newpath): + raise util.Abort(_('destination %s exists and is not ' + 'a directory') % newpath) + if not opts.get('force'): + raise util.Abort(_('destination %s exists, ' + 'use -f to force') % newpath) + else: + newpath = savename(path) + ui.warn(_("copy %s to %s\n") % (path, newpath)) + util.copyfiles(path, newpath) + if opts.get('empty'): + try: + os.unlink(q.join(q.status_path)) + except: + pass + return 0 + +def strip(ui, repo, *revs, **opts): + """strip changesets and all their descendants from the repository + + The strip command removes the specified changesets and all their + descendants. If the working directory has uncommitted changes, + the operation is aborted unless the --force flag is supplied. + + If a parent of the working directory is stripped, then the working + directory will automatically be updated to the most recent + available ancestor of the stripped parent after the operation + completes. + + Any stripped changesets are stored in ``.hg/strip-backup`` as a + bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can + be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`, + where BUNDLE is the bundle file created by the strip. Note that + the local revision numbers will in general be different after the + restore. + + Use the --no-backup option to discard the backup bundle once the + operation completes. 
+ + Return 0 on success. + """ + backup = 'all' + if opts.get('backup'): + backup = 'strip' + elif opts.get('no_backup') or opts.get('nobackup'): + backup = 'none' + + cl = repo.changelog + revs = set(cmdutil.revrange(repo, revs)) + if not revs: + raise util.Abort(_('empty revision set')) + + descendants = set(cl.descendants(*revs)) + strippedrevs = revs.union(descendants) + roots = revs.difference(descendants) + + update = False + # if one of the wdir parent is stripped we'll need + # to update away to an earlier revision + for p in repo.dirstate.parents(): + if p != nullid and cl.rev(p) in strippedrevs: + update = True + break + + rootnodes = set(cl.node(r) for r in roots) + + q = repo.mq + if q.applied: + # refresh queue state if we're about to strip + # applied patches + if cl.rev(repo.lookup('qtip')) in strippedrevs: + q.applied_dirty = True + start = 0 + end = len(q.applied) + for i, statusentry in enumerate(q.applied): + if statusentry.node in rootnodes: + # if one of the stripped roots is an applied + # patch, only part of the queue is stripped + start = i + break + del q.applied[start:end] + q.save_dirty() + + revs = list(rootnodes) + if update and opts.get('keep'): + wlock = repo.wlock() + try: + urev = repo.mq.qparents(repo, revs[0]) + repo.dirstate.rebuild(urev, repo[urev].manifest()) + repo.dirstate.write() + update = False + finally: + wlock.release() + + repo.mq.strip(repo, revs, backup=backup, update=update, + force=opts.get('force')) + return 0 + +def select(ui, repo, *args, **opts): + '''set or print guarded patches to push + + Use the :hg:`qguard` command to set or print guards on patch, then use + qselect to tell mq which guards to use. A patch will be pushed if + it has no guards or any positive guards match the currently + selected guard, but will not be pushed if any negative guards + match the current guard. 
For example:: + + qguard foo.patch -stable (negative guard) + qguard bar.patch +stable (positive guard) + qselect stable + + This activates the "stable" guard. mq will skip foo.patch (because + it has a negative match) but push bar.patch (because it has a + positive match). + + With no arguments, prints the currently active guards. + With one argument, sets the active guard. + + Use -n/--none to deactivate guards (no other arguments needed). + When no guards are active, patches with positive guards are + skipped and patches with negative guards are pushed. + + qselect can change the guards on applied patches. It does not pop + guarded patches by default. Use --pop to pop back to the last + applied patch that is not guarded. Use --reapply (which implies + --pop) to push back to the current patch afterwards, but skip + guarded patches. + + Use -s/--series to print a list of all guards in the series file + (no other arguments needed). Use -v for more information. + + Returns 0 on success.''' + + q = repo.mq + guards = q.active() + if args or opts.get('none'): + old_unapplied = q.unapplied(repo) + old_guarded = [i for i in xrange(len(q.applied)) if + not q.pushable(i)[0]] + q.set_active(args) + q.save_dirty() + if not args: + ui.status(_('guards deactivated\n')) + if not opts.get('pop') and not opts.get('reapply'): + unapplied = q.unapplied(repo) + guarded = [i for i in xrange(len(q.applied)) + if not q.pushable(i)[0]] + if len(unapplied) != len(old_unapplied): + ui.status(_('number of unguarded, unapplied patches has ' + 'changed from %d to %d\n') % + (len(old_unapplied), len(unapplied))) + if len(guarded) != len(old_guarded): + ui.status(_('number of guarded, applied patches has changed ' + 'from %d to %d\n') % + (len(old_guarded), len(guarded))) + elif opts.get('series'): + guards = {} + noguards = 0 + for gs in q.series_guards: + if not gs: + noguards += 1 + for g in gs: + guards.setdefault(g, 0) + guards[g] += 1 + if ui.verbose: + guards['NONE'] = noguards + 
guards = guards.items() + guards.sort(key=lambda x: x[0][1:]) + if guards: + ui.note(_('guards in series file:\n')) + for guard, count in guards: + ui.note('%2d ' % count) + ui.write(guard, '\n') + else: + ui.note(_('no guards in series file\n')) + else: + if guards: + ui.note(_('active guards:\n')) + for g in guards: + ui.write(g, '\n') + else: + ui.write(_('no active guards\n')) + reapply = opts.get('reapply') and q.applied and q.appliedname(-1) + popped = False + if opts.get('pop') or opts.get('reapply'): + for i in xrange(len(q.applied)): + pushable, reason = q.pushable(i) + if not pushable: + ui.status(_('popping guarded patches\n')) + popped = True + if i == 0: + q.pop(repo, all=True) + else: + q.pop(repo, i - 1) + break + if popped: + try: + if reapply: + ui.status(_('reapplying unguarded patches\n')) + q.push(repo, reapply) + finally: + q.save_dirty() + +def finish(ui, repo, *revrange, **opts): + """move applied patches into repository history + + Finishes the specified revisions (corresponding to applied + patches) by moving them out of mq control into regular repository + history. + + Accepts a revision range or the -a/--applied option. If --applied + is specified, all applied mq revisions are removed from mq + control. Otherwise, the given revisions must be at the base of the + stack of applied patches. + + This can be especially useful if your changes have been applied to + an upstream repository, or if you are about to push your changes + to upstream. + + Returns 0 on success. 
+ """ + if not opts.get('applied') and not revrange: + raise util.Abort(_('no revisions specified')) + elif opts.get('applied'): + revrange = ('qbase::qtip',) + revrange + + q = repo.mq + if not q.applied: + ui.status(_('no patches applied\n')) + return 0 + + revs = cmdutil.revrange(repo, revrange) + q.finish(repo, revs) + q.save_dirty() + return 0 + +def qqueue(ui, repo, name=None, **opts): + '''manage multiple patch queues + + Supports switching between different patch queues, as well as creating + new patch queues and deleting existing ones. + + Omitting a queue name or specifying -l/--list will show you the registered + queues - by default the "normal" patches queue is registered. The currently + active queue will be marked with "(active)". + + To create a new queue, use -c/--create. The queue is automatically made + active, except in the case where there are applied patches from the + currently active queue in the repository. Then the queue will only be + created and switching will fail. + + To delete an existing queue, use --delete. You cannot delete the currently + active queue. + + Returns 0 on success. 
+ ''' + + q = repo.mq + + _defaultqueue = 'patches' + _allqueues = 'patches.queues' + _activequeue = 'patches.queue' + + def _getcurrent(): + cur = os.path.basename(q.path) + if cur.startswith('patches-'): + cur = cur[8:] + return cur + + def _noqueues(): + try: + fh = repo.opener(_allqueues, 'r') + fh.close() + except IOError: + return True + + return False + + def _getqueues(): + current = _getcurrent() + + try: + fh = repo.opener(_allqueues, 'r') + queues = [queue.strip() for queue in fh if queue.strip()] + if current not in queues: + queues.append(current) + except IOError: + queues = [_defaultqueue] + + return sorted(queues) + + def _setactive(name): + if q.applied: + raise util.Abort(_('patches applied - cannot set new queue active')) + _setactivenocheck(name) + + def _setactivenocheck(name): + fh = repo.opener(_activequeue, 'w') + if name != 'patches': + fh.write(name) + fh.close() + + def _addqueue(name): + fh = repo.opener(_allqueues, 'a') + fh.write('%s\n' % (name,)) + fh.close() + + def _queuedir(name): + if name == 'patches': + return repo.join('patches') + else: + return repo.join('patches-' + name) + + def _validname(name): + for n in name: + if n in ':\\/.': + return False + return True + + def _delete(name): + if name not in existing: + raise util.Abort(_('cannot delete queue that does not exist')) + + current = _getcurrent() + + if name == current: + raise util.Abort(_('cannot delete currently active queue')) + + fh = repo.opener('patches.queues.new', 'w') + for queue in existing: + if queue == name: + continue + fh.write('%s\n' % (queue,)) + fh.close() + util.rename(repo.join('patches.queues.new'), repo.join(_allqueues)) + + if not name or opts.get('list'): + current = _getcurrent() + for queue in _getqueues(): + ui.write('%s' % (queue,)) + if queue == current and not ui.quiet: + ui.write(_(' (active)\n')) + else: + ui.write('\n') + return + + if not _validname(name): + raise util.Abort( + _('invalid queue name, may not contain the characters 
":\\/."')) + + existing = _getqueues() + + if opts.get('create'): + if name in existing: + raise util.Abort(_('queue "%s" already exists') % name) + if _noqueues(): + _addqueue(_defaultqueue) + _addqueue(name) + _setactive(name) + elif opts.get('rename'): + current = _getcurrent() + if name == current: + raise util.Abort(_('can\'t rename "%s" to its current name') % name) + if name in existing: + raise util.Abort(_('queue "%s" already exists') % name) + + olddir = _queuedir(current) + newdir = _queuedir(name) + + if os.path.exists(newdir): + raise util.Abort(_('non-queue directory "%s" already exists') % + newdir) + + fh = repo.opener('patches.queues.new', 'w') + for queue in existing: + if queue == current: + fh.write('%s\n' % (name,)) + if os.path.exists(olddir): + util.rename(olddir, newdir) + else: + fh.write('%s\n' % (queue,)) + fh.close() + util.rename(repo.join('patches.queues.new'), repo.join(_allqueues)) + _setactivenocheck(name) + elif opts.get('delete'): + _delete(name) + elif opts.get('purge'): + if name in existing: + _delete(name) + qdir = _queuedir(name) + if os.path.exists(qdir): + shutil.rmtree(qdir) + else: + if name not in existing: + raise util.Abort(_('use --create to create a new queue')) + _setactive(name) + +def reposetup(ui, repo): + class mqrepo(repo.__class__): + @util.propertycache + def mq(self): + return queue(self.ui, self.join("")) + + def abort_if_wdir_patched(self, errmsg, force=False): + if self.mq.applied and not force: + parent = self.dirstate.parents()[0] + if parent in [s.node for s in self.mq.applied]: + raise util.Abort(errmsg) + + def commit(self, text="", user=None, date=None, match=None, + force=False, editor=False, extra={}): + self.abort_if_wdir_patched( + _('cannot commit over an applied mq patch'), + force) + + return super(mqrepo, self).commit(text, user, date, match, force, + editor, extra) + + def push(self, remote, force=False, revs=None, newbranch=False): + if self.mq.applied and not force: + haspatches = True + 
if revs: + # Assume applied patches have no non-patch descendants + # and are not on remote already. If they appear in the + # set of resolved 'revs', bail out. + applied = set(e.node for e in self.mq.applied) + haspatches = bool([n for n in revs if n in applied]) + if haspatches: + raise util.Abort(_('source has mq patches applied')) + return super(mqrepo, self).push(remote, force, revs, newbranch) + + def _findtags(self): + '''augment tags from base class with patch tags''' + result = super(mqrepo, self)._findtags() + + q = self.mq + if not q.applied: + return result + + mqtags = [(patch.node, patch.name) for patch in q.applied] + + if mqtags[-1][0] not in self.changelog.nodemap: + self.ui.warn(_('mq status file refers to unknown node %s\n') + % short(mqtags[-1][0])) + return result + + mqtags.append((mqtags[-1][0], 'qtip')) + mqtags.append((mqtags[0][0], 'qbase')) + mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent')) + tags = result[0] + for patch in mqtags: + if patch[1] in tags: + self.ui.warn(_('Tag %s overrides mq patch of the same name\n') + % patch[1]) + else: + tags[patch[1]] = patch[0] + + return result + + def _branchtags(self, partial, lrev): + q = self.mq + if not q.applied: + return super(mqrepo, self)._branchtags(partial, lrev) + + cl = self.changelog + qbasenode = q.applied[0].node + if qbasenode not in cl.nodemap: + self.ui.warn(_('mq status file refers to unknown node %s\n') + % short(qbasenode)) + return super(mqrepo, self)._branchtags(partial, lrev) + + qbase = cl.rev(qbasenode) + start = lrev + 1 + if start < qbase: + # update the cache (excluding the patches) and save it + ctxgen = (self[r] for r in xrange(lrev + 1, qbase)) + self._updatebranchcache(partial, ctxgen) + self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1) + start = qbase + # if start = qbase, the cache is as updated as it should be. + # if start > qbase, the cache includes (part of) the patches. + # we might as well use it, but we won't save it. 
+ + # update the cache up to the tip + ctxgen = (self[r] for r in xrange(start, len(cl))) + self._updatebranchcache(partial, ctxgen) + + return partial + + if repo.local(): + repo.__class__ = mqrepo + +def mqimport(orig, ui, repo, *args, **kwargs): + if (hasattr(repo, 'abort_if_wdir_patched') + and not kwargs.get('no_commit', False)): + repo.abort_if_wdir_patched(_('cannot import over an applied patch'), + kwargs.get('force')) + return orig(ui, repo, *args, **kwargs) + +def mqinit(orig, ui, *args, **kwargs): + mq = kwargs.pop('mq', None) + + if not mq: + return orig(ui, *args, **kwargs) + + if args: + repopath = args[0] + if not hg.islocal(repopath): + raise util.Abort(_('only a local queue repository ' + 'may be initialized')) + else: + repopath = cmdutil.findrepo(os.getcwd()) + if not repopath: + raise util.Abort(_('there is no Mercurial repository here ' + '(.hg not found)')) + repo = hg.repository(ui, repopath) + return qinit(ui, repo, True) + +def mqcommand(orig, ui, repo, *args, **kwargs): + """Add --mq option to operate on patch repository instead of main""" + + # some commands do not like getting unknown options + mq = kwargs.pop('mq', None) + + if not mq: + return orig(ui, repo, *args, **kwargs) + + q = repo.mq + r = q.qrepo() + if not r: + raise util.Abort(_('no queue repository')) + return orig(r.ui, r, *args, **kwargs) + +def summary(orig, ui, repo, *args, **kwargs): + r = orig(ui, repo, *args, **kwargs) + q = repo.mq + m = [] + a, u = len(q.applied), len(q.unapplied(repo)) + if a: + m.append(ui.label(_("%d applied"), 'qseries.applied') % a) + if u: + m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u) + if m: + ui.write("mq: %s\n" % ', '.join(m)) + else: + ui.note(_("mq: (empty queue)\n")) + return r + +def uisetup(ui): + mqopt = [('', 'mq', None, _("operate on patch repository"))] + + extensions.wrapcommand(commands.table, 'import', mqimport) + extensions.wrapcommand(commands.table, 'summary', summary) + + entry = 
extensions.wrapcommand(commands.table, 'init', mqinit) + entry[1].extend(mqopt) + + nowrap = set(commands.norepo.split(" ") + ['qrecord']) + + def dotable(cmdtable): + for cmd in cmdtable.keys(): + cmd = cmdutil.parsealiases(cmd)[0] + if cmd in nowrap: + continue + entry = extensions.wrapcommand(cmdtable, cmd, mqcommand) + entry[1].extend(mqopt) + + dotable(commands.table) + + for extname, extmodule in extensions.extensions(): + if extmodule.__file__ != __file__: + dotable(getattr(extmodule, 'cmdtable', {})) + +seriesopts = [('s', 'summary', None, _('print first line of patch header'))] + +cmdtable = { + "qapplied": + (applied, + [('1', 'last', None, _('show only the last patch'))] + seriesopts, + _('hg qapplied [-1] [-s] [PATCH]')), + "qclone": + (clone, + [('', 'pull', None, _('use pull protocol to copy metadata')), + ('U', 'noupdate', None, _('do not update the new working directories')), + ('', 'uncompressed', None, + _('use uncompressed transfer (fast over LAN)')), + ('p', 'patches', '', + _('location of source patch repository'), _('REPO')), + ] + commands.remoteopts, + _('hg qclone [OPTION]... SOURCE [DEST]')), + "qcommit|qci": + (commit, + commands.table["^commit|ci"][1], + _('hg qcommit [OPTION]... [FILE]...')), + "^qdiff": + (diff, + commands.diffopts + commands.diffopts2 + commands.walkopts, + _('hg qdiff [OPTION]... [FILE]...')), + "qdelete|qremove|qrm": + (delete, + [('k', 'keep', None, _('keep patch file')), + ('r', 'rev', [], + _('stop managing a revision (DEPRECATED)'), _('REV'))], + _('hg qdelete [-k] [PATCH]...')), + 'qfold': + (fold, + [('e', 'edit', None, _('edit patch header')), + ('k', 'keep', None, _('keep folded patch files')), + ] + commands.commitopts, + _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')), + 'qgoto': + (goto, + [('f', 'force', None, _('overwrite any local changes'))], + _('hg qgoto [OPTION]... 
PATCH')), + 'qguard': + (guard, + [('l', 'list', None, _('list all patches and guards')), + ('n', 'none', None, _('drop all guards'))], + _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')), + 'qheader': (header, [], _('hg qheader [PATCH]')), + "qimport": + (qimport, + [('e', 'existing', None, _('import file in patch directory')), + ('n', 'name', '', + _('name of patch file'), _('NAME')), + ('f', 'force', None, _('overwrite existing files')), + ('r', 'rev', [], + _('place existing revisions under mq control'), _('REV')), + ('g', 'git', None, _('use git extended diff format')), + ('P', 'push', None, _('qpush after importing'))], + _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')), + "^qinit": + (init, + [('c', 'create-repo', None, _('create queue repository'))], + _('hg qinit [-c]')), + "^qnew": + (new, + [('e', 'edit', None, _('edit commit message')), + ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')), + ('g', 'git', None, _('use git extended diff format')), + ('U', 'currentuser', None, _('add "From: <current user>" to patch')), + ('u', 'user', '', + _('add "From: <USER>" to patch'), _('USER')), + ('D', 'currentdate', None, _('add "Date: <current date>" to patch')), + ('d', 'date', '', + _('add "Date: <DATE>" to patch'), _('DATE')) + ] + commands.walkopts + commands.commitopts, + _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')), + "qnext": (next, [] + seriesopts, _('hg qnext [-s]')), + "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')), + "^qpop": + (pop, + [('a', 'all', None, _('pop all patches')), + ('n', 'name', '', + _('queue name to pop (DEPRECATED)'), _('NAME')), + ('f', 'force', None, _('forget any local changes to patched files'))], + _('hg qpop [-a] [-f] [PATCH | INDEX]')), + "^qpush": + (push, + [('f', 'force', None, _('apply on top of local changes')), + ('l', 'list', None, _('list patch name in commit text')), + ('a', 'all', None, _('apply all patches')), + ('m', 'merge', None, _('merge from another 
queue (DEPRECATED)')), + ('n', 'name', '', + _('merge queue name (DEPRECATED)'), _('NAME')), + ('', 'move', None, _('reorder patch series and apply only the patch'))], + _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]')), + "^qrefresh": + (refresh, + [('e', 'edit', None, _('edit commit message')), + ('g', 'git', None, _('use git extended diff format')), + ('s', 'short', None, + _('refresh only files already in the patch and specified files')), + ('U', 'currentuser', None, + _('add/update author field in patch with current user')), + ('u', 'user', '', + _('add/update author field in patch with given user'), _('USER')), + ('D', 'currentdate', None, + _('add/update date field in patch with current date')), + ('d', 'date', '', + _('add/update date field in patch with given date'), _('DATE')) + ] + commands.walkopts + commands.commitopts, + _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')), + 'qrename|qmv': + (rename, [], _('hg qrename PATCH1 [PATCH2]')), + "qrestore": + (restore, + [('d', 'delete', None, _('delete save entry')), + ('u', 'update', None, _('update queue working directory'))], + _('hg qrestore [-d] [-u] REV')), + "qsave": + (save, + [('c', 'copy', None, _('copy patch directory')), + ('n', 'name', '', + _('copy directory name'), _('NAME')), + ('e', 'empty', None, _('clear queue status file')), + ('f', 'force', None, _('force copy'))] + commands.commitopts, + _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')), + "qselect": + (select, + [('n', 'none', None, _('disable all guards')), + ('s', 'series', None, _('list all guards in series file')), + ('', 'pop', None, _('pop to before first guarded applied patch')), + ('', 'reapply', None, _('pop, then reapply patches'))], + _('hg qselect [OPTION]... 
[GUARD]...')), + "qseries": + (series, + [('m', 'missing', None, _('print patches not in series')), + ] + seriesopts, + _('hg qseries [-ms]')), + "strip": + (strip, + [('f', 'force', None, _('force removal of changesets even if the ' + 'working directory has uncommitted changes')), + ('b', 'backup', None, _('bundle only changesets with local revision' + ' number greater than REV which are not' + ' descendants of REV (DEPRECATED)')), + ('n', 'no-backup', None, _('no backups')), + ('', 'nobackup', None, _('no backups (DEPRECATED)')), + ('k', 'keep', None, _("do not modify working copy during strip"))], + _('hg strip [-k] [-f] [-n] REV...')), + "qtop": (top, [] + seriesopts, _('hg qtop [-s]')), + "qunapplied": + (unapplied, + [('1', 'first', None, _('show only the first patch'))] + seriesopts, + _('hg qunapplied [-1] [-s] [PATCH]')), + "qfinish": + (finish, + [('a', 'applied', None, _('finish all applied changesets'))], + _('hg qfinish [-a] [REV]...')), + 'qqueue': + (qqueue, + [ + ('l', 'list', False, _('list all available queues')), + ('c', 'create', False, _('create new queue')), + ('', 'rename', False, _('rename active queue')), + ('', 'delete', False, _('delete reference to queue')), + ('', 'purge', False, _('delete queue, and remove patch dir')), + ], + _('[OPTION] [QUEUE]')), +} + +colortable = {'qguard.negative': 'red', + 'qguard.positive': 'yellow', + 'qguard.unguarded': 'green', + 'qseries.applied': 'blue bold underline', + 'qseries.guarded': 'black bold', + 'qseries.missing': 'red bold', + 'qseries.unapplied': 'black bold'} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyo Binary files differnew file mode 100644 index 0000000..ed0654f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py new file mode 100644 index 
0000000..5ce2eee --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py @@ -0,0 +1,316 @@ +# notify.py - email notifications for mercurial +# +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''hooks for sending email notifications at commit/push time + +Subscriptions can be managed through a hgrc file. Default mode is to +print messages to stdout, for testing and configuring. + +To use, configure the notify extension and enable it in hgrc like +this:: + + [extensions] + notify = + + [hooks] + # one email for each incoming changeset + incoming.notify = python:hgext.notify.hook + # batch emails when many changesets incoming at one time + changegroup.notify = python:hgext.notify.hook + + [notify] + # config items go here + +Required configuration items:: + + config = /path/to/file # file containing subscriptions + +Optional configuration items:: + + test = True # print messages to stdout for testing + strip = 3 # number of slashes to strip for url paths + domain = example.com # domain to use if committer missing domain + style = ... # style file to use when formatting email + template = ... # template to use when formatting email + incoming = ... # template to use when run as incoming hook + changegroup = ... # template when run as changegroup hook + maxdiff = 300 # max lines of diffs to include (0=none, -1=all) + maxsubject = 67 # truncate subject line longer than this + diffstat = True # add a diffstat before the diff content + sources = serve # notify if source of incoming changes in this list + # (serve == ssh or http, push, pull, bundle) + merge = False # send notification for merges (default True) + [email] + from = user@host.com # email address to send as if none given + [web] + baseurl = http://hgserver/... 
# root of hg web site for browsing commits + +The notify config file has same format as a regular hgrc file. It has +two sections so you can express subscriptions in whatever way is +handier for you. + +:: + + [usersubs] + # key is subscriber email, value is ","-separated list of glob patterns + user@host = pattern + + [reposubs] + # key is glob pattern, value is ","-separated list of subscriber emails + pattern = user@host + +Glob patterns are matched against path to repository root. + +If you like, you can put notify config file in repository that users +can push changes to, they can manage their own subscriptions. +''' + +from mercurial.i18n import _ +from mercurial import patch, cmdutil, templater, util, mail +import email.Parser, email.Errors, fnmatch, socket, time + +# template for single changeset can include email headers. +single_template = ''' +Subject: changeset in {webroot}: {desc|firstline|strip} +From: {author} + +changeset {node|short} in {root} +details: {baseurl}{webroot}?cmd=changeset;node={node|short} +description: +\t{desc|tabindent|strip} +'''.lstrip() + +# template for multiple changesets should not contain email headers, +# because only first set of headers will be used and result will look +# strange. 
+multiple_template = ''' +changeset {node|short} in {root} +details: {baseurl}{webroot}?cmd=changeset;node={node|short} +summary: {desc|firstline} +''' + +deftemplates = { + 'changegroup': multiple_template, +} + +class notifier(object): + '''email notification class.''' + + def __init__(self, ui, repo, hooktype): + self.ui = ui + cfg = self.ui.config('notify', 'config') + if cfg: + self.ui.readconfig(cfg, sections=['usersubs', 'reposubs']) + self.repo = repo + self.stripcount = int(self.ui.config('notify', 'strip', 0)) + self.root = self.strip(self.repo.root) + self.domain = self.ui.config('notify', 'domain') + self.test = self.ui.configbool('notify', 'test', True) + self.charsets = mail._charsets(self.ui) + self.subs = self.subscribers() + self.merge = self.ui.configbool('notify', 'merge', True) + + mapfile = self.ui.config('notify', 'style') + template = (self.ui.config('notify', hooktype) or + self.ui.config('notify', 'template')) + self.t = cmdutil.changeset_templater(self.ui, self.repo, + False, None, mapfile, False) + if not mapfile and not template: + template = deftemplates.get(hooktype) or single_template + if template: + template = templater.parsestring(template, quoted=False) + self.t.use_template(template) + + def strip(self, path): + '''strip leading slashes from local path, turn into web-safe path.''' + + path = util.pconvert(path) + count = self.stripcount + while count > 0: + c = path.find('/') + if c == -1: + break + path = path[c + 1:] + count -= 1 + return path + + def fixmail(self, addr): + '''try to clean up email addresses.''' + + addr = util.email(addr.strip()) + if self.domain: + a = addr.find('@localhost') + if a != -1: + addr = addr[:a] + if '@' not in addr: + return addr + '@' + self.domain + return addr + + def subscribers(self): + '''return list of email addresses of subscribers to this repo.''' + subs = set() + for user, pats in self.ui.configitems('usersubs'): + for pat in pats.split(','): + if fnmatch.fnmatch(self.repo.root, 
pat.strip()): + subs.add(self.fixmail(user)) + for pat, users in self.ui.configitems('reposubs'): + if fnmatch.fnmatch(self.repo.root, pat): + for user in users.split(','): + subs.add(self.fixmail(user)) + return [mail.addressencode(self.ui, s, self.charsets, self.test) + for s in sorted(subs)] + + def url(self, path=None): + return self.ui.config('web', 'baseurl') + (path or self.root) + + def node(self, ctx, **props): + '''format one changeset, unless it is a suppressed merge.''' + if not self.merge and len(ctx.parents()) > 1: + return False + self.t.show(ctx, changes=ctx.changeset(), + baseurl=self.ui.config('web', 'baseurl'), + root=self.repo.root, webroot=self.root, **props) + return True + + def skipsource(self, source): + '''true if incoming changes from this source should be skipped.''' + ok_sources = self.ui.config('notify', 'sources', 'serve').split() + return source not in ok_sources + + def send(self, ctx, count, data): + '''send message.''' + + p = email.Parser.Parser() + try: + msg = p.parsestr(data) + except email.Errors.MessageParseError, inst: + raise util.Abort(inst) + + # store sender and subject + sender, subject = msg['From'], msg['Subject'] + del msg['From'], msg['Subject'] + + if not msg.is_multipart(): + # create fresh mime message from scratch + # (multipart templates must take care of this themselves) + headers = msg.items() + payload = msg.get_payload() + # for notification prefer readability over data precision + msg = mail.mimeencode(self.ui, payload, self.charsets, self.test) + # reinstate custom headers + for k, v in headers: + msg[k] = v + + msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2") + + # try to make subject line exist and be useful + if not subject: + if count > 1: + subject = _('%s: %d new changesets') % (self.root, count) + else: + s = ctx.description().lstrip().split('\n', 1)[0].rstrip() + subject = '%s: %s' % (self.root, s) + maxsubject = int(self.ui.config('notify', 'maxsubject', 67)) + if maxsubject: + 
subject = util.ellipsis(subject, maxsubject) + msg['Subject'] = mail.headencode(self.ui, subject, + self.charsets, self.test) + + # try to make message have proper sender + if not sender: + sender = self.ui.config('email', 'from') or self.ui.username() + if '@' not in sender or '@localhost' in sender: + sender = self.fixmail(sender) + msg['From'] = mail.addressencode(self.ui, sender, + self.charsets, self.test) + + msg['X-Hg-Notification'] = 'changeset %s' % ctx + if not msg['Message-Id']: + msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' % + (ctx, int(time.time()), + hash(self.repo.root), socket.getfqdn())) + msg['To'] = ', '.join(self.subs) + + msgtext = msg.as_string() + if self.test: + self.ui.write(msgtext) + if not msgtext.endswith('\n'): + self.ui.write('\n') + else: + self.ui.status(_('notify: sending %d subscribers %d changes\n') % + (len(self.subs), count)) + mail.sendmail(self.ui, util.email(msg['From']), + self.subs, msgtext) + + def diff(self, ctx, ref=None): + + maxdiff = int(self.ui.config('notify', 'maxdiff', 300)) + prev = ctx.parents()[0].node() + ref = ref and ref.node() or ctx.node() + chunks = patch.diff(self.repo, prev, ref, opts=patch.diffopts(self.ui)) + difflines = ''.join(chunks).splitlines() + + if self.ui.configbool('notify', 'diffstat', True): + s = patch.diffstat(difflines) + # s may be nil, don't include the header if it is + if s: + self.ui.write('\ndiffstat:\n\n%s' % s) + + if maxdiff == 0: + return + elif maxdiff > 0 and len(difflines) > maxdiff: + msg = _('\ndiffs (truncated from %d to %d lines):\n\n') + self.ui.write(msg % (len(difflines), maxdiff)) + difflines = difflines[:maxdiff] + elif difflines: + self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines)) + + self.ui.write("\n".join(difflines)) + +def hook(ui, repo, hooktype, node=None, source=None, **kwargs): + '''send email notifications to interested subscribers. + + if used as changegroup hook, send one email for all changesets in + changegroup. 
else send one email per changeset.''' + + n = notifier(ui, repo, hooktype) + ctx = repo[node] + + if not n.subs: + ui.debug('notify: no subscribers to repository %s\n' % n.root) + return + if n.skipsource(source): + ui.debug('notify: changes have source "%s" - skipping\n' % source) + return + + ui.pushbuffer() + data = '' + count = 0 + if hooktype == 'changegroup': + start, end = ctx.rev(), len(repo) + for rev in xrange(start, end): + if n.node(repo[rev]): + count += 1 + else: + data += ui.popbuffer() + ui.note(_('notify: suppressing notification for merge %d:%s\n') % + (rev, repo[rev].hex()[:12])) + ui.pushbuffer() + if count: + n.diff(ctx, repo['tip']) + else: + if not n.node(ctx): + ui.popbuffer() + ui.note(_('notify: suppressing notification for merge %d:%s\n') % + (ctx.rev(), ctx.hex()[:12])) + return + count += 1 + n.diff(ctx) + + data += ui.popbuffer() + if count: + n.send(ctx, count, data) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyo Binary files differnew file mode 100644 index 0000000..087cf73 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py new file mode 100644 index 0000000..6d73c34 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py @@ -0,0 +1,113 @@ +# pager.py - display output using a pager +# +# Copyright 2008 David Soria Parra <dsp@php.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +# +# To load the extension, add it to your configuration file: +# +# [extension] +# pager = +# +# Run "hg help pager" to get info on configuration. 
+ +'''browse command output with an external pager + +To set the pager that should be used, set the application variable:: + + [pager] + pager = less -FRSX + +If no pager is set, the pager extensions uses the environment variable +$PAGER. If neither pager.pager, nor $PAGER is set, no pager is used. + +If you notice "BROKEN PIPE" error messages, you can disable them by +setting:: + + [pager] + quiet = True + +You can disable the pager for certain commands by adding them to the +pager.ignore list:: + + [pager] + ignore = version, help, update + +You can also enable the pager only for certain commands using +pager.attend. Below is the default list of commands to be paged:: + + [pager] + attend = annotate, cat, diff, export, glog, log, qdiff + +Setting pager.attend to an empty value will cause all commands to be +paged. + +If pager.attend is present, pager.ignore will be ignored. + +To ignore global commands like :hg:`version` or :hg:`help`, you have +to specify them in your user configuration file. + +The --pager=... option can also be used to control when the pager is +used. Use a boolean value like yes, no, on, off, or use auto for +normal behavior. 
+''' + +import sys, os, signal, shlex, errno +from mercurial import commands, dispatch, util, extensions +from mercurial.i18n import _ + +def _runpager(p): + if not hasattr(os, 'fork'): + sys.stderr = sys.stdout = util.popen(p, 'wb') + return + fdin, fdout = os.pipe() + pid = os.fork() + if pid == 0: + os.close(fdin) + os.dup2(fdout, sys.stdout.fileno()) + os.dup2(fdout, sys.stderr.fileno()) + os.close(fdout) + return + os.dup2(fdin, sys.stdin.fileno()) + os.close(fdin) + os.close(fdout) + try: + os.execvp('/bin/sh', ['/bin/sh', '-c', p]) + except OSError, e: + if e.errno == errno.ENOENT: + # no /bin/sh, try executing the pager directly + args = shlex.split(p) + os.execvp(args[0], args) + else: + raise + +def uisetup(ui): + if ui.plain(): + return + + def pagecmd(orig, ui, options, cmd, cmdfunc): + p = ui.config("pager", "pager", os.environ.get("PAGER")) + if p and sys.stdout.isatty() and '--debugger' not in sys.argv: + attend = ui.configlist('pager', 'attend', attended) + auto = options['pager'] == 'auto' + always = util.parsebool(options['pager']) + if (always or auto and + (cmd in attend or + (cmd not in ui.configlist('pager', 'ignore') and not attend))): + ui.setconfig('ui', 'formatted', ui.formatted()) + ui.setconfig('ui', 'interactive', False) + _runpager(p) + if ui.configbool('pager', 'quiet'): + signal.signal(signal.SIGPIPE, signal.SIG_DFL) + return orig(ui, options, cmd, cmdfunc) + + extensions.wrapfunction(dispatch, '_runcommand', pagecmd) + +def extsetup(ui): + commands.globalopts.append( + ('', 'pager', 'auto', + _("when to paginate (boolean, always, auto, or never)"), + _('TYPE'))) + +attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff'] diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyo Binary files differnew file mode 100644 index 0000000..0b020cf --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyo diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py new file mode 100644 index 0000000..d66be24 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py @@ -0,0 +1,96 @@ +# Mercurial extension to make it easy to refer to the parent of a revision +# +# Copyright (C) 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''interpret suffixes to refer to ancestor revisions + +This extension allows you to use git-style suffixes to refer to the +ancestors of a specific revision. + +For example, if you can refer to a revision as "foo", then:: + + foo^N = Nth parent of foo + foo^0 = foo + foo^1 = first parent of foo + foo^2 = second parent of foo + foo^ = foo^1 + + foo~N = Nth first grandparent of foo + foo~0 = foo + foo~1 = foo^1 = foo^ = first parent of foo + foo~2 = foo^1^1 = foo^^ = first parent of first parent of foo +''' +from mercurial import error + +def reposetup(ui, repo): + if not repo.local(): + return + + class parentrevspecrepo(repo.__class__): + def lookup(self, key): + try: + _super = super(parentrevspecrepo, self) + return _super.lookup(key) + except error.RepoError: + pass + + circ = key.find('^') + tilde = key.find('~') + if circ < 0 and tilde < 0: + raise + elif circ >= 0 and tilde >= 0: + end = min(circ, tilde) + else: + end = max(circ, tilde) + + cl = self.changelog + base = key[:end] + try: + node = _super.lookup(base) + except error.RepoError: + # eek - reraise the first error + return _super.lookup(key) + + rev = cl.rev(node) + suffix = key[end:] + i = 0 + while i < len(suffix): + # foo^N => Nth parent of foo + # foo^0 == foo + # foo^1 == foo^ == 1st parent of foo + # foo^2 == 2nd parent of foo + if suffix[i] == '^': + j = i + 1 + p = cl.parentrevs(rev) + if j < len(suffix) and suffix[j].isdigit(): 
+ j += 1 + n = int(suffix[i + 1:j]) + if n > 2 or n == 2 and p[1] == -1: + raise + else: + n = 1 + if n: + rev = p[n - 1] + i = j + # foo~N => Nth first grandparent of foo + # foo~0 = foo + # foo~1 = foo^1 == foo^ == 1st parent of foo + # foo~2 = foo^1^1 == foo^^ == 1st parent of 1st parent of foo + elif suffix[i] == '~': + j = i + 1 + while j < len(suffix) and suffix[j].isdigit(): + j += 1 + if j == i + 1: + raise + n = int(suffix[i + 1:j]) + for k in xrange(n): + rev = cl.parentrevs(rev)[0] + i = j + else: + raise + return cl.node(rev) + + repo.__class__ = parentrevspecrepo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyo Binary files differnew file mode 100644 index 0000000..3a5be89 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py new file mode 100644 index 0000000..93ea4cb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py @@ -0,0 +1,553 @@ +# patchbomb.py - sending Mercurial changesets as patch emails +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to send changesets as (a series of) patch emails + +The series is started off with a "[PATCH 0 of N]" introduction, which +describes the series as a whole. + +Each patch email has a Subject line of "[PATCH M of N] ...", using the +first line of the changeset description as the subject text. The +message contains two or three body parts: + +- The changeset description. +- [Optional] The result of running diffstat on the patch. +- The patch itself, as generated by :hg:`export`. 
+ +Each message refers to the first in the series using the In-Reply-To +and References headers, so they will show up as a sequence in threaded +mail and news readers, and in mail archives. + +To configure other defaults, add a section like this to your hgrc +file:: + + [email] + from = My Name <my@email> + to = recipient1, recipient2, ... + cc = cc1, cc2, ... + bcc = bcc1, bcc2, ... + reply-to = address1, address2, ... + +Use ``[patchbomb]`` as configuration section name if you need to +override global ``[email]`` address settings. + +Then you can use the :hg:`email` command to mail a series of +changesets as a patchbomb. + +You can also either configure the method option in the email section +to be a sendmail compatible mailer or fill out the [smtp] section so +that the patchbomb extension can automatically send patchbombs +directly from the commandline. See the [email] and [smtp] sections in +hgrc(5) for details. +''' + +import os, errno, socket, tempfile, cStringIO, time +import email.MIMEMultipart, email.MIMEBase +import email.Utils, email.Encoders, email.Generator +from mercurial import cmdutil, commands, hg, mail, patch, util, discovery, url +from mercurial.i18n import _ +from mercurial.node import bin + +def prompt(ui, prompt, default=None, rest=':'): + if not ui.interactive() and default is None: + raise util.Abort(_("%s Please enter a valid value" % (prompt + rest))) + if default: + prompt += ' [%s]' % default + prompt += rest + while True: + r = ui.prompt(prompt, default=default) + if r: + return r + if default is not None: + return default + ui.warn(_('Please enter a valid value.\n')) + +def introneeded(opts, number): + '''is an introductory message required?''' + return number > 1 or opts.get('intro') or opts.get('desc') + +def makepatch(ui, repo, patchlines, opts, _charsets, idx, total, + patchname=None): + + desc = [] + node = None + body = '' + + for line in patchlines: + if line.startswith('#'): + if line.startswith('# Node ID'): + node = 
line.split()[-1] + continue + if line.startswith('diff -r') or line.startswith('diff --git'): + break + desc.append(line) + + if not patchname and not node: + raise ValueError + + if opts.get('attach'): + body = ('\n'.join(desc[1:]).strip() or + 'Patch subject is complete summary.') + body += '\n\n\n' + + if opts.get('plain'): + while patchlines and patchlines[0].startswith('# '): + patchlines.pop(0) + if patchlines: + patchlines.pop(0) + while patchlines and not patchlines[0].strip(): + patchlines.pop(0) + + ds = patch.diffstat(patchlines) + if opts.get('diffstat'): + body += ds + '\n\n' + + if opts.get('attach') or opts.get('inline'): + msg = email.MIMEMultipart.MIMEMultipart() + if body: + msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test'))) + p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test')) + binnode = bin(node) + # if node is mq patch, it will have the patch file's name as a tag + if not patchname: + patchtags = [t for t in repo.nodetags(binnode) + if t.endswith('.patch') or t.endswith('.diff')] + if patchtags: + patchname = patchtags[0] + elif total > 1: + patchname = cmdutil.make_filename(repo, '%b-%n.patch', + binnode, seqno=idx, total=total) + else: + patchname = cmdutil.make_filename(repo, '%b.patch', binnode) + disposition = 'inline' + if opts.get('attach'): + disposition = 'attachment' + p['Content-Disposition'] = disposition + '; filename=' + patchname + msg.attach(p) + else: + body += '\n'.join(patchlines) + msg = mail.mimetextpatch(body, display=opts.get('test')) + + flag = ' '.join(opts.get('flag')) + if flag: + flag = ' ' + flag + + subj = desc[0].strip().rstrip('. 
') + if not introneeded(opts, total): + subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj) + else: + tlen = len(str(total)) + subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj) + msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test')) + msg['X-Mercurial-Node'] = node + return msg, subj, ds + +def patchbomb(ui, repo, *revs, **opts): + '''send changesets by email + + By default, diffs are sent in the format generated by + :hg:`export`, one per message. The series starts with a "[PATCH 0 + of N]" introduction, which describes the series as a whole. + + Each patch email has a Subject line of "[PATCH M of N] ...", using + the first line of the changeset description as the subject text. + The message contains two or three parts. First, the changeset + description. + + With the -d/--diffstat option, if the diffstat program is + installed, the result of running diffstat on the patch is inserted. + + Finally, the patch itself, as generated by :hg:`export`. + + With the -d/--diffstat or -c/--confirm options, you will be presented + with a final summary of all messages and asked for confirmation before + the messages are sent. + + By default the patch is included as text in the email body for + easy reviewing. Using the -a/--attach option will instead create + an attachment for the patch. With -i/--inline an inline attachment + will be created. + + With -o/--outgoing, emails will be generated for patches not found + in the destination repository (or only those which are ancestors + of the specified revisions if any are provided) + + With -b/--bundle, changesets are selected as for --outgoing, but a + single email containing a binary Mercurial bundle as an attachment + will be sent. + + With -m/--mbox, instead of previewing each patchbomb message in a + pager or sending the messages directly, it will create a UNIX + mailbox file with the patch emails. 
This mailbox file can be + previewed with any mail user agent which supports UNIX mbox + files. + + With -n/--test, all steps will run, but mail will not be sent. + You will be prompted for an email recipient address, a subject and + an introductory message describing the patches of your patchbomb. + Then when all is done, patchbomb messages are displayed. If the + PAGER environment variable is set, your pager will be fired up once + for each patchbomb message, so you can verify everything is alright. + + Examples:: + + hg email -r 3000 # send patch 3000 only + hg email -r 3000 -r 3001 # send patches 3000 and 3001 + hg email -r 3000:3005 # send patches 3000 through 3005 + hg email 3000 # send patch 3000 (deprecated) + + hg email -o # send all patches not in default + hg email -o DEST # send all patches not in DEST + hg email -o -r 3000 # send all ancestors of 3000 not in default + hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST + + hg email -b # send bundle of all patches not in default + hg email -b DEST # send bundle of all patches not in DEST + hg email -b -r 3000 # bundle of all ancestors of 3000 not in default + hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST + + hg email -o -m mbox && # generate an mbox file... + mutt -R -f mbox # ... and view it with mutt + hg email -o -m mbox && # generate an mbox file ... + formail -s sendmail \\ # ... and use formail to send from the mbox + -bm -t < mbox # ... using sendmail + + Before using this command, you will need to enable email in your + hgrc. See the [email] section in hgrc(5) for details. 
+ ''' + + _charsets = mail._charsets(ui) + + bundle = opts.get('bundle') + date = opts.get('date') + mbox = opts.get('mbox') + outgoing = opts.get('outgoing') + rev = opts.get('rev') + # internal option used by pbranches + patches = opts.get('patches') + + def getoutgoing(dest, revs): + '''Return the revisions present locally but not in dest''' + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest) + revs, checkout = hg.addbranchrevs(repo, repo, branches, revs) + if revs: + revs = [repo.lookup(rev) for rev in revs] + other = hg.repository(hg.remoteui(repo, opts), dest) + ui.status(_('comparing with %s\n') % url.hidepassword(dest)) + o = discovery.findoutgoing(repo, other) + if not o: + ui.status(_("no changes found\n")) + return [] + o = repo.changelog.nodesbetween(o, revs)[0] + return [str(repo.changelog.rev(r)) for r in o] + + def getpatches(revs): + for r in cmdutil.revrange(repo, revs): + output = cStringIO.StringIO() + cmdutil.export(repo, [r], fp=output, + opts=patch.diffopts(ui, opts)) + yield output.getvalue().split('\n') + + def getbundle(dest): + tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-') + tmpfn = os.path.join(tmpdir, 'bundle') + try: + commands.bundle(ui, repo, tmpfn, dest, **opts) + return open(tmpfn, 'rb').read() + finally: + try: + os.unlink(tmpfn) + except: + pass + os.rmdir(tmpdir) + + if not (opts.get('test') or mbox): + # really sending + mail.validateconfig(ui) + + if not (revs or rev or outgoing or bundle or patches): + raise util.Abort(_('specify at least one changeset with -r or -o')) + + if outgoing and bundle: + raise util.Abort(_("--outgoing mode always on with --bundle;" + " do not re-specify --outgoing")) + + if outgoing or bundle: + if len(revs) > 1: + raise util.Abort(_("too many destinations")) + dest = revs and revs[0] or None + revs = [] + + if rev: + if revs: + raise util.Abort(_('use only one form to specify the revision')) + revs = rev + + if outgoing: + revs = 
getoutgoing(dest, rev) + if bundle: + opts['revs'] = revs + + # start + if date: + start_time = util.parsedate(date) + else: + start_time = util.makedate() + + def genmsgid(id): + return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn()) + + def getdescription(body, sender): + if opts.get('desc'): + body = open(opts.get('desc')).read() + else: + ui.write(_('\nWrite the introductory message for the ' + 'patch series.\n\n')) + body = ui.edit(body, sender) + return body + + def getpatchmsgs(patches, patchnames=None): + jumbo = [] + msgs = [] + + ui.write(_('This patch series consists of %d patches.\n\n') + % len(patches)) + + name = None + for i, p in enumerate(patches): + jumbo.extend(p) + if patchnames: + name = patchnames[i] + msg = makepatch(ui, repo, p, opts, _charsets, i + 1, + len(patches), name) + msgs.append(msg) + + if introneeded(opts, len(patches)): + tlen = len(str(len(patches))) + + flag = ' '.join(opts.get('flag')) + if flag: + subj = '[PATCH %0*d of %d %s]' % (tlen, 0, len(patches), flag) + else: + subj = '[PATCH %0*d of %d]' % (tlen, 0, len(patches)) + subj += ' ' + (opts.get('subject') or + prompt(ui, 'Subject: ', rest=subj)) + + body = '' + ds = patch.diffstat(jumbo) + if ds and opts.get('diffstat'): + body = '\n' + ds + + body = getdescription(body, sender) + msg = mail.mimeencode(ui, body, _charsets, opts.get('test')) + msg['Subject'] = mail.headencode(ui, subj, _charsets, + opts.get('test')) + + msgs.insert(0, (msg, subj, ds)) + return msgs + + def getbundlemsgs(bundle): + subj = (opts.get('subject') + or prompt(ui, 'Subject:', 'A bundle for your repository')) + + body = getdescription('', sender) + msg = email.MIMEMultipart.MIMEMultipart() + if body: + msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test'))) + datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle') + datapart.set_payload(bundle) + bundlename = '%s.hg' % opts.get('bundlename', 'bundle') + datapart.add_header('Content-Disposition', 
'attachment', + filename=bundlename) + email.Encoders.encode_base64(datapart) + msg.attach(datapart) + msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test')) + return [(msg, subj, None)] + + sender = (opts.get('from') or ui.config('email', 'from') or + ui.config('patchbomb', 'from') or + prompt(ui, 'From', ui.username())) + + if patches: + msgs = getpatchmsgs(patches, opts.get('patchnames')) + elif bundle: + msgs = getbundlemsgs(getbundle(dest)) + else: + msgs = getpatchmsgs(list(getpatches(revs))) + + showaddrs = [] + + def getaddrs(opt, prpt=None, default=None): + addrs = opts.get(opt.replace('-', '_')) + if opt != 'reply-to': + showaddr = '%s:' % opt.capitalize() + else: + showaddr = 'Reply-To:' + + if addrs: + showaddrs.append('%s %s' % (showaddr, ', '.join(addrs))) + return mail.addrlistencode(ui, addrs, _charsets, opts.get('test')) + + addrs = ui.config('email', opt) or ui.config('patchbomb', opt) or '' + if not addrs and prpt: + addrs = prompt(ui, prpt, default) + + if addrs: + showaddrs.append('%s %s' % (showaddr, addrs)) + return mail.addrlistencode(ui, [addrs], _charsets, opts.get('test')) + + to = getaddrs('to', 'To') + cc = getaddrs('cc', 'Cc', '') + bcc = getaddrs('bcc') + replyto = getaddrs('reply-to') + + if opts.get('diffstat') or opts.get('confirm'): + ui.write(_('\nFinal summary:\n\n')) + ui.write('From: %s\n' % sender) + for addr in showaddrs: + ui.write('%s\n' % addr) + for m, subj, ds in msgs: + ui.write('Subject: %s\n' % subj) + if ds: + ui.write(ds) + ui.write('\n') + if ui.promptchoice(_('are you sure you want to send (yn)?'), + (_('&Yes'), _('&No'))): + raise util.Abort(_('patchbomb canceled')) + + ui.write('\n') + + parent = opts.get('in_reply_to') or None + # angle brackets may be omitted, they're not semantically part of the msg-id + if parent is not None: + if not parent.startswith('<'): + parent = '<' + parent + if not parent.endswith('>'): + parent += '>' + + first = True + + sender_addr = 
email.Utils.parseaddr(sender)[1] + sender = mail.addressencode(ui, sender, _charsets, opts.get('test')) + sendmail = None + for i, (m, subj, ds) in enumerate(msgs): + try: + m['Message-Id'] = genmsgid(m['X-Mercurial-Node']) + except TypeError: + m['Message-Id'] = genmsgid('patchbomb') + if parent: + m['In-Reply-To'] = parent + m['References'] = parent + if first: + parent = m['Message-Id'] + first = False + + m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version() + m['Date'] = email.Utils.formatdate(start_time[0], localtime=True) + + start_time = (start_time[0] + 1, start_time[1]) + m['From'] = sender + m['To'] = ', '.join(to) + if cc: + m['Cc'] = ', '.join(cc) + if bcc: + m['Bcc'] = ', '.join(bcc) + if replyto: + m['Reply-To'] = ', '.join(replyto) + if opts.get('test'): + ui.status(_('Displaying '), subj, ' ...\n') + ui.flush() + if 'PAGER' in os.environ and not ui.plain(): + fp = util.popen(os.environ['PAGER'], 'w') + else: + fp = ui + generator = email.Generator.Generator(fp, mangle_from_=False) + try: + generator.flatten(m, 0) + fp.write('\n') + except IOError, inst: + if inst.errno != errno.EPIPE: + raise + if fp is not ui: + fp.close() + elif mbox: + ui.status(_('Writing '), subj, ' ...\n') + ui.progress(_('writing'), i, item=subj, total=len(msgs)) + fp = open(mbox, 'In-Reply-To' in m and 'ab+' or 'wb+') + generator = email.Generator.Generator(fp, mangle_from_=True) + # Should be time.asctime(), but Windows prints 2-characters day + # of month instead of one. Make them print the same thing. 
+ date = time.strftime('%a %b %d %H:%M:%S %Y', + time.localtime(start_time[0])) + fp.write('From %s %s\n' % (sender_addr, date)) + generator.flatten(m, 0) + fp.write('\n\n') + fp.close() + else: + if not sendmail: + sendmail = mail.connect(ui) + ui.status(_('Sending '), subj, ' ...\n') + ui.progress(_('sending'), i, item=subj, total=len(msgs)) + # Exim does not remove the Bcc field + del m['Bcc'] + fp = cStringIO.StringIO() + generator = email.Generator.Generator(fp, mangle_from_=False) + generator.flatten(m, 0) + sendmail(sender, to + bcc + cc, fp.getvalue()) + + ui.progress(_('writing'), None) + ui.progress(_('sending'), None) + +emailopts = [ + ('a', 'attach', None, _('send patches as attachments')), + ('i', 'inline', None, _('send patches as inline attachments')), + ('', 'bcc', [], _('email addresses of blind carbon copy recipients')), + ('c', 'cc', [], _('email addresses of copy recipients')), + ('', 'confirm', None, _('ask for confirmation before sending')), + ('d', 'diffstat', None, _('add diffstat output to messages')), + ('', 'date', '', _('use the given date as the sending date')), + ('', 'desc', '', _('use the given file as the series description')), + ('f', 'from', '', _('email address of sender')), + ('n', 'test', None, _('print messages that would be sent')), + ('m', 'mbox', '', + _('write messages to mbox file instead of sending them')), + ('', 'reply-to', [], _('email addresses replies should be sent to')), + ('s', 'subject', '', + _('subject of first message (intro or single patch)')), + ('', 'in-reply-to', '', + _('message identifier to reply to')), + ('', 'flag', [], _('flags to add in subject prefixes')), + ('t', 'to', [], _('email addresses of recipients')), + ] + + +cmdtable = { + "email": + (patchbomb, + [('g', 'git', None, _('use git extended diff format')), + ('', 'plain', None, _('omit hg patch header')), + ('o', 'outgoing', None, + _('send changes not found in the target repository')), + ('b', 'bundle', None, + _('send changes not in 
target as a binary bundle')), + ('', 'bundlename', 'bundle', + _('name of the bundle attachment file'), _('NAME')), + ('r', 'rev', [], + _('a revision to send'), _('REV')), + ('', 'force', None, + _('run even when remote repository is unrelated ' + '(with -b/--bundle)')), + ('', 'base', [], + _('a base changeset to specify instead of a destination ' + '(with -b/--bundle)'), + _('REV')), + ('', 'intro', None, + _('send an introduction email for a single patch')), + ] + emailopts + commands.remoteopts, + _('hg email [OPTION]... [DEST]...')) +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyo Binary files differnew file mode 100644 index 0000000..57d9f7f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py new file mode 100644 index 0000000..e25f4f8 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py @@ -0,0 +1,206 @@ +# progress.py show progress bars for some actions +# +# Copyright (C) 2010 Augie Fackler <durin42@gmail.com> +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 2 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. +# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
+ +"""show progress bars for some actions + +This extension uses the progress information logged by hg commands +to draw progress bars that are as informative as possible. Some progress +bars only offer indeterminate information, while others have a definite +end point. + +The following settings are available:: + + [progress] + delay = 3 # number of seconds (float) before showing the progress bar + refresh = 0.1 # time in seconds between refreshes of the progress bar + format = topic bar number # format of the progress bar + width = <none> # if set, the maximum width of the progress information + # (that is, min(width, term width) will be used) + clear-complete = True # clear the progress bar after it's done + disable = False # if true, don't show a progress bar + assume-tty = False # if true, ALWAYS show a progress bar, unless + # disable is given + +Valid entries for the format field are topic, bar, number, unit, and +item. item defaults to the last 20 characters of the item, but this +can be changed by adding either ``-<num>`` which would take the last +num characters, or ``+<num>`` for the first num characters. 
+""" + +import sys +import time + +from mercurial import util + +def spacejoin(*args): + return ' '.join(s for s in args if s) + +def shouldprint(ui): + return (getattr(sys.stderr, 'isatty', None) and + (sys.stderr.isatty() or ui.configbool('progress', 'assume-tty'))) + +class progbar(object): + def __init__(self, ui): + self.ui = ui + self.resetstate() + + def resetstate(self): + self.topics = [] + self.printed = False + self.lastprint = time.time() + float(self.ui.config( + 'progress', 'delay', default=3)) + self.indetcount = 0 + self.refresh = float(self.ui.config( + 'progress', 'refresh', default=0.1)) + self.order = self.ui.configlist( + 'progress', 'format', + default=['topic', 'bar', 'number']) + + def show(self, topic, pos, item, unit, total): + if not shouldprint(self.ui): + return + termwidth = self.width() + self.printed = True + head = '' + needprogress = False + tail = '' + for indicator in self.order: + add = '' + if indicator == 'topic': + add = topic + elif indicator == 'number': + if total: + add = ('% ' + str(len(str(total))) + + 's/%s') % (pos, total) + else: + add = str(pos) + elif indicator.startswith('item') and item: + slice = 'end' + if '-' in indicator: + wid = int(indicator.split('-')[1]) + elif '+' in indicator: + slice = 'beginning' + wid = int(indicator.split('+')[1]) + else: + wid = 20 + if slice == 'end': + add = item[-wid:] + else: + add = item[:wid] + add += (wid - len(add)) * ' ' + elif indicator == 'bar': + add = '' + needprogress = True + elif indicator == 'unit' and unit: + add = unit + if not needprogress: + head = spacejoin(head, add) + else: + tail = spacejoin(add, tail) + if needprogress: + used = 0 + if head: + used += len(head) + 1 + if tail: + used += len(tail) + 1 + progwidth = termwidth - used - 3 + if total and pos <= total: + amt = pos * progwidth // total + bar = '=' * (amt - 1) + if amt > 0: + bar += '>' + bar += ' ' * (progwidth - amt) + else: + progwidth -= 3 + self.indetcount += 1 + # mod the count by twice the 
width so we can make the + # cursor bounce between the right and left sides + amt = self.indetcount % (2 * progwidth) + amt -= progwidth + bar = (' ' * int(progwidth - abs(amt)) + '<=>' + + ' ' * int(abs(amt))) + prog = ''.join(('[', bar , ']')) + out = spacejoin(head, prog, tail) + else: + out = spacejoin(head, tail) + sys.stderr.write('\r' + out[:termwidth]) + sys.stderr.flush() + + def clear(self): + if not shouldprint(self.ui): + return + sys.stderr.write('\r%s\r' % (' ' * self.width())) + + def complete(self): + if not shouldprint(self.ui): + return + if self.ui.configbool('progress', 'clear-complete', default=True): + self.clear() + else: + sys.stderr.write('\n') + sys.stderr.flush() + + def width(self): + tw = self.ui.termwidth() + return min(int(self.ui.config('progress', 'width', default=tw)), tw) + + def progress(self, topic, pos, item='', unit='', total=None): + if pos is None: + if self.topics and self.topics[-1] == topic and self.printed: + self.complete() + self.resetstate() + else: + if topic not in self.topics: + self.topics.append(topic) + now = time.time() + if (now - self.lastprint >= self.refresh + and topic == self.topics[-1]): + self.lastprint = now + self.show(topic, pos, item, unit, total) + +def uisetup(ui): + class progressui(ui.__class__): + _progbar = None + + def progress(self, *args, **opts): + self._progbar.progress(*args, **opts) + return super(progressui, self).progress(*args, **opts) + + def write(self, *args, **opts): + if self._progbar.printed: + self._progbar.clear() + return super(progressui, self).write(*args, **opts) + + def write_err(self, *args, **opts): + if self._progbar.printed: + self._progbar.clear() + return super(progressui, self).write_err(*args, **opts) + + # Apps that derive a class from ui.ui() can use + # setconfig('progress', 'disable', 'True') to disable this extension + if ui.configbool('progress', 'disable'): + return + if shouldprint(ui) and not ui.debugflag and not ui.quiet: + ui.__class__ = progressui + # 
we instantiate one globally shared progress bar to avoid + # competing progress bars when multiple UI objects get created + if not progressui._progbar: + progressui._progbar = progbar(ui) + +def reposetup(ui, repo): + uisetup(repo.ui) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyo Binary files differnew file mode 100644 index 0000000..e0a83d4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py new file mode 100644 index 0000000..4a99ec3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py @@ -0,0 +1,111 @@ +# Copyright (C) 2006 - Marco Barisione <marco@barisione.org> +# +# This is a small extension for Mercurial (http://mercurial.selenic.com/) +# that removes files not known to mercurial +# +# This program was inspired by the "cvspurge" script contained in CVS +# utilities (http://www.red-bean.com/cvsutils/). +# +# For help on the usage of "hg purge" use: +# hg help purge +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
+ +'''command to delete untracked files from the working directory''' + +from mercurial import util, commands, cmdutil +from mercurial.i18n import _ +import os, stat + +def purge(ui, repo, *dirs, **opts): + '''removes files not tracked by Mercurial + + Delete files not known to Mercurial. This is useful to test local + and uncommitted changes in an otherwise-clean source tree. + + This means that purge will delete: + + - Unknown files: files marked with "?" by :hg:`status` + - Empty directories: in fact Mercurial ignores directories unless + they contain files under source control management + + But it will leave untouched: + + - Modified and unmodified tracked files + - Ignored files (unless --all is specified) + - New files added to the repository (with :hg:`add`) + + If directories are given on the command line, only files in these + directories are considered. + + Be careful with purge, as you could irreversibly delete some files + you forgot to add to the repository. If you only want to print the + list of files that this program would delete, use the --print + option. 
+ ''' + act = not opts['print'] + eol = '\n' + if opts['print0']: + eol = '\0' + act = False # --print0 implies --print + + def remove(remove_func, name): + if act: + try: + remove_func(repo.wjoin(name)) + except OSError: + m = _('%s cannot be removed') % name + if opts['abort_on_err']: + raise util.Abort(m) + ui.warn(_('warning: %s\n') % m) + else: + ui.write('%s%s' % (name, eol)) + + def removefile(path): + try: + os.remove(path) + except OSError: + # read-only files cannot be unlinked under Windows + s = os.stat(path) + if (s.st_mode & stat.S_IWRITE) != 0: + raise + os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE) + os.remove(path) + + directories = [] + match = cmdutil.match(repo, dirs, opts) + match.dir = directories.append + status = repo.status(match=match, ignored=opts['all'], unknown=True) + + for f in sorted(status[4] + status[5]): + ui.note(_('Removing file %s\n') % f) + remove(removefile, f) + + for f in sorted(directories, reverse=True): + if match(f) and not os.listdir(repo.wjoin(f)): + ui.note(_('Removing directory %s\n') % f) + remove(os.rmdir, f) + +cmdtable = { + 'purge|clean': + (purge, + [('a', 'abort-on-err', None, _('abort if an error occurs')), + ('', 'all', None, _('purge ignored files too')), + ('p', 'print', None, _('print filenames instead of deleting them')), + ('0', 'print0', None, _('end filenames with NUL, for use with xargs' + ' (implies -p/--print)')), + ] + commands.walkopts, + _('hg purge [OPTION]... 
[DIR]...')) +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyo Binary files differnew file mode 100644 index 0000000..69287ba --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py new file mode 100644 index 0000000..7a43541 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py @@ -0,0 +1,577 @@ +# rebase.py - rebasing feature for mercurial +# +# Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to move sets of revisions to a different ancestor + +This extension lets you rebase changesets in an existing Mercurial +repository. + +For more information: +http://mercurial.selenic.com/wiki/RebaseExtension +''' + +from mercurial import hg, util, repair, merge, cmdutil, commands +from mercurial import extensions, ancestor, copies, patch +from mercurial.commands import templateopts +from mercurial.node import nullrev +from mercurial.lock import release +from mercurial.i18n import _ +import os, errno + +nullmerge = -2 + +def rebase(ui, repo, **opts): + """move changeset (and descendants) to a different branch + + Rebase uses repeated merging to graft changesets from one part of + history (the source) onto another (the destination). This can be + useful for linearizing *local* changes relative to a master + development tree. + + You should not rebase changesets that have already been shared + with others. Doing so will force everybody else to perform the + same rebase or they will end up with duplicated changesets after + pulling in your rebased changesets. 
+ + If you don't specify a destination changeset (``-d/--dest``), + rebase uses the tipmost head of the current named branch as the + destination. (The destination changeset is not modified by + rebasing, but new changesets are added as its descendants.) + + You can specify which changesets to rebase in two ways: as a + "source" changeset or as a "base" changeset. Both are shorthand + for a topologically related set of changesets (the "source + branch"). If you specify source (``-s/--source``), rebase will + rebase that changeset and all of its descendants onto dest. If you + specify base (``-b/--base``), rebase will select ancestors of base + back to but not including the common ancestor with dest. Thus, + ``-b`` is less precise but more convenient than ``-s``: you can + specify any changeset in the source branch, and rebase will select + the whole branch. If you specify neither ``-s`` nor ``-b``, rebase + uses the parent of the working directory as the base. + + By default, rebase recreates the changesets in the source branch + as descendants of dest and then destroys the originals. Use + ``--keep`` to preserve the original source changesets. Some + changesets in the source branch (e.g. merges from the destination + branch) may be dropped if they no longer contribute any change. + + One result of the rules for selecting the destination changeset + and source branch is that, unlike ``merge``, rebase will do + nothing if you are at the latest (tipmost) head of a named branch + with two heads. You need to explicitly specify source and/or + destination (or ``update`` to the other head, if it's the head of + the intended source branch). + + If a rebase is interrupted to manually resolve a merge, it can be + continued with --continue/-c or aborted with --abort/-a. + + Returns 0 on success, 1 if nothing to rebase. 
+ """ + originalwd = target = None + external = nullrev + state = {} + skipped = set() + targetancestors = set() + + lock = wlock = None + try: + lock = repo.lock() + wlock = repo.wlock() + + # Validate input and define rebasing points + destf = opts.get('dest', None) + srcf = opts.get('source', None) + basef = opts.get('base', None) + contf = opts.get('continue') + abortf = opts.get('abort') + collapsef = opts.get('collapse', False) + extrafn = opts.get('extrafn') + keepf = opts.get('keep', False) + keepbranchesf = opts.get('keepbranches', False) + detachf = opts.get('detach', False) + # keepopen is not meant for use on the command line, but by + # other extensions + keepopen = opts.get('keepopen', False) + + if contf or abortf: + if contf and abortf: + raise util.Abort(_('cannot use both abort and continue')) + if collapsef: + raise util.Abort( + _('cannot use collapse with continue or abort')) + if detachf: + raise util.Abort(_('cannot use detach with continue or abort')) + if srcf or basef or destf: + raise util.Abort( + _('abort and continue do not allow specifying revisions')) + + (originalwd, target, state, skipped, collapsef, keepf, + keepbranchesf, external) = restorestatus(repo) + if abortf: + return abort(repo, originalwd, target, state) + else: + if srcf and basef: + raise util.Abort(_('cannot specify both a ' + 'revision and a base')) + if detachf: + if not srcf: + raise util.Abort( + _('detach requires a revision to be specified')) + if basef: + raise util.Abort(_('cannot specify a base with detach')) + + cmdutil.bail_if_changed(repo) + result = buildstate(repo, destf, srcf, basef, detachf) + if not result: + # Empty state built, nothing to rebase + ui.status(_('nothing to rebase\n')) + return 1 + else: + originalwd, target, state = result + if collapsef: + targetancestors = set(repo.changelog.ancestors(target)) + external = checkexternal(repo, state, targetancestors) + + if keepbranchesf: + if extrafn: + raise util.Abort(_('cannot use both 
keepbranches and extrafn')) + def extrafn(ctx, extra): + extra['branch'] = ctx.branch() + + # Rebase + if not targetancestors: + targetancestors = set(repo.changelog.ancestors(target)) + targetancestors.add(target) + + sortedstate = sorted(state) + total = len(sortedstate) + pos = 0 + for rev in sortedstate: + pos += 1 + if state[rev] == -1: + ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, repo[rev])), + _('changesets'), total) + storestatus(repo, originalwd, target, state, collapsef, keepf, + keepbranchesf, external) + p1, p2 = defineparents(repo, rev, target, state, + targetancestors) + if len(repo.parents()) == 2: + repo.ui.debug('resuming interrupted rebase\n') + else: + stats = rebasenode(repo, rev, p1, p2, state) + if stats and stats[3] > 0: + raise util.Abort(_('unresolved conflicts (see hg ' + 'resolve, then hg rebase --continue)')) + updatedirstate(repo, rev, target, p2) + if not collapsef: + newrev = concludenode(repo, rev, p1, p2, extrafn=extrafn) + else: + # Skip commit if we are collapsing + repo.dirstate.setparents(repo[p1].node()) + newrev = None + # Update the state + if newrev is not None: + state[rev] = repo[newrev].rev() + else: + if not collapsef: + ui.note(_('no changes, revision %d skipped\n') % rev) + ui.debug('next revision set to %s\n' % p1) + skipped.add(rev) + state[rev] = p1 + + ui.progress(_('rebasing'), None) + ui.note(_('rebase merging completed\n')) + + if collapsef and not keepopen: + p1, p2 = defineparents(repo, min(state), target, + state, targetancestors) + commitmsg = 'Collapsed revision' + for rebased in state: + if rebased not in skipped and state[rebased] != nullmerge: + commitmsg += '\n* %s' % repo[rebased].description() + commitmsg = ui.edit(commitmsg, repo.ui.username()) + newrev = concludenode(repo, rev, p1, external, commitmsg=commitmsg, + extrafn=extrafn) + + if 'qtip' in repo.tags(): + updatemq(repo, state, skipped, **opts) + + if not keepf: + # Remove no more useful revisions + rebased = [rev for rev in state if 
state[rev] != nullmerge] + if rebased: + if set(repo.changelog.descendants(min(rebased))) - set(state): + ui.warn(_("warning: new changesets detected " + "on source branch, not stripping\n")) + else: + # backup the old csets by default + repair.strip(ui, repo, repo[min(rebased)].node(), "all") + + clearstatus(repo) + ui.note(_("rebase completed\n")) + if os.path.exists(repo.sjoin('undo')): + util.unlink(repo.sjoin('undo')) + if skipped: + ui.note(_("%d revisions have been skipped\n") % len(skipped)) + finally: + release(lock, wlock) + +def rebasemerge(repo, rev, first=False): + 'return the correct ancestor' + oldancestor = ancestor.ancestor + + def newancestor(a, b, pfunc): + if b == rev: + return repo[rev].parents()[0].rev() + return oldancestor(a, b, pfunc) + + if not first: + ancestor.ancestor = newancestor + else: + repo.ui.debug("first revision, do not change ancestor\n") + try: + stats = merge.update(repo, rev, True, True, False) + return stats + finally: + ancestor.ancestor = oldancestor + +def checkexternal(repo, state, targetancestors): + """Check whether one or more external revisions need to be taken in + consideration. In the latter case, abort. 
+ """ + external = nullrev + source = min(state) + for rev in state: + if rev == source: + continue + # Check externals and fail if there are more than one + for p in repo[rev].parents(): + if (p.rev() not in state + and p.rev() not in targetancestors): + if external != nullrev: + raise util.Abort(_('unable to collapse, there is more ' + 'than one external parent')) + external = p.rev() + return external + +def updatedirstate(repo, rev, p1, p2): + """Keep track of renamed files in the revision that is going to be rebased + """ + # Here we simulate the copies and renames in the source changeset + cop, diver = copies.copies(repo, repo[rev], repo[p1], repo[p2], True) + m1 = repo[rev].manifest() + m2 = repo[p1].manifest() + for k, v in cop.iteritems(): + if k in m1: + if v in m1 or v in m2: + repo.dirstate.copy(v, k) + if v in m2 and v not in m1: + repo.dirstate.remove(v) + +def concludenode(repo, rev, p1, p2, commitmsg=None, extrafn=None): + 'Commit the changes and store useful information in extra' + try: + repo.dirstate.setparents(repo[p1].node(), repo[p2].node()) + ctx = repo[rev] + if commitmsg is None: + commitmsg = ctx.description() + extra = {'rebase_source': ctx.hex()} + if extrafn: + extrafn(ctx, extra) + # Commit might fail if unresolved files exist + newrev = repo.commit(text=commitmsg, user=ctx.user(), + date=ctx.date(), extra=extra) + repo.dirstate.setbranch(repo[newrev].branch()) + return newrev + except util.Abort: + # Invalidate the previous setparents + repo.dirstate.invalidate() + raise + +def rebasenode(repo, rev, p1, p2, state): + 'Rebase a single revision' + # Merge phase + # Update to target and merge it with local + if repo['.'].rev() != repo[p1].rev(): + repo.ui.debug(" update to %d:%s\n" % (repo[p1].rev(), repo[p1])) + merge.update(repo, p1, False, True, False) + else: + repo.ui.debug(" already in target\n") + repo.dirstate.write() + repo.ui.debug(" merge against %d:%s\n" % (repo[rev].rev(), repo[rev])) + first = repo[rev].rev() == 
repo[min(state)].rev() + stats = rebasemerge(repo, rev, first) + return stats + +def defineparents(repo, rev, target, state, targetancestors): + 'Return the new parent relationship of the revision that will be rebased' + parents = repo[rev].parents() + p1 = p2 = nullrev + + P1n = parents[0].rev() + if P1n in targetancestors: + p1 = target + elif P1n in state: + if state[P1n] == nullmerge: + p1 = target + else: + p1 = state[P1n] + else: # P1n external + p1 = target + p2 = P1n + + if len(parents) == 2 and parents[1].rev() not in targetancestors: + P2n = parents[1].rev() + # interesting second parent + if P2n in state: + if p1 == target: # P1n in targetancestors or external + p1 = state[P2n] + else: + p2 = state[P2n] + else: # P2n external + if p2 != nullrev: # P1n external too => rev is a merged revision + raise util.Abort(_('cannot use revision %d as base, result ' + 'would have 3 parents') % rev) + p2 = P2n + repo.ui.debug(" future parents are %d and %d\n" % + (repo[p1].rev(), repo[p2].rev())) + return p1, p2 + +def isagitpatch(repo, patchname): + 'Return true if the given patch is in git format' + mqpatch = os.path.join(repo.mq.path, patchname) + for line in patch.linereader(file(mqpatch, 'rb')): + if line.startswith('diff --git'): + return True + return False + +def updatemq(repo, state, skipped, **opts): + 'Update rebased mq patches - finalize and then import them' + mqrebase = {} + mq = repo.mq + for p in mq.applied: + rev = repo[p.node].rev() + if rev in state: + repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' % + (rev, p.name)) + mqrebase[rev] = (p.name, isagitpatch(repo, p.name)) + + if mqrebase: + mq.finish(repo, mqrebase.keys()) + + # We must start import from the newest revision + for rev in sorted(mqrebase, reverse=True): + if rev not in skipped: + name, isgit = mqrebase[rev] + repo.ui.debug('import mq patch %d (%s)\n' % (state[rev], name)) + mq.qimport(repo, (), patchname=name, git=isgit, + rev=[str(state[rev])]) + mq.save_dirty() + +def 
storestatus(repo, originalwd, target, state, collapse, keep, keepbranches, + external): + 'Store the current status to allow recovery' + f = repo.opener("rebasestate", "w") + f.write(repo[originalwd].hex() + '\n') + f.write(repo[target].hex() + '\n') + f.write(repo[external].hex() + '\n') + f.write('%d\n' % int(collapse)) + f.write('%d\n' % int(keep)) + f.write('%d\n' % int(keepbranches)) + for d, v in state.iteritems(): + oldrev = repo[d].hex() + newrev = repo[v].hex() + f.write("%s:%s\n" % (oldrev, newrev)) + f.close() + repo.ui.debug('rebase status stored\n') + +def clearstatus(repo): + 'Remove the status files' + if os.path.exists(repo.join("rebasestate")): + util.unlink(repo.join("rebasestate")) + +def restorestatus(repo): + 'Restore a previously stored status' + try: + target = None + collapse = False + external = nullrev + state = {} + f = repo.opener("rebasestate") + for i, l in enumerate(f.read().splitlines()): + if i == 0: + originalwd = repo[l].rev() + elif i == 1: + target = repo[l].rev() + elif i == 2: + external = repo[l].rev() + elif i == 3: + collapse = bool(int(l)) + elif i == 4: + keep = bool(int(l)) + elif i == 5: + keepbranches = bool(int(l)) + else: + oldrev, newrev = l.split(':') + state[repo[oldrev].rev()] = repo[newrev].rev() + skipped = set() + # recompute the set of skipped revs + if not collapse: + seen = set([target]) + for old, new in sorted(state.items()): + if new != nullrev and new in seen: + skipped.add(old) + seen.add(new) + repo.ui.debug('computed skipped revs: %s\n' % skipped) + repo.ui.debug('rebase status resumed\n') + return (originalwd, target, state, skipped, + collapse, keep, keepbranches, external) + except IOError, err: + if err.errno != errno.ENOENT: + raise + raise util.Abort(_('no rebase in progress')) + +def abort(repo, originalwd, target, state): + 'Restore the repository to its original state' + if set(repo.changelog.descendants(target)) - set(state.values()): + repo.ui.warn(_("warning: new changesets detected on 
target branch, " + "can't abort\n")) + return -1 + else: + # Strip from the first rebased revision + merge.update(repo, repo[originalwd].rev(), False, True, False) + rebased = filter(lambda x: x > -1 and x != target, state.values()) + if rebased: + strippoint = min(rebased) + # no backup of rebased cset versions needed + repair.strip(repo.ui, repo, repo[strippoint].node()) + clearstatus(repo) + repo.ui.warn(_('rebase aborted\n')) + return 0 + +def buildstate(repo, dest, src, base, detach): + 'Define which revisions are going to be rebased and where' + targetancestors = set() + detachset = set() + + if not dest: + # Destination defaults to the latest revision in the current branch + branch = repo[None].branch() + dest = repo[branch].rev() + else: + dest = repo[dest].rev() + + # This check isn't strictly necessary, since mq detects commits over an + # applied patch. But it prevents messing up the working directory when + # a partially completed rebase is blocked by mq. + if 'qtip' in repo.tags() and (repo[dest].node() in + [s.node for s in repo.mq.applied]): + raise util.Abort(_('cannot rebase onto an applied mq patch')) + + if src: + commonbase = repo[src].ancestor(repo[dest]) + if commonbase == repo[src]: + raise util.Abort(_('source is ancestor of destination')) + if commonbase == repo[dest]: + raise util.Abort(_('source is descendant of destination')) + source = repo[src].rev() + if detach: + # We need to keep track of source's ancestors up to the common base + srcancestors = set(repo.changelog.ancestors(source)) + baseancestors = set(repo.changelog.ancestors(commonbase.rev())) + detachset = srcancestors - baseancestors + detachset.discard(commonbase.rev()) + else: + if base: + cwd = repo[base].rev() + else: + cwd = repo['.'].rev() + + if cwd == dest: + repo.ui.debug('source and destination are the same\n') + return None + + targetancestors = set(repo.changelog.ancestors(dest)) + if cwd in targetancestors: + repo.ui.debug('source is ancestor of destination\n') + 
return None + + cwdancestors = set(repo.changelog.ancestors(cwd)) + if dest in cwdancestors: + repo.ui.debug('source is descendant of destination\n') + return None + + cwdancestors.add(cwd) + rebasingbranch = cwdancestors - targetancestors + source = min(rebasingbranch) + + repo.ui.debug('rebase onto %d starting from %d\n' % (dest, source)) + state = dict.fromkeys(repo.changelog.descendants(source), nullrev) + state.update(dict.fromkeys(detachset, nullmerge)) + state[source] = nullrev + return repo['.'].rev(), repo[dest].rev(), state + +def pullrebase(orig, ui, repo, *args, **opts): + 'Call rebase after pull if the latter has been invoked with --rebase' + if opts.get('rebase'): + if opts.get('update'): + del opts['update'] + ui.debug('--update and --rebase are not compatible, ignoring ' + 'the update flag\n') + + cmdutil.bail_if_changed(repo) + revsprepull = len(repo) + origpostincoming = commands.postincoming + def _dummy(*args, **kwargs): + pass + commands.postincoming = _dummy + try: + orig(ui, repo, *args, **opts) + finally: + commands.postincoming = origpostincoming + revspostpull = len(repo) + if revspostpull > revsprepull: + rebase(ui, repo, **opts) + branch = repo[None].branch() + dest = repo[branch].rev() + if dest != repo['.'].rev(): + # there was nothing to rebase we force an update + hg.update(repo, dest) + else: + orig(ui, repo, *args, **opts) + +def uisetup(ui): + 'Replace pull with a decorator to provide --rebase option' + entry = extensions.wrapcommand(commands.table, 'pull', pullrebase) + entry[1].append(('', 'rebase', None, + _("rebase working directory to branch head")) +) + +cmdtable = { +"rebase": + (rebase, + [ + ('s', 'source', '', + _('rebase from the specified changeset'), _('REV')), + ('b', 'base', '', + _('rebase from the base of the specified changeset ' + '(up to greatest common ancestor of base and dest)'), + _('REV')), + ('d', 'dest', '', + _('rebase onto the specified changeset'), _('REV')), + ('', 'collapse', False, _('collapse the 
rebased changesets')), + ('', 'keep', False, _('keep original changesets')), + ('', 'keepbranches', False, _('keep original branch names')), + ('', 'detach', False, _('force detaching of source from its original ' + 'branch')), + ('c', 'continue', False, _('continue an interrupted rebase')), + ('a', 'abort', False, _('abort an interrupted rebase'))] + + templateopts, + _('hg rebase [-s REV | -b REV] [-d REV] [options]\n' + 'hg rebase {-a|-c}')) +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyo Binary files differnew file mode 100644 index 0000000..c637b8f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py new file mode 100644 index 0000000..c515c26 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py @@ -0,0 +1,569 @@ +# record.py +# +# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +'''commands to interactively select changes for commit/qrefresh''' + +from mercurial.i18n import gettext, _ +from mercurial import cmdutil, commands, extensions, hg, mdiff, patch +from mercurial import util +import copy, cStringIO, errno, os, re, tempfile + +lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)') + +def scanpatch(fp): + """like patch.iterhunks, but yield different events + + - ('file', [header_lines + fromfile + tofile]) + - ('context', [context_lines]) + - ('hunk', [hunk_lines]) + - ('range', (-start,len, +start,len, diffp)) + """ + lr = patch.linereader(fp) + + def scanwhile(first, p): + """scan lr while predicate holds""" + lines = [first] + while True: + line = lr.readline() + if not line: + break + if p(line): + lines.append(line) + else: + lr.push(line) + break + return lines + + while True: + line = lr.readline() + if not line: + break + if line.startswith('diff --git a/'): + def notheader(line): + s = line.split(None, 1) + return not s or s[0] not in ('---', 'diff') + header = scanwhile(line, notheader) + fromfile = lr.readline() + if fromfile.startswith('---'): + tofile = lr.readline() + header += [fromfile, tofile] + else: + lr.push(fromfile) + yield 'file', header + elif line[0] == ' ': + yield 'context', scanwhile(line, lambda l: l[0] in ' \\') + elif line[0] in '-+': + yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\') + else: + m = lines_re.match(line) + if m: + yield 'range', m.groups() + else: + raise patch.PatchError('unknown patch content: %r' % line) + +class header(object): + """patch header + + XXX shoudn't we move this to mercurial/patch.py ? 
+ """ + diff_re = re.compile('diff --git a/(.*) b/(.*)$') + allhunks_re = re.compile('(?:index|new file|deleted file) ') + pretty_re = re.compile('(?:new file|deleted file) ') + special_re = re.compile('(?:index|new|deleted|copy|rename) ') + + def __init__(self, header): + self.header = header + self.hunks = [] + + def binary(self): + for h in self.header: + if h.startswith('index '): + return True + + def pretty(self, fp): + for h in self.header: + if h.startswith('index '): + fp.write(_('this modifies a binary file (all or nothing)\n')) + break + if self.pretty_re.match(h): + fp.write(h) + if self.binary(): + fp.write(_('this is a binary file\n')) + break + if h.startswith('---'): + fp.write(_('%d hunks, %d lines changed\n') % + (len(self.hunks), + sum([max(h.added, h.removed) for h in self.hunks]))) + break + fp.write(h) + + def write(self, fp): + fp.write(''.join(self.header)) + + def allhunks(self): + for h in self.header: + if self.allhunks_re.match(h): + return True + + def files(self): + fromfile, tofile = self.diff_re.match(self.header[0]).groups() + if fromfile == tofile: + return [fromfile] + return [fromfile, tofile] + + def filename(self): + return self.files()[-1] + + def __repr__(self): + return '<header %s>' % (' '.join(map(repr, self.files()))) + + def special(self): + for h in self.header: + if self.special_re.match(h): + return True + +def countchanges(hunk): + """hunk -> (n+,n-)""" + add = len([h for h in hunk if h[0] == '+']) + rem = len([h for h in hunk if h[0] == '-']) + return add, rem + +class hunk(object): + """patch hunk + + XXX shouldn't we merge this with patch.hunk ? 
+ """ + maxcontext = 3 + + def __init__(self, header, fromline, toline, proc, before, hunk, after): + def trimcontext(number, lines): + delta = len(lines) - self.maxcontext + if False and delta > 0: + return number + delta, lines[:self.maxcontext] + return number, lines + + self.header = header + self.fromline, self.before = trimcontext(fromline, before) + self.toline, self.after = trimcontext(toline, after) + self.proc = proc + self.hunk = hunk + self.added, self.removed = countchanges(self.hunk) + + def write(self, fp): + delta = len(self.before) + len(self.after) + if self.after and self.after[-1] == '\\ No newline at end of file\n': + delta -= 1 + fromlen = delta + self.removed + tolen = delta + self.added + fp.write('@@ -%d,%d +%d,%d @@%s\n' % + (self.fromline, fromlen, self.toline, tolen, + self.proc and (' ' + self.proc))) + fp.write(''.join(self.before + self.hunk + self.after)) + + pretty = write + + def filename(self): + return self.header.filename() + + def __repr__(self): + return '<hunk %r@%d>' % (self.filename(), self.fromline) + +def parsepatch(fp): + """patch -> [] of hunks """ + class parser(object): + """patch parsing state machine""" + def __init__(self): + self.fromline = 0 + self.toline = 0 + self.proc = '' + self.header = None + self.context = [] + self.before = [] + self.hunk = [] + self.stream = [] + + def addrange(self, limits): + fromstart, fromend, tostart, toend, proc = limits + self.fromline = int(fromstart) + self.toline = int(tostart) + self.proc = proc + + def addcontext(self, context): + if self.hunk: + h = hunk(self.header, self.fromline, self.toline, self.proc, + self.before, self.hunk, context) + self.header.hunks.append(h) + self.stream.append(h) + self.fromline += len(self.before) + h.removed + self.toline += len(self.before) + h.added + self.before = [] + self.hunk = [] + self.proc = '' + self.context = context + + def addhunk(self, hunk): + if self.context: + self.before = self.context + self.context = [] + self.hunk = hunk + 
+ def newfile(self, hdr): + self.addcontext([]) + h = header(hdr) + self.stream.append(h) + self.header = h + + def finished(self): + self.addcontext([]) + return self.stream + + transitions = { + 'file': {'context': addcontext, + 'file': newfile, + 'hunk': addhunk, + 'range': addrange}, + 'context': {'file': newfile, + 'hunk': addhunk, + 'range': addrange}, + 'hunk': {'context': addcontext, + 'file': newfile, + 'range': addrange}, + 'range': {'context': addcontext, + 'hunk': addhunk}, + } + + p = parser() + + state = 'context' + for newstate, data in scanpatch(fp): + try: + p.transitions[state][newstate](p, data) + except KeyError: + raise patch.PatchError('unhandled transition: %s -> %s' % + (state, newstate)) + state = newstate + return p.finished() + +def filterpatch(ui, chunks): + """Interactively filter patch chunks into applied-only chunks""" + chunks = list(chunks) + chunks.reverse() + seen = set() + def consumefile(): + """fetch next portion from chunks until a 'header' is seen + NB: header == new-file mark + """ + consumed = [] + while chunks: + if isinstance(chunks[-1], header): + break + else: + consumed.append(chunks.pop()) + return consumed + + resp_all = [None] # this two are changed from inside prompt, + resp_file = [None] # so can't be usual variables + applied = {} # 'filename' -> [] of chunks + def prompt(query): + """prompt query, and process base inputs + + - y/n for the rest of file + - y/n for the rest + - ? (help) + - q (quit) + + Returns True/False and sets reps_all and resp_file as + appropriate. 
+ """ + if resp_all[0] is not None: + return resp_all[0] + if resp_file[0] is not None: + return resp_file[0] + while True: + resps = _('[Ynsfdaq?]') + choices = (_('&Yes, record this change'), + _('&No, skip this change'), + _('&Skip remaining changes to this file'), + _('Record remaining changes to this &file'), + _('&Done, skip remaining changes and files'), + _('Record &all changes to all remaining files'), + _('&Quit, recording no changes'), + _('&?')) + r = ui.promptchoice("%s %s" % (query, resps), choices) + ui.write("\n") + if r == 7: # ? + doc = gettext(record.__doc__) + c = doc.find('::') + 2 + for l in doc[c:].splitlines(): + if l.startswith(' '): + ui.write(l.strip(), '\n') + continue + elif r == 0: # yes + ret = True + elif r == 1: # no + ret = False + elif r == 2: # Skip + ret = resp_file[0] = False + elif r == 3: # file (Record remaining) + ret = resp_file[0] = True + elif r == 4: # done, skip remaining + ret = resp_all[0] = False + elif r == 5: # all + ret = resp_all[0] = True + elif r == 6: # quit + raise util.Abort(_('user quit')) + return ret + pos, total = 0, len(chunks) - 1 + while chunks: + pos = total - len(chunks) + 1 + chunk = chunks.pop() + if isinstance(chunk, header): + # new-file mark + resp_file = [None] + fixoffset = 0 + hdr = ''.join(chunk.header) + if hdr in seen: + consumefile() + continue + seen.add(hdr) + if resp_all[0] is None: + chunk.pretty(ui) + r = prompt(_('examine changes to %s?') % + _(' and ').join(map(repr, chunk.files()))) + if r: + applied[chunk.filename()] = [chunk] + if chunk.allhunks(): + applied[chunk.filename()] += consumefile() + else: + consumefile() + else: + # new hunk + if resp_file[0] is None and resp_all[0] is None: + chunk.pretty(ui) + r = total == 1 and prompt(_('record this change to %r?') % + chunk.filename()) \ + or prompt(_('record change %d/%d to %r?') % + (pos, total, chunk.filename())) + if r: + if fixoffset: + chunk = copy.copy(chunk) + chunk.toline += fixoffset + 
applied[chunk.filename()].append(chunk) + else: + fixoffset += chunk.removed - chunk.added + return sum([h for h in applied.itervalues() + if h[0].special() or len(h) > 1], []) + +def record(ui, repo, *pats, **opts): + '''interactively select changes to commit + + If a list of files is omitted, all changes reported by :hg:`status` + will be candidates for recording. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + You will be prompted for whether to record changes to each + modified file, and for files with multiple changes, for each + change to use. For each query, the following responses are + possible:: + + y - record this change + n - skip this change + + s - skip remaining changes to this file + f - record remaining changes to this file + + d - done, skip remaining changes and files + a - record all changes to all remaining files + q - quit, recording no changes + + ? - display help + + This command is not available when committing a merge.''' + + dorecord(ui, repo, commands.commit, *pats, **opts) + + +def qrecord(ui, repo, patch, *pats, **opts): + '''interactively record a new patch + + See :hg:`help qnew` & :hg:`help record` for more information and + usage. + ''' + + try: + mq = extensions.find('mq') + except KeyError: + raise util.Abort(_("'mq' extension not loaded")) + + def committomq(ui, repo, *pats, **opts): + mq.new(ui, repo, patch, *pats, **opts) + + opts = opts.copy() + opts['force'] = True # always 'qnew -f' + dorecord(ui, repo, committomq, *pats, **opts) + + +def dorecord(ui, repo, commitfunc, *pats, **opts): + if not ui.interactive(): + raise util.Abort(_('running non-interactively, use commit instead')) + + def recordfunc(ui, repo, message, match, opts): + """This is generic record driver. + + Its job is to interactively filter local changes, and accordingly + prepare working dir into a state, where the job can be delegated to + non-interactive commit command such as 'commit' or 'qrefresh'. 
+ + After the actual job is done by non-interactive command, working dir + state is restored to original. + + In the end we'll record interesting changes, and everything else will be + left in place, so the user can continue his work. + """ + + merge = len(repo[None].parents()) > 1 + if merge: + raise util.Abort(_('cannot partially commit a merge ' + '(use hg commit instead)')) + + changes = repo.status(match=match)[:3] + diffopts = mdiff.diffopts(git=True, nodates=True) + chunks = patch.diff(repo, changes=changes, opts=diffopts) + fp = cStringIO.StringIO() + fp.write(''.join(chunks)) + fp.seek(0) + + # 1. filter patch, so we have intending-to apply subset of it + chunks = filterpatch(ui, parsepatch(fp)) + del fp + + contenders = set() + for h in chunks: + try: + contenders.update(set(h.files())) + except AttributeError: + pass + + changed = changes[0] + changes[1] + changes[2] + newfiles = [f for f in changed if f in contenders] + if not newfiles: + ui.status(_('no changes to record\n')) + return 0 + + modified = set(changes[0]) + + # 2. backup changed files, so we can restore them in the end + backups = {} + backupdir = repo.join('record-backups') + try: + os.mkdir(backupdir) + except OSError, err: + if err.errno != errno.EEXIST: + raise + try: + # backup continues + for f in newfiles: + if f not in modified: + continue + fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.', + dir=backupdir) + os.close(fd) + ui.debug('backup %r as %r\n' % (f, tmpname)) + util.copyfile(repo.wjoin(f), tmpname) + backups[f] = tmpname + + fp = cStringIO.StringIO() + for c in chunks: + if c.filename() in backups: + c.write(fp) + dopatch = fp.tell() + fp.seek(0) + + # 3a. apply filtered patch to clean repo (clean) + if backups: + hg.revert(repo, repo.dirstate.parents()[0], + lambda key: key in backups) + + # 3b. 
(apply) + if dopatch: + try: + ui.debug('applying patch\n') + ui.debug(fp.getvalue()) + pfiles = {} + patch.internalpatch(fp, ui, 1, repo.root, files=pfiles, + eolmode=None) + cmdutil.updatedir(ui, repo, pfiles) + except patch.PatchError, err: + raise util.Abort(str(err)) + del fp + + # 4. We prepared working directory according to filtered patch. + # Now is the time to delegate the job to commit/qrefresh or the like! + + # it is important to first chdir to repo root -- we'll call a + # highlevel command with list of pathnames relative to repo root + cwd = os.getcwd() + os.chdir(repo.root) + try: + commitfunc(ui, repo, *newfiles, **opts) + finally: + os.chdir(cwd) + + return 0 + finally: + # 5. finally restore backed-up files + try: + for realname, tmpname in backups.iteritems(): + ui.debug('restoring %r to %r\n' % (tmpname, realname)) + util.copyfile(tmpname, repo.wjoin(realname)) + os.unlink(tmpname) + os.rmdir(backupdir) + except OSError: + pass + + # wrap ui.write so diff output can be labeled/colorized + def wrapwrite(orig, *args, **kw): + label = kw.pop('label', '') + for chunk, l in patch.difflabel(lambda: args): + orig(chunk, label=label + l) + oldwrite = ui.write + extensions.wrapfunction(ui, 'write', wrapwrite) + try: + return cmdutil.commit(ui, repo, recordfunc, pats, opts) + finally: + ui.write = oldwrite + +cmdtable = { + "record": + (record, + + # add commit options + commands.table['^commit|ci'][1], + + _('hg record [OPTION]... [FILE]...')), +} + + +def uisetup(ui): + try: + mq = extensions.find('mq') + except KeyError: + return + + qcmdtable = { + "qrecord": + (qrecord, + + # add qnew options, except '--force' + [opt for opt in mq.cmdtable['^qnew'][1] if opt[1] != 'force'], + + _('hg qrecord [OPTION]... 
PATCH [FILE]...')), + } + + cmdtable.update(qcmdtable) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyo Binary files differnew file mode 100644 index 0000000..456b7d6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py new file mode 100644 index 0000000..77bd3c3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py @@ -0,0 +1,180 @@ +# Mercurial extension to provide 'hg relink' command +# +# Copyright (C) 2007 Brendan Cully <brendan@kublai.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""recreates hardlinks between repository clones""" + +from mercurial import hg, util +from mercurial.i18n import _ +import os, stat + +def relink(ui, repo, origin=None, **opts): + """recreate hardlinks between two repositories + + When repositories are cloned locally, their data files will be + hardlinked so that they only use the space of a single repository. + + Unfortunately, subsequent pulls into either repository will break + hardlinks for any files touched by the new changesets, even if + both repositories end up pulling the same changes. + + Similarly, passing --rev to "hg clone" will fail to use any + hardlinks, falling back to a complete copy of the source + repository. + + This command lets you recreate those hardlinks and reclaim that + wasted space. + + This repository will be relinked to share space with ORIGIN, which + must be on the same local disk. If ORIGIN is omitted, looks for + "default-relink", then "default", in [paths]. + + Do not attempt any read operations on this repository while the + command is running. (Both repositories will be locked against + writes.) 
+ """ + if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'): + raise util.Abort(_('hardlinks are not supported on this system')) + src = hg.repository( + hg.remoteui(repo, opts), + ui.expandpath(origin or 'default-relink', origin or 'default')) + if not src.local(): + raise util.Abort('must specify local origin repository') + ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path)) + locallock = repo.lock() + try: + remotelock = src.lock() + try: + candidates = sorted(collect(src, ui)) + targets = prune(candidates, src.store.path, repo.store.path, ui) + do_relink(src.store.path, repo.store.path, targets, ui) + finally: + remotelock.release() + finally: + locallock.release() + +def collect(src, ui): + seplen = len(os.path.sep) + candidates = [] + live = len(src['tip'].manifest()) + # Your average repository has some files which were deleted before + # the tip revision. We account for that by assuming that there are + # 3 tracked files for every 2 live files as of the tip version of + # the repository. + # + # mozilla-central as of 2010-06-10 had a ratio of just over 7:5. + total = live * 3 // 2 + src = src.store.path + pos = 0 + ui.status(_("tip has %d files, estimated total number of files: %s\n") + % (live, total)) + for dirpath, dirnames, filenames in os.walk(src): + dirnames.sort() + relpath = dirpath[len(src) + seplen:] + for filename in sorted(filenames): + if not filename[-2:] in ('.d', '.i'): + continue + st = os.stat(os.path.join(dirpath, filename)) + if not stat.S_ISREG(st.st_mode): + continue + pos += 1 + candidates.append((os.path.join(relpath, filename), st)) + ui.progress(_('collecting'), pos, filename, _('files'), total) + + ui.progress(_('collecting'), None) + ui.status(_('collected %d candidate storage files\n') % len(candidates)) + return candidates + +def prune(candidates, src, dst, ui): + def linkfilter(src, dst, st): + try: + ts = os.stat(dst) + except OSError: + # Destination doesn't have this file? 
+ return False + if util.samefile(src, dst): + return False + if not util.samedevice(src, dst): + # No point in continuing + raise util.Abort( + _('source and destination are on different devices')) + if st.st_size != ts.st_size: + return False + return st + + targets = [] + total = len(candidates) + pos = 0 + for fn, st in candidates: + pos += 1 + srcpath = os.path.join(src, fn) + tgt = os.path.join(dst, fn) + ts = linkfilter(srcpath, tgt, st) + if not ts: + ui.debug(_('not linkable: %s\n') % fn) + continue + targets.append((fn, ts.st_size)) + ui.progress(_('pruning'), pos, fn, _('files'), total) + + ui.progress(_('pruning'), None) + ui.status(_('pruned down to %d probably relinkable files\n') % len(targets)) + return targets + +def do_relink(src, dst, files, ui): + def relinkfile(src, dst): + bak = dst + '.bak' + os.rename(dst, bak) + try: + util.os_link(src, dst) + except OSError: + os.rename(bak, dst) + raise + os.remove(bak) + + CHUNKLEN = 65536 + relinked = 0 + savedbytes = 0 + + pos = 0 + total = len(files) + for f, sz in files: + pos += 1 + source = os.path.join(src, f) + tgt = os.path.join(dst, f) + # Binary mode, so that read() works correctly, especially on Windows + sfp = file(source, 'rb') + dfp = file(tgt, 'rb') + sin = sfp.read(CHUNKLEN) + while sin: + din = dfp.read(CHUNKLEN) + if sin != din: + break + sin = sfp.read(CHUNKLEN) + sfp.close() + dfp.close() + if sin: + ui.debug(_('not linkable: %s\n') % f) + continue + try: + relinkfile(source, tgt) + ui.progress(_('relinking'), pos, f, _('files'), total) + relinked += 1 + savedbytes += sz + except OSError, inst: + ui.warn('%s: %s\n' % (tgt, str(inst))) + + ui.progress(_('relinking'), None) + + ui.status(_('relinked %d files (%d bytes reclaimed)\n') % + (relinked, savedbytes)) + +cmdtable = { + 'relink': ( + relink, + [], + _('[ORIGIN]') + ) +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyo Binary files differnew 
file mode 100644 index 0000000..61565b1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py new file mode 100644 index 0000000..ac9a8be --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py @@ -0,0 +1,84 @@ +# Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""extend schemes with shortcuts to repository swarms + +This extension allows you to specify shortcuts for parent URLs with a +lot of repositories to act like a scheme, for example:: + + [schemes] + py = http://code.python.org/hg/ + +After that you can use it like:: + + hg clone py://trunk/ + +Additionally there is support for some more complex schemas, for +example used by Google Code:: + + [schemes] + gcode = http://{1}.googlecode.com/hg/ + +The syntax is taken from Mercurial templates, and you have unlimited +number of variables, starting with ``{1}`` and continuing with +``{2}``, ``{3}`` and so on. This variables will receive parts of URL +supplied, split by ``/``. Anything not specified as ``{part}`` will be +just appended to an URL. + +For convenience, the extension adds these schemes by default:: + + [schemes] + py = http://hg.python.org/ + bb = https://bitbucket.org/ + bb+ssh = ssh://hg@bitbucket.org/ + gcode = https://{1}.googlecode.com/hg/ + kiln = https://{1}.kilnhg.com/Repo/ + +You can override a predefined scheme by defining a new scheme with the +same name. 
+""" + +import re +from mercurial import hg, templater + + +class ShortRepository(object): + def __init__(self, url, scheme, templater): + self.scheme = scheme + self.templater = templater + self.url = url + try: + self.parts = max(map(int, re.findall(r'\{(\d+)\}', self.url))) + except ValueError: + self.parts = 0 + + def __repr__(self): + return '<ShortRepository: %s>' % self.scheme + + def instance(self, ui, url, create): + url = url.split('://', 1)[1] + parts = url.split('/', self.parts) + if len(parts) > self.parts: + tail = parts[-1] + parts = parts[:-1] + else: + tail = '' + context = dict((str(i + 1), v) for i, v in enumerate(parts)) + url = ''.join(self.templater.process(self.url, context)) + tail + return hg._lookup(url).instance(ui, url, create) + +schemes = { + 'py': 'http://hg.python.org/', + 'bb': 'https://bitbucket.org/', + 'bb+ssh': 'ssh://hg@bitbucket.org/', + 'gcode': 'https://{1}.googlecode.com/hg/', + 'kiln': 'https://{1}.kilnhg.com/Repo/' + } + +def extsetup(ui): + schemes.update(dict(ui.configitems('schemes'))) + t = templater.engine(lambda x: x) + for scheme, url in schemes.items(): + hg.schemes[scheme] = ShortRepository(url, scheme, t) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyo Binary files differnew file mode 100644 index 0000000..a089ec3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py new file mode 100644 index 0000000..cc33148 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py @@ -0,0 +1,38 @@ +# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +'''share a common history between several working directories''' + +from mercurial.i18n import _ +from mercurial import hg, commands + +def share(ui, source, dest=None, noupdate=False): + """create a new shared repository + + Initialize a new repository and working directory that shares its + history with another repository. + + .. note:: + using rollback or extensions that destroy/modify history (mq, + rebase, etc.) can cause considerable confusion with shared + clones. In particular, if two shared clones are both updated to + the same changeset, and one of them destroys that changeset + with rollback, the other clone will suddenly stop working: all + operations will fail with "abort: working directory has unknown + parent". The only known workaround is to use debugsetparents on + the broken clone to reset it to a changeset that still exists + (e.g. tip). + """ + + return hg.share(ui, source, dest, not noupdate) + +cmdtable = { + "share": + (share, + [('U', 'noupdate', None, _('do not create a working copy'))], + _('[-U] SOURCE [DEST]')), +} + +commands.norepo += " share" diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyo Binary files differnew file mode 100644 index 0000000..f4fca90 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py new file mode 100644 index 0000000..4325d9b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py @@ -0,0 +1,630 @@ +# Patch transplanting extension for Mercurial +# +# Copyright 2006, 2007 Brendan Cully <brendan@kublai.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
class transplantentry(object):
    """One transplant record: local changeset hash <-> source changeset hash."""
    def __init__(self, lnode, rnode):
        self.lnode = lnode  # node of the transplanted copy in this repo
        self.rnode = rnode  # node of the original changeset in the source repo

class transplants(object):
    """Persistent map from source changesets to their local transplants.

    The map is stored as one "localhex:remotehex" pair per line in
    <path>/<transplantfile> and written back lazily via write().
    """
    def __init__(self, path=None, transplantfile=None, opener=None):
        self.path = path
        self.transplantfile = transplantfile
        self.opener = opener

        if not opener:
            self.opener = util.opener(self.path)
        self.transplants = {}
        self.dirty = False
        self.read()

    def read(self):
        """Populate self.transplants from the on-disk map, if any."""
        if not self.transplantfile:
            # No backing file configured: nothing to load.  Checking this
            # before os.path.join avoids crashing on a None component when
            # the constructor is used with its default transplantfile.
            return
        abspath = os.path.join(self.path, self.transplantfile)
        if os.path.exists(abspath):
            for line in self.opener(self.transplantfile).read().splitlines():
                lnode, rnode = map(revlog.bin, line.split(':'))
                entries = self.transplants.setdefault(rnode, [])
                entries.append(transplantentry(lnode, rnode))

    def write(self):
        """Flush the map to disk when it has unsaved changes."""
        if self.dirty and self.transplantfile:
            if not os.path.isdir(self.path):
                os.mkdir(self.path)
            fp = self.opener(self.transplantfile, 'w')
            for entries in self.transplants.itervalues():
                for t in entries:
                    l, r = map(revlog.hex, (t.lnode, t.rnode))
                    fp.write(l + ':' + r + '\n')
            fp.close()
        self.dirty = False

    def get(self, rnode):
        """Return the (possibly empty) list of transplants of rnode."""
        return self.transplants.get(rnode) or []

    def set(self, lnode, rnode):
        """Record that rnode was transplanted as lnode."""
        entries = self.transplants.setdefault(rnode, [])
        entries.append(transplantentry(lnode, rnode))
        self.dirty = True

    def remove(self, transplant):
        """Forget a previously recorded transplant entry."""
        entries = self.transplants.get(transplant.rnode)
        if entries:
            del entries[entries.index(transplant)]
            self.dirty = True
    def applied(self, repo, node, parent):
        '''returns True if a node is already an ancestor of parent
        or has already been transplanted'''
        if hasnode(repo, node):
            # node exists locally: per the docstring contract, it counts
            # as applied when it is an ancestor of parent.
            if node in repo.changelog.reachable(parent, stop=node):
                return True
        # Otherwise check previously recorded transplants of this node.
        for t in self.transplants.get(node):
            # it might have been stripped
            if not hasnode(repo, t.lnode):
                # Stale record: drop it.  NOTE(review): this returns False
                # without examining any remaining records for the same
                # source node -- presumably at most one valid record per
                # source node is expected; confirm against callers.
                self.transplants.remove(t)
                return False
            if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
                return True
        return False
    def filter(self, filter, changelog, patchfile):
        '''arbitrarily rewrite changeset before applying it'''

        self.ui.status(_('filtering %s\n') % patchfile)
        # changelog tuple layout (see applyone): [1]=user,
        # [2]=(time, timezone), [4]=description.
        user, date, msg = (changelog[1], changelog[2], changelog[4])

        # Write a "# HG changeset patch" style header for the filter to edit.
        fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
        fp = os.fdopen(fd, 'w')
        fp.write("# HG changeset patch\n")
        fp.write("# User %s\n" % user)
        fp.write("# Date %d %d\n" % date)
        fp.write(msg + '\n')
        fp.close()

        try:
            # Run the user-supplied filter command with the header file as
            # $1 and the patch as $2; HGUSER is exported for its benefit.
            util.system('%s %s %s' % (filter, util.shellquote(headerfile),
                                      util.shellquote(patchfile)),
                        environ={'HGUSER': changelog[1]},
                        onerr=util.Abort, errprefix=_('filter failed'))
            # Re-read the (possibly rewritten) metadata from the header.
            user, date, msg = self.parselog(file(headerfile))[1:4]
        finally:
            os.unlink(headerfile)

        return (user, date, msg)

    def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
                 filter=None):
        '''apply the patch in patchfile to the repository as a transplant'''
        # cl is the raw changelog entry of the source changeset.
        (manifest, user, (time, timezone), files, message) = cl[:5]
        date = "%d %d" % (time, timezone)
        extra = {'transplant_source': node}
        if filter:
            (user, date, message) = self.filter(filter, cl, patchfile)

        if log:
            # we don't translate messages inserted into commits
            message += '\n(transplanted from %s)' % revlog.hex(node)

        self.ui.status(_('applying %s\n') % revlog.short(node))
        self.ui.note('%s %s\n%s\n' % (user, date, message))

        if not patchfile and not merge:
            raise util.Abort(_('can only omit patchfile if merging'))
        if patchfile:
            try:
                files = {}
                try:
                    patch.patch(patchfile, self.ui, cwd=repo.root,
                                files=files, eolmode=None)
                    if not files:
                        # Patch touched nothing: warn and skip the commit.
                        self.ui.warn(_('%s: empty changeset')
                                     % revlog.hex(node))
                        return None
                finally:
                    files = cmdutil.updatedir(self.ui, repo, files)
            except Exception, inst:
                # Patch application failed: journal enough state (via
                # self.log) for the user to fix the working directory and
                # resume with --continue, then abort.
                seriespath = os.path.join(self.path, 'series')
                if os.path.exists(seriespath):
                    os.unlink(seriespath)
                p1 = repo.dirstate.parents()[0]
                p2 = node
                self.log(user, date, message, p1, p2, merge=merge)
                self.ui.write(str(inst) + '\n')
                raise util.Abort(_('fix up the merge and run '
                                   'hg transplant --continue'))
        else:
            files = None
        if merge:
            # Record the transplanted node as a second parent and commit
            # everything in the working directory.
            p1, p2 = repo.dirstate.parents()
            repo.dirstate.setparents(p1, node)
            m = match.always(repo.root, '')
        else:
            m = match.exact(repo.root, '', files)

        n = repo.commit(message, user, date, extra=extra, match=m)
        if not n:
            # Crash here to prevent an unclear crash later, in
            # transplants.write(). This can happen if patch.patch()
            # does nothing but claims success or if repo.status() fails
            # to report changes done by patch.patch(). These both
            # appear to be bugs in other parts of Mercurial, but dying
            # here, as soon as we can detect the problem, is preferable
            # to silently dropping changesets on the floor.
            raise RuntimeError('nothing committed after transplant')
        if not merge:
            self.transplants.set(n, node)

        return n
    def resume(self, repo, source, opts=None):
        '''recover last transaction and apply remaining changesets'''
        if os.path.exists(os.path.join(self.path, 'journal')):
            # An interrupted applyone() left a journal: commit the
            # fixed-up working directory first.
            n, node = self.recover(repo)
            self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
                                                           revlog.short(n)))
        seriespath = os.path.join(self.path, 'series')
        if not os.path.exists(seriespath):
            # Nothing left to apply; just persist the transplant map.
            self.transplants.write()
            return
        nodes, merges = self.readseries()
        revmap = {}
        for n in nodes:
            revmap[source.changelog.rev(n)] = n
        os.unlink(seriespath)

        self.apply(repo, source, revmap, merges, opts)

    def recover(self, repo):
        '''commit working directory using journal metadata'''
        node, user, date, message, parents = self.readlog()
        merge = len(parents) == 2

        if not user or not date or not message or not parents[0]:
            raise util.Abort(_('transplant log file is corrupt'))

        extra = {'transplant_source': node}
        wlock = repo.wlock()
        try:
            p1, p2 = repo.dirstate.parents()
            if p1 != parents[0]:
                # The journal was written against a different working
                # directory parent; refuse rather than mis-commit.
                raise util.Abort(
                    _('working dir not at transplant parent %s') %
                                 revlog.hex(parents[0]))
            if merge:
                repo.dirstate.setparents(p1, parents[1])
            n = repo.commit(message, user, date, extra=extra)
            if not n:
                raise util.Abort(_('commit failed'))
            if not merge:
                self.transplants.set(n, node)
            self.unlog()

            return n, node
        finally:
            wlock.release()

    def readseries(self):
        # Parse the series file into (nodes, merges) lists of binary nodes;
        # lines after the '# Merges' marker are merge revisions.
        nodes = []
        merges = []
        cur = nodes
        for line in self.opener('series').read().splitlines():
            if line.startswith('# Merges'):
                cur = merges
                continue
            cur.append(revlog.bin(line))

        return (nodes, merges)
series.write('# Merges\n') + for m in merges: + series.write(revlog.hex(m) + '\n') + series.close() + + def parselog(self, fp): + parents = [] + message = [] + node = revlog.nullid + inmsg = False + for line in fp.read().splitlines(): + if inmsg: + message.append(line) + elif line.startswith('# User '): + user = line[7:] + elif line.startswith('# Date '): + date = line[7:] + elif line.startswith('# Node ID '): + node = revlog.bin(line[10:]) + elif line.startswith('# Parent '): + parents.append(revlog.bin(line[9:])) + elif not line.startswith('# '): + inmsg = True + message.append(line) + return (node, user, date, '\n'.join(message), parents) + + def log(self, user, date, message, p1, p2, merge=False): + '''journal changelog metadata for later recover''' + + if not os.path.isdir(self.path): + os.mkdir(self.path) + fp = self.opener('journal', 'w') + fp.write('# User %s\n' % user) + fp.write('# Date %s\n' % date) + fp.write('# Node ID %s\n' % revlog.hex(p2)) + fp.write('# Parent ' + revlog.hex(p1) + '\n') + if merge: + fp.write('# Parent ' + revlog.hex(p2) + '\n') + fp.write(message.rstrip() + '\n') + fp.close() + + def readlog(self): + return self.parselog(self.opener('journal')) + + def unlog(self): + '''remove changelog journal''' + absdst = os.path.join(self.path, 'journal') + if os.path.exists(absdst): + os.unlink(absdst) + + def transplantfilter(self, repo, source, root): + def matchfn(node): + if self.applied(repo, node, root): + return False + if source.changelog.parents(node)[1] != revlog.nullid: + return False + extra = source.changelog.read(node)[5] + cnode = extra.get('transplant_source') + if cnode and self.applied(repo, cnode, root): + return False + return True + + return matchfn + +def hasnode(repo, node): + try: + return repo.changelog.rev(node) != None + except error.RevlogError: + return False + +def browserevs(ui, repo, nodes, opts): + '''interactively transplant changesets''' + def browsehelp(ui): + ui.write(_('y: transplant this changeset\n' + 
'n: skip this changeset\n' + 'm: merge at this changeset\n' + 'p: show patch\n' + 'c: commit selected changesets\n' + 'q: cancel transplant\n' + '?: show this help\n')) + + displayer = cmdutil.show_changeset(ui, repo, opts) + transplants = [] + merges = [] + for node in nodes: + displayer.show(repo[node]) + action = None + while not action: + action = ui.prompt(_('apply changeset? [ynmpcq?]:')) + if action == '?': + browsehelp(ui) + action = None + elif action == 'p': + parent = repo.changelog.parents(node)[0] + for chunk in patch.diff(repo, parent, node): + ui.write(chunk) + action = None + elif action not in ('y', 'n', 'm', 'c', 'q'): + ui.write(_('no such option\n')) + action = None + if action == 'y': + transplants.append(node) + elif action == 'm': + merges.append(node) + elif action == 'c': + break + elif action == 'q': + transplants = () + merges = () + break + displayer.close() + return (transplants, merges) + +def transplant(ui, repo, *revs, **opts): + '''transplant changesets from another branch + + Selected changesets will be applied on top of the current working + directory with the log of the original changeset. If --log is + specified, log messages will have a comment appended of the form:: + + (transplanted from CHANGESETHASH) + + You can rewrite the changelog message with the --filter option. + Its argument will be invoked with the current changelog message as + $1 and the patch as $2. + + If --source/-s is specified, selects changesets from the named + repository. If --branch/-b is specified, selects changesets from + the branch holding the named revision, up to that revision. If + --all/-a is specified, all changesets on the branch will be + transplanted, otherwise you will be prompted to select the + changesets you want. + + :hg:`transplant --branch REVISION --all` will rebase the selected + branch (up to the named revision) onto your current working + directory. + + You can optionally mark selected transplanted changesets as merge + changesets. 
You will not be prompted to transplant any ancestors + of a merged transplant, and you can merge descendants of them + normally instead of transplanting them. + + If no merges or revisions are provided, :hg:`transplant` will + start an interactive changeset browser. + + If a changeset application fails, you can fix the merge by hand + and then resume where you left off by calling :hg:`transplant + --continue/-c`. + ''' + def incwalk(repo, incoming, branches, match=util.always): + if not branches: + branches = None + for node in repo.changelog.nodesbetween(incoming, branches)[0]: + if match(node): + yield node + + def transplantwalk(repo, root, branches, match=util.always): + if not branches: + branches = repo.heads() + ancestors = [] + for branch in branches: + ancestors.append(repo.changelog.ancestor(root, branch)) + for node in repo.changelog.nodesbetween(ancestors, branches)[0]: + if match(node): + yield node + + def checkopts(opts, revs): + if opts.get('continue'): + if opts.get('branch') or opts.get('all') or opts.get('merge'): + raise util.Abort(_('--continue is incompatible with ' + 'branch, all or merge')) + return + if not (opts.get('source') or revs or + opts.get('merge') or opts.get('branch')): + raise util.Abort(_('no source URL, branch tag or revision ' + 'list provided')) + if opts.get('all'): + if not opts.get('branch'): + raise util.Abort(_('--all requires a branch revision')) + if revs: + raise util.Abort(_('--all is incompatible with a ' + 'revision list')) + + checkopts(opts, revs) + + if not opts.get('log'): + opts['log'] = ui.config('transplant', 'log') + if not opts.get('filter'): + opts['filter'] = ui.config('transplant', 'filter') + + tp = transplanter(ui, repo) + + p1, p2 = repo.dirstate.parents() + if len(repo) > 0 and p1 == revlog.nullid: + raise util.Abort(_('no revision checked out')) + if not opts.get('continue'): + if p2 != revlog.nullid: + raise util.Abort(_('outstanding uncommitted merges')) + m, a, r, d = repo.status()[:4] + if m 
or a or r or d: + raise util.Abort(_('outstanding local changes')) + + bundle = None + source = opts.get('source') + if source: + sourcerepo = ui.expandpath(source) + source = hg.repository(ui, sourcerepo) + source, incoming, bundle = bundlerepo.getremotechanges(ui, repo, source, + force=True) + else: + source = repo + + try: + if opts.get('continue'): + tp.resume(repo, source, opts) + return + + tf = tp.transplantfilter(repo, source, p1) + if opts.get('prune'): + prune = [source.lookup(r) + for r in cmdutil.revrange(source, opts.get('prune'))] + matchfn = lambda x: tf(x) and x not in prune + else: + matchfn = tf + branches = map(source.lookup, opts.get('branch', ())) + merges = map(source.lookup, opts.get('merge', ())) + revmap = {} + if revs: + for r in cmdutil.revrange(source, revs): + revmap[int(r)] = source.lookup(r) + elif opts.get('all') or not merges: + if source != repo: + alltransplants = incwalk(source, incoming, branches, + match=matchfn) + else: + alltransplants = transplantwalk(source, p1, branches, + match=matchfn) + if opts.get('all'): + revs = alltransplants + else: + revs, newmerges = browserevs(ui, source, alltransplants, opts) + merges.extend(newmerges) + for r in revs: + revmap[source.changelog.rev(r)] = r + for r in merges: + revmap[source.changelog.rev(r)] = r + + tp.apply(repo, source, revmap, merges, opts) + finally: + if bundle: + source.close() + os.unlink(bundle) + +def revsettransplanted(repo, subset, x): + """``transplanted(set)`` + Transplanted changesets in set. 
+ """ + if x: + s = revset.getset(repo, subset, x) + else: + s = subset + cs = set() + for r in xrange(0, len(repo)): + if repo[r].extra().get('transplant_source'): + cs.add(r) + return [r for r in s if r in cs] + +def extsetup(ui): + revset.symbols['transplanted'] = revsettransplanted + +cmdtable = { + "transplant": + (transplant, + [('s', 'source', '', + _('pull patches from REPO'), _('REPO')), + ('b', 'branch', [], + _('pull patches from branch BRANCH'), _('BRANCH')), + ('a', 'all', None, _('pull all changesets up to BRANCH')), + ('p', 'prune', [], + _('skip over REV'), _('REV')), + ('m', 'merge', [], + _('merge at REV'), _('REV')), + ('', 'log', None, _('append transplant info to log message')), + ('c', 'continue', None, _('continue last transplant session ' + 'after repair')), + ('', 'filter', '', + _('filter changesets through command'), _('CMD'))], + _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] ' + '[-m REV] [REV]...')) +} + +# tell hggettext to extract docstrings from these functions: +i18nfunctions = [revsettransplanted] diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyo Binary files differnew file mode 100644 index 0000000..a41b8f0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py new file mode 100644 index 0000000..f83c43e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py @@ -0,0 +1,159 @@ +# win32mbcs.py -- MBCS filename support for Mercurial +# +# Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com> +# +# Version: 0.3 +# Author: Shun-ichi Goto <shunichi.goto@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+# + +'''allow the use of MBCS paths with problematic encodings + +Some MBCS encodings are not good for some path operations (i.e. +splitting path, case conversion, etc.) with its encoded bytes. We call +such a encoding (i.e. shift_jis and big5) as "problematic encoding". +This extension can be used to fix the issue with those encodings by +wrapping some functions to convert to Unicode string before path +operation. + +This extension is useful for: + +- Japanese Windows users using shift_jis encoding. +- Chinese Windows users using big5 encoding. +- All users who use a repository with one of problematic encodings on + case-insensitive file system. + +This extension is not needed for: + +- Any user who use only ASCII chars in path. +- Any user who do not use any of problematic encodings. + +Note that there are some limitations on using this extension: + +- You should use single encoding in one repository. +- If the repository path ends with 0x5c, .hg/hgrc cannot be read. +- win32mbcs is not compatible with fixutf8 extention. + +By default, win32mbcs uses encoding.encoding decided by Mercurial. +You can specify the encoding by config option:: + + [win32mbcs] + encoding = sjis + +It is useful for the users who want to commit with UTF-8 log message. 
def decode(arg):
    # Recursively convert byte strings (and strings inside tuples, lists
    # and dicts) to unicode using the module-level _encoding.  A string
    # that does not round-trip through _encoding raises UnicodeError.
    # Dicts are converted in place and returned; other types fall
    # through and return None.
    if isinstance(arg, str):
        uarg = arg.decode(_encoding)
        if arg == uarg.encode(_encoding):
            return uarg
        raise UnicodeError("Not local encoding")
    elif isinstance(arg, tuple):
        return tuple(map(decode, arg))
    elif isinstance(arg, list):
        return map(decode, arg)
    elif isinstance(arg, dict):
        for k, v in arg.items():
            arg[k] = decode(v)
        return arg

def encode(arg):
    # Inverse of decode(): recursively encode unicode objects back to
    # byte strings with _encoding.  Same container handling as decode().
    if isinstance(arg, unicode):
        return arg.encode(_encoding)
    elif isinstance(arg, tuple):
        return tuple(map(encode, arg))
    elif isinstance(arg, list):
        return map(encode, arg)
    elif isinstance(arg, dict):
        for k, v in arg.items():
            arg[k] = encode(v)
        return arg

def appendsep(s):
    # ensure the path ends with os.sep, appending it if necessary.
    try:
        us = decode(s)
    except UnicodeError:
        # Not decodable: inspect the raw bytes instead.
        us = s
    if us and us[-1] not in ':/\\':
        s += os.sep
    return s

def wrapper(func, args, kwds):
    # check argument is unicode, then call original
    for arg in args:
        if isinstance(arg, unicode):
            return func(*args, **kwds)

    try:
        # convert arguments to unicode, call func, then convert back
        return encode(func(*decode(args), **decode(kwds)))
    except UnicodeError:
        raise util.Abort(_("[win32mbcs] filename conversion failed with"
                         " %s encoding\n") % (_encoding))
def wrapname(name, wrapper):
    """Monkey-patch the function at dotted path *name* with *wrapper*.

    The replacement calls wrapper(original, args, kwds), leaving the
    wrapper in charge of how (and whether) the original is invoked.
    """
    modname, attrname = name.rsplit('.', 1)
    mod = sys.modules[modname]
    original = getattr(mod, attrname)
    def wrapped(*args, **kwds):
        return wrapper(original, args, kwds)
    try:
        # keep the original's name for nicer tracebacks
        wrapped.__name__ = original.__name__ # fail with python23
    except Exception:
        pass
    setattr(mod, attrname, wrapped)
+ if '--debug' in sys.argv: + ui.write("[win32mbcs] activated with encoding: %s\n" + % _encoding) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyo Binary files differnew file mode 100644 index 0000000..43fac41 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py new file mode 100644 index 0000000..82e6aed --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py @@ -0,0 +1,170 @@ +# win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users +# +# Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''perform automatic newline conversion + + Deprecation: The win32text extension requires each user to configure + the extension again and again for each clone since the configuration + is not copied when cloning. + + We have therefore made the ``eol`` as an alternative. The ``eol`` + uses a version controlled file for its configuration and each clone + will therefore use the right settings from the start. 
+ +To perform automatic newline conversion, use:: + + [extensions] + win32text = + [encode] + ** = cleverencode: + # or ** = macencode: + + [decode] + ** = cleverdecode: + # or ** = macdecode: + +If not doing conversion, to make sure you do not commit CRLF/CR by accident:: + + [hooks] + pretxncommit.crlf = python:hgext.win32text.forbidcrlf + # or pretxncommit.cr = python:hgext.win32text.forbidcr + +To do the same check on a server to prevent CRLF/CR from being +pushed or pulled:: + + [hooks] + pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf + # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr +''' + +from mercurial.i18n import _ +from mercurial.node import short +from mercurial import util +import re + +# regexp for single LF without CR preceding. +re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE) + +newlinestr = {'\r\n': 'CRLF', '\r': 'CR'} +filterstr = {'\r\n': 'clever', '\r': 'mac'} + +def checknewline(s, newline, ui=None, repo=None, filename=None): + # warn if already has 'newline' in repository. + # it might cause unexpected eol conversion. 
def dumbdecode(s, cmd, **kwargs):
    # LF -> CRLF, unconditionally (no binary-content check).
    checknewline(s, '\r\n', **kwargs)
    # replace single LF to CRLF
    return re_single_lf.sub('\\1\r\n', s)

def dumbencode(s, cmd):
    # CRLF -> LF, unconditionally.
    return s.replace('\r\n', '\n')

def macdumbdecode(s, cmd, **kwargs):
    # LF -> CR (classic Mac line endings), unconditionally.
    checknewline(s, '\r', **kwargs)
    return s.replace('\n', '\r')

def macdumbencode(s, cmd):
    # CR -> LF, unconditionally.
    return s.replace('\r', '\n')

def cleverdecode(s, cmd, **kwargs):
    # Like dumbdecode, but leaves binary content untouched.
    if not util.binary(s):
        return dumbdecode(s, cmd, **kwargs)
    return s

def cleverencode(s, cmd):
    # Like dumbencode, but leaves binary content untouched.
    if not util.binary(s):
        return dumbencode(s, cmd)
    return s

def macdecode(s, cmd, **kwargs):
    # Like macdumbdecode, but leaves binary content untouched.
    if not util.binary(s):
        return macdumbdecode(s, cmd, **kwargs)
    return s

def macencode(s, cmd):
    # Like macdumbencode, but leaves binary content untouched.
    if not util.binary(s):
        return macdumbencode(s, cmd)
    return s

# filter name (as used in [encode]/[decode] hgrc sections, see the
# module docstring) -> filter function; registered in reposetup().
_filters = {
    'dumbdecode:': dumbdecode,
    'dumbencode:': dumbencode,
    'cleverdecode:': cleverdecode,
    'cleverencode:': cleverencode,
    'macdumbdecode:': macdumbdecode,
    'macdumbencode:': macdumbencode,
    'macdecode:': macdecode,
    'macencode:': macencode,
    }
+ tip = repo['tip'] + for rev in xrange(len(repo)-1, repo[node].rev()-1, -1): + c = repo[rev] + for f in c.files(): + if f in seen or f not in tip or f not in c: + continue + seen.add(f) + data = c[f].data() + if not util.binary(data) and newline in data: + if not halt: + ui.warn(_('Attempt to commit or push text file(s) ' + 'using %s line endings\n') % + newlinestr[newline]) + ui.warn(_('in %s: %s\n') % (short(c.node()), f)) + halt = True + if halt and hooktype == 'pretxnchangegroup': + crlf = newlinestr[newline].lower() + filter = filterstr[newline] + ui.warn(_('\nTo prevent this mistake in your local repository,\n' + 'add to Mercurial.ini or .hg/hgrc:\n' + '\n' + '[hooks]\n' + 'pretxncommit.%s = python:hgext.win32text.forbid%s\n' + '\n' + 'and also consider adding:\n' + '\n' + '[extensions]\n' + 'win32text =\n' + '[encode]\n' + '** = %sencode:\n' + '[decode]\n' + '** = %sdecode:\n') % (crlf, crlf, filter, filter)) + return halt + +def forbidcrlf(ui, repo, hooktype, node, **kwargs): + return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs) + +def forbidcr(ui, repo, hooktype, node, **kwargs): + return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs) + +def reposetup(ui, repo): + if not repo.local(): + return + for name, fn in _filters.iteritems(): + repo.adddatafilter(name, fn) + +def extsetup(ui): + if ui.configbool('win32text', 'warn', True): + ui.warn(_("win32text is deprecated: " + "http://mercurial.selenic.com/wiki/Win32TextExtension\n")) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyo Binary files differnew file mode 100644 index 0000000..fe9072d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py new file mode 100644 index 0000000..acca5c2 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py @@ -0,0 +1,1582 @@ +""" Multicast DNS Service Discovery for Python, v0.12 + Copyright (C) 2003, Paul Scott-Murphy + + This module provides a framework for the use of DNS Service Discovery + using IP multicast. It has been tested against the JRendezvous + implementation from <a href="http://strangeberry.com">StrangeBerry</a>, + and against the mDNSResponder from Mac OS X 10.3.8. + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +""" + +"""0.12 update - allow selection of binding interface + typo fix - Thanks A. M. Kuchlingi + removed all use of word 'Rendezvous' - this is an API change""" + +"""0.11 update - correction to comments for addListener method + support for new record types seen from OS X + - IPv6 address + - hostinfo + ignore unknown DNS record types + fixes to name decoding + works alongside other processes using port 5353 (e.g. 
on Mac OS X) + tested against Mac OS X 10.3.2's mDNSResponder + corrections to removal of list entries for service browser""" + +"""0.10 update - Jonathon Paisley contributed these corrections: + always multicast replies, even when query is unicast + correct a pointer encoding problem + can now write records in any order + traceback shown on failure + better TXT record parsing + server is now separate from name + can cancel a service browser + + modified some unit tests to accommodate these changes""" + +"""0.09 update - remove all records on service unregistration + fix DOS security problem with readName""" + +"""0.08 update - changed licensing to LGPL""" + +"""0.07 update - faster shutdown on engine + pointer encoding of outgoing names + ServiceBrowser now works + new unit tests""" + +"""0.06 update - small improvements with unit tests + added defined exception types + new style objects + fixed hostname/interface problem + fixed socket timeout problem + fixed addServiceListener() typo bug + using select() for socket reads + tested on Debian unstable with Python 2.2.2""" + +"""0.05 update - ensure case insensitivty on domain names + support for unicast DNS queries""" + +"""0.04 update - added some unit tests + added __ne__ adjuncts where required + ensure names end in '.local.' 
+ timeout on receiving socket for clean shutdown""" + +__author__ = "Paul Scott-Murphy" +__email__ = "paul at scott dash murphy dot com" +__version__ = "0.12" + +import string +import time +import struct +import socket +import threading +import select +import traceback + +__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"] + +# hook for threads + +globals()['_GLOBAL_DONE'] = 0 + +# Some timing constants + +_UNREGISTER_TIME = 125 +_CHECK_TIME = 175 +_REGISTER_TIME = 225 +_LISTENER_TIME = 200 +_BROWSER_TIME = 500 + +# Some DNS constants + +_MDNS_ADDR = '224.0.0.251' +_MDNS_PORT = 5353; +_DNS_PORT = 53; +_DNS_TTL = 60 * 60; # one hour default TTL + +_MAX_MSG_TYPICAL = 1460 # unused +_MAX_MSG_ABSOLUTE = 8972 + +_FLAGS_QR_MASK = 0x8000 # query response mask +_FLAGS_QR_QUERY = 0x0000 # query +_FLAGS_QR_RESPONSE = 0x8000 # response + +_FLAGS_AA = 0x0400 # Authorative answer +_FLAGS_TC = 0x0200 # Truncated +_FLAGS_RD = 0x0100 # Recursion desired +_FLAGS_RA = 0x8000 # Recursion available + +_FLAGS_Z = 0x0040 # Zero +_FLAGS_AD = 0x0020 # Authentic data +_FLAGS_CD = 0x0010 # Checking disabled + +_CLASS_IN = 1 +_CLASS_CS = 2 +_CLASS_CH = 3 +_CLASS_HS = 4 +_CLASS_NONE = 254 +_CLASS_ANY = 255 +_CLASS_MASK = 0x7FFF +_CLASS_UNIQUE = 0x8000 + +_TYPE_A = 1 +_TYPE_NS = 2 +_TYPE_MD = 3 +_TYPE_MF = 4 +_TYPE_CNAME = 5 +_TYPE_SOA = 6 +_TYPE_MB = 7 +_TYPE_MG = 8 +_TYPE_MR = 9 +_TYPE_NULL = 10 +_TYPE_WKS = 11 +_TYPE_PTR = 12 +_TYPE_HINFO = 13 +_TYPE_MINFO = 14 +_TYPE_MX = 15 +_TYPE_TXT = 16 +_TYPE_AAAA = 28 +_TYPE_SRV = 33 +_TYPE_ANY = 255 + +# Mapping constants to names + +_CLASSES = { _CLASS_IN : "in", + _CLASS_CS : "cs", + _CLASS_CH : "ch", + _CLASS_HS : "hs", + _CLASS_NONE : "none", + _CLASS_ANY : "any" } + +_TYPES = { _TYPE_A : "a", + _TYPE_NS : "ns", + _TYPE_MD : "md", + _TYPE_MF : "mf", + _TYPE_CNAME : "cname", + _TYPE_SOA : "soa", + _TYPE_MB : "mb", + _TYPE_MG : "mg", + _TYPE_MR : "mr", + _TYPE_NULL : "null", + _TYPE_WKS : "wks", + _TYPE_PTR : "ptr", + _TYPE_HINFO : "hinfo", + 
_TYPE_MINFO : "minfo", + _TYPE_MX : "mx", + _TYPE_TXT : "txt", + _TYPE_AAAA : "quada", + _TYPE_SRV : "srv", + _TYPE_ANY : "any" } + +# utility functions + +def currentTimeMillis(): + """Current system time in milliseconds""" + return time.time() * 1000 + +# Exceptions + +class NonLocalNameException(Exception): + pass + +class NonUniqueNameException(Exception): + pass + +class NamePartTooLongException(Exception): + pass + +class AbstractMethodException(Exception): + pass + +class BadTypeInNameException(Exception): + pass + +class BadDomainName(Exception): + def __init__(self, pos): + Exception.__init__(self, "at position %s" % pos) + +class BadDomainNameCircular(BadDomainName): + pass + +# implementation classes + +class DNSEntry(object): + """A DNS entry""" + + def __init__(self, name, type, clazz): + self.key = string.lower(name) + self.name = name + self.type = type + self.clazz = clazz & _CLASS_MASK + self.unique = (clazz & _CLASS_UNIQUE) != 0 + + def __eq__(self, other): + """Equality test on name, type, and class""" + if isinstance(other, DNSEntry): + return self.name == other.name and self.type == other.type and self.clazz == other.clazz + return 0 + + def __ne__(self, other): + """Non-equality test""" + return not self.__eq__(other) + + def getClazz(self, clazz): + """Class accessor""" + try: + return _CLASSES[clazz] + except: + return "?(%s)" % (clazz) + + def getType(self, type): + """Type accessor""" + try: + return _TYPES[type] + except: + return "?(%s)" % (type) + + def toString(self, hdr, other): + """String representation with additional information""" + result = "%s[%s,%s" % (hdr, self.getType(self.type), self.getClazz(self.clazz)) + if self.unique: + result += "-unique," + else: + result += "," + result += self.name + if other is not None: + result += ",%s]" % (other) + else: + result += "]" + return result + +class DNSQuestion(DNSEntry): + """A DNS question entry""" + + def __init__(self, name, type, clazz): + if not name.endswith(".local."): + 
raise NonLocalNameException(name) + DNSEntry.__init__(self, name, type, clazz) + + def answeredBy(self, rec): + """Returns true if the question is answered by the record""" + return self.clazz == rec.clazz and (self.type == rec.type or self.type == _TYPE_ANY) and self.name == rec.name + + def __repr__(self): + """String representation""" + return DNSEntry.toString(self, "question", None) + + +class DNSRecord(DNSEntry): + """A DNS record - like a DNS entry, but has a TTL""" + + def __init__(self, name, type, clazz, ttl): + DNSEntry.__init__(self, name, type, clazz) + self.ttl = ttl + self.created = currentTimeMillis() + + def __eq__(self, other): + """Tests equality as per DNSRecord""" + if isinstance(other, DNSRecord): + return DNSEntry.__eq__(self, other) + return 0 + + def suppressedBy(self, msg): + """Returns true if any answer in a message can suffice for the + information held in this record.""" + for record in msg.answers: + if self.suppressedByAnswer(record): + return 1 + return 0 + + def suppressedByAnswer(self, other): + """Returns true if another record has same name, type and class, + and if its TTL is at least half of this record's.""" + if self == other and other.ttl > (self.ttl / 2): + return 1 + return 0 + + def getExpirationTime(self, percent): + """Returns the time at which this record will have expired + by a certain percentage.""" + return self.created + (percent * self.ttl * 10) + + def getRemainingTTL(self, now): + """Returns the remaining TTL in seconds.""" + return max(0, (self.getExpirationTime(100) - now) / 1000) + + def isExpired(self, now): + """Returns true if this record has expired.""" + return self.getExpirationTime(100) <= now + + def isStale(self, now): + """Returns true if this record is at least half way expired.""" + return self.getExpirationTime(50) <= now + + def resetTTL(self, other): + """Sets this record's TTL and created time to that of + another record.""" + self.created = other.created + self.ttl = other.ttl + + def 
write(self, out): + """Abstract method""" + raise AbstractMethodException + + def toString(self, other): + """String representation with addtional information""" + arg = "%s/%s,%s" % (self.ttl, self.getRemainingTTL(currentTimeMillis()), other) + return DNSEntry.toString(self, "record", arg) + +class DNSAddress(DNSRecord): + """A DNS address record""" + + def __init__(self, name, type, clazz, ttl, address): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.address = address + + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeString(self.address, len(self.address)) + + def __eq__(self, other): + """Tests equality on address""" + if isinstance(other, DNSAddress): + return self.address == other.address + return 0 + + def __repr__(self): + """String representation""" + try: + return socket.inet_ntoa(self.address) + except: + return self.address + +class DNSHinfo(DNSRecord): + """A DNS host information record""" + + def __init__(self, name, type, clazz, ttl, cpu, os): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.cpu = cpu + self.os = os + + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeString(self.cpu, len(self.cpu)) + out.writeString(self.os, len(self.os)) + + def __eq__(self, other): + """Tests equality on cpu and os""" + if isinstance(other, DNSHinfo): + return self.cpu == other.cpu and self.os == other.os + return 0 + + def __repr__(self): + """String representation""" + return self.cpu + " " + self.os + +class DNSPointer(DNSRecord): + """A DNS pointer record""" + + def __init__(self, name, type, clazz, ttl, alias): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.alias = alias + + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeName(self.alias) + + def __eq__(self, other): + """Tests equality on alias""" + if isinstance(other, DNSPointer): + return self.alias == other.alias + return 0 + + def __repr__(self): + """String representation""" 
+ return self.toString(self.alias) + +class DNSText(DNSRecord): + """A DNS text record""" + + def __init__(self, name, type, clazz, ttl, text): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.text = text + + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeString(self.text, len(self.text)) + + def __eq__(self, other): + """Tests equality on text""" + if isinstance(other, DNSText): + return self.text == other.text + return 0 + + def __repr__(self): + """String representation""" + if len(self.text) > 10: + return self.toString(self.text[:7] + "...") + else: + return self.toString(self.text) + +class DNSService(DNSRecord): + """A DNS service record""" + + def __init__(self, name, type, clazz, ttl, priority, weight, port, server): + DNSRecord.__init__(self, name, type, clazz, ttl) + self.priority = priority + self.weight = weight + self.port = port + self.server = server + + def write(self, out): + """Used in constructing an outgoing packet""" + out.writeShort(self.priority) + out.writeShort(self.weight) + out.writeShort(self.port) + out.writeName(self.server) + + def __eq__(self, other): + """Tests equality on priority, weight, port and server""" + if isinstance(other, DNSService): + return self.priority == other.priority and self.weight == other.weight and self.port == other.port and self.server == other.server + return 0 + + def __repr__(self): + """String representation""" + return self.toString("%s:%s" % (self.server, self.port)) + +class DNSIncoming(object): + """Object representation of an incoming DNS packet""" + + def __init__(self, data): + """Constructor from string holding bytes of packet""" + self.offset = 0 + self.data = data + self.questions = [] + self.answers = [] + self.numQuestions = 0 + self.numAnswers = 0 + self.numAuthorities = 0 + self.numAdditionals = 0 + + self.readHeader() + self.readQuestions() + self.readOthers() + + def readHeader(self): + """Reads header portion of packet""" + format = '!HHHHHH' + 
length = struct.calcsize(format) + info = struct.unpack(format, self.data[self.offset:self.offset+length]) + self.offset += length + + self.id = info[0] + self.flags = info[1] + self.numQuestions = info[2] + self.numAnswers = info[3] + self.numAuthorities = info[4] + self.numAdditionals = info[5] + + def readQuestions(self): + """Reads questions section of packet""" + format = '!HH' + length = struct.calcsize(format) + for i in range(0, self.numQuestions): + name = self.readName() + info = struct.unpack(format, self.data[self.offset:self.offset+length]) + self.offset += length + + try: + question = DNSQuestion(name, info[0], info[1]) + self.questions.append(question) + except NonLocalNameException: + pass + + def readInt(self): + """Reads an integer from the packet""" + format = '!I' + length = struct.calcsize(format) + info = struct.unpack(format, self.data[self.offset:self.offset+length]) + self.offset += length + return info[0] + + def readCharacterString(self): + """Reads a character string from the packet""" + length = ord(self.data[self.offset]) + self.offset += 1 + return self.readString(length) + + def readString(self, len): + """Reads a string of a given length from the packet""" + format = '!' 
+ str(len) + 's' + length = struct.calcsize(format) + info = struct.unpack(format, self.data[self.offset:self.offset+length]) + self.offset += length + return info[0] + + def readUnsignedShort(self): + """Reads an unsigned short from the packet""" + format = '!H' + length = struct.calcsize(format) + info = struct.unpack(format, self.data[self.offset:self.offset+length]) + self.offset += length + return info[0] + + def readOthers(self): + """Reads the answers, authorities and additionals section of the packet""" + format = '!HHiH' + length = struct.calcsize(format) + n = self.numAnswers + self.numAuthorities + self.numAdditionals + for i in range(0, n): + domain = self.readName() + info = struct.unpack(format, self.data[self.offset:self.offset+length]) + self.offset += length + + rec = None + if info[0] == _TYPE_A: + rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(4)) + elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR: + rec = DNSPointer(domain, info[0], info[1], info[2], self.readName()) + elif info[0] == _TYPE_TXT: + rec = DNSText(domain, info[0], info[1], info[2], self.readString(info[3])) + elif info[0] == _TYPE_SRV: + rec = DNSService(domain, info[0], info[1], info[2], self.readUnsignedShort(), self.readUnsignedShort(), self.readUnsignedShort(), self.readName()) + elif info[0] == _TYPE_HINFO: + rec = DNSHinfo(domain, info[0], info[1], info[2], self.readCharacterString(), self.readCharacterString()) + elif info[0] == _TYPE_AAAA: + rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(16)) + else: + # Try to ignore types we don't know about + # this may mean the rest of the name is + # unable to be parsed, and may show errors + # so this is left for debugging. New types + # encountered need to be parsed properly. 
+ # + #print "UNKNOWN TYPE = " + str(info[0]) + #raise BadTypeInNameException + self.offset += info[3] + + if rec is not None: + self.answers.append(rec) + + def isQuery(self): + """Returns true if this is a query""" + return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY + + def isResponse(self): + """Returns true if this is a response""" + return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE + + def readUTF(self, offset, len): + """Reads a UTF-8 string of a given length from the packet""" + return self.data[offset:offset+len].decode('utf-8') + + def readName(self): + """Reads a domain name from the packet""" + result = '' + off = self.offset + next = -1 + first = off + + while 1: + len = ord(self.data[off]) + off += 1 + if len == 0: + break + t = len & 0xC0 + if t == 0x00: + result = ''.join((result, self.readUTF(off, len) + '.')) + off += len + elif t == 0xC0: + if next < 0: + next = off + 1 + off = ((len & 0x3F) << 8) | ord(self.data[off]) + if off >= first: + raise BadDomainNameCircular(off) + first = off + else: + raise BadDomainName(off) + + if next >= 0: + self.offset = next + else: + self.offset = off + + return result + + +class DNSOutgoing(object): + """Object representation of an outgoing packet""" + + def __init__(self, flags, multicast = 1): + self.finished = 0 + self.id = 0 + self.multicast = multicast + self.flags = flags + self.names = {} + self.data = [] + self.size = 12 + + self.questions = [] + self.answers = [] + self.authorities = [] + self.additionals = [] + + def addQuestion(self, record): + """Adds a question""" + self.questions.append(record) + + def addAnswer(self, inp, record): + """Adds an answer""" + if not record.suppressedBy(inp): + self.addAnswerAtTime(record, 0) + + def addAnswerAtTime(self, record, now): + """Adds an answer if if does not expire by a certain time""" + if record is not None: + if now == 0 or not record.isExpired(now): + self.answers.append((record, now)) + + def addAuthorativeAnswer(self, record): + """Adds 
an authoritative answer""" + self.authorities.append(record) + + def addAdditionalAnswer(self, record): + """Adds an additional answer""" + self.additionals.append(record) + + def writeByte(self, value): + """Writes a single byte to the packet""" + format = '!c' + self.data.append(struct.pack(format, chr(value))) + self.size += 1 + + def insertShort(self, index, value): + """Inserts an unsigned short in a certain position in the packet""" + format = '!H' + self.data.insert(index, struct.pack(format, value)) + self.size += 2 + + def writeShort(self, value): + """Writes an unsigned short to the packet""" + format = '!H' + self.data.append(struct.pack(format, value)) + self.size += 2 + + def writeInt(self, value): + """Writes an unsigned integer to the packet""" + format = '!I' + self.data.append(struct.pack(format, int(value))) + self.size += 4 + + def writeString(self, value, length): + """Writes a string to the packet""" + format = '!' + str(length) + 's' + self.data.append(struct.pack(format, value)) + self.size += length + + def writeUTF(self, s): + """Writes a UTF-8 string of a given length to the packet""" + utfstr = s.encode('utf-8') + length = len(utfstr) + if length > 64: + raise NamePartTooLongException + self.writeByte(length) + self.writeString(utfstr, length) + + def writeName(self, name): + """Writes a domain name to the packet""" + + try: + # Find existing instance of this name in packet + # + index = self.names[name] + except KeyError: + # No record of this name already, so write it + # out as normal, recording the location of the name + # for future pointers to it. 
+ # + self.names[name] = self.size + parts = name.split('.') + if parts[-1] == '': + parts = parts[:-1] + for part in parts: + self.writeUTF(part) + self.writeByte(0) + return + + # An index was found, so write a pointer to it + # + self.writeByte((index >> 8) | 0xC0) + self.writeByte(index) + + def writeQuestion(self, question): + """Writes a question to the packet""" + self.writeName(question.name) + self.writeShort(question.type) + self.writeShort(question.clazz) + + def writeRecord(self, record, now): + """Writes a record (answer, authoritative answer, additional) to + the packet""" + self.writeName(record.name) + self.writeShort(record.type) + if record.unique and self.multicast: + self.writeShort(record.clazz | _CLASS_UNIQUE) + else: + self.writeShort(record.clazz) + if now == 0: + self.writeInt(record.ttl) + else: + self.writeInt(record.getRemainingTTL(now)) + index = len(self.data) + # Adjust size for the short we will write before this record + # + self.size += 2 + record.write(self) + self.size -= 2 + + length = len(''.join(self.data[index:])) + self.insertShort(index, length) # Here is the short we adjusted for + + def packet(self): + """Returns a string containing the packet's bytes + + No further parts should be added to the packet once this + is done.""" + if not self.finished: + self.finished = 1 + for question in self.questions: + self.writeQuestion(question) + for answer, time in self.answers: + self.writeRecord(answer, time) + for authority in self.authorities: + self.writeRecord(authority, 0) + for additional in self.additionals: + self.writeRecord(additional, 0) + + self.insertShort(0, len(self.additionals)) + self.insertShort(0, len(self.authorities)) + self.insertShort(0, len(self.answers)) + self.insertShort(0, len(self.questions)) + self.insertShort(0, self.flags) + if self.multicast: + self.insertShort(0, 0) + else: + self.insertShort(0, self.id) + return ''.join(self.data) + + +class DNSCache(object): + """A cache of DNS entries""" + + def 
__init__(self): + self.cache = {} + + def add(self, entry): + """Adds an entry""" + try: + list = self.cache[entry.key] + except: + list = self.cache[entry.key] = [] + list.append(entry) + + def remove(self, entry): + """Removes an entry""" + try: + list = self.cache[entry.key] + list.remove(entry) + except: + pass + + def get(self, entry): + """Gets an entry by key. Will return None if there is no + matching entry.""" + try: + list = self.cache[entry.key] + return list[list.index(entry)] + except: + return None + + def getByDetails(self, name, type, clazz): + """Gets an entry by details. Will return None if there is + no matching entry.""" + entry = DNSEntry(name, type, clazz) + return self.get(entry) + + def entriesWithName(self, name): + """Returns a list of entries whose key matches the name.""" + try: + return self.cache[name] + except: + return [] + + def entries(self): + """Returns a list of all entries""" + def add(x, y): return x+y + try: + return reduce(add, self.cache.values()) + except: + return [] + + +class Engine(threading.Thread): + """An engine wraps read access to sockets, allowing objects that + need to receive data from sockets to be called back when the + sockets are ready. + + A reader needs a handle_read() method, which is called when the socket + it is interested in is ready for reading. + + Writers are not implemented here, because we only send short + packets. 
+ """ + + def __init__(self, zeroconf): + threading.Thread.__init__(self) + self.zeroconf = zeroconf + self.readers = {} # maps socket to reader + self.timeout = 5 + self.condition = threading.Condition() + self.start() + + def run(self): + while not globals()['_GLOBAL_DONE']: + rs = self.getReaders() + if len(rs) == 0: + # No sockets to manage, but we wait for the timeout + # or addition of a socket + # + self.condition.acquire() + self.condition.wait(self.timeout) + self.condition.release() + else: + try: + rr, wr, er = select.select(rs, [], [], self.timeout) + for socket in rr: + try: + self.readers[socket].handle_read() + except: + if not globals()['_GLOBAL_DONE']: + traceback.print_exc() + except: + pass + + def getReaders(self): + self.condition.acquire() + result = self.readers.keys() + self.condition.release() + return result + + def addReader(self, reader, socket): + self.condition.acquire() + self.readers[socket] = reader + self.condition.notify() + self.condition.release() + + def delReader(self, socket): + self.condition.acquire() + del(self.readers[socket]) + self.condition.notify() + self.condition.release() + + def notify(self): + self.condition.acquire() + self.condition.notify() + self.condition.release() + +class Listener(object): + """A Listener is used by this module to listen on the multicast + group to which DNS messages are sent, allowing the implementation + to cache information as it arrives. 
+ + It requires registration with an Engine object in order to have + the read() method called when a socket is availble for reading.""" + + def __init__(self, zeroconf): + self.zeroconf = zeroconf + self.zeroconf.engine.addReader(self, self.zeroconf.socket) + + def handle_read(self): + data, (addr, port) = self.zeroconf.socket.recvfrom(_MAX_MSG_ABSOLUTE) + self.data = data + msg = DNSIncoming(data) + if msg.isQuery(): + # Always multicast responses + # + if port == _MDNS_PORT: + self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT) + # If it's not a multicast query, reply via unicast + # and multicast + # + elif port == _DNS_PORT: + self.zeroconf.handleQuery(msg, addr, port) + self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT) + else: + self.zeroconf.handleResponse(msg) + + +class Reaper(threading.Thread): + """A Reaper is used by this module to remove cache entries that + have expired.""" + + def __init__(self, zeroconf): + threading.Thread.__init__(self) + self.zeroconf = zeroconf + self.start() + + def run(self): + while 1: + self.zeroconf.wait(10 * 1000) + if globals()['_GLOBAL_DONE']: + return + now = currentTimeMillis() + for record in self.zeroconf.cache.entries(): + if record.isExpired(now): + self.zeroconf.updateRecord(now, record) + self.zeroconf.cache.remove(record) + + +class ServiceBrowser(threading.Thread): + """Used to browse for a service of a specific type. 
+ + The listener object will have its addService() and + removeService() methods called when this browser + discovers changes in the services availability.""" + + def __init__(self, zeroconf, type, listener): + """Creates a browser for a specific type""" + threading.Thread.__init__(self) + self.zeroconf = zeroconf + self.type = type + self.listener = listener + self.services = {} + self.nextTime = currentTimeMillis() + self.delay = _BROWSER_TIME + self.list = [] + + self.done = 0 + + self.zeroconf.addListener(self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)) + self.start() + + def updateRecord(self, zeroconf, now, record): + """Callback invoked by Zeroconf when new information arrives. + + Updates information required by browser in the Zeroconf cache.""" + if record.type == _TYPE_PTR and record.name == self.type: + expired = record.isExpired(now) + try: + oldrecord = self.services[record.alias.lower()] + if not expired: + oldrecord.resetTTL(record) + else: + del(self.services[record.alias.lower()]) + callback = lambda x: self.listener.removeService(x, self.type, record.alias) + self.list.append(callback) + return + except: + if not expired: + self.services[record.alias.lower()] = record + callback = lambda x: self.listener.addService(x, self.type, record.alias) + self.list.append(callback) + + expires = record.getExpirationTime(75) + if expires < self.nextTime: + self.nextTime = expires + + def cancel(self): + self.done = 1 + self.zeroconf.notifyAll() + + def run(self): + while 1: + event = None + now = currentTimeMillis() + if len(self.list) == 0 and self.nextTime > now: + self.zeroconf.wait(self.nextTime - now) + if globals()['_GLOBAL_DONE'] or self.done: + return + now = currentTimeMillis() + + if self.nextTime <= now: + out = DNSOutgoing(_FLAGS_QR_QUERY) + out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN)) + for record in self.services.values(): + if not record.isExpired(now): + out.addAnswerAtTime(record, now) + self.zeroconf.send(out) + 
self.nextTime = now + self.delay + self.delay = min(20 * 1000, self.delay * 2) + + if len(self.list) > 0: + event = self.list.pop(0) + + if event is not None: + event(self.zeroconf) + + +class ServiceInfo(object): + """Service information""" + + def __init__(self, type, name, address=None, port=None, weight=0, priority=0, properties=None, server=None): + """Create a service description. + + type: fully qualified service type name + name: fully qualified service name + address: IP address as unsigned short, network byte order + port: port that the service runs on + weight: weight of the service + priority: priority of the service + properties: dictionary of properties (or a string holding the bytes for the text field) + server: fully qualified name for service host (defaults to name)""" + + if not name.endswith(type): + raise BadTypeInNameException + self.type = type + self.name = name + self.address = address + self.port = port + self.weight = weight + self.priority = priority + if server: + self.server = server + else: + self.server = name + self.setProperties(properties) + + def setProperties(self, properties): + """Sets properties and text of this info from a dictionary""" + if isinstance(properties, dict): + self.properties = properties + list = [] + result = '' + for key in properties: + value = properties[key] + if value is None: + suffix = '' + elif isinstance(value, str): + suffix = value + elif isinstance(value, int): + if value: + suffix = 'true' + else: + suffix = 'false' + else: + suffix = '' + list.append('='.join((key, suffix))) + for item in list: + result = ''.join((result, struct.pack('!c', chr(len(item))), item)) + self.text = result + else: + self.text = properties + + def setText(self, text): + """Sets properties and text given a text field""" + self.text = text + try: + result = {} + end = len(text) + index = 0 + strs = [] + while index < end: + length = ord(text[index]) + index += 1 + strs.append(text[index:index+length]) + index += length + + 
for s in strs: + eindex = s.find('=') + if eindex == -1: + # No equals sign at all + key = s + value = 0 + else: + key = s[:eindex] + value = s[eindex+1:] + if value == 'true': + value = 1 + elif value == 'false' or not value: + value = 0 + + # Only update non-existent properties + if key and result.get(key) == None: + result[key] = value + + self.properties = result + except: + traceback.print_exc() + self.properties = None + + def getType(self): + """Type accessor""" + return self.type + + def getName(self): + """Name accessor""" + if self.type is not None and self.name.endswith("." + self.type): + return self.name[:len(self.name) - len(self.type) - 1] + return self.name + + def getAddress(self): + """Address accessor""" + return self.address + + def getPort(self): + """Port accessor""" + return self.port + + def getPriority(self): + """Pirority accessor""" + return self.priority + + def getWeight(self): + """Weight accessor""" + return self.weight + + def getProperties(self): + """Properties accessor""" + return self.properties + + def getText(self): + """Text accessor""" + return self.text + + def getServer(self): + """Server accessor""" + return self.server + + def updateRecord(self, zeroconf, now, record): + """Updates service information from a DNS record""" + if record is not None and not record.isExpired(now): + if record.type == _TYPE_A: + #if record.name == self.name: + if record.name == self.server: + self.address = record.address + elif record.type == _TYPE_SRV: + if record.name == self.name: + self.server = record.server + self.port = record.port + self.weight = record.weight + self.priority = record.priority + #self.address = None + self.updateRecord(zeroconf, now, zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN)) + elif record.type == _TYPE_TXT: + if record.name == self.name: + self.setText(record.text) + + def request(self, zeroconf, timeout): + """Returns true if the service could be discovered on the + network, and updates this object 
with details discovered. + """ + now = currentTimeMillis() + delay = _LISTENER_TIME + next = now + delay + last = now + timeout + result = 0 + try: + zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN)) + while self.server is None or self.address is None or self.text is None: + if last <= now: + return 0 + if next <= now: + out = DNSOutgoing(_FLAGS_QR_QUERY) + out.addQuestion(DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN)) + out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_SRV, _CLASS_IN), now) + out.addQuestion(DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN)) + out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_TXT, _CLASS_IN), now) + if self.server is not None: + out.addQuestion(DNSQuestion(self.server, _TYPE_A, _CLASS_IN)) + out.addAnswerAtTime(zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN), now) + zeroconf.send(out) + next = now + delay + delay = delay * 2 + + zeroconf.wait(min(next, last) - now) + now = currentTimeMillis() + result = 1 + finally: + zeroconf.removeListener(self) + + return result + + def __eq__(self, other): + """Tests equality of service name""" + if isinstance(other, ServiceInfo): + return other.name == self.name + return 0 + + def __ne__(self, other): + """Non-equality test""" + return not self.__eq__(other) + + def __repr__(self): + """String representation""" + result = "service[%s,%s:%s," % (self.name, socket.inet_ntoa(self.getAddress()), self.port) + if self.text is None: + result += "None" + else: + if len(self.text) < 20: + result += self.text + else: + result += self.text[:17] + "..." + result += "]" + return result + + +class Zeroconf(object): + """Implementation of Zeroconf Multicast DNS Service Discovery + + Supports registration, unregistration, queries and browsing. 
+ """ + def __init__(self, bindaddress=None): + """Creates an instance of the Zeroconf class, establishing + multicast communications, listening and reaping threads.""" + globals()['_GLOBAL_DONE'] = 0 + if bindaddress is None: + self.intf = socket.gethostbyname(socket.gethostname()) + else: + self.intf = bindaddress + self.group = ('', _MDNS_PORT) + self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + try: + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1) + except: + # SO_REUSEADDR should be equivalent to SO_REUSEPORT for + # multicast UDP sockets (p 731, "TCP/IP Illustrated, + # Volume 2"), but some BSD-derived systems require + # SO_REUSEPORT to be specified explicity. Also, not all + # versions of Python have SO_REUSEPORT available. So + # if you're on a BSD-based system, and haven't upgraded + # to Python 2.3 yet, you may find this library doesn't + # work as expected. + # + pass + self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, 255) + self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, 1) + try: + self.socket.bind(self.group) + except: + # Some versions of linux raise an exception even though + # the SO_REUSE* options have been set, so ignore it + # + pass + #self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.intf) + socket.inet_aton('0.0.0.0')) + self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + + self.listeners = [] + self.browsers = [] + self.services = {} + self.servicetypes = {} + + self.cache = DNSCache() + + self.condition = threading.Condition() + + self.engine = Engine(self) + self.listener = Listener(self) + self.reaper = Reaper(self) + + def isLoopback(self): + return self.intf.startswith("127.0.0.1") + + def isLinklocal(self): + return self.intf.startswith("169.254.") + + def wait(self, timeout): + """Calling thread 
waits for a given number of milliseconds or + until notified.""" + self.condition.acquire() + self.condition.wait(timeout/1000) + self.condition.release() + + def notifyAll(self): + """Notifies all waiting threads""" + self.condition.acquire() + self.condition.notifyAll() + self.condition.release() + + def getServiceInfo(self, type, name, timeout=3000): + """Returns network's service information for a particular + name and type, or None if no service matches by the timeout, + which defaults to 3 seconds.""" + info = ServiceInfo(type, name) + if info.request(self, timeout): + return info + return None + + def addServiceListener(self, type, listener): + """Adds a listener for a particular service type. This object + will then have its updateRecord method called when information + arrives for that type.""" + self.removeServiceListener(listener) + self.browsers.append(ServiceBrowser(self, type, listener)) + + def removeServiceListener(self, listener): + """Removes a listener from the set that is currently listening.""" + for browser in self.browsers: + if browser.listener == listener: + browser.cancel() + del(browser) + + def registerService(self, info, ttl=_DNS_TTL): + """Registers service information to the network with a default TTL + of 60 seconds. Zeroconf will then respond to requests for + information for that service. 
The name of the service may be + changed if needed to make it unique on the network.""" + self.checkService(info) + self.services[info.name.lower()] = info + if self.servicetypes.has_key(info.type): + self.servicetypes[info.type]+=1 + else: + self.servicetypes[info.type]=1 + now = currentTimeMillis() + nextTime = now + i = 0 + while i < 3: + if now < nextTime: + self.wait(nextTime - now) + now = currentTimeMillis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0) + out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, ttl, info.priority, info.weight, info.port, info.server), 0) + out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0) + if info.address: + out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, ttl, info.address), 0) + self.send(out) + i += 1 + nextTime += _REGISTER_TIME + + def unregisterService(self, info): + """Unregister a service.""" + try: + del(self.services[info.name.lower()]) + if self.servicetypes[info.type]>1: + self.servicetypes[info.type]-=1 + else: + del self.servicetypes[info.type] + except: + pass + now = currentTimeMillis() + nextTime = now + i = 0 + while i < 3: + if now < nextTime: + self.wait(nextTime - now) + now = currentTimeMillis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0) + out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.name), 0) + out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0) + if info.address: + out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0) + self.send(out) + i += 1 + nextTime += _UNREGISTER_TIME + + def unregisterAllServices(self): + """Unregister all registered services.""" + if len(self.services) > 0: + now = currentTimeMillis() + nextTime = 
now + i = 0 + while i < 3: + if now < nextTime: + self.wait(nextTime - now) + now = currentTimeMillis() + continue + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + for info in self.services.values(): + out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0) + out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.server), 0) + out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0) + if info.address: + out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0) + self.send(out) + i += 1 + nextTime += _UNREGISTER_TIME + + def checkService(self, info): + """Checks the network for a unique service name, modifying the + ServiceInfo passed in if it is not unique.""" + now = currentTimeMillis() + nextTime = now + i = 0 + while i < 3: + for record in self.cache.entriesWithName(info.type): + if record.type == _TYPE_PTR and not record.isExpired(now) and record.alias == info.name: + if (info.name.find('.') < 0): + info.name = info.name + ".[" + info.address + ":" + info.port + "]." + info.type + self.checkService(info) + return + raise NonUniqueNameException + if now < nextTime: + self.wait(nextTime - now) + now = currentTimeMillis() + continue + out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA) + self.debug = out + out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN)) + out.addAuthorativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name)) + self.send(out) + i += 1 + nextTime += _CHECK_TIME + + def addListener(self, listener, question): + """Adds a listener for a given question. 
The listener will have + its updateRecord method called when information is available to + answer the question.""" + now = currentTimeMillis() + self.listeners.append(listener) + if question is not None: + for record in self.cache.entriesWithName(question.name): + if question.answeredBy(record) and not record.isExpired(now): + listener.updateRecord(self, now, record) + self.notifyAll() + + def removeListener(self, listener): + """Removes a listener.""" + try: + self.listeners.remove(listener) + self.notifyAll() + except: + pass + + def updateRecord(self, now, rec): + """Used to notify listeners of new information that has updated + a record.""" + for listener in self.listeners: + listener.updateRecord(self, now, rec) + self.notifyAll() + + def handleResponse(self, msg): + """Deal with incoming response packets. All answers + are held in the cache, and listeners are notified.""" + now = currentTimeMillis() + for record in msg.answers: + expired = record.isExpired(now) + if record in self.cache.entries(): + if expired: + self.cache.remove(record) + else: + entry = self.cache.get(record) + if entry is not None: + entry.resetTTL(record) + record = entry + else: + self.cache.add(record) + + self.updateRecord(now, record) + + def handleQuery(self, msg, addr, port): + """Deal with incoming query packets. 
Provides a response if + possible.""" + out = None + + # Support unicast client responses + # + if port != _MDNS_PORT: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0) + for question in msg.questions: + out.addQuestion(question) + + for question in msg.questions: + if question.type == _TYPE_PTR: + if question.name == "_services._dns-sd._udp.local.": + for stype in self.servicetypes.keys(): + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswer(msg, DNSPointer("_services._dns-sd._udp.local.", _TYPE_PTR, _CLASS_IN, _DNS_TTL, stype)) + for service in self.services.values(): + if question.name == service.type: + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + out.addAnswer(msg, DNSPointer(service.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, service.name)) + else: + try: + if out is None: + out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA) + + # Answer A record queries for any service addresses we know + if question.type == _TYPE_A or question.type == _TYPE_ANY: + for service in self.services.values(): + if service.server == question.name.lower(): + out.addAnswer(msg, DNSAddress(question.name, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address)) + + service = self.services.get(question.name.lower(), None) + if not service: continue + + if question.type == _TYPE_SRV or question.type == _TYPE_ANY: + out.addAnswer(msg, DNSService(question.name, _TYPE_SRV, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.priority, service.weight, service.port, service.server)) + if question.type == _TYPE_TXT or question.type == _TYPE_ANY: + out.addAnswer(msg, DNSText(question.name, _TYPE_TXT, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.text)) + if question.type == _TYPE_SRV: + out.addAdditionalAnswer(DNSAddress(service.server, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address)) + except: + traceback.print_exc() + + if out is not None and out.answers: + out.id = msg.id + self.send(out, addr, port) + + def send(self, out, 
addr = _MDNS_ADDR, port = _MDNS_PORT): + """Sends an outgoing packet.""" + # This is a quick test to see if we can parse the packets we generate + #temp = DNSIncoming(out.packet()) + try: + self.socket.sendto(out.packet(), 0, (addr, port)) + except: + # Ignore this, it may be a temporary loss of network connection + pass + + def close(self): + """Ends the background threads, and prevent this instance from + servicing further queries.""" + if globals()['_GLOBAL_DONE'] == 0: + globals()['_GLOBAL_DONE'] = 1 + self.notifyAll() + self.engine.notify() + self.unregisterAllServices() + self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0')) + self.socket.close() + +# Test a few module features, including service registration, service +# query (for Zoe), and service unregistration. + +if __name__ == '__main__': + print "Multicast DNS Service Discovery for Python, version", __version__ + r = Zeroconf() + print "1. Testing registration of a service..." + desc = {'version':'0.10','a':'test value', 'b':'another value'} + info = ServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local.", socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc) + print " Registering service..." + r.registerService(info) + print " Registration done." + print "2. Testing query of service information..." + print " Getting ZOE service:", str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local.")) + print " Query done." + print "3. Testing query of own service..." + print " Getting self:", str(r.getServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local.")) + print " Query done." + print "4. Testing unregister of service information..." + r.unregisterService(info) + print " Unregister done." 
+ r.close() + +# no-check-code diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyo Binary files differnew file mode 100644 index 0000000..339a003 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py new file mode 100644 index 0000000..60185c7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py @@ -0,0 +1,173 @@ +# zeroconf.py - zeroconf support for Mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''discover and advertise repositories on the local network + +Zeroconf-enabled repositories will be announced in a network without +the need to configure a server or a service. They can be discovered +without knowing their actual IP address. 
+ +To allow other people to discover your repository using run +:hg:`serve` in your repository:: + + $ cd test + $ hg serve + +You can discover Zeroconf-enabled repositories by running +:hg:`paths`:: + + $ hg paths + zc-test = http://example.com:8000/test +''' + +import socket, time, os + +import Zeroconf +from mercurial import ui, hg, encoding, util +from mercurial import extensions +from mercurial.hgweb import hgweb_mod +from mercurial.hgweb import hgwebdir_mod + +# publish + +server = None +localip = None + +def getip(): + # finds external-facing interface without sending any packets (Linux) + try: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(('1.0.0.1', 0)) + ip = s.getsockname()[0] + return ip + except: + pass + + # Generic method, sometimes gives useless results + try: + dumbip = socket.gethostbyaddr(socket.gethostname())[2][0] + if not dumbip.startswith('127.') and ':' not in dumbip: + return dumbip + except (socket.gaierror, socket.herror): + dumbip = '127.0.0.1' + + # works elsewhere, but actually sends a packet + try: + s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + s.connect(('1.0.0.1', 1)) + ip = s.getsockname()[0] + return ip + except: + pass + + return dumbip + +def publish(name, desc, path, port): + global server, localip + if not server: + ip = getip() + if ip.startswith('127.'): + # if we have no internet connection, this can happen. 
+ return + localip = socket.inet_aton(ip) + server = Zeroconf.Zeroconf(ip) + + hostname = socket.gethostname().split('.')[0] + host = hostname + ".local" + name = "%s-%s" % (hostname, name) + + # advertise to browsers + svc = Zeroconf.ServiceInfo('_http._tcp.local.', + name + '._http._tcp.local.', + server = host, + port = port, + properties = {'description': desc, + 'path': "/" + path}, + address = localip, weight = 0, priority = 0) + server.registerService(svc) + + # advertise to Mercurial clients + svc = Zeroconf.ServiceInfo('_hg._tcp.local.', + name + '._hg._tcp.local.', + server = host, + port = port, + properties = {'description': desc, + 'path': "/" + path}, + address = localip, weight = 0, priority = 0) + server.registerService(svc) + +class hgwebzc(hgweb_mod.hgweb): + def __init__(self, repo, name=None, baseui=None): + super(hgwebzc, self).__init__(repo, name=name, baseui=baseui) + name = self.reponame or os.path.basename(self.repo.root) + path = self.repo.ui.config("web", "prefix", "").strip('/') + desc = self.repo.ui.config("web", "description", name) + publish(name, desc, path, + util.getport(self.repo.ui.config("web", "port", 8000))) + +class hgwebdirzc(hgwebdir_mod.hgwebdir): + def __init__(self, conf, baseui=None): + super(hgwebdirzc, self).__init__(conf, baseui=baseui) + prefix = self.ui.config("web", "prefix", "").strip('/') + '/' + for repo, path in self.repos: + u = self.ui.copy() + u.readconfig(os.path.join(path, '.hg', 'hgrc')) + name = os.path.basename(repo) + path = (prefix + repo).strip('/') + desc = u.config('web', 'description', name) + publish(name, desc, path, util.getport(u.config("web", "port", 8000))) + +# listen + +class listener(object): + def __init__(self): + self.found = {} + def removeService(self, server, type, name): + if repr(name) in self.found: + del self.found[repr(name)] + def addService(self, server, type, name): + self.found[repr(name)] = server.getServiceInfo(type, name) + +def getzcpaths(): + ip = getip() + if 
ip.startswith('127.'): + return + server = Zeroconf.Zeroconf(ip) + l = listener() + Zeroconf.ServiceBrowser(server, "_hg._tcp.local.", l) + time.sleep(1) + server.close() + for value in l.found.values(): + name = value.name[:value.name.index('.')] + url = "http://%s:%s%s" % (socket.inet_ntoa(value.address), value.port, + value.properties.get("path", "/")) + yield "zc-" + name, url + +def config(orig, self, section, key, default=None, untrusted=False): + if section == "paths" and key.startswith("zc-"): + for name, path in getzcpaths(): + if name == key: + return path + return orig(self, section, key, default, untrusted) + +def configitems(orig, self, section, untrusted=False): + repos = orig(self, section, untrusted) + if section == "paths": + repos += getzcpaths() + return repos + +def defaultdest(orig, source): + for name, path in getzcpaths(): + if path == source: + return name.encode(encoding.encoding) + return orig(source) + +extensions.wrapfunction(ui.ui, 'config', config) +extensions.wrapfunction(ui.ui, 'configitems', configitems) +extensions.wrapfunction(hg, 'defaultdest', defaultdest) +hgweb_mod.hgweb = hgwebzc +hgwebdir_mod.hgwebdir = hgwebdirzc diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyo Binary files differnew file mode 100644 index 0000000..82e5bf4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__init__.py new file mode 100644 index 0000000..e69de29 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__init__.py diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__init__.pyo Binary files differnew file mode 100644 index 0000000..c642cf9 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__init__.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__version__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__version__.py new file mode 100644 index 0000000..550dfb6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__version__.py @@ -0,0 +1,2 @@ +# this file is autogenerated by setup.py +version = "1.7.3" diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__version__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__version__.pyo Binary files differnew file mode 100644 index 0000000..c55de9b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/__version__.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ancestor.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ancestor.py new file mode 100644 index 0000000..52f4dc1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ancestor.py @@ -0,0 +1,88 @@ +# ancestor.py - generic DAG ancestor algorithm for mercurial +# +# Copyright 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import heapq + +def ancestor(a, b, pfunc): + """ + return a minimal-distance ancestor of nodes a and b, or None if there is no + such ancestor. Note that there can be several ancestors with the same + (minimal) distance, and the one returned is arbitrary. + + pfunc must return a list of parent vertices for a given vertex + """ + + if a == b: + return a + + a, b = sorted([a, b]) + + # find depth from root of all ancestors + parentcache = {} + visit = [a, b] + depth = {} + while visit: + vertex = visit[-1] + pl = pfunc(vertex) + parentcache[vertex] = pl + if not pl: + depth[vertex] = 0 + visit.pop() + else: + for p in pl: + if p == a or p == b: # did we find a or b as a parent? 
+ return p # we're done + if p not in depth: + visit.append(p) + if visit[-1] == vertex: + depth[vertex] = min([depth[p] for p in pl]) - 1 + visit.pop() + + # traverse ancestors in order of decreasing distance from root + def ancestors(vertex): + h = [(depth[vertex], vertex)] + seen = set() + while h: + d, n = heapq.heappop(h) + if n not in seen: + seen.add(n) + yield (d, n) + for p in parentcache[n]: + heapq.heappush(h, (depth[p], p)) + + def generations(vertex): + sg, s = None, set() + for g, v in ancestors(vertex): + if g != sg: + if sg: + yield sg, s + sg, s = g, set((v,)) + else: + s.add(v) + yield sg, s + + x = generations(a) + y = generations(b) + gx = x.next() + gy = y.next() + + # increment each ancestor list until it is closer to root than + # the other, or they match + try: + while 1: + if gx[0] == gy[0]: + for v in gx[1]: + if v in gy[1]: + return v + gy = y.next() + gx = x.next() + elif gx[0] > gy[0]: + gy = y.next() + else: + gx = x.next() + except StopIteration: + return None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ancestor.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ancestor.pyo Binary files differnew file mode 100644 index 0000000..6a7d32a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ancestor.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/archival.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/archival.py new file mode 100644 index 0000000..a2b0e93 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/archival.py @@ -0,0 +1,274 @@ +# archival.py - revision archival for mercurial +# +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +from node import hex +import cmdutil +import util +import cStringIO, os, stat, tarfile, time, zipfile +import zlib, gzip + +def tidyprefix(dest, kind, prefix): + '''choose prefix to use for names in archive. make sure prefix is + safe for consumers.''' + + if prefix: + prefix = util.normpath(prefix) + else: + if not isinstance(dest, str): + raise ValueError('dest must be string if no prefix') + prefix = os.path.basename(dest) + lower = prefix.lower() + for sfx in exts.get(kind, []): + if lower.endswith(sfx): + prefix = prefix[:-len(sfx)] + break + lpfx = os.path.normpath(util.localpath(prefix)) + prefix = util.pconvert(lpfx) + if not prefix.endswith('/'): + prefix += '/' + if prefix.startswith('../') or os.path.isabs(lpfx) or '/../' in prefix: + raise util.Abort(_('archive prefix contains illegal components')) + return prefix + +exts = { + 'tar': ['.tar'], + 'tbz2': ['.tbz2', '.tar.bz2'], + 'tgz': ['.tgz', '.tar.gz'], + 'zip': ['.zip'], + } + +def guesskind(dest): + for kind, extensions in exts.iteritems(): + if util.any(dest.endswith(ext) for ext in extensions): + return kind + return None + + +class tarit(object): + '''write archive to tar file or stream. 
can write uncompressed, + or compress with gzip or bzip2.''' + + class GzipFileWithTime(gzip.GzipFile): + + def __init__(self, *args, **kw): + timestamp = None + if 'timestamp' in kw: + timestamp = kw.pop('timestamp') + if timestamp is None: + self.timestamp = time.time() + else: + self.timestamp = timestamp + gzip.GzipFile.__init__(self, *args, **kw) + + def _write_gzip_header(self): + self.fileobj.write('\037\213') # magic header + self.fileobj.write('\010') # compression method + # Python 2.6 deprecates self.filename + fname = getattr(self, 'name', None) or self.filename + if fname and fname.endswith('.gz'): + fname = fname[:-3] + flags = 0 + if fname: + flags = gzip.FNAME + self.fileobj.write(chr(flags)) + gzip.write32u(self.fileobj, long(self.timestamp)) + self.fileobj.write('\002') + self.fileobj.write('\377') + if fname: + self.fileobj.write(fname + '\000') + + def __init__(self, dest, mtime, kind=''): + self.mtime = mtime + + def taropen(name, mode, fileobj=None): + if kind == 'gz': + mode = mode[0] + if not fileobj: + fileobj = open(name, mode + 'b') + gzfileobj = self.GzipFileWithTime(name, mode + 'b', + zlib.Z_BEST_COMPRESSION, + fileobj, timestamp=mtime) + return tarfile.TarFile.taropen(name, mode, gzfileobj) + else: + return tarfile.open(name, mode + kind, fileobj) + + if isinstance(dest, str): + self.z = taropen(dest, mode='w:') + else: + # Python 2.5-2.5.1 have a regression that requires a name arg + self.z = taropen(name='', mode='w|', fileobj=dest) + + def addfile(self, name, mode, islink, data): + i = tarfile.TarInfo(name) + i.mtime = self.mtime + i.size = len(data) + if islink: + i.type = tarfile.SYMTYPE + i.mode = 0777 + i.linkname = data + data = None + i.size = 0 + else: + i.mode = mode + data = cStringIO.StringIO(data) + self.z.addfile(i, data) + + def done(self): + self.z.close() + +class tellable(object): + '''provide tell method for zipfile.ZipFile when writing to http + response file object.''' + + def __init__(self, fp): + self.fp = fp + 
self.offset = 0 + + def __getattr__(self, key): + return getattr(self.fp, key) + + def write(self, s): + self.fp.write(s) + self.offset += len(s) + + def tell(self): + return self.offset + +class zipit(object): + '''write archive to zip file or stream. can write uncompressed, + or compressed with deflate.''' + + def __init__(self, dest, mtime, compress=True): + if not isinstance(dest, str): + try: + dest.tell() + except (AttributeError, IOError): + dest = tellable(dest) + self.z = zipfile.ZipFile(dest, 'w', + compress and zipfile.ZIP_DEFLATED or + zipfile.ZIP_STORED) + + # Python's zipfile module emits deprecation warnings if we try + # to store files with a date before 1980. + epoch = 315532800 # calendar.timegm((1980, 1, 1, 0, 0, 0, 1, 1, 0)) + if mtime < epoch: + mtime = epoch + + self.date_time = time.gmtime(mtime)[:6] + + def addfile(self, name, mode, islink, data): + i = zipfile.ZipInfo(name, self.date_time) + i.compress_type = self.z.compression + # unzip will not honor unix file modes unless file creator is + # set to unix (id 3). 
+ i.create_system = 3 + ftype = stat.S_IFREG + if islink: + mode = 0777 + ftype = stat.S_IFLNK + i.external_attr = (mode | ftype) << 16L + self.z.writestr(i, data) + + def done(self): + self.z.close() + +class fileit(object): + '''write archive as files in directory.''' + + def __init__(self, name, mtime): + self.basedir = name + self.opener = util.opener(self.basedir) + + def addfile(self, name, mode, islink, data): + if islink: + self.opener.symlink(data, name) + return + f = self.opener(name, "w", atomictemp=True) + f.write(data) + f.rename() + destfile = os.path.join(self.basedir, name) + os.chmod(destfile, mode) + + def done(self): + pass + +archivers = { + 'files': fileit, + 'tar': tarit, + 'tbz2': lambda name, mtime: tarit(name, mtime, 'bz2'), + 'tgz': lambda name, mtime: tarit(name, mtime, 'gz'), + 'uzip': lambda name, mtime: zipit(name, mtime, False), + 'zip': zipit, + } + +def archive(repo, dest, node, kind, decode=True, matchfn=None, + prefix=None, mtime=None, subrepos=False): + '''create archive of repo as it was at node. + + dest can be name of directory, name of archive file, or file + object to write archive to. + + kind is type of archive to create. + + decode tells whether to put files through decode filters from + hgrc. + + matchfn is function to filter names of files to write to archive. 
+ + prefix is name of path to put before every archive member.''' + + if kind == 'files': + if prefix: + raise util.Abort(_('cannot give prefix when archiving to files')) + else: + prefix = tidyprefix(dest, kind, prefix) + + def write(name, mode, islink, getdata): + if matchfn and not matchfn(name): + return + data = getdata() + if decode: + data = repo.wwritedata(name, data) + archiver.addfile(prefix + name, mode, islink, data) + + if kind not in archivers: + raise util.Abort(_("unknown archive type '%s'") % kind) + + ctx = repo[node] + archiver = archivers[kind](dest, mtime or ctx.date()[0]) + + if repo.ui.configbool("ui", "archivemeta", True): + def metadata(): + base = 'repo: %s\nnode: %s\nbranch: %s\n' % ( + repo[0].hex(), hex(node), ctx.branch()) + + tags = ''.join('tag: %s\n' % t for t in ctx.tags() + if repo.tagtype(t) == 'global') + if not tags: + repo.ui.pushbuffer() + opts = {'template': '{latesttag}\n{latesttagdistance}', + 'style': '', 'patch': None, 'git': None} + cmdutil.show_changeset(repo.ui, repo, opts).show(ctx) + ltags, dist = repo.ui.popbuffer().split('\n') + tags = ''.join('latesttag: %s\n' % t for t in ltags.split(':')) + tags += 'latesttagdistance: %s\n' % dist + + return base + tags + + write('.hg_archival.txt', 0644, False, metadata) + + for f in ctx: + ff = ctx.flags(f) + write(f, 'x' in ff and 0755 or 0644, 'l' in ff, ctx[f].data) + + if subrepos: + for subpath in ctx.substate: + sub = ctx.sub(subpath) + sub.archive(archiver, prefix) + + archiver.done() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/archival.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/archival.pyo Binary files differnew file mode 100644 index 0000000..49099a1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/archival.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.py new file mode 100644 index 0000000..dfdfe4f --- 
/dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.py @@ -0,0 +1,7 @@ +def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__,'base85.so') + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) +__bootstrap__() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.pyo Binary files differnew file mode 100644 index 0000000..f58ee56 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.so b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.so Binary files differnew file mode 100755 index 0000000..5de5836 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/base85.so diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.py new file mode 100644 index 0000000..7cabf35 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.py @@ -0,0 +1,7 @@ +def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__,'bdiff.so') + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) +__bootstrap__() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.pyo Binary files differnew file mode 100644 index 0000000..5074db0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.so b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.so Binary files differnew file mode 100755 index 
0000000..45f02c9 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bdiff.so diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bundlerepo.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bundlerepo.py new file mode 100644 index 0000000..db4e0f3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bundlerepo.py @@ -0,0 +1,324 @@ +# bundlerepo.py - repository class for viewing uncompressed bundles +# +# Copyright 2006, 2007 Benoit Boissinot <bboissin@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""Repository class for viewing uncompressed bundles. + +This provides a read-only repository interface to bundles as if they +were part of the actual repository. +""" + +from node import nullid +from i18n import _ +import os, struct, tempfile, shutil +import changegroup, util, mdiff, discovery +import localrepo, changelog, manifest, filelog, revlog, error + +class bundlerevlog(revlog.revlog): + def __init__(self, opener, indexfile, bundle, + linkmapper=None): + # How it works: + # to retrieve a revision, we need to know the offset of + # the revision in the bundle (an unbundle object). + # + # We store this offset in the index (start), to differentiate a + # rev in the bundle and from a rev in the revlog, we check + # len(index[r]). 
If the tuple is bigger than 7, it is a bundle + # (it is bigger since we store the node to which the delta is) + # + revlog.revlog.__init__(self, opener, indexfile) + self.bundle = bundle + self.basemap = {} + def chunkpositer(): + while 1: + chunk = bundle.chunk() + if not chunk: + break + pos = bundle.tell() + yield chunk, pos - len(chunk) + n = len(self) + prev = None + for chunk, start in chunkpositer(): + size = len(chunk) + if size < 80: + raise util.Abort(_("invalid changegroup")) + start += 80 + size -= 80 + node, p1, p2, cs = struct.unpack("20s20s20s20s", chunk[:80]) + if node in self.nodemap: + prev = node + continue + for p in (p1, p2): + if not p in self.nodemap: + raise error.LookupError(p, self.indexfile, + _("unknown parent")) + if linkmapper is None: + link = n + else: + link = linkmapper(cs) + + if not prev: + prev = p1 + # start, size, full unc. size, base (unused), link, p1, p2, node + e = (revlog.offset_type(start, 0), size, -1, -1, link, + self.rev(p1), self.rev(p2), node) + self.basemap[n] = prev + self.index.insert(-1, e) + self.nodemap[node] = n + prev = node + n += 1 + + def inbundle(self, rev): + """is rev from the bundle""" + if rev < 0: + return False + return rev in self.basemap + def bundlebase(self, rev): + return self.basemap[rev] + def _chunk(self, rev): + # Warning: in case of bundle, the diff is against bundlebase, + # not against rev - 1 + # XXX: could use some caching + if not self.inbundle(rev): + return revlog.revlog._chunk(self, rev) + self.bundle.seek(self.start(rev)) + return self.bundle.read(self.length(rev)) + + def revdiff(self, rev1, rev2): + """return or calculate a delta between two revisions""" + if self.inbundle(rev1) and self.inbundle(rev2): + # hot path for bundle + revb = self.rev(self.bundlebase(rev2)) + if revb == rev1: + return self._chunk(rev2) + elif not self.inbundle(rev1) and not self.inbundle(rev2): + return revlog.revlog.revdiff(self, rev1, rev2) + + return mdiff.textdiff(self.revision(self.node(rev1)), 
+ self.revision(self.node(rev2))) + + def revision(self, node): + """return an uncompressed revision of a given""" + if node == nullid: + return "" + + text = None + chain = [] + iter_node = node + rev = self.rev(iter_node) + # reconstruct the revision if it is from a changegroup + while self.inbundle(rev): + if self._cache and self._cache[0] == iter_node: + text = self._cache[2] + break + chain.append(rev) + iter_node = self.bundlebase(rev) + rev = self.rev(iter_node) + if text is None: + text = revlog.revlog.revision(self, iter_node) + + while chain: + delta = self._chunk(chain.pop()) + text = mdiff.patches(text, [delta]) + + p1, p2 = self.parents(node) + if node != revlog.hash(text, p1, p2): + raise error.RevlogError(_("integrity check failed on %s:%d") + % (self.datafile, self.rev(node))) + + self._cache = (node, self.rev(node), text) + return text + + def addrevision(self, text, transaction, link, p1=None, p2=None, d=None): + raise NotImplementedError + def addgroup(self, revs, linkmapper, transaction): + raise NotImplementedError + def strip(self, rev, minlink): + raise NotImplementedError + def checksize(self): + raise NotImplementedError + +class bundlechangelog(bundlerevlog, changelog.changelog): + def __init__(self, opener, bundle): + changelog.changelog.__init__(self, opener) + bundlerevlog.__init__(self, opener, self.indexfile, bundle) + +class bundlemanifest(bundlerevlog, manifest.manifest): + def __init__(self, opener, bundle, linkmapper): + manifest.manifest.__init__(self, opener) + bundlerevlog.__init__(self, opener, self.indexfile, bundle, + linkmapper) + +class bundlefilelog(bundlerevlog, filelog.filelog): + def __init__(self, opener, path, bundle, linkmapper): + filelog.filelog.__init__(self, opener, path) + bundlerevlog.__init__(self, opener, self.indexfile, bundle, + linkmapper) + +class bundlerepository(localrepo.localrepository): + def __init__(self, ui, path, bundlename): + self._tempparent = None + try: + 
localrepo.localrepository.__init__(self, ui, path) + except error.RepoError: + self._tempparent = tempfile.mkdtemp() + localrepo.instance(ui, self._tempparent, 1) + localrepo.localrepository.__init__(self, ui, self._tempparent) + + if path: + self._url = 'bundle:' + util.expandpath(path) + '+' + bundlename + else: + self._url = 'bundle:' + bundlename + + self.tempfile = None + f = open(bundlename, "rb") + self.bundle = changegroup.readbundle(f, bundlename) + if self.bundle.compressed(): + fdtemp, temp = tempfile.mkstemp(prefix="hg-bundle-", + suffix=".hg10un", dir=self.path) + self.tempfile = temp + fptemp = os.fdopen(fdtemp, 'wb') + + try: + fptemp.write("HG10UN") + while 1: + chunk = self.bundle.read(2**18) + if not chunk: + break + fptemp.write(chunk) + finally: + fptemp.close() + + f = open(self.tempfile, "rb") + self.bundle = changegroup.readbundle(f, bundlename) + + # dict with the mapping 'filename' -> position in the bundle + self.bundlefilespos = {} + + @util.propertycache + def changelog(self): + c = bundlechangelog(self.sopener, self.bundle) + self.manstart = self.bundle.tell() + return c + + @util.propertycache + def manifest(self): + self.bundle.seek(self.manstart) + m = bundlemanifest(self.sopener, self.bundle, self.changelog.rev) + self.filestart = self.bundle.tell() + return m + + @util.propertycache + def manstart(self): + self.changelog + return self.manstart + + @util.propertycache + def filestart(self): + self.manifest + return self.filestart + + def url(self): + return self._url + + def file(self, f): + if not self.bundlefilespos: + self.bundle.seek(self.filestart) + while 1: + chunk = self.bundle.chunk() + if not chunk: + break + self.bundlefilespos[chunk] = self.bundle.tell() + while 1: + c = self.bundle.chunk() + if not c: + break + + if f[0] == '/': + f = f[1:] + if f in self.bundlefilespos: + self.bundle.seek(self.bundlefilespos[f]) + return bundlefilelog(self.sopener, f, self.bundle, + self.changelog.rev) + else: + return 
filelog.filelog(self.sopener, f) + + def close(self): + """Close assigned bundle file immediately.""" + self.bundle.close() + if self.tempfile is not None: + os.unlink(self.tempfile) + + def __del__(self): + del self.bundle + if self.tempfile is not None: + os.unlink(self.tempfile) + if self._tempparent: + shutil.rmtree(self._tempparent, True) + + def cancopy(self): + return False + + def getcwd(self): + return os.getcwd() # always outside the repo + +def instance(ui, path, create): + if create: + raise util.Abort(_('cannot create new bundle repository')) + parentpath = ui.config("bundle", "mainreporoot", "") + if parentpath: + # Try to make the full path relative so we get a nice, short URL. + # In particular, we don't want temp dir names in test outputs. + cwd = os.getcwd() + if parentpath == cwd: + parentpath = '' + else: + cwd = os.path.join(cwd,'') + if parentpath.startswith(cwd): + parentpath = parentpath[len(cwd):] + path = util.drop_scheme('file', path) + if path.startswith('bundle:'): + path = util.drop_scheme('bundle', path) + s = path.split("+", 1) + if len(s) == 1: + repopath, bundlename = parentpath, s[0] + else: + repopath, bundlename = s + else: + repopath, bundlename = parentpath, path + return bundlerepository(ui, repopath, bundlename) + +def getremotechanges(ui, repo, other, revs=None, bundlename=None, force=False): + tmp = discovery.findcommonincoming(repo, other, heads=revs, force=force) + common, incoming, rheads = tmp + if not incoming: + try: + os.unlink(bundlename) + except: + pass + return other, None, None + + bundle = None + if bundlename or not other.local(): + # create a bundle (uncompressed if other repo is not local) + + if revs is None and other.capable('changegroupsubset'): + revs = rheads + + if revs is None: + cg = other.changegroup(incoming, "incoming") + else: + cg = other.changegroupsubset(incoming, revs, 'incoming') + bundletype = other.local() and "HG10BZ" or "HG10UN" + fname = bundle = changegroup.writebundle(cg, bundlename, 
bundletype) + # keep written bundle? + if bundlename: + bundle = None + if not other.local(): + # use the created uncompressed bundlerepo + other = bundlerepository(ui, repo.root, fname) + return (other, incoming, bundle) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bundlerepo.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bundlerepo.pyo Binary files differnew file mode 100644 index 0000000..a92f9b3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/bundlerepo.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/byterange.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/byterange.py new file mode 100644 index 0000000..f8fb3f6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/byterange.py @@ -0,0 +1,466 @@ +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. +# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the +# Free Software Foundation, Inc., +# 59 Temple Place, Suite 330, +# Boston, MA 02111-1307 USA + +# This file is part of urlgrabber, a high-level cross-protocol url-grabber +# Copyright 2002-2004 Michael D. 
Stenner, Ryan Tomayko

# $Id: byterange.py,v 1.9 2005/02/14 21:55:07 mstenner Exp $

import os
import stat
import urllib
import urllib2
import email.Utils

class RangeError(IOError):
    """Error raised when an unsatisfiable range is requested."""
    pass

class HTTPRangeHandler(urllib2.BaseHandler):
    """Handler that enables HTTP Range headers.

    This was extremely simple. The Range header is a HTTP feature to
    begin with so all this class does is tell urllib2 that the
    "206 Partial Content" response from the HTTP server is what we
    expected.

    Example:
        import urllib2
        import byterange

        range_handler = byterange.HTTPRangeHandler()
        opener = urllib2.build_opener(range_handler)

        # install it
        urllib2.install_opener(opener)

        # create Request and set Range header
        req = urllib2.Request('http://www.python.org/')
        req.header['Range'] = 'bytes=30-50'
        f = urllib2.urlopen(req)
    """

    def http_error_206(self, req, fp, code, msg, hdrs):
        # 206 Partial Content Response: wrap it so it looks like an
        # ordinary successful urlopen() result instead of an error.
        r = urllib.addinfourl(fp, hdrs, req.get_full_url())
        r.code = code
        r.msg = msg
        return r

    def http_error_416(self, req, fp, code, msg, hdrs):
        # HTTP's Range Not Satisfiable error: surface it as our own
        # RangeError rather than urllib2's generic HTTPError.
        raise RangeError('Requested Range Not Satisfiable')

class RangeableFileObject:
    """File object wrapper to enable raw range handling.
    This was implemented primarily for handling range
    specifications for file:// urls. This object effectively makes
    a file object look like it consists only of a range of bytes in
    the stream.

    Examples:
        # expose 10 bytes, starting at byte position 20, from
        # /etc/aliases.
        >>> fo = RangeableFileObject(file('/etc/passwd', 'r'), (20,30))
        # seek seeks within the range (to position 23 in this case)
        >>> fo.seek(3)
        # tell tells where you are _within the range_ (position 3 in
        # this case)
        >>> fo.tell()
        # read EOFs if an attempt is made to read past the last
        # byte in the range. the following will return only 7 bytes.
+ >>> fo.read(30) + """ + + def __init__(self, fo, rangetup): + """Create a RangeableFileObject. + fo -- a file like object. only the read() method need be + supported but supporting an optimized seek() is + preferable. + rangetup -- a (firstbyte,lastbyte) tuple specifying the range + to work over. + The file object provided is assumed to be at byte offset 0. + """ + self.fo = fo + (self.firstbyte, self.lastbyte) = range_tuple_normalize(rangetup) + self.realpos = 0 + self._do_seek(self.firstbyte) + + def __getattr__(self, name): + """This effectively allows us to wrap at the instance level. + Any attribute not found in _this_ object will be searched for + in self.fo. This includes methods.""" + if hasattr(self.fo, name): + return getattr(self.fo, name) + raise AttributeError(name) + + def tell(self): + """Return the position within the range. + This is different from fo.seek in that position 0 is the + first byte position of the range tuple. For example, if + this object was created with a range tuple of (500,899), + tell() will return 0 when at byte position 500 of the file. + """ + return (self.realpos - self.firstbyte) + + def seek(self, offset, whence=0): + """Seek within the byte range. + Positioning is identical to that described under tell(). + """ + assert whence in (0, 1, 2) + if whence == 0: # absolute seek + realoffset = self.firstbyte + offset + elif whence == 1: # relative seek + realoffset = self.realpos + offset + elif whence == 2: # absolute from end of file + # XXX: are we raising the right Error here? + raise IOError('seek from end of file not supported.') + + # do not allow seek past lastbyte in range + if self.lastbyte and (realoffset >= self.lastbyte): + realoffset = self.lastbyte + + self._do_seek(realoffset - self.realpos) + + def read(self, size=-1): + """Read within the range. + This method will limit the size read based on the range. 
        """
        size = self._calc_read_size(size)
        rslt = self.fo.read(size)
        # advance our position in the underlying file by what was
        # actually read (may be shorter than requested near EOF)
        self.realpos += len(rslt)
        return rslt

    def readline(self, size=-1):
        """Read lines within the range.
        This method will limit the size read based on the range.
        """
        size = self._calc_read_size(size)
        rslt = self.fo.readline(size)
        self.realpos += len(rslt)
        return rslt

    def _calc_read_size(self, size):
        """Handles calculating the amount of data to read based on
        the range.
        """
        if self.lastbyte:
            if size > -1:
                # bounded read: clamp so it cannot run past lastbyte
                if ((self.realpos + size) >= self.lastbyte):
                    size = (self.lastbyte - self.realpos)
            else:
                # unbounded read: read exactly to the end of the range
                size = (self.lastbyte - self.realpos)
        return size

    def _do_seek(self, offset):
        """Seek based on whether wrapped object supports seek().
        offset is relative to the current position (self.realpos).
        """
        assert offset >= 0
        if not hasattr(self.fo, 'seek'):
            # wrapped object cannot seek: emulate by reading forward
            self._poor_mans_seek(offset)
        else:
            self.fo.seek(self.realpos + offset)
        self.realpos += offset

    def _poor_mans_seek(self, offset):
        """Seek by calling the wrapped file objects read() method.
        This is used for file like objects that do not have native
        seek support. The wrapped objects read() method is called
        to manually seek to the desired position.
        offset -- read this number of bytes from the wrapped
        file object.
        raise RangeError if we encounter EOF before reaching the
        specified offset.
        """
        pos = 0
        bufsize = 1024
        while pos < offset:
            # shrink the final read so we stop exactly at the offset
            if (pos + bufsize) > offset:
                bufsize = offset - pos
            buf = self.fo.read(bufsize)
            if len(buf) != bufsize:
                raise RangeError('Requested Range Not Satisfiable')
            pos += bufsize

class FileRangeHandler(urllib2.FileHandler):
    """FileHandler subclass that adds Range support.
    This class handles Range headers exactly like an HTTP
    server would.
+ """ + def open_local_file(self, req): + import mimetypes + import email + host = req.get_host() + file = req.get_selector() + localfile = urllib.url2pathname(file) + stats = os.stat(localfile) + size = stats[stat.ST_SIZE] + modified = email.Utils.formatdate(stats[stat.ST_MTIME]) + mtype = mimetypes.guess_type(file)[0] + if host: + host, port = urllib.splitport(host) + if port or socket.gethostbyname(host) not in self.get_names(): + raise urllib2.URLError('file not on local host') + fo = open(localfile,'rb') + brange = req.headers.get('Range', None) + brange = range_header_to_tuple(brange) + assert brange != () + if brange: + (fb, lb) = brange + if lb == '': + lb = size + if fb < 0 or fb > size or lb > size: + raise RangeError('Requested Range Not Satisfiable') + size = (lb - fb) + fo = RangeableFileObject(fo, (fb, lb)) + headers = email.message_from_string( + 'Content-Type: %s\nContent-Length: %d\nLast-Modified: %s\n' % + (mtype or 'text/plain', size, modified)) + return urllib.addinfourl(fo, headers, 'file:'+file) + + +# FTP Range Support +# Unfortunately, a large amount of base FTP code had to be copied +# from urllib and urllib2 in order to insert the FTP REST command. 
+# Code modifications for range support have been commented as +# follows: +# -- range support modifications start/end here + +from urllib import splitport, splituser, splitpasswd, splitattr, \ + unquote, addclosehook, addinfourl +import ftplib +import socket +import sys +import mimetypes +import email + +class FTPRangeHandler(urllib2.FTPHandler): + def ftp_open(self, req): + host = req.get_host() + if not host: + raise IOError('ftp error', 'no host given') + host, port = splitport(host) + if port is None: + port = ftplib.FTP_PORT + else: + port = int(port) + + # username/password handling + user, host = splituser(host) + if user: + user, passwd = splitpasswd(user) + else: + passwd = None + host = unquote(host) + user = unquote(user or '') + passwd = unquote(passwd or '') + + try: + host = socket.gethostbyname(host) + except socket.error, msg: + raise urllib2.URLError(msg) + path, attrs = splitattr(req.get_selector()) + dirs = path.split('/') + dirs = map(unquote, dirs) + dirs, file = dirs[:-1], dirs[-1] + if dirs and not dirs[0]: + dirs = dirs[1:] + try: + fw = self.connect_ftp(user, passwd, host, port, dirs) + type = file and 'I' or 'D' + for attr in attrs: + attr, value = splitattr(attr) + if attr.lower() == 'type' and \ + value in ('a', 'A', 'i', 'I', 'd', 'D'): + type = value.upper() + + # -- range support modifications start here + rest = None + range_tup = range_header_to_tuple(req.headers.get('Range', None)) + assert range_tup != () + if range_tup: + (fb, lb) = range_tup + if fb > 0: + rest = fb + # -- range support modifications end here + + fp, retrlen = fw.retrfile(file, type, rest) + + # -- range support modifications start here + if range_tup: + (fb, lb) = range_tup + if lb == '': + if retrlen is None or retrlen == 0: + raise RangeError('Requested Range Not Satisfiable due' + ' to unobtainable file length.') + lb = retrlen + retrlen = lb - fb + if retrlen < 0: + # beginning of range is larger than file + raise RangeError('Requested Range Not 
Satisfiable') + else: + retrlen = lb - fb + fp = RangeableFileObject(fp, (0, retrlen)) + # -- range support modifications end here + + headers = "" + mtype = mimetypes.guess_type(req.get_full_url())[0] + if mtype: + headers += "Content-Type: %s\n" % mtype + if retrlen is not None and retrlen >= 0: + headers += "Content-Length: %d\n" % retrlen + headers = email.message_from_string(headers) + return addinfourl(fp, headers, req.get_full_url()) + except ftplib.all_errors, msg: + raise IOError('ftp error', msg), sys.exc_info()[2] + + def connect_ftp(self, user, passwd, host, port, dirs): + fw = ftpwrapper(user, passwd, host, port, dirs) + return fw + +class ftpwrapper(urllib.ftpwrapper): + # range support note: + # this ftpwrapper code is copied directly from + # urllib. The only enhancement is to add the rest + # argument and pass it on to ftp.ntransfercmd + def retrfile(self, file, type, rest=None): + self.endtransfer() + if type in ('d', 'D'): + cmd = 'TYPE A' + isdir = 1 + else: + cmd = 'TYPE ' + type + isdir = 0 + try: + self.ftp.voidcmd(cmd) + except ftplib.all_errors: + self.init() + self.ftp.voidcmd(cmd) + conn = None + if file and not isdir: + # Use nlst to see if the file exists at all + try: + self.ftp.nlst(file) + except ftplib.error_perm, reason: + raise IOError('ftp error', reason), sys.exc_info()[2] + # Restore the transfer mode! + self.ftp.voidcmd(cmd) + # Try to retrieve as a file + try: + cmd = 'RETR ' + file + conn = self.ftp.ntransfercmd(cmd, rest) + except ftplib.error_perm, reason: + if str(reason).startswith('501'): + # workaround for REST not supported error + fp, retrlen = self.retrfile(file, type) + fp = RangeableFileObject(fp, (rest,'')) + return (fp, retrlen) + elif not str(reason).startswith('550'): + raise IOError('ftp error', reason), sys.exc_info()[2] + if not conn: + # Set transfer mode to ASCII! 
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing
            if file:
                cmd = 'LIST ' + file
            else:
                cmd = 'LIST'
            conn = self.ftp.ntransfercmd(cmd)
        self.busy = 1
        # Pass back both a suitably decorated object and a retrieval length
        return (addclosehook(conn[0].makefile('rb'),
                self.endtransfer), conn[1])


####################################################################
# Range Tuple Functions
# XXX: These range tuple functions might go better in a class.

# compiled lazily on first use by range_header_to_tuple()
_rangere = None
def range_header_to_tuple(range_header):
    """Get a (firstbyte,lastbyte) tuple from a Range header value.

    Range headers have the form "bytes=<firstbyte>-<lastbyte>". This
    function pulls the firstbyte and lastbyte values and returns
    a (firstbyte,lastbyte) tuple. If lastbyte is not specified in
    the header value, it is returned as an empty string in the
    tuple.

    Return None if range_header is None
    Return () if range_header does not conform to the range spec
    pattern.

    """
    global _rangere
    if range_header is None:
        return None
    if _rangere is None:
        import re
        _rangere = re.compile(r'^bytes=(\d{1,})-(\d*)')
    match = _rangere.match(range_header)
    if match:
        tup = range_tuple_normalize(match.group(1, 2))
        # HTTP's lastbyte is inclusive; the internal tuple uses an
        # exclusive upper bound, hence the +1 here (and the mirrored
        # -1 in range_tuple_to_header)
        if tup and tup[1]:
            tup = (tup[0], tup[1]+1)
        return tup
    return ()

def range_tuple_to_header(range_tup):
    """Convert a range tuple to a Range header value.
    Return a string of the form "bytes=<firstbyte>-<lastbyte>" or None
    if no range is needed.
    """
    if range_tup is None:
        return None
    range_tup = range_tuple_normalize(range_tup)
    if range_tup:
        if range_tup[1]:
            # convert back to HTTP's inclusive last-byte convention
            range_tup = (range_tup[0], range_tup[1] - 1)
        return 'bytes=%s-%s' % range_tup

def range_tuple_normalize(range_tup):
    """Normalize a (first_byte,last_byte) range tuple.
    Return a tuple whose first element is guaranteed to be an int
    and whose second element will be '' (meaning: the last byte) or
    an int.
Finally, return None if the normalized tuple == (0,'') + as that is equivelant to retrieving the entire file. + """ + if range_tup is None: + return None + # handle first byte + fb = range_tup[0] + if fb in (None, ''): + fb = 0 + else: + fb = int(fb) + # handle last byte + try: + lb = range_tup[1] + except IndexError: + lb = '' + else: + if lb is None: + lb = '' + elif lb != '': + lb = int(lb) + # check if range is over the entire file + if (fb, lb) == (0, ''): + return None + # check that the range is valid + if lb < fb: + raise RangeError('Invalid byte range: %s-%s' % (fb, lb)) + return (fb, lb) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/byterange.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/byterange.pyo Binary files differnew file mode 100644 index 0000000..4f887ba --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/byterange.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changegroup.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changegroup.py new file mode 100644 index 0000000..c509cf6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changegroup.py @@ -0,0 +1,204 @@ +# changegroup.py - Mercurial changegroup manipulation functions +# +# Copyright 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 

from i18n import _
import util
import struct, os, bz2, zlib, tempfile

def getchunk(source):
    """return the next chunk from changegroup 'source' as a string"""
    # each chunk is prefixed by a big-endian 32-bit length which
    # counts the 4 length bytes themselves
    d = source.read(4)
    if not d:
        return ""
    l = struct.unpack(">l", d)[0]
    if l <= 4:
        # a header of 0 (or any value <= 4) marks the end of a group
        return ""
    d = source.read(l - 4)
    if len(d) < l - 4:
        raise util.Abort(_("premature EOF reading chunk"
                           " (got %d bytes, expected %d)")
                         % (len(d), l - 4))
    return d

def chunkheader(length):
    """return a changegroup chunk header (string)"""
    # the on-wire length includes the 4 header bytes as well
    return struct.pack(">l", length + 4)

def closechunk():
    """return a changegroup chunk header (string) for a zero-length chunk"""
    return struct.pack(">l", 0)

class nocompress(object):
    # same interface as the zlib/bz2 compressor objects, but passes
    # data through unchanged; used for uncompressed bundles
    def compress(self, x):
        return x
    def flush(self):
        return ""

# maps bundle type name -> (on-disk header, compressor factory).
# note: "HG10BZ" writes only "HG10" as its header; the bz2 stream
# itself begins with "BZ", which readbundle() then reads back as the
# 2-byte compression marker (see the matching zd.decompress("BZ") in
# the BZ decompressor)
bundletypes = {
    "": ("", nocompress),
    "HG10UN": ("HG10UN", nocompress),
    "HG10BZ": ("HG10", lambda: bz2.BZ2Compressor()),
    "HG10GZ": ("HG10GZ", lambda: zlib.compressobj()),
}

def collector(cl, mmfs, files):
    # Gather information about changeset nodes going out in a bundle.
    # We want to gather manifests needed and filelogs affected.
    def collect(node):
        c = cl.read(node)
        # NOTE(review): assumes changelog.read() returns a tuple with
        # the manifest node at index 0 and the file list at index 3 --
        # confirm against changelog.py
        files.update(c[3])
        mmfs.setdefault(c[0], node)
    return collect

# hgweb uses this list to communicate its preferred type
bundlepriority = ['HG10GZ', 'HG10BZ', 'HG10UN']

def writebundle(cg, filename, bundletype):
    """Write a bundle file and return its filename.

    Existing files will not be overwritten.
    If no filename is specified, a temporary file is created.
    bz2 compression can be turned off.
    The bundle file will be deleted in case of errors.
+ """ + + fh = None + cleanup = None + try: + if filename: + fh = open(filename, "wb") + else: + fd, filename = tempfile.mkstemp(prefix="hg-bundle-", suffix=".hg") + fh = os.fdopen(fd, "wb") + cleanup = filename + + header, compressor = bundletypes[bundletype] + fh.write(header) + z = compressor() + + # parse the changegroup data, otherwise we will block + # in case of sshrepo because we don't know the end of the stream + + # an empty chunkgroup is the end of the changegroup + # a changegroup has at least 2 chunkgroups (changelog and manifest). + # after that, an empty chunkgroup is the end of the changegroup + empty = False + count = 0 + while not empty or count <= 2: + empty = True + count += 1 + while 1: + chunk = getchunk(cg) + if not chunk: + break + empty = False + fh.write(z.compress(chunkheader(len(chunk)))) + pos = 0 + while pos < len(chunk): + next = pos + 2**20 + fh.write(z.compress(chunk[pos:next])) + pos = next + fh.write(z.compress(closechunk())) + fh.write(z.flush()) + cleanup = None + return filename + finally: + if fh is not None: + fh.close() + if cleanup is not None: + os.unlink(cleanup) + +def decompressor(fh, alg): + if alg == 'UN': + return fh + elif alg == 'GZ': + def generator(f): + zd = zlib.decompressobj() + for chunk in f: + yield zd.decompress(chunk) + elif alg == 'BZ': + def generator(f): + zd = bz2.BZ2Decompressor() + zd.decompress("BZ") + for chunk in util.filechunkiter(f, 4096): + yield zd.decompress(chunk) + else: + raise util.Abort("unknown bundle compression '%s'" % alg) + return util.chunkbuffer(generator(fh)) + +class unbundle10(object): + def __init__(self, fh, alg): + self._stream = decompressor(fh, alg) + self._type = alg + self.callback = None + def compressed(self): + return self._type != 'UN' + def read(self, l): + return self._stream.read(l) + def seek(self, pos): + return self._stream.seek(pos) + def tell(self): + return self._stream.tell() + def close(self): + return self._stream.close() + + def chunklength(self): + d = 
self.read(4) + if not d: + return 0 + l = max(0, struct.unpack(">l", d)[0] - 4) + if l and self.callback: + self.callback() + return l + + def chunk(self): + """return the next chunk from changegroup 'source' as a string""" + l = self.chunklength() + d = self.read(l) + if len(d) < l: + raise util.Abort(_("premature EOF reading chunk" + " (got %d bytes, expected %d)") + % (len(d), l)) + return d + + def parsechunk(self): + l = self.chunklength() + if not l: + return {} + h = self.read(80) + node, p1, p2, cs = struct.unpack("20s20s20s20s", h) + data = self.read(l - 80) + return dict(node=node, p1=p1, p2=p2, cs=cs, data=data) + +class headerlessfixup(object): + def __init__(self, fh, h): + self._h = h + self._fh = fh + def read(self, n): + if self._h: + d, self._h = self._h[:n], self._h[n:] + if len(d) < n: + d += self._fh.read(n - len(d)) + return d + return self._fh.read(n) + +def readbundle(fh, fname): + header = fh.read(6) + + if not fname: + fname = "stream" + if not header.startswith('HG') and header.startswith('\0'): + fh = headerlessfixup(fh, header) + header = "HG10UN" + + magic, version, alg = header[0:2], header[2:4], header[4:6] + + if magic != 'HG': + raise util.Abort(_('%s: not a Mercurial bundle') % fname) + if version != '10': + raise util.Abort(_('%s: unknown bundle version %s') % (fname, version)) + return unbundle10(fh, alg) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changegroup.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changegroup.pyo Binary files differnew file mode 100644 index 0000000..051fb31 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changegroup.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changelog.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changelog.py new file mode 100644 index 0000000..bfecee0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changelog.py @@ -0,0 +1,233 @@ +# changelog.py - changelog 
class for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from node import bin, hex, nullid +from i18n import _ +import util, error, revlog, encoding + +def _string_escape(text): + """ + >>> d = {'nl': chr(10), 'bs': chr(92), 'cr': chr(13), 'nul': chr(0)} + >>> s = "ab%(nl)scd%(bs)s%(bs)sn%(nul)sab%(cr)scd%(bs)s%(nl)s" % d + >>> s + 'ab\\ncd\\\\\\\\n\\x00ab\\rcd\\\\\\n' + >>> res = _string_escape(s) + >>> s == res.decode('string_escape') + True + """ + # subset of the string_escape codec + text = text.replace('\\', '\\\\').replace('\n', '\\n').replace('\r', '\\r') + return text.replace('\0', '\\0') + +def decodeextra(text): + extra = {} + for l in text.split('\0'): + if l: + k, v = l.decode('string_escape').split(':', 1) + extra[k] = v + return extra + +def encodeextra(d): + # keys must be sorted to produce a deterministic changelog entry + items = [_string_escape('%s:%s' % (k, d[k])) for k in sorted(d)] + return "\0".join(items) + +class appender(object): + '''the changelog index must be updated last on disk, so we use this class + to delay writes to it''' + def __init__(self, fp, buf): + self.data = buf + self.fp = fp + self.offset = fp.tell() + self.size = util.fstat(fp).st_size + + def end(self): + return self.size + len("".join(self.data)) + def tell(self): + return self.offset + def flush(self): + pass + def close(self): + self.fp.close() + + def seek(self, offset, whence=0): + '''virtual file offset spans real file and data''' + if whence == 0: + self.offset = offset + elif whence == 1: + self.offset += offset + elif whence == 2: + self.offset = self.end() + offset + if self.offset < self.size: + self.fp.seek(self.offset) + + def read(self, count=-1): + '''only trick here is reads that span real file and data''' + ret = "" + if self.offset < self.size: + s = self.fp.read(count) + ret = s + 
self.offset += len(s) + if count > 0: + count -= len(s) + if count != 0: + doff = self.offset - self.size + self.data.insert(0, "".join(self.data)) + del self.data[1:] + s = self.data[0][doff:doff + count] + self.offset += len(s) + ret += s + return ret + + def write(self, s): + self.data.append(str(s)) + self.offset += len(s) + +def delayopener(opener, target, divert, buf): + def o(name, mode='r'): + if name != target: + return opener(name, mode) + if divert: + return opener(name + ".a", mode.replace('a', 'w')) + # otherwise, divert to memory + return appender(opener(name, mode), buf) + return o + +class changelog(revlog.revlog): + def __init__(self, opener): + revlog.revlog.__init__(self, opener, "00changelog.i") + self._realopener = opener + self._delayed = False + self._divert = False + + def delayupdate(self): + "delay visibility of index updates to other readers" + self._delayed = True + self._divert = (len(self) == 0) + self._delaybuf = [] + self.opener = delayopener(self._realopener, self.indexfile, + self._divert, self._delaybuf) + + def finalize(self, tr): + "finalize index updates" + self._delayed = False + self.opener = self._realopener + # move redirected index data back into place + if self._divert: + n = self.opener(self.indexfile + ".a").name + util.rename(n, n[:-2]) + elif self._delaybuf: + fp = self.opener(self.indexfile, 'a') + fp.write("".join(self._delaybuf)) + fp.close() + self._delaybuf = [] + # split when we're done + self.checkinlinesize(tr) + + def readpending(self, file): + r = revlog.revlog(self.opener, file) + self.index = r.index + self.nodemap = r.nodemap + self._chunkcache = r._chunkcache + + def writepending(self): + "create a file containing the unfinalized state for pretxnchangegroup" + if self._delaybuf: + # make a temporary copy of the index + fp1 = self._realopener(self.indexfile) + fp2 = self._realopener(self.indexfile + ".a", "w") + fp2.write(fp1.read()) + # add pending data + fp2.write("".join(self._delaybuf)) + fp2.close() 
+ # switch modes so finalize can simply rename + self._delaybuf = [] + self._divert = True + + if self._divert: + return True + + return False + + def checkinlinesize(self, tr, fp=None): + if not self._delayed: + revlog.revlog.checkinlinesize(self, tr, fp) + + def read(self, node): + """ + format used: + nodeid\n : manifest node in ascii + user\n : user, no \n or \r allowed + time tz extra\n : date (time is int or float, timezone is int) + : extra is metadatas, encoded and separated by '\0' + : older versions ignore it + files\n\n : files modified by the cset, no \n or \r allowed + (.*) : comment (free text, ideally utf-8) + + changelog v0 doesn't use extra + """ + text = self.revision(node) + if not text: + return (nullid, "", (0, 0), [], "", {'branch': 'default'}) + last = text.index("\n\n") + desc = encoding.tolocal(text[last + 2:]) + l = text[:last].split('\n') + manifest = bin(l[0]) + user = encoding.tolocal(l[1]) + + extra_data = l[2].split(' ', 2) + if len(extra_data) != 3: + time = float(extra_data.pop(0)) + try: + # various tools did silly things with the time zone field. + timezone = int(extra_data[0]) + except: + timezone = 0 + extra = {} + else: + time, timezone, extra = extra_data + time, timezone = float(time), int(timezone) + extra = decodeextra(extra) + if not extra.get('branch'): + extra['branch'] = 'default' + files = l[3:] + return (manifest, user, (time, timezone), files, desc, extra) + + def add(self, manifest, files, desc, transaction, p1, p2, + user, date=None, extra=None): + user = user.strip() + # An empty username or a username with a "\n" will make the + # revision text contain two "\n\n" sequences -> corrupt + # repository since read cannot unpack the revision. 
+ if not user: + raise error.RevlogError(_("empty username")) + if "\n" in user: + raise error.RevlogError(_("username %s contains a newline") + % repr(user)) + + # strip trailing whitespace and leading and trailing empty lines + desc = '\n'.join([l.rstrip() for l in desc.splitlines()]).strip('\n') + + user, desc = encoding.fromlocal(user), encoding.fromlocal(desc) + + if date: + parseddate = "%d %d" % util.parsedate(date) + else: + parseddate = "%d %d" % util.makedate() + if extra: + branch = extra.get("branch") + if branch in ("default", ""): + del extra["branch"] + elif branch in (".", "null", "tip"): + raise error.RevlogError(_('the name \'%s\' is reserved') + % branch) + if extra: + extra = encodeextra(extra) + parseddate = "%s %s" % (parseddate, extra) + l = [hex(manifest), user, parseddate] + sorted(files) + ["", desc] + text = "\n".join(l) + return self.addrevision(text, transaction, len(self), p1, p2) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changelog.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changelog.pyo Binary files differnew file mode 100644 index 0000000..4e0f428 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/changelog.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/cmdutil.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/cmdutil.py new file mode 100644 index 0000000..7d304d6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/cmdutil.py @@ -0,0 +1,1374 @@ +# cmdutil.py - help for command processing in mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from node import hex, nullid, nullrev, short +from i18n import _ +import os, sys, errno, re, glob, tempfile +import util, templater, patch, error, encoding, templatekw +import match as matchmod +import similar, revset, subrepo + +revrangesep = ':' + +def parsealiases(cmd): + return cmd.lstrip("^").split("|") + +def findpossible(cmd, table, strict=False): + """ + Return cmd -> (aliases, command table entry) + for each matching command. + Return debug commands (or their aliases) only if no normal command matches. + """ + choice = {} + debugchoice = {} + for e in table.keys(): + aliases = parsealiases(e) + found = None + if cmd in aliases: + found = cmd + elif not strict: + for a in aliases: + if a.startswith(cmd): + found = a + break + if found is not None: + if aliases[0].startswith("debug") or found.startswith("debug"): + debugchoice[found] = (aliases, table[e]) + else: + choice[found] = (aliases, table[e]) + + if not choice and debugchoice: + choice = debugchoice + + return choice + +def findcmd(cmd, table, strict=True): + """Return (aliases, command table entry) for command string.""" + choice = findpossible(cmd, table, strict) + + if cmd in choice: + return choice[cmd] + + if len(choice) > 1: + clist = choice.keys() + clist.sort() + raise error.AmbiguousCommand(cmd, clist) + + if choice: + return choice.values()[0] + + raise error.UnknownCommand(cmd) + +def findrepo(p): + while not os.path.isdir(os.path.join(p, ".hg")): + oldp, p = p, os.path.dirname(p) + if p == oldp: + return None + + return p + +def bail_if_changed(repo): + if repo.dirstate.parents()[1] != nullid: + raise util.Abort(_('outstanding uncommitted merge')) + modified, added, removed, deleted = repo.status()[:4] + if modified or added or removed or deleted: + raise util.Abort(_("outstanding uncommitted changes")) + +def logmessage(opts): + """ get the log message according to -m and -l option """ + message = opts.get('message') + logfile = opts.get('logfile') + + if message and logfile: + raise 
util.Abort(_('options --message and --logfile are mutually ' + 'exclusive')) + if not message and logfile: + try: + if logfile == '-': + message = sys.stdin.read() + else: + message = open(logfile).read() + except IOError, inst: + raise util.Abort(_("can't read commit message '%s': %s") % + (logfile, inst.strerror)) + return message + +def loglimit(opts): + """get the log limit according to option -l/--limit""" + limit = opts.get('limit') + if limit: + try: + limit = int(limit) + except ValueError: + raise util.Abort(_('limit must be a positive integer')) + if limit <= 0: + raise util.Abort(_('limit must be positive')) + else: + limit = None + return limit + +def revsingle(repo, revspec, default='.'): + if not revspec: + return repo[default] + + l = revrange(repo, [revspec]) + if len(l) < 1: + raise util.Abort(_('empty revision set')) + return repo[l[-1]] + +def revpair(repo, revs): + if not revs: + return repo.dirstate.parents()[0], None + + l = revrange(repo, revs) + + if len(l) == 0: + return repo.dirstate.parents()[0], None + + if len(l) == 1: + return repo.lookup(l[0]), None + + return repo.lookup(l[0]), repo.lookup(l[-1]) + +def revrange(repo, revs): + """Yield revision as strings from a list of revision specifications.""" + + def revfix(repo, val, defval): + if not val and val != 0 and defval is not None: + return defval + return repo.changelog.rev(repo.lookup(val)) + + seen, l = set(), [] + for spec in revs: + # attempt to parse old-style ranges first to deal with + # things like old-tag which contain query metacharacters + try: + if revrangesep in spec: + start, end = spec.split(revrangesep, 1) + start = revfix(repo, start, 0) + end = revfix(repo, end, len(repo) - 1) + step = start > end and -1 or 1 + for rev in xrange(start, end + step, step): + if rev in seen: + continue + seen.add(rev) + l.append(rev) + continue + elif spec and spec in repo: # single unquoted rev + rev = revfix(repo, spec, None) + if rev in seen: + continue + seen.add(rev) + 
l.append(rev) + continue + except error.RepoLookupError: + pass + + # fall through to new-style queries if old-style fails + m = revset.match(spec) + for r in m(repo, range(len(repo))): + if r not in seen: + l.append(r) + seen.update(l) + + return l + +def make_filename(repo, pat, node, + total=None, seqno=None, revwidth=None, pathname=None): + node_expander = { + 'H': lambda: hex(node), + 'R': lambda: str(repo.changelog.rev(node)), + 'h': lambda: short(node), + } + expander = { + '%': lambda: '%', + 'b': lambda: os.path.basename(repo.root), + } + + try: + if node: + expander.update(node_expander) + if node: + expander['r'] = (lambda: + str(repo.changelog.rev(node)).zfill(revwidth or 0)) + if total is not None: + expander['N'] = lambda: str(total) + if seqno is not None: + expander['n'] = lambda: str(seqno) + if total is not None and seqno is not None: + expander['n'] = lambda: str(seqno).zfill(len(str(total))) + if pathname is not None: + expander['s'] = lambda: os.path.basename(pathname) + expander['d'] = lambda: os.path.dirname(pathname) or '.' 
+ expander['p'] = lambda: pathname + + newname = [] + patlen = len(pat) + i = 0 + while i < patlen: + c = pat[i] + if c == '%': + i += 1 + c = pat[i] + c = expander[c]() + newname.append(c) + i += 1 + return ''.join(newname) + except KeyError, inst: + raise util.Abort(_("invalid format spec '%%%s' in output filename") % + inst.args[0]) + +def make_file(repo, pat, node=None, + total=None, seqno=None, revwidth=None, mode='wb', pathname=None): + + writable = 'w' in mode or 'a' in mode + + if not pat or pat == '-': + return writable and sys.stdout or sys.stdin + if hasattr(pat, 'write') and writable: + return pat + if hasattr(pat, 'read') and 'r' in mode: + return pat + return open(make_filename(repo, pat, node, total, seqno, revwidth, + pathname), + mode) + +def expandpats(pats): + if not util.expandglobs: + return list(pats) + ret = [] + for p in pats: + kind, name = matchmod._patsplit(p, None) + if kind is None: + try: + globbed = glob.glob(name) + except re.error: + globbed = [name] + if globbed: + ret.extend(globbed) + continue + ret.append(p) + return ret + +def match(repo, pats=[], opts={}, globbed=False, default='relpath'): + if not globbed and default == 'relpath': + pats = expandpats(pats or []) + m = matchmod.match(repo.root, repo.getcwd(), pats, + opts.get('include'), opts.get('exclude'), default, + auditor=repo.auditor) + def badfn(f, msg): + repo.ui.warn("%s: %s\n" % (m.rel(f), msg)) + m.bad = badfn + return m + +def matchall(repo): + return matchmod.always(repo.root, repo.getcwd()) + +def matchfiles(repo, files): + return matchmod.exact(repo.root, repo.getcwd(), files) + +def addremove(repo, pats=[], opts={}, dry_run=None, similarity=None): + if dry_run is None: + dry_run = opts.get('dry_run') + if similarity is None: + similarity = float(opts.get('similarity') or 0) + # we'd use status here, except handling of symlinks and ignore is tricky + added, unknown, deleted, removed = [], [], [], [] + audit_path = util.path_auditor(repo.root) + m = match(repo, 
pats, opts) + for abs in repo.walk(m): + target = repo.wjoin(abs) + good = True + try: + audit_path(abs) + except: + good = False + rel = m.rel(abs) + exact = m.exact(abs) + if good and abs not in repo.dirstate: + unknown.append(abs) + if repo.ui.verbose or not exact: + repo.ui.status(_('adding %s\n') % ((pats and rel) or abs)) + elif repo.dirstate[abs] != 'r' and (not good or not os.path.lexists(target) + or (os.path.isdir(target) and not os.path.islink(target))): + deleted.append(abs) + if repo.ui.verbose or not exact: + repo.ui.status(_('removing %s\n') % ((pats and rel) or abs)) + # for finding renames + elif repo.dirstate[abs] == 'r': + removed.append(abs) + elif repo.dirstate[abs] == 'a': + added.append(abs) + copies = {} + if similarity > 0: + for old, new, score in similar.findrenames(repo, + added + unknown, removed + deleted, similarity): + if repo.ui.verbose or not m.exact(old) or not m.exact(new): + repo.ui.status(_('recording removal of %s as rename to %s ' + '(%d%% similar)\n') % + (m.rel(old), m.rel(new), score * 100)) + copies[new] = old + + if not dry_run: + wctx = repo[None] + wlock = repo.wlock() + try: + wctx.remove(deleted) + wctx.add(unknown) + for new, old in copies.iteritems(): + wctx.copy(old, new) + finally: + wlock.release() + +def updatedir(ui, repo, patches, similarity=0): + '''Update dirstate after patch application according to metadata''' + if not patches: + return + copies = [] + removes = set() + cfiles = patches.keys() + cwd = repo.getcwd() + if cwd: + cfiles = [util.pathto(repo.root, cwd, f) for f in patches.keys()] + for f in patches: + gp = patches[f] + if not gp: + continue + if gp.op == 'RENAME': + copies.append((gp.oldpath, gp.path)) + removes.add(gp.oldpath) + elif gp.op == 'COPY': + copies.append((gp.oldpath, gp.path)) + elif gp.op == 'DELETE': + removes.add(gp.path) + + wctx = repo[None] + for src, dst in copies: + dirstatecopy(ui, repo, wctx, src, dst, cwd=cwd) + if (not similarity) and removes: + 
wctx.remove(sorted(removes), True) + + for f in patches: + gp = patches[f] + if gp and gp.mode: + islink, isexec = gp.mode + dst = repo.wjoin(gp.path) + # patch won't create empty files + if gp.op == 'ADD' and not os.path.lexists(dst): + flags = (isexec and 'x' or '') + (islink and 'l' or '') + repo.wwrite(gp.path, '', flags) + util.set_flags(dst, islink, isexec) + addremove(repo, cfiles, similarity=similarity) + files = patches.keys() + files.extend([r for r in removes if r not in files]) + return sorted(files) + +def dirstatecopy(ui, repo, wctx, src, dst, dryrun=False, cwd=None): + """Update the dirstate to reflect the intent of copying src to dst. For + different reasons it might not end with dst being marked as copied from src. + """ + origsrc = repo.dirstate.copied(src) or src + if dst == origsrc: # copying back a copy? + if repo.dirstate[dst] not in 'mn' and not dryrun: + repo.dirstate.normallookup(dst) + else: + if repo.dirstate[origsrc] == 'a' and origsrc == src: + if not ui.quiet: + ui.warn(_("%s has not been committed yet, so no copy " + "data will be stored for %s.\n") + % (repo.pathto(origsrc, cwd), repo.pathto(dst, cwd))) + if repo.dirstate[dst] in '?r' and not dryrun: + wctx.add([dst]) + elif not dryrun: + wctx.copy(origsrc, dst) + +def copy(ui, repo, pats, opts, rename=False): + # called with the repo lock held + # + # hgsep => pathname that uses "/" to separate directories + # ossep => pathname that uses os.sep to separate directories + cwd = repo.getcwd() + targets = {} + after = opts.get("after") + dryrun = opts.get("dry_run") + wctx = repo[None] + + def walkpat(pat): + srcs = [] + badstates = after and '?' 
or '?r' + m = match(repo, [pat], opts, globbed=True) + for abs in repo.walk(m): + state = repo.dirstate[abs] + rel = m.rel(abs) + exact = m.exact(abs) + if state in badstates: + if exact and state == '?': + ui.warn(_('%s: not copying - file is not managed\n') % rel) + if exact and state == 'r': + ui.warn(_('%s: not copying - file has been marked for' + ' remove\n') % rel) + continue + # abs: hgsep + # rel: ossep + srcs.append((abs, rel, exact)) + return srcs + + # abssrc: hgsep + # relsrc: ossep + # otarget: ossep + def copyfile(abssrc, relsrc, otarget, exact): + abstarget = util.canonpath(repo.root, cwd, otarget) + reltarget = repo.pathto(abstarget, cwd) + target = repo.wjoin(abstarget) + src = repo.wjoin(abssrc) + state = repo.dirstate[abstarget] + + # check for collisions + prevsrc = targets.get(abstarget) + if prevsrc is not None: + ui.warn(_('%s: not overwriting - %s collides with %s\n') % + (reltarget, repo.pathto(abssrc, cwd), + repo.pathto(prevsrc, cwd))) + return + + # check for overwrites + exists = os.path.lexists(target) + if not after and exists or after and state in 'mn': + if not opts['force']: + ui.warn(_('%s: not overwriting - file exists\n') % + reltarget) + return + + if after: + if not exists: + if rename: + ui.warn(_('%s: not recording move - %s does not exist\n') % + (relsrc, reltarget)) + else: + ui.warn(_('%s: not recording copy - %s does not exist\n') % + (relsrc, reltarget)) + return + elif not dryrun: + try: + if exists: + os.unlink(target) + targetdir = os.path.dirname(target) or '.' 
+ if not os.path.isdir(targetdir): + os.makedirs(targetdir) + util.copyfile(src, target) + except IOError, inst: + if inst.errno == errno.ENOENT: + ui.warn(_('%s: deleted in working copy\n') % relsrc) + else: + ui.warn(_('%s: cannot copy - %s\n') % + (relsrc, inst.strerror)) + return True # report a failure + + if ui.verbose or not exact: + if rename: + ui.status(_('moving %s to %s\n') % (relsrc, reltarget)) + else: + ui.status(_('copying %s to %s\n') % (relsrc, reltarget)) + + targets[abstarget] = abssrc + + # fix up dirstate + dirstatecopy(ui, repo, wctx, abssrc, abstarget, dryrun=dryrun, cwd=cwd) + if rename and not dryrun: + wctx.remove([abssrc], not after) + + # pat: ossep + # dest ossep + # srcs: list of (hgsep, hgsep, ossep, bool) + # return: function that takes hgsep and returns ossep + def targetpathfn(pat, dest, srcs): + if os.path.isdir(pat): + abspfx = util.canonpath(repo.root, cwd, pat) + abspfx = util.localpath(abspfx) + if destdirexists: + striplen = len(os.path.split(abspfx)[0]) + else: + striplen = len(abspfx) + if striplen: + striplen += len(os.sep) + res = lambda p: os.path.join(dest, util.localpath(p)[striplen:]) + elif destdirexists: + res = lambda p: os.path.join(dest, + os.path.basename(util.localpath(p))) + else: + res = lambda p: dest + return res + + # pat: ossep + # dest ossep + # srcs: list of (hgsep, hgsep, ossep, bool) + # return: function that takes hgsep and returns ossep + def targetpathafterfn(pat, dest, srcs): + if matchmod.patkind(pat): + # a mercurial pattern + res = lambda p: os.path.join(dest, + os.path.basename(util.localpath(p))) + else: + abspfx = util.canonpath(repo.root, cwd, pat) + if len(abspfx) < len(srcs[0][0]): + # A directory. Either the target path contains the last + # component of the source path or it does not. 
+ def evalpath(striplen): + score = 0 + for s in srcs: + t = os.path.join(dest, util.localpath(s[0])[striplen:]) + if os.path.lexists(t): + score += 1 + return score + + abspfx = util.localpath(abspfx) + striplen = len(abspfx) + if striplen: + striplen += len(os.sep) + if os.path.isdir(os.path.join(dest, os.path.split(abspfx)[1])): + score = evalpath(striplen) + striplen1 = len(os.path.split(abspfx)[0]) + if striplen1: + striplen1 += len(os.sep) + if evalpath(striplen1) > score: + striplen = striplen1 + res = lambda p: os.path.join(dest, + util.localpath(p)[striplen:]) + else: + # a file + if destdirexists: + res = lambda p: os.path.join(dest, + os.path.basename(util.localpath(p))) + else: + res = lambda p: dest + return res + + + pats = expandpats(pats) + if not pats: + raise util.Abort(_('no source or destination specified')) + if len(pats) == 1: + raise util.Abort(_('no destination specified')) + dest = pats.pop() + destdirexists = os.path.isdir(dest) and not os.path.islink(dest) + if not destdirexists: + if len(pats) > 1 or matchmod.patkind(pats[0]): + raise util.Abort(_('with multiple sources, destination must be an ' + 'existing directory')) + if util.endswithsep(dest): + raise util.Abort(_('destination %s is not a directory') % dest) + + tfn = targetpathfn + if after: + tfn = targetpathafterfn + copylist = [] + for pat in pats: + srcs = walkpat(pat) + if not srcs: + continue + copylist.append((tfn(pat, dest, srcs), srcs)) + if not copylist: + raise util.Abort(_('no files to copy')) + + errors = 0 + for targetpath, srcs in copylist: + for abssrc, relsrc, exact in srcs: + if copyfile(abssrc, relsrc, targetpath(abssrc), exact): + errors += 1 + + if errors: + ui.warn(_('(consider using --after)\n')) + + return errors != 0 + +def service(opts, parentfn=None, initfn=None, runfn=None, logfile=None, + runargs=None, appendpid=False): + '''Run a command as a service.''' + + if opts['daemon'] and not opts['daemon_pipefds']: + # Signal child process startup with file 
removal + lockfd, lockpath = tempfile.mkstemp(prefix='hg-service-') + os.close(lockfd) + try: + if not runargs: + runargs = util.hgcmd() + sys.argv[1:] + runargs.append('--daemon-pipefds=%s' % lockpath) + # Don't pass --cwd to the child process, because we've already + # changed directory. + for i in xrange(1, len(runargs)): + if runargs[i].startswith('--cwd='): + del runargs[i] + break + elif runargs[i].startswith('--cwd'): + del runargs[i:i + 2] + break + def condfn(): + return not os.path.exists(lockpath) + pid = util.rundetached(runargs, condfn) + if pid < 0: + raise util.Abort(_('child process failed to start')) + finally: + try: + os.unlink(lockpath) + except OSError, e: + if e.errno != errno.ENOENT: + raise + if parentfn: + return parentfn(pid) + else: + return + + if initfn: + initfn() + + if opts['pid_file']: + mode = appendpid and 'a' or 'w' + fp = open(opts['pid_file'], mode) + fp.write(str(os.getpid()) + '\n') + fp.close() + + if opts['daemon_pipefds']: + lockpath = opts['daemon_pipefds'] + try: + os.setsid() + except AttributeError: + pass + os.unlink(lockpath) + util.hidewindow() + sys.stdout.flush() + sys.stderr.flush() + + nullfd = os.open(util.nulldev, os.O_RDWR) + logfilefd = nullfd + if logfile: + logfilefd = os.open(logfile, os.O_RDWR | os.O_CREAT | os.O_APPEND) + os.dup2(nullfd, 0) + os.dup2(logfilefd, 1) + os.dup2(logfilefd, 2) + if nullfd not in (0, 1, 2): + os.close(nullfd) + if logfile and logfilefd not in (0, 1, 2): + os.close(logfilefd) + + if runfn: + return runfn() + +def export(repo, revs, template='hg-%h.patch', fp=None, switch_parent=False, + opts=None): + '''export changesets as hg patches.''' + + total = len(revs) + revwidth = max([len(str(rev)) for rev in revs]) + + def single(rev, seqno, fp): + ctx = repo[rev] + node = ctx.node() + parents = [p.node() for p in ctx.parents() if p] + branch = ctx.branch() + if switch_parent: + parents.reverse() + prev = (parents and parents[0]) or nullid + + if not fp: + fp = make_file(repo, 
template, node, total=total, seqno=seqno, + revwidth=revwidth, mode='ab') + if fp != sys.stdout and hasattr(fp, 'name'): + repo.ui.note("%s\n" % fp.name) + + fp.write("# HG changeset patch\n") + fp.write("# User %s\n" % ctx.user()) + fp.write("# Date %d %d\n" % ctx.date()) + if branch and branch != 'default': + fp.write("# Branch %s\n" % branch) + fp.write("# Node ID %s\n" % hex(node)) + fp.write("# Parent %s\n" % hex(prev)) + if len(parents) > 1: + fp.write("# Parent %s\n" % hex(parents[1])) + fp.write(ctx.description().rstrip()) + fp.write("\n\n") + + for chunk in patch.diff(repo, prev, node, opts=opts): + fp.write(chunk) + + for seqno, rev in enumerate(revs): + single(rev, seqno + 1, fp) + +def diffordiffstat(ui, repo, diffopts, node1, node2, match, + changes=None, stat=False, fp=None, prefix='', + listsubrepos=False): + '''show diff or diffstat.''' + if fp is None: + write = ui.write + else: + def write(s, **kw): + fp.write(s) + + if stat: + diffopts = diffopts.copy(context=0) + width = 80 + if not ui.plain(): + width = ui.termwidth() + chunks = patch.diff(repo, node1, node2, match, changes, diffopts, + prefix=prefix) + for chunk, label in patch.diffstatui(util.iterlines(chunks), + width=width, + git=diffopts.git): + write(chunk, label=label) + else: + for chunk, label in patch.diffui(repo, node1, node2, match, + changes, diffopts, prefix=prefix): + write(chunk, label=label) + + if listsubrepos: + ctx1 = repo[node1] + ctx2 = repo[node2] + for subpath, sub in subrepo.itersubrepos(ctx1, ctx2): + if node2 is not None: + node2 = ctx2.substate[subpath][1] + submatch = matchmod.narrowmatcher(subpath, match) + sub.diff(diffopts, node2, submatch, changes=changes, + stat=stat, fp=fp, prefix=prefix) + +class changeset_printer(object): + '''show changeset information when templating not requested.''' + + def __init__(self, ui, repo, patch, diffopts, buffered): + self.ui = ui + self.repo = repo + self.buffered = buffered + self.patch = patch + self.diffopts = diffopts + 
self.header = {} + self.hunk = {} + self.lastheader = None + self.footer = None + + def flush(self, rev): + if rev in self.header: + h = self.header[rev] + if h != self.lastheader: + self.lastheader = h + self.ui.write(h) + del self.header[rev] + if rev in self.hunk: + self.ui.write(self.hunk[rev]) + del self.hunk[rev] + return 1 + return 0 + + def close(self): + if self.footer: + self.ui.write(self.footer) + + def show(self, ctx, copies=None, matchfn=None, **props): + if self.buffered: + self.ui.pushbuffer() + self._show(ctx, copies, matchfn, props) + self.hunk[ctx.rev()] = self.ui.popbuffer(labeled=True) + else: + self._show(ctx, copies, matchfn, props) + + def _show(self, ctx, copies, matchfn, props): + '''show a single changeset or file revision''' + changenode = ctx.node() + rev = ctx.rev() + + if self.ui.quiet: + self.ui.write("%d:%s\n" % (rev, short(changenode)), + label='log.node') + return + + log = self.repo.changelog + date = util.datestr(ctx.date()) + + hexfunc = self.ui.debugflag and hex or short + + parents = [(p, hexfunc(log.node(p))) + for p in self._meaningful_parentrevs(log, rev)] + + self.ui.write(_("changeset: %d:%s\n") % (rev, hexfunc(changenode)), + label='log.changeset') + + branch = ctx.branch() + # don't show the default branch name + if branch != 'default': + branch = encoding.tolocal(branch) + self.ui.write(_("branch: %s\n") % branch, + label='log.branch') + for tag in self.repo.nodetags(changenode): + self.ui.write(_("tag: %s\n") % tag, + label='log.tag') + for parent in parents: + self.ui.write(_("parent: %d:%s\n") % parent, + label='log.parent') + + if self.ui.debugflag: + mnode = ctx.manifestnode() + self.ui.write(_("manifest: %d:%s\n") % + (self.repo.manifest.rev(mnode), hex(mnode)), + label='ui.debug log.manifest') + self.ui.write(_("user: %s\n") % ctx.user(), + label='log.user') + self.ui.write(_("date: %s\n") % date, + label='log.date') + + if self.ui.debugflag: + files = self.repo.status(log.parents(changenode)[0], 
changenode)[:3] + for key, value in zip([_("files:"), _("files+:"), _("files-:")], + files): + if value: + self.ui.write("%-12s %s\n" % (key, " ".join(value)), + label='ui.debug log.files') + elif ctx.files() and self.ui.verbose: + self.ui.write(_("files: %s\n") % " ".join(ctx.files()), + label='ui.note log.files') + if copies and self.ui.verbose: + copies = ['%s (%s)' % c for c in copies] + self.ui.write(_("copies: %s\n") % ' '.join(copies), + label='ui.note log.copies') + + extra = ctx.extra() + if extra and self.ui.debugflag: + for key, value in sorted(extra.items()): + self.ui.write(_("extra: %s=%s\n") + % (key, value.encode('string_escape')), + label='ui.debug log.extra') + + description = ctx.description().strip() + if description: + if self.ui.verbose: + self.ui.write(_("description:\n"), + label='ui.note log.description') + self.ui.write(description, + label='ui.note log.description') + self.ui.write("\n\n") + else: + self.ui.write(_("summary: %s\n") % + description.splitlines()[0], + label='log.summary') + self.ui.write("\n") + + self.showpatch(changenode, matchfn) + + def showpatch(self, node, matchfn): + if not matchfn: + matchfn = self.patch + if matchfn: + stat = self.diffopts.get('stat') + diff = self.diffopts.get('patch') + diffopts = patch.diffopts(self.ui, self.diffopts) + prev = self.repo.changelog.parents(node)[0] + if stat: + diffordiffstat(self.ui, self.repo, diffopts, prev, node, + match=matchfn, stat=True) + if diff: + if stat: + self.ui.write("\n") + diffordiffstat(self.ui, self.repo, diffopts, prev, node, + match=matchfn, stat=False) + self.ui.write("\n") + + def _meaningful_parentrevs(self, log, rev): + """Return list of meaningful (or all if debug) parentrevs for rev. + + For merges (two non-nullrev revisions) both parents are meaningful. + Otherwise the first parent revision is considered meaningful if it + is not the preceding revision. 
+ """ + parents = log.parentrevs(rev) + if not self.ui.debugflag and parents[1] == nullrev: + if parents[0] >= rev - 1: + parents = [] + else: + parents = [parents[0]] + return parents + + +class changeset_templater(changeset_printer): + '''format changeset information.''' + + def __init__(self, ui, repo, patch, diffopts, mapfile, buffered): + changeset_printer.__init__(self, ui, repo, patch, diffopts, buffered) + formatnode = ui.debugflag and (lambda x: x) or (lambda x: x[:12]) + defaulttempl = { + 'parent': '{rev}:{node|formatnode} ', + 'manifest': '{rev}:{node|formatnode}', + 'file_copy': '{name} ({source})', + 'extra': '{key}={value|stringescape}' + } + # filecopy is preserved for compatibility reasons + defaulttempl['filecopy'] = defaulttempl['file_copy'] + self.t = templater.templater(mapfile, {'formatnode': formatnode}, + cache=defaulttempl) + self.cache = {} + + def use_template(self, t): + '''set template string to use''' + self.t.cache['changeset'] = t + + def _meaningful_parentrevs(self, ctx): + """Return list of meaningful (or all if debug) parentrevs for rev. + """ + parents = ctx.parents() + if len(parents) > 1: + return parents + if self.ui.debugflag: + return [parents[0], self.repo['null']] + if parents[0].rev() >= ctx.rev() - 1: + return [] + return parents + + def _show(self, ctx, copies, matchfn, props): + '''show a single changeset or file revision''' + + showlist = templatekw.showlist + + # showparents() behaviour depends on ui trace level which + # causes unexpected behaviours at templating level and makes + # it harder to extract it in a standalone function. Its + # behaviour cannot be changed so leave it here for now. 
+ def showparents(**args): + ctx = args['ctx'] + parents = [[('rev', p.rev()), ('node', p.hex())] + for p in self._meaningful_parentrevs(ctx)] + return showlist('parent', parents, **args) + + props = props.copy() + props.update(templatekw.keywords) + props['parents'] = showparents + props['templ'] = self.t + props['ctx'] = ctx + props['repo'] = self.repo + props['revcache'] = {'copies': copies} + props['cache'] = self.cache + + # find correct templates for current mode + + tmplmodes = [ + (True, None), + (self.ui.verbose, 'verbose'), + (self.ui.quiet, 'quiet'), + (self.ui.debugflag, 'debug'), + ] + + types = {'header': '', 'footer':'', 'changeset': 'changeset'} + for mode, postfix in tmplmodes: + for type in types: + cur = postfix and ('%s_%s' % (type, postfix)) or type + if mode and cur in self.t: + types[type] = cur + + try: + + # write header + if types['header']: + h = templater.stringify(self.t(types['header'], **props)) + if self.buffered: + self.header[ctx.rev()] = h + else: + if self.lastheader != h: + self.lastheader = h + self.ui.write(h) + + # write changeset metadata, then patch if requested + key = types['changeset'] + self.ui.write(templater.stringify(self.t(key, **props))) + self.showpatch(ctx.node(), matchfn) + + if types['footer']: + if not self.footer: + self.footer = templater.stringify(self.t(types['footer'], + **props)) + + except KeyError, inst: + msg = _("%s: no key named '%s'") + raise util.Abort(msg % (self.t.mapfile, inst.args[0])) + except SyntaxError, inst: + raise util.Abort('%s: %s' % (self.t.mapfile, inst.args[0])) + +def show_changeset(ui, repo, opts, buffered=False): + """show one changeset using template or regular display. + + Display format will be the first non-empty hit of: + 1. option 'template' + 2. option 'style' + 3. [ui] setting 'logtemplate' + 4. [ui] setting 'style' + If all of these values are either the unset or the empty string, + regular display via changeset_printer() is done. 
+ """ + # options + patch = False + if opts.get('patch') or opts.get('stat'): + patch = matchall(repo) + + tmpl = opts.get('template') + style = None + if tmpl: + tmpl = templater.parsestring(tmpl, quoted=False) + else: + style = opts.get('style') + + # ui settings + if not (tmpl or style): + tmpl = ui.config('ui', 'logtemplate') + if tmpl: + tmpl = templater.parsestring(tmpl) + else: + style = util.expandpath(ui.config('ui', 'style', '')) + + if not (tmpl or style): + return changeset_printer(ui, repo, patch, opts, buffered) + + mapfile = None + if style and not tmpl: + mapfile = style + if not os.path.split(mapfile)[0]: + mapname = (templater.templatepath('map-cmdline.' + mapfile) + or templater.templatepath(mapfile)) + if mapname: + mapfile = mapname + + try: + t = changeset_templater(ui, repo, patch, opts, mapfile, buffered) + except SyntaxError, inst: + raise util.Abort(inst.args[0]) + if tmpl: + t.use_template(tmpl) + return t + +def finddate(ui, repo, date): + """Find the tipmost changeset that matches the given date spec""" + + df = util.matchdate(date) + m = matchall(repo) + results = {} + + def prep(ctx, fns): + d = ctx.date() + if df(d[0]): + results[ctx.rev()] = d + + for ctx in walkchangerevs(repo, m, {'rev': None}, prep): + rev = ctx.rev() + if rev in results: + ui.status(_("Found revision %s from %s\n") % + (rev, util.datestr(results[rev]))) + return str(rev) + + raise util.Abort(_("revision matching date not found")) + +def walkchangerevs(repo, match, opts, prepare): + '''Iterate over files and the revs in which they changed. + + Callers most commonly need to iterate backwards over the history + in which they are interested. Doing so has awful (quadratic-looking) + performance, so we use iterators in a "windowed" way. + + We walk a window of revisions in the desired order. Within the + window, we first walk forwards to gather data, then in the desired + order (usually backwards) to display it. + + This function returns an iterator yielding contexts. 
Before + yielding each context, the iterator will first call the prepare + function on each context in the window in forward order.''' + + def increasing_windows(start, end, windowsize=8, sizelimit=512): + if start < end: + while start < end: + yield start, min(windowsize, end - start) + start += windowsize + if windowsize < sizelimit: + windowsize *= 2 + else: + while start > end: + yield start, min(windowsize, start - end - 1) + start -= windowsize + if windowsize < sizelimit: + windowsize *= 2 + + follow = opts.get('follow') or opts.get('follow_first') + + if not len(repo): + return [] + + if follow: + defrange = '%s:0' % repo['.'].rev() + else: + defrange = '-1:0' + revs = revrange(repo, opts['rev'] or [defrange]) + if not revs: + return [] + wanted = set() + slowpath = match.anypats() or (match.files() and opts.get('removed')) + fncache = {} + change = util.cachefunc(repo.changectx) + + # First step is to fill wanted, the set of revisions that we want to yield. + # When it does not induce extra cost, we also fill fncache for revisions in + # wanted: a cache of filenames that were changed (ctx.files()) and that + # match the file filtering conditions. + + if not slowpath and not match.files(): + # No files, no patterns. Display all revs. + wanted = set(revs) + copies = [] + + if not slowpath: + # We only have to read through the filelog to find wanted revisions + + minrev, maxrev = min(revs), max(revs) + def filerevgen(filelog, last): + """ + Only files, no patterns. Check the history of each file. 
+ + Examines filelog entries within minrev, maxrev linkrev range + Returns an iterator yielding (linkrev, parentlinkrevs, copied) + tuples in backwards order + """ + cl_count = len(repo) + revs = [] + for j in xrange(0, last + 1): + linkrev = filelog.linkrev(j) + if linkrev < minrev: + continue + # only yield rev for which we have the changelog, it can + # happen while doing "hg log" during a pull or commit + if linkrev >= cl_count: + break + + parentlinkrevs = [] + for p in filelog.parentrevs(j): + if p != nullrev: + parentlinkrevs.append(filelog.linkrev(p)) + n = filelog.node(j) + revs.append((linkrev, parentlinkrevs, + follow and filelog.renamed(n))) + + return reversed(revs) + def iterfiles(): + for filename in match.files(): + yield filename, None + for filename_node in copies: + yield filename_node + for file_, node in iterfiles(): + filelog = repo.file(file_) + if not len(filelog): + if node is None: + # A zero count may be a directory or deleted file, so + # try to find matching entries on the slow path. 
+ if follow: + raise util.Abort( + _('cannot follow nonexistent file: "%s"') % file_) + slowpath = True + break + else: + continue + + if node is None: + last = len(filelog) - 1 + else: + last = filelog.rev(node) + + + # keep track of all ancestors of the file + ancestors = set([filelog.linkrev(last)]) + + # iterate from latest to oldest revision + for rev, flparentlinkrevs, copied in filerevgen(filelog, last): + if not follow: + if rev > maxrev: + continue + else: + # Note that last might not be the first interesting + # rev to us: + # if the file has been changed after maxrev, we'll + # have linkrev(last) > maxrev, and we still need + # to explore the file graph + if rev not in ancestors: + continue + # XXX insert 1327 fix here + if flparentlinkrevs: + ancestors.update(flparentlinkrevs) + + fncache.setdefault(rev, []).append(file_) + wanted.add(rev) + if copied: + copies.append(copied) + if slowpath: + # We have to read the changelog to match filenames against + # changed files + + if follow: + raise util.Abort(_('can only follow copies/renames for explicit ' + 'filenames')) + + # The slow path checks files modified in every changeset. 
+ for i in sorted(revs): + ctx = change(i) + matches = filter(match, ctx.files()) + if matches: + fncache[i] = matches + wanted.add(i) + + class followfilter(object): + def __init__(self, onlyfirst=False): + self.startrev = nullrev + self.roots = set() + self.onlyfirst = onlyfirst + + def match(self, rev): + def realparents(rev): + if self.onlyfirst: + return repo.changelog.parentrevs(rev)[0:1] + else: + return filter(lambda x: x != nullrev, + repo.changelog.parentrevs(rev)) + + if self.startrev == nullrev: + self.startrev = rev + return True + + if rev > self.startrev: + # forward: all descendants + if not self.roots: + self.roots.add(self.startrev) + for parent in realparents(rev): + if parent in self.roots: + self.roots.add(rev) + return True + else: + # backwards: all parents + if not self.roots: + self.roots.update(realparents(self.startrev)) + if rev in self.roots: + self.roots.remove(rev) + self.roots.update(realparents(rev)) + return True + + return False + + # it might be worthwhile to do this in the iterator if the rev range + # is descending and the prune args are all within that range + for rev in opts.get('prune', ()): + rev = repo.changelog.rev(repo.lookup(rev)) + ff = followfilter() + stop = min(revs[0], revs[-1]) + for x in xrange(rev, stop - 1, -1): + if ff.match(x): + wanted.discard(x) + + # Now that wanted is correctly initialized, we can iterate over the + # revision range, yielding only revisions in wanted. 
+ def iterate(): + if follow and not match.files(): + ff = followfilter(onlyfirst=opts.get('follow_first')) + def want(rev): + return ff.match(rev) and rev in wanted + else: + def want(rev): + return rev in wanted + + for i, window in increasing_windows(0, len(revs)): + nrevs = [rev for rev in revs[i:i + window] if want(rev)] + for rev in sorted(nrevs): + fns = fncache.get(rev) + ctx = change(rev) + if not fns: + def fns_generator(): + for f in ctx.files(): + if match(f): + yield f + fns = fns_generator() + prepare(ctx, fns) + for rev in nrevs: + yield change(rev) + return iterate() + +def add(ui, repo, match, dryrun, listsubrepos, prefix): + join = lambda f: os.path.join(prefix, f) + bad = [] + oldbad = match.bad + match.bad = lambda x, y: bad.append(x) or oldbad(x, y) + names = [] + wctx = repo[None] + for f in repo.walk(match): + exact = match.exact(f) + if exact or f not in repo.dirstate: + names.append(f) + if ui.verbose or not exact: + ui.status(_('adding %s\n') % match.rel(join(f))) + + if listsubrepos: + for subpath in wctx.substate: + sub = wctx.sub(subpath) + try: + submatch = matchmod.narrowmatcher(subpath, match) + bad.extend(sub.add(ui, submatch, dryrun, prefix)) + except error.LookupError: + ui.status(_("skipping missing subrepository: %s\n") + % join(subpath)) + + if not dryrun: + rejected = wctx.add(names, prefix) + bad.extend(f for f in rejected if f in match.files()) + return bad + +def commit(ui, repo, commitfunc, pats, opts): + '''commit the specified files or all outstanding changes''' + date = opts.get('date') + if date: + opts['date'] = util.parsedate(date) + message = logmessage(opts) + + # extract addremove carefully -- this function can be called from a command + # that doesn't support addremove + if opts.get('addremove'): + addremove(repo, pats, opts) + + return commitfunc(ui, repo, message, match(repo, pats, opts), opts) + +def commiteditor(repo, ctx, subs): + if ctx.description(): + return ctx.description() + return 
commitforceeditor(repo, ctx, subs) + +def commitforceeditor(repo, ctx, subs): + edittext = [] + modified, added, removed = ctx.modified(), ctx.added(), ctx.removed() + if ctx.description(): + edittext.append(ctx.description()) + edittext.append("") + edittext.append("") # Empty line between message and comments. + edittext.append(_("HG: Enter commit message." + " Lines beginning with 'HG:' are removed.")) + edittext.append(_("HG: Leave message empty to abort commit.")) + edittext.append("HG: --") + edittext.append(_("HG: user: %s") % ctx.user()) + if ctx.p2(): + edittext.append(_("HG: branch merge")) + if ctx.branch(): + edittext.append(_("HG: branch '%s'") + % encoding.tolocal(ctx.branch())) + edittext.extend([_("HG: subrepo %s") % s for s in subs]) + edittext.extend([_("HG: added %s") % f for f in added]) + edittext.extend([_("HG: changed %s") % f for f in modified]) + edittext.extend([_("HG: removed %s") % f for f in removed]) + if not added and not modified and not removed: + edittext.append(_("HG: no files changed")) + edittext.append("") + # run editor in the repository root + olddir = os.getcwd() + os.chdir(repo.root) + text = repo.ui.edit("\n".join(edittext), ctx.user()) + text = re.sub("(?m)^HG:.*(\n|$)", "", text) + os.chdir(olddir) + + if not text.strip(): + raise util.Abort(_("empty commit message")) + + return text diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/cmdutil.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/cmdutil.pyo Binary files differnew file mode 100644 index 0000000..b4d8519 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/cmdutil.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/commands.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/commands.py new file mode 100644 index 0000000..ce27a3e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/commands.py @@ -0,0 +1,4530 @@ +# commands.py - command processing for mercurial +# +# 
Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from node import hex, nullid, nullrev, short +from lock import release +from i18n import _, gettext +import os, re, sys, difflib, time, tempfile +import hg, util, revlog, extensions, copies, error +import patch, help, mdiff, url, encoding, templatekw, discovery +import archival, changegroup, cmdutil, sshserver, hbisect, hgweb, hgweb.server +import merge as mergemod +import minirst, revset +import dagparser + +# Commands start here, listed alphabetically + +def add(ui, repo, *pats, **opts): + """add the specified files on the next commit + + Schedule files to be version controlled and added to the + repository. + + The files will be added to the repository at the next commit. To + undo an add before that, see :hg:`forget`. + + If no names are given, add all files to the repository. + + .. container:: verbose + + An example showing how new (unknown) files are added + automatically by :hg:`add`:: + + $ ls + foo.c + $ hg status + ? foo.c + $ hg add + adding foo.c + $ hg status + A foo.c + + Returns 0 if all files are successfully added. + """ + + m = cmdutil.match(repo, pats, opts) + rejected = cmdutil.add(ui, repo, m, opts.get('dry_run'), + opts.get('subrepos'), prefix="") + return rejected and 1 or 0 + +def addremove(ui, repo, *pats, **opts): + """add all new files, delete all missing files + + Add all new files and remove all missing files from the + repository. + + New files are ignored if they match any of the patterns in + .hgignore. As with add, these changes take effect at the next + commit. + + Use the -s/--similarity option to detect renamed files. With a + parameter greater than 0, this compares every removed file with + every added file and records those similar enough as renames. 
This + option takes a percentage between 0 (disabled) and 100 (files must + be identical) as its parameter. Detecting renamed files this way + can be expensive. After using this option, :hg:`status -C` can be + used to check which files were identified as moved or renamed. + + Returns 0 if all files are successfully added. + """ + try: + sim = float(opts.get('similarity') or 100) + except ValueError: + raise util.Abort(_('similarity must be a number')) + if sim < 0 or sim > 100: + raise util.Abort(_('similarity must be between 0 and 100')) + return cmdutil.addremove(repo, pats, opts, similarity=sim / 100.0) + +def annotate(ui, repo, *pats, **opts): + """show changeset information by line for each file + + List changes in files, showing the revision id responsible for + each line + + This command is useful for discovering when a change was made and + by whom. + + Without the -a/--text option, annotate will avoid processing files + it detects as binary. With -a, annotate will annotate the file + anyway, although the results will probably be neither useful + nor desirable. + + Returns 0 on success. 
+ """ + if opts.get('follow'): + # --follow is deprecated and now just an alias for -f/--file + # to mimic the behavior of Mercurial before version 1.5 + opts['file'] = 1 + + datefunc = ui.quiet and util.shortdate or util.datestr + getdate = util.cachefunc(lambda x: datefunc(x[0].date())) + + if not pats: + raise util.Abort(_('at least one filename or pattern is required')) + + opmap = [('user', lambda x: ui.shortuser(x[0].user())), + ('number', lambda x: str(x[0].rev())), + ('changeset', lambda x: short(x[0].node())), + ('date', getdate), + ('file', lambda x: x[0].path()), + ] + + if (not opts.get('user') and not opts.get('changeset') + and not opts.get('date') and not opts.get('file')): + opts['number'] = 1 + + linenumber = opts.get('line_number') is not None + if linenumber and (not opts.get('changeset')) and (not opts.get('number')): + raise util.Abort(_('at least one of -n/-c is required for -l')) + + funcmap = [func for op, func in opmap if opts.get(op)] + if linenumber: + lastfunc = funcmap[-1] + funcmap[-1] = lambda x: "%s:%s" % (lastfunc(x), x[1]) + + ctx = repo[opts.get('rev')] + m = cmdutil.match(repo, pats, opts) + follow = not opts.get('no_follow') + for abs in ctx.walk(m): + fctx = ctx[abs] + if not opts.get('text') and util.binary(fctx.data()): + ui.write(_("%s: binary file\n") % ((pats and m.rel(abs)) or abs)) + continue + + lines = fctx.annotate(follow=follow, linenumber=linenumber) + pieces = [] + + for f in funcmap: + l = [f(n) for n, dummy in lines] + if l: + sized = [(x, encoding.colwidth(x)) for x in l] + ml = max([w for x, w in sized]) + pieces.append(["%s%s" % (' ' * (ml - w), x) for x, w in sized]) + + if pieces: + for p, l in zip(zip(*pieces), lines): + ui.write("%s: %s" % (" ".join(p), l[1])) + +def archive(ui, repo, dest, **opts): + '''create an unversioned archive of a repository revision + + By default, the revision used is the parent of the working + directory; use -r/--rev to specify a different revision. 
+ + The archive type is automatically detected based on file + extension (or override using -t/--type). + + Valid types are: + + :``files``: a directory full of files (default) + :``tar``: tar archive, uncompressed + :``tbz2``: tar archive, compressed using bzip2 + :``tgz``: tar archive, compressed using gzip + :``uzip``: zip archive, uncompressed + :``zip``: zip archive, compressed using deflate + + The exact name of the destination archive or directory is given + using a format string; see :hg:`help export` for details. + + Each member added to an archive file has a directory prefix + prepended. Use -p/--prefix to specify a format string for the + prefix. The default is the basename of the archive, with suffixes + removed. + + Returns 0 on success. + ''' + + ctx = repo[opts.get('rev')] + if not ctx: + raise util.Abort(_('no working directory: please specify a revision')) + node = ctx.node() + dest = cmdutil.make_filename(repo, dest, node) + if os.path.realpath(dest) == repo.root: + raise util.Abort(_('repository root cannot be destination')) + + kind = opts.get('type') or archival.guesskind(dest) or 'files' + prefix = opts.get('prefix') + + if dest == '-': + if kind == 'files': + raise util.Abort(_('cannot archive plain files to stdout')) + dest = sys.stdout + if not prefix: + prefix = os.path.basename(repo.root) + '-%h' + + prefix = cmdutil.make_filename(repo, prefix, node) + matchfn = cmdutil.match(repo, [], opts) + archival.archive(repo, dest, node, kind, not opts.get('no_decode'), + matchfn, prefix, subrepos=opts.get('subrepos')) + +def backout(ui, repo, node=None, rev=None, **opts): + '''reverse effect of earlier changeset + + The backout command merges the reverse effect of the reverted + changeset into the working directory. + + With the --merge option, it first commits the reverted changes + as a new changeset. This new changeset is a child of the reverted + changeset. 
+ The --merge option remembers the parent of the working directory + before starting the backout, then merges the new head with that + changeset afterwards. + This will result in an explicit merge in the history. + + If you backout a changeset other than the original parent of the + working directory, the result of this merge is not committed, + as with a normal merge. Otherwise, no merge is needed and the + commit is automatic. + + Note that the default behavior (without --merge) has changed in + version 1.7. To restore the previous default behavior, use + :hg:`backout --merge` and then :hg:`update --clean .` to get rid of + the ongoing merge. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + Returns 0 on success. + ''' + if rev and node: + raise util.Abort(_("please specify just one revision")) + + if not rev: + rev = node + + if not rev: + raise util.Abort(_("please specify a revision to backout")) + + date = opts.get('date') + if date: + opts['date'] = util.parsedate(date) + + cmdutil.bail_if_changed(repo) + node = repo.lookup(rev) + + op1, op2 = repo.dirstate.parents() + a = repo.changelog.ancestor(op1, node) + if a != node: + raise util.Abort(_('cannot backout change on a different branch')) + + p1, p2 = repo.changelog.parents(node) + if p1 == nullid: + raise util.Abort(_('cannot backout a change with no parents')) + if p2 != nullid: + if not opts.get('parent'): + raise util.Abort(_('cannot backout a merge changeset without ' + '--parent')) + p = repo.lookup(opts['parent']) + if p not in (p1, p2): + raise util.Abort(_('%s is not a parent of %s') % + (short(p), short(node))) + parent = p + else: + if opts.get('parent'): + raise util.Abort(_('cannot use --parent on non-merge changeset')) + parent = p1 + + # the backout should appear on the same branch + branch = repo.dirstate.branch() + hg.clean(repo, node, show_stats=False) + repo.dirstate.setbranch(branch) + revert_opts = opts.copy() + revert_opts['date'] = None + revert_opts['all'] = 
True + revert_opts['rev'] = hex(parent) + revert_opts['no_backup'] = None + revert(ui, repo, **revert_opts) + if not opts.get('merge') and op1 != node: + try: + ui.setconfig('ui', 'forcemerge', opts.get('tool', '')) + return hg.update(repo, op1) + finally: + ui.setconfig('ui', 'forcemerge', '') + + commit_opts = opts.copy() + commit_opts['addremove'] = False + if not commit_opts['message'] and not commit_opts['logfile']: + # we don't translate commit messages + commit_opts['message'] = "Backed out changeset %s" % short(node) + commit_opts['force_editor'] = True + commit(ui, repo, **commit_opts) + def nice(node): + return '%d:%s' % (repo.changelog.rev(node), short(node)) + ui.status(_('changeset %s backs out changeset %s\n') % + (nice(repo.changelog.tip()), nice(node))) + if opts.get('merge') and op1 != node: + hg.clean(repo, op1, show_stats=False) + ui.status(_('merging with changeset %s\n') + % nice(repo.changelog.tip())) + try: + ui.setconfig('ui', 'forcemerge', opts.get('tool', '')) + return hg.merge(repo, hex(repo.changelog.tip())) + finally: + ui.setconfig('ui', 'forcemerge', '') + return 0 + +def bisect(ui, repo, rev=None, extra=None, command=None, + reset=None, good=None, bad=None, skip=None, noupdate=None): + """subdivision search of changesets + + This command helps to find changesets which introduce problems. To + use, mark the earliest changeset you know exhibits the problem as + bad, then mark the latest changeset which is free from the problem + as good. Bisect will update your working directory to a revision + for testing (unless the -U/--noupdate option is specified). Once + you have performed tests, mark the working directory as good or + bad, and bisect will either update to another candidate changeset + or announce that it has found the bad revision. + + As a shortcut, you can also use the revision argument to mark a + revision as good or bad without checking it out first. + + If you supply a command, it will be used for automatic bisection. 
+ Its exit status will be used to mark revisions as good or bad: + status 0 means good, 125 means to skip the revision, 127 + (command not found) will abort the bisection, and any other + non-zero exit status means the revision is bad. + + Returns 0 on success. + """ + def print_result(nodes, good): + displayer = cmdutil.show_changeset(ui, repo, {}) + if len(nodes) == 1: + # narrowed it down to a single revision + if good: + ui.write(_("The first good revision is:\n")) + else: + ui.write(_("The first bad revision is:\n")) + displayer.show(repo[nodes[0]]) + parents = repo[nodes[0]].parents() + if len(parents) > 1: + side = good and state['bad'] or state['good'] + num = len(set(i.node() for i in parents) & set(side)) + if num == 1: + common = parents[0].ancestor(parents[1]) + ui.write(_('Not all ancestors of this changeset have been' + ' checked.\nTo check the other ancestors, start' + ' from the common ancestor, %s.\n' % common)) + else: + # multiple possible revisions + if good: + ui.write(_("Due to skipped revisions, the first " + "good revision could be any of:\n")) + else: + ui.write(_("Due to skipped revisions, the first " + "bad revision could be any of:\n")) + for n in nodes: + displayer.show(repo[n]) + displayer.close() + + def check_state(state, interactive=True): + if not state['good'] or not state['bad']: + if (good or bad or skip or reset) and interactive: + return + if not state['good']: + raise util.Abort(_('cannot bisect (no known good revisions)')) + else: + raise util.Abort(_('cannot bisect (no known bad revisions)')) + return True + + # backward compatibility + if rev in "good bad reset init".split(): + ui.warn(_("(use of 'hg bisect <cmd>' is deprecated)\n")) + cmd, rev, extra = rev, extra, None + if cmd == "good": + good = True + elif cmd == "bad": + bad = True + else: + reset = True + elif extra or good + bad + skip + reset + bool(command) > 1: + raise util.Abort(_('incompatible arguments')) + + if reset: + p = repo.join("bisect.state") + if 
os.path.exists(p): + os.unlink(p) + return + + state = hbisect.load_state(repo) + + if command: + changesets = 1 + try: + while changesets: + # update state + status = util.system(command) + if status == 125: + transition = "skip" + elif status == 0: + transition = "good" + # status < 0 means process was killed + elif status == 127: + raise util.Abort(_("failed to execute %s") % command) + elif status < 0: + raise util.Abort(_("%s killed") % command) + else: + transition = "bad" + ctx = repo[rev or '.'] + state[transition].append(ctx.node()) + ui.status(_('Changeset %d:%s: %s\n') % (ctx, ctx, transition)) + check_state(state, interactive=False) + # bisect + nodes, changesets, good = hbisect.bisect(repo.changelog, state) + # update to next check + cmdutil.bail_if_changed(repo) + hg.clean(repo, nodes[0], show_stats=False) + finally: + hbisect.save_state(repo, state) + print_result(nodes, good) + return + + # update state + + if rev: + nodes = [repo.lookup(i) for i in cmdutil.revrange(repo, [rev])] + else: + nodes = [repo.lookup('.')] + + if good or bad or skip: + if good: + state['good'] += nodes + elif bad: + state['bad'] += nodes + elif skip: + state['skip'] += nodes + hbisect.save_state(repo, state) + + if not check_state(state): + return + + # actually bisect + nodes, changesets, good = hbisect.bisect(repo.changelog, state) + if changesets == 0: + print_result(nodes, good) + else: + assert len(nodes) == 1 # only a single node can be tested next + node = nodes[0] + # compute the approximate number of remaining tests + tests, size = 0, 2 + while size <= changesets: + tests, size = tests + 1, size * 2 + rev = repo.changelog.rev(node) + ui.write(_("Testing changeset %d:%s " + "(%d changesets remaining, ~%d tests)\n") + % (rev, short(node), changesets, tests)) + if not noupdate: + cmdutil.bail_if_changed(repo) + return hg.clean(repo, node) + +def branch(ui, repo, label=None, **opts): + """set or show the current branch name + + With no argument, show the current 
branch name. With one argument, + set the working directory branch name (the branch will not exist + in the repository until the next commit). Standard practice + recommends that primary development take place on the 'default' + branch. + + Unless -f/--force is specified, branch will not let you set a + branch name that already exists, even if it's inactive. + + Use -C/--clean to reset the working directory branch to that of + the parent of the working directory, negating a previous branch + change. + + Use the command :hg:`update` to switch to an existing branch. Use + :hg:`commit --close-branch` to mark this branch as closed. + + Returns 0 on success. + """ + + if opts.get('clean'): + label = repo[None].parents()[0].branch() + repo.dirstate.setbranch(label) + ui.status(_('reset working directory to branch %s\n') % label) + elif label: + utflabel = encoding.fromlocal(label) + if not opts.get('force') and utflabel in repo.branchtags(): + if label not in [p.branch() for p in repo.parents()]: + raise util.Abort(_('a branch of the same name already exists' + " (use 'hg update' to switch to it)")) + repo.dirstate.setbranch(utflabel) + ui.status(_('marked working directory as branch %s\n') % label) + else: + ui.write("%s\n" % encoding.tolocal(repo.dirstate.branch())) + +def branches(ui, repo, active=False, closed=False): + """list repository named branches + + List the repository's named branches, indicating which ones are + inactive. If -c/--closed is specified, also list branches which have + been marked closed (see :hg:`commit --close-branch`). + + If -a/--active is specified, only show active branches. A branch + is considered active if it contains repository heads. + + Use the command :hg:`update` to switch to an existing branch. + + Returns 0. 
+ """ + + hexfunc = ui.debugflag and hex or short + activebranches = [repo[n].branch() for n in repo.heads()] + def testactive(tag, node): + realhead = tag in activebranches + open = node in repo.branchheads(tag, closed=False) + return realhead and open + branches = sorted([(testactive(tag, node), repo.changelog.rev(node), tag) + for tag, node in repo.branchtags().items()], + reverse=True) + + for isactive, node, tag in branches: + if (not active) or isactive: + encodedtag = encoding.tolocal(tag) + if ui.quiet: + ui.write("%s\n" % encodedtag) + else: + hn = repo.lookup(node) + if isactive: + label = 'branches.active' + notice = '' + elif hn not in repo.branchheads(tag, closed=False): + if not closed: + continue + label = 'branches.closed' + notice = _(' (closed)') + else: + label = 'branches.inactive' + notice = _(' (inactive)') + if tag == repo.dirstate.branch(): + label = 'branches.current' + rev = str(node).rjust(31 - encoding.colwidth(encodedtag)) + rev = ui.label('%s:%s' % (rev, hexfunc(hn)), 'log.changeset') + encodedtag = ui.label(encodedtag, label) + ui.write("%s %s%s\n" % (encodedtag, rev, notice)) + +def bundle(ui, repo, fname, dest=None, **opts): + """create a changegroup file + + Generate a compressed changegroup file collecting changesets not + known to be in another repository. + + If you omit the destination repository, then hg assumes the + destination will have all the nodes you specify with --base + parameters. To create a bundle containing all changesets, use + -a/--all (or --base null). + + You can change compression method with the -t/--type option. + The available compression methods are: none, bzip2, and + gzip (by default, bundles are compressed using bzip2). + + The bundle file can then be transferred using conventional means + and applied to another repository with the unbundle or pull + command. This is useful when direct push and pull are not + available or when exporting an entire repository is undesirable. 
+ + Applying bundles preserves all changeset contents including + permissions, copy/rename information, and revision history. + + Returns 0 on success, 1 if no changes found. + """ + revs = opts.get('rev') or None + if opts.get('all'): + base = ['null'] + else: + base = opts.get('base') + if base: + if dest: + raise util.Abort(_("--base is incompatible with specifying " + "a destination")) + base = [repo.lookup(rev) for rev in base] + # create the right base + # XXX: nodesbetween / changegroup* should be "fixed" instead + o = [] + has = set((nullid,)) + for n in base: + has.update(repo.changelog.reachable(n)) + if revs: + revs = [repo.lookup(rev) for rev in revs] + visit = revs[:] + has.difference_update(visit) + else: + visit = repo.changelog.heads() + seen = {} + while visit: + n = visit.pop(0) + parents = [p for p in repo.changelog.parents(n) if p not in has] + if len(parents) == 0: + if n not in has: + o.append(n) + else: + for p in parents: + if p not in seen: + seen[p] = 1 + visit.append(p) + else: + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest, opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), dest) + revs, checkout = hg.addbranchrevs(repo, other, branches, revs) + if revs: + revs = [repo.lookup(rev) for rev in revs] + o = discovery.findoutgoing(repo, other, force=opts.get('force')) + + if not o: + ui.status(_("no changes found\n")) + return 1 + + if revs: + cg = repo.changegroupsubset(o, revs, 'bundle') + else: + cg = repo.changegroup(o, 'bundle') + + bundletype = opts.get('type', 'bzip2').lower() + btypes = {'none': 'HG10UN', 'bzip2': 'HG10BZ', 'gzip': 'HG10GZ'} + bundletype = btypes.get(bundletype) + if bundletype not in changegroup.bundletypes: + raise util.Abort(_('unknown bundle type specified with --type')) + + changegroup.writebundle(cg, fname, bundletype) + +def cat(ui, repo, file1, *pats, **opts): + """output the current or given revision of files + + Print the specified 
files as they were at the given revision. If + no revision is given, the parent of the working directory is used, + or tip if no revision is checked out. + + Output may be to a file, in which case the name of the file is + given using a format string. The formatting rules are the same as + for the export command, with the following additions: + + :``%s``: basename of file being printed + :``%d``: dirname of file being printed, or '.' if in repository root + :``%p``: root-relative path name of file being printed + + Returns 0 on success. + """ + ctx = cmdutil.revsingle(repo, opts.get('rev')) + err = 1 + m = cmdutil.match(repo, (file1,) + pats, opts) + for abs in ctx.walk(m): + fp = cmdutil.make_file(repo, opts.get('output'), ctx.node(), pathname=abs) + data = ctx[abs].data() + if opts.get('decode'): + data = repo.wwritedata(abs, data) + fp.write(data) + err = 0 + return err + +def clone(ui, source, dest=None, **opts): + """make a copy of an existing repository + + Create a copy of an existing repository in a new directory. + + If no destination directory name is specified, it defaults to the + basename of the source. + + The location of the source is added to the new repository's + .hg/hgrc file, as the default to be used for future pulls. + + See :hg:`help urls` for valid source format details. + + It is possible to specify an ``ssh://`` URL as the destination, but no + .hg/hgrc and working directory will be created on the remote side. + Please see :hg:`help urls` for important details about ``ssh://`` URLs. + + A set of changesets (tags, or branch names) to pull may be specified + by listing each changeset (tag, or branch name) with -r/--rev. + If -r/--rev is used, the cloned repository will contain only a subset + of the changesets of the source repository. Only the set of changesets + defined by all -r/--rev options (including all their ancestors) + will be pulled into the destination repository. 
+ No subsequent changesets (including subsequent tags) will be present + in the destination. + + Using -r/--rev (or 'clone src#rev dest') implies --pull, even for + local source repositories. + + For efficiency, hardlinks are used for cloning whenever the source + and destination are on the same filesystem (note this applies only + to the repository data, not to the working directory). Some + filesystems, such as AFS, implement hardlinking incorrectly, but + do not report errors. In these cases, use the --pull option to + avoid hardlinking. + + In some cases, you can clone repositories and the working directory + using full hardlinks with :: + + $ cp -al REPO REPOCLONE + + This is the fastest way to clone, but it is not always safe. The + operation is not atomic (making sure REPO is not modified during + the operation is up to you) and you have to make sure your editor + breaks hardlinks (Emacs and most Linux Kernel tools do so). Also, + this is not compatible with certain extensions that place their + metadata under the .hg directory, such as mq. + + Mercurial will update the working directory to the first applicable + revision from this list: + + a) null if -U or the source repository has no changesets + b) if -u . and the source repository is local, the first parent of + the source repository's working directory + c) the changeset specified with -u (if a branch name, this means the + latest head of that branch) + d) the changeset specified with -r + e) the tipmost head specified with -b + f) the tipmost head specified with the url#branch source syntax + g) the tipmost head of the default branch + h) tip + + Returns 0 on success. 
+ """ + if opts.get('noupdate') and opts.get('updaterev'): + raise util.Abort(_("cannot specify both --noupdate and --updaterev")) + + r = hg.clone(hg.remoteui(ui, opts), source, dest, + pull=opts.get('pull'), + stream=opts.get('uncompressed'), + rev=opts.get('rev'), + update=opts.get('updaterev') or not opts.get('noupdate'), + branch=opts.get('branch')) + + return r is None + +def commit(ui, repo, *pats, **opts): + """commit the specified files or all outstanding changes + + Commit changes to the given files into the repository. Unlike a + centralized RCS, this operation is a local operation. See + :hg:`push` for a way to actively distribute your changes. + + If a list of files is omitted, all changes reported by :hg:`status` + will be committed. + + If you are committing the result of a merge, do not provide any + filenames or -I/-X filters. + + If no commit message is specified, Mercurial starts your + configured editor where you can enter a message. In case your + commit fails, you will find a backup of your message in + ``.hg/last-message.txt``. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + Returns 0 on success, 1 if nothing changed. 
+ """ + extra = {} + if opts.get('close_branch'): + if repo['.'].node() not in repo.branchheads(): + # The topo heads set is included in the branch heads set of the + # current branch, so it's sufficient to test branchheads + raise util.Abort(_('can only close branch heads')) + extra['close'] = 1 + e = cmdutil.commiteditor + if opts.get('force_editor'): + e = cmdutil.commitforceeditor + + def commitfunc(ui, repo, message, match, opts): + return repo.commit(message, opts.get('user'), opts.get('date'), match, + editor=e, extra=extra) + + branch = repo[None].branch() + bheads = repo.branchheads(branch) + + node = cmdutil.commit(ui, repo, commitfunc, pats, opts) + if not node: + ui.status(_("nothing changed\n")) + return 1 + + ctx = repo[node] + parents = ctx.parents() + + if bheads and not [x for x in parents + if x.node() in bheads and x.branch() == branch]: + ui.status(_('created new head\n')) + # The message is not printed for initial roots. For the other + # changesets, it is printed in the following situations: + # + # Par column: for the 2 parents with ... + # N: null or no parent + # B: parent is on another named branch + # C: parent is a regular non head changeset + # H: parent was a branch head of the current branch + # Msg column: whether we print "created new head" message + # In the following, it is assumed that there already exists some + # initial branch heads of the current branch, otherwise nothing is + # printed anyway. 
+ # + # Par Msg Comment + # NN y additional topo root + # + # BN y additional branch root + # CN y additional topo head + # HN n usual case + # + # BB y weird additional branch root + # CB y branch merge + # HB n merge with named branch + # + # CC y additional head from merge + # CH n merge with a head + # + # HH n head merge: head count decreases + + if not opts.get('close_branch'): + for r in parents: + if r.extra().get('close') and r.branch() == branch: + ui.status(_('reopening closed branch head %d\n') % r) + + if ui.debugflag: + ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx.hex())) + elif ui.verbose: + ui.write(_('committed changeset %d:%s\n') % (int(ctx), ctx)) + +def copy(ui, repo, *pats, **opts): + """mark files as copied for the next commit + + Mark dest as having copies of source files. If dest is a + directory, copies are put in that directory. If dest is a file, + the source must be a single file. + + By default, this command copies the contents of files as they + exist in the working directory. If invoked with -A/--after, the + operation is recorded, but no copying is performed. + + This command takes effect with the next commit. To undo a copy + before that, see :hg:`revert`. + + Returns 0 on success, 1 if errors are encountered. 
+ """ + wlock = repo.wlock(False) + try: + return cmdutil.copy(ui, repo, pats, opts) + finally: + wlock.release() + +def debugancestor(ui, repo, *args): + """find the ancestor revision of two revisions in a given index""" + if len(args) == 3: + index, rev1, rev2 = args + r = revlog.revlog(util.opener(os.getcwd(), audit=False), index) + lookup = r.lookup + elif len(args) == 2: + if not repo: + raise util.Abort(_("there is no Mercurial repository here " + "(.hg not found)")) + rev1, rev2 = args + r = repo.changelog + lookup = repo.lookup + else: + raise util.Abort(_('either two or three arguments required')) + a = r.ancestor(lookup(rev1), lookup(rev2)) + ui.write("%d:%s\n" % (r.rev(a), hex(a))) + +def debugbuilddag(ui, repo, text, + mergeable_file=False, + appended_file=False, + overwritten_file=False, + new_file=False): + """builds a repo with a given dag from scratch in the current empty repo + + Elements: + + - "+n" is a linear run of n nodes based on the current default parent + - "." is a single node based on the current default parent + - "$" resets the default parent to null (implied at the start); + otherwise the default parent is always the last node created + - "<p" sets the default parent to the backref p + - "*p" is a fork at parent p, which is a backref + - "*p1/p2" is a merge of parents p1 and p2, which are backrefs + - "/p2" is a merge of the preceding node and p2 + - ":tag" defines a local tag for the preceding node + - "@branch" sets the named branch for subsequent nodes + - "!command" runs the command using your shell + - "!!my command\\n" is like "!", but to the end of the line + - "#...\\n" is a comment up to the end of the line + + Whitespace between the above elements is ignored. + + A backref is either + + - a number n, which references the node curr-n, where curr is the current + node, or + - the name of a local tag you placed earlier using ":tag", or + - empty to denote the default parent. 
+ + All string valued-elements are either strictly alphanumeric, or must + be enclosed in double quotes ("..."), with "\\" as escape character. + + Note that the --overwritten-file and --appended-file options imply the + use of "HGMERGE=internal:local" during DAG buildup. + """ + + if not (mergeable_file or appended_file or overwritten_file or new_file): + raise util.Abort(_('need at least one of -m, -a, -o, -n')) + + if len(repo.changelog) > 0: + raise util.Abort(_('repository is not empty')) + + if overwritten_file or appended_file: + # we don't want to fail in merges during buildup + os.environ['HGMERGE'] = 'internal:local' + + def writefile(fname, text, fmode="wb"): + f = open(fname, fmode) + try: + f.write(text) + finally: + f.close() + + if mergeable_file: + linesperrev = 2 + # determine number of revs in DAG + n = 0 + for type, data in dagparser.parsedag(text): + if type == 'n': + n += 1 + # make a file with k lines per rev + writefile("mf", "\n".join(str(i) for i in xrange(0, n * linesperrev)) + + "\n") + + at = -1 + atbranch = 'default' + for type, data in dagparser.parsedag(text): + if type == 'n': + ui.status('node %s\n' % str(data)) + id, ps = data + p1 = ps[0] + if p1 != at: + update(ui, repo, node=str(p1), clean=True) + at = p1 + if repo.dirstate.branch() != atbranch: + branch(ui, repo, atbranch, force=True) + if len(ps) > 1: + p2 = ps[1] + merge(ui, repo, node=p2) + + if mergeable_file: + f = open("mf", "rb+") + try: + lines = f.read().split("\n") + lines[id * linesperrev] += " r%i" % id + f.seek(0) + f.write("\n".join(lines)) + finally: + f.close() + + if appended_file: + writefile("af", "r%i\n" % id, "ab") + + if overwritten_file: + writefile("of", "r%i\n" % id) + + if new_file: + writefile("nf%i" % id, "r%i\n" % id) + + commit(ui, repo, addremove=True, message="r%i" % id, date=(id, 0)) + at = id + elif type == 'l': + id, name = data + ui.status('tag %s\n' % name) + tag(ui, repo, name, local=True) + elif type == 'a': + ui.status('branch %s\n' % 
data) + atbranch = data + elif type in 'cC': + r = util.system(data, cwd=repo.root) + if r: + desc, r = util.explain_exit(r) + raise util.Abort(_('%s command %s') % (data, desc)) + +def debugcommands(ui, cmd='', *args): + """list all available commands and options""" + for cmd, vals in sorted(table.iteritems()): + cmd = cmd.split('|')[0].strip('^') + opts = ', '.join([i[1] for i in vals[1]]) + ui.write('%s: %s\n' % (cmd, opts)) + +def debugcomplete(ui, cmd='', **opts): + """returns the completion list associated with the given command""" + + if opts.get('options'): + options = [] + otables = [globalopts] + if cmd: + aliases, entry = cmdutil.findcmd(cmd, table, False) + otables.append(entry[1]) + for t in otables: + for o in t: + if "(DEPRECATED)" in o[3]: + continue + if o[0]: + options.append('-%s' % o[0]) + options.append('--%s' % o[1]) + ui.write("%s\n" % "\n".join(options)) + return + + cmdlist = cmdutil.findpossible(cmd, table) + if ui.verbose: + cmdlist = [' '.join(c[0]) for c in cmdlist.values()] + ui.write("%s\n" % "\n".join(sorted(cmdlist))) + +def debugfsinfo(ui, path = "."): + """show information detected about current filesystem""" + open('.debugfsinfo', 'w').write('') + ui.write('exec: %s\n' % (util.checkexec(path) and 'yes' or 'no')) + ui.write('symlink: %s\n' % (util.checklink(path) and 'yes' or 'no')) + ui.write('case-sensitive: %s\n' % (util.checkcase('.debugfsinfo') + and 'yes' or 'no')) + os.unlink('.debugfsinfo') + +def debugrebuildstate(ui, repo, rev="tip"): + """rebuild the dirstate as it would look like for the given revision""" + ctx = repo[rev] + wlock = repo.wlock() + try: + repo.dirstate.rebuild(ctx.node(), ctx.manifest()) + finally: + wlock.release() + +def debugcheckstate(ui, repo): + """validate the correctness of the current dirstate""" + parent1, parent2 = repo.dirstate.parents() + m1 = repo[parent1].manifest() + m2 = repo[parent2].manifest() + errors = 0 + for f in repo.dirstate: + state = repo.dirstate[f] + if state in "nr" and f 
not in m1: + ui.warn(_("%s in state %s, but not in manifest1\n") % (f, state)) + errors += 1 + if state in "a" and f in m1: + ui.warn(_("%s in state %s, but also in manifest1\n") % (f, state)) + errors += 1 + if state in "m" and f not in m1 and f not in m2: + ui.warn(_("%s in state %s, but not in either manifest\n") % + (f, state)) + errors += 1 + for f in m1: + state = repo.dirstate[f] + if state not in "nrm": + ui.warn(_("%s in manifest1, but listed as state %s") % (f, state)) + errors += 1 + if errors: + error = _(".hg/dirstate inconsistent with current parent's manifest") + raise util.Abort(error) + +def showconfig(ui, repo, *values, **opts): + """show combined config settings from all hgrc files + + With no arguments, print names and values of all config items. + + With one argument of the form section.name, print just the value + of that config item. + + With multiple arguments, print names and values of all config + items with matching section names. + + With --debug, the source (filename and line number) is printed + for each config item. + + Returns 0 on success. + """ + + for f in util.rcpath(): + ui.debug(_('read config from: %s\n') % f) + untrusted = bool(opts.get('untrusted')) + if values: + sections = [v for v in values if '.' not in v] + items = [v for v in values if '.' in v] + if len(items) > 1 or items and sections: + raise util.Abort(_('only one config item permitted')) + for section, name, value in ui.walkconfig(untrusted=untrusted): + sectname = section + '.' 
+ name + if values: + for v in values: + if v == section: + ui.debug('%s: ' % + ui.configsource(section, name, untrusted)) + ui.write('%s=%s\n' % (sectname, value)) + elif v == sectname: + ui.debug('%s: ' % + ui.configsource(section, name, untrusted)) + ui.write(value, '\n') + else: + ui.debug('%s: ' % + ui.configsource(section, name, untrusted)) + ui.write('%s=%s\n' % (sectname, value)) + +def debugpushkey(ui, repopath, namespace, *keyinfo): + '''access the pushkey key/value protocol + + With two args, list the keys in the given namespace. + + With five args, set a key to new if it currently is set to old. + Reports success or failure. + ''' + + target = hg.repository(ui, repopath) + if keyinfo: + key, old, new = keyinfo + r = target.pushkey(namespace, key, old, new) + ui.status(str(r) + '\n') + return not(r) + else: + for k, v in target.listkeys(namespace).iteritems(): + ui.write("%s\t%s\n" % (k.encode('string-escape'), + v.encode('string-escape'))) + +def debugrevspec(ui, repo, expr): + '''parse and apply a revision specification''' + if ui.verbose: + tree = revset.parse(expr) + ui.note(tree, "\n") + func = revset.match(expr) + for c in func(repo, range(len(repo))): + ui.write("%s\n" % c) + +def debugsetparents(ui, repo, rev1, rev2=None): + """manually set the parents of the current working directory + + This is useful for writing repository conversion tools, but should + be used with care. + + Returns 0 on success. 
+ """ + + if not rev2: + rev2 = hex(nullid) + + wlock = repo.wlock() + try: + repo.dirstate.setparents(repo.lookup(rev1), repo.lookup(rev2)) + finally: + wlock.release() + +def debugstate(ui, repo, nodates=None): + """show the contents of the current dirstate""" + timestr = "" + showdate = not nodates + for file_, ent in sorted(repo.dirstate._map.iteritems()): + if showdate: + if ent[3] == -1: + # Pad or slice to locale representation + locale_len = len(time.strftime("%Y-%m-%d %H:%M:%S ", + time.localtime(0))) + timestr = 'unset' + timestr = (timestr[:locale_len] + + ' ' * (locale_len - len(timestr))) + else: + timestr = time.strftime("%Y-%m-%d %H:%M:%S ", + time.localtime(ent[3])) + if ent[1] & 020000: + mode = 'lnk' + else: + mode = '%3o' % (ent[1] & 0777) + ui.write("%c %s %10d %s%s\n" % (ent[0], mode, ent[2], timestr, file_)) + for f in repo.dirstate.copies(): + ui.write(_("copy: %s -> %s\n") % (repo.dirstate.copied(f), f)) + +def debugsub(ui, repo, rev=None): + if rev == '': + rev = None + for k, v in sorted(repo[rev].substate.items()): + ui.write('path %s\n' % k) + ui.write(' source %s\n' % v[0]) + ui.write(' revision %s\n' % v[1]) + +def debugdag(ui, repo, file_=None, *revs, **opts): + """format the changelog or an index DAG as a concise textual description + + If you pass a revlog index, the revlog's DAG is emitted. If you list + revision numbers, they get labelled in the output as rN. + + Otherwise, the changelog DAG of the current repo is emitted. 
+ """ + spaces = opts.get('spaces') + dots = opts.get('dots') + if file_: + rlog = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) + revs = set((int(r) for r in revs)) + def events(): + for r in rlog: + yield 'n', (r, list(set(p for p in rlog.parentrevs(r) if p != -1))) + if r in revs: + yield 'l', (r, "r%i" % r) + elif repo: + cl = repo.changelog + tags = opts.get('tags') + branches = opts.get('branches') + if tags: + labels = {} + for l, n in repo.tags().items(): + labels.setdefault(cl.rev(n), []).append(l) + def events(): + b = "default" + for r in cl: + if branches: + newb = cl.read(cl.node(r))[5]['branch'] + if newb != b: + yield 'a', newb + b = newb + yield 'n', (r, list(set(p for p in cl.parentrevs(r) if p != -1))) + if tags: + ls = labels.get(r) + if ls: + for l in ls: + yield 'l', (r, l) + else: + raise util.Abort(_('need repo for changelog dag')) + + for line in dagparser.dagtextlines(events(), + addspaces=spaces, + wraplabels=True, + wrapannotations=True, + wrapnonlinear=dots, + usedots=dots, + maxlinewidth=70): + ui.write(line) + ui.write("\n") + +def debugdata(ui, repo, file_, rev): + """dump the contents of a data file revision""" + r = None + if repo: + filelog = repo.file(file_) + if len(filelog): + r = filelog + if not r: + r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_[:-2] + ".i") + try: + ui.write(r.revision(r.lookup(rev))) + except KeyError: + raise util.Abort(_('invalid revision identifier %s') % rev) + +def debugdate(ui, date, range=None, **opts): + """parse and display a date""" + if opts["extended"]: + d = util.parsedate(date, util.extendeddateformats) + else: + d = util.parsedate(date) + ui.write("internal: %s %s\n" % d) + ui.write("standard: %s\n" % util.datestr(d)) + if range: + m = util.matchdate(range) + ui.write("match: %s\n" % m(d[0])) + +def debugindex(ui, repo, file_, **opts): + """dump the contents of an index file""" + r = None + if repo: + filelog = repo.file(file_) + if len(filelog): + r = filelog + 
+ format = opts.get('format', 0) + if format not in (0, 1): + raise util.Abort("unknown format %d" % format) + + if not r: + r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) + + if format == 0: + ui.write(" rev offset length base linkrev" + " nodeid p1 p2\n") + elif format == 1: + ui.write(" rev flag offset length" + " size base link p1 p2 nodeid\n") + + for i in r: + node = r.node(i) + if format == 0: + try: + pp = r.parents(node) + except: + pp = [nullid, nullid] + ui.write("% 6d % 9d % 7d % 6d % 7d %s %s %s\n" % ( + i, r.start(i), r.length(i), r.base(i), r.linkrev(i), + short(node), short(pp[0]), short(pp[1]))) + elif format == 1: + pr = r.parentrevs(i) + ui.write("% 6d %04x % 8d % 8d % 8d % 6d % 6d % 6d % 6d %s\n" % ( + i, r.flags(i), r.start(i), r.length(i), r.rawsize(i), + r.base(i), r.linkrev(i), pr[0], pr[1], short(node))) + +def debugindexdot(ui, repo, file_): + """dump an index DAG as a graphviz dot file""" + r = None + if repo: + filelog = repo.file(file_) + if len(filelog): + r = filelog + if not r: + r = revlog.revlog(util.opener(os.getcwd(), audit=False), file_) + ui.write("digraph G {\n") + for i in r: + node = r.node(i) + pp = r.parents(node) + ui.write("\t%d -> %d\n" % (r.rev(pp[0]), i)) + if pp[1] != nullid: + ui.write("\t%d -> %d\n" % (r.rev(pp[1]), i)) + ui.write("}\n") + +def debuginstall(ui): + '''test Mercurial installation + + Returns 0 on success. 
+ ''' + + def writetemp(contents): + (fd, name) = tempfile.mkstemp(prefix="hg-debuginstall-") + f = os.fdopen(fd, "wb") + f.write(contents) + f.close() + return name + + problems = 0 + + # encoding + ui.status(_("Checking encoding (%s)...\n") % encoding.encoding) + try: + encoding.fromlocal("test") + except util.Abort, inst: + ui.write(" %s\n" % inst) + ui.write(_(" (check that your locale is properly set)\n")) + problems += 1 + + # compiled modules + ui.status(_("Checking installed modules (%s)...\n") + % os.path.dirname(__file__)) + try: + import bdiff, mpatch, base85, osutil + except Exception, inst: + ui.write(" %s\n" % inst) + ui.write(_(" One or more extensions could not be found")) + ui.write(_(" (check that you compiled the extensions)\n")) + problems += 1 + + # templates + ui.status(_("Checking templates...\n")) + try: + import templater + templater.templater(templater.templatepath("map-cmdline.default")) + except Exception, inst: + ui.write(" %s\n" % inst) + ui.write(_(" (templates seem to have been installed incorrectly)\n")) + problems += 1 + + # patch + ui.status(_("Checking patch...\n")) + patchproblems = 0 + a = "1\n2\n3\n4\n" + b = "1\n2\n3\ninsert\n4\n" + fa = writetemp(a) + d = mdiff.unidiff(a, None, b, None, os.path.basename(fa), + os.path.basename(fa)) + fd = writetemp(d) + + files = {} + try: + patch.patch(fd, ui, cwd=os.path.dirname(fa), files=files) + except util.Abort, e: + ui.write(_(" patch call failed:\n")) + ui.write(" " + str(e) + "\n") + patchproblems += 1 + else: + if list(files) != [os.path.basename(fa)]: + ui.write(_(" unexpected patch output!\n")) + patchproblems += 1 + a = open(fa).read() + if a != b: + ui.write(_(" patch test failed!\n")) + patchproblems += 1 + + if patchproblems: + if ui.config('ui', 'patch'): + ui.write(_(" (Current patch tool may be incompatible with patch," + " or misconfigured. 
Please check your configuration" + " file)\n")) + else: + ui.write(_(" Internal patcher failure, please report this error" + " to http://mercurial.selenic.com/wiki/BugTracker\n")) + problems += patchproblems + + os.unlink(fa) + os.unlink(fd) + + # editor + ui.status(_("Checking commit editor...\n")) + editor = ui.geteditor() + cmdpath = util.find_exe(editor) or util.find_exe(editor.split()[0]) + if not cmdpath: + if editor == 'vi': + ui.write(_(" No commit editor set and can't find vi in PATH\n")) + ui.write(_(" (specify a commit editor in your configuration" + " file)\n")) + else: + ui.write(_(" Can't find editor '%s' in PATH\n") % editor) + ui.write(_(" (specify a commit editor in your configuration" + " file)\n")) + problems += 1 + + # check username + ui.status(_("Checking username...\n")) + try: + ui.username() + except util.Abort, e: + ui.write(" %s\n" % e) + ui.write(_(" (specify a username in your configuration file)\n")) + problems += 1 + + if not problems: + ui.status(_("No problems detected\n")) + else: + ui.write(_("%s problems detected," + " please check your install!\n") % problems) + + return problems + +def debugrename(ui, repo, file1, *pats, **opts): + """dump rename information""" + + ctx = repo[opts.get('rev')] + m = cmdutil.match(repo, (file1,) + pats, opts) + for abs in ctx.walk(m): + fctx = ctx[abs] + o = fctx.filelog().renamed(fctx.filenode()) + rel = m.rel(abs) + if o: + ui.write(_("%s renamed from %s:%s\n") % (rel, o[0], hex(o[1]))) + else: + ui.write(_("%s not renamed\n") % rel) + +def debugwalk(ui, repo, *pats, **opts): + """show how files match on given patterns""" + m = cmdutil.match(repo, pats, opts) + items = list(repo.walk(m)) + if not items: + return + fmt = 'f %%-%ds %%-%ds %%s' % ( + max([len(abs) for abs in items]), + max([len(m.rel(abs)) for abs in items])) + for abs in items: + line = fmt % (abs, m.rel(abs), m.exact(abs) and 'exact' or '') + ui.write("%s\n" % line.rstrip()) + +def diff(ui, repo, *pats, **opts): + """diff 
repository (or selected files) + + Show differences between revisions for the specified files. + + Differences between files are shown using the unified diff format. + + .. note:: + diff may generate unexpected results for merges, as it will + default to comparing against the working directory's first + parent changeset if no revisions are specified. + + When two revision arguments are given, then changes are shown + between those revisions. If only one revision is specified then + that revision is compared to the working directory, and, when no + revisions are specified, the working directory files are compared + to its parent. + + Alternatively you can specify -c/--change with a revision to see + the changes in that changeset relative to its first parent. + + Without the -a/--text option, diff will avoid generating diffs of + files it detects as binary. With -a, diff will generate a diff + anyway, probably with undesirable results. + + Use the -g/--git option to generate diffs in the git extended diff + format. For more information, read :hg:`help diffs`. + + Returns 0 on success. + """ + + revs = opts.get('rev') + change = opts.get('change') + stat = opts.get('stat') + reverse = opts.get('reverse') + + if revs and change: + msg = _('cannot specify --rev and --change at the same time') + raise util.Abort(msg) + elif change: + node2 = repo.lookup(change) + node1 = repo[node2].parents()[0].node() + else: + node1, node2 = cmdutil.revpair(repo, revs) + + if reverse: + node1, node2 = node2, node1 + + diffopts = patch.diffopts(ui, opts) + m = cmdutil.match(repo, pats, opts) + cmdutil.diffordiffstat(ui, repo, diffopts, node1, node2, m, stat=stat, + listsubrepos=opts.get('subrepos')) + +def export(ui, repo, *changesets, **opts): + """dump the header and diffs for one or more changesets + + Print the changeset header and diffs for one or more revisions. 
+ + The information shown in the changeset header is: author, date, + branch name (if non-default), changeset hash, parent(s) and commit + comment. + + .. note:: + export may generate unexpected diff output for merge + changesets, as it will compare the merge changeset against its + first parent only. + + Output may be to a file, in which case the name of the file is + given using a format string. The formatting rules are as follows: + + :``%%``: literal "%" character + :``%H``: changeset hash (40 hexadecimal digits) + :``%N``: number of patches being generated + :``%R``: changeset revision number + :``%b``: basename of the exporting repository + :``%h``: short-form changeset hash (12 hexadecimal digits) + :``%n``: zero-padded sequence number, starting at 1 + :``%r``: zero-padded changeset revision number + + Without the -a/--text option, export will avoid generating diffs + of files it detects as binary. With -a, export will generate a + diff anyway, probably with undesirable results. + + Use the -g/--git option to generate diffs in the git extended diff + format. See :hg:`help diffs` for more information. + + With the --switch-parent option, the diff will be against the + second parent. It can be useful to review a merge. + + Returns 0 on success. + """ + changesets += tuple(opts.get('rev', [])) + if not changesets: + raise util.Abort(_("export requires at least one changeset")) + revs = cmdutil.revrange(repo, changesets) + if len(revs) > 1: + ui.note(_('exporting patches:\n')) + else: + ui.note(_('exporting patch:\n')) + cmdutil.export(repo, revs, template=opts.get('output'), + switch_parent=opts.get('switch_parent'), + opts=patch.diffopts(ui, opts)) + +def forget(ui, repo, *pats, **opts): + """forget the specified files on the next commit + + Mark the specified files so they will no longer be tracked + after the next commit. 
+ + This only removes files from the current branch, not from the + entire project history, and it does not delete them from the + working directory. + + To undo a forget before the next commit, see :hg:`add`. + + Returns 0 on success. + """ + + if not pats: + raise util.Abort(_('no files specified')) + + m = cmdutil.match(repo, pats, opts) + s = repo.status(match=m, clean=True) + forget = sorted(s[0] + s[1] + s[3] + s[6]) + errs = 0 + + for f in m.files(): + if f not in repo.dirstate and not os.path.isdir(m.rel(f)): + ui.warn(_('not removing %s: file is already untracked\n') + % m.rel(f)) + errs = 1 + + for f in forget: + if ui.verbose or not m.exact(f): + ui.status(_('removing %s\n') % m.rel(f)) + + repo[None].remove(forget, unlink=False) + return errs + +def grep(ui, repo, pattern, *pats, **opts): + """search for a pattern in specified files and revisions + + Search revisions of files for a regular expression. + + This command behaves differently than Unix grep. It only accepts + Python/Perl regexps. It searches repository history, not the + working directory. It always prints the revision number in which a + match appears. + + By default, grep only prints output for the first revision of a + file in which it finds a match. To get it to print every revision + that contains a change in match status ("-" for a match that + becomes a non-match, or "+" for a non-match that becomes a match), + use the --all flag. + + Returns 0 if a match is found, 1 otherwise. 
+ """ + reflags = 0 + if opts.get('ignore_case'): + reflags |= re.I + try: + regexp = re.compile(pattern, reflags) + except re.error, inst: + ui.warn(_("grep: invalid match pattern: %s\n") % inst) + return 1 + sep, eol = ':', '\n' + if opts.get('print0'): + sep = eol = '\0' + + getfile = util.lrucachefunc(repo.file) + + def matchlines(body): + begin = 0 + linenum = 0 + while True: + match = regexp.search(body, begin) + if not match: + break + mstart, mend = match.span() + linenum += body.count('\n', begin, mstart) + 1 + lstart = body.rfind('\n', begin, mstart) + 1 or begin + begin = body.find('\n', mend) + 1 or len(body) + lend = begin - 1 + yield linenum, mstart - lstart, mend - lstart, body[lstart:lend] + + class linestate(object): + def __init__(self, line, linenum, colstart, colend): + self.line = line + self.linenum = linenum + self.colstart = colstart + self.colend = colend + + def __hash__(self): + return hash((self.linenum, self.line)) + + def __eq__(self, other): + return self.line == other.line + + matches = {} + copies = {} + def grepbody(fn, rev, body): + matches[rev].setdefault(fn, []) + m = matches[rev][fn] + for lnum, cstart, cend, line in matchlines(body): + s = linestate(line, lnum, cstart, cend) + m.append(s) + + def difflinestates(a, b): + sm = difflib.SequenceMatcher(None, a, b) + for tag, alo, ahi, blo, bhi in sm.get_opcodes(): + if tag == 'insert': + for i in xrange(blo, bhi): + yield ('+', b[i]) + elif tag == 'delete': + for i in xrange(alo, ahi): + yield ('-', a[i]) + elif tag == 'replace': + for i in xrange(alo, ahi): + yield ('-', a[i]) + for i in xrange(blo, bhi): + yield ('+', b[i]) + + def display(fn, ctx, pstates, states): + rev = ctx.rev() + datefunc = ui.quiet and util.shortdate or util.datestr + found = False + filerevmatches = {} + if opts.get('all'): + iter = difflinestates(pstates, states) + else: + iter = [('', l) for l in states] + for change, l in iter: + cols = [fn, str(rev)] + before, match, after = None, None, None + if 
opts.get('line_number'): + cols.append(str(l.linenum)) + if opts.get('all'): + cols.append(change) + if opts.get('user'): + cols.append(ui.shortuser(ctx.user())) + if opts.get('date'): + cols.append(datefunc(ctx.date())) + if opts.get('files_with_matches'): + c = (fn, rev) + if c in filerevmatches: + continue + filerevmatches[c] = 1 + else: + before = l.line[:l.colstart] + match = l.line[l.colstart:l.colend] + after = l.line[l.colend:] + ui.write(sep.join(cols)) + if before is not None: + ui.write(sep + before) + ui.write(match, label='grep.match') + ui.write(after) + ui.write(eol) + found = True + return found + + skip = {} + revfiles = {} + matchfn = cmdutil.match(repo, pats, opts) + found = False + follow = opts.get('follow') + + def prep(ctx, fns): + rev = ctx.rev() + pctx = ctx.parents()[0] + parent = pctx.rev() + matches.setdefault(rev, {}) + matches.setdefault(parent, {}) + files = revfiles.setdefault(rev, []) + for fn in fns: + flog = getfile(fn) + try: + fnode = ctx.filenode(fn) + except error.LookupError: + continue + + copied = flog.renamed(fnode) + copy = follow and copied and copied[0] + if copy: + copies.setdefault(rev, {})[fn] = copy + if fn in skip: + if copy: + skip[copy] = True + continue + files.append(fn) + + if fn not in matches[rev]: + grepbody(fn, rev, flog.read(fnode)) + + pfn = copy or fn + if pfn not in matches[parent]: + try: + fnode = pctx.filenode(pfn) + grepbody(pfn, parent, flog.read(fnode)) + except error.LookupError: + pass + + for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep): + rev = ctx.rev() + parent = ctx.parents()[0].rev() + for fn in sorted(revfiles.get(rev, [])): + states = matches[rev][fn] + copy = copies.get(rev, {}).get(fn) + if fn in skip: + if copy: + skip[copy] = True + continue + pstates = matches.get(parent, {}).get(copy or fn, []) + if pstates or states: + r = display(fn, ctx, pstates, states) + found = found or r + if r and not opts.get('all'): + skip[fn] = True + if copy: + skip[copy] = True + del 
matches[rev] + del revfiles[rev] + + return not found + +def heads(ui, repo, *branchrevs, **opts): + """show current repository heads or show branch heads + + With no arguments, show all repository branch heads. + + Repository "heads" are changesets with no child changesets. They are + where development generally takes place and are the usual targets + for update and merge operations. Branch heads are changesets that have + no child changeset on the same branch. + + If one or more REVs are given, only branch heads on the branches + associated with the specified changesets are shown. + + If -c/--closed is specified, also show branch heads marked closed + (see :hg:`commit --close-branch`). + + If STARTREV is specified, only those heads that are descendants of + STARTREV will be displayed. + + If -t/--topo is specified, named branch mechanics will be ignored and only + changesets without children will be shown. + + Returns 0 if matching heads are found, 1 if not. + """ + + if opts.get('rev'): + start = repo.lookup(opts['rev']) + else: + start = None + + if opts.get('topo'): + heads = [repo[h] for h in repo.heads(start)] + else: + heads = [] + for b, ls in repo.branchmap().iteritems(): + if start is None: + heads += [repo[h] for h in ls] + continue + startrev = repo.changelog.rev(start) + descendants = set(repo.changelog.descendants(startrev)) + descendants.add(startrev) + rev = repo.changelog.rev + heads += [repo[h] for h in ls if rev(h) in descendants] + + if branchrevs: + decode, encode = encoding.fromlocal, encoding.tolocal + branches = set(repo[decode(br)].branch() for br in branchrevs) + heads = [h for h in heads if h.branch() in branches] + + if not opts.get('closed'): + heads = [h for h in heads if not h.extra().get('close')] + + if opts.get('active') and branchrevs: + dagheads = repo.heads(start) + heads = [h for h in heads if h.node() in dagheads] + + if branchrevs: + haveheads = set(h.branch() for h in heads) + if branches - haveheads: + headless = ', 
'.join(encode(b) for b in branches - haveheads) + msg = _('no open branch heads found on branches %s') + if opts.get('rev'): + msg += _(' (started at %s)' % opts['rev']) + ui.warn((msg + '\n') % headless) + + if not heads: + return 1 + + heads = sorted(heads, key=lambda x: -x.rev()) + displayer = cmdutil.show_changeset(ui, repo, opts) + for ctx in heads: + displayer.show(ctx) + displayer.close() + +def help_(ui, name=None, with_version=False, unknowncmd=False): + """show help for a given topic or a help overview + + With no arguments, print a list of commands with short help messages. + + Given a topic, extension, or command name, print help for that + topic. + + Returns 0 if successful. + """ + option_lists = [] + textwidth = ui.termwidth() - 2 + + def addglobalopts(aliases): + if ui.verbose: + option_lists.append((_("global options:"), globalopts)) + if name == 'shortlist': + option_lists.append((_('use "hg help" for the full list ' + 'of commands'), ())) + else: + if name == 'shortlist': + msg = _('use "hg help" for the full list of commands ' + 'or "hg -v" for details') + elif aliases: + msg = _('use "hg -v help%s" to show aliases and ' + 'global options') % (name and " " + name or "") + else: + msg = _('use "hg -v help %s" to show global options') % name + option_lists.append((msg, ())) + + def helpcmd(name): + if with_version: + version_(ui) + ui.write('\n') + + try: + aliases, entry = cmdutil.findcmd(name, table, strict=unknowncmd) + except error.AmbiguousCommand, inst: + # py3k fix: except vars can't be used outside the scope of the + # except block, nor can be used inside a lambda. 
python issue4617 + prefix = inst.args[0] + select = lambda c: c.lstrip('^').startswith(prefix) + helplist(_('list of commands:\n\n'), select) + return + + # check if it's an invalid alias and display its error if it is + if getattr(entry[0], 'badalias', False): + if not unknowncmd: + entry[0](ui) + return + + # synopsis + if len(entry) > 2: + if entry[2].startswith('hg'): + ui.write("%s\n" % entry[2]) + else: + ui.write('hg %s %s\n' % (aliases[0], entry[2])) + else: + ui.write('hg %s\n' % aliases[0]) + + # aliases + if not ui.quiet and len(aliases) > 1: + ui.write(_("\naliases: %s\n") % ', '.join(aliases[1:])) + + # description + doc = gettext(entry[0].__doc__) + if not doc: + doc = _("(no help text available)") + if hasattr(entry[0], 'definition'): # aliased command + if entry[0].definition.startswith('!'): # shell alias + doc = _('shell alias for::\n\n %s') % entry[0].definition[1:] + else: + doc = _('alias for: hg %s\n\n%s') % (entry[0].definition, doc) + if ui.quiet: + doc = doc.splitlines()[0] + keep = ui.verbose and ['verbose'] or [] + formatted, pruned = minirst.format(doc, textwidth, keep=keep) + ui.write("\n%s\n" % formatted) + if pruned: + ui.write(_('\nuse "hg -v help %s" to show verbose help\n') % name) + + if not ui.quiet: + # options + if entry[1]: + option_lists.append((_("options:\n"), entry[1])) + + addglobalopts(False) + + def helplist(header, select=None): + h = {} + cmds = {} + for c, e in table.iteritems(): + f = c.split("|", 1)[0] + if select and not select(f): + continue + if (not select and name != 'shortlist' and + e[0].__module__ != __name__): + continue + if name == "shortlist" and not f.startswith("^"): + continue + f = f.lstrip("^") + if not ui.debugflag and f.startswith("debug"): + continue + doc = e[0].__doc__ + if doc and 'DEPRECATED' in doc and not ui.verbose: + continue + doc = gettext(doc) + if not doc: + doc = _("(no help text available)") + h[f] = doc.splitlines()[0].rstrip() + cmds[f] = c.lstrip("^") + + if not h: + 
ui.status(_('no commands defined\n')) + return + + ui.status(header) + fns = sorted(h) + m = max(map(len, fns)) + for f in fns: + if ui.verbose: + commands = cmds[f].replace("|",", ") + ui.write(" %s:\n %s\n"%(commands, h[f])) + else: + ui.write('%s\n' % (util.wrap(h[f], textwidth, + initindent=' %-*s ' % (m, f), + hangindent=' ' * (m + 4)))) + + if not ui.quiet: + addglobalopts(True) + + def helptopic(name): + for names, header, doc in help.helptable: + if name in names: + break + else: + raise error.UnknownCommand(name) + + # description + if not doc: + doc = _("(no help text available)") + if hasattr(doc, '__call__'): + doc = doc() + + ui.write("%s\n\n" % header) + ui.write("%s\n" % minirst.format(doc, textwidth, indent=4)) + + def helpext(name): + try: + mod = extensions.find(name) + doc = gettext(mod.__doc__) or _('no help text available') + except KeyError: + mod = None + doc = extensions.disabledext(name) + if not doc: + raise error.UnknownCommand(name) + + if '\n' not in doc: + head, tail = doc, "" + else: + head, tail = doc.split('\n', 1) + ui.write(_('%s extension - %s\n\n') % (name.split('.')[-1], head)) + if tail: + ui.write(minirst.format(tail, textwidth)) + ui.status('\n\n') + + if mod: + try: + ct = mod.cmdtable + except AttributeError: + ct = {} + modcmds = set([c.split('|', 1)[0] for c in ct]) + helplist(_('list of commands:\n\n'), modcmds.__contains__) + else: + ui.write(_('use "hg help extensions" for information on enabling ' + 'extensions\n')) + + def helpextcmd(name): + cmd, ext, mod = extensions.disabledcmd(name, ui.config('ui', 'strict')) + doc = gettext(mod.__doc__).splitlines()[0] + + msg = help.listexts(_("'%s' is provided by the following " + "extension:") % cmd, {ext: doc}, len(ext), + indent=4) + ui.write(minirst.format(msg, textwidth)) + ui.write('\n\n') + ui.write(_('use "hg help extensions" for information on enabling ' + 'extensions\n')) + + help.addtopichook('revsets', revset.makedoc) + + if name and name != 'shortlist': + i = 
None + if unknowncmd: + queries = (helpextcmd,) + else: + queries = (helptopic, helpcmd, helpext, helpextcmd) + for f in queries: + try: + f(name) + i = None + break + except error.UnknownCommand, inst: + i = inst + if i: + raise i + + else: + # program name + if ui.verbose or with_version: + version_(ui) + else: + ui.status(_("Mercurial Distributed SCM\n")) + ui.status('\n') + + # list of commands + if name == "shortlist": + header = _('basic commands:\n\n') + else: + header = _('list of commands:\n\n') + + helplist(header) + if name != 'shortlist': + exts, maxlength = extensions.enabled() + text = help.listexts(_('enabled extensions:'), exts, maxlength) + if text: + ui.write("\n%s\n" % minirst.format(text, textwidth)) + + # list all option lists + opt_output = [] + multioccur = False + for title, options in option_lists: + opt_output.append(("\n%s" % title, None)) + for option in options: + if len(option) == 5: + shortopt, longopt, default, desc, optlabel = option + else: + shortopt, longopt, default, desc = option + optlabel = _("VALUE") # default label + + if _("DEPRECATED") in desc and not ui.verbose: + continue + if isinstance(default, list): + numqualifier = " %s [+]" % optlabel + multioccur = True + elif (default is not None) and not isinstance(default, bool): + numqualifier = " %s" % optlabel + else: + numqualifier = "" + opt_output.append(("%2s%s" % + (shortopt and "-%s" % shortopt, + longopt and " --%s%s" % + (longopt, numqualifier)), + "%s%s" % (desc, + default + and _(" (default: %s)") % default + or ""))) + if multioccur: + msg = _("\n[+] marked option can be specified multiple times") + if ui.verbose and name != 'shortlist': + opt_output.append((msg, None)) + else: + opt_output.insert(-1, (msg, None)) + + if not name: + ui.write(_("\nadditional help topics:\n\n")) + topics = [] + for names, header, doc in help.helptable: + topics.append((sorted(names, key=len, reverse=True)[0], header)) + topics_len = max([len(s[0]) for s in topics]) + for t, desc in 
topics: + ui.write(" %-*s %s\n" % (topics_len, t, desc)) + + if opt_output: + colwidth = encoding.colwidth + # normalize: (opt or message, desc or None, width of opt) + entries = [desc and (opt, desc, colwidth(opt)) or (opt, None, 0) + for opt, desc in opt_output] + hanging = max([e[2] for e in entries]) + for opt, desc, width in entries: + if desc: + initindent = ' %s%s ' % (opt, ' ' * (hanging - width)) + hangindent = ' ' * (hanging + 3) + ui.write('%s\n' % (util.wrap(desc, textwidth, + initindent=initindent, + hangindent=hangindent))) + else: + ui.write("%s\n" % opt) + +def identify(ui, repo, source=None, + rev=None, num=None, id=None, branch=None, tags=None): + """identify the working copy or specified revision + + With no revision, print a summary of the current state of the + repository. + + Specifying a path to a repository root or Mercurial bundle will + cause lookup to operate on that repository/bundle. + + This summary identifies the repository state using one or two + parent hash identifiers, followed by a "+" if there are + uncommitted changes in the working directory, a list of tags for + this revision and a branch name for non-default branches. + + Returns 0 if successful. 
+ """ + + if not repo and not source: + raise util.Abort(_("there is no Mercurial repository here " + "(.hg not found)")) + + hexfunc = ui.debugflag and hex or short + default = not (num or id or branch or tags) + output = [] + + revs = [] + if source: + source, branches = hg.parseurl(ui.expandpath(source)) + repo = hg.repository(ui, source) + revs, checkout = hg.addbranchrevs(repo, repo, branches, None) + + if not repo.local(): + if not rev and revs: + rev = revs[0] + if not rev: + rev = "tip" + if num or branch or tags: + raise util.Abort( + "can't query remote revision number, branch, or tags") + output = [hexfunc(repo.lookup(rev))] + elif not rev: + ctx = repo[None] + parents = ctx.parents() + changed = False + if default or id or num: + changed = util.any(repo.status()) + if default or id: + output = ["%s%s" % ('+'.join([hexfunc(p.node()) for p in parents]), + (changed) and "+" or "")] + if num: + output.append("%s%s" % ('+'.join([str(p.rev()) for p in parents]), + (changed) and "+" or "")) + else: + ctx = repo[rev] + if default or id: + output = [hexfunc(ctx.node())] + if num: + output.append(str(ctx.rev())) + + if repo.local() and default and not ui.quiet: + b = encoding.tolocal(ctx.branch()) + if b != 'default': + output.append("(%s)" % b) + + # multiple tags for a single parent separated by '/' + t = "/".join(ctx.tags()) + if t: + output.append(t) + + if branch: + output.append(encoding.tolocal(ctx.branch())) + + if tags: + output.extend(ctx.tags()) + + ui.write("%s\n" % ' '.join(output)) + +def import_(ui, repo, patch1, *patches, **opts): + """import an ordered set of patches + + Import a list of patches and commit them individually (unless + --no-commit is specified). + + If there are outstanding changes in the working directory, import + will abort unless given the -f/--force flag. + + You can import a patch straight from a mail message. Even patches + as attachments work (to use the body part, it must have type + text/plain or text/x-patch). 
From and Subject headers of email + message are used as default committer and commit message. All + text/plain body parts before first diff are added to commit + message. + + If the imported patch was generated by :hg:`export`, user and + description from patch override values from message headers and + body. Values given on command line with -m/--message and -u/--user + override these. + + If --exact is specified, import will set the working directory to + the parent of each patch before applying it, and will abort if the + resulting changeset has a different ID than the one recorded in + the patch. This may happen due to character set problems or other + deficiencies in the text patch format. + + With -s/--similarity, hg will attempt to discover renames and + copies in the patch in the same way as 'addremove'. + + To read a patch from standard input, use "-" as the patch name. If + a URL is specified, the patch will be downloaded from it. + See :hg:`help dates` for a list of formats valid for -d/--date. + + Returns 0 on success. 
+ """ + patches = (patch1,) + patches + + date = opts.get('date') + if date: + opts['date'] = util.parsedate(date) + + try: + sim = float(opts.get('similarity') or 0) + except ValueError: + raise util.Abort(_('similarity must be a number')) + if sim < 0 or sim > 100: + raise util.Abort(_('similarity must be between 0 and 100')) + + if opts.get('exact') or not opts.get('force'): + cmdutil.bail_if_changed(repo) + + d = opts["base"] + strip = opts["strip"] + wlock = lock = None + + def tryone(ui, hunk): + tmpname, message, user, date, branch, nodeid, p1, p2 = \ + patch.extract(ui, hunk) + + if not tmpname: + return None + commitid = _('to working directory') + + try: + cmdline_message = cmdutil.logmessage(opts) + if cmdline_message: + # pickup the cmdline msg + message = cmdline_message + elif message: + # pickup the patch msg + message = message.strip() + else: + # launch the editor + message = None + ui.debug('message:\n%s\n' % message) + + wp = repo.parents() + if opts.get('exact'): + if not nodeid or not p1: + raise util.Abort(_('not a Mercurial patch')) + p1 = repo.lookup(p1) + p2 = repo.lookup(p2 or hex(nullid)) + + if p1 != wp[0].node(): + hg.clean(repo, p1) + repo.dirstate.setparents(p1, p2) + elif p2: + try: + p1 = repo.lookup(p1) + p2 = repo.lookup(p2) + if p1 == wp[0].node(): + repo.dirstate.setparents(p1, p2) + except error.RepoError: + pass + if opts.get('exact') or opts.get('import_branch'): + repo.dirstate.setbranch(branch or 'default') + + files = {} + try: + patch.patch(tmpname, ui, strip=strip, cwd=repo.root, + files=files, eolmode=None) + finally: + files = cmdutil.updatedir(ui, repo, files, + similarity=sim / 100.0) + if not opts.get('no_commit'): + if opts.get('exact'): + m = None + else: + m = cmdutil.matchfiles(repo, files or []) + n = repo.commit(message, opts.get('user') or user, + opts.get('date') or date, match=m, + editor=cmdutil.commiteditor) + if opts.get('exact'): + if hex(n) != nodeid: + repo.rollback() + raise util.Abort(_('patch is 
damaged' + ' or loses information')) + # Force a dirstate write so that the next transaction + # backups an up-do-date file. + repo.dirstate.write() + if n: + commitid = short(n) + + return commitid + finally: + os.unlink(tmpname) + + try: + wlock = repo.wlock() + lock = repo.lock() + lastcommit = None + for p in patches: + pf = os.path.join(d, p) + + if pf == '-': + ui.status(_("applying patch from stdin\n")) + pf = sys.stdin + else: + ui.status(_("applying %s\n") % p) + pf = url.open(ui, pf) + + haspatch = False + for hunk in patch.split(pf): + commitid = tryone(ui, hunk) + if commitid: + haspatch = True + if lastcommit: + ui.status(_('applied %s\n') % lastcommit) + lastcommit = commitid + + if not haspatch: + raise util.Abort(_('no diffs found')) + + finally: + release(lock, wlock) + +def incoming(ui, repo, source="default", **opts): + """show new changesets found in source + + Show new changesets found in the specified path/URL or the default + pull location. These are the changesets that would have been pulled + if a pull at the time you issued this command. + + For remote repository, using --bundle avoids downloading the + changesets twice if the incoming is followed by a pull. + + See pull for valid source format details. + + Returns 0 if there are incoming changes, 1 otherwise. + """ + if opts.get('bundle') and opts.get('subrepos'): + raise util.Abort(_('cannot combine --bundle and --subrepos')) + + ret = hg.incoming(ui, repo, source, opts) + return ret + +def init(ui, dest=".", **opts): + """create a new repository in the given directory + + Initialize a new repository in the given directory. If the given + directory does not exist, it will be created. + + If no directory is given, the current directory is used. + + It is possible to specify an ``ssh://`` URL as the destination. + See :hg:`help urls` for more information. + + Returns 0 on success. 
+ """ + hg.repository(hg.remoteui(ui, opts), ui.expandpath(dest), create=1) + +def locate(ui, repo, *pats, **opts): + """locate files matching specific patterns + + Print files under Mercurial control in the working directory whose + names match the given patterns. + + By default, this command searches all directories in the working + directory. To search just the current directory and its + subdirectories, use "--include .". + + If no patterns are given to match, this command prints the names + of all files under Mercurial control in the working directory. + + If you want to feed the output of this command into the "xargs" + command, use the -0 option to both this command and "xargs". This + will avoid the problem of "xargs" treating single filenames that + contain whitespace as multiple filenames. + + Returns 0 if a match is found, 1 otherwise. + """ + end = opts.get('print0') and '\0' or '\n' + rev = opts.get('rev') or None + + ret = 1 + m = cmdutil.match(repo, pats, opts, default='relglob') + m.bad = lambda x, y: False + for abs in repo[rev].walk(m): + if not rev and abs not in repo.dirstate: + continue + if opts.get('fullpath'): + ui.write(repo.wjoin(abs), end) + else: + ui.write(((pats and m.rel(abs)) or abs), end) + ret = 0 + + return ret + +def log(ui, repo, *pats, **opts): + """show revision history of entire repository or files + + Print the revision history of the specified files or the entire + project. + + File history is shown without following rename or copy history of + files. Use -f/--follow with a filename to follow history across + renames and copies. --follow without a filename will only show + ancestors or descendants of the starting revision. --follow-first + only follows the first parent of merge revisions. + + If no revision range is specified, the default is ``tip:0`` unless + --follow is set, in which case the working directory parent is + used as the starting revision. 
You can specify a revision set for + log, see :hg:`help revsets` for more information. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + By default this command prints revision number and changeset id, + tags, non-trivial parents, user, date and time, and a summary for + each commit. When the -v/--verbose switch is used, the list of + changed files and full commit message are shown. + + .. note:: + log -p/--patch may generate unexpected diff output for merge + changesets, as it will only compare the merge changeset against + its first parent. Also, only files different from BOTH parents + will appear in files:. + + Returns 0 on success. + """ + + matchfn = cmdutil.match(repo, pats, opts) + limit = cmdutil.loglimit(opts) + count = 0 + + endrev = None + if opts.get('copies') and opts.get('rev'): + endrev = max(cmdutil.revrange(repo, opts.get('rev'))) + 1 + + df = False + if opts["date"]: + df = util.matchdate(opts["date"]) + + branches = opts.get('branch', []) + opts.get('only_branch', []) + opts['branch'] = [repo.lookupbranch(b) for b in branches] + + displayer = cmdutil.show_changeset(ui, repo, opts, True) + def prep(ctx, fns): + rev = ctx.rev() + parents = [p for p in repo.changelog.parentrevs(rev) + if p != nullrev] + if opts.get('no_merges') and len(parents) == 2: + return + if opts.get('only_merges') and len(parents) != 2: + return + if opts.get('branch') and ctx.branch() not in opts['branch']: + return + if df and not df(ctx.date()[0]): + return + if opts['user'] and not [k for k in opts['user'] + if k.lower() in ctx.user().lower()]: + return + if opts.get('keyword'): + for k in [kw.lower() for kw in opts['keyword']]: + if (k in ctx.user().lower() or + k in ctx.description().lower() or + k in " ".join(ctx.files()).lower()): + break + else: + return + + copies = None + if opts.get('copies') and rev: + copies = [] + getrenamed = templatekw.getrenamedfn(repo, endrev=endrev) + for fn in ctx.files(): + rename = getrenamed(fn, rev) + if rename: 
+ copies.append((fn, rename[0])) + + revmatchfn = None + if opts.get('patch') or opts.get('stat'): + if opts.get('follow') or opts.get('follow_first'): + # note: this might be wrong when following through merges + revmatchfn = cmdutil.match(repo, fns, default='path') + else: + revmatchfn = matchfn + + displayer.show(ctx, copies=copies, matchfn=revmatchfn) + + for ctx in cmdutil.walkchangerevs(repo, matchfn, opts, prep): + if count == limit: + break + if displayer.flush(ctx.rev()): + count += 1 + displayer.close() + +def manifest(ui, repo, node=None, rev=None): + """output the current or given revision of the project manifest + + Print a list of version controlled files for the given revision. + If no revision is given, the first parent of the working directory + is used, or the null revision if no revision is checked out. + + With -v, print file permissions, symlink and executable bits. + With --debug, print file revision hashes. + + Returns 0 on success. + """ + + if rev and node: + raise util.Abort(_("please specify just one revision")) + + if not node: + node = rev + + decor = {'l':'644 @ ', 'x':'755 * ', '':'644 '} + ctx = repo[node] + for f in ctx: + if ui.debugflag: + ui.write("%40s " % hex(ctx.manifest()[f])) + if ui.verbose: + ui.write(decor[ctx.flags(f)]) + ui.write("%s\n" % f) + +def merge(ui, repo, node=None, **opts): + """merge working directory with another revision + + The current working directory is updated with all changes made in + the requested revision since the last common predecessor revision. + + Files that changed between either parent are marked as changed for + the next commit and a commit must be performed before any further + updates to the repository are allowed. The next commit will have + two parents. + + ``--tool`` can be used to specify the merge tool used for file + merges. It overrides the HGMERGE environment variable and your + configuration files. 
+ + If no revision is specified, the working directory's parent is a + head revision, and the current branch contains exactly one other + head, the other head is merged with by default. Otherwise, an + explicit revision with which to merge with must be provided. + + :hg:`resolve` must be used to resolve unresolved files. + + To undo an uncommitted merge, use :hg:`update --clean .` which + will check out a clean copy of the original merge parent, losing + all changes. + + Returns 0 on success, 1 if there are unresolved files. + """ + + if opts.get('rev') and node: + raise util.Abort(_("please specify just one revision")) + if not node: + node = opts.get('rev') + + if not node: + branch = repo.changectx(None).branch() + bheads = repo.branchheads(branch) + if len(bheads) > 2: + raise util.Abort(_( + 'branch \'%s\' has %d heads - ' + 'please merge with an explicit rev\n' + '(run \'hg heads .\' to see heads)') + % (branch, len(bheads))) + + parent = repo.dirstate.parents()[0] + if len(bheads) == 1: + if len(repo.heads()) > 1: + raise util.Abort(_( + 'branch \'%s\' has one head - ' + 'please merge with an explicit rev\n' + '(run \'hg heads\' to see all heads)') + % branch) + msg = _('there is nothing to merge') + if parent != repo.lookup(repo[None].branch()): + msg = _('%s - use "hg update" instead') % msg + raise util.Abort(msg) + + if parent not in bheads: + raise util.Abort(_('working dir not at a head rev - ' + 'use "hg update" or merge with an explicit rev')) + node = parent == bheads[0] and bheads[-1] or bheads[0] + + if opts.get('preview'): + # find nodes that are ancestors of p2 but not of p1 + p1 = repo.lookup('.') + p2 = repo.lookup(node) + nodes = repo.changelog.findmissing(common=[p1], heads=[p2]) + + displayer = cmdutil.show_changeset(ui, repo, opts) + for node in nodes: + displayer.show(repo[node]) + displayer.close() + return 0 + + try: + # ui.forcemerge is an internal variable, do not document + ui.setconfig('ui', 'forcemerge', opts.get('tool', '')) + 
return hg.merge(repo, node, force=opts.get('force')) + finally: + ui.setconfig('ui', 'forcemerge', '') + +def outgoing(ui, repo, dest=None, **opts): + """show changesets not found in the destination + + Show changesets not found in the specified destination repository + or the default push location. These are the changesets that would + be pushed if a push was requested. + + See pull for details of valid destination formats. + + Returns 0 if there are outgoing changes, 1 otherwise. + """ + ret = hg.outgoing(ui, repo, dest, opts) + return ret + +def parents(ui, repo, file_=None, **opts): + """show the parents of the working directory or revision + + Print the working directory's parent revisions. If a revision is + given via -r/--rev, the parent of that revision will be printed. + If a file argument is given, the revision in which the file was + last changed (before the working directory revision or the + argument to --rev if given) is printed. + + Returns 0 on success. + """ + rev = opts.get('rev') + if rev: + ctx = repo[rev] + else: + ctx = repo[None] + + if file_: + m = cmdutil.match(repo, (file_,), opts) + if m.anypats() or len(m.files()) != 1: + raise util.Abort(_('can only specify an explicit filename')) + file_ = m.files()[0] + filenodes = [] + for cp in ctx.parents(): + if not cp: + continue + try: + filenodes.append(cp.filenode(file_)) + except error.LookupError: + pass + if not filenodes: + raise util.Abort(_("'%s' not found in manifest!") % file_) + fl = repo.file(file_) + p = [repo.lookup(fl.linkrev(fl.rev(fn))) for fn in filenodes] + else: + p = [cp.node() for cp in ctx.parents()] + + displayer = cmdutil.show_changeset(ui, repo, opts) + for n in p: + if n != nullid: + displayer.show(repo[n]) + displayer.close() + +def paths(ui, repo, search=None): + """show aliases for remote repositories + + Show definition of symbolic path name NAME. If no name is given, + show definition of all available names. 
+ + Path names are defined in the [paths] section of your + configuration file and in ``/etc/mercurial/hgrc``. If run inside a + repository, ``.hg/hgrc`` is used, too. + + The path names ``default`` and ``default-push`` have a special + meaning. When performing a push or pull operation, they are used + as fallbacks if no location is specified on the command-line. + When ``default-push`` is set, it will be used for push and + ``default`` will be used for pull; otherwise ``default`` is used + as the fallback for both. When cloning a repository, the clone + source is written as ``default`` in ``.hg/hgrc``. Note that + ``default`` and ``default-push`` apply to all inbound (e.g. + :hg:`incoming`) and outbound (e.g. :hg:`outgoing`, :hg:`email` and + :hg:`bundle`) operations. + + See :hg:`help urls` for more information. + + Returns 0 on success. + """ + if search: + for name, path in ui.configitems("paths"): + if name == search: + ui.write("%s\n" % url.hidepassword(path)) + return + ui.warn(_("not found!\n")) + return 1 + else: + for name, path in ui.configitems("paths"): + ui.write("%s = %s\n" % (name, url.hidepassword(path))) + +def postincoming(ui, repo, modheads, optupdate, checkout): + if modheads == 0: + return + if optupdate: + if (modheads <= 1 or len(repo.branchheads()) == 1) or checkout: + return hg.update(repo, checkout) + else: + ui.status(_("not updating, since new heads added\n")) + if modheads > 1: + ui.status(_("(run 'hg heads' to see heads, 'hg merge' to merge)\n")) + else: + ui.status(_("(run 'hg update' to get a working copy)\n")) + +def pull(ui, repo, source="default", **opts): + """pull changes from the specified source + + Pull changes from a remote repository to a local one. + + This finds all changes from the repository at the specified path + or URL and adds them to a local repository (the current one unless + -R is specified). By default, this does not update the copy of the + project in the working directory. 
+ + Use :hg:`incoming` if you want to see what would have been added + by a pull at the time you issued this command. If you then decide + to add those changes to the repository, you should use :hg:`pull + -r X` where ``X`` is the last changeset listed by :hg:`incoming`. + + If SOURCE is omitted, the 'default' path will be used. + See :hg:`help urls` for more information. + + Returns 0 on success, 1 if an update had unresolved files. + """ + source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), source) + ui.status(_('pulling from %s\n') % url.hidepassword(source)) + revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) + if revs: + try: + revs = [other.lookup(rev) for rev in revs] + except error.CapabilityError: + err = _("other repository doesn't support revision lookup, " + "so a rev cannot be specified.") + raise util.Abort(err) + + modheads = repo.pull(other, heads=revs, force=opts.get('force')) + if checkout: + checkout = str(repo.changelog.rev(other.lookup(checkout))) + repo._subtoppath = source + try: + return postincoming(ui, repo, modheads, opts.get('update'), checkout) + finally: + del repo._subtoppath + +def push(ui, repo, dest=None, **opts): + """push changes to the specified destination + + Push changesets from the local repository to the specified + destination. + + This operation is symmetrical to pull: it is identical to a pull + in the destination repository from the current one. + + By default, push will not allow creation of new heads at the + destination, since multiple heads would make it unclear which head + to use. In this situation, it is recommended to pull and merge + before pushing. + + Use --new-branch if you want to allow push to create a new named + branch that is not present at the destination. This allows you to + only create a new branch without forcing other changes. 
+ + Use -f/--force to override the default behavior and push all + changesets on all branches. + + If -r/--rev is used, the specified revision and all its ancestors + will be pushed to the remote repository. + + Please see :hg:`help urls` for important details about ``ssh://`` + URLs. If DESTINATION is omitted, a default path will be used. + + Returns 0 if push was successful, 1 if nothing to push. + """ + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest, opts.get('branch')) + revs, checkout = hg.addbranchrevs(repo, repo, branches, opts.get('rev')) + other = hg.repository(hg.remoteui(repo, opts), dest) + ui.status(_('pushing to %s\n') % url.hidepassword(dest)) + if revs: + revs = [repo.lookup(rev) for rev in revs] + + repo._subtoppath = dest + try: + # push subrepos depth-first for coherent ordering + c = repo[''] + subs = c.substate # only repos that are committed + for s in sorted(subs): + if not c.sub(s).push(opts.get('force')): + return False + finally: + del repo._subtoppath + r = repo.push(other, opts.get('force'), revs=revs, + newbranch=opts.get('new_branch')) + return r == 0 + +def recover(ui, repo): + """roll back an interrupted transaction + + Recover from an interrupted commit or pull. + + This command tries to fix the repository status after an + interrupted operation. It should only be necessary when Mercurial + suggests it. + + Returns 0 if successful, 1 if nothing to recover or verify fails. + """ + if repo.recover(): + return hg.verify(repo) + return 1 + +def remove(ui, repo, *pats, **opts): + """remove the specified files on the next commit + + Schedule the indicated files for removal from the repository. + + This only removes files from the current branch, not from the + entire project history. 
-A/--after can be used to remove only + files that have already been deleted, -f/--force can be used to + force deletion, and -Af can be used to remove files from the next + revision without deleting them from the working directory. + + The following table details the behavior of remove for different + file states (columns) and option combinations (rows). The file + states are Added [A], Clean [C], Modified [M] and Missing [!] (as + reported by :hg:`status`). The actions are Warn, Remove (from + branch) and Delete (from disk):: + + A C M ! + none W RD W R + -f R RD RD R + -A W W W R + -Af R R R R + + This command schedules the files to be removed at the next commit. + To undo a remove before that, see :hg:`revert`. + + Returns 0 on success, 1 if any warnings encountered. + """ + + ret = 0 + after, force = opts.get('after'), opts.get('force') + if not pats and not after: + raise util.Abort(_('no files specified')) + + m = cmdutil.match(repo, pats, opts) + s = repo.status(match=m, clean=True) + modified, added, deleted, clean = s[0], s[1], s[3], s[6] + + for f in m.files(): + if f not in repo.dirstate and not os.path.isdir(m.rel(f)): + ui.warn(_('not removing %s: file is untracked\n') % m.rel(f)) + ret = 1 + + if force: + remove, forget = modified + deleted + clean, added + elif after: + remove, forget = deleted, [] + for f in modified + added + clean: + ui.warn(_('not removing %s: file still exists (use -f' + ' to force removal)\n') % m.rel(f)) + ret = 1 + else: + remove, forget = deleted + clean, [] + for f in modified: + ui.warn(_('not removing %s: file is modified (use -f' + ' to force removal)\n') % m.rel(f)) + ret = 1 + for f in added: + ui.warn(_('not removing %s: file has been marked for add (use -f' + ' to force removal)\n') % m.rel(f)) + ret = 1 + + for f in sorted(remove + forget): + if ui.verbose or not m.exact(f): + ui.status(_('removing %s\n') % m.rel(f)) + + repo[None].forget(forget) + repo[None].remove(remove, unlink=not after) + return ret + +def 
rename(ui, repo, *pats, **opts): + """rename files; equivalent of copy + remove + + Mark dest as copies of sources; mark sources for deletion. If dest + is a directory, copies are put in that directory. If dest is a + file, there can only be one source. + + By default, this command copies the contents of files as they + exist in the working directory. If invoked with -A/--after, the + operation is recorded, but no copying is performed. + + This command takes effect at the next commit. To undo a rename + before that, see :hg:`revert`. + + Returns 0 on success, 1 if errors are encountered. + """ + wlock = repo.wlock(False) + try: + return cmdutil.copy(ui, repo, pats, opts, rename=True) + finally: + wlock.release() + +def resolve(ui, repo, *pats, **opts): + """redo merges or set/view the merge status of files + + Merges with unresolved conflicts are often the result of + non-interactive merging using the ``internal:merge`` configuration + setting, or a command-line merge tool like ``diff3``. The resolve + command is used to manage the files involved in a merge, after + :hg:`merge` has been run, and before :hg:`commit` is run (i.e. the + working directory must have two parents). + + The resolve command can be used in the following ways: + + - :hg:`resolve [--tool TOOL] FILE...`: attempt to re-merge the specified + files, discarding any previous merge attempts. Re-merging is not + performed for files already marked as resolved. Use ``--all/-a`` + to selects all unresolved files. ``--tool`` can be used to specify + the merge tool used for the given files. It overrides the HGMERGE + environment variable and your configuration files. + + - :hg:`resolve -m [FILE]`: mark a file as having been resolved + (e.g. after having manually fixed-up the files). The default is + to mark all unresolved files. + + - :hg:`resolve -u [FILE]...`: mark a file as unresolved. The + default is to mark all resolved files. + + - :hg:`resolve -l`: list files which had or still have conflicts. 
+ In the printed list, ``U`` = unresolved and ``R`` = resolved. + + Note that Mercurial will not let you commit files with unresolved + merge conflicts. You must use :hg:`resolve -m ...` before you can + commit after a conflicting merge. + + Returns 0 on success, 1 if any files fail a resolve attempt. + """ + + all, mark, unmark, show, nostatus = \ + [opts.get(o) for o in 'all mark unmark list no_status'.split()] + + if (show and (mark or unmark)) or (mark and unmark): + raise util.Abort(_("too many options specified")) + if pats and all: + raise util.Abort(_("can't specify --all and patterns")) + if not (all or pats or show or mark or unmark): + raise util.Abort(_('no files or directories specified; ' + 'use --all to remerge all files')) + + ms = mergemod.mergestate(repo) + m = cmdutil.match(repo, pats, opts) + ret = 0 + + for f in ms: + if m(f): + if show: + if nostatus: + ui.write("%s\n" % f) + else: + ui.write("%s %s\n" % (ms[f].upper(), f), + label='resolve.' + + {'u': 'unresolved', 'r': 'resolved'}[ms[f]]) + elif mark: + ms.mark(f, "r") + elif unmark: + ms.mark(f, "u") + else: + wctx = repo[None] + mctx = wctx.parents()[-1] + + # backup pre-resolve (merge uses .orig for its own purposes) + a = repo.wjoin(f) + util.copyfile(a, a + ".resolve") + + try: + # resolve file + ui.setconfig('ui', 'forcemerge', opts.get('tool', '')) + if ms.resolve(f, wctx, mctx): + ret = 1 + finally: + ui.setconfig('ui', 'forcemerge', '') + + # replace filemerge's .orig file with our resolve file + util.rename(a + ".resolve", a + ".orig") + + ms.commit() + return ret + +def revert(ui, repo, *pats, **opts): + """restore individual files or directories to an earlier state + + .. note:: + This command is most likely not what you are looking for. + Revert will partially overwrite content in the working + directory without changing the working directory parents. 
Use + :hg:`update -r rev` to check out earlier revisions, or + :hg:`update --clean .` to undo a merge which has added another + parent. + + With no revision specified, revert the named files or directories + to the contents they had in the parent of the working directory. + This restores the contents of the affected files to an unmodified + state and unschedules adds, removes, copies, and renames. If the + working directory has two parents, you must explicitly specify a + revision. + + Using the -r/--rev option, revert the given files or directories + to their contents as of a specific revision. This can be helpful + to "roll back" some or all of an earlier change. See :hg:`help + dates` for a list of formats valid for -d/--date. + + Revert modifies the working directory. It does not commit any + changes, or change the parent of the working directory. If you + revert to a revision other than the parent of the working + directory, the reverted files will thus appear modified + afterwards. + + If a file has been deleted, it is restored. If the executable mode + of a file was changed, it is reset. + + If names are given, all files matching the names are reverted. + If no arguments are given, no files are reverted. + + Modified files are saved with a .orig suffix before reverting. + To disable these backups, use --no-backup. + + Returns 0 on success. 
+ """ + + if opts.get("date"): + if opts.get("rev"): + raise util.Abort(_("you can't specify a revision and a date")) + opts["rev"] = cmdutil.finddate(ui, repo, opts["date"]) + + if not pats and not opts.get('all'): + raise util.Abort(_('no files or directories specified; ' + 'use --all to revert the whole repo')) + + parent, p2 = repo.dirstate.parents() + if not opts.get('rev') and p2 != nullid: + raise util.Abort(_('uncommitted merge - please provide a ' + 'specific revision')) + ctx = repo[opts.get('rev')] + node = ctx.node() + mf = ctx.manifest() + if node == parent: + pmf = mf + else: + pmf = None + + # need all matching names in dirstate and manifest of target rev, + # so have to walk both. do not print errors if files exist in one + # but not other. + + names = {} + + wlock = repo.wlock() + try: + # walk dirstate. + + m = cmdutil.match(repo, pats, opts) + m.bad = lambda x, y: False + for abs in repo.walk(m): + names[abs] = m.rel(abs), m.exact(abs) + + # walk target manifest. + + def badfn(path, msg): + if path in names: + return + path_ = path + '/' + for f in names: + if f.startswith(path_): + return + ui.warn("%s: %s\n" % (m.rel(path), msg)) + + m = cmdutil.match(repo, pats, opts) + m.bad = badfn + for abs in repo[node].walk(m): + if abs not in names: + names[abs] = m.rel(abs), m.exact(abs) + + m = cmdutil.matchfiles(repo, names) + changes = repo.status(match=m)[:4] + modified, added, removed, deleted = map(set, changes) + + # if f is a rename, also revert the source + cwd = repo.getcwd() + for f in added: + src = repo.dirstate.copied(f) + if src and src not in names and repo.dirstate[src] == 'r': + removed.add(src) + names[src] = (repo.pathto(src, cwd), True) + + def removeforget(abs): + if repo.dirstate[abs] == 'a': + return _('forgetting %s\n') + return _('removing %s\n') + + revert = ([], _('reverting %s\n')) + add = ([], _('adding %s\n')) + remove = ([], removeforget) + undelete = ([], _('undeleting %s\n')) + + disptable = ( + # dispatch table: + # 
file state + # action if in target manifest + # action if not in target manifest + # make backup if in target manifest + # make backup if not in target manifest + (modified, revert, remove, True, True), + (added, revert, remove, True, False), + (removed, undelete, None, False, False), + (deleted, revert, remove, False, False), + ) + + for abs, (rel, exact) in sorted(names.items()): + mfentry = mf.get(abs) + target = repo.wjoin(abs) + def handle(xlist, dobackup): + xlist[0].append(abs) + if (dobackup and not opts.get('no_backup') and + os.path.lexists(target)): + bakname = "%s.orig" % rel + ui.note(_('saving current version of %s as %s\n') % + (rel, bakname)) + if not opts.get('dry_run'): + util.rename(target, bakname) + if ui.verbose or not exact: + msg = xlist[1] + if not isinstance(msg, basestring): + msg = msg(abs) + ui.status(msg % rel) + for table, hitlist, misslist, backuphit, backupmiss in disptable: + if abs not in table: + continue + # file has changed in dirstate + if mfentry: + handle(hitlist, backuphit) + elif misslist is not None: + handle(misslist, backupmiss) + break + else: + if abs not in repo.dirstate: + if mfentry: + handle(add, True) + elif exact: + ui.warn(_('file not managed: %s\n') % rel) + continue + # file has not changed in dirstate + if node == parent: + if exact: + ui.warn(_('no changes needed to %s\n') % rel) + continue + if pmf is None: + # only need parent manifest in this unlikely case, + # so do not read by default + pmf = repo[parent].manifest() + if abs in pmf: + if mfentry: + # if version of file is same in parent and target + # manifests, do nothing + if (pmf[abs] != mfentry or + pmf.flags(abs) != mf.flags(abs)): + handle(revert, False) + else: + handle(remove, False) + + if not opts.get('dry_run'): + def checkout(f): + fc = ctx[f] + repo.wwrite(f, fc.data(), fc.flags()) + + audit_path = util.path_auditor(repo.root) + for f in remove[0]: + if repo.dirstate[f] == 'a': + repo.dirstate.forget(f) + continue + audit_path(f) + try: + 
util.unlink(repo.wjoin(f)) + except OSError: + pass + repo.dirstate.remove(f) + + normal = None + if node == parent: + # We're reverting to our parent. If possible, we'd like status + # to report the file as clean. We have to use normallookup for + # merges to avoid losing information about merged/dirty files. + if p2 != nullid: + normal = repo.dirstate.normallookup + else: + normal = repo.dirstate.normal + for f in revert[0]: + checkout(f) + if normal: + normal(f) + + for f in add[0]: + checkout(f) + repo.dirstate.add(f) + + normal = repo.dirstate.normallookup + if node == parent and p2 == nullid: + normal = repo.dirstate.normal + for f in undelete[0]: + checkout(f) + normal(f) + + finally: + wlock.release() + +def rollback(ui, repo, **opts): + """roll back the last transaction (dangerous) + + This command should be used with care. There is only one level of + rollback, and there is no way to undo a rollback. It will also + restore the dirstate at the time of the last transaction, losing + any dirstate changes since that time. This command does not alter + the working directory. + + Transactions are used to encapsulate the effects of all commands + that create new changesets or propagate existing changesets into a + repository. For example, the following commands are transactional, + and their effects can be rolled back: + + - commit + - import + - pull + - push (with this repository as the destination) + - unbundle + + This command is not intended for use on public repositories. Once + changes are visible for pull by other users, rolling a transaction + back locally is ineffective (someone else may already have pulled + the changes). Furthermore, a race is possible with readers of the + repository; for example an in-progress pull from the repository + may fail if a rollback is performed. + + Returns 0 on success, 1 if no rollback data is available. 
+ """ + return repo.rollback(opts.get('dry_run')) + +def root(ui, repo): + """print the root (top) of the current working directory + + Print the root directory of the current repository. + + Returns 0 on success. + """ + ui.write(repo.root + "\n") + +def serve(ui, repo, **opts): + """start stand-alone webserver + + Start a local HTTP repository browser and pull server. You can use + this for ad-hoc sharing and browsing of repositories. It is + recommended to use a real web server to serve a repository for + longer periods of time. + + Please note that the server does not implement access control. + This means that, by default, anybody can read from the server and + nobody can write to it by default. Set the ``web.allow_push`` + option to ``*`` to allow everybody to push to the server. You + should use a real web server if you need to authenticate users. + + By default, the server logs accesses to stdout and errors to + stderr. Use the -A/--accesslog and -E/--errorlog options to log to + files. + + To have the server choose a free port number to listen on, specify + a port number of 0; in this case, the server will print the port + number it uses. + + Returns 0 on success. 
+ """ + + if opts["stdio"]: + if repo is None: + raise error.RepoError(_("There is no Mercurial repository here" + " (.hg not found)")) + s = sshserver.sshserver(ui, repo) + s.serve_forever() + + # this way we can check if something was given in the command-line + if opts.get('port'): + opts['port'] = util.getport(opts.get('port')) + + baseui = repo and repo.baseui or ui + optlist = ("name templates style address port prefix ipv6" + " accesslog errorlog certificate encoding") + for o in optlist.split(): + val = opts.get(o, '') + if val in (None, ''): # should check against default options instead + continue + baseui.setconfig("web", o, val) + if repo and repo.ui != baseui: + repo.ui.setconfig("web", o, val) + + o = opts.get('web_conf') or opts.get('webdir_conf') + if not o: + if not repo: + raise error.RepoError(_("There is no Mercurial repository" + " here (.hg not found)")) + o = repo.root + + app = hgweb.hgweb(o, baseui=ui) + + class service(object): + def init(self): + util.set_signal_handler() + self.httpd = hgweb.server.create_server(ui, app) + + if opts['port'] and not ui.verbose: + return + + if self.httpd.prefix: + prefix = self.httpd.prefix.strip('/') + '/' + else: + prefix = '' + + port = ':%d' % self.httpd.port + if port == ':80': + port = '' + + bindaddr = self.httpd.addr + if bindaddr == '0.0.0.0': + bindaddr = '*' + elif ':' in bindaddr: # IPv6 + bindaddr = '[%s]' % bindaddr + + fqaddr = self.httpd.fqaddr + if ':' in fqaddr: + fqaddr = '[%s]' % fqaddr + if opts['port']: + write = ui.status + else: + write = ui.write + write(_('listening at http://%s%s/%s (bound to %s:%d)\n') % + (fqaddr, port, prefix, bindaddr, self.httpd.port)) + + def run(self): + self.httpd.serve_forever() + + service = service() + + cmdutil.service(opts, initfn=service.init, runfn=service.run) + +def status(ui, repo, *pats, **opts): + """show changed files in the working directory + + Show status of files in the repository. If names are given, only + files that match are shown. 
Files that are clean or ignored or + the source of a copy/move operation, are not listed unless + -c/--clean, -i/--ignored, -C/--copies or -A/--all are given. + Unless options described with "show only ..." are given, the + options -mardu are used. + + Option -q/--quiet hides untracked (unknown and ignored) files + unless explicitly requested with -u/--unknown or -i/--ignored. + + .. note:: + status may appear to disagree with diff if permissions have + changed or a merge has occurred. The standard diff format does + not report permission changes and diff only reports changes + relative to one merge parent. + + If one revision is given, it is used as the base revision. + If two revisions are given, the differences between them are + shown. The --change option can also be used as a shortcut to list + the changed files of a revision from its first parent. + + The codes used to show the status of files are:: + + M = modified + A = added + R = removed + C = clean + ! = missing (deleted by non-hg command, but still tracked) + ? = not tracked + I = ignored + = origin of the previous file listed as A (added) + + Returns 0 on success. 
+ """ + + revs = opts.get('rev') + change = opts.get('change') + + if revs and change: + msg = _('cannot specify --rev and --change at the same time') + raise util.Abort(msg) + elif change: + node2 = repo.lookup(change) + node1 = repo[node2].parents()[0].node() + else: + node1, node2 = cmdutil.revpair(repo, revs) + + cwd = (pats and repo.getcwd()) or '' + end = opts.get('print0') and '\0' or '\n' + copy = {} + states = 'modified added removed deleted unknown ignored clean'.split() + show = [k for k in states if opts.get(k)] + if opts.get('all'): + show += ui.quiet and (states[:4] + ['clean']) or states + if not show: + show = ui.quiet and states[:4] or states[:5] + + stat = repo.status(node1, node2, cmdutil.match(repo, pats, opts), + 'ignored' in show, 'clean' in show, 'unknown' in show, + opts.get('subrepos')) + changestates = zip(states, 'MAR!?IC', stat) + + if (opts.get('all') or opts.get('copies')) and not opts.get('no_status'): + ctxn = repo[nullid] + ctx1 = repo[node1] + ctx2 = repo[node2] + added = stat[1] + if node2 is None: + added = stat[0] + stat[1] # merged? + + for k, v in copies.copies(repo, ctx1, ctx2, ctxn)[0].iteritems(): + if k in added: + copy[k] = v + elif v in added: + copy[v] = k + + for state, char, files in changestates: + if state in show: + format = "%s %%s%s" % (char, end) + if opts.get('no_status'): + format = "%%s%s" % end + + for f in files: + ui.write(format % repo.pathto(f, cwd), + label='status.' + state) + if f in copy: + ui.write(' %s%s' % (repo.pathto(copy[f], cwd), end), + label='status.copied') + +def summary(ui, repo, **opts): + """summarize working directory state + + This generates a brief summary of the working directory state, + including parents, branch, commit status, and available updates. + + With the --remote option, this will check the default paths for + incoming and outgoing changes. This can be time-consuming. + + Returns 0 on success. 
+ """ + + ctx = repo[None] + parents = ctx.parents() + pnode = parents[0].node() + + for p in parents: + # label with log.changeset (instead of log.parent) since this + # shows a working directory parent *changeset*: + ui.write(_('parent: %d:%s ') % (p.rev(), str(p)), + label='log.changeset') + ui.write(' '.join(p.tags()), label='log.tag') + if p.rev() == -1: + if not len(repo): + ui.write(_(' (empty repository)')) + else: + ui.write(_(' (no revision checked out)')) + ui.write('\n') + if p.description(): + ui.status(' ' + p.description().splitlines()[0].strip() + '\n', + label='log.summary') + + branch = ctx.branch() + bheads = repo.branchheads(branch) + m = _('branch: %s\n') % branch + if branch != 'default': + ui.write(m, label='log.branch') + else: + ui.status(m, label='log.branch') + + st = list(repo.status(unknown=True))[:6] + + c = repo.dirstate.copies() + copied, renamed = [], [] + for d, s in c.iteritems(): + if s in st[2]: + st[2].remove(s) + renamed.append(d) + else: + copied.append(d) + if d in st[1]: + st[1].remove(d) + st.insert(3, renamed) + st.insert(4, copied) + + ms = mergemod.mergestate(repo) + st.append([f for f in ms if ms[f] == 'u']) + + subs = [s for s in ctx.substate if ctx.sub(s).dirty()] + st.append(subs) + + labels = [ui.label(_('%d modified'), 'status.modified'), + ui.label(_('%d added'), 'status.added'), + ui.label(_('%d removed'), 'status.removed'), + ui.label(_('%d renamed'), 'status.copied'), + ui.label(_('%d copied'), 'status.copied'), + ui.label(_('%d deleted'), 'status.deleted'), + ui.label(_('%d unknown'), 'status.unknown'), + ui.label(_('%d ignored'), 'status.ignored'), + ui.label(_('%d unresolved'), 'resolve.unresolved'), + ui.label(_('%d subrepos'), 'status.modified')] + t = [] + for s, l in zip(st, labels): + if s: + t.append(l % len(s)) + + t = ', '.join(t) + cleanworkdir = False + + if len(parents) > 1: + t += _(' (merge)') + elif branch != parents[0].branch(): + t += _(' (new branch)') + elif 
(parents[0].extra().get('close') and + pnode in repo.branchheads(branch, closed=True)): + t += _(' (head closed)') + elif not (st[0] or st[1] or st[2] or st[3] or st[4] or st[9]): + t += _(' (clean)') + cleanworkdir = True + elif pnode not in bheads: + t += _(' (new branch head)') + + if cleanworkdir: + ui.status(_('commit: %s\n') % t.strip()) + else: + ui.write(_('commit: %s\n') % t.strip()) + + # all ancestors of branch heads - all ancestors of parent = new csets + new = [0] * len(repo) + cl = repo.changelog + for a in [cl.rev(n) for n in bheads]: + new[a] = 1 + for a in cl.ancestors(*[cl.rev(n) for n in bheads]): + new[a] = 1 + for a in [p.rev() for p in parents]: + if a >= 0: + new[a] = 0 + for a in cl.ancestors(*[p.rev() for p in parents]): + new[a] = 0 + new = sum(new) + + if new == 0: + ui.status(_('update: (current)\n')) + elif pnode not in bheads: + ui.write(_('update: %d new changesets (update)\n') % new) + else: + ui.write(_('update: %d new changesets, %d branch heads (merge)\n') % + (new, len(bheads))) + + if opts.get('remote'): + t = [] + source, branches = hg.parseurl(ui.expandpath('default')) + other = hg.repository(hg.remoteui(repo, {}), source) + revs, checkout = hg.addbranchrevs(repo, other, branches, opts.get('rev')) + ui.debug('comparing with %s\n' % url.hidepassword(source)) + repo.ui.pushbuffer() + common, incoming, rheads = discovery.findcommonincoming(repo, other) + repo.ui.popbuffer() + if incoming: + t.append(_('1 or more incoming')) + + dest, branches = hg.parseurl(ui.expandpath('default-push', 'default')) + revs, checkout = hg.addbranchrevs(repo, repo, branches, None) + other = hg.repository(hg.remoteui(repo, {}), dest) + ui.debug('comparing with %s\n' % url.hidepassword(dest)) + repo.ui.pushbuffer() + o = discovery.findoutgoing(repo, other) + repo.ui.popbuffer() + o = repo.changelog.nodesbetween(o, None)[0] + if o: + t.append(_('%d outgoing') % len(o)) + + if t: + ui.write(_('remote: %s\n') % (', '.join(t))) + else: + 
ui.status(_('remote: (synced)\n')) + +def tag(ui, repo, name1, *names, **opts): + """add one or more tags for the current or given revision + + Name a particular revision using <name>. + + Tags are used to name particular revisions of the repository and are + very useful to compare different revisions, to go back to significant + earlier versions or to mark branch points as releases, etc. Changing + an existing tag is normally disallowed; use -f/--force to override. + + If no revision is given, the parent of the working directory is + used, or tip if no revision is checked out. + + To facilitate version control, distribution, and merging of tags, + they are stored as a file named ".hgtags" which is managed similarly + to other project files and can be hand-edited if necessary. This + also means that tagging creates a new commit. The file + ".hg/localtags" is used for local tags (not shared among + repositories). + + Tag commits are usually made at the head of a branch. If the parent + of the working directory is not a branch head, :hg:`tag` aborts; use + -f/--force to force the tag commit to be based on a non-head + changeset. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + Since tag names have priority over branch names during revision + lookup, using an existing branch name as a tag name is discouraged. + + Returns 0 on success. + """ + + rev_ = "." 
+ names = [t.strip() for t in (name1,) + names] + if len(names) != len(set(names)): + raise util.Abort(_('tag names must be unique')) + for n in names: + if n in ['tip', '.', 'null']: + raise util.Abort(_('the name \'%s\' is reserved') % n) + if not n: + raise util.Abort(_('tag names cannot consist entirely of whitespace')) + if opts.get('rev') and opts.get('remove'): + raise util.Abort(_("--rev and --remove are incompatible")) + if opts.get('rev'): + rev_ = opts['rev'] + message = opts.get('message') + if opts.get('remove'): + expectedtype = opts.get('local') and 'local' or 'global' + for n in names: + if not repo.tagtype(n): + raise util.Abort(_('tag \'%s\' does not exist') % n) + if repo.tagtype(n) != expectedtype: + if expectedtype == 'global': + raise util.Abort(_('tag \'%s\' is not a global tag') % n) + else: + raise util.Abort(_('tag \'%s\' is not a local tag') % n) + rev_ = nullid + if not message: + # we don't translate commit messages + message = 'Removed tag %s' % ', '.join(names) + elif not opts.get('force'): + for n in names: + if n in repo.tags(): + raise util.Abort(_('tag \'%s\' already exists ' + '(use -f to force)') % n) + if not opts.get('local'): + p1, p2 = repo.dirstate.parents() + if p2 != nullid: + raise util.Abort(_('uncommitted merge')) + bheads = repo.branchheads() + if not opts.get('force') and bheads and p1 not in bheads: + raise util.Abort(_('not at a branch head (use -f to force)')) + r = repo[rev_].node() + + if not message: + # we don't translate commit messages + message = ('Added tag %s for changeset %s' % + (', '.join(names), short(r))) + + date = opts.get('date') + if date: + date = util.parsedate(date) + + if opts.get('edit'): + message = ui.edit(message, ui.username()) + + repo.tag(names, r, message, opts.get('local'), opts.get('user'), date) + +def tags(ui, repo): + """list repository tags + + This lists both regular and local tags. When the -v/--verbose + switch is used, a third column "local" is printed for local tags. 
+ + Returns 0 on success. + """ + + hexfunc = ui.debugflag and hex or short + tagtype = "" + + for t, n in reversed(repo.tagslist()): + if ui.quiet: + ui.write("%s\n" % t) + continue + + try: + hn = hexfunc(n) + r = "%5d:%s" % (repo.changelog.rev(n), hn) + except error.LookupError: + r = " ?:%s" % hn + else: + spaces = " " * (30 - encoding.colwidth(t)) + if ui.verbose: + if repo.tagtype(t) == 'local': + tagtype = " local" + else: + tagtype = "" + ui.write("%s%s %s%s\n" % (t, spaces, r, tagtype)) + +def tip(ui, repo, **opts): + """show the tip revision + + The tip revision (usually just called the tip) is the changeset + most recently added to the repository (and therefore the most + recently changed head). + + If you have just made a commit, that commit will be the tip. If + you have just pulled changes from another repository, the tip of + that repository becomes the current tip. The "tip" tag is special + and cannot be renamed or assigned to a different changeset. + + Returns 0 on success. + """ + displayer = cmdutil.show_changeset(ui, repo, opts) + displayer.show(repo[len(repo) - 1]) + displayer.close() + +def unbundle(ui, repo, fname1, *fnames, **opts): + """apply one or more changegroup files + + Apply one or more compressed changegroup files generated by the + bundle command. + + Returns 0 on success, 1 if an update has unresolved files. + """ + fnames = (fname1,) + fnames + + lock = repo.lock() + try: + for fname in fnames: + f = url.open(ui, fname) + gen = changegroup.readbundle(f, fname) + modheads = repo.addchangegroup(gen, 'unbundle', 'bundle:' + fname, + lock=lock) + finally: + lock.release() + + return postincoming(ui, repo, modheads, opts.get('update'), None) + +def update(ui, repo, node=None, rev=None, clean=False, date=None, check=False): + """update working directory (or switch revisions) + + Update the repository's working directory to the specified + changeset. If no changeset is specified, update to the tip of the + current named branch. 
+ + If the changeset is not a descendant of the working directory's + parent, the update is aborted. With the -c/--check option, the + working directory is checked for uncommitted changes; if none are + found, the working directory is updated to the specified + changeset. + + The following rules apply when the working directory contains + uncommitted changes: + + 1. If neither -c/--check nor -C/--clean is specified, and if + the requested changeset is an ancestor or descendant of + the working directory's parent, the uncommitted changes + are merged into the requested changeset and the merged + result is left uncommitted. If the requested changeset is + not an ancestor or descendant (that is, it is on another + branch), the update is aborted and the uncommitted changes + are preserved. + + 2. With the -c/--check option, the update is aborted and the + uncommitted changes are preserved. + + 3. With the -C/--clean option, uncommitted changes are discarded and + the working directory is updated to the requested changeset. + + Use null as the changeset to remove the working directory (like + :hg:`clone -U`). + + If you want to update just one file to an older changeset, use + :hg:`revert`. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + Returns 0 on success, 1 if there are unresolved files. 
+ """ + if rev and node: + raise util.Abort(_("please specify just one revision")) + + if not rev: + rev = node + + rev = cmdutil.revsingle(repo, rev, rev).rev() + + if check and clean: + raise util.Abort(_("cannot specify both -c/--check and -C/--clean")) + + if check: + # we could use dirty() but we can ignore merge and branch trivia + c = repo[None] + if c.modified() or c.added() or c.removed(): + raise util.Abort(_("uncommitted local changes")) + + if date: + if rev: + raise util.Abort(_("you can't specify a revision and a date")) + rev = cmdutil.finddate(ui, repo, date) + + if clean or check: + return hg.clean(repo, rev) + else: + return hg.update(repo, rev) + +def verify(ui, repo): + """verify the integrity of the repository + + Verify the integrity of the current repository. + + This will perform an extensive check of the repository's + integrity, validating the hashes and checksums of each entry in + the changelog, manifest, and tracked files, as well as the + integrity of their crosslinks and indices. + + Returns 0 on success, 1 if errors are encountered. + """ + return hg.verify(repo) + +def version_(ui): + """output version and copyright information""" + ui.write(_("Mercurial Distributed SCM (version %s)\n") + % util.version()) + ui.status(_( + "(see http://mercurial.selenic.com for more information)\n" + "\nCopyright (C) 2005-2010 Matt Mackall and others\n" + "This is free software; see the source for copying conditions. 
" + "There is NO\nwarranty; " + "not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.\n" + )) + +# Command options and aliases are listed here, alphabetically + +globalopts = [ + ('R', 'repository', '', + _('repository root directory or name of overlay bundle file'), + _('REPO')), + ('', 'cwd', '', + _('change working directory'), _('DIR')), + ('y', 'noninteractive', None, + _('do not prompt, assume \'yes\' for any required answers')), + ('q', 'quiet', None, _('suppress output')), + ('v', 'verbose', None, _('enable additional output')), + ('', 'config', [], + _('set/override config option (use \'section.name=value\')'), + _('CONFIG')), + ('', 'debug', None, _('enable debugging output')), + ('', 'debugger', None, _('start debugger')), + ('', 'encoding', encoding.encoding, _('set the charset encoding'), + _('ENCODE')), + ('', 'encodingmode', encoding.encodingmode, + _('set the charset encoding mode'), _('MODE')), + ('', 'traceback', None, _('always print a traceback on exception')), + ('', 'time', None, _('time how long the command takes')), + ('', 'profile', None, _('print command execution profile')), + ('', 'version', None, _('output version information and exit')), + ('h', 'help', None, _('display help and exit')), +] + +dryrunopts = [('n', 'dry-run', None, + _('do not perform actions, just print output'))] + +remoteopts = [ + ('e', 'ssh', '', + _('specify ssh command to use'), _('CMD')), + ('', 'remotecmd', '', + _('specify hg command to run on the remote side'), _('CMD')), +] + +walkopts = [ + ('I', 'include', [], + _('include names matching the given patterns'), _('PATTERN')), + ('X', 'exclude', [], + _('exclude names matching the given patterns'), _('PATTERN')), +] + +commitopts = [ + ('m', 'message', '', + _('use text as commit message'), _('TEXT')), + ('l', 'logfile', '', + _('read commit message from file'), _('FILE')), +] + +commitopts2 = [ + ('d', 'date', '', + _('record datecode as commit date'), _('DATE')), + ('u', 'user', '', + _('record 
the specified user as committer'), _('USER')), +] + +templateopts = [ + ('', 'style', '', + _('display using template map file'), _('STYLE')), + ('', 'template', '', + _('display with template'), _('TEMPLATE')), +] + +logopts = [ + ('p', 'patch', None, _('show patch')), + ('g', 'git', None, _('use git extended diff format')), + ('l', 'limit', '', + _('limit number of changes displayed'), _('NUM')), + ('M', 'no-merges', None, _('do not show merges')), + ('', 'stat', None, _('output diffstat-style summary of changes')), +] + templateopts + +diffopts = [ + ('a', 'text', None, _('treat all files as text')), + ('g', 'git', None, _('use git extended diff format')), + ('', 'nodates', None, _('omit dates from diff headers')) +] + +diffopts2 = [ + ('p', 'show-function', None, _('show which function each change is in')), + ('', 'reverse', None, _('produce a diff that undoes the changes')), + ('w', 'ignore-all-space', None, + _('ignore white space when comparing lines')), + ('b', 'ignore-space-change', None, + _('ignore changes in the amount of white space')), + ('B', 'ignore-blank-lines', None, + _('ignore changes whose lines are all blank')), + ('U', 'unified', '', + _('number of lines of context to show'), _('NUM')), + ('', 'stat', None, _('output diffstat-style summary of changes')), +] + +similarityopts = [ + ('s', 'similarity', '', + _('guess renamed files by similarity (0<=s<=100)'), _('SIMILARITY')) +] + +subrepoopts = [ + ('S', 'subrepos', None, + _('recurse into subrepositories')) +] + +table = { + "^add": (add, walkopts + subrepoopts + dryrunopts, + _('[OPTION]... [FILE]...')), + "addremove": + (addremove, similarityopts + walkopts + dryrunopts, + _('[OPTION]... 
[FILE]...')), + "^annotate|blame": + (annotate, + [('r', 'rev', '', + _('annotate the specified revision'), _('REV')), + ('', 'follow', None, + _('follow copies/renames and list the filename (DEPRECATED)')), + ('', 'no-follow', None, _("don't follow copies and renames")), + ('a', 'text', None, _('treat all files as text')), + ('u', 'user', None, _('list the author (long with -v)')), + ('f', 'file', None, _('list the filename')), + ('d', 'date', None, _('list the date (short with -q)')), + ('n', 'number', None, _('list the revision number (default)')), + ('c', 'changeset', None, _('list the changeset')), + ('l', 'line-number', None, + _('show line number at the first appearance')) + ] + walkopts, + _('[-r REV] [-f] [-a] [-u] [-d] [-n] [-c] [-l] FILE...')), + "archive": + (archive, + [('', 'no-decode', None, _('do not pass files through decoders')), + ('p', 'prefix', '', + _('directory prefix for files in archive'), _('PREFIX')), + ('r', 'rev', '', + _('revision to distribute'), _('REV')), + ('t', 'type', '', + _('type of distribution to create'), _('TYPE')), + ] + subrepoopts + walkopts, + _('[OPTION]... DEST')), + "backout": + (backout, + [('', 'merge', None, + _('merge with old dirstate parent after backout')), + ('', 'parent', '', + _('parent to choose when backing out merge'), _('REV')), + ('t', 'tool', '', + _('specify merge tool')), + ('r', 'rev', '', + _('revision to backout'), _('REV')), + ] + walkopts + commitopts + commitopts2, + _('[OPTION]... 
[-r] REV')), + "bisect": + (bisect, + [('r', 'reset', False, _('reset bisect state')), + ('g', 'good', False, _('mark changeset good')), + ('b', 'bad', False, _('mark changeset bad')), + ('s', 'skip', False, _('skip testing changeset')), + ('c', 'command', '', + _('use command to check changeset state'), _('CMD')), + ('U', 'noupdate', False, _('do not update to target'))], + _("[-gbsr] [-U] [-c CMD] [REV]")), + "branch": + (branch, + [('f', 'force', None, + _('set branch name even if it shadows an existing branch')), + ('C', 'clean', None, _('reset branch name to parent branch name'))], + _('[-fC] [NAME]')), + "branches": + (branches, + [('a', 'active', False, + _('show only branches that have unmerged heads')), + ('c', 'closed', False, + _('show normal and closed branches'))], + _('[-ac]')), + "bundle": + (bundle, + [('f', 'force', None, + _('run even when the destination is unrelated')), + ('r', 'rev', [], + _('a changeset intended to be added to the destination'), + _('REV')), + ('b', 'branch', [], + _('a specific branch you would like to bundle'), + _('BRANCH')), + ('', 'base', [], + _('a base changeset assumed to be available at the destination'), + _('REV')), + ('a', 'all', None, _('bundle all changesets in the repository')), + ('t', 'type', 'bzip2', + _('bundle compression type to use'), _('TYPE')), + ] + remoteopts, + _('[-f] [-t TYPE] [-a] [-r REV]... [--base REV]... FILE [DEST]')), + "cat": + (cat, + [('o', 'output', '', + _('print output to file with formatted name'), _('FORMAT')), + ('r', 'rev', '', + _('print the given revision'), _('REV')), + ('', 'decode', None, _('apply any matching decode filter')), + ] + walkopts, + _('[OPTION]... 
FILE...')), + "^clone": + (clone, + [('U', 'noupdate', None, + _('the clone will include an empty working copy (only a repository)')), + ('u', 'updaterev', '', + _('revision, tag or branch to check out'), _('REV')), + ('r', 'rev', [], + _('include the specified changeset'), _('REV')), + ('b', 'branch', [], + _('clone only the specified branch'), _('BRANCH')), + ('', 'pull', None, _('use pull protocol to copy metadata')), + ('', 'uncompressed', None, + _('use uncompressed transfer (fast over LAN)')), + ] + remoteopts, + _('[OPTION]... SOURCE [DEST]')), + "^commit|ci": + (commit, + [('A', 'addremove', None, + _('mark new/missing files as added/removed before committing')), + ('', 'close-branch', None, + _('mark a branch as closed, hiding it from the branch list')), + ] + walkopts + commitopts + commitopts2, + _('[OPTION]... [FILE]...')), + "copy|cp": + (copy, + [('A', 'after', None, _('record a copy that has already occurred')), + ('f', 'force', None, + _('forcibly copy over an existing managed file')), + ] + walkopts + dryrunopts, + _('[OPTION]... [SOURCE]... DEST')), + "debugancestor": (debugancestor, [], _('[INDEX] REV1 REV2')), + "debugbuilddag": + (debugbuilddag, + [('m', 'mergeable-file', None, _('add single file mergeable changes')), + ('a', 'appended-file', None, _('add single file all revs append to')), + ('o', 'overwritten-file', None, _('add single file all revs overwrite')), + ('n', 'new-file', None, _('add new file at each rev')), + ], + _('[OPTION]... TEXT')), + "debugcheckstate": (debugcheckstate, [], ''), + "debugcommands": (debugcommands, [], _('[COMMAND]')), + "debugcomplete": + (debugcomplete, + [('o', 'options', None, _('show the command options'))], + _('[-o] CMD')), + "debugdag": + (debugdag, + [('t', 'tags', None, _('use tags as labels')), + ('b', 'branches', None, _('annotate with branch names')), + ('', 'dots', None, _('use dots for runs')), + ('s', 'spaces', None, _('separate elements by spaces')), + ], + _('[OPTION]... 
[FILE [REV]...]')), + "debugdate": + (debugdate, + [('e', 'extended', None, _('try extended date formats'))], + _('[-e] DATE [RANGE]')), + "debugdata": (debugdata, [], _('FILE REV')), + "debugfsinfo": (debugfsinfo, [], _('[PATH]')), + "debugindex": (debugindex, + [('f', 'format', 0, _('revlog format'), _('FORMAT'))], + _('FILE')), + "debugindexdot": (debugindexdot, [], _('FILE')), + "debuginstall": (debuginstall, [], ''), + "debugpushkey": (debugpushkey, [], _('REPO NAMESPACE [KEY OLD NEW]')), + "debugrebuildstate": + (debugrebuildstate, + [('r', 'rev', '', + _('revision to rebuild to'), _('REV'))], + _('[-r REV] [REV]')), + "debugrename": + (debugrename, + [('r', 'rev', '', + _('revision to debug'), _('REV'))], + _('[-r REV] FILE')), + "debugrevspec": + (debugrevspec, [], ('REVSPEC')), + "debugsetparents": + (debugsetparents, [], _('REV1 [REV2]')), + "debugstate": + (debugstate, + [('', 'nodates', None, _('do not display the saved mtime'))], + _('[OPTION]...')), + "debugsub": + (debugsub, + [('r', 'rev', '', + _('revision to check'), _('REV'))], + _('[-r REV] [REV]')), + "debugwalk": (debugwalk, walkopts, _('[OPTION]... [FILE]...')), + "^diff": + (diff, + [('r', 'rev', [], + _('revision'), _('REV')), + ('c', 'change', '', + _('change made by revision'), _('REV')) + ] + diffopts + diffopts2 + walkopts + subrepoopts, + _('[OPTION]... ([-c REV] | [-r REV1 [-r REV2]]) [FILE]...')), + "^export": + (export, + [('o', 'output', '', + _('print output to file with formatted name'), _('FORMAT')), + ('', 'switch-parent', None, _('diff against the second parent')), + ('r', 'rev', [], + _('revisions to export'), _('REV')), + ] + diffopts, + _('[OPTION]... [-o OUTFILESPEC] REV...')), + "^forget": + (forget, + [] + walkopts, + _('[OPTION]... 
FILE...')), + "grep": + (grep, + [('0', 'print0', None, _('end fields with NUL')), + ('', 'all', None, _('print all revisions that match')), + ('f', 'follow', None, + _('follow changeset history,' + ' or file history across copies and renames')), + ('i', 'ignore-case', None, _('ignore case when matching')), + ('l', 'files-with-matches', None, + _('print only filenames and revisions that match')), + ('n', 'line-number', None, _('print matching line numbers')), + ('r', 'rev', [], + _('only search files changed within revision range'), _('REV')), + ('u', 'user', None, _('list the author (long with -v)')), + ('d', 'date', None, _('list the date (short with -q)')), + ] + walkopts, + _('[OPTION]... PATTERN [FILE]...')), + "heads": + (heads, + [('r', 'rev', '', + _('show only heads which are descendants of STARTREV'), + _('STARTREV')), + ('t', 'topo', False, _('show topological heads only')), + ('a', 'active', False, + _('show active branchheads only (DEPRECATED)')), + ('c', 'closed', False, + _('show normal and closed branch heads')), + ] + templateopts, + _('[-ac] [-r STARTREV] [REV]...')), + "help": (help_, [], _('[TOPIC]')), + "identify|id": + (identify, + [('r', 'rev', '', + _('identify the specified revision'), _('REV')), + ('n', 'num', None, _('show local revision number')), + ('i', 'id', None, _('show global revision id')), + ('b', 'branch', None, _('show branch')), + ('t', 'tags', None, _('show tags'))], + _('[-nibt] [-r REV] [SOURCE]')), + "import|patch": + (import_, + [('p', 'strip', 1, + _('directory strip option for patch. 
This has the same ' + 'meaning as the corresponding patch option'), + _('NUM')), + ('b', 'base', '', + _('base path'), _('PATH')), + ('f', 'force', None, + _('skip check for outstanding uncommitted changes')), + ('', 'no-commit', None, + _("don't commit, just update the working directory")), + ('', 'exact', None, + _('apply patch to the nodes from which it was generated')), + ('', 'import-branch', None, + _('use any branch information in patch (implied by --exact)'))] + + commitopts + commitopts2 + similarityopts, + _('[OPTION]... PATCH...')), + "incoming|in": + (incoming, + [('f', 'force', None, + _('run even if remote repository is unrelated')), + ('n', 'newest-first', None, _('show newest record first')), + ('', 'bundle', '', + _('file to store the bundles into'), _('FILE')), + ('r', 'rev', [], + _('a remote changeset intended to be added'), _('REV')), + ('b', 'branch', [], + _('a specific branch you would like to pull'), _('BRANCH')), + ] + logopts + remoteopts + subrepoopts, + _('[-p] [-n] [-M] [-f] [-r REV]...' + ' [--bundle FILENAME] [SOURCE]')), + "^init": + (init, + remoteopts, + _('[-e CMD] [--remotecmd CMD] [DEST]')), + "locate": + (locate, + [('r', 'rev', '', + _('search the repository as it is in REV'), _('REV')), + ('0', 'print0', None, + _('end filenames with NUL, for use with xargs')), + ('f', 'fullpath', None, + _('print complete paths from the filesystem root')), + ] + walkopts, + _('[OPTION]... 
[PATTERN]...')), + "^log|history": + (log, + [('f', 'follow', None, + _('follow changeset history,' + ' or file history across copies and renames')), + ('', 'follow-first', None, + _('only follow the first parent of merge changesets')), + ('d', 'date', '', + _('show revisions matching date spec'), _('DATE')), + ('C', 'copies', None, _('show copied files')), + ('k', 'keyword', [], + _('do case-insensitive search for a given text'), _('TEXT')), + ('r', 'rev', [], + _('show the specified revision or range'), _('REV')), + ('', 'removed', None, _('include revisions where files were removed')), + ('m', 'only-merges', None, _('show only merges')), + ('u', 'user', [], + _('revisions committed by user'), _('USER')), + ('', 'only-branch', [], + _('show only changesets within the given named branch (DEPRECATED)'), + _('BRANCH')), + ('b', 'branch', [], + _('show changesets within the given named branch'), _('BRANCH')), + ('P', 'prune', [], + _('do not display revision or any of its ancestors'), _('REV')), + ] + logopts + walkopts, + _('[OPTION]... [FILE]')), + "manifest": + (manifest, + [('r', 'rev', '', + _('revision to display'), _('REV'))], + _('[-r REV]')), + "^merge": + (merge, + [('f', 'force', None, _('force a merge with outstanding changes')), + ('t', 'tool', '', _('specify merge tool')), + ('r', 'rev', '', + _('revision to merge'), _('REV')), + ('P', 'preview', None, + _('review revisions to merge (no merge is performed)'))], + _('[-P] [-f] [[-r] REV]')), + "outgoing|out": + (outgoing, + [('f', 'force', None, + _('run even when the destination is unrelated')), + ('r', 'rev', [], + _('a changeset intended to be included in the destination'), + _('REV')), + ('n', 'newest-first', None, _('show newest record first')), + ('b', 'branch', [], + _('a specific branch you would like to push'), _('BRANCH')), + ] + logopts + remoteopts + subrepoopts, + _('[-M] [-p] [-n] [-f] [-r REV]... 
[DEST]')), + "parents": + (parents, + [('r', 'rev', '', + _('show parents of the specified revision'), _('REV')), + ] + templateopts, + _('[-r REV] [FILE]')), + "paths": (paths, [], _('[NAME]')), + "^pull": + (pull, + [('u', 'update', None, + _('update to new branch head if changesets were pulled')), + ('f', 'force', None, + _('run even when remote repository is unrelated')), + ('r', 'rev', [], + _('a remote changeset intended to be added'), _('REV')), + ('b', 'branch', [], + _('a specific branch you would like to pull'), _('BRANCH')), + ] + remoteopts, + _('[-u] [-f] [-r REV]... [-e CMD] [--remotecmd CMD] [SOURCE]')), + "^push": + (push, + [('f', 'force', None, _('force push')), + ('r', 'rev', [], + _('a changeset intended to be included in the destination'), + _('REV')), + ('b', 'branch', [], + _('a specific branch you would like to push'), _('BRANCH')), + ('', 'new-branch', False, _('allow pushing a new branch')), + ] + remoteopts, + _('[-f] [-r REV]... [-e CMD] [--remotecmd CMD] [DEST]')), + "recover": (recover, []), + "^remove|rm": + (remove, + [('A', 'after', None, _('record delete for missing files')), + ('f', 'force', None, + _('remove (and delete) file even if added or modified')), + ] + walkopts, + _('[OPTION]... FILE...')), + "rename|move|mv": + (rename, + [('A', 'after', None, _('record a rename that has already occurred')), + ('f', 'force', None, + _('forcibly copy over an existing managed file')), + ] + walkopts + dryrunopts, + _('[OPTION]... SOURCE... DEST')), + "resolve": + (resolve, + [('a', 'all', None, _('select all unresolved files')), + ('l', 'list', None, _('list state of files needing merge')), + ('m', 'mark', None, _('mark files as resolved')), + ('u', 'unmark', None, _('mark files as unresolved')), + ('t', 'tool', '', _('specify merge tool')), + ('n', 'no-status', None, _('hide status prefix'))] + + walkopts, + _('[OPTION]... 
[FILE]...')), + "revert": + (revert, + [('a', 'all', None, _('revert all changes when no arguments given')), + ('d', 'date', '', + _('tipmost revision matching date'), _('DATE')), + ('r', 'rev', '', + _('revert to the specified revision'), _('REV')), + ('', 'no-backup', None, _('do not save backup copies of files')), + ] + walkopts + dryrunopts, + _('[OPTION]... [-r REV] [NAME]...')), + "rollback": (rollback, dryrunopts), + "root": (root, []), + "^serve": + (serve, + [('A', 'accesslog', '', + _('name of access log file to write to'), _('FILE')), + ('d', 'daemon', None, _('run server in background')), + ('', 'daemon-pipefds', '', + _('used internally by daemon mode'), _('NUM')), + ('E', 'errorlog', '', + _('name of error log file to write to'), _('FILE')), + # use string type, then we can check if something was passed + ('p', 'port', '', + _('port to listen on (default: 8000)'), _('PORT')), + ('a', 'address', '', + _('address to listen on (default: all interfaces)'), _('ADDR')), + ('', 'prefix', '', + _('prefix path to serve from (default: server root)'), _('PREFIX')), + ('n', 'name', '', + _('name to show in web pages (default: working directory)'), + _('NAME')), + ('', 'web-conf', '', + _('name of the hgweb config file (see "hg help hgweb")'), + _('FILE')), + ('', 'webdir-conf', '', + _('name of the hgweb config file (DEPRECATED)'), _('FILE')), + ('', 'pid-file', '', + _('name of file to write process ID to'), _('FILE')), + ('', 'stdio', None, _('for remote clients')), + ('t', 'templates', '', + _('web templates to use'), _('TEMPLATE')), + ('', 'style', '', + _('template style to use'), _('STYLE')), + ('6', 'ipv6', None, _('use IPv6 in addition to IPv4')), + ('', 'certificate', '', + _('SSL certificate file'), _('FILE'))], + _('[OPTION]...')), + "showconfig|debugconfig": + (showconfig, + [('u', 'untrusted', None, _('show untrusted configuration options'))], + _('[-u] [NAME]...')), + "^summary|sum": + (summary, + [('', 'remote', None, _('check for push and 
pull'))], '[--remote]'), + "^status|st": + (status, + [('A', 'all', None, _('show status of all files')), + ('m', 'modified', None, _('show only modified files')), + ('a', 'added', None, _('show only added files')), + ('r', 'removed', None, _('show only removed files')), + ('d', 'deleted', None, _('show only deleted (but tracked) files')), + ('c', 'clean', None, _('show only files without changes')), + ('u', 'unknown', None, _('show only unknown (not tracked) files')), + ('i', 'ignored', None, _('show only ignored files')), + ('n', 'no-status', None, _('hide status prefix')), + ('C', 'copies', None, _('show source of copied files')), + ('0', 'print0', None, + _('end filenames with NUL, for use with xargs')), + ('', 'rev', [], + _('show difference from revision'), _('REV')), + ('', 'change', '', + _('list the changed files of a revision'), _('REV')), + ] + walkopts + subrepoopts, + _('[OPTION]... [FILE]...')), + "tag": + (tag, + [('f', 'force', None, _('force tag')), + ('l', 'local', None, _('make the tag local')), + ('r', 'rev', '', + _('revision to tag'), _('REV')), + ('', 'remove', None, _('remove a tag')), + # -l/--local is already there, commitopts cannot be used + ('e', 'edit', None, _('edit commit message')), + ('m', 'message', '', + _('use <text> as commit message'), _('TEXT')), + ] + commitopts2, + _('[-f] [-l] [-m TEXT] [-d DATE] [-u USER] [-r REV] NAME...')), + "tags": (tags, [], ''), + "tip": + (tip, + [('p', 'patch', None, _('show patch')), + ('g', 'git', None, _('use git extended diff format')), + ] + templateopts, + _('[-p] [-g]')), + "unbundle": + (unbundle, + [('u', 'update', None, + _('update to new branch head if changesets were unbundled'))], + _('[-u] FILE...')), + "^update|up|checkout|co": + (update, + [('C', 'clean', None, _('discard uncommitted changes (no backup)')), + ('c', 'check', None, + _('update across branches if no uncommitted changes')), + ('d', 'date', '', + _('tipmost revision matching date'), _('DATE')), + ('r', 'rev', '', + 
_('revision'), _('REV'))], + _('[-c] [-C] [-d DATE] [[-r] REV]')), + "verify": (verify, []), + "version": (version_, []), +} + +norepo = ("clone init version help debugcommands debugcomplete" + " debugdate debuginstall debugfsinfo debugpushkey") +optionalrepo = ("identify paths serve showconfig debugancestor debugdag" + " debugdata debugindex debugindexdot") diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/commands.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/commands.pyo Binary files differnew file mode 100644 index 0000000..eb9254f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/commands.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/config.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/config.py new file mode 100644 index 0000000..802f444 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/config.py @@ -0,0 +1,142 @@ +# config.py - configuration parsing for Mercurial +# +# Copyright 2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import error, util +import re, os + +class sortdict(dict): + 'a simple sorted dictionary' + def __init__(self, data=None): + self._list = [] + if data: + self.update(data) + def copy(self): + return sortdict(self) + def __setitem__(self, key, val): + if key in self: + self._list.remove(key) + self._list.append(key) + dict.__setitem__(self, key, val) + def __iter__(self): + return self._list.__iter__() + def update(self, src): + for k in src: + self[k] = src[k] + def items(self): + return [(k, self[k]) for k in self._list] + def __delitem__(self, key): + dict.__delitem__(self, key) + self._list.remove(key) + +class config(object): + def __init__(self, data=None): + self._data = {} + self._source = {} + if data: + for k in data._data: + self._data[k] = data[k].copy() + self._source = data._source.copy() + def copy(self): + return config(self) + def __contains__(self, section): + return section in self._data + def __getitem__(self, section): + return self._data.get(section, {}) + def __iter__(self): + for d in self.sections(): + yield d + def update(self, src): + for s in src: + if s not in self: + self._data[s] = sortdict() + self._data[s].update(src._data[s]) + self._source.update(src._source) + def get(self, section, item, default=None): + return self._data.get(section, {}).get(item, default) + def source(self, section, item): + return self._source.get((section, item), "") + def sections(self): + return sorted(self._data.keys()) + def items(self, section): + return self._data.get(section, {}).items() + def set(self, section, item, value, source=""): + if section not in self: + self._data[section] = sortdict() + self._data[section][item] = value + self._source[(section, item)] = source + + def parse(self, src, data, sections=None, remap=None, include=None): + sectionre = re.compile(r'\[([^\[]+)\]') + itemre = re.compile(r'([^=\s][^=]*?)\s*=\s*(.*\S|)') + contre = re.compile(r'\s+(\S|\S.*\S)\s*$') + emptyre = re.compile(r'(;|#|\s*$)') + unsetre 
= re.compile(r'%unset\s+(\S+)') + includere = re.compile(r'%include\s+(\S|\S.*\S)\s*$') + section = "" + item = None + line = 0 + cont = False + + for l in data.splitlines(True): + line += 1 + if cont: + m = contre.match(l) + if m: + if sections and section not in sections: + continue + v = self.get(section, item) + "\n" + m.group(1) + self.set(section, item, v, "%s:%d" % (src, line)) + continue + item = None + cont = False + m = includere.match(l) + if m: + inc = util.expandpath(m.group(1)) + base = os.path.dirname(src) + inc = os.path.normpath(os.path.join(base, inc)) + if include: + try: + include(inc, remap=remap, sections=sections) + except IOError, inst: + raise error.ParseError(_("cannot include %s (%s)") + % (inc, inst.strerror), + "%s:%s" % (src, line)) + continue + if emptyre.match(l): + continue + m = sectionre.match(l) + if m: + section = m.group(1) + if remap: + section = remap.get(section, section) + if section not in self: + self._data[section] = sortdict() + continue + m = itemre.match(l) + if m: + item = m.group(1) + cont = True + if sections and section not in sections: + continue + self.set(section, item, m.group(2), "%s:%d" % (src, line)) + continue + m = unsetre.match(l) + if m: + name = m.group(1) + if sections and section not in sections: + continue + if self.get(section, name) != None: + del self._data[section][name] + continue + + raise error.ParseError(l.rstrip(), ("%s:%s" % (src, line))) + + def read(self, path, fp=None, sections=None, remap=None): + if not fp: + fp = open(path) + self.parse(path, fp.read(), sections, remap, self.read) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/config.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/config.pyo Binary files differnew file mode 100644 index 0000000..a349147 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/config.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/context.py 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/context.py new file mode 100644 index 0000000..ff1bfa7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/context.py @@ -0,0 +1,1098 @@ +# context.py - changeset and file context objects for mercurial +# +# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from node import nullid, nullrev, short, hex +from i18n import _ +import ancestor, bdiff, error, util, subrepo, patch +import os, errno, stat + +propertycache = util.propertycache + +class changectx(object): + """A changecontext object makes access to data related to a particular + changeset convenient.""" + def __init__(self, repo, changeid=''): + """changeid is a revision number, node, or tag""" + if changeid == '': + changeid = '.' + self._repo = repo + if isinstance(changeid, (long, int)): + self._rev = changeid + self._node = self._repo.changelog.node(changeid) + else: + self._node = self._repo.lookup(changeid) + self._rev = self._repo.changelog.rev(self._node) + + def __str__(self): + return short(self.node()) + + def __int__(self): + return self.rev() + + def __repr__(self): + return "<changectx %s>" % str(self) + + def __hash__(self): + try: + return hash(self._rev) + except AttributeError: + return id(self) + + def __eq__(self, other): + try: + return self._rev == other._rev + except AttributeError: + return False + + def __ne__(self, other): + return not (self == other) + + def __nonzero__(self): + return self._rev != nullrev + + @propertycache + def _changeset(self): + return self._repo.changelog.read(self.node()) + + @propertycache + def _manifest(self): + return self._repo.manifest.read(self._changeset[0]) + + @propertycache + def _manifestdelta(self): + return self._repo.manifest.readdelta(self._changeset[0]) + + @propertycache + def _parents(self): + p = 
self._repo.changelog.parentrevs(self._rev) + if p[1] == nullrev: + p = p[:-1] + return [changectx(self._repo, x) for x in p] + + @propertycache + def substate(self): + return subrepo.state(self, self._repo.ui) + + def __contains__(self, key): + return key in self._manifest + + def __getitem__(self, key): + return self.filectx(key) + + def __iter__(self): + for f in sorted(self._manifest): + yield f + + def changeset(self): + return self._changeset + def manifest(self): + return self._manifest + def manifestnode(self): + return self._changeset[0] + + def rev(self): + return self._rev + def node(self): + return self._node + def hex(self): + return hex(self._node) + def user(self): + return self._changeset[1] + def date(self): + return self._changeset[2] + def files(self): + return self._changeset[3] + def description(self): + return self._changeset[4] + def branch(self): + return self._changeset[5].get("branch") + def extra(self): + return self._changeset[5] + def tags(self): + return self._repo.nodetags(self._node) + + def parents(self): + """return contexts for each parent changeset""" + return self._parents + + def p1(self): + return self._parents[0] + + def p2(self): + if len(self._parents) == 2: + return self._parents[1] + return changectx(self._repo, -1) + + def children(self): + """return contexts for each child changeset""" + c = self._repo.changelog.children(self._node) + return [changectx(self._repo, x) for x in c] + + def ancestors(self): + for a in self._repo.changelog.ancestors(self._rev): + yield changectx(self._repo, a) + + def descendants(self): + for d in self._repo.changelog.descendants(self._rev): + yield changectx(self._repo, d) + + def _fileinfo(self, path): + if '_manifest' in self.__dict__: + try: + return self._manifest[path], self._manifest.flags(path) + except KeyError: + raise error.LookupError(self._node, path, + _('not found in manifest')) + if '_manifestdelta' in self.__dict__ or path in self.files(): + if path in self._manifestdelta: + 
return self._manifestdelta[path], self._manifestdelta.flags(path) + node, flag = self._repo.manifest.find(self._changeset[0], path) + if not node: + raise error.LookupError(self._node, path, + _('not found in manifest')) + + return node, flag + + def filenode(self, path): + return self._fileinfo(path)[0] + + def flags(self, path): + try: + return self._fileinfo(path)[1] + except error.LookupError: + return '' + + def filectx(self, path, fileid=None, filelog=None): + """get a file context from this changeset""" + if fileid is None: + fileid = self.filenode(path) + return filectx(self._repo, path, fileid=fileid, + changectx=self, filelog=filelog) + + def ancestor(self, c2): + """ + return the ancestor context of self and c2 + """ + # deal with workingctxs + n2 = c2._node + if n2 == None: + n2 = c2._parents[0]._node + n = self._repo.changelog.ancestor(self._node, n2) + return changectx(self._repo, n) + + def walk(self, match): + fset = set(match.files()) + # for dirstate.walk, files=['.'] means "walk the whole tree". 
+ # follow that here, too + fset.discard('.') + for fn in self: + for ffn in fset: + # match if the file is the exact name or a directory + if ffn == fn or fn.startswith("%s/" % ffn): + fset.remove(ffn) + break + if match(fn): + yield fn + for fn in sorted(fset): + if match.bad(fn, _('no such file in rev %s') % self) and match(fn): + yield fn + + def sub(self, path): + return subrepo.subrepo(self, path) + + def diff(self, ctx2=None, match=None, **opts): + """Returns a diff generator for the given contexts and matcher""" + if ctx2 is None: + ctx2 = self.p1() + if ctx2 is not None and not isinstance(ctx2, changectx): + ctx2 = self._repo[ctx2] + diffopts = patch.diffopts(self._repo.ui, opts) + return patch.diff(self._repo, ctx2.node(), self.node(), + match=match, opts=diffopts) + +class filectx(object): + """A filecontext object makes access to data related to a particular + filerevision convenient.""" + def __init__(self, repo, path, changeid=None, fileid=None, + filelog=None, changectx=None): + """changeid can be a changeset revision, node, or tag. 
+ fileid can be a file revision or node.""" + self._repo = repo + self._path = path + + assert (changeid is not None + or fileid is not None + or changectx is not None), \ + ("bad args: changeid=%r, fileid=%r, changectx=%r" + % (changeid, fileid, changectx)) + + if filelog: + self._filelog = filelog + + if changeid is not None: + self._changeid = changeid + if changectx is not None: + self._changectx = changectx + if fileid is not None: + self._fileid = fileid + + @propertycache + def _changectx(self): + return changectx(self._repo, self._changeid) + + @propertycache + def _filelog(self): + return self._repo.file(self._path) + + @propertycache + def _changeid(self): + if '_changectx' in self.__dict__: + return self._changectx.rev() + else: + return self._filelog.linkrev(self._filerev) + + @propertycache + def _filenode(self): + if '_fileid' in self.__dict__: + return self._filelog.lookup(self._fileid) + else: + return self._changectx.filenode(self._path) + + @propertycache + def _filerev(self): + return self._filelog.rev(self._filenode) + + @propertycache + def _repopath(self): + return self._path + + def __nonzero__(self): + try: + self._filenode + return True + except error.LookupError: + # file is missing + return False + + def __str__(self): + return "%s@%s" % (self.path(), short(self.node())) + + def __repr__(self): + return "<filectx %s>" % str(self) + + def __hash__(self): + try: + return hash((self._path, self._filenode)) + except AttributeError: + return id(self) + + def __eq__(self, other): + try: + return (self._path == other._path + and self._filenode == other._filenode) + except AttributeError: + return False + + def __ne__(self, other): + return not (self == other) + + def filectx(self, fileid): + '''opens an arbitrary revision of the file without + opening a new filelog''' + return filectx(self._repo, self._path, fileid=fileid, + filelog=self._filelog) + + def filerev(self): + return self._filerev + def filenode(self): + return self._filenode + def 
flags(self): + return self._changectx.flags(self._path) + def filelog(self): + return self._filelog + + def rev(self): + if '_changectx' in self.__dict__: + return self._changectx.rev() + if '_changeid' in self.__dict__: + return self._changectx.rev() + return self._filelog.linkrev(self._filerev) + + def linkrev(self): + return self._filelog.linkrev(self._filerev) + def node(self): + return self._changectx.node() + def hex(self): + return hex(self.node()) + def user(self): + return self._changectx.user() + def date(self): + return self._changectx.date() + def files(self): + return self._changectx.files() + def description(self): + return self._changectx.description() + def branch(self): + return self._changectx.branch() + def extra(self): + return self._changectx.extra() + def manifest(self): + return self._changectx.manifest() + def changectx(self): + return self._changectx + + def data(self): + return self._filelog.read(self._filenode) + def path(self): + return self._path + def size(self): + return self._filelog.size(self._filerev) + + def cmp(self, fctx): + """compare with other file context + + returns True if different than fctx. + """ + if (fctx._filerev is None and self._repo._encodefilterpats + or self.size() == fctx.size()): + return self._filelog.cmp(self._filenode, fctx.data()) + + return True + + def renamed(self): + """check if file was actually renamed in this changeset revision + + If rename logged in file revision, we report copy for changeset only + if file revisions linkrev points back to the changeset in question + or both changeset parents contain different file revisions. 
+ """ + + renamed = self._filelog.renamed(self._filenode) + if not renamed: + return renamed + + if self.rev() == self.linkrev(): + return renamed + + name = self.path() + fnode = self._filenode + for p in self._changectx.parents(): + try: + if fnode == p.filenode(name): + return None + except error.LookupError: + pass + return renamed + + def parents(self): + p = self._path + fl = self._filelog + pl = [(p, n, fl) for n in self._filelog.parents(self._filenode)] + + r = self._filelog.renamed(self._filenode) + if r: + pl[0] = (r[0], r[1], None) + + return [filectx(self._repo, p, fileid=n, filelog=l) + for p, n, l in pl if n != nullid] + + def children(self): + # hard for renames + c = self._filelog.children(self._filenode) + return [filectx(self._repo, self._path, fileid=x, + filelog=self._filelog) for x in c] + + def annotate(self, follow=False, linenumber=None): + '''returns a list of tuples of (ctx, line) for each line + in the file, where ctx is the filectx of the node where + that line was last changed. + This returns tuples of ((ctx, linenumber), line) for each line, + if "linenumber" parameter is NOT "None". + In such tuples, linenumber means one at the first appearance + in the managed file. 
+ To reduce annotation cost, + this returns fixed value(False is used) as linenumber, + if "linenumber" parameter is "False".''' + + def decorate_compat(text, rev): + return ([rev] * len(text.splitlines()), text) + + def without_linenumber(text, rev): + return ([(rev, False)] * len(text.splitlines()), text) + + def with_linenumber(text, rev): + size = len(text.splitlines()) + return ([(rev, i) for i in xrange(1, size + 1)], text) + + decorate = (((linenumber is None) and decorate_compat) or + (linenumber and with_linenumber) or + without_linenumber) + + def pair(parent, child): + for a1, a2, b1, b2 in bdiff.blocks(parent[1], child[1]): + child[0][b1:b2] = parent[0][a1:a2] + return child + + getlog = util.lrucachefunc(lambda x: self._repo.file(x)) + def getctx(path, fileid): + log = path == self._path and self._filelog or getlog(path) + return filectx(self._repo, path, fileid=fileid, filelog=log) + getctx = util.lrucachefunc(getctx) + + def parents(f): + # we want to reuse filectx objects as much as possible + p = f._path + if f._filerev is None: # working dir + pl = [(n.path(), n.filerev()) for n in f.parents()] + else: + pl = [(p, n) for n in f._filelog.parentrevs(f._filerev)] + + if follow: + r = f.renamed() + if r: + pl[0] = (r[0], getlog(r[0]).rev(r[1])) + + return [getctx(p, n) for p, n in pl if n != nullrev] + + # use linkrev to find the first changeset where self appeared + if self.rev() != self.linkrev(): + base = self.filectx(self.filerev()) + else: + base = self + + # find all ancestors + needed = {base: 1} + visit = [base] + files = [base._path] + while visit: + f = visit.pop(0) + for p in parents(f): + if p not in needed: + needed[p] = 1 + visit.append(p) + if p._path not in files: + files.append(p._path) + else: + # count how many times we'll use this + needed[p] += 1 + + # sort by revision (per file) which is a topological order + visit = [] + for f in files: + visit.extend(n for n in needed if n._path == f) + + hist = {} + for f in sorted(visit, 
key=lambda x: x.rev()): + curr = decorate(f.data(), f) + for p in parents(f): + curr = pair(hist[p], curr) + # trim the history of unneeded revs + needed[p] -= 1 + if not needed[p]: + del hist[p] + hist[f] = curr + + return zip(hist[f][0], hist[f][1].splitlines(True)) + + def ancestor(self, fc2, actx=None): + """ + find the common ancestor file context, if any, of self, and fc2 + + If actx is given, it must be the changectx of the common ancestor + of self's and fc2's respective changesets. + """ + + if actx is None: + actx = self.changectx().ancestor(fc2.changectx()) + + # the trivial case: changesets are unrelated, files must be too + if not actx: + return None + + # the easy case: no (relevant) renames + if fc2.path() == self.path() and self.path() in actx: + return actx[self.path()] + acache = {} + + # prime the ancestor cache for the working directory + for c in (self, fc2): + if c._filerev is None: + pl = [(n.path(), n.filenode()) for n in c.parents()] + acache[(c._path, None)] = pl + + flcache = {self._repopath:self._filelog, fc2._repopath:fc2._filelog} + def parents(vertex): + if vertex in acache: + return acache[vertex] + f, n = vertex + if f not in flcache: + flcache[f] = self._repo.file(f) + fl = flcache[f] + pl = [(f, p) for p in fl.parents(n) if p != nullid] + re = fl.renamed(n) + if re: + pl.append(re) + acache[vertex] = pl + return pl + + a, b = (self._path, self._filenode), (fc2._path, fc2._filenode) + v = ancestor.ancestor(a, b, parents) + if v: + f, n = v + return filectx(self._repo, f, fileid=n, filelog=flcache[f]) + + return None + + def ancestors(self): + seen = set(str(self)) + visit = [self] + while visit: + for parent in visit.pop(0).parents(): + s = str(parent) + if s not in seen: + visit.append(parent) + seen.add(s) + yield parent + +class workingctx(changectx): + """A workingctx object makes access to data related to + the current working directory convenient. + date - any valid date string or (unixtime, offset), or None. 
+ user - username string, or None. + extra - a dictionary of extra values, or None. + changes - a list of file lists as returned by localrepo.status() + or None to use the repository status. + """ + def __init__(self, repo, text="", user=None, date=None, extra=None, + changes=None): + self._repo = repo + self._rev = None + self._node = None + self._text = text + if date: + self._date = util.parsedate(date) + if user: + self._user = user + if changes: + self._status = list(changes[:4]) + self._unknown = changes[4] + self._ignored = changes[5] + self._clean = changes[6] + else: + self._unknown = None + self._ignored = None + self._clean = None + + self._extra = {} + if extra: + self._extra = extra.copy() + if 'branch' not in self._extra: + branch = self._repo.dirstate.branch() + try: + branch = branch.decode('UTF-8').encode('UTF-8') + except UnicodeDecodeError: + raise util.Abort(_('branch name not in UTF-8!')) + self._extra['branch'] = branch + if self._extra['branch'] == '': + self._extra['branch'] = 'default' + + def __str__(self): + return str(self._parents[0]) + "+" + + def __nonzero__(self): + return True + + def __contains__(self, key): + return self._repo.dirstate[key] not in "?r" + + @propertycache + def _manifest(self): + """generate a manifest corresponding to the working directory""" + + if self._unknown is None: + self.status(unknown=True) + + man = self._parents[0].manifest().copy() + copied = self._repo.dirstate.copies() + if len(self._parents) > 1: + man2 = self.p2().manifest() + def getman(f): + if f in man: + return man + return man2 + else: + getman = lambda f: man + def cf(f): + f = copied.get(f, f) + return getman(f).flags(f) + ff = self._repo.dirstate.flagfunc(cf) + modified, added, removed, deleted = self._status + unknown = self._unknown + for i, l in (("a", added), ("m", modified), ("u", unknown)): + for f in l: + orig = copied.get(f, f) + man[f] = getman(orig).get(orig, nullid) + i + try: + man.set(f, ff(f)) + except OSError: + pass + + for 
f in deleted + removed: + if f in man: + del man[f] + + return man + + @propertycache + def _status(self): + return self._repo.status()[:4] + + @propertycache + def _user(self): + return self._repo.ui.username() + + @propertycache + def _date(self): + return util.makedate() + + @propertycache + def _parents(self): + p = self._repo.dirstate.parents() + if p[1] == nullid: + p = p[:-1] + self._parents = [changectx(self._repo, x) for x in p] + return self._parents + + def status(self, ignored=False, clean=False, unknown=False): + """Explicit status query + Unless this method is used to query the working copy status, the + _status property will implicitly read the status using its default + arguments.""" + stat = self._repo.status(ignored=ignored, clean=clean, unknown=unknown) + self._unknown = self._ignored = self._clean = None + if unknown: + self._unknown = stat[4] + if ignored: + self._ignored = stat[5] + if clean: + self._clean = stat[6] + self._status = stat[:4] + return stat + + def manifest(self): + return self._manifest + def user(self): + return self._user or self._repo.ui.username() + def date(self): + return self._date + def description(self): + return self._text + def files(self): + return sorted(self._status[0] + self._status[1] + self._status[2]) + + def modified(self): + return self._status[0] + def added(self): + return self._status[1] + def removed(self): + return self._status[2] + def deleted(self): + return self._status[3] + def unknown(self): + assert self._unknown is not None # must call status first + return self._unknown + def ignored(self): + assert self._ignored is not None # must call status first + return self._ignored + def clean(self): + assert self._clean is not None # must call status first + return self._clean + def branch(self): + return self._extra['branch'] + def extra(self): + return self._extra + + def tags(self): + t = [] + [t.extend(p.tags()) for p in self.parents()] + return t + + def children(self): + return [] + + def 
flags(self, path): + if '_manifest' in self.__dict__: + try: + return self._manifest.flags(path) + except KeyError: + return '' + + orig = self._repo.dirstate.copies().get(path, path) + + def findflag(ctx): + mnode = ctx.changeset()[0] + node, flag = self._repo.manifest.find(mnode, orig) + ff = self._repo.dirstate.flagfunc(lambda x: flag or '') + try: + return ff(path) + except OSError: + pass + + flag = findflag(self._parents[0]) + if flag is None and len(self.parents()) > 1: + flag = findflag(self._parents[1]) + if flag is None or self._repo.dirstate[path] == 'r': + return '' + return flag + + def filectx(self, path, filelog=None): + """get a file context from the working directory""" + return workingfilectx(self._repo, path, workingctx=self, + filelog=filelog) + + def ancestor(self, c2): + """return the ancestor context of self and c2""" + return self._parents[0].ancestor(c2) # punt on two parents for now + + def walk(self, match): + return sorted(self._repo.dirstate.walk(match, self.substate.keys(), + True, False)) + + def dirty(self, missing=False): + "check whether a working directory is modified" + # check subrepos first + for s in self.substate: + if self.sub(s).dirty(): + return True + # check current working dir + return (self.p2() or self.branch() != self.p1().branch() or + self.modified() or self.added() or self.removed() or + (missing and self.deleted())) + + def add(self, list, prefix=""): + join = lambda f: os.path.join(prefix, f) + wlock = self._repo.wlock() + ui, ds = self._repo.ui, self._repo.dirstate + try: + rejected = [] + for f in list: + p = self._repo.wjoin(f) + try: + st = os.lstat(p) + except: + ui.warn(_("%s does not exist!\n") % join(f)) + rejected.append(f) + continue + if st.st_size > 10000000: + ui.warn(_("%s: up to %d MB of RAM may be required " + "to manage this file\n" + "(use 'hg revert %s' to cancel the " + "pending addition)\n") + % (f, 3 * st.st_size // 1000000, join(f))) + if not (stat.S_ISREG(st.st_mode) or 
stat.S_ISLNK(st.st_mode)): + ui.warn(_("%s not added: only files and symlinks " + "supported currently\n") % join(f)) + rejected.append(p) + elif ds[f] in 'amn': + ui.warn(_("%s already tracked!\n") % join(f)) + elif ds[f] == 'r': + ds.normallookup(f) + else: + ds.add(f) + return rejected + finally: + wlock.release() + + def forget(self, list): + wlock = self._repo.wlock() + try: + for f in list: + if self._repo.dirstate[f] != 'a': + self._repo.ui.warn(_("%s not added!\n") % f) + else: + self._repo.dirstate.forget(f) + finally: + wlock.release() + + def ancestors(self): + for a in self._repo.changelog.ancestors( + *[p.rev() for p in self._parents]): + yield changectx(self._repo, a) + + def remove(self, list, unlink=False): + if unlink: + for f in list: + try: + util.unlink(self._repo.wjoin(f)) + except OSError, inst: + if inst.errno != errno.ENOENT: + raise + wlock = self._repo.wlock() + try: + for f in list: + if unlink and os.path.lexists(self._repo.wjoin(f)): + self._repo.ui.warn(_("%s still exists!\n") % f) + elif self._repo.dirstate[f] == 'a': + self._repo.dirstate.forget(f) + elif f not in self._repo.dirstate: + self._repo.ui.warn(_("%s not tracked!\n") % f) + else: + self._repo.dirstate.remove(f) + finally: + wlock.release() + + def undelete(self, list): + pctxs = self.parents() + wlock = self._repo.wlock() + try: + for f in list: + if self._repo.dirstate[f] != 'r': + self._repo.ui.warn(_("%s not removed!\n") % f) + else: + fctx = f in pctxs[0] and pctxs[0][f] or pctxs[1][f] + t = fctx.data() + self._repo.wwrite(f, t, fctx.flags()) + self._repo.dirstate.normal(f) + finally: + wlock.release() + + def copy(self, source, dest): + p = self._repo.wjoin(dest) + if not os.path.lexists(p): + self._repo.ui.warn(_("%s does not exist!\n") % dest) + elif not (os.path.isfile(p) or os.path.islink(p)): + self._repo.ui.warn(_("copy failed: %s is not a file or a " + "symbolic link\n") % dest) + else: + wlock = self._repo.wlock() + try: + if self._repo.dirstate[dest] in '?r': 
+ self._repo.dirstate.add(dest) + self._repo.dirstate.copy(source, dest) + finally: + wlock.release() + +class workingfilectx(filectx): + """A workingfilectx object makes access to data related to a particular + file in the working directory convenient.""" + def __init__(self, repo, path, filelog=None, workingctx=None): + """changeid can be a changeset revision, node, or tag. + fileid can be a file revision or node.""" + self._repo = repo + self._path = path + self._changeid = None + self._filerev = self._filenode = None + + if filelog: + self._filelog = filelog + if workingctx: + self._changectx = workingctx + + @propertycache + def _changectx(self): + return workingctx(self._repo) + + def __nonzero__(self): + return True + + def __str__(self): + return "%s@%s" % (self.path(), self._changectx) + + def data(self): + return self._repo.wread(self._path) + def renamed(self): + rp = self._repo.dirstate.copied(self._path) + if not rp: + return None + return rp, self._changectx._parents[0]._manifest.get(rp, nullid) + + def parents(self): + '''return parent filectxs, following copies if necessary''' + def filenode(ctx, path): + return ctx._manifest.get(path, nullid) + + path = self._path + fl = self._filelog + pcl = self._changectx._parents + renamed = self.renamed() + + if renamed: + pl = [renamed + (None,)] + else: + pl = [(path, filenode(pcl[0], path), fl)] + + for pc in pcl[1:]: + pl.append((path, filenode(pc, path), fl)) + + return [filectx(self._repo, p, fileid=n, filelog=l) + for p, n, l in pl if n != nullid] + + def children(self): + return [] + + def size(self): + return os.lstat(self._repo.wjoin(self._path)).st_size + def date(self): + t, tz = self._changectx.date() + try: + return (int(os.lstat(self._repo.wjoin(self._path)).st_mtime), tz) + except OSError, err: + if err.errno != errno.ENOENT: + raise + return (t, tz) + + def cmp(self, fctx): + """compare with other file context + + returns True if different than fctx. 
+ """ + # fctx should be a filectx (not a wfctx) + # invert comparison to reuse the same code path + return fctx.cmp(self) + +class memctx(object): + """Use memctx to perform in-memory commits via localrepo.commitctx(). + + Revision information is supplied at initialization time while + related files data and is made available through a callback + mechanism. 'repo' is the current localrepo, 'parents' is a + sequence of two parent revisions identifiers (pass None for every + missing parent), 'text' is the commit message and 'files' lists + names of files touched by the revision (normalized and relative to + repository root). + + filectxfn(repo, memctx, path) is a callable receiving the + repository, the current memctx object and the normalized path of + requested file, relative to repository root. It is fired by the + commit function for every file in 'files', but calls order is + undefined. If the file is available in the revision being + committed (updated or added), filectxfn returns a memfilectx + object. If the file was removed, filectxfn raises an + IOError. Moved files are represented by marking the source file + removed and the new file added with copy information (see + memfilectx). + + user receives the committer name and defaults to current + repository username, date is the commit date in any format + supported by util.parsedate() and defaults to current date, extra + is a dictionary of metadata or is left empty. 
+ """ + def __init__(self, repo, parents, text, files, filectxfn, user=None, + date=None, extra=None): + self._repo = repo + self._rev = None + self._node = None + self._text = text + self._date = date and util.parsedate(date) or util.makedate() + self._user = user + parents = [(p or nullid) for p in parents] + p1, p2 = parents + self._parents = [changectx(self._repo, p) for p in (p1, p2)] + files = sorted(set(files)) + self._status = [files, [], [], [], []] + self._filectxfn = filectxfn + + self._extra = extra and extra.copy() or {} + if 'branch' not in self._extra: + self._extra['branch'] = 'default' + elif self._extra.get('branch') == '': + self._extra['branch'] = 'default' + + def __str__(self): + return str(self._parents[0]) + "+" + + def __int__(self): + return self._rev + + def __nonzero__(self): + return True + + def __getitem__(self, key): + return self.filectx(key) + + def p1(self): + return self._parents[0] + def p2(self): + return self._parents[1] + + def user(self): + return self._user or self._repo.ui.username() + def date(self): + return self._date + def description(self): + return self._text + def files(self): + return self.modified() + def modified(self): + return self._status[0] + def added(self): + return self._status[1] + def removed(self): + return self._status[2] + def deleted(self): + return self._status[3] + def unknown(self): + return self._status[4] + def ignored(self): + return self._status[5] + def clean(self): + return self._status[6] + def branch(self): + return self._extra['branch'] + def extra(self): + return self._extra + def flags(self, f): + return self[f].flags() + + def parents(self): + """return contexts for each parent changeset""" + return self._parents + + def filectx(self, path, filelog=None): + """get a file context from the working directory""" + return self._filectxfn(self._repo, self, path) + + def commit(self): + """commit context to the repo""" + return self._repo.commitctx(self) + +class memfilectx(object): + 
"""memfilectx represents an in-memory file to commit. + + See memctx for more details. + """ + def __init__(self, path, data, islink=False, isexec=False, copied=None): + """ + path is the normalized file path relative to repository root. + data is the file content as a string. + islink is True if the file is a symbolic link. + isexec is True if the file is executable. + copied is the source file path if current file was copied in the + revision being committed, or None.""" + self._path = path + self._data = data + self._flags = (islink and 'l' or '') + (isexec and 'x' or '') + self._copied = None + if copied: + self._copied = (copied, nullid) + + def __nonzero__(self): + return True + def __str__(self): + return "%s@%s" % (self.path(), self._changectx) + def path(self): + return self._path + def data(self): + return self._data + def flags(self): + return self._flags + def isexec(self): + return 'x' in self._flags + def islink(self): + return 'l' in self._flags + def renamed(self): + return self._copied diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/context.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/context.pyo Binary files differnew file mode 100644 index 0000000..f6da42c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/context.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/copies.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/copies.py new file mode 100644 index 0000000..05342e1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/copies.py @@ -0,0 +1,267 @@ +# copies.py - copy detection for Mercurial +# +# Copyright 2008 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import util +import heapq + +def _nonoverlap(d1, d2, d3): + "Return list of elements in d1 not in d2 or d3" + return sorted([d for d in d1 if d not in d3 and d not in d2]) + +def _dirname(f): + s = f.rfind("/") + if s == -1: + return "" + return f[:s] + +def _dirs(files): + d = set() + for f in files: + f = _dirname(f) + while f not in d: + d.add(f) + f = _dirname(f) + return d + +def _findlimit(repo, a, b): + """Find the earliest revision that's an ancestor of a or b but not both, + None if no such revision exists. + """ + # basic idea: + # - mark a and b with different sides + # - if a parent's children are all on the same side, the parent is + # on that side, otherwise it is on no side + # - walk the graph in topological order with the help of a heap; + # - add unseen parents to side map + # - clear side of any parent that has children on different sides + # - track number of interesting revs that might still be on a side + # - track the lowest interesting rev seen + # - quit when interesting revs is zero + + cl = repo.changelog + working = len(cl) # pseudo rev for the working directory + if a is None: + a = working + if b is None: + b = working + + side = {a: -1, b: 1} + visit = [-a, -b] + heapq.heapify(visit) + interesting = len(visit) + hascommonancestor = False + limit = working + + while interesting: + r = -heapq.heappop(visit) + if r == working: + parents = [cl.rev(p) for p in repo.dirstate.parents()] + else: + parents = cl.parentrevs(r) + for p in parents: + if p < 0: + continue + if p not in side: + # first time we see p; add it to visit + side[p] = side[r] + if side[p]: + interesting += 1 + heapq.heappush(visit, -p) + elif side[p] and side[p] != side[r]: + # p was interesting but now we know better + side[p] = 0 + interesting -= 1 + hascommonancestor = True + if side[r]: + limit = r # lowest rev visited + interesting -= 1 + + if not hascommonancestor: + return None + return limit + +def copies(repo, c1, c2, ca, checkdirs=False): + """ + Find moves 
and copies between context c1 and c2 + """ + # avoid silly behavior for update from empty dir + if not c1 or not c2 or c1 == c2: + return {}, {} + + # avoid silly behavior for parent -> working dir + if c2.node() is None and c1.node() == repo.dirstate.parents()[0]: + return repo.dirstate.copies(), {} + + limit = _findlimit(repo, c1.rev(), c2.rev()) + if limit is None: + # no common ancestor, no copies + return {}, {} + m1 = c1.manifest() + m2 = c2.manifest() + ma = ca.manifest() + + def makectx(f, n): + if len(n) != 20: # in a working context? + if c1.rev() is None: + return c1.filectx(f) + return c2.filectx(f) + return repo.filectx(f, fileid=n) + + ctx = util.lrucachefunc(makectx) + copy = {} + fullcopy = {} + diverge = {} + + def related(f1, f2, limit): + # Walk back to common ancestor to see if the two files originate + # from the same file. Since workingfilectx's rev() is None it messes + # up the integer comparison logic, hence the pre-step check for + # None (f1 and f2 can only be workingfilectx's initially). 
+ + if f1 == f2: + return f1 # a match + + g1, g2 = f1.ancestors(), f2.ancestors() + try: + f1r, f2r = f1.rev(), f2.rev() + + if f1r is None: + f1 = g1.next() + if f2r is None: + f2 = g2.next() + + while 1: + f1r, f2r = f1.rev(), f2.rev() + if f1r > f2r: + f1 = g1.next() + elif f2r > f1r: + f2 = g2.next() + elif f1 == f2: + return f1 # a match + elif f1r == f2r or f1r < limit or f2r < limit: + return False # copy no longer relevant + except StopIteration: + return False + + def checkcopies(f, m1, m2): + '''check possible copies of f from m1 to m2''' + of = None + seen = set([f]) + for oc in ctx(f, m1[f]).ancestors(): + ocr = oc.rev() + of = oc.path() + if of in seen: + # check limit late - grab last rename before + if ocr < limit: + break + continue + seen.add(of) + + fullcopy[f] = of # remember for dir rename detection + if of not in m2: + continue # no match, keep looking + if m2[of] == ma.get(of): + break # no merge needed, quit early + c2 = ctx(of, m2[of]) + cr = related(oc, c2, ca.rev()) + if cr and (of == f or of == c2.path()): # non-divergent + copy[f] = of + of = None + break + + if of in ma: + diverge.setdefault(of, []).append(f) + + repo.ui.debug(" searching for copies back to rev %d\n" % limit) + + u1 = _nonoverlap(m1, m2, ma) + u2 = _nonoverlap(m2, m1, ma) + + if u1: + repo.ui.debug(" unmatched files in local:\n %s\n" + % "\n ".join(u1)) + if u2: + repo.ui.debug(" unmatched files in other:\n %s\n" + % "\n ".join(u2)) + + for f in u1: + checkcopies(f, m1, m2) + for f in u2: + checkcopies(f, m2, m1) + + diverge2 = set() + for of, fl in diverge.items(): + if len(fl) == 1 or of in c2: + del diverge[of] # not actually divergent, or not a rename + else: + diverge2.update(fl) # reverse map for below + + if fullcopy: + repo.ui.debug(" all copies found (* = to merge, ! = divergent):\n") + for f in fullcopy: + note = "" + if f in copy: + note += "*" + if f in diverge2: + note += "!" 
+ repo.ui.debug(" %s -> %s %s\n" % (f, fullcopy[f], note)) + del diverge2 + + if not fullcopy or not checkdirs: + return copy, diverge + + repo.ui.debug(" checking for directory renames\n") + + # generate a directory move map + d1, d2 = _dirs(m1), _dirs(m2) + invalid = set() + dirmove = {} + + # examine each file copy for a potential directory move, which is + # when all the files in a directory are moved to a new directory + for dst, src in fullcopy.iteritems(): + dsrc, ddst = _dirname(src), _dirname(dst) + if dsrc in invalid: + # already seen to be uninteresting + continue + elif dsrc in d1 and ddst in d1: + # directory wasn't entirely moved locally + invalid.add(dsrc) + elif dsrc in d2 and ddst in d2: + # directory wasn't entirely moved remotely + invalid.add(dsrc) + elif dsrc in dirmove and dirmove[dsrc] != ddst: + # files from the same directory moved to two different places + invalid.add(dsrc) + else: + # looks good so far + dirmove[dsrc + "/"] = ddst + "/" + + for i in invalid: + if i in dirmove: + del dirmove[i] + del d1, d2, invalid + + if not dirmove: + return copy, diverge + + for d in dirmove: + repo.ui.debug(" dir %s -> %s\n" % (d, dirmove[d])) + + # check unaccounted nonoverlapping files against directory moves + for f in u1 + u2: + if f not in fullcopy: + for d in dirmove: + if f.startswith(d): + # new file added in a directory that was moved, move it + df = dirmove[d] + f[len(d):] + if df not in copy: + copy[f] = df + repo.ui.debug(" file %s -> %s\n" % (f, copy[f])) + break + + return copy, diverge diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/copies.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/copies.pyo Binary files differnew file mode 100644 index 0000000..c69481f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/copies.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dagparser.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dagparser.py new file mode 
100644 index 0000000..e02faa5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dagparser.py @@ -0,0 +1,474 @@ +# dagparser.py - parser and generator for concise description of DAGs +# +# Copyright 2010 Peter Arrenbrecht <peter@arrenbrecht.ch> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import re, string +import util +from i18n import _ + +def parsedag(desc): + '''parses a DAG from a concise textual description; generates events + + "+n" is a linear run of n nodes based on the current default parent + "." is a single node based on the current default parent + "$" resets the default parent to -1 (implied at the start); + otherwise the default parent is always the last node created + "<p" sets the default parent to the backref p + "*p" is a fork at parent p, where p is a backref + "*p1/p2/.../pn" is a merge of parents p1..pn, where the pi are backrefs + "/p2/.../pn" is a merge of the preceding node and p2..pn + ":name" defines a label for the preceding node; labels can be redefined + "@text" emits an annotation event for text + "!command" emits an action event for the current node + "!!my command\n" is like "!", but to the end of the line + "#...\n" is a comment up to the end of the line + + Whitespace between the above elements is ignored. + + A backref is either + * a number n, which references the node curr-n, where curr is the current + node, or + * the name of a label you placed earlier using ":name", or + * empty to denote the default parent. + + All string valued-elements are either strictly alphanumeric, or must + be enclosed in double quotes ("..."), with "\" as escape character. + + Generates sequence of + + ('n', (id, [parentids])) for node creation + ('l', (id, labelname)) for labels on nodes + ('a', text) for annotations + ('c', command) for actions (!) + ('C', command) for line actions (!!) 
+ + Examples + -------- + + Example of a complex graph (output not shown for brevity): + + >>> len(list(parsedag(""" + ... + ... +3 # 3 nodes in linear run + ... :forkhere # a label for the last of the 3 nodes from above + ... +5 # 5 more nodes on one branch + ... :mergethis # label again + ... <forkhere # set default parent to labelled fork node + ... +10 # 10 more nodes on a parallel branch + ... @stable # following nodes will be annotated as "stable" + ... +5 # 5 nodes in stable + ... !addfile # custom command; could trigger new file in next node + ... +2 # two more nodes + ... /mergethis # merge last node with labelled node + ... +4 # 4 more nodes descending from merge node + ... + ... """))) + 34 + + Empty list: + + >>> list(parsedag("")) + [] + + A simple linear run: + + >>> list(parsedag("+3")) + [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] + + Some non-standard ways to define such runs: + + >>> list(parsedag("+1+2")) + [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] + + >>> list(parsedag("+1*1*")) + [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] + + >>> list(parsedag("*")) + [('n', (0, [-1]))] + + >>> list(parsedag("...")) + [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [1]))] + + A fork and a join, using numeric back references: + + >>> list(parsedag("+2*2*/2")) + [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), ('n', (3, [2, 1]))] + + >>> list(parsedag("+2<2+1/2")) + [('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), ('n', (3, [2, 1]))] + + Placing a label: + + >>> list(parsedag("+1 :mylabel +1")) + [('n', (0, [-1])), ('l', (0, 'mylabel')), ('n', (1, [0]))] + + An empty label (silly, really): + + >>> list(parsedag("+1:+1")) + [('n', (0, [-1])), ('l', (0, '')), ('n', (1, [0]))] + + Fork and join, but with labels instead of numeric back references: + + >>> list(parsedag("+1:f +1:p2 *f */p2")) + [('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])), ('l', (1, 'p2')), + ('n', (2, [0])), ('n', (3, [2, 1]))] + + >>> list(parsedag("+1:f 
+1:p2 <f +1 /p2")) + [('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])), ('l', (1, 'p2')), + ('n', (2, [0])), ('n', (3, [2, 1]))] + + Restarting from the root: + + >>> list(parsedag("+1 $ +1")) + [('n', (0, [-1])), ('n', (1, [-1]))] + + Annotations, which are meant to introduce sticky state for subsequent nodes: + + >>> list(parsedag("+1 @ann +1")) + [('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))] + + >>> list(parsedag('+1 @"my annotation" +1')) + [('n', (0, [-1])), ('a', 'my annotation'), ('n', (1, [0]))] + + Commands, which are meant to operate on the most recently created node: + + >>> list(parsedag("+1 !cmd +1")) + [('n', (0, [-1])), ('c', 'cmd'), ('n', (1, [0]))] + + >>> list(parsedag('+1 !"my command" +1')) + [('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))] + + >>> list(parsedag('+1 !!my command line\\n +1')) + [('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))] + + Comments, which extend to the end of the line: + + >>> list(parsedag('+1 # comment\\n+1')) + [('n', (0, [-1])), ('n', (1, [0]))] + + Error: + + >>> try: list(parsedag('+1 bad')) + ... except Exception, e: print e + invalid character in dag description: bad... 
+ + ''' + if not desc: + return + + wordchars = string.ascii_letters + string.digits + + labels = {} + p1 = -1 + r = 0 + + def resolve(ref): + if not ref: + return p1 + elif ref[0] in string.digits: + return r - int(ref) + else: + return labels[ref] + + chiter = (c for c in desc) + + def nextch(): + try: + return chiter.next() + except StopIteration: + return '\0' + + def nextrun(c, allow): + s = '' + while c in allow: + s += c + c = nextch() + return c, s + + def nextdelimited(c, limit, escape): + s = '' + while c != limit: + if c == escape: + c = nextch() + s += c + c = nextch() + return nextch(), s + + def nextstring(c): + if c == '"': + return nextdelimited(nextch(), '"', '\\') + else: + return nextrun(c, wordchars) + + c = nextch() + while c != '\0': + while c in string.whitespace: + c = nextch() + if c == '.': + yield 'n', (r, [p1]) + p1 = r + r += 1 + c = nextch() + elif c == '+': + c, digs = nextrun(nextch(), string.digits) + n = int(digs) + for i in xrange(0, n): + yield 'n', (r, [p1]) + p1 = r + r += 1 + elif c in '*/': + if c == '*': + c = nextch() + c, pref = nextstring(c) + prefs = [pref] + while c == '/': + c, pref = nextstring(nextch()) + prefs.append(pref) + ps = [resolve(ref) for ref in prefs] + yield 'n', (r, ps) + p1 = r + r += 1 + elif c == '<': + c, ref = nextstring(nextch()) + p1 = resolve(ref) + elif c == ':': + c, name = nextstring(nextch()) + labels[name] = p1 + yield 'l', (p1, name) + elif c == '@': + c, text = nextstring(nextch()) + yield 'a', text + elif c == '!': + c = nextch() + if c == '!': + cmd = '' + c = nextch() + while c not in '\n\r\0': + cmd += c + c = nextch() + yield 'C', cmd + else: + c, cmd = nextstring(c) + yield 'c', cmd + elif c == '#': + while c not in '\n\r\0': + c = nextch() + elif c == '$': + p1 = -1 + c = nextch() + elif c == '\0': + return # in case it was preceded by whitespace + else: + s = '' + i = 0 + while c != '\0' and i < 10: + s += c + i += 1 + c = nextch() + raise util.Abort(_("invalid character in dag 
description: %s...") % s) + +def dagtextlines(events, + addspaces=True, + wraplabels=False, + wrapannotations=False, + wrapcommands=False, + wrapnonlinear=False, + usedots=False, + maxlinewidth=70): + '''generates single lines for dagtext()''' + + def wrapstring(text): + if re.match("^[0-9a-z]*$", text): + return text + return '"' + text.replace('\\', '\\\\').replace('"', '\"') + '"' + + def gen(): + labels = {} + run = 0 + wantr = 0 + needroot = False + for kind, data in events: + if kind == 'n': + r, ps = data + + # sanity check + if r != wantr: + raise util.Abort(_("expected id %i, got %i") % (wantr, r)) + if not ps: + ps = [-1] + else: + for p in ps: + if p >= r: + raise util.Abort(_("parent id %i is larger than " + "current id %i") % (p, r)) + wantr += 1 + + # new root? + p1 = r - 1 + if len(ps) == 1 and ps[0] == -1: + if needroot: + if run: + yield '+' + str(run) + run = 0 + if wrapnonlinear: + yield '\n' + yield '$' + p1 = -1 + else: + needroot = True + if len(ps) == 1 and ps[0] == p1: + if usedots: + yield "." + else: + run += 1 + else: + if run: + yield '+' + str(run) + run = 0 + if wrapnonlinear: + yield '\n' + prefs = [] + for p in ps: + if p == p1: + prefs.append('') + elif p in labels: + prefs.append(labels[p]) + else: + prefs.append(str(r - p)) + yield '*' + '/'.join(prefs) + else: + if run: + yield '+' + str(run) + run = 0 + if kind == 'l': + rid, name = data + labels[rid] = name + yield ':' + name + if wraplabels: + yield '\n' + elif kind == 'c': + yield '!' + wrapstring(data) + if wrapcommands: + yield '\n' + elif kind == 'C': + yield '!!' 
+ data + yield '\n' + elif kind == 'a': + if wrapannotations: + yield '\n' + yield '@' + wrapstring(data) + elif kind == '#': + yield '#' + data + yield '\n' + else: + raise util.Abort(_("invalid event type in dag: %s") + % str((type, data))) + if run: + yield '+' + str(run) + + line = '' + for part in gen(): + if part == '\n': + if line: + yield line + line = '' + else: + if len(line) + len(part) >= maxlinewidth: + yield line + line = '' + elif addspaces and line and part != '.': + line += ' ' + line += part + if line: + yield line + +def dagtext(dag, + addspaces=True, + wraplabels=False, + wrapannotations=False, + wrapcommands=False, + wrapnonlinear=False, + usedots=False, + maxlinewidth=70): + '''generates lines of a textual representation for a dag event stream + + events should generate what parsedag() does, so: + + ('n', (id, [parentids])) for node creation + ('l', (id, labelname)) for labels on nodes + ('a', text) for annotations + ('c', text) for commands + ('C', text) for line commands ('!!') + ('#', text) for comment lines + + Parent nodes must come before child nodes. + + Examples + -------- + + Linear run: + + >>> dagtext([('n', (0, [-1])), ('n', (1, [0]))]) + '+2' + + Two roots: + + >>> dagtext([('n', (0, [-1])), ('n', (1, [-1]))]) + '+1 $ +1' + + Fork and join: + + >>> dagtext([('n', (0, [-1])), ('n', (1, [0])), ('n', (2, [0])), + ... ('n', (3, [2, 1]))]) + '+2 *2 */2' + + Fork and join with labels: + + >>> dagtext([('n', (0, [-1])), ('l', (0, 'f')), ('n', (1, [0])), + ... 
('l', (1, 'p2')), ('n', (2, [0])), ('n', (3, [2, 1]))]) + '+1 :f +1 :p2 *f */p2' + + Annotations: + + >>> dagtext([('n', (0, [-1])), ('a', 'ann'), ('n', (1, [0]))]) + '+1 @ann +1' + + >>> dagtext([('n', (0, [-1])), ('a', 'my annotation'), ('n', (1, [0]))]) + '+1 @"my annotation" +1' + + Commands: + + >>> dagtext([('n', (0, [-1])), ('c', 'cmd'), ('n', (1, [0]))]) + '+1 !cmd +1' + + >>> dagtext([('n', (0, [-1])), ('c', 'my command'), ('n', (1, [0]))]) + '+1 !"my command" +1' + + >>> dagtext([('n', (0, [-1])), ('C', 'my command line'), ('n', (1, [0]))]) + '+1 !!my command line\\n+1' + + Comments: + + >>> dagtext([('n', (0, [-1])), ('#', ' comment'), ('n', (1, [0]))]) + '+1 # comment\\n+1' + + >>> dagtext([]) + '' + + Combining parsedag and dagtext: + + >>> dagtext(parsedag('+1 :f +1 :p2 *f */p2')) + '+1 :f +1 :p2 *f */p2' + + ''' + return "\n".join(dagtextlines(dag, + addspaces, + wraplabels, + wrapannotations, + wrapcommands, + wrapnonlinear, + usedots, + maxlinewidth)) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dagparser.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dagparser.pyo Binary files differnew file mode 100644 index 0000000..1beb9cb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dagparser.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/demandimport.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/demandimport.py new file mode 100644 index 0000000..cee569b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/demandimport.py @@ -0,0 +1,146 @@ +# demandimport.py - global demand-loading of modules for Mercurial +# +# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +''' +demandimport - automatic demandloading of modules + +To enable this module, do: + + import demandimport; demandimport.enable() + +Imports of the following forms will be demand-loaded: + + import a, b.c + import a.b as c + from a import b,c # a will be loaded immediately + +These imports will not be delayed: + + from a import * + b = __import__(a) +''' + +import __builtin__ +_origimport = __import__ + +class _demandmod(object): + """module demand-loader and proxy""" + def __init__(self, name, globals, locals): + if '.' in name: + head, rest = name.split('.', 1) + after = [rest] + else: + head = name + after = [] + object.__setattr__(self, "_data", (head, globals, locals, after)) + object.__setattr__(self, "_module", None) + def _extend(self, name): + """add to the list of submodules to load""" + self._data[3].append(name) + def _load(self): + if not self._module: + head, globals, locals, after = self._data + mod = _origimport(head, globals, locals) + # load submodules + def subload(mod, p): + h, t = p, None + if '.' in p: + h, t = p.split('.', 1) + if not hasattr(mod, h): + setattr(mod, h, _demandmod(p, mod.__dict__, mod.__dict__)) + elif t: + subload(getattr(mod, h), t) + + for x in after: + subload(mod, x) + + # are we in the locals dictionary still? 
+ if locals and locals.get(head) == self: + locals[head] = mod + object.__setattr__(self, "_module", mod) + + def __repr__(self): + if self._module: + return "<proxied module '%s'>" % self._data[0] + return "<unloaded module '%s'>" % self._data[0] + def __call__(self, *args, **kwargs): + raise TypeError("%s object is not callable" % repr(self)) + def __getattribute__(self, attr): + if attr in ('_data', '_extend', '_load', '_module'): + return object.__getattribute__(self, attr) + self._load() + return getattr(self._module, attr) + def __setattr__(self, attr, val): + self._load() + setattr(self._module, attr, val) + +def _demandimport(name, globals=None, locals=None, fromlist=None, level=None): + if not locals or name in ignore or fromlist == ('*',): + # these cases we can't really delay + if level is None: + return _origimport(name, globals, locals, fromlist) + else: + return _origimport(name, globals, locals, fromlist, level) + elif not fromlist: + # import a [as b] + if '.' in name: # a.b + base, rest = name.split('.', 1) + # email.__init__ loading email.mime + if globals and globals.get('__name__', None) == base: + return _origimport(name, globals, locals, fromlist) + # if a is already demand-loaded, add b to its submodule list + if base in locals: + if isinstance(locals[base], _demandmod): + locals[base]._extend(rest) + return locals[base] + return _demandmod(name, globals, locals) + else: + if level is not None: + # from . 
import b,c,d or from .a import b,c,d + return _origimport(name, globals, locals, fromlist, level) + # from a import b,c,d + mod = _origimport(name, globals, locals) + # recurse down the module chain + for comp in name.split('.')[1:]: + if not hasattr(mod, comp): + setattr(mod, comp, _demandmod(comp, mod.__dict__, mod.__dict__)) + mod = getattr(mod, comp) + for x in fromlist: + # set requested submodules for demand load + if not(hasattr(mod, x)): + setattr(mod, x, _demandmod(x, mod.__dict__, locals)) + return mod + +ignore = [ + '_hashlib', + '_xmlplus', + 'fcntl', + 'win32com.gen_py', + '_winreg', # 2.7 mimetypes needs immediate ImportError + 'pythoncom', + # imported by tarfile, not available under Windows + 'pwd', + 'grp', + # imported by profile, itself imported by hotshot.stats, + # not available under Windows + 'resource', + # this trips up many extension authors + 'gtk', + # setuptools' pkg_resources.py expects "from __main__ import x" to + # raise ImportError if x not defined + '__main__', + '_ssl', # conditional imports in the stdlib, issue1964 + ] + +def enable(): + "enable global demand-loading of modules" + __builtin__.__import__ = _demandimport + +def disable(): + "disable global demand-loading of modules" + __builtin__.__import__ = _origimport + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/demandimport.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/demandimport.pyo Binary files differnew file mode 100644 index 0000000..eb34c3c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/demandimport.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.py new file mode 100644 index 0000000..b92bb45 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.py @@ -0,0 +1,7 @@ +def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + 
__file__ = pkg_resources.resource_filename(__name__,'diffhelpers.so') + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) +__bootstrap__() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.pyo Binary files differnew file mode 100644 index 0000000..4e81b43 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.so b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.so Binary files differnew file mode 100755 index 0000000..294c32e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/diffhelpers.so diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dirstate.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dirstate.py new file mode 100644 index 0000000..df27f87 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dirstate.py @@ -0,0 +1,681 @@ +# dirstate.py - working directory tracking for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from node import nullid +from i18n import _ +import util, ignore, osutil, parsers +import struct, os, stat, errno +import cStringIO + +_format = ">cllll" +propertycache = util.propertycache + +def _finddirs(path): + pos = path.rfind('/') + while pos != -1: + yield path[:pos] + pos = path.rfind('/', 0, pos) + +def _incdirs(dirs, path): + for base in _finddirs(path): + if base in dirs: + dirs[base] += 1 + return + dirs[base] = 1 + +def _decdirs(dirs, path): + for base in _finddirs(path): + if dirs[base] > 1: + dirs[base] -= 1 + return + del dirs[base] + +class dirstate(object): + + def __init__(self, opener, ui, root): + '''Create a new dirstate object. + + opener is an open()-like callable that can be used to open the + dirstate file; root is the root of the directory tracked by + the dirstate. + ''' + self._opener = opener + self._root = root + self._rootdir = os.path.join(root, '') + self._dirty = False + self._dirtypl = False + self._ui = ui + + @propertycache + def _map(self): + '''Return the dirstate contents as a map from filename to + (state, mode, size, time).''' + self._read() + return self._map + + @propertycache + def _copymap(self): + self._read() + return self._copymap + + @propertycache + def _foldmap(self): + f = {} + for name in self._map: + f[os.path.normcase(name)] = name + return f + + @propertycache + def _branch(self): + try: + return self._opener("branch").read().strip() or "default" + except IOError: + return "default" + + @propertycache + def _pl(self): + try: + st = self._opener("dirstate").read(40) + l = len(st) + if l == 40: + return st[:20], st[20:40] + elif l > 0 and l < 40: + raise util.Abort(_('working directory state appears damaged!')) + except IOError, err: + if err.errno != errno.ENOENT: + raise + return [nullid, nullid] + + @propertycache + def _dirs(self): + dirs = {} + for f, s in self._map.iteritems(): + if s[0] != 'r': + _incdirs(dirs, f) + return dirs + + @propertycache + def _ignore(self): + files = 
[self._join('.hgignore')] + for name, path in self._ui.configitems("ui"): + if name == 'ignore' or name.startswith('ignore.'): + files.append(util.expandpath(path)) + return ignore.ignore(self._root, files, self._ui.warn) + + @propertycache + def _slash(self): + return self._ui.configbool('ui', 'slash') and os.sep != '/' + + @propertycache + def _checklink(self): + return util.checklink(self._root) + + @propertycache + def _checkexec(self): + return util.checkexec(self._root) + + @propertycache + def _checkcase(self): + return not util.checkcase(self._join('.hg')) + + def _join(self, f): + # much faster than os.path.join() + # it's safe because f is always a relative path + return self._rootdir + f + + def flagfunc(self, fallback): + if self._checklink: + if self._checkexec: + def f(x): + p = self._join(x) + if os.path.islink(p): + return 'l' + if util.is_exec(p): + return 'x' + return '' + return f + def f(x): + if os.path.islink(self._join(x)): + return 'l' + if 'x' in fallback(x): + return 'x' + return '' + return f + if self._checkexec: + def f(x): + if 'l' in fallback(x): + return 'l' + if util.is_exec(self._join(x)): + return 'x' + return '' + return f + return fallback + + def getcwd(self): + cwd = os.getcwd() + if cwd == self._root: + return '' + # self._root ends with a path separator if self._root is '/' or 'C:\' + rootsep = self._root + if not util.endswithsep(rootsep): + rootsep += os.sep + if cwd.startswith(rootsep): + return cwd[len(rootsep):] + else: + # we're outside the repo. return an absolute path. + return cwd + + def pathto(self, f, cwd=None): + if cwd is None: + cwd = self.getcwd() + path = util.pathto(self._root, cwd, f) + if self._slash: + return util.normpath(path) + return path + + def __getitem__(self, key): + '''Return the current state of key (a filename) in the dirstate. + + States are: + n normal + m needs merging + r marked for removal + a marked for addition + ? 
not tracked + ''' + return self._map.get(key, ("?",))[0] + + def __contains__(self, key): + return key in self._map + + def __iter__(self): + for x in sorted(self._map): + yield x + + def parents(self): + return self._pl + + def branch(self): + return self._branch + + def setparents(self, p1, p2=nullid): + self._dirty = self._dirtypl = True + self._pl = p1, p2 + + def setbranch(self, branch): + if branch in ['tip', '.', 'null']: + raise util.Abort(_('the name \'%s\' is reserved') % branch) + self._branch = branch + self._opener("branch", "w").write(branch + '\n') + + def _read(self): + self._map = {} + self._copymap = {} + try: + st = self._opener("dirstate").read() + except IOError, err: + if err.errno != errno.ENOENT: + raise + return + if not st: + return + + p = parsers.parse_dirstate(self._map, self._copymap, st) + if not self._dirtypl: + self._pl = p + + def invalidate(self): + for a in "_map _copymap _foldmap _branch _pl _dirs _ignore".split(): + if a in self.__dict__: + delattr(self, a) + self._dirty = False + + def copy(self, source, dest): + """Mark dest as a copy of source. 
Unmark dest if source is None.""" + if source == dest: + return + self._dirty = True + if source is not None: + self._copymap[dest] = source + elif dest in self._copymap: + del self._copymap[dest] + + def copied(self, file): + return self._copymap.get(file, None) + + def copies(self): + return self._copymap + + def _droppath(self, f): + if self[f] not in "?r" and "_dirs" in self.__dict__: + _decdirs(self._dirs, f) + + def _addpath(self, f, check=False): + oldstate = self[f] + if check or oldstate == "r": + if '\r' in f or '\n' in f: + raise util.Abort( + _("'\\n' and '\\r' disallowed in filenames: %r") % f) + if f in self._dirs: + raise util.Abort(_('directory %r already in dirstate') % f) + # shadows + for d in _finddirs(f): + if d in self._dirs: + break + if d in self._map and self[d] != 'r': + raise util.Abort( + _('file %r in dirstate clashes with %r') % (d, f)) + if oldstate in "?r" and "_dirs" in self.__dict__: + _incdirs(self._dirs, f) + + def normal(self, f): + '''Mark a file normal and clean.''' + self._dirty = True + self._addpath(f) + s = os.lstat(self._join(f)) + self._map[f] = ('n', s.st_mode, s.st_size, int(s.st_mtime)) + if f in self._copymap: + del self._copymap[f] + + def normallookup(self, f): + '''Mark a file normal, but possibly dirty.''' + if self._pl[1] != nullid and f in self._map: + # if there is a merge going on and the file was either + # in state 'm' (-1) or coming from other parent (-2) before + # being removed, restore that state. 
+ entry = self._map[f] + if entry[0] == 'r' and entry[2] in (-1, -2): + source = self._copymap.get(f) + if entry[2] == -1: + self.merge(f) + elif entry[2] == -2: + self.otherparent(f) + if source: + self.copy(source, f) + return + if entry[0] == 'm' or entry[0] == 'n' and entry[2] == -2: + return + self._dirty = True + self._addpath(f) + self._map[f] = ('n', 0, -1, -1) + if f in self._copymap: + del self._copymap[f] + + def otherparent(self, f): + '''Mark as coming from the other parent, always dirty.''' + if self._pl[1] == nullid: + raise util.Abort(_("setting %r to other parent " + "only allowed in merges") % f) + self._dirty = True + self._addpath(f) + self._map[f] = ('n', 0, -2, -1) + if f in self._copymap: + del self._copymap[f] + + def add(self, f): + '''Mark a file added.''' + self._dirty = True + self._addpath(f, True) + self._map[f] = ('a', 0, -1, -1) + if f in self._copymap: + del self._copymap[f] + + def remove(self, f): + '''Mark a file removed.''' + self._dirty = True + self._droppath(f) + size = 0 + if self._pl[1] != nullid and f in self._map: + # backup the previous state + entry = self._map[f] + if entry[0] == 'm': # merge + size = -1 + elif entry[0] == 'n' and entry[2] == -2: # other parent + size = -2 + self._map[f] = ('r', 0, size, 0) + if size == 0 and f in self._copymap: + del self._copymap[f] + + def merge(self, f): + '''Mark a file merged.''' + self._dirty = True + s = os.lstat(self._join(f)) + self._addpath(f) + self._map[f] = ('m', s.st_mode, s.st_size, int(s.st_mtime)) + if f in self._copymap: + del self._copymap[f] + + def forget(self, f): + '''Forget a file.''' + self._dirty = True + try: + self._droppath(f) + del self._map[f] + except KeyError: + self._ui.warn(_("not in dirstate: %s\n") % f) + + def _normalize(self, path, knownpath): + norm_path = os.path.normcase(path) + fold_path = self._foldmap.get(norm_path, None) + if fold_path is None: + if knownpath or not os.path.lexists(os.path.join(self._root, path)): + fold_path = path + 
else: + fold_path = self._foldmap.setdefault(norm_path, + util.fspath(path, self._root)) + return fold_path + + def clear(self): + self._map = {} + if "_dirs" in self.__dict__: + delattr(self, "_dirs") + self._copymap = {} + self._pl = [nullid, nullid] + self._dirty = True + + def rebuild(self, parent, files): + self.clear() + for f in files: + if 'x' in files.flags(f): + self._map[f] = ('n', 0777, -1, 0) + else: + self._map[f] = ('n', 0666, -1, 0) + self._pl = (parent, nullid) + self._dirty = True + + def write(self): + if not self._dirty: + return + st = self._opener("dirstate", "w", atomictemp=True) + + # use the modification time of the newly created temporary file as the + # filesystem's notion of 'now' + now = int(util.fstat(st).st_mtime) + + cs = cStringIO.StringIO() + copymap = self._copymap + pack = struct.pack + write = cs.write + write("".join(self._pl)) + for f, e in self._map.iteritems(): + if e[0] == 'n' and e[3] == now: + # The file was last modified "simultaneously" with the current + # write to dirstate (i.e. within the same second for file- + # systems with a granularity of 1 sec). This commonly happens + # for at least a couple of files on 'update'. + # The user could change the file without changing its size + # within the same second. Invalidate the file's stat data in + # dirstate, forcing future 'status' calls to compare the + # contents of the file. This prevents mistakenly treating such + # files as clean. 
+ e = (e[0], 0, -1, -1) # mark entry as 'unset' + self._map[f] = e + + if f in copymap: + f = "%s\0%s" % (f, copymap[f]) + e = pack(_format, e[0], e[1], e[2], e[3], len(f)) + write(e) + write(f) + st.write(cs.getvalue()) + st.rename() + self._dirty = self._dirtypl = False + + def _dirignore(self, f): + if f == '.': + return False + if self._ignore(f): + return True + for p in _finddirs(f): + if self._ignore(p): + return True + return False + + def walk(self, match, subrepos, unknown, ignored): + ''' + Walk recursively through the directory tree, finding all files + matched by match. + + Return a dict mapping filename to stat-like object (either + mercurial.osutil.stat instance or return value of os.stat()). + ''' + + def fwarn(f, msg): + self._ui.warn('%s: %s\n' % (self.pathto(f), msg)) + return False + + def badtype(mode): + kind = _('unknown') + if stat.S_ISCHR(mode): + kind = _('character device') + elif stat.S_ISBLK(mode): + kind = _('block device') + elif stat.S_ISFIFO(mode): + kind = _('fifo') + elif stat.S_ISSOCK(mode): + kind = _('socket') + elif stat.S_ISDIR(mode): + kind = _('directory') + return _('unsupported file type (type is %s)') % kind + + ignore = self._ignore + dirignore = self._dirignore + if ignored: + ignore = util.never + dirignore = util.never + elif not unknown: + # if unknown and ignored are False, skip step 2 + ignore = util.always + dirignore = util.always + + matchfn = match.matchfn + badfn = match.bad + dmap = self._map + normpath = util.normpath + listdir = osutil.listdir + lstat = os.lstat + getkind = stat.S_IFMT + dirkind = stat.S_IFDIR + regkind = stat.S_IFREG + lnkkind = stat.S_IFLNK + join = self._join + work = [] + wadd = work.append + + exact = skipstep3 = False + if matchfn == match.exact: # match.exact + exact = True + dirignore = util.always # skip step 2 + elif match.files() and not match.anypats(): # match.match, no patterns + skipstep3 = True + + if self._checkcase: + normalize = self._normalize + skipstep3 = False + 
else: + normalize = lambda x, y: x + + files = sorted(match.files()) + subrepos.sort() + i, j = 0, 0 + while i < len(files) and j < len(subrepos): + subpath = subrepos[j] + "/" + if not files[i].startswith(subpath): + i += 1 + continue + while files and files[i].startswith(subpath): + del files[i] + j += 1 + + if not files or '.' in files: + files = [''] + results = dict.fromkeys(subrepos) + results['.hg'] = None + + # step 1: find all explicit files + for ff in files: + nf = normalize(normpath(ff), False) + if nf in results: + continue + + try: + st = lstat(join(nf)) + kind = getkind(st.st_mode) + if kind == dirkind: + skipstep3 = False + if nf in dmap: + #file deleted on disk but still in dirstate + results[nf] = None + match.dir(nf) + if not dirignore(nf): + wadd(nf) + elif kind == regkind or kind == lnkkind: + results[nf] = st + else: + badfn(ff, badtype(kind)) + if nf in dmap: + results[nf] = None + except OSError, inst: + if nf in dmap: # does it exactly match a file? + results[nf] = None + else: # does it match a directory? 
+ prefix = nf + "/" + for fn in dmap: + if fn.startswith(prefix): + match.dir(nf) + skipstep3 = False + break + else: + badfn(ff, inst.strerror) + + # step 2: visit subdirectories + while work: + nd = work.pop() + skip = None + if nd == '.': + nd = '' + else: + skip = '.hg' + try: + entries = listdir(join(nd), stat=True, skip=skip) + except OSError, inst: + if inst.errno == errno.EACCES: + fwarn(nd, inst.strerror) + continue + raise + for f, kind, st in entries: + nf = normalize(nd and (nd + "/" + f) or f, True) + if nf not in results: + if kind == dirkind: + if not ignore(nf): + match.dir(nf) + wadd(nf) + if nf in dmap and matchfn(nf): + results[nf] = None + elif kind == regkind or kind == lnkkind: + if nf in dmap: + if matchfn(nf): + results[nf] = st + elif matchfn(nf) and not ignore(nf): + results[nf] = st + elif nf in dmap and matchfn(nf): + results[nf] = None + + # step 3: report unseen items in the dmap hash + if not skipstep3 and not exact: + visit = sorted([f for f in dmap if f not in results and matchfn(f)]) + for nf, st in zip(visit, util.statfiles([join(i) for i in visit])): + if not st is None and not getkind(st.st_mode) in (regkind, lnkkind): + st = None + results[nf] = st + for s in subrepos: + del results[s] + del results['.hg'] + return results + + def status(self, match, subrepos, ignored, clean, unknown): + '''Determine the status of the working copy relative to the + dirstate and return a tuple of lists (unsure, modified, added, + removed, deleted, unknown, ignored, clean), where: + + unsure: + files that might have been modified since the dirstate was + written, but need to be read to be sure (size is the same + but mtime differs) + modified: + files that have definitely been modified since the dirstate + was written (different size or mode) + added: + files that have been explicitly added with hg add + removed: + files that have been explicitly removed with hg remove + deleted: + files that have been deleted through other means ("missing") + 
unknown: + files not in the dirstate that are not ignored + ignored: + files not in the dirstate that are ignored + (by _dirignore()) + clean: + files that have definitely not been modified since the + dirstate was written + ''' + listignored, listclean, listunknown = ignored, clean, unknown + lookup, modified, added, unknown, ignored = [], [], [], [], [] + removed, deleted, clean = [], [], [] + + dmap = self._map + ladd = lookup.append # aka "unsure" + madd = modified.append + aadd = added.append + uadd = unknown.append + iadd = ignored.append + radd = removed.append + dadd = deleted.append + cadd = clean.append + + lnkkind = stat.S_IFLNK + + for fn, st in self.walk(match, subrepos, listunknown, + listignored).iteritems(): + if fn not in dmap: + if (listignored or match.exact(fn)) and self._dirignore(fn): + if listignored: + iadd(fn) + elif listunknown: + uadd(fn) + continue + + state, mode, size, time = dmap[fn] + + if not st and state in "nma": + dadd(fn) + elif state == 'n': + # The "mode & lnkkind != lnkkind or self._checklink" + # lines are an expansion of "islink => checklink" + # where islink means "is this a link?" and checklink + # means "can we check links?". 
+ if (size >= 0 and + (size != st.st_size + or ((mode ^ st.st_mode) & 0100 and self._checkexec)) + and (mode & lnkkind != lnkkind or self._checklink) + or size == -2 # other parent + or fn in self._copymap): + madd(fn) + elif (time != int(st.st_mtime) + and (mode & lnkkind != lnkkind or self._checklink)): + ladd(fn) + elif listclean: + cadd(fn) + elif state == 'm': + madd(fn) + elif state == 'a': + aadd(fn) + elif state == 'r': + radd(fn) + + return (lookup, modified, added, removed, deleted, unknown, ignored, + clean) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dirstate.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dirstate.pyo Binary files differnew file mode 100644 index 0000000..ed809e8 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dirstate.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/discovery.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/discovery.py new file mode 100644 index 0000000..cd361d6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/discovery.py @@ -0,0 +1,308 @@ +# discovery.py - protocol changeset discovery functions +# +# Copyright 2010 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from node import nullid, short +from i18n import _ +import util, error + +def findcommonincoming(repo, remote, heads=None, force=False): + """Return a tuple (common, missing roots, heads) used to identify + missing nodes from remote. + + If a list of heads is specified, return only nodes which are heads + or ancestors of these heads. 
+ """ + m = repo.changelog.nodemap + search = [] + fetch = set() + seen = set() + seenbranch = set() + base = set() + + if not heads: + heads = remote.heads() + + if repo.changelog.tip() == nullid: + base.add(nullid) + if heads != [nullid]: + return [nullid], [nullid], list(heads) + return [nullid], [], [] + + # assume we're closer to the tip than the root + # and start by examining the heads + repo.ui.status(_("searching for changes\n")) + + unknown = [] + for h in heads: + if h not in m: + unknown.append(h) + else: + base.add(h) + + heads = unknown + if not unknown: + return list(base), [], [] + + req = set(unknown) + reqcnt = 0 + + # search through remote branches + # a 'branch' here is a linear segment of history, with four parts: + # head, root, first parent, second parent + # (a branch always has two parents (or none) by definition) + unknown = remote.branches(unknown) + while unknown: + r = [] + while unknown: + n = unknown.pop(0) + if n[0] in seen: + continue + + repo.ui.debug("examining %s:%s\n" + % (short(n[0]), short(n[1]))) + if n[0] == nullid: # found the end of the branch + pass + elif n in seenbranch: + repo.ui.debug("branch already found\n") + continue + elif n[1] and n[1] in m: # do we know the base? 
+ repo.ui.debug("found incomplete branch %s:%s\n" + % (short(n[0]), short(n[1]))) + search.append(n[0:2]) # schedule branch range for scanning + seenbranch.add(n) + else: + if n[1] not in seen and n[1] not in fetch: + if n[2] in m and n[3] in m: + repo.ui.debug("found new changeset %s\n" % + short(n[1])) + fetch.add(n[1]) # earliest unknown + for p in n[2:4]: + if p in m: + base.add(p) # latest known + + for p in n[2:4]: + if p not in req and p not in m: + r.append(p) + req.add(p) + seen.add(n[0]) + + if r: + reqcnt += 1 + repo.ui.progress(_('searching'), reqcnt, unit=_('queries')) + repo.ui.debug("request %d: %s\n" % + (reqcnt, " ".join(map(short, r)))) + for p in xrange(0, len(r), 10): + for b in remote.branches(r[p:p + 10]): + repo.ui.debug("received %s:%s\n" % + (short(b[0]), short(b[1]))) + unknown.append(b) + + # do binary search on the branches we found + while search: + newsearch = [] + reqcnt += 1 + repo.ui.progress(_('searching'), reqcnt, unit=_('queries')) + for n, l in zip(search, remote.between(search)): + l.append(n[1]) + p = n[0] + f = 1 + for i in l: + repo.ui.debug("narrowing %d:%d %s\n" % (f, len(l), short(i))) + if i in m: + if f <= 2: + repo.ui.debug("found new branch changeset %s\n" % + short(p)) + fetch.add(p) + base.add(i) + else: + repo.ui.debug("narrowed branch search to %s:%s\n" + % (short(p), short(i))) + newsearch.append((p, i)) + break + p, f = i, f * 2 + search = newsearch + + # sanity check our fetch list + for f in fetch: + if f in m: + raise error.RepoError(_("already have changeset ") + + short(f[:4])) + + base = list(base) + if base == [nullid]: + if force: + repo.ui.warn(_("warning: repository is unrelated\n")) + else: + raise util.Abort(_("repository is unrelated")) + + repo.ui.debug("found new changesets starting at " + + " ".join([short(f) for f in fetch]) + "\n") + + repo.ui.progress(_('searching'), None) + repo.ui.debug("%d total queries\n" % reqcnt) + + return base, list(fetch), heads + +def findoutgoing(repo, remote, 
base=None, remoteheads=None, force=False): + """Return list of nodes that are roots of subsets not in remote + + If base dict is specified, assume that these nodes and their parents + exist on the remote side. + If remotehead is specified, assume it is the list of the heads from + the remote repository. + """ + if base is None: + base = findcommonincoming(repo, remote, heads=remoteheads, + force=force)[0] + else: + base = list(base) + + repo.ui.debug("common changesets up to " + + " ".join(map(short, base)) + "\n") + + remain = set(repo.changelog.nodemap) + + # prune everything remote has from the tree + remain.remove(nullid) + remove = base + while remove: + n = remove.pop(0) + if n in remain: + remain.remove(n) + for p in repo.changelog.parents(n): + remove.append(p) + + # find every node whose parents have been pruned + subset = [] + # find every remote head that will get new children + for n in remain: + p1, p2 = repo.changelog.parents(n) + if p1 not in remain and p2 not in remain: + subset.append(n) + + return subset + +def prepush(repo, remote, force, revs, newbranch): + '''Analyze the local and remote repositories and determine which + changesets need to be pushed to the remote. Return value depends + on circumstances: + + If we are not going to push anything, return a tuple (None, + outgoing) where outgoing is 0 if there are no outgoing + changesets and 1 if there are, but we refuse to push them + (e.g. would create new remote heads). 
+ + Otherwise, return a tuple (changegroup, remoteheads), where + changegroup is a readable file-like object whose read() returns + successive changegroup chunks ready to be sent over the wire and + remoteheads is the list of remote heads.''' + remoteheads = remote.heads() + common, inc, rheads = findcommonincoming(repo, remote, heads=remoteheads, + force=force) + + cl = repo.changelog + update = findoutgoing(repo, remote, common, remoteheads) + outg, bases, heads = cl.nodesbetween(update, revs) + + if not bases: + repo.ui.status(_("no changes found\n")) + return None, 1 + + if not force and remoteheads != [nullid]: + if remote.capable('branchmap'): + # Check for each named branch if we're creating new remote heads. + # To be a remote head after push, node must be either: + # - unknown locally + # - a local outgoing head descended from update + # - a remote head that's known locally and not + # ancestral to an outgoing head + # + # New named branches cannot be created without --force. + + # 1. Create set of branches involved in the push. + branches = set(repo[n].branch() for n in outg) + + # 2. Check for new branches on the remote. + remotemap = remote.branchmap() + newbranches = branches - set(remotemap) + if newbranches and not newbranch: # new branch requires --new-branch + branchnames = ', '.join(sorted(newbranches)) + raise util.Abort(_("push creates new remote branches: %s!") + % branchnames, + hint=_("use 'hg push --new-branch' to create" + " new remote branches")) + branches.difference_update(newbranches) + + # 3. Construct the initial oldmap and newmap dicts. + # They contain information about the remote heads before and + # after the push, respectively. + # Heads not found locally are not included in either dict, + # since they won't be affected by the push. + # unsynced contains all branches with incoming changesets. 
+ oldmap = {} + newmap = {} + unsynced = set() + for branch in branches: + remotebrheads = remotemap[branch] + prunedbrheads = [h for h in remotebrheads if h in cl.nodemap] + oldmap[branch] = prunedbrheads + newmap[branch] = list(prunedbrheads) + if len(remotebrheads) > len(prunedbrheads): + unsynced.add(branch) + + # 4. Update newmap with outgoing changes. + # This will possibly add new heads and remove existing ones. + ctxgen = (repo[n] for n in outg) + repo._updatebranchcache(newmap, ctxgen) + + else: + # 1-4b. old servers: Check for new topological heads. + # Construct {old,new}map with branch = None (topological branch). + # (code based on _updatebranchcache) + oldheads = set(h for h in remoteheads if h in cl.nodemap) + newheads = oldheads.union(outg) + if len(newheads) > 1: + for latest in reversed(outg): + if latest not in newheads: + continue + minhrev = min(cl.rev(h) for h in newheads) + reachable = cl.reachable(latest, cl.node(minhrev)) + reachable.remove(latest) + newheads.difference_update(reachable) + branches = set([None]) + newmap = {None: newheads} + oldmap = {None: oldheads} + unsynced = inc and branches or set() + + # 5. Check for new heads. + # If there are more heads after the push than before, a suitable + # warning, depending on unsynced status, is displayed. + for branch in branches: + if len(newmap[branch]) > len(oldmap[branch]): + if branch: + msg = _("push creates new remote heads " + "on branch '%s'!") % branch + else: + msg = _("push creates new remote heads!") + + if branch in unsynced: + hint = _("you should pull and merge or use push -f to force") + else: + hint = _("did you forget to merge? use push -f to force") + raise util.Abort(msg, hint=hint) + + # 6. Check for unsynced changes on involved branches. 
+ if unsynced: + repo.ui.warn(_("note: unsynced remote changes!\n")) + + if revs is None: + # use the fast path, no race possible on push + nodes = repo.changelog.findmissing(common) + cg = repo._changegroup(nodes, 'push') + else: + cg = repo.changegroupsubset(update, revs, 'push') + return cg, remoteheads diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/discovery.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/discovery.pyo Binary files differnew file mode 100644 index 0000000..02e69c0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/discovery.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dispatch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dispatch.py new file mode 100644 index 0000000..97f0e3e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dispatch.py @@ -0,0 +1,641 @@ +# dispatch.py - command dispatching for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import os, sys, atexit, signal, pdb, socket, errno, shlex, time, traceback, re +import util, commands, hg, fancyopts, extensions, hook, error +import cmdutil, encoding +import ui as uimod + +def run(): + "run the command in sys.argv" + sys.exit(dispatch(sys.argv[1:])) + +def dispatch(args): + "run the command specified in args" + try: + u = uimod.ui() + if '--traceback' in args: + u.setconfig('ui', 'traceback', 'on') + except util.Abort, inst: + sys.stderr.write(_("abort: %s\n") % inst) + if inst.hint: + sys.stderr.write(_("(%s)\n") % inst.hint) + return -1 + except error.ParseError, inst: + if len(inst.args) > 1: + sys.stderr.write(_("hg: parse error at %s: %s\n") % + (inst.args[1], inst.args[0])) + else: + sys.stderr.write(_("hg: parse error: %s\n") % inst.args[0]) + return -1 + return _runcatch(u, args) + +def _runcatch(ui, args): + def catchterm(*args): + raise error.SignalInterrupt + + try: + for name in 'SIGBREAK', 'SIGHUP', 'SIGTERM': + num = getattr(signal, name, None) + if num: + signal.signal(num, catchterm) + except ValueError: + pass # happens if called in a thread + + try: + try: + # enter the debugger before command execution + if '--debugger' in args: + ui.warn(_("entering debugger - " + "type c to continue starting hg or h for help\n")) + pdb.set_trace() + try: + return _dispatch(ui, args) + finally: + ui.flush() + except: + # enter the debugger when we hit an exception + if '--debugger' in args: + traceback.print_exc() + pdb.post_mortem(sys.exc_info()[2]) + ui.traceback() + raise + + # Global exception handling, alphabetically + # Mercurial-specific first, followed by built-in and library exceptions + except error.AmbiguousCommand, inst: + ui.warn(_("hg: command '%s' is ambiguous:\n %s\n") % + (inst.args[0], " ".join(inst.args[1]))) + except error.ParseError, inst: + if len(inst.args) > 1: + ui.warn(_("hg: parse error at %s: %s\n") % + (inst.args[1], inst.args[0])) + else: + ui.warn(_("hg: parse error: %s\n") % inst.args[0]) 
+ return -1 + except error.LockHeld, inst: + if inst.errno == errno.ETIMEDOUT: + reason = _('timed out waiting for lock held by %s') % inst.locker + else: + reason = _('lock held by %s') % inst.locker + ui.warn(_("abort: %s: %s\n") % (inst.desc or inst.filename, reason)) + except error.LockUnavailable, inst: + ui.warn(_("abort: could not lock %s: %s\n") % + (inst.desc or inst.filename, inst.strerror)) + except error.CommandError, inst: + if inst.args[0]: + ui.warn(_("hg %s: %s\n") % (inst.args[0], inst.args[1])) + commands.help_(ui, inst.args[0]) + else: + ui.warn(_("hg: %s\n") % inst.args[1]) + commands.help_(ui, 'shortlist') + except error.RepoError, inst: + ui.warn(_("abort: %s!\n") % inst) + except error.ResponseError, inst: + ui.warn(_("abort: %s") % inst.args[0]) + if not isinstance(inst.args[1], basestring): + ui.warn(" %r\n" % (inst.args[1],)) + elif not inst.args[1]: + ui.warn(_(" empty string\n")) + else: + ui.warn("\n%r\n" % util.ellipsis(inst.args[1])) + except error.RevlogError, inst: + ui.warn(_("abort: %s!\n") % inst) + except error.SignalInterrupt: + ui.warn(_("killed!\n")) + except error.UnknownCommand, inst: + ui.warn(_("hg: unknown command '%s'\n") % inst.args[0]) + try: + # check if the command is in a disabled extension + # (but don't check for extensions themselves) + commands.help_(ui, inst.args[0], unknowncmd=True) + except error.UnknownCommand: + commands.help_(ui, 'shortlist') + except util.Abort, inst: + ui.warn(_("abort: %s\n") % inst) + if inst.hint: + ui.warn(_("(%s)\n") % inst.hint) + except ImportError, inst: + ui.warn(_("abort: %s!\n") % inst) + m = str(inst).split()[-1] + if m in "mpatch bdiff".split(): + ui.warn(_("(did you forget to compile extensions?)\n")) + elif m in "zlib".split(): + ui.warn(_("(is your Python install correct?)\n")) + except IOError, inst: + if hasattr(inst, "code"): + ui.warn(_("abort: %s\n") % inst) + elif hasattr(inst, "reason"): + try: # usually it is in the form (errno, strerror) + reason = 
inst.reason.args[1] + except: # it might be anything, for example a string + reason = inst.reason + ui.warn(_("abort: error: %s\n") % reason) + elif hasattr(inst, "args") and inst.args[0] == errno.EPIPE: + if ui.debugflag: + ui.warn(_("broken pipe\n")) + elif getattr(inst, "strerror", None): + if getattr(inst, "filename", None): + ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) + else: + ui.warn(_("abort: %s\n") % inst.strerror) + else: + raise + except OSError, inst: + if getattr(inst, "filename", None): + ui.warn(_("abort: %s: %s\n") % (inst.strerror, inst.filename)) + else: + ui.warn(_("abort: %s\n") % inst.strerror) + except KeyboardInterrupt: + try: + ui.warn(_("interrupted!\n")) + except IOError, inst: + if inst.errno == errno.EPIPE: + if ui.debugflag: + ui.warn(_("\nbroken pipe\n")) + else: + raise + except MemoryError: + ui.warn(_("abort: out of memory\n")) + except SystemExit, inst: + # Commands shouldn't sys.exit directly, but give a return code. + # Just in case catch this and and pass exit code to caller. 
+ return inst.code + except socket.error, inst: + ui.warn(_("abort: %s\n") % inst.args[-1]) + except: + ui.warn(_("** unknown exception encountered," + " please report by visiting\n")) + ui.warn(_("** http://mercurial.selenic.com/wiki/BugTracker\n")) + ui.warn(_("** Python %s\n") % sys.version.replace('\n', '')) + ui.warn(_("** Mercurial Distributed SCM (version %s)\n") + % util.version()) + ui.warn(_("** Extensions loaded: %s\n") + % ", ".join([x[0] for x in extensions.extensions()])) + raise + + return -1 + +def aliasargs(fn): + if hasattr(fn, 'args'): + return fn.args + return [] + +class cmdalias(object): + def __init__(self, name, definition, cmdtable): + self.name = self.cmd = name + self.cmdname = '' + self.definition = definition + self.args = [] + self.opts = [] + self.help = '' + self.norepo = True + self.badalias = False + + try: + aliases, entry = cmdutil.findcmd(self.name, cmdtable) + for alias, e in cmdtable.iteritems(): + if e is entry: + self.cmd = alias + break + self.shadows = True + except error.UnknownCommand: + self.shadows = False + + if not self.definition: + def fn(ui, *args): + ui.warn(_("no definition for alias '%s'\n") % self.name) + return 1 + self.fn = fn + self.badalias = True + + return + + if self.definition.startswith('!'): + self.shell = True + def fn(ui, *args): + env = {'HG_ARGS': ' '.join((self.name,) + args)} + def _checkvar(m): + if int(m.groups()[0]) <= len(args): + return m.group() + else: + return '' + cmd = re.sub(r'\$(\d+)', _checkvar, self.definition[1:]) + replace = dict((str(i + 1), arg) for i, arg in enumerate(args)) + replace['0'] = self.name + replace['@'] = ' '.join(args) + cmd = util.interpolate(r'\$', replace, cmd) + return util.system(cmd, environ=env) + self.fn = fn + return + + args = shlex.split(self.definition) + self.cmdname = cmd = args.pop(0) + args = map(util.expandpath, args) + + for invalidarg in ("--cwd", "-R", "--repository", "--repo"): + if _earlygetopt([invalidarg], args): + def fn(ui, *args): + 
ui.warn(_("error in definition for alias '%s': %s may only " + "be given on the command line\n") + % (self.name, invalidarg)) + return 1 + + self.fn = fn + self.badalias = True + return + + try: + tableentry = cmdutil.findcmd(cmd, cmdtable, False)[1] + if len(tableentry) > 2: + self.fn, self.opts, self.help = tableentry + else: + self.fn, self.opts = tableentry + + self.args = aliasargs(self.fn) + args + if cmd not in commands.norepo.split(' '): + self.norepo = False + if self.help.startswith("hg " + cmd): + # drop prefix in old-style help lines so hg shows the alias + self.help = self.help[4 + len(cmd):] + self.__doc__ = self.fn.__doc__ + + except error.UnknownCommand: + def fn(ui, *args): + ui.warn(_("alias '%s' resolves to unknown command '%s'\n") \ + % (self.name, cmd)) + try: + # check if the command is in a disabled extension + commands.help_(ui, cmd, unknowncmd=True) + except error.UnknownCommand: + pass + return 1 + self.fn = fn + self.badalias = True + except error.AmbiguousCommand: + def fn(ui, *args): + ui.warn(_("alias '%s' resolves to ambiguous command '%s'\n") \ + % (self.name, cmd)) + return 1 + self.fn = fn + self.badalias = True + + def __call__(self, ui, *args, **opts): + if self.shadows: + ui.debug("alias '%s' shadows command '%s'\n" % + (self.name, self.cmdname)) + + if self.definition.startswith('!'): + return self.fn(ui, *args, **opts) + else: + try: + util.checksignature(self.fn)(ui, *args, **opts) + except error.SignatureError: + args = ' '.join([self.cmdname] + self.args) + ui.debug("alias '%s' expands to '%s'\n" % (self.name, args)) + raise + +def addaliases(ui, cmdtable): + # aliases are processed after extensions have been loaded, so they + # may use extension commands. Aliases can also use other alias definitions, + # but only if they have been defined prior to the current definition. 
+ for alias, definition in ui.configitems('alias'): + aliasdef = cmdalias(alias, definition, cmdtable) + cmdtable[aliasdef.cmd] = (aliasdef, aliasdef.opts, aliasdef.help) + if aliasdef.norepo: + commands.norepo += ' %s' % alias + +def _parse(ui, args): + options = {} + cmdoptions = {} + + try: + args = fancyopts.fancyopts(args, commands.globalopts, options) + except fancyopts.getopt.GetoptError, inst: + raise error.CommandError(None, inst) + + if args: + cmd, args = args[0], args[1:] + aliases, entry = cmdutil.findcmd(cmd, commands.table, + ui.config("ui", "strict")) + cmd = aliases[0] + args = aliasargs(entry[0]) + args + defaults = ui.config("defaults", cmd) + if defaults: + args = map(util.expandpath, shlex.split(defaults)) + args + c = list(entry[1]) + else: + cmd = None + c = [] + + # combine global options into local + for o in commands.globalopts: + c.append((o[0], o[1], options[o[1]], o[3])) + + try: + args = fancyopts.fancyopts(args, c, cmdoptions, True) + except fancyopts.getopt.GetoptError, inst: + raise error.CommandError(cmd, inst) + + # separate global options back out + for o in commands.globalopts: + n = o[1] + options[n] = cmdoptions[n] + del cmdoptions[n] + + return (cmd, cmd and entry[0] or None, args, options, cmdoptions) + +def _parseconfig(ui, config): + """parse the --config options from the command line""" + for cfg in config: + try: + name, value = cfg.split('=', 1) + section, name = name.split('.', 1) + if not section or not name: + raise IndexError + ui.setconfig(section, name, value) + except (IndexError, ValueError): + raise util.Abort(_('malformed --config option: %r ' + '(use --config section.name=value)') % cfg) + +def _earlygetopt(aliases, args): + """Return list of values for an option (or aliases). + + The values are listed in the order they appear in args. + The options and values are removed from args. 
+ """ + try: + argcount = args.index("--") + except ValueError: + argcount = len(args) + shortopts = [opt for opt in aliases if len(opt) == 2] + values = [] + pos = 0 + while pos < argcount: + if args[pos] in aliases: + if pos + 1 >= argcount: + # ignore and let getopt report an error if there is no value + break + del args[pos] + values.append(args.pop(pos)) + argcount -= 2 + elif args[pos][:2] in shortopts: + # short option can have no following space, e.g. hg log -Rfoo + values.append(args.pop(pos)[2:]) + argcount -= 1 + else: + pos += 1 + return values + +def runcommand(lui, repo, cmd, fullargs, ui, options, d, cmdpats, cmdoptions): + # run pre-hook, and abort if it fails + ret = hook.hook(lui, repo, "pre-%s" % cmd, False, args=" ".join(fullargs), + pats=cmdpats, opts=cmdoptions) + if ret: + return ret + ret = _runcommand(ui, options, cmd, d) + # run post-hook, passing command result + hook.hook(lui, repo, "post-%s" % cmd, False, args=" ".join(fullargs), + result=ret, pats=cmdpats, opts=cmdoptions) + return ret + +def _getlocal(ui, rpath): + """Return (path, local ui object) for the given target path. + + Takes paths in [cwd]/.hg/hgrc into account." 
+ """ + try: + wd = os.getcwd() + except OSError, e: + raise util.Abort(_("error getting current working directory: %s") % + e.strerror) + path = cmdutil.findrepo(wd) or "" + if not path: + lui = ui + else: + lui = ui.copy() + lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) + + if rpath: + path = lui.expandpath(rpath[-1]) + lui = ui.copy() + lui.readconfig(os.path.join(path, ".hg", "hgrc"), path) + + return path, lui + +def _checkshellalias(ui, args): + cwd = os.getcwd() + norepo = commands.norepo + options = {} + + try: + args = fancyopts.fancyopts(args, commands.globalopts, options) + except fancyopts.getopt.GetoptError: + return + + if not args: + return + + _parseconfig(ui, options['config']) + if options['cwd']: + os.chdir(options['cwd']) + + path, lui = _getlocal(ui, [options['repository']]) + + cmdtable = commands.table.copy() + addaliases(lui, cmdtable) + + cmd = args[0] + try: + aliases, entry = cmdutil.findcmd(cmd, cmdtable, lui.config("ui", "strict")) + except (error.AmbiguousCommand, error.UnknownCommand): + commands.norepo = norepo + os.chdir(cwd) + return + + cmd = aliases[0] + fn = entry[0] + + if cmd and hasattr(fn, 'shell'): + d = lambda: fn(ui, *args[1:]) + return lambda: runcommand(lui, None, cmd, args[:1], ui, options, d, [], {}) + + commands.norepo = norepo + os.chdir(cwd) + +_loaded = set() +def _dispatch(ui, args): + shellaliasfn = _checkshellalias(ui, args) + if shellaliasfn: + return shellaliasfn() + + # read --config before doing anything else + # (e.g. to change trust settings for reading .hg/hgrc) + _parseconfig(ui, _earlygetopt(['--config'], args)) + + # check for cwd + cwd = _earlygetopt(['--cwd'], args) + if cwd: + os.chdir(cwd[-1]) + + rpath = _earlygetopt(["-R", "--repository", "--repo"], args) + path, lui = _getlocal(ui, rpath) + + # Configure extensions in phases: uisetup, extsetup, cmdtable, and + # reposetup. 
Programs like TortoiseHg will call _dispatch several + # times so we keep track of configured extensions in _loaded. + extensions.loadall(lui) + exts = [ext for ext in extensions.extensions() if ext[0] not in _loaded] + # Propagate any changes to lui.__class__ by extensions + ui.__class__ = lui.__class__ + + # (uisetup and extsetup are handled in extensions.loadall) + + for name, module in exts: + cmdtable = getattr(module, 'cmdtable', {}) + overrides = [cmd for cmd in cmdtable if cmd in commands.table] + if overrides: + ui.warn(_("extension '%s' overrides commands: %s\n") + % (name, " ".join(overrides))) + commands.table.update(cmdtable) + _loaded.add(name) + + # (reposetup is handled in hg.repository) + + addaliases(lui, commands.table) + + # check for fallback encoding + fallback = lui.config('ui', 'fallbackencoding') + if fallback: + encoding.fallbackencoding = fallback + + fullargs = args + cmd, func, args, options, cmdoptions = _parse(lui, args) + + if options["config"]: + raise util.Abort(_("option --config may not be abbreviated!")) + if options["cwd"]: + raise util.Abort(_("option --cwd may not be abbreviated!")) + if options["repository"]: + raise util.Abort(_( + "Option -R has to be separated from other options (e.g. 
not -qR) " + "and --repository may only be abbreviated as --repo!")) + + if options["encoding"]: + encoding.encoding = options["encoding"] + if options["encodingmode"]: + encoding.encodingmode = options["encodingmode"] + if options["time"]: + def get_times(): + t = os.times() + if t[4] == 0.0: # Windows leaves this as zero, so use time.clock() + t = (t[0], t[1], t[2], t[3], time.clock()) + return t + s = get_times() + def print_time(): + t = get_times() + ui.warn(_("Time: real %.3f secs (user %.3f+%.3f sys %.3f+%.3f)\n") % + (t[4]-s[4], t[0]-s[0], t[2]-s[2], t[1]-s[1], t[3]-s[3])) + atexit.register(print_time) + + if options['verbose'] or options['debug'] or options['quiet']: + ui.setconfig('ui', 'verbose', str(bool(options['verbose']))) + ui.setconfig('ui', 'debug', str(bool(options['debug']))) + ui.setconfig('ui', 'quiet', str(bool(options['quiet']))) + if options['traceback']: + ui.setconfig('ui', 'traceback', 'on') + if options['noninteractive']: + ui.setconfig('ui', 'interactive', 'off') + + if options['help']: + return commands.help_(ui, cmd, options['version']) + elif options['version']: + return commands.version_(ui) + elif not cmd: + return commands.help_(ui, 'shortlist') + + repo = None + cmdpats = args[:] + if cmd not in commands.norepo.split(): + try: + repo = hg.repository(ui, path=path) + ui = repo.ui + if not repo.local(): + raise util.Abort(_("repository '%s' is not local") % path) + ui.setconfig("bundle", "mainreporoot", repo.root) + except error.RepoError: + if cmd not in commands.optionalrepo.split(): + if args and not path: # try to infer -R from command args + repos = map(cmdutil.findrepo, args) + guess = repos[0] + if guess and repos.count(guess) == len(repos): + return _dispatch(ui, ['--repository', guess] + fullargs) + if not path: + raise error.RepoError(_("There is no Mercurial repository" + " here (.hg not found)")) + raise + args.insert(0, repo) + elif rpath: + ui.warn(_("warning: --repository ignored\n")) + + msg = ' '.join(' ' in a and 
repr(a) or a for a in fullargs) + ui.log("command", msg + "\n") + d = lambda: util.checksignature(func)(ui, *args, **cmdoptions) + return runcommand(lui, repo, cmd, fullargs, ui, options, d, + cmdpats, cmdoptions) + +def _runcommand(ui, options, cmd, cmdfunc): + def checkargs(): + try: + return cmdfunc() + except error.SignatureError: + raise error.CommandError(cmd, _("invalid arguments")) + + if options['profile']: + format = ui.config('profiling', 'format', default='text') + + if not format in ['text', 'kcachegrind']: + ui.warn(_("unrecognized profiling format '%s'" + " - Ignored\n") % format) + format = 'text' + + output = ui.config('profiling', 'output') + + if output: + path = ui.expandpath(output) + ostream = open(path, 'wb') + else: + ostream = sys.stderr + + try: + from mercurial import lsprof + except ImportError: + raise util.Abort(_( + 'lsprof not available - install from ' + 'http://codespeak.net/svn/user/arigo/hack/misc/lsprof/')) + p = lsprof.Profiler() + p.enable(subcalls=True) + try: + return checkargs() + finally: + p.disable() + + if format == 'kcachegrind': + import lsprofcalltree + calltree = lsprofcalltree.KCacheGrind(p) + calltree.output(ostream) + else: + # format == 'text' + stats = lsprof.Stats(p.getstats()) + stats.sort() + stats.pprint(top=10, file=ostream, climit=5) + + if output: + ostream.close() + else: + return checkargs() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dispatch.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dispatch.pyo Binary files differnew file mode 100644 index 0000000..5274dc6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/dispatch.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/encoding.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/encoding.py new file mode 100644 index 0000000..57270e6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/encoding.py @@ -0,0 +1,103 @@ +# encoding.py - character 
transcoding support for Mercurial +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import error +import unicodedata, locale, os + +def _getpreferredencoding(): + ''' + On darwin, getpreferredencoding ignores the locale environment and + always returns mac-roman. http://bugs.python.org/issue6202 fixes this + for Python 2.7 and up. This is the same corrected code for earlier + Python versions. + + However, we can't use a version check for this method, as some distributions + patch Python to fix this. Instead, we use it as a 'fixer' for the mac-roman + encoding, as it is unlikely that this encoding is the actually expected. + ''' + try: + locale.CODESET + except AttributeError: + # Fall back to parsing environment variables :-( + return locale.getdefaultlocale()[1] + + oldloc = locale.setlocale(locale.LC_CTYPE) + locale.setlocale(locale.LC_CTYPE, "") + result = locale.nl_langinfo(locale.CODESET) + locale.setlocale(locale.LC_CTYPE, oldloc) + + return result + +_encodingfixers = { + '646': lambda: 'ascii', + 'ANSI_X3.4-1968': lambda: 'ascii', + 'mac-roman': _getpreferredencoding +} + +try: + encoding = os.environ.get("HGENCODING") + if not encoding: + encoding = locale.getpreferredencoding() or 'ascii' + encoding = _encodingfixers.get(encoding, lambda: encoding)() +except locale.Error: + encoding = 'ascii' +encodingmode = os.environ.get("HGENCODINGMODE", "strict") +fallbackencoding = 'ISO-8859-1' + +def tolocal(s): + """ + Convert a string from internal UTF-8 to local encoding + + All internal strings should be UTF-8 but some repos before the + implementation of locale support may contain latin1 or possibly + other character sets. We attempt to decode everything strictly + using UTF-8, then Latin-1, and failing that, we use UTF-8 and + replace unknown characters. 
+ """ + for e in ('UTF-8', fallbackencoding): + try: + u = s.decode(e) # attempt strict decoding + return u.encode(encoding, "replace") + except LookupError, k: + raise error.Abort("%s, please check your locale settings" % k) + except UnicodeDecodeError: + pass + u = s.decode("utf-8", "replace") # last ditch + return u.encode(encoding, "replace") + +def fromlocal(s): + """ + Convert a string from the local character encoding to UTF-8 + + We attempt to decode strings using the encoding mode set by + HGENCODINGMODE, which defaults to 'strict'. In this mode, unknown + characters will cause an error message. Other modes include + 'replace', which replaces unknown characters with a special + Unicode character, and 'ignore', which drops the character. + """ + try: + return s.decode(encoding, encodingmode).encode("utf-8") + except UnicodeDecodeError, inst: + sub = s[max(0, inst.start - 10):inst.start + 10] + raise error.Abort("decoding near '%s': %s!" % (sub, inst)) + except LookupError, k: + raise error.Abort("%s, please check your locale settings" % k) + +# How to treat ambiguous-width characters. Set to 'wide' to treat as wide. 
+ambiguous = os.environ.get("HGENCODINGAMBIGUOUS", "narrow") + +def colwidth(s): + "Find the column width of a UTF-8 string for display" + d = s.decode(encoding, 'replace') + if hasattr(unicodedata, 'east_asian_width'): + wide = "WF" + if ambiguous == "wide": + wide = "WFA" + w = unicodedata.east_asian_width + return sum([w(c) in wide and 2 or 1 for c in d]) + return len(d) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/encoding.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/encoding.pyo Binary files differnew file mode 100644 index 0000000..8780298 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/encoding.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/error.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/error.py new file mode 100644 index 0000000..b02ece3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/error.py @@ -0,0 +1,81 @@ +# error.py - Mercurial exceptions +# +# Copyright 2005-2008 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""Mercurial exceptions. + +This allows us to catch exceptions at higher levels without forcing +imports. 
+""" + +# Do not import anything here, please + +class RevlogError(Exception): + pass + +class LookupError(RevlogError, KeyError): + def __init__(self, name, index, message): + self.name = name + if isinstance(name, str) and len(name) == 20: + from node import short + name = short(name) + RevlogError.__init__(self, '%s@%s: %s' % (index, name, message)) + + def __str__(self): + return RevlogError.__str__(self) + +class CommandError(Exception): + """Exception raised on errors in parsing the command line.""" + +class Abort(Exception): + """Raised if a command needs to print an error and exit.""" + def __init__(self, *args, **kw): + Exception.__init__(self, *args) + self.hint = kw.get('hint') + +class ConfigError(Abort): + 'Exception raised when parsing config files' + +class ParseError(Exception): + 'Exception raised when parsing config files (msg[, pos])' + +class RepoError(Exception): + pass + +class RepoLookupError(RepoError): + pass + +class CapabilityError(RepoError): + pass + +class LockError(IOError): + def __init__(self, errno, strerror, filename, desc): + IOError.__init__(self, errno, strerror, filename) + self.desc = desc + +class LockHeld(LockError): + def __init__(self, errno, filename, desc, locker): + LockError.__init__(self, errno, 'Lock held', filename, desc) + self.locker = locker + +class LockUnavailable(LockError): + pass + +class ResponseError(Exception): + """Raised to print an error with part of output and exit.""" + +class UnknownCommand(Exception): + """Exception raised if command is not in the command table.""" + +class AmbiguousCommand(Exception): + """Exception raised if command shortcut matches more than one command.""" + +# derived from KeyboardInterrupt to simplify some breakout code +class SignalInterrupt(KeyboardInterrupt): + """Exception raised on SIGTERM and SIGHUP.""" + +class SignatureError(Exception): + pass diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/error.pyo 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/error.pyo Binary files differnew file mode 100644 index 0000000..d726329 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/error.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/extensions.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/extensions.py new file mode 100644 index 0000000..c4eecc6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/extensions.py @@ -0,0 +1,302 @@ +# extensions.py - extension handling for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import imp, os +import util, cmdutil, help, error +from i18n import _, gettext + +_extensions = {} +_order = [] + +def extensions(): + for name in _order: + module = _extensions[name] + if module: + yield name, module + +def find(name): + '''return module with given extension name''' + try: + return _extensions[name] + except KeyError: + for k, v in _extensions.iteritems(): + if k.endswith('.' 
+ name) or k.endswith('/' + name): + return v + raise KeyError(name) + +def loadpath(path, module_name): + module_name = module_name.replace('.', '_') + path = util.expandpath(path) + if os.path.isdir(path): + # module/__init__.py style + d, f = os.path.split(path.rstrip('/')) + fd, fpath, desc = imp.find_module(f, [d]) + return imp.load_module(module_name, fd, fpath, desc) + else: + return imp.load_source(module_name, path) + +def load(ui, name, path): + # unused ui argument kept for backwards compatibility + if name.startswith('hgext.') or name.startswith('hgext/'): + shortname = name[6:] + else: + shortname = name + if shortname in _extensions: + return _extensions[shortname] + _extensions[shortname] = None + if path: + # the module will be loaded in sys.modules + # choose an unique name so that it doesn't + # conflicts with other modules + mod = loadpath(path, 'hgext.%s' % name) + else: + def importh(name): + mod = __import__(name) + components = name.split('.') + for comp in components[1:]: + mod = getattr(mod, comp) + return mod + try: + mod = importh("hgext.%s" % name) + except ImportError: + mod = importh(name) + _extensions[shortname] = mod + _order.append(shortname) + return mod + +def loadall(ui): + result = ui.configitems("extensions") + newindex = len(_order) + for (name, path) in result: + if path: + if path[0] == '!': + continue + try: + load(ui, name, path) + except KeyboardInterrupt: + raise + except Exception, inst: + if path: + ui.warn(_("*** failed to import extension %s from %s: %s\n") + % (name, path, inst)) + else: + ui.warn(_("*** failed to import extension %s: %s\n") + % (name, inst)) + if ui.traceback(): + return 1 + + for name in _order[newindex:]: + uisetup = getattr(_extensions[name], 'uisetup', None) + if uisetup: + uisetup(ui) + + for name in _order[newindex:]: + extsetup = getattr(_extensions[name], 'extsetup', None) + if extsetup: + try: + extsetup(ui) + except TypeError: + if extsetup.func_code.co_argcount != 0: + raise + 
extsetup() # old extsetup with no ui argument + +def wrapcommand(table, command, wrapper): + '''Wrap the command named `command' in table + + Replace command in the command table with wrapper. The wrapped command will + be inserted into the command table specified by the table argument. + + The wrapper will be called like + + wrapper(orig, *args, **kwargs) + + where orig is the original (wrapped) function, and *args, **kwargs + are the arguments passed to it. + ''' + assert hasattr(wrapper, '__call__') + aliases, entry = cmdutil.findcmd(command, table) + for alias, e in table.iteritems(): + if e is entry: + key = alias + break + + origfn = entry[0] + def wrap(*args, **kwargs): + return util.checksignature(wrapper)( + util.checksignature(origfn), *args, **kwargs) + + wrap.__doc__ = getattr(origfn, '__doc__') + wrap.__module__ = getattr(origfn, '__module__') + + newentry = list(entry) + newentry[0] = wrap + table[key] = tuple(newentry) + return entry + +def wrapfunction(container, funcname, wrapper): + '''Wrap the function named funcname in container + + Replace the funcname member in the given container with the specified + wrapper. The container is typically a module, class, or instance. + + The wrapper will be called like + + wrapper(orig, *args, **kwargs) + + where orig is the original (wrapped) function, and *args, **kwargs + are the arguments passed to it. + + Wrapping methods of the repository object is not recommended since + it conflicts with extensions that extend the repository by + subclassing. All extensions that need to extend methods of + localrepository should use this subclassing trick: namely, + reposetup() should look like + + def reposetup(ui, repo): + class myrepo(repo.__class__): + def whatever(self, *args, **kwargs): + [...extension stuff...] + super(myrepo, self).whatever(*args, **kwargs) + [...extension stuff...] + + repo.__class__ = myrepo + + In general, combining wrapfunction() with subclassing does not + work. 
Since you cannot control what other extensions are loaded by + your end users, you should play nicely with others by using the + subclass trick. + ''' + assert hasattr(wrapper, '__call__') + def wrap(*args, **kwargs): + return wrapper(origfn, *args, **kwargs) + + origfn = getattr(container, funcname) + assert hasattr(origfn, '__call__') + setattr(container, funcname, wrap) + return origfn + +def _disabledpaths(strip_init=False): + '''find paths of disabled extensions. returns a dict of {name: path} + removes /__init__.py from packages if strip_init is True''' + import hgext + extpath = os.path.dirname(os.path.abspath(hgext.__file__)) + try: # might not be a filesystem path + files = os.listdir(extpath) + except OSError: + return {} + + exts = {} + for e in files: + if e.endswith('.py'): + name = e.rsplit('.', 1)[0] + path = os.path.join(extpath, e) + else: + name = e + path = os.path.join(extpath, e, '__init__.py') + if not os.path.exists(path): + continue + if strip_init: + path = os.path.dirname(path) + if name in exts or name in _order or name == '__init__': + continue + exts[name] = path + return exts + +def _disabledhelp(path): + '''retrieve help synopsis of a disabled extension (without importing)''' + try: + file = open(path) + except IOError: + return + else: + doc = help.moduledoc(file) + file.close() + + if doc: # extracting localized synopsis + return gettext(doc).splitlines()[0] + else: + return _('(no help text available)') + +def disabled(): + '''find disabled extensions from hgext + returns a dict of {name: desc}, and the max name length''' + + paths = _disabledpaths() + if not paths: + return None, 0 + + exts = {} + maxlength = 0 + for name, path in paths.iteritems(): + doc = _disabledhelp(path) + if not doc: + continue + + exts[name] = doc + if len(name) > maxlength: + maxlength = len(name) + + return exts, maxlength + +def disabledext(name): + '''find a specific disabled extension from hgext. 
returns desc''' + paths = _disabledpaths() + if name in paths: + return _disabledhelp(paths[name]) + +def disabledcmd(cmd, strict=False): + '''import disabled extensions until cmd is found. + returns (cmdname, extname, doc)''' + + paths = _disabledpaths(strip_init=True) + if not paths: + raise error.UnknownCommand(cmd) + + def findcmd(cmd, name, path): + try: + mod = loadpath(path, 'hgext.%s' % name) + except Exception: + return + try: + aliases, entry = cmdutil.findcmd(cmd, + getattr(mod, 'cmdtable', {}), strict) + except (error.AmbiguousCommand, error.UnknownCommand): + return + for c in aliases: + if c.startswith(cmd): + cmd = c + break + else: + cmd = aliases[0] + return (cmd, name, mod) + + # first, search for an extension with the same name as the command + path = paths.pop(cmd, None) + if path: + ext = findcmd(cmd, cmd, path) + if ext: + return ext + + # otherwise, interrogate each extension until there's a match + for name, path in paths.iteritems(): + ext = findcmd(cmd, name, path) + if ext: + return ext + + raise error.UnknownCommand(cmd) + +def enabled(): + '''return a dict of {name: desc} of extensions, and the max name length''' + exts = {} + maxlength = 0 + for ename, ext in extensions(): + doc = (gettext(ext.__doc__) or _('(no help text available)')) + ename = ename.split('.')[-1] + maxlength = max(len(ename), maxlength) + exts[ename] = doc.splitlines()[0].strip() + + return exts, maxlength diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/extensions.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/extensions.pyo Binary files differnew file mode 100644 index 0000000..a49f044 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/extensions.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/fancyopts.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/fancyopts.py new file mode 100644 index 0000000..7c9e07f --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/fancyopts.py @@ -0,0 +1,117 @@ +# fancyopts.py - better command line parsing +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import getopt + +def gnugetopt(args, options, longoptions): + """Parse options mostly like getopt.gnu_getopt. + + This is different from getopt.gnu_getopt in that an argument of - will + become an argument of - instead of vanishing completely. + """ + extraargs = [] + if '--' in args: + stopindex = args.index('--') + extraargs = args[stopindex + 1:] + args = args[:stopindex] + opts, parseargs = getopt.getopt(args, options, longoptions) + args = [] + while parseargs: + arg = parseargs.pop(0) + if arg and arg[0] == '-' and len(arg) > 1: + parseargs.insert(0, arg) + topts, newparseargs = getopt.getopt(parseargs, options, longoptions) + opts = opts + topts + parseargs = newparseargs + else: + args.append(arg) + args.extend(extraargs) + return opts, args + + +def fancyopts(args, options, state, gnu=False): + """ + read args, parse options, and store options in state + + each option is a tuple of: + + short option or '' + long option + default value + description + option value label(optional) + + option types include: + + boolean or none - option sets variable in state to true + string - parameter string is stored in state + list - parameter string is added to a list + integer - parameter strings is stored as int + function - call function with parameter + + non-option args are returned + """ + namelist = [] + shortlist = '' + argmap = {} + defmap = {} + + for option in options: + if len(option) == 5: + short, name, default, comment, dummy = option + else: + short, name, default, comment = option + # convert opts to getopt format + oname = name + name = name.replace('-', '_') + + argmap['-' + short] = argmap['--' + oname] = name + 
defmap[name] = default + + # copy defaults to state + if isinstance(default, list): + state[name] = default[:] + elif hasattr(default, '__call__'): + state[name] = None + else: + state[name] = default + + # does it take a parameter? + if not (default is None or default is True or default is False): + if short: + short += ':' + if oname: + oname += '=' + if short: + shortlist += short + if name: + namelist.append(oname) + + # parse arguments + if gnu: + parse = gnugetopt + else: + parse = getopt.getopt + opts, args = parse(args, shortlist, namelist) + + # transfer result to state + for opt, val in opts: + name = argmap[opt] + t = type(defmap[name]) + if t is type(fancyopts): + state[name] = defmap[name](val) + elif t is type(1): + state[name] = int(val) + elif t is type(''): + state[name] = val + elif t is type([]): + state[name].append(val) + elif t is type(None) or t is type(False): + state[name] = True + + # return unparsed args + return args diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/fancyopts.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/fancyopts.pyo Binary files differnew file mode 100644 index 0000000..fb665b2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/fancyopts.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filelog.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filelog.py new file mode 100644 index 0000000..1ae2b17 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filelog.py @@ -0,0 +1,79 @@ +# filelog.py - file history class for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import revlog + +class filelog(revlog.revlog): + def __init__(self, opener, path): + revlog.revlog.__init__(self, opener, + "/".join(("data", path + ".i"))) + + def read(self, node): + t = self.revision(node) + if not t.startswith('\1\n'): + return t + s = t.index('\1\n', 2) + return t[s + 2:] + + def _readmeta(self, node): + t = self.revision(node) + if not t.startswith('\1\n'): + return {} + s = t.index('\1\n', 2) + mt = t[2:s] + m = {} + for l in mt.splitlines(): + k, v = l.split(": ", 1) + m[k] = v + return m + + def add(self, text, meta, transaction, link, p1=None, p2=None): + if meta or text.startswith('\1\n'): + mt = ["%s: %s\n" % (k, v) for k, v in sorted(meta.iteritems())] + text = "\1\n%s\1\n%s" % ("".join(mt), text) + return self.addrevision(text, transaction, link, p1, p2) + + def renamed(self, node): + if self.parents(node)[0] != revlog.nullid: + return False + m = self._readmeta(node) + if m and "copy" in m: + return (m["copy"], revlog.bin(m["copyrev"])) + return False + + def size(self, rev): + """return the size of a given revision""" + + # for revisions with renames, we have to go the slow way + node = self.node(rev) + if self.renamed(node): + return len(self.read(node)) + + # XXX if self.read(node).startswith("\1\n"), this returns (size+4) + return revlog.revlog.size(self, rev) + + def cmp(self, node, text): + """compare text with a given file revision + + returns True if text is different than what is stored. + """ + + t = text + if text.startswith('\1\n'): + t = '\1\n\1\n' + text + + samehashes = not revlog.revlog.cmp(self, node, t) + if samehashes: + return False + + # renaming a file produces a different hash, even if the data + # remains unchanged. 
Check if it's the case (slow): + if self.renamed(node): + t2 = self.read(node) + return t2 != text + + return True diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filelog.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filelog.pyo Binary files differnew file mode 100644 index 0000000..f60f4b3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filelog.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filemerge.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filemerge.py new file mode 100644 index 0000000..4d9b9a9 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filemerge.py @@ -0,0 +1,267 @@ +# filemerge.py - file-level merge handling for Mercurial +# +# Copyright 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from node import short +from i18n import _ +import util, simplemerge, match, error +import os, tempfile, re, filecmp + +def _toolstr(ui, tool, part, default=""): + return ui.config("merge-tools", tool + "." + part, default) + +def _toolbool(ui, tool, part, default=False): + return ui.configbool("merge-tools", tool + "." + part, default) + +def _toollist(ui, tool, part, default=[]): + return ui.configlist("merge-tools", tool + "." 
+ part, default) + +_internal = ['internal:' + s + for s in 'fail local other merge prompt dump'.split()] + +def _findtool(ui, tool): + if tool in _internal: + return tool + k = _toolstr(ui, tool, "regkey") + if k: + p = util.lookup_reg(k, _toolstr(ui, tool, "regname")) + if p: + p = util.find_exe(p + _toolstr(ui, tool, "regappend")) + if p: + return p + return util.find_exe(_toolstr(ui, tool, "executable", tool)) + +def _picktool(repo, ui, path, binary, symlink): + def check(tool, pat, symlink, binary): + tmsg = tool + if pat: + tmsg += " specified for " + pat + if not _findtool(ui, tool): + if pat: # explicitly requested tool deserves a warning + ui.warn(_("couldn't find merge tool %s\n") % tmsg) + else: # configured but non-existing tools are more silent + ui.note(_("couldn't find merge tool %s\n") % tmsg) + elif symlink and not _toolbool(ui, tool, "symlink"): + ui.warn(_("tool %s can't handle symlinks\n") % tmsg) + elif binary and not _toolbool(ui, tool, "binary"): + ui.warn(_("tool %s can't handle binary\n") % tmsg) + elif not util.gui() and _toolbool(ui, tool, "gui"): + ui.warn(_("tool %s requires a GUI\n") % tmsg) + else: + return True + return False + + # forcemerge comes from command line arguments, highest priority + force = ui.config('ui', 'forcemerge') + if force: + toolpath = _findtool(ui, force) + if toolpath: + return (force, '"' + toolpath + '"') + else: + # mimic HGMERGE if given tool not found + return (force, force) + + # HGMERGE takes next precedence + hgmerge = os.environ.get("HGMERGE") + if hgmerge: + return (hgmerge, hgmerge) + + # then patterns + for pat, tool in ui.configitems("merge-patterns"): + mf = match.match(repo.root, '', [pat]) + if mf(path) and check(tool, pat, symlink, False): + toolpath = _findtool(ui, tool) + return (tool, '"' + toolpath + '"') + + # then merge tools + tools = {} + for k, v in ui.configitems("merge-tools"): + t = k.split('.')[0] + if t not in tools: + tools[t] = int(_toolstr(ui, t, "priority", "0")) + names = 
tools.keys() + tools = sorted([(-p, t) for t, p in tools.items()]) + uimerge = ui.config("ui", "merge") + if uimerge: + if uimerge not in names: + return (uimerge, uimerge) + tools.insert(0, (None, uimerge)) # highest priority + tools.append((None, "hgmerge")) # the old default, if found + for p, t in tools: + if check(t, None, symlink, binary): + toolpath = _findtool(ui, t) + return (t, '"' + toolpath + '"') + # internal merge as last resort + return (not (symlink or binary) and "internal:merge" or None, None) + +def _eoltype(data): + "Guess the EOL type of a file" + if '\0' in data: # binary + return None + if '\r\n' in data: # Windows + return '\r\n' + if '\r' in data: # Old Mac + return '\r' + if '\n' in data: # UNIX + return '\n' + return None # unknown + +def _matcheol(file, origfile): + "Convert EOL markers in a file to match origfile" + tostyle = _eoltype(open(origfile, "rb").read()) + if tostyle: + data = open(file, "rb").read() + style = _eoltype(data) + if style: + newdata = data.replace(style, tostyle) + if newdata != data: + open(file, "wb").write(newdata) + +def filemerge(repo, mynode, orig, fcd, fco, fca): + """perform a 3-way merge in the working directory + + mynode = parent node before merge + orig = original local filename before merge + fco = other file context + fca = ancestor file context + fcd = local file context for current/destination file + """ + + def temp(prefix, ctx): + pre = "%s~%s." % (os.path.basename(ctx.path()), prefix) + (fd, name) = tempfile.mkstemp(prefix=pre) + data = repo.wwritedata(ctx.path(), ctx.data()) + f = os.fdopen(fd, "wb") + f.write(data) + f.close() + return name + + def isbin(ctx): + try: + return util.binary(ctx.data()) + except IOError: + return False + + if not fco.cmp(fcd): # files identical? 
+ return None + + ui = repo.ui + fd = fcd.path() + binary = isbin(fcd) or isbin(fco) or isbin(fca) + symlink = 'l' in fcd.flags() + fco.flags() + tool, toolpath = _picktool(repo, ui, fd, binary, symlink) + ui.debug("picked tool '%s' for %s (binary %s symlink %s)\n" % + (tool, fd, binary, symlink)) + + if not tool or tool == 'internal:prompt': + tool = "internal:local" + if ui.promptchoice(_(" no tool found to merge %s\n" + "keep (l)ocal or take (o)ther?") % fd, + (_("&Local"), _("&Other")), 0): + tool = "internal:other" + if tool == "internal:local": + return 0 + if tool == "internal:other": + repo.wwrite(fd, fco.data(), fco.flags()) + return 0 + if tool == "internal:fail": + return 1 + + # do the actual merge + a = repo.wjoin(fd) + b = temp("base", fca) + c = temp("other", fco) + out = "" + back = a + ".orig" + util.copyfile(a, back) + + if orig != fco.path(): + ui.status(_("merging %s and %s to %s\n") % (orig, fco.path(), fd)) + else: + ui.status(_("merging %s\n") % fd) + + ui.debug("my %s other %s ancestor %s\n" % (fcd, fco, fca)) + + # do we attempt to simplemerge first? 
+ try: + premerge = _toolbool(ui, tool, "premerge", not (binary or symlink)) + except error.ConfigError: + premerge = _toolstr(ui, tool, "premerge").lower() + valid = 'keep'.split() + if premerge not in valid: + _valid = ', '.join(["'" + v + "'" for v in valid]) + raise error.ConfigError(_("%s.premerge not valid " + "('%s' is neither boolean nor %s)") % + (tool, premerge, _valid)) + + if premerge: + r = simplemerge.simplemerge(ui, a, b, c, quiet=True) + if not r: + ui.debug(" premerge successful\n") + os.unlink(back) + os.unlink(b) + os.unlink(c) + return 0 + if premerge != 'keep': + util.copyfile(back, a) # restore from backup and try again + + env = dict(HG_FILE=fd, + HG_MY_NODE=short(mynode), + HG_OTHER_NODE=str(fco.changectx()), + HG_BASE_NODE=str(fca.changectx()), + HG_MY_ISLINK='l' in fcd.flags(), + HG_OTHER_ISLINK='l' in fco.flags(), + HG_BASE_ISLINK='l' in fca.flags()) + + if tool == "internal:merge": + r = simplemerge.simplemerge(ui, a, b, c, label=['local', 'other']) + elif tool == 'internal:dump': + a = repo.wjoin(fd) + util.copyfile(a, a + ".local") + repo.wwrite(fd + ".other", fco.data(), fco.flags()) + repo.wwrite(fd + ".base", fca.data(), fca.flags()) + return 1 # unresolved + else: + args = _toolstr(ui, tool, "args", '$local $base $other') + if "$output" in args: + out, a = a, back # read input from backup, write to original + replace = dict(local=a, base=b, other=c, output=out) + args = util.interpolate(r'\$', replace, args, + lambda s: '"%s"' % util.localpath(s)) + r = util.system(toolpath + ' ' + args, cwd=repo.root, environ=env) + + if not r and (_toolbool(ui, tool, "checkconflicts") or + 'conflicts' in _toollist(ui, tool, "check")): + if re.search("^(<<<<<<< .*|=======|>>>>>>> .*)$", fcd.data(), + re.MULTILINE): + r = 1 + + checked = False + if 'prompt' in _toollist(ui, tool, "check"): + checked = True + if ui.promptchoice(_("was merge of '%s' successful (yn)?") % fd, + (_("&Yes"), _("&No")), 1): + r = 1 + + if not r and not checked and 
(_toolbool(ui, tool, "checkchanged") or + 'changed' in _toollist(ui, tool, "check")): + if filecmp.cmp(repo.wjoin(fd), back): + if ui.promptchoice(_(" output file %s appears unchanged\n" + "was merge successful (yn)?") % fd, + (_("&Yes"), _("&No")), 1): + r = 1 + + if _toolbool(ui, tool, "fixeol"): + _matcheol(repo.wjoin(fd), back) + + if r: + ui.warn(_("merging %s failed!\n") % fd) + else: + os.unlink(back) + + os.unlink(b) + os.unlink(c) + return r diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filemerge.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filemerge.pyo Binary files differnew file mode 100644 index 0000000..62ea668 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/filemerge.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/graphmod.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/graphmod.py new file mode 100644 index 0000000..71136ea --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/graphmod.py @@ -0,0 +1,122 @@ +# Revision graph generator for Mercurial +# +# Copyright 2008 Dirkjan Ochtman <dirkjan@ochtman.nl> +# Copyright 2007 Joel Rosdahl <joel@rosdahl.net> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""supports walking the history as DAGs suitable for graphical output + +The most basic format we use is that of:: + + (id, type, data, [parentids]) + +The node and parent ids are arbitrary integers which identify a node in the +context of the graph returned. Type is a constant specifying the node type. +Data depends on type. +""" + +from mercurial.node import nullrev + +CHANGESET = 'C' + +def revisions(repo, start, stop): + """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples + + This generator function walks through the revision history from revision + start to revision stop (which must be less than or equal to start). 
# --- mercurial/graphmod.py (continued) ---
# NOTE(review): this chunk was recovered from a diff dump; the '+' diff
# markers and interleaved patch headers were stripped.  CHANGESET and
# nullrev are module globals defined earlier in graphmod.py, outside
# this chunk.

# NOTE(review): the 'def' line of this generator fell outside the
# recovered chunk; the signature below is reconstructed from the visible
# body (it mirrors filerevs/nodes below) -- confirm against upstream.
def revisions(repo, start, stop):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    Walks the changelog from revision start down to revision stop.  It
    returns a tuple for each node. The node and parent ids are arbitrary
    integers which identify a node in the context of the graph returned.
    """
    cur = start
    while cur >= stop:
        ctx = repo[cur]
        # parent revisions, minus the null revision
        parents = set([p.rev() for p in ctx.parents() if p.rev() != nullrev])
        yield (cur, CHANGESET, ctx, sorted(parents))
        cur -= 1

def filerevs(repo, path, start, stop, limit=None):
    """file cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks through the revision history of a single
    file from revision start down to revision stop.  At most 'limit'
    entries are yielded when 'limit' is not None.
    """
    filerev = len(repo.file(path)) - 1
    rev = stop + 1
    count = 0
    while filerev >= 0 and rev > stop:
        fctx = repo.filectx(path, fileid=filerev)
        # only parents that touched the same path belong to this file DAG
        parents = set([f.linkrev() for f in fctx.parents() if f.path() == path])
        rev = fctx.rev()
        if rev <= start:
            yield (rev, CHANGESET, fctx.changectx(), sorted(parents))
            count += 1
            if count == limit:
                break
        filerev -= 1

def nodes(repo, nodes):
    """cset DAG generator yielding (id, CHANGESET, ctx, [parentids]) tuples

    This generator function walks the given nodes. It only returns parents
    that are in nodes, too.
    """
    include = set(nodes)
    for node in nodes:
        ctx = repo[node]
        parents = set([p.rev() for p in ctx.parents() if p.node() in include])
        yield (ctx.rev(), CHANGESET, ctx, sorted(parents))

def colored(dag):
    """annotates a DAG with colored edge information

    For each DAG node this function emits tuples::

      (id, type, data, (col, color), [(col, nextcol, color)])

    with the following new elements:

      - Tuple (col, color) with column and color index for the current node
      - A list of tuples indicating the edges between the current node and its
        parents.
    """
    seen = []
    colors = {}
    newcolor = 1
    # 'kind' renamed from 'type' and 'nextseen' from 'next', which
    # shadowed builtins
    for (cur, kind, data, parents) in dag:

        # Compute seen and nextseen
        if cur not in seen:
            seen.append(cur) # new head
            colors[cur] = newcolor
            newcolor += 1

        col = seen.index(cur)
        color = colors.pop(cur)
        nextseen = seen[:]

        # Add parents to nextseen, replacing cur's column
        addparents = [p for p in parents if p not in nextseen]
        nextseen[col:col + 1] = addparents

        # Set colors for the parents: the first parent inherits cur's color
        for i, p in enumerate(addparents):
            if not i:
                colors[p] = color
            else:
                colors[p] = newcolor
                newcolor += 1

        # Add edges to the graph
        edges = []
        for ecol, eid in enumerate(seen):
            if eid in nextseen:
                edges.append((ecol, nextseen.index(eid), colors[eid]))
            elif eid == cur:
                for p in parents:
                    edges.append((ecol, nextseen.index(p), color))

        # Yield and move on
        yield (cur, kind, data, (col, color), edges)
        seen = nextseen

# --- (binary file mercurial/graphmod.pyo omitted from the dump) ---

# --- mercurial/hbisect.py ---
# changelog bisection for mercurial
#
# Copyright 2007 Matt Mackall
# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
#
# Inspired by git bisect, extension skeleton taken from mq.py.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import os
from i18n import _
from node import short, hex
import util

def bisect(changelog, state):
    """find the next node (if any) for testing during a bisect search.

    returns a (nodes, number, good) tuple.

    'nodes' is the final result of the bisect if 'number' is 0.
    Otherwise 'number' indicates the remaining possible candidates for
    the search and 'nodes' contains the next bisect target.
    'good' is True if bisect is searching for a first good changeset, False
    if searching for a first bad one.
    """

    clparents = changelog.parentrevs
    skip = set([changelog.rev(n) for n in state['skip']])

    def buildancestors(bad, good):
        # Returns (badrev, ancestors) where ancestors[rev] is [] for
        # revisions inside the candidate range and None outside it;
        # ancestors is None when badrev does not descend from the good revs.
        # only the earliest bad revision matters
        badrev = min([changelog.rev(n) for n in bad])
        goodrevs = [changelog.rev(n) for n in good]
        goodrev = min(goodrevs)
        # build visit array
        ancestors = [None] * (len(changelog) + 1) # an extra for [-1]

        # set nodes descended from goodrev
        ancestors[goodrev] = []
        for rev in xrange(goodrev + 1, len(changelog)):
            for prev in clparents(rev):
                if ancestors[prev] == []:
                    ancestors[rev] = []

        # clear good revs from array
        for node in goodrevs:
            ancestors[node] = None
        for rev in xrange(len(changelog), -1, -1):
            if ancestors[rev] is None:
                for prev in clparents(rev):
                    ancestors[prev] = None

        if ancestors[badrev] is None:
            return badrev, None
        return badrev, ancestors

    good = 0
    badrev, ancestors = buildancestors(state['bad'], state['good'])
    if not ancestors: # looking for bad to good transition?
        good = 1
        badrev, ancestors = buildancestors(state['good'], state['bad'])
    bad = changelog.node(badrev)
    if not ancestors: # now we're confused
        if len(state['bad']) == 1 and len(state['good']) == 1:
            raise util.Abort(_("starting revisions are not directly related"))
        raise util.Abort(_("inconsistent state, %s:%s is good and bad")
                         % (badrev, short(bad)))

    # build children dict
    children = {}
    visit = [badrev]
    candidates = []
    while visit:
        rev = visit.pop(0)
        if ancestors[rev] == []:
            candidates.append(rev)
            for prev in clparents(rev):
                if prev != -1:
                    if prev in children:
                        children[prev].append(rev)
                    else:
                        children[prev] = [rev]
                    visit.append(prev)

    candidates.sort()
    # have we narrowed it down to one entry?
    # or have all other candidates besides 'bad' been skipped?
    tot = len(candidates)
    unskipped = [c for c in candidates if (c not in skip) and (c != badrev)]
    if tot == 1 or not unskipped:
        return ([changelog.node(rev) for rev in candidates], 0, good)
    perfect = tot // 2

    # find the best node to test
    best_rev = None
    best_len = -1
    poison = set()
    for rev in candidates:
        if rev in poison:
            # poison children
            poison.update(children.get(rev, []))
            continue

        a = ancestors[rev] or [rev]
        ancestors[rev] = None

        x = len(a) # number of ancestors
        y = tot - x # number of non-ancestors
        value = min(x, y) # how good is this test?
        if value > best_len and rev not in skip:
            best_len = value
            best_rev = rev
            if value == perfect: # found a perfect candidate? quit early
                break

        if y < perfect and rev not in skip: # all downhill from here?
            # poison children
            poison.update(children.get(rev, []))
            continue

        for c in children.get(rev, []):
            if ancestors[c]:
                ancestors[c] = list(set(ancestors[c] + a))
            else:
                ancestors[c] = a + [c]

    assert best_rev is not None
    best_node = changelog.node(best_rev)

    return ([best_node], tot, good)


def load_state(repo):
    """read .hg/bisect.state into a {'good': [], 'bad': [], 'skip': []} dict

    Each stored line is "<kind> <node-hex>"; nodes are resolved through
    repo.lookup().  Raises util.Abort on an unknown kind.
    """
    state = {'good': [], 'bad': [], 'skip': []}
    if os.path.exists(repo.join("bisect.state")):
        for l in repo.opener("bisect.state"):
            kind, node = l[:-1].split()
            node = repo.lookup(node)
            if kind not in state:
                raise util.Abort(_("unknown bisect kind %s") % kind)
            state[kind].append(node)
    return state


def save_state(repo, state):
    """atomically write the bisect state back to .hg/bisect.state

    Takes the repo wlock while writing; the atomictemp file is only
    renamed into place on success.
    """
    f = repo.opener("bisect.state", "w", atomictemp=True)
    wlock = repo.wlock()
    try:
        for kind in state:
            for node in state[kind]:
                f.write("%s %s\n" % (kind, hex(node)))
        f.rename()
    finally:
        wlock.release()

# --- (binary file mercurial/hbisect.pyo omitted from the dump) ---

# --- mercurial/help.py (header; module body continues in the next chunk) ---
# help.py - help data for mercurial
#
# Copyright 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
# --- mercurial/help.py (continued) ---

from i18n import gettext, _
import sys, os
import extensions


def moduledoc(file):
    '''return the top-level python documentation for the given file

    Loosely inspired by pydoc.source_synopsis(), but rewritten to
    handle triple quotes and to return the whole text instead of just
    the synopsis'''
    result = []

    # skip leading comment and blank lines before the docstring
    line = file.readline()
    while line[:1] == '#' or not line.strip():
        line = file.readline()
        if not line:
            break

    start = line[:3]
    if start == '"""' or start == "'''":
        line = line[3:]
        while line:
            if line.rstrip().endswith(start):
                line = line.split(start)[0]
                if line:
                    result.append(line)
                break
            # (an 'elif not line: return None' branch here was unreachable
            # -- the loop condition guarantees 'line' is non-empty -- and
            # has been removed; an unterminated docstring yields the text
            # read so far, as before)
            result.append(line)
            line = file.readline()
    else:
        return None

    return ''.join(result)

def listexts(header, exts, maxlength, indent=1):
    '''return a text listing of the given extensions'''
    if not exts:
        return ''
    result = '\n%s\n\n' % header
    for name, desc in sorted(exts.iteritems()):
        # ':name:' label padded to the longest name, then the description
        result += '%s%-*s %s\n' % (' ' * indent, maxlength + 2,
                                   ':%s:' % name, desc)
    return result

def extshelp():
    '''return the "extensions" topic text with the enabled and disabled
    extension listings appended'''
    doc = loaddoc('extensions')()

    exts, maxlength = extensions.enabled()
    doc += listexts(_('enabled extensions:'), exts, maxlength)

    exts, maxlength = extensions.disabled()
    doc += listexts(_('disabled extensions:'), exts, maxlength)

    return doc

def loaddoc(topic):
    """Return a delayed loader for help/topic.txt."""

    def loader():
        # frozen binaries (py2exe etc.) keep the help directory next to
        # the executable rather than next to this module
        if hasattr(sys, 'frozen'):
            module = sys.executable
        else:
            module = __file__
        base = os.path.dirname(module)

        # 'parent' renamed from 'dir', which shadowed the builtin; if
        # neither candidate exists, the last path tried is used and the
        # open() below will raise
        for parent in ('.', '..'):
            docdir = os.path.join(base, parent, 'help')
            if os.path.isdir(docdir):
                break

        path = os.path.join(docdir, topic + ".txt")
        doc = gettext(open(path).read())
        # let registered hooks rewrite the topic text
        for rewriter in helphooks.get(topic, []):
            doc = rewriter(topic, doc)
        return doc

    return loader

helptable = [
    (["config", "hgrc"], _("Configuration Files"), loaddoc('config')),
    (["dates"], _("Date Formats"), loaddoc('dates')),
    (["patterns"], _("File Name Patterns"), loaddoc('patterns')),
    (['environment', 'env'], _('Environment Variables'),
     loaddoc('environment')),
    (['revs', 'revisions'], _('Specifying Single Revisions'),
     loaddoc('revisions')),
    (['mrevs', 'multirevs'], _('Specifying Multiple Revisions'),
     loaddoc('multirevs')),
    (['revset', 'revsets'], _("Specifying Revision Sets"), loaddoc('revsets')),
    (['diffs'], _('Diff Formats'), loaddoc('diffs')),
    (['merge-tools'], _('Merge Tools'), loaddoc('merge-tools')),
    (['templating', 'templates'], _('Template Usage'),
     loaddoc('templates')),
    (['urls'], _('URL Paths'), loaddoc('urls')),
    (["extensions"], _("Using additional features"), extshelp),
    (["subrepo", "subrepos"], _("Subrepositories"), loaddoc('subrepos')),
    (["hgweb"], _("Configuring hgweb"), loaddoc('hgweb')),
    (["glossary"], _("Glossary"), loaddoc('glossary')),
]

# Map topics to lists of callable taking the current topic help and
# returning the updated version
helphooks = {
}

def addtopichook(topic, rewriter):
    """register a rewriter hook for the given help topic"""
    helphooks.setdefault(topic, []).append(rewriter)

# --- (binary file mercurial/help.pyo omitted from the dump) ---
# --- mercurial/help/config.txt (beginning; continues in the next chunk) ---
# Mercurial reads configuration data from several files, if they exist.
# Below we list the most specific file first.
+ +On Windows, these configuration files are read: + +- ``<repo>\.hg\hgrc`` +- ``%USERPROFILE%\.hgrc`` +- ``%USERPROFILE%\mercurial.ini`` +- ``%HOME%\.hgrc`` +- ``%HOME%\mercurial.ini`` +- ``C:\mercurial\mercurial.ini`` (unless regkey or hgrc.d\ or mercurial.ini found) +- ``HKEY_LOCAL_MACHINE\SOFTWARE\Mercurial`` (unless hgrc.d\ or mercurial.ini found) +- ``<hg.exe-dir>\hgrc.d\*.rc`` (unless mercurial.ini found) +- ``<hg.exe-dir>\mercurial.ini`` + +On Unix, these files are read: + +- ``<repo>/.hg/hgrc`` +- ``$HOME/.hgrc`` +- ``/etc/mercurial/hgrc`` +- ``/etc/mercurial/hgrc.d/*.rc`` +- ``<install-root>/etc/mercurial/hgrc`` +- ``<install-root>/etc/mercurial/hgrc.d/*.rc`` + +If there is a per-repository configuration file which is not owned by +the active user, Mercurial will warn you that the file is skipped:: + + not trusting file <repo>/.hg/hgrc from untrusted user USER, group GROUP + +If this bothers you, the warning can be silenced (the file would still +be ignored) or trust can be established. Use one of the following +settings, the syntax is explained below: + +- ``ui.report_untrusted = False`` +- ``trusted.users = USER`` +- ``trusted.groups = GROUP`` + +The configuration files for Mercurial use a simple ini-file format. A +configuration file consists of sections, led by a ``[section]`` header +and followed by ``name = value`` entries:: + + [ui] + username = Firstname Lastname <firstname.lastname@example.net> + verbose = True + +The above entries will be referred to as ``ui.username`` and +``ui.verbose``, respectively. 
Please see the hgrc man page for a full +description of the possible configuration values: + +- on Unix-like systems: ``man hgrc`` +- online: http://www.selenic.com/mercurial/hgrc.5.html diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/dates.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/dates.txt new file mode 100644 index 0000000..80ec6f0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/dates.txt @@ -0,0 +1,36 @@ +Some commands allow the user to specify a date, e.g.: + +- backout, commit, import, tag: Specify the commit date. +- log, revert, update: Select revision(s) by date. + +Many date formats are valid. Here are some examples: + +- ``Wed Dec 6 13:18:29 2006`` (local timezone assumed) +- ``Dec 6 13:18 -0600`` (year assumed, time offset provided) +- ``Dec 6 13:18 UTC`` (UTC and GMT are aliases for +0000) +- ``Dec 6`` (midnight) +- ``13:18`` (today assumed) +- ``3:39`` (3:39AM assumed) +- ``3:39pm`` (15:39) +- ``2006-12-06 13:18:29`` (ISO 8601 format) +- ``2006-12-6 13:18`` +- ``2006-12-6`` +- ``12-6`` +- ``12/6`` +- ``12/6/6`` (Dec 6 2006) + +Lastly, there is Mercurial's internal format: + +- ``1165432709 0`` (Wed Dec 6 13:18:29 2006 UTC) + +This is the internal representation format for dates. unixtime is the +number of seconds since the epoch (1970-01-01 00:00 UTC). offset is +the offset of the local timezone, in seconds west of UTC (negative if +the timezone is east of UTC). 
+ +The log command also accepts date ranges: + +- ``<{datetime}`` - at or before a given date/time +- ``>{datetime}`` - on or after a given date/time +- ``{datetime} to {datetime}`` - a date range, inclusive +- ``-{days}`` - within a given number of days of today diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/diffs.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/diffs.txt new file mode 100644 index 0000000..9ede0a5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/diffs.txt @@ -0,0 +1,29 @@ +Mercurial's default format for showing changes between two versions of +a file is compatible with the unified format of GNU diff, which can be +used by GNU patch and many other standard tools. + +While this standard format is often enough, it does not encode the +following information: + +- executable status and other permission bits +- copy or rename information +- changes in binary files +- creation or deletion of empty files + +Mercurial also supports the extended diff format from the git VCS +which addresses these limitations. The git diff format is not produced +by default because a few widespread tools still do not understand this +format. + +This means that when generating diffs from a Mercurial repository +(e.g. with :hg:`export`), you should be careful about things like file +copies and renames or other things mentioned above, because when +applying a standard diff to a different repository, this extra +information is lost. Mercurial's internal operations (like push and +pull) are not affected by this, because they use an internal binary +format for communicating changes. + +To make Mercurial produce the git extended diff format, use the --git +option available for many commands, or set 'git = True' in the [diff] +section of your configuration file. You do not need to set this option +when importing diffs in this format or using them in the mq extension. 
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/environment.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/environment.txt new file mode 100644 index 0000000..5b3f22c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/environment.txt @@ -0,0 +1,93 @@ +HG + Path to the 'hg' executable, automatically passed when running + hooks, extensions or external tools. If unset or empty, this is + the hg executable's name if it's frozen, or an executable named + 'hg' (with %PATHEXT% [defaulting to COM/EXE/BAT/CMD] extensions on + Windows) is searched. + +HGEDITOR + This is the name of the editor to run when committing. See EDITOR. + + (deprecated, use configuration file) + +HGENCODING + This overrides the default locale setting detected by Mercurial. + This setting is used to convert data including usernames, + changeset descriptions, tag names, and branches. This setting can + be overridden with the --encoding command-line option. + +HGENCODINGMODE + This sets Mercurial's behavior for handling unknown characters + while transcoding user input. The default is "strict", which + causes Mercurial to abort if it can't map a character. Other + settings include "replace", which replaces unknown characters, and + "ignore", which drops them. This setting can be overridden with + the --encodingmode command-line option. + +HGENCODINGAMBIGUOUS + This sets Mercurial's behavior for handling characters with + "ambiguous" widths like accented Latin characters with East Asian + fonts. By default, Mercurial assumes ambiguous characters are + narrow, set this variable to "wide" if such characters cause + formatting problems. + +HGMERGE + An executable to use for resolving merge conflicts. The program + will be executed with three arguments: local file, remote file, + ancestor file. + + (deprecated, use configuration file) + +HGRCPATH + A list of files or directories to search for configuration + files. 
Item separator is ":" on Unix, ";" on Windows. If HGRCPATH + is not set, platform default search path is used. If empty, only + the .hg/hgrc from the current repository is read. + + For each element in HGRCPATH: + + - if it's a directory, all files ending with .rc are added + - otherwise, the file itself will be added + +HGPLAIN + When set, this disables any configuration settings that might + change Mercurial's default output. This includes encoding, + defaults, verbose mode, debug mode, quiet mode, tracebacks, and + localization. This can be useful when scripting against Mercurial + in the face of existing user configuration. + + Equivalent options set via command line flags or environment + variables are not overridden. + +HGUSER + This is the string used as the author of a commit. If not set, + available values will be considered in this order: + + - HGUSER (deprecated) + - configuration files from the HGRCPATH + - EMAIL + - interactive prompt + - LOGNAME (with ``@hostname`` appended) + + (deprecated, use configuration file) + +EMAIL + May be used as the author of a commit; see HGUSER. + +LOGNAME + May be used as the author of a commit; see HGUSER. + +VISUAL + This is the name of the editor to use when committing. See EDITOR. + +EDITOR + Sometimes Mercurial needs to open a text file in an editor for a + user to modify, for example when writing commit messages. The + editor it uses is determined by looking at the environment + variables HGEDITOR, VISUAL and EDITOR, in that order. The first + non-empty one is chosen. If all of them are empty, the editor + defaults to 'vi'. + +PYTHONPATH + This is used by Python to find imported modules and may need to be + set appropriately if this Mercurial is not installed system-wide. 
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/extensions.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/extensions.txt new file mode 100644 index 0000000..f3d2992 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/extensions.txt @@ -0,0 +1,33 @@ +Mercurial has the ability to add new features through the use of +extensions. Extensions may add new commands, add options to +existing commands, change the default behavior of commands, or +implement hooks. + +Extensions are not loaded by default for a variety of reasons: +they can increase startup overhead; they may be meant for advanced +usage only; they may provide potentially dangerous abilities (such +as letting you destroy or modify history); they might not be ready +for prime time; or they may alter some usual behaviors of stock +Mercurial. It is thus up to the user to activate extensions as +needed. + +To enable the "foo" extension, either shipped with Mercurial or in the +Python search path, create an entry for it in your configuration file, +like this:: + + [extensions] + foo = + +You may also specify the full path to an extension:: + + [extensions] + myfeature = ~/.hgext/myfeature.py + +To explicitly disable an extension enabled in a configuration file of +broader scope, prepend its path with !:: + + [extensions] + # disabling extension bar residing in /path/to/extension/bar.py + bar = !/path/to/extension/bar.py + # ditto, but no path was supplied for extension baz + baz = ! diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/glossary.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/glossary.txt new file mode 100644 index 0000000..003fb56 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/glossary.txt @@ -0,0 +1,368 @@ +Ancestor + Any changeset that can be reached by an unbroken chain of parent + changesets from a given changeset. 
More precisely, the ancestors + of a changeset can be defined by two properties: a parent of a + changeset is an ancestor, and a parent of an ancestor is an + ancestor. See also: 'Descendant'. + +Branch + (Noun) A child changeset that has been created from a parent that + is not a head. These are known as topological branches, see + 'Branch, topological'. If a topological branch is named, it becomes + a named branch. If a topological branch is not named, it becomes + an anonymous branch. See 'Branch, anonymous' and 'Branch, named'. + + Branches may be created when changes are pulled from or pushed to + a remote repository, since new heads may be created by these + operations. Note that the term branch can also be used informally + to describe a development process in which certain development is + done independently of other development. This is sometimes done + explicitly with a named branch, but it can also be done locally, + using bookmarks or clones and anonymous branches. + + Example: "The experimental branch". + + (Verb) The action of creating a child changeset which results in + its parent having more than one child. + + Example: "I'm going to branch at X". + +Branch, anonymous + Every time a new child changeset is created from a parent that is not + a head and the name of the branch is not changed, a new anonymous + branch is created. + +Branch, closed + A named branch whose branch heads have all been closed. + +Branch, default + The branch assigned to a changeset when no name has previously been + assigned. + +Branch head + See 'Head, branch'. + +Branch, inactive + If a named branch has no topological heads, it is considered to be + inactive. As an example, a feature branch becomes inactive when it + is merged into the default branch. The :hg:`branches` command + shows inactive branches by default, though they can be hidden with + :hg:`branches --active`. + + NOTE: this concept is deprecated because it is too implicit. 
+ Branches should now be explicitly closed using :hg:`commit + --close-branch` when they are no longer needed. + +Branch, named + A collection of changesets which have the same branch name. By + default, children of a changeset in a named branch belong to the + same named branch. A child can be explicitly assigned to a + different branch. See :hg:`help branch`, :hg:`help branches` and + :hg:`commit --close-branch` for more information on managing + branches. + + Named branches can be thought of as a kind of namespace, dividing + the collection of changesets that comprise the repository into a + collection of disjoint subsets. A named branch is not necessarily + a topological branch. If a new named branch is created from the + head of another named branch, or the default branch, but no + further changesets are added to that previous branch, then that + previous branch will be a branch in name only. + +Branch tip + See 'Tip, branch'. + +Branch, topological + Every time a new child changeset is created from a parent that is + not a head, a new topological branch is created. If a topological + branch is named, it becomes a named branch. If a topological + branch is not named, it becomes an anonymous branch of the + current, possibly default, branch. + +Changelog + A record of the changesets in the order in which they were added + to the repository. This includes details such as changeset id, + author, commit message, date, and list of changed files. + +Changeset + A snapshot of the state of the repository used to record a change. + +Changeset, child + The converse of parent changeset: if P is a parent of C, then C is + a child of P. There is no limit to the number of children that a + changeset may have. + +Changeset id + A SHA-1 hash that uniquely identifies a changeset. It may be + represented as either a "long" 40 hexadecimal digit string, or a + "short" 12 hexadecimal digit string. + +Changeset, merge + A changeset with two parents. 
This occurs when a merge is
+    committed.
+
+Changeset, parent
+    A revision upon which a child changeset is based. Specifically, a
+    parent changeset of a changeset C is a changeset whose node
+    immediately precedes C in the DAG. Changesets have at most two
+    parents.
+
+Checkout
+    (Noun) The working directory being updated to a specific
+    revision. This use should probably be avoided where possible, as
+    changeset is much more appropriate than checkout in this context.
+
+    Example: "I'm using checkout X."
+
+    (Verb) Updating the working directory to a specific changeset. See
+    :hg:`help update`.
+
+    Example: "I'm going to check out changeset X."
+
+Child changeset
+    See 'Changeset, child'.
+
+Close changeset
+    See 'Head, closed branch'.
+
+Closed branch
+    See 'Branch, closed'.
+
+Clone
+    (Noun) An entire or partial copy of a repository. The partial
+    clone must be in the form of a revision and its ancestors.
+
+    Example: "Is your clone up to date?".
+
+    (Verb) The process of creating a clone, using :hg:`clone`.
+
+    Example: "I'm going to clone the repository".
+
+Closed branch head
+    See 'Head, closed branch'.
+
+Commit
+    (Noun) A synonym for changeset.
+
+    Example: "Is the bug fixed in your recent commit?"
+
+    (Verb) The act of recording changes to a repository. When files
+    are committed in a working directory, Mercurial finds the
+    differences between the committed files and their parent
+    changeset, creating a new changeset in the repository.
+
+    Example: "You should commit those changes now."
+
+Cset
+    A common abbreviation of the term changeset.
+
+DAG
+    The repository of changesets of a distributed version control
+    system (DVCS) can be described as a directed acyclic graph (DAG),
+    consisting of nodes and edges, where nodes correspond to
+    changesets and edges imply a parent -> child relation. This graph
+    can be visualized by graphical tools such as :hg:`glog`
+    (graphlog).
In Mercurial, the DAG is limited by the requirement + for children to have at most two parents. + +Default branch + See 'Branch, default'. + +Descendant + Any changeset that can be reached by a chain of child changesets + from a given changeset. More precisely, the descendants of a + changeset can be defined by two properties: the child of a + changeset is a descendant, and the child of a descendant is a + descendant. See also: 'Ancestor'. + +Diff + (Noun) The difference between the contents and attributes of files + in two changesets or a changeset and the current working + directory. The difference is usually represented in a standard + form called a "diff" or "patch". The "git diff" format is used + when the changes include copies, renames, or changes to file + attributes, none of which can be represented/handled by classic + "diff" and "patch". + + Example: "Did you see my correction in the diff?" + + (Verb) Diffing two changesets is the action of creating a diff or + patch. + + Example: "If you diff with changeset X, you will see what I mean." + +Directory, working + The working directory represents the state of the files tracked by + Mercurial, that will be recorded in the next commit. The working + directory initially corresponds to the snapshot at an existing + changeset, known as the parent of the working directory. See + 'Parent, working directory'. The state may be modified by changes + to the files introduced manually or by a merge. The repository + metadata exists in the .hg directory inside the working directory. + +Graph + See DAG and :hg:`help graphlog`. + +Head + The term 'head' may be used to refer to both a branch head or a + repository head, depending on the context. See 'Head, branch' and + 'Head, repository' for specific definitions. + + Heads are where development generally takes place and are the + usual targets for update and merge operations. + +Head, branch + A changeset with no descendants on the same named branch. 
+ +Head, closed branch + A changeset that marks a head as no longer interesting. The closed + head is no longer listed by :hg:`heads`. A branch is considered + closed when all its heads are closed and consequently is not + listed by :hg:`branches`. + +Head, repository + A topological head which has not been closed. + +Head, topological + A changeset with no children in the repository. + +History, immutable + Once committed, changesets cannot be altered. Extensions which + appear to change history actually create new changesets that + replace existing ones, and then destroy the old changesets. Doing + so in public repositories can result in old changesets being + reintroduced to the repository. + +History, rewriting + The changesets in a repository are immutable. However, extensions + to Mercurial can be used to alter the repository, usually in such + a way as to preserve changeset contents. + +Immutable history + See 'History, immutable'. + +Merge changeset + See 'Changeset, merge'. + +Manifest + Each changeset has a manifest, which is the list of files that are + tracked by the changeset. + +Merge + Used to bring together divergent branches of work. When you update + to a changeset and then merge another changeset, you bring the + history of the latter changeset into your working directory. Once + conflicts are resolved (and marked), this merge may be committed + as a merge changeset, bringing two branches together in the DAG. + +Named branch + See 'Branch, named'. + +Null changeset + The empty changeset. It is the parent state of newly-initialized + repositories and repositories with no checked out revision. It is + thus the parent of root changesets and the effective ancestor when + merging unrelated changesets. Can be specified by the alias 'null' + or by the changeset ID '000000000000'. + +Parent + See 'Changeset, parent'. + +Parent changeset + See 'Changeset, parent'. 
+ +Parent, working directory + The working directory parent reflects a virtual revision which is + the child of the changeset (or two changesets with an uncommitted + merge) shown by :hg:`parents`. This is changed with + :hg:`update`. Other commands to see the working directory parent + are :hg:`summary` and :hg:`id`. Can be specified by the alias ".". + +Patch + (Noun) The product of a diff operation. + + Example: "I've sent you my patch." + + (Verb) The process of using a patch file to transform one + changeset into another. + + Example: "You will need to patch that revision." + +Pull + An operation in which changesets in a remote repository which are + not in the local repository are brought into the local + repository. Note that this operation without special arguments + only updates the repository, it does not update the files in the + working directory. See :hg:`help pull`. + +Push + An operation in which changesets in a local repository which are + not in a remote repository are sent to the remote repository. Note + that this operation only adds changesets which have been committed + locally to the remote repository. Uncommitted changes are not + sent. See :hg:`help push`. + +Repository + The metadata describing all recorded states of a collection of + files. Each recorded state is represented by a changeset. A + repository is usually (but not always) found in the ``.hg`` + subdirectory of a working directory. Any recorded state can be + recreated by "updating" a working directory to a specific + changeset. + +Repository head + See 'Head, repository'. + +Revision + A state of the repository at some point in time. Earlier revisions + can be updated to by using :hg:`update`. See also 'Revision + number'; See also 'Changeset'. + +Revision number + This integer uniquely identifies a changeset in a specific + repository. It represents the order in which changesets were added + to a repository, starting with revision number 0. 
Note that the + revision number may be different in each clone of a repository. To + identify changesets uniquely between different clones, see + 'Changeset id'. + +Revlog + History storage mechanism used by Mercurial. It is a form of delta + encoding, with occasional full revision of data followed by delta + of each successive revision. It includes data and an index + pointing to the data. + +Rewriting history + See 'History, rewriting'. + +Root + A changeset that has only the null changeset as its parent. Most + repositories have only a single root changeset. + +Tip + The changeset with the highest revision number. It is the changeset + most recently added in a repository. + +Tip, branch + The head of a given branch with the highest revision number. When + a branch name is used as a revision identifier, it refers to the + branch tip. See also 'Branch, head'. Note that because revision + numbers may be different in different repository clones, the + branch tip may be different in different cloned repositories. + +Update + (Noun) Another synonym of changeset. + + Example: "I've pushed an update". + + (Verb) This term is usually used to describe updating the state of + the working directory to that of a specific changeset. See + :hg:`help update`. + + Example: "You should update". + +Working directory + See 'Directory, working'. + +Working directory parent + See 'Parent, working directory'. diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/hgweb.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/hgweb.txt new file mode 100644 index 0000000..e1ff463 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/hgweb.txt @@ -0,0 +1,46 @@ +Mercurial's internal web server, hgweb, can serve either a single +repository, or a collection of them. In the latter case, a special +configuration file can be used to specify the repository paths to use +and global web configuration options. 
+ +This file uses the same syntax as hgrc configuration files, but only +the following sections are recognized: + + - web + - paths + - collections + +The ``web`` section can specify all the settings described in the web +section of the hgrc documentation. + +The ``paths`` section provides mappings of physical repository +paths to virtual ones. For instance:: + + [paths] + projects/a = /foo/bar + projects/b = /baz/quux + web/root = /real/root/* + / = /real/root2/* + virtual/root2 = /real/root2/** + +- The first two entries make two repositories in different directories + appear under the same directory in the web interface +- The third entry maps every Mercurial repository found in '/real/root' + into 'web/root'. This format is preferred over the [collections] one, + since using absolute paths as configuration keys is not supported on every + platform (especially on Windows). +- The fourth entry is a special case mapping all repositories in + '/real/root2' in the root of the virtual directory. +- The fifth entry recursively finds all repositories under the real + root, and maps their relative paths under the virtual root. + +The ``collections`` section provides mappings of trees of physical +repositories paths to virtual ones, though the paths syntax is generally +preferred. For instance:: + + [collections] + /foo = /foo + +Here, the left side will be stripped off all repositories found in the +right side. Thus ``/foo/bar`` and ``foo/quux/baz`` will be listed as +``bar`` and ``quux/baz`` respectively. diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/merge-tools.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/merge-tools.txt new file mode 100644 index 0000000..7324fe4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/merge-tools.txt @@ -0,0 +1,110 @@ +To merge files Mercurial uses merge tools. + +A merge tool combines two different versions of a file into a merged +file. 
Merge tools are given the two files and the greatest common +ancestor of the two file versions, so they can determine the changes +made on both branches. + +Merge tools are used both for :hg:`resolve`, :hg:`merge`, :hg:`update`, +:hg:`backout` and in several extensions. + +Usually, the merge tool tries to automatically reconcile the files by +combining all non-overlapping changes that occurred separately in +the two different evolutions of the same initial base file. Furthermore, some +interactive merge programs make it easier to manually resolve +conflicting merges, either in a graphical way, or by inserting some +conflict markers. Mercurial does not include any interactive merge +programs but relies on external tools for that. + +Available merge tools +""""""""""""""""""""" + +External merge tools and their properties are configured in the +merge-tools configuration section - see hgrc(5) - but they can often just +be named by their executable. + +A merge tool is generally usable if its executable can be found on the +system and if it can handle the merge. The executable is found if it +is an absolute or relative executable path or the name of an +application in the executable search path. The tool is assumed to be +able to handle the merge if it can handle symlinks if the file is a +symlink, if it can handle binary files if the file is binary, and if a +GUI is available if the tool requires a GUI. + +There are some internal merge tools which can be used. The internal +merge tools are: + +``internal:merge`` + Uses the internal non-interactive simple merge algorithm for merging + files. It will fail if there are any conflicts and leave markers in + the partially merged file. + +``internal:fail`` + Rather than attempting to merge files that were modified on both + branches, it marks them as unresolved. The resolve command must be + used to resolve these conflicts. + +``internal:local`` + Uses the local version of files as the merged version. 
+ +``internal:other`` + Uses the other version of files as the merged version. + +``internal:prompt`` + Asks the user which of the local or the other version to keep as + the merged version. + +``internal:dump`` + Creates three versions of the files to merge, containing the + contents of local, other and base. These files can then be used to + perform a merge manually. If the file to be merged is named + ``a.txt``, these files will accordingly be named ``a.txt.local``, + ``a.txt.other`` and ``a.txt.base`` and they will be placed in the + same directory as ``a.txt``. + +Internal tools are always available and do not require a GUI but will by default +not handle symlinks or binary files. + +Choosing a merge tool +""""""""""""""""""""" + +Mercurial uses these rules when deciding which merge tool to use: + +1. If a tool has been specified with the --tool option to merge or resolve, it + is used. If it is the name of a tool in the merge-tools configuration, its + configuration is used. Otherwise the specified tool must be executable by + the shell. + +2. If the ``HGMERGE`` environment variable is present, its value is used and + must be executable by the shell. + +3. If the filename of the file to be merged matches any of the patterns in the + merge-patterns configuration section, the first usable merge tool + corresponding to a matching pattern is used. Here, binary capabilities of the + merge tool are not considered. + +4. If ui.merge is set it will be considered next. If the value is not the name + of a configured tool, the specified value is used and must be executable by + the shell. Otherwise the named tool is used if it is usable. + +5. If any usable merge tools are present in the merge-tools configuration + section, the one with the highest priority is used. + +6. If a program named ``hgmerge`` can be found on the system, it is used - but + it will by default not be used for symlinks and binary files. + +7. 
If the file to be merged is not binary and is not a symlink, then + ``internal:merge`` is used. + +8. The merge of the file fails and must be resolved before commit. + +.. note:: + After selecting a merge program, Mercurial will by default attempt + to merge the files using a simple merge algorithm first. Only if it doesn't + succeed because of conflicting changes Mercurial will actually execute the + merge program. Whether to use the simple merge algorithm first can be + controlled by the premerge setting of the merge tool. Premerge is enabled by + default unless the file is binary or a symlink. + +See the merge-tools and ui sections of hgrc(5) for details on the +configuration of merge tools. diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/multirevs.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/multirevs.txt new file mode 100644 index 0000000..c8a2833 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/multirevs.txt @@ -0,0 +1,13 @@ +When Mercurial accepts more than one revision, they may be specified +individually, or provided as a topologically continuous range, +separated by the ":" character. + +The syntax of range notation is [BEGIN]:[END], where BEGIN and END are +revision identifiers. Both BEGIN and END are optional. If BEGIN is not +specified, it defaults to revision number 0. If END is not specified, +it defaults to the tip. The range ":" thus means "all revisions". + +If BEGIN is greater than END, revisions are treated in reverse order. + +A range acts as a closed interval. This means that a range of 3:5 +gives 3, 4 and 5. Similarly, a range of 9:6 gives 9, 8, 7, and 6. 
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/patterns.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/patterns.txt new file mode 100644 index 0000000..4140170 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/patterns.txt @@ -0,0 +1,41 @@ +Mercurial accepts several notations for identifying one or more files +at a time. + +By default, Mercurial treats filenames as shell-style extended glob +patterns. + +Alternate pattern notations must be specified explicitly. + +To use a plain path name without any pattern matching, start it with +``path:``. These path names must completely match starting at the +current repository root. + +To use an extended glob, start a name with ``glob:``. Globs are rooted +at the current directory; a glob such as ``*.c`` will only match files +in the current directory ending with ``.c``. + +The supported glob syntax extensions are ``**`` to match any string +across path separators and ``{a,b}`` to mean "a or b". + +To use a Perl/Python regular expression, start a name with ``re:``. +Regexp pattern matching is anchored at the root of the repository. + +Plain examples:: + + path:foo/bar a name bar in a directory named foo in the root + of the repository + path:path:name a file or directory named "path:name" + +Glob examples:: + + glob:*.c any name ending in ".c" in the current directory + *.c any name ending in ".c" in the current directory + **.c any name ending in ".c" in any subdirectory of the + current directory including itself. + foo/*.c any name ending in ".c" in the directory foo + foo/**.c any name ending in ".c" in any subdirectory of foo + including itself. 
+ +Regexp examples:: + + re:.*\.c$ any name ending in ".c", anywhere in the repository diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/revisions.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/revisions.txt new file mode 100644 index 0000000..309f8e2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/revisions.txt @@ -0,0 +1,29 @@ +Mercurial supports several ways to specify individual revisions. + +A plain integer is treated as a revision number. Negative integers are +treated as sequential offsets from the tip, with -1 denoting the tip, +-2 denoting the revision prior to the tip, and so forth. + +A 40-digit hexadecimal string is treated as a unique revision +identifier. + +A hexadecimal string less than 40 characters long is treated as a +unique revision identifier and is referred to as a short-form +identifier. A short-form identifier is only valid if it is the prefix +of exactly one full-length identifier. + +Any other string is treated as a tag or branch name. A tag name is a +symbolic name associated with a revision identifier. A branch name +denotes the tipmost revision of that branch. Tag and branch names must +not contain the ":" character. + +The reserved name "tip" is a special tag that always identifies the +most recent revision. + +The reserved name "null" indicates the null revision. This is the +revision of an empty repository, and the parent of revision 0. + +The reserved name "." indicates the working directory parent. If no +working directory is checked out, it is equivalent to null. If an +uncommitted merge is in progress, "." is the revision of the first +parent. 
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/revsets.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/revsets.txt new file mode 100644 index 0000000..c964aae --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/revsets.txt @@ -0,0 +1,87 @@ +Mercurial supports a functional language for selecting a set of +revisions. + +The language supports a number of predicates which are joined by infix +operators. Parenthesis can be used for grouping. + +Identifiers such as branch names must be quoted with single or double +quotes if they contain characters outside of +``[._a-zA-Z0-9\x80-\xff]`` or if they match one of the predefined +predicates. + +Special characters can be used in quoted identifiers by escaping them, +e.g., ``\n`` is interpreted as a newline. To prevent them from being +interpreted, strings can be prefixed with ``r``, e.g. ``r'...'``. + +There is a single prefix operator: + +``not x`` + Changesets not in x. Short form is ``! x``. + +These are the supported infix operators: + +``x::y`` + A DAG range, meaning all changesets that are descendants of x and + ancestors of y, including x and y themselves. If the first endpoint + is left out, this is equivalent to ``ancestors(y)``, if the second + is left out it is equivalent to ``descendants(x)``. + + An alternative syntax is ``x..y``. + +``x:y`` + All changesets with revision numbers between x and y, both + inclusive. Either endpoint can be left out, they default to 0 and + tip. + +``x and y`` + The intersection of changesets in x and y. Short form is ``x & y``. + +``x or y`` + The union of changesets in x and y. There are two alternative short + forms: ``x | y`` and ``x + y``. + +``x - y`` + Changesets in x but not in y. + +The following predicates are supported: + +.. predicatesmarker + +Command line equivalents for :hg:`log`:: + + -f -> ::. 
+ -d x -> date(x) + -k x -> keyword(x) + -m -> merge() + -u x -> user(x) + -b x -> branch(x) + -P x -> !::x + -l x -> limit(expr, x) + +Some sample queries: + +- Changesets on the default branch:: + + hg log -r "branch(default)" + +- Changesets on the default branch since tag 1.5 (excluding merges):: + + hg log -r "branch(default) and 1.5:: and not merge()" + +- Open branch heads:: + + hg log -r "head() and not closed()" + +- Changesets between tags 1.3 and 1.5 mentioning "bug" that affect + ``hgext/*``:: + + hg log -r "1.3::1.5 and keyword(bug) and file('hgext/*')" + +- Changesets committed in May 2008, sorted by user:: + + hg log -r "sort(date('May 2008'), user)" + +- Changesets mentioning "bug" or "issue" that are not in a tagged + release:: + + hg log -r "(keyword(bug) or keyword(issue)) and not ancestors(tagged())" diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/subrepos.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/subrepos.txt new file mode 100644 index 0000000..bd4e01b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/subrepos.txt @@ -0,0 +1,127 @@ +Subrepositories let you nest external repositories or projects into a +parent Mercurial repository, and make commands operate on them as a +group. External Mercurial and Subversion projects are currently +supported. + +Subrepositories are made of three components: + +1. Nested repository checkouts. They can appear anywhere in the + parent working directory, and are Mercurial clones or Subversion + checkouts. + +2. Nested repository references. They are defined in ``.hgsub`` and + tell where the subrepository checkouts come from. Mercurial + subrepositories are referenced like: + + path/to/nested = https://example.com/nested/repo/path + + where ``path/to/nested`` is the checkout location relative to the + parent Mercurial root, and ``https://example.com/nested/repo/path`` + is the source repository path. 
The source can also reference a + filesystem path. Subversion repositories are defined with: + + path/to/nested = [svn]https://example.com/nested/trunk/path + + Note that ``.hgsub`` does not exist by default in Mercurial + repositories, you have to create and add it to the parent + repository before using subrepositories. + +3. Nested repository states. They are defined in ``.hgsubstate`` and + capture whatever information is required to restore the + subrepositories to the state they were committed in a parent + repository changeset. Mercurial automatically records the nested + repositories states when committing in the parent repository. + + .. note:: + The ``.hgsubstate`` file should not be edited manually. + + +Adding a Subrepository +---------------------- + +If ``.hgsub`` does not exist, create it and add it to the parent +repository. Clone or checkout the external projects where you want it +to live in the parent repository. Edit ``.hgsub`` and add the +subrepository entry as described above. At this point, the +subrepository is tracked and the next commit will record its state in +``.hgsubstate`` and bind it to the committed changeset. + +Synchronizing a Subrepository +----------------------------- + +Subrepos do not automatically track the latest changeset of their +sources. Instead, they are updated to the changeset that corresponds +with the changeset checked out in the top-level changeset. This is so +developers always get a consistent set of compatible code and +libraries when they update. + +Thus, updating subrepos is a manual process. Simply check out target +subrepo at the desired revision, test in the top-level repo, then +commit in the parent repository to record the new combination. + +Deleting a Subrepository +------------------------ + +To remove a subrepository from the parent repository, delete its +reference from ``.hgsub``, then remove its files. 
+ +Interaction with Mercurial Commands +----------------------------------- + +:add: add does not recurse in subrepos unless -S/--subrepos is + specified. Subversion subrepositories are currently silently + ignored. + +:archive: archive does not recurse in subrepositories unless + -S/--subrepos is specified. + +:commit: commit creates a consistent snapshot of the state of the + entire project and its subrepositories. It does this by first + attempting to commit all modified subrepositories, then recording + their state and finally committing it in the parent repository. + +:diff: diff does not recurse in subrepos unless -S/--subrepos is + specified. Changes are displayed as usual, on the subrepositories + elements. Subversion subrepositories are currently silently + ignored. + +:incoming: incoming does not recurse in subrepos unless -S/--subrepos + is specified. Subversion subrepositories are currently silently + ignored. + +:outgoing: outgoing does not recurse in subrepos unless -S/--subrepos + is specified. Subversion subrepositories are currently silently + ignored. + +:pull: pull is not recursive since it is not clear what to pull prior + to running :hg:`update`. Listing and retrieving all + subrepositories changes referenced by the parent repository pulled + changesets is expensive at best, impossible in the Subversion + case. + +:push: Mercurial will automatically push all subrepositories first + when the parent repository is being pushed. This ensures new + subrepository changes are available when referenced by top-level + repositories. + +:status: status does not recurse into subrepositories unless + -S/--subrepos is specified. Subrepository changes are displayed as + regular Mercurial changes on the subrepository + elements. Subversion subrepositories are currently silently + ignored. + +:update: update restores the subrepos in the state they were + originally committed in target changeset. 
If the recorded + changeset is not available in the current subrepository, Mercurial + will pull it in first before updating. This means that updating + can require network access when using subrepositories. + +Remapping Subrepositories Sources +--------------------------------- + +A subrepository source location may change during a project life, +invalidating references stored in the parent repository history. To +fix this, rewriting rules can be defined in parent repository ``hgrc`` +file or in Mercurial configuration. See the ``[subpaths]`` section in +hgrc(5) for more details. + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/templates.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/templates.txt new file mode 100644 index 0000000..6ca12e5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/templates.txt @@ -0,0 +1,160 @@ +Mercurial allows you to customize output of commands through +templates. You can either pass in a template from the command +line, via the --template option, or select an existing +template-style (--style). + +You can customize output for any "log-like" command: log, +outgoing, incoming, tip, parents, heads and glog. + +Four styles are packaged with Mercurial: default (the style used +when no explicit preference is passed), compact, changelog, +and xml. +Usage:: + + $ hg log -r1 --style changelog + +A template is a piece of text, with markup to invoke variable +expansion:: + + $ hg log -r1 --template "{node}\n" + b56ce7b07c52de7d5fd79fb89701ea538af65746 + +Strings in curly braces are called keywords. The availability of +keywords depends on the exact context of the templater. These +keywords are usually available for templating a log-like command: + +:author: String. The unmodified author of the changeset. + +:branches: List of strings. The name of the branch on which the + changeset was committed. Will be empty if the branch name was + default. + +:children: List of strings. 
The children of the changeset. + +:date: Date information. The date when the changeset was committed. + +:desc: String. The text of the changeset description. + +:diffstat: String. Statistics of changes with the following format: + "modified files: +added/-removed lines" + +:files: List of strings. All files modified, added, or removed by this + changeset. + +:file_adds: List of strings. Files added by this changeset. + +:file_copies: List of strings. Files copied in this changeset with + their sources. + +:file_copies_switch: List of strings. Like "file_copies" but displayed + only if the --copied switch is set. + +:file_mods: List of strings. Files modified by this changeset. + +:file_dels: List of strings. Files removed by this changeset. + +:node: String. The changeset identification hash, as a 40 hexadecimal + digit string. + +:parents: List of strings. The parents of the changeset. + +:rev: Integer. The repository-local changeset revision number. + +:tags: List of strings. Any tags associated with the changeset. + +:latesttag: String. Most recent global tag in the ancestors of this + changeset. + +:latesttagdistance: Integer. Longest path to the latest tag. + +The "date" keyword does not produce human-readable output. If you +want to use a date in your output, you can use a filter to process +it. Filters are functions which return a string based on the input +variable. Be sure to use the stringify filter first when you're +applying a string-input filter to a list-like input variable. +You can also use a chain of filters to get the desired output:: + + $ hg tip --template "{date|isodate}\n" + 2008-08-21 18:22 +0000 + +List of filters: + +:addbreaks: Any text. Add an XHTML "<br />" tag before the end of + every line except the last. + +:age: Date. Returns a human-readable date/time difference between the + given date/time and the current date/time. + +:basename: Any text. 
Treats the text as a path, and returns the last + component of the path after splitting by the path separator + (ignoring trailing separators). For example, "foo/bar/baz" becomes + "baz" and "foo/bar//" becomes "bar". + +:stripdir: Treat the text as path and strip a directory level, if + possible. For example, "foo" and "foo/bar" becomes "foo". + +:date: Date. Returns a date in a Unix date format, including the + timezone: "Mon Sep 04 15:13:13 2006 0700". + +:domain: Any text. Finds the first string that looks like an email + address, and extracts just the domain component. Example: ``User + <user@example.com>`` becomes ``example.com``. + +:email: Any text. Extracts the first string that looks like an email + address. Example: ``User <user@example.com>`` becomes + ``user@example.com``. + +:escape: Any text. Replaces the special XML/XHTML characters "&", "<" + and ">" with XML entities. + +:hex: Any text. Convert a binary Mercurial node identifier into + its long hexadecimal representation. + +:fill68: Any text. Wraps the text to fit in 68 columns. + +:fill76: Any text. Wraps the text to fit in 76 columns. + +:firstline: Any text. Returns the first line of text. + +:nonempty: Any text. Returns '(none)' if the string is empty. + +:hgdate: Date. Returns the date as a pair of numbers: "1157407993 + 25200" (Unix timestamp, timezone offset). + +:isodate: Date. Returns the date in ISO 8601 format: "2009-08-18 13:00 + +0200". + +:isodatesec: Date. Returns the date in ISO 8601 format, including + seconds: "2009-08-18 13:00:13 +0200". See also the rfc3339date + filter. + +:localdate: Date. Converts a date to local date. + +:obfuscate: Any text. Returns the input text rendered as a sequence of + XML entities. + +:person: Any text. Returns the text before an email address. + +:rfc822date: Date. Returns a date using the same format used in email + headers: "Tue, 18 Aug 2009 13:00:13 +0200". + +:rfc3339date: Date. 
Returns a date using the Internet date format + specified in RFC 3339: "2009-08-18T13:00:13+02:00". + +:short: Changeset hash. Returns the short form of a changeset hash, + i.e. a 12 hexadecimal digit string. + +:shortdate: Date. Returns a date like "2006-09-18". + +:stringify: Any type. Turns the value into text by converting values into + text and concatenating them. + +:strip: Any text. Strips all leading and trailing whitespace. + +:tabindent: Any text. Returns the text, with every line except the + first starting with a tab character. + +:urlescape: Any text. Escapes all "special" characters. For example, + "foo bar" becomes "foo%20bar". + +:user: Any text. Returns the user portion of an email address. diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/urls.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/urls.txt new file mode 100644 index 0000000..3704090 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/help/urls.txt @@ -0,0 +1,66 @@ +Valid URLs are of the form:: + + local/filesystem/path[#revision] + file://local/filesystem/path[#revision] + http://[user[:pass]@]host[:port]/[path][#revision] + https://[user[:pass]@]host[:port]/[path][#revision] + ssh://[user[:pass]@]host[:port]/[path][#revision] + +Paths in the local filesystem can either point to Mercurial +repositories or to bundle files (as created by :hg:`bundle` or :hg:` +incoming --bundle`). See also :hg:`help paths`. + +An optional identifier after # indicates a particular branch, tag, or +changeset to use from the remote repository. See also :hg:`help +revisions`. + +Some features, such as pushing to http:// and https:// URLs are only +possible if the feature is explicitly enabled on the remote Mercurial +server. + +Note that the security of HTTPS URLs depends on proper configuration of +web.cacerts. 
+ +Some notes about using SSH with Mercurial: + +- SSH requires an accessible shell account on the destination machine + and a copy of hg in the remote path or specified with remotecmd. +- path is relative to the remote user's home directory by default. Use + an extra slash at the start of a path to specify an absolute path:: + + ssh://example.com//tmp/repository + +- Mercurial doesn't use its own compression via SSH; the right thing + to do is to configure it in your ~/.ssh/config, e.g.:: + + Host *.mylocalnetwork.example.com + Compression no + Host * + Compression yes + + Alternatively specify "ssh -C" as your ssh command in your + configuration file or with the --ssh command line option. + +These URLs can all be stored in your configuration file with path +aliases under the [paths] section like so:: + + [paths] + alias1 = URL1 + alias2 = URL2 + ... + +You can then use the alias for any command that uses a URL (for +example :hg:`pull alias1` will be treated as :hg:`pull URL1`). + +Two path aliases are special because they are used as defaults when +you do not provide the URL to a command: + +default: + When you create a repository with hg clone, the clone command saves + the location of the source repository as the new repository's + 'default' path. This is then used when you omit path from push- and + pull-like commands (including incoming and outgoing). + +default-push: + The push command will look for a path named 'default-push', and + prefer it over 'default' if both are defined. 
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hg.py new file mode 100644 index 0000000..0e58c67 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hg.py @@ -0,0 +1,556 @@ +# hg.py - repository classes for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from i18n import _ +from lock import release +from node import hex, nullid, nullrev, short +import localrepo, bundlerepo, httprepo, sshrepo, statichttprepo +import lock, util, extensions, error, encoding, node +import cmdutil, discovery, url +import merge as mergemod +import verify as verifymod +import errno, os, shutil + +def _local(path): + path = util.expandpath(util.drop_scheme('file', path)) + return (os.path.isfile(path) and bundlerepo or localrepo) + +def addbranchrevs(lrepo, repo, branches, revs): + hashbranch, branches = branches + if not hashbranch and not branches: + return revs or None, revs and revs[0] or None + revs = revs and list(revs) or [] + if not repo.capable('branchmap'): + if branches: + raise util.Abort(_("remote branch lookup not supported")) + revs.append(hashbranch) + return revs, revs[0] + branchmap = repo.branchmap() + + def primary(butf8): + if butf8 == '.': + if not lrepo or not lrepo.local(): + raise util.Abort(_("dirstate branch not accessible")) + butf8 = lrepo.dirstate.branch() + if butf8 in branchmap: + revs.extend(node.hex(r) for r in reversed(branchmap[butf8])) + return True + else: + return False + + for branch in branches: + butf8 = encoding.fromlocal(branch) + if not primary(butf8): + raise error.RepoLookupError(_("unknown branch '%s'") % branch) + if hashbranch: + butf8 = encoding.fromlocal(hashbranch) + if not primary(butf8): + revs.append(hashbranch) + 
return revs, revs[0] + +def parseurl(url, branches=None): + '''parse url#branch, returning (url, (branch, branches))''' + + if '#' not in url: + return url, (None, branches or []) + url, branch = url.split('#', 1) + return url, (branch, branches or []) + +schemes = { + 'bundle': bundlerepo, + 'file': _local, + 'http': httprepo, + 'https': httprepo, + 'ssh': sshrepo, + 'static-http': statichttprepo, +} + +def _lookup(path): + scheme = 'file' + if path: + c = path.find(':') + if c > 0: + scheme = path[:c] + thing = schemes.get(scheme) or schemes['file'] + try: + return thing(path) + except TypeError: + return thing + +def islocal(repo): + '''return true if repo or path is local''' + if isinstance(repo, str): + try: + return _lookup(repo).islocal(repo) + except AttributeError: + return False + return repo.local() + +def repository(ui, path='', create=False): + """return a repository object for the specified path""" + repo = _lookup(path).instance(ui, path, create) + ui = getattr(repo, "ui", ui) + for name, module in extensions.extensions(): + hook = getattr(module, 'reposetup', None) + if hook: + hook(ui, repo) + return repo + +def defaultdest(source): + '''return default destination of clone if none is given''' + return os.path.basename(os.path.normpath(source)) + +def localpath(path): + if path.startswith('file://localhost/'): + return path[16:] + if path.startswith('file://'): + return path[7:] + if path.startswith('file:'): + return path[5:] + return path + +def share(ui, source, dest=None, update=True): + '''create a shared repository''' + + if not islocal(source): + raise util.Abort(_('can only share local repositories')) + + if not dest: + dest = defaultdest(source) + else: + dest = ui.expandpath(dest) + + if isinstance(source, str): + origsource = ui.expandpath(source) + source, branches = parseurl(origsource) + srcrepo = repository(ui, source) + rev, checkout = addbranchrevs(srcrepo, srcrepo, branches, None) + else: + srcrepo = source + origsource = source = 
srcrepo.url() + checkout = None + + sharedpath = srcrepo.sharedpath # if our source is already sharing + + root = os.path.realpath(dest) + roothg = os.path.join(root, '.hg') + + if os.path.exists(roothg): + raise util.Abort(_('destination already exists')) + + if not os.path.isdir(root): + os.mkdir(root) + os.mkdir(roothg) + + requirements = '' + try: + requirements = srcrepo.opener('requires').read() + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + + requirements += 'shared\n' + file(os.path.join(roothg, 'requires'), 'w').write(requirements) + file(os.path.join(roothg, 'sharedpath'), 'w').write(sharedpath) + + default = srcrepo.ui.config('paths', 'default') + if default: + f = file(os.path.join(roothg, 'hgrc'), 'w') + f.write('[paths]\ndefault = %s\n' % default) + f.close() + + r = repository(ui, root) + + if update: + r.ui.status(_("updating working directory\n")) + if update is not True: + checkout = update + for test in (checkout, 'default', 'tip'): + if test is None: + continue + try: + uprev = r.lookup(test) + break + except error.RepoLookupError: + continue + _update(r, uprev) + +def clone(ui, source, dest=None, pull=False, rev=None, update=True, + stream=False, branch=None): + """Make a copy of an existing repository. + + Create a copy of an existing repository in a new directory. The + source and destination are URLs, as passed to the repository + function. Returns a pair of repository objects, the source and + newly created destination. + + The location of the source is added to the new repository's + .hg/hgrc file, as the default to be used for future pulls and + pushes. + + If an exception is raised, the partly cloned/updated destination + repository will be deleted. 
+ + Arguments: + + source: repository object or URL + + dest: URL of destination repository to create (defaults to base + name of source repository) + + pull: always pull from source repository, even in local case + + stream: stream raw data uncompressed from repository (fast over + LAN, slow over WAN) + + rev: revision to clone up to (implies pull=True) + + update: update working directory after clone completes, if + destination is local repository (True means update to default rev, + anything else is treated as a revision) + + branch: branches to clone + """ + + if isinstance(source, str): + origsource = ui.expandpath(source) + source, branch = parseurl(origsource, branch) + src_repo = repository(ui, source) + else: + src_repo = source + branch = (None, branch or []) + origsource = source = src_repo.url() + rev, checkout = addbranchrevs(src_repo, src_repo, branch, rev) + + if dest is None: + dest = defaultdest(source) + ui.status(_("destination directory: %s\n") % dest) + else: + dest = ui.expandpath(dest) + + dest = localpath(dest) + source = localpath(source) + + if os.path.exists(dest): + if not os.path.isdir(dest): + raise util.Abort(_("destination '%s' already exists") % dest) + elif os.listdir(dest): + raise util.Abort(_("destination '%s' is not empty") % dest) + + class DirCleanup(object): + def __init__(self, dir_): + self.rmtree = shutil.rmtree + self.dir_ = dir_ + def close(self): + self.dir_ = None + def cleanup(self): + if self.dir_: + self.rmtree(self.dir_, True) + + src_lock = dest_lock = dir_cleanup = None + try: + if islocal(dest): + dir_cleanup = DirCleanup(dest) + + abspath = origsource + copy = False + if src_repo.cancopy() and islocal(dest): + abspath = os.path.abspath(util.drop_scheme('file', origsource)) + copy = not pull and not rev + + if copy: + try: + # we use a lock here because if we race with commit, we + # can end up with extra data in the cloned revlogs that's + # not pointed to by changesets, thus causing verify to + # fail + 
src_lock = src_repo.lock(wait=False) + except error.LockError: + copy = False + + if copy: + src_repo.hook('preoutgoing', throw=True, source='clone') + hgdir = os.path.realpath(os.path.join(dest, ".hg")) + if not os.path.exists(dest): + os.mkdir(dest) + else: + # only clean up directories we create ourselves + dir_cleanup.dir_ = hgdir + try: + dest_path = hgdir + os.mkdir(dest_path) + except OSError, inst: + if inst.errno == errno.EEXIST: + dir_cleanup.close() + raise util.Abort(_("destination '%s' already exists") + % dest) + raise + + hardlink = None + num = 0 + for f in src_repo.store.copylist(): + src = os.path.join(src_repo.sharedpath, f) + dst = os.path.join(dest_path, f) + dstbase = os.path.dirname(dst) + if dstbase and not os.path.exists(dstbase): + os.mkdir(dstbase) + if os.path.exists(src): + if dst.endswith('data'): + # lock to avoid premature writing to the target + dest_lock = lock.lock(os.path.join(dstbase, "lock")) + hardlink, n = util.copyfiles(src, dst, hardlink) + num += n + if hardlink: + ui.debug("linked %d files\n" % num) + else: + ui.debug("copied %d files\n" % num) + + # we need to re-init the repo after manually copying the data + # into it + dest_repo = repository(ui, dest) + src_repo.hook('outgoing', source='clone', + node=node.hex(node.nullid)) + else: + try: + dest_repo = repository(ui, dest, create=True) + except OSError, inst: + if inst.errno == errno.EEXIST: + dir_cleanup.close() + raise util.Abort(_("destination '%s' already exists") + % dest) + raise + + revs = None + if rev: + if 'lookup' not in src_repo.capabilities: + raise util.Abort(_("src repository does not support " + "revision lookup and so doesn't " + "support clone by revision")) + revs = [src_repo.lookup(r) for r in rev] + checkout = revs[0] + if dest_repo.local(): + dest_repo.clone(src_repo, heads=revs, stream=stream) + elif src_repo.local(): + src_repo.push(dest_repo, revs=revs) + else: + raise util.Abort(_("clone from remote to remote not supported")) + + if 
dir_cleanup: + dir_cleanup.close() + + if dest_repo.local(): + fp = dest_repo.opener("hgrc", "w", text=True) + fp.write("[paths]\n") + fp.write("default = %s\n" % abspath) + fp.close() + + dest_repo.ui.setconfig('paths', 'default', abspath) + + if update: + if update is not True: + checkout = update + if src_repo.local(): + checkout = src_repo.lookup(update) + for test in (checkout, 'default', 'tip'): + if test is None: + continue + try: + uprev = dest_repo.lookup(test) + break + except error.RepoLookupError: + continue + bn = dest_repo[uprev].branch() + dest_repo.ui.status(_("updating to branch %s\n") + % encoding.tolocal(bn)) + _update(dest_repo, uprev) + + return src_repo, dest_repo + finally: + release(src_lock, dest_lock) + if dir_cleanup is not None: + dir_cleanup.cleanup() + +def _showstats(repo, stats): + repo.ui.status(_("%d files updated, %d files merged, " + "%d files removed, %d files unresolved\n") % stats) + +def update(repo, node): + """update the working directory to node, merging linear changes""" + stats = mergemod.update(repo, node, False, False, None) + _showstats(repo, stats) + if stats[3]: + repo.ui.status(_("use 'hg resolve' to retry unresolved file merges\n")) + return stats[3] > 0 + +# naming conflict in clone() +_update = update + +def clean(repo, node, show_stats=True): + """forcibly switch the working directory to node, clobbering changes""" + stats = mergemod.update(repo, node, False, True, None) + if show_stats: + _showstats(repo, stats) + return stats[3] > 0 + +def merge(repo, node, force=None, remind=True): + """branch merge with node, resolving changes""" + stats = mergemod.update(repo, node, True, force, False) + _showstats(repo, stats) + if stats[3]: + repo.ui.status(_("use 'hg resolve' to retry unresolved file merges " + "or 'hg update -C .' 
to abandon\n")) + elif remind: + repo.ui.status(_("(branch merge, don't forget to commit)\n")) + return stats[3] > 0 + +def _incoming(displaychlist, subreporecurse, ui, repo, source, + opts, buffered=False): + """ + Helper for incoming / gincoming. + displaychlist gets called with + (remoterepo, incomingchangesetlist, displayer) parameters, + and is supposed to contain only code that can't be unified. + """ + source, branches = parseurl(ui.expandpath(source), opts.get('branch')) + other = repository(remoteui(repo, opts), source) + ui.status(_('comparing with %s\n') % url.hidepassword(source)) + revs, checkout = addbranchrevs(repo, other, branches, opts.get('rev')) + + if revs: + revs = [other.lookup(rev) for rev in revs] + other, incoming, bundle = bundlerepo.getremotechanges(ui, repo, other, revs, + opts["bundle"], opts["force"]) + if incoming is None: + ui.status(_("no changes found\n")) + return subreporecurse() + + try: + chlist = other.changelog.nodesbetween(incoming, revs)[0] + displayer = cmdutil.show_changeset(ui, other, opts, buffered) + + # XXX once graphlog extension makes it into core, + # should be replaced by a if graph/else + displaychlist(other, chlist, displayer) + + displayer.close() + finally: + if hasattr(other, 'close'): + other.close() + if bundle: + os.unlink(bundle) + subreporecurse() + return 0 # exit code is zero since we found incoming changes + +def incoming(ui, repo, source, opts): + def subreporecurse(): + ret = 1 + if opts.get('subrepos'): + ctx = repo[None] + for subpath in sorted(ctx.substate): + sub = ctx.sub(subpath) + ret = min(ret, sub.incoming(ui, source, opts)) + return ret + + def display(other, chlist, displayer): + limit = cmdutil.loglimit(opts) + if opts.get('newest_first'): + chlist.reverse() + count = 0 + for n in chlist: + if limit is not None and count >= limit: + break + parents = [p for p in other.changelog.parents(n) if p != nullid] + if opts.get('no_merges') and len(parents) == 2: + continue + count += 1 + 
displayer.show(other[n]) + return _incoming(display, subreporecurse, ui, repo, source, opts) + +def _outgoing(ui, repo, dest, opts): + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = parseurl(dest, opts.get('branch')) + revs, checkout = addbranchrevs(repo, repo, branches, opts.get('rev')) + if revs: + revs = [repo.lookup(rev) for rev in revs] + + other = repository(remoteui(repo, opts), dest) + ui.status(_('comparing with %s\n') % url.hidepassword(dest)) + o = discovery.findoutgoing(repo, other, force=opts.get('force')) + if not o: + ui.status(_("no changes found\n")) + return None + + return repo.changelog.nodesbetween(o, revs)[0] + +def outgoing(ui, repo, dest, opts): + def recurse(): + ret = 1 + if opts.get('subrepos'): + ctx = repo[None] + for subpath in sorted(ctx.substate): + sub = ctx.sub(subpath) + ret = min(ret, sub.outgoing(ui, dest, opts)) + return ret + + limit = cmdutil.loglimit(opts) + o = _outgoing(ui, repo, dest, opts) + if o is None: + return recurse() + + if opts.get('newest_first'): + o.reverse() + displayer = cmdutil.show_changeset(ui, repo, opts) + count = 0 + for n in o: + if limit is not None and count >= limit: + break + parents = [p for p in repo.changelog.parents(n) if p != nullid] + if opts.get('no_merges') and len(parents) == 2: + continue + count += 1 + displayer.show(repo[n]) + displayer.close() + recurse() + return 0 # exit code is zero since we found outgoing changes + +def revert(repo, node, choose): + """revert changes to revision in node without updating dirstate""" + return mergemod.update(repo, node, False, True, choose)[3] > 0 + +def verify(repo): + """verify the consistency of a repository""" + return verifymod.verify(repo) + +def remoteui(src, opts): + 'build a remote ui from ui or repo and opts' + if hasattr(src, 'baseui'): # looks like a repository + dst = src.baseui.copy() # drop repo-specific config + src = src.ui # copy target options from repo + else: # assume it's a global ui object + 
dst = src.copy() # keep all global options + + # copy ssh-specific options + for o in 'ssh', 'remotecmd': + v = opts.get(o) or src.config('ui', o) + if v: + dst.setconfig("ui", o, v) + + # copy bundle-specific options + r = src.config('bundle', 'mainreporoot') + if r: + dst.setconfig('bundle', 'mainreporoot', r) + + # copy selected local settings to the remote ui + for sect in ('auth', 'http_proxy'): + for key, val in src.configitems(sect): + dst.setconfig(sect, key, val) + v = src.config('web', 'cacerts') + if v: + dst.setconfig('web', 'cacerts', v) + + return dst diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hg.pyo Binary files differnew file mode 100644 index 0000000..3e25c2b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hg.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/__init__.py new file mode 100644 index 0000000..dd4d089 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/__init__.py @@ -0,0 +1,31 @@ +# hgweb/__init__.py - web interface to a mercurial repository +# +# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> +# Copyright 2005 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import os +import hgweb_mod, hgwebdir_mod + +def hgweb(config, name=None, baseui=None): + '''create an hgweb wsgi object + + config can be one of: + - repo object (single repo view) + - path to repo (single repo view) + - path to config file (multi-repo view) + - dict of virtual:real pairs (multi-repo view) + - list of virtual:real tuples (multi-repo view) + ''' + + if ((isinstance(config, str) and not os.path.isdir(config)) or + isinstance(config, dict) or isinstance(config, list)): + # create a multi-dir interface + return hgwebdir_mod.hgwebdir(config, baseui=baseui) + return hgweb_mod.hgweb(config, name=name, baseui=baseui) + +def hgwebdir(config, baseui=None): + return hgwebdir_mod.hgwebdir(config, baseui=baseui) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/__init__.pyo Binary files differnew file mode 100644 index 0000000..fe872c0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/__init__.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/common.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/common.py new file mode 100644 index 0000000..eb7e02a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/common.py @@ -0,0 +1,161 @@ +# hgweb/common.py - Utility functions needed by hgweb_mod and hgwebdir_mod +# +# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import errno, mimetypes, os + +HTTP_OK = 200 +HTTP_NOT_MODIFIED = 304 +HTTP_BAD_REQUEST = 400 +HTTP_UNAUTHORIZED = 401 +HTTP_FORBIDDEN = 403 +HTTP_NOT_FOUND = 404 +HTTP_METHOD_NOT_ALLOWED = 405 +HTTP_SERVER_ERROR = 500 + +# Hooks for hgweb permission checks; extensions can add hooks here. 
Each hook +# is invoked like this: hook(hgweb, request, operation), where operation is +# either read, pull or push. Hooks should either raise an ErrorResponse +# exception, or just return. +# It is possible to do both authentication and authorization through this. +permhooks = [] + +def checkauthz(hgweb, req, op): + '''Check permission for operation based on request data (including + authentication info). Return if op allowed, else raise an ErrorResponse + exception.''' + + user = req.env.get('REMOTE_USER') + + deny_read = hgweb.configlist('web', 'deny_read') + if deny_read and (not user or deny_read == ['*'] or user in deny_read): + raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized') + + allow_read = hgweb.configlist('web', 'allow_read') + result = (not allow_read) or (allow_read == ['*']) + if not (result or user in allow_read): + raise ErrorResponse(HTTP_UNAUTHORIZED, 'read not authorized') + + if op == 'pull' and not hgweb.allowpull: + raise ErrorResponse(HTTP_UNAUTHORIZED, 'pull not authorized') + elif op == 'pull' or op is None: # op is None for interface requests + return + + # enforce that you can only push using POST requests + if req.env['REQUEST_METHOD'] != 'POST': + msg = 'push requires POST request' + raise ErrorResponse(HTTP_METHOD_NOT_ALLOWED, msg) + + # require ssl by default for pushing, auth info cannot be sniffed + # and replayed + scheme = req.env.get('wsgi.url_scheme') + if hgweb.configbool('web', 'push_ssl', True) and scheme != 'https': + raise ErrorResponse(HTTP_OK, 'ssl required') + + deny = hgweb.configlist('web', 'deny_push') + if deny and (not user or deny == ['*'] or user in deny): + raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized') + + allow = hgweb.configlist('web', 'allow_push') + result = allow and (allow == ['*'] or user in allow) + if not result: + raise ErrorResponse(HTTP_UNAUTHORIZED, 'push not authorized') + +# Add the default permhook, which provides simple authorization. 
+permhooks.append(checkauthz) + + +class ErrorResponse(Exception): + def __init__(self, code, message=None, headers=[]): + Exception.__init__(self) + self.code = code + self.headers = headers + if message is not None: + self.message = message + else: + self.message = _statusmessage(code) + +def _statusmessage(code): + from BaseHTTPServer import BaseHTTPRequestHandler + responses = BaseHTTPRequestHandler.responses + return responses.get(code, ('Error', 'Unknown error'))[0] + +def statusmessage(code, message=None): + return '%d %s' % (code, message or _statusmessage(code)) + +def get_mtime(spath): + cl_path = os.path.join(spath, "00changelog.i") + if os.path.exists(cl_path): + return os.stat(cl_path).st_mtime + else: + return os.stat(spath).st_mtime + +def staticfile(directory, fname, req): + """return a file inside directory with guessed Content-Type header + + fname always uses '/' as directory separator and isn't allowed to + contain unusual path components. + Content-Type is guessed using the mimetypes module. + Return an empty string if fname is illegal or file not found. 
+ + """ + parts = fname.split('/') + for part in parts: + if (part in ('', os.curdir, os.pardir) or + os.sep in part or os.altsep is not None and os.altsep in part): + return "" + fpath = os.path.join(*parts) + if isinstance(directory, str): + directory = [directory] + for d in directory: + path = os.path.join(d, fpath) + if os.path.exists(path): + break + try: + os.stat(path) + ct = mimetypes.guess_type(path)[0] or "text/plain" + req.respond(HTTP_OK, ct, length = os.path.getsize(path)) + return open(path, 'rb').read() + except TypeError: + raise ErrorResponse(HTTP_SERVER_ERROR, 'illegal filename') + except OSError, err: + if err.errno == errno.ENOENT: + raise ErrorResponse(HTTP_NOT_FOUND) + else: + raise ErrorResponse(HTTP_SERVER_ERROR, err.strerror) + +def paritygen(stripecount, offset=0): + """count parity of horizontal stripes for easier reading""" + if stripecount and offset: + # account for offset, e.g. due to building the list in reverse + count = (stripecount + offset) % stripecount + parity = (stripecount + offset) / stripecount & 1 + else: + count = 0 + parity = 0 + while True: + yield parity + count += 1 + if stripecount and count >= stripecount: + parity = 1 - parity + count = 0 + +def get_contact(config): + """Return repo contact information or empty string. + + web.contact is the primary source, but if that is not set, try + ui.username or $EMAIL as a fallback to display something useful. 
+ """ + return (config("web", "contact") or + config("ui", "username") or + os.environ.get("EMAIL") or "") + +def caching(web, req): + tag = str(web.mtime) + if req.env.get('HTTP_IF_NONE_MATCH') == tag: + raise ErrorResponse(HTTP_NOT_MODIFIED) + req.headers.append(('ETag', tag)) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/common.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/common.pyo Binary files differnew file mode 100644 index 0000000..c743684 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/common.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgweb_mod.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgweb_mod.py new file mode 100644 index 0000000..f886277 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgweb_mod.py @@ -0,0 +1,290 @@ +# hgweb/hgweb_mod.py - Web interface for a repository. +# +# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import os +from mercurial import ui, hg, hook, error, encoding, templater +from common import get_mtime, ErrorResponse, permhooks, caching +from common import HTTP_OK, HTTP_NOT_MODIFIED, HTTP_BAD_REQUEST +from common import HTTP_NOT_FOUND, HTTP_SERVER_ERROR +from request import wsgirequest +import webcommands, protocol, webutil + +perms = { + 'changegroup': 'pull', + 'changegroupsubset': 'pull', + 'stream_out': 'pull', + 'listkeys': 'pull', + 'unbundle': 'push', + 'pushkey': 'push', +} + +class hgweb(object): + def __init__(self, repo, name=None, baseui=None): + if isinstance(repo, str): + if baseui: + u = baseui.copy() + else: + u = ui.ui() + self.repo = hg.repository(u, repo) + else: + self.repo = repo + + self.repo.ui.setconfig('ui', 'report_untrusted', 'off') + self.repo.ui.setconfig('ui', 'interactive', 'off') + hook.redirect(True) + self.mtime = -1 + self.reponame = name + self.archives = 'zip', 'gz', 'bz2' + self.stripecount = 1 + # a repo owner may set web.templates in .hg/hgrc to get any file + # readable by the user running the CGI script + self.templatepath = self.config('web', 'templates') + + # The CGI scripts are often run by a user different from the repo owner. + # Trust the settings from the .hg/hgrc files by default. 
+ def config(self, section, name, default=None, untrusted=True): + return self.repo.ui.config(section, name, default, + untrusted=untrusted) + + def configbool(self, section, name, default=False, untrusted=True): + return self.repo.ui.configbool(section, name, default, + untrusted=untrusted) + + def configlist(self, section, name, default=None, untrusted=True): + return self.repo.ui.configlist(section, name, default, + untrusted=untrusted) + + def refresh(self, request=None): + if request: + self.repo.ui.environ = request.env + mtime = get_mtime(self.repo.spath) + if mtime != self.mtime: + self.mtime = mtime + self.repo = hg.repository(self.repo.ui, self.repo.root) + self.maxchanges = int(self.config("web", "maxchanges", 10)) + self.stripecount = int(self.config("web", "stripes", 1)) + self.maxshortchanges = int(self.config("web", "maxshortchanges", 60)) + self.maxfiles = int(self.config("web", "maxfiles", 10)) + self.allowpull = self.configbool("web", "allowpull", True) + encoding.encoding = self.config("web", "encoding", + encoding.encoding) + + def run(self): + if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."): + raise RuntimeError("This function is only intended to be " + "called while running as a CGI script.") + import mercurial.hgweb.wsgicgi as wsgicgi + wsgicgi.launch(self) + + def __call__(self, env, respond): + req = wsgirequest(env, respond) + return self.run_wsgi(req) + + def run_wsgi(self, req): + + self.refresh(req) + + # work with CGI variables to create coherent structure + # use SCRIPT_NAME, PATH_INFO and QUERY_STRING as well as our REPO_NAME + + req.url = req.env['SCRIPT_NAME'] + if not req.url.endswith('/'): + req.url += '/' + if 'REPO_NAME' in req.env: + req.url += req.env['REPO_NAME'] + '/' + + if 'PATH_INFO' in req.env: + parts = req.env['PATH_INFO'].strip('/').split('/') + repo_parts = req.env.get('REPO_NAME', '').split('/') + if parts[:len(repo_parts)] == repo_parts: + parts = parts[len(repo_parts):] + query = 
'/'.join(parts) + else: + query = req.env['QUERY_STRING'].split('&', 1)[0] + query = query.split(';', 1)[0] + + # process this if it's a protocol request + # protocol bits don't need to create any URLs + # and the clients always use the old URL structure + + cmd = req.form.get('cmd', [''])[0] + if protocol.iscmd(cmd): + if query: + raise ErrorResponse(HTTP_NOT_FOUND) + if cmd in perms: + try: + self.check_perm(req, perms[cmd]) + except ErrorResponse, inst: + if cmd == 'unbundle': + req.drain() + req.respond(inst, protocol.HGTYPE) + return '0\n%s\n' % inst.message + return protocol.call(self.repo, req, cmd) + + # translate user-visible url structure to internal structure + + args = query.split('/', 2) + if 'cmd' not in req.form and args and args[0]: + + cmd = args.pop(0) + style = cmd.rfind('-') + if style != -1: + req.form['style'] = [cmd[:style]] + cmd = cmd[style + 1:] + + # avoid accepting e.g. style parameter as command + if hasattr(webcommands, cmd): + req.form['cmd'] = [cmd] + else: + cmd = '' + + if cmd == 'static': + req.form['file'] = ['/'.join(args)] + else: + if args and args[0]: + node = args.pop(0) + req.form['node'] = [node] + if args: + req.form['file'] = args + + ua = req.env.get('HTTP_USER_AGENT', '') + if cmd == 'rev' and 'mercurial' in ua: + req.form['style'] = ['raw'] + + if cmd == 'archive': + fn = req.form['node'][0] + for type_, spec in self.archive_specs.iteritems(): + ext = spec[2] + if fn.endswith(ext): + req.form['node'] = [fn[:-len(ext)]] + req.form['type'] = [type_] + + # process the web interface request + + try: + tmpl = self.templater(req) + ctype = tmpl('mimetype', encoding=encoding.encoding) + ctype = templater.stringify(ctype) + + # check read permissions non-static content + if cmd != 'static': + self.check_perm(req, None) + + if cmd == '': + req.form['cmd'] = [tmpl.cache['default']] + cmd = req.form['cmd'][0] + + caching(self, req) # sets ETag header or raises NOT_MODIFIED + if cmd not in webcommands.__all__: + msg = 'no such 
method: %s' % cmd + raise ErrorResponse(HTTP_BAD_REQUEST, msg) + elif cmd == 'file' and 'raw' in req.form.get('style', []): + self.ctype = ctype + content = webcommands.rawfile(self, req, tmpl) + else: + content = getattr(webcommands, cmd)(self, req, tmpl) + req.respond(HTTP_OK, ctype) + + return content + + except error.LookupError, err: + req.respond(HTTP_NOT_FOUND, ctype) + msg = str(err) + if 'manifest' not in msg: + msg = 'revision not found: %s' % err.name + return tmpl('error', error=msg) + except (error.RepoError, error.RevlogError), inst: + req.respond(HTTP_SERVER_ERROR, ctype) + return tmpl('error', error=str(inst)) + except ErrorResponse, inst: + req.respond(inst, ctype) + if inst.code == HTTP_NOT_MODIFIED: + # Not allowed to return a body on a 304 + return [''] + return tmpl('error', error=inst.message) + + def templater(self, req): + + # determine scheme, port and server name + # this is needed to create absolute urls + + proto = req.env.get('wsgi.url_scheme') + if proto == 'https': + proto = 'https' + default_port = "443" + else: + proto = 'http' + default_port = "80" + + port = req.env["SERVER_PORT"] + port = port != default_port and (":" + port) or "" + urlbase = '%s://%s%s' % (proto, req.env['SERVER_NAME'], port) + staticurl = self.config("web", "staticurl") or req.url + 'static/' + if not staticurl.endswith('/'): + staticurl += '/' + + # some functions for the templater + + def header(**map): + yield tmpl('header', encoding=encoding.encoding, **map) + + def footer(**map): + yield tmpl("footer", **map) + + def motd(**map): + yield self.config("web", "motd", "") + + # figure out which style to use + + vars = {} + styles = ( + req.form.get('style', [None])[0], + self.config('web', 'style'), + 'paper', + ) + style, mapfile = templater.stylemap(styles, self.templatepath) + if style == styles[0]: + vars['style'] = style + + start = req.url[-1] == '?' and '&' or '?' 
+ sessionvars = webutil.sessionvars(vars, start) + + if not self.reponame: + self.reponame = (self.config("web", "name") + or req.env.get('REPO_NAME') + or req.url.strip('/') or self.repo.root) + + # create the templater + + tmpl = templater.templater(mapfile, + defaults={"url": req.url, + "staticurl": staticurl, + "urlbase": urlbase, + "repo": self.reponame, + "header": header, + "footer": footer, + "motd": motd, + "sessionvars": sessionvars + }) + return tmpl + + def archivelist(self, nodeid): + allowed = self.configlist("web", "allow_archive") + for i, spec in self.archive_specs.iteritems(): + if i in allowed or self.configbool("web", "allow" + i): + yield {"type" : i, "extension" : spec[2], "node" : nodeid} + + archive_specs = { + 'bz2': ('application/x-bzip2', 'tbz2', '.tar.bz2', None), + 'gz': ('application/x-gzip', 'tgz', '.tar.gz', None), + 'zip': ('application/zip', 'zip', '.zip', None), + } + + def check_perm(self, req, op): + for hook in permhooks: + hook(self, req, op) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgweb_mod.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgweb_mod.pyo Binary files differnew file mode 100644 index 0000000..375c705 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgweb_mod.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgwebdir_mod.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgwebdir_mod.py new file mode 100644 index 0000000..167cf35 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/hgwebdir_mod.py @@ -0,0 +1,359 @@ +# hgweb/hgwebdir_mod.py - Web interface for a directory of repositories. +# +# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import os, re, time, urlparse +from mercurial.i18n import _ +from mercurial import ui, hg, util, templater +from mercurial import error, encoding +from common import ErrorResponse, get_mtime, staticfile, paritygen, \ + get_contact, HTTP_OK, HTTP_NOT_FOUND, HTTP_SERVER_ERROR +from hgweb_mod import hgweb +from request import wsgirequest +import webutil + +def cleannames(items): + return [(util.pconvert(name).strip('/'), path) for name, path in items] + +def findrepos(paths): + repos = [] + for prefix, root in cleannames(paths): + roothead, roottail = os.path.split(root) + # "foo = /bar/*" makes every subrepo of /bar/ to be + # mounted as foo/subrepo + # and "foo = /bar/**" also recurses into the subdirectories, + # remember to use it without working dir. + try: + recurse = {'*': False, '**': True}[roottail] + except KeyError: + repos.append((prefix, root)) + continue + roothead = os.path.normpath(os.path.abspath(roothead)) + for path in util.walkrepos(roothead, followsym=True, recurse=recurse): + path = os.path.normpath(path) + name = util.pconvert(path[len(roothead):]).strip('/') + if prefix: + name = prefix + '/' + name + repos.append((name, path)) + return repos + +class hgwebdir(object): + refreshinterval = 20 + + def __init__(self, conf, baseui=None): + self.conf = conf + self.baseui = baseui + self.lastrefresh = 0 + self.motd = None + self.refresh() + + def refresh(self): + if self.lastrefresh + self.refreshinterval > time.time(): + return + + if self.baseui: + u = self.baseui.copy() + else: + u = ui.ui() + u.setconfig('ui', 'report_untrusted', 'off') + u.setconfig('ui', 'interactive', 'off') + + if not isinstance(self.conf, (dict, list, tuple)): + map = {'paths': 'hgweb-paths'} + if not os.path.exists(self.conf): + raise util.Abort(_('config file %s not found!') % self.conf) + u.readconfig(self.conf, remap=map, trust=True) + paths = u.configitems('hgweb-paths') + elif isinstance(self.conf, (list, tuple)): + paths = self.conf + elif isinstance(self.conf, 
dict):
            # NOTE(review): fragment — this line is the tail of refresh();
            # the start of the method (and the hgwebdir class header) lies
            # before this chunk of the file.
            paths = self.conf.items()

        repos = findrepos(paths)
        # expand [collections] entries by walking each root for repositories
        for prefix, root in u.configitems('collections'):
            prefix = util.pconvert(prefix)
            for path in util.walkrepos(root, followsym=True):
                repo = os.path.normpath(path)
                name = util.pconvert(repo)
                if name.startswith(prefix):
                    name = name[len(prefix):]
                repos.append((name.lstrip('/'), repo))

        self.repos = repos
        self.ui = u
        encoding.encoding = self.ui.config('web', 'encoding',
                                           encoding.encoding)
        self.style = self.ui.config('web', 'style', 'paper')
        self.templatepath = self.ui.config('web', 'templates', None)
        self.stripecount = self.ui.config('web', 'stripes', 1)
        if self.stripecount:
            self.stripecount = int(self.stripecount)
        self._baseurl = self.ui.config('web', 'baseurl')
        self.lastrefresh = time.time()

    def run(self):
        """Serve a single request as a CGI script.

        Refuses to run unless the CGI gateway environment variable is
        present, then hands itself to the wsgi-over-CGI shim.
        """
        if not os.environ.get('GATEWAY_INTERFACE', '').startswith("CGI/1."):
            raise RuntimeError("This function is only intended to be "
                               "called while running as a CGI script.")
        import mercurial.hgweb.wsgicgi as wsgicgi
        wsgicgi.launch(self)

    def __call__(self, env, respond):
        # WSGI entry point: wrap the raw environ in a wsgirequest.
        req = wsgirequest(env, respond)
        return self.run_wsgi(req)

    def read_allowed(self, ui, req):
        """Check allow_read and deny_read config options of a repo's ui object
        to determine user permissions.  By default, with neither option set (or
        both empty), allow all users to read the repo.  There are two ways a
        user can be denied read access:  (1) deny_read is not empty, and the
        user is unauthenticated or deny_read contains user (or *), and (2)
        allow_read is not empty and the user is not in allow_read.  Return True
        if user is allowed to read the repo, else return False."""

        user = req.env.get('REMOTE_USER')

        # deny wins over allow; '*' denies everyone
        deny_read = ui.configlist('web', 'deny_read', untrusted=True)
        if deny_read and (not user or deny_read == ['*'] or user in deny_read):
            return False

        allow_read = ui.configlist('web', 'allow_read', untrusted=True)
        # by default, allow reading if no allow_read option has been set
        if (not allow_read) or (allow_read == ['*']) or (user in allow_read):
            return True

        return False

    def run_wsgi(self, req):
        """Dispatch one WSGI request: static file, top-level index,
        a nested repository (delegated to hgweb), a subdirectory index,
        or a 404 template."""
        try:
            try:
                self.refresh()

                virtual = req.env.get("PATH_INFO", "").strip('/')
                tmpl = self.templater(req)
                ctype = tmpl('mimetype', encoding=encoding.encoding)
                ctype = templater.stringify(ctype)

                # a static file
                if virtual.startswith('static/') or 'static' in req.form:
                    if virtual.startswith('static/'):
                        fname = virtual[7:]
                    else:
                        fname = req.form['static'][0]
                    static = templater.templatepath('static')
                    return (staticfile(static, fname, req),)

                # top-level index
                elif not virtual:
                    req.respond(HTTP_OK, ctype)
                    return self.makeindex(req, tmpl)

                # nested indexes and hgwebs

                repos = dict(self.repos)
                virtualrepo = virtual
                # walk the virtual path upward ('a/b/c' -> 'a/b' -> 'a')
                # until a configured repository name matches
                while virtualrepo:
                    real = repos.get(virtualrepo)
                    if real:
                        req.env['REPO_NAME'] = virtualrepo
                        try:
                            repo = hg.repository(self.ui, real)
                            return hgweb(repo).run_wsgi(req)
                        except IOError, inst:
                            msg = inst.strerror
                            raise ErrorResponse(HTTP_SERVER_ERROR, msg)
                        except error.RepoError, inst:
                            raise ErrorResponse(HTTP_SERVER_ERROR, str(inst))

                    up = virtualrepo.rfind('/')
                    if up < 0:
                        break
                    virtualrepo = virtualrepo[:up]

                # browse subdirectories
                subdir = virtual + '/'
                if [r for r in repos if r.startswith(subdir)]:
                    req.respond(HTTP_OK, ctype)
                    return self.makeindex(req, tmpl, subdir)

                # prefixes not found
                req.respond(HTTP_NOT_FOUND, ctype)
                return tmpl("notfound", repo=virtual)

            except ErrorResponse, err:
                req.respond(err, ctype)
                return tmpl('error', error=err.message or '')
        finally:
            tmpl = None

    def makeindex(self, req, tmpl, subdir=""):
        """Render the repository index page, optionally restricted to
        repositories under *subdir*, honoring the 'sort' form field."""

        def archivelist(ui, nodeid, url):
            # yield one entry per archive type enabled in the config
            allowed = ui.configlist("web", "allow_archive", untrusted=True)
            for i in [('zip', '.zip'), ('gz', '.tar.gz'), ('bz2', '.tar.bz2')]:
                if i[0] in allowed or ui.configbool("web", "allow" + i[0],
                                                    untrusted=True):
                    yield {"type" : i[0], "extension": i[1],
                           "node": nodeid, "url": url}

        def rawentries(subdir="", **map):
            # generate one row dict per visible, readable repository

            descend = self.ui.configbool('web', 'descend', True)
            for name, path in self.repos:

                if not name.startswith(subdir):
                    continue
                name = name[len(subdir):]
                if not descend and '/' in name:
                    continue

                u = self.ui.copy()
                try:
                    u.readconfig(os.path.join(path, '.hg', 'hgrc'))
                except Exception, e:
                    u.warn(_('error reading %s/.hg/hgrc: %s\n') % (path, e))
                    continue
                def get(section, name, default=None):
                    return u.config(section, name, default, untrusted=True)

                if u.configbool("web", "hidden", untrusted=True):
                    continue

                if not self.read_allowed(u, req):
                    continue

                parts = [name]
                if 'PATH_INFO' in req.env:
                    parts.insert(0, req.env['PATH_INFO'].rstrip('/'))
                if req.env['SCRIPT_NAME']:
                    parts.insert(0, req.env['SCRIPT_NAME'])
                # collapse duplicate slashes in the assembled URL
                url = re.sub(r'/+', '/', '/'.join(parts) + '/')

                # update time with local timezone
                try:
                    r = hg.repository(self.ui, path)
                except error.RepoError:
                    u.warn(_('error accessing repository at %s\n') % path)
                    continue
                try:
                    d = (get_mtime(r.spath), util.makedate()[1])
                except OSError:
                    continue

                contact = get_contact(get)
                description = get("web", "description", "")
                name = get("web", "name", name)
                row = dict(contact=contact or "unknown",
                           contact_sort=contact.upper() or "unknown",
                           name=name,
                           name_sort=name,
                           url=url,
                           description=description or "unknown",
                           description_sort=description.upper() or "unknown",
                           lastchange=d,
                           lastchange_sort=d[1]-d[0],
                           archives=archivelist(u, "tip", url))
                yield row

        sortdefault = None, False
        def entries(sortcolumn="", descending=False, subdir="", **map):
            # sort rows (if requested) and tag them with alternating parity
            rows = rawentries(subdir=subdir, **map)

            if sortcolumn and sortdefault != (sortcolumn, descending):
                sortkey = '%s_sort' % sortcolumn
                rows = sorted(rows, key=lambda x: x[sortkey],
                              reverse=descending)
            for row, parity in zip(rows, paritygen(self.stripecount)):
                row['parity'] = parity
                yield row

        self.refresh()
        sortable = ["name", "description", "contact", "lastchange"]
        sortcolumn, descending = sortdefault
        if 'sort' in req.form:
            # a leading '-' on the sort column means descending order
            sortcolumn = req.form['sort'][0]
            descending = sortcolumn.startswith('-')
            if descending:
                sortcolumn = sortcolumn[1:]
            if sortcolumn not in sortable:
                sortcolumn = ""

        sort = [("sort_%s" % column,
                 "%s%s" % ((not descending and column == sortcolumn)
                           and "-" or "", column))
                for column in sortable]

        self.refresh()
        self.updatereqenv(req.env)

        return tmpl("index", entries=entries, subdir=subdir,
                    sortcolumn=sortcolumn, descending=descending,
                    **dict(sort))

    def templater(self, req):
        """Build the templater for this request, wiring in header/footer/motd
        generators and the session/static URL defaults."""

        def header(**map):
            yield tmpl('header', encoding=encoding.encoding, **map)

        def footer(**map):
            yield tmpl("footer", **map)

        def motd(**map):
            if self.motd is not None:
                yield self.motd
            else:
                yield config('web', 'motd', '')

        def config(section, name, default=None, untrusted=True):
            return self.ui.config(section, name, default, untrusted)

        self.updatereqenv(req.env)

        url = req.env.get('SCRIPT_NAME', '')
        if not url.endswith('/'):
            url += '/'

        vars = {}
        # style preference order: request form, config, hard default
        styles = (
            req.form.get('style', [None])[0],
            config('web', 'style'),
            'paper'
        )
        style, mapfile = templater.stylemap(styles, self.templatepath)
        if style == styles[0]:
            vars['style'] = style

        start = url[-1] == '?' and '&' or '?'
        sessionvars = webutil.sessionvars(vars, start)
        staticurl = config('web', 'staticurl') or url + 'static/'
        if not staticurl.endswith('/'):
            staticurl += '/'

        tmpl = templater.templater(mapfile,
                                   defaults={"header": header,
                                             "footer": footer,
                                             "motd": motd,
                                             "url": url,
                                             "staticurl": staticurl,
                                             "sessionvars": sessionvars})
        return tmpl

    def updatereqenv(self, env):
        """Override SERVER_NAME/PORT and SCRIPT_NAME in *env* from the
        configured web.baseurl, if any."""
        def splitnetloc(netloc):
            if ':' in netloc:
                return netloc.split(':', 1)
            else:
                return (netloc, None)

        if self._baseurl is not None:
            urlcomp = urlparse.urlparse(self._baseurl)
            host, port = splitnetloc(urlcomp[1])
            path = urlcomp[2]
            env['SERVER_NAME'] = host
            if port:
                env['SERVER_PORT'] = port
            env['SCRIPT_NAME'] = path
# hgweb/protocol.py - serve the mercurial wire protocol over hgweb
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import cStringIO, zlib, sys, urllib
from mercurial import util, wireproto
from common import HTTP_OK

HGTYPE = 'application/mercurial-0.1'

class webproto(object):
    """Adapter giving wireproto.dispatch() its expected protocol interface
    on top of an hgweb WSGI request."""

    def __init__(self, req):
        self.req = req
        self.response = ''

    def getargs(self, args):
        """Extract the named arguments from the request form.

        *args* is a space-separated list of expected argument names; a
        literal '*' collects every form field not otherwise named into a
        dict stored under the '*' key.  Returns values in *args* order.
        """
        data = {}
        keys = args.split()
        for k in keys:
            if k == '*':
                star = {}
                for key in self.req.form.keys():
                    if key not in keys:
                        star[key] = self.req.form[key][0]
                data['*'] = star
            else:
                data[k] = self.req.form[k][0]
        return [data[k] for k in keys]

    def getfile(self, fp):
        # copy exactly CONTENT_LENGTH bytes of the request body into fp
        length = int(self.req.env['CONTENT_LENGTH'])
        for s in util.filechunkiter(self.req, limit=length):
            fp.write(s)

    def redirect(self):
        # capture stdout/stderr so command output can be relayed in the
        # HTTP response body; restored by call() below
        self.oldio = sys.stdout, sys.stderr
        sys.stderr = sys.stdout = cStringIO.StringIO()

    def groupchunks(self, cg):
        # stream a changegroup, zlib-compressed in 4k chunks
        z = zlib.compressobj()
        while 1:
            chunk = cg.read(4096)
            if not chunk:
                break
            yield z.compress(chunk)
        yield z.flush()

    def _client(self):
        # identification string for logging: scheme, host, user
        return 'remote:%s:%s:%s' % (
            self.req.env.get('wsgi.url_scheme') or 'http',
            urllib.quote(self.req.env.get('REMOTE_HOST', '')),
            urllib.quote(self.req.env.get('REMOTE_USER', '')))

def iscmd(cmd):
    """Return True if *cmd* is a known wire-protocol command."""
    return cmd in wireproto.commands

def call(repo, req, cmd):
    """Dispatch wire-protocol command *cmd* and translate its result
    (plain string, stream, push result, or push error) into a WSGI body."""
    p = webproto(req)
    rsp = wireproto.dispatch(repo, p, cmd)
    if isinstance(rsp, str):
        req.respond(HTTP_OK, HGTYPE, length=len(rsp))
        return [rsp]
    elif isinstance(rsp, wireproto.streamres):
        req.respond(HTTP_OK, HGTYPE)
        return rsp.gen
    elif isinstance(rsp, wireproto.pushres):
        # push succeeded: restore stdio and send "<rc>\n<captured output>"
        val = sys.stdout.getvalue()
        sys.stdout, sys.stderr = p.oldio
        req.respond(HTTP_OK, HGTYPE)
        return ['%d\n%s' % (rsp.res, val)]
    elif isinstance(rsp, wireproto.pusherr):
        # drain the incoming bundle
        req.drain()
        sys.stdout, sys.stderr = p.oldio
        rsp = '0\n%s\n' % rsp.res
        req.respond(HTTP_OK, HGTYPE, length=len(rsp))
        return [rsp]
# hgweb/request.py - An http request from either CGI or the standalone server.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import socket, cgi, errno
from mercurial import util
from common import ErrorResponse, statusmessage, HTTP_NOT_MODIFIED

# legacy one-letter query shortcuts, expanded into cmd/argument pairs by
# normalize() below; a value of None means "reuse the shortcut's own value"
shortcuts = {
    'cl': [('cmd', ['changelog']), ('rev', None)],
    'sl': [('cmd', ['shortlog']), ('rev', None)],
    'cs': [('cmd', ['changeset']), ('node', None)],
    'f': [('cmd', ['file']), ('filenode', None)],
    'fl': [('cmd', ['filelog']), ('filenode', None)],
    'fd': [('cmd', ['filediff']), ('node', None)],
    'fa': [('cmd', ['annotate']), ('filenode', None)],
    'mf': [('cmd', ['manifest']), ('manifest', None)],
    'ca': [('cmd', ['archive']), ('node', None)],
    'tags': [('cmd', ['tags'])],
    'tip': [('cmd', ['changeset']), ('node', ['tip'])],
    'static': [('cmd', ['static']), ('file', None)]
}

def normalize(form):
    """Expand legacy shortcut keys in a parsed form dict and strip
    whitespace from every value."""
    # first expand the shortcuts
    for k in shortcuts.iterkeys():
        if k in form:
            for name, value in shortcuts[k]:
                if value is None:
                    value = form[k]
                form[name] = value
            del form[k]
    # And strip the values
    for k, v in form.iteritems():
        form[k] = [i.strip() for i in v]
    return form

class wsgirequest(object):
    """A single HTTP request seen through the WSGI interface: parsed form,
    response-header buffering, and body writing."""

    def __init__(self, wsgienv, start_response):
        version = wsgienv['wsgi.version']
        if (version < (1, 0)) or (version >= (2, 0)):
            raise RuntimeError("Unknown and unsupported WSGI version %d.%d"
                               % version)
        self.inp = wsgienv['wsgi.input']
        self.err = wsgienv['wsgi.errors']
        self.threaded = wsgienv['wsgi.multithread']
        self.multiprocess = wsgienv['wsgi.multiprocess']
        self.run_once = wsgienv['wsgi.run_once']
        self.env = wsgienv
        self.form = normalize(cgi.parse(self.inp,
                                        self.env,
                                        keep_blank_values=1))
        self._start_response = start_response
        self.server_write = None
        self.headers = []

    def __iter__(self):
        # the response body is produced via write(), not iteration
        return iter([])

    def read(self, count=-1):
        return self.inp.read(count)

    def drain(self):
        '''need to read all data from request, httplib is half-duplex'''
        length = int(self.env.get('CONTENT_LENGTH', 0))
        for s in util.filechunkiter(self.inp, limit=length):
            pass

    def respond(self, status, type=None, filename=None, length=0):
        """Send status and buffered headers via start_response (once).

        *status* may be an int, an ErrorResponse, or 200 (which gets the
        historical '200 Script output follows' reason phrase).
        """
        if self._start_response is not None:

            self.httphdr(type, filename, length)
            if not self.headers:
                raise RuntimeError("request.write called before headers sent")

            for k, v in self.headers:
                if not isinstance(v, str):
                    raise TypeError('header value must be string: %r' % v)

            if isinstance(status, ErrorResponse):
                self.header(status.headers)
                if status.code == HTTP_NOT_MODIFIED:
                    # RFC 2616 Section 10.3.5: 304 Not Modified has cases where
                    # it MUST NOT include any headers other than these and no
                    # body
                    self.headers = [(k, v) for (k, v) in self.headers if
                                    k in ('Date', 'ETag', 'Expires',
                                          'Cache-Control', 'Vary')]
                status = statusmessage(status.code, status.message)
            elif status == 200:
                status = '200 Script output follows'
            elif isinstance(status, int):
                status = statusmessage(status)

            self.server_write = self._start_response(status, self.headers)
            self._start_response = None
            self.headers = []

    def write(self, thing):
        # accept either an iterable of parts or a single stringifiable value
        if hasattr(thing, "__iter__"):
            for part in thing:
                self.write(part)
        else:
            thing = str(thing)
            try:
                self.server_write(thing)
            except socket.error, inst:
                # a client hanging up mid-response is not an error worth
                # propagating
                if inst[0] != errno.ECONNRESET:
                    raise

    def writelines(self, lines):
        for line in lines:
            self.write(line)

    def flush(self):
        return None

    def close(self):
        return None

    def header(self, headers=[('Content-Type','text/html')]):
        # NOTE(review): mutable default argument is safe here only because
        # the default list is never mutated (extend targets self.headers)
        self.headers.extend(headers)

    def httphdr(self, type=None, filename=None, length=0, headers={}):
        """Buffer standard response headers: Content-Type,
        Content-Disposition (with quoting for the filename), Content-Length."""
        headers = headers.items()
        if type is not None:
            headers.append(('Content-Type', type))
        if filename:
            filename = (filename.split('/')[-1]
                        .replace('\\', '\\\\').replace('"', '\\"'))
            headers.append(('Content-Disposition',
                            'inline; filename="%s"' % filename))
        if length:
            headers.append(('Content-Length', str(length)))
        self.header(headers)

def wsgiapplication(app_maker):
    '''For compatibility with old CGI scripts. A plain hgweb() or hgwebdir()
    can and should now be used as a WSGI application.'''
    application = app_maker()
    def run_wsgi(env, respond):
        return application(env, respond)
    return run_wsgi
# hgweb/server.py - The standalone hg web server.
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import os, sys, errno, urllib, BaseHTTPServer, socket, SocketServer, traceback
from mercurial import util, error
from mercurial.i18n import _

def _splitURI(uri):
    """ Return path and query splited from uri

    Just like CGI environment, the path is unquoted, the query is
    not.
    """
    if '?' in uri:
        path, query = uri.split('?', 1)
    else:
        path, query = uri, ''
    return urllib.unquote(path), query

class _error_logger(object):
    """File-like object routing wsgi.errors writes to the handler's
    error log, one log entry per line."""
    def __init__(self, handler):
        self.handler = handler
    def flush(self):
        pass
    def write(self, str):
        self.writelines(str.split('\n'))
    def writelines(self, seq):
        for msg in seq:
            self.handler.log_error("HG error: %s", msg)

class _httprequesthandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Request handler translating an HTTP request into a WSGI call on
    the server's application."""

    url_scheme = 'http'

    @staticmethod
    def preparehttpserver(httpserver, ssl_cert):
        """Prepare .socket of new HTTPServer instance"""
        pass

    def __init__(self, *args, **kargs):
        self.protocol_version = 'HTTP/1.1'
        BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kargs)

    def _log_any(self, fp, format, *args):
        # common-log-ish line: client, timestamp, message
        fp.write("%s - - [%s] %s\n" % (self.client_address[0],
                                       self.log_date_time_string(),
                                       format % args))
        fp.flush()

    def log_error(self, format, *args):
        self._log_any(self.server.errorlog, format, *args)

    def log_message(self, format, *args):
        self._log_any(self.server.accesslog, format, *args)

    def do_write(self):
        try:
            self.do_hgweb()
        except socket.error, inst:
            # broken pipe from the client is expected; everything else is not
            if inst[0] != errno.EPIPE:
                raise

    def do_POST(self):
        try:
            self.do_write()
        except StandardError:
            # last-resort handler: report a 500 and log the traceback
            self._start_response("500 Internal Server Error", [])
            self._write("Internal Server Error")
            tb = "".join(traceback.format_exception(*sys.exc_info()))
            self.log_error("Exception happened during processing "
                           "request '%s':\n%s", self.path, tb)

    def do_GET(self):
        self.do_POST()

    def do_hgweb(self):
        """Build a CGI/WSGI environ from the HTTP request and drive the
        WSGI application, writing each chunk it yields."""
        path, query = _splitURI(self.path)

        env = {}
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['REQUEST_METHOD'] = self.command
        env['SERVER_NAME'] = self.server.server_name
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_URI'] = self.path
        env['SCRIPT_NAME'] = self.server.prefix
        env['PATH_INFO'] = path[len(self.server.prefix):]
        env['REMOTE_HOST'] = self.client_address[0]
        env['REMOTE_ADDR'] = self.client_address[0]
        if query:
            env['QUERY_STRING'] = query

        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        # remaining headers become HTTP_* variables, minus embedded newlines
        for header in [h for h in self.headers.keys()
                       if h not in ('content-type', 'content-length')]:
            hkey = 'HTTP_' + header.replace('-', '_').upper()
            hval = self.headers.getheader(header)
            hval = hval.replace('\n', '').strip()
            if hval:
                env[hkey] = hval
        env['SERVER_PROTOCOL'] = self.request_version
        env['wsgi.version'] = (1, 0)
        env['wsgi.url_scheme'] = self.url_scheme
        env['wsgi.input'] = self.rfile
        env['wsgi.errors'] = _error_logger(self)
        env['wsgi.multithread'] = isinstance(self.server,
                                             SocketServer.ThreadingMixIn)
        env['wsgi.multiprocess'] = isinstance(self.server,
                                              SocketServer.ForkingMixIn)
        env['wsgi.run_once'] = 0

        self.close_connection = True
        self.saved_status = None
        self.saved_headers = []
        self.sent_headers = False
        self.length = None
        for chunk in self.server.application(env, self._start_response):
            self._write(chunk)

    def send_headers(self):
        """Flush the status line and headers saved by _start_response,
        deciding whether the connection can stay open (keep-alive needs a
        Content-Length and no 'Connection: close')."""
        if not self.saved_status:
            raise AssertionError("Sending headers before "
                                 "start_response() called")
        saved_status = self.saved_status.split(None, 1)
        saved_status[0] = int(saved_status[0])
        self.send_response(*saved_status)
        should_close = True
        for h in self.saved_headers:
            self.send_header(*h)
            if h[0].lower() == 'content-length':
                should_close = False
                self.length = int(h[1])
        # The value of the Connection header is a list of case-insensitive
        # tokens separated by commas and optional whitespace.
        if 'close' in [token.strip().lower() for token in
                       self.headers.get('connection', '').split(',')]:
            should_close = True
        if should_close:
            self.send_header('Connection', 'close')
        self.close_connection = should_close
        self.end_headers()
        self.sent_headers = True

    def _start_response(self, http_status, headers, exc_info=None):
        # WSGI start_response: buffer status/headers, filter hop-by-hop ones
        code, msg = http_status.split(None, 1)
        code = int(code)
        self.saved_status = http_status
        bad_headers = ('connection', 'transfer-encoding')
        self.saved_headers = [h for h in headers
                              if h[0].lower() not in bad_headers]
        return self._write

    def _write(self, data):
        if not self.saved_status:
            raise AssertionError("data written before start_response() called")
        elif not self.sent_headers:
            self.send_headers()
        if self.length is not None:
            if len(data) > self.length:
                raise AssertionError("Content-length header sent, but more "
                                     "bytes than specified are being written.")
            self.length = self.length - len(data)
        self.wfile.write(data)
        self.wfile.flush()

class _httprequesthandleropenssl(_httprequesthandler):
    """HTTPS handler based on pyOpenSSL"""

    url_scheme = 'https'

    @staticmethod
    def preparehttpserver(httpserver, ssl_cert):
        try:
            import OpenSSL
            OpenSSL.SSL.Context  # probe for the attribute we need
        except ImportError:
            raise util.Abort(_("SSL support is unavailable"))
        ctx = OpenSSL.SSL.Context(OpenSSL.SSL.SSLv23_METHOD)
        ctx.use_privatekey_file(ssl_cert)
        ctx.use_certificate_file(ssl_cert)
        sock = socket.socket(httpserver.address_family, httpserver.socket_type)
        httpserver.socket = OpenSSL.SSL.Connection(ctx, sock)
        httpserver.server_bind()
        httpserver.server_activate()

    def setup(self):
        self.connection = self.request
        self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
        self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)

    def do_write(self):
        import OpenSSL
        try:
            _httprequesthandler.do_write(self)
        except OpenSSL.SSL.SysCallError, inst:
            if inst.args[0] != errno.EPIPE:
                raise

    def handle_one_request(self):
        import OpenSSL
        try:
            _httprequesthandler.handle_one_request(self)
        except (OpenSSL.SSL.SysCallError, OpenSSL.SSL.ZeroReturnError):
            self.close_connection = True
            pass

class _httprequesthandlerssl(_httprequesthandler):
    """HTTPS handler based on Pythons ssl module (introduced in 2.6)"""

    url_scheme = 'https'

    @staticmethod
    def preparehttpserver(httpserver, ssl_cert):
        try:
            import ssl
            ssl.wrap_socket  # probe for the attribute we need
        except ImportError:
            raise util.Abort(_("SSL support is unavailable"))
        httpserver.socket = ssl.wrap_socket(httpserver.socket, server_side=True,
            certfile=ssl_cert, ssl_version=ssl.PROTOCOL_SSLv23)

    def setup(self):
        self.connection = self.request
        self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
        self.wfile = socket._fileobject(self.request, "wb", self.wbufsize)

# pick a concurrency mixin: threads if available, else fork, else serial
try:
    from threading import activeCount
    _mixin = SocketServer.ThreadingMixIn
except ImportError:
    if hasattr(os, "fork"):
        _mixin = SocketServer.ForkingMixIn
    else:
        class _mixin:
            pass

def openlog(opt, default):
    """Open *opt* for appending as a log target; '-' or empty selects
    the provided *default* stream."""
    if opt and opt != '-':
        return open(opt, 'a')
    return default

class MercurialHTTPServer(object, _mixin, BaseHTTPServer.HTTPServer):
    """HTTP server hosting an hgweb/hgwebdir WSGI application, configured
    from the 'web' section of the ui."""

    # SO_REUSEADDR has broken semantics on windows
    if os.name == 'nt':
        allow_reuse_address = 0

    def __init__(self, ui, app, addr, handler, **kwargs):
        BaseHTTPServer.HTTPServer.__init__(self, addr, handler, **kwargs)
        self.daemon_threads = True
        self.application = app

        handler.preparehttpserver(self, ui.config('web', 'certificate'))

        prefix = ui.config('web', 'prefix', '')
        if prefix:
            prefix = '/' + prefix.strip('/')
        self.prefix = prefix

        alog = openlog(ui.config('web', 'accesslog', '-'), sys.stdout)
        elog = openlog(ui.config('web', 'errorlog', '-'), sys.stderr)
        self.accesslog = alog
        self.errorlog = elog

        self.addr, self.port = self.socket.getsockname()[0:2]
        self.fqaddr = socket.getfqdn(addr[0])

class IPv6HTTPServer(MercurialHTTPServer):
    address_family = getattr(socket, 'AF_INET6', None)
    def __init__(self, *args, **kwargs):
        if self.address_family is None:
            raise error.RepoError(_('IPv6 is not available on this system'))
        super(IPv6HTTPServer, self).__init__(*args, **kwargs)

def create_server(ui, app):
    """Instantiate the configured server class/handler for *app* and bind
    it to the configured address and port.  Raises util.Abort on failure."""

    if ui.config('web', 'certificate'):
        if sys.version_info >= (2, 6):
            handler = _httprequesthandlerssl
        else:
            handler = _httprequesthandleropenssl
    else:
        handler = _httprequesthandler

    if ui.configbool('web', 'ipv6'):
        cls = IPv6HTTPServer
    else:
        cls = MercurialHTTPServer

    # ugly hack due to python issue5853 (for threaded use)
    import mimetypes; mimetypes.init()

    address = ui.config('web', 'address', '')
    port = util.getport(ui.config('web', 'port', 8000))
    try:
        return cls(ui, app, (address, port), handler)
    except socket.error, inst:
        raise util.Abort(_("cannot start server at '%s:%d': %s")
                         % (address, port, inst.args[1]))
# hgweb/webcommands.py - the browser-facing hgweb commands
#
# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net>
# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import os, mimetypes, re, cgi, copy
import webutil
from mercurial import error, encoding, archival, templater, templatefilters
from mercurial.node import short, hex
from mercurial.util import binary
from common import paritygen, staticfile, get_contact, ErrorResponse
from common import HTTP_OK, HTTP_FORBIDDEN, HTTP_NOT_FOUND
from mercurial import graphmod
from mercurial import help as helpmod
from mercurial.i18n import _

# __all__ is populated with the allowed commands. Be sure to add to it if
# you're adding a new command, or the new command won't work.

__all__ = [
 'log', 'rawfile', 'file', 'changelog', 'shortlog', 'changeset', 'rev',
 'manifest', 'tags', 'branches', 'summary', 'filediff', 'diff', 'annotate',
 'filelog', 'archive', 'static', 'graph', 'help',
]

def log(web, req, tmpl):
    """Dispatch to filelog when a file is named in the form, otherwise
    to changelog."""
    if 'file' in req.form and req.form['file'][0]:
        return filelog(web, req, tmpl)
    else:
        return changelog(web, req, tmpl)

def rawfile(web, req, tmpl):
    """Serve the raw contents of a file at a revision, guessing the MIME
    type from the path (falling back on a binary sniff)."""
    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    if not path:
        content = manifest(web, req, tmpl)
        req.respond(HTTP_OK, web.ctype)
        return content

    try:
        fctx = webutil.filectx(web.repo, req)
    except error.LookupError, inst:
        # not a file: fall back to a manifest listing, but re-raise the
        # original lookup error if that fails too
        try:
            content = manifest(web, req, tmpl)
            req.respond(HTTP_OK, web.ctype)
            return content
        except ErrorResponse:
            raise inst

    path = fctx.path()
    text = fctx.data()
    mt = mimetypes.guess_type(path)[0]
    if mt is None:
        mt = binary(text) and 'application/octet-stream' or 'text/plain'
    if mt.startswith('text/'):
        mt += '; charset="%s"' % encoding.encoding

    req.respond(HTTP_OK, mt, path, len(text))
    return [text]

def _filerevision(web, tmpl, fctx):
    # render one revision of one file, line by line, via the
    # 'filerevision' template
    f = fctx.path()
    text = fctx.data()
    parity = paritygen(web.stripecount)

    if binary(text):
        mt = mimetypes.guess_type(f)[0] or 'application/octet-stream'
        text = '(binary:%s)' % mt

    def lines():
        for lineno, t in enumerate(text.splitlines(True)):
            yield {"line": t,
                   "lineid": "l%d" % (lineno + 1),
                   "linenumber": "% 6d" % (lineno + 1),
                   "parity": parity.next()}

    return tmpl("filerevision",
                file=f,
                path=webutil.up(f),
                text=lines(),
                rev=fctx.rev(),
                node=hex(fctx.node()),
                author=fctx.user(),
                date=fctx.date(),
                desc=fctx.description(),
                branch=webutil.nodebranchnodefault(fctx),
                parent=webutil.parents(fctx),
                child=webutil.children(fctx),
                rename=webutil.renamelink(fctx),
                permissions=fctx.manifest().flags(f))

def file(web, req, tmpl):
    """Show a file revision, or the manifest when no path is given or
    the path is not a file."""
    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    if not path:
        return manifest(web, req, tmpl)
    try:
        return _filerevision(web, tmpl, webutil.filectx(web.repo, req))
    except error.LookupError, inst:
        try:
            return manifest(web, req, tmpl)
        except ErrorResponse:
            raise inst

def _search(web, req, tmpl):
    # keyword search over user, description and file names, newest first

    query = req.form['rev'][0]
    revcount = web.maxchanges
    if 'revcount' in req.form:
        revcount = int(req.form.get('revcount', [revcount])[0])
        tmpl.defaults['sessionvars']['revcount'] = revcount

    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = revcount / 2
    lessvars['rev'] = query
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2
    morevars['rev'] = query

    def changelist(**map):
        count = 0
        qw = query.lower().split()

        def revgen():
            # walk the changelog newest-first in batches of 100
            for i in xrange(len(web.repo) - 1, 0, -100):
                l = []
                for j in xrange(max(0, i - 100), i + 1):
                    ctx = web.repo[j]
                    l.append(ctx)
                l.reverse()
                for e in l:
                    yield e

        for ctx in revgen():
            # a changeset matches only if every query word appears somewhere
            miss = 0
            for q in qw:
                if not (q in ctx.user().lower() or
                        q in ctx.description().lower() or
                        q in " ".join(ctx.files()).lower()):
                    miss = 1
                    break
            if miss:
                continue

            count += 1
            n = ctx.node()
            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
            files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)

            yield tmpl('searchentry',
                       parity=parity.next(),
                       author=ctx.user(),
                       parent=webutil.parents(ctx),
                       child=webutil.children(ctx),
                       changelogtag=showtags,
                       desc=ctx.description(),
                       date=ctx.date(),
                       files=files,
                       rev=ctx.rev(),
                       node=hex(n),
                       tags=webutil.nodetagsdict(web.repo, n),
                       inbranch=webutil.nodeinbranch(web.repo, ctx),
                       branches=webutil.nodebranchdict(web.repo, ctx))

            if count >= revcount:
                break

    tip = web.repo['tip']
    parity = paritygen(web.stripecount)

    return tmpl('search', query=query, node=tip.hex(),
                entries=changelist, archives=web.archivelist("tip"),
                morevars=morevars, lessvars=lessvars)

def changelog(web, req, tmpl, shortlog=False):
    """Render a page of the changelog (or shortlog) around the requested
    revision; falls back to keyword search when 'rev' is not a revision."""

    if 'node' in req.form:
        ctx = webutil.changectx(web.repo, req)
    else:
        if 'rev' in req.form:
            hi = req.form['rev'][0]
        else:
            hi = len(web.repo) - 1
        try:
            ctx = web.repo[hi]
        except error.RepoError:
            return _search(web, req, tmpl) # XXX redirect to 404 page?

    def changelist(limit=0, **map):
        l = [] # build a list in forward order for efficiency
        for i in xrange(start, end):
            ctx = web.repo[i]
            n = ctx.node()
            showtags = webutil.showtag(web.repo, tmpl, 'changelogtag', n)
            files = webutil.listfilediffs(tmpl, ctx.files(), n, web.maxfiles)

            l.insert(0, {"parity": parity.next(),
                         "author": ctx.user(),
                         "parent": webutil.parents(ctx, i - 1),
                         "child": webutil.children(ctx, i + 1),
                         "changelogtag": showtags,
                         "desc": ctx.description(),
                         "date": ctx.date(),
                         "files": files,
                         "rev": i,
                         "node": hex(n),
                         "tags": webutil.nodetagsdict(web.repo, n),
                         "inbranch": webutil.nodeinbranch(web.repo, ctx),
                         "branches": webutil.nodebranchdict(web.repo, ctx)
                        })

        if limit > 0:
            l = l[:limit]

        for e in l:
            yield e

    revcount = shortlog and web.maxshortchanges or web.maxchanges
    if 'revcount' in req.form:
        revcount = int(req.form.get('revcount', [revcount])[0])
        tmpl.defaults['sessionvars']['revcount'] = revcount

    lessvars = copy.copy(tmpl.defaults['sessionvars'])
    lessvars['revcount'] = revcount / 2
    morevars = copy.copy(tmpl.defaults['sessionvars'])
    morevars['revcount'] = revcount * 2

    count = len(web.repo)
    pos = ctx.rev()
    start = max(0, pos - revcount + 1)
    end = min(count, start + revcount)
    pos = end - 1
    parity = paritygen(web.stripecount, offset=start - end)

    changenav = webutil.revnavgen(pos, revcount, count, web.repo.changectx)

    return tmpl(shortlog and 'shortlog' or 'changelog', changenav=changenav,
                node=hex(ctx.node()), rev=pos, changesets=count,
                entries=lambda **x: changelist(limit=0,**x),
                latestentry=lambda **x: changelist(limit=1,**x),
                archives=web.archivelist("tip"), revcount=revcount,
                morevars=morevars, lessvars=lessvars)

def shortlog(web, req, tmpl):
    """Abbreviated changelog."""
    return changelog(web, req, tmpl, shortlog = True)

def changeset(web, req, tmpl):
    """Render a single changeset with its file list and diff."""
    ctx = webutil.changectx(web.repo, req)
    showtags = webutil.showtag(web.repo, tmpl, 'changesettag', ctx.node())
    showbranch = webutil.nodebranchnodefault(ctx)

    files = []
    parity = paritygen(web.stripecount)
    for f in ctx.files():
        # a file mentioned by the changeset may have been removed in it
        template = f in ctx and 'filenodelink' or 'filenolink'
        files.append(tmpl(template,
                          node=ctx.hex(), file=f,
                          parity=parity.next()))

    parity = paritygen(web.stripecount)
    style = web.config('web', 'style', 'paper')
    if 'style' in req.form:
        style = req.form['style'][0]

    diffs = webutil.diffs(web.repo, tmpl, ctx, None, parity, style)
    return tmpl('changeset',
                diff=diffs,
                rev=ctx.rev(),
                node=ctx.hex(),
                parent=webutil.parents(ctx),
                child=webutil.children(ctx),
                changesettag=showtags,
                changesetbranch=showbranch,
                author=ctx.user(),
                desc=ctx.description(),
                date=ctx.date(),
                files=files,
                archives=web.archivelist(ctx.hex()),
                tags=webutil.nodetagsdict(web.repo, ctx.node()),
                branch=webutil.nodebranchnodefault(ctx),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx))

rev = changeset

def manifest(web, req, tmpl):
    """Render a directory listing of the manifest at the requested path,
    splitting entries into files and (possibly collapsed) subdirectories."""
    ctx = webutil.changectx(web.repo, req)
    path = webutil.cleanpath(web.repo, req.form.get('file', [''])[0])
    mf = ctx.manifest()
    node = ctx.node()

    files = {}
    dirs = {}
    parity = paritygen(web.stripecount)

    if path and path[-1] != "/":
        path += "/"
    l = len(path)
    abspath = "/" + path

    for f, n in mf.iteritems():
        if f[:l] != path:
            continue
        remain = f[l:]
        elements = remain.split('/')
        if len(elements) == 1:
            files[remain] = f
        else:
            # build a nested dict tree of subdirectories
            h = dirs # need to retain ref to dirs (root)
            for elem in elements[0:-1]:
                if elem not in h:
                    h[elem] = {}
                h = h[elem]
                if len(h) > 1:
                    break
            h[None] = None # denotes files present

    if mf and not files and not dirs:
        raise ErrorResponse(HTTP_NOT_FOUND, 'path not found: ' + path)

    def filelist(**map):
        for f in sorted(files):
            full = files[f]

            fctx = ctx.filectx(full)
            yield {"file": full,
                   "parity": parity.next(),
                   "basename": f,
                   "date": fctx.date(),
                   "size": fctx.size(),
                   "permissions": mf.flags(full)}

    def dirlist(**map):
        for d in sorted(dirs):

            # collapse chains of single-entry directories into one link
            emptydirs = []
            h = dirs[d]
            while isinstance(h, dict) and len(h) == 1:
                k, v = h.items()[0]
                if v:
                    emptydirs.append(k)
                h = v

            path = "%s%s" % (abspath, d)
            yield {"parity": parity.next(),
                   "path": path,
                   "emptydirs": "/".join(emptydirs),
                   "basename": d}

    return tmpl("manifest",
                rev=ctx.rev(),
                node=hex(node),
                path=abspath,
                up=webutil.up(abspath),
                upparity=parity.next(),
                fentries=filelist,
                dentries=dirlist,
                archives=web.archivelist(hex(node)),
                tags=webutil.nodetagsdict(web.repo, node),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx))

def tags(web, req, tmpl):
    """Render the tag list, newest first."""
    i = web.repo.tagslist()
    i.reverse()
    parity = paritygen(web.stripecount)

    def entries(notip=False, limit=0, **map):
        count = 0
        for k, n in i:
            if notip and k == "tip":
                continue
            if limit > 0 and count >= limit:
                continue
            count = count + 1
            yield {"parity": parity.next(),
                   "tag": k,
                   "date": web.repo[n].date(),
                   "node": hex(n)}

    return tmpl("tags",
                node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(False, 0, **x),
                entriesnotip=lambda **x: entries(True, 0, **x),
                latestentry=lambda **x: entries(True, 1, **x))

def branches(web, req, tmpl):
    """Render the branch list with open/closed/inactive status."""
    tips = (web.repo[n] for t, n in web.repo.branchtags().iteritems())
    heads = web.repo.heads()
    parity = paritygen(web.stripecount)
    sortkey = lambda ctx: ('close' not in ctx.extra(), ctx.rev())

    def entries(limit, **map):
        count = 0
        for ctx in sorted(tips, key=sortkey, reverse=True):
            if limit > 0 and count >= limit:
                return
            count += 1
            if ctx.node() not in heads:
                status = 'inactive'
            elif not web.repo.branchheads(ctx.branch()):
                status = 'closed'
            else:
                status = 'open'
            yield {'parity': parity.next(),
                   'branch': ctx.branch(),
                   'status': status,
                   'node': ctx.hex(),
                   'date': ctx.date()}

    return tmpl('branches', node=hex(web.repo.changelog.tip()),
                entries=lambda **x: entries(0, **x),
                latestentry=lambda **x: entries(1, **x))

def summary(web, req, tmpl):
    """Render the repository summary page: recent tags, branches and a
    short changelog."""
    i = web.repo.tagslist()
    i.reverse()

    def tagentries(**map):
        parity = paritygen(web.stripecount)
        count = 0
        for k, n in i:
            if k == "tip": # skip tip
                continue

            count += 1
            if count > 10: # limit to 10 tags
                break

            yield tmpl("tagentry",
                       parity=parity.next(),
                       tag=k,
                       node=hex(n),
                       date=web.repo[n].date())

    def branches(**map):
        # NOTE: shadows the module-level branches() inside this function
        parity = paritygen(web.stripecount)

        b = web.repo.branchtags()
        l = [(-web.repo.changelog.rev(n), n, t) for t, n in b.iteritems()]
        for r, n, t in sorted(l):
            yield {'parity': parity.next(),
                   'branch': t,
                   'node': hex(n),
                   'date': web.repo[n].date()}

    def changelist(**map):
        parity = paritygen(web.stripecount, offset=start - end)
        l = [] # build a list in forward order for efficiency
        for i in xrange(start, end):
            ctx = web.repo[i]
            n = ctx.node()
            hn = hex(n)

            l.insert(0, tmpl(
               'shortlogentry',
                parity=parity.next(),
                author=ctx.user(),
                desc=ctx.description(),
                date=ctx.date(),
                rev=i,
                node=hn,
                tags=webutil.nodetagsdict(web.repo, n),
                inbranch=webutil.nodeinbranch(web.repo, ctx),
                branches=webutil.nodebranchdict(web.repo, ctx)))

        yield l

    tip = web.repo['tip']
    count = len(web.repo)
    start = max(0, count - web.maxchanges)
    end = min(count, start + web.maxchanges)

    return tmpl("summary",
                desc=web.config("web", "description", "unknown"),
                owner=get_contact(web.config) or "unknown",
                lastchange=tip.date(),
                tags=tagentries,
                branches=branches,
                shortlog=changelist,
                node=tip.hex(),
                archives=web.archivelist("tip"))

def filediff(web, req, tmpl):
    """Render the diff of a single file in a changeset.  Works even when
    the file is not present in the target revision (e.g. removed)."""
    fctx, ctx = None, None
    try:
        fctx = webutil.filectx(web.repo, req)
    except LookupError:
        ctx = webutil.changectx(web.repo, req)
        path = webutil.cleanpath(web.repo, req.form['file'][0])
        if path not in ctx.files():
            raise

    if fctx is not None:
        n = fctx.node()
        path = fctx.path()
    else:
        n = ctx.node()
        # path already defined in except clause

    parity = paritygen(web.stripecount)
    style = web.config('web', 'style', 'paper')
    if 'style' in req.form:
        style = req.form['style'][0]

    diffs = webutil.diffs(web.repo, tmpl, fctx or ctx, [path], parity, style)
    rename = fctx and webutil.renamelink(fctx) or []
    ctx = fctx and fctx or ctx
    return tmpl("filediff",
                file=path,
                node=hex(n),
                rev=ctx.rev(),
                date=ctx.date(),
                desc=ctx.description(),
                author=ctx.user(),
                rename=rename,
                branch=webutil.nodebranchnodefault(ctx),
                parent=webutil.parents(ctx),
                child=webutil.children(ctx),
                diff=diffs)

diff = filediff

# NOTE(review): annotate() below is truncated by the chunk boundary — the
# remainder of its body lies past the end of this chunk and is reproduced
# here only up to the cut point.
def annotate(web, req, tmpl):
    fctx = webutil.filectx(web.repo, req)
    f = fctx.path()
    parity = paritygen(web.stripecount)

    def annotate(**map):
        last = None
        if binary(fctx.data()):
            mt = (mimetypes.guess_type(fctx.path())[0]
                  or 'application/octet-stream')
            lines = enumerate([((fctx.filectx(fctx.filerev()), 1),
                                '(binary:%s)' % mt)])
        else:
            lines = enumerate(fctx.annotate(follow=True, linenumber=True))
        for lineno, ((f,
targetline), l) in lines: + fnode = f.filenode() + + if last != fnode: + last = fnode + + yield {"parity": parity.next(), + "node": hex(f.node()), + "rev": f.rev(), + "author": f.user(), + "desc": f.description(), + "file": f.path(), + "targetline": targetline, + "line": l, + "lineid": "l%d" % (lineno + 1), + "linenumber": "% 6d" % (lineno + 1)} + + return tmpl("fileannotate", + file=f, + annotate=annotate, + path=webutil.up(f), + rev=fctx.rev(), + node=hex(fctx.node()), + author=fctx.user(), + date=fctx.date(), + desc=fctx.description(), + rename=webutil.renamelink(fctx), + branch=webutil.nodebranchnodefault(fctx), + parent=webutil.parents(fctx), + child=webutil.children(fctx), + permissions=fctx.manifest().flags(f)) + +def filelog(web, req, tmpl): + + try: + fctx = webutil.filectx(web.repo, req) + f = fctx.path() + fl = fctx.filelog() + except error.LookupError: + f = webutil.cleanpath(web.repo, req.form['file'][0]) + fl = web.repo.file(f) + numrevs = len(fl) + if not numrevs: # file doesn't exist at all + raise + rev = webutil.changectx(web.repo, req).rev() + first = fl.linkrev(0) + if rev < first: # current rev is from before file existed + raise + frev = numrevs - 1 + while fl.linkrev(frev) > rev: + frev -= 1 + fctx = web.repo.filectx(f, fl.linkrev(frev)) + + revcount = web.maxshortchanges + if 'revcount' in req.form: + revcount = int(req.form.get('revcount', [revcount])[0]) + tmpl.defaults['sessionvars']['revcount'] = revcount + + lessvars = copy.copy(tmpl.defaults['sessionvars']) + lessvars['revcount'] = revcount / 2 + morevars = copy.copy(tmpl.defaults['sessionvars']) + morevars['revcount'] = revcount * 2 + + count = fctx.filerev() + 1 + start = max(0, fctx.filerev() - revcount + 1) # first rev on this page + end = min(count, start + revcount) # last rev on this page + parity = paritygen(web.stripecount, offset=start - end) + + def entries(limit=0, **map): + l = [] + + repo = web.repo + for i in xrange(start, end): + iterfctx = fctx.filectx(i) + + 
l.insert(0, {"parity": parity.next(), + "filerev": i, + "file": f, + "node": hex(iterfctx.node()), + "author": iterfctx.user(), + "date": iterfctx.date(), + "rename": webutil.renamelink(iterfctx), + "parent": webutil.parents(iterfctx), + "child": webutil.children(iterfctx), + "desc": iterfctx.description(), + "tags": webutil.nodetagsdict(repo, iterfctx.node()), + "branch": webutil.nodebranchnodefault(iterfctx), + "inbranch": webutil.nodeinbranch(repo, iterfctx), + "branches": webutil.nodebranchdict(repo, iterfctx)}) + + if limit > 0: + l = l[:limit] + + for e in l: + yield e + + nodefunc = lambda x: fctx.filectx(fileid=x) + nav = webutil.revnavgen(end - 1, revcount, count, nodefunc) + return tmpl("filelog", file=f, node=hex(fctx.node()), nav=nav, + entries=lambda **x: entries(limit=0, **x), + latestentry=lambda **x: entries(limit=1, **x), + revcount=revcount, morevars=morevars, lessvars=lessvars) + +def archive(web, req, tmpl): + type_ = req.form.get('type', [None])[0] + allowed = web.configlist("web", "allow_archive") + key = req.form['node'][0] + + if type_ not in web.archives: + msg = 'Unsupported archive type: %s' % type_ + raise ErrorResponse(HTTP_NOT_FOUND, msg) + + if not ((type_ in allowed or + web.configbool("web", "allow" + type_, False))): + msg = 'Archive type not allowed: %s' % type_ + raise ErrorResponse(HTTP_FORBIDDEN, msg) + + reponame = re.sub(r"\W+", "-", os.path.basename(web.reponame)) + cnode = web.repo.lookup(key) + arch_version = key + if cnode == key or key == 'tip': + arch_version = short(cnode) + name = "%s-%s" % (reponame, arch_version) + mimetype, artype, extension, encoding = web.archive_specs[type_] + headers = [ + ('Content-Type', mimetype), + ('Content-Disposition', 'attachment; filename=%s%s' % (name, extension)) + ] + if encoding: + headers.append(('Content-Encoding', encoding)) + req.header(headers) + req.respond(HTTP_OK) + archival.archive(web.repo, req, cnode, artype, prefix=name) + return [] + + +def static(web, req, tmpl): + 
fname = req.form['file'][0] + # a repo owner may set web.static in .hg/hgrc to get any file + # readable by the user running the CGI script + static = web.config("web", "static", None, untrusted=False) + if not static: + tp = web.templatepath or templater.templatepath() + if isinstance(tp, str): + tp = [tp] + static = [os.path.join(p, 'static') for p in tp] + return [staticfile(static, fname, req)] + +def graph(web, req, tmpl): + + rev = webutil.changectx(web.repo, req).rev() + bg_height = 39 + revcount = web.maxshortchanges + if 'revcount' in req.form: + revcount = int(req.form.get('revcount', [revcount])[0]) + tmpl.defaults['sessionvars']['revcount'] = revcount + + lessvars = copy.copy(tmpl.defaults['sessionvars']) + lessvars['revcount'] = revcount / 2 + morevars = copy.copy(tmpl.defaults['sessionvars']) + morevars['revcount'] = revcount * 2 + + max_rev = len(web.repo) - 1 + revcount = min(max_rev, revcount) + revnode = web.repo.changelog.node(rev) + revnode_hex = hex(revnode) + uprev = min(max_rev, rev + revcount) + downrev = max(0, rev - revcount) + count = len(web.repo) + changenav = webutil.revnavgen(rev, revcount, count, web.repo.changectx) + + dag = graphmod.revisions(web.repo, rev, downrev) + tree = list(graphmod.colored(dag)) + canvasheight = (len(tree) + 1) * bg_height - 27 + data = [] + for (id, type, ctx, vtx, edges) in tree: + if type != graphmod.CHANGESET: + continue + node = short(ctx.node()) + age = templatefilters.age(ctx.date()) + desc = templatefilters.firstline(ctx.description()) + desc = cgi.escape(templatefilters.nonempty(desc)) + user = cgi.escape(templatefilters.person(ctx.user())) + branch = ctx.branch() + branch = branch, web.repo.branchtags().get(branch) == ctx.node() + data.append((node, vtx, edges, desc, user, age, branch, ctx.tags())) + + return tmpl('graph', rev=rev, revcount=revcount, uprev=uprev, + lessvars=lessvars, morevars=morevars, downrev=downrev, + canvasheight=canvasheight, jsdata=data, bg_height=bg_height, + 
node=revnode_hex, changenav=changenav) + +def _getdoc(e): + doc = e[0].__doc__ + if doc: + doc = doc.split('\n')[0] + else: + doc = _('(no help text available)') + return doc + +def help(web, req, tmpl): + from mercurial import commands # avoid cycle + + topicname = req.form.get('node', [None])[0] + if not topicname: + topic = [] + + def topics(**map): + for entries, summary, _ in helpmod.helptable: + entries = sorted(entries, key=len) + yield {'topic': entries[-1], 'summary': summary} + + early, other = [], [] + primary = lambda s: s.split('|')[0] + for c, e in commands.table.iteritems(): + doc = _getdoc(e) + if 'DEPRECATED' in doc or c.startswith('debug'): + continue + cmd = primary(c) + if cmd.startswith('^'): + early.append((cmd[1:], doc)) + else: + other.append((cmd, doc)) + + early.sort() + other.sort() + + def earlycommands(**map): + for c, doc in early: + yield {'topic': c, 'summary': doc} + + def othercommands(**map): + for c, doc in other: + yield {'topic': c, 'summary': doc} + + return tmpl('helptopics', topics=topics, earlycommands=earlycommands, + othercommands=othercommands, title='Index') + + u = webutil.wsgiui() + u.pushbuffer() + try: + commands.help_(u, topicname) + except error.UnknownCommand: + raise ErrorResponse(HTTP_NOT_FOUND) + doc = u.popbuffer() + return tmpl('help', topic=topicname, doc=doc) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webcommands.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webcommands.pyo Binary files differnew file mode 100644 index 0000000..0ae06ef --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webcommands.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webutil.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webutil.py new file mode 100644 index 0000000..5dbff02 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webutil.py @@ -0,0 +1,226 @@ +# hgweb/webutil.py - 
utility library for the web interface. +# +# Copyright 21 May 2005 - (c) 2005 Jake Edge <jake@edge2.net> +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import os, copy +from mercurial import match, patch, util, error, ui +from mercurial.node import hex, nullid + +def up(p): + if p[0] != "/": + p = "/" + p + if p[-1] == "/": + p = p[:-1] + up = os.path.dirname(p) + if up == "/": + return "/" + return up + "/" + +def revnavgen(pos, pagelen, limit, nodefunc): + def seq(factor, limit=None): + if limit: + yield limit + if limit >= 20 and limit <= 40: + yield 50 + else: + yield 1 * factor + yield 3 * factor + for f in seq(factor * 10): + yield f + + navbefore = [] + navafter = [] + + last = 0 + for f in seq(1, pagelen): + if f < pagelen or f <= last: + continue + if f > limit: + break + last = f + if pos + f < limit: + navafter.append(("+%d" % f, hex(nodefunc(pos + f).node()))) + if pos - f >= 0: + navbefore.insert(0, ("-%d" % f, hex(nodefunc(pos - f).node()))) + + navafter.append(("tip", "tip")) + try: + navbefore.insert(0, ("(0)", hex(nodefunc('0').node()))) + except error.RepoError: + pass + + def gen(l): + def f(**map): + for label, node in l: + yield {"label": label, "node": node} + return f + + return (dict(before=gen(navbefore), after=gen(navafter)),) + +def _siblings(siblings=[], hiderev=None): + siblings = [s for s in siblings if s.node() != nullid] + if len(siblings) == 1 and siblings[0].rev() == hiderev: + return + for s in siblings: + d = {'node': hex(s.node()), 'rev': s.rev()} + d['user'] = s.user() + d['date'] = s.date() + d['description'] = s.description() + d['branch'] = s.branch() + if hasattr(s, 'path'): + d['file'] = s.path() + yield d + +def parents(ctx, hide=None): + return _siblings(ctx.parents(), hide) + +def children(ctx, hide=None): + return _siblings(ctx.children(), hide) + +def renamelink(fctx): + 
r = fctx.renamed() + if r: + return [dict(file=r[0], node=hex(r[1]))] + return [] + +def nodetagsdict(repo, node): + return [{"name": i} for i in repo.nodetags(node)] + +def nodebranchdict(repo, ctx): + branches = [] + branch = ctx.branch() + # If this is an empty repo, ctx.node() == nullid, + # ctx.branch() == 'default', but branchtags() is + # an empty dict. Using dict.get avoids a traceback. + if repo.branchtags().get(branch) == ctx.node(): + branches.append({"name": branch}) + return branches + +def nodeinbranch(repo, ctx): + branches = [] + branch = ctx.branch() + if branch != 'default' and repo.branchtags().get(branch) != ctx.node(): + branches.append({"name": branch}) + return branches + +def nodebranchnodefault(ctx): + branches = [] + branch = ctx.branch() + if branch != 'default': + branches.append({"name": branch}) + return branches + +def showtag(repo, tmpl, t1, node=nullid, **args): + for t in repo.nodetags(node): + yield tmpl(t1, tag=t, **args) + +def cleanpath(repo, path): + path = path.lstrip('/') + return util.canonpath(repo.root, '', path) + +def changectx(repo, req): + changeid = "tip" + if 'node' in req.form: + changeid = req.form['node'][0] + elif 'manifest' in req.form: + changeid = req.form['manifest'][0] + + try: + ctx = repo[changeid] + except error.RepoError: + man = repo.manifest + ctx = repo[man.linkrev(man.rev(man.lookup(changeid)))] + + return ctx + +def filectx(repo, req): + path = cleanpath(repo, req.form['file'][0]) + if 'node' in req.form: + changeid = req.form['node'][0] + else: + changeid = req.form['filenode'][0] + try: + fctx = repo[changeid][path] + except error.RepoError: + fctx = repo.filectx(path, fileid=changeid) + + return fctx + +def listfilediffs(tmpl, files, node, max): + for f in files[:max]: + yield tmpl('filedifflink', node=hex(node), file=f) + if len(files) > max: + yield tmpl('fileellipses') + +def diffs(repo, tmpl, ctx, files, parity, style): + + def countgen(): + start = 1 + while True: + yield start + start += 1 
+ + blockcount = countgen() + def prettyprintlines(diff): + blockno = blockcount.next() + for lineno, l in enumerate(diff.splitlines(True)): + lineno = "%d.%d" % (blockno, lineno + 1) + if l.startswith('+'): + ltype = "difflineplus" + elif l.startswith('-'): + ltype = "difflineminus" + elif l.startswith('@'): + ltype = "difflineat" + else: + ltype = "diffline" + yield tmpl(ltype, + line=l, + lineid="l%s" % lineno, + linenumber="% 8s" % lineno) + + if files: + m = match.exact(repo.root, repo.getcwd(), files) + else: + m = match.always(repo.root, repo.getcwd()) + + diffopts = patch.diffopts(repo.ui, untrusted=True) + parents = ctx.parents() + node1 = parents and parents[0].node() or nullid + node2 = ctx.node() + + block = [] + for chunk in patch.diff(repo, node1, node2, m, opts=diffopts): + if chunk.startswith('diff') and block: + yield tmpl('diffblock', parity=parity.next(), + lines=prettyprintlines(''.join(block))) + block = [] + if chunk.startswith('diff') and style != 'raw': + chunk = ''.join(chunk.splitlines(True)[1:]) + block.append(chunk) + yield tmpl('diffblock', parity=parity.next(), + lines=prettyprintlines(''.join(block))) + +class sessionvars(object): + def __init__(self, vars, start='?'): + self.start = start + self.vars = vars + def __getitem__(self, key): + return self.vars[key] + def __setitem__(self, key, value): + self.vars[key] = value + def __copy__(self): + return sessionvars(copy.copy(self.vars), self.start) + def __iter__(self): + separator = self.start + for key, value in self.vars.iteritems(): + yield {'name': key, 'value': str(value), 'separator': separator} + separator = '&' + +class wsgiui(ui.ui): + # default termwidth breaks under mod_wsgi + def termwidth(self): + return 80 diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webutil.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webutil.pyo Binary files differnew file mode 100644 index 0000000..5a9a36f --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/webutil.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/wsgicgi.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/wsgicgi.py new file mode 100644 index 0000000..8dc5060 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/wsgicgi.py @@ -0,0 +1,77 @@ +# hgweb/wsgicgi.py - CGI->WSGI translator +# +# Copyright 2006 Eric Hopper <hopper@omnifarious.org> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +# +# This was originally copied from the public domain code at +# http://www.python.org/dev/peps/pep-0333/#the-server-gateway-side + +import os, sys +from mercurial import util + +def launch(application): + util.set_binary(sys.stdin) + util.set_binary(sys.stdout) + + environ = dict(os.environ.iteritems()) + environ.setdefault('PATH_INFO', '') + if environ.get('SERVER_SOFTWARE', '').startswith('Microsoft-IIS'): + # IIS includes script_name in path_info + scriptname = environ['SCRIPT_NAME'] + if environ['PATH_INFO'].startswith(scriptname): + environ['PATH_INFO'] = environ['PATH_INFO'][len(scriptname):] + + environ['wsgi.input'] = sys.stdin + environ['wsgi.errors'] = sys.stderr + environ['wsgi.version'] = (1, 0) + environ['wsgi.multithread'] = False + environ['wsgi.multiprocess'] = True + environ['wsgi.run_once'] = True + + if environ.get('HTTPS', 'off').lower() in ('on', '1', 'yes'): + environ['wsgi.url_scheme'] = 'https' + else: + environ['wsgi.url_scheme'] = 'http' + + headers_set = [] + headers_sent = [] + out = sys.stdout + + def write(data): + if not headers_set: + raise AssertionError("write() before start_response()") + + elif not headers_sent: + # Before the first output, send the stored headers + status, response_headers = headers_sent[:] = headers_set + out.write('Status: %s\r\n' % status) + for header in response_headers: + out.write('%s: 
%s\r\n' % header) + out.write('\r\n') + + out.write(data) + out.flush() + + def start_response(status, response_headers, exc_info=None): + if exc_info: + try: + if headers_sent: + # Re-raise original exception if headers sent + raise exc_info[0](exc_info[1], exc_info[2]) + finally: + exc_info = None # avoid dangling circular ref + elif headers_set: + raise AssertionError("Headers already set!") + + headers_set[:] = [status, response_headers] + return write + + content = application(environ, start_response) + try: + for chunk in content: + write(chunk) + finally: + if hasattr(content, 'close'): + content.close() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/wsgicgi.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/wsgicgi.pyo Binary files differnew file mode 100644 index 0000000..0b669d9 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hgweb/wsgicgi.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hook.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hook.py new file mode 100644 index 0000000..6f0bcdb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hook.py @@ -0,0 +1,153 @@ +# hook.py - hook support for mercurial +# +# Copyright 2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from i18n import _ +import os, sys +import extensions, util + +def _pythonhook(ui, repo, name, hname, funcname, args, throw): + '''call python hook. hook is callable object, looked up as + name in python module. if callable returns "true", hook + fails, else passes. if hook raises exception, treated as + hook failure. exception propagates if throw is "true". + + reason for "true" meaning "hook failed" is so that + unmodified commands (e.g. 
mercurial.commands.update) can + be run as hooks without wrappers to convert return values.''' + + ui.note(_("calling hook %s: %s\n") % (hname, funcname)) + obj = funcname + if not hasattr(obj, '__call__'): + d = funcname.rfind('.') + if d == -1: + raise util.Abort(_('%s hook is invalid ("%s" not in ' + 'a module)') % (hname, funcname)) + modname = funcname[:d] + oldpaths = sys.path + if hasattr(sys, "frozen"): + # binary installs require sys.path manipulation + modpath, modfile = os.path.split(modname) + if modpath and modfile: + sys.path = sys.path[:] + [modpath] + modname = modfile + try: + obj = __import__(modname) + except ImportError: + e1 = sys.exc_type, sys.exc_value, sys.exc_traceback + try: + # extensions are loaded with hgext_ prefix + obj = __import__("hgext_%s" % modname) + except ImportError: + e2 = sys.exc_type, sys.exc_value, sys.exc_traceback + if ui.tracebackflag: + ui.warn(_('exception from first failed import attempt:\n')) + ui.traceback(e1) + if ui.tracebackflag: + ui.warn(_('exception from second failed import attempt:\n')) + ui.traceback(e2) + raise util.Abort(_('%s hook is invalid ' + '(import of "%s" failed)') % + (hname, modname)) + sys.path = oldpaths + try: + for p in funcname.split('.')[1:]: + obj = getattr(obj, p) + except AttributeError: + raise util.Abort(_('%s hook is invalid ' + '("%s" is not defined)') % + (hname, funcname)) + if not hasattr(obj, '__call__'): + raise util.Abort(_('%s hook is invalid ' + '("%s" is not callable)') % + (hname, funcname)) + try: + r = obj(ui=ui, repo=repo, hooktype=name, **args) + except KeyboardInterrupt: + raise + except Exception, exc: + if isinstance(exc, util.Abort): + ui.warn(_('error: %s hook failed: %s\n') % + (hname, exc.args[0])) + else: + ui.warn(_('error: %s hook raised an exception: ' + '%s\n') % (hname, exc)) + if throw: + raise + ui.traceback() + return True + if r: + if throw: + raise util.Abort(_('%s hook failed') % hname) + ui.warn(_('warning: %s hook failed\n') % hname) + return r + 
+def _exthook(ui, repo, name, cmd, args, throw): + ui.note(_("running hook %s: %s\n") % (name, cmd)) + + env = {} + for k, v in args.iteritems(): + if hasattr(v, '__call__'): + v = v() + env['HG_' + k.upper()] = v + + if repo: + cwd = repo.root + else: + cwd = os.getcwd() + if 'HG_URL' in env and env['HG_URL'].startswith('remote:http'): + r = util.system(cmd, environ=env, cwd=cwd, out=ui) + else: + r = util.system(cmd, environ=env, cwd=cwd) + if r: + desc, r = util.explain_exit(r) + if throw: + raise util.Abort(_('%s hook %s') % (name, desc)) + ui.warn(_('warning: %s hook %s\n') % (name, desc)) + return r + +_redirect = False +def redirect(state): + global _redirect + _redirect = state + +def hook(ui, repo, name, throw=False, **args): + r = False + + oldstdout = -1 + if _redirect: + stdoutno = sys.__stdout__.fileno() + stderrno = sys.__stderr__.fileno() + # temporarily redirect stdout to stderr, if possible + if stdoutno >= 0 and stderrno >= 0: + oldstdout = os.dup(stdoutno) + os.dup2(stderrno, stdoutno) + + try: + for hname, cmd in ui.configitems('hooks'): + if hname.split('.')[0] != name or not cmd: + continue + if hasattr(cmd, '__call__'): + r = _pythonhook(ui, repo, name, hname, cmd, args, throw) or r + elif cmd.startswith('python:'): + if cmd.count(':') >= 2: + path, cmd = cmd[7:].rsplit(':', 1) + path = util.expandpath(path) + if repo: + path = os.path.join(repo.root, path) + mod = extensions.loadpath(path, 'hghook.%s' % hname) + hookfn = getattr(mod, cmd) + else: + hookfn = cmd[7:].strip() + r = _pythonhook(ui, repo, name, hname, hookfn, args, throw) or r + else: + r = _exthook(ui, repo, hname, cmd, args, throw) or r + finally: + if _redirect and oldstdout >= 0: + os.dup2(oldstdout, stdoutno) + os.close(oldstdout) + + return r diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hook.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hook.pyo Binary files differnew file mode 100644 index 0000000..9c49771 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/hook.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/httprepo.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/httprepo.py new file mode 100644 index 0000000..c19c661 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/httprepo.py @@ -0,0 +1,203 @@ +# httprepo.py - HTTP repository proxy classes for mercurial +# +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from node import nullid +from i18n import _ +import changegroup, statichttprepo, error, url, util, wireproto +import os, urllib, urllib2, urlparse, zlib, httplib +import errno, socket + +def zgenerator(f): + zd = zlib.decompressobj() + try: + for chunk in util.filechunkiter(f): + while chunk: + yield zd.decompress(chunk, 2**18) + chunk = zd.unconsumed_tail + except httplib.HTTPException: + raise IOError(None, _('connection ended unexpectedly')) + yield zd.flush() + +class httprepository(wireproto.wirerepository): + def __init__(self, ui, path): + self.path = path + self.caps = None + self.handler = None + scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path) + if query or frag: + raise util.Abort(_('unsupported URL component: "%s"') % + (query or frag)) + + # urllib cannot handle URLs with embedded user or passwd + self._url, authinfo = url.getauthinfo(path) + + self.ui = ui + self.ui.debug('using %s\n' % self._url) + + self.urlopener = url.opener(ui, authinfo) + + def __del__(self): + for h in self.urlopener.handlers: + h.close() + if hasattr(h, "close_all"): + h.close_all() + + def url(self): + return self.path + + # look up capabilities only when needed + + def get_caps(self): + if self.caps is None: + try: + self.caps = set(self._call('capabilities').split()) + except error.RepoError: + 
self.caps = set() + self.ui.debug('capabilities: %s\n' % + (' '.join(self.caps or ['none']))) + return self.caps + + capabilities = property(get_caps) + + def lock(self): + raise util.Abort(_('operation not supported over http')) + + def _callstream(self, cmd, **args): + if cmd == 'pushkey': + args['data'] = '' + data = args.pop('data', None) + headers = args.pop('headers', {}) + self.ui.debug("sending %s command\n" % cmd) + q = {"cmd": cmd} + q.update(args) + qs = '?%s' % urllib.urlencode(q) + cu = "%s%s" % (self._url, qs) + req = urllib2.Request(cu, data, headers) + if data is not None: + # len(data) is broken if data doesn't fit into Py_ssize_t + # add the header ourself to avoid OverflowError + size = data.__len__() + self.ui.debug("sending %s bytes\n" % size) + req.add_unredirected_header('Content-Length', '%d' % size) + try: + resp = self.urlopener.open(req) + except urllib2.HTTPError, inst: + if inst.code == 401: + raise util.Abort(_('authorization failed')) + raise + except httplib.HTTPException, inst: + self.ui.debug('http error while sending %s command\n' % cmd) + self.ui.traceback() + raise IOError(None, inst) + except IndexError: + # this only happens with Python 2.3, later versions raise URLError + raise util.Abort(_('http error, possibly caused by proxy setting')) + # record the url we got redirected to + resp_url = resp.geturl() + if resp_url.endswith(qs): + resp_url = resp_url[:-len(qs)] + if self._url.rstrip('/') != resp_url.rstrip('/'): + self.ui.status(_('real URL is %s\n') % resp_url) + self._url = resp_url + try: + proto = resp.getheader('content-type') + except AttributeError: + proto = resp.headers['content-type'] + + safeurl = url.hidepassword(self._url) + # accept old "text/plain" and "application/hg-changegroup" for now + if not (proto.startswith('application/mercurial-') or + proto.startswith('text/plain') or + proto.startswith('application/hg-changegroup')): + self.ui.debug("requested URL: '%s'\n" % url.hidepassword(cu)) + raise 
error.RepoError( + _("'%s' does not appear to be an hg repository:\n" + "---%%<--- (%s)\n%s\n---%%<---\n") + % (safeurl, proto, resp.read())) + + if proto.startswith('application/mercurial-'): + try: + version = proto.split('-', 1)[1] + version_info = tuple([int(n) for n in version.split('.')]) + except ValueError: + raise error.RepoError(_("'%s' sent a broken Content-Type " + "header (%s)") % (safeurl, proto)) + if version_info > (0, 1): + raise error.RepoError(_("'%s' uses newer protocol %s") % + (safeurl, version)) + + return resp + + def _call(self, cmd, **args): + fp = self._callstream(cmd, **args) + try: + return fp.read() + finally: + # if using keepalive, allow connection to be reused + fp.close() + + def _callpush(self, cmd, cg, **args): + # have to stream bundle to a temp file because we do not have + # http 1.1 chunked transfer. + + type = "" + types = self.capable('unbundle') + # servers older than d1b16a746db6 will send 'unbundle' as a + # boolean capability + try: + types = types.split(',') + except AttributeError: + types = [""] + if types: + for x in types: + if x in changegroup.bundletypes: + type = x + break + + tempname = changegroup.writebundle(cg, None, type) + fp = url.httpsendfile(tempname, "rb") + headers = {'Content-Type': 'application/mercurial-0.1'} + + try: + try: + r = self._call(cmd, data=fp, headers=headers, **args) + return r.split('\n', 1) + except socket.error, err: + if err.args[0] in (errno.ECONNRESET, errno.EPIPE): + raise util.Abort(_('push failed: %s') % err.args[1]) + raise util.Abort(err.args[1]) + finally: + fp.close() + os.unlink(tempname) + + def _abort(self, exception): + raise exception + + def _decompress(self, stream): + return util.chunkbuffer(zgenerator(stream)) + +class httpsrepository(httprepository): + def __init__(self, ui, path): + if not url.has_https: + raise util.Abort(_('Python support for SSL and HTTPS ' + 'is not installed')) + httprepository.__init__(self, ui, path) + +def instance(ui, path, create): + 
if create: + raise util.Abort(_('cannot create new http repository')) + try: + if path.startswith('https:'): + inst = httpsrepository(ui, path) + else: + inst = httprepository(ui, path) + inst.between([(nullid, nullid)]) + return inst + except error.RepoError: + ui.note('(falling back to static-http)\n') + return statichttprepo.instance(ui, "static-" + path, create) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/httprepo.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/httprepo.pyo Binary files differnew file mode 100644 index 0000000..1936104 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/httprepo.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/i18n.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/i18n.py new file mode 100644 index 0000000..f35311c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/i18n.py @@ -0,0 +1,58 @@ +# i18n.py - internationalization support for mercurial +# +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import encoding +import gettext, sys, os + +# modelled after templater.templatepath: +if hasattr(sys, 'frozen'): + module = sys.executable +else: + module = __file__ + +base = os.path.dirname(module) +for dir in ('.', '..'): + localedir = os.path.join(base, dir, 'locale') + if os.path.isdir(localedir): + break + +t = gettext.translation('hg', localedir, fallback=True) + +def gettext(message): + """Translate message. + + The message is looked up in the catalog to get a Unicode string, + which is encoded in the local encoding before being returned. + + Important: message is restricted to characters in the encoding + given by sys.getdefaultencoding() which is most likely 'ascii'. 
+ """ + # If message is None, t.ugettext will return u'None' as the + # translation whereas our callers expect us to return None. + if message is None: + return message + + paragraphs = message.split('\n\n') + # Be careful not to translate the empty string -- it holds the + # meta data of the .po file. + u = u'\n\n'.join([p and t.ugettext(p) or '' for p in paragraphs]) + try: + # encoding.tolocal cannot be used since it will first try to + # decode the Unicode string. Calling u.decode(enc) really + # means u.encode(sys.getdefaultencoding()).decode(enc). Since + # the Python encoding defaults to 'ascii', this fails if the + # translated string use non-ASCII characters. + return u.encode(encoding.encoding, "replace") + except LookupError: + # An unknown encoding results in a LookupError. + return message + +if 'HGPLAIN' in os.environ: + _ = lambda message: message +else: + _ = gettext + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/i18n.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/i18n.pyo Binary files differnew file mode 100644 index 0000000..3cfad64 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/i18n.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ignore.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ignore.py new file mode 100644 index 0000000..8cc5058 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ignore.py @@ -0,0 +1,103 @@ +# ignore.py - ignored file handling for mercurial +# +# Copyright 2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from i18n import _ +import util, match +import re + +_commentre = None + +def ignorepats(lines): + '''parse lines (iterable) of .hgignore text, returning a tuple of + (patterns, parse errors). 
These patterns should be given to compile() + to be validated and converted into a match function.''' + syntaxes = {'re': 'relre:', 'regexp': 'relre:', 'glob': 'relglob:'} + syntax = 'relre:' + patterns = [] + warnings = [] + + for line in lines: + if "#" in line: + global _commentre + if not _commentre: + _commentre = re.compile(r'((^|[^\\])(\\\\)*)#.*') + # remove comments prefixed by an even number of escapes + line = _commentre.sub(r'\1', line) + # fixup properly escaped comments that survived the above + line = line.replace("\\#", "#") + line = line.rstrip() + if not line: + continue + + if line.startswith('syntax:'): + s = line[7:].strip() + try: + syntax = syntaxes[s] + except KeyError: + warnings.append(_("ignoring invalid syntax '%s'") % s) + continue + pat = syntax + line + for s, rels in syntaxes.iteritems(): + if line.startswith(rels): + pat = line + break + elif line.startswith(s+':'): + pat = rels + line[len(s)+1:] + break + patterns.append(pat) + + return patterns, warnings + +def ignore(root, files, warn): + '''return matcher covering patterns in 'files'. + + the files parsed for patterns include: + .hgignore in the repository root + any additional files specified in the [ui] section of ~/.hgrc + + trailing white space is dropped. + the escape character is backslash. + comments start with #. + empty lines are skipped. 
+ + lines can be of the following formats: + + syntax: regexp # defaults following lines to non-rooted regexps + syntax: glob # defaults following lines to non-rooted globs + re:pattern # non-rooted regular expression + glob:pattern # non-rooted glob + pattern # pattern of the current default type''' + + pats = {} + for f in files: + try: + pats[f] = [] + fp = open(f) + pats[f], warnings = ignorepats(fp) + for warning in warnings: + warn("%s: %s\n" % (f, warning)) + except IOError, inst: + if f != files[0]: + warn(_("skipping unreadable ignore file '%s': %s\n") % + (f, inst.strerror)) + + allpats = [] + [allpats.extend(patlist) for patlist in pats.values()] + if not allpats: + return util.never + + try: + ignorefunc = match.match(root, '', [], allpats) + except util.Abort: + # Re-raise an exception where the src is the right file + for f, patlist in pats.iteritems(): + try: + match.match(root, '', [], patlist) + except util.Abort, inst: + raise util.Abort('%s: %s' % (f, inst[0])) + + return ignorefunc diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ignore.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ignore.pyo Binary files differnew file mode 100644 index 0000000..0ebb7cb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ignore.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/keepalive.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/keepalive.py new file mode 100644 index 0000000..a5a4882 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/keepalive.py @@ -0,0 +1,765 @@ +# This library is free software; you can redistribute it and/or +# modify it under the terms of the GNU Lesser General Public +# License as published by the Free Software Foundation; either +# version 2.1 of the License, or (at your option) any later version. 
+# +# This library is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU +# Lesser General Public License for more details. +# +# You should have received a copy of the GNU Lesser General Public +# License along with this library; if not, write to the +# Free Software Foundation, Inc., +# 59 Temple Place, Suite 330, +# Boston, MA 02111-1307 USA + +# This file is part of urlgrabber, a high-level cross-protocol url-grabber +# Copyright 2002-2004 Michael D. Stenner, Ryan Tomayko + +# Modified by Benoit Boissinot: +# - fix for digest auth (inspired from urllib2.py @ Python v2.4) +# Modified by Dirkjan Ochtman: +# - import md5 function from a local util module +# Modified by Martin Geisler: +# - moved md5 function from local util module to this module +# Modified by Augie Fackler: +# - add safesend method and use it to prevent broken pipe errors +# on large POST requests + +"""An HTTP handler for urllib2 that supports HTTP 1.1 and keepalive. + +>>> import urllib2 +>>> from keepalive import HTTPHandler +>>> keepalive_handler = HTTPHandler() +>>> opener = urllib2.build_opener(keepalive_handler) +>>> urllib2.install_opener(opener) +>>> +>>> fo = urllib2.urlopen('http://www.python.org') + +If a connection to a given host is requested, and all of the existing +connections are still in use, another connection will be opened. If +the handler tries to use an existing connection but it fails in some +way, it will be closed and removed from the pool. + +To remove the handler, simply re-run build_opener with no arguments, and +install that opener. 
+ +You can explicitly close connections by using the close_connection() +method of the returned file-like object (described below) or you can +use the handler methods: + + close_connection(host) + close_all() + open_connections() + +NOTE: using the close_connection and close_all methods of the handler +should be done with care when using multiple threads. + * there is nothing that prevents another thread from creating new + connections immediately after connections are closed + * no checks are done to prevent in-use connections from being closed + +>>> keepalive_handler.close_all() + +EXTRA ATTRIBUTES AND METHODS + + Upon a status of 200, the object returned has a few additional + attributes and methods, which should not be used if you want to + remain consistent with the normal urllib2-returned objects: + + close_connection() - close the connection to the host + readlines() - you know, readlines() + status - the return status (ie 404) + reason - english translation of status (ie 'File not found') + + If you want the best of both worlds, use this inside an + AttributeError-catching try: + + >>> try: status = fo.status + >>> except AttributeError: status = None + + Unfortunately, these are ONLY there if status == 200, so it's not + easy to distinguish between non-200 responses. The reason is that + urllib2 tries to do clever things with error codes 301, 302, 401, + and 407, and it wraps the object upon return. + + For python versions earlier than 2.4, you can avoid this fancy error + handling by setting the module-level global HANDLE_ERRORS to zero. + You see, prior to 2.4, it's the HTTP Handler's job to determine what + to handle specially, and what to just pass up. HANDLE_ERRORS == 0 + means "pass everything up". In python 2.4, however, this job no + longer belongs to the HTTP Handler and is now done by a NEW handler, + HTTPErrorProcessor. 
Here's the bottom line: + + python version < 2.4 + HANDLE_ERRORS == 1 (default) pass up 200, treat the rest as + errors + HANDLE_ERRORS == 0 pass everything up, error processing is + left to the calling code + python version >= 2.4 + HANDLE_ERRORS == 1 pass up 200, treat the rest as errors + HANDLE_ERRORS == 0 (default) pass everything up, let the + other handlers (specifically, + HTTPErrorProcessor) decide what to do + + In practice, setting the variable either way makes little difference + in python 2.4, so for the most consistent behavior across versions, + you probably just want to use the defaults, which will give you + exceptions on errors. + +""" + +# $Id: keepalive.py,v 1.14 2006/04/04 21:00:32 mstenner Exp $ + +import errno +import httplib +import socket +import thread +import urllib2 + +DEBUG = None + +import sys +if sys.version_info < (2, 4): + HANDLE_ERRORS = 1 +else: HANDLE_ERRORS = 0 + +class ConnectionManager: + """ + The connection manager must be able to: + * keep track of all existing + """ + def __init__(self): + self._lock = thread.allocate_lock() + self._hostmap = {} # map hosts to a list of connections + self._connmap = {} # map connections to host + self._readymap = {} # map connection to ready state + + def add(self, host, connection, ready): + self._lock.acquire() + try: + if not host in self._hostmap: + self._hostmap[host] = [] + self._hostmap[host].append(connection) + self._connmap[connection] = host + self._readymap[connection] = ready + finally: + self._lock.release() + + def remove(self, connection): + self._lock.acquire() + try: + try: + host = self._connmap[connection] + except KeyError: + pass + else: + del self._connmap[connection] + del self._readymap[connection] + self._hostmap[host].remove(connection) + if not self._hostmap[host]: del self._hostmap[host] + finally: + self._lock.release() + + def set_ready(self, connection, ready): + try: + self._readymap[connection] = ready + except KeyError: + pass + + def get_ready_conn(self, 
host): + conn = None + self._lock.acquire() + try: + if host in self._hostmap: + for c in self._hostmap[host]: + if self._readymap[c]: + self._readymap[c] = 0 + conn = c + break + finally: + self._lock.release() + return conn + + def get_all(self, host=None): + if host: + return list(self._hostmap.get(host, [])) + else: + return dict(self._hostmap) + +class KeepAliveHandler: + def __init__(self): + self._cm = ConnectionManager() + + #### Connection Management + def open_connections(self): + """return a list of connected hosts and the number of connections + to each. [('foo.com:80', 2), ('bar.org', 1)]""" + return [(host, len(li)) for (host, li) in self._cm.get_all().items()] + + def close_connection(self, host): + """close connection(s) to <host> + host is the host:port spec, as in 'www.cnn.com:8080' as passed in. + no error occurs if there is no connection to that host.""" + for h in self._cm.get_all(host): + self._cm.remove(h) + h.close() + + def close_all(self): + """close all open connections""" + for host, conns in self._cm.get_all().iteritems(): + for h in conns: + self._cm.remove(h) + h.close() + + def _request_closed(self, request, host, connection): + """tells us that this request is now closed and the the + connection is ready for another request""" + self._cm.set_ready(connection, 1) + + def _remove_connection(self, host, connection, close=0): + if close: + connection.close() + self._cm.remove(connection) + + #### Transaction Execution + def http_open(self, req): + return self.do_open(HTTPConnection, req) + + def do_open(self, http_class, req): + host = req.get_host() + if not host: + raise urllib2.URLError('no host given') + + try: + h = self._cm.get_ready_conn(host) + while h: + r = self._reuse_connection(h, req, host) + + # if this response is non-None, then it worked and we're + # done. Break out, skipping the else block. 
+ if r: + break + + # connection is bad - possibly closed by server + # discard it and ask for the next free connection + h.close() + self._cm.remove(h) + h = self._cm.get_ready_conn(host) + else: + # no (working) free connections were found. Create a new one. + h = http_class(host) + if DEBUG: + DEBUG.info("creating new connection to %s (%d)", + host, id(h)) + self._cm.add(host, h, 0) + self._start_transaction(h, req) + r = h.getresponse() + except (socket.error, httplib.HTTPException), err: + raise urllib2.URLError(err) + + # if not a persistent connection, don't try to reuse it + if r.will_close: + self._cm.remove(h) + + if DEBUG: + DEBUG.info("STATUS: %s, %s", r.status, r.reason) + r._handler = self + r._host = host + r._url = req.get_full_url() + r._connection = h + r.code = r.status + r.headers = r.msg + r.msg = r.reason + + if r.status == 200 or not HANDLE_ERRORS: + return r + else: + return self.parent.error('http', req, r, + r.status, r.msg, r.headers) + + def _reuse_connection(self, h, req, host): + """start the transaction with a re-used connection + return a response object (r) upon success or None on failure. + This DOES not close or remove bad connections in cases where + it returns. However, if an unexpected exception occurs, it + will close and remove the connection before re-raising. + """ + try: + self._start_transaction(h, req) + r = h.getresponse() + # note: just because we got something back doesn't mean it + # worked. We'll check the version below, too. + except (socket.error, httplib.HTTPException): + r = None + except: + # adding this block just in case we've missed + # something we will still raise the exception, but + # lets try and close the connection and remove it + # first. We previously got into a nasty loop + # where an exception was uncaught, and so the + # connection stayed open. On the next try, the + # same exception was raised, etc. 
The tradeoff is + # that it's now possible this call will raise + # a DIFFERENT exception + if DEBUG: + DEBUG.error("unexpected exception - closing " + "connection to %s (%d)", host, id(h)) + self._cm.remove(h) + h.close() + raise + + if r is None or r.version == 9: + # httplib falls back to assuming HTTP 0.9 if it gets a + # bad header back. This is most likely to happen if + # the socket has been closed by the server since we + # last used the connection. + if DEBUG: + DEBUG.info("failed to re-use connection to %s (%d)", + host, id(h)) + r = None + else: + if DEBUG: + DEBUG.info("re-using connection to %s (%d)", host, id(h)) + + return r + + def _start_transaction(self, h, req): + # What follows mostly reimplements HTTPConnection.request() + # except it adds self.parent.addheaders in the mix. + headers = req.headers.copy() + if sys.version_info >= (2, 4): + headers.update(req.unredirected_hdrs) + headers.update(self.parent.addheaders) + headers = dict((n.lower(), v) for n, v in headers.items()) + skipheaders = {} + for n in ('host', 'accept-encoding'): + if n in headers: + skipheaders['skip_' + n.replace('-', '_')] = 1 + try: + if req.has_data(): + data = req.get_data() + h.putrequest('POST', req.get_selector(), **skipheaders) + if 'content-type' not in headers: + h.putheader('Content-type', + 'application/x-www-form-urlencoded') + if 'content-length' not in headers: + h.putheader('Content-length', '%d' % len(data)) + else: + h.putrequest('GET', req.get_selector(), **skipheaders) + except (socket.error), err: + raise urllib2.URLError(err) + for k, v in headers.items(): + h.putheader(k, v) + h.endheaders() + if req.has_data(): + h.send(data) + +class HTTPHandler(KeepAliveHandler, urllib2.HTTPHandler): + pass + +class HTTPResponse(httplib.HTTPResponse): + # we need to subclass HTTPResponse in order to + # 1) add readline() and readlines() methods + # 2) add close_connection() methods + # 3) add info() and geturl() methods + + # in order to add readline(), read must 
be modified to deal with a + # buffer. example: readline must read a buffer and then spit back + # one line at a time. The only real alternative is to read one + # BYTE at a time (ick). Once something has been read, it can't be + # put back (ok, maybe it can, but that's even uglier than this), + # so if you THEN do a normal read, you must first take stuff from + # the buffer. + + # the read method wraps the original to accomodate buffering, + # although read() never adds to the buffer. + # Both readline and readlines have been stolen with almost no + # modification from socket.py + + + def __init__(self, sock, debuglevel=0, strict=0, method=None): + if method: # the httplib in python 2.3 uses the method arg + httplib.HTTPResponse.__init__(self, sock, debuglevel, method) + else: # 2.2 doesn't + httplib.HTTPResponse.__init__(self, sock, debuglevel) + self.fileno = sock.fileno + self.code = None + self._rbuf = '' + self._rbufsize = 8096 + self._handler = None # inserted by the handler later + self._host = None # (same) + self._url = None # (same) + self._connection = None # (same) + + _raw_read = httplib.HTTPResponse.read + + def close(self): + if self.fp: + self.fp.close() + self.fp = None + if self._handler: + self._handler._request_closed(self, self._host, + self._connection) + + def close_connection(self): + self._handler._remove_connection(self._host, self._connection, close=1) + self.close() + + def info(self): + return self.headers + + def geturl(self): + return self._url + + def read(self, amt=None): + # the _rbuf test is only in this first if for speed. 
It's not + # logically necessary + if self._rbuf and not amt is None: + L = len(self._rbuf) + if amt > L: + amt -= L + else: + s = self._rbuf[:amt] + self._rbuf = self._rbuf[amt:] + return s + + s = self._rbuf + self._raw_read(amt) + self._rbuf = '' + return s + + # stolen from Python SVN #68532 to fix issue1088 + def _read_chunked(self, amt): + chunk_left = self.chunk_left + value = '' + + # XXX This accumulates chunks by repeated string concatenation, + # which is not efficient as the number or size of chunks gets big. + while True: + if chunk_left is None: + line = self.fp.readline() + i = line.find(';') + if i >= 0: + line = line[:i] # strip chunk-extensions + try: + chunk_left = int(line, 16) + except ValueError: + # close the connection as protocol synchronisation is + # probably lost + self.close() + raise httplib.IncompleteRead(value) + if chunk_left == 0: + break + if amt is None: + value += self._safe_read(chunk_left) + elif amt < chunk_left: + value += self._safe_read(amt) + self.chunk_left = chunk_left - amt + return value + elif amt == chunk_left: + value += self._safe_read(amt) + self._safe_read(2) # toss the CRLF at the end of the chunk + self.chunk_left = None + return value + else: + value += self._safe_read(chunk_left) + amt -= chunk_left + + # we read the whole chunk, get another + self._safe_read(2) # toss the CRLF at the end of the chunk + chunk_left = None + + # read and discard trailer up to the CRLF terminator + ### note: we shouldn't have any trailers! 
+ while True: + line = self.fp.readline() + if not line: + # a vanishingly small number of sites EOF without + # sending the trailer + break + if line == '\r\n': + break + + # we read everything; close the "file" + self.close() + + return value + + def readline(self, limit=-1): + i = self._rbuf.find('\n') + while i < 0 and not (0 < limit <= len(self._rbuf)): + new = self._raw_read(self._rbufsize) + if not new: + break + i = new.find('\n') + if i >= 0: + i = i + len(self._rbuf) + self._rbuf = self._rbuf + new + if i < 0: + i = len(self._rbuf) + else: + i = i + 1 + if 0 <= limit < len(self._rbuf): + i = limit + data, self._rbuf = self._rbuf[:i], self._rbuf[i:] + return data + + def readlines(self, sizehint = 0): + total = 0 + list = [] + while 1: + line = self.readline() + if not line: + break + list.append(line) + total += len(line) + if sizehint and total >= sizehint: + break + return list + +def safesend(self, str): + """Send `str' to the server. + + Shamelessly ripped off from httplib to patch a bad behavior. + """ + # _broken_pipe_resp is an attribute we set in this function + # if the socket is closed while we're sending data but + # the server sent us a response before hanging up. + # In that case, we want to pretend to send the rest of the + # outgoing data, and then let the user use getresponse() + # (which we wrap) to get this last response before + # opening a new socket. + if getattr(self, '_broken_pipe_resp', None) is not None: + return + + if self.sock is None: + if self.auto_open: + self.connect() + else: + raise httplib.NotConnected() + + # send the data to the server. if we get a broken pipe, then close + # the socket. we want to reconnect when somebody tries to send again. + # + # NOTE: we DO propagate the error, though, because we cannot simply + # ignore the error... the caller will know if they can retry. 
+ if self.debuglevel > 0: + print "send:", repr(str) + try: + blocksize = 8192 + if hasattr(str,'read') : + if self.debuglevel > 0: + print "sendIng a read()able" + data = str.read(blocksize) + while data: + self.sock.sendall(data) + data = str.read(blocksize) + else: + self.sock.sendall(str) + except socket.error, v: + reraise = True + if v[0] == errno.EPIPE: # Broken pipe + if self._HTTPConnection__state == httplib._CS_REQ_SENT: + self._broken_pipe_resp = None + self._broken_pipe_resp = self.getresponse() + reraise = False + self.close() + if reraise: + raise + +def wrapgetresponse(cls): + """Wraps getresponse in cls with a broken-pipe sane version. + """ + def safegetresponse(self): + # In safesend() we might set the _broken_pipe_resp + # attribute, in which case the socket has already + # been closed and we just need to give them the response + # back. Otherwise, we use the normal response path. + r = getattr(self, '_broken_pipe_resp', None) + if r is not None: + return r + return cls.getresponse(self) + safegetresponse.__doc__ = cls.getresponse.__doc__ + return safegetresponse + +class HTTPConnection(httplib.HTTPConnection): + # use the modified response class + response_class = HTTPResponse + send = safesend + getresponse = wrapgetresponse(httplib.HTTPConnection) + + +######################################################################### +##### TEST FUNCTIONS +######################################################################### + +def error_handler(url): + global HANDLE_ERRORS + orig = HANDLE_ERRORS + keepalive_handler = HTTPHandler() + opener = urllib2.build_opener(keepalive_handler) + urllib2.install_opener(opener) + pos = {0: 'off', 1: 'on'} + for i in (0, 1): + print " fancy error handling %s (HANDLE_ERRORS = %i)" % (pos[i], i) + HANDLE_ERRORS = i + try: + fo = urllib2.urlopen(url) + fo.read() + fo.close() + try: + status, reason = fo.status, fo.reason + except AttributeError: + status, reason = None, None + except IOError, e: + print " EXCEPTION: 
%s" % e + raise + else: + print " status = %s, reason = %s" % (status, reason) + HANDLE_ERRORS = orig + hosts = keepalive_handler.open_connections() + print "open connections:", hosts + keepalive_handler.close_all() + +def md5(s): + try: + from hashlib import md5 as _md5 + except ImportError: + from md5 import md5 as _md5 + global md5 + md5 = _md5 + return _md5(s) + +def continuity(url): + format = '%25s: %s' + + # first fetch the file with the normal http handler + opener = urllib2.build_opener() + urllib2.install_opener(opener) + fo = urllib2.urlopen(url) + foo = fo.read() + fo.close() + m = md5.new(foo) + print format % ('normal urllib', m.hexdigest()) + + # now install the keepalive handler and try again + opener = urllib2.build_opener(HTTPHandler()) + urllib2.install_opener(opener) + + fo = urllib2.urlopen(url) + foo = fo.read() + fo.close() + m = md5.new(foo) + print format % ('keepalive read', m.hexdigest()) + + fo = urllib2.urlopen(url) + foo = '' + while 1: + f = fo.readline() + if f: + foo = foo + f + else: break + fo.close() + m = md5.new(foo) + print format % ('keepalive readline', m.hexdigest()) + +def comp(N, url): + print ' making %i connections to:\n %s' % (N, url) + + sys.stdout.write(' first using the normal urllib handlers') + # first use normal opener + opener = urllib2.build_opener() + urllib2.install_opener(opener) + t1 = fetch(N, url) + print ' TIME: %.3f s' % t1 + + sys.stdout.write(' now using the keepalive handler ') + # now install the keepalive handler and try again + opener = urllib2.build_opener(HTTPHandler()) + urllib2.install_opener(opener) + t2 = fetch(N, url) + print ' TIME: %.3f s' % t2 + print ' improvement factor: %.2f' % (t1 / t2) + +def fetch(N, url, delay=0): + import time + lens = [] + starttime = time.time() + for i in range(N): + if delay and i > 0: + time.sleep(delay) + fo = urllib2.urlopen(url) + foo = fo.read() + fo.close() + lens.append(len(foo)) + diff = time.time() - starttime + + j = 0 + for i in lens[1:]: + j = j + 
1 + if not i == lens[0]: + print "WARNING: inconsistent length on read %i: %i" % (j, i) + + return diff + +def test_timeout(url): + global DEBUG + dbbackup = DEBUG + class FakeLogger: + def debug(self, msg, *args): + print msg % args + info = warning = error = debug + DEBUG = FakeLogger() + print " fetching the file to establish a connection" + fo = urllib2.urlopen(url) + data1 = fo.read() + fo.close() + + i = 20 + print " waiting %i seconds for the server to close the connection" % i + while i > 0: + sys.stdout.write('\r %2i' % i) + sys.stdout.flush() + time.sleep(1) + i -= 1 + sys.stderr.write('\r') + + print " fetching the file a second time" + fo = urllib2.urlopen(url) + data2 = fo.read() + fo.close() + + if data1 == data2: + print ' data are identical' + else: + print ' ERROR: DATA DIFFER' + + DEBUG = dbbackup + + +def test(url, N=10): + print "checking error hander (do this on a non-200)" + try: error_handler(url) + except IOError: + print "exiting - exception will prevent further tests" + sys.exit() + print + print "performing continuity test (making sure stuff isn't corrupted)" + continuity(url) + print + print "performing speed comparison" + comp(N, url) + print + print "performing dropped-connection check" + test_timeout(url) + +if __name__ == '__main__': + import time + import sys + try: + N = int(sys.argv[1]) + url = sys.argv[2] + except: + print "%s <integer> <url>" % sys.argv[0] + else: + test(url, N) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/keepalive.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/keepalive.pyo Binary files differnew file mode 100644 index 0000000..f144af5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/keepalive.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/localrepo.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/localrepo.py new file mode 100644 index 0000000..23e6ff4 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/localrepo.py @@ -0,0 +1,1906 @@ +# localrepo.py - read/write repository class for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from node import bin, hex, nullid, nullrev, short +from i18n import _ +import repo, changegroup, subrepo, discovery, pushkey +import changelog, dirstate, filelog, manifest, context +import lock, transaction, store, encoding +import util, extensions, hook, error +import match as matchmod +import merge as mergemod +import tags as tagsmod +import url as urlmod +from lock import release +import weakref, errno, os, time, inspect +propertycache = util.propertycache + +class localrepository(repo.repository): + capabilities = set(('lookup', 'changegroupsubset', 'branchmap', 'pushkey')) + supportedformats = set(('revlogv1', 'parentdelta')) + supported = supportedformats | set(('store', 'fncache', 'shared', + 'dotencode')) + + def __init__(self, baseui, path=None, create=0): + repo.repository.__init__(self) + self.root = os.path.realpath(util.expandpath(path)) + self.path = os.path.join(self.root, ".hg") + self.origroot = path + self.auditor = util.path_auditor(self.root, self._checknested) + self.opener = util.opener(self.path) + self.wopener = util.opener(self.root) + self.baseui = baseui + self.ui = baseui.copy() + + try: + self.ui.readconfig(self.join("hgrc"), self.root) + extensions.loadall(self.ui) + except IOError: + pass + + if not os.path.isdir(self.path): + if create: + if not os.path.exists(path): + util.makedirs(path) + os.mkdir(self.path) + requirements = ["revlogv1"] + if self.ui.configbool('format', 'usestore', True): + os.mkdir(os.path.join(self.path, "store")) + requirements.append("store") + if self.ui.configbool('format', 'usefncache', True): + requirements.append("fncache") + if self.ui.configbool('format', 
'dotencode', True): + requirements.append('dotencode') + # create an invalid changelog + self.opener("00changelog.i", "a").write( + '\0\0\0\2' # represents revlogv2 + ' dummy changelog to prevent using the old repo layout' + ) + if self.ui.configbool('format', 'parentdelta', False): + requirements.append("parentdelta") + else: + raise error.RepoError(_("repository %s not found") % path) + elif create: + raise error.RepoError(_("repository %s already exists") % path) + else: + # find requirements + requirements = set() + try: + requirements = set(self.opener("requires").read().splitlines()) + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + for r in requirements - self.supported: + raise error.RepoError(_("requirement '%s' not supported") % r) + + self.sharedpath = self.path + try: + s = os.path.realpath(self.opener("sharedpath").read()) + if not os.path.exists(s): + raise error.RepoError( + _('.hg/sharedpath points to nonexistent directory %s') % s) + self.sharedpath = s + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + + self.store = store.store(requirements, self.sharedpath, util.opener) + self.spath = self.store.path + self.sopener = self.store.opener + self.sjoin = self.store.join + self.opener.createmode = self.store.createmode + self._applyrequirements(requirements) + if create: + self._writerequirements() + + # These two define the set of tags for this repository. _tags + # maps tag name to node; _tagtypes maps tag name to 'global' or + # 'local'. (Global tags are defined by .hgtags across all + # heads, and local tags are defined in .hg/localtags.) They + # constitute the in-memory cache of tags. 
    def _checknested(self, path):
        """Determine if path is a legal nested repository.

        path is a filesystem path expected to live under self.root.
        Returns True only when some leading component of the path (or
        the path itself) is a registered subrepository in the working
        context, delegating deeper levels to that subrepo's own
        checknested().
        """
        # anything outside this repository cannot be a nested repo of it
        if not path.startswith(self.root):
            return False
        subpath = path[len(self.root) + 1:]

        # XXX: Checking against the current working copy is wrong in
        # the sense that it can reject things like
        #
        #   $ hg cat -r 10 sub/x.txt
        #
        # if sub/ is no longer a subrepository in the working copy
        # parent revision.
        #
        # However, it can of course also allow things that would have
        # been rejected before, such as the above cat command if sub/
        # is a subrepository now, but was a normal directory before.
        # The old path auditor would have rejected by mistake since it
        # panics when it sees sub/.hg/.
        #
        # All in all, checking against the working copy seems sensible
        # since we want to prevent access to nested repositories on
        # the filesystem *now*.
        ctx = self[None]
        parts = util.splitpath(subpath)
        while parts:
            prefix = os.sep.join(parts)
            if prefix in ctx.substate:
                if prefix == subpath:
                    # the path itself is a registered subrepo: legal
                    return True
                else:
                    # a leading component is a subrepo: let it judge
                    # the remainder of the path
                    sub = ctx.sub(prefix)
                    return sub.checknested(subpath[len(prefix) + 1:])
            else:
                # not a subrepo at this depth: retry with a shorter prefix
                parts.pop()
        return False
in names: + m = munge and munge(name) or name + if self._tagtypes and name in self._tagtypes: + old = self._tags.get(name, nullid) + fp.write('%s %s\n' % (hex(old), m)) + fp.write('%s %s\n' % (hex(node), m)) + fp.close() + + prevtags = '' + if local: + try: + fp = self.opener('localtags', 'r+') + except IOError: + fp = self.opener('localtags', 'a') + else: + prevtags = fp.read() + + # local tags are stored in the current charset + writetags(fp, names, None, prevtags) + for name in names: + self.hook('tag', node=hex(node), tag=name, local=local) + return + + try: + fp = self.wfile('.hgtags', 'rb+') + except IOError: + fp = self.wfile('.hgtags', 'ab') + else: + prevtags = fp.read() + + # committed tags are stored in UTF-8 + writetags(fp, names, encoding.fromlocal, prevtags) + + if '.hgtags' not in self.dirstate: + self[None].add(['.hgtags']) + + m = matchmod.exact(self.root, '', ['.hgtags']) + tagnode = self.commit(message, user, date, extra=extra, match=m) + + for name in names: + self.hook('tag', node=hex(node), tag=name, local=local) + + return tagnode + + def tag(self, names, node, message, local, user, date): + '''tag a revision with one or more symbolic names. + + names is a list of strings or, when adding a single tag, names may be a + string. + + if local is True, the tags are stored in a per-repository file. + otherwise, they are stored in the .hgtags file, and a new + changeset is committed with the change. 
+ + keyword arguments: + + local: whether to store tags in non-version-controlled file + (default False) + + message: commit message to use if committing + + user: name of user to use if committing + + date: date tuple to use if committing''' + + if not local: + for x in self.status()[:5]: + if '.hgtags' in x: + raise util.Abort(_('working copy of .hgtags is changed ' + '(please commit .hgtags manually)')) + + self.tags() # instantiate the cache + self._tag(names, node, message, local, user, date) + + def tags(self): + '''return a mapping of tag to node''' + if self._tags is None: + (self._tags, self._tagtypes) = self._findtags() + + return self._tags + + def _findtags(self): + '''Do the hard work of finding tags. Return a pair of dicts + (tags, tagtypes) where tags maps tag name to node, and tagtypes + maps tag name to a string like \'global\' or \'local\'. + Subclasses or extensions are free to add their own tags, but + should be aware that the returned dicts will be retained for the + duration of the localrepo object.''' + + # XXX what tagtype should subclasses/extensions use? Currently + # mq and bookmarks add tags, but do not set the tagtype at all. + # Should each extension invent its own tag type? Should there + # be one tagtype for all such "virtual" tags? Or is the status + # quo fine? + + alltags = {} # map tag name to (node, hist) + tagtypes = {} + + tagsmod.findglobaltags(self.ui, self, alltags, tagtypes) + tagsmod.readlocaltags(self.ui, self, alltags, tagtypes) + + # Build the return dicts. Have to re-encode tag names because + # the tags module always uses UTF-8 (in order not to lose info + # writing to the cache), but the rest of Mercurial wants them in + # local encoding. 
+ tags = {} + for (name, (node, hist)) in alltags.iteritems(): + if node != nullid: + tags[encoding.tolocal(name)] = node + tags['tip'] = self.changelog.tip() + tagtypes = dict([(encoding.tolocal(name), value) + for (name, value) in tagtypes.iteritems()]) + return (tags, tagtypes) + + def tagtype(self, tagname): + ''' + return the type of the given tag. result can be: + + 'local' : a local tag + 'global' : a global tag + None : tag does not exist + ''' + + self.tags() + + return self._tagtypes.get(tagname) + + def tagslist(self): + '''return a list of tags ordered by revision''' + l = [] + for t, n in self.tags().iteritems(): + try: + r = self.changelog.rev(n) + except: + r = -2 # sort to the beginning of the list if unknown + l.append((r, t, n)) + return [(t, n) for r, t, n in sorted(l)] + + def nodetags(self, node): + '''return the tags associated with a node''' + if not self.nodetagscache: + self.nodetagscache = {} + for t, n in self.tags().iteritems(): + self.nodetagscache.setdefault(n, []).append(t) + for tags in self.nodetagscache.itervalues(): + tags.sort() + return self.nodetagscache.get(node, []) + + def _branchtags(self, partial, lrev): + # TODO: rename this function? 
+ tiprev = len(self) - 1 + if lrev != tiprev: + ctxgen = (self[r] for r in xrange(lrev + 1, tiprev + 1)) + self._updatebranchcache(partial, ctxgen) + self._writebranchcache(partial, self.changelog.tip(), tiprev) + + return partial + + def updatebranchcache(self): + tip = self.changelog.tip() + if self._branchcache is not None and self._branchcachetip == tip: + return self._branchcache + + oldtip = self._branchcachetip + self._branchcachetip = tip + if oldtip is None or oldtip not in self.changelog.nodemap: + partial, last, lrev = self._readbranchcache() + else: + lrev = self.changelog.rev(oldtip) + partial = self._branchcache + + self._branchtags(partial, lrev) + # this private cache holds all heads (not just tips) + self._branchcache = partial + + def branchmap(self): + '''returns a dictionary {branch: [branchheads]}''' + self.updatebranchcache() + return self._branchcache + + def branchtags(self): + '''return a dict where branch names map to the tipmost head of + the branch, open heads come before closed''' + bt = {} + for bn, heads in self.branchmap().iteritems(): + tip = heads[-1] + for h in reversed(heads): + if 'close' not in self.changelog.read(h)[5]: + tip = h + break + bt[bn] = tip + return bt + + + def _readbranchcache(self): + partial = {} + try: + f = self.opener("branchheads.cache") + lines = f.read().split('\n') + f.close() + except (IOError, OSError): + return {}, nullid, nullrev + + try: + last, lrev = lines.pop(0).split(" ", 1) + last, lrev = bin(last), int(lrev) + if lrev >= len(self) or self[lrev].node() != last: + # invalidate the cache + raise ValueError('invalidating branch cache (tip differs)') + for l in lines: + if not l: + continue + node, label = l.split(" ", 1) + partial.setdefault(label.strip(), []).append(bin(node)) + except KeyboardInterrupt: + raise + except Exception, inst: + if self.ui.debugflag: + self.ui.warn(str(inst), '\n') + partial, last, lrev = {}, nullid, nullrev + return partial, last, lrev + + def _writebranchcache(self, 
branches, tip, tiprev): + try: + f = self.opener("branchheads.cache", "w", atomictemp=True) + f.write("%s %s\n" % (hex(tip), tiprev)) + for label, nodes in branches.iteritems(): + for node in nodes: + f.write("%s %s\n" % (hex(node), label)) + f.rename() + except (IOError, OSError): + pass + + def _updatebranchcache(self, partial, ctxgen): + # collect new branch entries + newbranches = {} + for c in ctxgen: + newbranches.setdefault(c.branch(), []).append(c.node()) + # if older branchheads are reachable from new ones, they aren't + # really branchheads. Note checking parents is insufficient: + # 1 (branch a) -> 2 (branch b) -> 3 (branch a) + for branch, newnodes in newbranches.iteritems(): + bheads = partial.setdefault(branch, []) + bheads.extend(newnodes) + if len(bheads) <= 1: + continue + # starting from tip means fewer passes over reachable + while newnodes: + latest = newnodes.pop() + if latest not in bheads: + continue + minbhrev = self[min([self[bh].rev() for bh in bheads])].node() + reachable = self.changelog.reachable(latest, minbhrev) + reachable.remove(latest) + bheads = [b for b in bheads if b not in reachable] + partial[branch] = bheads + + def lookup(self, key): + if isinstance(key, int): + return self.changelog.node(key) + elif key == '.': + return self.dirstate.parents()[0] + elif key == 'null': + return nullid + elif key == 'tip': + return self.changelog.tip() + n = self.changelog._match(key) + if n: + return n + if key in self.tags(): + return self.tags()[key] + if key in self.branchtags(): + return self.branchtags()[key] + n = self.changelog._partialmatch(key) + if n: + return n + + # can't find key, check if it might have come from damaged dirstate + if key in self.dirstate.parents(): + raise error.Abort(_("working directory has unknown parent '%s'!") + % short(key)) + try: + if len(key) == 20: + key = hex(key) + except: + pass + raise error.RepoLookupError(_("unknown revision '%s'") % key) + + def lookupbranch(self, key, remote=None): + repo = 
remote or self + if key in repo.branchmap(): + return key + + repo = (remote and remote.local()) and remote or self + return repo[key].branch() + + def local(self): + return True + + def join(self, f): + return os.path.join(self.path, f) + + def wjoin(self, f): + return os.path.join(self.root, f) + + def file(self, f): + if f[0] == '/': + f = f[1:] + return filelog.filelog(self.sopener, f) + + def changectx(self, changeid): + return self[changeid] + + def parents(self, changeid=None): + '''get list of changectxs for parents of changeid''' + return self[changeid].parents() + + def filectx(self, path, changeid=None, fileid=None): + """changeid can be a changeset revision, node, or tag. + fileid can be a file revision or node.""" + return context.filectx(self, path, changeid, fileid) + + def getcwd(self): + return self.dirstate.getcwd() + + def pathto(self, f, cwd=None): + return self.dirstate.pathto(f, cwd) + + def wfile(self, f, mode='r'): + return self.wopener(f, mode) + + def _link(self, f): + return os.path.islink(self.wjoin(f)) + + def _loadfilter(self, filter): + if filter not in self.filterpats: + l = [] + for pat, cmd in self.ui.configitems(filter): + if cmd == '!': + continue + mf = matchmod.match(self.root, '', [pat]) + fn = None + params = cmd + for name, filterfn in self._datafilters.iteritems(): + if cmd.startswith(name): + fn = filterfn + params = cmd[len(name):].lstrip() + break + if not fn: + fn = lambda s, c, **kwargs: util.filter(s, c) + # Wrap old filters not supporting keyword arguments + if not inspect.getargspec(fn)[2]: + oldfn = fn + fn = lambda s, c, **kwargs: oldfn(s, c) + l.append((mf, fn, params)) + self.filterpats[filter] = l + return self.filterpats[filter] + + def _filter(self, filterpats, filename, data): + for mf, fn, cmd in filterpats: + if mf(filename): + self.ui.debug("filtering %s through %s\n" % (filename, cmd)) + data = fn(data, cmd, ui=self.ui, repo=self, filename=filename) + break + + return data + + @propertycache + def 
_encodefilterpats(self): + return self._loadfilter('encode') + + @propertycache + def _decodefilterpats(self): + return self._loadfilter('decode') + + def adddatafilter(self, name, filter): + self._datafilters[name] = filter + + def wread(self, filename): + if self._link(filename): + data = os.readlink(self.wjoin(filename)) + else: + data = self.wopener(filename, 'r').read() + return self._filter(self._encodefilterpats, filename, data) + + def wwrite(self, filename, data, flags): + data = self._filter(self._decodefilterpats, filename, data) + try: + os.unlink(self.wjoin(filename)) + except OSError: + pass + if 'l' in flags: + self.wopener.symlink(data, filename) + else: + self.wopener(filename, 'w').write(data) + if 'x' in flags: + util.set_flags(self.wjoin(filename), False, True) + + def wwritedata(self, filename, data): + return self._filter(self._decodefilterpats, filename, data) + + def transaction(self, desc): + tr = self._transref and self._transref() or None + if tr and tr.running(): + return tr.nest() + + # abort here if the journal already exists + if os.path.exists(self.sjoin("journal")): + raise error.RepoError( + _("abandoned transaction found - run hg recover")) + + # save dirstate for rollback + try: + ds = self.opener("dirstate").read() + except IOError: + ds = "" + self.opener("journal.dirstate", "w").write(ds) + self.opener("journal.branch", "w").write(self.dirstate.branch()) + self.opener("journal.desc", "w").write("%d\n%s\n" % (len(self), desc)) + + renames = [(self.sjoin("journal"), self.sjoin("undo")), + (self.join("journal.dirstate"), self.join("undo.dirstate")), + (self.join("journal.branch"), self.join("undo.branch")), + (self.join("journal.desc"), self.join("undo.desc"))] + tr = transaction.transaction(self.ui.warn, self.sopener, + self.sjoin("journal"), + aftertrans(renames), + self.store.createmode) + self._transref = weakref.ref(tr) + return tr + + def recover(self): + lock = self.lock() + try: + if os.path.exists(self.sjoin("journal")): 
+ self.ui.status(_("rolling back interrupted transaction\n")) + transaction.rollback(self.sopener, self.sjoin("journal"), + self.ui.warn) + self.invalidate() + return True + else: + self.ui.warn(_("no interrupted transaction available\n")) + return False + finally: + lock.release() + + def rollback(self, dryrun=False): + wlock = lock = None + try: + wlock = self.wlock() + lock = self.lock() + if os.path.exists(self.sjoin("undo")): + try: + args = self.opener("undo.desc", "r").read().splitlines() + if len(args) >= 3 and self.ui.verbose: + desc = _("rolling back to revision %s" + " (undo %s: %s)\n") % ( + int(args[0]) - 1, args[1], args[2]) + elif len(args) >= 2: + desc = _("rolling back to revision %s (undo %s)\n") % ( + int(args[0]) - 1, args[1]) + except IOError: + desc = _("rolling back unknown transaction\n") + self.ui.status(desc) + if dryrun: + return + transaction.rollback(self.sopener, self.sjoin("undo"), + self.ui.warn) + util.rename(self.join("undo.dirstate"), self.join("dirstate")) + try: + branch = self.opener("undo.branch").read() + self.dirstate.setbranch(branch) + except IOError: + self.ui.warn(_("Named branch could not be reset, " + "current branch still is: %s\n") + % encoding.tolocal(self.dirstate.branch())) + self.invalidate() + self.dirstate.invalidate() + self.destroyed() + else: + self.ui.warn(_("no rollback information available\n")) + return 1 + finally: + release(lock, wlock) + + def invalidatecaches(self): + self._tags = None + self._tagtypes = None + self.nodetagscache = None + self._branchcache = None # in UTF-8 + self._branchcachetip = None + + def invalidate(self): + for a in "changelog manifest".split(): + if a in self.__dict__: + delattr(self, a) + self.invalidatecaches() + + def _lock(self, lockname, wait, releasefn, acquirefn, desc): + try: + l = lock.lock(lockname, 0, releasefn, desc=desc) + except error.LockHeld, inst: + if not wait: + raise + self.ui.warn(_("waiting for lock on %s held by %r\n") % + (desc, inst.locker)) + # 
    def _filecommit(self, fctx, manifest1, manifest2, linkrev, tr, changelist):
        """
        commit an individual file as part of a larger transaction

        fctx is the file context to commit; manifest1/manifest2 are the
        manifests of the two parents.  Appends the filename to
        changelist when a new filelog revision is written, and returns
        the filelog node to record in the manifest.
        """

        fname = fctx.path()
        text = fctx.data()
        flog = self.file(fname)
        fparent1 = manifest1.get(fname, nullid)
        fparent2 = fparent2o = manifest2.get(fname, nullid)

        meta = {}
        copy = fctx.renamed()
        if copy and copy[0] != fname:
            # Mark the new revision of this file as a copy of another
            # file.  This copy data will effectively act as a parent
            # of this new revision.  If this is a merge, the first
            # parent will be the nullid (meaning "look up the copy data")
            # and the second one will be the other parent.  For example:
            #
            # 0 --- 1 --- 3   rev1 changes file foo
            #   \       /     rev2 renames foo to bar and changes it
            #    \- 2 -/      rev3 should have bar with all changes and
            #                      should record that bar descends from
            #                      bar in rev2 and foo in rev1
            #
            # this allows this merge to succeed:
            #
            # 0 --- 1 --- 3   rev4 reverts the content change from rev2
            #   \       /     merging rev3 and rev4 should use bar@rev2
            #    \- 2 --- 4   as the merge base
            #

            cfname = copy[0]
            crev = manifest1.get(cfname)
            newfparent = fparent2

            if manifest2: # branch merge
                if fparent2 == nullid or crev is None: # copied on remote side
                    if cfname in manifest2:
                        crev = manifest2[cfname]
                        newfparent = fparent1

            # find source in nearest ancestor if we've lost track
            if not crev:
                self.ui.debug(" %s: searching for copy revision for %s\n" %
                              (fname, cfname))
                for ancestor in self[None].ancestors():
                    if cfname in ancestor:
                        crev = ancestor[cfname].filenode()
                        break

            if crev:
                self.ui.debug(" %s: copy %s:%s\n" % (fname, cfname, hex(crev)))
                meta["copy"] = cfname
                meta["copyrev"] = hex(crev)
                fparent1, fparent2 = nullid, newfparent
            else:
                self.ui.warn(_("warning: can't find ancestor for '%s' "
                               "copied from '%s'!\n") % (fname, cfname))

        elif fparent2 != nullid:
            # is one parent an ancestor of the other?
            fparentancestor = flog.ancestor(fparent1, fparent2)
            if fparentancestor == fparent1:
                fparent1, fparent2 = fparent2, nullid
            elif fparentancestor == fparent2:
                fparent2 = nullid

        # is the file changed?
        if fparent2 != nullid or flog.cmp(fparent1, text) or meta:
            changelist.append(fname)
            return flog.add(text, meta, tr, linkrev, fparent1, fparent2)

        # are just the flags changed during merge?
        if fparent1 != fparent2o and manifest1.flags(fname) != fctx.flags():
            changelist.append(fname)

        # unchanged: keep pointing at the first parent's filelog node
        return fparent1
    def commit(self, text="", user=None, date=None, match=None, force=False,
               editor=False, extra={}):
        """Add a new revision to current repository.

        Revision information is gathered from the working directory,
        match can be used to filter the committed files. If editor is
        supplied, it is called to get a commit message.

        Returns the new changeset node, or None when there is nothing
        to commit.
        """

        def fail(f, msg):
            raise util.Abort('%s: %s' % (f, msg))

        if not match:
            match = matchmod.always(self.root, '')

        if not force:
            # record visited directories so explicit dir patterns can be
            # validated below; reject bad patterns immediately
            vdirs = []
            match.dir = vdirs.append
            match.bad = fail

        wlock = self.wlock()
        try:
            wctx = self[None]
            merge = len(wctx.parents()) > 1

            if (not force and merge and match and
                (match.files() or match.anypats())):
                raise util.Abort(_('cannot partially commit a merge '
                                   '(do not specify files or patterns)'))

            changes = self.status(match=match, clean=force)
            if force:
                changes[0].extend(changes[6]) # mq may commit unchanged files

            # check subrepos
            subs = []
            removedsubs = set()
            for p in wctx.parents():
                removedsubs.update(s for s in p.substate if match(s))
            for s in wctx.substate:
                removedsubs.discard(s)
                if match(s) and wctx.sub(s).dirty():
                    subs.append(s)
            if (subs or removedsubs):
                if (not match('.hgsub') and
                    '.hgsub' in (wctx.modified() + wctx.added())):
                    raise util.Abort(_("can't commit subrepos without .hgsub"))
                if '.hgsubstate' not in changes[0]:
                    changes[0].insert(0, '.hgsubstate')

            # make sure all explicit patterns are matched
            if not force and match.files():
                matched = set(changes[0] + changes[1] + changes[2])

                for f in match.files():
                    if f == '.' or f in matched or f in wctx.substate:
                        continue
                    if f in changes[3]: # missing
                        fail(f, _('file not found!'))
                    if f in vdirs: # visited directory
                        d = f + '/'
                        for mf in matched:
                            if mf.startswith(d):
                                break
                        else:
                            fail(f, _("no match under directory!"))
                    elif f not in self.dirstate:
                        fail(f, _("file not tracked!"))

            # nothing changed (and no branch/close marker): no commit
            if (not force and not extra.get("close") and not merge
                and not (changes[0] or changes[1] or changes[2])
                and wctx.branch() == wctx.p1().branch()):
                return None

            ms = mergemod.mergestate(self)
            for f in changes[0]:
                if f in ms and ms[f] == 'u':
                    raise util.Abort(_("unresolved merge conflicts "
                                       "(see hg resolve)"))

            cctx = context.workingctx(self, text, user, date, extra, changes)
            if editor:
                cctx._text = editor(self, cctx, subs)
            edited = (text != cctx._text)

            # commit subs
            if subs or removedsubs:
                state = wctx.substate.copy()
                for s in sorted(subs):
                    sub = wctx.sub(s)
                    self.ui.status(_('committing subrepository %s\n') %
                                   subrepo.subrelpath(sub))
                    sr = sub.commit(cctx._text, user, date)
                    state[s] = (state[s][0], sr)
                subrepo.writestate(self, state)

            # Save commit message in case this transaction gets rolled back
            # (e.g. by a pretxncommit hook).  Leave the content alone on
            # the assumption that the user will use the same editor again.
            msgfile = self.opener('last-message.txt', 'wb')
            msgfile.write(cctx._text)
            msgfile.close()

            p1, p2 = self.dirstate.parents()
            hookp1, hookp2 = hex(p1), (p2 != nullid and hex(p2) or '')
            try:
                self.hook("precommit", throw=True, parent1=hookp1, parent2=hookp2)
                ret = self.commitctx(cctx, True)
            # NOTE(review): bare except is deliberate here — any failure
            # path should point the user at the saved message — but it
            # also intercepts KeyboardInterrupt before re-raising.
            except:
                if edited:
                    msgfn = self.pathto(msgfile.name[len(self.root)+1:])
                    self.ui.write(
                        _('note: commit message saved in %s\n') % msgfn)
                raise

            # update dirstate and mergestate
            for f in changes[0] + changes[1]:
                self.dirstate.normal(f)
            for f in changes[2]:
                self.dirstate.forget(f)
            self.dirstate.setparents(ret)
            ms.reset()
        finally:
            wlock.release()

        # run the commit hook outside the wlock
        self.hook("commit", node=hex(ret), parent1=hookp1, parent2=hookp2)
        return ret
    def status(self, node1='.', node2=None, match=None,
               ignored=False, clean=False, unknown=False,
               listsubrepos=False):
        """return status of files between two nodes or node and working directory

        If node1 is None, use the first dirstate parent instead.
        If node2 is None, compare node1 with working directory.

        Returns a 7-tuple of sorted file lists:
        (modified, added, removed, deleted, unknown, ignored, clean);
        the last three are populated only when the corresponding
        keyword argument is True.
        """

        def mfmatches(ctx):
            # manifest of ctx restricted to files accepted by match
            mf = ctx.manifest().copy()
            for fn in mf.keys():
                if not match(fn):
                    del mf[fn]
            return mf

        if isinstance(node1, context.changectx):
            ctx1 = node1
        else:
            ctx1 = self[node1]
        if isinstance(node2, context.changectx):
            ctx2 = node2
        else:
            ctx2 = self[node2]

        working = ctx2.rev() is None
        parentworking = working and ctx1 == self['.']
        match = match or matchmod.always(self.root, self.getcwd())
        listignored, listclean, listunknown = ignored, clean, unknown

        # load earliest manifest first for caching reasons
        if not working and ctx2.rev() < ctx1.rev():
            ctx2.manifest()

        if not parentworking:
            def bad(f, msg):
                # only complain about files absent from the base context
                if f not in ctx1:
                    self.ui.warn('%s: %s\n' % (self.dirstate.pathto(f), msg))
            match.bad = bad

        if working: # we need to scan the working dir
            subrepos = []
            if '.hgsub' in self.dirstate:
                subrepos = ctx1.substate.keys()
            s = self.dirstate.status(match, subrepos, listignored,
                                     listclean, listunknown)
            cmp, modified, added, removed, deleted, unknown, ignored, clean = s

            # check for any possibly clean files
            if parentworking and cmp:
                fixup = []
                # do a full compare of any files that might have changed
                for f in sorted(cmp):
                    if (f not in ctx1 or ctx2.flags(f) != ctx1.flags(f)
                        or ctx1[f].cmp(ctx2[f])):
                        modified.append(f)
                    else:
                        fixup.append(f)

                # update dirstate for files that are actually clean
                if fixup:
                    if listclean:
                        clean += fixup

                    try:
                        # updating the dirstate is optional
                        # so we don't wait on the lock
                        wlock = self.wlock(False)
                        try:
                            for f in fixup:
                                self.dirstate.normal(f)
                        finally:
                            wlock.release()
                    except error.LockError:
                        pass

        if not parentworking:
            mf1 = mfmatches(ctx1)
            if working:
                # we are comparing working dir against non-parent
                # generate a pseudo-manifest for the working dir
                mf2 = mfmatches(self['.'])
                for f in cmp + modified + added:
                    mf2[f] = None
                    mf2.set(f, ctx2.flags(f))
                for f in removed:
                    if f in mf2:
                        del mf2[f]
            else:
                # we are comparing two revisions
                deleted, unknown, ignored = [], [], []
                mf2 = mfmatches(ctx2)

            # recompute modified/added/clean against the manifest pair
            modified, added, clean = [], [], []
            for fn in mf2:
                if fn in mf1:
                    if (mf1.flags(fn) != mf2.flags(fn) or
                        (mf1[fn] != mf2[fn] and
                         (mf2[fn] or ctx1[fn].cmp(ctx2[fn])))):
                        modified.append(fn)
                    elif listclean:
                        clean.append(fn)
                    del mf1[fn]
                else:
                    added.append(fn)
            # whatever is left in mf1 exists only on the mf1 side
            removed = mf1.keys()

        r = modified, added, removed, deleted, unknown, ignored, clean

        if listsubrepos:
            for subpath, sub in subrepo.itersubrepos(ctx1, ctx2):
                if working:
                    rev2 = None
                else:
                    rev2 = ctx2.substate[subpath][1]
                try:
                    submatch = matchmod.narrowmatcher(subpath, match)
                    s = sub.status(rev2, match=submatch, ignored=listignored,
                                   clean=listclean, unknown=listunknown,
                                   listsubrepos=True)
                    # merge the subrepo's lists into ours, prefixed
                    for rfiles, sfiles in zip(r, s):
                        rfiles.extend("%s/%s" % (subpath, f) for f in sfiles)
                except error.LookupError:
                    self.ui.status(_("skipping missing subrepository: %s\n")
                                   % subpath)

        [l.sort() for l in r]
        return r
+ ''' + if branch is None: + branch = self[None].branch() + branches = self.branchmap() + if branch not in branches: + return [] + # the cache returns heads ordered lowest to highest + bheads = list(reversed(branches[branch])) + if start is not None: + # filter out the heads that cannot be reached from startrev + fbheads = set(self.changelog.nodesbetween([start], bheads)[2]) + bheads = [h for h in bheads if h in fbheads] + if not closed: + bheads = [h for h in bheads if + ('close' not in self.changelog.read(h)[5])] + return bheads + + def branches(self, nodes): + if not nodes: + nodes = [self.changelog.tip()] + b = [] + for n in nodes: + t = n + while 1: + p = self.changelog.parents(n) + if p[1] != nullid or p[0] == nullid: + b.append((t, n, p[0], p[1])) + break + n = p[0] + return b + + def between(self, pairs): + r = [] + + for top, bottom in pairs: + n, l, i = top, [], 0 + f = 1 + + while n != bottom and n != nullid: + p = self.changelog.parents(n)[0] + if i == f: + l.append(n) + f = f * 2 + n = p + i += 1 + + r.append(l) + + return r + + def pull(self, remote, heads=None, force=False): + lock = self.lock() + try: + tmp = discovery.findcommonincoming(self, remote, heads=heads, + force=force) + common, fetch, rheads = tmp + if not fetch: + self.ui.status(_("no changes found\n")) + return 0 + + if heads is None and fetch == [nullid]: + self.ui.status(_("requesting all changes\n")) + elif heads is None and remote.capable('changegroupsubset'): + # issue1320, avoid a race if remote changed after discovery + heads = rheads + + if heads is None: + cg = remote.changegroup(fetch, 'pull') + else: + if not remote.capable('changegroupsubset'): + raise util.Abort(_("partial pull cannot be done because " + "other repository doesn't support " + "changegroupsubset.")) + cg = remote.changegroupsubset(fetch, heads, 'pull') + return self.addchangegroup(cg, 'pull', remote.url(), lock=lock) + finally: + lock.release() + + def push(self, remote, force=False, revs=None, 
newbranch=False): + '''Push outgoing changesets (limited by revs) from the current + repository to remote. Return an integer: + - 0 means HTTP error *or* nothing to push + - 1 means we pushed and remote head count is unchanged *or* + we have outgoing changesets but refused to push + - other values as described by addchangegroup() + ''' + # there are two ways to push to remote repo: + # + # addchangegroup assumes local user can lock remote + # repo (local filesystem, old ssh servers). + # + # unbundle assumes local user cannot lock remote repo (new ssh + # servers, http servers). + + lock = None + unbundle = remote.capable('unbundle') + if not unbundle: + lock = remote.lock() + try: + ret = discovery.prepush(self, remote, force, revs, newbranch) + if ret[0] is None: + # and here we return 0 for "nothing to push" or 1 for + # "something to push but I refuse" + return ret[1] + + cg, remote_heads = ret + if unbundle: + # local repo finds heads on server, finds out what revs it must + # push. once revs transferred, if server finds it has + # different heads (someone else won commit/push race), server + # aborts. + if force: + remote_heads = ['force'] + # ssh: return remote's addchangegroup() + # http: return remote's addchangegroup() or 0 for error + return remote.unbundle(cg, remote_heads, 'push') + else: + # we return an integer indicating remote head count change + return remote.addchangegroup(cg, 'push', self.url(), lock=lock) + finally: + if lock is not None: + lock.release() + + def changegroupinfo(self, nodes, source): + if self.ui.verbose or source == 'bundle': + self.ui.status(_("%d changesets found\n") % len(nodes)) + if self.ui.debugflag: + self.ui.debug("list of changesets:\n") + for node in nodes: + self.ui.debug("%s\n" % hex(node)) + + def changegroupsubset(self, bases, heads, source, extranodes=None): + """Compute a changegroup consisting of all the nodes that are + descendents of any of the bases and ancestors of any of the heads. 
+ Return a chunkbuffer object whose read() method will return + successive changegroup chunks. + + It is fairly complex as determining which filenodes and which + manifest nodes need to be included for the changeset to be complete + is non-trivial. + + Another wrinkle is doing the reverse, figuring out which changeset in + the changegroup a particular filenode or manifestnode belongs to. + + The caller can specify some nodes that must be included in the + changegroup using the extranodes argument. It should be a dict + where the keys are the filenames (or 1 for the manifest), and the + values are lists of (node, linknode) tuples, where node is a wanted + node and linknode is the changelog node that should be transmitted as + the linkrev. + """ + + # Set up some initial variables + # Make it easy to refer to self.changelog + cl = self.changelog + # Compute the list of changesets in this changegroup. + # Some bases may turn out to be superfluous, and some heads may be + # too. nodesbetween will return the minimal set of bases and heads + # necessary to re-create the changegroup. + if not bases: + bases = [nullid] + msng_cl_lst, bases, heads = cl.nodesbetween(bases, heads) + + if extranodes is None: + # can we go through the fast path ? + heads.sort() + allheads = self.heads() + allheads.sort() + if heads == allheads: + return self._changegroup(msng_cl_lst, source) + + # slow path + self.hook('preoutgoing', throw=True, source=source) + + self.changegroupinfo(msng_cl_lst, source) + + # We assume that all ancestors of bases are known + commonrevs = set(cl.ancestors(*[cl.rev(n) for n in bases])) + + # Make it easy to refer to self.manifest + mnfst = self.manifest + # We don't know which manifests are missing yet + msng_mnfst_set = {} + # Nor do we know which filenodes are missing. 
+ msng_filenode_set = {} + + junk = mnfst.index[len(mnfst) - 1] # Get around a bug in lazyindex + junk = None + + # A changeset always belongs to itself, so the changenode lookup + # function for a changenode is identity. + def identity(x): + return x + + # A function generating function that sets up the initial environment + # the inner function. + def filenode_collector(changedfiles): + # This gathers information from each manifestnode included in the + # changegroup about which filenodes the manifest node references + # so we can include those in the changegroup too. + # + # It also remembers which changenode each filenode belongs to. It + # does this by assuming the a filenode belongs to the changenode + # the first manifest that references it belongs to. + def collect_msng_filenodes(mnfstnode): + r = mnfst.rev(mnfstnode) + if mnfst.deltaparent(r) in mnfst.parentrevs(r): + # If the previous rev is one of the parents, + # we only need to see a diff. + deltamf = mnfst.readdelta(mnfstnode) + # For each line in the delta + for f, fnode in deltamf.iteritems(): + # And if the file is in the list of files we care + # about. + if f in changedfiles: + # Get the changenode this manifest belongs to + clnode = msng_mnfst_set[mnfstnode] + # Create the set of filenodes for the file if + # there isn't one already. + ndset = msng_filenode_set.setdefault(f, {}) + # And set the filenode's changelog node to the + # manifest's if it hasn't been set already. + ndset.setdefault(fnode, clnode) + else: + # Otherwise we need a full manifest. + m = mnfst.read(mnfstnode) + # For every file in we care about. + for f in changedfiles: + fnode = m.get(f, None) + # If it's in the manifest + if fnode is not None: + # See comments above. 
+ clnode = msng_mnfst_set[mnfstnode] + ndset = msng_filenode_set.setdefault(f, {}) + ndset.setdefault(fnode, clnode) + return collect_msng_filenodes + + # If we determine that a particular file or manifest node must be a + # node that the recipient of the changegroup will already have, we can + # also assume the recipient will have all the parents. This function + # prunes them from the set of missing nodes. + def prune(revlog, missingnodes): + hasset = set() + # If a 'missing' filenode thinks it belongs to a changenode we + # assume the recipient must have, then the recipient must have + # that filenode. + for n in missingnodes: + clrev = revlog.linkrev(revlog.rev(n)) + if clrev in commonrevs: + hasset.add(n) + for n in hasset: + missingnodes.pop(n, None) + for r in revlog.ancestors(*[revlog.rev(n) for n in hasset]): + missingnodes.pop(revlog.node(r), None) + + # Add the nodes that were explicitly requested. + def add_extra_nodes(name, nodes): + if not extranodes or name not in extranodes: + return + + for node, linknode in extranodes[name]: + if node not in nodes: + nodes[node] = linknode + + # Now that we have all theses utility functions to help out and + # logically divide up the task, generate the group. + def gengroup(): + # The set of changed files starts empty. + changedfiles = set() + collect = changegroup.collector(cl, msng_mnfst_set, changedfiles) + + # Create a changenode group generator that will call our functions + # back to lookup the owning changenode and collect information. + group = cl.group(msng_cl_lst, identity, collect) + for cnt, chnk in enumerate(group): + yield chnk + self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) + self.ui.progress(_('bundling changes'), None) + + prune(mnfst, msng_mnfst_set) + add_extra_nodes(1, msng_mnfst_set) + msng_mnfst_lst = msng_mnfst_set.keys() + # Sort the manifestnodes by revision number. 
+ msng_mnfst_lst.sort(key=mnfst.rev) + # Create a generator for the manifestnodes that calls our lookup + # and data collection functions back. + group = mnfst.group(msng_mnfst_lst, + lambda mnode: msng_mnfst_set[mnode], + filenode_collector(changedfiles)) + for cnt, chnk in enumerate(group): + yield chnk + self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) + self.ui.progress(_('bundling manifests'), None) + + # These are no longer needed, dereference and toss the memory for + # them. + msng_mnfst_lst = None + msng_mnfst_set.clear() + + if extranodes: + for fname in extranodes: + if isinstance(fname, int): + continue + msng_filenode_set.setdefault(fname, {}) + changedfiles.add(fname) + # Go through all our files in order sorted by name. + cnt = 0 + for fname in sorted(changedfiles): + filerevlog = self.file(fname) + if not len(filerevlog): + raise util.Abort(_("empty or missing revlog for %s") % fname) + # Toss out the filenodes that the recipient isn't really + # missing. + missingfnodes = msng_filenode_set.pop(fname, {}) + prune(filerevlog, missingfnodes) + add_extra_nodes(fname, missingfnodes) + # If any filenodes are left, generate the group for them, + # otherwise don't bother. + if missingfnodes: + yield changegroup.chunkheader(len(fname)) + yield fname + # Sort the filenodes by their revision # (topological order) + nodeiter = list(missingfnodes) + nodeiter.sort(key=filerevlog.rev) + # Create a group generator and only pass in a changenode + # lookup function as we need to collect no information + # from filenodes. + group = filerevlog.group(nodeiter, + lambda fnode: missingfnodes[fnode]) + for chnk in group: + self.ui.progress( + _('bundling files'), cnt, item=fname, unit=_('chunks')) + cnt += 1 + yield chnk + # Signal that no more groups are left. 
+ yield changegroup.closechunk() + self.ui.progress(_('bundling files'), None) + + if msng_cl_lst: + self.hook('outgoing', node=hex(msng_cl_lst[0]), source=source) + + return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN') + + def changegroup(self, basenodes, source): + # to avoid a race we use changegroupsubset() (issue1320) + return self.changegroupsubset(basenodes, self.heads(), source) + + def _changegroup(self, nodes, source): + """Compute the changegroup of all nodes that we have that a recipient + doesn't. Return a chunkbuffer object whose read() method will return + successive changegroup chunks. + + This is much easier than the previous function as we can assume that + the recipient has any changenode we aren't sending them. + + nodes is the set of nodes to send""" + + self.hook('preoutgoing', throw=True, source=source) + + cl = self.changelog + revset = set([cl.rev(n) for n in nodes]) + self.changegroupinfo(nodes, source) + + def identity(x): + return x + + def gennodelst(log): + for r in log: + if log.linkrev(r) in revset: + yield log.node(r) + + def lookuplinkrev_func(revlog): + def lookuplinkrev(n): + return cl.node(revlog.linkrev(revlog.rev(n))) + return lookuplinkrev + + def gengroup(): + '''yield a sequence of changegroup chunks (strings)''' + # construct a list of all changed files + changedfiles = set() + mmfs = {} + collect = changegroup.collector(cl, mmfs, changedfiles) + + for cnt, chnk in enumerate(cl.group(nodes, identity, collect)): + self.ui.progress(_('bundling changes'), cnt, unit=_('chunks')) + yield chnk + self.ui.progress(_('bundling changes'), None) + + mnfst = self.manifest + nodeiter = gennodelst(mnfst) + for cnt, chnk in enumerate(mnfst.group(nodeiter, + lookuplinkrev_func(mnfst))): + self.ui.progress(_('bundling manifests'), cnt, unit=_('chunks')) + yield chnk + self.ui.progress(_('bundling manifests'), None) + + cnt = 0 + for fname in sorted(changedfiles): + filerevlog = self.file(fname) + if not len(filerevlog): + 
raise util.Abort(_("empty or missing revlog for %s") % fname) + nodeiter = gennodelst(filerevlog) + nodeiter = list(nodeiter) + if nodeiter: + yield changegroup.chunkheader(len(fname)) + yield fname + lookup = lookuplinkrev_func(filerevlog) + for chnk in filerevlog.group(nodeiter, lookup): + self.ui.progress( + _('bundling files'), cnt, item=fname, unit=_('chunks')) + cnt += 1 + yield chnk + self.ui.progress(_('bundling files'), None) + + yield changegroup.closechunk() + + if nodes: + self.hook('outgoing', node=hex(nodes[0]), source=source) + + return changegroup.unbundle10(util.chunkbuffer(gengroup()), 'UN') + + def addchangegroup(self, source, srctype, url, emptyok=False, lock=None): + """Add the changegroup returned by source.read() to this repo. + srctype is a string like 'push', 'pull', or 'unbundle'. url is + the URL of the repo where this changegroup is coming from. + + Return an integer summarizing the change to this repo: + - nothing changed or no source: 0 + - more heads than before: 1+added heads (2..n) + - fewer heads than before: -1-removed heads (-2..-n) + - number of heads stays the same: 1 + """ + def csmap(x): + self.ui.debug("add changeset %s\n" % short(x)) + return len(cl) + + def revmap(x): + return cl.rev(x) + + if not source: + return 0 + + self.hook('prechangegroup', throw=True, source=srctype, url=url) + + changesets = files = revisions = 0 + efiles = set() + + # write changelog data to temp files so concurrent readers will not see + # inconsistent view + cl = self.changelog + cl.delayupdate() + oldheads = len(cl.heads()) + + tr = self.transaction("\n".join([srctype, urlmod.hidepassword(url)])) + try: + trp = weakref.proxy(tr) + # pull off the changeset group + self.ui.status(_("adding changesets\n")) + clstart = len(cl) + class prog(object): + step = _('changesets') + count = 1 + ui = self.ui + total = None + def __call__(self): + self.ui.progress(self.step, self.count, unit=_('chunks'), + total=self.total) + self.count += 1 + pr = prog() + 
source.callback = pr + + if (cl.addgroup(source, csmap, trp) is None + and not emptyok): + raise util.Abort(_("received changelog group is empty")) + clend = len(cl) + changesets = clend - clstart + for c in xrange(clstart, clend): + efiles.update(self[c].files()) + efiles = len(efiles) + self.ui.progress(_('changesets'), None) + + # pull off the manifest group + self.ui.status(_("adding manifests\n")) + pr.step = _('manifests') + pr.count = 1 + pr.total = changesets # manifests <= changesets + # no need to check for empty manifest group here: + # if the result of the merge of 1 and 2 is the same in 3 and 4, + # no new manifest will be created and the manifest group will + # be empty during the pull + self.manifest.addgroup(source, revmap, trp) + self.ui.progress(_('manifests'), None) + + needfiles = {} + if self.ui.configbool('server', 'validate', default=False): + # validate incoming csets have their manifests + for cset in xrange(clstart, clend): + mfest = self.changelog.read(self.changelog.node(cset))[0] + mfest = self.manifest.readdelta(mfest) + # store file nodes we must see + for f, n in mfest.iteritems(): + needfiles.setdefault(f, set()).add(n) + + # process the files + self.ui.status(_("adding file changes\n")) + pr.step = 'files' + pr.count = 1 + pr.total = efiles + source.callback = None + + while 1: + f = source.chunk() + if not f: + break + self.ui.debug("adding %s revisions\n" % f) + pr() + fl = self.file(f) + o = len(fl) + if fl.addgroup(source, revmap, trp) is None: + raise util.Abort(_("received file revlog group is empty")) + revisions += len(fl) - o + files += 1 + if f in needfiles: + needs = needfiles[f] + for new in xrange(o, len(fl)): + n = fl.node(new) + if n in needs: + needs.remove(n) + if not needs: + del needfiles[f] + self.ui.progress(_('files'), None) + + for f, needs in needfiles.iteritems(): + fl = self.file(f) + for n in needs: + try: + fl.rev(n) + except error.LookupError: + raise util.Abort( + _('missing file data for %s:%s - run 
hg verify') % + (f, hex(n))) + + newheads = len(cl.heads()) + heads = "" + if oldheads and newheads != oldheads: + heads = _(" (%+d heads)") % (newheads - oldheads) + + self.ui.status(_("added %d changesets" + " with %d changes to %d files%s\n") + % (changesets, revisions, files, heads)) + + if changesets > 0: + p = lambda: cl.writepending() and self.root or "" + self.hook('pretxnchangegroup', throw=True, + node=hex(cl.node(clstart)), source=srctype, + url=url, pending=p) + + # make changelog see real files again + cl.finalize(trp) + + tr.close() + finally: + tr.release() + if lock: + lock.release() + + if changesets > 0: + # forcefully update the on-disk branch cache + self.ui.debug("updating the branch cache\n") + self.updatebranchcache() + self.hook("changegroup", node=hex(cl.node(clstart)), + source=srctype, url=url) + + for i in xrange(clstart, clend): + self.hook("incoming", node=hex(cl.node(i)), + source=srctype, url=url) + + # never return 0 here: + if newheads < oldheads: + return newheads - oldheads - 1 + else: + return newheads - oldheads + 1 + + + def stream_in(self, remote, requirements): + fp = remote.stream_out() + l = fp.readline() + try: + resp = int(l) + except ValueError: + raise error.ResponseError( + _('Unexpected response from remote server:'), l) + if resp == 1: + raise util.Abort(_('operation forbidden by server')) + elif resp == 2: + raise util.Abort(_('locking the remote repository failed')) + elif resp != 0: + raise util.Abort(_('the server sent an unknown error code')) + self.ui.status(_('streaming all changes\n')) + l = fp.readline() + try: + total_files, total_bytes = map(int, l.split(' ', 1)) + except (ValueError, TypeError): + raise error.ResponseError( + _('Unexpected response from remote server:'), l) + self.ui.status(_('%d files to transfer, %s of data\n') % + (total_files, util.bytecount(total_bytes))) + start = time.time() + for i in xrange(total_files): + # XXX doesn't support '\n' or '\r' in filenames + l = fp.readline() + 
try: + name, size = l.split('\0', 1) + size = int(size) + except (ValueError, TypeError): + raise error.ResponseError( + _('Unexpected response from remote server:'), l) + self.ui.debug('adding %s (%s)\n' % (name, util.bytecount(size))) + # for backwards compat, name was partially encoded + ofp = self.sopener(store.decodedir(name), 'w') + for chunk in util.filechunkiter(fp, limit=size): + ofp.write(chunk) + ofp.close() + elapsed = time.time() - start + if elapsed <= 0: + elapsed = 0.001 + self.ui.status(_('transferred %s in %.1f seconds (%s/sec)\n') % + (util.bytecount(total_bytes), elapsed, + util.bytecount(total_bytes / elapsed))) + + # new requirements = old non-format requirements + new format-related + # requirements from the streamed-in repository + requirements.update(set(self.requirements) - self.supportedformats) + self._applyrequirements(requirements) + self._writerequirements() + + self.invalidate() + return len(self.heads()) + 1 + + def clone(self, remote, heads=[], stream=False): + '''clone remote repository. + + keyword arguments: + heads: list of revs to clone (forces use of pull) + stream: use streaming clone if possible''' + + # now, all clients that can request uncompressed clones can + # read repo formats supported by all servers that can serve + # them. + + # if revlog format changes, client will have to check version + # and format flags on "stream" capability, and use + # uncompressed only if compatible. 
+ + if stream and not heads: + # 'stream' means remote revlog format is revlogv1 only + if remote.capable('stream'): + return self.stream_in(remote, set(('revlogv1',))) + # otherwise, 'streamreqs' contains the remote revlog format + streamreqs = remote.capable('streamreqs') + if streamreqs: + streamreqs = set(streamreqs.split(',')) + # if we support it, stream in and adjust our requirements + if not streamreqs - self.supportedformats: + return self.stream_in(remote, streamreqs) + return self.pull(remote, heads) + + def pushkey(self, namespace, key, old, new): + return pushkey.push(self, namespace, key, old, new) + + def listkeys(self, namespace): + return pushkey.list(self, namespace) + +# used to avoid circular references so destructors work +def aftertrans(files): + renamefiles = [tuple(t) for t in files] + def a(): + for src, dest in renamefiles: + util.rename(src, dest) + return a + +def instance(ui, path, create): + return localrepository(ui, util.drop_scheme('file', path), create) + +def islocal(path): + return True diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/localrepo.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/localrepo.pyo Binary files differnew file mode 100644 index 0000000..c6b363e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/localrepo.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lock.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lock.py new file mode 100644 index 0000000..e1dffcf --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lock.py @@ -0,0 +1,137 @@ +# lock.py - simple advisory locking scheme for mercurial +# +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import util, error +import errno, os, socket, time +import warnings + +class lock(object): + '''An advisory lock held by one process to control access to a set + of files. Non-cooperating processes or incorrectly written scripts + can ignore Mercurial's locking scheme and stomp all over the + repository, so don't do that. + + Typically used via localrepository.lock() to lock the repository + store (.hg/store/) or localrepository.wlock() to lock everything + else under .hg/.''' + + # lock is symlink on platforms that support it, file on others. + + # symlink is used because create of directory entry and contents + # are atomic even over nfs. + + # old-style lock: symlink to pid + # new-style lock: symlink to hostname:pid + + _host = None + + def __init__(self, file, timeout=-1, releasefn=None, desc=None): + self.f = file + self.held = 0 + self.timeout = timeout + self.releasefn = releasefn + self.desc = desc + self.lock() + + def __del__(self): + if self.held: + warnings.warn("use lock.release instead of del lock", + category=DeprecationWarning, + stacklevel=2) + + # ensure the lock will be removed + # even if recursive locking did occur + self.held = 1 + + self.release() + + def lock(self): + timeout = self.timeout + while 1: + try: + self.trylock() + return 1 + except error.LockHeld, inst: + if timeout != 0: + time.sleep(1) + if timeout > 0: + timeout -= 1 + continue + raise error.LockHeld(errno.ETIMEDOUT, inst.filename, self.desc, + inst.locker) + + def trylock(self): + if self.held: + self.held += 1 + return + if lock._host is None: + lock._host = socket.gethostname() + lockname = '%s:%s' % (lock._host, os.getpid()) + while not self.held: + try: + util.makelock(lockname, self.f) + self.held = 1 + except (OSError, IOError), why: + if why.errno == errno.EEXIST: + locker = self.testlock() + if locker is not None: + raise error.LockHeld(errno.EAGAIN, self.f, self.desc, + locker) + else: + raise error.LockUnavailable(why.errno, why.strerror, + why.filename, 
self.desc) + + def testlock(self): + """return id of locker if lock is valid, else None. + + If old-style lock, we cannot tell what machine locker is on. + with new-style lock, if locker is on this machine, we can + see if locker is alive. If locker is on this machine but + not alive, we can safely break lock. + + The lock file is only deleted when None is returned. + + """ + locker = util.readlock(self.f) + try: + host, pid = locker.split(":", 1) + except ValueError: + return locker + if host != lock._host: + return locker + try: + pid = int(pid) + except ValueError: + return locker + if util.testpid(pid): + return locker + # if locker dead, break lock. must do this with another lock + # held, or can race and break valid lock. + try: + l = lock(self.f + '.break', timeout=0) + os.unlink(self.f) + l.release() + except error.LockError: + return locker + + def release(self): + if self.held > 1: + self.held -= 1 + elif self.held == 1: + self.held = 0 + if self.releasefn: + self.releasefn() + try: + os.unlink(self.f) + except OSError: + pass + +def release(*locks): + for lock in locks: + if lock is not None: + lock.release() + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lock.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lock.pyo Binary files differnew file mode 100644 index 0000000..e52d1fc --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lock.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprof.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprof.py new file mode 100644 index 0000000..e9b185b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprof.py @@ -0,0 +1,111 @@ +import sys +from _lsprof import Profiler, profiler_entry + +__all__ = ['profile', 'Stats'] + +def profile(f, *args, **kwds): + """XXX docstring""" + p = Profiler() + p.enable(subcalls=True, builtins=True) + try: + f(*args, **kwds) + finally: + p.disable() + return 
Stats(p.getstats()) + + +class Stats(object): + """XXX docstring""" + + def __init__(self, data): + self.data = data + + def sort(self, crit="inlinetime"): + """XXX docstring""" + if crit not in profiler_entry.__dict__: + raise ValueError("Can't sort by %s" % crit) + self.data.sort(key=lambda x: getattr(x, crit), reverse=True) + for e in self.data: + if e.calls: + e.calls.sort(key=lambda x: getattr(x, crit), reverse=True) + + def pprint(self, top=None, file=None, limit=None, climit=None): + """XXX docstring""" + if file is None: + file = sys.stdout + d = self.data + if top is not None: + d = d[:top] + cols = "% 12s %12s %11.4f %11.4f %s\n" + hcols = "% 12s %12s %12s %12s %s\n" + file.write(hcols % ("CallCount", "Recursive", "Total(ms)", + "Inline(ms)", "module:lineno(function)")) + count = 0 + for e in d: + file.write(cols % (e.callcount, e.reccallcount, e.totaltime, + e.inlinetime, label(e.code))) + count += 1 + if limit is not None and count == limit: + return + ccount = 0 + if e.calls: + for se in e.calls: + file.write(cols % ("+%s" % se.callcount, se.reccallcount, + se.totaltime, se.inlinetime, + "+%s" % label(se.code))) + count += 1 + ccount += 1 + if limit is not None and count == limit: + return + if climit is not None and ccount == climit: + break + + def freeze(self): + """Replace all references to code objects with string + descriptions; this makes it possible to pickle the instance.""" + + # this code is probably rather ickier than it needs to be! 
+ for i in range(len(self.data)): + e = self.data[i] + if not isinstance(e.code, str): + self.data[i] = type(e)((label(e.code),) + e[1:]) + if e.calls: + for j in range(len(e.calls)): + se = e.calls[j] + if not isinstance(se.code, str): + e.calls[j] = type(se)((label(se.code),) + se[1:]) + +_fn2mod = {} + +def label(code): + if isinstance(code, str): + return code + try: + mname = _fn2mod[code.co_filename] + except KeyError: + for k, v in list(sys.modules.iteritems()): + if v is None: + continue + if not hasattr(v, '__file__'): + continue + if not isinstance(v.__file__, str): + continue + if v.__file__.startswith(code.co_filename): + mname = _fn2mod[code.co_filename] = k + break + else: + mname = _fn2mod[code.co_filename] = '<%s>' % code.co_filename + + return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name) + + +if __name__ == '__main__': + import os + sys.argv = sys.argv[1:] + if not sys.argv: + print >> sys.stderr, "usage: lsprof.py <script> <arguments...>" + sys.exit(2) + sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0]))) + stats = profile(execfile, sys.argv[0], globals(), locals()) + stats.sort() + stats.pprint() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprof.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprof.pyo Binary files differnew file mode 100644 index 0000000..8461f91 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprof.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprofcalltree.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprofcalltree.py new file mode 100644 index 0000000..358b951 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprofcalltree.py @@ -0,0 +1,86 @@ +""" +lsprofcalltree.py - lsprof output which is readable by kcachegrind + +Authors: + * David Allouche <david <at> allouche.net> + * Jp Calderone & Itamar Shtull-Trauring + * Johan Dahlin + +This software may be used and distributed 
according to the terms +of the GNU General Public License, incorporated herein by reference. +""" + +def label(code): + if isinstance(code, str): + return '~' + code # built-in functions ('~' sorts at the end) + else: + return '%s %s:%d' % (code.co_name, + code.co_filename, + code.co_firstlineno) + +class KCacheGrind(object): + def __init__(self, profiler): + self.data = profiler.getstats() + self.out_file = None + + def output(self, out_file): + self.out_file = out_file + print >> out_file, 'events: Ticks' + self._print_summary() + for entry in self.data: + self._entry(entry) + + def _print_summary(self): + max_cost = 0 + for entry in self.data: + totaltime = int(entry.totaltime * 1000) + max_cost = max(max_cost, totaltime) + print >> self.out_file, 'summary: %d' % (max_cost,) + + def _entry(self, entry): + out_file = self.out_file + + code = entry.code + #print >> out_file, 'ob=%s' % (code.co_filename,) + if isinstance(code, str): + print >> out_file, 'fi=~' + else: + print >> out_file, 'fi=%s' % (code.co_filename,) + print >> out_file, 'fn=%s' % (label(code),) + + inlinetime = int(entry.inlinetime * 1000) + if isinstance(code, str): + print >> out_file, '0 ', inlinetime + else: + print >> out_file, '%d %d' % (code.co_firstlineno, inlinetime) + + # recursive calls are counted in entry.calls + if entry.calls: + calls = entry.calls + else: + calls = [] + + if isinstance(code, str): + lineno = 0 + else: + lineno = code.co_firstlineno + + for subentry in calls: + self._subentry(lineno, subentry) + print >> out_file + + def _subentry(self, lineno, subentry): + out_file = self.out_file + code = subentry.code + #print >> out_file, 'cob=%s' % (code.co_filename,) + print >> out_file, 'cfn=%s' % (label(code),) + if isinstance(code, str): + print >> out_file, 'cfi=~' + print >> out_file, 'calls=%d 0' % (subentry.callcount,) + else: + print >> out_file, 'cfi=%s' % (code.co_filename,) + print >> out_file, 'calls=%d %d' % ( + subentry.callcount, code.co_firstlineno) + + 
totaltime = int(subentry.totaltime * 1000) + print >> out_file, '%d %d' % (lineno, totaltime) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprofcalltree.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprofcalltree.pyo Binary files differnew file mode 100644 index 0000000..928ca97 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/lsprofcalltree.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mail.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mail.py new file mode 100644 index 0000000..3aa1a55 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mail.py @@ -0,0 +1,226 @@ +# mail.py - mail sending bits for mercurial +# +# Copyright 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from i18n import _ +import util, encoding +import os, smtplib, socket, quopri +import email.Header, email.MIMEText, email.Utils + +_oldheaderinit = email.Header.Header.__init__ +def _unifiedheaderinit(self, *args, **kw): + """ + Python2.7 introduces a backwards incompatible change + (Python issue1974, r70772) in email.Generator.Generator code: + pre-2.7 code passed "continuation_ws='\t'" to the Header + constructor, and 2.7 removed this parameter. 
+ + Default argument is continuation_ws=' ', which means that the + behaviour is different in <2.7 and 2.7 + + We consider the 2.7 behaviour to be preferable, but need + to have an unified behaviour for versions 2.4 to 2.7 + """ + # override continuation_ws + kw['continuation_ws'] = ' ' + _oldheaderinit(self, *args, **kw) + +email.Header.Header.__dict__['__init__'] = _unifiedheaderinit + +def _smtp(ui): + '''build an smtp connection and return a function to send mail''' + local_hostname = ui.config('smtp', 'local_hostname') + s = smtplib.SMTP(local_hostname=local_hostname) + mailhost = ui.config('smtp', 'host') + if not mailhost: + raise util.Abort(_('smtp.host not configured - cannot send mail')) + mailport = util.getport(ui.config('smtp', 'port', 25)) + ui.note(_('sending mail: smtp host %s, port %s\n') % + (mailhost, mailport)) + s.connect(host=mailhost, port=mailport) + if ui.configbool('smtp', 'tls'): + if not hasattr(socket, 'ssl'): + raise util.Abort(_("can't use TLS: Python SSL support " + "not installed")) + ui.note(_('(using tls)\n')) + s.ehlo() + s.starttls() + s.ehlo() + username = ui.config('smtp', 'username') + password = ui.config('smtp', 'password') + if username and not password: + password = ui.getpass() + if username and password: + ui.note(_('(authenticating to mail server as %s)\n') % + (username)) + try: + s.login(username, password) + except smtplib.SMTPException, inst: + raise util.Abort(inst) + + def send(sender, recipients, msg): + try: + return s.sendmail(sender, recipients, msg) + except smtplib.SMTPRecipientsRefused, inst: + recipients = [r[1] for r in inst.recipients.values()] + raise util.Abort('\n' + '\n'.join(recipients)) + except smtplib.SMTPException, inst: + raise util.Abort(inst) + + return send + +def _sendmail(ui, sender, recipients, msg): + '''send mail using sendmail.''' + program = ui.config('email', 'method') + cmdline = '%s -f %s %s' % (program, util.email(sender), + ' '.join(map(util.email, recipients))) + 
ui.note(_('sending mail: %s\n') % cmdline) + fp = util.popen(cmdline, 'w') + fp.write(msg) + ret = fp.close() + if ret: + raise util.Abort('%s %s' % ( + os.path.basename(program.split(None, 1)[0]), + util.explain_exit(ret)[0])) + +def connect(ui): + '''make a mail connection. return a function to send mail. + call as sendmail(sender, list-of-recipients, msg).''' + if ui.config('email', 'method', 'smtp') == 'smtp': + return _smtp(ui) + return lambda s, r, m: _sendmail(ui, s, r, m) + +def sendmail(ui, sender, recipients, msg): + send = connect(ui) + return send(sender, recipients, msg) + +def validateconfig(ui): + '''determine if we have enough config data to try sending email.''' + method = ui.config('email', 'method', 'smtp') + if method == 'smtp': + if not ui.config('smtp', 'host'): + raise util.Abort(_('smtp specified as email transport, ' + 'but no smtp host configured')) + else: + if not util.find_exe(method): + raise util.Abort(_('%r specified as email transport, ' + 'but not in PATH') % method) + +def mimetextpatch(s, subtype='plain', display=False): + '''If patch in utf-8 transfer-encode it.''' + + enc = None + for line in s.splitlines(): + if len(line) > 950: + s = quopri.encodestring(s) + enc = "quoted-printable" + break + + cs = 'us-ascii' + if not display: + try: + s.decode('us-ascii') + except UnicodeDecodeError: + try: + s.decode('utf-8') + cs = 'utf-8' + except UnicodeDecodeError: + # We'll go with us-ascii as a fallback. 
+ pass + + msg = email.MIMEText.MIMEText(s, subtype, cs) + if enc: + del msg['Content-Transfer-Encoding'] + msg['Content-Transfer-Encoding'] = enc + return msg + +def _charsets(ui): + '''Obtains charsets to send mail parts not containing patches.''' + charsets = [cs.lower() for cs in ui.configlist('email', 'charsets')] + fallbacks = [encoding.fallbackencoding.lower(), + encoding.encoding.lower(), 'utf-8'] + for cs in fallbacks: # find unique charsets while keeping order + if cs not in charsets: + charsets.append(cs) + return [cs for cs in charsets if not cs.endswith('ascii')] + +def _encode(ui, s, charsets): + '''Returns (converted) string, charset tuple. + Finds out best charset by cycling through sendcharsets in descending + order. Tries both encoding and fallbackencoding for input. Only as + last resort send as is in fake ascii. + Caveat: Do not use for mail parts containing patches!''' + try: + s.decode('ascii') + except UnicodeDecodeError: + sendcharsets = charsets or _charsets(ui) + for ics in (encoding.encoding, encoding.fallbackencoding): + try: + u = s.decode(ics) + except UnicodeDecodeError: + continue + for ocs in sendcharsets: + try: + return u.encode(ocs), ocs + except UnicodeEncodeError: + pass + except LookupError: + ui.warn(_('ignoring invalid sendcharset: %s\n') % ocs) + # if ascii, or all conversion attempts fail, send (broken) ascii + return s, 'us-ascii' + +def headencode(ui, s, charsets=None, display=False): + '''Returns RFC-2047 compliant header from given string.''' + if not display: + # split into words? 
+ s, cs = _encode(ui, s, charsets) + return str(email.Header.Header(s, cs)) + return s + +def _addressencode(ui, name, addr, charsets=None): + name = headencode(ui, name, charsets) + try: + acc, dom = addr.split('@') + acc = acc.encode('ascii') + dom = dom.decode(encoding.encoding).encode('idna') + addr = '%s@%s' % (acc, dom) + except UnicodeDecodeError: + raise util.Abort(_('invalid email address: %s') % addr) + except ValueError: + try: + # too strict? + addr = addr.encode('ascii') + except UnicodeDecodeError: + raise util.Abort(_('invalid local address: %s') % addr) + return email.Utils.formataddr((name, addr)) + +def addressencode(ui, address, charsets=None, display=False): + '''Turns address into RFC-2047 compliant header.''' + if display or not address: + return address or '' + name, addr = email.Utils.parseaddr(address) + return _addressencode(ui, name, addr, charsets) + +def addrlistencode(ui, addrs, charsets=None, display=False): + '''Turns a list of addresses into a list of RFC-2047 compliant headers. 
+ A single element of input list may contain multiple addresses, but output + always has one address per item''' + if display: + return [a.strip() for a in addrs if a.strip()] + + result = [] + for name, addr in email.Utils.getaddresses(addrs): + if name or addr: + result.append(_addressencode(ui, name, addr, charsets)) + return result + +def mimeencode(ui, s, charsets=None, display=False): + '''creates mime text object, encodes it if needed, and sets + charset and transfer-encoding accordingly.''' + cs = 'us-ascii' + if not display: + s, cs = _encode(ui, s, charsets) + return email.MIMEText.MIMEText(s, 'plain', cs) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mail.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mail.pyo Binary files differnew file mode 100644 index 0000000..8111e74 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mail.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/manifest.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/manifest.py new file mode 100644 index 0000000..0d008e8 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/manifest.py @@ -0,0 +1,196 @@ +# manifest.py - manifest revision class for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import mdiff, parsers, error, revlog +import array, struct + +class manifestdict(dict): + def __init__(self, mapping=None, flags=None): + if mapping is None: + mapping = {} + if flags is None: + flags = {} + dict.__init__(self, mapping) + self._flags = flags + def flags(self, f): + return self._flags.get(f, "") + def set(self, f, flags): + self._flags[f] = flags + def copy(self): + return manifestdict(self, dict.copy(self._flags)) + +class manifest(revlog.revlog): + def __init__(self, opener): + self._mancache = None + revlog.revlog.__init__(self, opener, "00manifest.i") + + def parse(self, lines): + mfdict = manifestdict() + parsers.parse_manifest(mfdict, mfdict._flags, lines) + return mfdict + + def readdelta(self, node): + r = self.rev(node) + return self.parse(mdiff.patchtext(self.revdiff(self.deltaparent(r), r))) + + def read(self, node): + if node == revlog.nullid: + return manifestdict() # don't upset local cache + if self._mancache and self._mancache[0] == node: + return self._mancache[1] + text = self.revision(node) + arraytext = array.array('c', text) + mapping = self.parse(text) + self._mancache = (node, mapping, arraytext) + return mapping + + def _search(self, m, s, lo=0, hi=None): + '''return a tuple (start, end) that says where to find s within m. + + If the string is found m[start:end] are the line containing + that string. If start == end the string was not found and + they indicate the proper sorted insertion point. This was + taken from bisect_left, and modified to find line start/end as + it goes along. 
+ + m should be a buffer or a string + s is a string''' + def advance(i, c): + while i < lenm and m[i] != c: + i += 1 + return i + if not s: + return (lo, lo) + lenm = len(m) + if not hi: + hi = lenm + while lo < hi: + mid = (lo + hi) // 2 + start = mid + while start > 0 and m[start - 1] != '\n': + start -= 1 + end = advance(start, '\0') + if m[start:end] < s: + # we know that after the null there are 40 bytes of sha1 + # this translates to the bisect lo = mid + 1 + lo = advance(end + 40, '\n') + 1 + else: + # this translates to the bisect hi = mid + hi = start + end = advance(lo, '\0') + found = m[lo:end] + if s == found: + # we know that after the null there are 40 bytes of sha1 + end = advance(end + 40, '\n') + return (lo, end + 1) + else: + return (lo, lo) + + def find(self, node, f): + '''look up entry for a single file efficiently. + return (node, flags) pair if found, (None, None) if not.''' + if self._mancache and self._mancache[0] == node: + return self._mancache[1].get(f), self._mancache[1].flags(f) + text = self.revision(node) + start, end = self._search(text, f) + if start == end: + return None, None + l = text[start:end] + f, n = l.split('\0') + return revlog.bin(n[:40]), n[40:-1] + + def add(self, map, transaction, link, p1=None, p2=None, + changed=None): + # apply the changes collected during the bisect loop to our addlist + # return a delta suitable for addrevision + def addlistdelta(addlist, x): + # start from the bottom up + # so changes to the offsets don't mess things up. 
+ for start, end, content in reversed(x): + if content: + addlist[start:end] = array.array('c', content) + else: + del addlist[start:end] + return "".join(struct.pack(">lll", start, end, len(content)) + content + for start, end, content in x) + + def checkforbidden(l): + for f in l: + if '\n' in f or '\r' in f: + raise error.RevlogError( + _("'\\n' and '\\r' disallowed in filenames: %r") % f) + + # if we're using the cache, make sure it is valid and + # parented by the same node we're diffing against + if not (changed and self._mancache and p1 and self._mancache[0] == p1): + files = sorted(map) + checkforbidden(files) + + # if this is changed to support newlines in filenames, + # be sure to check the templates/ dir again (especially *-raw.tmpl) + hex, flags = revlog.hex, map.flags + text = ''.join("%s\000%s%s\n" % (f, hex(map[f]), flags(f)) + for f in files) + arraytext = array.array('c', text) + cachedelta = None + else: + added, removed = changed + addlist = self._mancache[2] + + checkforbidden(added) + # combine the changed lists into one list for sorting + work = [(x, False) for x in added] + work.extend((x, True) for x in removed) + # this could use heapq.merge() (from python2.6+) or equivalent + # since the lists are already sorted + work.sort() + + delta = [] + dstart = None + dend = None + dline = [""] + start = 0 + # zero copy representation of addlist as a buffer + addbuf = buffer(addlist) + + # start with a readonly loop that finds the offset of + # each line and creates the deltas + for f, todelete in work: + # bs will either be the index of the item or the insert point + start, end = self._search(addbuf, f, start) + if not todelete: + l = "%s\000%s%s\n" % (f, revlog.hex(map[f]), map.flags(f)) + else: + if start == end: + # item we want to delete was not found, error out + raise AssertionError( + _("failed to remove %s from manifest") % f) + l = "" + if dstart != None and dstart <= start and dend >= start: + if dend < end: + dend = end + if l: + 
dline.append(l) + else: + if dstart != None: + delta.append([dstart, dend, "".join(dline)]) + dstart = start + dend = end + dline = [l] + + if dstart != None: + delta.append([dstart, dend, "".join(dline)]) + # apply the delta to the addlist, and get a delta for addrevision + cachedelta = (self.rev(p1), addlistdelta(addlist, delta)) + arraytext = addlist + text = buffer(arraytext) + + n = self.addrevision(text, transaction, link, p1, p2, cachedelta) + self._mancache = (n, map, arraytext) + + return n diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/manifest.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/manifest.pyo Binary files differnew file mode 100644 index 0000000..bcabb44 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/manifest.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/match.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/match.py new file mode 100644 index 0000000..33c42b4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/match.py @@ -0,0 +1,296 @@ +# match.py - filename matching +# +# Copyright 2008, 2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import re +import util +from i18n import _ + +class match(object): + def __init__(self, root, cwd, patterns, include=[], exclude=[], + default='glob', exact=False, auditor=None): + """build an object to match a set of file patterns + + arguments: + root - the canonical root of the tree you're matching against + cwd - the current working directory, if relevant + patterns - patterns to find + include - patterns to include + exclude - patterns to exclude + default - if a pattern in names has no explicit type, assume this one + exact - patterns are actually literals + + a pattern is one of: + 'glob:<glob>' - a glob relative to cwd + 're:<regexp>' - a regular expression + 'path:<path>' - a path relative to canonroot + 'relglob:<glob>' - an unrooted glob (*.c matches C files in all dirs) + 'relpath:<path>' - a path relative to cwd + 'relre:<regexp>' - a regexp that needn't match the start of a name + '<something>' - a pattern of the specified default type + """ + + self._root = root + self._cwd = cwd + self._files = [] + self._anypats = bool(include or exclude) + + if include: + im = _buildmatch(_normalize(include, 'glob', root, cwd, auditor), + '(?:/|$)') + if exclude: + em = _buildmatch(_normalize(exclude, 'glob', root, cwd, auditor), + '(?:/|$)') + if exact: + self._files = patterns + pm = self.exact + elif patterns: + pats = _normalize(patterns, default, root, cwd, auditor) + self._files = _roots(pats) + self._anypats = self._anypats or _anypats(pats) + pm = _buildmatch(pats, '$') + + if patterns or exact: + if include: + if exclude: + m = lambda f: im(f) and not em(f) and pm(f) + else: + m = lambda f: im(f) and pm(f) + else: + if exclude: + m = lambda f: not em(f) and pm(f) + else: + m = pm + else: + if include: + if exclude: + m = lambda f: im(f) and not em(f) + else: + m = im + else: + if exclude: + m = lambda f: not em(f) + else: + m = lambda f: True + + self.matchfn = m + self._fmap = set(self._files) + + def __call__(self, fn): + return self.matchfn(fn) + 
def __iter__(self): + for f in self._files: + yield f + def bad(self, f, msg): + '''callback for each explicit file that can't be + found/accessed, with an error message + ''' + pass + def dir(self, f): + pass + def missing(self, f): + pass + def exact(self, f): + return f in self._fmap + def rel(self, f): + return util.pathto(self._root, self._cwd, f) + def files(self): + return self._files + def anypats(self): + return self._anypats + +class exact(match): + def __init__(self, root, cwd, files): + match.__init__(self, root, cwd, files, exact = True) + +class always(match): + def __init__(self, root, cwd): + match.__init__(self, root, cwd, []) + +class narrowmatcher(match): + """Adapt a matcher to work on a subdirectory only. + + The paths are remapped to remove/insert the path as needed: + + >>> m1 = match('root', '', ['a.txt', 'sub/b.txt']) + >>> m2 = narrowmatcher('sub', m1) + >>> bool(m2('a.txt')) + False + >>> bool(m2('b.txt')) + True + >>> bool(m2.matchfn('a.txt')) + False + >>> bool(m2.matchfn('b.txt')) + True + >>> m2.files() + ['b.txt'] + >>> m2.exact('b.txt') + True + >>> m2.rel('b.txt') + 'b.txt' + >>> def bad(f, msg): + ... 
print "%s: %s" % (f, msg) + >>> m1.bad = bad + >>> m2.bad('x.txt', 'No such file') + sub/x.txt: No such file + """ + + def __init__(self, path, matcher): + self._root = matcher._root + self._cwd = matcher._cwd + self._path = path + self._matcher = matcher + + self._files = [f[len(path) + 1:] for f in matcher._files + if f.startswith(path + "/")] + self._anypats = matcher._anypats + self.matchfn = lambda fn: matcher.matchfn(self._path + "/" + fn) + self._fmap = set(self._files) + + def bad(self, f, msg): + self._matcher.bad(self._path + "/" + f, msg) + +def patkind(pat): + return _patsplit(pat, None)[0] + +def _patsplit(pat, default): + """Split a string into an optional pattern kind prefix and the + actual pattern.""" + if ':' in pat: + kind, val = pat.split(':', 1) + if kind in ('re', 'glob', 'path', 'relglob', 'relpath', 'relre'): + return kind, val + return default, pat + +def _globre(pat): + "convert a glob pattern into a regexp" + i, n = 0, len(pat) + res = '' + group = 0 + escape = re.escape + def peek(): + return i < n and pat[i] + while i < n: + c = pat[i] + i += 1 + if c not in '*?[{},\\': + res += escape(c) + elif c == '*': + if peek() == '*': + i += 1 + res += '.*' + else: + res += '[^/]*' + elif c == '?': + res += '.' 
+ elif c == '[': + j = i + if j < n and pat[j] in '!]': + j += 1 + while j < n and pat[j] != ']': + j += 1 + if j >= n: + res += '\\[' + else: + stuff = pat[i:j].replace('\\','\\\\') + i = j + 1 + if stuff[0] == '!': + stuff = '^' + stuff[1:] + elif stuff[0] == '^': + stuff = '\\' + stuff + res = '%s[%s]' % (res, stuff) + elif c == '{': + group += 1 + res += '(?:' + elif c == '}' and group: + res += ')' + group -= 1 + elif c == ',' and group: + res += '|' + elif c == '\\': + p = peek() + if p: + i += 1 + res += escape(p) + else: + res += escape(c) + else: + res += escape(c) + return res + +def _regex(kind, name, tail): + '''convert a pattern into a regular expression''' + if not name: + return '' + if kind == 're': + return name + elif kind == 'path': + return '^' + re.escape(name) + '(?:/|$)' + elif kind == 'relglob': + return '(?:|.*/)' + _globre(name) + tail + elif kind == 'relpath': + return re.escape(name) + '(?:/|$)' + elif kind == 'relre': + if name.startswith('^'): + return name + return '.*' + name + return _globre(name) + tail + +def _buildmatch(pats, tail): + """build a matching function from a set of patterns""" + try: + pat = '(?:%s)' % '|'.join([_regex(k, p, tail) for (k, p) in pats]) + if len(pat) > 20000: + raise OverflowError() + return re.compile(pat).match + except OverflowError: + # We're using a Python with a tiny regex engine and we + # made it explode, so we'll divide the pattern list in two + # until it works + l = len(pats) + if l < 2: + raise + a, b = _buildmatch(pats[:l//2], tail), _buildmatch(pats[l//2:], tail) + return lambda s: a(s) or b(s) + except re.error: + for k, p in pats: + try: + re.compile('(?:%s)' % _regex(k, p, tail)) + except re.error: + raise util.Abort(_("invalid pattern (%s): %s") % (k, p)) + raise util.Abort(_("invalid pattern")) + +def _normalize(names, default, root, cwd, auditor): + pats = [] + for kind, name in [_patsplit(p, default) for p in names]: + if kind in ('glob', 'relpath'): + name = util.canonpath(root, 
cwd, name, auditor) + elif kind in ('relglob', 'path'): + name = util.normpath(name) + + pats.append((kind, name)) + return pats + +def _roots(patterns): + r = [] + for kind, name in patterns: + if kind == 'glob': # find the non-glob prefix + root = [] + for p in name.split('/'): + if '[' in p or '{' in p or '*' in p or '?' in p: + break + root.append(p) + r.append('/'.join(root) or '.') + elif kind in ('relpath', 'path'): + r.append(name or '.') + elif kind == 'relglob': + r.append('.') + return r + +def _anypats(patterns): + for kind, name in patterns: + if kind in ('glob', 're', 'relglob', 'relre'): + return True diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/match.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/match.pyo Binary files differnew file mode 100644 index 0000000..c88ac3f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/match.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mdiff.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mdiff.py new file mode 100644 index 0000000..4d1e760 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mdiff.py @@ -0,0 +1,277 @@ +# mdiff.py - diff and patch routines for mercurial +# +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import bdiff, mpatch, util +import re, struct + +def splitnewlines(text): + '''like str.splitlines, but only split on newlines.''' + lines = [l + '\n' for l in text.split('\n')] + if lines: + if lines[-1] == '\n': + lines.pop() + else: + lines[-1] = lines[-1][:-1] + return lines + +class diffopts(object): + '''context is the number of context lines + text treats all files as text + showfunc enables diff -p output + git enables the git extended patch format + nodates removes dates from diff headers + ignorews ignores all whitespace changes in the diff + ignorewsamount ignores changes in the amount of whitespace + ignoreblanklines ignores changes whose lines are all blank + upgrade generates git diffs to avoid data loss + ''' + + defaults = { + 'context': 3, + 'text': False, + 'showfunc': False, + 'git': False, + 'nodates': False, + 'ignorews': False, + 'ignorewsamount': False, + 'ignoreblanklines': False, + 'upgrade': False, + } + + __slots__ = defaults.keys() + + def __init__(self, **opts): + for k in self.__slots__: + v = opts.get(k) + if v is None: + v = self.defaults[k] + setattr(self, k, v) + + try: + self.context = int(self.context) + except ValueError: + raise util.Abort(_('diff context lines count must be ' + 'an integer, not %r') % self.context) + + def copy(self, **kwargs): + opts = dict((k, getattr(self, k)) for k in self.defaults) + opts.update(kwargs) + return diffopts(**opts) + +defaultopts = diffopts() + +def wsclean(opts, text, blank=True): + if opts.ignorews: + text = re.sub('[ \t\r]+', '', text) + elif opts.ignorewsamount: + text = re.sub('[ \t\r]+', ' ', text) + text = text.replace(' \n', '\n') + if blank and opts.ignoreblanklines: + text = re.sub('\n+', '', text) + return text + +def diffline(revs, a, b, opts): + parts = ['diff'] + if opts.git: + parts.append('--git') + if revs and not opts.git: + parts.append(' '.join(["-r %s" % rev for rev in revs])) + if opts.git: + parts.append('a/%s' % a) + parts.append('b/%s' % b) + 
else: + parts.append(a) + return ' '.join(parts) + '\n' + +def unidiff(a, ad, b, bd, fn1, fn2, r=None, opts=defaultopts): + def datetag(date, addtab=True): + if not opts.git and not opts.nodates: + return '\t%s\n' % date + if addtab and ' ' in fn1: + return '\t\n' + return '\n' + + if not a and not b: + return "" + epoch = util.datestr((0, 0)) + + if not opts.text and (util.binary(a) or util.binary(b)): + if a and b and len(a) == len(b) and a == b: + return "" + l = ['Binary file %s has changed\n' % fn1] + elif not a: + b = splitnewlines(b) + if a is None: + l1 = '--- /dev/null%s' % datetag(epoch, False) + else: + l1 = "--- %s%s" % ("a/" + fn1, datetag(ad)) + l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd)) + l3 = "@@ -0,0 +1,%d @@\n" % len(b) + l = [l1, l2, l3] + ["+" + e for e in b] + elif not b: + a = splitnewlines(a) + l1 = "--- %s%s" % ("a/" + fn1, datetag(ad)) + if b is None: + l2 = '+++ /dev/null%s' % datetag(epoch, False) + else: + l2 = "+++ %s%s" % ("b/" + fn2, datetag(bd)) + l3 = "@@ -1,%d +0,0 @@\n" % len(a) + l = [l1, l2, l3] + ["-" + e for e in a] + else: + al = splitnewlines(a) + bl = splitnewlines(b) + l = list(_unidiff(a, b, al, bl, opts=opts)) + if not l: + return "" + + l.insert(0, "--- a/%s%s" % (fn1, datetag(ad))) + l.insert(1, "+++ b/%s%s" % (fn2, datetag(bd))) + + for ln in xrange(len(l)): + if l[ln][-1] != '\n': + l[ln] += "\n\ No newline at end of file\n" + + if r: + l.insert(0, diffline(r, fn1, fn2, opts)) + + return "".join(l) + +# creates a headerless unified diff +# t1 and t2 are the text to be diffed +# l1 and l2 are the text broken up into lines +def _unidiff(t1, t2, l1, l2, opts=defaultopts): + def contextend(l, len): + ret = l + opts.context + if ret > len: + ret = len + return ret + + def contextstart(l): + ret = l - opts.context + if ret < 0: + return 0 + return ret + + def yieldhunk(hunk): + (astart, a2, bstart, b2, delta) = hunk + aend = contextend(a2, len(l1)) + alen = aend - astart + blen = b2 - bstart + aend - a2 + + func = "" + if 
opts.showfunc: + # walk backwards from the start of the context + # to find a line starting with an alphanumeric char. + for x in xrange(astart - 1, -1, -1): + t = l1[x].rstrip() + if funcre.match(t): + func = ' ' + t[:40] + break + + yield "@@ -%d,%d +%d,%d @@%s\n" % (astart + 1, alen, + bstart + 1, blen, func) + for x in delta: + yield x + for x in xrange(a2, aend): + yield ' ' + l1[x] + + if opts.showfunc: + funcre = re.compile('\w') + + # bdiff.blocks gives us the matching sequences in the files. The loop + # below finds the spaces between those matching sequences and translates + # them into diff output. + # + if opts.ignorews or opts.ignorewsamount: + t1 = wsclean(opts, t1, False) + t2 = wsclean(opts, t2, False) + + diff = bdiff.blocks(t1, t2) + hunk = None + for i, s1 in enumerate(diff): + # The first match is special. + # we've either found a match starting at line 0 or a match later + # in the file. If it starts later, old and new below will both be + # empty and we'll continue to the next match. + if i > 0: + s = diff[i - 1] + else: + s = [0, 0, 0, 0] + delta = [] + a1 = s[1] + a2 = s1[0] + b1 = s[3] + b2 = s1[2] + + old = l1[a1:a2] + new = l2[b1:b2] + + # bdiff sometimes gives huge matches past eof, this check eats them, + # and deals with the special first match case described above + if not old and not new: + continue + + if opts.ignoreblanklines: + if wsclean(opts, "".join(old)) == wsclean(opts, "".join(new)): + continue + + astart = contextstart(a1) + bstart = contextstart(b1) + prev = None + if hunk: + # join with the previous hunk if it falls inside the context + if astart < hunk[1] + opts.context + 1: + prev = hunk + astart = hunk[1] + bstart = hunk[3] + else: + for x in yieldhunk(hunk): + yield x + if prev: + # we've joined the previous hunk, record the new ending points. 
+ hunk[1] = a2 + hunk[3] = b2 + delta = hunk[4] + else: + # create a new hunk + hunk = [astart, a2, bstart, b2, delta] + + delta[len(delta):] = [' ' + x for x in l1[astart:a1]] + delta[len(delta):] = ['-' + x for x in old] + delta[len(delta):] = ['+' + x for x in new] + + if hunk: + for x in yieldhunk(hunk): + yield x + +def patchtext(bin): + pos = 0 + t = [] + while pos < len(bin): + p1, p2, l = struct.unpack(">lll", bin[pos:pos + 12]) + pos += 12 + t.append(bin[pos:pos + l]) + pos += l + return "".join(t) + +def patch(a, bin): + if len(a) == 0: + # skip over trivial delta header + return buffer(bin, 12) + return mpatch.patches(a, [bin]) + +# similar to difflib.SequenceMatcher.get_matching_blocks +def get_matching_blocks(a, b): + return [(d[0], d[2], d[1] - d[0]) for d in bdiff.blocks(a, b)] + +def trivialdiffheader(length): + return struct.pack(">lll", 0, 0, length) + +patches = mpatch.patches +patchedsize = mpatch.patchedsize +textdiff = bdiff.bdiff diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mdiff.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mdiff.pyo Binary files differnew file mode 100644 index 0000000..ef8112b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mdiff.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/merge.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/merge.py new file mode 100644 index 0000000..ebf9d00 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/merge.py @@ -0,0 +1,544 @@ +# merge.py - directory-level update/merge handling for Mercurial +# +# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from node import nullid, nullrev, hex, bin +from i18n import _ +import util, filemerge, copies, subrepo +import errno, os, shutil + +class mergestate(object): + '''track 3-way merge state of individual files''' + def __init__(self, repo): + self._repo = repo + self._dirty = False + self._read() + def reset(self, node=None): + self._state = {} + if node: + self._local = node + shutil.rmtree(self._repo.join("merge"), True) + self._dirty = False + def _read(self): + self._state = {} + try: + f = self._repo.opener("merge/state") + for i, l in enumerate(f): + if i == 0: + self._local = bin(l[:-1]) + else: + bits = l[:-1].split("\0") + self._state[bits[0]] = bits[1:] + except IOError, err: + if err.errno != errno.ENOENT: + raise + self._dirty = False + def commit(self): + if self._dirty: + f = self._repo.opener("merge/state", "w") + f.write(hex(self._local) + "\n") + for d, v in self._state.iteritems(): + f.write("\0".join([d] + v) + "\n") + self._dirty = False + def add(self, fcl, fco, fca, fd, flags): + hash = util.sha1(fcl.path()).hexdigest() + self._repo.opener("merge/" + hash, "w").write(fcl.data()) + self._state[fd] = ['u', hash, fcl.path(), fca.path(), + hex(fca.filenode()), fco.path(), flags] + self._dirty = True + def __contains__(self, dfile): + return dfile in self._state + def __getitem__(self, dfile): + return self._state[dfile][0] + def __iter__(self): + l = self._state.keys() + l.sort() + for f in l: + yield f + def mark(self, dfile, state): + self._state[dfile][0] = state + self._dirty = True + def resolve(self, dfile, wctx, octx): + if self[dfile] == 'r': + return 0 + state, hash, lfile, afile, anode, ofile, flags = self._state[dfile] + f = self._repo.opener("merge/" + hash) + self._repo.wwrite(dfile, f.read(), flags) + fcd = wctx[dfile] + fco = octx[ofile] + fca = self._repo.filectx(afile, fileid=anode) + r = filemerge.filemerge(self._repo, self._local, lfile, fcd, fco, fca) + if not r: + self.mark(dfile, 'r') + return r + +def _checkunknown(wctx, 
mctx): + "check for collisions between unknown files and files in mctx" + for f in wctx.unknown(): + if f in mctx and mctx[f].cmp(wctx[f]): + raise util.Abort(_("untracked file in working directory differs" + " from file in requested revision: '%s'") % f) + +def _checkcollision(mctx): + "check for case folding collisions in the destination context" + folded = {} + for fn in mctx: + fold = fn.lower() + if fold in folded: + raise util.Abort(_("case-folding collision between %s and %s") + % (fn, folded[fold])) + folded[fold] = fn + +def _forgetremoved(wctx, mctx, branchmerge): + """ + Forget removed files + + If we're jumping between revisions (as opposed to merging), and if + neither the working directory nor the target rev has the file, + then we need to remove it from the dirstate, to prevent the + dirstate from listing the file when it is no longer in the + manifest. + + If we're merging, and the other revision has removed a file + that is not present in the working directory, we need to mark it + as removed. 
+ """ + + action = [] + state = branchmerge and 'r' or 'f' + for f in wctx.deleted(): + if f not in mctx: + action.append((f, state)) + + if not branchmerge: + for f in wctx.removed(): + if f not in mctx: + action.append((f, "f")) + + return action + +def manifestmerge(repo, p1, p2, pa, overwrite, partial): + """ + Merge p1 and p2 with ancestor pa and generate merge action list + + overwrite = whether we clobber working files + partial = function to filter file lists + """ + + def fmerge(f, f2, fa): + """merge flags""" + a, m, n = ma.flags(fa), m1.flags(f), m2.flags(f2) + if m == n: # flags agree + return m # unchanged + if m and n and not a: # flags set, don't agree, differ from parent + r = repo.ui.promptchoice( + _(" conflicting flags for %s\n" + "(n)one, e(x)ec or sym(l)ink?") % f, + (_("&None"), _("E&xec"), _("Sym&link")), 0) + if r == 1: + return "x" # Exec + if r == 2: + return "l" # Symlink + return "" + if m and m != a: # changed from a to m + return m + if n and n != a: # changed from a to n + return n + return '' # flag was cleared + + def act(msg, m, f, *args): + repo.ui.debug(" %s: %s -> %s\n" % (f, msg, m)) + action.append((f, m) + args) + + action, copy = [], {} + + if overwrite: + pa = p1 + elif pa == p2: # backwards + pa = p1.p1() + elif pa and repo.ui.configbool("merge", "followcopies", True): + dirs = repo.ui.configbool("merge", "followdirs", True) + copy, diverge = copies.copies(repo, p1, p2, pa, dirs) + for of, fl in diverge.iteritems(): + act("divergent renames", "dr", of, fl) + + repo.ui.note(_("resolving manifests\n")) + repo.ui.debug(" overwrite %s partial %s\n" % (overwrite, bool(partial))) + repo.ui.debug(" ancestor %s local %s remote %s\n" % (pa, p1, p2)) + + m1, m2, ma = p1.manifest(), p2.manifest(), pa.manifest() + copied = set(copy.values()) + + if '.hgsubstate' in m1: + # check whether sub state is modified + for s in p1.substate: + if p1.sub(s).dirty(): + m1['.hgsubstate'] += "+" + break + + # Compare manifests + for f, n in 
m1.iteritems(): + if partial and not partial(f): + continue + if f in m2: + rflags = fmerge(f, f, f) + a = ma.get(f, nullid) + if n == m2[f] or m2[f] == a: # same or local newer + # is file locally modified or flags need changing? + # dirstate flags may need to be made current + if m1.flags(f) != rflags or n[20:]: + act("update permissions", "e", f, rflags) + elif n == a: # remote newer + act("remote is newer", "g", f, rflags) + else: # both changed + act("versions differ", "m", f, f, f, rflags, False) + elif f in copied: # files we'll deal with on m2 side + pass + elif f in copy: + f2 = copy[f] + if f2 not in m2: # directory rename + act("remote renamed directory to " + f2, "d", + f, None, f2, m1.flags(f)) + else: # case 2 A,B/B/B or case 4,21 A/B/B + act("local copied/moved to " + f2, "m", + f, f2, f, fmerge(f, f2, f2), False) + elif f in ma: # clean, a different, no remote + if n != ma[f]: + if repo.ui.promptchoice( + _(" local changed %s which remote deleted\n" + "use (c)hanged version or (d)elete?") % f, + (_("&Changed"), _("&Delete")), 0): + act("prompt delete", "r", f) + else: + act("prompt keep", "a", f) + elif n[20:] == "a": # added, no remote + act("remote deleted", "f", f) + elif n[20:] != "u": + act("other deleted", "r", f) + + for f, n in m2.iteritems(): + if partial and not partial(f): + continue + if f in m1 or f in copied: # files already visited + continue + if f in copy: + f2 = copy[f] + if f2 not in m1: # directory rename + act("local renamed directory to " + f2, "d", + None, f, f2, m2.flags(f)) + elif f2 in m2: # rename case 1, A/A,B/A + act("remote copied to " + f, "m", + f2, f, f, fmerge(f2, f, f2), False) + else: # case 3,20 A/B/A + act("remote moved to " + f, "m", + f2, f, f, fmerge(f2, f, f2), True) + elif f not in ma: + act("remote created", "g", f, m2.flags(f)) + elif n != ma[f]: + if repo.ui.promptchoice( + _("remote changed %s which local deleted\n" + "use (c)hanged version or leave (d)eleted?") % f, + (_("&Changed"), _("&Deleted")), 0) 
== 0: + act("prompt recreating", "g", f, m2.flags(f)) + + return action + +def actionkey(a): + return a[1] == 'r' and -1 or 0, a + +def applyupdates(repo, action, wctx, mctx, actx): + """apply the merge action list to the working directory + + wctx is the working copy context + mctx is the context to be merged into the working copy + actx is the context of the common ancestor + """ + + updated, merged, removed, unresolved = 0, 0, 0, 0 + ms = mergestate(repo) + ms.reset(wctx.parents()[0].node()) + moves = [] + action.sort(key=actionkey) + substate = wctx.substate # prime + + # prescan for merges + u = repo.ui + for a in action: + f, m = a[:2] + if m == 'm': # merge + f2, fd, flags, move = a[2:] + if f == '.hgsubstate': # merged internally + continue + repo.ui.debug("preserving %s for resolve of %s\n" % (f, fd)) + fcl = wctx[f] + fco = mctx[f2] + if mctx == actx: # backwards, use working dir parent as ancestor + if fcl.parents(): + fca = fcl.parents()[0] + else: + fca = repo.filectx(f, fileid=nullrev) + else: + fca = fcl.ancestor(fco, actx) + if not fca: + fca = repo.filectx(f, fileid=nullrev) + ms.add(fcl, fco, fca, fd, flags) + if f != fd and move: + moves.append(f) + + # remove renamed files after safely stored + for f in moves: + if os.path.lexists(repo.wjoin(f)): + repo.ui.debug("removing %s\n" % f) + os.unlink(repo.wjoin(f)) + + audit_path = util.path_auditor(repo.root) + + numupdates = len(action) + for i, a in enumerate(action): + f, m = a[:2] + u.progress(_('updating'), i + 1, item=f, total=numupdates, + unit=_('files')) + if f and f[0] == "/": + continue + if m == "r": # remove + repo.ui.note(_("removing %s\n") % f) + audit_path(f) + if f == '.hgsubstate': # subrepo states need updating + subrepo.submerge(repo, wctx, mctx, wctx) + try: + util.unlink(repo.wjoin(f)) + except OSError, inst: + if inst.errno != errno.ENOENT: + repo.ui.warn(_("update failed to remove %s: %s!\n") % + (f, inst.strerror)) + removed += 1 + elif m == "m": # merge + if f == 
'.hgsubstate': # subrepo states need updating + subrepo.submerge(repo, wctx, mctx, wctx.ancestor(mctx)) + continue + f2, fd, flags, move = a[2:] + r = ms.resolve(fd, wctx, mctx) + if r is not None and r > 0: + unresolved += 1 + else: + if r is None: + updated += 1 + else: + merged += 1 + util.set_flags(repo.wjoin(fd), 'l' in flags, 'x' in flags) + if f != fd and move and os.path.lexists(repo.wjoin(f)): + repo.ui.debug("removing %s\n" % f) + os.unlink(repo.wjoin(f)) + elif m == "g": # get + flags = a[2] + repo.ui.note(_("getting %s\n") % f) + t = mctx.filectx(f).data() + repo.wwrite(f, t, flags) + t = None + updated += 1 + if f == '.hgsubstate': # subrepo states need updating + subrepo.submerge(repo, wctx, mctx, wctx) + elif m == "d": # directory rename + f2, fd, flags = a[2:] + if f: + repo.ui.note(_("moving %s to %s\n") % (f, fd)) + t = wctx.filectx(f).data() + repo.wwrite(fd, t, flags) + util.unlink(repo.wjoin(f)) + if f2: + repo.ui.note(_("getting %s to %s\n") % (f2, fd)) + t = mctx.filectx(f2).data() + repo.wwrite(fd, t, flags) + updated += 1 + elif m == "dr": # divergent renames + fl = a[2] + repo.ui.warn(_("note: possible conflict - %s was renamed " + "multiple times to:\n") % f) + for nf in fl: + repo.ui.warn(" %s\n" % nf) + elif m == "e": # exec + flags = a[2] + util.set_flags(repo.wjoin(f), 'l' in flags, 'x' in flags) + ms.commit() + u.progress(_('updating'), None, total=numupdates, unit=_('files')) + + return updated, merged, removed, unresolved + +def recordupdates(repo, action, branchmerge): + "record merge actions to the dirstate" + + for a in action: + f, m = a[:2] + if m == "r": # remove + if branchmerge: + repo.dirstate.remove(f) + else: + repo.dirstate.forget(f) + elif m == "a": # re-add + if not branchmerge: + repo.dirstate.add(f) + elif m == "f": # forget + repo.dirstate.forget(f) + elif m == "e": # exec change + repo.dirstate.normallookup(f) + elif m == "g": # get + if branchmerge: + repo.dirstate.otherparent(f) + else: + repo.dirstate.normal(f) 
+ elif m == "m": # merge + f2, fd, flag, move = a[2:] + if branchmerge: + # We've done a branch merge, mark this file as merged + # so that we properly record the merger later + repo.dirstate.merge(fd) + if f != f2: # copy/rename + if move: + repo.dirstate.remove(f) + if f != fd: + repo.dirstate.copy(f, fd) + else: + repo.dirstate.copy(f2, fd) + else: + # We've update-merged a locally modified file, so + # we set the dirstate to emulate a normal checkout + # of that file some time in the past. Thus our + # merge will appear as a normal local file + # modification. + if f2 == fd: # file not locally copied/moved + repo.dirstate.normallookup(fd) + if move: + repo.dirstate.forget(f) + elif m == "d": # directory rename + f2, fd, flag = a[2:] + if not f2 and f not in repo.dirstate: + # untracked file moved + continue + if branchmerge: + repo.dirstate.add(fd) + if f: + repo.dirstate.remove(f) + repo.dirstate.copy(f, fd) + if f2: + repo.dirstate.copy(f2, fd) + else: + repo.dirstate.normal(fd) + if f: + repo.dirstate.forget(f) + +def update(repo, node, branchmerge, force, partial): + """ + Perform a merge between the working directory and the given node + + node = the node to update to, or None if unspecified + branchmerge = whether to merge between branches + force = whether to force branch merging or file overwriting + partial = a function to filter file lists (dirstate not updated) + + The table below shows all the behaviors of the update command + given the -c and -C or no options, whether the working directory + is dirty, whether a revision is specified, and the relationship of + the parent rev to the target rev (linear, on the same named + branch, or on another named branch). + + This logic is tested by test-update-branches.t. 
+ + -c -C dirty rev | linear same cross + n n n n | ok (1) x + n n n y | ok ok ok + n n y * | merge (2) (2) + n y * * | --- discard --- + y n y * | --- (3) --- + y n n * | --- ok --- + y y * * | --- (4) --- + + x = can't happen + * = don't-care + 1 = abort: crosses branches (use 'hg merge' or 'hg update -c') + 2 = abort: crosses branches (use 'hg merge' to merge or + use 'hg update -C' to discard changes) + 3 = abort: uncommitted local changes + 4 = incompatible options (checked in commands.py) + """ + + onode = node + wlock = repo.wlock() + try: + wc = repo[None] + if node is None: + # tip of current branch + try: + node = repo.branchtags()[wc.branch()] + except KeyError: + if wc.branch() == "default": # no default branch! + node = repo.lookup("tip") # update to tip + else: + raise util.Abort(_("branch %s not found") % wc.branch()) + overwrite = force and not branchmerge + pl = wc.parents() + p1, p2 = pl[0], repo[node] + pa = p1.ancestor(p2) + fp1, fp2, xp1, xp2 = p1.node(), p2.node(), str(p1), str(p2) + fastforward = False + + ### check phase + if not overwrite and len(pl) > 1: + raise util.Abort(_("outstanding uncommitted merges")) + if branchmerge: + if pa == p2: + raise util.Abort(_("merging with a working directory ancestor" + " has no effect")) + elif pa == p1: + if p1.branch() != p2.branch(): + fastforward = True + else: + raise util.Abort(_("nothing to merge (use 'hg update'" + " or check 'hg heads')")) + if not force and (wc.files() or wc.deleted()): + raise util.Abort(_("outstanding uncommitted changes " + "(use 'hg status' to list changes)")) + elif not overwrite: + if pa == p1 or pa == p2: # linear + pass # all good + elif wc.files() or wc.deleted(): + raise util.Abort(_("crosses branches (merge branches or use" + " --clean to discard changes)")) + elif onode is None: + raise util.Abort(_("crosses branches (merge branches or use" + " --check to force update)")) + else: + # Allow jumping branches if clean and specific rev given + overwrite = True + + 
### calculate phase + action = [] + wc.status(unknown=True) # prime cache + if not force: + _checkunknown(wc, p2) + if not util.checkcase(repo.path): + _checkcollision(p2) + action += _forgetremoved(wc, p2, branchmerge) + action += manifestmerge(repo, wc, p2, pa, overwrite, partial) + + ### apply phase + if not branchmerge: # just jump to the new rev + fp1, fp2, xp1, xp2 = fp2, nullid, xp2, '' + if not partial: + repo.hook('preupdate', throw=True, parent1=xp1, parent2=xp2) + + stats = applyupdates(repo, action, wc, p2, pa) + + if not partial: + repo.dirstate.setparents(fp1, fp2) + recordupdates(repo, action, branchmerge) + if not branchmerge and not fastforward: + repo.dirstate.setbranch(p2.branch()) + finally: + wlock.release() + + if not partial: + repo.hook('update', parent1=xp1, parent2=xp2, error=stats[3]) + return stats diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/merge.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/merge.pyo Binary files differnew file mode 100644 index 0000000..bc7d942 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/merge.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/minirst.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/minirst.py new file mode 100644 index 0000000..73a0aa4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/minirst.py @@ -0,0 +1,450 @@ +# minirst.py - minimal reStructuredText parser +# +# Copyright 2009, 2010 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""simplified reStructuredText parser. + +This parser knows just enough about reStructuredText to parse the +Mercurial docstrings. + +It cheats in a major way: nested blocks are not really nested. They +are just indented blocks that look like they are nested. 
This relies +on the user to keep the right indentation for the blocks. + +It only supports a small subset of reStructuredText: + +- sections + +- paragraphs + +- literal blocks + +- definition lists + +- specific admonitions + +- bullet lists (items must start with '-') + +- enumerated lists (no autonumbering) + +- field lists (colons cannot be escaped) + +- option lists (supports only long options without arguments) + +- inline literals (no other inline markup is not recognized) +""" + +import re, sys +import util, encoding +from i18n import _ + + +def replace(text, substs): + utext = text.decode(encoding.encoding) + for f, t in substs: + utext = utext.replace(f, t) + return utext.encode(encoding.encoding) + + +_blockre = re.compile(r"\n(?:\s*\n)+") + +def findblocks(text): + """Find continuous blocks of lines in text. + + Returns a list of dictionaries representing the blocks. Each block + has an 'indent' field and a 'lines' field. + """ + blocks = [] + for b in _blockre.split(text.strip()): + lines = b.splitlines() + indent = min((len(l) - len(l.lstrip())) for l in lines) + lines = [l[indent:] for l in lines] + blocks.append(dict(indent=indent, lines=lines)) + return blocks + + +def findliteralblocks(blocks): + """Finds literal blocks and adds a 'type' field to the blocks. + + Literal blocks are given the type 'literal', all other blocks are + given type the 'paragraph'. 
+ """ + i = 0 + while i < len(blocks): + # Searching for a block that looks like this: + # + # +------------------------------+ + # | paragraph | + # | (ends with "::") | + # +------------------------------+ + # +---------------------------+ + # | indented literal block | + # +---------------------------+ + blocks[i]['type'] = 'paragraph' + if blocks[i]['lines'][-1].endswith('::') and i + 1 < len(blocks): + indent = blocks[i]['indent'] + adjustment = blocks[i + 1]['indent'] - indent + + if blocks[i]['lines'] == ['::']: + # Expanded form: remove block + del blocks[i] + i -= 1 + elif blocks[i]['lines'][-1].endswith(' ::'): + # Partially minimized form: remove space and both + # colons. + blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-3] + else: + # Fully minimized form: remove just one colon. + blocks[i]['lines'][-1] = blocks[i]['lines'][-1][:-1] + + # List items are formatted with a hanging indent. We must + # correct for this here while we still have the original + # information on the indentation of the subsequent literal + # blocks available. + m = _bulletre.match(blocks[i]['lines'][0]) + if m: + indent += m.end() + adjustment -= m.end() + + # Mark the following indented blocks. + while i + 1 < len(blocks) and blocks[i + 1]['indent'] > indent: + blocks[i + 1]['type'] = 'literal' + blocks[i + 1]['indent'] -= adjustment + i += 1 + i += 1 + return blocks + +_bulletre = re.compile(r'(-|[0-9A-Za-z]+\.|\(?[0-9A-Za-z]+\)|\|) ') +_optionre = re.compile(r'^(--[a-z-]+)((?:[ =][a-zA-Z][\w-]*)? +)(.*)$') +_fieldre = re.compile(r':(?![: ])([^:]*)(?<! ):[ ]+(.*)') +_definitionre = re.compile(r'[^ ]') + +def splitparagraphs(blocks): + """Split paragraphs into lists.""" + # Tuples with (list type, item regexp, single line items?). Order + # matters: definition lists has the least specific regexp and must + # come last. 
+ listtypes = [('bullet', _bulletre, True), + ('option', _optionre, True), + ('field', _fieldre, True), + ('definition', _definitionre, False)] + + def match(lines, i, itemre, singleline): + """Does itemre match an item at line i? + + A list item can be followed by an idented line or another list + item (but only if singleline is True). + """ + line1 = lines[i] + line2 = i + 1 < len(lines) and lines[i + 1] or '' + if not itemre.match(line1): + return False + if singleline: + return line2 == '' or line2[0] == ' ' or itemre.match(line2) + else: + return line2.startswith(' ') + + i = 0 + while i < len(blocks): + if blocks[i]['type'] == 'paragraph': + lines = blocks[i]['lines'] + for type, itemre, singleline in listtypes: + if match(lines, 0, itemre, singleline): + items = [] + for j, line in enumerate(lines): + if match(lines, j, itemre, singleline): + items.append(dict(type=type, lines=[], + indent=blocks[i]['indent'])) + items[-1]['lines'].append(line) + blocks[i:i + 1] = items + break + i += 1 + return blocks + + +_fieldwidth = 12 + +def updatefieldlists(blocks): + """Find key and maximum key width for field lists.""" + i = 0 + while i < len(blocks): + if blocks[i]['type'] != 'field': + i += 1 + continue + + keywidth = 0 + j = i + while j < len(blocks) and blocks[j]['type'] == 'field': + m = _fieldre.match(blocks[j]['lines'][0]) + key, rest = m.groups() + blocks[j]['lines'][0] = rest + blocks[j]['key'] = key + keywidth = max(keywidth, len(key)) + j += 1 + + for block in blocks[i:j]: + block['keywidth'] = keywidth + i = j + 1 + + return blocks + + +def prunecontainers(blocks, keep): + """Prune unwanted containers. + + The blocks must have a 'type' field, i.e., they should have been + run through findliteralblocks first. + """ + pruned = [] + i = 0 + while i + 1 < len(blocks): + # Searching for a block that looks like this: + # + # +-------+---------------------------+ + # | ".. 
container ::" type | + # +---+ | + # | blocks | + # +-------------------------------+ + if (blocks[i]['type'] == 'paragraph' and + blocks[i]['lines'][0].startswith('.. container::')): + indent = blocks[i]['indent'] + adjustment = blocks[i + 1]['indent'] - indent + containertype = blocks[i]['lines'][0][15:] + prune = containertype not in keep + if prune: + pruned.append(containertype) + + # Always delete "..container:: type" block + del blocks[i] + j = i + while j < len(blocks) and blocks[j]['indent'] > indent: + if prune: + del blocks[j] + i -= 1 # adjust outer index + else: + blocks[j]['indent'] -= adjustment + j += 1 + i += 1 + return blocks, pruned + + +_sectionre = re.compile(r"""^([-=`:.'"~^_*+#])\1+$""") + +def findsections(blocks): + """Finds sections. + + The blocks must have a 'type' field, i.e., they should have been + run through findliteralblocks first. + """ + for block in blocks: + # Searching for a block that looks like this: + # + # +------------------------------+ + # | Section title | + # | ------------- | + # +------------------------------+ + if (block['type'] == 'paragraph' and + len(block['lines']) == 2 and + encoding.colwidth(block['lines'][0]) == len(block['lines'][1]) and + _sectionre.match(block['lines'][1])): + block['underline'] = block['lines'][1][0] + block['type'] = 'section' + del block['lines'][1] + return blocks + + +def inlineliterals(blocks): + substs = [('``', '"')] + for b in blocks: + if b['type'] in ('paragraph', 'section'): + b['lines'] = [replace(l, substs) for l in b['lines']] + return blocks + + +def hgrole(blocks): + substs = [(':hg:`', '"hg '), ('`', '"')] + for b in blocks: + if b['type'] in ('paragraph', 'section'): + # Turn :hg:`command` into "hg command". This also works + # when there is a line break in the command and relies on + # the fact that we have no stray back-quotes in the input + # (run the blocks through inlineliterals first). 
+ b['lines'] = [replace(l, substs) for l in b['lines']] + return blocks + + +def addmargins(blocks): + """Adds empty blocks for vertical spacing. + + This groups bullets, options, and definitions together with no vertical + space between them, and adds an empty block between all other blocks. + """ + i = 1 + while i < len(blocks): + if (blocks[i]['type'] == blocks[i - 1]['type'] and + blocks[i]['type'] in ('bullet', 'option', 'field')): + i += 1 + else: + blocks.insert(i, dict(lines=[''], indent=0, type='margin')) + i += 2 + return blocks + +def prunecomments(blocks): + """Remove comments.""" + i = 0 + while i < len(blocks): + b = blocks[i] + if b['type'] == 'paragraph' and b['lines'][0].startswith('.. '): + del blocks[i] + else: + i += 1 + return blocks + +_admonitionre = re.compile(r"\.\. (admonition|attention|caution|danger|" + r"error|hint|important|note|tip|warning)::", + flags=re.IGNORECASE) + +def findadmonitions(blocks): + """ + Makes the type of the block an admonition block if + the first line is an admonition directive + """ + i = 0 + while i < len(blocks): + m = _admonitionre.match(blocks[i]['lines'][0]) + if m: + blocks[i]['type'] = 'admonition' + admonitiontitle = blocks[i]['lines'][0][3:m.end() - 2].lower() + + firstline = blocks[i]['lines'][0][m.end() + 1:] + if firstline: + blocks[i]['lines'].insert(1, ' ' + firstline) + + blocks[i]['admonitiontitle'] = admonitiontitle + del blocks[i]['lines'][0] + i = i + 1 + return blocks + +_admonitiontitles = {'attention': _('Attention:'), + 'caution': _('Caution:'), + 'danger': _('!Danger!') , + 'error': _('Error:'), + 'hint': _('Hint:'), + 'important': _('Important:'), + 'note': _('Note:'), + 'tip': _('Tip:'), + 'warning': _('Warning!')} + +def formatblock(block, width): + """Format a block according to width.""" + if width <= 0: + width = 78 + indent = ' ' * block['indent'] + if block['type'] == 'admonition': + admonition = _admonitiontitles[block['admonitiontitle']] + hang = len(block['lines'][-1]) - 
len(block['lines'][-1].lstrip()) + + defindent = indent + hang * ' ' + text = ' '.join(map(str.strip, block['lines'])) + return '%s\n%s' % (indent + admonition, util.wrap(text, width=width, + initindent=defindent, + hangindent=defindent)) + if block['type'] == 'margin': + return '' + if block['type'] == 'literal': + indent += ' ' + return indent + ('\n' + indent).join(block['lines']) + if block['type'] == 'section': + underline = encoding.colwidth(block['lines'][0]) * block['underline'] + return "%s%s\n%s%s" % (indent, block['lines'][0],indent, underline) + if block['type'] == 'definition': + term = indent + block['lines'][0] + hang = len(block['lines'][-1]) - len(block['lines'][-1].lstrip()) + defindent = indent + hang * ' ' + text = ' '.join(map(str.strip, block['lines'][1:])) + return '%s\n%s' % (term, util.wrap(text, width=width, + initindent=defindent, + hangindent=defindent)) + subindent = indent + if block['type'] == 'bullet': + if block['lines'][0].startswith('| '): + # Remove bullet for line blocks and add no extra + # indention. 
+ block['lines'][0] = block['lines'][0][2:] + else: + m = _bulletre.match(block['lines'][0]) + subindent = indent + m.end() * ' ' + elif block['type'] == 'field': + keywidth = block['keywidth'] + key = block['key'] + + subindent = indent + _fieldwidth * ' ' + if len(key) + 2 > _fieldwidth: + # key too large, use full line width + key = key.ljust(width) + elif keywidth + 2 < _fieldwidth: + # all keys are small, add only two spaces + key = key.ljust(keywidth + 2) + subindent = indent + (keywidth + 2) * ' ' + else: + # mixed sizes, use fieldwidth for this one + key = key.ljust(_fieldwidth) + block['lines'][0] = key + block['lines'][0] + elif block['type'] == 'option': + m = _optionre.match(block['lines'][0]) + option, arg, rest = m.groups() + subindent = indent + (len(option) + len(arg)) * ' ' + + text = ' '.join(map(str.strip, block['lines'])) + return util.wrap(text, width=width, + initindent=indent, + hangindent=subindent) + + +def format(text, width, indent=0, keep=None): + """Parse and format the text according to width.""" + blocks = findblocks(text) + for b in blocks: + b['indent'] += indent + blocks = findliteralblocks(blocks) + blocks, pruned = prunecontainers(blocks, keep or []) + blocks = findsections(blocks) + blocks = inlineliterals(blocks) + blocks = hgrole(blocks) + blocks = splitparagraphs(blocks) + blocks = updatefieldlists(blocks) + blocks = prunecomments(blocks) + blocks = addmargins(blocks) + blocks = findadmonitions(blocks) + text = '\n'.join(formatblock(b, width) for b in blocks) + if keep is None: + return text + else: + return text, pruned + + +if __name__ == "__main__": + from pprint import pprint + + def debug(func, *args): + blocks = func(*args) + print "*** after %s:" % func.__name__ + pprint(blocks) + print + return blocks + + text = open(sys.argv[1]).read() + blocks = debug(findblocks, text) + blocks = debug(findliteralblocks, blocks) + blocks, pruned = debug(prunecontainers, blocks, sys.argv[2:]) + blocks = debug(inlineliterals, blocks) 
+ blocks = debug(splitparagraphs, blocks) + blocks = debug(updatefieldlists, blocks) + blocks = debug(findsections, blocks) + blocks = debug(prunecomments, blocks) + blocks = debug(addmargins, blocks) + blocks = debug(findadmonitions, blocks) + print '\n'.join(formatblock(b, 30) for b in blocks) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/minirst.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/minirst.pyo Binary files differnew file mode 100644 index 0000000..6030fb2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/minirst.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.py new file mode 100644 index 0000000..def4981 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.py @@ -0,0 +1,7 @@ +def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__,'mpatch.so') + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) +__bootstrap__() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.pyo Binary files differnew file mode 100644 index 0000000..1d5614b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.so b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.so Binary files differnew file mode 100755 index 0000000..f6d0fe6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/mpatch.so diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/node.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/node.py new file mode 100644 index 0000000..9debeaf --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/node.py @@ -0,0 +1,18 @@ +# node.py - basic nodeid manipulation for mercurial +# +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import binascii + +nullrev = -1 +nullid = "\0" * 20 + +# This ugly style has a noticeable effect in manifest parsing +hex = binascii.hexlify +bin = binascii.unhexlify + +def short(node): + return hex(node[:6]) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/node.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/node.pyo Binary files differnew file mode 100644 index 0000000..46f9e2b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/node.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.py new file mode 100644 index 0000000..7ff826c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.py @@ -0,0 +1,7 @@ +def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__,'osutil.so') + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) +__bootstrap__() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.pyo Binary files differnew file mode 100644 index 0000000..53db967 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.so b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.so Binary files differnew file mode 100755 index 0000000..7921499 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/osutil.so diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parser.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parser.py new file mode 100644 index 0000000..5a6a55c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parser.py @@ -0,0 +1,91 @@ +# parser.py - simple top-down operator precedence parser for mercurial +# +# Copyright 2010 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# see http://effbot.org/zone/simple-top-down-parsing.htm and +# http://eli.thegreenplace.net/2010/01/02/top-down-operator-precedence-parsing/ +# for background + +# takes a tokenizer and elements +# tokenizer is an iterator that returns type, value pairs +# elements is a mapping of types to binding strength, prefix and infix actions +# an action is a tree node name, a tree label, and an optional match +# __call__(program) parses program into a labelled tree + +import error + +class parser(object): + def __init__(self, tokenizer, elements, methods=None): + self._tokenizer = tokenizer + self._elements = elements + self._methods = methods + def _advance(self): + 'advance the tokenizer' + t = self.current + try: + self.current = self._iter.next() + except StopIteration: + pass + return t + def _match(self, m, pos): + 'make sure the tokenizer matches an end condition' + if self.current[0] != m: + raise error.ParseError("unexpected token: %s" % self.current[0], + self.current[2]) + self._advance() + def _parse(self, bind=0): + token, value, pos = self._advance() + # handle prefix rules on current token + prefix = self._elements[token][1] + if not prefix: + raise error.ParseError("not a prefix: %s" % token, pos) + if len(prefix) == 1: + expr = (prefix[0], value) + else: + if len(prefix) > 2 and prefix[2] == self.current[0]: + self._match(prefix[2], pos) + expr = (prefix[0], None) + else: + expr = (prefix[0], self._parse(prefix[1])) + if 
len(prefix) > 2: + self._match(prefix[2], pos) + # gather tokens until we meet a lower binding strength + while bind < self._elements[self.current[0]][0]: + token, value, pos = self._advance() + e = self._elements[token] + # check for suffix - next token isn't a valid prefix + if len(e) == 4 and not self._elements[self.current[0]][1]: + suffix = e[3] + expr = (suffix[0], expr) + else: + # handle infix rules + if len(e) < 3 or not e[2]: + raise error.ParseError("not an infix: %s" % token, pos) + infix = e[2] + if len(infix) == 3 and infix[2] == self.current[0]: + self._match(infix[2], pos) + expr = (infix[0], expr, (None)) + else: + expr = (infix[0], expr, self._parse(infix[1])) + if len(infix) == 3: + self._match(infix[2], pos) + return expr + def parse(self, message): + 'generate a parse tree from a message' + self._iter = self._tokenizer(message) + self.current = self._iter.next() + return self._parse() + def eval(self, tree): + 'recursively evaluate a parse tree using node methods' + if not isinstance(tree, tuple): + return tree + return self._methods[tree[0]](*[self.eval(t) for t in tree[1:]]) + def __call__(self, message): + 'parse a message into a parse tree and evaluate if methods given' + t = self.parse(message) + if self._methods: + return self.eval(t) + return t diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parser.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parser.pyo Binary files differnew file mode 100644 index 0000000..5935e9b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parser.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.py new file mode 100644 index 0000000..6780752 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.py @@ -0,0 +1,7 @@ +def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = 
pkg_resources.resource_filename(__name__,'parsers.so') + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) +__bootstrap__() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.pyo Binary files differnew file mode 100644 index 0000000..eabf6c0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.so b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.so Binary files differnew file mode 100755 index 0000000..b68f418 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/parsers.so diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/patch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/patch.py new file mode 100644 index 0000000..8e9b13e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/patch.py @@ -0,0 +1,1630 @@ +# patch.py - patch file parsing routines +# +# Copyright 2006 Brendan Cully <brendan@kublai.com> +# Copyright 2007 Chris Mason <chris.mason@oracle.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import cStringIO, email.Parser, os, re +import tempfile, zlib + +from i18n import _ +from node import hex, nullid, short +import base85, mdiff, util, diffhelpers, copies, encoding + +gitre = re.compile('diff --git a/(.*) b/(.*)') + +class PatchError(Exception): + pass + +# helper functions + +def copyfile(src, dst, basedir): + abssrc, absdst = [util.canonpath(basedir, basedir, x) for x in [src, dst]] + if os.path.lexists(absdst): + raise util.Abort(_("cannot create %s: destination already exists") % + dst) + + dstdir = os.path.dirname(absdst) + if dstdir and not os.path.isdir(dstdir): + try: + os.makedirs(dstdir) + except IOError: + raise util.Abort( + _("cannot create %s: unable to create destination directory") + % dst) + + util.copyfile(abssrc, absdst) + +# public functions + +def split(stream): + '''return an iterator of individual patches from a stream''' + def isheader(line, inheader): + if inheader and line[0] in (' ', '\t'): + # continuation + return True + if line[0] in (' ', '-', '+'): + # diff line - don't check for header pattern in there + return False + l = line.split(': ', 1) + return len(l) == 2 and ' ' not in l[0] + + def chunk(lines): + return cStringIO.StringIO(''.join(lines)) + + def hgsplit(stream, cur): + inheader = True + + for line in stream: + if not line.strip(): + inheader = False + if not inheader and line.startswith('# HG changeset patch'): + yield chunk(cur) + cur = [] + inheader = True + + cur.append(line) + + if cur: + yield chunk(cur) + + def mboxsplit(stream, cur): + for line in stream: + if line.startswith('From '): + for c in split(chunk(cur[1:])): + yield c + cur = [] + + cur.append(line) + + if cur: + for c in split(chunk(cur[1:])): + yield c + + def mimesplit(stream, cur): + def msgfp(m): + fp = cStringIO.StringIO() + g = email.Generator.Generator(fp, mangle_from_=False) + g.flatten(m) + fp.seek(0) + return fp + + for line in stream: + cur.append(line) + c = chunk(cur) + + m = email.Parser.Parser().parse(c) + if not 
m.is_multipart(): + yield msgfp(m) + else: + ok_types = ('text/plain', 'text/x-diff', 'text/x-patch') + for part in m.walk(): + ct = part.get_content_type() + if ct not in ok_types: + continue + yield msgfp(part) + + def headersplit(stream, cur): + inheader = False + + for line in stream: + if not inheader and isheader(line, inheader): + yield chunk(cur) + cur = [] + inheader = True + if inheader and not isheader(line, inheader): + inheader = False + + cur.append(line) + + if cur: + yield chunk(cur) + + def remainder(cur): + yield chunk(cur) + + class fiter(object): + def __init__(self, fp): + self.fp = fp + + def __iter__(self): + return self + + def next(self): + l = self.fp.readline() + if not l: + raise StopIteration + return l + + inheader = False + cur = [] + + mimeheaders = ['content-type'] + + if not hasattr(stream, 'next'): + # http responses, for example, have readline but not next + stream = fiter(stream) + + for line in stream: + cur.append(line) + if line.startswith('# HG changeset patch'): + return hgsplit(stream, cur) + elif line.startswith('From '): + return mboxsplit(stream, cur) + elif isheader(line, inheader): + inheader = True + if line.split(':', 1)[0].lower() in mimeheaders: + # let email parser handle this + return mimesplit(stream, cur) + elif line.startswith('--- ') and inheader: + # No evil headers seen by diff start, split by hand + return headersplit(stream, cur) + # Not enough info, keep reading + + # if we are here, we have a very plain patch + return remainder(cur) + +def extract(ui, fileobj): + '''extract patch from data read from fileobj. + + patch can be a normal patch or contained in an email message. + + return tuple (filename, message, user, date, branch, node, p1, p2). + Any item in the returned tuple can be None. If filename is None, + fileobj did not contain a patch. 
Caller must unlink filename when done.''' + + # attempt to detect the start of a patch + # (this heuristic is borrowed from quilt) + diffre = re.compile(r'^(?:Index:[ \t]|diff[ \t]|RCS file: |' + r'retrieving revision [0-9]+(\.[0-9]+)*$|' + r'---[ \t].*?^\+\+\+[ \t]|' + r'\*\*\*[ \t].*?^---[ \t])', re.MULTILINE|re.DOTALL) + + fd, tmpname = tempfile.mkstemp(prefix='hg-patch-') + tmpfp = os.fdopen(fd, 'w') + try: + msg = email.Parser.Parser().parse(fileobj) + + subject = msg['Subject'] + user = msg['From'] + if not subject and not user: + # Not an email, restore parsed headers if any + subject = '\n'.join(': '.join(h) for h in msg.items()) + '\n' + + gitsendmail = 'git-send-email' in msg.get('X-Mailer', '') + # should try to parse msg['Date'] + date = None + nodeid = None + branch = None + parents = [] + + if subject: + if subject.startswith('[PATCH'): + pend = subject.find(']') + if pend >= 0: + subject = subject[pend + 1:].lstrip() + subject = subject.replace('\n\t', ' ') + ui.debug('Subject: %s\n' % subject) + if user: + ui.debug('From: %s\n' % user) + diffs_seen = 0 + ok_types = ('text/plain', 'text/x-diff', 'text/x-patch') + message = '' + for part in msg.walk(): + content_type = part.get_content_type() + ui.debug('Content-Type: %s\n' % content_type) + if content_type not in ok_types: + continue + payload = part.get_payload(decode=True) + m = diffre.search(payload) + if m: + hgpatch = False + hgpatchheader = False + ignoretext = False + + ui.debug('found patch at byte %d\n' % m.start(0)) + diffs_seen += 1 + cfp = cStringIO.StringIO() + for line in payload[:m.start(0)].splitlines(): + if line.startswith('# HG changeset patch') and not hgpatch: + ui.debug('patch generated by hg export\n') + hgpatch = True + hgpatchheader = True + # drop earlier commit message content + cfp.seek(0) + cfp.truncate() + subject = None + elif hgpatchheader: + if line.startswith('# User '): + user = line[7:] + ui.debug('From: %s\n' % user) + elif line.startswith("# Date "): + date = 
line[7:] + elif line.startswith("# Branch "): + branch = line[9:] + elif line.startswith("# Node ID "): + nodeid = line[10:] + elif line.startswith("# Parent "): + parents.append(line[10:]) + elif not line.startswith("# "): + hgpatchheader = False + elif line == '---' and gitsendmail: + ignoretext = True + if not hgpatchheader and not ignoretext: + cfp.write(line) + cfp.write('\n') + message = cfp.getvalue() + if tmpfp: + tmpfp.write(payload) + if not payload.endswith('\n'): + tmpfp.write('\n') + elif not diffs_seen and message and content_type == 'text/plain': + message += '\n' + payload + except: + tmpfp.close() + os.unlink(tmpname) + raise + + if subject and not message.startswith(subject): + message = '%s\n%s' % (subject, message) + tmpfp.close() + if not diffs_seen: + os.unlink(tmpname) + return None, message, user, date, branch, None, None, None + p1 = parents and parents.pop(0) or None + p2 = parents and parents.pop(0) or None + return tmpname, message, user, date, branch, nodeid, p1, p2 + +class patchmeta(object): + """Patched file metadata + + 'op' is the performed operation within ADD, DELETE, RENAME, MODIFY + or COPY. 'path' is patched file path. 'oldpath' is set to the + origin file when 'op' is either COPY or RENAME, None otherwise. If + file mode is changed, 'mode' is a tuple (islink, isexec) where + 'islink' is True if the file is a symlink and 'isexec' is True if + the file is executable. Otherwise, 'mode' is None. 
+ """ + def __init__(self, path): + self.path = path + self.oldpath = None + self.mode = None + self.op = 'MODIFY' + self.binary = False + + def setmode(self, mode): + islink = mode & 020000 + isexec = mode & 0100 + self.mode = (islink, isexec) + + def __repr__(self): + return "<patchmeta %s %r>" % (self.op, self.path) + +def readgitpatch(lr): + """extract git-style metadata about patches from <patchname>""" + + # Filter patch for git information + gp = None + gitpatches = [] + for line in lr: + line = line.rstrip(' \r\n') + if line.startswith('diff --git'): + m = gitre.match(line) + if m: + if gp: + gitpatches.append(gp) + dst = m.group(2) + gp = patchmeta(dst) + elif gp: + if line.startswith('--- '): + gitpatches.append(gp) + gp = None + continue + if line.startswith('rename from '): + gp.op = 'RENAME' + gp.oldpath = line[12:] + elif line.startswith('rename to '): + gp.path = line[10:] + elif line.startswith('copy from '): + gp.op = 'COPY' + gp.oldpath = line[10:] + elif line.startswith('copy to '): + gp.path = line[8:] + elif line.startswith('deleted file'): + gp.op = 'DELETE' + elif line.startswith('new file mode '): + gp.op = 'ADD' + gp.setmode(int(line[-6:], 8)) + elif line.startswith('new mode '): + gp.setmode(int(line[-6:], 8)) + elif line.startswith('GIT binary patch'): + gp.binary = True + if gp: + gitpatches.append(gp) + + return gitpatches + +class linereader(object): + # simple class to allow pushing lines back into the input stream + def __init__(self, fp, textmode=False): + self.fp = fp + self.buf = [] + self.textmode = textmode + self.eol = None + + def push(self, line): + if line is not None: + self.buf.append(line) + + def readline(self): + if self.buf: + l = self.buf[0] + del self.buf[0] + return l + l = self.fp.readline() + if not self.eol: + if l.endswith('\r\n'): + self.eol = '\r\n' + elif l.endswith('\n'): + self.eol = '\n' + if self.textmode and l.endswith('\r\n'): + l = l[:-2] + '\n' + return l + + def __iter__(self): + while 1: + l = 
self.readline() + if not l: + break + yield l + +# @@ -start,len +start,len @@ or @@ -start +start @@ if len is 1 +unidesc = re.compile('@@ -(\d+)(,(\d+))? \+(\d+)(,(\d+))? @@') +contextdesc = re.compile('(---|\*\*\*) (\d+)(,(\d+))? (---|\*\*\*)') +eolmodes = ['strict', 'crlf', 'lf', 'auto'] + +class patchfile(object): + def __init__(self, ui, fname, opener, missing=False, eolmode='strict'): + self.fname = fname + self.eolmode = eolmode + self.eol = None + self.opener = opener + self.ui = ui + self.lines = [] + self.exists = False + self.missing = missing + if not missing: + try: + self.lines = self.readlines(fname) + self.exists = True + except IOError: + pass + else: + self.ui.warn(_("unable to find '%s' for patching\n") % self.fname) + + self.hash = {} + self.dirty = 0 + self.offset = 0 + self.skew = 0 + self.rej = [] + self.fileprinted = False + self.printfile(False) + self.hunks = 0 + + def readlines(self, fname): + if os.path.islink(fname): + return [os.readlink(fname)] + fp = self.opener(fname, 'r') + try: + lr = linereader(fp, self.eolmode != 'strict') + lines = list(lr) + self.eol = lr.eol + return lines + finally: + fp.close() + + def writelines(self, fname, lines): + # Ensure supplied data ends in fname, being a regular file or + # a symlink. cmdutil.updatedir will -too magically- take care + # of setting it to the proper type afterwards. 
+ islink = os.path.islink(fname) + if islink: + fp = cStringIO.StringIO() + else: + fp = self.opener(fname, 'w') + try: + if self.eolmode == 'auto': + eol = self.eol + elif self.eolmode == 'crlf': + eol = '\r\n' + else: + eol = '\n' + + if self.eolmode != 'strict' and eol and eol != '\n': + for l in lines: + if l and l[-1] == '\n': + l = l[:-1] + eol + fp.write(l) + else: + fp.writelines(lines) + if islink: + self.opener.symlink(fp.getvalue(), fname) + finally: + fp.close() + + def unlink(self, fname): + os.unlink(fname) + + def printfile(self, warn): + if self.fileprinted: + return + if warn or self.ui.verbose: + self.fileprinted = True + s = _("patching file %s\n") % self.fname + if warn: + self.ui.warn(s) + else: + self.ui.note(s) + + + def findlines(self, l, linenum): + # looks through the hash and finds candidate lines. The + # result is a list of line numbers sorted based on distance + # from linenum + + cand = self.hash.get(l, []) + if len(cand) > 1: + # resort our list of potentials forward then back. + cand.sort(key=lambda x: abs(x - linenum)) + return cand + + def hashlines(self): + self.hash = {} + for x, s in enumerate(self.lines): + self.hash.setdefault(s, []).append(x) + + def makerejlines(self, fname): + base = os.path.basename(fname) + yield "--- %s\n+++ %s\n" % (base, base) + for x in self.rej: + for l in x.hunk: + yield l + if l[-1] != '\n': + yield "\n\ No newline at end of file\n" + + def write_rej(self): + # our rejects are a little different from patch(1). This always + # creates rejects in the same form as the original patch. A file + # header is inserted so that you can run the reject through patch again + # without having to type the filename. 
+ + if not self.rej: + return + + fname = self.fname + ".rej" + self.ui.warn( + _("%d out of %d hunks FAILED -- saving rejects to file %s\n") % + (len(self.rej), self.hunks, fname)) + + fp = self.opener(fname, 'w') + fp.writelines(self.makerejlines(self.fname)) + fp.close() + + def apply(self, h): + if not h.complete(): + raise PatchError(_("bad hunk #%d %s (%d %d %d %d)") % + (h.number, h.desc, len(h.a), h.lena, len(h.b), + h.lenb)) + + self.hunks += 1 + + if self.missing: + self.rej.append(h) + return -1 + + if self.exists and h.createfile(): + self.ui.warn(_("file %s already exists\n") % self.fname) + self.rej.append(h) + return -1 + + if isinstance(h, binhunk): + if h.rmfile(): + self.unlink(self.fname) + else: + self.lines[:] = h.new() + self.offset += len(h.new()) + self.dirty = 1 + return 0 + + horig = h + if (self.eolmode in ('crlf', 'lf') + or self.eolmode == 'auto' and self.eol): + # If new eols are going to be normalized, then normalize + # hunk data before patching. Otherwise, preserve input + # line-endings. + h = h.getnormalized() + + # fast case first, no offsets, no fuzz + old = h.old() + # patch starts counting at 1 unless we are adding the file + if h.starta == 0: + start = 0 + else: + start = h.starta + self.offset - 1 + orig_start = start + # if there's skew we want to emit the "(offset %d lines)" even + # when the hunk cleanly applies at start + skew, so skip the + # fast case code + if self.skew == 0 and diffhelpers.testhunk(old, self.lines, start) == 0: + if h.rmfile(): + self.unlink(self.fname) + else: + self.lines[start : start + h.lena] = h.new() + self.offset += h.lenb - h.lena + self.dirty = 1 + return 0 + + # ok, we couldn't match the hunk. 
Lets look for offsets and fuzz it + self.hashlines() + if h.hunk[-1][0] != ' ': + # if the hunk tried to put something at the bottom of the file + # override the start line and use eof here + search_start = len(self.lines) + else: + search_start = orig_start + self.skew + + for fuzzlen in xrange(3): + for toponly in [True, False]: + old = h.old(fuzzlen, toponly) + + cand = self.findlines(old[0][1:], search_start) + for l in cand: + if diffhelpers.testhunk(old, self.lines, l) == 0: + newlines = h.new(fuzzlen, toponly) + self.lines[l : l + len(old)] = newlines + self.offset += len(newlines) - len(old) + self.skew = l - orig_start + self.dirty = 1 + offset = l - orig_start - fuzzlen + if fuzzlen: + msg = _("Hunk #%d succeeded at %d " + "with fuzz %d " + "(offset %d lines).\n") + self.printfile(True) + self.ui.warn(msg % + (h.number, l + 1, fuzzlen, offset)) + else: + msg = _("Hunk #%d succeeded at %d " + "(offset %d lines).\n") + self.ui.note(msg % (h.number, l + 1, offset)) + return fuzzlen + self.printfile(True) + self.ui.warn(_("Hunk #%d FAILED at %d\n") % (h.number, orig_start)) + self.rej.append(horig) + return -1 + +class hunk(object): + def __init__(self, desc, num, lr, context, create=False, remove=False): + self.number = num + self.desc = desc + self.hunk = [desc] + self.a = [] + self.b = [] + self.starta = self.lena = None + self.startb = self.lenb = None + if lr is not None: + if context: + self.read_context_hunk(lr) + else: + self.read_unified_hunk(lr) + self.create = create + self.remove = remove and not create + + def getnormalized(self): + """Return a copy with line endings normalized to LF.""" + + def normalize(lines): + nlines = [] + for line in lines: + if line.endswith('\r\n'): + line = line[:-2] + '\n' + nlines.append(line) + return nlines + + # Dummy object, it is rebuilt manually + nh = hunk(self.desc, self.number, None, None, False, False) + nh.number = self.number + nh.desc = self.desc + nh.hunk = self.hunk + nh.a = normalize(self.a) + nh.b = 
normalize(self.b) + nh.starta = self.starta + nh.startb = self.startb + nh.lena = self.lena + nh.lenb = self.lenb + nh.create = self.create + nh.remove = self.remove + return nh + + def read_unified_hunk(self, lr): + m = unidesc.match(self.desc) + if not m: + raise PatchError(_("bad hunk #%d") % self.number) + self.starta, foo, self.lena, self.startb, foo2, self.lenb = m.groups() + if self.lena is None: + self.lena = 1 + else: + self.lena = int(self.lena) + if self.lenb is None: + self.lenb = 1 + else: + self.lenb = int(self.lenb) + self.starta = int(self.starta) + self.startb = int(self.startb) + diffhelpers.addlines(lr, self.hunk, self.lena, self.lenb, self.a, self.b) + # if we hit eof before finishing out the hunk, the last line will + # be zero length. Lets try to fix it up. + while len(self.hunk[-1]) == 0: + del self.hunk[-1] + del self.a[-1] + del self.b[-1] + self.lena -= 1 + self.lenb -= 1 + + def read_context_hunk(self, lr): + self.desc = lr.readline() + m = contextdesc.match(self.desc) + if not m: + raise PatchError(_("bad hunk #%d") % self.number) + foo, self.starta, foo2, aend, foo3 = m.groups() + self.starta = int(self.starta) + if aend is None: + aend = self.starta + self.lena = int(aend) - self.starta + if self.starta: + self.lena += 1 + for x in xrange(self.lena): + l = lr.readline() + if l.startswith('---'): + # lines addition, old block is empty + lr.push(l) + break + s = l[2:] + if l.startswith('- ') or l.startswith('! 
'): + u = '-' + s + elif l.startswith(' '): + u = ' ' + s + else: + raise PatchError(_("bad hunk #%d old text line %d") % + (self.number, x)) + self.a.append(u) + self.hunk.append(u) + + l = lr.readline() + if l.startswith('\ '): + s = self.a[-1][:-1] + self.a[-1] = s + self.hunk[-1] = s + l = lr.readline() + m = contextdesc.match(l) + if not m: + raise PatchError(_("bad hunk #%d") % self.number) + foo, self.startb, foo2, bend, foo3 = m.groups() + self.startb = int(self.startb) + if bend is None: + bend = self.startb + self.lenb = int(bend) - self.startb + if self.startb: + self.lenb += 1 + hunki = 1 + for x in xrange(self.lenb): + l = lr.readline() + if l.startswith('\ '): + # XXX: the only way to hit this is with an invalid line range. + # The no-eol marker is not counted in the line range, but I + # guess there are diff(1) out there which behave differently. + s = self.b[-1][:-1] + self.b[-1] = s + self.hunk[hunki - 1] = s + continue + if not l: + # line deletions, new block is empty and we hit EOF + lr.push(l) + break + s = l[2:] + if l.startswith('+ ') or l.startswith('! 
'): + u = '+' + s + elif l.startswith(' '): + u = ' ' + s + elif len(self.b) == 0: + # line deletions, new block is empty + lr.push(l) + break + else: + raise PatchError(_("bad hunk #%d old text line %d") % + (self.number, x)) + self.b.append(s) + while True: + if hunki >= len(self.hunk): + h = "" + else: + h = self.hunk[hunki] + hunki += 1 + if h == u: + break + elif h.startswith('-'): + continue + else: + self.hunk.insert(hunki - 1, u) + break + + if not self.a: + # this happens when lines were only added to the hunk + for x in self.hunk: + if x.startswith('-') or x.startswith(' '): + self.a.append(x) + if not self.b: + # this happens when lines were only deleted from the hunk + for x in self.hunk: + if x.startswith('+') or x.startswith(' '): + self.b.append(x[1:]) + # @@ -start,len +start,len @@ + self.desc = "@@ -%d,%d +%d,%d @@\n" % (self.starta, self.lena, + self.startb, self.lenb) + self.hunk[0] = self.desc + + def fix_newline(self): + diffhelpers.fix_newline(self.hunk, self.a, self.b) + + def complete(self): + return len(self.a) == self.lena and len(self.b) == self.lenb + + def createfile(self): + return self.starta == 0 and self.lena == 0 and self.create + + def rmfile(self): + return self.startb == 0 and self.lenb == 0 and self.remove + + def fuzzit(self, l, fuzz, toponly): + # this removes context lines from the top and bottom of list 'l'. It + # checks the hunk to make sure only context lines are removed, and then + # returns a new shortened list of lines. 
+ fuzz = min(fuzz, len(l)-1) + if fuzz: + top = 0 + bot = 0 + hlen = len(self.hunk) + for x in xrange(hlen - 1): + # the hunk starts with the @@ line, so use x+1 + if self.hunk[x + 1][0] == ' ': + top += 1 + else: + break + if not toponly: + for x in xrange(hlen - 1): + if self.hunk[hlen - bot - 1][0] == ' ': + bot += 1 + else: + break + + # top and bot now count context in the hunk + # adjust them if either one is short + context = max(top, bot, 3) + if bot < context: + bot = max(0, fuzz - (context - bot)) + else: + bot = min(fuzz, bot) + if top < context: + top = max(0, fuzz - (context - top)) + else: + top = min(fuzz, top) + + return l[top:len(l)-bot] + return l + + def old(self, fuzz=0, toponly=False): + return self.fuzzit(self.a, fuzz, toponly) + + def new(self, fuzz=0, toponly=False): + return self.fuzzit(self.b, fuzz, toponly) + +class binhunk: + 'A binary patch file. Only understands literals so far.' + def __init__(self, gitpatch): + self.gitpatch = gitpatch + self.text = None + self.hunk = ['GIT binary patch\n'] + + def createfile(self): + return self.gitpatch.op in ('ADD', 'RENAME', 'COPY') + + def rmfile(self): + return self.gitpatch.op == 'DELETE' + + def complete(self): + return self.text is not None + + def new(self): + return [self.text] + + def extract(self, lr): + line = lr.readline() + self.hunk.append(line) + while line and not line.startswith('literal '): + line = lr.readline() + self.hunk.append(line) + if not line: + raise PatchError(_('could not extract binary patch')) + size = int(line[8:].rstrip()) + dec = [] + line = lr.readline() + self.hunk.append(line) + while len(line) > 1: + l = line[0] + if l <= 'Z' and l >= 'A': + l = ord(l) - ord('A') + 1 + else: + l = ord(l) - ord('a') + 27 + dec.append(base85.b85decode(line[1:-1])[:l]) + line = lr.readline() + self.hunk.append(line) + text = zlib.decompress(''.join(dec)) + if len(text) != size: + raise PatchError(_('binary patch is %d bytes, not %d') % + len(text), size) + self.text = text + 
+def parsefilename(str): + # --- filename \t|space stuff + s = str[4:].rstrip('\r\n') + i = s.find('\t') + if i < 0: + i = s.find(' ') + if i < 0: + return s + return s[:i] + +def pathstrip(path, strip): + pathlen = len(path) + i = 0 + if strip == 0: + return '', path.rstrip() + count = strip + while count > 0: + i = path.find('/', i) + if i == -1: + raise PatchError(_("unable to strip away %d of %d dirs from %s") % + (count, strip, path)) + i += 1 + # consume '//' in the path + while i < pathlen - 1 and path[i] == '/': + i += 1 + count -= 1 + return path[:i].lstrip(), path[i:].rstrip() + +def selectfile(afile_orig, bfile_orig, hunk, strip): + nulla = afile_orig == "/dev/null" + nullb = bfile_orig == "/dev/null" + abase, afile = pathstrip(afile_orig, strip) + gooda = not nulla and os.path.lexists(afile) + bbase, bfile = pathstrip(bfile_orig, strip) + if afile == bfile: + goodb = gooda + else: + goodb = not nullb and os.path.lexists(bfile) + createfunc = hunk.createfile + missing = not goodb and not gooda and not createfunc() + + # some diff programs apparently produce patches where the afile is + # not /dev/null, but afile starts with bfile + abasedir = afile[:afile.rfind('/') + 1] + bbasedir = bfile[:bfile.rfind('/') + 1] + if missing and abasedir == bbasedir and afile.startswith(bfile): + # this isn't very pretty + hunk.create = True + if createfunc(): + missing = False + else: + hunk.create = False + + # If afile is "a/b/foo" and bfile is "a/b/foo.orig" we assume the + # diff is between a file and its backup. In this case, the original + # file should be patched (see original mpatch code). 
+ isbackup = (abase == bbase and bfile.startswith(afile)) + fname = None + if not missing: + if gooda and goodb: + fname = isbackup and afile or bfile + elif gooda: + fname = afile + + if not fname: + if not nullb: + fname = isbackup and afile or bfile + elif not nulla: + fname = afile + else: + raise PatchError(_("undefined source and destination files")) + + return fname, missing + +def scangitpatch(lr, firstline): + """ + Git patches can emit: + - rename a to b + - change b + - copy a to c + - change c + + We cannot apply this sequence as-is, the renamed 'a' could not be + found for it would have been renamed already. And we cannot copy + from 'b' instead because 'b' would have been changed already. So + we scan the git patch for copy and rename commands so we can + perform the copies ahead of time. + """ + pos = 0 + try: + pos = lr.fp.tell() + fp = lr.fp + except IOError: + fp = cStringIO.StringIO(lr.fp.read()) + gitlr = linereader(fp, lr.textmode) + gitlr.push(firstline) + gitpatches = readgitpatch(gitlr) + fp.seek(pos) + return gitpatches + +def iterhunks(ui, fp, sourcefile=None): + """Read a patch and yield the following events: + - ("file", afile, bfile, firsthunk): select a new target file. + - ("hunk", hunk): a new hunk is ready to be applied, follows a + "file" event. + - ("git", gitchanges): current diff is in git format, gitchanges + maps filenames to gitpatch records. Unique event. + """ + changed = {} + current_hunk = None + afile = "" + bfile = "" + state = None + hunknum = 0 + emitfile = False + git = False + + # our states + BFILE = 1 + context = None + lr = linereader(fp) + # gitworkdone is True if a git operation (copy, rename, ...) was + # performed already for the current file. Useful when the file + # section may have no hunk. 
+ gitworkdone = False + + while True: + newfile = newgitfile = False + x = lr.readline() + if not x: + break + if current_hunk: + if x.startswith('\ '): + current_hunk.fix_newline() + yield 'hunk', current_hunk + current_hunk = None + if ((sourcefile or state == BFILE) and ((not context and x[0] == '@') or + ((context is not False) and x.startswith('***************')))): + if context is None and x.startswith('***************'): + context = True + gpatch = changed.get(bfile) + create = afile == '/dev/null' or gpatch and gpatch.op == 'ADD' + remove = bfile == '/dev/null' or gpatch and gpatch.op == 'DELETE' + current_hunk = hunk(x, hunknum + 1, lr, context, create, remove) + hunknum += 1 + if emitfile: + emitfile = False + yield 'file', (afile, bfile, current_hunk) + elif state == BFILE and x.startswith('GIT binary patch'): + current_hunk = binhunk(changed[bfile]) + hunknum += 1 + if emitfile: + emitfile = False + yield 'file', ('a/' + afile, 'b/' + bfile, current_hunk) + current_hunk.extract(lr) + elif x.startswith('diff --git'): + # check for git diff, scanning the whole patch file if needed + m = gitre.match(x) + gitworkdone = False + if m: + afile, bfile = m.group(1, 2) + if not git: + git = True + gitpatches = scangitpatch(lr, x) + yield 'git', gitpatches + for gp in gitpatches: + changed[gp.path] = gp + # else error? 
+ # copy/rename + modify should modify target, not source + gp = changed.get(bfile) + if gp and (gp.op in ('COPY', 'DELETE', 'RENAME', 'ADD') + or gp.mode): + afile = bfile + gitworkdone = True + newgitfile = True + elif x.startswith('---'): + # check for a unified diff + l2 = lr.readline() + if not l2.startswith('+++'): + lr.push(l2) + continue + newfile = True + context = False + afile = parsefilename(x) + bfile = parsefilename(l2) + elif x.startswith('***'): + # check for a context diff + l2 = lr.readline() + if not l2.startswith('---'): + lr.push(l2) + continue + l3 = lr.readline() + lr.push(l3) + if not l3.startswith("***************"): + lr.push(l2) + continue + newfile = True + context = True + afile = parsefilename(x) + bfile = parsefilename(l2) + + if newfile: + gitworkdone = False + + if newgitfile or newfile: + emitfile = True + state = BFILE + hunknum = 0 + if current_hunk: + if current_hunk.complete(): + yield 'hunk', current_hunk + else: + raise PatchError(_("malformed patch %s %s") % (afile, + current_hunk.desc)) + +def applydiff(ui, fp, changed, strip=1, sourcefile=None, eolmode='strict'): + """Reads a patch from fp and tries to apply it. + + The dict 'changed' is filled in with all of the filenames changed + by the patch. Returns 0 for a clean patch, -1 if any rejects were + found and 1 if there was any fuzz. + + If 'eolmode' is 'strict', the patch content and patched file are + read in binary mode. Otherwise, line endings are ignored when + patching then normalized according to 'eolmode'. + + Callers probably want to call 'cmdutil.updatedir' after this to + apply certain categories of changes not done by this function. 
+ """ + return _applydiff( + ui, fp, patchfile, copyfile, + changed, strip=strip, sourcefile=sourcefile, eolmode=eolmode) + + +def _applydiff(ui, fp, patcher, copyfn, changed, strip=1, + sourcefile=None, eolmode='strict'): + rejects = 0 + err = 0 + current_file = None + cwd = os.getcwd() + opener = util.opener(cwd) + + def closefile(): + if not current_file: + return 0 + if current_file.dirty: + current_file.writelines(current_file.fname, current_file.lines) + current_file.write_rej() + return len(current_file.rej) + + for state, values in iterhunks(ui, fp, sourcefile): + if state == 'hunk': + if not current_file: + continue + ret = current_file.apply(values) + if ret >= 0: + changed.setdefault(current_file.fname, None) + if ret > 0: + err = 1 + elif state == 'file': + rejects += closefile() + afile, bfile, first_hunk = values + try: + if sourcefile: + current_file = patcher(ui, sourcefile, opener, + eolmode=eolmode) + else: + current_file, missing = selectfile(afile, bfile, + first_hunk, strip) + current_file = patcher(ui, current_file, opener, + missing=missing, eolmode=eolmode) + except PatchError, err: + ui.warn(str(err) + '\n') + current_file = None + rejects += 1 + continue + elif state == 'git': + for gp in values: + gp.path = pathstrip(gp.path, strip - 1)[1] + if gp.oldpath: + gp.oldpath = pathstrip(gp.oldpath, strip - 1)[1] + # Binary patches really overwrite target files, copying them + # will just make it fails with "target file exists" + if gp.op in ('COPY', 'RENAME') and not gp.binary: + copyfn(gp.oldpath, gp.path, cwd) + changed[gp.path] = gp + else: + raise util.Abort(_('unsupported parser state: %s') % state) + + rejects += closefile() + + if rejects: + return -1 + return err + +def externalpatch(patcher, patchname, ui, strip, cwd, files): + """use <patcher> to apply <patchname> to the working directory. 
+ returns whether patch was applied with fuzz factor.""" + + fuzz = False + args = [] + if cwd: + args.append('-d %s' % util.shellquote(cwd)) + fp = util.popen('%s %s -p%d < %s' % (patcher, ' '.join(args), strip, + util.shellquote(patchname))) + + for line in fp: + line = line.rstrip() + ui.note(line + '\n') + if line.startswith('patching file '): + pf = util.parse_patch_output(line) + printed_file = False + files.setdefault(pf, None) + elif line.find('with fuzz') >= 0: + fuzz = True + if not printed_file: + ui.warn(pf + '\n') + printed_file = True + ui.warn(line + '\n') + elif line.find('saving rejects to file') >= 0: + ui.warn(line + '\n') + elif line.find('FAILED') >= 0: + if not printed_file: + ui.warn(pf + '\n') + printed_file = True + ui.warn(line + '\n') + code = fp.close() + if code: + raise PatchError(_("patch command failed: %s") % + util.explain_exit(code)[0]) + return fuzz + +def internalpatch(patchobj, ui, strip, cwd, files=None, eolmode='strict'): + """use builtin patch to apply <patchobj> to the working directory. + returns whether patch was applied with fuzz factor.""" + + if files is None: + files = {} + if eolmode is None: + eolmode = ui.config('patch', 'eol', 'strict') + if eolmode.lower() not in eolmodes: + raise util.Abort(_('unsupported line endings type: %s') % eolmode) + eolmode = eolmode.lower() + + try: + fp = open(patchobj, 'rb') + except TypeError: + fp = patchobj + if cwd: + curdir = os.getcwd() + os.chdir(cwd) + try: + ret = applydiff(ui, fp, files, strip=strip, eolmode=eolmode) + finally: + if cwd: + os.chdir(curdir) + if fp != patchobj: + fp.close() + if ret < 0: + raise PatchError(_('patch failed to apply')) + return ret > 0 + +def patch(patchname, ui, strip=1, cwd=None, files=None, eolmode='strict'): + """Apply <patchname> to the working directory. + + 'eolmode' specifies how end of lines should be handled. 
def b85diff(to, tn):
    '''print base85-encoded binary diff'''

    def gitindex(text):
        # SHA-1 of the corresponding git blob object
        # ("blob <len>\0<data>"); the null revision hash stands in for
        # a missing side.
        if not text:
            return hex(nullid)
        sha = util.sha1('blob %d\0' % len(text))
        sha.update(text)
        return sha.hexdigest()

    def fmtline(piece):
        # git encodes each line's decoded length as a single letter:
        # A-Z for 1..26 bytes, a-z for 27..52.
        n = len(piece)
        if n <= 26:
            marker = chr(ord('A') + n - 1)
        else:
            marker = chr(n - 26 + ord('a') - 1)
        return '%c%s\n' % (marker, base85.b85encode(piece, True))

    oldindex = gitindex(to)
    newindex = gitindex(tn)
    if oldindex == newindex:
        # identical contents: nothing to emit
        return ""

    # TODO: deltas
    pieces = ['index %s..%s\nGIT binary patch\nliteral %s\n' %
              (oldindex, newindex, len(tn))]
    compressed = zlib.compress(tn)
    # emit the payload in 52-byte slices, one encoded line each
    for start in range(0, len(compressed), 52):
        pieces.append(fmtline(compressed[start:start + 52]))
    pieces.append('\n')
    return ''.join(pieces)
'ignoreblanklines'), + context=get('unified', getter=ui.config)) + +def diff(repo, node1=None, node2=None, match=None, changes=None, opts=None, + losedatafn=None, prefix=''): + '''yields diff of changes to files between two nodes, or node and + working directory. + + if node1 is None, use first dirstate parent instead. + if node2 is None, compare node1 with working directory. + + losedatafn(**kwarg) is a callable run when opts.upgrade=True and + every time some change cannot be represented with the current + patch format. Return False to upgrade to git patch format, True to + accept the loss or raise an exception to abort the diff. It is + called with the name of current file being diffed as 'fn'. If set + to None, patches will always be upgraded to git format when + necessary. + + prefix is a filename prefix that is prepended to all filenames on + display (used for subrepos). + ''' + + if opts is None: + opts = mdiff.defaultopts + + if not node1 and not node2: + node1 = repo.dirstate.parents()[0] + + def lrugetfilectx(): + cache = {} + order = [] + def getfilectx(f, ctx): + fctx = ctx.filectx(f, filelog=cache.get(f)) + if f not in cache: + if len(cache) > 20: + del cache[order.pop(0)] + cache[f] = fctx.filelog() + else: + order.remove(f) + order.append(f) + return fctx + return getfilectx + getfilectx = lrugetfilectx() + + ctx1 = repo[node1] + ctx2 = repo[node2] + + if not changes: + changes = repo.status(ctx1, ctx2, match=match) + modified, added, removed = changes[:3] + + if not modified and not added and not removed: + return [] + + revs = None + if not repo.ui.quiet: + hexfunc = repo.ui.debugflag and hex or short + revs = [hexfunc(node) for node in [node1, node2] if node] + + copy = {} + if opts.git or opts.upgrade: + copy = copies.copies(repo, ctx1, ctx2, repo[nullid])[0] + + difffn = lambda opts, losedata: trydiff(repo, revs, ctx1, ctx2, + modified, added, removed, copy, getfilectx, opts, losedata, prefix) + if opts.upgrade and not opts.git: + try: + def 
def difflabel(func, *args, **kw):
    '''yields 2-tuples of (output, label) based on the output of func()'''
    # ordered longest-match-first where prefixes overlap
    # ('---' before '-', '+++' before '+')
    prefixes = [('diff', 'diff.diffline'),
                ('copy', 'diff.extended'),
                ('rename', 'diff.extended'),
                ('old', 'diff.extended'),
                ('new', 'diff.extended'),
                ('deleted', 'diff.extended'),
                ('---', 'diff.file_a'),
                ('+++', 'diff.file_b'),
                ('@@', 'diff.hunk'),
                ('-', 'diff.deleted'),
                ('+', 'diff.inserted')]

    for chunk in func(*args, **kw):
        first = True
        for line in chunk.split('\n'):
            if not first:
                yield ('\n', '')
            first = False
            body = line
            if line and line[0] in '+-':
                # highlight trailing whitespace, but only in changed lines
                body = line.rstrip()
            matched = False
            for prefix, label in prefixes:
                if body.startswith(prefix):
                    yield (body, label)
                    matched = True
                    break
            if not matched:
                yield (line, '')
            if line != body:
                yield (line[len(body):], 'diff.trailingwhitespace')
in man1: + to = getfilectx(f, ctx1).data() + if f not in removed: + tn = getfilectx(f, ctx2).data() + a, b = f, f + if opts.git or losedatafn: + if f in added: + mode = gitmode[ctx2.flags(f)] + if f in copy or f in copyto: + if opts.git: + if f in copy: + a = copy[f] + else: + a = copyto[f] + omode = gitmode[man1.flags(a)] + _addmodehdr(header, omode, mode) + if a in removed and a not in gone: + op = 'rename' + gone.add(a) + else: + op = 'copy' + header.append('%s from %s\n' % (op, join(a))) + header.append('%s to %s\n' % (op, join(f))) + to = getfilectx(a, ctx1).data() + else: + losedatafn(f) + else: + if opts.git: + header.append('new file mode %s\n' % mode) + elif ctx2.flags(f): + losedatafn(f) + # In theory, if tn was copied or renamed we should check + # if the source is binary too but the copy record already + # forces git mode. + if util.binary(tn): + if opts.git: + dodiff = 'binary' + else: + losedatafn(f) + if not opts.git and not tn: + # regular diffs cannot represent new empty file + losedatafn(f) + elif f in removed: + if opts.git: + # have we already reported a copy above? 
def diffstatdata(lines):
    '''yield one (filename, adds, removes, isbinary) tuple per file
    mentioned in a diff read as an iterable of lines'''
    filename = None
    adds = removes = 0

    def record():
        # a file with no +/- lines at all is assumed to be binary
        return (filename, adds, removes, adds == 0 and removes == 0)

    for line in lines:
        if line.startswith('diff'):
            if filename:
                yield record()
            # set numbers to 0 anyway when starting new file
            adds = removes = 0
            if line.startswith('diff --git'):
                filename = gitre.search(line).group(1)
            else:
                # format: "diff -r ... -r ... filename"
                filename = line.split(None, 5)[-1]
        elif line.startswith('+') and not line.startswith('+++'):
            adds += 1
        elif line.startswith('-') and not line.startswith('---'):
            removes += 1
    if filename:
        yield record()
def diffstatui(*args, **kw):
    '''like diffstat(), but yields 2-tuples of (output, label) for
    ui.write()
    '''
    text = diffstat(*args, **kw)
    for line in text.splitlines():
        if not line or line[-1] not in '+-':
            # header/summary lines carry no label
            yield (line, '')
        else:
            name, graph = line.rsplit(' ', 1)
            yield (name + ' ', '')
            plus = re.search(r'\++', graph)
            if plus:
                yield (plus.group(0), 'diffstat.inserted')
            minus = re.search(r'-+', graph)
            if minus:
                yield (minus.group(0), 'diffstat.deleted')
        yield ('\n', '')
def parse_patch_output(output_line):
    """parses the output produced by patch and returns the filename"""
    # strip the leading "patching file " (14 characters)
    pf = output_line[14:]
    if os.sys.platform == 'OpenVMS':
        # OpenVMS patch quotes filenames with backticks
        if pf[0] == '`':
            pf = pf[1:-1]
    elif pf.startswith("'") and pf.endswith("'") and " " in pf:
        # POSIX patch only quotes names that contain whitespace
        pf = pf[1:-1]
    return pf
def samedevice(fpath1, fpath2):
    """Returns whether fpath1 and fpath2 are on the same device. This is only
    guaranteed to work for files, not directories."""
    # compare the device numbers reported by lstat (symlinks are not
    # followed, matching the original implementation)
    return os.lstat(fpath1).st_dev == os.lstat(fpath2).st_dev
def shellquote(s):
    # DCL on OpenVMS uses double quotes; POSIX shells get the standard
    # single-quote wrapping with each embedded quote spelled '\''.
    if os.sys.platform == 'OpenVMS':
        return '"%s"' % s
    return "'" + s.replace("'", "'\\''") + "'"
def statfiles(files):
    'Stat each file in files and yield stat or None if file does not exist.'
    for name in files:
        try:
            yield os.lstat(name)
        except OSError as err:
            # missing file or missing parent directory -> None;
            # anything else (e.g. permission problems) propagates
            if err.errno not in (errno.ENOENT, errno.ENOTDIR):
                raise
            yield None
+ """ + return list(grp.getgrnam(name).gr_mem) + +def spawndetached(args): + return os.spawnvp(os.P_NOWAIT | getattr(os, 'P_DETACH', 0), + args[0], args) + +def gethgcmd(): + return sys.argv[:1] + +def termwidth(): + try: + import termios, array, fcntl + for dev in (sys.stderr, sys.stdout, sys.stdin): + try: + try: + fd = dev.fileno() + except AttributeError: + continue + if not os.isatty(fd): + continue + arri = fcntl.ioctl(fd, termios.TIOCGWINSZ, '\0' * 8) + return array.array('h', arri)[1] + except ValueError: + pass + except IOError, e: + if e[0] == errno.EINVAL: + pass + else: + raise + except ImportError: + pass + return 80 diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/posix.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/posix.pyo Binary files differnew file mode 100644 index 0000000..4aea4bf --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/posix.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/pushkey.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/pushkey.py new file mode 100644 index 0000000..d7868e6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/pushkey.py @@ -0,0 +1,31 @@ +# pushkey.py - dispatching for pushing and pulling keys +# +# Copyright 2010 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
def _nslist(repo):
    # listkeys handler for the reserved "namespaces" namespace: every
    # registered namespace name mapped to an empty value
    return dict.fromkeys(_namespaces, "")

# namespace -> (pushkey handler, listkeys handler); unknown namespaces
# are served by the fallback pair in _get()
_namespaces = {"namespaces": (lambda *x: False, _nslist)}

def register(namespace, pushkey, listkeys):
    """Register the (pushkey, listkeys) handler pair for a namespace."""
    _namespaces[namespace] = (pushkey, listkeys)

def _get(namespace):
    # unknown namespaces refuse pushes and list nothing
    fallback = (lambda *x: False, lambda *x: {})
    return _namespaces.get(namespace, fallback)

def push(repo, namespace, key, old, new):
    '''should succeed iff value was old'''
    pushfn = _get(namespace)[0]
    return pushfn(repo, key, old, new)

def list(repo, namespace):
    '''return a dict'''
    listfn = _get(namespace)[1]
    return listfn(repo)
def fakeord(char):
    # py3 iteration over bytes already yields ints: pass those through
    # unchanged, and defer anything else to the real ord()
    return char if isinstance(char, int) else origord(char)
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/repair.py new file mode 100644 index 0000000..2685d34 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/repair.py @@ -0,0 +1,167 @@ +# repair.py - functions for repository repair for mercurial +# +# Copyright 2005, 2006 Chris Mason <mason@suse.com> +# Copyright 2007 Matt Mackall +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import changegroup +from node import nullrev, short +from i18n import _ +import os + +def _bundle(repo, bases, heads, node, suffix, extranodes=None, compress=True): + """create a bundle with the specified revisions as a backup""" + cg = repo.changegroupsubset(bases, heads, 'strip', extranodes) + backupdir = repo.join("strip-backup") + if not os.path.isdir(backupdir): + os.mkdir(backupdir) + name = os.path.join(backupdir, "%s-%s.hg" % (short(node), suffix)) + if compress: + bundletype = "HG10BZ" + else: + bundletype = "HG10UN" + return changegroup.writebundle(cg, name, bundletype) + +def _collectfiles(repo, striprev): + """find out the filelogs affected by the strip""" + files = set() + + for x in xrange(striprev, len(repo)): + files.update(repo[x].files()) + + return sorted(files) + +def _collectextranodes(repo, files, link): + """return the nodes that have to be saved before the strip""" + def collectone(cl, revlog): + extra = [] + startrev = count = len(revlog) + # find the truncation point of the revlog + for i in xrange(count): + lrev = revlog.linkrev(i) + if lrev >= link: + startrev = i + 1 + break + + # see if any revision after that point has a linkrev less than link + # (we have to manually save these guys) + for i in xrange(startrev, count): + node = revlog.node(i) + lrev = revlog.linkrev(i) + if lrev < link: + extra.append((node, cl.node(lrev))) + + return extra + + extranodes = {} + cl = repo.changelog + extra = collectone(cl, repo.manifest) + if extra: + 
def strip(ui, repo, node, backup="all"):
    # Remove changeset `node` and all revisions after it from the
    # repository, bundling up anything that must survive.
    # backup modes: "all" = write a full backup bundle first,
    # "strip" = keep only the partial bundle needed to restore the
    # saved heads, anything else = no on-disk backup kept.
    cl = repo.changelog
    # TODO delete the undo files, and handle undo of merge sets
    striprev = cl.rev(node)

    keeppartialbundle = backup == 'strip'

    # Some revisions with rev > striprev may not be descendants of striprev.
    # We have to find these revisions and put them in a bundle, so that
    # we can restore them after the truncations.
    # To create the bundle we use repo.changegroupsubset which requires
    # the list of heads and bases of the set of interesting revisions.
    # (head = revision in the set that has no descendant in the set;
    #  base = revision in the set that has no ancestor in the set)
    tostrip = set((striprev,))
    saveheads = set()
    savebases = []
    for r in xrange(striprev + 1, len(cl)):
        parents = cl.parentrevs(r)
        if parents[0] in tostrip or parents[1] in tostrip:
            # r is a descendant of striprev
            tostrip.add(r)
            # if this is a merge and one of the parents does not descend
            # from striprev, mark that parent as a savehead.
            if parents[1] != nullrev:
                for p in parents:
                    if p not in tostrip and p > striprev:
                        saveheads.add(p)
        else:
            # if no parents of this revision will be stripped, mark it as
            # a savebase
            if parents[0] < striprev and parents[1] < striprev:
                savebases.append(cl.node(r))

            # r survives, so it shadows its parents as a head candidate
            saveheads.difference_update(parents)
            saveheads.add(r)

    saveheads = [cl.node(r) for r in saveheads]
    files = _collectfiles(repo, striprev)

    # filelog revisions whose linkrevs point below the truncation point
    extranodes = _collectextranodes(repo, files, striprev)

    # create a changegroup for all the branches we need to keep
    backupfile = None
    if backup == "all":
        backupfile = _bundle(repo, [node], cl.heads(), node, 'backup')
        repo.ui.status(_("saved backup bundle to %s\n") % backupfile)
    if saveheads or extranodes:
        # do not compress partial bundle if we remove it from disk later
        chgrpfile = _bundle(repo, savebases, saveheads, node, 'temp',
                            extranodes=extranodes, compress=keeppartialbundle)

    mfst = repo.manifest

    tr = repo.transaction("strip")
    offset = len(tr.entries)

    try:
        tr.startgroup()
        # truncate changelog, manifest and all affected filelogs
        cl.strip(striprev, tr)
        mfst.strip(striprev, tr)
        for fn in files:
            repo.file(fn).strip(striprev, tr)
        tr.endgroup()

        try:
            # replay the truncations recorded by the transaction
            for i in xrange(offset, len(tr.entries)):
                file, troffset, ignore = tr.entries[i]
                repo.sopener(file, 'a').truncate(troffset)
            tr.close()
        except:
            tr.abort()
            raise

        if saveheads or extranodes:
            # re-add the revisions we saved into the partial bundle
            ui.note(_("adding branch\n"))
            f = open(chgrpfile, "rb")
            gen = changegroup.readbundle(f, chgrpfile)
            if not repo.ui.verbose:
                # silence internal shuffling chatter
                repo.ui.pushbuffer()
            repo.addchangegroup(gen, 'strip', 'bundle:' + chgrpfile, True)
            if not repo.ui.verbose:
                repo.ui.popbuffer()
            f.close()
            if not keeppartialbundle:
                os.unlink(chgrpfile)
    except:
        # point the user at whichever bundle can restore their data
        if backupfile:
            ui.warn(_("strip failed, full bundle stored in '%s'\n")
                    % backupfile)
        elif saveheads:
            ui.warn(_("strip failed, partial bundle stored in '%s'\n")
                    % chgrpfile)
        raise

    repo.destroyed()
class repository(object):
    """Base class for repository peers; capability queries live here."""

    def capable(self, name):
        '''tell whether repo supports named capability.
        return False if not supported.
        if boolean capability, return True.
        if string capability, return string.'''
        caps = self.capabilities
        if name in caps:
            return True
        prefix = name + '='
        for cap in caps:
            if cap.startswith(prefix):
                # string-valued capability: return the value part
                return cap[len(prefix):]
        return False

    def requirecap(self, name, purpose):
        '''raise an exception if the given capability is not present'''
        if self.capable(name):
            return
        raise error.CapabilityError(
            _('cannot %s; remote repository does not '
              'support the %r capability') % (purpose, name))

    def local(self):
        # not a local repository by default; subclasses override
        return False

    def cancopy(self):
        return self.local()
+""" + +# import stuff from node for others to import from revlog +from node import bin, hex, nullid, nullrev, short #@UnusedImport +from i18n import _ +import changegroup, ancestor, mdiff, parsers, error, util +import struct, zlib, errno + +_pack = struct.pack +_unpack = struct.unpack +_compress = zlib.compress +_decompress = zlib.decompress +_sha = util.sha1 + +# revlog header flags +REVLOGV0 = 0 +REVLOGNG = 1 +REVLOGNGINLINEDATA = (1 << 16) +REVLOGSHALLOW = (1 << 17) +REVLOG_DEFAULT_FLAGS = REVLOGNGINLINEDATA +REVLOG_DEFAULT_FORMAT = REVLOGNG +REVLOG_DEFAULT_VERSION = REVLOG_DEFAULT_FORMAT | REVLOG_DEFAULT_FLAGS +REVLOGNG_FLAGS = REVLOGNGINLINEDATA | REVLOGSHALLOW + +# revlog index flags +REVIDX_PARENTDELTA = 1 +REVIDX_PUNCHED_FLAG = 2 +REVIDX_KNOWN_FLAGS = REVIDX_PUNCHED_FLAG | REVIDX_PARENTDELTA + +# amount of data read unconditionally, should be >= 4 +# when not inline: threshold for using lazy index +_prereadsize = 1048576 +# max size of revlog with inline data +_maxinline = 131072 + +RevlogError = error.RevlogError +LookupError = error.LookupError + +def getoffset(q): + return int(q >> 16) + +def gettype(q): + return int(q & 0xFFFF) + +def offset_type(offset, type): + return long(long(offset) << 16 | type) + +nullhash = _sha(nullid) + +def hash(text, p1, p2): + """generate a hash from the given text and its parent hashes + + This hash combines both the current file contents and its history + in a manner that makes it easy to distinguish nodes with the same + content in the revision graph. 
+ """ + # As of now, if one of the parent node is null, p2 is null + if p2 == nullid: + # deep copy of a hash is faster than creating one + s = nullhash.copy() + s.update(p1) + else: + # none of the parent nodes are nullid + l = [p1, p2] + l.sort() + s = _sha(l[0]) + s.update(l[1]) + s.update(text) + return s.digest() + +def compress(text): + """ generate a possibly-compressed representation of text """ + if not text: + return ("", text) + l = len(text) + bin = None + if l < 44: + pass + elif l > 1000000: + # zlib makes an internal copy, thus doubling memory usage for + # large files, so lets do this in pieces + z = zlib.compressobj() + p = [] + pos = 0 + while pos < l: + pos2 = pos + 2**20 + p.append(z.compress(text[pos:pos2])) + pos = pos2 + p.append(z.flush()) + if sum(map(len, p)) < l: + bin = "".join(p) + else: + bin = _compress(text) + if bin is None or len(bin) > l: + if text[0] == '\0': + return ("", text) + return ('u', text) + return ("", bin) + +def decompress(bin): + """ decompress the given input """ + if not bin: + return bin + t = bin[0] + if t == '\0': + return bin + if t == 'x': + return _decompress(bin) + if t == 'u': + return bin[1:] + raise RevlogError(_("unknown compression type %r") % t) + +class lazyparser(object): + """ + this class avoids the need to parse the entirety of large indices + """ + + # lazyparser is not safe to use on windows if win32 extensions not + # available. it keeps file handle open, which make it not possible + # to break hardlinks on local cloned repos. + + def __init__(self, dataf): + try: + size = util.fstat(dataf).st_size + except AttributeError: + size = 0 + self.dataf = dataf + self.s = struct.calcsize(indexformatng) + self.datasize = size + self.l = size // self.s + self.index = [None] * self.l + self.map = {nullid: nullrev} + self.allmap = 0 + self.all = 0 + self.mapfind_count = 0 + + def loadmap(self): + """ + during a commit, we need to make sure the rev being added is + not a duplicate. 
This requires loading the entire index, + which is fairly slow. loadmap can load up just the node map, + which takes much less time. + """ + if self.allmap: + return + end = self.datasize + self.allmap = 1 + cur = 0 + count = 0 + blocksize = self.s * 256 + self.dataf.seek(0) + while cur < end: + data = self.dataf.read(blocksize) + off = 0 + for x in xrange(256): + n = data[off + ngshaoffset:off + ngshaoffset + 20] + self.map[n] = count + count += 1 + if count >= self.l: + break + off += self.s + cur += blocksize + + def loadblock(self, blockstart, blocksize, data=None): + if self.all: + return + if data is None: + self.dataf.seek(blockstart) + if blockstart + blocksize > self.datasize: + # the revlog may have grown since we've started running, + # but we don't have space in self.index for more entries. + # limit blocksize so that we don't get too much data. + blocksize = max(self.datasize - blockstart, 0) + data = self.dataf.read(blocksize) + lend = len(data) // self.s + i = blockstart // self.s + off = 0 + # lazyindex supports __delitem__ + if lend > len(self.index) - i: + lend = len(self.index) - i + for x in xrange(lend): + if self.index[i + x] is None: + b = data[off : off + self.s] + self.index[i + x] = b + n = b[ngshaoffset:ngshaoffset + 20] + self.map[n] = i + x + off += self.s + + def findnode(self, node): + """search backwards through the index file for a specific node""" + if self.allmap: + return None + + # hg log will cause many many searches for the manifest + # nodes. After we get called a few times, just load the whole + # thing. 
+ if self.mapfind_count > 8: + self.loadmap() + if node in self.map: + return node + return None + self.mapfind_count += 1 + last = self.l - 1 + while self.index[last] != None: + if last == 0: + self.all = 1 + self.allmap = 1 + return None + last -= 1 + end = (last + 1) * self.s + blocksize = self.s * 256 + while end >= 0: + start = max(end - blocksize, 0) + self.dataf.seek(start) + data = self.dataf.read(end - start) + findend = end - start + while True: + # we're searching backwards, so we have to make sure + # we don't find a changeset where this node is a parent + off = data.find(node, 0, findend) + findend = off + if off >= 0: + i = off / self.s + off = i * self.s + n = data[off + ngshaoffset:off + ngshaoffset + 20] + if n == node: + self.map[n] = i + start / self.s + return node + else: + break + end -= blocksize + return None + + def loadindex(self, i=None, end=None): + if self.all: + return + all = False + if i is None: + blockstart = 0 + blocksize = (65536 / self.s) * self.s + end = self.datasize + all = True + else: + if end: + blockstart = i * self.s + end = end * self.s + blocksize = end - blockstart + else: + blockstart = (i & ~1023) * self.s + blocksize = self.s * 1024 + end = blockstart + blocksize + while blockstart < end: + self.loadblock(blockstart, blocksize) + blockstart += blocksize + if all: + self.all = True + +class lazyindex(object): + """a lazy version of the index array""" + def __init__(self, parser): + self.p = parser + def __len__(self): + return len(self.p.index) + def load(self, pos): + if pos < 0: + pos += len(self.p.index) + self.p.loadindex(pos) + return self.p.index[pos] + def __getitem__(self, pos): + return _unpack(indexformatng, self.p.index[pos] or self.load(pos)) + def __setitem__(self, pos, item): + self.p.index[pos] = _pack(indexformatng, *item) + def __delitem__(self, pos): + del self.p.index[pos] + def insert(self, pos, e): + self.p.index.insert(pos, _pack(indexformatng, *e)) + def append(self, e): + 
self.p.index.append(_pack(indexformatng, *e)) + +class lazymap(object): + """a lazy version of the node map""" + def __init__(self, parser): + self.p = parser + def load(self, key): + n = self.p.findnode(key) + if n is None: + raise KeyError(key) + def __contains__(self, key): + if key in self.p.map: + return True + self.p.loadmap() + return key in self.p.map + def __iter__(self): + yield nullid + for i, ret in enumerate(self.p.index): + if not ret: + self.p.loadindex(i) + ret = self.p.index[i] + if isinstance(ret, str): + ret = _unpack(indexformatng, ret) + yield ret[7] + def __getitem__(self, key): + try: + return self.p.map[key] + except KeyError: + try: + self.load(key) + return self.p.map[key] + except KeyError: + raise KeyError("node " + hex(key)) + def __setitem__(self, key, val): + self.p.map[key] = val + def __delitem__(self, key): + del self.p.map[key] + +indexformatv0 = ">4l20s20s20s" +v0shaoffset = 56 + +class revlogoldio(object): + def __init__(self): + self.size = struct.calcsize(indexformatv0) + + def parseindex(self, fp, data, inline): + s = self.size + index = [] + nodemap = {nullid: nullrev} + n = off = 0 + if len(data) == _prereadsize: + data += fp.read() # read the rest + l = len(data) + while off + s <= l: + cur = data[off:off + s] + off += s + e = _unpack(indexformatv0, cur) + # transform to revlogv1 format + e2 = (offset_type(e[0], 0), e[1], -1, e[2], e[3], + nodemap.get(e[4], nullrev), nodemap.get(e[5], nullrev), e[6]) + index.append(e2) + nodemap[e[6]] = n + n += 1 + + return index, nodemap, None + + def packentry(self, entry, node, version, rev): + if gettype(entry[0]): + raise RevlogError(_("index entry flags need RevlogNG")) + e2 = (getoffset(entry[0]), entry[1], entry[3], entry[4], + node(entry[5]), node(entry[6]), entry[7]) + return _pack(indexformatv0, *e2) + +# index ng: +# 6 bytes: offset +# 2 bytes: flags +# 4 bytes: compressed length +# 4 bytes: uncompressed length +# 4 bytes: base rev +# 4 bytes: link rev +# 4 bytes: parent 1 rev 
+# 4 bytes: parent 2 rev +# 32 bytes: nodeid +indexformatng = ">Qiiiiii20s12x" +ngshaoffset = 32 +versionformat = ">I" + +class revlogio(object): + def __init__(self): + self.size = struct.calcsize(indexformatng) + + def parseindex(self, fp, data, inline): + if len(data) == _prereadsize: + if util.openhardlinks() and not inline: + # big index, let's parse it on demand + parser = lazyparser(fp) + index = lazyindex(parser) + nodemap = lazymap(parser) + e = list(index[0]) + type = gettype(e[0]) + e[0] = offset_type(0, type) + index[0] = e + return index, nodemap, None + else: + data += fp.read() + + # call the C implementation to parse the index data + index, nodemap, cache = parsers.parse_index(data, inline) + return index, nodemap, cache + + def packentry(self, entry, node, version, rev): + p = _pack(indexformatng, *entry) + if rev == 0: + p = _pack(versionformat, version) + p[4:] + return p + +class revlog(object): + """ + the underlying revision storage object + + A revlog consists of two parts, an index and the revision data. + + The index is a file with a fixed record size containing + information on each revision, including its nodeid (hash), the + nodeids of its parents, the position and offset of its data within + the data file, and the revision it's based on. Finally, each entry + contains a linkrev entry that can serve as a pointer to external + data. + + The revision data itself is a linear collection of data chunks. + Each chunk represents a revision and is usually represented as a + delta against the previous chunk. To bound lookup time, runs of + deltas are limited to about 2 times the length of the original + version data. This makes retrieval of a version proportional to + its size, or O(1) relative to the number of revisions. + + Both pieces of the revlog are written to in an append-only + fashion, which means we never need to rewrite a file to insert or + remove data, and can use some simple techniques to avoid the need + for locking while reading. 
+ """ + def __init__(self, opener, indexfile, shallowroot=None): + """ + create a revlog object + + opener is a function that abstracts the file opening operation + and can be used to implement COW semantics or the like. + """ + self.indexfile = indexfile + self.datafile = indexfile[:-2] + ".d" + self.opener = opener + self._cache = None + self._chunkcache = (0, '') + self.nodemap = {nullid: nullrev} + self.index = [] + self._shallowroot = shallowroot + self._parentdelta = 0 + + v = REVLOG_DEFAULT_VERSION + if hasattr(opener, 'options') and 'defversion' in opener.options: + v = opener.options['defversion'] + if v & REVLOGNG: + v |= REVLOGNGINLINEDATA + if v & REVLOGNG and 'parentdelta' in opener.options: + self._parentdelta = 1 + + if shallowroot: + v |= REVLOGSHALLOW + + i = '' + try: + f = self.opener(self.indexfile) + if "nonlazy" in getattr(self.opener, 'options', {}): + i = f.read() + else: + i = f.read(_prereadsize) + if len(i) > 0: + v = struct.unpack(versionformat, i[:4])[0] + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + + self.version = v + self._inline = v & REVLOGNGINLINEDATA + self._shallow = v & REVLOGSHALLOW + flags = v & ~0xFFFF + fmt = v & 0xFFFF + if fmt == REVLOGV0 and flags: + raise RevlogError(_("index %s unknown flags %#04x for format v0") + % (self.indexfile, flags >> 16)) + elif fmt == REVLOGNG and flags & ~REVLOGNG_FLAGS: + raise RevlogError(_("index %s unknown flags %#04x for revlogng") + % (self.indexfile, flags >> 16)) + elif fmt > REVLOGNG: + raise RevlogError(_("index %s unknown format %d") + % (self.indexfile, fmt)) + + self._io = revlogio() + if self.version == REVLOGV0: + self._io = revlogoldio() + if i: + try: + d = self._io.parseindex(f, i, self._inline) + except (ValueError, IndexError): + raise RevlogError(_("index %s is corrupted") % (self.indexfile)) + self.index, self.nodemap, self._chunkcache = d + if not self._chunkcache: + self._chunkclear() + + # add the magic null revision at -1 (if it hasn't been done 
already) + if (self.index == [] or isinstance(self.index, lazyindex) or + self.index[-1][7] != nullid) : + self.index.append((0, 0, 0, -1, -1, -1, -1, nullid)) + + def _loadindex(self, start, end): + """load a block of indexes all at once from the lazy parser""" + if isinstance(self.index, lazyindex): + self.index.p.loadindex(start, end) + + def _loadindexmap(self): + """loads both the map and the index from the lazy parser""" + if isinstance(self.index, lazyindex): + p = self.index.p + p.loadindex() + self.nodemap = p.map + + def _loadmap(self): + """loads the map from the lazy parser""" + if isinstance(self.nodemap, lazymap): + self.nodemap.p.loadmap() + self.nodemap = self.nodemap.p.map + + def tip(self): + return self.node(len(self.index) - 2) + def __len__(self): + return len(self.index) - 1 + def __iter__(self): + for i in xrange(len(self)): + yield i + def rev(self, node): + try: + return self.nodemap[node] + except KeyError: + raise LookupError(node, self.indexfile, _('no node')) + def node(self, rev): + return self.index[rev][7] + def linkrev(self, rev): + return self.index[rev][4] + def parents(self, node): + i = self.index + d = i[self.rev(node)] + return i[d[5]][7], i[d[6]][7] # map revisions to nodes inline + def parentrevs(self, rev): + return self.index[rev][5:7] + def start(self, rev): + return int(self.index[rev][0] >> 16) + def end(self, rev): + return self.start(rev) + self.length(rev) + def length(self, rev): + return self.index[rev][1] + def base(self, rev): + return self.index[rev][3] + def flags(self, rev): + return self.index[rev][0] & 0xFFFF + def rawsize(self, rev): + """return the length of the uncompressed text for a given revision""" + l = self.index[rev][2] + if l >= 0: + return l + + t = self.revision(self.node(rev)) + return len(t) + size = rawsize + + def reachable(self, node, stop=None): + """return the set of all nodes ancestral to a given node, including + the node itself, stopping when stop is matched""" + reachable = 
set((node,)) + visit = [node] + if stop: + stopn = self.rev(stop) + else: + stopn = 0 + while visit: + n = visit.pop(0) + if n == stop: + continue + if n == nullid: + continue + for p in self.parents(n): + if self.rev(p) < stopn: + continue + if p not in reachable: + reachable.add(p) + visit.append(p) + return reachable + + def ancestors(self, *revs): + """Generate the ancestors of 'revs' in reverse topological order. + + Yield a sequence of revision numbers starting with the parents + of each revision in revs, i.e., each revision is *not* considered + an ancestor of itself. Results are in breadth-first order: + parents of each rev in revs, then parents of those, etc. Result + does not include the null revision.""" + visit = list(revs) + seen = set([nullrev]) + while visit: + for parent in self.parentrevs(visit.pop(0)): + if parent not in seen: + visit.append(parent) + seen.add(parent) + yield parent + + def descendants(self, *revs): + """Generate the descendants of 'revs' in revision order. + + Yield a sequence of revision numbers starting with a child of + some rev in revs, i.e., each revision is *not* considered a + descendant of itself. Results are ordered by revision number (a + topological sort).""" + first = min(revs) + if first == nullrev: + for i in self: + yield i + return + + seen = set(revs) + for i in xrange(first + 1, len(self)): + for x in self.parentrevs(i): + if x != nullrev and x in seen: + seen.add(i) + yield i + break + + def findmissing(self, common=None, heads=None): + """Return the ancestors of heads that are not ancestors of common. + + More specifically, return a list of nodes N such that every N + satisfies the following constraints: + + 1. N is an ancestor of some node in 'heads' + 2. N is not an ancestor of any node in 'common' + + The list is sorted by revision number, meaning it is + topologically sorted. + + 'heads' and 'common' are both lists of node IDs. If heads is + not supplied, uses all of the revlog's heads. 
If common is not + supplied, uses nullid.""" + if common is None: + common = [nullid] + if heads is None: + heads = self.heads() + + common = [self.rev(n) for n in common] + heads = [self.rev(n) for n in heads] + + # we want the ancestors, but inclusive + has = set(self.ancestors(*common)) + has.add(nullrev) + has.update(common) + + # take all ancestors from heads that aren't in has + missing = set() + visit = [r for r in heads if r not in has] + while visit: + r = visit.pop(0) + if r in missing: + continue + else: + missing.add(r) + for p in self.parentrevs(r): + if p not in has: + visit.append(p) + missing = list(missing) + missing.sort() + return [self.node(r) for r in missing] + + def nodesbetween(self, roots=None, heads=None): + """Return a topological path from 'roots' to 'heads'. + + Return a tuple (nodes, outroots, outheads) where 'nodes' is a + topologically sorted list of all nodes N that satisfy both of + these constraints: + + 1. N is a descendant of some node in 'roots' + 2. N is an ancestor of some node in 'heads' + + Every node is considered to be both a descendant and an ancestor + of itself, so every reachable node in 'roots' and 'heads' will be + included in 'nodes'. + + 'outroots' is the list of reachable nodes in 'roots', i.e., the + subset of 'roots' that is returned in 'nodes'. Likewise, + 'outheads' is the subset of 'heads' that is also in 'nodes'. + + 'roots' and 'heads' are both lists of node IDs. If 'roots' is + unspecified, uses nullid as the only root. If 'heads' is + unspecified, uses list of all of the revlog's heads.""" + nonodes = ([], [], []) + if roots is not None: + roots = list(roots) + if not roots: + return nonodes + lowestrev = min([self.rev(n) for n in roots]) + else: + roots = [nullid] # Everybody's a descendent of nullid + lowestrev = nullrev + if (lowestrev == nullrev) and (heads is None): + # We want _all_ the nodes! 
+ return ([self.node(r) for r in self], [nullid], list(self.heads())) + if heads is None: + # All nodes are ancestors, so the latest ancestor is the last + # node. + highestrev = len(self) - 1 + # Set ancestors to None to signal that every node is an ancestor. + ancestors = None + # Set heads to an empty dictionary for later discovery of heads + heads = {} + else: + heads = list(heads) + if not heads: + return nonodes + ancestors = set() + # Turn heads into a dictionary so we can remove 'fake' heads. + # Also, later we will be using it to filter out the heads we can't + # find from roots. + heads = dict.fromkeys(heads, 0) + # Start at the top and keep marking parents until we're done. + nodestotag = set(heads) + # Remember where the top was so we can use it as a limit later. + highestrev = max([self.rev(n) for n in nodestotag]) + while nodestotag: + # grab a node to tag + n = nodestotag.pop() + # Never tag nullid + if n == nullid: + continue + # A node's revision number represents its place in a + # topologically sorted list of nodes. + r = self.rev(n) + if r >= lowestrev: + if n not in ancestors: + # If we are possibly a descendent of one of the roots + # and we haven't already been marked as an ancestor + ancestors.add(n) # Mark as ancestor + # Add non-nullid parents to list of nodes to tag. + nodestotag.update([p for p in self.parents(n) if + p != nullid]) + elif n in heads: # We've seen it before, is it a fake head? + # So it is, real heads should not be the ancestors of + # any other heads. + heads.pop(n) + if not ancestors: + return nonodes + # Now that we have our set of ancestors, we want to remove any + # roots that are not ancestors. + + # If one of the roots was nullid, everything is included anyway. + if lowestrev > nullrev: + # But, since we weren't, let's recompute the lowest rev to not + # include roots that aren't ancestors. 
+ + # Filter out roots that aren't ancestors of heads + roots = [n for n in roots if n in ancestors] + # Recompute the lowest revision + if roots: + lowestrev = min([self.rev(n) for n in roots]) + else: + # No more roots? Return empty list + return nonodes + else: + # We are descending from nullid, and don't need to care about + # any other roots. + lowestrev = nullrev + roots = [nullid] + # Transform our roots list into a set. + descendents = set(roots) + # Also, keep the original roots so we can filter out roots that aren't + # 'real' roots (i.e. are descended from other roots). + roots = descendents.copy() + # Our topologically sorted list of output nodes. + orderedout = [] + # Don't start at nullid since we don't want nullid in our output list, + # and if nullid shows up in descedents, empty parents will look like + # they're descendents. + for r in xrange(max(lowestrev, 0), highestrev + 1): + n = self.node(r) + isdescendent = False + if lowestrev == nullrev: # Everybody is a descendent of nullid + isdescendent = True + elif n in descendents: + # n is already a descendent + isdescendent = True + # This check only needs to be done here because all the roots + # will start being marked is descendents before the loop. + if n in roots: + # If n was a root, check if it's a 'real' root. + p = tuple(self.parents(n)) + # If any of its parents are descendents, it's not a root. + if (p[0] in descendents) or (p[1] in descendents): + roots.remove(n) + else: + p = tuple(self.parents(n)) + # A node is a descendent if either of its parents are + # descendents. (We seeded the dependents list with the roots + # up there, remember?) + if (p[0] in descendents) or (p[1] in descendents): + descendents.add(n) + isdescendent = True + if isdescendent and ((ancestors is None) or (n in ancestors)): + # Only include nodes that are both descendents and ancestors. 
+ orderedout.append(n) + if (ancestors is not None) and (n in heads): + # We're trying to figure out which heads are reachable + # from roots. + # Mark this head as having been reached + heads[n] = 1 + elif ancestors is None: + # Otherwise, we're trying to discover the heads. + # Assume this is a head because if it isn't, the next step + # will eventually remove it. + heads[n] = 1 + # But, obviously its parents aren't. + for p in self.parents(n): + heads.pop(p, None) + heads = [n for n in heads.iterkeys() if heads[n] != 0] + roots = list(roots) + assert orderedout + assert roots + assert heads + return (orderedout, roots, heads) + + def heads(self, start=None, stop=None): + """return the list of all nodes that have no children + + if start is specified, only heads that are descendants of + start will be returned + if stop is specified, it will consider all the revs from stop + as if they had no children + """ + if start is None and stop is None: + count = len(self) + if not count: + return [nullid] + ishead = [1] * (count + 1) + index = self.index + for r in xrange(count): + e = index[r] + ishead[e[5]] = ishead[e[6]] = 0 + return [self.node(r) for r in xrange(count) if ishead[r]] + + if start is None: + start = nullid + if stop is None: + stop = [] + stoprevs = set([self.rev(n) for n in stop]) + startrev = self.rev(start) + reachable = set((startrev,)) + heads = set((startrev,)) + + parentrevs = self.parentrevs + for r in xrange(startrev + 1, len(self)): + for p in parentrevs(r): + if p in reachable: + if r not in stoprevs: + reachable.add(r) + heads.add(r) + if p in heads and p not in stoprevs: + heads.remove(p) + + return [self.node(r) for r in heads] + + def children(self, node): + """find the children of a given node""" + c = [] + p = self.rev(node) + for r in range(p + 1, len(self)): + prevs = [pr for pr in self.parentrevs(r) if pr != nullrev] + if prevs: + for pr in prevs: + if pr == p: + c.append(self.node(r)) + elif p == nullrev: + c.append(self.node(r)) + 
return c + + def descendant(self, start, end): + if start == nullrev: + return True + for i in self.descendants(start): + if i == end: + return True + elif i > end: + break + return False + + def ancestor(self, a, b): + """calculate the least common ancestor of nodes a and b""" + + # fast path, check if it is a descendant + a, b = self.rev(a), self.rev(b) + start, end = sorted((a, b)) + if self.descendant(start, end): + return self.node(start) + + def parents(rev): + return [p for p in self.parentrevs(rev) if p != nullrev] + + c = ancestor.ancestor(a, b, parents) + if c is None: + return nullid + + return self.node(c) + + def _match(self, id): + if isinstance(id, (long, int)): + # rev + return self.node(id) + if len(id) == 20: + # possibly a binary node + # odds of a binary node being all hex in ASCII are 1 in 10**25 + try: + node = id + self.rev(node) # quick search the index + return node + except LookupError: + pass # may be partial hex id + try: + # str(rev) + rev = int(id) + if str(rev) != id: + raise ValueError + if rev < 0: + rev = len(self) + rev + if rev < 0 or rev >= len(self): + raise ValueError + return self.node(rev) + except (ValueError, OverflowError): + pass + if len(id) == 40: + try: + # a full hex nodeid? + node = bin(id) + self.rev(node) + return node + except (TypeError, LookupError): + pass + + def _partialmatch(self, id): + if len(id) < 40: + try: + # hex(node)[:...] 
+ l = len(id) // 2 # grab an even number of digits + bin_id = bin(id[:l * 2]) + nl = [n for n in self.nodemap if n[:l] == bin_id] + nl = [n for n in nl if hex(n).startswith(id)] + if len(nl) > 0: + if len(nl) == 1: + return nl[0] + raise LookupError(id, self.indexfile, + _('ambiguous identifier')) + return None + except TypeError: + pass + + def lookup(self, id): + """locate a node based on: + - revision number or str(revision number) + - nodeid or subset of hex nodeid + """ + n = self._match(id) + if n is not None: + return n + n = self._partialmatch(id) + if n: + return n + + raise LookupError(id, self.indexfile, _('no match found')) + + def cmp(self, node, text): + """compare text with a given file revision + + returns True if text is different than what is stored. + """ + p1, p2 = self.parents(node) + return hash(text, p1, p2) != node + + def _addchunk(self, offset, data): + o, d = self._chunkcache + # try to add to existing cache + if o + len(d) == offset and len(d) + len(data) < _prereadsize: + self._chunkcache = o, d + data + else: + self._chunkcache = offset, data + + def _loadchunk(self, offset, length): + if self._inline: + df = self.opener(self.indexfile) + else: + df = self.opener(self.datafile) + + readahead = max(65536, length) + df.seek(offset) + d = df.read(readahead) + self._addchunk(offset, d) + if readahead > length: + return d[:length] + return d + + def _getchunk(self, offset, length): + o, d = self._chunkcache + l = len(d) + + # is it in the cache? 
+ cachestart = offset - o + cacheend = cachestart + length + if cachestart >= 0 and cacheend <= l: + if cachestart == 0 and cacheend == l: + return d # avoid a copy + return d[cachestart:cacheend] + + return self._loadchunk(offset, length) + + def _chunkraw(self, startrev, endrev): + start = self.start(startrev) + length = self.end(endrev) - start + if self._inline: + start += (startrev + 1) * self._io.size + return self._getchunk(start, length) + + def _chunk(self, rev): + return decompress(self._chunkraw(rev, rev)) + + def _chunkclear(self): + self._chunkcache = (0, '') + + def deltaparent(self, rev): + """return previous revision or parentrev according to flags""" + if self.flags(rev) & REVIDX_PARENTDELTA: + return self.parentrevs(rev)[0] + else: + return rev - 1 + + def revdiff(self, rev1, rev2): + """return or calculate a delta between two revisions""" + if self.base(rev2) != rev2 and self.deltaparent(rev2) == rev1: + return self._chunk(rev2) + + return mdiff.textdiff(self.revision(self.node(rev1)), + self.revision(self.node(rev2))) + + def revision(self, node): + """return an uncompressed revision of a given node""" + cachedrev = None + if node == nullid: + return "" + if self._cache: + if self._cache[0] == node: + return self._cache[2] + cachedrev = self._cache[1] + + # look up what we need to read + text = None + rev = self.rev(node) + base = self.base(rev) + + # check rev flags + if self.flags(rev) & ~REVIDX_KNOWN_FLAGS: + raise RevlogError(_('incompatible revision flag %x') % + (self.flags(rev) & ~REVIDX_KNOWN_FLAGS)) + + # build delta chain + self._loadindex(base, rev + 1) + chain = [] + index = self.index # for performance + iterrev = rev + e = index[iterrev] + while iterrev != base and iterrev != cachedrev: + chain.append(iterrev) + if e[0] & REVIDX_PARENTDELTA: + iterrev = e[5] + else: + iterrev -= 1 + e = index[iterrev] + chain.reverse() + base = iterrev + + if iterrev == cachedrev: + # cache hit + text = self._cache[2] + + # drop cache to save 
memory + self._cache = None + + self._chunkraw(base, rev) + if text is None: + text = self._chunk(base) + + bins = [self._chunk(r) for r in chain] + text = mdiff.patches(text, bins) + p1, p2 = self.parents(node) + if (node != hash(text, p1, p2) and + not (self.flags(rev) & REVIDX_PUNCHED_FLAG)): + raise RevlogError(_("integrity check failed on %s:%d") + % (self.indexfile, rev)) + + self._cache = (node, rev, text) + return text + + def checkinlinesize(self, tr, fp=None): + if not self._inline or (self.start(-2) + self.length(-2)) < _maxinline: + return + + trinfo = tr.find(self.indexfile) + if trinfo is None: + raise RevlogError(_("%s not found in the transaction") + % self.indexfile) + + trindex = trinfo[2] + dataoff = self.start(trindex) + + tr.add(self.datafile, dataoff) + + if fp: + fp.flush() + fp.close() + + df = self.opener(self.datafile, 'w') + try: + for r in self: + df.write(self._chunkraw(r, r)) + finally: + df.close() + + fp = self.opener(self.indexfile, 'w', atomictemp=True) + self.version &= ~(REVLOGNGINLINEDATA) + self._inline = False + for i in self: + e = self._io.packentry(self.index[i], self.node, self.version, i) + fp.write(e) + + # if we don't call rename, the temp file will never replace the + # real index + fp.rename() + + tr.replace(self.indexfile, trindex * self._io.size) + self._chunkclear() + + def addrevision(self, text, transaction, link, p1, p2, cachedelta=None): + """add a revision to the log + + text - the revision data to add + transaction - the transaction object used for rollback + link - the linkrev data to add + p1, p2 - the parent nodeids of the revision + cachedelta - an optional precomputed delta + """ + node = hash(text, p1, p2) + if (node in self.nodemap and + (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)): + return node + + dfh = None + if not self._inline: + dfh = self.opener(self.datafile, "a") + ifh = self.opener(self.indexfile, "a+") + try: + return self._addrevision(node, text, transaction, link, p1, p2, + 
cachedelta, ifh, dfh) + finally: + if dfh: + dfh.close() + ifh.close() + + def _addrevision(self, node, text, transaction, link, p1, p2, + cachedelta, ifh, dfh): + + btext = [text] + def buildtext(): + if btext[0] is not None: + return btext[0] + # flush any pending writes here so we can read it in revision + if dfh: + dfh.flush() + ifh.flush() + basetext = self.revision(self.node(cachedelta[0])) + btext[0] = mdiff.patch(basetext, cachedelta[1]) + chk = hash(btext[0], p1, p2) + if chk != node: + raise RevlogError(_("consistency error in delta")) + return btext[0] + + def builddelta(rev): + # can we use the cached delta? + if cachedelta and cachedelta[0] == rev: + delta = cachedelta[1] + else: + t = buildtext() + ptext = self.revision(self.node(rev)) + delta = mdiff.textdiff(ptext, t) + data = compress(delta) + l = len(data[1]) + len(data[0]) + base = self.base(rev) + dist = l + offset - self.start(base) + return dist, l, data, base + + curr = len(self) + prev = curr - 1 + base = curr + offset = self.end(prev) + flags = 0 + d = None + p1r, p2r = self.rev(p1), self.rev(p2) + + # should we try to build a delta? 
+ if prev != nullrev: + d = builddelta(prev) + if self._parentdelta and prev != p1r: + d2 = builddelta(p1r) + if d2 < d: + d = d2 + flags = REVIDX_PARENTDELTA + dist, l, data, base = d + + # full versions are inserted when the needed deltas + # become comparable to the uncompressed text + # or the base revision is punched + if text is None: + textlen = mdiff.patchedsize(self.rawsize(cachedelta[0]), + cachedelta[1]) + else: + textlen = len(text) + if (d is None or dist > textlen * 2 or + (self.flags(base) & REVIDX_PUNCHED_FLAG)): + text = buildtext() + data = compress(text) + l = len(data[1]) + len(data[0]) + base = curr + + e = (offset_type(offset, flags), l, textlen, + base, link, p1r, p2r, node) + self.index.insert(-1, e) + self.nodemap[node] = curr + + entry = self._io.packentry(e, self.node, self.version, curr) + if not self._inline: + transaction.add(self.datafile, offset) + transaction.add(self.indexfile, curr * len(entry)) + if data[0]: + dfh.write(data[0]) + dfh.write(data[1]) + dfh.flush() + ifh.write(entry) + else: + offset += curr * self._io.size + transaction.add(self.indexfile, offset, curr) + ifh.write(entry) + ifh.write(data[0]) + ifh.write(data[1]) + self.checkinlinesize(transaction, ifh) + + if type(text) == str: # only accept immutable objects + self._cache = (node, curr, text) + return node + + def group(self, nodelist, lookup, infocollect=None, fullrev=False): + """Calculate a delta group, yielding a sequence of changegroup chunks + (strings). + + Given a list of changeset revs, return a set of deltas and + metadata corresponding to nodes. The first delta is + first parent(nodelist[0]) -> nodelist[0], the receiver is + guaranteed to have this parent as it has all history before + these changesets. In the case firstparent is nullrev the + changegroup starts with a full revision. + fullrev forces the insertion of the full revision, necessary + in the case of shallow clones where the first parent might + not exist at the reciever. 
+ """ + + revs = [self.rev(n) for n in nodelist] + + # if we don't have any revisions touched by these changesets, bail + if not revs: + yield changegroup.closechunk() + return + + # add the parent of the first rev + p = self.parentrevs(revs[0])[0] + revs.insert(0, p) + if p == nullrev: + fullrev = True + + # build deltas + for d in xrange(len(revs) - 1): + a, b = revs[d], revs[d + 1] + nb = self.node(b) + + if infocollect is not None: + infocollect(nb) + + p = self.parents(nb) + meta = nb + p[0] + p[1] + lookup(nb) + if fullrev: + d = self.revision(nb) + meta += mdiff.trivialdiffheader(len(d)) + fullrev = False + else: + d = self.revdiff(a, b) + yield changegroup.chunkheader(len(meta) + len(d)) + yield meta + yield d + + yield changegroup.closechunk() + + def addgroup(self, bundle, linkmapper, transaction): + """ + add a delta group + + given a set of deltas, add them to the revision log. the + first delta is against its parent, which should be in our + log, the rest are against the previous delta. 
+ """ + + # track the base of the current delta log + node = None + + r = len(self) + end = 0 + if r: + end = self.end(r - 1) + ifh = self.opener(self.indexfile, "a+") + isize = r * self._io.size + if self._inline: + transaction.add(self.indexfile, end + isize, r) + dfh = None + else: + transaction.add(self.indexfile, isize, r) + transaction.add(self.datafile, end) + dfh = self.opener(self.datafile, "a") + + try: + # loop through our set of deltas + chain = None + while 1: + chunkdata = bundle.parsechunk() + if not chunkdata: + break + node = chunkdata['node'] + p1 = chunkdata['p1'] + p2 = chunkdata['p2'] + cs = chunkdata['cs'] + delta = chunkdata['data'] + + link = linkmapper(cs) + if (node in self.nodemap and + (not self.flags(self.rev(node)) & REVIDX_PUNCHED_FLAG)): + # this can happen if two branches make the same change + chain = node + continue + + for p in (p1, p2): + if not p in self.nodemap: + if self._shallow: + # add null entries for missing parents + # XXX FIXME + #if base == nullrev: + # base = len(self) + #e = (offset_type(end, REVIDX_PUNCHED_FLAG), + # 0, 0, base, nullrev, nullrev, nullrev, p) + #self.index.insert(-1, e) + #self.nodemap[p] = r + #entry = self._io.packentry(e, self.node, + # self.version, r) + #ifh.write(entry) + #t, r = r, r + 1 + raise LookupError(p, self.indexfile, + _('unknown parent')) + else: + raise LookupError(p, self.indexfile, + _('unknown parent')) + + if not chain: + # retrieve the parent revision of the delta chain + chain = p1 + if not chain in self.nodemap: + raise LookupError(chain, self.indexfile, _('unknown base')) + + chainrev = self.rev(chain) + chain = self._addrevision(node, None, transaction, link, + p1, p2, (chainrev, delta), ifh, dfh) + if not dfh and not self._inline: + # addrevision switched from inline to conventional + # reopen the index + dfh = self.opener(self.datafile, "a") + ifh = self.opener(self.indexfile, "a") + finally: + if dfh: + dfh.close() + ifh.close() + + return node + + def strip(self, 
minlink, transaction): + """truncate the revlog on the first revision with a linkrev >= minlink + + This function is called when we're stripping revision minlink and + its descendants from the repository. + + We have to remove all revisions with linkrev >= minlink, because + the equivalent changelog revisions will be renumbered after the + strip. + + So we truncate the revlog on the first of these revisions, and + trust that the caller has saved the revisions that shouldn't be + removed and that it'll readd them after this truncation. + """ + if len(self) == 0: + return + + if isinstance(self.index, lazyindex): + self._loadindexmap() + + for rev in self: + if self.index[rev][4] >= minlink: + break + else: + return + + # first truncate the files on disk + end = self.start(rev) + if not self._inline: + transaction.add(self.datafile, end) + end = rev * self._io.size + else: + end += rev * self._io.size + + transaction.add(self.indexfile, end) + + # then reset internal state in memory to forget those revisions + self._cache = None + self._chunkclear() + for x in xrange(rev, len(self)): + del self.nodemap[self.node(x)] + + del self.index[rev:-1] + + def checksize(self): + expected = 0 + if len(self): + expected = max(0, self.end(len(self) - 1)) + + try: + f = self.opener(self.datafile) + f.seek(0, 2) + actual = f.tell() + dd = actual - expected + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + dd = 0 + + try: + f = self.opener(self.indexfile) + f.seek(0, 2) + actual = f.tell() + s = self._io.size + i = max(0, actual // s) + di = actual - (i * s) + if self._inline: + databytes = 0 + for r in self: + databytes += max(0, self.length(r)) + dd = 0 + di = actual - len(self) * s - databytes + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + di = 0 + + return (dd, di) + + def files(self): + res = [self.indexfile] + if not self._inline: + res.append(self.datafile) + return res diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revlog.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revlog.pyo Binary files differnew file mode 100644 index 0000000..4df1c76 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revlog.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revset.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revset.py new file mode 100644 index 0000000..7adc768 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revset.py @@ -0,0 +1,797 @@ +# revset.py - revision set queries for mercurial +# +# Copyright 2010 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import re +import parser, util, error, discovery +import match as matchmod +from i18n import _, gettext + +elements = { + "(": (20, ("group", 1, ")"), ("func", 1, ")")), + "-": (5, ("negate", 19), ("minus", 5)), + "::": (17, ("dagrangepre", 17), ("dagrange", 17), + ("dagrangepost", 17)), + "..": (17, ("dagrangepre", 17), ("dagrange", 17), + ("dagrangepost", 17)), + ":": (15, ("rangepre", 15), ("range", 15), ("rangepost", 15)), + "not": (10, ("not", 10)), + "!": (10, ("not", 10)), + "and": (5, None, ("and", 5)), + "&": (5, None, ("and", 5)), + "or": (4, None, ("or", 4)), + "|": (4, None, ("or", 4)), + "+": (4, None, ("or", 4)), + ",": (2, None, ("list", 2)), + ")": (0, None, None), + "symbol": (0, ("symbol",), None), + "string": (0, ("string",), None), + "end": (0, None, None), +} + +keywords = set(['and', 'or', 'not']) + +def tokenize(program): + pos, l = 0, len(program) + while pos < l: + c = program[pos] + if c.isspace(): # skip inter-token whitespace + pass + elif c == ':' and program[pos:pos + 2] == '::': # look ahead carefully + yield ('::', None, pos) + pos += 1 # skip ahead + elif c == '.' 
and program[pos:pos + 2] == '..': # look ahead carefully + yield ('..', None, pos) + pos += 1 # skip ahead + elif c in "():,-|&+!": # handle simple operators + yield (c, None, pos) + elif (c in '"\'' or c == 'r' and + program[pos:pos + 2] in ("r'", 'r"')): # handle quoted strings + if c == 'r': + pos += 1 + c = program[pos] + decode = lambda x: x + else: + decode = lambda x: x.decode('string-escape') + pos += 1 + s = pos + while pos < l: # find closing quote + d = program[pos] + if d == '\\': # skip over escaped characters + pos += 2 + continue + if d == c: + yield ('string', decode(program[s:pos]), s) + break + pos += 1 + else: + raise error.ParseError(_("unterminated string"), s) + elif c.isalnum() or c in '._' or ord(c) > 127: # gather up a symbol/keyword + s = pos + pos += 1 + while pos < l: # find end of symbol + d = program[pos] + if not (d.isalnum() or d in "._" or ord(d) > 127): + break + if d == '.' and program[pos - 1] == '.': # special case for .. + pos -= 1 + break + pos += 1 + sym = program[s:pos] + if sym in keywords: # operator keywords + yield (sym, None, s) + else: + yield ('symbol', sym, s) + pos -= 1 + else: + raise error.ParseError(_("syntax error"), pos) + pos += 1 + yield ('end', None, pos) + +# helpers + +def getstring(x, err): + if x and (x[0] == 'string' or x[0] == 'symbol'): + return x[1] + raise error.ParseError(err) + +def getlist(x): + if not x: + return [] + if x[0] == 'list': + return getlist(x[1]) + [x[2]] + return [x] + +def getargs(x, min, max, err): + l = getlist(x) + if len(l) < min or len(l) > max: + raise error.ParseError(err) + return l + +def getset(repo, subset, x): + if not x: + raise error.ParseError(_("missing argument")) + return methods[x[0]](repo, subset, *x[1:]) + +# operator methods + +def stringset(repo, subset, x): + x = repo[x].rev() + if x == -1 and len(subset) == len(repo): + return [-1] + if x in subset: + return [x] + return [] + +def symbolset(repo, subset, x): + if x in symbols: + raise 
error.ParseError(_("can't use %s here") % x) + return stringset(repo, subset, x) + +def rangeset(repo, subset, x, y): + m = getset(repo, subset, x) + if not m: + m = getset(repo, range(len(repo)), x) + + n = getset(repo, subset, y) + if not n: + n = getset(repo, range(len(repo)), y) + + if not m or not n: + return [] + m, n = m[0], n[-1] + + if m < n: + r = range(m, n + 1) + else: + r = range(m, n - 1, -1) + s = set(subset) + return [x for x in r if x in s] + +def andset(repo, subset, x, y): + return getset(repo, getset(repo, subset, x), y) + +def orset(repo, subset, x, y): + s = set(getset(repo, subset, x)) + s |= set(getset(repo, [r for r in subset if r not in s], y)) + return [r for r in subset if r in s] + +def notset(repo, subset, x): + s = set(getset(repo, subset, x)) + return [r for r in subset if r not in s] + +def listset(repo, subset, a, b): + raise error.ParseError(_("can't use a list in this context")) + +def func(repo, subset, a, b): + if a[0] == 'symbol' and a[1] in symbols: + return symbols[a[1]](repo, subset, b) + raise error.ParseError(_("not a function: %s") % a[1]) + +# functions + +def node(repo, subset, x): + """``id(string)`` + Revision non-ambiguously specified by the given hex string prefix. + """ + # i18n: "id" is a keyword + l = getargs(x, 1, 1, _("id requires one argument")) + # i18n: "id" is a keyword + n = getstring(l[0], _("id requires a string")) + if len(n) == 40: + rn = repo[n].rev() + else: + rn = repo.changelog.rev(repo.changelog._partialmatch(n)) + return [r for r in subset if r == rn] + +def rev(repo, subset, x): + """``rev(number)`` + Revision with the given numeric identifier. 
+ """ + # i18n: "rev" is a keyword + l = getargs(x, 1, 1, _("rev requires one argument")) + try: + # i18n: "rev" is a keyword + l = int(getstring(l[0], _("rev requires a number"))) + except ValueError: + # i18n: "rev" is a keyword + raise error.ParseError(_("rev expects a number")) + return [r for r in subset if r == l] + +def p1(repo, subset, x): + """``p1(set)`` + First parent of changesets in set. + """ + ps = set() + cl = repo.changelog + for r in getset(repo, range(len(repo)), x): + ps.add(cl.parentrevs(r)[0]) + return [r for r in subset if r in ps] + +def p2(repo, subset, x): + """``p2(set)`` + Second parent of changesets in set. + """ + ps = set() + cl = repo.changelog + for r in getset(repo, range(len(repo)), x): + ps.add(cl.parentrevs(r)[1]) + return [r for r in subset if r in ps] + +def parents(repo, subset, x): + """``parents(set)`` + The set of all parents for all changesets in set. + """ + ps = set() + cl = repo.changelog + for r in getset(repo, range(len(repo)), x): + ps.update(cl.parentrevs(r)) + return [r for r in subset if r in ps] + +def maxrev(repo, subset, x): + """``max(set)`` + Changeset with highest revision number in set. + """ + s = getset(repo, subset, x) + if s: + m = max(s) + if m in subset: + return [m] + return [] + +def minrev(repo, subset, x): + """``min(set)`` + Changeset with lowest revision number in set. + """ + s = getset(repo, subset, x) + if s: + m = min(s) + if m in subset: + return [m] + return [] + +def limit(repo, subset, x): + """``limit(set, n)`` + First n members of set. + """ + # i18n: "limit" is a keyword + l = getargs(x, 2, 2, _("limit requires two arguments")) + try: + # i18n: "limit" is a keyword + lim = int(getstring(l[1], _("limit requires a number"))) + except ValueError: + # i18n: "limit" is a keyword + raise error.ParseError(_("limit expects a number")) + return getset(repo, subset, l[0])[:lim] + +def children(repo, subset, x): + """``children(set)`` + Child changesets of changesets in set. 
+ """ + cs = set() + cl = repo.changelog + s = set(getset(repo, range(len(repo)), x)) + for r in xrange(0, len(repo)): + for p in cl.parentrevs(r): + if p in s: + cs.add(r) + return [r for r in subset if r in cs] + +def branch(repo, subset, x): + """``branch(set)`` + All changesets belonging to the branches of changesets in set. + """ + s = getset(repo, range(len(repo)), x) + b = set() + for r in s: + b.add(repo[r].branch()) + s = set(s) + return [r for r in subset if r in s or repo[r].branch() in b] + +def ancestor(repo, subset, x): + """``ancestor(single, single)`` + Greatest common ancestor of the two changesets. + """ + # i18n: "ancestor" is a keyword + l = getargs(x, 2, 2, _("ancestor requires two arguments")) + r = range(len(repo)) + a = getset(repo, r, l[0]) + b = getset(repo, r, l[1]) + if len(a) != 1 or len(b) != 1: + # i18n: "ancestor" is a keyword + raise error.ParseError(_("ancestor arguments must be single revisions")) + an = [repo[a[0]].ancestor(repo[b[0]]).rev()] + + return [r for r in an if r in subset] + +def ancestors(repo, subset, x): + """``ancestors(set)`` + Changesets that are ancestors of a changeset in set. + """ + args = getset(repo, range(len(repo)), x) + if not args: + return [] + s = set(repo.changelog.ancestors(*args)) | set(args) + return [r for r in subset if r in s] + +def descendants(repo, subset, x): + """``descendants(set)`` + Changesets which are descendants of changesets in set. + """ + args = getset(repo, range(len(repo)), x) + if not args: + return [] + s = set(repo.changelog.descendants(*args)) | set(args) + return [r for r in subset if r in s] + +def follow(repo, subset, x): + """``follow()`` + An alias for ``::.`` (ancestors of the working copy's first parent). 
+ """ + # i18n: "follow" is a keyword + getargs(x, 0, 0, _("follow takes no arguments")) + p = repo['.'].rev() + s = set(repo.changelog.ancestors(p)) | set([p]) + return [r for r in subset if r in s] + +def date(repo, subset, x): + """``date(interval)`` + Changesets within the interval, see :hg:`help dates`. + """ + # i18n: "date" is a keyword + ds = getstring(x, _("date requires a string")) + dm = util.matchdate(ds) + return [r for r in subset if dm(repo[r].date()[0])] + +def keyword(repo, subset, x): + """``keyword(string)`` + Search commit message, user name, and names of changed files for + string. + """ + # i18n: "keyword" is a keyword + kw = getstring(x, _("keyword requires a string")).lower() + l = [] + for r in subset: + c = repo[r] + t = " ".join(c.files() + [c.user(), c.description()]) + if kw in t.lower(): + l.append(r) + return l + +def grep(repo, subset, x): + """``grep(regex)`` + Like ``keyword(string)`` but accepts a regex. Use ``grep(r'...')`` + to ensure special escape characters are handled correctly. + """ + try: + # i18n: "grep" is a keyword + gr = re.compile(getstring(x, _("grep requires a string"))) + except re.error, e: + raise error.ParseError(_('invalid match pattern: %s') % e) + l = [] + for r in subset: + c = repo[r] + for e in c.files() + [c.user(), c.description()]: + if gr.search(e): + l.append(r) + continue + return l + +def author(repo, subset, x): + """``author(string)`` + Alias for ``user(string)``. + """ + # i18n: "author" is a keyword + n = getstring(x, _("author requires a string")).lower() + return [r for r in subset if n in repo[r].user().lower()] + +def user(repo, subset, x): + """``user(string)`` + User name is string. + """ + return author(repo, subset, x) + +def hasfile(repo, subset, x): + """``file(pattern)`` + Changesets affecting files matched by pattern. 
+ """ + # i18n: "file" is a keyword + pat = getstring(x, _("file requires a pattern")) + m = matchmod.match(repo.root, repo.getcwd(), [pat]) + s = [] + for r in subset: + for f in repo[r].files(): + if m(f): + s.append(r) + continue + return s + +def contains(repo, subset, x): + """``contains(pattern)`` + Revision contains pattern. + """ + # i18n: "contains" is a keyword + pat = getstring(x, _("contains requires a pattern")) + m = matchmod.match(repo.root, repo.getcwd(), [pat]) + s = [] + if m.files() == [pat]: + for r in subset: + if pat in repo[r]: + s.append(r) + continue + else: + for r in subset: + for f in repo[r].manifest(): + if m(f): + s.append(r) + continue + return s + +def checkstatus(repo, subset, pat, field): + m = matchmod.match(repo.root, repo.getcwd(), [pat]) + s = [] + fast = (m.files() == [pat]) + for r in subset: + c = repo[r] + if fast: + if pat not in c.files(): + continue + else: + for f in c.files(): + if m(f): + break + else: + continue + files = repo.status(c.p1().node(), c.node())[field] + if fast: + if pat in files: + s.append(r) + continue + else: + for f in files: + if m(f): + s.append(r) + continue + return s + +def modifies(repo, subset, x): + """``modifies(pattern)`` + Changesets modifying files matched by pattern. + """ + # i18n: "modifies" is a keyword + pat = getstring(x, _("modifies requires a pattern")) + return checkstatus(repo, subset, pat, 0) + +def adds(repo, subset, x): + """``adds(pattern)`` + Changesets that add a file matching pattern. + """ + # i18n: "adds" is a keyword + pat = getstring(x, _("adds requires a pattern")) + return checkstatus(repo, subset, pat, 1) + +def removes(repo, subset, x): + """``removes(pattern)`` + Changesets which remove files matching pattern. + """ + # i18n: "removes" is a keyword + pat = getstring(x, _("removes requires a pattern")) + return checkstatus(repo, subset, pat, 2) + +def merge(repo, subset, x): + """``merge()`` + Changeset is a merge changeset. 
+ """ + # i18n: "merge" is a keyword + getargs(x, 0, 0, _("merge takes no arguments")) + cl = repo.changelog + return [r for r in subset if cl.parentrevs(r)[1] != -1] + +def closed(repo, subset, x): + """``closed()`` + Changeset is closed. + """ + # i18n: "closed" is a keyword + getargs(x, 0, 0, _("closed takes no arguments")) + return [r for r in subset if repo[r].extra().get('close')] + +def head(repo, subset, x): + """``head()`` + Changeset is a named branch head. + """ + # i18n: "head" is a keyword + getargs(x, 0, 0, _("head takes no arguments")) + hs = set() + for b, ls in repo.branchmap().iteritems(): + hs.update(repo[h].rev() for h in ls) + return [r for r in subset if r in hs] + +def reverse(repo, subset, x): + """``reverse(set)`` + Reverse order of set. + """ + l = getset(repo, subset, x) + l.reverse() + return l + +def present(repo, subset, x): + """``present(set)`` + An empty set, if any revision in set isn't found; otherwise, + all revisions in set. + """ + try: + return getset(repo, subset, x) + except error.RepoLookupError: + return [] + +def sort(repo, subset, x): + """``sort(set[, [-]key...])`` + Sort set by keys. The default sort order is ascending, specify a key + as ``-key`` to sort in descending order. 
+ + The keys can be: + + - ``rev`` for the revision number, + - ``branch`` for the branch name, + - ``desc`` for the commit message (description), + - ``user`` for user name (``author`` can be used as an alias), + - ``date`` for the commit date + """ + # i18n: "sort" is a keyword + l = getargs(x, 1, 2, _("sort requires one or two arguments")) + keys = "rev" + if len(l) == 2: + keys = getstring(l[1], _("sort spec must be a string")) + + s = l[0] + keys = keys.split() + l = [] + def invert(s): + return "".join(chr(255 - ord(c)) for c in s) + for r in getset(repo, subset, s): + c = repo[r] + e = [] + for k in keys: + if k == 'rev': + e.append(r) + elif k == '-rev': + e.append(-r) + elif k == 'branch': + e.append(c.branch()) + elif k == '-branch': + e.append(invert(c.branch())) + elif k == 'desc': + e.append(c.description()) + elif k == '-desc': + e.append(invert(c.description())) + elif k in 'user author': + e.append(c.user()) + elif k in '-user -author': + e.append(invert(c.user())) + elif k == 'date': + e.append(c.date()[0]) + elif k == '-date': + e.append(-c.date()[0]) + else: + raise error.ParseError(_("unknown sort key %r") % k) + e.append(r) + l.append(e) + l.sort() + return [e[-1] for e in l] + +def getall(repo, subset, x): + """``all()`` + All changesets, the same as ``0:tip``. + """ + # i18n: "all" is a keyword + getargs(x, 0, 0, _("all takes no arguments")) + return subset + +def heads(repo, subset, x): + """``heads(set)`` + Members of set with no children in set. + """ + s = getset(repo, subset, x) + ps = set(parents(repo, subset, x)) + return [r for r in s if r not in ps] + +def roots(repo, subset, x): + """``roots(set)`` + Changesets with no parent changeset in set. + """ + s = getset(repo, subset, x) + cs = set(children(repo, subset, x)) + return [r for r in s if r not in cs] + +def outgoing(repo, subset, x): + """``outgoing([path])`` + Changesets not found in the specified destination repository, or the + default push location. 
+ """ + import hg # avoid start-up nasties + # i18n: "outgoing" is a keyword + l = getargs(x, 0, 1, _("outgoing requires a repository path")) + # i18n: "outgoing" is a keyword + dest = l and getstring(l[0], _("outgoing requires a repository path")) or '' + dest = repo.ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest) + revs, checkout = hg.addbranchrevs(repo, repo, branches, []) + if revs: + revs = [repo.lookup(rev) for rev in revs] + other = hg.repository(hg.remoteui(repo, {}), dest) + repo.ui.pushbuffer() + o = discovery.findoutgoing(repo, other) + repo.ui.popbuffer() + cl = repo.changelog + o = set([cl.rev(r) for r in repo.changelog.nodesbetween(o, revs)[0]]) + return [r for r in subset if r in o] + +def tag(repo, subset, x): + """``tag(name)`` + The specified tag by name, or all tagged revisions if no name is given. + """ + # i18n: "tag" is a keyword + args = getargs(x, 0, 1, _("tag takes one or no arguments")) + cl = repo.changelog + if args: + tn = getstring(args[0], + # i18n: "tag" is a keyword + _('the argument to tag must be a string')) + s = set([cl.rev(n) for t, n in repo.tagslist() if t == tn]) + else: + s = set([cl.rev(n) for t, n in repo.tagslist() if t != 'tip']) + return [r for r in subset if r in s] + +def tagged(repo, subset, x): + return tag(repo, subset, x) + +symbols = { + "adds": adds, + "all": getall, + "ancestor": ancestor, + "ancestors": ancestors, + "author": author, + "branch": branch, + "children": children, + "closed": closed, + "contains": contains, + "date": date, + "descendants": descendants, + "file": hasfile, + "follow": follow, + "grep": grep, + "head": head, + "heads": heads, + "keyword": keyword, + "limit": limit, + "max": maxrev, + "min": minrev, + "merge": merge, + "modifies": modifies, + "id": node, + "outgoing": outgoing, + "p1": p1, + "p2": p2, + "parents": parents, + "present": present, + "removes": removes, + "reverse": reverse, + "rev": rev, + "roots": roots, + "sort": sort, + 
"tag": tag, + "tagged": tagged, + "user": user, +} + +methods = { + "range": rangeset, + "string": stringset, + "symbol": symbolset, + "and": andset, + "or": orset, + "not": notset, + "list": listset, + "func": func, +} + +def optimize(x, small): + if x == None: + return 0, x + + smallbonus = 1 + if small: + smallbonus = .5 + + op = x[0] + if op == 'minus': + return optimize(('and', x[1], ('not', x[2])), small) + elif op == 'dagrange': + return optimize(('and', ('func', ('symbol', 'descendants'), x[1]), + ('func', ('symbol', 'ancestors'), x[2])), small) + elif op == 'dagrangepre': + return optimize(('func', ('symbol', 'ancestors'), x[1]), small) + elif op == 'dagrangepost': + return optimize(('func', ('symbol', 'descendants'), x[1]), small) + elif op == 'rangepre': + return optimize(('range', ('string', '0'), x[1]), small) + elif op == 'rangepost': + return optimize(('range', x[1], ('string', 'tip')), small) + elif op == 'negate': + return optimize(('string', + '-' + getstring(x[1], _("can't negate that"))), small) + elif op in 'string symbol negate': + return smallbonus, x # single revisions are small + elif op == 'and' or op == 'dagrange': + wa, ta = optimize(x[1], True) + wb, tb = optimize(x[2], True) + w = min(wa, wb) + if wa > wb: + return w, (op, tb, ta) + return w, (op, ta, tb) + elif op == 'or': + wa, ta = optimize(x[1], False) + wb, tb = optimize(x[2], False) + if wb < wa: + wb, wa = wa, wb + return max(wa, wb), (op, ta, tb) + elif op == 'not': + o = optimize(x[1], not small) + return o[0], (op, o[1]) + elif op == 'group': + return optimize(x[1], small) + elif op in 'range list': + wa, ta = optimize(x[1], small) + wb, tb = optimize(x[2], small) + return wa + wb, (op, ta, tb) + elif op == 'func': + f = getstring(x[1], _("not a symbol")) + wa, ta = optimize(x[2], small) + if f in "grep date user author keyword branch file outgoing": + w = 10 # slow + elif f in "modifies adds removes": + w = 30 # slower + elif f == "contains": + w = 100 # very slow + elif f 
== "ancestor": + w = 1 * smallbonus + elif f == "reverse limit": + w = 0 + elif f in "sort": + w = 10 # assume most sorts look at changelog + else: + w = 1 + return w + wa, (op, x[1], ta) + return 1, x + +parse = parser.parser(tokenize, elements).parse + +def match(spec): + if not spec: + raise error.ParseError(_("empty query")) + tree = parse(spec) + weight, tree = optimize(tree, True) + def mfunc(repo, subset): + return getset(repo, subset, tree) + return mfunc + +def makedoc(topic, doc): + """Generate and include predicates help in revsets topic.""" + predicates = [] + for name in sorted(symbols): + text = symbols[name].__doc__ + if not text: + continue + text = gettext(text.rstrip()) + lines = text.splitlines() + lines[1:] = [(' ' + l.strip()) for l in lines[1:]] + predicates.append('\n'.join(lines)) + predicates = '\n\n'.join(predicates) + doc = doc.replace('.. predicatesmarker', predicates) + return doc + +# tell hggettext to extract docstrings from these functions: +i18nfunctions = symbols.values() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revset.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revset.pyo Binary files differnew file mode 100644 index 0000000..26ad568 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/revset.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/similar.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/similar.py new file mode 100644 index 0000000..b18795b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/similar.py @@ -0,0 +1,103 @@ +# similar.py - mechanisms for finding similar files +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import util +import mdiff +import bdiff + +def _findexactmatches(repo, added, removed): + '''find renamed files that have no changes + + Takes a list of new filectxs and a list of removed filectxs, and yields + (before, after) tuples of exact matches. + ''' + numfiles = len(added) + len(removed) + + # Get hashes of removed files. + hashes = {} + for i, fctx in enumerate(removed): + repo.ui.progress(_('searching for exact renames'), i, total=numfiles) + h = util.sha1(fctx.data()).digest() + hashes[h] = fctx + + # For each added file, see if it corresponds to a removed file. + for i, fctx in enumerate(added): + repo.ui.progress(_('searching for exact renames'), i + len(removed), + total=numfiles) + h = util.sha1(fctx.data()).digest() + if h in hashes: + yield (hashes[h], fctx) + + # Done + repo.ui.progress(_('searching for exact renames'), None) + +def _findsimilarmatches(repo, added, removed, threshold): + '''find potentially renamed files based on similar file content + + Takes a list of new filectxs and a list of removed filectxs, and yields + (before, after, score) tuples of partial matches. 
+ ''' + copies = {} + for i, r in enumerate(removed): + repo.ui.progress(_('searching for similar files'), i, total=len(removed)) + + # lazily load text + @util.cachefunc + def data(): + orig = r.data() + return orig, mdiff.splitnewlines(orig) + + def score(text): + orig, lines = data() + # bdiff.blocks() returns blocks of matching lines + # count the number of bytes in each + equal = 0 + matches = bdiff.blocks(text, orig) + for x1, x2, y1, y2 in matches: + for line in lines[y1:y2]: + equal += len(line) + + lengths = len(text) + len(orig) + return equal * 2.0 / lengths + + for a in added: + bestscore = copies.get(a, (None, threshold))[1] + myscore = score(a.data()) + if myscore >= bestscore: + copies[a] = (r, myscore) + repo.ui.progress(_('searching'), None) + + for dest, v in copies.iteritems(): + source, score = v + yield source, dest, score + +def findrenames(repo, added, removed, threshold): + '''find renamed files -- yields (before, after, score) tuples''' + parentctx = repo['.'] + workingctx = repo[None] + + # Zero length files will be frequently unrelated to each other, and + # tracking the deletion/addition of such a file will probably cause more + # harm than good. We strip them out here to avoid matching them later on. + addedfiles = set([workingctx[fp] for fp in added + if workingctx[fp].size() > 0]) + removedfiles = set([parentctx[fp] for fp in removed + if fp in parentctx and parentctx[fp].size() > 0]) + + # Find exact matches. + for (a, b) in _findexactmatches(repo, + sorted(addedfiles), sorted(removedfiles)): + addedfiles.remove(b) + yield (a.path(), b.path(), 1.0) + + # If the user requested similar files to be matched, search for them also. 
+ if threshold < 1.0: + for (a, b, score) in _findsimilarmatches(repo, + sorted(addedfiles), sorted(removedfiles), threshold): + yield (a.path(), b.path(), score) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/similar.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/similar.pyo Binary files differnew file mode 100644 index 0000000..dcd7aac --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/similar.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/simplemerge.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/simplemerge.py new file mode 100644 index 0000000..a4e3f57 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/simplemerge.py @@ -0,0 +1,450 @@ +# Copyright (C) 2004, 2005 Canonical Ltd +# +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. +# +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +# mbp: "you know that thing where cvs gives you conflict markers?" +# s: "i hate that." + +from i18n import _ +import util, mdiff +import sys, os + +class CantReprocessAndShowBase(Exception): + pass + +def intersect(ra, rb): + """Given two ranges return the range where they intersect or None. 
+ + >>> intersect((0, 10), (0, 6)) + (0, 6) + >>> intersect((0, 10), (5, 15)) + (5, 10) + >>> intersect((0, 10), (10, 15)) + >>> intersect((0, 9), (10, 15)) + >>> intersect((0, 9), (7, 15)) + (7, 9) + """ + assert ra[0] <= ra[1] + assert rb[0] <= rb[1] + + sa = max(ra[0], rb[0]) + sb = min(ra[1], rb[1]) + if sa < sb: + return sa, sb + else: + return None + +def compare_range(a, astart, aend, b, bstart, bend): + """Compare a[astart:aend] == b[bstart:bend], without slicing. + """ + if (aend - astart) != (bend - bstart): + return False + for ia, ib in zip(xrange(astart, aend), xrange(bstart, bend)): + if a[ia] != b[ib]: + return False + else: + return True + +class Merge3Text(object): + """3-way merge of texts. + + Given strings BASE, OTHER, THIS, tries to produce a combined text + incorporating the changes from both BASE->OTHER and BASE->THIS.""" + def __init__(self, basetext, atext, btext, base=None, a=None, b=None): + self.basetext = basetext + self.atext = atext + self.btext = btext + if base is None: + base = mdiff.splitnewlines(basetext) + if a is None: + a = mdiff.splitnewlines(atext) + if b is None: + b = mdiff.splitnewlines(btext) + self.base = base + self.a = a + self.b = b + + def merge_lines(self, + name_a=None, + name_b=None, + name_base=None, + start_marker='<<<<<<<', + mid_marker='=======', + end_marker='>>>>>>>', + base_marker=None, + reprocess=False): + """Return merge in cvs-like form. 
+ """ + self.conflicts = False + newline = '\n' + if len(self.a) > 0: + if self.a[0].endswith('\r\n'): + newline = '\r\n' + elif self.a[0].endswith('\r'): + newline = '\r' + if base_marker and reprocess: + raise CantReprocessAndShowBase() + if name_a: + start_marker = start_marker + ' ' + name_a + if name_b: + end_marker = end_marker + ' ' + name_b + if name_base and base_marker: + base_marker = base_marker + ' ' + name_base + merge_regions = self.merge_regions() + if reprocess is True: + merge_regions = self.reprocess_merge_regions(merge_regions) + for t in merge_regions: + what = t[0] + if what == 'unchanged': + for i in range(t[1], t[2]): + yield self.base[i] + elif what == 'a' or what == 'same': + for i in range(t[1], t[2]): + yield self.a[i] + elif what == 'b': + for i in range(t[1], t[2]): + yield self.b[i] + elif what == 'conflict': + self.conflicts = True + yield start_marker + newline + for i in range(t[3], t[4]): + yield self.a[i] + if base_marker is not None: + yield base_marker + newline + for i in range(t[1], t[2]): + yield self.base[i] + yield mid_marker + newline + for i in range(t[5], t[6]): + yield self.b[i] + yield end_marker + newline + else: + raise ValueError(what) + + def merge_annotated(self): + """Return merge with conflicts, showing origin of lines. + + Most useful for debugging merge. + """ + for t in self.merge_regions(): + what = t[0] + if what == 'unchanged': + for i in range(t[1], t[2]): + yield 'u | ' + self.base[i] + elif what == 'a' or what == 'same': + for i in range(t[1], t[2]): + yield what[0] + ' | ' + self.a[i] + elif what == 'b': + for i in range(t[1], t[2]): + yield 'b | ' + self.b[i] + elif what == 'conflict': + yield '<<<<\n' + for i in range(t[3], t[4]): + yield 'A | ' + self.a[i] + yield '----\n' + for i in range(t[5], t[6]): + yield 'B | ' + self.b[i] + yield '>>>>\n' + else: + raise ValueError(what) + + def merge_groups(self): + """Yield sequence of line groups. 
Each one is a tuple: + + 'unchanged', lines + Lines unchanged from base + + 'a', lines + Lines taken from a + + 'same', lines + Lines taken from a (and equal to b) + + 'b', lines + Lines taken from b + + 'conflict', base_lines, a_lines, b_lines + Lines from base were changed to either a or b and conflict. + """ + for t in self.merge_regions(): + what = t[0] + if what == 'unchanged': + yield what, self.base[t[1]:t[2]] + elif what == 'a' or what == 'same': + yield what, self.a[t[1]:t[2]] + elif what == 'b': + yield what, self.b[t[1]:t[2]] + elif what == 'conflict': + yield (what, + self.base[t[1]:t[2]], + self.a[t[3]:t[4]], + self.b[t[5]:t[6]]) + else: + raise ValueError(what) + + def merge_regions(self): + """Return sequences of matching and conflicting regions. + + This returns tuples, where the first value says what kind we + have: + + 'unchanged', start, end + Take a region of base[start:end] + + 'same', astart, aend + b and a are different from base but give the same result + + 'a', start, end + Non-clashing insertion from a[start:end] + + Method is as follows: + + The two sequences align only on regions which match the base + and both descendents. These are found by doing a two-way diff + of each one against the base, and then finding the + intersections between those regions. These "sync regions" + are by definition unchanged in both and easily dealt with. + + The regions in between can be in any of three cases: + conflicted, or changed on only one side. 
+ """ + + # section a[0:ia] has been disposed of, etc + iz = ia = ib = 0 + + for zmatch, zend, amatch, aend, bmatch, bend in self.find_sync_regions(): + #print 'match base [%d:%d]' % (zmatch, zend) + + matchlen = zend - zmatch + assert matchlen >= 0 + assert matchlen == (aend - amatch) + assert matchlen == (bend - bmatch) + + len_a = amatch - ia + len_b = bmatch - ib + len_base = zmatch - iz + assert len_a >= 0 + assert len_b >= 0 + assert len_base >= 0 + + #print 'unmatched a=%d, b=%d' % (len_a, len_b) + + if len_a or len_b: + # try to avoid actually slicing the lists + equal_a = compare_range(self.a, ia, amatch, + self.base, iz, zmatch) + equal_b = compare_range(self.b, ib, bmatch, + self.base, iz, zmatch) + same = compare_range(self.a, ia, amatch, + self.b, ib, bmatch) + + if same: + yield 'same', ia, amatch + elif equal_a and not equal_b: + yield 'b', ib, bmatch + elif equal_b and not equal_a: + yield 'a', ia, amatch + elif not equal_a and not equal_b: + yield 'conflict', iz, zmatch, ia, amatch, ib, bmatch + else: + raise AssertionError("can't handle a=b=base but unmatched") + + ia = amatch + ib = bmatch + iz = zmatch + + # if the same part of the base was deleted on both sides + # that's OK, we can just skip it. + + + if matchlen > 0: + assert ia == amatch + assert ib == bmatch + assert iz == zmatch + + yield 'unchanged', zmatch, zend + iz = zend + ia = aend + ib = bend + + def reprocess_merge_regions(self, merge_regions): + """Where there are conflict regions, remove the agreed lines. + + Lines where both A and B have made the same changes are + eliminated. 
+ """ + for region in merge_regions: + if region[0] != "conflict": + yield region + continue + type, iz, zmatch, ia, amatch, ib, bmatch = region + a_region = self.a[ia:amatch] + b_region = self.b[ib:bmatch] + matches = mdiff.get_matching_blocks(''.join(a_region), + ''.join(b_region)) + next_a = ia + next_b = ib + for region_ia, region_ib, region_len in matches[:-1]: + region_ia += ia + region_ib += ib + reg = self.mismatch_region(next_a, region_ia, next_b, + region_ib) + if reg is not None: + yield reg + yield 'same', region_ia, region_len + region_ia + next_a = region_ia + region_len + next_b = region_ib + region_len + reg = self.mismatch_region(next_a, amatch, next_b, bmatch) + if reg is not None: + yield reg + + def mismatch_region(next_a, region_ia, next_b, region_ib): + if next_a < region_ia or next_b < region_ib: + return 'conflict', None, None, next_a, region_ia, next_b, region_ib + mismatch_region = staticmethod(mismatch_region) + + def find_sync_regions(self): + """Return a list of sync regions, where both descendents match the base. + + Generates a list of (base1, base2, a1, a2, b1, b2). There is + always a zero-length sync region at the end of all the files. + """ + + ia = ib = 0 + amatches = mdiff.get_matching_blocks(self.basetext, self.atext) + bmatches = mdiff.get_matching_blocks(self.basetext, self.btext) + len_a = len(amatches) + len_b = len(bmatches) + + sl = [] + + while ia < len_a and ib < len_b: + abase, amatch, alen = amatches[ia] + bbase, bmatch, blen = bmatches[ib] + + # there is an unconflicted block at i; how long does it + # extend? until whichever one ends earlier. 
+ i = intersect((abase, abase + alen), (bbase, bbase + blen)) + if i: + intbase = i[0] + intend = i[1] + intlen = intend - intbase + + # found a match of base[i[0], i[1]]; this may be less than + # the region that matches in either one + assert intlen <= alen + assert intlen <= blen + assert abase <= intbase + assert bbase <= intbase + + asub = amatch + (intbase - abase) + bsub = bmatch + (intbase - bbase) + aend = asub + intlen + bend = bsub + intlen + + assert self.base[intbase:intend] == self.a[asub:aend], \ + (self.base[intbase:intend], self.a[asub:aend]) + + assert self.base[intbase:intend] == self.b[bsub:bend] + + sl.append((intbase, intend, + asub, aend, + bsub, bend)) + + # advance whichever one ends first in the base text + if (abase + alen) < (bbase + blen): + ia += 1 + else: + ib += 1 + + intbase = len(self.base) + abase = len(self.a) + bbase = len(self.b) + sl.append((intbase, intbase, abase, abase, bbase, bbase)) + + return sl + + def find_unconflicted(self): + """Return a list of ranges in base that are not conflicted.""" + am = mdiff.get_matching_blocks(self.basetext, self.atext) + bm = mdiff.get_matching_blocks(self.basetext, self.btext) + + unc = [] + + while am and bm: + # there is an unconflicted block at i; how long does it + # extend? until whichever one ends earlier. 
+ a1 = am[0][0] + a2 = a1 + am[0][2] + b1 = bm[0][0] + b2 = b1 + bm[0][2] + i = intersect((a1, a2), (b1, b2)) + if i: + unc.append(i) + + if a2 < b2: + del am[0] + else: + del bm[0] + + return unc + +def simplemerge(ui, local, base, other, **opts): + def readfile(filename): + f = open(filename, "rb") + text = f.read() + f.close() + if util.binary(text): + msg = _("%s looks like a binary file.") % filename + if not opts.get('text'): + raise util.Abort(msg) + elif not opts.get('quiet'): + ui.warn(_('warning: %s\n') % msg) + return text + + name_a = local + name_b = other + labels = opts.get('label', []) + if labels: + name_a = labels.pop(0) + if labels: + name_b = labels.pop(0) + if labels: + raise util.Abort(_("can only specify two labels.")) + + localtext = readfile(local) + basetext = readfile(base) + othertext = readfile(other) + + local = os.path.realpath(local) + if not opts.get('print'): + opener = util.opener(os.path.dirname(local)) + out = opener(os.path.basename(local), "w", atomictemp=True) + else: + out = sys.stdout + + reprocess = not opts.get('no_minimal') + + m3 = Merge3Text(basetext, localtext, othertext) + for line in m3.merge_lines(name_a=name_a, name_b=name_b, + reprocess=reprocess): + out.write(line) + + if not opts.get('print'): + out.rename() + + if m3.conflicts: + if not opts.get('quiet'): + ui.warn(_("warning: conflicts during merge.\n")) + return 1 diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/simplemerge.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/simplemerge.pyo Binary files differnew file mode 100644 index 0000000..367afbd --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/simplemerge.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshrepo.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshrepo.py new file mode 100644 index 0000000..a2b8f7b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshrepo.py @@ -0,0 +1,197 @@ +# 
sshrepo.py - ssh repository proxy class for mercurial +# +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from i18n import _ +import util, error, wireproto +import re + +class remotelock(object): + def __init__(self, repo): + self.repo = repo + def release(self): + self.repo.unlock() + self.repo = None + def __del__(self): + if self.repo: + self.release() + +class sshrepository(wireproto.wirerepository): + def __init__(self, ui, path, create=0): + self._url = path + self.ui = ui + + m = re.match(r'^ssh://(([^@]+)@)?([^:/]+)(:(\d+))?(/(.*))?$', path) + if not m: + self._abort(error.RepoError(_("couldn't parse location %s") % path)) + + self.user = m.group(2) + self.host = m.group(3) + self.port = m.group(5) + self.path = m.group(7) or "." + + sshcmd = self.ui.config("ui", "ssh", "ssh") + remotecmd = self.ui.config("ui", "remotecmd", "hg") + + args = util.sshargs(sshcmd, self.host, self.user, self.port) + + if create: + cmd = '%s %s "%s init %s"' + cmd = cmd % (sshcmd, args, remotecmd, self.path) + + ui.note(_('running %s\n') % cmd) + res = util.system(cmd) + if res != 0: + self._abort(error.RepoError(_("could not create remote repo"))) + + self.validate_repo(ui, sshcmd, args, remotecmd) + + def url(self): + return self._url + + def validate_repo(self, ui, sshcmd, args, remotecmd): + # cleanup up previous run + self.cleanup() + + cmd = '%s %s "%s -R %s serve --stdio"' + cmd = cmd % (sshcmd, args, remotecmd, self.path) + + cmd = util.quotecommand(cmd) + ui.note(_('running %s\n') % cmd) + self.pipeo, self.pipei, self.pipee = util.popen3(cmd) + + # skip any noise generated by remote shell + self._callstream("hello") + r = self._callstream("between", pairs=("%s-%s" % ("0"*40, "0"*40))) + lines = ["", "dummy"] + max_noise = 500 + while lines[-1] and max_noise: + l = r.readline() + self.readerr() + if lines[-1] == "1\n" and l 
== "\n": + break + if l: + ui.debug("remote: ", l) + lines.append(l) + max_noise -= 1 + else: + self._abort(error.RepoError(_("no suitable response from remote hg"))) + + self.capabilities = set() + for l in reversed(lines): + if l.startswith("capabilities:"): + self.capabilities.update(l[:-1].split(":")[1].split()) + break + + def readerr(self): + while 1: + size = util.fstat(self.pipee).st_size + if size == 0: + break + l = self.pipee.readline() + if not l: + break + self.ui.status(_("remote: "), l) + + def _abort(self, exception): + self.cleanup() + raise exception + + def cleanup(self): + try: + self.pipeo.close() + self.pipei.close() + # read the error descriptor until EOF + for l in self.pipee: + self.ui.status(_("remote: "), l) + self.pipee.close() + except: + pass + + __del__ = cleanup + + def _callstream(self, cmd, **args): + self.ui.debug("sending %s command\n" % cmd) + self.pipeo.write("%s\n" % cmd) + for k, v in sorted(args.iteritems()): + self.pipeo.write("%s %d\n" % (k, len(v))) + self.pipeo.write(v) + self.pipeo.flush() + + return self.pipei + + def _call(self, cmd, **args): + self._callstream(cmd, **args) + return self._recv() + + def _callpush(self, cmd, fp, **args): + r = self._call(cmd, **args) + if r: + return '', r + while 1: + d = fp.read(4096) + if not d: + break + self._send(d) + self._send("", flush=True) + r = self._recv() + if r: + return '', r + return self._recv(), '' + + def _decompress(self, stream): + return stream + + def _recv(self): + l = self.pipei.readline() + self.readerr() + try: + l = int(l) + except: + self._abort(error.ResponseError(_("unexpected response:"), l)) + return self.pipei.read(l) + + def _send(self, data, flush=False): + self.pipeo.write("%d\n" % len(data)) + if data: + self.pipeo.write(data) + if flush: + self.pipeo.flush() + self.readerr() + + def lock(self): + self._call("lock") + return remotelock(self) + + def unlock(self): + self._call("unlock") + + def addchangegroup(self, cg, source, url): + '''Send a 
changegroup to the remote server. Return an integer + similar to unbundle(). DEPRECATED, since it requires locking the + remote.''' + d = self._call("addchangegroup") + if d: + self._abort(error.RepoError(_("push refused: %s") % d)) + while 1: + d = cg.read(4096) + if not d: + break + self.pipeo.write(d) + self.readerr() + + self.pipeo.flush() + + self.readerr() + r = self._recv() + if not r: + return 1 + try: + return int(r) + except: + self._abort(error.ResponseError(_("unexpected response:"), r)) + +instance = sshrepository diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshrepo.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshrepo.pyo Binary files differnew file mode 100644 index 0000000..27170f4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshrepo.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshserver.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshserver.py new file mode 100644 index 0000000..8d2e4af --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshserver.py @@ -0,0 +1,144 @@ +# sshserver.py - ssh protocol server support for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import util, hook, wireproto, changegroup +import os, sys + +class sshserver(object): + def __init__(self, ui, repo): + self.ui = ui + self.repo = repo + self.lock = None + self.fin = sys.stdin + self.fout = sys.stdout + + hook.redirect(True) + sys.stdout = sys.stderr + + # Prevent insertion/deletion of CRs + util.set_binary(self.fin) + util.set_binary(self.fout) + + def getargs(self, args): + data = {} + keys = args.split() + count = len(keys) + for n in xrange(len(keys)): + argline = self.fin.readline()[:-1] + arg, l = argline.split() + val = self.fin.read(int(l)) + if arg not in keys: + raise util.Abort("unexpected parameter %r" % arg) + if arg == '*': + star = {} + for n in xrange(int(l)): + arg, l = argline.split() + val = self.fin.read(int(l)) + star[arg] = val + data['*'] = star + else: + data[arg] = val + return [data[k] for k in keys] + + def getarg(self, name): + return self.getargs(name)[0] + + def getfile(self, fpout): + self.sendresponse('') + count = int(self.fin.readline()) + while count: + fpout.write(self.fin.read(count)) + count = int(self.fin.readline()) + + def redirect(self): + pass + + def groupchunks(self, changegroup): + while True: + d = changegroup.read(4096) + if not d: + break + yield d + + def sendresponse(self, v): + self.fout.write("%d\n" % len(v)) + self.fout.write(v) + self.fout.flush() + + def sendstream(self, source): + for chunk in source.gen: + self.fout.write(chunk) + self.fout.flush() + + def sendpushresponse(self, rsp): + self.sendresponse('') + self.sendresponse(str(rsp.res)) + + def sendpusherror(self, rsp): + self.sendresponse(rsp.res) + + def serve_forever(self): + try: + while self.serve_one(): + pass + finally: + if self.lock is not None: + self.lock.release() + sys.exit(0) + + handlers = { + str: sendresponse, + wireproto.streamres: sendstream, + wireproto.pushres: sendpushresponse, + wireproto.pusherr: sendpusherror, + } + + def serve_one(self): + cmd = self.fin.readline()[:-1] + if cmd and cmd in 
wireproto.commands: + rsp = wireproto.dispatch(self.repo, self, cmd) + self.handlers[rsp.__class__](self, rsp) + elif cmd: + impl = getattr(self, 'do_' + cmd, None) + if impl: + r = impl() + if r is not None: + self.sendresponse(r) + else: self.sendresponse("") + return cmd != '' + + def do_lock(self): + '''DEPRECATED - allowing remote client to lock repo is not safe''' + + self.lock = self.repo.lock() + return "" + + def do_unlock(self): + '''DEPRECATED''' + + if self.lock: + self.lock.release() + self.lock = None + return "" + + def do_addchangegroup(self): + '''DEPRECATED''' + + if not self.lock: + self.sendresponse("not locked") + return + + self.sendresponse("") + cg = changegroup.unbundle10(self.fin, "UN") + r = self.repo.addchangegroup(cg, 'serve', self._client(), + lock=self.lock) + return str(r) + + def _client(self): + client = os.environ.get('SSH_CLIENT', '').split(' ', 1)[0] + return 'remote:ssh:' + client diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshserver.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshserver.pyo Binary files differnew file mode 100644 index 0000000..aa665eb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/sshserver.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/statichttprepo.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/statichttprepo.py new file mode 100644 index 0000000..792497d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/statichttprepo.py @@ -0,0 +1,146 @@ +# statichttprepo.py - simple http repository class for mercurial +# +# This provides read-only repo access to repositories exported via static http +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import changelog, byterange, url, error +import localrepo, manifest, util, store +import urllib, urllib2, errno + +class httprangereader(object): + def __init__(self, url, opener): + # we assume opener has HTTPRangeHandler + self.url = url + self.pos = 0 + self.opener = opener + self.name = url + def seek(self, pos): + self.pos = pos + def read(self, bytes=None): + req = urllib2.Request(self.url) + end = '' + if bytes: + end = self.pos + bytes - 1 + req.add_header('Range', 'bytes=%d-%s' % (self.pos, end)) + + try: + f = self.opener.open(req) + data = f.read() + if hasattr(f, 'getcode'): + # python 2.6+ + code = f.getcode() + elif hasattr(f, 'code'): + # undocumented attribute, seems to be set in 2.4 and 2.5 + code = f.code + else: + # Don't know how to check, hope for the best. + code = 206 + except urllib2.HTTPError, inst: + num = inst.code == 404 and errno.ENOENT or None + raise IOError(num, inst) + except urllib2.URLError, inst: + raise IOError(None, inst.reason[1]) + + if code == 200: + # HTTPRangeHandler does nothing if remote does not support + # Range headers and returns the full entity. Let's slice it. 
+ if bytes: + data = data[self.pos:self.pos + bytes] + else: + data = data[self.pos:] + elif bytes: + data = data[:bytes] + self.pos += len(data) + return data + def __iter__(self): + return iter(self.read().splitlines(1)) + def close(self): + pass + +def build_opener(ui, authinfo): + # urllib cannot handle URLs with embedded user or passwd + urlopener = url.opener(ui, authinfo) + urlopener.add_handler(byterange.HTTPRangeHandler()) + + def opener(base): + """return a function that opens files over http""" + p = base + def o(path, mode="r", atomictemp=None): + if 'a' in mode or 'w' in mode: + raise IOError('Permission denied') + f = "/".join((p, urllib.quote(path))) + return httprangereader(f, urlopener) + return o + + opener.options = {'nonlazy': 1} + return opener + +class statichttprepository(localrepo.localrepository): + def __init__(self, ui, path): + self._url = path + self.ui = ui + + self.root = path + self.path, authinfo = url.getauthinfo(path.rstrip('/') + "/.hg") + + opener = build_opener(ui, authinfo) + self.opener = opener(self.path) + + # find requirements + try: + requirements = self.opener("requires").read().splitlines() + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + # check if it is a non-empty old-style repository + try: + self.opener("00changelog.i").read(1) + except IOError, inst: + if inst.errno != errno.ENOENT: + raise + # we do not care about empty old-style repositories here + msg = _("'%s' does not appear to be an hg repository") % path + raise error.RepoError(msg) + requirements = [] + + # check them + for r in requirements: + if r not in self.supported: + raise error.RepoError(_("requirement '%s' not supported") % r) + + # setup store + def pjoin(a, b): + return a + '/' + b + self.store = store.store(requirements, self.path, opener, pjoin) + self.spath = self.store.path + self.sopener = self.store.opener + self.sjoin = self.store.join + + self.manifest = manifest.manifest(self.sopener) + self.changelog = 
changelog.changelog(self.sopener) + self._tags = None + self.nodetagscache = None + self._branchcache = None + self._branchcachetip = None + self.encodepats = None + self.decodepats = None + self.capabilities.remove("pushkey") + + def url(self): + return self._url + + def local(self): + return False + + def lock(self, wait=True): + raise util.Abort(_('cannot lock static-http repository')) + +def instance(ui, path, create): + if create: + raise util.Abort(_('cannot create new static-http repository')) + return statichttprepository(ui, path[7:]) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/statichttprepo.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/statichttprepo.pyo Binary files differnew file mode 100644 index 0000000..ea6957a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/statichttprepo.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/store.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/store.py new file mode 100644 index 0000000..c155fe9 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/store.py @@ -0,0 +1,339 @@ +# store.py - repository store handling for Mercurial +# +# Copyright 2008 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import osutil, util +import os, stat + +_sha = util.sha1 + +# This avoids a collision between a file named foo and a dir named +# foo.i or foo.d +def encodedir(path): + if not path.startswith('data/'): + return path + return (path + .replace(".hg/", ".hg.hg/") + .replace(".i/", ".i.hg/") + .replace(".d/", ".d.hg/")) + +def decodedir(path): + if not path.startswith('data/') or ".hg/" not in path: + return path + return (path + .replace(".d.hg/", ".d/") + .replace(".i.hg/", ".i/") + .replace(".hg.hg/", ".hg/")) + +def _buildencodefun(): + e = '_' + win_reserved = [ord(x) for x in '\\:*?"<>|'] + cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) + for x in (range(32) + range(126, 256) + win_reserved): + cmap[chr(x)] = "~%02x" % x + for x in range(ord("A"), ord("Z")+1) + [ord(e)]: + cmap[chr(x)] = e + chr(x).lower() + dmap = {} + for k, v in cmap.iteritems(): + dmap[v] = k + def decode(s): + i = 0 + while i < len(s): + for l in xrange(1, 4): + try: + yield dmap[s[i:i + l]] + i += l + break + except KeyError: + pass + else: + raise KeyError + return (lambda s: "".join([cmap[c] for c in encodedir(s)]), + lambda s: decodedir("".join(list(decode(s))))) + +encodefilename, decodefilename = _buildencodefun() + +def _build_lower_encodefun(): + win_reserved = [ord(x) for x in '\\:*?"<>|'] + cmap = dict([(chr(x), chr(x)) for x in xrange(127)]) + for x in (range(32) + range(126, 256) + win_reserved): + cmap[chr(x)] = "~%02x" % x + for x in range(ord("A"), ord("Z")+1): + cmap[chr(x)] = chr(x).lower() + return lambda s: "".join([cmap[c] for c in s]) + +lowerencode = _build_lower_encodefun() + +_windows_reserved_filenames = '''con prn aux nul + com1 com2 com3 com4 com5 com6 com7 com8 com9 + lpt1 lpt2 lpt3 lpt4 lpt5 lpt6 lpt7 lpt8 lpt9'''.split() +def _auxencode(path, dotencode): + res = [] + for n in path.split('/'): + if n: + base = n.split('.')[0] + if base and (base in _windows_reserved_filenames): + # encode third letter ('aux' -> 'au~78') + ec = "~%02x" 
% ord(n[2]) + n = n[0:2] + ec + n[3:] + if n[-1] in '. ': + # encode last period or space ('foo...' -> 'foo..~2e') + n = n[:-1] + "~%02x" % ord(n[-1]) + if dotencode and n[0] in '. ': + n = "~%02x" % ord(n[0]) + n[1:] + res.append(n) + return '/'.join(res) + +MAX_PATH_LEN_IN_HGSTORE = 120 +DIR_PREFIX_LEN = 8 +_MAX_SHORTENED_DIRS_LEN = 8 * (DIR_PREFIX_LEN + 1) - 4 +def _hybridencode(path, auxencode): + '''encodes path with a length limit + + Encodes all paths that begin with 'data/', according to the following. + + Default encoding (reversible): + + Encodes all uppercase letters 'X' as '_x'. All reserved or illegal + characters are encoded as '~xx', where xx is the two digit hex code + of the character (see encodefilename). + Relevant path components consisting of Windows reserved filenames are + masked by encoding the third character ('aux' -> 'au~78', see auxencode). + + Hashed encoding (not reversible): + + If the default-encoded path is longer than MAX_PATH_LEN_IN_HGSTORE, a + non-reversible hybrid hashing of the path is done instead. + This encoding uses up to DIR_PREFIX_LEN characters of all directory + levels of the lowerencoded path, but not more levels than can fit into + _MAX_SHORTENED_DIRS_LEN. + Then follows the filler followed by the sha digest of the full path. + The filler is the beginning of the basename of the lowerencoded path + (the basename is everything after the last path separator). The filler + is as long as possible, filling in characters from the basename until + the encoded path has MAX_PATH_LEN_IN_HGSTORE characters (or all chars + of the basename have been taken). + The extension (e.g. '.i' or '.d') is preserved. + + The string 'data/' at the beginning is replaced with 'dh/', if the hashed + encoding was used. 
+ ''' + if not path.startswith('data/'): + return path + # escape directories ending with .i and .d + path = encodedir(path) + ndpath = path[len('data/'):] + res = 'data/' + auxencode(encodefilename(ndpath)) + if len(res) > MAX_PATH_LEN_IN_HGSTORE: + digest = _sha(path).hexdigest() + aep = auxencode(lowerencode(ndpath)) + _root, ext = os.path.splitext(aep) + parts = aep.split('/') + basename = parts[-1] + sdirs = [] + for p in parts[:-1]: + d = p[:DIR_PREFIX_LEN] + if d[-1] in '. ': + # Windows can't access dirs ending in period or space + d = d[:-1] + '_' + t = '/'.join(sdirs) + '/' + d + if len(t) > _MAX_SHORTENED_DIRS_LEN: + break + sdirs.append(d) + dirs = '/'.join(sdirs) + if len(dirs) > 0: + dirs += '/' + res = 'dh/' + dirs + digest + ext + space_left = MAX_PATH_LEN_IN_HGSTORE - len(res) + if space_left > 0: + filler = basename[:space_left] + res = 'dh/' + dirs + filler + digest + ext + return res + +def _calcmode(path): + try: + # files in .hg/ will be created using this mode + mode = os.stat(path).st_mode + # avoid some useless chmods + if (0777 & ~util.umask) == (0777 & mode): + mode = None + except OSError: + mode = None + return mode + +_data = 'data 00manifest.d 00manifest.i 00changelog.d 00changelog.i' + +class basicstore(object): + '''base class for local repository stores''' + def __init__(self, path, opener, pathjoiner): + self.pathjoiner = pathjoiner + self.path = path + self.createmode = _calcmode(path) + op = opener(self.path) + op.createmode = self.createmode + self.opener = lambda f, *args, **kw: op(encodedir(f), *args, **kw) + + def join(self, f): + return self.pathjoiner(self.path, encodedir(f)) + + def _walk(self, relpath, recurse): + '''yields (unencoded, encoded, size)''' + path = self.pathjoiner(self.path, relpath) + striplen = len(self.path) + len(os.sep) + l = [] + if os.path.isdir(path): + visit = [path] + while visit: + p = visit.pop() + for f, kind, st in osutil.listdir(p, stat=True): + fp = self.pathjoiner(p, f) + if kind == 
stat.S_IFREG and f[-2:] in ('.d', '.i'): + n = util.pconvert(fp[striplen:]) + l.append((decodedir(n), n, st.st_size)) + elif kind == stat.S_IFDIR and recurse: + visit.append(fp) + return sorted(l) + + def datafiles(self): + return self._walk('data', True) + + def walk(self): + '''yields (unencoded, encoded, size)''' + # yield data files first + for x in self.datafiles(): + yield x + # yield manifest before changelog + for x in reversed(self._walk('', False)): + yield x + + def copylist(self): + return ['requires'] + _data.split() + +class encodedstore(basicstore): + def __init__(self, path, opener, pathjoiner): + self.pathjoiner = pathjoiner + self.path = self.pathjoiner(path, 'store') + self.createmode = _calcmode(self.path) + op = opener(self.path) + op.createmode = self.createmode + self.opener = lambda f, *args, **kw: op(encodefilename(f), *args, **kw) + + def datafiles(self): + for a, b, size in self._walk('data', True): + try: + a = decodefilename(a) + except KeyError: + a = None + yield a, b, size + + def join(self, f): + return self.pathjoiner(self.path, encodefilename(f)) + + def copylist(self): + return (['requires', '00changelog.i'] + + [self.pathjoiner('store', f) for f in _data.split()]) + +class fncache(object): + # the filename used to be partially encoded + # hence the encodedir/decodedir dance + def __init__(self, opener): + self.opener = opener + self.entries = None + + def _load(self): + '''fill the entries from the fncache file''' + self.entries = set() + try: + fp = self.opener('fncache', mode='rb') + except IOError: + # skip nonexistent file + return + for n, line in enumerate(fp): + if (len(line) < 2) or (line[-1] != '\n'): + t = _('invalid entry in fncache, line %s') % (n + 1) + raise util.Abort(t) + self.entries.add(decodedir(line[:-1])) + fp.close() + + def rewrite(self, files): + fp = self.opener('fncache', mode='wb') + for p in files: + fp.write(encodedir(p) + '\n') + fp.close() + self.entries = set(files) + + def add(self, fn): + if 
self.entries is None: + self._load() + if fn not in self.entries: + self.opener('fncache', 'ab').write(encodedir(fn) + '\n') + self.entries.add(fn) + + def __contains__(self, fn): + if self.entries is None: + self._load() + return fn in self.entries + + def __iter__(self): + if self.entries is None: + self._load() + return iter(self.entries) + +class fncachestore(basicstore): + def __init__(self, path, opener, pathjoiner, encode): + self.encode = encode + self.pathjoiner = pathjoiner + self.path = self.pathjoiner(path, 'store') + self.createmode = _calcmode(self.path) + op = opener(self.path) + op.createmode = self.createmode + fnc = fncache(op) + self.fncache = fnc + + def fncacheopener(path, mode='r', *args, **kw): + if mode not in ('r', 'rb') and path.startswith('data/'): + fnc.add(path) + return op(self.encode(path), mode, *args, **kw) + self.opener = fncacheopener + + def join(self, f): + return self.pathjoiner(self.path, self.encode(f)) + + def datafiles(self): + rewrite = False + existing = [] + pjoin = self.pathjoiner + spath = self.path + for f in self.fncache: + ef = self.encode(f) + try: + st = os.stat(pjoin(spath, ef)) + yield f, ef, st.st_size + existing.append(f) + except OSError: + # nonexistent entry + rewrite = True + if rewrite: + # rewrite fncache to remove nonexistent entries + # (may be caused by rollback / strip) + self.fncache.rewrite(existing) + + def copylist(self): + d = ('data dh fncache' + ' 00manifest.d 00manifest.i 00changelog.d 00changelog.i') + return (['requires', '00changelog.i'] + + [self.pathjoiner('store', f) for f in d.split()]) + +def store(requirements, path, opener, pathjoiner=None): + pathjoiner = pathjoiner or os.path.join + if 'store' in requirements: + if 'fncache' in requirements: + auxencode = lambda f: _auxencode(f, 'dotencode' in requirements) + encode = lambda f: _hybridencode(f, auxencode) + return fncachestore(path, opener, pathjoiner, encode) + return encodedstore(path, opener, pathjoiner) + return 
basicstore(path, opener, pathjoiner) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/store.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/store.pyo Binary files differnew file mode 100644 index 0000000..62edb9f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/store.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/strutil.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/strutil.py new file mode 100644 index 0000000..b33fb6b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/strutil.py @@ -0,0 +1,34 @@ +# strutil.py - string utilities for Mercurial +# +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +def findall(haystack, needle, start=0, end=None): + if end is None: + end = len(haystack) + if end < 0: + end += len(haystack) + if start < 0: + start += len(haystack) + while start < end: + c = haystack.find(needle, start, end) + if c == -1: + break + yield c + start = c + 1 + +def rfindall(haystack, needle, start=0, end=None): + if end is None: + end = len(haystack) + if end < 0: + end += len(haystack) + if start < 0: + start += len(haystack) + while end >= 0: + c = haystack.rfind(needle, start, end) + if c == -1: + break + yield c + end = c - 1 diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/strutil.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/strutil.pyo Binary files differnew file mode 100644 index 0000000..0921585 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/strutil.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/subrepo.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/subrepo.py new file mode 100644 index 0000000..4761dca --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/subrepo.py @@ -0,0 
+1,610 @@ +# subrepo.py - sub-repository handling for Mercurial +# +# Copyright 2009-2010 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import errno, os, re, xml.dom.minidom, shutil, urlparse, posixpath +import stat, subprocess +from i18n import _ +import config, util, node, error, cmdutil +hg = None + +nullstate = ('', '', 'empty') + +def state(ctx, ui): + """return a state dict, mapping subrepo paths configured in .hgsub + to tuple: (source from .hgsub, revision from .hgsubstate, kind + (key in types dict)) + """ + p = config.config() + def read(f, sections=None, remap=None): + if f in ctx: + try: + data = ctx[f].data() + except IOError, err: + if err.errno != errno.ENOENT: + raise + # handle missing subrepo spec files as removed + ui.warn(_("warning: subrepo spec file %s not found\n") % f) + return + p.parse(f, data, sections, remap, read) + else: + raise util.Abort(_("subrepo spec file %s not found") % f) + + if '.hgsub' in ctx: + read('.hgsub') + + for path, src in ui.configitems('subpaths'): + p.set('subpaths', path, src, ui.configsource('subpaths', path)) + + rev = {} + if '.hgsubstate' in ctx: + try: + for l in ctx['.hgsubstate'].data().splitlines(): + revision, path = l.split(" ", 1) + rev[path] = revision + except IOError, err: + if err.errno != errno.ENOENT: + raise + + state = {} + for path, src in p[''].items(): + kind = 'hg' + if src.startswith('['): + if ']' not in src: + raise util.Abort(_('missing ] in subrepo source')) + kind, src = src.split(']', 1) + kind = kind[1:] + + for pattern, repl in p.items('subpaths'): + # Turn r'C:\foo\bar' into r'C:\\foo\\bar' since re.sub + # does a string decode. + repl = repl.encode('string-escape') + # However, we still want to allow back references to go + # through unharmed, so we turn r'\\1' into r'\1'. Again, + # extra escapes are needed because re.sub string decodes. 
+ repl = re.sub(r'\\\\([0-9]+)', r'\\\1', repl) + try: + src = re.sub(pattern, repl, src, 1) + except re.error, e: + raise util.Abort(_("bad subrepository pattern in %s: %s") + % (p.source('subpaths', pattern), e)) + + state[path] = (src.strip(), rev.get(path, ''), kind) + + return state + +def writestate(repo, state): + """rewrite .hgsubstate in (outer) repo with these subrepo states""" + repo.wwrite('.hgsubstate', + ''.join(['%s %s\n' % (state[s][1], s) + for s in sorted(state)]), '') + +def submerge(repo, wctx, mctx, actx): + """delegated from merge.applyupdates: merging of .hgsubstate file + in working context, merging context and ancestor context""" + if mctx == actx: # backwards? + actx = wctx.p1() + s1 = wctx.substate + s2 = mctx.substate + sa = actx.substate + sm = {} + + repo.ui.debug("subrepo merge %s %s %s\n" % (wctx, mctx, actx)) + + def debug(s, msg, r=""): + if r: + r = "%s:%s:%s" % r + repo.ui.debug(" subrepo %s: %s %s\n" % (s, msg, r)) + + for s, l in s1.items(): + a = sa.get(s, nullstate) + ld = l # local state with possible dirty flag for compares + if wctx.sub(s).dirty(): + ld = (l[0], l[1] + "+") + if wctx == actx: # overwrite + a = ld + + if s in s2: + r = s2[s] + if ld == r or r == a: # no change or local is newer + sm[s] = l + continue + elif ld == a: # other side changed + debug(s, "other changed, get", r) + wctx.sub(s).get(r) + sm[s] = r + elif ld[0] != r[0]: # sources differ + if repo.ui.promptchoice( + _(' subrepository sources for %s differ\n' + 'use (l)ocal source (%s) or (r)emote source (%s)?') + % (s, l[0], r[0]), + (_('&Local'), _('&Remote')), 0): + debug(s, "prompt changed, get", r) + wctx.sub(s).get(r) + sm[s] = r + elif ld[1] == a[1]: # local side is unchanged + debug(s, "other side changed, get", r) + wctx.sub(s).get(r) + sm[s] = r + else: + debug(s, "both sides changed, merge with", r) + wctx.sub(s).merge(r) + sm[s] = l + elif ld == a: # remote removed, local unchanged + debug(s, "remote removed, remove") + wctx.sub(s).remove() 
+ else: + if repo.ui.promptchoice( + _(' local changed subrepository %s which remote removed\n' + 'use (c)hanged version or (d)elete?') % s, + (_('&Changed'), _('&Delete')), 0): + debug(s, "prompt remove") + wctx.sub(s).remove() + + for s, r in s2.items(): + if s in s1: + continue + elif s not in sa: + debug(s, "remote added, get", r) + mctx.sub(s).get(r) + sm[s] = r + elif r != sa[s]: + if repo.ui.promptchoice( + _(' remote changed subrepository %s which local removed\n' + 'use (c)hanged version or (d)elete?') % s, + (_('&Changed'), _('&Delete')), 0) == 0: + debug(s, "prompt recreate", r) + wctx.sub(s).get(r) + sm[s] = r + + # record merged .hgsubstate + writestate(repo, sm) + +def reporelpath(repo): + """return path to this (sub)repo as seen from outermost repo""" + parent = repo + while hasattr(parent, '_subparent'): + parent = parent._subparent + return repo.root[len(parent.root)+1:] + +def subrelpath(sub): + """return path to this subrepo as seen from outermost repo""" + if not hasattr(sub, '_repo'): + return sub._path + return reporelpath(sub._repo) + +def _abssource(repo, push=False, abort=True): + """return pull/push path of repo - either based on parent repo .hgsub info + or on the top repo config. 
Abort or return None if no source found.""" + if hasattr(repo, '_subparent'): + source = repo._subsource + if source.startswith('/') or '://' in source: + return source + parent = _abssource(repo._subparent, push, abort=False) + if parent: + if '://' in parent: + if parent[-1] == '/': + parent = parent[:-1] + r = urlparse.urlparse(parent + '/' + source) + r = urlparse.urlunparse((r[0], r[1], + posixpath.normpath(r[2]), + r[3], r[4], r[5])) + return r + else: # plain file system path + return posixpath.normpath(os.path.join(parent, repo._subsource)) + else: # recursion reached top repo + if hasattr(repo, '_subtoppath'): + return repo._subtoppath + if push and repo.ui.config('paths', 'default-push'): + return repo.ui.config('paths', 'default-push') + if repo.ui.config('paths', 'default'): + return repo.ui.config('paths', 'default') + if abort: + raise util.Abort(_("default path for subrepository %s not found") % + reporelpath(repo)) + +def itersubrepos(ctx1, ctx2): + """find subrepos in ctx1 or ctx2""" + # Create a (subpath, ctx) mapping where we prefer subpaths from + # ctx1. The subpaths from ctx2 are important when the .hgsub file + # has been modified (in ctx2) but not yet committed (in ctx1). 
+ subpaths = dict.fromkeys(ctx2.substate, ctx2) + subpaths.update(dict.fromkeys(ctx1.substate, ctx1)) + for subpath, ctx in sorted(subpaths.iteritems()): + yield subpath, ctx.sub(subpath) + +def subrepo(ctx, path): + """return instance of the right subrepo class for subrepo in path""" + # subrepo inherently violates our import layering rules + # because it wants to make repo objects from deep inside the stack + # so we manually delay the circular imports to not break + # scripts that don't use our demand-loading + global hg + import hg as h + hg = h + + util.path_auditor(ctx._repo.root)(path) + state = ctx.substate.get(path, nullstate) + if state[2] not in types: + raise util.Abort(_('unknown subrepo type %s') % state[2]) + return types[state[2]](ctx, path, state[:2]) + +# subrepo classes need to implement the following abstract class: + +class abstractsubrepo(object): + + def dirty(self): + """returns true if the dirstate of the subrepo does not match + current stored state + """ + raise NotImplementedError + + def checknested(self, path): + """check if path is a subrepository within this repository""" + return False + + def commit(self, text, user, date): + """commit the current changes to the subrepo with the given + log message. Use given user and date if possible. Return the + new state of the subrepo. + """ + raise NotImplementedError + + def remove(self): + """remove the subrepo + + (should verify the dirstate is not dirty first) + """ + raise NotImplementedError + + def get(self, state): + """run whatever commands are needed to put the subrepo into + this state + """ + raise NotImplementedError + + def merge(self, state): + """merge currently-saved state with the new state.""" + raise NotImplementedError + + def push(self, force): + """perform whatever action is analogous to 'hg push' + + This may be a no-op on some systems. 
+ """ + raise NotImplementedError + + def add(self, ui, match, dryrun, prefix): + return [] + + def status(self, rev2, **opts): + return [], [], [], [], [], [], [] + + def diff(self, diffopts, node2, match, prefix, **opts): + pass + + def outgoing(self, ui, dest, opts): + return 1 + + def incoming(self, ui, source, opts): + return 1 + + def files(self): + """return filename iterator""" + raise NotImplementedError + + def filedata(self, name): + """return file data""" + raise NotImplementedError + + def fileflags(self, name): + """return file flags""" + return '' + + def archive(self, archiver, prefix): + for name in self.files(): + flags = self.fileflags(name) + mode = 'x' in flags and 0755 or 0644 + symlink = 'l' in flags + archiver.addfile(os.path.join(prefix, self._path, name), + mode, symlink, self.filedata(name)) + + +class hgsubrepo(abstractsubrepo): + def __init__(self, ctx, path, state): + self._path = path + self._state = state + r = ctx._repo + root = r.wjoin(path) + create = False + if not os.path.exists(os.path.join(root, '.hg')): + create = True + util.makedirs(root) + self._repo = hg.repository(r.ui, root, create=create) + self._repo._subparent = r + self._repo._subsource = state[0] + + if create: + fp = self._repo.opener("hgrc", "w", text=True) + fp.write('[paths]\n') + + def addpathconfig(key, value): + if value: + fp.write('%s = %s\n' % (key, value)) + self._repo.ui.setconfig('paths', key, value) + + defpath = _abssource(self._repo, abort=False) + defpushpath = _abssource(self._repo, True, abort=False) + addpathconfig('default', defpath) + if defpath != defpushpath: + addpathconfig('default-push', defpushpath) + fp.close() + + def add(self, ui, match, dryrun, prefix): + return cmdutil.add(ui, self._repo, match, dryrun, True, + os.path.join(prefix, self._path)) + + def status(self, rev2, **opts): + try: + rev1 = self._state[1] + ctx1 = self._repo[rev1] + ctx2 = self._repo[rev2] + return self._repo.status(ctx1, ctx2, **opts) + except 
error.RepoLookupError, inst: + self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') + % (inst, subrelpath(self))) + return [], [], [], [], [], [], [] + + def diff(self, diffopts, node2, match, prefix, **opts): + try: + node1 = node.bin(self._state[1]) + # We currently expect node2 to come from substate and be + # in hex format + if node2 is not None: + node2 = node.bin(node2) + cmdutil.diffordiffstat(self._repo.ui, self._repo, diffopts, + node1, node2, match, + prefix=os.path.join(prefix, self._path), + listsubrepos=True, **opts) + except error.RepoLookupError, inst: + self._repo.ui.warn(_('warning: error "%s" in subrepository "%s"\n') + % (inst, subrelpath(self))) + + def archive(self, archiver, prefix): + abstractsubrepo.archive(self, archiver, prefix) + + rev = self._state[1] + ctx = self._repo[rev] + for subpath in ctx.substate: + s = subrepo(ctx, subpath) + s.archive(archiver, os.path.join(prefix, self._path)) + + def dirty(self): + r = self._state[1] + if r == '': + return True + w = self._repo[None] + if w.p1() != self._repo[r]: # version checked out change + return True + return w.dirty() # working directory changed + + def checknested(self, path): + return self._repo._checknested(self._repo.wjoin(path)) + + def commit(self, text, user, date): + self._repo.ui.debug("committing subrepo %s\n" % subrelpath(self)) + n = self._repo.commit(text, user, date) + if not n: + return self._repo['.'].hex() # different version checked out + return node.hex(n) + + def remove(self): + # we can't fully delete the repository as it may contain + # local-only history + self._repo.ui.note(_('removing subrepo %s\n') % subrelpath(self)) + hg.clean(self._repo, node.nullid, False) + + def _get(self, state): + source, revision, kind = state + try: + self._repo.lookup(revision) + except error.RepoError: + self._repo._subsource = source + srcurl = _abssource(self._repo) + self._repo.ui.status(_('pulling subrepo %s from %s\n') + % (subrelpath(self), srcurl)) + other = 
hg.repository(self._repo.ui, srcurl) + self._repo.pull(other) + + def get(self, state): + self._get(state) + source, revision, kind = state + self._repo.ui.debug("getting subrepo %s\n" % self._path) + hg.clean(self._repo, revision, False) + + def merge(self, state): + self._get(state) + cur = self._repo['.'] + dst = self._repo[state[1]] + anc = dst.ancestor(cur) + if anc == cur: + self._repo.ui.debug("updating subrepo %s\n" % subrelpath(self)) + hg.update(self._repo, state[1]) + elif anc == dst: + self._repo.ui.debug("skipping subrepo %s\n" % subrelpath(self)) + else: + self._repo.ui.debug("merging subrepo %s\n" % subrelpath(self)) + hg.merge(self._repo, state[1], remind=False) + + def push(self, force): + # push subrepos depth-first for coherent ordering + c = self._repo[''] + subs = c.substate # only repos that are committed + for s in sorted(subs): + if not c.sub(s).push(force): + return False + + dsturl = _abssource(self._repo, True) + self._repo.ui.status(_('pushing subrepo %s to %s\n') % + (subrelpath(self), dsturl)) + other = hg.repository(self._repo.ui, dsturl) + return self._repo.push(other, force) + + def outgoing(self, ui, dest, opts): + return hg.outgoing(ui, self._repo, _abssource(self._repo, True), opts) + + def incoming(self, ui, source, opts): + return hg.incoming(ui, self._repo, _abssource(self._repo, False), opts) + + def files(self): + rev = self._state[1] + ctx = self._repo[rev] + return ctx.manifest() + + def filedata(self, name): + rev = self._state[1] + return self._repo[rev][name].data() + + def fileflags(self, name): + rev = self._state[1] + ctx = self._repo[rev] + return ctx.flags(name) + + +class svnsubrepo(abstractsubrepo): + def __init__(self, ctx, path, state): + self._path = path + self._state = state + self._ctx = ctx + self._ui = ctx._repo.ui + + def _svncommand(self, commands, filename=''): + path = os.path.join(self._ctx._repo.origroot, self._path, filename) + cmd = ['svn'] + commands + [path] + cmd = [util.shellquote(arg) for arg 
in cmd] + cmd = util.quotecommand(' '.join(cmd)) + env = dict(os.environ) + # Avoid localized output, preserve current locale for everything else. + env['LC_MESSAGES'] = 'C' + p = subprocess.Popen(cmd, shell=True, bufsize=-1, + close_fds=util.closefds, + stdout=subprocess.PIPE, stderr=subprocess.PIPE, + universal_newlines=True, env=env) + stdout, stderr = p.communicate() + stderr = stderr.strip() + if stderr: + raise util.Abort(stderr) + return stdout + + def _wcrev(self): + output = self._svncommand(['info', '--xml']) + doc = xml.dom.minidom.parseString(output) + entries = doc.getElementsByTagName('entry') + if not entries: + return '0' + return str(entries[0].getAttribute('revision')) or '0' + + def _wcchanged(self): + """Return (changes, extchanges) where changes is True + if the working directory was changed, and extchanges is + True if any of these changes concern an external entry. + """ + output = self._svncommand(['status', '--xml']) + externals, changes = [], [] + doc = xml.dom.minidom.parseString(output) + for e in doc.getElementsByTagName('entry'): + s = e.getElementsByTagName('wc-status') + if not s: + continue + item = s[0].getAttribute('item') + props = s[0].getAttribute('props') + path = e.getAttribute('path') + if item == 'external': + externals.append(path) + if (item not in ('', 'normal', 'unversioned', 'external') + or props not in ('', 'none')): + changes.append(path) + for path in changes: + for ext in externals: + if path == ext or path.startswith(ext + os.sep): + return True, True + return bool(changes), False + + def dirty(self): + if self._wcrev() == self._state[1] and not self._wcchanged()[0]: + return False + return True + + def commit(self, text, user, date): + # user and date are out of our hands since svn is centralized + changed, extchanged = self._wcchanged() + if not changed: + return self._wcrev() + if extchanged: + # Do not try to commit externals + raise util.Abort(_('cannot commit svn externals')) + commitinfo = 
self._svncommand(['commit', '-m', text]) + self._ui.status(commitinfo) + newrev = re.search('Committed revision ([0-9]+).', commitinfo) + if not newrev: + raise util.Abort(commitinfo.splitlines()[-1]) + newrev = newrev.groups()[0] + self._ui.status(self._svncommand(['update', '-r', newrev])) + return newrev + + def remove(self): + if self.dirty(): + self._ui.warn(_('not removing repo %s because ' + 'it has changes.\n' % self._path)) + return + self._ui.note(_('removing subrepo %s\n') % self._path) + + def onerror(function, path, excinfo): + if function is not os.remove: + raise + # read-only files cannot be unlinked under Windows + s = os.stat(path) + if (s.st_mode & stat.S_IWRITE) != 0: + raise + os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE) + os.remove(path) + + path = self._ctx._repo.wjoin(self._path) + shutil.rmtree(path, onerror=onerror) + try: + os.removedirs(os.path.dirname(path)) + except OSError: + pass + + def get(self, state): + status = self._svncommand(['checkout', state[0], '--revision', state[1]]) + if not re.search('Checked out revision [0-9]+.', status): + raise util.Abort(status.splitlines()[-1]) + self._ui.status(status) + + def merge(self, state): + old = int(self._state[1]) + new = int(state[1]) + if new > old: + self.get(state) + + def push(self, force): + # push is a no-op for SVN + return True + + def files(self): + output = self._svncommand(['list']) + # This works because svn forbids \n in filenames. 
+ return output.splitlines() + + def filedata(self, name): + return self._svncommand(['cat'], name) + + +types = { + 'hg': hgsubrepo, + 'svn': svnsubrepo, + } diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/subrepo.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/subrepo.pyo Binary files differnew file mode 100644 index 0000000..92f6ed7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/subrepo.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/tags.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/tags.py new file mode 100644 index 0000000..6db9613 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/tags.py @@ -0,0 +1,288 @@ +# tags.py - read tag info from local repository +# +# Copyright 2009 Matt Mackall <mpm@selenic.com> +# Copyright 2009 Greg Ward <greg@gerg.ca> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# Currently this module only deals with reading and caching tags. +# Eventually, it could take care of updating (adding/removing/moving) +# tags too. + +from node import nullid, bin, hex, short +from i18n import _ +import encoding +import error + +def findglobaltags(ui, repo, alltags, tagtypes): + '''Find global tags in repo by reading .hgtags from every head that + has a distinct version of it, using a cache to avoid excess work. + Updates the dicts alltags, tagtypes in place: alltags maps tag name + to (node, hist) pair (see _readtags() below), and tagtypes maps tag + name to tag type ("global" in this case).''' + # This is so we can be lazy and assume alltags contains only global + # tags when we pass it to _writetagcache(). 
+ assert len(alltags) == len(tagtypes) == 0, \ + "findglobaltags() should be called first" + + (heads, tagfnode, cachetags, shouldwrite) = _readtagcache(ui, repo) + if cachetags is not None: + assert not shouldwrite + # XXX is this really 100% correct? are there oddball special + # cases where a global tag should outrank a local tag but won't, + # because cachetags does not contain rank info? + _updatetags(cachetags, 'global', alltags, tagtypes) + return + + seen = set() # set of fnode + fctx = None + for head in reversed(heads): # oldest to newest + assert head in repo.changelog.nodemap, \ + "tag cache returned bogus head %s" % short(head) + + fnode = tagfnode.get(head) + if fnode and fnode not in seen: + seen.add(fnode) + if not fctx: + fctx = repo.filectx('.hgtags', fileid=fnode) + else: + fctx = fctx.filectx(fnode) + + filetags = _readtags(ui, repo, fctx.data().splitlines(), fctx) + _updatetags(filetags, 'global', alltags, tagtypes) + + # and update the cache (if necessary) + if shouldwrite: + _writetagcache(ui, repo, heads, tagfnode, alltags) + +def readlocaltags(ui, repo, alltags, tagtypes): + '''Read local tags in repo. Update alltags and tagtypes.''' + try: + # localtags is in the local encoding; re-encode to UTF-8 on + # input for consistency with the rest of this module. + data = repo.opener("localtags").read() + filetags = _readtags( + ui, repo, data.splitlines(), "localtags", + recode=encoding.fromlocal) + _updatetags(filetags, "local", alltags, tagtypes) + except IOError: + pass + +def _readtags(ui, repo, lines, fn, recode=None): + '''Read tag definitions from a file (or any source of lines). + Return a mapping from tag name to (node, hist): node is the node id + from the last line read for that name, and hist is the list of node + ids previously associated with it (in file order). 
All node ids are + binary, not hex.''' + + filetags = {} # map tag name to (node, hist) + count = 0 + + def warn(msg): + ui.warn(_("%s, line %s: %s\n") % (fn, count, msg)) + + for line in lines: + count += 1 + if not line: + continue + try: + (nodehex, name) = line.split(" ", 1) + except ValueError: + warn(_("cannot parse entry")) + continue + name = name.strip() + if recode: + name = recode(name) + try: + nodebin = bin(nodehex) + except TypeError: + warn(_("node '%s' is not well formed") % nodehex) + continue + if nodebin not in repo.changelog.nodemap: + # silently ignore as pull -r might cause this + continue + + # update filetags + hist = [] + if name in filetags: + n, hist = filetags[name] + hist.append(n) + filetags[name] = (nodebin, hist) + return filetags + +def _updatetags(filetags, tagtype, alltags, tagtypes): + '''Incorporate the tag info read from one file into the two + dictionaries, alltags and tagtypes, that contain all tag + info (global across all heads plus local).''' + + for name, nodehist in filetags.iteritems(): + if name not in alltags: + alltags[name] = nodehist + tagtypes[name] = tagtype + continue + + # we prefer alltags[name] if: + # it supercedes us OR + # mutual supercedes and it has a higher rank + # otherwise we win because we're tip-most + anode, ahist = nodehist + bnode, bhist = alltags[name] + if (bnode != anode and anode in bhist and + (bnode not in ahist or len(bhist) > len(ahist))): + anode = bnode + ahist.extend([n for n in bhist if n not in ahist]) + alltags[name] = anode, ahist + tagtypes[name] = tagtype + + +# The tag cache only stores info about heads, not the tag contents +# from each head. I.e. it doesn't try to squeeze out the maximum +# performance, but is simpler has a better chance of actually +# working correctly. And this gives the biggest performance win: it +# avoids looking up .hgtags in the manifest for every head, and it +# can avoid calling heads() at all if there have been no changes to +# the repo. 
+ +def _readtagcache(ui, repo): + '''Read the tag cache and return a tuple (heads, fnodes, cachetags, + shouldwrite). If the cache is completely up-to-date, cachetags is a + dict of the form returned by _readtags(); otherwise, it is None and + heads and fnodes are set. In that case, heads is the list of all + heads currently in the repository (ordered from tip to oldest) and + fnodes is a mapping from head to .hgtags filenode. If those two are + set, caller is responsible for reading tag info from each head.''' + + try: + cachefile = repo.opener('tags.cache', 'r') + # force reading the file for static-http + cachelines = iter(cachefile) + except IOError: + cachefile = None + + # The cache file consists of lines like + # <headrev> <headnode> [<tagnode>] + # where <headrev> and <headnode> redundantly identify a repository + # head from the time the cache was written, and <tagnode> is the + # filenode of .hgtags on that head. Heads with no .hgtags file will + # have no <tagnode>. The cache is ordered from tip to oldest (which + # is part of why <headrev> is there: a quick visual check is all + # that's required to ensure correct order). + # + # This information is enough to let us avoid the most expensive part + # of finding global tags, which is looking up <tagnode> in the + # manifest for each head. 
+ cacherevs = [] # list of headrev + cacheheads = [] # list of headnode + cachefnode = {} # map headnode to filenode + if cachefile: + try: + for line in cachelines: + if line == "\n": + break + line = line.rstrip().split() + cacherevs.append(int(line[0])) + headnode = bin(line[1]) + cacheheads.append(headnode) + if len(line) == 3: + fnode = bin(line[2]) + cachefnode[headnode] = fnode + except (ValueError, TypeError): + # corruption of tags.cache, just recompute it + ui.warn(_('.hg/tags.cache is corrupt, rebuilding it\n')) + cacheheads = [] + cacherevs = [] + cachefnode = {} + + tipnode = repo.changelog.tip() + tiprev = len(repo.changelog) - 1 + + # Case 1 (common): tip is the same, so nothing has changed. + # (Unchanged tip trivially means no changesets have been added. + # But, thanks to localrepository.destroyed(), it also means none + # have been destroyed by strip or rollback.) + if cacheheads and cacheheads[0] == tipnode and cacherevs[0] == tiprev: + tags = _readtags(ui, repo, cachelines, cachefile.name) + cachefile.close() + return (None, None, tags, False) + if cachefile: + cachefile.close() # ignore rest of file + + repoheads = repo.heads() + # Case 2 (uncommon): empty repo; get out quickly and don't bother + # writing an empty cache. + if repoheads == [nullid]: + return ([], {}, {}, False) + + # Case 3 (uncommon): cache file missing or empty. + + # Case 4 (uncommon): tip rev decreased. This should only happen + # when we're called from localrepository.destroyed(). Refresh the + # cache so future invocations will not see disappeared heads in the + # cache. + + # Case 5 (common): tip has changed, so we've added/replaced heads. + + # As it happens, the code to handle cases 3, 4, 5 is the same. + + # N.B. in case 4 (nodes destroyed), "new head" really means "newly + # exposed". + newheads = [head + for head in repoheads + if head not in set(cacheheads)] + + # Now we have to lookup the .hgtags filenode for every new head. 
+ # This is the most expensive part of finding tags, so performance + # depends primarily on the size of newheads. Worst case: no cache + # file, so newheads == repoheads. + for head in newheads: + cctx = repo[head] + try: + fnode = cctx.filenode('.hgtags') + cachefnode[head] = fnode + except error.LookupError: + # no .hgtags file on this head + pass + + # Caller has to iterate over all heads, but can use the filenodes in + # cachefnode to get to each .hgtags revision quickly. + return (repoheads, cachefnode, None, True) + +def _writetagcache(ui, repo, heads, tagfnode, cachetags): + + try: + cachefile = repo.opener('tags.cache', 'w', atomictemp=True) + except (OSError, IOError): + return + + realheads = repo.heads() # for sanity checks below + for head in heads: + # temporary sanity checks; these can probably be removed + # once this code has been in crew for a few weeks + assert head in repo.changelog.nodemap, \ + 'trying to write non-existent node %s to tag cache' % short(head) + assert head in realheads, \ + 'trying to write non-head %s to tag cache' % short(head) + assert head != nullid, \ + 'trying to write nullid to tag cache' + + # This can't fail because of the first assert above. When/if we + # remove that assert, we might want to catch LookupError here + # and downgrade it to a warning. + rev = repo.changelog.rev(head) + + fnode = tagfnode.get(head) + if fnode: + cachefile.write('%d %s %s\n' % (rev, hex(head), hex(fnode))) + else: + cachefile.write('%d %s\n' % (rev, hex(head))) + + # Tag names in the cache are in UTF-8 -- which is the whole reason + # we keep them in UTF-8 throughout this module. If we converted + # them local encoding on input, we would lose info writing them to + # the cache. 
+ cachefile.write('\n') + for (name, (node, hist)) in cachetags.iteritems(): + cachefile.write("%s %s\n" % (hex(node), name)) + + cachefile.rename() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/tags.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/tags.pyo Binary files differnew file mode 100644 index 0000000..c61ce3b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/tags.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templatefilters.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templatefilters.py new file mode 100644 index 0000000..4701fed --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templatefilters.py @@ -0,0 +1,228 @@ +# template-filters.py - common template expansion filters +# +# Copyright 2005-2008 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
import cgi, re, os, time, urllib
import encoding, node, util

def stringify(thing):
    '''turn nested template iterator into string.'''
    if hasattr(thing, '__iter__') and not isinstance(thing, str):
        return "".join([stringify(t) for t in thing if t is not None])
    return str(thing)

# (label, seconds) pairs, largest unit first; age() picks the first unit
# that fits at least twice (or falls through to seconds).
agescales = [("year", 3600 * 24 * 365),
             ("month", 3600 * 24 * 30),
             ("week", 3600 * 24 * 7),
             ("day", 3600 * 24),
             ("hour", 3600),
             ("minute", 60),
             ("second", 1)]

def age(date):
    '''turn a (timestamp, tzoff) tuple into an age string.'''

    def plural(t, c):
        if c == 1:
            return t
        return t + "s"
    def fmt(t, c):
        return "%d %s" % (c, plural(t, c))

    now = time.time()
    then = date[0]
    if then > now:
        return 'in the future'

    delta = max(1, int(now - then))
    # Anything older than two years is shown as an absolute short date.
    if delta > agescales[0][1] * 2:
        return util.shortdate(date)

    for t, s in agescales:
        n = delta // s
        if n >= 2 or s == 1:
            return '%s ago' % fmt(t, n)

# Compiled lazily on first fill() call and cached at module level.
para_re = None
space_re = None

def fill(text, width):
    '''fill many paragraphs.'''
    global para_re, space_re
    if para_re is None:
        # Paragraph breaks: a blank line, or a line starting a -/* bullet.
        para_re = re.compile('(\n\n|\n\\s*[-*]\\s*)', re.M)
        space_re = re.compile(r' +')

    def findparas():
        '''yield (paragraph, separator) pairs covering all of text.'''
        start = 0
        while True:
            m = para_re.search(text, start)
            if not m:
                # Last paragraph: split off trailing whitespace so it is
                # preserved verbatim after wrapping.  Decode/encode via the
                # local encoding so isspace() sees real characters.
                uctext = unicode(text[start:], encoding.encoding)
                w = len(uctext)
                while 0 < w and uctext[w - 1].isspace():
                    w -= 1
                yield (uctext[:w].encode(encoding.encoding),
                       uctext[w:].encode(encoding.encoding))
                break
            yield text[start:m.start(0)], m.group(1)
            start = m.end(1)

    return "".join([space_re.sub(' ', util.wrap(para, width=width)) + rest
                    for para, rest in findparas()])

def firstline(text):
    '''return the first line of text'''
    try:
        return text.splitlines(True)[0].rstrip('\r\n')
    except IndexError:
        # empty string has no lines
        return ''

def nl2br(text):
    '''replace raw newlines with xhtml line breaks.'''
    return text.replace('\n', '<br/>\n')

def obfuscate(text):
    '''obscure text (e.g. email addresses) as decimal character entities.'''
    text = unicode(text, encoding.encoding, 'replace')
    return ''.join(['&#%d;' % ord(c) for c in text])

def domain(author):
    '''get domain of author, or empty string if none.'''
    f = author.find('@')
    if f == -1:
        return ''
    author = author[f + 1:]
    f = author.find('>')
    if f >= 0:
        author = author[:f]
    return author

def person(author):
    '''get name of author, or else username.'''
    if not '@' in author:
        return author
    f = author.find('<')
    if f == -1:
        return util.shortuser(author)
    return author[:f].rstrip()

def indent(text, prefix):
    '''indent each non-empty line of text after first with prefix.'''
    lines = text.splitlines()
    num_lines = len(lines)
    endswithnewline = text[-1:] == '\n'
    def indenter():
        for i in xrange(num_lines):
            l = lines[i]
            if i and l.strip():
                yield prefix
            yield l
            if i < num_lines - 1 or endswithnewline:
                yield '\n'
    return "".join(indenter())

def permissions(flags):
    '''render manifest flags ("l"/"x"/"") as an ls-style permission string.'''
    if "l" in flags:
        return "lrwxrwxrwx"
    if "x" in flags:
        return "-rwxr-xr-x"
    return "-rw-r--r--"

def xmlescape(text):
    '''escape XML special characters and strip control characters
    that are illegal in XML.'''
    # NOTE: the replacement strings must be the XML entity references;
    # they had been corrupted (entities decoded back to the bare
    # characters, making the replaces no-ops) and are restored here.
    text = (text
            .replace('&', '&amp;')
            .replace('<', '&lt;')
            .replace('>', '&gt;')
            .replace('"', '&quot;')
            .replace("'", '&#39;')) # &apos; invalid in HTML
    return re.sub('[\x00-\x08\x0B\x0C\x0E-\x1F]', ' ', text)

def uescape(c):
    '''JSON-escape a single character: pass ASCII through, \\uXXXX the rest.'''
    if ord(c) < 0x80:
        return c
    else:
        return '\\u%04x' % ord(c)

_escapes = [
    ('\\', '\\\\'), ('"', '\\"'), ('\t', '\\t'), ('\n', '\\n'),
    ('\r', '\\r'), ('\f', '\\f'), ('\b', '\\b'),
]

def jsonescape(s):
    '''escape a string for embedding in a JSON string literal.'''
    for k, v in _escapes:
        s = s.replace(k, v)
    return ''.join(uescape(c) for c in s)

def json(obj):
    '''serialize obj (None/bool/number/str/unicode/mapping/iterable)
    as a JSON document.  Raises TypeError for unsupported types.'''
    if obj is None or obj is False or obj is True:
        return {None: 'null', False: 'false', True: 'true'}[obj]
    elif isinstance(obj, int) or isinstance(obj, float):
        return str(obj)
    elif isinstance(obj, str):
        # byte strings are decoded from the local encoding first
        u = unicode(obj, encoding.encoding, 'replace')
        return '"%s"' % jsonescape(u)
    elif isinstance(obj, unicode):
        return '"%s"' % jsonescape(obj)
    elif hasattr(obj, 'keys'):
        out = []
        for k, v in obj.iteritems():
            s = '%s: %s' % (json(k), json(v))
            out.append(s)
        return '{' + ', '.join(out) + '}'
    elif hasattr(obj, '__iter__'):
        out = []
        for i in obj:
            out.append(json(i))
        return '[' + ', '.join(out) + ']'
    else:
        raise TypeError('cannot encode type %s' % obj.__class__.__name__)

def stripdir(text):
    '''Treat the text as path and strip a directory level, if possible.'''
    dir = os.path.dirname(text)
    if dir == "":
        # no directory component: fall back to the bare filename
        return os.path.basename(text)
    else:
        return dir

def nonempty(str):
    '''substitute "(none)" for an empty value.'''
    return str or "(none)"

# template filter table: name used in templates -> callable
filters = {
    "addbreaks": nl2br,
    "basename": os.path.basename,
    "stripdir": stripdir,
    "age": age,
    "date": lambda x: util.datestr(x),
    "domain": domain,
    "email": util.email,
    "escape": lambda x: cgi.escape(x, True),
    "fill68": lambda x: fill(x, width=68),
    "fill76": lambda x: fill(x, width=76),
    "firstline": firstline,
    "tabindent": lambda x: indent(x, '\t'),
    "hgdate": lambda x: "%d %d" % x,
    "isodate": lambda x: util.datestr(x, '%Y-%m-%d %H:%M %1%2'),
    "isodatesec": lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2'),
    "json": json,
    "jsonescape": jsonescape,
    "localdate": lambda x: (x[0], util.makedate()[1]),
    "nonempty": nonempty,
    "obfuscate": obfuscate,
    "permissions": permissions,
    "person": person,
    "rfc822date": lambda x: util.datestr(x, "%a, %d %b %Y %H:%M:%S %1%2"),
    "rfc3339date": lambda x: util.datestr(x, "%Y-%m-%dT%H:%M:%S%1:%2"),
    "hex": node.hex,
    "short": lambda x: x[:12],
    "shortdate": util.shortdate,
    "stringify": stringify,
    "strip": lambda x: x.strip(),
    "urlescape": lambda x: urllib.quote(x),
    "user": lambda x: util.shortuser(x),
    "stringescape": lambda x: x.encode('string_escape'),
    "xmlescape": xmlescape,
}
# templatekw.py - common changeset template keywords
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from node import hex
import encoding, patch, util, error

def showlist(name, values, plural=None, **args):
    '''expand set of values.
    name is name of key in template map.
    values is list of strings or dicts.
    plural is plural of name, if not simply name + 's'.

    expansion works like this, given name 'foo'.

    if values is empty, expand 'no_foos'.

    if 'foo' not in template map, return values as a string,
    joined by space.

    expand 'start_foos'.

    for each value, expand 'foo'. if 'last_foo' in template
    map, expand it instead of 'foo' for last key.

    expand 'end_foos'.
    '''
    templ = args['templ']
    if plural:
        names = plural
    else: names = name + 's'
    if not values:
        noname = 'no_' + names
        if noname in templ:
            yield templ(noname, **args)
        return
    if name not in templ:
        # no per-item template: strings are space-joined, dicts are
        # handed back merged with the keyword args
        if isinstance(values[0], str):
            yield ' '.join(values)
        else:
            for v in values:
                yield dict(v, **args)
        return
    startname = 'start_' + names
    if startname in templ:
        yield templ(startname, **args)
    vargs = args.copy()
    def one(v, tag=name):
        # expand one value; v may be a dict, a (key, value) iterable,
        # or a plain value bound under 'name'
        try:
            vargs.update(v)
        except (AttributeError, ValueError):
            try:
                for a, b in v:
                    vargs[a] = b
            except ValueError:
                vargs[name] = v
        return templ(tag, **vargs)
    lastname = 'last_' + name
    if lastname in templ:
        # pop the final value so it can get the 'last_' template
        last = values.pop()
    else:
        last = None
    for v in values:
        yield one(v)
    if last is not None:
        yield one(last, tag=lastname)
    endname = 'end_' + names
    if endname in templ:
        yield templ(endname, **args)

def getfiles(repo, ctx, revcache):
    # (modified, added, removed) file lists for ctx vs its first parent,
    # memoized in the per-revision cache
    if 'files' not in revcache:
        revcache['files'] = repo.status(ctx.parents()[0].node(),
                                        ctx.node())[:3]
    return revcache['files']

def getlatesttags(repo, ctx, cache):
    '''return date, distance and name for the latest tag of rev'''

    if 'latesttags' not in cache:
        # Cache mapping from rev to a tuple with tag date, tag
        # distance and tag name
        cache['latesttags'] = {-1: (0, 0, 'null')}
    latesttags = cache['latesttags']

    rev = ctx.rev()
    todo = [rev]
    # iterative walk toward the root instead of recursion, filling the
    # cache bottom-up
    while todo:
        rev = todo.pop()
        if rev in latesttags:
            continue
        ctx = repo[rev]
        tags = [t for t in ctx.tags() if repo.tagtype(t) == 'global']
        if tags:
            latesttags[rev] = ctx.date()[0], 0, ':'.join(sorted(tags))
            continue
        try:
            # The tuples are laid out so the right one can be found by
            # comparison.
            pdate, pdist, ptag = max(
                latesttags[p.rev()] for p in ctx.parents())
        except KeyError:
            # Cache miss - recurse
            todo.append(rev)
            todo.extend(p.rev() for p in ctx.parents())
            continue
        latesttags[rev] = pdate, pdist + 1, ptag
    return latesttags[rev]

def getrenamedfn(repo, endrev=None):
    # build a closure that answers rename queries, lazily filling a
    # per-file cache of linkrev -> rename info
    rcache = {}
    if endrev is None:
        endrev = len(repo)

    def getrenamed(fn, rev):
        '''looks up all renames for a file (up to endrev) the first
        time the file is given. It indexes on the changerev and only
        parses the manifest if linkrev != changerev.
        Returns rename info for fn at changerev rev.'''
        if fn not in rcache:
            rcache[fn] = {}
            fl = repo.file(fn)
            for i in fl:
                lr = fl.linkrev(i)
                renamed = fl.renamed(fl.node(i))
                rcache[fn][lr] = renamed
                if lr >= endrev:
                    break
        if rev in rcache[fn]:
            return rcache[fn][rev]

        # If linkrev != rev (i.e. rev not found in rcache) fallback to
        # filectx logic.
        try:
            return repo[rev][fn].renamed()
        except error.LookupError:
            return None

    return getrenamed


def showauthor(repo, ctx, templ, **args):
    # full user string of the changeset author
    return ctx.user()

def showbranches(**args):
    # named branch, suppressed when it is the 'default' branch
    branch = args['ctx'].branch()
    if branch != 'default':
        branch = encoding.tolocal(branch)
        return showlist('branch', [branch], plural='branches', **args)

def showchildren(**args):
    ctx = args['ctx']
    childrevs = ['%d:%s' % (cctx, cctx) for cctx in ctx.children()]
    return showlist('children', childrevs, **args)

def showdate(repo, ctx, templ, **args):
    # (timestamp, tzoffset) tuple; rendering is left to date filters
    return ctx.date()

def showdescription(repo, ctx, templ, **args):
    return ctx.description().strip()

def showdiffstat(repo, ctx, templ, **args):
    # summarize the diff vs first parent as 'files: +adds/-removes'
    diff = patch.diff(repo, ctx.parents()[0].node(), ctx.node())
    files, adds, removes = 0, 0, 0
    for i in patch.diffstatdata(util.iterlines(diff)):
        files += 1
        adds += i[1]
        removes += i[2]
    return '%s: +%s/-%s' % (files, adds, removes)

def showextras(**args):
    templ = args['templ']
    for key, value in sorted(args['ctx'].extra().items()):
        # rebind args to a copy so each expansion sees its own key/value
        args = args.copy()
        args.update(dict(key=key, value=value))
        yield templ('extra', **args)

def showfileadds(**args):
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_add', getfiles(repo, ctx, revcache)[1], **args)

def showfilecopies(**args):
    cache, ctx = args['cache'], args['ctx']
    copies = args['revcache'].get('copies')
    if copies is None:
        # no precomputed copies: derive them via the rename cache
        if 'getrenamed' not in cache:
            cache['getrenamed'] = getrenamedfn(args['repo'])
        copies = []
        getrenamed = cache['getrenamed']
        for fn in ctx.files():
            rename = getrenamed(fn, ctx.rev())
            if rename:
                copies.append((fn, rename[0]))

    c = [{'name': x[0], 'source': x[1]} for x in copies]
    return showlist('file_copy', c, plural='file_copies', **args)

# showfilecopiesswitch() displays file copies only if copy records are
# provided before calling the templater, usually with a --copies
# command line switch.
def showfilecopiesswitch(**args):
    copies = args['revcache'].get('copies') or []
    c = [{'name': x[0], 'source': x[1]} for x in copies]
    return showlist('file_copy', c, plural='file_copies', **args)

def showfiledels(**args):
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_del', getfiles(repo, ctx, revcache)[2], **args)

def showfilemods(**args):
    repo, ctx, revcache = args['repo'], args['ctx'], args['revcache']
    return showlist('file_mod', getfiles(repo, ctx, revcache)[0], **args)

def showfiles(**args):
    return showlist('file', args['ctx'].files(), **args)

def showlatesttag(repo, ctx, templ, cache, **args):
    return getlatesttags(repo, ctx, cache)[2]

def showlatesttagdistance(repo, ctx, templ, cache, **args):
    return getlatesttags(repo, ctx, cache)[1]

def showmanifest(**args):
    repo, ctx, templ = args['repo'], args['ctx'], args['templ']
    args = args.copy()
    args.update(dict(rev=repo.manifest.rev(ctx.changeset()[0]),
                     node=hex(ctx.changeset()[0])))
    return templ('manifest', **args)

def shownode(repo, ctx, templ, **args):
    # full 40-char hex changeset id
    return ctx.hex()

def showrev(repo, ctx, templ, **args):
    return ctx.rev()

def showtags(**args):
    return showlist('tag', args['ctx'].tags(), **args)

# keywords are callables like:
# fn(repo, ctx, templ, cache, revcache, **args)
# with:
# repo - current repository instance
# ctx - the changectx being displayed
# templ - the templater instance
# cache - a cache dictionary for the whole templater run
# revcache - a cache dictionary for the current revision
keywords = {
    'author': showauthor,
    'branches': showbranches,
    'children': showchildren,
    'date': showdate,
    'desc': showdescription,
    'diffstat': showdiffstat,
    'extras': showextras,
    'file_adds': showfileadds,
    'file_copies': showfilecopies,
    'file_copies_switch': showfilecopiesswitch,
    'file_dels': showfiledels,
    'file_mods': showfilemods,
    'files': showfiles,
    'latesttag': showlatesttag,
    'latesttagdistance': showlatesttagdistance,
    'manifest': showmanifest,
    'node': shownode,
    'rev': showrev,
    'tags': showtags,
}
# templater.py - template expansion for output
#
# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

from i18n import _
import sys, os
import util, config, templatefilters

# default search path for template directories, relative to this module
path = ['templates', '../templates']
stringify = templatefilters.stringify

def _flatten(thing):
    '''yield a single stream from a possibly nested set of iterators'''
    if isinstance(thing, str):
        yield thing
    elif not hasattr(thing, '__iter__'):
        if thing is not None:
            yield str(thing)
    else:
        # one level of nesting is unrolled inline; deeper nesting recurses
        for i in thing:
            if isinstance(i, str):
                yield i
            elif not hasattr(i, '__iter__'):
                if i is not None:
                    yield str(i)
            elif i is not None:
                for j in _flatten(i):
                    yield j

def parsestring(s, quoted=True):
    '''parse a string using simple c-like syntax.
    string must be in quotes if quoted is True.'''
    if quoted:
        if len(s) < 2 or s[0] != s[-1]:
            raise SyntaxError(_('unmatched quotes'))
        return s[1:-1].decode('string_escape')

    return s.decode('string_escape')

class engine(object):
    '''template expansion engine.

    template expansion works like this. a map file contains key=value
    pairs. if value is quoted, it is treated as string. otherwise, it
    is treated as name of template file.

    templater is asked to expand a key in map. it looks up key, and
    looks for strings like this: {foo}. it expands {foo} by looking up
    foo in map, and substituting it. expansion is recursive: it stops
    when there is no more {foo} to replace.

    expansion also allows formatting and filtering.

    format uses key to expand each item in list. syntax is
    {key%format}.

    filter uses function to transform value. syntax is
    {key|filter1|filter2|...}.'''

    def __init__(self, loader, filters={}, defaults={}):
        # loader: callable returning raw template text for a name
        # filters: name -> callable used by {key|filter}
        # defaults: fallback values for keys missing from a mapping
        self._loader = loader
        self._filters = filters
        self._defaults = defaults
        self._cache = {}

    def process(self, t, mapping):
        '''Perform expansion. t is name of map element to expand.
        mapping contains added elements for use during expansion. Is a
        generator.'''
        return _flatten(self._process(self._load(t), mapping))

    def _load(self, t):
        '''load, parse, and cache a template'''
        if t not in self._cache:
            self._cache[t] = self._parse(self._loader(t))
        return self._cache[t]

    def _get(self, mapping, key):
        # look up key in mapping, then defaults; callables are invoked
        # with the whole mapping as keyword arguments
        v = mapping.get(key)
        if v is None:
            v = self._defaults.get(key, '')
        if hasattr(v, '__call__'):
            v = v(**mapping)
        return v

    def _filter(self, mapping, parts):
        # apply the pre-resolved filter chain to the keyword's value
        filters, val = parts
        x = self._get(mapping, val)
        for f in filters:
            x = f(x)
        return x

    def _format(self, mapping, args):
        # expand {key%format}: render the pre-parsed sub-template once
        # per item of the keyword's value
        key, parsed = args
        v = self._get(mapping, key)
        if not hasattr(v, '__iter__'):
            raise SyntaxError(_("error expanding '%s%%%s'")
                              % (key, parsed))
        lm = mapping.copy()
        for i in v:
            if isinstance(i, dict):
                lm.update(i)
                yield self._process(parsed, lm)
            else:
                # v is not an iterable of dicts, this happen when 'key'
                # has been fully expanded already and format is useless.
                # If so, return the expanded value.
                yield i

    def _parse(self, tmpl):
        '''preparse a template'''
        # result is a list of (handler, data) pairs; handler None means
        # literal text, otherwise one of _format/_filter/_get
        parsed = []
        pos, stop = 0, len(tmpl)
        while pos < stop:
            n = tmpl.find('{', pos)
            if n < 0:
                parsed.append((None, tmpl[pos:stop]))
                break
            if n > 0 and tmpl[n - 1] == '\\':
                # escaped
                parsed.append((None, tmpl[pos:n - 1] + "{"))
                pos = n + 1
                continue
            if n > pos:
                parsed.append((None, tmpl[pos:n]))

            pos = n
            n = tmpl.find('}', pos)
            if n < 0:
                # no closing
                parsed.append((None, tmpl[pos:stop]))
                break

            expr = tmpl[pos + 1:n]
            pos = n + 1

            if '%' in expr:
                # the keyword should be formatted with a template
                key, t = expr.split('%')
                parsed.append((self._format, (key.strip(),
                                              self._load(t.strip()))))
            elif '|' in expr:
                # process the keyword value with one or more filters
                parts = expr.split('|')
                val = parts[0].strip()
                try:
                    filters = [self._filters[f.strip()] for f in parts[1:]]
                except KeyError, i:
                    raise SyntaxError(_("unknown filter '%s'") % i[0])
                parsed.append((self._filter, (filters, val)))
            else:
                # just get the keyword
                parsed.append((self._get, expr.strip()))

        return parsed

    def _process(self, parsed, mapping):
        '''Render a template. Returns a generator.'''
        for f, e in parsed:
            if f:
                yield f(mapping, e)
            else:
                yield e

# engine name -> engine class; only 'default' is defined here
engines = {'default': engine}

class templater(object):

    def __init__(self, mapfile, filters={}, defaults={}, cache={},
                 minchunk=1024, maxchunk=65536):
        '''set up template engine.
        mapfile is name of file to read map definitions from.
        filters is dict of functions. each transforms a value into another.
        defaults is dict of default map definitions.'''
        self.mapfile = mapfile or 'template'
        self.cache = cache.copy()
        self.map = {}
        self.base = (mapfile and os.path.dirname(mapfile)) or ''
        self.filters = templatefilters.filters.copy()
        self.filters.update(filters)
        self.defaults = defaults
        self.minchunk, self.maxchunk = minchunk, maxchunk
        self.engines = {}

        if not mapfile:
            return
        if not os.path.exists(mapfile):
            raise util.Abort(_('style not found: %s') % mapfile)

        conf = config.config()
        conf.read(mapfile)

        for key, val in conf[''].items():
            if val[0] in "'\"":
                # quoted value: an inline template string
                try:
                    self.cache[key] = parsestring(val)
                except SyntaxError, inst:
                    raise SyntaxError('%s: %s' %
                                      (conf.source('', key), inst.args[0]))
            else:
                # unquoted value: a template file, optionally prefixed
                # with 'enginename:'
                val = 'default', val
                if ':' in val[1]:
                    val = val[1].split(':', 1)
                self.map[key] = val[0], os.path.join(self.base, val[1])

    def __contains__(self, key):
        return key in self.cache or key in self.map

    def load(self, t):
        '''Get the template for the given template name. Use a local cache.'''
        if not t in self.cache:
            try:
                self.cache[t] = open(self.map[t][1]).read()
            except IOError, inst:
                raise IOError(inst.args[0], _('template file %s: %s') %
                              (self.map[t][1], inst.args[1]))
        return self.cache[t]

    def __call__(self, t, **mapping):
        # pick the engine declared for this template (default otherwise),
        # instantiating and caching it on first use
        ttype = t in self.map and self.map[t][0] or 'default'
        proc = self.engines.get(ttype)
        if proc is None:
            proc = engines[ttype](self.load, self.filters, self.defaults)
            self.engines[ttype] = proc

        stream = proc.process(t, mapping)
        if self.minchunk:
            # re-chunk the output stream for efficient writing
            stream = util.increasingchunks(stream, min=self.minchunk,
                                           max=self.maxchunk)
        return stream

def templatepath(name=None):
    '''return location of template file or directory (if no name).
    returns None if not found.'''
    normpaths = []

    # executable version (py2exe) doesn't support __file__
    if hasattr(sys, 'frozen'):
        module = sys.executable
    else:
        module = __file__
    for f in path:
        if f.startswith('/'):
            p = f
        else:
            fl = f.split('/')
            p = os.path.join(os.path.dirname(module), *fl)
        if name:
            p = os.path.join(p, name)
        if name and os.path.exists(p):
            return os.path.normpath(p)
        elif os.path.isdir(p):
            normpaths.append(os.path.normpath(p))

    return normpaths

def stylemap(styles, paths=None):
    """Return path to mapfile for a given style.

    Searches mapfile in the following locations:
    1. templatepath/style/map
    2. templatepath/map-style
    3. templatepath/map
    """

    if paths is None:
        paths = templatepath()
    elif isinstance(paths, str):
        paths = [paths]

    if isinstance(styles, str):
        styles = [styles]

    for style in styles:
        if not style:
            continue
        locations = [os.path.join(style, 'map'), 'map-' + style]
        locations.append('map')

        for path in paths:
            for location in locations:
                mapfile = os.path.join(path, location)
                if os.path.isfile(mapfile):
                    return style, mapfile

    raise RuntimeError("No hgweb templates found in %r" % paths)
href="{urlbase}{url}"/> + <title>{repo|escape} Changelog</title> + {latestentry%feedupdated} + +{entries%changelogentry} +</feed> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/changelogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/changelogentry.tmpl new file mode 100644 index 0000000..068b5ea --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/changelogentry.tmpl @@ -0,0 +1,16 @@ + <entry> + <title>{desc|strip|firstline|strip|escape|nonempty}</title> + <id>{urlbase}{url}#changeset-{node}</id> + <link href="{urlbase}{url}rev/{node|short}"/> + <author> + <name>{author|person|escape}</name> + <email>{author|email|obfuscate}</email> + </author> + <updated>{date|rfc3339date}</updated> + <published>{date|rfc3339date}</published> + <content type="xhtml"> + <div xmlns="http://www.w3.org/1999/xhtml"> + <pre xml:space="preserve">{desc|escape|nonempty}</pre> + </div> + </content> + </entry> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/error.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/error.tmpl new file mode 100644 index 0000000..5735fba --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/error.tmpl @@ -0,0 +1,17 @@ +{header} + <!-- Error --> + <id>{urlbase}{url}</id> + <link rel="self" href="{urlbase}{url}atom-log"/> + <link rel="alternate" href="{urlbase}{url}"/> + <title>Error</title> + <updated>1970-01-01T00:00:00+00:00</updated> + <entry> + <title>Error</title> + <id>http://mercurial.selenic.com/#error</id> + <author> + <name>mercurial</name> + </author> + <updated>1970-01-01T00:00:00+00:00</updated> + <content type="text">{error|escape}</content> + </entry> +</feed> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/filelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/filelog.tmpl new file mode 
100644 index 0000000..99d4e9b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/filelog.tmpl @@ -0,0 +1,8 @@ +{header} + <id>{urlbase}{url}atom-log/tip/{file|escape}</id> + <link rel="self" href="{urlbase}{url}atom-log/tip/{file|urlescape}"/> + <title>{repo|escape}: {file|escape} history</title> + {latestentry%feedupdated} + +{entries%changelogentry} +</feed> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/header.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/header.tmpl new file mode 100644 index 0000000..90ffceb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/header.tmpl @@ -0,0 +1,2 @@ +<?xml version="1.0" encoding="{encoding}"?> +<feed xmlns="http://www.w3.org/2005/Atom">
\ No newline at end of file diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/map new file mode 100644 index 0000000..c016b55 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/map @@ -0,0 +1,11 @@ +default = 'changelog' +feedupdated = '<updated>{date|rfc3339date}</updated>' +mimetype = 'application/atom+xml; charset={encoding}' +header = header.tmpl +changelog = changelog.tmpl +changelogentry = changelogentry.tmpl +filelog = filelog.tmpl +filelogentry = filelogentry.tmpl +tags = tags.tmpl +tagentry = tagentry.tmpl +error = error.tmpl diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/tagentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/tagentry.tmpl new file mode 100644 index 0000000..857df12 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/tagentry.tmpl @@ -0,0 +1,8 @@ + <entry> + <title>{tag|escape}</title> + <link rel="alternate" href="{urlbase}{url}rev/{node|short}"/> + <id>{urlbase}{url}#tag-{node}</id> + <updated>{date|rfc3339date}</updated> + <published>{date|rfc3339date}</published> + <content type="text">{tag|strip|escape}</content> + </entry> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/tags.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/tags.tmpl new file mode 100644 index 0000000..82294ec --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/atom/tags.tmpl @@ -0,0 +1,11 @@ +{header} + <id>{urlbase}{url}</id> + <link rel="self" href="{urlbase}{url}atom-tags"/> + <link rel="alternate" href="{urlbase}{url}tags"/> + <title>{repo|escape}: tags</title> + <summary>{repo|escape} tag history</summary> + <author><name>Mercurial SCM</name></author> + {latestentry%feedupdated} + +{entriesnotip%tagentry} +</feed> diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/coal/header.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/coal/header.tmpl new file mode 100644 index 0000000..ed0f42d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/coal/header.tmpl @@ -0,0 +1,6 @@ +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"> +<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US"> +<head> +<link rel="icon" href="{staticurl}hgicon.png" type="image/png" /> +<meta name="robots" content="index, nofollow" /> +<link rel="stylesheet" href="{staticurl}style-coal.css" type="text/css" /> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/coal/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/coal/map new file mode 100644 index 0000000..eaa8c0c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/coal/map @@ -0,0 +1,201 @@ +default = 'shortlog' + +mimetype = 'text/html; charset={encoding}' +header = header.tmpl +footer = ../paper/footer.tmpl +search = ../paper/search.tmpl + +changelog = ../paper/shortlog.tmpl +shortlog = ../paper/shortlog.tmpl +shortlogentry = ../paper/shortlogentry.tmpl +graph = ../paper/graph.tmpl + +help = ../paper/help.tmpl +helptopics = ../paper/helptopics.tmpl + +helpentry = '<tr><td><a href="{url}help/{topic|escape}{sessionvars%urlparameter}">{topic|escape}</a></td><td>{summary|escape}</td></tr>' + +naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> ' +filedifflink = '<a 
href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenodelink = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenolink = '{file|escape} ' +fileellipses = '...' +changelogentry = ../paper/shortlogentry.tmpl +searchentry = ../paper/shortlogentry.tmpl +changeset = ../paper/changeset.tmpl +manifest = ../paper/manifest.tmpl + +nav = '{before%naventry} {after%naventry}' +navshort = '{before%navshortentry}{after%navshortentry}' +navgraph = '{before%navgraphentry}{after%navgraphentry}' +filenav = '{before%filenaventry}{after%filenaventry}' + +direntry = ' + <tr class="fileline parity{parity}"> + <td class="name"> + <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}"> + <img src="{staticurl}coal-folder.png" alt="dir."/> {basename|escape}/ + </a> + <a href="{url}file/{node|short}{path|urlescape}/{emptydirs|urlescape}{sessionvars%urlparameter}"> + {emptydirs|escape} + </a> + </td> + <td class="size"></td> + <td class="permissions">drwxr-xr-x</td> + </tr>' + +fileentry = ' + <tr class="fileline parity{parity}"> + <td class="filename"> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + <img src="{staticurl}coal-file.png" alt="file"/> {basename|escape} + </a> + </td> + <td class="size">{size}</td> + <td class="permissions">{permissions|permissions}</td> + </tr>' + +filerevision = ../paper/filerevision.tmpl +fileannotate = ../paper/fileannotate.tmpl +filediff = ../paper/filediff.tmpl +filelog = ../paper/filelog.tmpl +fileline = ' + <div class="parity{parity} source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</div>' +filelogentry = ../paper/filelogentry.tmpl + +annotateline = ' + <tr class="parity{parity}"> + <td class="annotate"> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}" + title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a> + </td> + <td 
class="source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</td> + </tr>' + +diffblock = '<div class="source bottomline parity{parity}"><pre>{lines}</pre></div>' +difflineplus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="plusline">{line|escape}</span>' +difflineminus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="minusline">{line|escape}</span>' +difflineat = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="atline">{line|escape}</span>' +diffline = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}' + +changelogparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' + +changesetparent = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> ' + +filerevparent = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> ' +filerevchild = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> ' + +filerename = '{file|escape}@' +filelogrename = ' + <tr> + <th>base:</th> + <td> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {file|escape}@{node|short} + </a> + </td> + </tr>' +fileannotateparent = ' + <tr> + <td class="metatag">parent:</td> + <td> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </td> + </tr>' +changesetchild = ' <a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>' +changelogchild = ' + <tr> + <th class="child">child</th> + <td class="child"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}"> + {node|short} + </a> + </td> + </tr>' +fileannotatechild = ' + <tr> + <td class="metatag">child:</td> + <td> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {node|short} + 
</a> + </td> + </tr>' +tags = ../paper/tags.tmpl +tagentry = ' + <tr class="tagEntry parity{parity}"> + <td> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}"> + {tag|escape} + </a> + </td> + <td class="node"> + {node|short} + </td> + </tr>' +branches = ../paper/branches.tmpl +branchentry = ' + <tr class="tagEntry parity{parity}"> + <td> + <a href="{url}shortlog/{node|short}{sessionvars%urlparameter}" class="{status}"> + {branch|escape} + </a> + </td> + <td class="node"> + {node|short} + </td> + </tr>' +changelogtag = '<span class="tag">{name|escape}</span> ' +changesettag = '<span class="tag">{tag|escape}</span> ' +changelogbranchhead = '<span class="branchhead">{name|escape}</span> ' +changelogbranchname = '<span class="branchname">{name|escape}</span> ' + +filediffparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filelogparent = ' + <tr> + <th>parent {rev}:</th> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filediffchild = ' + <tr> + <th class="child">child {rev}:</th> + <td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +filelogchild = ' + <tr> + <th>child {rev}:</th> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' + +indexentry = ' + <tr class="parity{parity}"> + <td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td> + <td>{description}</td> + <td>{contact|obfuscate}</td> + <td class="age">{lastchange|age}</td> + <td class="indexlinks">{archives%indexarchiveentry}</td> + </tr>\n' +indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}"> ↓{type|escape}</a>' +index = ../paper/index.tmpl +archiveentry = ' + <li> + <a href="{url}archive/{node|short}{extension|urlescape}">{type|escape}</a> + </li>' +notfound = 
../paper/notfound.tmpl +error = ../paper/error.tmpl +urlparameter = '{separator}{name}={value|urlescape}' +hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/branches.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/branches.tmpl new file mode 100644 index 0000000..3df49f9 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/branches.tmpl @@ -0,0 +1,31 @@ +{header} +<title>{repo|escape}: Branches</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / branches +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +branches | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div class="title"> </div> +<table cellspacing="0"> +{entries%branchentry} +</table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changelog.tmpl new file mode 100644 index 0000000..62c188f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changelog.tmpl @@ -0,0 +1,40 @@ +{header} 
+<title>{repo|escape}: Changelog</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / changelog +</div> + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<div class="search"> +<input type="text" name="rev" /> +</div> +</form> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> | +changelog | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry} | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +{changenav%nav}<br/> +</div> + +{entries%changelogentry} + +<div class="page_nav"> +{changenav%nav}<br/> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changelogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changelogentry.tmpl new file mode 100644 index 0000000..bb11704 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changelogentry.tmpl @@ -0,0 +1,14 @@ +<div> +<a class="title" href="{url}rev/{node|short}{sessionvars%urlparameter}"><span class="age">{date|age}</span>{desc|strip|firstline|escape|nonempty}<span class="logtags"> {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span></a> +</div> +<div class="title_text"> +<div class="log_link"> +<a 
href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a><br/> +</div> +<i>{author|obfuscate} [{date|rfc822date}] rev {rev}</i><br/> +</div> +<div class="log_body"> +{desc|strip|escape|addbreaks|nonempty} +<br/> +<br/> +</div> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changeset.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changeset.tmpl new file mode 100644 index 0000000..13267c2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/changeset.tmpl @@ -0,0 +1,52 @@ +{header} +<title>{repo|escape}: changeset {rev}:{node|short}</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / changeset +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> | +changeset | +<a href="{url}raw-rev/{node|short}">raw</a> {archives%archiveentry} | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div> +<a class="title" href="{url}raw-rev/{node|short}">{desc|strip|escape|firstline|nonempty} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span></a> +</div> +<div class="title_text"> +<table cellspacing="0"> 
+<tr><td>author</td><td>{author|obfuscate}</td></tr> +<tr><td></td><td>{date|date} ({date|age})</td></tr> +{branch%changesetbranch} +<tr><td>changeset {rev}</td><td style="font-family:monospace">{node|short}</td></tr> +{parent%changesetparent} +{child%changesetchild} +</table></div> + +<div class="page_body"> +{desc|strip|escape|addbreaks|nonempty} +</div> +<div class="list_head"></div> +<div class="title_text"> +<table cellspacing="0"> +{files} +</table></div> + +<div class="page_body">{diff}</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/error.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/error.tmpl new file mode 100644 index 0000000..25b71fc --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/error.tmpl @@ -0,0 +1,25 @@ +{header} +<title>{repo|escape}: Error</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / error +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | <a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | <a href="{url}log{sessionvars%urlparameter}">changelog</a> | <a href="{url}tags{sessionvars%urlparameter}">tags</a> | <a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a><br/> +</div> + +<div class="page_body"> +<br/> +<i>An error occurred while processing your request</i><br/> +<br/> +{error|escape} +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/fileannotate.tmpl 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/fileannotate.tmpl new file mode 100644 index 0000000..54b05c6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/fileannotate.tmpl @@ -0,0 +1,64 @@ +{header} +<title>{repo|escape}: {file|escape}@{node|short} (annotated)</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / annotate +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> | +<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | +<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | +<a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a> | +<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> | +annotate | +<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | +<a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a> | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div class="title">{file|escape}</div> + +<div class="title_text"> +<table cellspacing="0"> +<tr> + <td>author</td> + 
<td>{author|obfuscate}</td></tr> +<tr> + <td></td> + <td>{date|date} ({date|age})</td></tr> +{branch%filerevbranch} +<tr> + <td>changeset {rev}</td> + <td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr> +{parent%fileannotateparent} +{child%fileannotatechild} +<tr> + <td>permissions</td> + <td style="font-family:monospace">{permissions|permissions}</td></tr> +</table> +</div> + +<div class="page_path"> +{desc|strip|escape|addbreaks|nonempty} +</div> +<div class="page_body"> +<table> +{annotate%annotateline} +</table> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filediff.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filediff.tmpl new file mode 100644 index 0000000..91343ed --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filediff.tmpl @@ -0,0 +1,49 @@ +{header} +<title>{repo|escape}: diff {file|escape}</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / diff +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> | +<a 
href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | +<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | +<a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a> | +<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> | +<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> | +diff | +<a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a><br/> | +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<div class="title">{file|escape}</div> + +<table> +{branch%filerevbranch} +<tr> + <td>changeset {rev}</td> + <td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr> +{parent%filediffparent} +{child%filediffchild} +</table> + +<div class="list_head"></div> + +<div class="page_body"> +{diff} +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filelog.tmpl new file mode 100644 index 0000000..d034593 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filelog.tmpl @@ -0,0 +1,41 @@ +{header} +<title>{repo|escape}: File revisions</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / file revisions +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a 
href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | +revisions | +<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> | +<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | +<a href="{url}rss-log/{node|short}/{file|urlescape}">rss</a> | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +{nav%filenav} +</div> + +<div class="title" >{file|urlescape}</div> + +<table> +{entries%filelogentry} +</table> + +<div class="page_nav"> +{nav%filenav} +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filerevision.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filerevision.tmpl new file mode 100644 index 0000000..065a21b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/filerevision.tmpl @@ -0,0 +1,63 @@ +{header} +<title>{repo|escape}: {file|escape}@{node|short}</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / file revision +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a 
href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> | +<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | +file | +<a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a> | +<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> | +<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> | +<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | +<a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a> | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div class="title">{file|escape}</div> + +<div class="title_text"> +<table cellspacing="0"> +<tr> + <td>author</td> + <td>{author|obfuscate}</td></tr> +<tr> + <td></td> + <td>{date|date} ({date|age})</td></tr> +{branch%filerevbranch} +<tr> + <td>changeset {rev}</td> + <td style="font-family:monospace"><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr> +{parent%filerevparent} +{child%filerevchild} +<tr> + <td>permissions</td> + <td style="font-family:monospace">{permissions|permissions}</td></tr> +</table> +</div> + +<div class="page_path"> +{desc|strip|escape|addbreaks|nonempty} +</div> + +<div class="page_body"> +{text%fileline} +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/footer.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/footer.tmpl new file mode 100644 index 0000000..a5f74c3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/footer.tmpl @@ -0,0 +1,11 @@ +<div class="page_footer"> +<div class="page_footer_text">{repo|escape}</div> +<div class="rss_logo"> +<a href="{url}rss-log">RSS</a> +<a 
href="{url}atom-log">Atom</a> +</div> +<br /> +{motd} +</div> +</body> +</html> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/graph.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/graph.tmpl new file mode 100644 index 0000000..f329d1c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/graph.tmpl @@ -0,0 +1,122 @@ +{header} +<title>{repo|escape}: Graph</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +<!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]--> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / graph +</div> + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<div class="search"> +<input type="text" name="rev" /> +</div> +</form> +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> | +graph | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +<a href="{url}graph/{rev}{lessvars%urlparameter}">less</a> +<a href="{url}graph/{rev}{morevars%urlparameter}">more</a> +| {changenav%navgraph}<br/> +</div> + +<div class="title"> </div> + +<noscript>The revision graph only works with JavaScript-enabled browsers.</noscript> + +<div id="wrapper"> +<ul id="nodebgs"></ul> +<canvas id="graph" width="224" 
height="{canvasheight}"></canvas> +<ul id="graphnodes"></ul> +</div> + +<script type="text/javascript" src="{staticurl}graph.js"></script> +<script> +<!-- hide script content + +var data = {jsdata|json}; +var graph = new Graph(); +graph.scale({bg_height}); + +graph.edge = function(x0, y0, x1, y1, color) \{ + + this.setColor(color, 0.0, 0.65); + this.ctx.beginPath(); + this.ctx.moveTo(x0, y0); + this.ctx.lineTo(x1, y1); + this.ctx.stroke(); + +} + +var revlink = '<li style="_STYLE"><span class="desc">'; +revlink += '<a class="list" href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID"><b>_DESC</b></a>'; +revlink += '</span> _TAGS'; +revlink += '<span class="info">_DATE, by _USER</span></li>'; + +graph.vertex = function(x, y, color, parity, cur) \{ + + this.ctx.beginPath(); + color = this.setColor(color, 0.25, 0.75); + this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); + this.ctx.fill(); + + var bg = '<li class="bg parity' + parity + '"></li>'; + var left = (this.columns + 1) * this.bg_height; + var nstyle = 'padding-left: ' + left + 'px;'; + var item = revlink.replace(/_STYLE/, nstyle); + item = item.replace(/_PARITY/, 'parity' + parity); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_DESC/, cur[3]); + item = item.replace(/_USER/, cur[4]); + item = item.replace(/_DATE/, cur[5]); + + var tagspan = ''; + if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) \{ + tagspan = '<span class="logtags">'; + if (cur[6][1]) \{ + tagspan += '<span class="branchtag" title="' + cur[6][0] + '">'; + tagspan += cur[6][0] + '</span> '; + } else if (!cur[6][1] && cur[6][0] != 'default') \{ + tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">'; + tagspan += cur[6][0] + '</span> '; + } + if (cur[7].length) \{ + for (var t in cur[7]) \{ + var tag = cur[7][t]; + tagspan += '<span class="tagtag">' + tag + '</span> '; + } + } + tagspan += '</span>'; + } + + item = item.replace(/_TAGS/, tagspan); + 
return [bg, item]; + +} + +graph.render(data); + +// stop hiding script --> +</script> + +<div class="page_nav"> +<a href="{url}graph/{rev}{lessvars%urlparameter}">less</a> +<a href="{url}graph/{rev}{morevars%urlparameter}">more</a> +| {changenav%navgraph} +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/header.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/header.tmpl new file mode 100644 index 0000000..f3df3d7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/header.tmpl @@ -0,0 +1,8 @@ +<?xml version="1.0" encoding="{encoding}"?> +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US" lang="en-US"> +<head> +<link rel="icon" href="{staticurl}hgicon.png" type="image/png" /> +<meta name="robots" content="index, nofollow"/> +<link rel="stylesheet" href="{staticurl}style-gitweb.css" type="text/css" /> + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/help.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/help.tmpl new file mode 100644 index 0000000..42ad8d2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/help.tmpl @@ -0,0 +1,31 @@ +{header} +<title>{repo|escape}: Branches</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / branches +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a 
href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +branches | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> +<br/> +</div> + +<div class="title"> </div> + +<pre> +{doc|escape} +</pre> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/helptopics.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/helptopics.tmpl new file mode 100644 index 0000000..1f2a436 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/helptopics.tmpl @@ -0,0 +1,38 @@ +{header} +<title>{repo|escape}: Branches</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / branches +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> | +help +<br/> +</div> + +<div class="title"> </div> +<table cellspacing="0"> +<tr><td colspan="2"><h2><a name="main" href="#topics">Topics</a></h2></td></tr> +{topics % helpentry} + +<tr><td colspan="2"><h2><a name="main" href="#main">Main 
Commands</a></h2></td></tr> +{earlycommands % helpentry} + +<tr><td colspan="2"><h2><a name="other" href="#other">Other Commands</a></h2></td></tr> +{othercommands % helpentry} +</table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/index.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/index.tmpl new file mode 100644 index 0000000..b8e66b0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/index.tmpl @@ -0,0 +1,26 @@ +{header} +<title>Mercurial repositories index</title> +</head> +<body> + +<div class="page_header"> + <a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a> + Repositories list +</div> + +<table cellspacing="0"> + <tr> + <td><a href="?sort={sort_name}">Name</a></td> + <td><a href="?sort={sort_description}">Description</a></td> + <td><a href="?sort={sort_contact}">Contact</a></td> + <td><a href="?sort={sort_lastchange}">Last modified</a></td> + <td> </td> + <td> </td> + </tr> + {entries%indexentry} +</table> +<div class="page_footer"> +{motd} +</div> +</body> +</html> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/manifest.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/manifest.tmpl new file mode 100644 index 0000000..5123ed6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/manifest.tmpl @@ -0,0 +1,40 @@ +{header} +<title>{repo|escape}: files</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / files +</div> + 
+<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +files | +<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> {archives%archiveentry} | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div class="title">{path|escape} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span></div> +<table cellspacing="0"> +<tr class="parity{upparity}"> +<td style="font-family:monospace">drwxr-xr-x</td> +<td style="font-family:monospace"></td> +<td style="font-family:monospace"></td> +<td><a href="{url}file/{node|short}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td> +<td class="link"> </td> +</tr> +{dentries%direntry} +{fentries%fileentry} +</table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/map new file mode 100644 index 0000000..ce6a325 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/map @@ -0,0 +1,260 @@ +default = 'summary' +mimetype = 'text/html; charset={encoding}' +header = header.tmpl +footer = footer.tmpl +search = search.tmpl +changelog = changelog.tmpl +summary = summary.tmpl +error = error.tmpl +notfound = notfound.tmpl + +help = help.tmpl +helptopics = helptopics.tmpl + +helpentry = '<tr><td><a href="{url}help/{topic|escape}{sessionvars%urlparameter}">{topic|escape}</a></td><td>{summary|escape}</td></tr>' + +naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navshortentry = '<a 
href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> ' +filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenodelink = ' + <tr class="parity{parity}"> + <td><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a></td> + <td></td> + <td class="link"> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> | + <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | + <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> + </td> + </tr>' +filenolink = ' + <tr class="parity{parity}"> + <td><a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a></td> + <td></td> + <td class="link"> + file | + annotate | + <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | + <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> + </td> + </tr>' + +nav = '{before%naventry} {after%naventry}' +navshort = '{before%navshortentry}{after%navshortentry}' +navgraph = '{before%navgraphentry}{after%navgraphentry}' +filenav = '{before%filenaventry}{after%filenaventry}' + +fileellipses = '...' 
+changelogentry = changelogentry.tmpl +searchentry = changelogentry.tmpl +changeset = changeset.tmpl +manifest = manifest.tmpl +direntry = ' + <tr class="parity{parity}"> + <td style="font-family:monospace">drwxr-xr-x</td> + <td style="font-family:monospace"></td> + <td style="font-family:monospace"></td> + <td> + <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">{basename|escape}</a> + <a href="{url}file/{node|short}{path|urlescape}/{emptydirs|urlescape}{sessionvars%urlparameter}">{emptydirs|escape}</a> + </td> + <td class="link"> + <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> + </td> + </tr>' +fileentry = ' + <tr class="parity{parity}"> + <td style="font-family:monospace">{permissions|permissions}</td> + <td style="font-family:monospace" align=right>{date|isodate}</td> + <td style="font-family:monospace" align=right>{size}</td> + <td class="list"> + <a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{basename|escape}</a> + </td> + <td class="link"> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | + <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> | + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> + </td> + </tr>' +filerevision = filerevision.tmpl +fileannotate = fileannotate.tmpl +filediff = filediff.tmpl +filelog = filelog.tmpl +fileline = ' + <div style="font-family:monospace" class="parity{parity}"> + <pre><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</pre> + </div>' +annotateline = ' + <tr style="font-family:monospace" class="parity{parity}"> + <td class="linenr" style="text-align: right;"> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}" + title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a> + </td> + <td><pre><a class="linenr" 
href="#{lineid}" id="{lineid}">{linenumber}</a></pre></td> + <td><pre>{line|escape}</pre></td> + </tr>' +difflineplus = '<span style="color:#008800;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +difflineminus = '<span style="color:#cc0000;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +difflineat = '<span style="color:#990099;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +diffline = '<span><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +changelogparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +changesetbranch = '<tr><td>branch</td><td>{name}</td></tr>' +changesetparent = ' + <tr> + <td>parent {rev}</td> + <td style="font-family:monospace"> + <a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +filerevbranch = '<tr><td>branch</td><td>{name}</td></tr>' +filerevparent = ' + <tr> + <td>parent {rev}</td> + <td style="font-family:monospace"> + <a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </td> + </tr>' +filerename = '{file|escape}@' +filelogrename = '| <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">base</a>' +fileannotateparent = ' + <tr> + <td>parent {rev}</td> + <td style="font-family:monospace"> + <a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </td> + </tr>' +changelogchild = ' + <tr> + <th class="child">child {rev}:</th> + <td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +changesetchild = ' + <tr> + <td>child {rev}</td> + <td style="font-family:monospace"> + <a 
class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +filerevchild = ' + <tr> + <td>child {rev}</td> + <td style="font-family:monospace"> + <a class="list" href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +fileannotatechild = ' + <tr> + <td>child {rev}</td> + <td style="font-family:monospace"> + <a class="list" href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +tags = tags.tmpl +tagentry = ' + <tr class="parity{parity}"> + <td class="age"><i>{date|age}</i></td> + <td><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"><b>{tag|escape}</b></a></td> + <td class="link"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | + <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | + <a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> + </td> + </tr>' +branches = branches.tmpl +branchentry = ' + <tr class="parity{parity}"> + <td class="age"><i>{date|age}</i></td> + <td><a class="list" href="{url}shortlog/{node|short}{sessionvars%urlparameter}"><b>{node|short}</b></a></td> + <td class="{status}">{branch|escape}</td> + <td class="link"> + <a href="{url}changeset/{node|short}{sessionvars%urlparameter}">changeset</a> | + <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | + <a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> + </td> + </tr>' +diffblock = '<pre>{lines}</pre>' +filediffparent = ' + <tr> + <td>parent {rev}</td> + <td style="font-family:monospace"> + <a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {node|short} + </a> + </td> + </tr>' +filelogparent = ' + <tr> + <td align="right">parent {rev}: </td> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filediffchild = ' + <tr> + <td>child 
{rev}</td> + <td style="font-family:monospace"> + <a class="list" href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +filelogchild = ' + <tr> + <td align="right">child {rev}: </td> + <td><a href="{url}file{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +shortlog = shortlog.tmpl +graph = graph.tmpl +tagtag = '<span class="tagtag" title="{name}">{name}</span> ' +branchtag = '<span class="branchtag" title="{name}">{name}</span> ' +inbranchtag = '<span class="inbranchtag" title="{name}">{name}</span> ' +shortlogentry = ' + <tr class="parity{parity}"> + <td class="age"><i>{date|age}</i></td> + <td><i>{author|person}</i></td> + <td> + <a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"> + <b>{desc|strip|firstline|escape|nonempty}</b> + <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span> + </a> + </td> + <td class="link" nowrap> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | + <a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> + </td> + </tr>' +filelogentry = ' + <tr class="parity{parity}"> + <td class="age"><i>{date|age}</i></td> + <td> + <a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}"> + <b>{desc|strip|firstline|escape|nonempty}</b> + </a> + </td> + <td class="link"> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> {rename%filelogrename}</td> + </tr>' +archiveentry = ' | <a href="{url}archive/{node|short}{extension}">{type|escape}</a> ' +indexentry = ' + <tr class="parity{parity}"> + <td> + <a class="list" href="{url}{sessionvars%urlparameter}"> + <b>{name|escape}</b> + </a> + </td> + <td>{description}</td> + <td>{contact|obfuscate}</td> + 
<td class="age">{lastchange|age}</td> + <td class="indexlinks">{archives%indexarchiveentry}</td> + <td><div class="rss_logo"><a href="{url}rss-log">RSS</a> <a href="{url}atom-log">Atom</a></div></td> + </tr>\n' +indexarchiveentry = ' <a href="{url}archive/{node|short}{extension}">{type|escape}</a> ' +index = index.tmpl +urlparameter = '{separator}{name}={value|urlescape}' +hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/notfound.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/notfound.tmpl new file mode 100644 index 0000000..073bb11 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/notfound.tmpl @@ -0,0 +1,18 @@ +{header} +<title>Mercurial repository not found</title> +</head> + +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a> Not found: {repo|escape} +</div> + +<div class="page_body"> +The specified repository "{repo|escape}" is unknown, sorry. +<br/> +<br/> +Please go back to the <a href="{url}">main repository list page</a>. 
+</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/search.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/search.tmpl new file mode 100644 index 0000000..6cc51ad --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/search.tmpl @@ -0,0 +1,38 @@ +{header} +<title>{repo|escape}: Search</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / search + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<div class="search"> +<input type="text" name="rev" value="{query|escape}" /> +</div> +</form> +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry} + | + <a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div class="title">searching for {query|escape}</div> + +{entries} + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/shortlog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/shortlog.tmpl new file mode 100644 index 0000000..91e8e0b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/shortlog.tmpl 
@@ -0,0 +1,41 @@ +{header} +<title>{repo|escape}: Shortlog</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / shortlog +</div> + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<div class="search"> +<input type="text" name="rev" /> +</div> +</form> +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +shortlog | +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry} | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/>{changenav%navshort}<br/> +</div> + +<div class="title"> </div> +<table cellspacing="0"> +{entries%shortlogentry} +</table> + +<div class="page_nav"> +{changenav%navshort} +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/summary.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/summary.tmpl new file mode 100644 index 0000000..b964721 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/summary.tmpl @@ -0,0 +1,59 @@ +{header} +<title>{repo|escape}: Summary</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div 
class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / summary + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<div class="search"> +<input type="text" name="rev" /> +</div> +</form> +</div> + +<div class="page_nav"> +summary | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +<a href="{url}tags{sessionvars%urlparameter}">tags</a> | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry} | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div class="title"> </div> +<table cellspacing="0"> +<tr><td>description</td><td>{desc}</td></tr> +<tr><td>owner</td><td>{owner|obfuscate}</td></tr> +<tr><td>last change</td><td>{lastchange|rfc822date}</td></tr> +</table> + +<div><a class="title" href="{url}shortlog{sessionvars%urlparameter}">changes</a></div> +<table cellspacing="0"> +{shortlog} +<tr class="light"><td colspan="4"><a class="list" href="{url}shortlog{sessionvars%urlparameter}">...</a></td></tr> +</table> + +<div><a class="title" href="{url}tags{sessionvars%urlparameter}">tags</a></div> +<table cellspacing="0"> +{tags} +<tr class="light"><td colspan="3"><a class="list" href="{url}tags{sessionvars%urlparameter}">...</a></td></tr> +</table> + +<div><a class="title" href="#">branches</a></div> +<table cellspacing="0"> +{branches%branchentry} +<tr class="light"> + <td colspan="4"><a class="list" href="#">...</a></td> +</tr> +</table> +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/tags.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/tags.tmpl new file mode 100644 index 0000000..4bdf85c --- 
/dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/gitweb/tags.tmpl @@ -0,0 +1,31 @@ +{header} +<title>{repo|escape}: Tags</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}"/> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}"/> +</head> +<body> + +<div class="page_header"> +<a href="http://mercurial.selenic.com/" title="Mercurial" style="float: right;">Mercurial</a><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / tags +</div> + +<div class="page_nav"> +<a href="{url}summary{sessionvars%urlparameter}">summary</a> | +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> | +<a href="{url}log{sessionvars%urlparameter}">changelog</a> | +<a href="{url}graph{sessionvars%urlparameter}">graph</a> | +tags | +<a href="{url}branches{sessionvars%urlparameter}">branches</a> | +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> | +<a href="{url}help{sessionvars%urlparameter}">help</a> +<br/> +</div> + +<div class="title"> </div> +<table cellspacing="0"> +{entries%tagentry} +</table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.changelog b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.changelog new file mode 100644 index 0000000..f54134a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.changelog @@ -0,0 +1,17 @@ +header = '{date|shortdate} {author|person} <{author|email}>\n\n' +header_verbose = '' +changeset = '\t* {files|stringify|fill68|tabindent}{desc|fill68|tabindent|strip}\n\t[{node|short}]{tags}{branches}\n\n' +changeset_quiet = '\t* {desc|firstline|fill68|tabindent|strip}\n\n' +changeset_verbose = '{date|isodate} {author|person} <{author|email}> ({node|short}{tags}{branches})\n\n\t* 
{file_adds|stringify|fill68|tabindent}{file_dels|stringify|fill68|tabindent}{files|stringify|fill68|tabindent}{desc|fill68|tabindent|strip}\n\n' +start_tags = ' [' +tag = '{tag}, ' +last_tag = '{tag}]' +start_branches = ' <' +branch = '{branch}, ' +last_branch = '{branch}>' +file = '{file}, ' +last_file = '{file}:\n\t' +file_add = '{file_add}, ' +last_file_add = '{file_add}: new file.\n* ' +file_del = '{file_del}, ' +last_file_del = '{file_del}: deleted file.\n* ' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.compact b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.compact new file mode 100644 index 0000000..ee66bff --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.compact @@ -0,0 +1,9 @@ +changeset = '{rev}{tags}{parents} {node|short} {date|isodate} {author|user}\n {desc|firstline|strip}\n\n' +changeset_quiet = '{rev}:{node|short}\n' +changeset_verbose = '{rev}{tags}{parents} {node|short} {date|isodate} {author}\n {desc|strip}\n\n' +start_tags = '[' +tag = '{tag},' +last_tag = '{tag}]' +start_parents = ':' +parent = '{rev},' +last_parent = '{rev}' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.default b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.default new file mode 100644 index 0000000..badef72 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.default @@ -0,0 +1,24 @@ +changeset = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\nsummary: {desc|firstline}\n\n' +changeset_quiet = '{rev}:{node|short}\n' +changeset_verbose = 'changeset: {rev}:{node|short}\n{branches}{tags}{parents}user: {author}\ndate: {date|date}\n{files}{file_copies_switch}description:\n{desc|strip}\n\n\n' +changeset_debug = 'changeset: {rev}:{node}\n{branches}{tags}{parents}{manifest}user: {author}\ndate: 
{date|date}\n{file_mods}{file_adds}{file_dels}{file_copies_switch}{extras}description:\n{desc|strip}\n\n\n' +start_files = 'files: ' +file = ' {file}' +end_files = '\n' +start_file_mods = 'files: ' +file_mod = ' {file_mod}' +end_file_mods = '\n' +start_file_adds = 'files+: ' +file_add = ' {file_add}' +end_file_adds = '\n' +start_file_dels = 'files-: ' +file_del = ' {file_del}' +end_file_dels = '\n' +start_file_copies = 'copies: ' +file_copy = ' {name} ({source})' +end_file_copies = '\n' +parent = 'parent: {rev}:{node|formatnode}\n' +manifest = 'manifest: {rev}:{node}\n' +branch = 'branch: {branch}\n' +tag = 'tag: {tag}\n' +extra = 'extra: {key}={value|stringescape}\n' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.xml b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.xml new file mode 100644 index 0000000..f16a3e0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/map-cmdline.xml @@ -0,0 +1,19 @@ +header = '<?xml version="1.0"?>\n<log>\n' +footer = '</log>\n' + +changeset = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n</logentry>\n' +changeset_verbose = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}</logentry>\n' +changeset_debug = '<logentry revision="{rev}" node="{node}">\n{branches}{tags}{parents}<author email="{author|email|xmlescape}">{author|person|xmlescape}</author>\n<date>{date|rfc3339date}</date>\n<msg 
xml:space="preserve">{desc|xmlescape}</msg>\n<paths>\n{file_adds}{file_dels}{file_mods}</paths>\n{file_copies}{extras}</logentry>\n' + +file_add = '<path action="A">{file_add|xmlescape}</path>\n' +file_mod = '<path action="M">{file_mod|xmlescape}</path>\n' +file_del = '<path action="R">{file_del|xmlescape}</path>\n' + +start_file_copies = '<copies>\n' +file_copy = '<copy source="{source|xmlescape}">{name|xmlescape}</copy>\n' +end_file_copies = '</copies>\n' + +parent = '<parent revision="{rev}" node="{node}" />\n' +branch = '<branch>{branch|xmlescape}</branch>\n' +tag = '<tag>{tag|xmlescape}</tag>\n' +extra = '<extra key="{key|xmlescape}">{value|xmlescape}</extra>\n' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/branches.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/branches.tmpl new file mode 100644 index 0000000..999daff --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/branches.tmpl @@ -0,0 +1,37 @@ +{header} + <title>{repo|escape}: Branches</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Branches</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a 
href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li class="current">branches</li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">branches</h2> + <table cellspacing="0"> +{entries%branchentry} + </table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changelog.tmpl new file mode 100644 index 0000000..a00d846 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changelog.tmpl @@ -0,0 +1,41 @@ +{header} + <title>{repo|escape}: changelog</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / changelog</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li class="current">changelog</li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry}</li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link 
no-border">changelog</h2> + <div> + {entries%changelogentry} + </div> + + <div class="page-path"> +{changenav%nav} + </div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changelogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changelogentry.tmpl new file mode 100644 index 0000000..6906629 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changelogentry.tmpl @@ -0,0 +1,6 @@ +<h3 class="changelog"><a class="title" href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}<span class="logtags"> {inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span></a></h3> +<ul class="changelog-entry"> + <li class="age">{date|age}</li> + <li>by <span class="name">{author|obfuscate}</span> <span class="revdate">[{date|rfc822date}] rev {rev}</span></li> + <li class="description">{desc|strip|escape|addbreaks|nonempty}</li> +</ul> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changeset.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changeset.tmpl new file mode 100644 index 0000000..3816db3 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/changeset.tmpl @@ -0,0 +1,64 @@ +{header} +<title>{repo|escape}: changeset {rev}:{node|short}</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / files</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul 
class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <ul class="submenu"> + <li class="current">changeset</li> + <li><a href="{url}raw-rev/{node|short}">raw</a> {archives%archiveentry}</li> + </ul> + + <h2 class="no-link no-border">changeset</h2> + + <h3 class="changeset"><a href="{url}raw-rev/{node|short}">{desc|strip|escape|firstline|nonempty} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span></a></h3> + <p class="changeset-age"><span>{date|age}</span></p> + + <dl class="overview"> + <dt>author</dt> + <dd>{author|obfuscate}</dd> + <dt>date</dt> + <dd>{date|date}</dd> + {branch%changesetbranch} + <dt>changeset {rev}</dt> + <dd>{node|short}</dd> + {parent%changesetparent} + {child%changesetchild} + </dl> + + <p class="description">{desc|strip|escape|addbreaks|nonempty}</p> + + <table> + {files} + </table> + + <div class="diff"> + {diff} + </div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/error.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/error.tmpl new file mode 100644 index 0000000..5ec26b5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/error.tmpl @@ -0,0 +1,35 @@ +{header} + <title>{repo|escape}: Error</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link 
rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Not found: {repo|escape}</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li class="current">summary</li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">An error occurred while processing your request</h2> + <p class="normal">{error|escape}</p> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/fileannotate.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/fileannotate.tmpl new file mode 100644 index 0000000..cb13781 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/fileannotate.tmpl @@ -0,0 +1,64 @@ +{header} +<title>{repo|escape}: {file|escape}@{node|short} (annotated)</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / 
annotate</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <ul class="submenu"> + <li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> + <li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li> + <li class="current">annotate</li> + <li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> + <li><a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a></li> + </ul> + + <h2 class="no-link no-border">{file|escape}@{node|short} (annotated)</h2> + <h3 class="changeset">{file|escape}</h3> + <p class="changeset-age"><span>{date|age}</span></p> + + <dl class="overview"> + <dt>author</dt> + <dd>{author|obfuscate}</dd> + <dt>date</dt> + <dd>{date|date}</dd> + {branch%filerevbranch} + <dt>changeset {rev}</dt> + <dd><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd> + {parent%fileannotateparent} + {child%fileannotatechild} + <dt>permissions</dt> + <dd>{permissions|permissions}</dd> + </dl> + + <p class="description">{desc|strip|escape|addbreaks|nonempty}</p> + + <table class="annotated"> + {annotate%annotateline} + </table> + +{footer} diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filediff.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filediff.tmpl new file mode 100644 index 0000000..84f1f80 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filediff.tmpl @@ -0,0 +1,55 @@ +{header} +<title>{repo|escape}: diff {file|escape}</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / file diff</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <ul class="submenu"> + <li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> + <li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li> + <li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> + <li class="current">diff</li> + <li><a 
href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a></li> + </ul> + + <h2 class="no-link no-border">diff: {file|escape}</h2> + <h3 class="changeset">{file|escape}</h3> + + <dl class="overview"> + {branch%filerevbranch} + <dt>changeset {rev}</dt> + <dd><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd> + {parent%filediffparent} + {child%filediffchild} + </dl> + + <div class="diff"> + {diff} + </div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filelog.tmpl new file mode 100644 index 0000000..8612016 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filelog.tmpl @@ -0,0 +1,50 @@ +{header} +<title>{repo|escape}: File revisions</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / file revisions</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a></li> + <li><a 
href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <ul class="submenu"> + <li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> + <li class="current">revisions</li> + <li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> + <li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> + <li><a href="{url}rss-log/{node|short}/{file|urlescape}">rss</a></li> + </ul> + + <h2 class="no-link no-border">{file|urlescape}</h2> + + <table> + {entries%filelogentry} + </table> + + <div class="page-path"> + {nav%filenav} + </div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filerevision.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filerevision.tmpl new file mode 100644 index 0000000..a594b1a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/filerevision.tmpl @@ -0,0 +1,64 @@ +{header} +<title>{repo|escape}: {file|escape}@{node|short}</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / file revision</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li><a 
href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <ul class="submenu"> + <li class="current">file</li> + <li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a></li> + <li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> + <li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> + <li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li> + </ul> + + <h2 class="no-link no-border">{file|escape}@{node|short}</h2> + <h3 class="changeset">{file|escape}</h3> + <p class="changeset-age"><span>{date|age}</span></p> + + <dl class="overview"> + <dt>author</dt> + <dd>{author|obfuscate}</dd> + <dt>date</dt> + <dd>{date|date}</dd> + {branch%filerevbranch} + <dt>changeset {rev}</dt> + <dd><a class="list" href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd> + {parent%filerevparent} + {child%filerevchild} + <dt>permissions</dt> + <dd>{permissions|permissions}</dd> + </dl> + + <p class="description">{desc|strip|escape|addbreaks|nonempty}</p> + + <div class="source"> + {text%fileline} + </div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/footer.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/footer.tmpl new file mode 100644 index 0000000..cddaa92 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/footer.tmpl @@ -0,0 +1,22 @@ + <div class="page-footer"> + <p>Mercurial Repository: {repo|escape}</p> + <ul class="rss-logo"> + <li><a href="{url}rss-log">RSS</a></li> + 
<li><a href="{url}atom-log">Atom</a></li> + </ul> + {motd} + </div> + + <div id="powered-by"> + <p><a href="http://mercurial.selenic.com/" title="Mercurial"><img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a></p> + </div> + + <div id="corner-top-left"></div> + <div id="corner-top-right"></div> + <div id="corner-bottom-left"></div> + <div id="corner-bottom-right"></div> + +</div> + +</body> +</html> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/graph.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/graph.tmpl new file mode 100644 index 0000000..dcdc8d7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/graph.tmpl @@ -0,0 +1,119 @@ +{header} + <title>{repo|escape}: graph</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> + <!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]--> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / graph</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li class="current">graph</li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li><a 
href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">graph</h2> + + <div id="noscript">The revision graph only works with JavaScript-enabled browsers.</div> + <div id="wrapper"> + <ul id="nodebgs"></ul> + <canvas id="graph" width="224" height="{canvasheight}"></canvas> + <ul id="graphnodes"></ul> + </div> + + <script type="text/javascript" src="{staticurl}graph.js"></script> + <script> + <!-- hide script content + + document.getElementById('noscript').style.display = 'none'; + + var data = {jsdata|json}; + var graph = new Graph(); + graph.scale({bg_height}); + + graph.edge = function(x0, y0, x1, y1, color) \{ + + this.setColor(color, 0.0, 0.65); + this.ctx.beginPath(); + this.ctx.moveTo(x0, y0); + this.ctx.lineTo(x1, y1); + this.ctx.stroke(); + + } + + var revlink = '<li style="_STYLE"><span class="desc">'; + revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; + revlink += '</span>_TAGS<span class="info">_DATE, by _USER</span></li>'; + + graph.vertex = function(x, y, color, parity, cur) \{ + + this.ctx.beginPath(); + color = this.setColor(color, 0.25, 0.75); + this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); + this.ctx.fill(); + + var bg = '<li class="bg parity' + parity + '"></li>'; + var left = (this.columns + 1) * this.bg_height; + var nstyle = 'padding-left: ' + left + 'px;'; + var item = revlink.replace(/_STYLE/, nstyle); + item = item.replace(/_PARITY/, 'parity' + parity); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_DESC/, cur[3]); + item = item.replace(/_USER/, cur[4]); + item = item.replace(/_DATE/, cur[5]); + + var tagspan = ''; + if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) \{ + tagspan = '<span class="logtags">'; + if (cur[6][1]) \{ + tagspan += '<span class="branchtag" title="' + cur[6][0] + '">'; + tagspan += cur[6][0] + '</span> '; + } else if (!cur[6][1] && cur[6][0] != 
'default') \{ + tagspan += '<span class="inbranchtag" title="' + cur[6][0] + '">'; + tagspan += cur[6][0] + '</span> '; + } + if (cur[7].length) \{ + for (var t in cur[7]) \{ + var tag = cur[7][t]; + tagspan += '<span class="tagtag">' + tag + '</span> '; + } + } + tagspan += '</span>'; + } + + item = item.replace(/_TAGS/, tagspan); + return [bg, item]; + + } + + graph.render(data); + + // stop hiding script --> + </script> + + <div class="page-path"> + <a href="{url}graph/{rev}{lessvars%urlparameter}">less</a> + <a href="{url}graph/{rev}{morevars%urlparameter}">more</a> + | {changenav%navgraph} + </div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/header.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/header.tmpl new file mode 100644 index 0000000..dd03884 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/header.tmpl @@ -0,0 +1,6 @@ +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"> +<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en"> +<head> + <link rel="icon" href="{staticurl}hgicon.png" type="image/png" /> + <meta name="robots" content="index, nofollow"/> + <link rel="stylesheet" href="{staticurl}style-monoblue.css" type="text/css" /> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/help.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/help.tmpl new file mode 100644 index 0000000..df99fb5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/help.tmpl @@ -0,0 +1,37 @@ +{header} + <title>{repo|escape}: Branches</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div 
id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Branches</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li class="current">help</li> + </ul> + </div> + + <h2 class="no-link no-border">branches</h2> + <pre> + {doc|escape} + </pre> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/helptopics.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/helptopics.tmpl new file mode 100644 index 0000000..f7f2fd6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/helptopics.tmpl @@ -0,0 +1,44 @@ +{header} + <title>{repo|escape}: Branches</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Branches</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> 
+ <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li class="current">help</li> + </ul> + </div> + + <h2 class="no-link no-border">branches</h2> + <table cellspacing="0"> + <tr><td colspan="2"><h2><a name="main" href="#topics">Topics</a></h2></td></tr> + {topics % helpentry} + + <tr><td colspan="2"><h2><a name="main" href="#main">Main Commands</a></h2></td></tr> + {earlycommands % helpentry} + + <tr><td colspan="2"><h2><a name="other" href="#other">Other Commands</a></h2></td></tr> + {othercommands % helpentry} + </table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/index.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/index.tmpl new file mode 100644 index 0000000..1159f47 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/index.tmpl @@ -0,0 +1,39 @@ +{header} + <title>{repo|escape}: Mercurial repositories index</title> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1>Mercurial Repositories</h1> + <ul class="page-nav"> + </ul> + </div> + + <table cellspacing="0"> + <tr> + <td><a href="?sort={sort_name}">Name</a></td> + <td><a href="?sort={sort_description}">Description</a></td> + <td><a href="?sort={sort_contact}">Contact</a></td> + <td><a href="?sort={sort_lastchange}">Last modified</a></td> + <td> </td> + <td> </td> + </tr> + {entries%indexentry} + </table> + <div class="page-footer"> + {motd} + </div> + + <div id="powered-by"> + <p><a 
href="http://mercurial.selenic.com/" title="Mercurial"><img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a></p> + </div> + + <div id="corner-top-left"></div> + <div id="corner-top-right"></div> + <div id="corner-bottom-left"></div> + <div id="corner-bottom-right"></div> + +</div> +</body> +</html> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/manifest.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/manifest.tmpl new file mode 100644 index 0000000..fe5ea8a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/manifest.tmpl @@ -0,0 +1,52 @@ +{header} +<title>{repo|escape}: files</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / files</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li class="current">files</li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <ul class="submenu"> + <li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> 
{archives%archiveentry}</li> + {archives%archiveentry} + </ul> + + <h2 class="no-link no-border">files</h2> + <p class="files">{path|escape} <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span></p> + + <table> + <tr class="parity{upparity}"> + <td>drwxr-xr-x</td> + <td></td> + <td></td> + <td><a href="{url}file/{node|short}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td> + <td class="link"> </td> + </tr> + {dentries%direntry} + {fentries%fileentry} + </table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/map new file mode 100644 index 0000000..1835016 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/map @@ -0,0 +1,226 @@ +default = 'summary' +mimetype = 'text/html; charset={encoding}' +header = header.tmpl +footer = footer.tmpl +search = search.tmpl +changelog = changelog.tmpl +summary = summary.tmpl +error = error.tmpl +notfound = notfound.tmpl + +help = help.tmpl +helptopics = helptopics.tmpl + +helpentry = '<tr><td><a href="{url}help/{topic|escape}{sessionvars%urlparameter}">{topic|escape}</a></td><td>{summary|escape}</td></tr>' + +naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a>' +filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenodelink = ' + <tr class="parity{parity}"> + <td><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a></td> + <td></td> + <td> + <a 
href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> | + <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | + <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> + </td> + </tr>' +filenolink = ' + <tr class="parity{parity}"> + <td> + <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a></td><td></td><td>file | + annotate | + <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | + <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> + </td> + </tr>' + +nav = '{before%naventry} {after%naventry}' +navshort = '{before%navshortentry}{after%navshortentry}' +navgraph = '{before%navgraphentry}{after%navgraphentry}' +filenav = '{before%filenaventry}{after%filenaventry}' + +fileellipses = '...' +changelogentry = changelogentry.tmpl +searchentry = changelogentry.tmpl +changeset = changeset.tmpl +manifest = manifest.tmpl +direntry = ' + <tr class="parity{parity}"> + <td>drwxr-xr-x</td> + <td></td> + <td></td> + <td><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">{basename|escape}</a></td> + <td><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a></td> + </tr>' +fileentry = ' + <tr class="parity{parity}"> + <td>{permissions|permissions}</td> + <td>{date|isodate}</td> + <td>{size}</td> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{basename|escape}</a></td> + <td> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | + <a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> | + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> + </td> + </tr>' +filerevision = 
filerevision.tmpl +fileannotate = fileannotate.tmpl +filediff = filediff.tmpl +filelog = filelog.tmpl +fileline = ' + <div style="font-family:monospace" class="parity{parity}"> + <pre><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</pre> + </div>' +annotateline = ' + <tr class="parity{parity}"> + <td class="linenr"> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}" + title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a> + </td> + <td class="lineno"> + <a href="#{lineid}" id="{lineid}">{linenumber}</a> + </td> + <td class="source">{line|escape}</td> + </tr>' +difflineplus = '<span style="color:#008800;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +difflineminus = '<span style="color:#cc0000;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +difflineat = '<span style="color:#990099;"><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +diffline = '<span><a class="linenr" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</span>' +changelogparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +changesetbranch = '<dt>branch</dt><dd>{name}</dd>' +changesetparent = ' + <dt>parent {rev}</dt> + <dd><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>' +filerevbranch = '<dt>branch</dt><dd>{name}</dd>' +filerevparent = ' + <dt>parent {rev}</dt> + <dd> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </dd>' +filerename = '{file|escape}@' +filelogrename = '| <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">base</a>' +fileannotateparent = ' + <dt>parent {rev}</dt> + <dd> + <a 
href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </dd>' +changelogchild = ' + <dt>child {rev}:</dt> + <dd><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>' +changesetchild = ' + <dt>child {rev}</dt> + <dd><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></dd>' +filerevchild = ' + <dt>child {rev}</dt> + <dd> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> + </dd>' +fileannotatechild = ' + <dt>child {rev}</dt> + <dd> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> + </dd>' +tags = tags.tmpl +tagentry = ' + <tr class="parity{parity}"> + <td class="nowrap">{date|age}</td> + <td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a></td> + <td class="nowrap"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | + <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | + <a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> + </td> + </tr>' +branches = branches.tmpl +branchentry = ' + <tr class="parity{parity}"> + <td class="nowrap">{date|age}</td> + <td><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + <td class="{status}">{branch|escape}</td> + <td class="nowrap"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | + <a href="{url}log/{node|short}{sessionvars%urlparameter}">changelog</a> | + <a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> + </td> + </tr>' +diffblock = '<pre>{lines}</pre>' +filediffparent = ' + <dt>parent {rev}</dt> + <dd><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>' +filelogparent = ' + <tr> + <td align="right">parent {rev}: </td> + <td><a 
href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filediffchild = ' + <dt>child {rev}</dt> + <dd><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></dd>' +filelogchild = ' + <tr> + <td align="right">child {rev}: </td> + <td><a href="{url}file{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +shortlog = shortlog.tmpl +tagtag = '<span class="tagtag" title="{name}">{name}</span> ' +branchtag = '<span class="branchtag" title="{name}">{name}</span> ' +inbranchtag = '<span class="inbranchtag" title="{name}">{name}</span> ' +shortlogentry = ' + <tr class="parity{parity}"> + <td class="nowrap">{date|age}</td> + <td>{author|person}</td> + <td> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}"> + {desc|strip|firstline|escape|nonempty} + <span class="logtags">{inbranch%inbranchtag}{branches%branchtag}{tags%tagtag}</span> + </a> + </td> + <td class="nowrap"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> | + <a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> + </td> + </tr>' +filelogentry = ' + <tr class="parity{parity}"> + <td class="nowrap">{date|age}</td> + <td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a></td> + <td class="nowrap"> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> | <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a> | <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> + {rename%filelogrename} + </td> + </tr>' +archiveentry = '<li><a href="{url}archive/{node|short}{extension}">{type|escape}</a></li>' +indexentry = ' + <tr class="parity{parity}"> + <td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td> + <td>{description}</td> + <td>{contact|obfuscate}</td> + <td>{lastchange|age}</td> + <td 
class="indexlinks">{archives%indexarchiveentry}</td> + <td> + <div class="rss_logo"> + <a href="{url}rss-log">RSS</a> + <a href="{url}atom-log">Atom</a> + </div> + </td> + </tr>\n' +indexarchiveentry = '<a href="{url}archive/{node|short}{extension}">{type|escape}</a> ' +index = index.tmpl +urlparameter = '{separator}{name}={value|urlescape}' +hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />' +graph = graph.tmpl diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/notfound.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/notfound.tmpl new file mode 100644 index 0000000..4b1ffad --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/notfound.tmpl @@ -0,0 +1,36 @@ +{header} + <title>{repo|escape}: Mercurial repository not found</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Not found: {repo|escape}</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li class="current">summary</li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry}</li> + <li><a 
href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">Not Found</h2> + <p class="normal">The specified repository "{repo|escape}" is unknown, sorry.</p> + <p class="normal">Please go back to the <a href="{url}">main repository list page</a>.</p> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/search.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/search.tmpl new file mode 100644 index 0000000..cde54a7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/search.tmpl @@ -0,0 +1,35 @@ +{header} + <title>{repo|escape}: Search</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / search</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" value="{query|escape}" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>{archives%archiveentry} + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">searching for 
{query|escape}</h2> + {entries} + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/shortlog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/shortlog.tmpl new file mode 100644 index 0000000..7a88897 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/shortlog.tmpl @@ -0,0 +1,43 @@ +{header} + <title>{repo|escape}: shortlog</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / shortlog</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li class="current">shortlog</li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + {archives%archiveentry} + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">shortlog</h2> + + <table> +{entries%shortlogentry} + </table> + + <div class="page-path"> + {changenav%navshort} + </div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/summary.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/summary.tmpl new file mode 100644 
index 0000000..9830cea --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/summary.tmpl @@ -0,0 +1,67 @@ +{header} + <title>{repo|escape}: Summary</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / summary</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li class="current">summary</li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}log{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">Mercurial Repository Overview</h2> + <dl class="overview"> + <dt>name</dt> + <dd>{repo|escape}</dd> + <dt>description</dt> + <dd>{desc}</dd> + <dt>owner</dt> + <dd>{owner|obfuscate}</dd> + <dt>last change</dt> + <dd>{lastchange|rfc822date}</dd> + </dl> + + <h2><a href="{url}shortlog{sessionvars%urlparameter}">Changes</a></h2> + <table> +{shortlog} + <tr class="light"> + <td colspan="4"><a class="list" href="{url}shortlog{sessionvars%urlparameter}">...</a></td> + </tr> + </table> + + <h2><a href="{url}tags{sessionvars%urlparameter}">Tags</a></h2> + <table> +{tags} + <tr class="light"> + <td colspan="3"><a class="list" 
href="{url}tags{sessionvars%urlparameter}">...</a></td> + </tr> + </table> + + <h2 class="no-link">Branches</h2> + <table> + {branches%branchentry} + <tr class="light"> + <td colspan="4"><a class="list" href="#">...</a></td> + </tr> + </table> +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/tags.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/tags.tmpl new file mode 100644 index 0000000..9607a8c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/monoblue/tags.tmpl @@ -0,0 +1,37 @@ +{header} + <title>{repo|escape}: Tags</title> + <link rel="alternate" type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}"/> + <link rel="alternate" type="application/rss+xml" href="{url}rss-log" title="RSS feed for {repo|escape}"/> +</head> + +<body> +<div id="container"> + <div class="page-header"> + <h1><a href="{url}summary{sessionvars%urlparameter}">{repo|escape}</a> / Tags</h1> + + <form action="{url}log"> + {sessionvars%hiddenformentry} + <dl class="search"> + <dt><label>Search: </label></dt> + <dd><input type="text" name="rev" /></dd> + </dl> + </form> + + <ul class="page-nav"> + <li><a href="{url}summary{sessionvars%urlparameter}">summary</a></li> + <li><a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a></li> + <li><a href="{url}changelog{sessionvars%urlparameter}">changelog</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li class="current">tags</li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a></li> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> + </ul> + </div> + + <h2 class="no-link no-border">tags</h2> + <table cellspacing="0"> +{entries%tagentry} + </table> + +{footer} diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/branches.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/branches.tmpl new file mode 100644 index 0000000..a8a4ad2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/branches.tmpl @@ -0,0 +1,48 @@ +{header} +<title>{repo|escape}: branches</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}: branches" /> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}: branches" /> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li class="active">branches</li> +</ul> +<ul> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>branches</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<table class="bigtable"> +<tr> + <th>branch</th> + <th>node</th> +</tr> +{entries%branchentry} +</table> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/changeset.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/changeset.tmpl new file mode 100644 index 0000000..883f376 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/changeset.tmpl @@ -0,0 +1,74 @@ 
+{header} +<title>{repo|escape}: {node|short}</title> +</head> +<body> +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> + <li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> + <li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> + <li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> + <li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> + <li class="active">changeset</li> + <li><a href="{url}raw-rev/{node|short}{sessionvars%urlparameter}">raw</a></li> + <li><a href="{url}file/{node|short}{sessionvars%urlparameter}">browse</a></li> +</ul> +<ul> + {archives%archiveentry} +</ul> +<ul> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> + +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>changeset {rev}:{node|short} {changesetbranch%changelogbranchname} {changesettag}</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="description">{desc|strip|escape|addbreaks|nonempty}</div> + +<table id="changesetEntry"> +<tr> + <th class="author">author</th> + <td class="author">{author|obfuscate}</td> +</tr> +<tr> + <th class="date">date</th> + <td class="date">{date|date} ({date|age})</td></tr> +<tr> + <th class="author">parents</th> + <td class="author">{parent%changesetparent}</td> +</tr> +<tr> + <th class="author">children</th> + <td class="author">{child%changesetchild}</td> +</tr> +<tr> + <th class="files">files</th> + <td class="files">{files}</td> +</tr> +</table> + +<div class="overflow"> +<div class="sourcefirst"> line diff</div> + +{diff} +</div> + +</div> +</div> 
+{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/error.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/error.tmpl new file mode 100644 index 0000000..c815fd0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/error.tmpl @@ -0,0 +1,44 @@ +{header} +<title>{repo|escape}: error</title> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +<li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> + +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>error</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30"></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="description"> +<p> +An error occurred while processing your request: +</p> +<p> +{error|escape} +</p> +</div> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/fileannotate.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/fileannotate.tmpl new file mode 100644 index 0000000..1ba24e5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/fileannotate.tmpl @@ -0,0 +1,81 @@ +{header} +<title>{repo|escape}: {file|escape} annotate</title> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a 
href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> + +<ul> +<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> +<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> +</ul> +<ul> +<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> +<li><a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li> +<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> +<li class="active">annotate</li> +<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li> +<li><a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a></li> +</ul> +<ul> +<li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>annotate {file|escape} @ {rev}:{node|short}</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="description">{desc|strip|escape|addbreaks|nonempty}</div> + +<table id="changesetEntry"> +<tr> + <th class="author">author</th> + <td class="author">{author|obfuscate}</td> +</tr> +<tr> + <th class="date">date</th> + <td class="date">{date|date} ({date|age})</td> +</tr> +<tr> + <th class="author">parents</th> + <td class="author">{parent%filerevparent}</td> +</tr> +<tr> + <th 
class="author">children</th> + <td class="author">{child%filerevchild}</td> +</tr> +{changesettag} +</table> + +<div class="overflow"> +<table class="bigtable"> +<tr> + <th class="annotate">rev</th> + <th class="line"> line source</th> +</tr> +{annotate%annotateline} +</table> +</div> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filediff.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filediff.tmpl new file mode 100644 index 0000000..8fec53b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filediff.tmpl @@ -0,0 +1,76 @@ +{header} +<title>{repo|escape}: {file|escape} diff</title> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> +<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> +</ul> +<ul> +<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> +<li><a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li> +<li class="active">diff</li> +<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> +<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li> +<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li> +</ul> +<ul> +<li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> 
+</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>diff {file|escape} @ {rev}:{node|short}</h3> + +<form class="search" action="{url}log"> +<p>{sessionvars%hiddenformentry}</p> +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="description">{desc|strip|escape|addbreaks|nonempty}</div> + +<table id="changesetEntry"> +<tr> + <th>author</th> + <td>{author|obfuscate}</td> +</tr> +<tr> + <th>date</th> + <td>{date|date} ({date|age})</td> +</tr> +<tr> + <th>parents</th> + <td>{parent%filerevparent}</td> +</tr> +<tr> + <th>children</th> + <td>{child%filerevchild}</td> +</tr> +{changesettag} +</table> + +<div class="overflow"> +<div class="sourcefirst"> line diff</div> + +{diff} +</div> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filelog.tmpl new file mode 100644 index 0000000..a5fb5cd --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filelog.tmpl @@ -0,0 +1,72 @@ +{header} +<title>{repo|escape}: {file|escape} history</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log/tip/{file|urlescape}" title="Atom feed for {repo|escape}:{file}" /> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log/tip/{file|urlescape}" title="RSS feed for {repo|escape}:{file}" /> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> +<li><a 
href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> +<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> +</ul> +<ul> +<li><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a></li> +<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> +<li><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> +<li class="active">file log</li> +<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li> +</ul> +<ul> +<li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>log {file|escape}</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="navigate"> +<a href="{url}log/{node|short}/{file|urlescape}{lessvars%urlparameter}">less</a> +<a href="{url}log/{node|short}/{file|urlescape}{morevars%urlparameter}">more</a> +| {nav%filenav}</div> + +<table class="bigtable"> + <tr> + <th class="age">age</th> + <th class="author">author</th> + <th class="description">description</th> + </tr> +{entries%filelogentry} +</table> + +<div class="navigate"> +<a href="{url}log/{node|short}/{file|urlescape}{lessvars%urlparameter}">less</a> +<a href="{url}log/{node|short}/{file|urlescape}{morevars%urlparameter}">more</a> +| {nav%filenav} +</div> + +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filelogentry.tmpl 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filelogentry.tmpl new file mode 100644 index 0000000..43e068c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filelogentry.tmpl @@ -0,0 +1,5 @@ + <tr class="parity{parity}"> + <td class="age">{date|age}</td> + <td class="author">{author|person}</td> + <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}</td> + </tr> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filerevision.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filerevision.tmpl new file mode 100644 index 0000000..2a23461 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/filerevision.tmpl @@ -0,0 +1,76 @@ +{header} +<title>{repo|escape}: {node|short} {file|escape}</title> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> +<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> +</ul> +<ul> +<li class="active">file</li> +<li><a href="{url}file/tip/{file|urlescape}{sessionvars%urlparameter}">latest</a></li> +<li><a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">diff</a></li> +<li><a 
href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a></li> +<li><a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file log</a></li> +<li><a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a></li> +</ul> +<ul> +<li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>view {file|escape} @ {rev}:{node|short}</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="description">{desc|strip|escape|addbreaks|nonempty}</div> + +<table id="changesetEntry"> +<tr> + <th class="author">author</th> + <td class="author">{author|obfuscate}</td> +</tr> +<tr> + <th class="date">date</th> + <td class="date">{date|date} ({date|age})</td> +</tr> +<tr> + <th class="author">parents</th> + <td class="author">{parent%filerevparent}</td> +</tr> +<tr> + <th class="author">children</th> + <td class="author">{child%filerevchild}</td> +</tr> +{changesettag} +</table> + +<div class="overflow"> +<div class="sourcefirst"> line source</div> +{text%fileline} +<div class="sourcelast"></div> +</div> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/footer.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/footer.tmpl new file mode 100644 index 0000000..6231a3c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/footer.tmpl @@ -0,0 +1,4 @@ +{motd} + +</body> +</html> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/graph.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/graph.tmpl new file mode 100644 index 0000000..597614a --- 
/dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/graph.tmpl @@ -0,0 +1,135 @@ +{header} +<title>{repo|escape}: revision graph</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}: log" /> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}: log" /> +<!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]--> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> +<li class="active">graph</li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> +<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> +</ul> +<ul> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>graph</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="navigate"> +<a href="{url}graph/{rev}{lessvars%urlparameter}">less</a> +<a href="{url}graph/{rev}{morevars%urlparameter}">more</a> +| rev {rev}: {changenav%navgraph} +</div> + +<noscript><p>The revision graph only works with JavaScript-enabled browsers.</p></noscript> + +<div id="wrapper"> +<ul id="nodebgs"></ul> +<canvas id="graph" width="224" height="{canvasheight}"></canvas> +<ul 
id="graphnodes"></ul> +</div> + +<script type="text/javascript" src="{staticurl}graph.js"></script> +<script type="text/javascript"> +<!-- hide script content + +var data = {jsdata|json}; +var graph = new Graph(); +graph.scale({bg_height}); + +graph.edge = function(x0, y0, x1, y1, color) \{ + + this.setColor(color, 0.0, 0.65); + this.ctx.beginPath(); + this.ctx.moveTo(x0, y0); + this.ctx.lineTo(x1, y1); + this.ctx.stroke(); + +} + +var revlink = '<li style="_STYLE"><span class="desc">'; +revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; +revlink += '</span>_TAGS<span class="info">_DATE, by _USER</span></li>'; + +graph.vertex = function(x, y, color, parity, cur) \{ + + this.ctx.beginPath(); + color = this.setColor(color, 0.25, 0.75); + this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); + this.ctx.fill(); + + var bg = '<li class="bg parity' + parity + '"></li>'; + var left = (this.columns + 1) * this.bg_height; + var nstyle = 'padding-left: ' + left + 'px;'; + var item = revlink.replace(/_STYLE/, nstyle); + item = item.replace(/_PARITY/, 'parity' + parity); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_DESC/, cur[3]); + item = item.replace(/_USER/, cur[4]); + item = item.replace(/_DATE/, cur[5]); + + var tagspan = ''; + if (cur[7].length || (cur[6][0] != 'default' || cur[6][1])) \{ + tagspan = '<span class="logtags">'; + if (cur[6][1]) \{ + tagspan += '<span class="branchhead" title="' + cur[6][0] + '">'; + tagspan += cur[6][0] + '</span> '; + } else if (!cur[6][1] && cur[6][0] != 'default') \{ + tagspan += '<span class="branchname" title="' + cur[6][0] + '">'; + tagspan += cur[6][0] + '</span> '; + } + if (cur[7].length) \{ + for (var t in cur[7]) \{ + var tag = cur[7][t]; + tagspan += '<span class="tag">' + tag + '</span> '; + } + } + tagspan += '</span>'; + } + + item = item.replace(/_TAGS/, tagspan); + return [bg, item]; + +} + +graph.render(data); + +// stop 
hiding script --> +</script> + +<div class="navigate"> +<a href="{url}graph/{rev}{lessvars%urlparameter}">less</a> +<a href="{url}graph/{rev}{morevars%urlparameter}">more</a> +| rev {rev}: {changenav%navgraph} +</div> + +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/header.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/header.tmpl new file mode 100644 index 0000000..305bc2f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/header.tmpl @@ -0,0 +1,6 @@ +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd"> +<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-US"> +<head> +<link rel="icon" href="{staticurl}hgicon.png" type="image/png" /> +<meta name="robots" content="index, nofollow" /> +<link rel="stylesheet" href="{staticurl}style-paper.css" type="text/css" /> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/help.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/help.tmpl new file mode 100644 index 0000000..22b16bd --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/help.tmpl @@ -0,0 +1,43 @@ +{header} +<title>Help: {topic}</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}" /> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}" /> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a 
href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> + <li class="active">help</li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>Help: {topic}</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> +<pre> +{doc|escape} +</pre> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/helptopics.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/helptopics.tmpl new file mode 100644 index 0000000..8711160 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/helptopics.tmpl @@ -0,0 +1,48 @@ +{header} +<title>Help: {title}</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}" /> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}" /> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li class="active">help</li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the 
commit message</div> +</form> +<table class="bigtable"> +<tr><td colspan="2"><h2><a name="main" href="#topics">Topics</a></h2></td></tr> +{topics % helpentry} + +<tr><td colspan="2"><h2><a name="main" href="#main">Main Commands</a></h2></td></tr> +{earlycommands % helpentry} + +<tr><td colspan="2"><h2><a name="other" href="#other">Other Commands</a></h2></td></tr> +{othercommands % helpentry} +</table> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/index.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/index.tmpl new file mode 100644 index 0000000..b8adfc9 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/index.tmpl @@ -0,0 +1,26 @@ +{header} +<title>Mercurial repositories index</title> +</head> +<body> + +<div class="container"> +<div class="menu"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial" /></a> +</div> +<div class="main"> +<h2>Mercurial Repositories</h2> + +<table class="bigtable"> + <tr> + <th><a href="?sort={sort_name}">Name</a></th> + <th><a href="?sort={sort_description}">Description</a></th> + <th><a href="?sort={sort_contact}">Contact</a></th> + <th><a href="?sort={sort_lastchange}">Last modified</a></th> + <th> </th> + </tr> + {entries%indexentry} +</table> +</div> +</div> +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/manifest.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/manifest.tmpl new file mode 100644 index 0000000..ff537bd --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/manifest.tmpl @@ -0,0 +1,57 @@ +{header} +<title>{repo|escape}: {node|short} {path|escape}</title> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" 
alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> +<li class="active">browse</li> +</ul> +<ul> +{archives%archiveentry} +</ul> +<ul> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>directory {path|escape} @ {rev}:{node|short} {tags%changelogtag}</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<table class="bigtable"> +<tr> + <th class="name">name</th> + <th class="size">size</th> + <th class="permissions">permissions</th> +</tr> +<tr class="fileline parity{upparity}"> + <td class="name"><a href="{url}file/{node|short}{up|urlescape}{sessionvars%urlparameter}">[up]</a></td> + <td class="size"></td> + <td class="permissions">drwxr-xr-x</td> +</tr> +{dentries%direntry} +{fentries%fileentry} +</table> +</div> +</div> +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/map new file mode 100644 index 0000000..caac1c7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/map @@ -0,0 +1,200 @@ +default = 'shortlog' + +mimetype = 'text/html; charset={encoding}' +header = header.tmpl +footer = footer.tmpl +search = search.tmpl + +changelog = shortlog.tmpl +shortlog = shortlog.tmpl +shortlogentry = shortlogentry.tmpl +graph 
= graph.tmpl +help = help.tmpl +helptopics = helptopics.tmpl + +helpentry = '<tr><td><a href="{url}help/{topic|escape}{sessionvars%urlparameter}">{topic|escape}</a></td><td>{summary|escape}</td></tr>' + +naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> ' +filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenodelink = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenolink = '{file|escape} ' +fileellipses = '...' +changelogentry = shortlogentry.tmpl +searchentry = shortlogentry.tmpl +changeset = changeset.tmpl +manifest = manifest.tmpl + +nav = '{before%naventry} {after%naventry}' +navshort = '{before%navshortentry}{after%navshortentry}' +navgraph = '{before%navgraphentry}{after%navgraphentry}' +filenav = '{before%filenaventry}{after%filenaventry}' + +direntry = ' + <tr class="fileline parity{parity}"> + <td class="name"> + <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}"> + <img src="{staticurl}coal-folder.png" alt="dir."/> {basename|escape}/ + </a> + <a href="{url}file/{node|short}{path|urlescape}/{emptydirs|urlescape}{sessionvars%urlparameter}"> + {emptydirs|escape} + </a> + </td> + <td class="size"></td> + <td class="permissions">drwxr-xr-x</td> + </tr>' + +fileentry = ' + <tr class="fileline parity{parity}"> + <td class="filename"> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + <img src="{staticurl}coal-file.png" alt="file"/> {basename|escape} + </a> + </td> + <td class="size">{size}</td> + <td 
class="permissions">{permissions|permissions}</td> + </tr>' + +filerevision = filerevision.tmpl +fileannotate = fileannotate.tmpl +filediff = filediff.tmpl +filelog = filelog.tmpl +fileline = ' + <div class="parity{parity} source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</div>' +filelogentry = filelogentry.tmpl + +annotateline = ' + <tr class="parity{parity}"> + <td class="annotate"> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#{targetline}" + title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a> + </td> + <td class="source"><a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</td> + </tr>' + +diffblock = '<div class="source bottomline parity{parity}"><pre>{lines}</pre></div>' +difflineplus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="plusline">{line|escape}</span>' +difflineminus = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="minusline">{line|escape}</span>' +difflineat = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> <span class="atline">{line|escape}</span>' +diffline = '<a href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}' + +changelogparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' + +changesetparent = '<a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> ' + +filerevparent = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{rename%filerename}{node|short}</a> ' +filerevchild = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> ' + +filerename = '{file|escape}@' +filelogrename = ' + <tr> + <th>base:</th> + <td> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {file|escape}@{node|short} + </a> + </td> + </tr>' +fileannotateparent = ' + <tr> + <td class="metatag">parent:</td> + 
<td> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </td> + </tr>' +changesetchild = ' <a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a>' +changelogchild = ' + <tr> + <th class="child">child</th> + <td class="child"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}"> + {node|short} + </a> + </td> + </tr>' +fileannotatechild = ' + <tr> + <td class="metatag">child:</td> + <td> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {node|short} + </a> + </td> + </tr>' +tags = tags.tmpl +tagentry = ' + <tr class="tagEntry parity{parity}"> + <td> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}"> + {tag|escape} + </a> + </td> + <td class="node"> + {node|short} + </td> + </tr>' +branches = branches.tmpl +branchentry = ' + <tr class="tagEntry parity{parity}"> + <td> + <a href="{url}shortlog/{node|short}{sessionvars%urlparameter}" class="{status}"> + {branch|escape} + </a> + </td> + <td class="node"> + {node|short} + </td> + </tr>' +changelogtag = '<span class="tag">{name|escape}</span> ' +changesettag = '<span class="tag">{tag|escape}</span> ' +changelogbranchhead = '<span class="branchhead">{name|escape}</span> ' +changelogbranchname = '<span class="branchname">{name|escape}</span> ' + +filediffparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filelogparent = ' + <tr> + <th>parent {rev}:</th> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filediffchild = ' + <tr> + <th class="child">child {rev}:</th> + <td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +filelogchild = ' + <tr> + <th>child {rev}:</th> + <td><a 
href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' + +indexentry = ' + <tr class="parity{parity}"> + <td><a href="{url}{sessionvars%urlparameter}">{name|escape}</a></td> + <td>{description}</td> + <td>{contact|obfuscate}</td> + <td class="age">{lastchange|age}</td> + <td class="indexlinks">{archives%indexarchiveentry}</td> + </tr>\n' +indexarchiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}"> ↓{type|escape}</a>' +index = index.tmpl +archiveentry = ' + <li> + <a href="{url}archive/{node|short}{extension|urlescape}">{type|escape}</a> + </li>' +notfound = notfound.tmpl +error = error.tmpl +urlparameter = '{separator}{name}={value|urlescape}' +hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/notfound.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/notfound.tmpl new file mode 100644 index 0000000..e9e6ba4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/notfound.tmpl @@ -0,0 +1,12 @@ +{header} +<title>Mercurial repository not found</title> +</head> +<body> + +<h2>Mercurial repository not found</h2> + +The specified repository "{repo|escape}" is unknown, sorry. + +Please go back to the <a href="{url}">main repository list page</a>. 
+ +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/search.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/search.tmpl new file mode 100644 index 0000000..ef5c6b0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/search.tmpl @@ -0,0 +1,54 @@ +{header} +<title>{repo|escape}: searching for {query|escape}</title> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> +</div> +<ul> +<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +<li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>searching for '{query|escape}'</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30"></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="navigate"> +<a href="{url}search/{lessvars%urlparameter}">less</a> +<a href="{url}search/{morevars%urlparameter}">more</a> +</div> + +<table class="bigtable"> + <tr> + <th class="age">age</th> + <th class="author">author</th> + <th class="description">description</th> + </tr> +{entries} +</table> + +<div class="navigate"> +<a href="{url}search/{lessvars%urlparameter}">less</a> +<a href="{url}search/{morevars%urlparameter}">more</a> +</div> + +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/shortlog.tmpl 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/shortlog.tmpl new file mode 100644 index 0000000..8fed55b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/shortlog.tmpl @@ -0,0 +1,69 @@ +{header} +<title>{repo|escape}: log</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}" /> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}" /> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li class="active">log</li> +<li><a href="{url}graph/{node|short}{sessionvars%urlparameter}">graph</a></li> +<li><a href="{url}tags{sessionvars%urlparameter}">tags</a></li> +<li><a href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li><a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a></li> +<li><a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">browse</a></li> +</ul> +<ul> +{archives%archiveentry} +</ul> +<ul> + <li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>log</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<div class="navigate"> +<a href="{url}shortlog/{rev}{lessvars%urlparameter}">less</a> +<a href="{url}shortlog/{rev}{morevars%urlparameter}">more</a> +| rev {rev}: {changenav%navshort} +</div> + +<table class="bigtable"> + <tr> + <th class="age">age</th> + <th class="author">author</th> + <th class="description">description</th> + </tr> 
+{entries%shortlogentry} +</table> + +<div class="navigate"> +<a href="{url}shortlog/{rev}{lessvars%urlparameter}">less</a> +<a href="{url}shortlog/{rev}{morevars%urlparameter}">more</a> +| rev {rev}: {changenav%navshort} +</div> + +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/shortlogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/shortlogentry.tmpl new file mode 100644 index 0000000..43e068c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/shortlogentry.tmpl @@ -0,0 +1,5 @@ + <tr class="parity{parity}"> + <td class="age">{date|age}</td> + <td class="author">{author|person}</td> + <td class="description"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a>{inbranch%changelogbranchname}{branches%changelogbranchhead}{tags%changelogtag}</td> + </tr> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/tags.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/tags.tmpl new file mode 100644 index 0000000..3c2461f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/paper/tags.tmpl @@ -0,0 +1,48 @@ +{header} +<title>{repo|escape}: tags</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}: tags" /> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}: tags" /> +</head> +<body> + +<div class="container"> +<div class="menu"> +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" alt="mercurial" /></a> +</div> +<ul> +<li><a href="{url}shortlog{sessionvars%urlparameter}">log</a></li> +<li><a href="{url}graph{sessionvars%urlparameter}">graph</a></li> +<li class="active">tags</li> +<li><a 
href="{url}branches{sessionvars%urlparameter}">branches</a></li> +</ul> +<ul> +<li><a href="{url}help{sessionvars%urlparameter}">help</a></li> +</ul> +</div> + +<div class="main"> +<h2><a href="{url}{sessionvars%urlparameter}">{repo|escape}</a></h2> +<h3>tags</h3> + +<form class="search" action="{url}log"> +{sessionvars%hiddenformentry} +<p><input name="rev" id="search1" type="text" size="30" /></p> +<div id="hint">find changesets by author, revision, +files, or words in the commit message</div> +</form> + +<table class="bigtable"> +<tr> + <th>tag</th> + <th>node</th> +</tr> +{entries%tagentry} +</table> +</div> +</div> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/changeset.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/changeset.tmpl new file mode 100644 index 0000000..b59d99b --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/changeset.tmpl @@ -0,0 +1,9 @@ +{header} +# HG changeset patch +# User {author} +# Date {date|hgdate} +# Node ID {node} +{parent%changesetparent} +{desc} + +{diff} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/error.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/error.tmpl new file mode 100644 index 0000000..9407c13 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/error.tmpl @@ -0,0 +1,2 @@ +{header} +error: {error} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/fileannotate.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/fileannotate.tmpl new file mode 100644 index 0000000..ad1bed6 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/fileannotate.tmpl @@ -0,0 +1,5 @@ +{header} +{annotate%annotateline} +{footer} + + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/filediff.tmpl 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/filediff.tmpl new file mode 100644 index 0000000..c4014bc --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/filediff.tmpl @@ -0,0 +1,5 @@ +{header} +{diff} +{footer} + + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/index.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/index.tmpl new file mode 100644 index 0000000..29d7c9e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/index.tmpl @@ -0,0 +1,2 @@ +{header} +{entries%indexentry} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/manifest.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/manifest.tmpl new file mode 100644 index 0000000..8d4a934 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/manifest.tmpl @@ -0,0 +1,3 @@ +{header} +{dentries%direntry}{fentries%fileentry} +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/map new file mode 100644 index 0000000..980e91d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/map @@ -0,0 +1,28 @@ +mimetype = 'text/plain; charset={encoding}' +header = '' +footer = '' +changeset = changeset.tmpl +difflineplus = '{line}' +difflineminus = '{line}' +difflineat = '{line}' +diffline = '{line}' +changesetparent = '# Parent {node}' +changesetchild = '# Child {node}' +filenodelink = '' +filenolink = '' +fileline = '{line}' +diffblock = '{lines}' +filediff = filediff.tmpl +fileannotate = fileannotate.tmpl +annotateline = '{author|user}@{rev}: {line}' +manifest = manifest.tmpl +direntry = 'drwxr-xr-x {basename}\n' +fileentry = '{permissions|permissions} {size} {basename}\n' +index = index.tmpl +notfound = notfound.tmpl +error = error.tmpl 
+indexentry = '{url}\n' +tags = '{entries%tagentry}' +tagentry = '{tag} {node}\n' +branches = '{entries%branchentry}' +branchentry = '{branch} {node} {status}\n' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/notfound.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/notfound.tmpl new file mode 100644 index 0000000..a7b3251 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/raw/notfound.tmpl @@ -0,0 +1,2 @@ +{header} +error: repository {repo} not found diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/changelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/changelog.tmpl new file mode 100644 index 0000000..65b96ad --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/changelog.tmpl @@ -0,0 +1,6 @@ +{header} + <title>{repo|escape} Changelog</title> + <description>{repo|escape} Changelog</description> + {entries%changelogentry} + </channel> +</rss>
\ No newline at end of file diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/changelogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/changelogentry.tmpl new file mode 100644 index 0000000..12fe8e0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/changelogentry.tmpl @@ -0,0 +1,7 @@ +<item> + <title>{desc|strip|firstline|strip|escape}</title> + <guid isPermaLink="true">{urlbase}{url}rev/{node|short}</guid> + <description><![CDATA[{desc|strip|escape|addbreaks|nonempty}]]></description> + <author>{author|obfuscate}</author> + <pubDate>{date|rfc822date}</pubDate> +</item> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/error.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/error.tmpl new file mode 100644 index 0000000..87e6009 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/error.tmpl @@ -0,0 +1,10 @@ +{header} + <title>Error</title> + <description>Error</description> + <item> + <title>Error</title> + <description>{error|escape}</description> + <guid>http://mercurial.selenic.com/#error</guid> + </item> + </channel> +</rss> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/filelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/filelog.tmpl new file mode 100644 index 0000000..31f4dc7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/filelog.tmpl @@ -0,0 +1,6 @@ +{header} + <title>{repo|escape}: {file|escape} history</title> + <description>{file|escape} revision history</description> + {entries%filelogentry} + </channel> +</rss> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/filelogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/filelogentry.tmpl new file mode 100644 index 0000000..220dc4a --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/filelogentry.tmpl @@ -0,0 +1,7 @@ +<item> + <title>{desc|strip|firstline|strip|escape}</title> + <link>{urlbase}{url}log{{node|short}}/{file|urlescape}</link> + <description><![CDATA[{desc|strip|escape|addbreaks|nonempty}]]></description> + <author>{author|obfuscate}</author> + <pubDate>{date|rfc822date}</pubDate> +</item> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/header.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/header.tmpl new file mode 100644 index 0000000..ed29196 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/header.tmpl @@ -0,0 +1,5 @@ +<?xml version="1.0" encoding="{encoding}"?> +<rss version="2.0"> + <channel> + <link>{urlbase}{url}</link> + <language>en-us</language> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/map new file mode 100644 index 0000000..2f777b7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/map @@ -0,0 +1,10 @@ +default = 'changelog' +mimetype = 'text/xml; charset={encoding}' +header = header.tmpl +changelog = changelog.tmpl +changelogentry = changelogentry.tmpl +filelog = filelog.tmpl +filelogentry = filelogentry.tmpl +tags = tags.tmpl +tagentry = tagentry.tmpl +error = error.tmpl diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/tagentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/tagentry.tmpl new file mode 100644 index 0000000..42fa038 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/tagentry.tmpl @@ -0,0 +1,6 @@ +<item> + <title>{tag|escape}</title> + <link>{urlbase}{url}rev/{node|short}</link> + <description><![CDATA[{tag|strip|escape|addbreaks}]]></description> + <pubDate>{date|rfc822date}</pubDate> +</item> 
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/tags.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/tags.tmpl new file mode 100644 index 0000000..93f1e96 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/rss/tags.tmpl @@ -0,0 +1,6 @@ +{header} + <title>{repo|escape}: tags </title> + <description>{repo|escape} tag history</description> + {entriesnotip%tagentry} + </channel> +</rss> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/branches.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/branches.tmpl new file mode 100644 index 0000000..43e3bdb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/branches.tmpl @@ -0,0 +1,27 @@ +{header} +<title>{repo|escape}: branches</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-branches" title="Atom feed for {repo|escape}: branches"> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-branches" title="RSS feed for {repo|escape}: branches"> +</head> +<body> + +<div class="buttons"> +<a href="{url}log{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}file/{node|short}/{sessionvars%urlparameter}">files</a> +<a href="{url}help{sessionvars%urlparameter}">help</a> +<a type="application/rss+xml" href="{url}rss-branches">rss</a> +<a type="application/atom+xml" href="{url}atom-branches">atom</a> +</div> + +<h2>branches:</h2> + +<ul id="tagEntries"> +{entries%branchentry} +</ul> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changelog.tmpl new file mode 100644 index 0000000..466e681 
--- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changelog.tmpl @@ -0,0 +1,44 @@ +{header} +<title>{repo|escape}: changelog</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"> +</head> +<body> + +<div class="buttons"> +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> +{archives%archiveentry} +<a href="{url}help{sessionvars%urlparameter}">help</a> +<a type="application/rss+xml" href="{url}rss-log">rss</a> +<a type="application/atom+xml" href="{url}atom-log" title="Atom feed for {repo|escape}">atom</a> +</div> + +<h2>changelog for {repo|escape}</h2> + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<p> +<label for="search1">search:</label> +<input name="rev" id="search1" type="text" size="30"> +navigate: <small class="navigate">{changenav%nav}</small> +</p> +</form> + +{entries%changelogentry} + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<p> +<label for="search2">search:</label> +<input name="rev" id="search2" type="text" size="30"> +navigate: <small class="navigate">{changenav%nav}</small> +</p> +</form> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changelogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changelogentry.tmpl new file mode 100644 index 0000000..e924722 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changelogentry.tmpl @@ -0,0 +1,25 @@ +<table class="logEntry parity{parity}"> + <tr> + <th class="age">{date|age}:</th> + 
<th class="firstline">{desc|strip|firstline|escape|nonempty}</th> + </tr> + <tr> + <th class="revision">changeset {rev}:</th> + <td class="node"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr> + {parent%changelogparent} + {child%changelogchild} + {changelogtag} + <tr> + <th class="author">author:</th> + <td class="author">{author|obfuscate}</td> + </tr> + <tr> + <th class="date">date:</th> + <td class="date">{date|date}</td> + </tr> + <tr> + <th class="files"><a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a>:</th> + <td class="files">{files}</td> + </tr> +</table> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changeset.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changeset.tmpl new file mode 100644 index 0000000..c9a72a1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/changeset.tmpl @@ -0,0 +1,52 @@ +{header} +<title>{repo|escape}: changeset {node|short}</title> +</head> +<body> + +<div class="buttons"> +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> +<a href="{url}raw-rev/{node|short}">raw</a> +{archives%archiveentry} +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<h2>changeset: {desc|strip|escape|firstline|nonempty}</h2> + +<table id="changesetEntry"> +<tr> + <th class="changeset">changeset {rev}:</th> + <td class="changeset"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> +</tr> +{parent%changesetparent} +{child%changesetchild} +{changesettag} +<tr> + <th class="author">author:</th> + <td 
class="author">{author|obfuscate}</td> +</tr> +<tr> + <th class="date">date:</th> + <td class="date">{date|date} ({date|age})</td> +</tr> +<tr> + <th class="files">files:</th> + <td class="files">{files}</td> +</tr> +<tr> + <th class="description">description:</th> + <td class="description">{desc|strip|escape|addbreaks|nonempty}</td> +</tr> +</table> + +<div id="changesetDiff"> +{diff} +</div> + +{footer} + + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/error.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/error.tmpl new file mode 100644 index 0000000..fc2c788 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/error.tmpl @@ -0,0 +1,15 @@ +{header} +<title>Mercurial Error</title> +</head> +<body> + +<h2>Mercurial Error</h2> + +<p> +An error occurred while processing your request: +</p> +<p> +{error|escape} +</p> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/fileannotate.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/fileannotate.tmpl new file mode 100644 index 0000000..465fd7e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/fileannotate.tmpl @@ -0,0 +1,49 @@ +{header} +<title>{repo|escape}: {file|escape} annotate</title> +</head> +<body> + +<div class="buttons"> +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> +<a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> +<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> +<a 
href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> +<a href="{url}raw-annotate/{node|short}/{file|urlescape}">raw</a> +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<h2>Annotate {file|escape}</h2> + +<table> +<tr> + <td class="metatag">changeset {rev}:</td> + <td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr> +{parent%fileannotateparent} +{child%fileannotatechild} +<tr> + <td class="metatag">author:</td> + <td>{author|obfuscate}</td></tr> +<tr> + <td class="metatag">date:</td> + <td>{date|date} ({date|age})</td> +</tr> +<tr> + <td class="metatag">permissions:</td> + <td>{permissions|permissions}</td> +</tr> +<tr> + <td class="metatag">description:</td> + <td>{desc|strip|escape|addbreaks|nonempty}</td> +</tr> +</table> + +<table cellspacing="0" cellpadding="0"> +{annotate%annotateline} +</table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filediff.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filediff.tmpl new file mode 100644 index 0000000..7640cba --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filediff.tmpl @@ -0,0 +1,37 @@ +{header} +<title>{repo|escape}: {file|escape} diff</title> +</head> +<body> + +<div class="buttons"> +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> +<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> +<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> +<a 
href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> +<a href="{url}raw-diff/{node|short}/{file|urlescape}">raw</a> +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<h2>{file|escape}</h2> + +<table id="filediffEntry"> +<tr> + <th class="revision">revision {rev}:</th> + <td class="revision"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> +</tr> +{parent%filediffparent} +{child%filediffchild} +</table> + +<div id="fileDiff"> +{diff} +</div> + +{footer} + + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filelog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filelog.tmpl new file mode 100644 index 0000000..d003f79 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filelog.tmpl @@ -0,0 +1,29 @@ +{header} +<title>{repo|escape}: {file|escape} history</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log/tip/{file|urlescape}" title="Atom feed for {repo|escape}:{file}"> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log/tip/{file|urlescape}" title="RSS feed for {repo|escape}:{file}"> +</head> +<body> + +<div class="buttons"> +<a href="{url}log{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">file</a> +<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> +<a href="{url}help{sessionvars%urlparameter}">help</a> +<a type="application/rss+xml" href="{url}rss-log/tip/{file|urlescape}">rss</a> +<a type="application/atom+xml" href="{url}atom-log/tip/{file|urlescape}" title="Atom feed for 
{repo|escape}:{file}">atom</a> +</div> + +<h2>{file|escape} revision history</h2> + +<p>navigate: <small class="navigate">{nav%filenav}</small></p> + +{entries%filelogentry} + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filelogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filelogentry.tmpl new file mode 100644 index 0000000..dcbdca8 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filelogentry.tmpl @@ -0,0 +1,25 @@ +<table class="logEntry parity{parity}"> + <tr> + <th class="age">{date|age}:</th> + <th class="firstline"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a></th> + </tr> + <tr> + <th class="revision">revision {filerev}:</td> + <td class="node"> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a> + <a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">(diff)</a> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">(annotate)</a> + </td> + </tr> + {rename%filelogrename} + <tr> + <th class="author">author:</th> + <td class="author">{author|obfuscate}</td> + </tr> + <tr> + <th class="date">date:</th> + <td class="date">{date|date}</td> + </tr> +</table> + + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filerevision.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filerevision.tmpl new file mode 100644 index 0000000..50cbb7a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/filerevision.tmpl @@ -0,0 +1,47 @@ +{header} +<title>{repo|escape}:{file|escape}</title> +</head> +<body> + +<div class="buttons"> +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> +<a 
href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> +<a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">files</a> +<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">revisions</a> +<a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">annotate</a> +<a href="{url}raw-file/{node|short}/{file|urlescape}">raw</a> +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<h2>{file|escape}</h2> + +<table> +<tr> + <td class="metatag">changeset {rev}:</td> + <td><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td></tr> +{parent%filerevparent} +{child%filerevchild} +<tr> + <td class="metatag">author:</td> + <td>{author|obfuscate}</td></tr> +<tr> + <td class="metatag">date:</td> + <td>{date|date} ({date|age})</td></tr> +<tr> + <td class="metatag">permissions:</td> + <td>{permissions|permissions}</td></tr> +<tr> + <td class="metatag">description:</td> + <td>{desc|strip|escape|addbreaks|nonempty}</td> +</tr> +</table> + +<pre> +{text%fileline} +</pre> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/footer.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/footer.tmpl new file mode 100644 index 0000000..afcb2d0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/footer.tmpl @@ -0,0 +1,8 @@ +{motd} +<div class="logo"> +<a href="http://mercurial.selenic.com/"> +<img src="{staticurl}hglogo.png" width=75 height=90 border=0 alt="mercurial"></a> +</div> + +</body> +</html> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/graph.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/graph.tmpl new file mode 
100644 index 0000000..efc5adc --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/graph.tmpl @@ -0,0 +1,97 @@ +{header} +<title>{repo|escape}: graph</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}: tags"> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}: tags"> +<!--[if IE]><script type="text/javascript" src="{staticurl}excanvas.js"></script><![endif]--> +</head> +<body> + +<div class="buttons"> +<a href="{url}log{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}file/{node|short}/{sessionvars%urlparameter}">files</a> +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<h2>graph</h2> + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<p> +<label for="search1">search:</label> +<input name="rev" id="search1" type="text" size="30"> +navigate: <small class="navigate">{changenav%navgraph}</small> +</p> +</form> + +<noscript>The revision graph only works with JavaScript-enabled browsers.</noscript> + +<div id="wrapper"> +<ul id="nodebgs"></ul> +<canvas id="graph" width="224" height="{canvasheight}"></canvas> +<ul id="graphnodes"></ul> +</div> + +<script type="text/javascript" src="{staticurl}graph.js"></script> +<script type="text/javascript"> +<!-- hide script content + +var data = {jsdata|json}; +var graph = new Graph(); +graph.scale({bg_height}); + +graph.edge = function(x0, y0, x1, y1, color) \{ + + this.setColor(color, 0.0, 0.65); + this.ctx.beginPath(); + this.ctx.moveTo(x0, y0); + this.ctx.lineTo(x1, y1); + this.ctx.stroke(); + +} + +var revlink = '<li style="_STYLE"><span class="desc">'; +revlink += '<a href="{url}rev/_NODEID{sessionvars%urlparameter}" title="_NODEID">_DESC</a>'; +revlink 
+= '</span><span class="info">_DATE, by _USER</span></li>'; + +graph.vertex = function(x, y, color, parity, cur) \{ + + this.ctx.beginPath(); + color = this.setColor(color, 0.25, 0.75); + this.ctx.arc(x, y, radius, 0, Math.PI * 2, true); + this.ctx.fill(); + + var bg = '<li class="bg parity' + parity + '"></li>'; + var left = (this.columns + 1) * this.bg_height; + var nstyle = 'padding-left: ' + left + 'px;'; + var item = revlink.replace(/_STYLE/, nstyle); + item = item.replace(/_PARITY/, 'parity' + parity); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_NODEID/, cur[0]); + item = item.replace(/_DESC/, cur[3]); + item = item.replace(/_USER/, cur[4]); + item = item.replace(/_DATE/, cur[5]); + + return [bg, item]; + +} + +graph.render(data); + +// stop hiding script --> +</script> + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<p> +<label for="search1">search:</label> +<input name="rev" id="search1" type="text" size="30"> +navigate: <small class="navigate">{changenav%navgraph}</small> +</p> +</form> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/header.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/header.tmpl new file mode 100644 index 0000000..646b2fe --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/header.tmpl @@ -0,0 +1,6 @@ +<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN"> +<html> +<head> +<link rel="icon" href="{staticurl}hgicon.png" type="image/png"> +<meta name="robots" content="index, nofollow" /> +<link rel="stylesheet" href="{staticurl}style.css" type="text/css" /> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/index.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/index.tmpl new file mode 100644 index 0000000..f399813 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/index.tmpl 
@@ -0,0 +1,19 @@ +{header} +<title>Mercurial repositories index</title> +</head> +<body> + +<h2>Mercurial Repositories</h2> + +<table> + <tr> + <td><a href="?sort={sort_name}">Name</a></td> + <td><a href="?sort={sort_description}">Description</a></td> + <td><a href="?sort={sort_contact}">Contact</a></td> + <td><a href="?sort={sort_lastchange}">Last modified</a></td> + <td> </td> + </tr> + {entries%indexentry} +</table> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/manifest.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/manifest.tmpl new file mode 100644 index 0000000..24a0973 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/manifest.tmpl @@ -0,0 +1,29 @@ +{header} +<title>{repo|escape}: files for changeset {node|short}</title> +</head> +<body> + +<div class="buttons"> +<a href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog/{rev}{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}rev/{node|short}{sessionvars%urlparameter}">changeset</a> +{archives%archiveentry} +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<h2>files for changeset {node|short}: {path|escape}</h2> + +<table cellpadding="0" cellspacing="0"> +<tr class="parity{upparity}"> + <td><tt>drwxr-xr-x</tt> + <td> + <td> + <td><a href="{url}file/{node|short}{up|urlescape}{sessionvars%urlparameter}">[up]</a> +</tr> +{dentries%direntry} +{fentries%fileentry} +</table> +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/map b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/map new file mode 100644 index 0000000..33df1ad --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/map @@ -0,0 +1,183 @@ +default = 'shortlog' +mimetype = 'text/html; charset={encoding}' +header = header.tmpl +footer = footer.tmpl +search = search.tmpl +changelog = changelog.tmpl +shortlog = shortlog.tmpl +shortlogentry = shortlogentry.tmpl +graph = graph.tmpl +naventry = '<a href="{url}log/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navshortentry = '<a href="{url}shortlog/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +navgraphentry = '<a href="{url}graph/{node|short}{sessionvars%urlparameter}">{label|escape}</a> ' +filenaventry = '<a href="{url}log/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{label|escape}</a> ' +filedifflink = '<a href="{url}diff/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenodelink = '<a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{file|escape}</a> ' +filenolink = '{file|escape} ' +fileellipses = '...' 
+changelogentry = changelogentry.tmpl +searchentry = changelogentry.tmpl +changeset = changeset.tmpl +manifest = manifest.tmpl + +nav = '{before%naventry} {after%naventry}' +navshort = '{before%navshortentry}{after%navshortentry}' +navgraph = '{before%navgraphentry}{after%navgraphentry}' +filenav = '{before%filenaventry}{after%filenaventry}' + +direntry = ' + <tr class="parity{parity}"> + <td><tt>drwxr-xr-x</tt> + <td> + <td> + <td> + <a href="{url}file/{node|short}{path|urlescape}{sessionvars%urlparameter}">{basename|escape}/</a> + <a href="{url}file/{node|short}{path|urlescape}/{emptydirs|urlescape}{sessionvars%urlparameter}"> + {emptydirs|urlescape} + </a>' + +fileentry = ' + <tr class="parity{parity}"> + <td><tt>{permissions|permissions}</tt> + <td align=right><tt class="date">{date|isodate}</tt> + <td align=right><tt>{size}</tt> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{basename|escape}</a>' + +filerevision = filerevision.tmpl +fileannotate = fileannotate.tmpl +filediff = filediff.tmpl +filelog = filelog.tmpl +fileline = '<div class="parity{parity}"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a> {line|escape}</div>' +filelogentry = filelogentry.tmpl + +# The ensures that all table cells have content (even if there +# is an empty line in the annotated file), which in turn ensures that +# all table rows have equal height. 
+annotateline = ' + <tr class="parity{parity}"> + <td class="annotate"> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}#l{targetline}" + title="{node|short}: {desc|escape|firstline}">{author|user}@{rev}</a> + </td> + <td> + <a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a> + </td> + <td><pre> {line|escape}</pre></td> + </tr>' +difflineplus = '<span class="plusline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>' +difflineminus = '<span class="minusline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>' +difflineat = '<span class="atline"><a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}</span>' +diffline = '<a class="lineno" href="#{lineid}" id="{lineid}">{linenumber}</a>{line|escape}' +changelogparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a> + </td> + </tr>' +changesetparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filerevparent = ' + <tr> + <td class="metatag">parent:</td> + <td> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </td> + </tr>' +filerename = '{file|escape}@' +filelogrename = ' + <tr> + <th>base:</th> + <td> + <a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {file|escape}@{node|short} + </a> + </td> + </tr>' +fileannotateparent = ' + <tr> + <td class="metatag">parent:</td> + <td> + <a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}"> + {rename%filerename}{node|short} + </a> + </td> + </tr>' +changesetchild = ' + <tr> + <th class="child">child {rev}:</th> + <td class="child"><a 
href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +changelogchild = ' + <tr> + <th class="child">child {rev}:</th> + <td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filerevchild = ' + <tr> + <td class="metatag">child:</td> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +fileannotatechild = ' + <tr> + <td class="metatag">child:</td> + <td><a href="{url}annotate/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +tags = tags.tmpl +tagentry = ' + <li class="tagEntry parity{parity}"> + <tt class="node">{node}</tt> + <a href="{url}rev/{node|short}{sessionvars%urlparameter}">{tag|escape}</a> + </li>' +branches = branches.tmpl +branchentry = ' + <li class="tagEntry parity{parity}"> + <tt class="node">{node}</tt> + <a href="{url}shortlog/{node|short}{sessionvars%urlparameter}" class="{status}">{branch|escape}</a> + </li>' +diffblock = '<pre class="parity{parity}">{lines}</pre>' +changelogtag = '<tr><th class="tag">tag:</th><td class="tag">{tag|escape}</td></tr>' +changesettag = '<tr><th class="tag">tag:</th><td class="tag">{tag|escape}</td></tr>' +filediffparent = ' + <tr> + <th class="parent">parent {rev}:</th> + <td class="parent"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filelogparent = ' + <tr> + <th>parent {rev}:</th> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filediffchild = ' + <tr> + <th class="child">child {rev}:</th> + <td class="child"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +filelogchild = ' + <tr> + <th>child {rev}:</th> + <td><a href="{url}file/{node|short}/{file|urlescape}{sessionvars%urlparameter}">{node|short}</a></td> + </tr>' +indexentry = ' + <tr class="parity{parity}"> + <td><a 
href="{url}{sessionvars%urlparameter}">{name|escape}</a></td> + <td>{description}</td> + <td>{contact|obfuscate}</td> + <td class="age">{lastchange|age}</td> + <td class="indexlinks"> + <a href="{url}rss-log">RSS</a> + <a href="{url}atom-log">Atom</a> + {archives%archiveentry} + </td> + </tr>' +index = index.tmpl +archiveentry = '<a href="{url}archive/{node|short}{extension|urlescape}">{type|escape}</a> ' +notfound = notfound.tmpl +error = error.tmpl +urlparameter = '{separator}{name}={value|urlescape}' +hiddenformentry = '<input type="hidden" name="{name}" value="{value|escape}" />' diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/notfound.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/notfound.tmpl new file mode 100644 index 0000000..e9e6ba4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/notfound.tmpl @@ -0,0 +1,12 @@ +{header} +<title>Mercurial repository not found</title> +</head> +<body> + +<h2>Mercurial repository not found</h2> + +The specified repository "{repo|escape}" is unknown, sorry. + +Please go back to the <a href="{url}">main repository list page</a>. 
+ +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/search.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/search.tmpl new file mode 100644 index 0000000..9ace1eb --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/search.tmpl @@ -0,0 +1,37 @@ +{header} +<title>{repo|escape}: searching for {query|escape}</title> +</head> +<body> + +<div class="buttons"> +<a href="{url}log{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}file/{node|short}{sessionvars%urlparameter}">files</a> +{archives%archiveentry} +<a href="{url}help{sessionvars%urlparameter}">help</a> +</div> + +<h2>searching for {query|escape}</h2> + +<form> +{sessionvars%hiddenformentry} +<p> +search: +<input name="rev" type="text" width="30" value="{query|escape}"> +</p> +</form> + +{entries} + +<form> +{sessionvars%hiddenformentry} +<p> +search: +<input name="rev" type="text" width="30" value="{query|escape}"> +</p> +</form> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/shortlog.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/shortlog.tmpl new file mode 100644 index 0000000..1f70a53 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/shortlog.tmpl @@ -0,0 +1,44 @@ +{header} +<title>{repo|escape}: shortlog</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-log" title="Atom feed for {repo|escape}"> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-log" title="RSS feed for {repo|escape}"> +</head> +<body> + +<div class="buttons"> +<a 
href="{url}log/{rev}{sessionvars%urlparameter}">changelog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}tags{sessionvars%urlparameter}">tags</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}file/{node|short}/{sessionvars%urlparameter}">files</a> +{archives%archiveentry} +<a href="{url}help{sessionvars%urlparameter}">help</a> +<a type="application/rss+xml" href="{url}rss-log">rss</a> +<a type="application/rss+xml" href="{url}atom-log" title="Atom feed for {repo|escape}">atom</a> +</div> + +<h2>shortlog for {repo|escape}</h2> + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<p> +<label for="search1">search:</label> +<input name="rev" id="search1" type="text" size="30"> +navigate: <small class="navigate">{changenav%navshort}</small> +</p> +</form> + +{entries%shortlogentry} + +<form action="{url}log"> +{sessionvars%hiddenformentry} +<p> +<label for="search2">search:</label> +<input name="rev" id="search2" type="text" size="30"> +navigate: <small class="navigate">{changenav%navshort}</small> +</p> +</form> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/shortlogentry.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/shortlogentry.tmpl new file mode 100644 index 0000000..b6857db --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/shortlogentry.tmpl @@ -0,0 +1,7 @@ +<table class="slogEntry parity{parity}"> + <tr> + <td class="age">{date|age}</td> + <td class="author">{author|person}</td> + <td class="node"><a href="{url}rev/{node|short}{sessionvars%urlparameter}">{desc|strip|firstline|escape|nonempty}</a></td> + </tr> +</table> diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/tags.tmpl b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/tags.tmpl new file mode 100644 index 0000000..4c41551 --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/spartan/tags.tmpl @@ -0,0 +1,27 @@ +{header} +<title>{repo|escape}: tags</title> +<link rel="alternate" type="application/atom+xml" + href="{url}atom-tags" title="Atom feed for {repo|escape}: tags"> +<link rel="alternate" type="application/rss+xml" + href="{url}rss-tags" title="RSS feed for {repo|escape}: tags"> +</head> +<body> + +<div class="buttons"> +<a href="{url}log{sessionvars%urlparameter}">changelog</a> +<a href="{url}shortlog{sessionvars%urlparameter}">shortlog</a> +<a href="{url}graph{sessionvars%urlparameter}">graph</a> +<a href="{url}branches{sessionvars%urlparameter}">branches</a> +<a href="{url}file/{node|short}/{sessionvars%urlparameter}">files</a> +<a href="{url}help{sessionvars%urlparameter}">help</a> +<a type="application/rss+xml" href="{url}rss-tags">rss</a> +<a type="application/atom+xml" href="{url}atom-tags">atom</a> +</div> + +<h2>tags:</h2> + +<ul id="tagEntries"> +{entries%tagentry} +</ul> + +{footer} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/background.png b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/background.png Binary files differnew file mode 100644 index 0000000..af8a0aa --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/background.png diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/coal-file.png b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/coal-file.png Binary files differnew file mode 100644 index 0000000..7ecf463 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/coal-file.png diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/coal-folder.png b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/coal-folder.png Binary files differnew file mode 100644 index 0000000..d1b8ecc --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/coal-folder.png diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/excanvas.js b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/excanvas.js new file mode 100644 index 0000000..9d71658 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/excanvas.js @@ -0,0 +1,19 @@ +if(!window.CanvasRenderingContext2D){(function(){var I=Math,i=I.round,L=I.sin,M=I.cos,m=10,A=m/2,Q={init:function(a){var b=a||document;if(/MSIE/.test(navigator.userAgent)&&!window.opera){var c=this;b.attachEvent("onreadystatechange",function(){c.r(b)})}},r:function(a){if(a.readyState=="complete"){if(!a.namespaces["s"]){a.namespaces.add("g_vml_","urn:schemas-microsoft-com:vml")}var b=a.createStyleSheet();b.cssText="canvas{display:inline-block;overflow:hidden;text-align:left;width:300px;height:150px}g_vml_\\:*{behavior:url(#default#VML)}"; +var c=a.getElementsByTagName("canvas");for(var d=0;d<c.length;d++){if(!c[d].getContext){this.initElement(c[d])}}}},q:function(a){var b=a.outerHTML,c=a.ownerDocument.createElement(b);if(b.slice(-2)!="/>"){var d="/"+a.tagName,e;while((e=a.nextSibling)&&e.tagName!=d){e.removeNode()}if(e){e.removeNode()}}a.parentNode.replaceChild(c,a);return c},initElement:function(a){a=this.q(a);a.getContext=function(){if(this.l){return this.l}return this.l=new K(this)};a.attachEvent("onpropertychange",V);a.attachEvent("onresize", +W);var b=a.attributes;if(b.width&&b.width.specified){a.style.width=b.width.nodeValue+"px"}else{a.width=a.clientWidth}if(b.height&&b.height.specified){a.style.height=b.height.nodeValue+"px"}else{a.height=a.clientHeight}return a}};function V(a){var b=a.srcElement;switch(a.propertyName){case "width":b.style.width=b.attributes.width.nodeValue+"px";b.getContext().clearRect();break;case "height":b.style.height=b.attributes.height.nodeValue+"px";b.getContext().clearRect();break}}function W(a){var 
b=a.srcElement; +if(b.firstChild){b.firstChild.style.width=b.clientWidth+"px";b.firstChild.style.height=b.clientHeight+"px"}}Q.init();var R=[];for(var E=0;E<16;E++){for(var F=0;F<16;F++){R[E*16+F]=E.toString(16)+F.toString(16)}}function J(){return[[1,0,0],[0,1,0],[0,0,1]]}function G(a,b){var c=J();for(var d=0;d<3;d++){for(var e=0;e<3;e++){var g=0;for(var h=0;h<3;h++){g+=a[d][h]*b[h][e]}c[d][e]=g}}return c}function N(a,b){b.fillStyle=a.fillStyle;b.lineCap=a.lineCap;b.lineJoin=a.lineJoin;b.lineWidth=a.lineWidth;b.miterLimit= +a.miterLimit;b.shadowBlur=a.shadowBlur;b.shadowColor=a.shadowColor;b.shadowOffsetX=a.shadowOffsetX;b.shadowOffsetY=a.shadowOffsetY;b.strokeStyle=a.strokeStyle;b.d=a.d;b.e=a.e}function O(a){var b,c=1;a=String(a);if(a.substring(0,3)=="rgb"){var d=a.indexOf("(",3),e=a.indexOf(")",d+1),g=a.substring(d+1,e).split(",");b="#";for(var h=0;h<3;h++){b+=R[Number(g[h])]}if(g.length==4&&a.substr(3,1)=="a"){c=g[3]}}else{b=a}return[b,c]}function S(a){switch(a){case "butt":return"flat";case "round":return"round"; +case "square":default:return"square"}}function K(a){this.a=J();this.m=[];this.k=[];this.c=[];this.strokeStyle="#000";this.fillStyle="#000";this.lineWidth=1;this.lineJoin="miter";this.lineCap="butt";this.miterLimit=m*1;this.globalAlpha=1;this.canvas=a;var b=a.ownerDocument.createElement("div");b.style.width=a.clientWidth+"px";b.style.height=a.clientHeight+"px";b.style.overflow="hidden";b.style.position="absolute";a.appendChild(b);this.j=b;this.d=1;this.e=1}var j=K.prototype;j.clearRect=function(){this.j.innerHTML= +"";this.c=[]};j.beginPath=function(){this.c=[]};j.moveTo=function(a,b){this.c.push({type:"moveTo",x:a,y:b});this.f=a;this.g=b};j.lineTo=function(a,b){this.c.push({type:"lineTo",x:a,y:b});this.f=a;this.g=b};j.bezierCurveTo=function(a,b,c,d,e,g){this.c.push({type:"bezierCurveTo",cp1x:a,cp1y:b,cp2x:c,cp2y:d,x:e,y:g});this.f=e;this.g=g};j.quadraticCurveTo=function(a,b,c,d){var 
e=this.f+0.6666666666666666*(a-this.f),g=this.g+0.6666666666666666*(b-this.g),h=e+(c-this.f)/3,l=g+(d-this.g)/3;this.bezierCurveTo(e, +g,h,l,c,d)};j.arc=function(a,b,c,d,e,g){c*=m;var h=g?"at":"wa",l=a+M(d)*c-A,n=b+L(d)*c-A,o=a+M(e)*c-A,f=b+L(e)*c-A;if(l==o&&!g){l+=0.125}this.c.push({type:h,x:a,y:b,radius:c,xStart:l,yStart:n,xEnd:o,yEnd:f})};j.rect=function(a,b,c,d){this.moveTo(a,b);this.lineTo(a+c,b);this.lineTo(a+c,b+d);this.lineTo(a,b+d);this.closePath()};j.strokeRect=function(a,b,c,d){this.beginPath();this.moveTo(a,b);this.lineTo(a+c,b);this.lineTo(a+c,b+d);this.lineTo(a,b+d);this.closePath();this.stroke()};j.fillRect=function(a, +b,c,d){this.beginPath();this.moveTo(a,b);this.lineTo(a+c,b);this.lineTo(a+c,b+d);this.lineTo(a,b+d);this.closePath();this.fill()};j.createLinearGradient=function(a,b,c,d){var e=new H("gradient");return e};j.createRadialGradient=function(a,b,c,d,e,g){var h=new H("gradientradial");h.n=c;h.o=g;h.i.x=a;h.i.y=b;return h};j.drawImage=function(a,b){var c,d,e,g,h,l,n,o,f=a.runtimeStyle.width,k=a.runtimeStyle.height;a.runtimeStyle.width="auto";a.runtimeStyle.height="auto";var q=a.width,r=a.height;a.runtimeStyle.width= +f;a.runtimeStyle.height=k;if(arguments.length==3){c=arguments[1];d=arguments[2];h=(l=0);n=(e=q);o=(g=r)}else if(arguments.length==5){c=arguments[1];d=arguments[2];e=arguments[3];g=arguments[4];h=(l=0);n=q;o=r}else if(arguments.length==9){h=arguments[1];l=arguments[2];n=arguments[3];o=arguments[4];c=arguments[5];d=arguments[6];e=arguments[7];g=arguments[8]}else{throw"Invalid number of arguments";}var s=this.b(c,d),t=[],v=10,w=10;t.push(" <g_vml_:group",' coordsize="',m*v,",",m*w,'"',' coordorigin="0,0"', +' style="width:',v,";height:",w,";position:absolute;");if(this.a[0][0]!=1||this.a[0][1]){var x=[];x.push("M11='",this.a[0][0],"',","M12='",this.a[1][0],"',","M21='",this.a[0][1],"',","M22='",this.a[1][1],"',","Dx='",i(s.x/m),"',","Dy='",i(s.y/m),"'");var 
p=s,y=this.b(c+e,d),z=this.b(c,d+g),B=this.b(c+e,d+g);p.x=Math.max(p.x,y.x,z.x,B.x);p.y=Math.max(p.y,y.y,z.y,B.y);t.push("padding:0 ",i(p.x/m),"px ",i(p.y/m),"px 0;filter:progid:DXImageTransform.Microsoft.Matrix(",x.join(""),", sizingmethod='clip');")}else{t.push("top:", +i(s.y/m),"px;left:",i(s.x/m),"px;")}t.push(' ">','<g_vml_:image src="',a.src,'"',' style="width:',m*e,";"," height:",m*g,';"',' cropleft="',h/q,'"',' croptop="',l/r,'"',' cropright="',(q-h-n)/q,'"',' cropbottom="',(r-l-o)/r,'"'," />","</g_vml_:group>");this.j.insertAdjacentHTML("BeforeEnd",t.join(""))};j.stroke=function(a){var b=[],c=O(a?this.fillStyle:this.strokeStyle),d=c[0],e=c[1]*this.globalAlpha,g=10,h=10;b.push("<g_vml_:shape",' fillcolor="',d,'"',' filled="',Boolean(a),'"',' style="position:absolute;width:', +g,";height:",h,';"',' coordorigin="0 0" coordsize="',m*g," ",m*h,'"',' stroked="',!a,'"',' strokeweight="',this.lineWidth,'"',' strokecolor="',d,'"',' path="');var l={x:null,y:null},n={x:null,y:null};for(var o=0;o<this.c.length;o++){var f=this.c[o];if(f.type=="moveTo"){b.push(" m ");var k=this.b(f.x,f.y);b.push(i(k.x),",",i(k.y))}else if(f.type=="lineTo"){b.push(" l ");var k=this.b(f.x,f.y);b.push(i(k.x),",",i(k.y))}else if(f.type=="close"){b.push(" x ")}else if(f.type=="bezierCurveTo"){b.push(" c "); +var k=this.b(f.x,f.y),q=this.b(f.cp1x,f.cp1y),r=this.b(f.cp2x,f.cp2y);b.push(i(q.x),",",i(q.y),",",i(r.x),",",i(r.y),",",i(k.x),",",i(k.y))}else if(f.type=="at"||f.type=="wa"){b.push(" ",f.type," ");var k=this.b(f.x,f.y),s=this.b(f.xStart,f.yStart),t=this.b(f.xEnd,f.yEnd);b.push(i(k.x-this.d*f.radius),",",i(k.y-this.e*f.radius)," ",i(k.x+this.d*f.radius),",",i(k.y+this.e*f.radius)," ",i(s.x),",",i(s.y)," ",i(t.x),",",i(t.y))}if(k){if(l.x==null||k.x<l.x){l.x=k.x}if(n.x==null||k.x>n.x){n.x=k.x}if(l.y== +null||k.y<l.y){l.y=k.y}if(n.y==null||k.y>n.y){n.y=k.y}}}b.push(' ">');if(typeof this.fillStyle=="object"){var 
v={x:"50%",y:"50%"},w=n.x-l.x,x=n.y-l.y,p=w>x?w:x;v.x=i(this.fillStyle.i.x/w*100+50)+"%";v.y=i(this.fillStyle.i.y/x*100+50)+"%";var y=[];if(this.fillStyle.p=="gradientradial"){var z=this.fillStyle.n/p*100,B=this.fillStyle.o/p*100-z}else{var z=0,B=100}var C={offset:null,color:null},D={offset:null,color:null};this.fillStyle.h.sort(function(T,U){return T.offset-U.offset});for(var o=0;o<this.fillStyle.h.length;o++){var u= +this.fillStyle.h[o];y.push(u.offset*B+z,"% ",u.color,",");if(u.offset>C.offset||C.offset==null){C.offset=u.offset;C.color=u.color}if(u.offset<D.offset||D.offset==null){D.offset=u.offset;D.color=u.color}}y.pop();b.push("<g_vml_:fill",' color="',D.color,'"',' color2="',C.color,'"',' type="',this.fillStyle.p,'"',' focusposition="',v.x,", ",v.y,'"',' colors="',y.join(""),'"',' opacity="',e,'" />')}else if(a){b.push('<g_vml_:fill color="',d,'" opacity="',e,'" />')}else{b.push("<g_vml_:stroke",' opacity="', +e,'"',' joinstyle="',this.lineJoin,'"',' miterlimit="',this.miterLimit,'"',' endcap="',S(this.lineCap),'"',' weight="',this.lineWidth,'px"',' color="',d,'" />')}b.push("</g_vml_:shape>");this.j.insertAdjacentHTML("beforeEnd",b.join(""));this.c=[]};j.fill=function(){this.stroke(true)};j.closePath=function(){this.c.push({type:"close"})};j.b=function(a,b){return{x:m*(a*this.a[0][0]+b*this.a[1][0]+this.a[2][0])-A,y:m*(a*this.a[0][1]+b*this.a[1][1]+this.a[2][1])-A}};j.save=function(){var a={};N(this,a); +this.k.push(a);this.m.push(this.a);this.a=G(J(),this.a)};j.restore=function(){N(this.k.pop(),this);this.a=this.m.pop()};j.translate=function(a,b){var c=[[1,0,0],[0,1,0],[a,b,1]];this.a=G(c,this.a)};j.rotate=function(a){var b=M(a),c=L(a),d=[[b,c,0],[-c,b,0],[0,0,1]];this.a=G(d,this.a)};j.scale=function(a,b){this.d*=a;this.e*=b;var c=[[a,0,0],[0,b,0],[0,0,1]];this.a=G(c,this.a)};j.clip=function(){};j.arcTo=function(){};j.createPattern=function(){return new P};function H(a){this.p=a;this.n=0;this.o= 
+0;this.h=[];this.i={x:0,y:0}}H.prototype.addColorStop=function(a,b){b=O(b);this.h.push({offset:1-a,color:b})};function P(){}G_vmlCanvasManager=Q;CanvasRenderingContext2D=K;CanvasGradient=H;CanvasPattern=P})()}; diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/graph.js b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/graph.js new file mode 100644 index 0000000..0d4dcdd --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/graph.js @@ -0,0 +1,137 @@ +// branch_renderer.js - Rendering of branch DAGs on the client side +// +// Copyright 2008 Dirkjan Ochtman <dirkjan AT ochtman DOT nl> +// Copyright 2006 Alexander Schremmer <alex AT alexanderweb DOT de> +// +// derived from code written by Scott James Remnant <scott@ubuntu.com> +// Copyright 2005 Canonical Ltd. +// +// This software may be used and distributed according to the terms +// of the GNU General Public License, incorporated herein by reference. + +var colors = [ + [ 1.0, 0.0, 0.0 ], + [ 1.0, 1.0, 0.0 ], + [ 0.0, 1.0, 0.0 ], + [ 0.0, 1.0, 1.0 ], + [ 0.0, 0.0, 1.0 ], + [ 1.0, 0.0, 1.0 ] +]; + +function Graph() { + + this.canvas = document.getElementById('graph'); + if (navigator.userAgent.indexOf('MSIE') >= 0) this.canvas = window.G_vmlCanvasManager.initElement(this.canvas); + this.ctx = this.canvas.getContext('2d'); + this.ctx.strokeStyle = 'rgb(0, 0, 0)'; + this.ctx.fillStyle = 'rgb(0, 0, 0)'; + this.cur = [0, 0]; + this.line_width = 3; + this.bg = [0, 4]; + this.cell = [2, 0]; + this.columns = 0; + this.revlink = ''; + + this.scale = function(height) { + this.bg_height = height; + this.box_size = Math.floor(this.bg_height / 1.2); + this.cell_height = this.box_size; + } + + function colorPart(num) { + num *= 255 + num = num < 0 ? 0 : num; + num = num > 255 ? 
255 : num; + var digits = Math.round(num).toString(16); + if (num < 16) { + return '0' + digits; + } else { + return digits; + } + } + + this.setColor = function(color, bg, fg) { + + // Set the colour. + // + // Picks a distinct colour based on an internal wheel; the bg + // parameter provides the value that should be assigned to the 'zero' + // colours and the fg parameter provides the multiplier that should be + // applied to the foreground colours. + + color %= colors.length; + var red = (colors[color][0] * fg) || bg; + var green = (colors[color][1] * fg) || bg; + var blue = (colors[color][2] * fg) || bg; + red = Math.round(red * 255); + green = Math.round(green * 255); + blue = Math.round(blue * 255); + var s = 'rgb(' + red + ', ' + green + ', ' + blue + ')'; + this.ctx.strokeStyle = s; + this.ctx.fillStyle = s; + return s; + + } + + this.render = function(data) { + + var backgrounds = ''; + var nodedata = ''; + + for (var i in data) { + + var parity = i % 2; + this.cell[1] += this.bg_height; + this.bg[1] += this.bg_height; + + var cur = data[i]; + var node = cur[1]; + var edges = cur[2]; + var fold = false; + + for (var j in edges) { + + line = edges[j]; + start = line[0]; + end = line[1]; + color = line[2]; + + if (end > this.columns || start > this.columns) { + this.columns += 1; + } + + if (start == this.columns && start > end) { + var fold = true; + } + + x0 = this.cell[0] + this.box_size * start + this.box_size / 2; + y0 = this.bg[1] - this.bg_height / 2; + x1 = this.cell[0] + this.box_size * end + this.box_size / 2; + y1 = this.bg[1] + this.bg_height / 2; + + this.edge(x0, y0, x1, y1, color); + + } + + // Draw the revision node in the right column + + column = node[0] + color = node[1] + + radius = this.box_size / 8; + x = this.cell[0] + this.box_size * column + this.box_size / 2; + y = this.bg[1] - this.bg_height / 2; + var add = this.vertex(x, y, color, parity, cur); + backgrounds += add[0]; + nodedata += add[1]; + + if (fold) this.columns -= 1; + + } 
+ + document.getElementById('nodebgs').innerHTML += backgrounds; + document.getElementById('graphnodes').innerHTML += nodedata; + + } + +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/hgicon.png b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/hgicon.png Binary files differnew file mode 100644 index 0000000..60effbc --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/hgicon.png diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/hglogo.png b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/hglogo.png Binary files differnew file mode 100644 index 0000000..adc6e65 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/hglogo.png diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-coal.css b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-coal.css new file mode 100644 index 0000000..6f633e1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-coal.css @@ -0,0 +1,265 @@ +body { + margin: 0; + padding: 0; + background: black url(background.png) repeat-x; + font-family: sans-serif; +} + +.container { + padding-right: 150px; +} + +.main { + position: relative; + background: white; + padding: 2em; + border-right: 15px solid black; + border-bottom: 15px solid black; +} + +#.main { + width: 98%; +} + +.overflow { + width: 100%; + overflow: auto; +} + +.menu { + background: #999; + padding: 10px; + width: 75px; + margin: 0; + font-size: 80%; + text-align: left; + position: fixed; + top: 27px; + left: auto; + right: 27px; +} + +#.menu { + position: absolute !important; + top:expression(eval(document.body.scrollTop + 27)); +} + +.menu ul { + list-style: none; + padding: 0; + margin: 10px 0 0 0; +} + +.menu li { + margin-bottom: 3px; + padding: 2px 4px; + background: white; + 
color: black; + font-weight: normal; +} + +.menu li.active { + background: black; + color: white; +} + +.menu img { + width: 75px; + height: 90px; + border: 0; +} + +.menu a { color: black; display: block; } + +.search { + position: absolute; + top: .7em; + right: 2em; +} + +form.search div#hint { + display: none; + position: absolute; + top: 40px; + right: 0px; + width: 190px; + padding: 5px; + background: #ffc; + font-size: 70%; + border: 1px solid yellow; + -moz-border-radius: 5px; /* this works only in camino/firefox */ + -webkit-border-radius: 5px; /* this is just for Safari */ +} + +form.search:hover div#hint { display: block; } + +a { text-decoration:none; } +.age { white-space:nowrap; } +.date { white-space:nowrap; } +.indexlinks { white-space:nowrap; } +.parity0 { background-color: #f0f0f0; } +.parity1 { background-color: white; } +.plusline { color: green; } +.minusline { color: #dc143c; } /* crimson */ +.atline { color: purple; } + +.navigate { + text-align: right; + font-size: 60%; + margin: 1em 0; +} + +.tag { + color: #999; + font-size: 70%; + font-weight: normal; + margin-left: .5em; + vertical-align: baseline; +} + +.branchhead { + color: #000; + font-size: 80%; + font-weight: normal; + margin-left: .5em; + vertical-align: baseline; +} + +ul#graphnodes .branchhead { + font-size: 75%; +} + +.branchname { + color: #000; + font-size: 60%; + font-weight: normal; + margin-left: .5em; + vertical-align: baseline; +} + +h3 .branchname { + font-size: 80%; +} + +/* Common */ +pre { margin: 0; } + +h2 { font-size: 120%; border-bottom: 1px solid #999; } +h2 a { color: #000; } +h3 { + margin-top: -.7em; + font-size: 100%; +} + +/* log and tags tables */ +.bigtable { + border-bottom: 1px solid #999; + border-collapse: collapse; + font-size: 90%; + width: 100%; + font-weight: normal; + text-align: left; +} + +.bigtable td { + vertical-align: top; +} + +.bigtable th { + padding: 1px 4px; + border-bottom: 1px solid #999; +} +.bigtable tr { border: none; } +.bigtable 
.age { width: 6em; } +.bigtable .author { width: 12em; } +.bigtable .description { } +.bigtable .node { width: 5em; font-family: monospace;} +.bigtable .lineno { width: 2em; text-align: right;} +.bigtable .lineno a { color: #999; font-size: smaller; font-family: monospace;} +.bigtable .permissions { width: 8em; text-align: left;} +.bigtable .size { width: 5em; text-align: right; } +.bigtable .annotate { text-align: right; } +.bigtable td.annotate { font-size: smaller; } +.bigtable td.source { font-size: inherit; } + +.source, .sourcefirst, .sourcelast { + font-family: monospace; + white-space: pre; + padding: 1px 4px; + font-size: 90%; +} +.sourcefirst { border-bottom: 1px solid #999; font-weight: bold; } +.sourcelast { border-top: 1px solid #999; } +.source a { color: #999; font-size: smaller; font-family: monospace;} +.bottomline { border-bottom: 1px solid #999; } + +.fileline { font-family: monospace; } +.fileline img { border: 0; } + +.tagEntry .closed { color: #99f; } + +/* Changeset entry */ +#changesetEntry { + border-collapse: collapse; + font-size: 90%; + width: 100%; + margin-bottom: 1em; +} + +#changesetEntry th { + padding: 1px 4px; + width: 4em; + text-align: right; + font-weight: normal; + color: #999; + margin-right: .5em; + vertical-align: top; +} + +div.description { + border-left: 3px solid #999; + margin: 1em 0 1em 0; + padding: .3em; +} + +/* Graph */ +div#wrapper { + position: relative; + border-top: 1px solid black; + border-bottom: 1px solid black; + margin: 0; + padding: 0; +} + +canvas { + position: absolute; + z-index: 5; + top: -0.7em; + margin: 0; +} + +ul#graphnodes { + position: absolute; + z-index: 10; + top: -1.0em; + list-style: none inside none; + padding: 0; +} + +ul#nodebgs { + list-style: none inside none; + padding: 0; + margin: 0; + top: -0.7em; +} + +ul#graphnodes li, ul#nodebgs li { + height: 39px; +} + +ul#graphnodes li .info { + display: block; + font-size: 70%; + position: relative; + top: -3px; +} diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-gitweb.css b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-gitweb.css new file mode 100644 index 0000000..b13c62e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-gitweb.css @@ -0,0 +1,124 @@ +body { font-family: sans-serif; font-size: 12px; margin:0px; border:solid #d9d8d1; border-width:1px; margin:10px; } +a { color:#0000cc; } +a:hover, a:visited, a:active { color:#880000; } +div.page_header { height:25px; padding:8px; font-size:18px; font-weight:bold; background-color:#d9d8d1; } +div.page_header a:visited { color:#0000cc; } +div.page_header a:hover { color:#880000; } +div.page_nav { padding:8px; } +div.page_nav a:visited { color:#0000cc; } +div.page_path { padding:8px; border:solid #d9d8d1; border-width:0px 0px 1px} +div.page_footer { padding:4px 8px; background-color: #d9d8d1; } +div.page_footer_text { float:left; color:#555555; font-style:italic; } +div.page_body { padding:8px; } +div.title, a.title { + display:block; padding:6px 8px; + font-weight:bold; background-color:#edece6; text-decoration:none; color:#000000; +} +a.title:hover { background-color: #d9d8d1; } +div.title_text { padding:6px 0px; border: solid #d9d8d1; border-width:0px 0px 1px; } +div.log_body { padding:8px 8px 8px 150px; } +.age { white-space:nowrap; } +span.age { position:relative; float:left; width:142px; font-style:italic; } +div.log_link { + padding:0px 8px; + font-size:10px; font-family:sans-serif; font-style:normal; + position:relative; float:left; width:136px; +} +div.list_head { padding:6px 8px 4px; border:solid #d9d8d1; border-width:1px 0px 0px; font-style:italic; } +a.list { text-decoration:none; color:#000000; } +a.list:hover { text-decoration:underline; color:#880000; } +table { padding:8px 4px; } +th { padding:2px 5px; font-size:12px; text-align:left; } +tr.light:hover, .parity0:hover { background-color:#edece6; } 
+tr.dark, .parity1 { background-color:#f6f6f0; } +tr.dark:hover, .parity1:hover { background-color:#edece6; } +td { padding:2px 5px; font-size:12px; vertical-align:top; } +td.closed { background-color: #99f; } +td.link { padding:2px 5px; font-family:sans-serif; font-size:10px; } +td.indexlinks { white-space: nowrap; } +td.indexlinks a { + padding: 2px 5px; line-height: 10px; + border: 1px solid; + color: #ffffff; background-color: #7777bb; + border-color: #aaaadd #333366 #333366 #aaaadd; + font-weight: bold; text-align: center; text-decoration: none; + font-size: 10px; +} +td.indexlinks a:hover { background-color: #6666aa; } +div.pre { font-family:monospace; font-size:12px; white-space:pre; } +div.diff_info { font-family:monospace; color:#000099; background-color:#edece6; font-style:italic; } +div.index_include { border:solid #d9d8d1; border-width:0px 0px 1px; padding:12px 8px; } +div.search { margin:4px 8px; position:absolute; top:56px; right:12px } +.linenr { color:#999999; text-decoration:none } +div.rss_logo { float: right; white-space: nowrap; } +div.rss_logo a { + padding:3px 6px; line-height:10px; + border:1px solid; border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; + color:#ffffff; background-color:#ff6600; + font-weight:bold; font-family:sans-serif; font-size:10px; + text-align:center; text-decoration:none; +} +div.rss_logo a:hover { background-color:#ee5500; } +pre { margin: 0; } +span.logtags span { + padding: 0px 4px; + font-size: 10px; + font-weight: normal; + border: 1px solid; + background-color: #ffaaff; + border-color: #ffccff #ff00ee #ff00ee #ffccff; +} +span.logtags span.tagtag { + background-color: #ffffaa; + border-color: #ffffcc #ffee00 #ffee00 #ffffcc; +} +span.logtags span.branchtag { + background-color: #aaffaa; + border-color: #ccffcc #00cc33 #00cc33 #ccffcc; +} +span.logtags span.inbranchtag { + background-color: #d5dde6; + border-color: #e3ecf4 #9398f4 #9398f4 #e3ecf4; +} + +/* Graph */ +div#wrapper { + position: relative; + margin: 0; + 
padding: 0; + margin-top: 3px; +} + +canvas { + position: absolute; + z-index: 5; + top: -0.9em; + margin: 0; +} + +ul#nodebgs { + list-style: none inside none; + padding: 0; + margin: 0; + top: -0.7em; +} + +ul#graphnodes li, ul#nodebgs li { + height: 39px; +} + +ul#graphnodes { + position: absolute; + z-index: 10; + top: -0.8em; + list-style: none inside none; + padding: 0; +} + +ul#graphnodes li .info { + display: block; + font-size: 100%; + position: relative; + top: -3px; + font-style: italic; +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-monoblue.css b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-monoblue.css new file mode 100644 index 0000000..9bea29d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-monoblue.css @@ -0,0 +1,475 @@ +/*** Initial Settings ***/ +* { + margin: 0; + padding: 0; + font-weight: normal; + font-style: normal; +} + +html { + font-size: 100%; + font-family: sans-serif; +} + +body { + font-size: 77%; + margin: 15px 50px; + background: #4B4B4C; +} + +a { + color:#0000cc; + text-decoration: none; +} +/*** end of Initial Settings ***/ + + +/** common settings **/ +div#container { + background: #FFFFFF; + position: relative; + color: #666; +} + +div.page-header { + padding: 50px 20px 0; + background: #006699 top left repeat-x; + position: relative; +} + div.page-header h1 { + margin: 10px 0 30px; + font-size: 1.8em; + font-weight: bold; + font-family: osaka,'MS P Gothic', Georgia, serif; + letter-spacing: 1px; + color: #DDD; + } + div.page-header h1 a { + font-weight: bold; + color: #FFF; + } + div.page-header a { + text-decoration: none; + } + + div.page-header form { + position: absolute; + margin-bottom: 2px; + bottom: 0; + right: 20px; + } + div.page-header form label { + color: #DDD; + } + div.page-header form input { + padding: 2px; + border: solid 1px #DDD; + } + div.page-header form dl { + overflow: hidden; + 
} + div.page-header form dl dt { + font-size: 1.2em; + } + div.page-header form dl dt, + div.page-header form dl dd { + margin: 0 0 0 5px; + float: left; + height: 24px; + line-height: 20px; + } + + ul.page-nav { + margin: 10px 0 0 0; + list-style-type: none; + overflow: hidden; + width: 900px; + } + ul.page-nav li { + margin: 0 2px 0 0; + float: left; + width: 80px; + height: 24px; + font-size: 1.1em; + line-height: 24px; + text-align: center; + } + ul.page-nav li.current { + background: #FFF; + } + ul.page-nav li a { + height: 24px; + color: #666; + background: #DDD; + display: block; + text-decoration: none; + } + ul.page-nav li a:hover { + color:#333; + background: #FFF; + } + +ul.submenu { + margin: 10px 0 -10px 20px; + list-style-type: none; +} +ul.submenu li { + margin: 0 10px 0 0; + font-size: 1.2em; + display: inline; +} + +h2 { + margin: 20px 0 10px; + height: 30px; + line-height: 30px; + text-indent: 20px; + background: #FFF; + font-size: 1.2em; + border-top: dotted 1px #D5E1E6; + font-weight: bold; +} +h2.no-link { + color:#006699; +} +h2.no-border { + color: #FFF; + background: #006699; + border: 0; +} +h2 a { + font-weight:bold; + color:#006699; +} + +div.page-path { + text-align: right; + padding: 20px 30px 10px 0; + border:solid #d9d8d1; + border-width:0px 0px 1px; + font-size: 1.2em; +} + +div.page-footer { + margin: 50px 0 0; + position: relative; +} + div.page-footer p { + position: relative; + left: 20px; + bottom: 5px; + font-size: 1.2em; + } + + ul.rss-logo { + position: absolute; + top: -10px; + right: 20px; + height: 20px; + list-style-type: none; + } + ul.rss-logo li { + display: inline; + } + ul.rss-logo li a { + padding: 3px 6px; + line-height: 10px; + border:1px solid; + border-color:#fcc7a5 #7d3302 #3e1a01 #ff954e; + color:#ffffff; + background-color:#ff6600; + font-weight:bold; + font-family:sans-serif; + font-size:10px; + text-align:center; + text-decoration:none; + } + div.rss-logo li a:hover { + background-color:#ee5500; + } + 
+p.normal { + margin: 20px 0 20px 30px; + font-size: 1.2em; +} + +table { + margin: 10px 0 0 20px; + width: 95%; + border-collapse: collapse; +} +table tr td { + font-size: 1.1em; +} +table tr td.nowrap { + white-space: nowrap; +} +table tr td.closed { + background-color: #99f; +} +/* +table tr.parity0:hover, +table tr.parity1:hover { + background: #D5E1E6; +} +*/ +table tr.parity0 { + background: #F1F6F7; +} +table tr.parity1 { + background: #FFFFFF; +} +table tr td { + padding: 5px 5px; +} +table.annotated tr td { + padding: 0px 5px; +} + +span.logtags span { + padding: 2px 6px; + font-weight: normal; + font-size: 11px; + border: 1px solid; + background-color: #ffaaff; + border-color: #ffccff #ff00ee #ff00ee #ffccff; +} +span.logtags span.tagtag { + background-color: #ffffaa; + border-color: #ffffcc #ffee00 #ffee00 #ffffcc; +} +span.logtags span.branchtag { + background-color: #aaffaa; + border-color: #ccffcc #00cc33 #00cc33 #ccffcc; +} +span.logtags span.inbranchtag { + background-color: #d5dde6; + border-color: #e3ecf4 #9398f4 #9398f4 #e3ecf4; +} + +div.diff pre { + margin: 10px 0 0 0; +} +div.diff pre span { + font-family: monospace; + white-space: pre; + font-size: 1.2em; + padding: 3px 0; +} +td.source { + white-space: pre; + font-family: monospace; + margin: 10px 30px 0; + font-size: 1.2em; + font-family: monospace; +} + div.source div.parity0, + div.source div.parity1 { + padding: 1px; + font-size: 1.2em; + } + div.source div.parity0 { + background: #F1F6F7; + } + div.source div.parity1 { + background: #FFFFFF; + } +div.parity0:hover, +div.parity1:hover { + background: #D5E1E6; +} +.linenr { + color: #999; + text-align: right; +} +.lineno { + text-align: right; +} +.lineno a { + color: #999; +} +td.linenr { + width: 60px; +} + +div#powered-by { + position: absolute; + width: 75px; + top: 15px; + right: 20px; + font-size: 1.2em; +} +div#powered-by a { + color: #EEE; + text-decoration: none; +} +div#powered-by a:hover { + text-decoration: underline; +} +/* 
+div#monoblue-corner-top-left { + position: absolute; + top: 0; + left: 0; + width: 10px; + height: 10px; + background: url(./monoblue-corner.png) top left no-repeat !important; + background: none; +} +div#monoblue-corner-top-right { + position: absolute; + top: 0; + right: 0; + width: 10px; + height: 10px; + background: url(./monoblue-corner.png) top right no-repeat !important; + background: none; +} +div#monoblue-corner-bottom-left { + position: absolute; + bottom: 0; + left: 0; + width: 10px; + height: 10px; + background: url(./monoblue-corner.png) bottom left no-repeat !important; + background: none; +} +div#monoblue-corner-bottom-right { + position: absolute; + bottom: 0; + right: 0; + width: 10px; + height: 10px; + background: url(./monoblue-corner.png) bottom right no-repeat !important; + background: none; +} +*/ +/** end of common settings **/ + +/** summary **/ +dl.overview { + margin: 0 0 0 30px; + font-size: 1.1em; + overflow: hidden; +} + dl.overview dt, + dl.overview dd { + margin: 5px 0; + float: left; + } + dl.overview dt { + clear: left; + font-weight: bold; + width: 150px; + } +/** end of summary **/ + +/** chagelog **/ +h3.changelog { + margin: 20px 0 5px 30px; + padding: 0 0 2px; + font-size: 1.4em; + border-bottom: dotted 1px #D5E1E6; +} +ul.changelog-entry { + margin: 0 0 10px 30px; + list-style-type: none; + position: relative; +} +ul.changelog-entry li span.revdate { + font-size: 1.1em; +} +ul.changelog-entry li.age { + position: absolute; + top: -25px; + right: 10px; + font-size: 1.4em; + color: #CCC; + font-weight: bold; + font-style: italic; +} +ul.changelog-entry li span.name { + font-size: 1.2em; + font-weight: bold; +} +ul.changelog-entry li.description { + margin: 10px 0 0; + font-size: 1.1em; +} +/** end of changelog **/ + +/** file **/ +p.files { + margin: 0 0 0 20px; + font-size: 2.0em; + font-weight: bold; +} +/** end of file **/ + +/** changeset **/ +h3.changeset { + margin: 20px 0 5px 20px; + padding: 0 0 2px; + font-size: 1.6em; 
+ border-bottom: dotted 1px #D5E1E6; +} +p.changeset-age { + position: relative; +} +p.changeset-age span { + position: absolute; + top: -25px; + right: 10px; + font-size: 1.4em; + color: #CCC; + font-weight: bold; + font-style: italic; +} +p.description { + margin: 10px 30px 0 30px; + padding: 10px; + border: solid 1px #CCC; + font-size: 1.2em; +} +/** end of changeset **/ + +/** canvas **/ +div#wrapper { + position: relative; + font-size: 1.2em; +} + +canvas { + position: absolute; + z-index: 5; + top: -0.7em; +} + +ul#nodebgs li.parity0 { + background: #F1F6F7; +} + +ul#nodebgs li.parity1 { + background: #FFFFFF; +} + +ul#graphnodes { + position: absolute; + z-index: 10; + top: 7px; + list-style: none inside none; +} + +ul#nodebgs { + list-style: none inside none; +} + +ul#graphnodes li, ul#nodebgs li { + height: 39px; +} + +ul#graphnodes li .info { + display: block; + position: relative; +} +/** end of canvas **/ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-paper.css b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-paper.css new file mode 100644 index 0000000..c353656 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style-paper.css @@ -0,0 +1,254 @@ +body { + margin: 0; + padding: 0; + background: white; + font-family: sans-serif; +} + +.container { + padding-left: 115px; +} + +.main { + position: relative; + background: white; + padding: 2em 2em 2em 0; +} + +#.main { + width: 98%; +} + +.overflow { + width: 100%; + overflow: auto; +} + +.menu { + width: 90px; + margin: 0; + font-size: 80%; + text-align: left; + position: absolute; + top: 20px; + left: 20px; + right: auto; +} + +.menu ul { + list-style: none; + padding: 0; + margin: 10px 0 0 0; + border-left: 2px solid #999; +} + +.menu li { + margin-bottom: 3px; + padding: 2px 4px; + background: white; + color: black; + font-weight: normal; +} + +.menu li.active { + font-weight: bold; +} + 
+.menu img { + width: 75px; + height: 90px; + border: 0; +} + +.menu a { color: black; display: block; } + +.search { + position: absolute; + top: .7em; + right: 2em; +} + +form.search div#hint { + display: none; + position: absolute; + top: 40px; + right: 0px; + width: 190px; + padding: 5px; + background: #ffc; + font-size: 70%; + border: 1px solid yellow; + -moz-border-radius: 5px; /* this works only in camino/firefox */ + -webkit-border-radius: 5px; /* this is just for Safari */ +} + +form.search:hover div#hint { display: block; } + +a { text-decoration:none; } +.age { white-space:nowrap; } +.date { white-space:nowrap; } +.indexlinks { white-space:nowrap; } +.parity0 { background-color: #f0f0f0; } +.parity1 { background-color: white; } +.plusline { color: green; } +.minusline { color: #dc143c; } /* crimson */ +.atline { color: purple; } + +.navigate { + text-align: right; + font-size: 60%; + margin: 1em 0; +} + +.tag { + color: #999; + font-size: 70%; + font-weight: normal; + margin-left: .5em; + vertical-align: baseline; +} + +.branchhead { + color: #000; + font-size: 80%; + font-weight: normal; + margin-left: .5em; + vertical-align: baseline; +} + +ul#graphnodes .branchhead { + font-size: 75%; +} + +.branchname { + color: #000; + font-size: 60%; + font-weight: normal; + margin-left: .5em; + vertical-align: baseline; +} + +h3 .branchname { + font-size: 80%; +} + +/* Common */ +pre { margin: 0; } + +h2 { font-size: 120%; border-bottom: 1px solid #999; } +h2 a { color: #000; } +h3 { + margin-top: -.7em; + font-size: 100%; +} + +/* log and tags tables */ +.bigtable { + border-bottom: 1px solid #999; + border-collapse: collapse; + font-size: 90%; + width: 100%; + font-weight: normal; + text-align: left; +} + +.bigtable td { + vertical-align: top; +} + +.bigtable th { + padding: 1px 4px; + border-bottom: 1px solid #999; +} +.bigtable tr { border: none; } +.bigtable .age { width: 7em; } +.bigtable .author { width: 12em; } +.bigtable .description { } +.bigtable .node 
{ width: 5em; font-family: monospace;} +.bigtable .permissions { width: 8em; text-align: left;} +.bigtable .size { width: 5em; text-align: right; } +.bigtable .annotate { text-align: right; } +.bigtable td.annotate { font-size: smaller; } +.bigtable td.source { font-size: inherit; } + +.source, .sourcefirst, .sourcelast { + font-family: monospace; + white-space: pre; + padding: 1px 4px; + font-size: 90%; +} +.sourcefirst { border-bottom: 1px solid #999; font-weight: bold; } +.sourcelast { border-top: 1px solid #999; } +.source a { color: #999; font-size: smaller; font-family: monospace;} +.bottomline { border-bottom: 1px solid #999; } + +.fileline { font-family: monospace; } +.fileline img { border: 0; } + +.tagEntry .closed { color: #99f; } + +/* Changeset entry */ +#changesetEntry { + border-collapse: collapse; + font-size: 90%; + width: 100%; + margin-bottom: 1em; +} + +#changesetEntry th { + padding: 1px 4px; + width: 4em; + text-align: right; + font-weight: normal; + color: #999; + margin-right: .5em; + vertical-align: top; +} + +div.description { + border-left: 2px solid #999; + margin: 1em 0 1em 0; + padding: .3em; +} + +/* Graph */ +div#wrapper { + position: relative; + border-top: 1px solid black; + border-bottom: 1px solid black; + margin: 0; + padding: 0; +} + +canvas { + position: absolute; + z-index: 5; + top: -0.7em; + margin: 0; +} + +ul#graphnodes { + position: absolute; + z-index: 10; + top: -1.0em; + list-style: none inside none; + padding: 0; +} + +ul#nodebgs { + list-style: none inside none; + padding: 0; + margin: 0; + top: -0.7em; +} + +ul#graphnodes li, ul#nodebgs li { + height: 39px; +} + +ul#graphnodes li .info { + display: block; + font-size: 70%; + position: relative; + top: -3px; +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style.css b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style.css new file mode 100644 index 0000000..66bd96d --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/static/style.css @@ -0,0 +1,105 @@ +a { text-decoration:none; } +.age { white-space:nowrap; } +.date { white-space:nowrap; } +.indexlinks { white-space:nowrap; } +.parity0 { background-color: #ddd; } +.parity1 { background-color: #eee; } +.lineno { width: 60px; color: #aaa; font-size: smaller; + text-align: right; } +.plusline { color: green; } +.minusline { color: red; } +.atline { color: purple; } +.annotate { font-size: smaller; text-align: right; padding-right: 1em; } +.buttons a { + background-color: #666; + padding: 2pt; + color: white; + font-family: sans; + font-weight: bold; +} +.navigate a { + background-color: #ccc; + padding: 2pt; + font-family: sans; + color: black; +} + +.metatag { + background-color: #888; + color: white; + text-align: right; +} + +/* Common */ +pre { margin: 0; } + +.logo { + float: right; + clear: right; +} + +/* Changelog/Filelog entries */ +.logEntry { width: 100%; } +.logEntry .age { width: 15%; } +.logEntry th { font-weight: normal; text-align: right; vertical-align: top; } +.logEntry th.age, .logEntry th.firstline { font-weight: bold; } +.logEntry th.firstline { text-align: left; width: inherit; } + +/* Shortlog entries */ +.slogEntry { width: 100%; } +.slogEntry .age { width: 8em; } +.slogEntry td { font-weight: normal; text-align: left; vertical-align: top; } +.slogEntry td.author { width: 15em; } + +/* Tag entries */ +#tagEntries { list-style: none; margin: 0; padding: 0; } +#tagEntries .tagEntry { list-style: none; margin: 0; padding: 0; } + +/* Changeset entry */ +#changesetEntry { } +#changesetEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } +#changesetEntry th.files, #changesetEntry th.description { vertical-align: top; } + +/* File diff view */ +#filediffEntry { } +#filediffEntry th { font-weight: normal; background-color: #888; color: #fff; text-align: right; } + +/* Graph */ +div#wrapper { + position: relative; 
+ margin: 0; + padding: 0; +} + +canvas { + position: absolute; + z-index: 5; + top: -0.6em; + margin: 0; +} + +ul#nodebgs { + list-style: none inside none; + padding: 0; + margin: 0; + top: -0.7em; +} + +ul#graphnodes li, ul#nodebgs li { + height: 39px; +} + +ul#graphnodes { + position: absolute; + z-index: 10; + top: -0.85em; + list-style: none inside none; + padding: 0; +} + +ul#graphnodes li .info { + display: block; + font-size: 70%; + position: relative; + top: -1px; +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/template-vars.txt b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/template-vars.txt new file mode 100644 index 0000000..28f7bc0 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/templates/template-vars.txt @@ -0,0 +1,41 @@ +repo the name of the repo +rev a changeset.manifest revision +node a changeset node +changesets total number of changesets +file a filename +filerev a file revision +filerevs total number of file revisions +up the directory of the relevant file +path a path in the manifest, starting with "/" +basename a short pathname +date a date string +age age in hours, days, etc +line a line of text (escaped) +desc a description (escaped, with breaks) +shortdesc a short description (escaped) +author a name or email addressv(obfuscated) +parent a list of the parent +child a list of the children +tags a list of tag + +header the global page header +footer the global page footer + +files a list of file links +file_copies a list of pairs of name, source filenames +dirs a set of directory links +diff a diff of one or more files +annotate an annotated file +entries the entries relevant to the page + +url base url of hgweb interface +staticurl base url for static resources + + +Templates and commands: + changelog(rev) - a page for browsing changesets + naventry - a link for jumping to a changeset number + filenodelink - jump to file diff + fileellipses - printed after maxfiles 
+ changelogentry - an entry in the log + manifest - browse a manifest as a directory tree diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/transaction.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/transaction.py new file mode 100644 index 0000000..5bc2d1f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/transaction.py @@ -0,0 +1,176 @@ +# transaction.py - simple journalling scheme for mercurial +# +# This transaction scheme is intended to gracefully handle program +# errors and interruptions. More serious failures like system crashes +# can be recovered with an fsck-like tool. As the whole repository is +# effectively log-structured, this should amount to simply truncating +# anything that isn't referenced in the changelog. +# +# Copyright 2005, 2006 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from i18n import _ +import os, errno +import error + +def active(func): + def _active(self, *args, **kwds): + if self.count == 0: + raise error.Abort(_( + 'cannot use transaction when it is already committed/aborted')) + return func(self, *args, **kwds) + return _active + +def _playback(journal, report, opener, entries, unlink=True): + for f, o, ignore in entries: + if o or not unlink: + try: + opener(f, 'a').truncate(o) + except IOError: + report(_("failed to truncate %s\n") % f) + raise + else: + try: + fn = opener(f).name + os.unlink(fn) + except (IOError, OSError), inst: + if inst.errno != errno.ENOENT: + raise + os.unlink(journal) + +class transaction(object): + def __init__(self, report, opener, journal, after=None, createmode=None): + self.count = 1 + self.usages = 1 + self.report = report + self.opener = opener + self.after = after + self.entries = [] + self.map = {} + self.journal = journal + self._queue = [] + + self.file = open(self.journal, "w") + if createmode is not None: + 
os.chmod(self.journal, createmode & 0666) + + def __del__(self): + if self.journal: + self._abort() + + @active + def startgroup(self): + self._queue.append([]) + + @active + def endgroup(self): + q = self._queue.pop() + d = ''.join(['%s\0%d\n' % (x[0], x[1]) for x in q]) + self.entries.extend(q) + self.file.write(d) + self.file.flush() + + @active + def add(self, file, offset, data=None): + if file in self.map: + return + if self._queue: + self._queue[-1].append((file, offset, data)) + return + + self.entries.append((file, offset, data)) + self.map[file] = len(self.entries) - 1 + # add enough data to the journal to do the truncate + self.file.write("%s\0%d\n" % (file, offset)) + self.file.flush() + + @active + def find(self, file): + if file in self.map: + return self.entries[self.map[file]] + return None + + @active + def replace(self, file, offset, data=None): + ''' + replace can only replace already committed entries + that are not pending in the queue + ''' + + if file not in self.map: + raise KeyError(file) + index = self.map[file] + self.entries[index] = (file, offset, data) + self.file.write("%s\0%d\n" % (file, offset)) + self.file.flush() + + @active + def nest(self): + self.count += 1 + self.usages += 1 + return self + + def release(self): + if self.count > 0: + self.usages -= 1 + # if the transaction scopes are left without being closed, fail + if self.count > 0 and self.usages == 0: + self._abort() + + def running(self): + return self.count > 0 + + @active + def close(self): + '''commit the transaction''' + self.count -= 1 + if self.count != 0: + return + self.file.close() + self.entries = [] + if self.after: + self.after() + if os.path.isfile(self.journal): + os.unlink(self.journal) + self.journal = None + + @active + def abort(self): + '''abort the transaction (generally called on error, or when the + transaction is not explicitly committed before going out of + scope)''' + self._abort() + + def _abort(self): + self.count = 0 + self.usages = 0 + 
self.file.close() + + try: + if not self.entries: + if self.journal: + os.unlink(self.journal) + return + + self.report(_("transaction abort!\n")) + + try: + _playback(self.journal, self.report, self.opener, + self.entries, False) + self.report(_("rollback completed\n")) + except: + self.report(_("rollback failed - please run hg recover\n")) + finally: + self.journal = None + + +def rollback(opener, file, report): + entries = [] + + for l in open(file).readlines(): + f, o = l.split('\0') + entries.append((f, int(o), None)) + + _playback(file, report, opener, entries) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/transaction.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/transaction.pyo Binary files differnew file mode 100644 index 0000000..bdcb0af --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/transaction.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ui.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ui.py new file mode 100644 index 0000000..cd43fc7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ui.py @@ -0,0 +1,626 @@ +# ui.py - user interface bits for mercurial +# +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import errno, getpass, os, socket, sys, tempfile, traceback +import config, util, error + +class ui(object): + def __init__(self, src=None): + self._buffers = [] + self.quiet = self.verbose = self.debugflag = self.tracebackflag = False + self._reportuntrusted = True + self._ocfg = config.config() # overlay + self._tcfg = config.config() # trusted + self._ucfg = config.config() # untrusted + self._trustusers = set() + self._trustgroups = set() + + if src: + self._tcfg = src._tcfg.copy() + self._ucfg = src._ucfg.copy() + self._ocfg = src._ocfg.copy() + self._trustusers = src._trustusers.copy() + self._trustgroups = src._trustgroups.copy() + self.environ = src.environ + self.fixconfig() + else: + # shared read-only environment + self.environ = os.environ + # we always trust global config files + for f in util.rcpath(): + self.readconfig(f, trust=True) + + def copy(self): + return self.__class__(self) + + def _is_trusted(self, fp, f): + st = util.fstat(fp) + if util.isowner(st): + return True + + tusers, tgroups = self._trustusers, self._trustgroups + if '*' in tusers or '*' in tgroups: + return True + + user = util.username(st.st_uid) + group = util.groupname(st.st_gid) + if user in tusers or group in tgroups or user == util.username(): + return True + + if self._reportuntrusted: + self.warn(_('Not trusting file %s from untrusted ' + 'user %s, group %s\n') % (f, user, group)) + return False + + def readconfig(self, filename, root=None, trust=False, + sections=None, remap=None): + try: + fp = open(filename) + except IOError: + if not sections: # ignore unless we were looking for something + return + raise + + cfg = config.config() + trusted = sections or trust or self._is_trusted(fp, filename) + + try: + cfg.read(filename, fp, sections=sections, remap=remap) + except error.ConfigError, inst: + if trusted: + raise + self.warn(_("Ignored: %s\n") % str(inst)) + + if self.plain(): + for k in ('debug', 'fallbackencoding', 'quiet', 'slash', + 
'logtemplate', 'style', + 'traceback', 'verbose'): + if k in cfg['ui']: + del cfg['ui'][k] + for k, v in cfg.items('alias'): + del cfg['alias'][k] + for k, v in cfg.items('defaults'): + del cfg['defaults'][k] + + if trusted: + self._tcfg.update(cfg) + self._tcfg.update(self._ocfg) + self._ucfg.update(cfg) + self._ucfg.update(self._ocfg) + + if root is None: + root = os.path.expanduser('~') + self.fixconfig(root=root) + + def fixconfig(self, root=None, section=None): + if section in (None, 'paths'): + # expand vars and ~ + # translate paths relative to root (or home) into absolute paths + root = root or os.getcwd() + for c in self._tcfg, self._ucfg, self._ocfg: + for n, p in c.items('paths'): + if not p: + continue + if '%%' in p: + self.warn(_("(deprecated '%%' in path %s=%s from %s)\n") + % (n, p, self.configsource('paths', n))) + p = p.replace('%%', '%') + p = util.expandpath(p) + if '://' not in p and not os.path.isabs(p): + p = os.path.normpath(os.path.join(root, p)) + c.set("paths", n, p) + + if section in (None, 'ui'): + # update ui options + self.debugflag = self.configbool('ui', 'debug') + self.verbose = self.debugflag or self.configbool('ui', 'verbose') + self.quiet = not self.debugflag and self.configbool('ui', 'quiet') + if self.verbose and self.quiet: + self.quiet = self.verbose = False + self._reportuntrusted = self.configbool("ui", "report_untrusted", + True) + self.tracebackflag = self.configbool('ui', 'traceback', False) + + if section in (None, 'trusted'): + # update trust information + self._trustusers.update(self.configlist('trusted', 'users')) + self._trustgroups.update(self.configlist('trusted', 'groups')) + + def setconfig(self, section, name, value, overlay=True): + if overlay: + self._ocfg.set(section, name, value) + self._tcfg.set(section, name, value) + self._ucfg.set(section, name, value) + self.fixconfig(section=section) + + def _data(self, untrusted): + return untrusted and self._ucfg or self._tcfg + + def configsource(self, section, 
name, untrusted=False): + return self._data(untrusted).source(section, name) or 'none' + + def config(self, section, name, default=None, untrusted=False): + value = self._data(untrusted).get(section, name, default) + if self.debugflag and not untrusted and self._reportuntrusted: + uvalue = self._ucfg.get(section, name) + if uvalue is not None and uvalue != value: + self.debug(_("ignoring untrusted configuration option " + "%s.%s = %s\n") % (section, name, uvalue)) + return value + + def configbool(self, section, name, default=False, untrusted=False): + v = self.config(section, name, None, untrusted) + if v is None: + return default + if isinstance(v, bool): + return v + b = util.parsebool(v) + if b is None: + raise error.ConfigError(_("%s.%s not a boolean ('%s')") + % (section, name, v)) + return b + + def configlist(self, section, name, default=None, untrusted=False): + """Return a list of comma/space separated strings""" + + def _parse_plain(parts, s, offset): + whitespace = False + while offset < len(s) and (s[offset].isspace() or s[offset] == ','): + whitespace = True + offset += 1 + if offset >= len(s): + return None, parts, offset + if whitespace: + parts.append('') + if s[offset] == '"' and not parts[-1]: + return _parse_quote, parts, offset + 1 + elif s[offset] == '"' and parts[-1][-1] == '\\': + parts[-1] = parts[-1][:-1] + s[offset] + return _parse_plain, parts, offset + 1 + parts[-1] += s[offset] + return _parse_plain, parts, offset + 1 + + def _parse_quote(parts, s, offset): + if offset < len(s) and s[offset] == '"': # "" + parts.append('') + offset += 1 + while offset < len(s) and (s[offset].isspace() or + s[offset] == ','): + offset += 1 + return _parse_plain, parts, offset + + while offset < len(s) and s[offset] != '"': + if (s[offset] == '\\' and offset + 1 < len(s) + and s[offset + 1] == '"'): + offset += 1 + parts[-1] += '"' + else: + parts[-1] += s[offset] + offset += 1 + + if offset >= len(s): + real_parts = _configlist(parts[-1]) + if not 
real_parts: + parts[-1] = '"' + else: + real_parts[0] = '"' + real_parts[0] + parts = parts[:-1] + parts.extend(real_parts) + return None, parts, offset + + offset += 1 + while offset < len(s) and s[offset] in [' ', ',']: + offset += 1 + + if offset < len(s): + if offset + 1 == len(s) and s[offset] == '"': + parts[-1] += '"' + offset += 1 + else: + parts.append('') + else: + return None, parts, offset + + return _parse_plain, parts, offset + + def _configlist(s): + s = s.rstrip(' ,') + if not s: + return [] + parser, parts, offset = _parse_plain, [''], 0 + while parser: + parser, parts, offset = parser(parts, s, offset) + return parts + + result = self.config(section, name, untrusted=untrusted) + if result is None: + result = default or [] + if isinstance(result, basestring): + result = _configlist(result.lstrip(' ,\n')) + if result is None: + result = default or [] + return result + + def has_section(self, section, untrusted=False): + '''tell whether section exists in config.''' + return section in self._data(untrusted) + + def configitems(self, section, untrusted=False): + items = self._data(untrusted).items(section) + if self.debugflag and not untrusted and self._reportuntrusted: + for k, v in self._ucfg.items(section): + if self._tcfg.get(section, k) != v: + self.debug(_("ignoring untrusted configuration option " + "%s.%s = %s\n") % (section, k, v)) + return items + + def walkconfig(self, untrusted=False): + cfg = self._data(untrusted) + for section in cfg.sections(): + for name, value in self.configitems(section, untrusted): + yield section, name, str(value).replace('\n', '\\n') + + def plain(self): + '''is plain mode active? + + Plain mode means that all configuration variables which affect the + behavior and output of Mercurial should be ignored. Additionally, the + output should be stable, reproducible and suitable for use in scripts or + applications. + + The only way to trigger plain mode is by setting the `HGPLAIN' + environment variable. 
+ ''' + return 'HGPLAIN' in os.environ + + def username(self): + """Return default username to be used in commits. + + Searched in this order: $HGUSER, [ui] section of hgrcs, $EMAIL + and stop searching if one of these is set. + If not found and ui.askusername is True, ask the user, else use + ($LOGNAME or $USER or $LNAME or $USERNAME) + "@full.hostname". + """ + user = os.environ.get("HGUSER") + if user is None: + user = self.config("ui", "username") + if user is not None: + user = os.path.expandvars(user) + if user is None: + user = os.environ.get("EMAIL") + if user is None and self.configbool("ui", "askusername"): + user = self.prompt(_("enter a commit username:"), default=None) + if user is None and not self.interactive(): + try: + user = '%s@%s' % (util.getuser(), socket.getfqdn()) + self.warn(_("No username found, using '%s' instead\n") % user) + except KeyError: + pass + if not user: + raise util.Abort(_('no username supplied (see "hg help config")')) + if "\n" in user: + raise util.Abort(_("username %s contains a newline\n") % repr(user)) + return user + + def shortuser(self, user): + """Return a short representation of a user name or email address.""" + if not self.verbose: + user = util.shortuser(user) + return user + + def expandpath(self, loc, default=None): + """Return repository location relative to cwd or from [paths]""" + if "://" in loc or os.path.isdir(os.path.join(loc, '.hg')): + return loc + + path = self.config('paths', loc) + if not path and default is not None: + path = self.config('paths', default) + return path or loc + + def pushbuffer(self): + self._buffers.append([]) + + def popbuffer(self, labeled=False): + '''pop the last buffer and return the buffered output + + If labeled is True, any labels associated with buffered + output will be handled. By default, this has no effect + on the output returned, but extensions and GUI tools may + handle this argument and returned styled output. 
If output + is being buffered so it can be captured and parsed or + processed, labeled should not be set to True. + ''' + return "".join(self._buffers.pop()) + + def write(self, *args, **opts): + '''write args to output + + By default, this method simply writes to the buffer or stdout, + but extensions or GUI tools may override this method, + write_err(), popbuffer(), and label() to style output from + various parts of hg. + + An optional keyword argument, "label", can be passed in. + This should be a string containing label names separated by + space. Label names take the form of "topic.type". For example, + ui.debug() issues a label of "ui.debug". + + When labeling output for a specific command, a label of + "cmdname.type" is recommended. For example, status issues + a label of "status.modified" for modified files. + ''' + if self._buffers: + self._buffers[-1].extend([str(a) for a in args]) + else: + for a in args: + sys.stdout.write(str(a)) + + def write_err(self, *args, **opts): + try: + if not getattr(sys.stdout, 'closed', False): + sys.stdout.flush() + for a in args: + sys.stderr.write(str(a)) + # stderr may be buffered under win32 when redirected to files, + # including stdout. + if not getattr(sys.stderr, 'closed', False): + sys.stderr.flush() + except IOError, inst: + if inst.errno not in (errno.EPIPE, errno.EIO): + raise + + def flush(self): + try: sys.stdout.flush() + except: pass + try: sys.stderr.flush() + except: pass + + def interactive(self): + '''is interactive input allowed? + + An interactive session is a session where input can be reasonably read + from `sys.stdin'. If this function returns false, any attempt to read + from stdin should fail with an error, unless a sensible default has been + specified. + + Interactiveness is triggered by the value of the `ui.interactive' + configuration variable or - if it is unset - when `sys.stdin' points + to a terminal device. + + This function refers to input only; for output, see `ui.formatted()'. 
+ ''' + i = self.configbool("ui", "interactive", None) + if i is None: + try: + return sys.stdin.isatty() + except AttributeError: + # some environments replace stdin without implementing isatty + # usually those are non-interactive + return False + + return i + + def termwidth(self): + '''how wide is the terminal in columns? + ''' + if 'COLUMNS' in os.environ: + try: + return int(os.environ['COLUMNS']) + except ValueError: + pass + return util.termwidth() + + def formatted(self): + '''should formatted output be used? + + It is often desirable to format the output to suite the output medium. + Examples of this are truncating long lines or colorizing messages. + However, this is not often not desirable when piping output into other + utilities, e.g. `grep'. + + Formatted output is triggered by the value of the `ui.formatted' + configuration variable or - if it is unset - when `sys.stdout' points + to a terminal device. Please note that `ui.formatted' should be + considered an implementation detail; it is not intended for use outside + Mercurial or its extensions. + + This function refers to output only; for input, see `ui.interactive()'. + This function always returns false when in plain mode, see `ui.plain()'. 
+ ''' + if self.plain(): + return False + + i = self.configbool("ui", "formatted", None) + if i is None: + try: + return sys.stdout.isatty() + except AttributeError: + # some environments replace stdout without implementing isatty + # usually those are non-interactive + return False + + return i + + def _readline(self, prompt=''): + if sys.stdin.isatty(): + try: + # magically add command line editing support, where + # available + import readline + # force demandimport to really load the module + readline.read_history_file + # windows sometimes raises something other than ImportError + except Exception: + pass + line = raw_input(prompt) + # When stdin is in binary mode on Windows, it can cause + # raw_input() to emit an extra trailing carriage return + if os.linesep == '\r\n' and line and line[-1] == '\r': + line = line[:-1] + return line + + def prompt(self, msg, default="y"): + """Prompt user with msg, read response. + If ui is not interactive, the default is returned. + """ + if not self.interactive(): + self.write(msg, ' ', default, "\n") + return default + try: + r = self._readline(msg + ' ') + if not r: + return default + return r + except EOFError: + raise util.Abort(_('response expected')) + + def promptchoice(self, msg, choices, default=0): + """Prompt user with msg, read response, and ensure it matches + one of the provided choices. The index of the choice is returned. + choices is a sequence of acceptable responses with the format: + ('&None', 'E&xec', 'Sym&link') Responses are case insensitive. + If ui is not interactive, the default is returned. 
+ """ + resps = [s[s.index('&')+1].lower() for s in choices] + while True: + r = self.prompt(msg, resps[default]) + if r.lower() in resps: + return resps.index(r.lower()) + self.write(_("unrecognized response\n")) + + def getpass(self, prompt=None, default=None): + if not self.interactive(): + return default + try: + return getpass.getpass(prompt or _('password: ')) + except EOFError: + raise util.Abort(_('response expected')) + def status(self, *msg, **opts): + '''write status message to output (if ui.quiet is False) + + This adds an output label of "ui.status". + ''' + if not self.quiet: + opts['label'] = opts.get('label', '') + ' ui.status' + self.write(*msg, **opts) + def warn(self, *msg, **opts): + '''write warning message to output (stderr) + + This adds an output label of "ui.warning". + ''' + opts['label'] = opts.get('label', '') + ' ui.warning' + self.write_err(*msg, **opts) + def note(self, *msg, **opts): + '''write note to output (if ui.verbose is True) + + This adds an output label of "ui.note". + ''' + if self.verbose: + opts['label'] = opts.get('label', '') + ' ui.note' + self.write(*msg, **opts) + def debug(self, *msg, **opts): + '''write debug message to output (if ui.debugflag is True) + + This adds an output label of "ui.debug". + ''' + if self.debugflag: + opts['label'] = opts.get('label', '') + ' ui.debug' + self.write(*msg, **opts) + def edit(self, text, user): + (fd, name) = tempfile.mkstemp(prefix="hg-editor-", suffix=".txt", + text=True) + try: + f = os.fdopen(fd, "w") + f.write(text) + f.close() + + editor = self.geteditor() + + util.system("%s \"%s\"" % (editor, name), + environ={'HGUSER': user}, + onerr=util.Abort, errprefix=_("edit failed")) + + f = open(name) + t = f.read() + f.close() + finally: + os.unlink(name) + + return t + + def traceback(self, exc=None): + '''print exception traceback if traceback printing enabled. + only to call in exception handler. 
returns true if traceback + printed.''' + if self.tracebackflag: + if exc: + traceback.print_exception(exc[0], exc[1], exc[2]) + else: + traceback.print_exc() + return self.tracebackflag + + def geteditor(self): + '''return editor to use''' + return (os.environ.get("HGEDITOR") or + self.config("ui", "editor") or + os.environ.get("VISUAL") or + os.environ.get("EDITOR", "vi")) + + def progress(self, topic, pos, item="", unit="", total=None): + '''show a progress message + + With stock hg, this is simply a debug message that is hidden + by default, but with extensions or GUI tools it may be + visible. 'topic' is the current operation, 'item' is a + non-numeric marker of the current position (ie the currently + in-process file), 'pos' is the current numeric position (ie + revision, bytes, etc.), unit is a corresponding unit label, + and total is the highest expected pos. + + Multiple nested topics may be active at a time. + + All topics should be marked closed by setting pos to None at + termination. + ''' + + if pos == None or not self.debugflag: + return + + if unit: + unit = ' ' + unit + if item: + item = ' ' + item + + if total: + pct = 100.0 * pos / total + self.debug('%s:%s %s/%s%s (%4.2f%%)\n' + % (topic, item, pos, total, unit, pct)) + else: + self.debug('%s:%s %s%s\n' % (topic, item, pos, unit)) + + def log(self, service, message): + '''hook for logging facility extensions + + service should be a readily-identifiable subsystem, which will + allow filtering. + message should be a newline-terminated string to log. + ''' + pass + + def label(self, msg, label): + '''style msg based on supplied label + + Like ui.write(), this just returns msg unchanged, but extensions + and GUI tools can override it to allow styling output without + writing it. + + ui.write(s, 'label') is equivalent to + ui.write(ui.label(s, 'label')). 
+ ''' + return msg diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ui.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ui.pyo Binary files differnew file mode 100644 index 0000000..6129008 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/ui.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/url.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/url.py new file mode 100644 index 0000000..cd5f2dc --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/url.py @@ -0,0 +1,701 @@ +# url.py - HTTP handling for mercurial +# +# Copyright 2005, 2006, 2007, 2008 Matt Mackall <mpm@selenic.com> +# Copyright 2006, 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br> +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import urllib, urllib2, urlparse, httplib, os, re, socket, cStringIO +import __builtin__ +from i18n import _ +import keepalive, util + +def _urlunparse(scheme, netloc, path, params, query, fragment, url): + '''Handle cases where urlunparse(urlparse(x://)) doesn't preserve the "//"''' + result = urlparse.urlunparse((scheme, netloc, path, params, query, fragment)) + if (scheme and + result.startswith(scheme + ':') and + not result.startswith(scheme + '://') and + url.startswith(scheme + '://') + ): + result = scheme + '://' + result[len(scheme + ':'):] + return result + +def hidepassword(url): + '''hide user credential in a url string''' + scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) + netloc = re.sub('([^:]*):([^@]*)@(.*)', r'\1:***@\3', netloc) + return _urlunparse(scheme, netloc, path, params, query, fragment, url) + +def removeauth(url): + '''remove all authentication information from a url string''' + scheme, netloc, path, params, query, fragment = urlparse.urlparse(url) + netloc = 
netloc[netloc.find('@')+1:] + return _urlunparse(scheme, netloc, path, params, query, fragment, url) + +def netlocsplit(netloc): + '''split [user[:passwd]@]host[:port] into 4-tuple.''' + + a = netloc.find('@') + if a == -1: + user, passwd = None, None + else: + userpass, netloc = netloc[:a], netloc[a + 1:] + c = userpass.find(':') + if c == -1: + user, passwd = urllib.unquote(userpass), None + else: + user = urllib.unquote(userpass[:c]) + passwd = urllib.unquote(userpass[c + 1:]) + c = netloc.find(':') + if c == -1: + host, port = netloc, None + else: + host, port = netloc[:c], netloc[c + 1:] + return host, port, user, passwd + +def netlocunsplit(host, port, user=None, passwd=None): + '''turn host, port, user, passwd into [user[:passwd]@]host[:port].''' + if port: + hostport = host + ':' + port + else: + hostport = host + if user: + quote = lambda s: urllib.quote(s, safe='') + if passwd: + userpass = quote(user) + ':' + quote(passwd) + else: + userpass = quote(user) + return userpass + '@' + hostport + return hostport + +_safe = ('abcdefghijklmnopqrstuvwxyz' + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' + '0123456789' '_.-/') +_safeset = None +_hex = None +def quotepath(path): + '''quote the path part of a URL + + This is similar to urllib.quote, but it also tries to avoid + quoting things twice (inspired by wget): + + >>> quotepath('abc def') + 'abc%20def' + >>> quotepath('abc%20def') + 'abc%20def' + >>> quotepath('abc%20 def') + 'abc%20%20def' + >>> quotepath('abc def%20') + 'abc%20def%20' + >>> quotepath('abc def%2') + 'abc%20def%252' + >>> quotepath('abc def%') + 'abc%20def%25' + ''' + global _safeset, _hex + if _safeset is None: + _safeset = set(_safe) + _hex = set('abcdefABCDEF0123456789') + l = list(path) + for i in xrange(len(l)): + c = l[i] + if (c == '%' and i + 2 < len(l) and + l[i + 1] in _hex and l[i + 2] in _hex): + pass + elif c not in _safeset: + l[i] = '%%%02X' % ord(c) + return ''.join(l) + +class passwordmgr(urllib2.HTTPPasswordMgrWithDefaultRealm): + def 
__init__(self, ui): + urllib2.HTTPPasswordMgrWithDefaultRealm.__init__(self) + self.ui = ui + + def find_user_password(self, realm, authuri): + authinfo = urllib2.HTTPPasswordMgrWithDefaultRealm.find_user_password( + self, realm, authuri) + user, passwd = authinfo + if user and passwd: + self._writedebug(user, passwd) + return (user, passwd) + + if not user: + auth = self.readauthtoken(authuri) + if auth: + user, passwd = auth.get('username'), auth.get('password') + if not user or not passwd: + if not self.ui.interactive(): + raise util.Abort(_('http authorization required')) + + self.ui.write(_("http authorization required\n")) + self.ui.write(_("realm: %s\n") % realm) + if user: + self.ui.write(_("user: %s\n") % user) + else: + user = self.ui.prompt(_("user:"), default=None) + + if not passwd: + passwd = self.ui.getpass() + + self.add_password(realm, authuri, user, passwd) + self._writedebug(user, passwd) + return (user, passwd) + + def _writedebug(self, user, passwd): + msg = _('http auth: user %s, password %s\n') + self.ui.debug(msg % (user, passwd and '*' * len(passwd) or 'not set')) + + def readauthtoken(self, uri): + # Read configuration + config = dict() + for key, val in self.ui.configitems('auth'): + if '.' 
not in key: + self.ui.warn(_("ignoring invalid [auth] key '%s'\n") % key) + continue + group, setting = key.split('.', 1) + gdict = config.setdefault(group, dict()) + if setting in ('username', 'cert', 'key'): + val = util.expandpath(val) + gdict[setting] = val + + # Find the best match + scheme, hostpath = uri.split('://', 1) + bestlen = 0 + bestauth = None + for auth in config.itervalues(): + prefix = auth.get('prefix') + if not prefix: + continue + p = prefix.split('://', 1) + if len(p) > 1: + schemes, prefix = [p[0]], p[1] + else: + schemes = (auth.get('schemes') or 'https').split() + if (prefix == '*' or hostpath.startswith(prefix)) and \ + len(prefix) > bestlen and scheme in schemes: + bestlen = len(prefix) + bestauth = auth + return bestauth + +class proxyhandler(urllib2.ProxyHandler): + def __init__(self, ui): + proxyurl = ui.config("http_proxy", "host") or os.getenv('http_proxy') + # XXX proxyauthinfo = None + + if proxyurl: + # proxy can be proper url or host[:port] + if not (proxyurl.startswith('http:') or + proxyurl.startswith('https:')): + proxyurl = 'http://' + proxyurl + '/' + snpqf = urlparse.urlsplit(proxyurl) + proxyscheme, proxynetloc, proxypath, proxyquery, proxyfrag = snpqf + hpup = netlocsplit(proxynetloc) + + proxyhost, proxyport, proxyuser, proxypasswd = hpup + if not proxyuser: + proxyuser = ui.config("http_proxy", "user") + proxypasswd = ui.config("http_proxy", "passwd") + + # see if we should use a proxy for this url + no_list = ["localhost", "127.0.0.1"] + no_list.extend([p.lower() for + p in ui.configlist("http_proxy", "no")]) + no_list.extend([p.strip().lower() for + p in os.getenv("no_proxy", '').split(',') + if p.strip()]) + # "http_proxy.always" config is for running tests on localhost + if ui.configbool("http_proxy", "always"): + self.no_list = [] + else: + self.no_list = no_list + + proxyurl = urlparse.urlunsplit(( + proxyscheme, netlocunsplit(proxyhost, proxyport, + proxyuser, proxypasswd or ''), + proxypath, proxyquery, 
proxyfrag)) + proxies = {'http': proxyurl, 'https': proxyurl} + ui.debug('proxying through http://%s:%s\n' % + (proxyhost, proxyport)) + else: + proxies = {} + + # urllib2 takes proxy values from the environment and those + # will take precedence if found, so drop them + for env in ["HTTP_PROXY", "http_proxy", "no_proxy"]: + try: + if env in os.environ: + del os.environ[env] + except OSError: + pass + + urllib2.ProxyHandler.__init__(self, proxies) + self.ui = ui + + def proxy_open(self, req, proxy, type_): + host = req.get_host().split(':')[0] + if host in self.no_list: + return None + + # work around a bug in Python < 2.4.2 + # (it leaves a "\n" at the end of Proxy-authorization headers) + baseclass = req.__class__ + class _request(baseclass): + def add_header(self, key, val): + if key.lower() == 'proxy-authorization': + val = val.strip() + return baseclass.add_header(self, key, val) + req.__class__ = _request + + return urllib2.ProxyHandler.proxy_open(self, req, proxy, type_) + +class httpsendfile(object): + """This is a wrapper around the objects returned by python's "open". + + Its purpose is to send file-like objects via HTTP and, to do so, it + defines a __len__ attribute to feed the Content-Length header. 
+ """ + + def __init__(self, *args, **kwargs): + # We can't just "self._data = open(*args, **kwargs)" here because there + # is an "open" function defined in this module that shadows the global + # one + self._data = __builtin__.open(*args, **kwargs) + self.read = self._data.read + self.seek = self._data.seek + self.close = self._data.close + self.write = self._data.write + + def __len__(self): + return os.fstat(self._data.fileno()).st_size + +def _gen_sendfile(connection): + def _sendfile(self, data): + # send a file + if isinstance(data, httpsendfile): + # if auth required, some data sent twice, so rewind here + data.seek(0) + for chunk in util.filechunkiter(data): + connection.send(self, chunk) + else: + connection.send(self, data) + return _sendfile + +has_https = hasattr(urllib2, 'HTTPSHandler') +if has_https: + try: + # avoid using deprecated/broken FakeSocket in python 2.6 + import ssl + _ssl_wrap_socket = ssl.wrap_socket + CERT_REQUIRED = ssl.CERT_REQUIRED + except ImportError: + CERT_REQUIRED = 2 + + def _ssl_wrap_socket(sock, key_file, cert_file, + cert_reqs=CERT_REQUIRED, ca_certs=None): + if ca_certs: + raise util.Abort(_( + 'certificate checking requires Python 2.6')) + + ssl = socket.ssl(sock, key_file, cert_file) + return httplib.FakeSocket(sock, ssl) + + try: + _create_connection = socket.create_connection + except AttributeError: + _GLOBAL_DEFAULT_TIMEOUT = object() + + def _create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT, + source_address=None): + # lifted from Python 2.6 + + msg = "getaddrinfo returns an empty list" + host, port = address + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + sock = None + try: + sock = socket.socket(af, socktype, proto) + if timeout is not _GLOBAL_DEFAULT_TIMEOUT: + sock.settimeout(timeout) + if source_address: + sock.bind(source_address) + sock.connect(sa) + return sock + + except socket.error, msg: + if sock is not None: + sock.close() + + 
raise socket.error, msg + +class httpconnection(keepalive.HTTPConnection): + # must be able to send big bundle as stream. + send = _gen_sendfile(keepalive.HTTPConnection) + + def connect(self): + if has_https and self.realhostport: # use CONNECT proxy + self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.sock.connect((self.host, self.port)) + if _generic_proxytunnel(self): + # we do not support client x509 certificates + self.sock = _ssl_wrap_socket(self.sock, None, None) + else: + keepalive.HTTPConnection.connect(self) + + def getresponse(self): + proxyres = getattr(self, 'proxyres', None) + if proxyres: + if proxyres.will_close: + self.close() + self.proxyres = None + return proxyres + return keepalive.HTTPConnection.getresponse(self) + +# general transaction handler to support different ways to handle +# HTTPS proxying before and after Python 2.6.3. +def _generic_start_transaction(handler, h, req): + if hasattr(req, '_tunnel_host') and req._tunnel_host: + tunnel_host = req._tunnel_host + if tunnel_host[:7] not in ['http://', 'https:/']: + tunnel_host = 'https://' + tunnel_host + new_tunnel = True + else: + tunnel_host = req.get_selector() + new_tunnel = False + + if new_tunnel or tunnel_host == req.get_full_url(): # has proxy + urlparts = urlparse.urlparse(tunnel_host) + if new_tunnel or urlparts[0] == 'https': # only use CONNECT for HTTPS + realhostport = urlparts[1] + if realhostport[-1] == ']' or ':' not in realhostport: + realhostport += ':443' + + h.realhostport = realhostport + h.headers = req.headers.copy() + h.headers.update(handler.parent.addheaders) + return + + h.realhostport = None + h.headers = None + +def _generic_proxytunnel(self): + proxyheaders = dict( + [(x, self.headers[x]) for x in self.headers + if x.lower().startswith('proxy-')]) + self._set_hostport(self.host, self.port) + self.send('CONNECT %s HTTP/1.0\r\n' % self.realhostport) + for header in proxyheaders.iteritems(): + self.send('%s: %s\r\n' % header) + 
self.send('\r\n') + + # majority of the following code is duplicated from + # httplib.HTTPConnection as there are no adequate places to + # override functions to provide the needed functionality + res = self.response_class(self.sock, + strict=self.strict, + method=self._method) + + while True: + version, status, reason = res._read_status() + if status != httplib.CONTINUE: + break + while True: + skip = res.fp.readline().strip() + if not skip: + break + res.status = status + res.reason = reason.strip() + + if res.status == 200: + while True: + line = res.fp.readline() + if line == '\r\n': + break + return True + + if version == 'HTTP/1.0': + res.version = 10 + elif version.startswith('HTTP/1.'): + res.version = 11 + elif version == 'HTTP/0.9': + res.version = 9 + else: + raise httplib.UnknownProtocol(version) + + if res.version == 9: + res.length = None + res.chunked = 0 + res.will_close = 1 + res.msg = httplib.HTTPMessage(cStringIO.StringIO()) + return False + + res.msg = httplib.HTTPMessage(res.fp) + res.msg.fp = None + + # are we using the chunked-style of transfer encoding? + trenc = res.msg.getheader('transfer-encoding') + if trenc and trenc.lower() == "chunked": + res.chunked = 1 + res.chunk_left = None + else: + res.chunked = 0 + + # will the connection close at the end of the response? + res.will_close = res._check_close() + + # do we have a Content-Length? + # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked" + length = res.msg.getheader('content-length') + if length and not res.chunked: + try: + res.length = int(length) + except ValueError: + res.length = None + else: + if res.length < 0: # ignore nonsensical negative lengths + res.length = None + else: + res.length = None + + # does the body have a fixed length? 
(of zero) + if (status == httplib.NO_CONTENT or status == httplib.NOT_MODIFIED or + 100 <= status < 200 or # 1xx codes + res._method == 'HEAD'): + res.length = 0 + + # if the connection remains open, and we aren't using chunked, and + # a content-length was not provided, then assume that the connection + # WILL close. + if (not res.will_close and + not res.chunked and + res.length is None): + res.will_close = 1 + + self.proxyres = res + + return False + +class httphandler(keepalive.HTTPHandler): + def http_open(self, req): + return self.do_open(httpconnection, req) + + def _start_transaction(self, h, req): + _generic_start_transaction(self, h, req) + return keepalive.HTTPHandler._start_transaction(self, h, req) + +def _verifycert(cert, hostname): + '''Verify that cert (in socket.getpeercert() format) matches hostname. + CRLs and subjectAltName are not handled. + + Returns error message if any problems are found and None on success. + ''' + if not cert: + return _('no certificate received') + dnsname = hostname.lower() + for s in cert.get('subject', []): + key, value = s[0] + if key == 'commonName': + certname = value.lower() + if (certname == dnsname or + '.' in dnsname and certname == '*.' 
+ dnsname.split('.', 1)[1]): + return None + return _('certificate is for %s') % certname + return _('no commonName found in certificate') + +if has_https: + class BetterHTTPS(httplib.HTTPSConnection): + send = keepalive.safesend + + def connect(self): + if hasattr(self, 'ui'): + cacerts = self.ui.config('web', 'cacerts') + else: + cacerts = None + + if cacerts: + sock = _create_connection((self.host, self.port)) + self.sock = _ssl_wrap_socket(sock, self.key_file, + self.cert_file, cert_reqs=CERT_REQUIRED, + ca_certs=cacerts) + msg = _verifycert(self.sock.getpeercert(), self.host) + if msg: + raise util.Abort(_('%s certificate error: %s') % + (self.host, msg)) + self.ui.debug('%s certificate successfully verified\n' % + self.host) + else: + self.ui.warn(_("warning: %s certificate not verified " + "(check web.cacerts config setting)\n") % + self.host) + httplib.HTTPSConnection.connect(self) + + class httpsconnection(BetterHTTPS): + response_class = keepalive.HTTPResponse + # must be able to send big bundle as stream. 
+ send = _gen_sendfile(BetterHTTPS) + getresponse = keepalive.wrapgetresponse(httplib.HTTPSConnection) + + def connect(self): + if self.realhostport: # use CONNECT proxy + self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.sock.connect((self.host, self.port)) + if _generic_proxytunnel(self): + self.sock = _ssl_wrap_socket(self.sock, self.key_file, + self.cert_file) + else: + BetterHTTPS.connect(self) + + class httpshandler(keepalive.KeepAliveHandler, urllib2.HTTPSHandler): + def __init__(self, ui): + keepalive.KeepAliveHandler.__init__(self) + urllib2.HTTPSHandler.__init__(self) + self.ui = ui + self.pwmgr = passwordmgr(self.ui) + + def _start_transaction(self, h, req): + _generic_start_transaction(self, h, req) + return keepalive.KeepAliveHandler._start_transaction(self, h, req) + + def https_open(self, req): + self.auth = self.pwmgr.readauthtoken(req.get_full_url()) + return self.do_open(self._makeconnection, req) + + def _makeconnection(self, host, port=None, *args, **kwargs): + keyfile = None + certfile = None + + if len(args) >= 1: # key_file + keyfile = args[0] + if len(args) >= 2: # cert_file + certfile = args[1] + args = args[2:] + + # if the user has specified different key/cert files in + # hgrc, we prefer these + if self.auth and 'key' in self.auth and 'cert' in self.auth: + keyfile = self.auth['key'] + certfile = self.auth['cert'] + + conn = httpsconnection(host, port, keyfile, certfile, *args, **kwargs) + conn.ui = self.ui + return conn + +class httpdigestauthhandler(urllib2.HTTPDigestAuthHandler): + def __init__(self, *args, **kwargs): + urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs) + self.retried_req = None + + def reset_retry_count(self): + # Python 2.6.5 will call this on 401 or 407 errors and thus loop + # forever. We disable reset_retry_count completely and reset in + # http_error_auth_reqed instead. 
+ pass + + def http_error_auth_reqed(self, auth_header, host, req, headers): + # Reset the retry counter once for each request. + if req is not self.retried_req: + self.retried_req = req + self.retried = 0 + # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if + # it doesn't know about the auth type requested. This can happen if + # somebody is using BasicAuth and types a bad password. + try: + return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed( + self, auth_header, host, req, headers) + except ValueError, inst: + arg = inst.args[0] + if arg.startswith("AbstractDigestAuthHandler doesn't know "): + return + raise + +class httpbasicauthhandler(urllib2.HTTPBasicAuthHandler): + def __init__(self, *args, **kwargs): + urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs) + self.retried_req = None + + def reset_retry_count(self): + # Python 2.6.5 will call this on 401 or 407 errors and thus loop + # forever. We disable reset_retry_count completely and reset in + # http_error_auth_reqed instead. + pass + + def http_error_auth_reqed(self, auth_header, host, req, headers): + # Reset the retry counter once for each request. + if req is not self.retried_req: + self.retried_req = req + self.retried = 0 + return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed( + self, auth_header, host, req, headers) + +def getauthinfo(path): + scheme, netloc, urlpath, query, frag = urlparse.urlsplit(path) + if not urlpath: + urlpath = '/' + if scheme != 'file': + # XXX: why are we quoting the path again with some smart + # heuristic here? Anyway, it cannot be done with file:// + # urls since path encoding is os/fs dependent (see + # urllib.pathname2url() for details). 
+ urlpath = quotepath(urlpath) + host, port, user, passwd = netlocsplit(netloc) + + # urllib cannot handle URLs with embedded user or passwd + url = urlparse.urlunsplit((scheme, netlocunsplit(host, port), + urlpath, query, frag)) + if user: + netloc = host + if port: + netloc += ':' + port + # Python < 2.4.3 uses only the netloc to search for a password + authinfo = (None, (url, netloc), user, passwd or '') + else: + authinfo = None + return url, authinfo + +handlerfuncs = [] + +def opener(ui, authinfo=None): + ''' + construct an opener suitable for urllib2 + authinfo will be added to the password manager + ''' + handlers = [httphandler()] + if has_https: + handlers.append(httpshandler(ui)) + + handlers.append(proxyhandler(ui)) + + passmgr = passwordmgr(ui) + if authinfo is not None: + passmgr.add_password(*authinfo) + user, passwd = authinfo[2:4] + ui.debug('http auth: user %s, password %s\n' % + (user, passwd and '*' * len(passwd) or 'not set')) + + handlers.extend((httpbasicauthhandler(passmgr), + httpdigestauthhandler(passmgr))) + handlers.extend([h(ui, passmgr) for h in handlerfuncs]) + opener = urllib2.build_opener(*handlers) + + # 1.0 here is the _protocol_ version + opener.addheaders = [('User-agent', 'mercurial/proto-1.0')] + opener.addheaders.append(('Accept', 'application/mercurial-0.1')) + return opener + +scheme_re = re.compile(r'^([a-zA-Z0-9+-.]+)://') + +def open(ui, url, data=None): + scheme = None + m = scheme_re.search(url) + if m: + scheme = m.group(1).lower() + if not scheme: + path = util.normpath(os.path.abspath(url)) + url = 'file://' + urllib.pathname2url(path) + authinfo = None + else: + url, authinfo = getauthinfo(url) + return opener(ui, authinfo).open(url, data) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/url.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/url.pyo Binary files differnew file mode 100644 index 0000000..c6e73dd --- /dev/null +++ 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/url.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/util.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/util.py new file mode 100644 index 0000000..d435108 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/util.py @@ -0,0 +1,1538 @@ +# util.py - Mercurial utility functions and platform specfic implementations +# +# Copyright 2005 K. Thananchayan <thananck@yahoo.com> +# Copyright 2005-2007 Matt Mackall <mpm@selenic.com> +# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""Mercurial utility functions and platform specfic implementations. + +This contains helper routines that are independent of the SCM core and +hide platform-specific details from the core. +""" + +from i18n import _ +import error, osutil, encoding +import errno, re, shutil, sys, tempfile, traceback +import os, stat, time, calendar, textwrap, unicodedata, signal +import imp, socket + +# Python compatibility + +def sha1(s): + return _fastsha1(s) + +def _fastsha1(s): + # This function will import sha1 from hashlib or sha (whichever is + # available) and overwrite itself with it on the first call. + # Subsequent calls will go directly to the imported function. 
+ if sys.version_info >= (2, 5): + from hashlib import sha1 as _sha1 + else: + from sha import sha as _sha1 + global _fastsha1, sha1 + _fastsha1 = sha1 = _sha1 + return _sha1(s) + +import __builtin__ + +if sys.version_info[0] < 3: + def fakebuffer(sliceable, offset=0): + return sliceable[offset:] +else: + def fakebuffer(sliceable, offset=0): + return memoryview(sliceable)[offset:] +try: + buffer +except NameError: + __builtin__.buffer = fakebuffer + +import subprocess +closefds = os.name == 'posix' + +def popen2(cmd, env=None, newlines=False): + # Setting bufsize to -1 lets the system decide the buffer size. + # The default for bufsize is 0, meaning unbuffered. This leads to + # poor performance on Mac OS X: http://bugs.python.org/issue4194 + p = subprocess.Popen(cmd, shell=True, bufsize=-1, + close_fds=closefds, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + universal_newlines=newlines, + env=env) + return p.stdin, p.stdout + +def popen3(cmd, env=None, newlines=False): + p = subprocess.Popen(cmd, shell=True, bufsize=-1, + close_fds=closefds, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=newlines, + env=env) + return p.stdin, p.stdout, p.stderr + +def version(): + """Return version information if available.""" + try: + import __version__ + return __version__.version + except ImportError: + return 'unknown' + +# used by parsedate +defaultdateformats = ( + '%Y-%m-%d %H:%M:%S', + '%Y-%m-%d %I:%M:%S%p', + '%Y-%m-%d %H:%M', + '%Y-%m-%d %I:%M%p', + '%Y-%m-%d', + '%m-%d', + '%m/%d', + '%m/%d/%y', + '%m/%d/%Y', + '%a %b %d %H:%M:%S %Y', + '%a %b %d %I:%M:%S%p %Y', + '%a, %d %b %Y %H:%M:%S', # GNU coreutils "/bin/date --rfc-2822" + '%b %d %H:%M:%S %Y', + '%b %d %I:%M:%S%p %Y', + '%b %d %H:%M:%S', + '%b %d %I:%M:%S%p', + '%b %d %H:%M', + '%b %d %I:%M%p', + '%b %d %Y', + '%b %d', + '%H:%M:%S', + '%I:%M:%S%p', + '%H:%M', + '%I:%M%p', +) + +extendeddateformats = defaultdateformats + ( + "%Y", + "%Y-%m", + "%b", + "%b %Y", + 
) + +def cachefunc(func): + '''cache the result of function calls''' + # XXX doesn't handle keywords args + cache = {} + if func.func_code.co_argcount == 1: + # we gain a small amount of time because + # we don't need to pack/unpack the list + def f(arg): + if arg not in cache: + cache[arg] = func(arg) + return cache[arg] + else: + def f(*args): + if args not in cache: + cache[args] = func(*args) + return cache[args] + + return f + +def lrucachefunc(func): + '''cache most recent results of function calls''' + cache = {} + order = [] + if func.func_code.co_argcount == 1: + def f(arg): + if arg not in cache: + if len(cache) > 20: + del cache[order.pop(0)] + cache[arg] = func(arg) + else: + order.remove(arg) + order.append(arg) + return cache[arg] + else: + def f(*args): + if args not in cache: + if len(cache) > 20: + del cache[order.pop(0)] + cache[args] = func(*args) + else: + order.remove(args) + order.append(args) + return cache[args] + + return f + +class propertycache(object): + def __init__(self, func): + self.func = func + self.name = func.__name__ + def __get__(self, obj, type=None): + result = self.func(obj) + setattr(obj, self.name, result) + return result + +def pipefilter(s, cmd): + '''filter string S through command CMD, returning its output''' + p = subprocess.Popen(cmd, shell=True, close_fds=closefds, + stdin=subprocess.PIPE, stdout=subprocess.PIPE) + pout, perr = p.communicate(s) + return pout + +def tempfilter(s, cmd): + '''filter string S through a pair of temporary files with CMD. 
+ CMD is used as a template to create the real command to be run, + with the strings INFILE and OUTFILE replaced by the real names of + the temporary files generated.''' + inname, outname = None, None + try: + infd, inname = tempfile.mkstemp(prefix='hg-filter-in-') + fp = os.fdopen(infd, 'wb') + fp.write(s) + fp.close() + outfd, outname = tempfile.mkstemp(prefix='hg-filter-out-') + os.close(outfd) + cmd = cmd.replace('INFILE', inname) + cmd = cmd.replace('OUTFILE', outname) + code = os.system(cmd) + if sys.platform == 'OpenVMS' and code & 1: + code = 0 + if code: + raise Abort(_("command '%s' failed: %s") % + (cmd, explain_exit(code))) + return open(outname, 'rb').read() + finally: + try: + if inname: + os.unlink(inname) + except: + pass + try: + if outname: + os.unlink(outname) + except: + pass + +filtertable = { + 'tempfile:': tempfilter, + 'pipe:': pipefilter, + } + +def filter(s, cmd): + "filter a string through a command that transforms its input to its output" + for name, fn in filtertable.iteritems(): + if cmd.startswith(name): + return fn(s, cmd[len(name):].lstrip()) + return pipefilter(s, cmd) + +def binary(s): + """return true if a string is binary data""" + return bool(s and '\0' in s) + +def increasingchunks(source, min=1024, max=65536): + '''return no less than min bytes per chunk while data remains, + doubling min after each chunk until it reaches max''' + def log2(x): + if not x: + return 0 + i = 0 + while x: + x >>= 1 + i += 1 + return i - 1 + + buf = [] + blen = 0 + for chunk in source: + buf.append(chunk) + blen += len(chunk) + if blen >= min: + if min < max: + min = min << 1 + nmin = 1 << log2(blen) + if nmin > min: + min = nmin + if min > max: + min = max + yield ''.join(buf) + blen = 0 + buf = [] + if buf: + yield ''.join(buf) + +Abort = error.Abort + +def always(fn): + return True + +def never(fn): + return False + +def pathto(root, n1, n2): + '''return the relative path from one place to another. 
+ root should use os.sep to separate directories + n1 should use os.sep to separate directories + n2 should use "/" to separate directories + returns an os.sep-separated path. + + If n1 is a relative path, it's assumed it's + relative to root. + n2 should always be relative to root. + ''' + if not n1: + return localpath(n2) + if os.path.isabs(n1): + if os.path.splitdrive(root)[0] != os.path.splitdrive(n1)[0]: + return os.path.join(root, localpath(n2)) + n2 = '/'.join((pconvert(root), n2)) + a, b = splitpath(n1), n2.split('/') + a.reverse() + b.reverse() + while a and b and a[-1] == b[-1]: + a.pop() + b.pop() + b.reverse() + return os.sep.join((['..'] * len(a)) + b) or '.' + +def canonpath(root, cwd, myname, auditor=None): + """return the canonical path of myname, given cwd and root""" + if endswithsep(root): + rootsep = root + else: + rootsep = root + os.sep + name = myname + if not os.path.isabs(name): + name = os.path.join(root, cwd, name) + name = os.path.normpath(name) + if auditor is None: + auditor = path_auditor(root) + if name != rootsep and name.startswith(rootsep): + name = name[len(rootsep):] + auditor(name) + return pconvert(name) + elif name == root: + return '' + else: + # Determine whether `name' is in the hierarchy at or beneath `root', + # by iterating name=dirname(name) until that causes no change (can't + # check name == '/', because that doesn't work on windows). For each + # `name', compare dev/inode numbers. If they match, the list `rel' + # holds the reversed list of components making up the relative file + # name we want. 
+ root_st = os.stat(root) + rel = [] + while True: + try: + name_st = os.stat(name) + except OSError: + break + if samestat(name_st, root_st): + if not rel: + # name was actually the same as root (maybe a symlink) + return '' + rel.reverse() + name = os.path.join(*rel) + auditor(name) + return pconvert(name) + dirname, basename = os.path.split(name) + rel.append(basename) + if dirname == name: + break + name = dirname + + raise Abort('%s not under root' % myname) + +_hgexecutable = None + +def main_is_frozen(): + """return True if we are a frozen executable. + + The code supports py2exe (most common, Windows only) and tools/freeze + (portable, not much used). + """ + return (hasattr(sys, "frozen") or # new py2exe + hasattr(sys, "importers") or # old py2exe + imp.is_frozen("__main__")) # tools/freeze + +def hgexecutable(): + """return location of the 'hg' executable. + + Defaults to $HG or 'hg' in the search path. + """ + if _hgexecutable is None: + hg = os.environ.get('HG') + if hg: + set_hgexecutable(hg) + elif main_is_frozen(): + set_hgexecutable(sys.executable) + else: + exe = find_exe('hg') or os.path.basename(sys.argv[0]) + set_hgexecutable(exe) + return _hgexecutable + +def set_hgexecutable(path): + """set location of the 'hg' executable""" + global _hgexecutable + _hgexecutable = path + +def system(cmd, environ={}, cwd=None, onerr=None, errprefix=None, out=None): + '''enhanced shell command execution. + run with environment maybe modified, maybe in different dir. + + if command fails and onerr is None, return status. if ui object, + print error message and return status, else raise onerr object as + exception. + + if out is specified, it is assumed to be a file-like object that has a + write() method. 
stdout and stderr will be redirected to out.''' + def py2shell(val): + 'convert python object into string that is useful to shell' + if val is None or val is False: + return '0' + if val is True: + return '1' + return str(val) + origcmd = cmd + cmd = quotecommand(cmd) + env = dict(os.environ) + env.update((k, py2shell(v)) for k, v in environ.iteritems()) + env['HG'] = hgexecutable() + if out is None: + rc = subprocess.call(cmd, shell=True, close_fds=closefds, + env=env, cwd=cwd) + else: + proc = subprocess.Popen(cmd, shell=True, close_fds=closefds, + env=env, cwd=cwd, stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + for line in proc.stdout: + out.write(line) + proc.wait() + rc = proc.returncode + if sys.platform == 'OpenVMS' and rc & 1: + rc = 0 + if rc and onerr: + errmsg = '%s %s' % (os.path.basename(origcmd.split(None, 1)[0]), + explain_exit(rc)[0]) + if errprefix: + errmsg = '%s: %s' % (errprefix, errmsg) + try: + onerr.warn(errmsg + '\n') + except AttributeError: + raise onerr(errmsg) + return rc + +def checksignature(func): + '''wrap a function with code to check for calling errors''' + def check(*args, **kwargs): + try: + return func(*args, **kwargs) + except TypeError: + if len(traceback.extract_tb(sys.exc_info()[2])) == 1: + raise error.SignatureError + raise + + return check + +def unlink(f): + """unlink and remove the directory if it is empty""" + os.unlink(f) + # try removing directories that might now be empty + try: + os.removedirs(os.path.dirname(f)) + except OSError: + pass + +def copyfile(src, dest): + "copy a file, preserving mode and atime/mtime" + if os.path.islink(src): + try: + os.unlink(dest) + except: + pass + os.symlink(os.readlink(src), dest) + else: + try: + shutil.copyfile(src, dest) + shutil.copystat(src, dest) + except shutil.Error, inst: + raise Abort(str(inst)) + +def copyfiles(src, dst, hardlink=None): + """Copy a directory tree using hardlinks if possible""" + + if hardlink is None: + hardlink = (os.stat(src).st_dev == + 
os.stat(os.path.dirname(dst)).st_dev) + + num = 0 + if os.path.isdir(src): + os.mkdir(dst) + for name, kind in osutil.listdir(src): + srcname = os.path.join(src, name) + dstname = os.path.join(dst, name) + hardlink, n = copyfiles(srcname, dstname, hardlink) + num += n + else: + if hardlink: + try: + os_link(src, dst) + except (IOError, OSError): + hardlink = False + shutil.copy(src, dst) + else: + shutil.copy(src, dst) + num += 1 + + return hardlink, num + +class path_auditor(object): + '''ensure that a filesystem path contains no banned components. + the following properties of a path are checked: + + - under top-level .hg + - starts at the root of a windows drive + - contains ".." + - traverses a symlink (e.g. a/symlink_here/b) + - inside a nested repository (a callback can be used to approve + some nested repositories, e.g., subrepositories) + ''' + + def __init__(self, root, callback=None): + self.audited = set() + self.auditeddir = set() + self.root = root + self.callback = callback + + def __call__(self, path): + if path in self.audited: + return + normpath = os.path.normcase(path) + parts = splitpath(normpath) + if (os.path.splitdrive(path)[0] + or parts[0].lower() in ('.hg', '.hg.', '') + or os.pardir in parts): + raise Abort(_("path contains illegal component: %s") % path) + if '.hg' in path.lower(): + lparts = [p.lower() for p in parts] + for p in '.hg', '.hg.': + if p in lparts[1:]: + pos = lparts.index(p) + base = os.path.join(*parts[:pos]) + raise Abort(_('path %r is inside repo %r') % (path, base)) + def check(prefix): + curpath = os.path.join(self.root, prefix) + try: + st = os.lstat(curpath) + except OSError, err: + # EINVAL can be raised as invalid path syntax under win32. + # They must be ignored for patterns can be checked too. 
+ if err.errno not in (errno.ENOENT, errno.ENOTDIR, errno.EINVAL): + raise + else: + if stat.S_ISLNK(st.st_mode): + raise Abort(_('path %r traverses symbolic link %r') % + (path, prefix)) + elif (stat.S_ISDIR(st.st_mode) and + os.path.isdir(os.path.join(curpath, '.hg'))): + if not self.callback or not self.callback(curpath): + raise Abort(_('path %r is inside repo %r') % + (path, prefix)) + parts.pop() + prefixes = [] + while parts: + prefix = os.sep.join(parts) + if prefix in self.auditeddir: + break + check(prefix) + prefixes.append(prefix) + parts.pop() + + self.audited.add(path) + # only add prefixes to the cache after checking everything: we don't + # want to add "foo/bar/baz" before checking if there's a "foo/.hg" + self.auditeddir.update(prefixes) + +def nlinks(pathname): + """Return number of hardlinks for the given file.""" + return os.lstat(pathname).st_nlink + +if hasattr(os, 'link'): + os_link = os.link +else: + def os_link(src, dst): + raise OSError(0, _("Hardlinks not supported")) + +def lookup_reg(key, name=None, scope=None): + return None + +def hidewindow(): + """Hide current shell window. + + Used to hide the window opened when starting asynchronous + child process under Windows, unneeded on other systems. 
+ """ + pass + +if os.name == 'nt': + from windows import * +else: + from posix import * + +def makelock(info, pathname): + try: + return os.symlink(info, pathname) + except OSError, why: + if why.errno == errno.EEXIST: + raise + except AttributeError: # no symlink in os + pass + + ld = os.open(pathname, os.O_CREAT | os.O_WRONLY | os.O_EXCL) + os.write(ld, info) + os.close(ld) + +def readlock(pathname): + try: + return os.readlink(pathname) + except OSError, why: + if why.errno not in (errno.EINVAL, errno.ENOSYS): + raise + except AttributeError: # no symlink in os + pass + return posixfile(pathname).read() + +def fstat(fp): + '''stat file object that may not have fileno method.''' + try: + return os.fstat(fp.fileno()) + except AttributeError: + return os.stat(fp.name) + +# File system features + +def checkcase(path): + """ + Check whether the given path is on a case-sensitive filesystem + + Requires a path (like /foo/.hg) ending with a foldable final + directory component. + """ + s1 = os.stat(path) + d, b = os.path.split(path) + p2 = os.path.join(d, b.upper()) + if path == p2: + p2 = os.path.join(d, b.lower()) + try: + s2 = os.stat(p2) + if s2 == s1: + return False + return True + except: + return True + +_fspathcache = {} +def fspath(name, root): + '''Get name in the case stored in the filesystem + + The name is either relative to root, or it is an absolute path starting + with root. Note that this function is unnecessary, and should not be + called, for case-sensitive filesystems (simply because it's expensive). + ''' + # If name is absolute, make it relative + if name.lower().startswith(root.lower()): + l = len(root) + if name[l] == os.sep or name[l] == os.altsep: + l = l + 1 + name = name[l:] + + if not os.path.lexists(os.path.join(root, name)): + return None + + seps = os.sep + if os.altsep: + seps = seps + os.altsep + # Protect backslashes. This gets silly very quickly. 
+ seps.replace('\\','\\\\') + pattern = re.compile(r'([^%s]+)|([%s]+)' % (seps, seps)) + dir = os.path.normcase(os.path.normpath(root)) + result = [] + for part, sep in pattern.findall(name): + if sep: + result.append(sep) + continue + + if dir not in _fspathcache: + _fspathcache[dir] = os.listdir(dir) + contents = _fspathcache[dir] + + lpart = part.lower() + lenp = len(part) + for n in contents: + if lenp == len(n) and n.lower() == lpart: + result.append(n) + break + else: + # Cannot happen, as the file exists! + result.append(part) + dir = os.path.join(dir, lpart) + + return ''.join(result) + +def checkexec(path): + """ + Check whether the given path is on a filesystem with UNIX-like exec flags + + Requires a directory (like /foo/.hg) + """ + + # VFAT on some Linux versions can flip mode but it doesn't persist + # a FS remount. Frequently we can detect it if files are created + # with exec bit on. + + try: + EXECFLAGS = stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + fh, fn = tempfile.mkstemp(dir=path, prefix='hg-checkexec-') + try: + os.close(fh) + m = os.stat(fn).st_mode & 0777 + new_file_has_exec = m & EXECFLAGS + os.chmod(fn, m ^ EXECFLAGS) + exec_flags_cannot_flip = ((os.stat(fn).st_mode & 0777) == m) + finally: + os.unlink(fn) + except (IOError, OSError): + # we don't care, the user probably won't be able to commit anyway + return False + return not (new_file_has_exec or exec_flags_cannot_flip) + +def checklink(path): + """check whether the given path is on a symlink-capable filesystem""" + # mktemp is not racy because symlink creation will fail if the + # file already exists + name = tempfile.mktemp(dir=path, prefix='hg-checklink-') + try: + os.symlink(".", name) + os.unlink(name) + return True + except (OSError, AttributeError): + return False + +def checknlink(testfile): + '''check whether hardlink count reporting works properly''' + + # testfile may be open, so we need a separate file for checking to + # work around issue2543 (or testfile may get lost on 
Samba shares) + f1 = testfile + ".hgtmp1" + if os.path.lexists(f1): + return False + try: + posixfile(f1, 'w').close() + except IOError: + return False + + f2 = testfile + ".hgtmp2" + fd = None + try: + try: + os_link(f1, f2) + except OSError: + return False + + # nlinks() may behave differently for files on Windows shares if + # the file is open. + fd = open(f2) + return nlinks(f2) > 1 + finally: + if fd is not None: + fd.close() + for f in (f1, f2): + try: + os.unlink(f) + except OSError: + pass + + return False + +def endswithsep(path): + '''Check path ends with os.sep or os.altsep.''' + return path.endswith(os.sep) or os.altsep and path.endswith(os.altsep) + +def splitpath(path): + '''Split path by os.sep. + Note that this function does not use os.altsep because this is + an alternative of simple "xxx.split(os.sep)". + It is recommended to use os.path.normpath() before using this + function if need.''' + return path.split(os.sep) + +def gui(): + '''Are we running in a GUI?''' + return os.name == "nt" or os.name == "mac" or os.environ.get("DISPLAY") + +def mktempcopy(name, emptyok=False, createmode=None): + """Create a temporary file with the same contents from name + + The permission bits are copied from the original file. + + If the temporary file is going to be truncated immediately, you + can use emptyok=True as an optimization. + + Returns the name of the temporary file. + """ + d, fn = os.path.split(name) + fd, temp = tempfile.mkstemp(prefix='.%s-' % fn, dir=d) + os.close(fd) + # Temporary files are created with mode 0600, which is usually not + # what we want. If the original file already exists, just copy + # its mode. Otherwise, manually obey umask. 
+ try: + st_mode = os.lstat(name).st_mode & 0777 + except OSError, inst: + if inst.errno != errno.ENOENT: + raise + st_mode = createmode + if st_mode is None: + st_mode = ~umask + st_mode &= 0666 + os.chmod(temp, st_mode) + if emptyok: + return temp + try: + try: + ifp = posixfile(name, "rb") + except IOError, inst: + if inst.errno == errno.ENOENT: + return temp + if not getattr(inst, 'filename', None): + inst.filename = name + raise + ofp = posixfile(temp, "wb") + for chunk in filechunkiter(ifp): + ofp.write(chunk) + ifp.close() + ofp.close() + except: + try: os.unlink(temp) + except: pass + raise + return temp + +class atomictempfile(object): + """file-like object that atomically updates a file + + All writes will be redirected to a temporary copy of the original + file. When rename is called, the copy is renamed to the original + name, making the changes visible. + """ + def __init__(self, name, mode='w+b', createmode=None): + self.__name = name + self._fp = None + self.temp = mktempcopy(name, emptyok=('w' in mode), + createmode=createmode) + self._fp = posixfile(self.temp, mode) + + def __getattr__(self, name): + return getattr(self._fp, name) + + def rename(self): + if not self._fp.closed: + self._fp.close() + rename(self.temp, localpath(self.__name)) + + def __del__(self): + if not self._fp: + return + if not self._fp.closed: + try: + os.unlink(self.temp) + except: pass + self._fp.close() + +def makedirs(name, mode=None): + """recursive directory creation with parent mode inheritance""" + parent = os.path.abspath(os.path.dirname(name)) + try: + os.mkdir(name) + if mode is not None: + os.chmod(name, mode) + return + except OSError, err: + if err.errno == errno.EEXIST: + return + if not name or parent == name or err.errno != errno.ENOENT: + raise + makedirs(parent, mode) + makedirs(name, mode) + +class opener(object): + """Open files relative to a base directory + + This class is used to hide the details of COW semantics and + remote file access from higher 
level code. + """ + def __init__(self, base, audit=True): + self.base = base + if audit: + self.auditor = path_auditor(base) + else: + self.auditor = always + self.createmode = None + self._trustnlink = None + + @propertycache + def _can_symlink(self): + return checklink(self.base) + + def _fixfilemode(self, name): + if self.createmode is None: + return + os.chmod(name, self.createmode & 0666) + + def __call__(self, path, mode="r", text=False, atomictemp=False): + self.auditor(path) + f = os.path.join(self.base, path) + + if not text and "b" not in mode: + mode += "b" # for that other OS + + nlink = -1 + st_mode = None + dirname, basename = os.path.split(f) + # If basename is empty, then the path is malformed because it points + # to a directory. Let the posixfile() call below raise IOError. + if basename and mode not in ('r', 'rb'): + if atomictemp: + if not os.path.isdir(dirname): + makedirs(dirname, self.createmode) + return atomictempfile(f, mode, self.createmode) + try: + if 'w' in mode: + st_mode = os.lstat(f).st_mode & 0777 + os.unlink(f) + nlink = 0 + else: + # nlinks() may behave differently for files on Windows + # shares if the file is open. 
+ fd = open(f) + nlink = nlinks(f) + fd.close() + except (OSError, IOError): + nlink = 0 + if not os.path.isdir(dirname): + makedirs(dirname, self.createmode) + if nlink > 0: + if self._trustnlink is None: + self._trustnlink = nlink > 1 or checknlink(f) + if nlink > 1 or not self._trustnlink: + rename(mktempcopy(f), f) + fp = posixfile(f, mode) + if nlink == 0: + if st_mode is None: + self._fixfilemode(f) + else: + os.chmod(f, st_mode) + return fp + + def symlink(self, src, dst): + self.auditor(dst) + linkname = os.path.join(self.base, dst) + try: + os.unlink(linkname) + except OSError: + pass + + dirname = os.path.dirname(linkname) + if not os.path.exists(dirname): + makedirs(dirname, self.createmode) + + if self._can_symlink: + try: + os.symlink(src, linkname) + except OSError, err: + raise OSError(err.errno, _('could not symlink to %r: %s') % + (src, err.strerror), linkname) + else: + f = self(dst, "w") + f.write(src) + f.close() + self._fixfilemode(dst) + +class chunkbuffer(object): + """Allow arbitrary sized chunks of data to be efficiently read from an + iterator over chunks of arbitrary size.""" + + def __init__(self, in_iter): + """in_iter is the iterator that's iterating over the input chunks. + targetsize is how big a buffer to try to maintain.""" + def splitbig(chunks): + for chunk in chunks: + if len(chunk) > 2**20: + pos = 0 + while pos < len(chunk): + end = pos + 2 ** 18 + yield chunk[pos:end] + pos = end + else: + yield chunk + self.iter = splitbig(in_iter) + self._queue = [] + + def read(self, l): + """Read L bytes of data from the iterator of chunks of data. 
+ Returns less than L bytes if the iterator runs dry.""" + left = l + buf = '' + queue = self._queue + while left > 0: + # refill the queue + if not queue: + target = 2**18 + for chunk in self.iter: + queue.append(chunk) + target -= len(chunk) + if target <= 0: + break + if not queue: + break + + chunk = queue.pop(0) + left -= len(chunk) + if left < 0: + queue.insert(0, chunk[left:]) + buf += chunk[:left] + else: + buf += chunk + + return buf + +def filechunkiter(f, size=65536, limit=None): + """Create a generator that produces the data in the file size + (default 65536) bytes at a time, up to optional limit (default is + to read all data). Chunks may be less than size bytes if the + chunk is the last chunk in the file, or the file is a socket or + some other type of file that sometimes reads less data than is + requested.""" + assert size >= 0 + assert limit is None or limit >= 0 + while True: + if limit is None: + nbytes = size + else: + nbytes = min(limit, size) + s = nbytes and f.read(nbytes) + if not s: + break + if limit: + limit -= len(s) + yield s + +def makedate(): + lt = time.localtime() + if lt[8] == 1 and time.daylight: + tz = time.altzone + else: + tz = time.timezone + t = time.mktime(lt) + if t < 0: + hint = _("check your clock") + raise Abort(_("negative timestamp: %d") % t, hint=hint) + return t, tz + +def datestr(date=None, format='%a %b %d %H:%M:%S %Y %1%2'): + """represent a (unixtime, offset) tuple as a localized time. + unixtime is seconds since the epoch, and offset is the time zone's + number of seconds away from UTC. 
if timezone is false, do not + append time zone to string.""" + t, tz = date or makedate() + if t < 0: + t = 0 # time.gmtime(lt) fails on Windows for lt < -43200 + tz = 0 + if "%1" in format or "%2" in format: + sign = (tz > 0) and "-" or "+" + minutes = abs(tz) // 60 + format = format.replace("%1", "%c%02d" % (sign, minutes // 60)) + format = format.replace("%2", "%02d" % (minutes % 60)) + s = time.strftime(format, time.gmtime(float(t) - tz)) + return s + +def shortdate(date=None): + """turn (timestamp, tzoff) tuple into iso 8631 date.""" + return datestr(date, format='%Y-%m-%d') + +def strdate(string, format, defaults=[]): + """parse a localized time string and return a (unixtime, offset) tuple. + if the string cannot be parsed, ValueError is raised.""" + def timezone(string): + tz = string.split()[-1] + if tz[0] in "+-" and len(tz) == 5 and tz[1:].isdigit(): + sign = (tz[0] == "+") and 1 or -1 + hours = int(tz[1:3]) + minutes = int(tz[3:5]) + return -sign * (hours * 60 + minutes) * 60 + if tz == "GMT" or tz == "UTC": + return 0 + return None + + # NOTE: unixtime = localunixtime + offset + offset, date = timezone(string), string + if offset != None: + date = " ".join(string.split()[:-1]) + + # add missing elements from defaults + usenow = False # default to using biased defaults + for part in ("S", "M", "HI", "d", "mb", "yY"): # decreasing specificity + found = [True for p in part if ("%"+p) in format] + if not found: + date += "@" + defaults[part][usenow] + format += "@%" + part[0] + else: + # We've found a specific time element, less specific time + # elements are relative to today + usenow = True + + timetuple = time.strptime(date, format) + localunixtime = int(calendar.timegm(timetuple)) + if offset is None: + # local timezone + unixtime = int(time.mktime(timetuple)) + offset = unixtime - localunixtime + else: + unixtime = localunixtime + offset + return unixtime, offset + +def parsedate(date, formats=None, bias={}): + """parse a localized date/time and 
return a (unixtime, offset) tuple. + + The date may be a "unixtime offset" string or in one of the specified + formats. If the date already is a (unixtime, offset) tuple, it is returned. + """ + if not date: + return 0, 0 + if isinstance(date, tuple) and len(date) == 2: + return date + if not formats: + formats = defaultdateformats + date = date.strip() + try: + when, offset = map(int, date.split(' ')) + except ValueError: + # fill out defaults + now = makedate() + defaults = {} + nowmap = {} + for part in "d mb yY HI M S".split(): + # this piece is for rounding the specific end of unknowns + b = bias.get(part) + if b is None: + if part[0] in "HMS": + b = "00" + else: + b = "0" + + # this piece is for matching the generic end to today's date + n = datestr(now, "%" + part[0]) + + defaults[part] = (b, n) + + for format in formats: + try: + when, offset = strdate(date, format, defaults) + except (ValueError, OverflowError): + pass + else: + break + else: + raise Abort(_('invalid date: %r') % date) + # validate explicit (probably user-specified) date and + # time zone offset. values must fit in signed 32 bits for + # current 32-bit linux runtimes. 
timezones go from UTC-12 + # to UTC+14 + if abs(when) > 0x7fffffff: + raise Abort(_('date exceeds 32 bits: %d') % when) + if when < 0: + raise Abort(_('negative date value: %d') % when) + if offset < -50400 or offset > 43200: + raise Abort(_('impossible time zone offset: %d') % offset) + return when, offset + +def matchdate(date): + """Return a function that matches a given date match specifier + + Formats include: + + '{date}' match a given date to the accuracy provided + + '<{date}' on or before a given date + + '>{date}' on or after a given date + + >>> p1 = parsedate("10:29:59") + >>> p2 = parsedate("10:30:00") + >>> p3 = parsedate("10:30:59") + >>> p4 = parsedate("10:31:00") + >>> p5 = parsedate("Sep 15 10:30:00 1999") + >>> f = matchdate("10:30") + >>> f(p1[0]) + False + >>> f(p2[0]) + True + >>> f(p3[0]) + True + >>> f(p4[0]) + False + >>> f(p5[0]) + False + """ + + def lower(date): + d = dict(mb="1", d="1") + return parsedate(date, extendeddateformats, d)[0] + + def upper(date): + d = dict(mb="12", HI="23", M="59", S="59") + for days in "31 30 29".split(): + try: + d["d"] = days + return parsedate(date, extendeddateformats, d)[0] + except: + pass + d["d"] = "28" + return parsedate(date, extendeddateformats, d)[0] + + date = date.strip() + if date[0] == "<": + when = upper(date[1:]) + return lambda x: x <= when + elif date[0] == ">": + when = lower(date[1:]) + return lambda x: x >= when + elif date[0] == "-": + try: + days = int(date[1:]) + except ValueError: + raise Abort(_("invalid day spec: %s") % date[1:]) + when = makedate()[0] - days * 3600 * 24 + return lambda x: x >= when + elif " to " in date: + a, b = date.split(" to ") + start, stop = lower(a), upper(b) + return lambda x: x >= start and x <= stop + else: + start, stop = lower(date), upper(date) + return lambda x: x >= start and x <= stop + +def shortuser(user): + """Return a short representation of a user name or email address.""" + f = user.find('@') + if f >= 0: + user = user[:f] + f = 
user.find('<') + if f >= 0: + user = user[f + 1:] + f = user.find(' ') + if f >= 0: + user = user[:f] + f = user.find('.') + if f >= 0: + user = user[:f] + return user + +def email(author): + '''get email of author.''' + r = author.find('>') + if r == -1: + r = None + return author[author.find('<') + 1:r] + +def _ellipsis(text, maxlength): + if len(text) <= maxlength: + return text, False + else: + return "%s..." % (text[:maxlength - 3]), True + +def ellipsis(text, maxlength=400): + """Trim string to at most maxlength (default: 400) characters.""" + try: + # use unicode not to split at intermediate multi-byte sequence + utext, truncated = _ellipsis(text.decode(encoding.encoding), + maxlength) + if not truncated: + return text + return utext.encode(encoding.encoding) + except (UnicodeDecodeError, UnicodeEncodeError): + return _ellipsis(text, maxlength)[0] + +def walkrepos(path, followsym=False, seen_dirs=None, recurse=False): + '''yield every hg repository under path, recursively.''' + def errhandler(err): + if err.filename == path: + raise err + if followsym and hasattr(os.path, 'samestat'): + def _add_dir_if_not_there(dirlst, dirname): + match = False + samestat = os.path.samestat + dirstat = os.stat(dirname) + for lstdirstat in dirlst: + if samestat(dirstat, lstdirstat): + match = True + break + if not match: + dirlst.append(dirstat) + return not match + else: + followsym = False + + if (seen_dirs is None) and followsym: + seen_dirs = [] + _add_dir_if_not_there(seen_dirs, path) + for root, dirs, files in os.walk(path, topdown=True, onerror=errhandler): + dirs.sort() + if '.hg' in dirs: + yield root # found a repository + qroot = os.path.join(root, '.hg', 'patches') + if os.path.isdir(os.path.join(qroot, '.hg')): + yield qroot # we have a patch queue repo here + if recurse: + # avoid recursing inside the .hg directory + dirs.remove('.hg') + else: + dirs[:] = [] # don't descend further + elif followsym: + newdirs = [] + for d in dirs: + fname = os.path.join(root, 
d) + if _add_dir_if_not_there(seen_dirs, fname): + if os.path.islink(fname): + for hgname in walkrepos(fname, True, seen_dirs): + yield hgname + else: + newdirs.append(d) + dirs[:] = newdirs + +_rcpath = None + +def os_rcpath(): + '''return default os-specific hgrc search path''' + path = system_rcpath() + path.extend(user_rcpath()) + path = [os.path.normpath(f) for f in path] + return path + +def rcpath(): + '''return hgrc search path. if env var HGRCPATH is set, use it. + for each item in path, if directory, use files ending in .rc, + else use item. + make HGRCPATH empty to only look in .hg/hgrc of current repo. + if no HGRCPATH, use default os-specific path.''' + global _rcpath + if _rcpath is None: + if 'HGRCPATH' in os.environ: + _rcpath = [] + for p in os.environ['HGRCPATH'].split(os.pathsep): + if not p: + continue + p = expandpath(p) + if os.path.isdir(p): + for f, kind in osutil.listdir(p): + if f.endswith('.rc'): + _rcpath.append(os.path.join(p, f)) + else: + _rcpath.append(p) + else: + _rcpath = os_rcpath() + return _rcpath + +def bytecount(nbytes): + '''return byte count formatted as readable string, with units''' + + units = ( + (100, 1 << 30, _('%.0f GB')), + (10, 1 << 30, _('%.1f GB')), + (1, 1 << 30, _('%.2f GB')), + (100, 1 << 20, _('%.0f MB')), + (10, 1 << 20, _('%.1f MB')), + (1, 1 << 20, _('%.2f MB')), + (100, 1 << 10, _('%.0f KB')), + (10, 1 << 10, _('%.1f KB')), + (1, 1 << 10, _('%.2f KB')), + (1, 1, _('%.0f bytes')), + ) + + for multiplier, divisor, format in units: + if nbytes >= divisor * multiplier: + return format % (nbytes / float(divisor)) + return units[-1][2] % nbytes + +def drop_scheme(scheme, path): + sc = scheme + ':' + if path.startswith(sc): + path = path[len(sc):] + if path.startswith('//'): + if scheme == 'file': + i = path.find('/', 2) + if i == -1: + return '' + # On Windows, absolute paths are rooted at the current drive + # root. On POSIX they are rooted at the file system root. 
+ if os.name == 'nt': + droot = os.path.splitdrive(os.getcwd())[0] + '/' + path = os.path.join(droot, path[i + 1:]) + else: + path = path[i:] + else: + path = path[2:] + return path + +def uirepr(s): + # Avoid double backslash in Windows path repr() + return repr(s).replace('\\\\', '\\') + +#### naming convention of below implementation follows 'textwrap' module + +class MBTextWrapper(textwrap.TextWrapper): + def __init__(self, **kwargs): + textwrap.TextWrapper.__init__(self, **kwargs) + + def _cutdown(self, str, space_left): + l = 0 + ucstr = unicode(str, encoding.encoding) + w = unicodedata.east_asian_width + for i in xrange(len(ucstr)): + l += w(ucstr[i]) in 'WFA' and 2 or 1 + if space_left < l: + return (ucstr[:i].encode(encoding.encoding), + ucstr[i:].encode(encoding.encoding)) + return str, '' + + # ---------------------------------------- + # overriding of base class + + def _handle_long_word(self, reversed_chunks, cur_line, cur_len, width): + space_left = max(width - cur_len, 1) + + if self.break_long_words: + cut, res = self._cutdown(reversed_chunks[-1], space_left) + cur_line.append(cut) + reversed_chunks[-1] = res + elif not cur_line: + cur_line.append(reversed_chunks.pop()) + +#### naming convention of above implementation follows 'textwrap' module + +def wrap(line, width, initindent='', hangindent=''): + maxindent = max(len(hangindent), len(initindent)) + if width <= maxindent: + # adjust for weird terminal size + width = max(78, maxindent + 1) + wrapper = MBTextWrapper(width=width, + initial_indent=initindent, + subsequent_indent=hangindent) + return wrapper.fill(line) + +def iterlines(iterator): + for chunk in iterator: + for line in chunk.splitlines(): + yield line + +def expandpath(path): + return os.path.expanduser(os.path.expandvars(path)) + +def hgcmd(): + """Return the command used to execute current hg + + This is different from hgexecutable() because on Windows we want + to avoid things opening new shell windows like batch files, so we + get 
either the python call or current executable. + """ + if main_is_frozen(): + return [sys.executable] + return gethgcmd() + +def rundetached(args, condfn): + """Execute the argument list in a detached process. + + condfn is a callable which is called repeatedly and should return + True once the child process is known to have started successfully. + At this point, the child process PID is returned. If the child + process fails to start or finishes before condfn() evaluates to + True, return -1. + """ + # Windows case is easier because the child process is either + # successfully starting and validating the condition or exiting + # on failure. We just poll on its PID. On Unix, if the child + # process fails to start, it will be left in a zombie state until + # the parent wait on it, which we cannot do since we expect a long + # running process on success. Instead we listen for SIGCHLD telling + # us our child process terminated. + terminated = set() + def handler(signum, frame): + terminated.add(os.wait()) + prevhandler = None + if hasattr(signal, 'SIGCHLD'): + prevhandler = signal.signal(signal.SIGCHLD, handler) + try: + pid = spawndetached(args) + while not condfn(): + if ((pid in terminated or not testpid(pid)) + and not condfn()): + return -1 + time.sleep(0.1) + return pid + finally: + if prevhandler is not None: + signal.signal(signal.SIGCHLD, prevhandler) + +try: + any, all = any, all +except NameError: + def any(iterable): + for i in iterable: + if i: + return True + return False + + def all(iterable): + for i in iterable: + if not i: + return False + return True + +def interpolate(prefix, mapping, s, fn=None): + """Return the result of interpolating items in the mapping into string s. + + prefix is a single character string, or a two character string with + a backslash as the first character if the prefix needs to be escaped in + a regular expression. + + fn is an optional function that will be applied to the replacement text + just before replacement. 
+ """ + fn = fn or (lambda s: s) + r = re.compile(r'%s(%s)' % (prefix, '|'.join(mapping.keys()))) + return r.sub(lambda x: fn(mapping[x.group()[1:]]), s) + +def getport(port): + """Return the port for a given network service. + + If port is an integer, it's returned as is. If it's a string, it's + looked up using socket.getservbyname(). If there's no matching + service, util.Abort is raised. + """ + try: + return int(port) + except ValueError: + pass + + try: + return socket.getservbyname(port) + except socket.error: + raise Abort(_("no port number associated with service '%s'") % port) + +_booleans = {'1': True, 'yes': True, 'true': True, 'on': True, 'always': True, + '0': False, 'no': False, 'false': False, 'off': False, + 'never': False} + +def parsebool(s): + """Parse s into a boolean. + + If s is not a valid boolean, returns None. + """ + return _booleans.get(s.lower(), None) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/util.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/util.pyo Binary files differnew file mode 100644 index 0000000..829410c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/util.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/verify.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/verify.py new file mode 100644 index 0000000..bb1d6c2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/verify.py @@ -0,0 +1,301 @@ +# verify.py - repository integrity checking for Mercurial +# +# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from node import nullid, short +from i18n import _ +import os +import revlog, util, error + +def verify(repo): + lock = repo.lock() + try: + return _verify(repo) + finally: + lock.release() + +def _verify(repo): + mflinkrevs = {} + filelinkrevs = {} + filenodes = {} + revisions = 0 + badrevs = set() + errors = [0] + warnings = [0] + ui = repo.ui + cl = repo.changelog + mf = repo.manifest + lrugetctx = util.lrucachefunc(repo.changectx) + + if not repo.cancopy(): + raise util.Abort(_("cannot verify bundle or remote repos")) + + def err(linkrev, msg, filename=None): + if linkrev != None: + badrevs.add(linkrev) + else: + linkrev = '?' + msg = "%s: %s" % (linkrev, msg) + if filename: + msg = "%s@%s" % (filename, msg) + ui.warn(" " + msg + "\n") + errors[0] += 1 + + def exc(linkrev, msg, inst, filename=None): + if isinstance(inst, KeyboardInterrupt): + ui.warn(_("interrupted")) + raise + if not str(inst): + inst = repr(inst) + err(linkrev, "%s: %s" % (msg, inst), filename) + + def warn(msg): + ui.warn(msg + "\n") + warnings[0] += 1 + + def checklog(obj, name, linkrev): + if not len(obj) and (havecl or havemf): + err(linkrev, _("empty or missing %s") % name) + return + + d = obj.checksize() + if d[0]: + err(None, _("data length off by %d bytes") % d[0], name) + if d[1]: + err(None, _("index contains %d extra bytes") % d[1], name) + + if obj.version != revlog.REVLOGV0: + if not revlogv1: + warn(_("warning: `%s' uses revlog format 1") % name) + elif revlogv1: + warn(_("warning: `%s' uses revlog format 0") % name) + + def checkentry(obj, i, node, seen, linkrevs, f): + lr = obj.linkrev(obj.rev(node)) + if lr < 0 or (havecl and lr not in linkrevs): + if lr < 0 or lr >= len(cl): + msg = _("rev %d points to nonexistent changeset %d") + else: + msg = _("rev %d points to unexpected changeset %d") + err(None, msg % (i, lr), f) + if linkrevs: + if f and len(linkrevs) > 1: + try: + # attempt to filter down to real linkrevs + linkrevs = [l for l in linkrevs + if 
lrugetctx(l)[f].filenode() == node] + except: + pass + warn(_(" (expected %s)") % " ".join(map(str, linkrevs))) + lr = None # can't be trusted + + try: + p1, p2 = obj.parents(node) + if p1 not in seen and p1 != nullid: + err(lr, _("unknown parent 1 %s of %s") % + (short(p1), short(n)), f) + if p2 not in seen and p2 != nullid: + err(lr, _("unknown parent 2 %s of %s") % + (short(p2), short(p1)), f) + except Exception, inst: + exc(lr, _("checking parents of %s") % short(node), inst, f) + + if node in seen: + err(lr, _("duplicate revision %d (%d)") % (i, seen[n]), f) + seen[n] = i + return lr + + if os.path.exists(repo.sjoin("journal")): + ui.warn(_("abandoned transaction found - run hg recover\n")) + + revlogv1 = cl.version != revlog.REVLOGV0 + if ui.verbose or not revlogv1: + ui.status(_("repository uses revlog format %d\n") % + (revlogv1 and 1 or 0)) + + havecl = len(cl) > 0 + havemf = len(mf) > 0 + + ui.status(_("checking changesets\n")) + seen = {} + checklog(cl, "changelog", 0) + total = len(repo) + for i in repo: + ui.progress(_('checking'), i, total=total, unit=_('changesets')) + n = cl.node(i) + checkentry(cl, i, n, seen, [i], "changelog") + + try: + changes = cl.read(n) + mflinkrevs.setdefault(changes[0], []).append(i) + for f in changes[3]: + filelinkrevs.setdefault(f, []).append(i) + except Exception, inst: + exc(i, _("unpacking changeset %s") % short(n), inst) + ui.progress(_('checking'), None) + + ui.status(_("checking manifests\n")) + seen = {} + checklog(mf, "manifest", 0) + total = len(mf) + for i in mf: + ui.progress(_('checking'), i, total=total, unit=_('manifests')) + n = mf.node(i) + lr = checkentry(mf, i, n, seen, mflinkrevs.get(n, []), "manifest") + if n in mflinkrevs: + del mflinkrevs[n] + else: + err(lr, _("%s not in changesets") % short(n), "manifest") + + try: + for f, fn in mf.readdelta(n).iteritems(): + if not f: + err(lr, _("file without name in manifest")) + elif f != "/dev/null": + filenodes.setdefault(f, {}).setdefault(fn, lr) + except 
Exception, inst: + exc(lr, _("reading manifest delta %s") % short(n), inst) + ui.progress(_('checking'), None) + + ui.status(_("crosschecking files in changesets and manifests\n")) + + total = len(mflinkrevs) + len(filelinkrevs) + len(filenodes) + count = 0 + if havemf: + for c, m in sorted([(c, m) for m in mflinkrevs + for c in mflinkrevs[m]]): + count += 1 + ui.progress(_('crosschecking'), count, total=total) + err(c, _("changeset refers to unknown manifest %s") % short(m)) + mflinkrevs = None # del is bad here due to scope issues + + for f in sorted(filelinkrevs): + count += 1 + ui.progress(_('crosschecking'), count, total=total) + if f not in filenodes: + lr = filelinkrevs[f][0] + err(lr, _("in changeset but not in manifest"), f) + + if havecl: + for f in sorted(filenodes): + count += 1 + ui.progress(_('crosschecking'), count, total=total) + if f not in filelinkrevs: + try: + fl = repo.file(f) + lr = min([fl.linkrev(fl.rev(n)) for n in filenodes[f]]) + except: + lr = None + err(lr, _("in manifest but not in changeset"), f) + + ui.progress(_('crosschecking'), None) + + ui.status(_("checking files\n")) + + storefiles = set() + for f, f2, size in repo.store.datafiles(): + if not f: + err(None, _("cannot decode filename '%s'") % f2) + elif size > 0 or not revlogv1: + storefiles.add(f) + + files = sorted(set(filenodes) | set(filelinkrevs)) + total = len(files) + for i, f in enumerate(files): + ui.progress(_('checking'), i, item=f, total=total) + try: + linkrevs = filelinkrevs[f] + except KeyError: + # in manifest but not in changelog + linkrevs = [] + + if linkrevs: + lr = linkrevs[0] + else: + lr = None + + try: + fl = repo.file(f) + except error.RevlogError, e: + err(lr, _("broken revlog! 
(%s)") % e, f) + continue + + for ff in fl.files(): + try: + storefiles.remove(ff) + except KeyError: + err(lr, _("missing revlog!"), ff) + + checklog(fl, f, lr) + seen = {} + rp = None + for i in fl: + revisions += 1 + n = fl.node(i) + lr = checkentry(fl, i, n, seen, linkrevs, f) + if f in filenodes: + if havemf and n not in filenodes[f]: + err(lr, _("%s not in manifests") % (short(n)), f) + else: + del filenodes[f][n] + + # verify contents + try: + l = len(fl.read(n)) + rp = fl.renamed(n) + if l != fl.size(i): + if len(fl.revision(n)) != fl.size(i): + err(lr, _("unpacked size is %s, %s expected") % + (l, fl.size(i)), f) + except Exception, inst: + exc(lr, _("unpacking %s") % short(n), inst, f) + + # check renames + try: + if rp: + if lr is not None and ui.verbose: + ctx = lrugetctx(lr) + found = False + for pctx in ctx.parents(): + if rp[0] in pctx: + found = True + break + if not found: + warn(_("warning: copy source of '%s' not" + " in parents of %s") % (f, ctx)) + fl2 = repo.file(rp[0]) + if not len(fl2): + err(lr, _("empty or missing copy source revlog %s:%s") + % (rp[0], short(rp[1])), f) + elif rp[1] == nullid: + ui.note(_("warning: %s@%s: copy source" + " revision is nullid %s:%s\n") + % (f, lr, rp[0], short(rp[1]))) + else: + fl2.rev(rp[1]) + except Exception, inst: + exc(lr, _("checking rename of %s") % short(n), inst, f) + + # cross-check + if f in filenodes: + fns = [(lr, n) for n, lr in filenodes[f].iteritems()] + for lr, node in sorted(fns): + err(lr, _("%s in manifests not found") % short(node), f) + ui.progress(_('checking'), None) + + for f in storefiles: + warn(_("warning: orphan revlog '%s'") % f) + + ui.status(_("%d files, %d changesets, %d total revisions\n") % + (len(files), len(cl), revisions)) + if warnings[0]: + ui.warn(_("%d warnings encountered!\n") % warnings[0]) + if errors[0]: + ui.warn(_("%d integrity errors encountered!\n") % errors[0]) + if badrevs: + ui.warn(_("(first damaged changeset appears to be %d)\n") + % min(badrevs)) + 
return 1 diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/verify.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/verify.pyo Binary files differnew file mode 100644 index 0000000..ffe597d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/verify.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/win32.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/win32.py new file mode 100644 index 0000000..71179a1 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/win32.py @@ -0,0 +1,180 @@ +# win32.py - utility functions that use win32 API +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""Utility functions that use win32 API. + +Mark Hammond's win32all package allows better functionality on +Windows. This module overrides definitions in util.py. If not +available, import of this module will fail, and generic code will be +used. 
+""" + +import win32api + +import errno, os, sys, pywintypes, win32con, win32file, win32process +import winerror, win32gui, win32console +import osutil, encoding +from win32com.shell import shell, shellcon + +def os_link(src, dst): + try: + win32file.CreateHardLink(dst, src) + except pywintypes.error: + raise OSError(errno.EINVAL, 'target implements hardlinks improperly') + except NotImplementedError: # Another fake error win Win98 + raise OSError(errno.EINVAL, 'Hardlinking not supported') + +def _getfileinfo(pathname): + """Return number of hardlinks for the given file.""" + try: + fh = win32file.CreateFile(pathname, + win32file.GENERIC_READ, win32file.FILE_SHARE_READ, + None, win32file.OPEN_EXISTING, 0, None) + except pywintypes.error: + raise OSError(errno.ENOENT, 'The system cannot find the file specified') + try: + return win32file.GetFileInformationByHandle(fh) + finally: + fh.Close() + +def nlinks(pathname): + """Return number of hardlinks for the given file.""" + return _getfileinfo(pathname)[7] + +def samefile(fpath1, fpath2): + """Returns whether fpath1 and fpath2 refer to the same file. This is only + guaranteed to work for files, not directories.""" + res1 = _getfileinfo(fpath1) + res2 = _getfileinfo(fpath2) + # Index 4 is the volume serial number, and 8 and 9 contain the file ID + return res1[4] == res2[4] and res1[8] == res2[8] and res1[9] == res2[9] + +def samedevice(fpath1, fpath2): + """Returns whether fpath1 and fpath2 are on the same device. 
This is only + guaranteed to work for files, not directories.""" + res1 = _getfileinfo(fpath1) + res2 = _getfileinfo(fpath2) + return res1[4] == res2[4] + +def testpid(pid): + '''return True if pid is still running or unable to + determine, False otherwise''' + try: + handle = win32api.OpenProcess( + win32con.PROCESS_QUERY_INFORMATION, False, pid) + if handle: + status = win32process.GetExitCodeProcess(handle) + return status == win32con.STILL_ACTIVE + except pywintypes.error, details: + return details[0] != winerror.ERROR_INVALID_PARAMETER + return True + +def lookup_reg(key, valname=None, scope=None): + ''' Look up a key/value name in the Windows registry. + + valname: value name. If unspecified, the default value for the key + is used. + scope: optionally specify scope for registry lookup, this can be + a sequence of scopes to look up in order. Default (CURRENT_USER, + LOCAL_MACHINE). + ''' + try: + from _winreg import HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE, \ + QueryValueEx, OpenKey + except ImportError: + return None + + if scope is None: + scope = (HKEY_CURRENT_USER, HKEY_LOCAL_MACHINE) + elif not isinstance(scope, (list, tuple)): + scope = (scope,) + for s in scope: + try: + val = QueryValueEx(OpenKey(s, key), valname)[0] + # never let a Unicode string escape into the wild + return encoding.tolocal(val.encode('UTF-8')) + except EnvironmentError: + pass + +def system_rcpath_win32(): + '''return default os-specific hgrc search path''' + filename = win32api.GetModuleFileName(0) + # Use mercurial.ini found in directory with hg.exe + progrc = os.path.join(os.path.dirname(filename), 'mercurial.ini') + if os.path.isfile(progrc): + return [progrc] + # Use hgrc.d found in directory with hg.exe + progrcd = os.path.join(os.path.dirname(filename), 'hgrc.d') + if os.path.isdir(progrcd): + rcpath = [] + for f, kind in osutil.listdir(progrcd): + if f.endswith('.rc'): + rcpath.append(os.path.join(progrcd, f)) + return rcpath + # else look for a system rcpath in the registry 
+ try: + value = win32api.RegQueryValue( + win32con.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mercurial') + rcpath = [] + for p in value.split(os.pathsep): + if p.lower().endswith('mercurial.ini'): + rcpath.append(p) + elif os.path.isdir(p): + for f, kind in osutil.listdir(p): + if f.endswith('.rc'): + rcpath.append(os.path.join(p, f)) + return rcpath + except pywintypes.error: + return [] + +def user_rcpath_win32(): + '''return os-specific hgrc search path to the user dir''' + userdir = os.path.expanduser('~') + if sys.getwindowsversion()[3] != 2 and userdir == '~': + # We are on win < nt: fetch the APPDATA directory location and use + # the parent directory as the user home dir. + appdir = shell.SHGetPathFromIDList( + shell.SHGetSpecialFolderLocation(0, shellcon.CSIDL_APPDATA)) + userdir = os.path.dirname(appdir) + return [os.path.join(userdir, 'mercurial.ini'), + os.path.join(userdir, '.hgrc')] + +def getuser(): + '''return name of current user''' + return win32api.GetUserName() + +def set_signal_handler_win32(): + """Register a termination handler for console events including + CTRL+C. python signal handlers do not work well with socket + operations. 
+ """ + def handler(event): + win32process.ExitProcess(1) + win32api.SetConsoleCtrlHandler(handler) + +def hidewindow(): + def callback(*args, **kwargs): + hwnd, pid = args + wpid = win32process.GetWindowThreadProcessId(hwnd)[1] + if pid == wpid: + win32gui.ShowWindow(hwnd, win32con.SW_HIDE) + + pid = win32process.GetCurrentProcessId() + win32gui.EnumWindows(callback, pid) + +def termwidth(): + try: + # Query stderr to avoid problems with redirections + screenbuf = win32console.GetStdHandle(win32console.STD_ERROR_HANDLE) + try: + window = screenbuf.GetConsoleScreenBufferInfo()['Window'] + width = window.Right - window.Left + return width + finally: + screenbuf.Detach() + except pywintypes.error: + return 79 diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/win32.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/win32.pyo Binary files differnew file mode 100644 index 0000000..1438ea2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/win32.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/windows.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/windows.py new file mode 100644 index 0000000..a4c5ff4 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/windows.py @@ -0,0 +1,375 @@ +# windows.py - Windows utility function implementations for Mercurial +# +# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from i18n import _ +import osutil, error +import errno, msvcrt, os, re, sys, random, subprocess + +nulldev = 'NUL:' +umask = 002 + +# wrap osutil.posixfile to provide friendlier exceptions +def posixfile(name, mode='r', buffering=-1): + try: + return osutil.posixfile(name, mode, buffering) + except WindowsError, err: + raise IOError(err.errno, '%s: %s' % (name, err.strerror)) +posixfile.__doc__ = osutil.posixfile.__doc__ + +class winstdout(object): + '''stdout on windows misbehaves if sent through a pipe''' + + def __init__(self, fp): + self.fp = fp + + def __getattr__(self, key): + return getattr(self.fp, key) + + def close(self): + try: + self.fp.close() + except: pass + + def write(self, s): + try: + # This is workaround for "Not enough space" error on + # writing large size of data to console. + limit = 16000 + l = len(s) + start = 0 + self.softspace = 0 + while start < l: + end = start + limit + self.fp.write(s[start:end]) + start = end + except IOError, inst: + if inst.errno != 0: + raise + self.close() + raise IOError(errno.EPIPE, 'Broken pipe') + + def flush(self): + try: + return self.fp.flush() + except IOError, inst: + if inst.errno != errno.EINVAL: + raise + self.close() + raise IOError(errno.EPIPE, 'Broken pipe') + +sys.stdout = winstdout(sys.stdout) + +def _is_win_9x(): + '''return true if run on windows 95, 98 or me.''' + try: + return sys.getwindowsversion()[3] == 1 + except AttributeError: + return 'command' in os.environ.get('comspec', '') + +def openhardlinks(): + return not _is_win_9x() and "win32api" in globals() + +def system_rcpath(): + try: + return system_rcpath_win32() + except: + return [r'c:\mercurial\mercurial.ini'] + +def user_rcpath(): + '''return os-specific hgrc search path to the user dir''' + try: + path = user_rcpath_win32() + except: + home = os.path.expanduser('~') + path = [os.path.join(home, 'mercurial.ini'), + os.path.join(home, '.hgrc')] + userprofile = os.environ.get('USERPROFILE') + if userprofile: + 
path.append(os.path.join(userprofile, 'mercurial.ini')) + path.append(os.path.join(userprofile, '.hgrc')) + return path + +def parse_patch_output(output_line): + """parses the output produced by patch and returns the filename""" + pf = output_line[14:] + if pf[0] == '`': + pf = pf[1:-1] # Remove the quotes + return pf + +def sshargs(sshcmd, host, user, port): + '''Build argument list for ssh or Plink''' + pflag = 'plink' in sshcmd.lower() and '-P' or '-p' + args = user and ("%s@%s" % (user, host)) or host + return port and ("%s %s %s" % (args, pflag, port)) or args + +def testpid(pid): + '''return False if pid dead, True if running or not known''' + return True + +def set_flags(f, l, x): + pass + +def set_binary(fd): + # When run without console, pipes may expose invalid + # fileno(), usually set to -1. + if hasattr(fd, 'fileno') and fd.fileno() >= 0: + msvcrt.setmode(fd.fileno(), os.O_BINARY) + +def pconvert(path): + return '/'.join(path.split(os.sep)) + +def localpath(path): + return path.replace('/', '\\') + +def normpath(path): + return pconvert(os.path.normpath(path)) + +def realpath(path): + ''' + Returns the true, canonical file system path equivalent to the given + path. + ''' + # TODO: There may be a more clever way to do this that also handles other, + # less common file systems. + return os.path.normpath(os.path.normcase(os.path.realpath(path))) + +def samestat(s1, s2): + return False + +# A sequence of backslashes is special iff it precedes a double quote: +# - if there's an even number of backslashes, the double quote is not +# quoted (i.e. 
it ends the quoted region) +# - if there's an odd number of backslashes, the double quote is quoted +# - in both cases, every pair of backslashes is unquoted into a single +# backslash +# (See http://msdn2.microsoft.com/en-us/library/a1y7w461.aspx ) +# So, to quote a string, we must surround it in double quotes, double +# the number of backslashes that preceed double quotes and add another +# backslash before every double quote (being careful with the double +# quote we've appended to the end) +_quotere = None +def shellquote(s): + global _quotere + if _quotere is None: + _quotere = re.compile(r'(\\*)("|\\$)') + return '"%s"' % _quotere.sub(r'\1\1\\\2', s) + +def quotecommand(cmd): + """Build a command string suitable for os.popen* calls.""" + if sys.version_info < (2, 7, 1): + # Python versions since 2.7.1 do this extra quoting themselves + return '"' + cmd + '"' + return cmd + +def popen(command, mode='r'): + # Work around "popen spawned process may not write to stdout + # under windows" + # http://bugs.python.org/issue1366 + command += " 2> %s" % nulldev + return os.popen(quotecommand(command), mode) + +def explain_exit(code): + return _("exited with status %d") % code, code + +# if you change this stub into a real check, please try to implement the +# username and groupname functions above, too. +def isowner(st): + return True + +def find_exe(command): + '''Find executable for command searching like cmd.exe does. + If command is a basename then PATH is searched for command. + PATH isn't searched if command is an absolute or relative path. + An extension from PATHEXT is found and added if not present. 
+ If command isn't found None is returned.''' + pathext = os.environ.get('PATHEXT', '.COM;.EXE;.BAT;.CMD') + pathexts = [ext for ext in pathext.lower().split(os.pathsep)] + if os.path.splitext(command)[1].lower() in pathexts: + pathexts = [''] + + def findexisting(pathcommand): + 'Will append extension (if needed) and return existing file' + for ext in pathexts: + executable = pathcommand + ext + if os.path.exists(executable): + return executable + return None + + if os.sep in command: + return findexisting(command) + + for path in os.environ.get('PATH', '').split(os.pathsep): + executable = findexisting(os.path.join(path, command)) + if executable is not None: + return executable + return findexisting(os.path.expanduser(os.path.expandvars(command))) + +def set_signal_handler(): + try: + set_signal_handler_win32() + except NameError: + pass + +def statfiles(files): + '''Stat each file in files and yield stat or None if file does not exist. + Cluster and cache stat per directory to minimize number of OS stat calls.''' + ncase = os.path.normcase + dircache = {} # dirname -> filename -> status | None if file does not exist + for nf in files: + nf = ncase(nf) + dir, base = os.path.split(nf) + if not dir: + dir = '.' + cache = dircache.get(dir, None) + if cache is None: + try: + dmap = dict([(ncase(n), s) + for n, k, s in osutil.listdir(dir, True)]) + except OSError, err: + # handle directory not found in Python version prior to 2.5 + # Python <= 2.4 returns native Windows code 3 in errno + # Python >= 2.5 returns ENOENT and adds winerror field + # EINVAL is raised if dir is not a directory. 
+ if err.errno not in (3, errno.ENOENT, errno.EINVAL, + errno.ENOTDIR): + raise + dmap = {} + cache = dircache.setdefault(dir, dmap) + yield cache.get(base, None) + +def getuser(): + '''return name of current user''' + raise error.Abort(_('user name not available - set USERNAME ' + 'environment variable')) + +def username(uid=None): + """Return the name of the user with the given uid. + + If uid is None, return the name of the current user.""" + return None + +def groupname(gid=None): + """Return the name of the group with the given gid. + + If gid is None, return the name of the current group.""" + return None + +def _removedirs(name): + """special version of os.removedirs that does not remove symlinked + directories or junction points if they actually contain files""" + if osutil.listdir(name): + return + os.rmdir(name) + head, tail = os.path.split(name) + if not tail: + head, tail = os.path.split(head) + while head and tail: + try: + if osutil.listdir(head): + return + os.rmdir(head) + except: + break + head, tail = os.path.split(head) + +def unlink(f): + """unlink and remove the directory if it is empty""" + os.unlink(f) + # try removing directories that might now be empty + try: + _removedirs(os.path.dirname(f)) + except OSError: + pass + +def rename(src, dst): + '''atomically rename file src to dst, replacing dst if it exists''' + try: + os.rename(src, dst) + except OSError: # FIXME: check err (EEXIST ?) + + # On windows, rename to existing file is not allowed, so we + # must delete destination first. But if a file is open, unlink + # schedules it for delete but does not delete it. Rename + # happens immediately even for open files, so we rename + # destination to a temporary name, then delete that. Then + # rename is safe to do. + # The temporary name is chosen at random to avoid the situation + # where a file is left lying around from a previous aborted run. 
+ + for tries in xrange(10): + temp = '%s-%08x' % (dst, random.randint(0, 0xffffffff)) + try: + os.rename(dst, temp) # raises OSError EEXIST if temp exists + break + except OSError, e: + if e.errno != errno.EEXIST: + raise + else: + raise IOError, (errno.EEXIST, "No usable temporary filename found") + + try: + os.unlink(temp) + except: + # Some rude AV-scanners on Windows may cause the unlink to + # fail. Not aborting here just leaks the temp file, whereas + # aborting at this point may leave serious inconsistencies. + # Ideally, we would notify the user here. + pass + os.rename(src, dst) + +def spawndetached(args): + # No standard library function really spawns a fully detached + # process under win32 because they allocate pipes or other objects + # to handle standard streams communications. Passing these objects + # to the child process requires handle inheritance to be enabled + # which makes really detached processes impossible. + class STARTUPINFO: + dwFlags = subprocess.STARTF_USESHOWWINDOW + hStdInput = None + hStdOutput = None + hStdError = None + wShowWindow = subprocess.SW_HIDE + + args = subprocess.list2cmdline(args) + # Not running the command in shell mode makes python26 hang when + # writing to hgweb output socket. + comspec = os.environ.get("COMSPEC", "cmd.exe") + args = comspec + " /c " + args + hp, ht, pid, tid = subprocess.CreateProcess( + None, args, + # no special security + None, None, + # Do not inherit handles + 0, + # DETACHED_PROCESS + 0x00000008, + os.environ, + os.getcwd(), + STARTUPINFO()) + return pid + +def gethgcmd(): + return [sys.executable] + sys.argv[:1] + +def termwidth(): + # cmd.exe does not handle CR like a unix console, the CR is + # counted in the line length. On 80 columns consoles, if 80 + # characters are written, the following CR won't apply on the + # current line but on the new one. Keep room for it. 
+ return 79 + +def groupmembers(name): + # Don't support groups on Windows for now + raise KeyError() + +try: + # override functions with win32 versions if possible + from win32 import * +except ImportError: + pass + +expandglobs = True diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/windows.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/windows.pyo Binary files differnew file mode 100644 index 0000000..aa5e567 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/windows.pyo diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/wireproto.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/wireproto.py new file mode 100644 index 0000000..372e842 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/wireproto.py @@ -0,0 +1,338 @@ +# wireproto.py - generic wire protocol support functions +# +# Copyright 2005-2010 Matt Mackall <mpm@selenic.com> +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +import urllib, tempfile, os, sys +from i18n import _ +from node import bin, hex +import changegroup as changegroupmod +import repo, error, encoding, util, store +import pushkey as pushkeymod + +# list of nodes encoding / decoding + +def decodelist(l, sep=' '): + return map(bin, l.split(sep)) + +def encodelist(l, sep=' '): + return sep.join(map(hex, l)) + +# client side + +class wirerepository(repo.repository): + def lookup(self, key): + self.requirecap('lookup', _('look up remote revision')) + d = self._call("lookup", key=key) + success, data = d[:-1].split(" ", 1) + if int(success): + return bin(data) + self._abort(error.RepoError(data)) + + def heads(self): + d = self._call("heads") + try: + return decodelist(d[:-1]) + except: + self._abort(error.ResponseError(_("unexpected response:"), d)) + + def branchmap(self): + d = self._call("branchmap") + try: + branchmap = {} + for branchpart in d.splitlines(): + branchname, branchheads = branchpart.split(' ', 1) + branchname = urllib.unquote(branchname) + # Earlier servers (1.3.x) send branch names in (their) local + # charset. The best we can do is assume it's identical to our + # own local charset, in case it's not utf-8. 
+ try: + branchname.decode('utf-8') + except UnicodeDecodeError: + branchname = encoding.fromlocal(branchname) + branchheads = decodelist(branchheads) + branchmap[branchname] = branchheads + return branchmap + except TypeError: + self._abort(error.ResponseError(_("unexpected response:"), d)) + + def branches(self, nodes): + n = encodelist(nodes) + d = self._call("branches", nodes=n) + try: + br = [tuple(decodelist(b)) for b in d.splitlines()] + return br + except: + self._abort(error.ResponseError(_("unexpected response:"), d)) + + def between(self, pairs): + batch = 8 # avoid giant requests + r = [] + for i in xrange(0, len(pairs), batch): + n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]]) + d = self._call("between", pairs=n) + try: + r.extend(l and decodelist(l) or [] for l in d.splitlines()) + except: + self._abort(error.ResponseError(_("unexpected response:"), d)) + return r + + def pushkey(self, namespace, key, old, new): + if not self.capable('pushkey'): + return False + d = self._call("pushkey", + namespace=namespace, key=key, old=old, new=new) + return bool(int(d)) + + def listkeys(self, namespace): + if not self.capable('pushkey'): + return {} + d = self._call("listkeys", namespace=namespace) + r = {} + for l in d.splitlines(): + k, v = l.split('\t') + r[k.decode('string-escape')] = v.decode('string-escape') + return r + + def stream_out(self): + return self._callstream('stream_out') + + def changegroup(self, nodes, kind): + n = encodelist(nodes) + f = self._callstream("changegroup", roots=n) + return changegroupmod.unbundle10(self._decompress(f), 'UN') + + def changegroupsubset(self, bases, heads, kind): + self.requirecap('changegroupsubset', _('look up remote changes')) + bases = encodelist(bases) + heads = encodelist(heads) + f = self._callstream("changegroupsubset", + bases=bases, heads=heads) + return changegroupmod.unbundle10(self._decompress(f), 'UN') + + def unbundle(self, cg, heads, source): + '''Send cg (a readable file-like object 
representing the + changegroup to push, typically a chunkbuffer object) to the + remote server as a bundle. Return an integer indicating the + result of the push (see localrepository.addchangegroup()).''' + + ret, output = self._callpush("unbundle", cg, heads=encodelist(heads)) + if ret == "": + raise error.ResponseError( + _('push failed:'), output) + try: + ret = int(ret) + except ValueError: + raise error.ResponseError( + _('push failed (unexpected response):'), ret) + + for l in output.splitlines(True): + self.ui.status(_('remote: '), l) + return ret + +# server side + +class streamres(object): + def __init__(self, gen): + self.gen = gen + +class pushres(object): + def __init__(self, res): + self.res = res + +class pusherr(object): + def __init__(self, res): + self.res = res + +def dispatch(repo, proto, command): + func, spec = commands[command] + args = proto.getargs(spec) + return func(repo, proto, *args) + +def between(repo, proto, pairs): + pairs = [decodelist(p, '-') for p in pairs.split(" ")] + r = [] + for b in repo.between(pairs): + r.append(encodelist(b) + "\n") + return "".join(r) + +def branchmap(repo, proto): + branchmap = repo.branchmap() + heads = [] + for branch, nodes in branchmap.iteritems(): + branchname = urllib.quote(branch) + branchnodes = encodelist(nodes) + heads.append('%s %s' % (branchname, branchnodes)) + return '\n'.join(heads) + +def branches(repo, proto, nodes): + nodes = decodelist(nodes) + r = [] + for b in repo.branches(nodes): + r.append(encodelist(b) + "\n") + return "".join(r) + +def capabilities(repo, proto): + caps = 'lookup changegroupsubset branchmap pushkey'.split() + if _allowstream(repo.ui): + requiredformats = repo.requirements & repo.supportedformats + # if our local revlogs are just revlogv1, add 'stream' cap + if not requiredformats - set(('revlogv1',)): + caps.append('stream') + # otherwise, add 'streamreqs' detailing our local revlog format + else: + caps.append('streamreqs=%s' % ','.join(requiredformats)) + 
caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority)) + return ' '.join(caps) + +def changegroup(repo, proto, roots): + nodes = decodelist(roots) + cg = repo.changegroup(nodes, 'serve') + return streamres(proto.groupchunks(cg)) + +def changegroupsubset(repo, proto, bases, heads): + bases = decodelist(bases) + heads = decodelist(heads) + cg = repo.changegroupsubset(bases, heads, 'serve') + return streamres(proto.groupchunks(cg)) + +def heads(repo, proto): + h = repo.heads() + return encodelist(h) + "\n" + +def hello(repo, proto): + '''the hello command returns a set of lines describing various + interesting things about the server, in an RFC822-like format. + Currently the only one defined is "capabilities", which + consists of a line in the form: + + capabilities: space separated list of tokens + ''' + return "capabilities: %s\n" % (capabilities(repo, proto)) + +def listkeys(repo, proto, namespace): + d = pushkeymod.list(repo, namespace).items() + t = '\n'.join(['%s\t%s' % (k.encode('string-escape'), + v.encode('string-escape')) for k, v in d]) + return t + +def lookup(repo, proto, key): + try: + r = hex(repo.lookup(key)) + success = 1 + except Exception, inst: + r = str(inst) + success = 0 + return "%s %s\n" % (success, r) + +def pushkey(repo, proto, namespace, key, old, new): + r = pushkeymod.push(repo, namespace, key, old, new) + return '%s\n' % int(r) + +def _allowstream(ui): + return ui.configbool('server', 'uncompressed', True, untrusted=True) + +def stream(repo, proto): + '''If the server supports streaming clone, it advertises the "stream" + capability with a value representing the version and flags of the repo + it is serving. Client checks to see if it understands the format. + + The format is simple: the server writes out a line with the amount + of files, then the total amount of bytes to be transfered (separated + by a space). 
Then, for each file, the server first writes the filename + and filesize (separated by the null character), then the file contents. + ''' + + if not _allowstream(repo.ui): + return '1\n' + + entries = [] + total_bytes = 0 + try: + # get consistent snapshot of repo, lock during scan + lock = repo.lock() + try: + repo.ui.debug('scanning\n') + for name, ename, size in repo.store.walk(): + entries.append((name, size)) + total_bytes += size + finally: + lock.release() + except error.LockError: + return '2\n' # error: 2 + + def streamer(repo, entries, total): + '''stream out all metadata files in repository.''' + yield '0\n' # success + repo.ui.debug('%d files, %d bytes to transfer\n' % + (len(entries), total_bytes)) + yield '%d %d\n' % (len(entries), total_bytes) + for name, size in entries: + repo.ui.debug('sending %s (%d bytes)\n' % (name, size)) + # partially encode name over the wire for backwards compat + yield '%s\0%d\n' % (store.encodedir(name), size) + for chunk in util.filechunkiter(repo.sopener(name), limit=size): + yield chunk + + return streamres(streamer(repo, entries, total_bytes)) + +def unbundle(repo, proto, heads): + their_heads = decodelist(heads) + + def check_heads(): + heads = repo.heads() + return their_heads == ['force'] or their_heads == heads + + proto.redirect() + + # fail early if possible + if not check_heads(): + return pusherr('unsynced changes') + + # write bundle data to temporary file because it can be big + fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-') + fp = os.fdopen(fd, 'wb+') + r = 0 + try: + proto.getfile(fp) + lock = repo.lock() + try: + if not check_heads(): + # someone else committed/pushed/unbundled while we + # were transferring data + return pusherr('unsynced changes') + + # push can proceed + fp.seek(0) + gen = changegroupmod.readbundle(fp, None) + + try: + r = repo.addchangegroup(gen, 'serve', proto._client(), + lock=lock) + except util.Abort, inst: + sys.stderr.write("abort: %s\n" % inst) + finally: + 
lock.release() + return pushres(r) + + finally: + fp.close() + os.unlink(tempname) + +commands = { + 'between': (between, 'pairs'), + 'branchmap': (branchmap, ''), + 'branches': (branches, 'nodes'), + 'capabilities': (capabilities, ''), + 'changegroup': (changegroup, 'roots'), + 'changegroupsubset': (changegroupsubset, 'bases heads'), + 'heads': (heads, ''), + 'hello': (hello, ''), + 'listkeys': (listkeys, 'namespace'), + 'lookup': (lookup, 'key'), + 'pushkey': (pushkey, 'namespace key old new'), + 'stream_out': (stream, ''), + 'unbundle': (unbundle, 'heads'), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/wireproto.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/wireproto.pyo Binary files differnew file mode 100644 index 0000000..981a393 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/mercurial/wireproto.pyo |