summaryrefslogtreecommitdiff
path: root/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext
diff options
context:
space:
mode:
author     Nishanth Amuluru    2011-01-08 11:20:57 +0530
committer  Nishanth Amuluru    2011-01-08 11:20:57 +0530
commit    65411d01d448ff0cd4abd14eee14cf60b5f8fc20 (patch)
tree      b4c404363c4c63a61d6e2f8bd26c5b057c1fb09d /eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext
parent    2e35094d43b4cc6974172e1febf76abb50f086ec (diff)
downloadpytask-65411d01d448ff0cd4abd14eee14cf60b5f8fc20.tar.gz
pytask-65411d01d448ff0cd4abd14eee14cf60b5f8fc20.tar.bz2
pytask-65411d01d448ff0cd4abd14eee14cf60b5f8fc20.zip
Added buildout stuff and made changes accordingly
--HG-- rename : profile/management/__init__.py => eggs/djangorecipe-0.20-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/djangorecipe-0.20-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/infrae.subversion-1.4.5-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/infrae.subversion-1.4.5-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/py-1.4.0-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/py-1.4.0-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/zc.buildout-1.5.2-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/zc.buildout-1.5.2-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/zc.recipe.egg-1.3.2-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/zc.recipe.egg-1.3.2-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => parts/django/Django.egg-info/dependency_links.txt rename : taskapp/models.py => parts/django/django/conf/app_template/models.py rename : taskapp/tests.py => parts/django/django/conf/app_template/tests.py rename : taskapp/views.py => parts/django/django/conf/app_template/views.py rename : taskapp/views.py => parts/django/django/contrib/gis/tests/geo3d/views.py rename : profile/management/__init__.py => parts/django/tests/modeltests/delete/__init__.py rename : profile/management/__init__.py => parts/django/tests/modeltests/files/__init__.py rename : profile/management/__init__.py => parts/django/tests/modeltests/invalid_models/__init__.py rename : profile/management/__init__.py => 
parts/django/tests/modeltests/m2m_signals/__init__.py rename : profile/management/__init__.py => parts/django/tests/modeltests/model_package/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/management/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/management/commands/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/models.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/delete_regress/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/file_storage/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/max_lengths/__init__.py rename : profile/forms.py => pytask/profile/forms.py rename : profile/management/__init__.py => pytask/profile/management/__init__.py rename : profile/management/commands/seed_db.py => pytask/profile/management/commands/seed_db.py rename : profile/models.py => pytask/profile/models.py rename : profile/templatetags/user_tags.py => pytask/profile/templatetags/user_tags.py rename : taskapp/tests.py => pytask/profile/tests.py rename : profile/urls.py => pytask/profile/urls.py rename : profile/utils.py => pytask/profile/utils.py rename : profile/views.py => pytask/profile/views.py rename : static/css/base.css => pytask/static/css/base.css rename : taskapp/tests.py => pytask/taskapp/tests.py rename : taskapp/views.py => pytask/taskapp/views.py rename : templates/base.html => pytask/templates/base.html rename : templates/profile/browse_notifications.html => pytask/templates/profile/browse_notifications.html rename : templates/profile/edit.html => pytask/templates/profile/edit.html rename : templates/profile/view.html => pytask/templates/profile/view.html rename : 
templates/profile/view_notification.html => pytask/templates/profile/view_notification.html rename : templates/registration/activate.html => pytask/templates/registration/activate.html rename : templates/registration/activation_email.txt => pytask/templates/registration/activation_email.txt rename : templates/registration/activation_email_subject.txt => pytask/templates/registration/activation_email_subject.txt rename : templates/registration/logged_out.html => pytask/templates/registration/logged_out.html rename : templates/registration/login.html => pytask/templates/registration/login.html rename : templates/registration/logout.html => pytask/templates/registration/logout.html rename : templates/registration/password_change_done.html => pytask/templates/registration/password_change_done.html rename : templates/registration/password_change_form.html => pytask/templates/registration/password_change_form.html rename : templates/registration/password_reset_complete.html => pytask/templates/registration/password_reset_complete.html rename : templates/registration/password_reset_confirm.html => pytask/templates/registration/password_reset_confirm.html rename : templates/registration/password_reset_done.html => pytask/templates/registration/password_reset_done.html rename : templates/registration/password_reset_email.html => pytask/templates/registration/password_reset_email.html rename : templates/registration/password_reset_form.html => pytask/templates/registration/password_reset_form.html rename : templates/registration/registration_complete.html => pytask/templates/registration/registration_complete.html rename : templates/registration/registration_form.html => pytask/templates/registration/registration_form.html rename : utils.py => pytask/utils.py
Diffstat (limited to 'eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext')
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py1
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyobin0 -> 186 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py250
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyobin0 -> 8875 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py579
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyobin0 -> 21456 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py441
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyobin0 -> 18525 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py45
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyobin0 -> 1514 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py198
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyobin0 -> 9040 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py319
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyobin0 -> 11558 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py321
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyobin0 -> 14008 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py260
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyobin0 -> 9581 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py389
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyobin0 -> 20100 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py434
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyobin0 -> 16933 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py271
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyobin0 -> 9466 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py847
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyobin0 -> 21195 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py200
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyobin0 -> 8694 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py365
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyobin0 -> 11449 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py170
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyobin0 -> 7015 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py338
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyobin0 -> 12415 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py376
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyobin0 -> 16042 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py227
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyobin0 -> 7808 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py202
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyobin0 -> 7333 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py1168
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyobin0 -> 39371 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py128
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyobin0 -> 5744 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py272
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyobin0 -> 10766 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py325
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyobin0 -> 10985 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py152
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyobin0 -> 5046 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py288
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyobin0 -> 10444 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py337
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyobin0 -> 12442 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py251
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyobin0 -> 10391 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py348
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyobin0 -> 11815 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py61
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyobin0 -> 2545 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py61
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyobin0 -> 2358 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py81
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyobin0 -> 2746 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py649
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyobin0 -> 29160 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py3211
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyobin0 -> 112419 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py316
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyobin0 -> 11753 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py113
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyobin0 -> 4114 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py96
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyobin0 -> 2626 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py553
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyobin0 -> 20701 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py206
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyobin0 -> 8302 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py111
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyobin0 -> 3694 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py577
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyobin0 -> 20920 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py569
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyobin0 -> 22073 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py180
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyobin0 -> 6226 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py84
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyobin0 -> 3984 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py38
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyobin0 -> 1595 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py630
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyobin0 -> 23926 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py159
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyobin0 -> 5770 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py170
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyobin0 -> 7010 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py1582
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyobin0 -> 66077 bytes
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py173
-rw-r--r--eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyobin0 -> 7506 bytes
100 files changed, 19122 insertions, 0 deletions
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py
new file mode 100644
index 0000000..fdffa2a
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py
@@ -0,0 +1 @@
+# placeholder
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo
new file mode 100644
index 0000000..96afc1a
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py
new file mode 100644
index 0000000..a50fa72
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py
@@ -0,0 +1,250 @@
+# acl.py - changeset access control for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''hooks for controlling repository access
+
+This hook makes it possible to allow or deny write access to given
+branches and paths of a repository when receiving incoming changesets
+via pretxnchangegroup and pretxncommit.
+
+The authorization is matched based on the local user name on the
+system where the hook runs, and not the committer of the original
+changeset (since the latter is merely informative).
+
+The acl hook is best used along with a restricted shell like hgsh,
+preventing authenticating users from doing anything other than pushing
+or pulling. The hook is not safe to use if users have interactive
+shell access, as they can then disable the hook. Nor is it safe if
+remote users share an account, because then there is no way to
+distinguish them.
+
+The order in which access checks are performed is:
+
+1) Deny list for branches (section ``acl.deny.branches``)
+2) Allow list for branches (section ``acl.allow.branches``)
+3) Deny list for paths (section ``acl.deny``)
+4) Allow list for paths (section ``acl.allow``)
+
+The allow and deny sections take key-value pairs.
+
+Branch-based Access Control
+...........................
+
+Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to
+have branch-based access control. Keys in these sections can be
+either:
+
+- a branch name, or
+- an asterisk, to match any branch;
+
+The corresponding values can be either:
+
+- a comma-separated list containing users and groups, or
+- an asterisk, to match anyone;
+
+Path-based Access Control
+.........................
+
+Use the ``acl.deny`` and ``acl.allow`` sections to have path-based
+access control. Keys in these sections accept a subtree pattern (with
+a glob syntax by default). The corresponding values follow the same
+syntax as the other sections above.
+
+Groups
+......
+
+Group names must be prefixed with an ``@`` symbol. Specifying a group
+name has the same effect as specifying all the users in that group.
+
+You can define group members in the ``acl.groups`` section.
+If a group name is not defined there, and Mercurial is running under
+a Unix-like system, the list of users will be taken from the OS.
+Otherwise, an exception will be raised.
+
+Example Configuration
+.....................
+
+::
+
+ [hooks]
+
+ # Use this if you want to check access restrictions at commit time
+ pretxncommit.acl = python:hgext.acl.hook
+
+ # Use this if you want to check access restrictions for pull, push,
+ # bundle and serve.
+ pretxnchangegroup.acl = python:hgext.acl.hook
+
+ [acl]
+ # Allow or deny access for incoming changes only if their source is
+ # listed here, let them pass otherwise. Source is "serve" for all
+ # remote access (http or ssh), "push", "pull" or "bundle" when the
+ # related commands are run locally.
+ # Default: serve
+ sources = serve
+
+ [acl.deny.branches]
+
+ # Everyone is denied to the frozen branch:
+ frozen-branch = *
+
+ # A bad user is denied on all branches:
+ * = bad-user
+
+ [acl.allow.branches]
+
+ # A few users are allowed on branch-a:
+ branch-a = user-1, user-2, user-3
+
+ # Only one user is allowed on branch-b:
+ branch-b = user-1
+
+ # The super user is allowed on any branch:
+ * = super-user
+
+ # Everyone is allowed on branch-for-tests:
+ branch-for-tests = *
+
+ [acl.deny]
+ # This list is checked first. If a match is found, acl.allow is not
+ # checked. All users are granted access if acl.deny is not present.
+ # Format for both lists: glob pattern = user, ..., @group, ...
+
+ # To match everyone, use an asterisk for the user:
+ # my/glob/pattern = *
+
+ # user6 will not have write access to any file:
+ ** = user6
+
+ # Group "hg-denied" will not have write access to any file:
+ ** = @hg-denied
+
+ # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite
+ # everyone being able to change all other files. See below.
+ src/main/resources/DONT-TOUCH-THIS.txt = *
+
+ [acl.allow]
+ # if acl.allow is not present, all users are allowed by default
+ # empty acl.allow = no users allowed
+
+ # User "doc_writer" has write access to any file under the "docs"
+ # folder:
+ docs/** = doc_writer
+
+ # User "jack" and group "designers" have write access to any file
+ # under the "images" folder:
+ images/** = jack, @designers
+
+ # Everyone (except for "user6" - see acl.deny above) will have write
+ # access to any file under the "resources" folder (except for 1
+ # file. See acl.deny):
+ src/main/resources/** = *
+
+ .hgtags = release_engineer
+
+'''
+
+from mercurial.i18n import _
+from mercurial import util, match
+import getpass, urllib
+
def _getusers(ui, group):
    '''Return the list of user names belonging to *group*.

    Membership defined in the [acl.groups] config section takes
    precedence; when the group is not listed there, fall back to the
    operating system's group database.
    '''
    # Configuration-defined membership wins over the OS group database.
    members = ui.configlist('acl.groups', group)
    if not members:
        ui.debug('acl: "%s" not defined in [acl.groups]\n' % group)
        # Not in the config: ask the OS for the group's members.
        try:
            members = util.groupmembers(group)
        except KeyError:
            raise util.Abort(_("group '%s' is undefined") % group)
    return members
+
def _usermatch(ui, user, usersorgroups):
    '''Return True if *user* is matched by *usersorgroups*.

    *usersorgroups* is either the single wildcard "*" (match anyone)
    or a comma/whitespace separated list of user names and group
    names, where group names carry a leading "@".
    '''
    if usersorgroups == '*':
        return True

    for ug in usersorgroups.replace(',', ' ').split():
        # A leading "@" denotes a group: expand it to its members.
        # (startswith is the idiomatic form of the old `find(...) == 0`;
        # parentheses make the and/or precedence explicit.)
        if user == ug or (ug.startswith('@') and
                          user in _getusers(ui, ug[1:])):
            return True

    return False
+
def buildmatch(ui, repo, user, key):
    '''Return an access predicate for *user* built from section *key*.

    Returns None when the section is absent. With repo=None the result
    tests branch names; with a repo it is a path matcher rooted at the
    repository.
    '''
    if not ui.has_section(key):
        ui.debug('acl: %s not enabled\n' % key)
        return None

    # Keep only the patterns whose user list matches this user.
    pats = []
    for pat, users in ui.configitems(key):
        if _usermatch(ui, user, users):
            pats.append(pat)
    ui.debug('acl: %s enabled, %d entries for user %s\n' %
             (key, len(pats), user))

    if repo:
        # Path-based check: build a real matcher against the repo root.
        if pats:
            return match.match(repo.root, '', pats)
        return match.exact(repo.root, '', [])

    # Branch-based check: "*" grants every branch.
    if pats:
        return lambda b: '*' in pats or b in pats
    return lambda b: False
+
+
def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
    '''Veto incoming changesets that violate the [acl] configuration.

    Installed as a ``pretxnchangegroup`` or ``pretxncommit`` hook;
    raises util.Abort on the first branch or file the acting user may
    not modify, which rolls back the whole transaction.
    '''
    # Only transaction-stopping hook types make sense for this check.
    if hooktype not in ['pretxnchangegroup', 'pretxncommit']:
        raise util.Abort(_('config error - hook type "%s" cannot stop '
                           'incoming changesets nor commits') % hooktype)
    # For changegroups, only act on the configured sources
    # (acl.sources, "serve" by default); let everything else pass.
    if (hooktype == 'pretxnchangegroup' and
        source not in ui.config('acl', 'sources', 'serve').split()):
        ui.debug('acl: changes have source "%s" - skipping\n' % source)
        return

    user = None
    if source == 'serve' and 'url' in kwargs:
        # For http access the authenticated user name is carried in the
        # "remote:http...:...:<user>" url annotation (url-quoted).
        url = kwargs['url'].split(':')
        if url[0] == 'remote' and url[1].startswith('http'):
            user = urllib.unquote(url[3])

    if user is None:
        # ssh/local access: the OS-level user running this hook.
        user = getpass.getuser()

    # acl.config may point at a separate file holding the acl sections.
    cfg = ui.config('acl', 'config')
    if cfg:
        ui.readconfig(cfg, sections = ['acl.groups', 'acl.allow.branches',
        'acl.deny.branches', 'acl.allow', 'acl.deny'])

    # Branch checks use name predicates (repo=None); path checks use
    # matchers built against the repository root.
    allowbranches = buildmatch(ui, None, user, 'acl.allow.branches')
    denybranches = buildmatch(ui, None, user, 'acl.deny.branches')
    allow = buildmatch(ui, repo, user, 'acl.allow')
    deny = buildmatch(ui, repo, user, 'acl.deny')

    # Walk every incoming changeset, starting at the first new one.
    for rev in xrange(repo[node], len(repo)):
        ctx = repo[rev]
        branch = ctx.branch()
        # Documented check order: deny lists before allow lists.
        if denybranches and denybranches(branch):
            raise util.Abort(_('acl: user "%s" denied on branch "%s"'
                               ' (changeset "%s")')
                             % (user, branch, ctx))
        if allowbranches and not allowbranches(branch):
            raise util.Abort(_('acl: user "%s" not allowed on branch "%s"'
                               ' (changeset "%s")')
                             % (user, branch, ctx))
        ui.debug('acl: branch access granted: "%s" on branch "%s"\n'
                 % (ctx, branch))

        for f in ctx.files():
            if deny and deny(f):
                ui.debug('acl: user %s denied on %s\n' % (user, f))
                raise util.Abort(_('acl: access denied for changeset %s') % ctx)
            if allow and not allow(f):
                ui.debug('acl: user %s not allowed on %s\n' % (user, f))
                raise util.Abort(_('acl: access denied for changeset %s') % ctx)
        ui.debug('acl: allowing changeset %s\n' % ctx)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo
new file mode 100644
index 0000000..8001bf2
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py
new file mode 100644
index 0000000..1ebbc7a
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py
@@ -0,0 +1,579 @@
+# Mercurial extension to provide the 'hg bookmark' command
+#
+# Copyright 2008 David Soria Parra <dsp@php.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''track a line of development with movable markers
+
+Bookmarks are local movable markers to changesets. Every bookmark
+points to a changeset identified by its hash. If you commit a
+changeset that is based on a changeset that has a bookmark on it, the
+bookmark shifts to the new changeset.
+
+It is possible to use bookmark names in every revision lookup (e.g.
+:hg:`merge`, :hg:`update`).
+
+By default, when several bookmarks point to the same changeset, they
+will all move forward together. It is possible to obtain a more
+git-like experience by adding the following configuration option to
+your configuration file::
+
+ [bookmarks]
+ track.current = True
+
+This will cause Mercurial to track the bookmark that you are currently
+using, and only update it. This is similar to git's approach to
+branching.
+'''
+
+from mercurial.i18n import _
+from mercurial.node import nullid, nullrev, bin, hex, short
+from mercurial import util, commands, repair, extensions, pushkey, hg, url
+from mercurial import revset
+import os
+
def write(repo):
    '''Write bookmarks

    Write the given bookmark => hash dictionary to the .hg/bookmarks file
    in a format equal to those of localtags.

    We also store a backup of the previous state in undo.bookmarks that
    can be copied back on rollback.
    '''
    refs = repo._bookmarks

    # Save the previous bookmarks file so rollback can restore it.
    try:
        bms = repo.opener('bookmarks').read()
    except IOError:
        bms = None
    if bms is not None:
        repo.opener('undo.bookmarks', 'w').write(bms)

    # Drop the "current bookmark" marker if that bookmark is gone.
    if repo._bookmarkcurrent not in refs:
        setcurrent(repo, None)
    wlock = repo.wlock()
    try:
        # renamed local: the old name 'file' shadowed the builtin.
        # atomictemp guarantees readers never see a half-written file.
        bmfile = repo.opener('bookmarks', 'w', atomictemp=True)
        for refspec, node in refs.iteritems():
            bmfile.write("%s %s\n" % (hex(node), refspec))
        bmfile.rename()

        # touch 00changelog.i so hgweb reloads bookmarks (no lock needed)
        try:
            os.utime(repo.sjoin('00changelog.i'), None)
        except OSError:
            pass

    finally:
        wlock.release()
+
def setcurrent(repo, mark):
    '''Set the name of the bookmark that we are currently on

    Set the name of the bookmark that we are on (hg update <bookmark>).
    The name is recorded in .hg/bookmarks.current
    '''
    current = repo._bookmarkcurrent
    if current == mark:
        return

    refs = repo._bookmarks

    # do not update if we do update to a rev equal to the current bookmark
    if (mark and mark not in refs and
        current and refs[current] == repo.changectx('.').node()):
        return
    # Unknown bookmark names clear the marker instead of recording junk.
    if mark not in refs:
        mark = ''
    wlock = repo.wlock()
    try:
        # renamed local: the old name 'file' shadowed the builtin.
        # atomictemp guarantees readers never see a half-written file.
        curfile = repo.opener('bookmarks.current', 'w', atomictemp=True)
        curfile.write(mark)
        curfile.rename()
    finally:
        wlock.release()
    # Keep the in-memory cache in sync with what was just written.
    repo._bookmarkcurrent = mark
+
def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None):
    '''track a line of development with movable markers

    Bookmarks are pointers to certain commits that move when
    committing. Bookmarks are local. They can be renamed, copied and
    deleted. It is possible to use bookmark names in :hg:`merge` and
    :hg:`update` to merge and update respectively to a given bookmark.

    You can use :hg:`bookmark NAME` to set a bookmark on the working
    directory's parent revision with the given name. If you specify
    a revision using -r REV (where REV may be an existing bookmark),
    the bookmark is assigned to that revision.

    Bookmarks can be pushed and pulled between repositories (see :hg:`help
    push` and :hg:`help pull`). This requires the bookmark extension to be
    enabled for both the local and remote repositories.
    '''
    # Long node hashes in debug mode, short ones otherwise
    # (conditional expression replaces the old `and/or` hack).
    hexfn = hex if ui.debugflag else short
    marks = repo._bookmarks
    cur = repo.changectx('.').node()

    # --rename OLD: move bookmark OLD to the new name given in `mark`.
    if rename:
        if rename not in marks:
            raise util.Abort(_("a bookmark of this name does not exist"))
        if mark in marks and not force:
            raise util.Abort(_("a bookmark of the same name already exists"))
        if mark is None:
            raise util.Abort(_("new bookmark name required"))
        marks[mark] = marks[rename]
        del marks[rename]
        if repo._bookmarkcurrent == rename:
            setcurrent(repo, mark)
        write(repo)
        return

    # --delete NAME: remove the bookmark entirely.
    if delete:
        if mark is None:
            raise util.Abort(_("bookmark name required"))
        if mark not in marks:
            raise util.Abort(_("a bookmark of this name does not exist"))
        if mark == repo._bookmarkcurrent:
            setcurrent(repo, None)
        del marks[mark]
        write(repo)
        return

    # NAME [-r REV]: create, or (with --force) move, a bookmark.
    # Fixed: identity comparison with None (was `mark != None`).
    if mark is not None:
        if "\n" in mark:
            raise util.Abort(_("bookmark name cannot contain newlines"))
        mark = mark.strip()
        if not mark:
            raise util.Abort(_("bookmark names cannot consist entirely of "
                               "whitespace"))
        if mark in marks and not force:
            raise util.Abort(_("a bookmark of the same name already exists"))
        if ((mark in repo.branchtags() or mark == repo.dirstate.branch())
            and not force):
            raise util.Abort(
                _("a bookmark cannot have the name of an existing branch"))
        if rev:
            marks[mark] = repo.lookup(rev)
        else:
            marks[mark] = repo.changectx('.').node()
        setcurrent(repo, mark)
        write(repo)
        return

    # No arguments: list the existing bookmarks.
    if mark is None:
        if rev:
            raise util.Abort(_("bookmark name required"))
        if not marks:
            ui.status(_("no bookmarks set\n"))
        else:
            for bmark, n in marks.iteritems():
                if ui.configbool('bookmarks', 'track.current'):
                    # Only the tracked "current" bookmark gets the '*'.
                    current = repo._bookmarkcurrent
                    if bmark == current and n == cur:
                        prefix, label = '*', 'bookmarks.current'
                    else:
                        prefix, label = ' ', ''
                else:
                    # Any bookmark on the working dir parent gets '*'.
                    if n == cur:
                        prefix, label = '*', 'bookmarks.current'
                    else:
                        prefix, label = ' ', ''

                if ui.quiet:
                    ui.write("%s\n" % bmark, label=label)
                else:
                    ui.write(" %s %-25s %d:%s\n" % (
                        prefix, bmark, repo.changelog.rev(n), hexfn(n)),
                        label=label)
        return
+
def _revstostrip(changelog, node):
    '''Return the revisions that stripping *node* would remove.

    Walks every revision from *node* to the tip collecting its
    descendants; parents of merges that lie outside the stripped set
    are kept (treated as heads to preserve).
    '''
    startrev = changelog.rev(node)
    tostrip = [startrev]
    saveheads = []
    for rev in xrange(startrev, len(changelog)):
        p1, p2 = changelog.parentrevs(rev)
        # A revision is stripped when either parent is being stripped.
        if p1 in tostrip or p2 in tostrip:
            tostrip.append(rev)
            if p2 != nullrev:
                # Merge: keep any parent that is outside the strip set.
                for parent in (p1, p2):
                    if parent not in tostrip and parent > startrev:
                        saveheads.append(parent)
    return [rev for rev in tostrip if rev not in saveheads]
+
def strip(oldstrip, ui, repo, node, backup="all"):
    """Strip bookmarks if revisions are stripped using
    the mercurial.strip method. This usually happens during
    qpush and qpop"""
    revisions = _revstostrip(repo.changelog, node)
    marks = repo._bookmarks
    update = []
    # Remember which bookmarks point into the to-be-stripped region.
    for mark, n in marks.iteritems():
        if repo.changelog.rev(n) in revisions:
            update.append(mark)
    # Delegate the actual strip to the wrapped implementation.
    oldstrip(ui, repo, node, backup)
    if len(update) > 0:
        # Re-point the orphaned bookmarks at the new working-dir parent.
        for m in update:
            marks[m] = repo.changectx('.').node()
        write(repo)
+
+def reposetup(ui, repo):
+ if not repo.local():
+ return
+
+ class bookmark_repo(repo.__class__):
+
+ @util.propertycache
+ def _bookmarks(self):
+ '''Parse .hg/bookmarks file and return a dictionary
+
+ Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values
+ in the .hg/bookmarks file.
+ Read the file and return a (name=>nodeid) dictionary
+ '''
+ try:
+ bookmarks = {}
+ for line in self.opener('bookmarks'):
+ sha, refspec = line.strip().split(' ', 1)
+ bookmarks[refspec] = self.changelog.lookup(sha)
+ except:
+ pass
+ return bookmarks
+
+ @util.propertycache
+ def _bookmarkcurrent(self):
+ '''Get the current bookmark
+
+ If we use gittishsh branches we have a current bookmark that
+ we are on. This function returns the name of the bookmark. It
+ is stored in .hg/bookmarks.current
+ '''
+ mark = None
+ if os.path.exists(self.join('bookmarks.current')):
+ file = self.opener('bookmarks.current')
+ # No readline() in posixfile_nt, reading everything is cheap
+ mark = (file.readlines() or [''])[0]
+ if mark == '':
+ mark = None
+ file.close()
+ return mark
+
+ def rollback(self, *args):
+ if os.path.exists(self.join('undo.bookmarks')):
+ util.rename(self.join('undo.bookmarks'), self.join('bookmarks'))
+ return super(bookmark_repo, self).rollback(*args)
+
+ def lookup(self, key):
+ if key in self._bookmarks:
+ key = self._bookmarks[key]
+ return super(bookmark_repo, self).lookup(key)
+
+ def _bookmarksupdate(self, parents, node):
+ marks = self._bookmarks
+ update = False
+ if ui.configbool('bookmarks', 'track.current'):
+ mark = self._bookmarkcurrent
+ if mark and marks[mark] in parents:
+ marks[mark] = node
+ update = True
+ else:
+ for mark, n in marks.items():
+ if n in parents:
+ marks[mark] = node
+ update = True
+ if update:
+ write(self)
+
+ def commitctx(self, ctx, error=False):
+ """Add a revision to the repository and
+ move the bookmark"""
+ wlock = self.wlock() # do both commit and bookmark with lock held
+ try:
+ node = super(bookmark_repo, self).commitctx(ctx, error)
+ if node is None:
+ return None
+ parents = self.changelog.parents(node)
+ if parents[1] == nullid:
+ parents = (parents[0],)
+
+ self._bookmarksupdate(parents, node)
+ return node
+ finally:
+ wlock.release()
+
+ def pull(self, remote, heads=None, force=False):
+ result = super(bookmark_repo, self).pull(remote, heads, force)
+
+ self.ui.debug("checking for updated bookmarks\n")
+ rb = remote.listkeys('bookmarks')
+ changed = False
+ for k in rb.keys():
+ if k in self._bookmarks:
+ nr, nl = rb[k], self._bookmarks[k]
+ if nr in self:
+ cr = self[nr]
+ cl = self[nl]
+ if cl.rev() >= cr.rev():
+ continue
+ if cr in cl.descendants():
+ self._bookmarks[k] = cr.node()
+ changed = True
+ self.ui.status(_("updating bookmark %s\n") % k)
+ else:
+ self.ui.warn(_("not updating divergent"
+ " bookmark %s\n") % k)
+ if changed:
+ write(repo)
+
+ return result
+
+ def push(self, remote, force=False, revs=None, newbranch=False):
+ result = super(bookmark_repo, self).push(remote, force, revs,
+ newbranch)
+
+ self.ui.debug("checking for updated bookmarks\n")
+ rb = remote.listkeys('bookmarks')
+ for k in rb.keys():
+ if k in self._bookmarks:
+ nr, nl = rb[k], self._bookmarks[k]
+ if nr in self:
+ cr = self[nr]
+ cl = self[nl]
+ if cl in cr.descendants():
+ r = remote.pushkey('bookmarks', k, nr, nl)
+ if r:
+ self.ui.status(_("updating bookmark %s\n") % k)
+ else:
+ self.ui.warn(_('updating bookmark %s'
+ ' failed!\n') % k)
+
+ return result
+
+ def addchangegroup(self, *args, **kwargs):
+ parents = self.dirstate.parents()
+
+ result = super(bookmark_repo, self).addchangegroup(*args, **kwargs)
+ if result > 1:
+ # We have more heads than before
+ return result
+ node = self.changelog.tip()
+
+ self._bookmarksupdate(parents, node)
+ return result
+
+ def _findtags(self):
+ """Merge bookmarks with normal tags"""
+ (tags, tagtypes) = super(bookmark_repo, self)._findtags()
+ tags.update(self._bookmarks)
+ return (tags, tagtypes)
+
+ if hasattr(repo, 'invalidate'):
+ def invalidate(self):
+ super(bookmark_repo, self).invalidate()
+ for attr in ('_bookmarks', '_bookmarkcurrent'):
+ if attr in self.__dict__:
+ delattr(self, attr)
+
+ repo.__class__ = bookmark_repo
+
+def listbookmarks(repo):
+ # We may try to list bookmarks on a repo type that does not
+ # support it (e.g., statichttprepository).
+ if not hasattr(repo, '_bookmarks'):
+ return {}
+
+ d = {}
+ for k, v in repo._bookmarks.iteritems():
+ d[k] = hex(v)
+ return d
+
+def pushbookmark(repo, key, old, new):
+ w = repo.wlock()
+ try:
+ marks = repo._bookmarks
+ if hex(marks.get(key, '')) != old:
+ return False
+ if new == '':
+ del marks[key]
+ else:
+ if new not in repo:
+ return False
+ marks[key] = repo[new].node()
+ write(repo)
+ return True
+ finally:
+ w.release()
+
+def pull(oldpull, ui, repo, source="default", **opts):
+ # translate bookmark args to rev args for actual pull
+ if opts.get('bookmark'):
+ # this is an unpleasant hack as pull will do this internally
+ source, branches = hg.parseurl(ui.expandpath(source),
+ opts.get('branch'))
+ other = hg.repository(hg.remoteui(repo, opts), source)
+ rb = other.listkeys('bookmarks')
+
+ for b in opts['bookmark']:
+ if b not in rb:
+ raise util.Abort(_('remote bookmark %s not found!') % b)
+ opts.setdefault('rev', []).append(b)
+
+ result = oldpull(ui, repo, source, **opts)
+
+ # update specified bookmarks
+ if opts.get('bookmark'):
+ for b in opts['bookmark']:
+ # explicit pull overrides local bookmark if any
+ ui.status(_("importing bookmark %s\n") % b)
+ repo._bookmarks[b] = repo[rb[b]].node()
+ write(repo)
+
+ return result
+
+def push(oldpush, ui, repo, dest=None, **opts):
+ dopush = True
+ if opts.get('bookmark'):
+ dopush = False
+ for b in opts['bookmark']:
+ if b in repo._bookmarks:
+ dopush = True
+ opts.setdefault('rev', []).append(b)
+
+ result = 0
+ if dopush:
+ result = oldpush(ui, repo, dest, **opts)
+
+ if opts.get('bookmark'):
+ # this is an unpleasant hack as push will do this internally
+ dest = ui.expandpath(dest or 'default-push', dest or 'default')
+ dest, branches = hg.parseurl(dest, opts.get('branch'))
+ other = hg.repository(hg.remoteui(repo, opts), dest)
+ rb = other.listkeys('bookmarks')
+ for b in opts['bookmark']:
+ # explicit push overrides remote bookmark if any
+ if b in repo._bookmarks:
+ ui.status(_("exporting bookmark %s\n") % b)
+ new = repo[b].hex()
+ elif b in rb:
+ ui.status(_("deleting remote bookmark %s\n") % b)
+ new = '' # delete
+ else:
+ ui.warn(_('bookmark %s does not exist on the local '
+ 'or remote repository!\n') % b)
+ return 2
+ old = rb.get(b, '')
+ r = other.pushkey('bookmarks', b, old, new)
+ if not r:
+ ui.warn(_('updating bookmark %s failed!\n') % b)
+ if not result:
+ result = 2
+
+ return result
+
+def diffbookmarks(ui, repo, remote):
+ ui.status(_("searching for changed bookmarks\n"))
+
+ lmarks = repo.listkeys('bookmarks')
+ rmarks = remote.listkeys('bookmarks')
+
+ diff = sorted(set(rmarks) - set(lmarks))
+ for k in diff:
+ ui.write(" %-25s %s\n" % (k, rmarks[k][:12]))
+
+ if len(diff) <= 0:
+ ui.status(_("no changed bookmarks found\n"))
+ return 1
+ return 0
+
+def incoming(oldincoming, ui, repo, source="default", **opts):
+ if opts.get('bookmarks'):
+ source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch'))
+ other = hg.repository(hg.remoteui(repo, opts), source)
+ ui.status(_('comparing with %s\n') % url.hidepassword(source))
+ return diffbookmarks(ui, repo, other)
+ else:
+ return oldincoming(ui, repo, source, **opts)
+
+def outgoing(oldoutgoing, ui, repo, dest=None, **opts):
+ if opts.get('bookmarks'):
+ dest = ui.expandpath(dest or 'default-push', dest or 'default')
+ dest, branches = hg.parseurl(dest, opts.get('branch'))
+ other = hg.repository(hg.remoteui(repo, opts), dest)
+ ui.status(_('comparing with %s\n') % url.hidepassword(dest))
+ return diffbookmarks(ui, other, repo)
+ else:
+ return oldoutgoing(ui, repo, dest, **opts)
+
+def uisetup(ui):
+ extensions.wrapfunction(repair, "strip", strip)
+ if ui.configbool('bookmarks', 'track.current'):
+ extensions.wrapcommand(commands.table, 'update', updatecurbookmark)
+
+ entry = extensions.wrapcommand(commands.table, 'pull', pull)
+ entry[1].append(('B', 'bookmark', [],
+ _("bookmark to import"),
+ _('BOOKMARK')))
+ entry = extensions.wrapcommand(commands.table, 'push', push)
+ entry[1].append(('B', 'bookmark', [],
+ _("bookmark to export"),
+ _('BOOKMARK')))
+ entry = extensions.wrapcommand(commands.table, 'incoming', incoming)
+ entry[1].append(('B', 'bookmarks', False,
+ _("compare bookmark")))
+ entry = extensions.wrapcommand(commands.table, 'outgoing', outgoing)
+ entry[1].append(('B', 'bookmarks', False,
+ _("compare bookmark")))
+
+ pushkey.register('bookmarks', pushbookmark, listbookmarks)
+
+def updatecurbookmark(orig, ui, repo, *args, **opts):
+ '''Set the current bookmark
+
+ If the user updates to a bookmark we update the .hg/bookmarks.current
+ file.
+ '''
+ res = orig(ui, repo, *args, **opts)
+ rev = opts['rev']
+ if not rev and len(args) > 0:
+ rev = args[0]
+ setcurrent(repo, rev)
+ return res
+
+def bmrevset(repo, subset, x):
+ """``bookmark([name])``
+ The named bookmark or all bookmarks.
+ """
+ # i18n: "bookmark" is a keyword
+ args = revset.getargs(x, 0, 1, _('bookmark takes one or no arguments'))
+ if args:
+ bm = revset.getstring(args[0],
+ # i18n: "bookmark" is a keyword
+ _('the argument to bookmark must be a string'))
+ bmrev = listbookmarks(repo).get(bm, None)
+ if bmrev:
+ bmrev = repo.changelog.rev(bin(bmrev))
+ return [r for r in subset if r == bmrev]
+ bms = set([repo.changelog.rev(bin(r)) for r in listbookmarks(repo).values()])
+ return [r for r in subset if r in bms]
+
+def extsetup(ui):
+ revset.symbols['bookmark'] = bmrevset
+
+cmdtable = {
+ "bookmarks":
+ (bookmark,
+ [('f', 'force', False, _('force')),
+ ('r', 'rev', '', _('revision'), _('REV')),
+ ('d', 'delete', False, _('delete a given bookmark')),
+ ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))],
+ _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')),
+}
+
+colortable = {'bookmarks.current': 'green'}
+
+# tell hggettext to extract docstrings from these functions:
+i18nfunctions = [bmrevset]
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo
new file mode 100644
index 0000000..9cad1f2
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py
new file mode 100644
index 0000000..de72e91
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py
@@ -0,0 +1,441 @@
+# bugzilla.py - bugzilla integration for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''hooks for integrating with the Bugzilla bug tracker
+
+This hook extension adds comments on bugs in Bugzilla when changesets
+that refer to bugs by Bugzilla ID are seen. The hook does not change
+bug status.
+
+The hook updates the Bugzilla database directly. Only Bugzilla
+installations using MySQL are supported.
+
+The hook relies on a Bugzilla script to send bug change notification
+emails. That script changes between Bugzilla versions; the
+'processmail' script used prior to 2.18 is replaced in 2.18 and
+subsequent versions by 'config/sendbugmail.pl'. Note that these will
+be run by Mercurial as the user pushing the change; you will need to
+ensure the Bugzilla install file permissions are set appropriately.
+
+The extension is configured through three different configuration
+sections. These keys are recognized in the [bugzilla] section:
+
+host
+ Hostname of the MySQL server holding the Bugzilla database.
+
+db
+ Name of the Bugzilla database in MySQL. Default 'bugs'.
+
+user
+ Username to use to access MySQL server. Default 'bugs'.
+
+password
+ Password to use to access MySQL server.
+
+timeout
+ Database connection timeout (seconds). Default 5.
+
+version
+ Bugzilla version. Specify '3.0' for Bugzilla versions 3.0 and later,
+ '2.18' for Bugzilla versions from 2.18 and '2.16' for versions prior
+ to 2.18.
+
+bzuser
+ Fallback Bugzilla user name to record comments with, if changeset
+ committer cannot be found as a Bugzilla user.
+
+bzdir
+ Bugzilla install directory. Used by default notify. Default
+ '/var/www/html/bugzilla'.
+
+notify
+ The command to run to get Bugzilla to send bug change notification
+ emails. Substitutes from a map with 3 keys, 'bzdir', 'id' (bug id)
+ and 'user' (committer bugzilla email). Default depends on version;
+ from 2.18 it is "cd %(bzdir)s && perl -T contrib/sendbugmail.pl
+ %(id)s %(user)s".
+
+regexp
+ Regular expression to match bug IDs in changeset commit message.
+ Must contain one "()" group. The default expression matches 'Bug
+ 1234', 'Bug no. 1234', 'Bug number 1234', 'Bugs 1234,5678', 'Bug
+ 1234 and 5678' and variations thereof. Matching is case insensitive.
+
+style
+ The style file to use when formatting comments.
+
+template
+ Template to use when formatting comments. Overrides style if
+ specified. In addition to the usual Mercurial keywords, the
+ extension specifies::
+
+ {bug} The Bugzilla bug ID.
+ {root} The full pathname of the Mercurial repository.
+ {webroot} Stripped pathname of the Mercurial repository.
+ {hgweb} Base URL for browsing Mercurial repositories.
+
+ Default 'changeset {node|short} in repo {root} refers '
+ 'to bug {bug}.\\ndetails:\\n\\t{desc|tabindent}'
+
+strip
+ The number of slashes to strip from the front of {root} to produce
+ {webroot}. Default 0.
+
+usermap
+ Path of file containing Mercurial committer ID to Bugzilla user ID
+ mappings. If specified, the file should contain one mapping per
+ line, "committer"="Bugzilla user". See also the [usermap] section.
+
+The [usermap] section is used to specify mappings of Mercurial
+committer ID to Bugzilla user ID. See also [bugzilla].usermap.
+"committer"="Bugzilla user"
+
+Finally, the [web] section supports one entry:
+
+baseurl
+ Base URL for browsing Mercurial repositories. Reference from
+ templates as {hgweb}.
+
+Activating the extension::
+
+ [extensions]
+ bugzilla =
+
+ [hooks]
+ # run bugzilla hook on every change pulled or pushed in here
+ incoming.bugzilla = python:hgext.bugzilla.hook
+
+Example configuration:
+
+This example configuration is for a collection of Mercurial
+repositories in /var/local/hg/repos/ used with a local Bugzilla 3.2
+installation in /opt/bugzilla-3.2. ::
+
+ [bugzilla]
+ host=localhost
+ password=XYZZY
+ version=3.0
+ bzuser=unknown@domain.com
+ bzdir=/opt/bugzilla-3.2
+ template=Changeset {node|short} in {root|basename}.
+ {hgweb}/{webroot}/rev/{node|short}\\n
+ {desc}\\n
+ strip=5
+
+ [web]
+ baseurl=http://dev.domain.com/hg
+
+ [usermap]
+ user@emaildomain.com=user.name@bugzilladomain.com
+
+Commits add a comment to the Bugzilla bug record of the form::
+
+ Changeset 3b16791d6642 in repository-name.
+ http://dev.domain.com/hg/repository-name/rev/3b16791d6642
+
+ Changeset commit comment. Bug 1234.
+'''
+
+from mercurial.i18n import _
+from mercurial.node import short
+from mercurial import cmdutil, templater, util
+import re, time
+
+MySQLdb = None
+
+def buglist(ids):
+ return '(' + ','.join(map(str, ids)) + ')'
+
+class bugzilla_2_16(object):
+ '''support for bugzilla version 2.16.'''
+
+ def __init__(self, ui):
+ self.ui = ui
+ host = self.ui.config('bugzilla', 'host', 'localhost')
+ user = self.ui.config('bugzilla', 'user', 'bugs')
+ passwd = self.ui.config('bugzilla', 'password')
+ db = self.ui.config('bugzilla', 'db', 'bugs')
+ timeout = int(self.ui.config('bugzilla', 'timeout', 5))
+ usermap = self.ui.config('bugzilla', 'usermap')
+ if usermap:
+ self.ui.readconfig(usermap, sections=['usermap'])
+ self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
+ (host, db, user, '*' * len(passwd)))
+ self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd,
+ db=db, connect_timeout=timeout)
+ self.cursor = self.conn.cursor()
+ self.longdesc_id = self.get_longdesc_id()
+ self.user_ids = {}
+ self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"
+
+ def run(self, *args, **kwargs):
+ '''run a query.'''
+ self.ui.note(_('query: %s %s\n') % (args, kwargs))
+ try:
+ self.cursor.execute(*args, **kwargs)
+ except MySQLdb.MySQLError:
+ self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
+ raise
+
+ def get_longdesc_id(self):
+ '''get identity of longdesc field'''
+ self.run('select fieldid from fielddefs where name = "longdesc"')
+ ids = self.cursor.fetchall()
+ if len(ids) != 1:
+ raise util.Abort(_('unknown database schema'))
+ return ids[0][0]
+
+ def filter_real_bug_ids(self, ids):
+ '''filter not-existing bug ids from list.'''
+ self.run('select bug_id from bugs where bug_id in %s' % buglist(ids))
+ return sorted([c[0] for c in self.cursor.fetchall()])
+
+ def filter_unknown_bug_ids(self, node, ids):
+ '''filter bug ids from list that already refer to this changeset.'''
+
+ self.run('''select bug_id from longdescs where
+ bug_id in %s and thetext like "%%%s%%"''' %
+ (buglist(ids), short(node)))
+ unknown = set(ids)
+ for (id,) in self.cursor.fetchall():
+ self.ui.status(_('bug %d already knows about changeset %s\n') %
+ (id, short(node)))
+ unknown.discard(id)
+ return sorted(unknown)
+
+ def notify(self, ids, committer):
+ '''tell bugzilla to send mail.'''
+
+ self.ui.status(_('telling bugzilla to send mail:\n'))
+ (user, userid) = self.get_bugzilla_user(committer)
+ for id in ids:
+ self.ui.status(_(' bug %s\n') % id)
+ cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
+ bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla')
+ try:
+ # Backwards-compatible with old notify string, which
+ # took one string. This will throw with a new format
+ # string.
+ cmd = cmdfmt % id
+ except TypeError:
+ cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
+ self.ui.note(_('running notify command %s\n') % cmd)
+ fp = util.popen('(%s) 2>&1' % cmd)
+ out = fp.read()
+ ret = fp.close()
+ if ret:
+ self.ui.warn(out)
+ raise util.Abort(_('bugzilla notify command %s') %
+ util.explain_exit(ret)[0])
+ self.ui.status(_('done\n'))
+
+ def get_user_id(self, user):
+ '''look up numeric bugzilla user id.'''
+ try:
+ return self.user_ids[user]
+ except KeyError:
+ try:
+ userid = int(user)
+ except ValueError:
+ self.ui.note(_('looking up user %s\n') % user)
+ self.run('''select userid from profiles
+ where login_name like %s''', user)
+ all = self.cursor.fetchall()
+ if len(all) != 1:
+ raise KeyError(user)
+ userid = int(all[0][0])
+ self.user_ids[user] = userid
+ return userid
+
+ def map_committer(self, user):
+ '''map name of committer to bugzilla user name.'''
+ for committer, bzuser in self.ui.configitems('usermap'):
+ if committer.lower() == user.lower():
+ return bzuser
+ return user
+
+ def get_bugzilla_user(self, committer):
+ '''see if committer is a registered bugzilla user. Return
+ bugzilla username and userid if so. If not, return default
+ bugzilla username and userid.'''
+ user = self.map_committer(committer)
+ try:
+ userid = self.get_user_id(user)
+ except KeyError:
+ try:
+ defaultuser = self.ui.config('bugzilla', 'bzuser')
+ if not defaultuser:
+ raise util.Abort(_('cannot find bugzilla user id for %s') %
+ user)
+ userid = self.get_user_id(defaultuser)
+ user = defaultuser
+ except KeyError:
+ raise util.Abort(_('cannot find bugzilla user id for %s or %s') %
+ (user, defaultuser))
+ return (user, userid)
+
+ def add_comment(self, bugid, text, committer):
+ '''add comment to bug. try adding comment as committer of
+ changeset, otherwise as default bugzilla user.'''
+ (user, userid) = self.get_bugzilla_user(committer)
+ now = time.strftime('%Y-%m-%d %H:%M:%S')
+ self.run('''insert into longdescs
+ (bug_id, who, bug_when, thetext)
+ values (%s, %s, %s, %s)''',
+ (bugid, userid, now, text))
+ self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
+ values (%s, %s, %s, %s)''',
+ (bugid, userid, now, self.longdesc_id))
+ self.conn.commit()
+
+class bugzilla_2_18(bugzilla_2_16):
+ '''support for bugzilla 2.18 series.'''
+
+ def __init__(self, ui):
+ bugzilla_2_16.__init__(self, ui)
+ self.default_notify = \
+ "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
+
+class bugzilla_3_0(bugzilla_2_18):
+ '''support for bugzilla 3.0 series.'''
+
+ def __init__(self, ui):
+ bugzilla_2_18.__init__(self, ui)
+
+ def get_longdesc_id(self):
+ '''get identity of longdesc field'''
+ self.run('select id from fielddefs where name = "longdesc"')
+ ids = self.cursor.fetchall()
+ if len(ids) != 1:
+ raise util.Abort(_('unknown database schema'))
+ return ids[0][0]
+
+class bugzilla(object):
+ # supported versions of bugzilla. different versions have
+ # different schemas.
+ _versions = {
+ '2.16': bugzilla_2_16,
+ '2.18': bugzilla_2_18,
+ '3.0': bugzilla_3_0
+ }
+
+ _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
+ r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)')
+
+ _bz = None
+
+ def __init__(self, ui, repo):
+ self.ui = ui
+ self.repo = repo
+
+ def bz(self):
+ '''return object that knows how to talk to bugzilla version in
+ use.'''
+
+ if bugzilla._bz is None:
+ bzversion = self.ui.config('bugzilla', 'version')
+ try:
+ bzclass = bugzilla._versions[bzversion]
+ except KeyError:
+ raise util.Abort(_('bugzilla version %s not supported') %
+ bzversion)
+ bugzilla._bz = bzclass(self.ui)
+ return bugzilla._bz
+
+ def __getattr__(self, key):
+ return getattr(self.bz(), key)
+
+ _bug_re = None
+ _split_re = None
+
+ def find_bug_ids(self, ctx):
+ '''find valid bug ids that are referred to in changeset
+ comments and that do not already have references to this
+ changeset.'''
+
+ if bugzilla._bug_re is None:
+ bugzilla._bug_re = re.compile(
+ self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re),
+ re.IGNORECASE)
+ bugzilla._split_re = re.compile(r'\D+')
+ start = 0
+ ids = set()
+ while True:
+ m = bugzilla._bug_re.search(ctx.description(), start)
+ if not m:
+ break
+ start = m.end()
+ for id in bugzilla._split_re.split(m.group(1)):
+ if not id:
+ continue
+ ids.add(int(id))
+ if ids:
+ ids = self.filter_real_bug_ids(ids)
+ if ids:
+ ids = self.filter_unknown_bug_ids(ctx.node(), ids)
+ return ids
+
+ def update(self, bugid, ctx):
+ '''update bugzilla bug with reference to changeset.'''
+
+ def webroot(root):
+ '''strip leading prefix of repo root and turn into
+ url-safe path.'''
+ count = int(self.ui.config('bugzilla', 'strip', 0))
+ root = util.pconvert(root)
+ while count > 0:
+ c = root.find('/')
+ if c == -1:
+ break
+ root = root[c + 1:]
+ count -= 1
+ return root
+
+ mapfile = self.ui.config('bugzilla', 'style')
+ tmpl = self.ui.config('bugzilla', 'template')
+ t = cmdutil.changeset_templater(self.ui, self.repo,
+ False, None, mapfile, False)
+ if not mapfile and not tmpl:
+ tmpl = _('changeset {node|short} in repo {root} refers '
+ 'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
+ if tmpl:
+ tmpl = templater.parsestring(tmpl, quoted=False)
+ t.use_template(tmpl)
+ self.ui.pushbuffer()
+ t.show(ctx, changes=ctx.changeset(),
+ bug=str(bugid),
+ hgweb=self.ui.config('web', 'baseurl'),
+ root=self.repo.root,
+ webroot=webroot(self.repo.root))
+ data = self.ui.popbuffer()
+ self.add_comment(bugid, data, util.email(ctx.user()))
+
+def hook(ui, repo, hooktype, node=None, **kwargs):
+ '''add comment to bugzilla for each changeset that refers to a
+ bugzilla bug id. only add a comment once per bug, so same change
+ seen multiple times does not fill bug with duplicate data.'''
+ try:
+ import MySQLdb as mysql
+ global MySQLdb
+ MySQLdb = mysql
+ except ImportError, err:
+ raise util.Abort(_('python mysql support not available: %s') % err)
+
+ if node is None:
+ raise util.Abort(_('hook type %s does not pass a changeset id') %
+ hooktype)
+ try:
+ bz = bugzilla(ui, repo)
+ ctx = repo[node]
+ ids = bz.find_bug_ids(ctx)
+ if ids:
+ for id in ids:
+ bz.update(id, ctx)
+ bz.notify(ids, util.email(ctx.user()))
+ except MySQLdb.MySQLError, err:
+ raise util.Abort(_('database error: %s') % err.args[1])
+
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo
new file mode 100644
index 0000000..b4bfa04
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py
new file mode 100644
index 0000000..da2fe9c
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py
@@ -0,0 +1,45 @@
+# Mercurial extension to provide the 'hg children' command
+#
+# Copyright 2007 by Intevation GmbH <intevation@intevation.de>
+#
+# Author(s):
+# Thomas Arendsen Hein <thomas@intevation.de>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''command to display child changesets'''
+
+from mercurial import cmdutil
+from mercurial.commands import templateopts
+from mercurial.i18n import _
+
+
+def children(ui, repo, file_=None, **opts):
+ """show the children of the given or working directory revision
+
+ Print the children of the working directory's revisions. If a
+ revision is given via -r/--rev, the children of that revision will
+ be printed. If a file argument is given, revision in which the
+ file was last changed (after the working directory revision or the
+ argument to --rev if given) is printed.
+ """
+ rev = opts.get('rev')
+ if file_:
+ ctx = repo.filectx(file_, changeid=rev)
+ else:
+ ctx = repo[rev]
+
+ displayer = cmdutil.show_changeset(ui, repo, opts)
+ for cctx in ctx.children():
+ displayer.show(cctx)
+ displayer.close()
+
+cmdtable = {
+ "children":
+ (children,
+ [('r', 'rev', '',
+ _('show children of the specified revision'), _('REV')),
+ ] + templateopts,
+ _('hg children [-r REV] [FILE]')),
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo
new file mode 100644
index 0000000..05aecd2
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py
new file mode 100644
index 0000000..32e481f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py
@@ -0,0 +1,198 @@
+# churn.py - create a graph of revisions count grouped by template
+#
+# Copyright 2006 Josef "Jeff" Sipek <jeffpc@josefsipek.net>
+# Copyright 2008 Alexander Solovyov <piranha@piranha.org.ua>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''command to display statistics about repository history'''
+
+from mercurial.i18n import _
+from mercurial import patch, cmdutil, util, templater, commands
+import os
+import time, datetime
+
+def maketemplater(ui, repo, tmpl):
+ tmpl = templater.parsestring(tmpl, quoted=False)
+ try:
+ t = cmdutil.changeset_templater(ui, repo, False, None, None, False)
+ except SyntaxError, inst:
+ raise util.Abort(inst.args[0])
+ t.use_template(tmpl)
+ return t
+
+def changedlines(ui, repo, ctx1, ctx2, fns):
+ added, removed = 0, 0
+ fmatch = cmdutil.matchfiles(repo, fns)
+ diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch))
+ for l in diff.split('\n'):
+ if l.startswith("+") and not l.startswith("+++ "):
+ added += 1
+ elif l.startswith("-") and not l.startswith("--- "):
+ removed += 1
+ return (added, removed)
+
+def countrate(ui, repo, amap, *pats, **opts):
+ """Calculate stats"""
+ if opts.get('dateformat'):
+ def getkey(ctx):
+ t, tz = ctx.date()
+ date = datetime.datetime(*time.gmtime(float(t) - tz)[:6])
+ return date.strftime(opts['dateformat'])
+ else:
+ tmpl = opts.get('template', '{author|email}')
+ tmpl = maketemplater(ui, repo, tmpl)
+ def getkey(ctx):
+ ui.pushbuffer()
+ tmpl.show(ctx)
+ return ui.popbuffer()
+
+ state = {'count': 0}
+ rate = {}
+ df = False
+ if opts.get('date'):
+ df = util.matchdate(opts['date'])
+
+ m = cmdutil.match(repo, pats, opts)
+ def prep(ctx, fns):
+ rev = ctx.rev()
+ if df and not df(ctx.date()[0]): # doesn't match date format
+ return
+
+ key = getkey(ctx)
+ key = amap.get(key, key) # alias remap
+ key = key.strip() # ignore leading and trailing spaces
+ if opts.get('changesets'):
+ rate[key] = (rate.get(key, (0,))[0] + 1, 0)
+ else:
+ parents = ctx.parents()
+ if len(parents) > 1:
+ ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,))
+ return
+
+ ctx1 = parents[0]
+ lines = changedlines(ui, repo, ctx1, ctx, fns)
+ rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)]
+
+ state['count'] += 1
+ ui.progress(_('analyzing'), state['count'], total=len(repo))
+
+ for ctx in cmdutil.walkchangerevs(repo, m, opts, prep):
+ continue
+
+ ui.progress(_('analyzing'), None)
+
+ return rate
+
+
+def churn(ui, repo, *pats, **opts):
+ '''histogram of changes to the repository
+
+ This command will display a histogram representing the number
+ of changed lines or revisions, grouped according to the given
+ template. The default template will group changes by author.
+ The --dateformat option may be used to group the results by
+ date instead.
+
+ Statistics are based on the number of changed lines, or
+ alternatively the number of matching revisions if the
+ --changesets option is specified.
+
+ Examples::
+
+ # display count of changed lines for every committer
+ hg churn -t '{author|email}'
+
+ # display daily activity graph
+ hg churn -f '%H' -s -c
+
+ # display activity of developers by month
+ hg churn -f '%Y-%m' -s -c
+
+ # display count of lines changed in every year
+ hg churn -f '%Y' -s
+
+ It is possible to map alternate email addresses to a main address
+ by providing a file using the following format::
+
+ <alias email> = <actual email>
+
+ Such a file may be specified with the --aliases option, otherwise
+ a .hgchurn file will be looked for in the working directory root.
+ '''
+ def pad(s, l):
+ return (s + " " * l)[:l]
+
+ amap = {}
+ aliases = opts.get('aliases')
+ if not aliases and os.path.exists(repo.wjoin('.hgchurn')):
+ aliases = repo.wjoin('.hgchurn')
+ if aliases:
+ for l in open(aliases, "r"):
+ try:
+ alias, actual = l.split('=' in l and '=' or None, 1)
+ amap[alias.strip()] = actual.strip()
+ except ValueError:
+ l = l.strip()
+ if l:
+ ui.warn(_("skipping malformed alias: %s\n" % l))
+ continue
+
+ rate = countrate(ui, repo, amap, *pats, **opts).items()
+ if not rate:
+ return
+
+ sortkey = ((not opts.get('sort')) and (lambda x: -sum(x[1])) or None)
+ rate.sort(key=sortkey)
+
+ # Be careful not to have a zero maxcount (issue833)
+ maxcount = float(max(sum(v) for k, v in rate)) or 1.0
+ maxname = max(len(k) for k, v in rate)
+
+ ttywidth = ui.termwidth()
+ ui.debug("assuming %i character terminal\n" % ttywidth)
+ width = ttywidth - maxname - 2 - 2 - 2
+
+ if opts.get('diffstat'):
+ width -= 15
+ def format(name, diffstat):
+ added, removed = diffstat
+ return "%s %15s %s%s\n" % (pad(name, maxname),
+ '+%d/-%d' % (added, removed),
+ ui.label('+' * charnum(added),
+ 'diffstat.inserted'),
+ ui.label('-' * charnum(removed),
+ 'diffstat.deleted'))
+ else:
+ width -= 6
+ def format(name, count):
+ return "%s %6d %s\n" % (pad(name, maxname), sum(count),
+ '*' * charnum(sum(count)))
+
+ def charnum(count):
+ return int(round(count * width / maxcount))
+
+ for name, count in rate:
+ ui.write(format(name, count))
+
+
+cmdtable = {
+ "churn":
+ (churn,
+ [('r', 'rev', [],
+ _('count rate for the specified revision or range'), _('REV')),
+ ('d', 'date', '',
+ _('count rate for revisions matching date spec'), _('DATE')),
+ ('t', 'template', '{author|email}',
+ _('template to group changesets'), _('TEMPLATE')),
+ ('f', 'dateformat', '',
+ _('strftime-compatible format for grouping by date'), _('FORMAT')),
+ ('c', 'changesets', False, _('count rate by number of changesets')),
+ ('s', 'sort', False, _('sort by key (default: sort by count)')),
+ ('', 'diffstat', False, _('display added/removed lines separately')),
+ ('', 'aliases', '',
+ _('file with email aliases'), _('FILE')),
+ ] + commands.walkopts,
+ _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]")),
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo
new file mode 100644
index 0000000..90d9a2d
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py
new file mode 100644
index 0000000..df78f8d
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py
@@ -0,0 +1,319 @@
+# color.py color output for the status and qseries commands
+#
+# Copyright (C) 2007 Kevin Christen <kevin.christen@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+'''colorize output from some commands
+
+This extension modifies the status and resolve commands to add color to their
+output to reflect file status, the qseries command to add color to reflect
+patch status (applied, unapplied, missing), and to diff-related
+commands to highlight additions, removals, diff headers, and trailing
+whitespace.
+
+Other effects in addition to color, like bold and underlined text, are
+also available. Effects are rendered with the ECMA-48 SGR control
+function (aka ANSI escape codes). This module also provides the
+render_text function, which can be used to add effects to any text.
+
+Default effects may be overridden from your configuration file::
+
+ [color]
+ status.modified = blue bold underline red_background
+ status.added = green bold
+ status.removed = red bold blue_background
+ status.deleted = cyan bold underline
+ status.unknown = magenta bold underline
+ status.ignored = black bold
+
+ # 'none' turns off all effects
+ status.clean = none
+ status.copied = none
+
+ qseries.applied = blue bold underline
+ qseries.unapplied = black bold
+ qseries.missing = red bold
+
+ diff.diffline = bold
+ diff.extended = cyan bold
+ diff.file_a = red bold
+ diff.file_b = green bold
+ diff.hunk = magenta
+ diff.deleted = red
+ diff.inserted = green
+ diff.changed = white
+ diff.trailingwhitespace = bold red_background
+
+ resolve.unresolved = red bold
+ resolve.resolved = green bold
+
+ bookmarks.current = green
+
+ branches.active = none
+ branches.closed = black bold
+ branches.current = green
+ branches.inactive = none
+
+The color extension will try to detect whether to use ANSI codes or
+Win32 console APIs, unless it is made explicit::
+
+ [color]
+ mode = ansi
+
+Any value other than 'ansi', 'win32', or 'auto' will disable color.
+
+'''
+
+import os
+
+from mercurial import commands, dispatch, extensions, ui as uimod, util
+from mercurial.i18n import _
+
+# start and stop parameters for effects
+_effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33,
+ 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1,
+ 'italic': 3, 'underline': 4, 'inverse': 7,
+ 'black_background': 40, 'red_background': 41,
+ 'green_background': 42, 'yellow_background': 43,
+ 'blue_background': 44, 'purple_background': 45,
+ 'cyan_background': 46, 'white_background': 47}
+
+_styles = {'grep.match': 'red bold',
+ 'branches.active': 'none',
+ 'branches.closed': 'black bold',
+ 'branches.current': 'green',
+ 'branches.inactive': 'none',
+ 'diff.changed': 'white',
+ 'diff.deleted': 'red',
+ 'diff.diffline': 'bold',
+ 'diff.extended': 'cyan bold',
+ 'diff.file_a': 'red bold',
+ 'diff.file_b': 'green bold',
+ 'diff.hunk': 'magenta',
+ 'diff.inserted': 'green',
+ 'diff.trailingwhitespace': 'bold red_background',
+ 'diffstat.deleted': 'red',
+ 'diffstat.inserted': 'green',
+ 'log.changeset': 'yellow',
+ 'resolve.resolved': 'green bold',
+ 'resolve.unresolved': 'red bold',
+ 'status.added': 'green bold',
+ 'status.clean': 'none',
+ 'status.copied': 'none',
+ 'status.deleted': 'cyan bold underline',
+ 'status.ignored': 'black bold',
+ 'status.modified': 'blue bold',
+ 'status.removed': 'red bold',
+ 'status.unknown': 'magenta bold underline'}
+
+
+def render_effects(text, effects):
+ 'Wrap text in commands to turn on each effect.'
+ if not text:
+ return text
+ start = [str(_effects[e]) for e in ['none'] + effects.split()]
+ start = '\033[' + ';'.join(start) + 'm'
+ stop = '\033[' + str(_effects['none']) + 'm'
+ return ''.join([start, text, stop])
+
+def extstyles():
+ for name, ext in extensions.extensions():
+ _styles.update(getattr(ext, 'colortable', {}))
+
+def configstyles(ui):
+ for status, cfgeffects in ui.configitems('color'):
+ if '.' not in status:
+ continue
+ cfgeffects = ui.configlist('color', status)
+ if cfgeffects:
+ good = []
+ for e in cfgeffects:
+ if e in _effects:
+ good.append(e)
+ else:
+ ui.warn(_("ignoring unknown color/effect %r "
+ "(configured in color.%s)\n")
+ % (e, status))
+ _styles[status] = ' '.join(good)
+
+class colorui(uimod.ui):
+ def popbuffer(self, labeled=False):
+ if labeled:
+ return ''.join(self.label(a, label) for a, label
+ in self._buffers.pop())
+ return ''.join(a for a, label in self._buffers.pop())
+
+ _colormode = 'ansi'
+ def write(self, *args, **opts):
+ label = opts.get('label', '')
+ if self._buffers:
+ self._buffers[-1].extend([(str(a), label) for a in args])
+ elif self._colormode == 'win32':
+ for a in args:
+ win32print(a, super(colorui, self).write, **opts)
+ else:
+ return super(colorui, self).write(
+ *[self.label(str(a), label) for a in args], **opts)
+
+ def write_err(self, *args, **opts):
+ label = opts.get('label', '')
+ if self._colormode == 'win32':
+ for a in args:
+ win32print(a, super(colorui, self).write_err, **opts)
+ else:
+ return super(colorui, self).write_err(
+ *[self.label(str(a), label) for a in args], **opts)
+
+ def label(self, msg, label):
+ effects = []
+ for l in label.split():
+ s = _styles.get(l, '')
+ if s:
+ effects.append(s)
+ effects = ''.join(effects)
+ if effects:
+ return '\n'.join([render_effects(s, effects)
+ for s in msg.split('\n')])
+ return msg
+
+
+def uisetup(ui):
+ if ui.plain():
+ return
+ mode = ui.config('color', 'mode', 'auto')
+ if mode == 'auto':
+ if os.name == 'nt' and 'TERM' not in os.environ:
+ # looks line a cmd.exe console, use win32 API or nothing
+ mode = w32effects and 'win32' or 'none'
+ else:
+ mode = 'ansi'
+ if mode == 'win32':
+ if w32effects is None:
+ # only warn if color.mode is explicitly set to win32
+ ui.warn(_('win32console not found, please install pywin32\n'))
+ return
+ _effects.update(w32effects)
+ elif mode != 'ansi':
+ return
+ def colorcmd(orig, ui_, opts, cmd, cmdfunc):
+ coloropt = opts['color']
+ auto = coloropt == 'auto'
+ always = util.parsebool(coloropt)
+ if (always or
+ (always is None and
+ (auto and (os.environ.get('TERM') != 'dumb' and ui_.formatted())))):
+ colorui._colormode = mode
+ colorui.__bases__ = (ui_.__class__,)
+ ui_.__class__ = colorui
+ extstyles()
+ configstyles(ui_)
+ return orig(ui_, opts, cmd, cmdfunc)
+ extensions.wrapfunction(dispatch, '_runcommand', colorcmd)
+
+def extsetup(ui):
+ commands.globalopts.append(
+ ('', 'color', 'auto',
+ # i18n: 'always', 'auto', and 'never' are keywords and should
+ # not be translated
+ _("when to colorize (boolean, always, auto, or never)"),
+ _('TYPE')))
+
+try:
+ import re, pywintypes, win32console as win32c
+
+ # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx
+ w32effects = {
+ 'none': -1,
+ 'black': 0,
+ 'red': win32c.FOREGROUND_RED,
+ 'green': win32c.FOREGROUND_GREEN,
+ 'yellow': win32c.FOREGROUND_RED | win32c.FOREGROUND_GREEN,
+ 'blue': win32c.FOREGROUND_BLUE,
+ 'magenta': win32c.FOREGROUND_BLUE | win32c.FOREGROUND_RED,
+ 'cyan': win32c.FOREGROUND_BLUE | win32c.FOREGROUND_GREEN,
+ 'white': (win32c.FOREGROUND_RED | win32c.FOREGROUND_GREEN |
+ win32c.FOREGROUND_BLUE),
+ 'bold': win32c.FOREGROUND_INTENSITY,
+ 'black_background': 0x100, # unused value > 0x0f
+ 'red_background': win32c.BACKGROUND_RED,
+ 'green_background': win32c.BACKGROUND_GREEN,
+ 'yellow_background': win32c.BACKGROUND_RED | win32c.BACKGROUND_GREEN,
+ 'blue_background': win32c.BACKGROUND_BLUE,
+ 'purple_background': win32c.BACKGROUND_BLUE | win32c.BACKGROUND_RED,
+ 'cyan_background': win32c.BACKGROUND_BLUE | win32c.BACKGROUND_GREEN,
+ 'white_background': (win32c.BACKGROUND_RED | win32c.BACKGROUND_GREEN |
+ win32c.BACKGROUND_BLUE),
+ 'bold_background': win32c.BACKGROUND_INTENSITY,
+ 'underline': win32c.COMMON_LVB_UNDERSCORE, # double-byte charsets only
+ 'inverse': win32c.COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only
+ }
+
+ passthrough = set([win32c.FOREGROUND_INTENSITY,
+ win32c.BACKGROUND_INTENSITY,
+ win32c.COMMON_LVB_UNDERSCORE,
+ win32c.COMMON_LVB_REVERSE_VIDEO])
+
+ try:
+ stdout = win32c.GetStdHandle(win32c.STD_OUTPUT_HANDLE)
+ if stdout is None:
+ raise ImportError()
+ origattr = stdout.GetConsoleScreenBufferInfo()['Attributes']
+ except pywintypes.error:
+ # stdout may be defined but not support
+ # GetConsoleScreenBufferInfo(), when called from subprocess or
+ # redirected.
+ raise ImportError()
+ ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)', re.MULTILINE | re.DOTALL)
+
+ def win32print(text, orig, **opts):
+ label = opts.get('label', '')
+ attr = origattr
+
+ def mapcolor(val, attr):
+ if val == -1:
+ return origattr
+ elif val in passthrough:
+ return attr | val
+ elif val > 0x0f:
+ return (val & 0x70) | (attr & 0x8f)
+ else:
+ return (val & 0x07) | (attr & 0xf8)
+
+ # determine console attributes based on labels
+ for l in label.split():
+ style = _styles.get(l, '')
+ for effect in style.split():
+ attr = mapcolor(w32effects[effect], attr)
+
+ # hack to ensure regexp finds data
+ if not text.startswith('\033['):
+ text = '\033[m' + text
+
+ # Look for ANSI-like codes embedded in text
+ m = re.match(ansire, text)
+ while m:
+ for sattr in m.group(1).split(';'):
+ if sattr:
+ attr = mapcolor(int(sattr), attr)
+ stdout.SetConsoleTextAttribute(attr)
+ orig(m.group(2), **opts)
+ m = re.match(ansire, m.group(3))
+
+ # Explicity reset original attributes
+ stdout.SetConsoleTextAttribute(origattr)
+
+except ImportError:
+ w32effects = None
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo
new file mode 100644
index 0000000..2f131ee
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py
new file mode 100644
index 0000000..be7aca5
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py
@@ -0,0 +1,321 @@
+# convert.py Foreign SCM converter
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''import revisions from foreign VCS repositories into Mercurial'''
+
+import convcmd
+import cvsps
+import subversion
+from mercurial import commands
+from mercurial.i18n import _
+
+# Commands definition was moved elsewhere to ease demandload job.
+
+def convert(ui, src, dest=None, revmapfile=None, **opts):
+ """convert a foreign SCM repository to a Mercurial one.
+
+ Accepted source formats [identifiers]:
+
+ - Mercurial [hg]
+ - CVS [cvs]
+ - Darcs [darcs]
+ - git [git]
+ - Subversion [svn]
+ - Monotone [mtn]
+ - GNU Arch [gnuarch]
+ - Bazaar [bzr]
+ - Perforce [p4]
+
+ Accepted destination formats [identifiers]:
+
+ - Mercurial [hg]
+ - Subversion [svn] (history on branches is not preserved)
+
+ If no revision is given, all revisions will be converted.
+ Otherwise, convert will only import up to the named revision
+ (given in a format understood by the source).
+
+ If no destination directory name is specified, it defaults to the
+ basename of the source with ``-hg`` appended. If the destination
+ repository doesn't exist, it will be created.
+
+ By default, all sources except Mercurial will use --branchsort.
+ Mercurial uses --sourcesort to preserve original revision numbers
+ order. Sort modes have the following effects:
+
+ --branchsort convert from parent to child revision when possible,
+ which means branches are usually converted one after
+ the other. It generates more compact repositories.
+
+ --datesort sort revisions by date. Converted repositories have
+ good-looking changelogs but are often an order of
+ magnitude larger than the same ones generated by
+ --branchsort.
+
+ --sourcesort try to preserve source revisions order, only
+ supported by Mercurial sources.
+
+ If <REVMAP> isn't given, it will be put in a default location
+ (<dest>/.hg/shamap by default). The <REVMAP> is a simple text file
+ that maps each source commit ID to the destination ID for that
+ revision, like so::
+
+ <source ID> <destination ID>
+
+ If the file doesn't exist, it's automatically created. It's
+ updated on each commit copied, so :hg:`convert` can be interrupted
+ and can be run repeatedly to copy new commits.
+
+ The authormap is a simple text file that maps each source commit
+ author to a destination commit author. It is handy for source SCMs
+ that use unix logins to identify authors (eg: CVS). One line per
+ author mapping and the line format is::
+
+ source author = destination author
+
+ Empty lines and lines starting with a ``#`` are ignored.
+
+ The filemap is a file that allows filtering and remapping of files
+ and directories. Each line can contain one of the following
+ directives::
+
+ include path/to/file-or-dir
+
+ exclude path/to/file-or-dir
+
+ rename path/to/source path/to/destination
+
+ Comment lines start with ``#``. A specified path matches if it
+ equals the full relative name of a file or one of its parent
+ directories. The ``include`` or ``exclude`` directive with the
+ longest matching path applies, so line order does not matter.
+
+ The ``include`` directive causes a file, or all files under a
+ directory, to be included in the destination repository, and the
+ exclusion of all other files and directories not explicitly
+ included. The ``exclude`` directive causes files or directories to
+ be omitted. The ``rename`` directive renames a file or directory if
+ it is converted. To rename from a subdirectory into the root of
+ the repository, use ``.`` as the path to rename to.
+
+ The splicemap is a file that allows insertion of synthetic
+ history, letting you specify the parents of a revision. This is
+ useful if you want to e.g. give a Subversion merge two parents, or
+ graft two disconnected series of history together. Each entry
+ contains a key, followed by a space, followed by one or two
+ comma-separated values::
+
+ key parent1, parent2
+
+ The key is the revision ID in the source
+ revision control system whose parents should be modified (same
+ format as a key in .hg/shamap). The values are the revision IDs
+ (in either the source or destination revision control system) that
+ should be used as the new parents for that node. For example, if
+ you have merged "release-1.0" into "trunk", then you should
+ specify the revision on "trunk" as the first parent and the one on
+ the "release-1.0" branch as the second.
+
+ The branchmap is a file that allows you to rename a branch when it is
+ being brought in from whatever external repository. When used in
+ conjunction with a splicemap, it allows for a powerful combination
+ to help fix even the most badly mismanaged repositories and turn them
+ into nicely structured Mercurial repositories. The branchmap contains
+ lines of the form::
+
+ original_branch_name new_branch_name
+
+ where "original_branch_name" is the name of the branch in the
+ source repository, and "new_branch_name" is the name of the branch
+ is the destination repository. No whitespace is allowed in the
+ branch names. This can be used to (for instance) move code in one
+ repository from "default" to a named branch.
+
+ Mercurial Source
+ ''''''''''''''''
+
+ --config convert.hg.ignoreerrors=False (boolean)
+ ignore integrity errors when reading. Use it to fix Mercurial
+ repositories with missing revlogs, by converting from and to
+ Mercurial.
+ --config convert.hg.saverev=False (boolean)
+ store original revision ID in changeset (forces target IDs to
+ change)
+ --config convert.hg.startrev=0 (hg revision identifier)
+ convert start revision and its descendants
+
+ CVS Source
+ ''''''''''
+
+ CVS source will use a sandbox (i.e. a checked-out copy) from CVS
+ to indicate the starting point of what will be converted. Direct
+ access to the repository files is not needed, unless of course the
+ repository is :local:. The conversion uses the top level directory
+ in the sandbox to find the CVS repository, and then uses CVS rlog
+ commands to find files to convert. This means that unless a
+ filemap is given, all files under the starting directory will be
+ converted, and that any directory reorganization in the CVS
+ sandbox is ignored.
+
+ The options shown are the defaults.
+
+ --config convert.cvsps.cache=True (boolean)
+ Set to False to disable remote log caching, for testing and
+ debugging purposes.
+ --config convert.cvsps.fuzz=60 (integer)
+ Specify the maximum time (in seconds) that is allowed between
+ commits with identical user and log message in a single
+ changeset. When very large files were checked in as part of a
+ changeset then the default may not be long enough.
+ --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}'
+ Specify a regular expression to which commit log messages are
+ matched. If a match occurs, then the conversion process will
+ insert a dummy revision merging the branch on which this log
+ message occurs to the branch indicated in the regex.
+ --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}'
+ Specify a regular expression to which commit log messages are
+ matched. If a match occurs, then the conversion process will
+ add the most recent revision on the branch indicated in the
+ regex as the second parent of the changeset.
+ --config hook.cvslog
+ Specify a Python function to be called at the end of gathering
+ the CVS log. The function is passed a list with the log entries,
+ and can modify the entries in-place, or add or delete them.
+ --config hook.cvschangesets
+ Specify a Python function to be called after the changesets
+ are calculated from the the CVS log. The function is passed
+ a list with the changeset entries, and can modify the changesets
+ in-place, or add or delete them.
+
+ An additional "debugcvsps" Mercurial command allows the builtin
+ changeset merging code to be run without doing a conversion. Its
+ parameters and output are similar to that of cvsps 2.1. Please see
+ the command help for more details.
+
+ Subversion Source
+ '''''''''''''''''
+
+ Subversion source detects classical trunk/branches/tags layouts.
+ By default, the supplied "svn://repo/path/" source URL is
+ converted as a single branch. If "svn://repo/path/trunk" exists it
+ replaces the default branch. If "svn://repo/path/branches" exists,
+ its subdirectories are listed as possible branches. If
+ "svn://repo/path/tags" exists, it is looked for tags referencing
+ converted branches. Default "trunk", "branches" and "tags" values
+ can be overridden with following options. Set them to paths
+ relative to the source URL, or leave them blank to disable auto
+ detection.
+
+ --config convert.svn.branches=branches (directory name)
+ specify the directory containing branches
+ --config convert.svn.tags=tags (directory name)
+ specify the directory containing tags
+ --config convert.svn.trunk=trunk (directory name)
+ specify the name of the trunk branch
+
+ Source history can be retrieved starting at a specific revision,
+ instead of being integrally converted. Only single branch
+ conversions are supported.
+
+ --config convert.svn.startrev=0 (svn revision number)
+ specify start Subversion revision.
+
+ Perforce Source
+ '''''''''''''''
+
+ The Perforce (P4) importer can be given a p4 depot path or a
+ client specification as source. It will convert all files in the
+ source to a flat Mercurial repository, ignoring labels, branches
+ and integrations. Note that when a depot path is given you then
+ usually should specify a target directory, because otherwise the
+ target may be named ...-hg.
+
+ It is possible to limit the amount of source history to be
+ converted by specifying an initial Perforce revision.
+
+ --config convert.p4.startrev=0 (perforce changelist number)
+ specify initial Perforce revision.
+
+ Mercurial Destination
+ '''''''''''''''''''''
+
+ --config convert.hg.clonebranches=False (boolean)
+ dispatch source branches in separate clones.
+ --config convert.hg.tagsbranch=default (branch name)
+ tag revisions branch name
+ --config convert.hg.usebranchnames=True (boolean)
+ preserve branch names
+
+ """
+ return convcmd.convert(ui, src, dest, revmapfile, **opts)
+
+def debugsvnlog(ui, **opts):
+ return subversion.debugsvnlog(ui, **opts)
+
+def debugcvsps(ui, *args, **opts):
+ '''create changeset information from CVS
+
+ This command is intended as a debugging tool for the CVS to
+ Mercurial converter, and can be used as a direct replacement for
+ cvsps.
+
+ Hg debugcvsps reads the CVS rlog for current directory (or any
+ named directory) in the CVS repository, and converts the log to a
+ series of changesets based on matching commit log entries and
+ dates.'''
+ return cvsps.debugcvsps(ui, *args, **opts)
+
+commands.norepo += " convert debugsvnlog debugcvsps"
+
+cmdtable = {
+ "convert":
+ (convert,
+ [('', 'authors', '',
+ _('username mapping filename (DEPRECATED, use --authormap instead)'),
+ _('FILE')),
+ ('s', 'source-type', '',
+ _('source repository type'), _('TYPE')),
+ ('d', 'dest-type', '',
+ _('destination repository type'), _('TYPE')),
+ ('r', 'rev', '',
+ _('import up to target revision REV'), _('REV')),
+ ('A', 'authormap', '',
+ _('remap usernames using this file'), _('FILE')),
+ ('', 'filemap', '',
+ _('remap file names using contents of file'), _('FILE')),
+ ('', 'splicemap', '',
+ _('splice synthesized history into place'), _('FILE')),
+ ('', 'branchmap', '',
+ _('change branch names while converting'), _('FILE')),
+ ('', 'branchsort', None, _('try to sort changesets by branches')),
+ ('', 'datesort', None, _('try to sort changesets by date')),
+ ('', 'sourcesort', None, _('preserve source changesets order'))],
+ _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]')),
+ "debugsvnlog":
+ (debugsvnlog,
+ [],
+ 'hg debugsvnlog'),
+ "debugcvsps":
+ (debugcvsps,
+ [
+ # Main options shared with cvsps-2.1
+ ('b', 'branches', [], _('only return changes on specified branches')),
+ ('p', 'prefix', '', _('prefix to remove from file names')),
+ ('r', 'revisions', [],
+ _('only return changes after or between specified tags')),
+ ('u', 'update-cache', None, _("update cvs log cache")),
+ ('x', 'new-cache', None, _("create new cvs log cache")),
+ ('z', 'fuzz', 60, _('set commit time fuzz in seconds')),
+ ('', 'root', '', _('specify cvsroot')),
+ # Options specific to builtin cvsps
+ ('', 'parents', '', _('show parent changesets')),
+ ('', 'ancestors', '', _('show current changeset in ancestor branches')),
+ # Options that are ignored for compatibility with cvsps-2.1
+ ('A', 'cvs-direct', None, _('ignored for compatibility')),
+ ],
+ _('hg debugcvsps [OPTION]... [PATH]...')),
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo
new file mode 100644
index 0000000..892b438
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py
new file mode 100644
index 0000000..cc16258
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py
@@ -0,0 +1,260 @@
+# bzr.py - bzr support for the convert extension
+#
+# Copyright 2008, 2009 Marek Kubica <marek@xivilization.net> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# This module is for handling 'bzr', that was formerly known as Bazaar-NG;
+# it cannot access 'bar' repositories, but they were never used very much
+
+import os
+from mercurial import demandimport
+# these do not work with demandimport, blacklist
+demandimport.ignore.extend([
+ 'bzrlib.transactions',
+ 'bzrlib.urlutils',
+ 'ElementPath',
+ ])
+
+from mercurial.i18n import _
+from mercurial import util
+from common import NoRepo, commit, converter_source
+
+try:
+ # bazaar imports
+ from bzrlib import branch, revision, errors
+ from bzrlib.revisionspec import RevisionSpec
+except ImportError:
+ pass
+
+supportedkinds = ('file', 'symlink')
+
+class bzr_source(converter_source):
+ """Reads Bazaar repositories by using the Bazaar Python libraries"""
+
+ def __init__(self, ui, path, rev=None):
+ super(bzr_source, self).__init__(ui, path, rev=rev)
+
+ if not os.path.exists(os.path.join(path, '.bzr')):
+ raise NoRepo(_('%s does not look like a Bazaar repository')
+ % path)
+
+ try:
+ # access bzrlib stuff
+ branch
+ except NameError:
+ raise NoRepo(_('Bazaar modules could not be loaded'))
+
+ path = os.path.abspath(path)
+ self._checkrepotype(path)
+ self.branch = branch.Branch.open(path)
+ self.sourcerepo = self.branch.repository
+ self._parentids = {}
+
+ def _checkrepotype(self, path):
+ # Lightweight checkouts detection is informational but probably
+ # fragile at API level. It should not terminate the conversion.
+ try:
+ from bzrlib import bzrdir
+ dir = bzrdir.BzrDir.open_containing(path)[0]
+ try:
+ tree = dir.open_workingtree(recommend_upgrade=False)
+ branch = tree.branch
+ except (errors.NoWorkingTree, errors.NotLocalUrl):
+ tree = None
+ branch = dir.open_branch()
+ if (tree is not None and tree.bzrdir.root_transport.base !=
+ branch.bzrdir.root_transport.base):
+ self.ui.warn(_('warning: lightweight checkouts may cause '
+ 'conversion failures, try with a regular '
+ 'branch instead.\n'))
+ except:
+ self.ui.note(_('bzr source type could not be determined\n'))
+
+ def before(self):
+ """Before the conversion begins, acquire a read lock
+ for all the operations that might need it. Fortunately
+ read locks don't block other reads or writes to the
+ repository, so this shouldn't have any impact on the usage of
+ the source repository.
+
+ The alternative would be locking on every operation that
+ needs locks (there are currently two: getting the file and
+ getting the parent map) and releasing immediately after,
+ but this approach can take even 40% longer."""
+ self.sourcerepo.lock_read()
+
+ def after(self):
+ self.sourcerepo.unlock()
+
+ def getheads(self):
+ if not self.rev:
+ return [self.branch.last_revision()]
+ try:
+ r = RevisionSpec.from_string(self.rev)
+ info = r.in_history(self.branch)
+ except errors.BzrError:
+ raise util.Abort(_('%s is not a valid revision in current branch')
+ % self.rev)
+ return [info.rev_id]
+
+ def getfile(self, name, rev):
+ revtree = self.sourcerepo.revision_tree(rev)
+ fileid = revtree.path2id(name.decode(self.encoding or 'utf-8'))
+ kind = None
+ if fileid is not None:
+ kind = revtree.kind(fileid)
+ if kind not in supportedkinds:
+ # the file is not available anymore - was deleted
+ raise IOError(_('%s is not available in %s anymore') %
+ (name, rev))
+ mode = self._modecache[(name, rev)]
+ if kind == 'symlink':
+ target = revtree.get_symlink_target(fileid)
+ if target is None:
+ raise util.Abort(_('%s.%s symlink has no target')
+ % (name, rev))
+ return target, mode
+ else:
+ sio = revtree.get_file(fileid)
+ return sio.read(), mode
+
+ def getchanges(self, version):
+ # set up caches: modecache and revtree
+ self._modecache = {}
+ self._revtree = self.sourcerepo.revision_tree(version)
+ # get the parentids from the cache
+ parentids = self._parentids.pop(version)
+ # only diff against first parent id
+ prevtree = self.sourcerepo.revision_tree(parentids[0])
+ return self._gettreechanges(self._revtree, prevtree)
+
+ def getcommit(self, version):
+ rev = self.sourcerepo.get_revision(version)
+ # populate parent id cache
+ if not rev.parent_ids:
+ parents = []
+ self._parentids[version] = (revision.NULL_REVISION,)
+ else:
+ parents = self._filterghosts(rev.parent_ids)
+ self._parentids[version] = parents
+
+ return commit(parents=parents,
+ date='%d %d' % (rev.timestamp, -rev.timezone),
+ author=self.recode(rev.committer),
+ # bzr returns bytestrings or unicode, depending on the content
+ desc=self.recode(rev.message),
+ rev=version)
+
+ def gettags(self):
+ if not self.branch.supports_tags():
+ return {}
+ tagdict = self.branch.tags.get_tag_dict()
+ bytetags = {}
+ for name, rev in tagdict.iteritems():
+ bytetags[self.recode(name)] = rev
+ return bytetags
+
+ def getchangedfiles(self, rev, i):
+ self._modecache = {}
+ curtree = self.sourcerepo.revision_tree(rev)
+ if i is not None:
+ parentid = self._parentids[rev][i]
+ else:
+ # no parent id, get the empty revision
+ parentid = revision.NULL_REVISION
+
+ prevtree = self.sourcerepo.revision_tree(parentid)
+ changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]]
+ return changes
+
+ def _gettreechanges(self, current, origin):
+ revid = current._revision_id
+ changes = []
+ renames = {}
+ for (fileid, paths, changed_content, versioned, parent, name,
+ kind, executable) in current.iter_changes(origin):
+
+ if paths[0] == u'' or paths[1] == u'':
+ # ignore changes to tree root
+ continue
+
+ # bazaar tracks directories, mercurial does not, so
+ # we have to rename the directory contents
+ if kind[1] == 'directory':
+ if kind[0] not in (None, 'directory'):
+ # Replacing 'something' with a directory, record it
+ # so it can be removed.
+ changes.append((self.recode(paths[0]), revid))
+
+ if None not in paths and paths[0] != paths[1]:
+ # neither an add nor an delete - a move
+ # rename all directory contents manually
+ subdir = origin.inventory.path2id(paths[0])
+ # get all child-entries of the directory
+ for name, entry in origin.inventory.iter_entries(subdir):
+ # hg does not track directory renames
+ if entry.kind == 'directory':
+ continue
+ frompath = self.recode(paths[0] + '/' + name)
+ topath = self.recode(paths[1] + '/' + name)
+ # register the files as changed
+ changes.append((frompath, revid))
+ changes.append((topath, revid))
+ # add to mode cache
+ mode = ((entry.executable and 'x')
+ or (entry.kind == 'symlink' and 's')
+ or '')
+ self._modecache[(topath, revid)] = mode
+ # register the change as move
+ renames[topath] = frompath
+
+ # no futher changes, go to the next change
+ continue
+
+ # we got unicode paths, need to convert them
+ path, topath = [self.recode(part) for part in paths]
+
+ if topath is None:
+ # file deleted
+ changes.append((path, revid))
+ continue
+
+ # renamed
+ if path and path != topath:
+ renames[topath] = path
+ changes.append((path, revid))
+
+ # populate the mode cache
+ kind, executable = [e[1] for e in (kind, executable)]
+ mode = ((executable and 'x') or (kind == 'symlink' and 'l')
+ or '')
+ self._modecache[(topath, revid)] = mode
+ changes.append((topath, revid))
+
+ return changes, renames
+
+ def _filterghosts(self, ids):
+ """Filters out ghost revisions which hg does not support, see
+ <http://bazaar-vcs.org/GhostRevision>
+ """
+ parentmap = self.sourcerepo.get_parent_map(ids)
+ parents = tuple([parent for parent in ids if parent in parentmap])
+ return parents
+
+ def recode(self, s, encoding=None):
+ """This version of recode tries to encode unicode to bytecode,
+ and preferably using the UTF-8 codec.
+ Other types than Unicode are silently returned, this is by
+ intention, e.g. the None-type is not going to be encoded but instead
+ just passed through
+ """
+ if not encoding:
+ encoding = self.encoding or 'utf-8'
+
+ if isinstance(s, unicode):
+ return s.encode(encoding)
+ else:
+ # leave it alone
+ return s
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo
new file mode 100644
index 0000000..ab47e99
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py
new file mode 100644
index 0000000..fb3865f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py
@@ -0,0 +1,389 @@
+# common.py - common code for the convert extension
+#
+# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+import base64, errno
+import os
+import cPickle as pickle
+from mercurial import util
+from mercurial.i18n import _
+
+def encodeargs(args):
+ def encodearg(s):
+ lines = base64.encodestring(s)
+ lines = [l.splitlines()[0] for l in lines]
+ return ''.join(lines)
+
+ s = pickle.dumps(args)
+ return encodearg(s)
+
+def decodeargs(s):
+ s = base64.decodestring(s)
+ return pickle.loads(s)
+
+class MissingTool(Exception):
+ pass
+
+def checktool(exe, name=None, abort=True):
+ name = name or exe
+ if not util.find_exe(exe):
+ exc = abort and util.Abort or MissingTool
+ raise exc(_('cannot find required "%s" tool') % name)
+
+class NoRepo(Exception):
+ pass
+
+SKIPREV = 'SKIP'
+
+class commit(object):
+ def __init__(self, author, date, desc, parents, branch=None, rev=None,
+ extra={}, sortkey=None):
+ self.author = author or 'unknown'
+ self.date = date or '0 0'
+ self.desc = desc
+ self.parents = parents
+ self.branch = branch
+ self.rev = rev
+ self.extra = extra
+ self.sortkey = sortkey
+
+class converter_source(object):
+ """Conversion source interface"""
+
+ def __init__(self, ui, path=None, rev=None):
+ """Initialize conversion source (or raise NoRepo("message")
+ exception if path is not a valid repository)"""
+ self.ui = ui
+ self.path = path
+ self.rev = rev
+
+ self.encoding = 'utf-8'
+
+ def before(self):
+ pass
+
+ def after(self):
+ pass
+
+ def setrevmap(self, revmap):
+ """set the map of already-converted revisions"""
+ pass
+
+ def getheads(self):
+ """Return a list of this repository's heads"""
+ raise NotImplementedError()
+
+ def getfile(self, name, rev):
+ """Return a pair (data, mode) where data is the file content
+ as a string and mode one of '', 'x' or 'l'. rev is the
+ identifier returned by a previous call to getchanges(). Raise
+ IOError to indicate that name was deleted in rev.
+ """
+ raise NotImplementedError()
+
+ def getchanges(self, version):
+ """Returns a tuple of (files, copies).
+
+ files is a sorted list of (filename, id) tuples for all files
+ changed between version and its first parent returned by
+ getcommit(). id is the source revision id of the file.
+
+ copies is a dictionary of dest: source
+ """
+ raise NotImplementedError()
+
+ def getcommit(self, version):
+ """Return the commit object for version"""
+ raise NotImplementedError()
+
+ def gettags(self):
+ """Return the tags as a dictionary of name: revision
+
+ Tag names must be UTF-8 strings.
+ """
+ raise NotImplementedError()
+
+ def recode(self, s, encoding=None):
+ if not encoding:
+ encoding = self.encoding or 'utf-8'
+
+ if isinstance(s, unicode):
+ return s.encode("utf-8")
+ try:
+ return s.decode(encoding).encode("utf-8")
+ except:
+ try:
+ return s.decode("latin-1").encode("utf-8")
+ except:
+ return s.decode(encoding, "replace").encode("utf-8")
+
+ def getchangedfiles(self, rev, i):
+ """Return the files changed by rev compared to parent[i].
+
+ i is an index selecting one of the parents of rev. The return
+ value should be the list of files that are different in rev and
+ this parent.
+
+ If rev has no parents, i is None.
+
+ This function is only needed to support --filemap
+ """
+ raise NotImplementedError()
+
+ def converted(self, rev, sinkrev):
+ '''Notify the source that a revision has been converted.'''
+ pass
+
+ def hasnativeorder(self):
+ """Return true if this source has a meaningful, native revision
+ order. For instance, Mercurial revisions are store sequentially
+ while there is no such global ordering with Darcs.
+ """
+ return False
+
+ def lookuprev(self, rev):
+ """If rev is a meaningful revision reference in source, return
+ the referenced identifier in the same format used by getcommit().
+ return None otherwise.
+ """
+ return None
+
+class converter_sink(object):
+ """Conversion sink (target) interface"""
+
+ def __init__(self, ui, path):
+ """Initialize conversion sink (or raise NoRepo("message")
+ exception if path is not a valid repository)
+
+ created is a list of paths to remove if a fatal error occurs
+ later"""
+ self.ui = ui
+ self.path = path
+ self.created = []
+
+ def getheads(self):
+ """Return a list of this repository's heads"""
+ raise NotImplementedError()
+
+ def revmapfile(self):
+ """Path to a file that will contain lines
+ source_rev_id sink_rev_id
+ mapping equivalent revision identifiers for each system."""
+ raise NotImplementedError()
+
+ def authorfile(self):
+ """Path to a file that will contain lines
+ srcauthor=dstauthor
+ mapping equivalent authors identifiers for each system."""
+ return None
+
+ def putcommit(self, files, copies, parents, commit, source, revmap):
+ """Create a revision with all changed files listed in 'files'
+ and having listed parents. 'commit' is a commit object
+ containing at a minimum the author, date, and message for this
+ changeset. 'files' is a list of (path, version) tuples,
+ 'copies' is a dictionary mapping destinations to sources,
+ 'source' is the source repository, and 'revmap' is a mapfile
+ of source revisions to converted revisions. Only getfile() and
+ lookuprev() should be called on 'source'.
+
+ Note that the sink repository is not told to update itself to
+ a particular revision (or even what that revision would be)
+ before it receives the file data.
+ """
+ raise NotImplementedError()
+
+ def puttags(self, tags):
+ """Put tags into sink.
+
+ tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
+ Return a pair (tag_revision, tag_parent_revision), or (None, None)
+ if nothing was changed.
+ """
+ raise NotImplementedError()
+
+ def setbranch(self, branch, pbranches):
+ """Set the current branch name. Called before the first putcommit
+ on the branch.
+ branch: branch name for subsequent commits
+ pbranches: (converted parent revision, parent branch) tuples"""
+ pass
+
+ def setfilemapmode(self, active):
+ """Tell the destination that we're using a filemap
+
+ Some converter_sources (svn in particular) can claim that a file
+ was changed in a revision, even if there was no change. This method
+ tells the destination that we're using a filemap and that it should
+ filter empty revisions.
+ """
+ pass
+
+ def before(self):
+ pass
+
+ def after(self):
+ pass
+
+
+class commandline(object):
+ def __init__(self, ui, command):
+ self.ui = ui
+ self.command = command
+
+ def prerun(self):
+ pass
+
+ def postrun(self):
+ pass
+
+ def _cmdline(self, cmd, *args, **kwargs):
+ cmdline = [self.command, cmd] + list(args)
+ for k, v in kwargs.iteritems():
+ if len(k) == 1:
+ cmdline.append('-' + k)
+ else:
+ cmdline.append('--' + k.replace('_', '-'))
+ try:
+ if len(k) == 1:
+ cmdline.append('' + v)
+ else:
+ cmdline[-1] += '=' + v
+ except TypeError:
+ pass
+ cmdline = [util.shellquote(arg) for arg in cmdline]
+ if not self.ui.debugflag:
+ cmdline += ['2>', util.nulldev]
+ cmdline += ['<', util.nulldev]
+ cmdline = ' '.join(cmdline)
+ return cmdline
+
+ def _run(self, cmd, *args, **kwargs):
+ cmdline = self._cmdline(cmd, *args, **kwargs)
+ self.ui.debug('running: %s\n' % (cmdline,))
+ self.prerun()
+ try:
+ return util.popen(cmdline)
+ finally:
+ self.postrun()
+
+ def run(self, cmd, *args, **kwargs):
+ fp = self._run(cmd, *args, **kwargs)
+ output = fp.read()
+ self.ui.debug(output)
+ return output, fp.close()
+
+ def runlines(self, cmd, *args, **kwargs):
+ fp = self._run(cmd, *args, **kwargs)
+ output = fp.readlines()
+ self.ui.debug(''.join(output))
+ return output, fp.close()
+
+ def checkexit(self, status, output=''):
+ if status:
+ if output:
+ self.ui.warn(_('%s error:\n') % self.command)
+ self.ui.warn(output)
+ msg = util.explain_exit(status)[0]
+ raise util.Abort('%s %s' % (self.command, msg))
+
+ def run0(self, cmd, *args, **kwargs):
+ output, status = self.run(cmd, *args, **kwargs)
+ self.checkexit(status, output)
+ return output
+
+ def runlines0(self, cmd, *args, **kwargs):
+ output, status = self.runlines(cmd, *args, **kwargs)
+ self.checkexit(status, ''.join(output))
+ return output
+
+ def getargmax(self):
+ if '_argmax' in self.__dict__:
+ return self._argmax
+
+ # POSIX requires at least 4096 bytes for ARG_MAX
+ self._argmax = 4096
+ try:
+ self._argmax = os.sysconf("SC_ARG_MAX")
+ except:
+ pass
+
+ # Windows shells impose their own limits on command line length,
+ # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
+ # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
+ # details about cmd.exe limitations.
+
+ # Since ARG_MAX is for command line _and_ environment, lower our limit
+ # (and make happy Windows shells while doing this).
+
+ self._argmax = self._argmax / 2 - 1
+ return self._argmax
+
+ def limit_arglist(self, arglist, cmd, *args, **kwargs):
+ limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs))
+ bytes = 0
+ fl = []
+ for fn in arglist:
+ b = len(fn) + 3
+ if bytes + b < limit or len(fl) == 0:
+ fl.append(fn)
+ bytes += b
+ else:
+ yield fl
+ fl = [fn]
+ bytes = b
+ if fl:
+ yield fl
+
+ def xargs(self, arglist, cmd, *args, **kwargs):
+ for l in self.limit_arglist(arglist, cmd, *args, **kwargs):
+ self.run0(cmd, *(list(args) + l), **kwargs)
+
+class mapfile(dict):
+ def __init__(self, ui, path):
+ super(mapfile, self).__init__()
+ self.ui = ui
+ self.path = path
+ self.fp = None
+ self.order = []
+ self._read()
+
+ def _read(self):
+ if not self.path:
+ return
+ try:
+ fp = open(self.path, 'r')
+ except IOError, err:
+ if err.errno != errno.ENOENT:
+ raise
+ return
+ for i, line in enumerate(fp):
+ try:
+ key, value = line.splitlines()[0].rsplit(' ', 1)
+ except ValueError:
+ raise util.Abort(
+ _('syntax error in %s(%d): key/value pair expected')
+ % (self.path, i + 1))
+ if key not in self:
+ self.order.append(key)
+ super(mapfile, self).__setitem__(key, value)
+ fp.close()
+
+ def __setitem__(self, key, value):
+ if self.fp is None:
+ try:
+ self.fp = open(self.path, 'a')
+ except IOError, err:
+ raise util.Abort(_('could not open map file %r: %s') %
+ (self.path, err.strerror))
+ self.fp.write('%s %s\n' % (key, value))
+ self.fp.flush()
+ super(mapfile, self).__setitem__(key, value)
+
+ def close(self):
+ if self.fp:
+ self.fp.close()
+ self.fp = None
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo
new file mode 100644
index 0000000..de20000
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py
new file mode 100644
index 0000000..ac91b41
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py
@@ -0,0 +1,434 @@
+# convcmd - convert extension commands definition
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from common import NoRepo, MissingTool, SKIPREV, mapfile
+from cvs import convert_cvs
+from darcs import darcs_source
+from git import convert_git
+from hg import mercurial_source, mercurial_sink
+from subversion import svn_source, svn_sink
+from monotone import monotone_source
+from gnuarch import gnuarch_source
+from bzr import bzr_source
+from p4 import p4_source
+import filemap
+
+import os, shutil
+from mercurial import hg, util, encoding
+from mercurial.i18n import _
+
+orig_encoding = 'ascii'
+
+def recode(s):
+ if isinstance(s, unicode):
+ return s.encode(orig_encoding, 'replace')
+ else:
+ return s.decode('utf-8').encode(orig_encoding, 'replace')
+
+source_converters = [
+ ('cvs', convert_cvs, 'branchsort'),
+ ('git', convert_git, 'branchsort'),
+ ('svn', svn_source, 'branchsort'),
+ ('hg', mercurial_source, 'sourcesort'),
+ ('darcs', darcs_source, 'branchsort'),
+ ('mtn', monotone_source, 'branchsort'),
+ ('gnuarch', gnuarch_source, 'branchsort'),
+ ('bzr', bzr_source, 'branchsort'),
+ ('p4', p4_source, 'branchsort'),
+ ]
+
+sink_converters = [
+ ('hg', mercurial_sink),
+ ('svn', svn_sink),
+ ]
+
+def convertsource(ui, path, type, rev):
+ exceptions = []
+ if type and type not in [s[0] for s in source_converters]:
+ raise util.Abort(_('%s: invalid source repository type') % type)
+ for name, source, sortmode in source_converters:
+ try:
+ if not type or name == type:
+ return source(ui, path, rev), sortmode
+ except (NoRepo, MissingTool), inst:
+ exceptions.append(inst)
+ if not ui.quiet:
+ for inst in exceptions:
+ ui.write("%s\n" % inst)
+ raise util.Abort(_('%s: missing or unsupported repository') % path)
+
+def convertsink(ui, path, type):
+ if type and type not in [s[0] for s in sink_converters]:
+ raise util.Abort(_('%s: invalid destination repository type') % type)
+ for name, sink in sink_converters:
+ try:
+ if not type or name == type:
+ return sink(ui, path)
+ except NoRepo, inst:
+ ui.note(_("convert: %s\n") % inst)
+ raise util.Abort(_('%s: unknown repository type') % path)
+
+class progresssource(object):
+ def __init__(self, ui, source, filecount):
+ self.ui = ui
+ self.source = source
+ self.filecount = filecount
+ self.retrieved = 0
+
+ def getfile(self, file, rev):
+ self.retrieved += 1
+ self.ui.progress(_('getting files'), self.retrieved,
+ item=file, total=self.filecount)
+ return self.source.getfile(file, rev)
+
+ def lookuprev(self, rev):
+ return self.source.lookuprev(rev)
+
+ def close(self):
+ self.ui.progress(_('getting files'), None)
+
+class converter(object):
+ def __init__(self, ui, source, dest, revmapfile, opts):
+
+ self.source = source
+ self.dest = dest
+ self.ui = ui
+ self.opts = opts
+ self.commitcache = {}
+ self.authors = {}
+ self.authorfile = None
+
+ # Record converted revisions persistently: maps source revision
+ # ID to target revision ID (both strings). (This is how
+ # incremental conversions work.)
+ self.map = mapfile(ui, revmapfile)
+
+ # Read first the dst author map if any
+ authorfile = self.dest.authorfile()
+ if authorfile and os.path.exists(authorfile):
+ self.readauthormap(authorfile)
+ # Extend/Override with new author map if necessary
+ if opts.get('authormap'):
+ self.readauthormap(opts.get('authormap'))
+ self.authorfile = self.dest.authorfile()
+
+ self.splicemap = mapfile(ui, opts.get('splicemap'))
+ self.branchmap = mapfile(ui, opts.get('branchmap'))
+
+ def walktree(self, heads):
+ '''Return a mapping that identifies the uncommitted parents of every
+ uncommitted changeset.'''
+ visit = heads
+ known = set()
+ parents = {}
+ while visit:
+ n = visit.pop(0)
+ if n in known or n in self.map:
+ continue
+ known.add(n)
+ self.ui.progress(_('scanning'), len(known), unit=_('revisions'))
+ commit = self.cachecommit(n)
+ parents[n] = []
+ for p in commit.parents:
+ parents[n].append(p)
+ visit.append(p)
+ self.ui.progress(_('scanning'), None)
+
+ return parents
+
+ def toposort(self, parents, sortmode):
+ '''Return an ordering such that every uncommitted changeset is
+ preceeded by all its uncommitted ancestors.'''
+
+ def mapchildren(parents):
+ """Return a (children, roots) tuple where 'children' maps parent
+ revision identifiers to children ones, and 'roots' is the list of
+ revisions without parents. 'parents' must be a mapping of revision
+ identifier to its parents ones.
+ """
+ visit = parents.keys()
+ seen = set()
+ children = {}
+ roots = []
+
+ while visit:
+ n = visit.pop(0)
+ if n in seen:
+ continue
+ seen.add(n)
+ # Ensure that nodes without parents are present in the
+ # 'children' mapping.
+ children.setdefault(n, [])
+ hasparent = False
+ for p in parents[n]:
+ if not p in self.map:
+ visit.append(p)
+ hasparent = True
+ children.setdefault(p, []).append(n)
+ if not hasparent:
+ roots.append(n)
+
+ return children, roots
+
+ # Sort functions are supposed to take a list of revisions which
+ # can be converted immediately and pick one
+
+ def makebranchsorter():
+ """If the previously converted revision has a child in the
+ eligible revisions list, pick it. Return the list head
+ otherwise. Branch sort attempts to minimize branch
+ switching, which is harmful for Mercurial backend
+ compression.
+ """
+ prev = [None]
+ def picknext(nodes):
+ next = nodes[0]
+ for n in nodes:
+ if prev[0] in parents[n]:
+ next = n
+ break
+ prev[0] = next
+ return next
+ return picknext
+
+ def makesourcesorter():
+ """Source specific sort."""
+ keyfn = lambda n: self.commitcache[n].sortkey
+ def picknext(nodes):
+ return sorted(nodes, key=keyfn)[0]
+ return picknext
+
+ def makedatesorter():
+ """Sort revisions by date."""
+ dates = {}
+ def getdate(n):
+ if n not in dates:
+ dates[n] = util.parsedate(self.commitcache[n].date)
+ return dates[n]
+
+ def picknext(nodes):
+ return min([(getdate(n), n) for n in nodes])[1]
+
+ return picknext
+
+ if sortmode == 'branchsort':
+ picknext = makebranchsorter()
+ elif sortmode == 'datesort':
+ picknext = makedatesorter()
+ elif sortmode == 'sourcesort':
+ picknext = makesourcesorter()
+ else:
+ raise util.Abort(_('unknown sort mode: %s') % sortmode)
+
+ children, actives = mapchildren(parents)
+
+ s = []
+ pendings = {}
+ while actives:
+ n = picknext(actives)
+ actives.remove(n)
+ s.append(n)
+
+ # Update dependents list
+ for c in children.get(n, []):
+ if c not in pendings:
+ pendings[c] = [p for p in parents[c] if p not in self.map]
+ try:
+ pendings[c].remove(n)
+ except ValueError:
+ raise util.Abort(_('cycle detected between %s and %s')
+ % (recode(c), recode(n)))
+ if not pendings[c]:
+ # Parents are converted, node is eligible
+ actives.insert(0, c)
+ pendings[c] = None
+
+ if len(s) != len(parents):
+ raise util.Abort(_("not all revisions were sorted"))
+
+ return s
+
+ def writeauthormap(self):
+ authorfile = self.authorfile
+ if authorfile:
+ self.ui.status(_('Writing author map file %s\n') % authorfile)
+ ofile = open(authorfile, 'w+')
+ for author in self.authors:
+ ofile.write("%s=%s\n" % (author, self.authors[author]))
+ ofile.close()
+
+ def readauthormap(self, authorfile):
+ afile = open(authorfile, 'r')
+ for line in afile:
+
+ line = line.strip()
+ if not line or line.startswith('#'):
+ continue
+
+ try:
+ srcauthor, dstauthor = line.split('=', 1)
+ except ValueError:
+ msg = _('Ignoring bad line in author map file %s: %s\n')
+ self.ui.warn(msg % (authorfile, line.rstrip()))
+ continue
+
+ srcauthor = srcauthor.strip()
+ dstauthor = dstauthor.strip()
+ if self.authors.get(srcauthor) in (None, dstauthor):
+ msg = _('mapping author %s to %s\n')
+ self.ui.debug(msg % (srcauthor, dstauthor))
+ self.authors[srcauthor] = dstauthor
+ continue
+
+ m = _('overriding mapping for author %s, was %s, will be %s\n')
+ self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor))
+
+ afile.close()
+
+ def cachecommit(self, rev):
+ commit = self.source.getcommit(rev)
+ commit.author = self.authors.get(commit.author, commit.author)
+ commit.branch = self.branchmap.get(commit.branch, commit.branch)
+ self.commitcache[rev] = commit
+ return commit
+
+ def copy(self, rev):
+ commit = self.commitcache[rev]
+
+ changes = self.source.getchanges(rev)
+ if isinstance(changes, basestring):
+ if changes == SKIPREV:
+ dest = SKIPREV
+ else:
+ dest = self.map[changes]
+ self.map[rev] = dest
+ return
+ files, copies = changes
+ pbranches = []
+ if commit.parents:
+ for prev in commit.parents:
+ if prev not in self.commitcache:
+ self.cachecommit(prev)
+ pbranches.append((self.map[prev],
+ self.commitcache[prev].branch))
+ self.dest.setbranch(commit.branch, pbranches)
+ try:
+ parents = self.splicemap[rev].replace(',', ' ').split()
+ self.ui.status(_('spliced in %s as parents of %s\n') %
+ (parents, rev))
+ parents = [self.map.get(p, p) for p in parents]
+ except KeyError:
+ parents = [b[0] for b in pbranches]
+ source = progresssource(self.ui, self.source, len(files))
+ newnode = self.dest.putcommit(files, copies, parents, commit,
+ source, self.map)
+ source.close()
+ self.source.converted(rev, newnode)
+ self.map[rev] = newnode
+
+ def convert(self, sortmode):
+ try:
+ self.source.before()
+ self.dest.before()
+ self.source.setrevmap(self.map)
+ self.ui.status(_("scanning source...\n"))
+ heads = self.source.getheads()
+ parents = self.walktree(heads)
+ self.ui.status(_("sorting...\n"))
+ t = self.toposort(parents, sortmode)
+ num = len(t)
+ c = None
+
+ self.ui.status(_("converting...\n"))
+ for i, c in enumerate(t):
+ num -= 1
+ desc = self.commitcache[c].desc
+ if "\n" in desc:
+ desc = desc.splitlines()[0]
+ # convert log message to local encoding without using
+ # tolocal() because the encoding.encoding convert()
+ # uses is 'utf-8'
+ self.ui.status("%d %s\n" % (num, recode(desc)))
+ self.ui.note(_("source: %s\n") % recode(c))
+ self.ui.progress(_('converting'), i, unit=_('revisions'),
+ total=len(t))
+ self.copy(c)
+ self.ui.progress(_('converting'), None)
+
+ tags = self.source.gettags()
+ ctags = {}
+ for k in tags:
+ v = tags[k]
+ if self.map.get(v, SKIPREV) != SKIPREV:
+ ctags[k] = self.map[v]
+
+ if c and ctags:
+ nrev, tagsparent = self.dest.puttags(ctags)
+ if nrev and tagsparent:
+ # write another hash correspondence to override the previous
+ # one so we don't end up with extra tag heads
+ tagsparents = [e for e in self.map.iteritems()
+ if e[1] == tagsparent]
+ if tagsparents:
+ self.map[tagsparents[0][0]] = nrev
+
+ self.writeauthormap()
+ finally:
+ self.cleanup()
+
+ def cleanup(self):
+ try:
+ self.dest.after()
+ finally:
+ self.source.after()
+ self.map.close()
+
+def convert(ui, src, dest=None, revmapfile=None, **opts):
+ global orig_encoding
+ orig_encoding = encoding.encoding
+ encoding.encoding = 'UTF-8'
+
+ # support --authors as an alias for --authormap
+ if not opts.get('authormap'):
+ opts['authormap'] = opts.get('authors')
+
+ if not dest:
+ dest = hg.defaultdest(src) + "-hg"
+ ui.status(_("assuming destination %s\n") % dest)
+
+ destc = convertsink(ui, dest, opts.get('dest_type'))
+
+ try:
+ srcc, defaultsort = convertsource(ui, src, opts.get('source_type'),
+ opts.get('rev'))
+ except Exception:
+ for path in destc.created:
+ shutil.rmtree(path, True)
+ raise
+
+ sortmodes = ('branchsort', 'datesort', 'sourcesort')
+ sortmode = [m for m in sortmodes if opts.get(m)]
+ if len(sortmode) > 1:
+ raise util.Abort(_('more than one sort mode specified'))
+ sortmode = sortmode and sortmode[0] or defaultsort
+ if sortmode == 'sourcesort' and not srcc.hasnativeorder():
+ raise util.Abort(_('--sourcesort is not supported by this data source'))
+
+ fmap = opts.get('filemap')
+ if fmap:
+ srcc = filemap.filemap_source(ui, srcc, fmap)
+ destc.setfilemapmode(True)
+
+ if not revmapfile:
+ try:
+ revmapfile = destc.revmapfile()
+ except:
+ revmapfile = os.path.join(destc, "map")
+
+ c = converter(ui, srcc, destc, revmapfile, opts)
+ c.convert(sortmode)
+
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo
new file mode 100644
index 0000000..15f040a
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py
new file mode 100644
index 0000000..501fae2
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py
@@ -0,0 +1,271 @@
+# cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport
+#
+# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+import os, re, socket, errno
+from cStringIO import StringIO
+from mercurial import encoding, util
+from mercurial.i18n import _
+
+from common import NoRepo, commit, converter_source, checktool
+import cvsps
+
+class convert_cvs(converter_source):
+ def __init__(self, ui, path, rev=None):
+ super(convert_cvs, self).__init__(ui, path, rev=rev)
+
+ cvs = os.path.join(path, "CVS")
+ if not os.path.exists(cvs):
+ raise NoRepo(_("%s does not look like a CVS checkout") % path)
+
+ checktool('cvs')
+
+ self.changeset = None
+ self.files = {}
+ self.tags = {}
+ self.lastbranch = {}
+ self.socket = None
+ self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1]
+ self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1]
+ self.encoding = encoding.encoding
+
+ self._connect()
+
+ def _parse(self):
+ if self.changeset is not None:
+ return
+ self.changeset = {}
+
+ maxrev = 0
+ if self.rev:
+ # TODO: handle tags
+ try:
+ # patchset number?
+ maxrev = int(self.rev)
+ except ValueError:
+ raise util.Abort(_('revision %s is not a patchset number')
+ % self.rev)
+
+ d = os.getcwd()
+ try:
+ os.chdir(self.path)
+ id = None
+
+ cache = 'update'
+ if not self.ui.configbool('convert', 'cvsps.cache', True):
+ cache = None
+ db = cvsps.createlog(self.ui, cache=cache)
+ db = cvsps.createchangeset(self.ui, db,
+ fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)),
+ mergeto=self.ui.config('convert', 'cvsps.mergeto', None),
+ mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None))
+
+ for cs in db:
+ if maxrev and cs.id > maxrev:
+ break
+ id = str(cs.id)
+ cs.author = self.recode(cs.author)
+ self.lastbranch[cs.branch] = id
+ cs.comment = self.recode(cs.comment)
+ date = util.datestr(cs.date)
+ self.tags.update(dict.fromkeys(cs.tags, id))
+
+ files = {}
+ for f in cs.entries:
+ files[f.file] = "%s%s" % ('.'.join([str(x)
+ for x in f.revision]),
+ ['', '(DEAD)'][f.dead])
+
+ # add current commit to set
+ c = commit(author=cs.author, date=date,
+ parents=[str(p.id) for p in cs.parents],
+ desc=cs.comment, branch=cs.branch or '')
+ self.changeset[id] = c
+ self.files[id] = files
+
+ self.heads = self.lastbranch.values()
+ finally:
+ os.chdir(d)
+
+ def _connect(self):
+ root = self.cvsroot
+ conntype = None
+ user, host = None, None
+ cmd = ['cvs', 'server']
+
+ self.ui.status(_("connecting to %s\n") % root)
+
+ if root.startswith(":pserver:"):
+ root = root[9:]
+ m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)',
+ root)
+ if m:
+ conntype = "pserver"
+ user, passw, serv, port, root = m.groups()
+ if not user:
+ user = "anonymous"
+ if not port:
+ port = 2401
+ else:
+ port = int(port)
+ format0 = ":pserver:%s@%s:%s" % (user, serv, root)
+ format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root)
+
+ if not passw:
+ passw = "A"
+ cvspass = os.path.expanduser("~/.cvspass")
+ try:
+ pf = open(cvspass)
+ for line in pf.read().splitlines():
+ part1, part2 = line.split(' ', 1)
+ if part1 == '/1':
+ # /1 :pserver:user@example.com:2401/cvsroot/foo Ah<Z
+ part1, part2 = part2.split(' ', 1)
+ format = format1
+ else:
+ # :pserver:user@example.com:/cvsroot/foo Ah<Z
+ format = format0
+ if part1 == format:
+ passw = part2
+ break
+ pf.close()
+ except IOError, inst:
+ if inst.errno != errno.ENOENT:
+ if not getattr(inst, 'filename', None):
+ inst.filename = cvspass
+ raise
+
+ sck = socket.socket()
+ sck.connect((serv, port))
+ sck.send("\n".join(["BEGIN AUTH REQUEST", root, user, passw,
+ "END AUTH REQUEST", ""]))
+ if sck.recv(128) != "I LOVE YOU\n":
+ raise util.Abort(_("CVS pserver authentication failed"))
+
+ self.writep = self.readp = sck.makefile('r+')
+
+ if not conntype and root.startswith(":local:"):
+ conntype = "local"
+ root = root[7:]
+
+ if not conntype:
+ # :ext:user@host/home/user/path/to/cvsroot
+ if root.startswith(":ext:"):
+ root = root[5:]
+ m = re.match(r'(?:([^@:/]+)@)?([^:/]+):?(.*)', root)
+ # Do not take Windows path "c:\foo\bar" for a connection strings
+ if os.path.isdir(root) or not m:
+ conntype = "local"
+ else:
+ conntype = "rsh"
+ user, host, root = m.group(1), m.group(2), m.group(3)
+
+ if conntype != "pserver":
+ if conntype == "rsh":
+ rsh = os.environ.get("CVS_RSH") or "ssh"
+ if user:
+ cmd = [rsh, '-l', user, host] + cmd
+ else:
+ cmd = [rsh, host] + cmd
+
+ # popen2 does not support argument lists under Windows
+ cmd = [util.shellquote(arg) for arg in cmd]
+ cmd = util.quotecommand(' '.join(cmd))
+ self.writep, self.readp = util.popen2(cmd)
+
+ self.realroot = root
+
+ self.writep.write("Root %s\n" % root)
+ self.writep.write("Valid-responses ok error Valid-requests Mode"
+ " M Mbinary E Checked-in Created Updated"
+ " Merged Removed\n")
+ self.writep.write("valid-requests\n")
+ self.writep.flush()
+ r = self.readp.readline()
+ if not r.startswith("Valid-requests"):
+ raise util.Abort(_('unexpected response from CVS server '
+ '(expected "Valid-requests", but got %r)')
+ % r)
+ if "UseUnchanged" in r:
+ self.writep.write("UseUnchanged\n")
+ self.writep.flush()
+ r = self.readp.readline()
+
+ def getheads(self):
+ self._parse()
+ return self.heads
+
+ def getfile(self, name, rev):
+
+ def chunkedread(fp, count):
+ # file-objects returned by socked.makefile() do not handle
+ # large read() requests very well.
+ chunksize = 65536
+ output = StringIO()
+ while count > 0:
+ data = fp.read(min(count, chunksize))
+ if not data:
+ raise util.Abort(_("%d bytes missing from remote file")
+ % count)
+ count -= len(data)
+ output.write(data)
+ return output.getvalue()
+
+ self._parse()
+ if rev.endswith("(DEAD)"):
+ raise IOError
+
+ args = ("-N -P -kk -r %s --" % rev).split()
+ args.append(self.cvsrepo + '/' + name)
+ for x in args:
+ self.writep.write("Argument %s\n" % x)
+ self.writep.write("Directory .\n%s\nco\n" % self.realroot)
+ self.writep.flush()
+
+ data = ""
+ mode = None
+ while 1:
+ line = self.readp.readline()
+ if line.startswith("Created ") or line.startswith("Updated "):
+ self.readp.readline() # path
+ self.readp.readline() # entries
+ mode = self.readp.readline()[:-1]
+ count = int(self.readp.readline()[:-1])
+ data = chunkedread(self.readp, count)
+ elif line.startswith(" "):
+ data += line[1:]
+ elif line.startswith("M "):
+ pass
+ elif line.startswith("Mbinary "):
+ count = int(self.readp.readline()[:-1])
+ data = chunkedread(self.readp, count)
+ else:
+ if line == "ok\n":
+ if mode is None:
+ raise util.Abort(_('malformed response from CVS'))
+ return (data, "x" in mode and "x" or "")
+ elif line.startswith("E "):
+ self.ui.warn(_("cvs server: %s\n") % line[2:])
+ elif line.startswith("Remove"):
+ self.readp.readline()
+ else:
+ raise util.Abort(_("unknown CVS response: %s") % line)
+
+ def getchanges(self, rev):
+ self._parse()
+ return sorted(self.files[rev].iteritems()), {}
+
+ def getcommit(self, rev):
+ self._parse()
+ return self.changeset[rev]
+
+ def gettags(self):
+ self._parse()
+ return self.tags
+
+ def getchangedfiles(self, rev, i):
+ self._parse()
+ return sorted(self.files[rev])
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo
new file mode 100644
index 0000000..d73fe3f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py
new file mode 100644
index 0000000..1519d41
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py
@@ -0,0 +1,847 @@
+# Mercurial built-in replacement for cvsps.
+#
+# Copyright 2008, Frank Kingswood <frank@kingswood-consulting.co.uk>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+import os
+import re
+import cPickle as pickle
+from mercurial import util
+from mercurial.i18n import _
+from mercurial import hook
+
class logentry(object):
    '''One CVS file revision as parsed from (r)log output.

    Attributes (all set via keyword arguments, except .synthetic which
    defaults to False):
      .author       - author name as CVS knows it
      .branch       - name of branch this revision is on
      .branches     - revision tuple of branches starting at this revision
      .comment      - commit message
      .date         - the commit date as a (time, tz) tuple
      .dead         - true if file revision is dead
      .file         - name of file
      .lines        - a tuple (+lines, -lines) or None
      .parent       - previous revision of this entry
      .rcs          - name of file as returned from CVS
      .revision     - revision number as tuple
      .tags         - list of tags on the file
      .synthetic    - is this a synthetic "file ... added on ..." revision?
      .mergepoint   - the branch that has been merged from
                      (if present in rlog output)
      .branchpoints - the branches that start at the current entry
    '''

    def __init__(self, **entries):
        # Entries are real unless the parser explicitly marks them.
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        dotted = ".".join(str(part) for part in self.revision)
        return "<%s at 0x%x: %s %s>" % (
            self.__class__.__name__, id(self), self.file, dotted)
+
class logerror(Exception):
    '''Raised for fatal conditions found while reading CVS (r)log
    output (aborted log runs, unusable sandbox or CVSROOT, cache
    overlap).'''
    pass
+
def getrepopath(cvspath):
    """Return the repository path from a CVS path.

    >>> getrepopath('/foo/bar')
    '/foo/bar'
    >>> getrepopath('c:/foo/bar')
    'c:/foo/bar'
    >>> getrepopath(':pserver:10/foo/bar')
    '/foo/bar'
    >>> getrepopath(':pserver:10c:/foo/bar')
    '/foo/bar'
    >>> getrepopath(':pserver:/foo/bar')
    '/foo/bar'
    >>> getrepopath(':pserver:c:/foo/bar')
    'c:/foo/bar'
    >>> getrepopath(':pserver:truc@foo.bar:/foo/bar')
    '/foo/bar'
    >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar')
    'c:/foo/bar'
    """
    # Per the CVS manual, a CVS path is
    #   [:method:][[user][:password]@]hostname[:[port]]/path/to/repository
    # but a Windows absolute path may start with a drive letter ('c:'),
    # so a colon does not always separate CVS components.  We lean on
    # drive letters being exactly one character while every other CVS
    # component before the repository path is at least two.
    atoms = cvspath.split(':')
    if len(atoms) == 1:
        # No colon at all: already a plain repository path.
        return atoms[0]
    penultimate = atoms[-2]
    if len(penultimate) > 1:
        # The next-to-last component is a CVS component (host, port...),
        # not a drive letter, so the last component is the path; strip an
        # optional port number glued onto its front.  (A port immediately
        # followed by a drive letter would be ambiguous; we assume that
        # never happens and treat the digits as a port.)
        return atoms[-1].lstrip('0123456789')
    # One-character component: treat it as a Windows drive letter.
    return penultimate + ':' + atoms[-1]
+
def createlog(ui, directory=None, root="", rlog=True, cache=None):
    '''Collect the CVS rlog.

    Runs ``cvs rlog`` (or ``cvs log`` when rlog is False), parses the
    output with a state machine and returns a list of logentry objects
    sorted by (rcs file, revision).

    ui        - mercurial ui object for status/debug output
    directory - directory to log; None means the current CVS sandbox
                (CVS/Repository and CVS/Root are consulted)
    root      - CVSROOT to use; falls back to CVS/Root or $CVSROOT
    rlog      - use the server-side "rlog" command instead of "log"
    cache     - None, 'write' or 'update': controls the pickled log
                cache kept under ~/.hg.cvsps

    Raises logerror when CVS reports a fatal problem or when the cache
    overlaps with newly collected entries.
    '''

    # Because we store many duplicate commit log messages, reusing strings
    # saves a lot of memory and pickle storage space.
    _scache = {}
    def scache(s):
        "return a shared version of a string"
        return _scache.setdefault(s, s)

    ui.status(_('collecting CVS rlog\n'))

    log = []  # list of logentry objects containing the CVS state

    # patterns to match in CVS (r)log output, by state of use
    re_00 = re.compile('RCS file: (.+)$')
    re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$')
    re_02 = re.compile('cvs (r?log|server): (.+)\n$')
    re_03 = re.compile("(Cannot access.+CVSROOT)|"
                       "(can't create temporary directory.+)$")
    re_10 = re.compile('Working file: (.+)$')
    re_20 = re.compile('symbolic names:')
    re_30 = re.compile('\t(.+): ([\\d.]+)$')
    re_31 = re.compile('----------------------------$')
    re_32 = re.compile('======================================='
                       '======================================$')
    re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$')
    re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);'
                       r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?'
                       r'(.*mergepoint:\s+([^;]+);)?')
    re_70 = re.compile('branches: (.+);$')

    file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch')

    prefix = ''  # leading path to strip of what we get from CVS

    if directory is None:
        # Current working directory

        # Get the real directory in the repository
        try:
            prefix = open(os.path.join('CVS','Repository')).read().strip()
            directory = prefix
            if prefix == ".":
                prefix = ""
        except IOError:
            raise logerror(_('not a CVS sandbox'))

        if prefix and not prefix.endswith(os.sep):
            prefix += os.sep

        # Use the Root file in the sandbox, if it exists
        try:
            root = open(os.path.join('CVS','Root')).read().strip()
        except IOError:
            pass

    if not root:
        root = os.environ.get('CVSROOT', '')

    # read log cache if one exists
    oldlog = []
    date = None

    if cache:
        cachedir = os.path.expanduser('~/.hg.cvsps')
        if not os.path.exists(cachedir):
            os.mkdir(cachedir)

        # The cvsps cache pickle needs a uniquified name, based on the
        # repository location. The address may have all sort of nasties
        # in it, slashes, colons and such. So here we take just the
        # alphanumerics, concatenated in a way that does not mix up the
        # various components, so that
        #    :pserver:user@server:/path
        # and
        #    /pserver/user/server/path
        # are mapped to different cache file names.
        cachefile = root.split(":") + [directory, "cache"]
        cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s]
        cachefile = os.path.join(cachedir,
                                 '.'.join([s for s in cachefile if s]))

    if cache == 'update':
        try:
            ui.note(_('reading cvs log cache %s\n') % cachefile)
            oldlog = pickle.load(open(cachefile))
            ui.note(_('cache has %d log entries\n') % len(oldlog))
        except Exception, e:
            # best-effort cache: any read/unpickle failure just means we
            # fall back to a full log collection
            ui.note(_('error reading cache: %r\n') % e)

        if oldlog:
            date = oldlog[-1].date  # last commit date as a (time,tz) tuple
            date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2')

    # build the CVS commandline
    cmd = ['cvs', '-q']
    if root:
        cmd.append('-d%s' % root)
        p = util.normpath(getrepopath(root))
        if not p.endswith('/'):
            p += '/'
        if prefix:
            # looks like normpath replaces "" by "."
            prefix = p + util.normpath(prefix)
        else:
            prefix = p
    cmd.append(['log', 'rlog'][rlog])
    if date:
        # no space between option and date string
        cmd.append('-d>%s' % date)
    cmd.append(directory)

    # state machine begins here; states are:
    #   0 - looking for 'RCS file'          4 - waiting for '-----' separator
    #   1 - expecting 'Working file'        5 - expecting a revision number
    #   2 - expecting 'symbolic names'      6 - expecting the date line
    #   3 - reading tags                    7/8 - reading the log message
    tags = {}       # dictionary of revisions on current file with their tags
    branchmap = {}  # mapping between branch names and revision numbers
    state = 0
    store = False   # set when a new record can be appended

    cmd = [util.shellquote(arg) for arg in cmd]
    ui.note(_("running %s\n") % (' '.join(cmd)))
    ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root))

    pfp = util.popen(' '.join(cmd))
    peek = pfp.readline()
    while True:
        line = peek
        if line == '':
            break
        peek = pfp.readline()
        if line.endswith('\n'):
            line = line[:-1]
        #ui.debug('state=%d line=%r\n' % (state, line))

        if state == 0:
            # initial state, consume input until we see 'RCS file'
            match = re_00.match(line)
            if match:
                rcs = match.group(1)
                tags = {}
                if rlog:
                    filename = util.normpath(rcs[:-2])
                    if filename.startswith(prefix):
                        filename = filename[len(prefix):]
                    if filename.startswith('/'):
                        filename = filename[1:]
                    if filename.startswith('Attic/'):
                        filename = filename[6:]
                    else:
                        filename = filename.replace('/Attic/', '/')
                    state = 2
                    continue
                state = 1
                continue
            match = re_01.match(line)
            if match:
                raise logerror(match.group(1))
            match = re_02.match(line)
            if match:
                raise logerror(match.group(2))
            if re_03.match(line):
                raise logerror(line)

        elif state == 1:
            # expect 'Working file' (only when using log instead of rlog)
            match = re_10.match(line)
            assert match, _('RCS file must be followed by working file')
            filename = util.normpath(match.group(1))
            state = 2

        elif state == 2:
            # expect 'symbolic names'
            if re_20.match(line):
                branchmap = {}
                state = 3

        elif state == 3:
            # read the symbolic names and store as tags
            match = re_30.match(line)
            if match:
                rev = [int(x) for x in match.group(2).split('.')]

                # Convert magic branch number to an odd-numbered one
                revn = len(rev)
                if revn > 3 and (revn % 2) == 0 and rev[-2] == 0:
                    rev = rev[:-2] + rev[-1:]
                rev = tuple(rev)

                if rev not in tags:
                    tags[rev] = []
                tags[rev].append(match.group(1))
                branchmap[match.group(1)] = match.group(2)

            elif re_31.match(line):
                state = 5
            elif re_32.match(line):
                state = 0

        elif state == 4:
            # expecting '------' separator before first revision
            if re_31.match(line):
                state = 5
            else:
                assert not re_32.match(line), _('must have at least '
                                                'some revisions')

        elif state == 5:
            # expecting revision number and possibly (ignored) lock indication
            # we create the logentry here from values stored in states 0 to 4,
            # as this state is re-entered for subsequent revisions of a file.
            match = re_50.match(line)
            assert match, _('expected revision number')
            e = logentry(rcs=scache(rcs), file=scache(filename),
                         revision=tuple([int(x) for x in
                                         match.group(1).split('.')]),
                         branches=[], parent=None)
            state = 6

        elif state == 6:
            # expecting date, author, state, lines changed
            match = re_60.match(line)
            assert match, _('revision must be followed by date line')
            d = match.group(1)
            if d[2] == '/':
                # Y2K
                d = '19' + d

            if len(d.split()) != 3:
                # cvs log dates always in GMT
                d = d + ' UTC'
            e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S',
                                        '%Y/%m/%d %H:%M:%S',
                                        '%Y-%m-%d %H:%M:%S'])
            e.author = scache(match.group(2))
            e.dead = match.group(3).lower() == 'dead'

            if match.group(5):
                if match.group(6):
                    e.lines = (int(match.group(5)), int(match.group(6)))
                else:
                    e.lines = (int(match.group(5)), 0)
            elif match.group(6):
                e.lines = (0, int(match.group(6)))
            else:
                e.lines = None

            if match.group(7): # cvsnt mergepoint
                myrev = match.group(8).split('.')
                if len(myrev) == 2: # head
                    e.mergepoint = 'HEAD'
                else:
                    myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]])
                    branches = [b for b in branchmap if branchmap[b] == myrev]
                    # NOTE(review): e.mergepoint is not assigned yet at this
                    # point, so a failing assert here would itself raise an
                    # AttributeError while building its message.
                    assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint
                    e.mergepoint = branches[0]
            else:
                e.mergepoint = None
            e.comment = []
            state = 7

        elif state == 7:
            # read the revision numbers of branches that start at this revision
            # or store the commit log message otherwise
            m = re_70.match(line)
            if m:
                e.branches = [tuple([int(y) for y in x.strip().split('.')])
                              for x in m.group(1).split(';')]
                state = 8
            elif re_31.match(line) and re_50.match(peek):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        elif state == 8:
            # store commit log message
            if re_31.match(line):
                state = 5
                store = True
            elif re_32.match(line):
                state = 0
                store = True
            else:
                e.comment.append(line)

        # When a file is added on a branch B1, CVS creates a synthetic
        # dead trunk revision 1.1 so that the branch has a root.
        # Likewise, if you merge such a file to a later branch B2 (one
        # that already existed when the file was added on B1), CVS
        # creates a synthetic dead revision 1.1.x.1 on B2.  Don't drop
        # these revisions now, but mark them synthetic so
        # createchangeset() can take care of them.
        if (store and
            e.dead and
            e.revision[-1] == 1 and      # 1.1 or 1.1.x.1
            len(e.comment) == 1 and
            file_added_re.match(e.comment[0])):
            ui.debug('found synthetic revision in %s: %r\n'
                     % (e.rcs, e.comment[0]))
            e.synthetic = True

        if store:
            # clean up the results and save in the log.
            store = False
            e.tags = sorted([scache(x) for x in tags.get(e.revision, [])])
            e.comment = scache('\n'.join(e.comment))

            revn = len(e.revision)
            if revn > 3 and (revn % 2) == 0:
                e.branch = tags.get(e.revision[:-1], [None])[0]
            else:
                e.branch = None

            # find the branches starting from this revision
            branchpoints = set()
            for branch, revision in branchmap.iteritems():
                revparts = tuple([int(i) for i in revision.split('.')])
                if len(revparts) < 2: # bad tags
                    continue
                if revparts[-2] == 0 and revparts[-1] % 2 == 0:
                    # normal branch
                    if revparts[:-2] == e.revision:
                        branchpoints.add(branch)
                elif revparts == (1, 1, 1): # vendor branch
                    if revparts in e.branches:
                        branchpoints.add(branch)
            e.branchpoints = branchpoints

            log.append(e)

            if len(log) % 100 == 0:
                ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n')

    log.sort(key=lambda x: (x.rcs, x.revision))

    # find parent revisions of individual files
    versions = {}
    for e in log:
        branch = e.revision[:-1]
        p = versions.get((e.rcs, branch), None)
        if p is None:
            p = e.revision[:-2]
        e.parent = p
        versions[(e.rcs, branch)] = e.revision

    # update the log cache
    if cache:
        if log:
            # join up the old and new logs
            log.sort(key=lambda x: x.date)

            if oldlog and oldlog[-1].date >= log[0].date:
                raise logerror(_('log cache overlaps with new log entries,'
                                 ' re-run without cache.'))

            log = oldlog + log

            # write the new cachefile
            ui.note(_('writing cvs log cache %s\n') % cachefile)
            pickle.dump(log, open(cachefile, 'w'))
        else:
            log = oldlog

    ui.status(_('%d log entries\n') % len(log))

    hook.hook(ui, None, "cvslog", True, log=log)

    return log
+
+
class changeset(object):
    '''A group of CVS file revisions forming one logical commit.

    Attributes:
      .id           - integer identifying this changeset (list index)
      .author       - author name as CVS knows it
      .branch       - name of branch this changeset is on, or None
      .comment      - commit message
      .date         - the commit date as a (time, tz) tuple
      .entries      - list of logentry objects in this changeset
      .parents      - list of one or two parent changesets
      .tags         - list of tags on this changeset
      .synthetic    - from synthetic revision "file ... added on branch ..."
      .mergepoint   - the branch that has been merged from
                      (if present in rlog output)
      .branchpoints - the branches that start at the current entry
    '''

    def __init__(self, **entries):
        # A changeset is real unless explicitly marked synthetic later.
        self.synthetic = False
        self.__dict__.update(entries)

    def __repr__(self):
        label = getattr(self, 'id', "(no id)")
        return "<%s at 0x%x: %s>" % (
            self.__class__.__name__, id(self), label)
+
def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None):
    '''Convert log into changesets.

    Groups logentry objects that share comment, author, branch (and
    branchpoints) and whose dates lie within `fuzz` seconds of each
    other into changeset objects; establishes parent links, collects
    tags, and honours {{mergefrombranch NAME}} / {{mergetobranch NAME}}
    markers in commit messages (mergefrom/mergeto may override the
    default marker regexes; pass a false value to disable them).
    Synthetic "file added on branch" changesets are dropped at the end,
    and the surviving changesets are numbered from 1 via .id.
    '''

    ui.status(_('creating changesets\n'))

    # Merge changesets

    log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date))

    changesets = []
    files = set()
    c = None
    for i, e in enumerate(log):

        # Check if log entry belongs to the current changeset or not.

        # Since CVS is file centric, two different file revisions with
        # different branchpoints should be treated as belonging to two
        # different changesets (and the ordering is important and not
        # honoured by cvsps at this point).
        #
        # Consider the following case:
        # foo 1.1 branchpoints: [MYBRANCH]
        # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2]
        #
        # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. a
        # later version of foo may be in MYBRANCH2, so foo should be the
        # first changeset and bar the next and MYBRANCH and MYBRANCH2
        # should both start off of the bar changeset. No provisions are
        # made to ensure that this is, in fact, what happens.
        if not (c and
                e.comment == c.comment and
                e.author == c.author and
                e.branch == c.branch and
                (not hasattr(e, 'branchpoints') or
                 not hasattr (c, 'branchpoints') or
                 e.branchpoints == c.branchpoints) and
                ((c.date[0] + c.date[1]) <=
                 (e.date[0] + e.date[1]) <=
                 (c.date[0] + c.date[1]) + fuzz) and
                e.file not in files):
            c = changeset(comment=e.comment, author=e.author,
                          branch=e.branch, date=e.date, entries=[],
                          mergepoint=getattr(e, 'mergepoint', None),
                          branchpoints=getattr(e, 'branchpoints', set()))
            changesets.append(c)
            files = set()
            if len(changesets) % 100 == 0:
                t = '%d %s' % (len(changesets), repr(e.comment)[1:-1])
                ui.status(util.ellipsis(t, 80) + '\n')

        c.entries.append(e)
        files.add(e.file)
        c.date = e.date  # changeset date is date of latest commit in it

    # Mark synthetic changesets

    for c in changesets:
        # Synthetic revisions always get their own changeset, because
        # the log message includes the filename. E.g. if you add file3
        # and file4 on a branch, you get four log entries and three
        # changesets:
        # "File file3 was added on branch ..." (synthetic, 1 entry)
        # "File file4 was added on branch ..." (synthetic, 1 entry)
        # "Add file3 and file4 to fix ..." (real, 2 entries)
        # Hence the check for 1 entry here.
        c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic

    # Sort files in each changeset

    for c in changesets:
        def pathcompare(l, r):
            'Mimic cvsps sorting order'
            l = l.split('/')
            r = r.split('/')
            nl = len(l)
            nr = len(r)
            n = min(nl, nr)
            for i in range(n):
                if i + 1 == nl and nl < nr:
                    return -1
                elif i + 1 == nr and nl > nr:
                    return +1
                elif l[i] < r[i]:
                    return -1
                elif l[i] > r[i]:
                    return +1
            return 0
        def entitycompare(l, r):
            return pathcompare(l.file, r.file)

        c.entries.sort(entitycompare)

    # Sort changesets by date

    def cscmp(l, r):
        # primary key: total timestamp (time + tz offset)
        d = sum(l.date) - sum(r.date)
        if d:
            return d

        # detect vendor branches and initial commits on a branch
        # NOTE(review): the local name 're' shadows the re module inside
        # this comparator; the module is not needed here, so this is
        # harmless but easy to trip over when editing.
        le = {}
        for e in l.entries:
            le[e.rcs] = e.revision
        re = {}
        for e in r.entries:
            re[e.rcs] = e.revision

        d = 0
        for e in l.entries:
            if re.get(e.rcs, None) == e.parent:
                assert not d
                d = 1
                break

        for e in r.entries:
            if le.get(e.rcs, None) == e.parent:
                assert not d
                d = -1
                break

        return d

    changesets.sort(cscmp)

    # Collect tags

    globaltags = {}
    for c in changesets:
        for e in c.entries:
            for tag in e.tags:
                # remember which is the latest changeset to have this tag
                globaltags[tag] = c

    for c in changesets:
        tags = set()
        for e in c.entries:
            tags.update(e.tags)
        # remember tags only if this is the latest changeset to have it
        c.tags = sorted(tag for tag in tags if globaltags[tag] is c)

    # Find parent changesets, handle {{mergetobranch BRANCHNAME}}
    # by inserting dummy changesets with two parents, and handle
    # {{mergefrombranch BRANCHNAME}} by setting two parents.

    if mergeto is None:
        mergeto = r'{{mergetobranch ([-\w]+)}}'
    if mergeto:
        mergeto = re.compile(mergeto)

    if mergefrom is None:
        mergefrom = r'{{mergefrombranch ([-\w]+)}}'
    if mergefrom:
        mergefrom = re.compile(mergefrom)

    versions = {}  # changeset index where we saw any particular file version
    branches = {}  # changeset index where we saw a branch
    n = len(changesets)
    i = 0
    while i < n:
        c = changesets[i]

        for f in c.entries:
            versions[(f.rcs, f.revision)] = i

        p = None
        if c.branch in branches:
            p = branches[c.branch]
        else:
            # first changeset on a new branch
            # the parent is a changeset with the branch in its
            # branchpoints such that it is the latest possible
            # commit without any intervening, unrelated commits.

            for candidate in xrange(i):
                if c.branch not in changesets[candidate].branchpoints:
                    if p is not None:
                        break
                    continue
                p = candidate

        c.parents = []
        if p is not None:
            p = changesets[p]

            # Ensure no changeset has a synthetic changeset as a parent.
            while p.synthetic:
                assert len(p.parents) <= 1, \
                       _('synthetic changeset cannot have multiple parents')
                if p.parents:
                    p = p.parents[0]
                else:
                    p = None
                    break

            if p is not None:
                c.parents.append(p)

        if c.mergepoint:
            if c.mergepoint == 'HEAD':
                c.mergepoint = None
            c.parents.append(changesets[branches[c.mergepoint]])

        if mergefrom:
            m = mergefrom.search(c.comment)
            if m:
                m = m.group(1)
                if m == 'HEAD':
                    m = None
                try:
                    candidate = changesets[branches[m]]
                except KeyError:
                    # KeyError implies m not in branches, so the guard
                    # below is false and 'candidate' is never read
                    ui.warn(_("warning: CVS commit message references "
                              "non-existent branch %r:\n%s\n")
                            % (m, c.comment))
                if m in branches and c.branch != m and not candidate.synthetic:
                    c.parents.append(candidate)

        if mergeto:
            m = mergeto.search(c.comment)
            if m:
                try:
                    m = m.group(1)
                    if m == 'HEAD':
                        m = None
                except:
                    m = None # if no group found then merge to HEAD
                if m in branches and c.branch != m:
                    # insert empty changeset for merge
                    cc = changeset(
                        author=c.author, branch=m, date=c.date,
                        comment='convert-repo: CVS merge from branch %s'
                                % c.branch,
                        entries=[], tags=[],
                        parents=[changesets[branches[m]], c])
                    changesets.insert(i + 1, cc)
                    branches[m] = i + 1

                    # adjust our loop counters now we have inserted a new entry
                    n += 1
                    i += 2
                    continue

        branches[c.branch] = i
        i += 1

    # Drop synthetic changesets (safe now that we have ensured no other
    # changesets can have them as parents).
    i = 0
    while i < len(changesets):
        if changesets[i].synthetic:
            del changesets[i]
        else:
            i += 1

    # Number changesets

    for i, c in enumerate(changesets):
        c.id = i + 1

    ui.status(_('%d changeset entries\n') % len(changesets))

    hook.hook(ui, None, "cvschangesets", True, changesets=changesets)

    return changesets
+
+
def debugcvsps(ui, *args, **opts):
    '''Read CVS rlog for current directory or named path in
    repository, and convert the log to changesets based on matching
    commit log entries and dates.

    Output format (including its trailing spaces) deliberately mimics
    the cvsps tool; opts supplies the command-line flags (new_cache,
    update_cache, root, fuzz, revisions, branches, parents, ancestors,
    prefix).
    '''
    if opts["new_cache"]:
        cache = "write"
    elif opts["update_cache"]:
        cache = "update"
    else:
        cache = None

    revisions = opts["revisions"]

    try:
        if args:
            log = []
            for d in args:
                log += createlog(ui, d, root=opts["root"], cache=cache)
        else:
            log = createlog(ui, root=opts["root"], cache=cache)
    except logerror, e:
        ui.write("%r\n"%e)
        return

    changesets = createchangeset(ui, log, opts["fuzz"])
    del log

    # Print changesets (optionally filtered)

    off = len(revisions)   # suppress output until the start tag is seen
    branches = {}          # latest version number in each branch
    ancestors = {}         # parent branch
    for cs in changesets:

        if opts["ancestors"]:
            if cs.branch not in branches and cs.parents and cs.parents[0].id:
                ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch,
                                        cs.parents[0].id)
            branches[cs.branch] = cs.id

        # limit by branches
        if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]:
            continue

        if not off:
            # Note: trailing spaces on several lines here are needed to have
            #       bug-for-bug compatibility with cvsps.
            ui.write('---------------------\n')
            ui.write('PatchSet %d \n' % cs.id)
            ui.write('Date: %s\n' % util.datestr(cs.date,
                                                 '%Y/%m/%d %H:%M:%S %1%2'))
            ui.write('Author: %s\n' % cs.author)
            ui.write('Branch: %s\n' % (cs.branch or 'HEAD'))
            ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1],
                                       ','.join(cs.tags) or '(none)'))
            branchpoints = getattr(cs, 'branchpoints', None)
            if branchpoints:
                ui.write('Branchpoints: %s \n' % ', '.join(branchpoints))
            if opts["parents"] and cs.parents:
                if len(cs.parents) > 1:
                    ui.write('Parents: %s\n' %
                             (','.join([str(p.id) for p in cs.parents])))
                else:
                    ui.write('Parent: %d\n' % cs.parents[0].id)

            if opts["ancestors"]:
                # walk the ancestors map up to the root branch
                b = cs.branch
                r = []
                while b:
                    b, c = ancestors[b]
                    r.append('%s:%d:%d' % (b or "HEAD", c, branches[b]))
                if r:
                    ui.write('Ancestors: %s\n' % (','.join(r)))

            ui.write('Log:\n')
            ui.write('%s\n\n' % cs.comment)
            ui.write('Members: \n')
            for f in cs.entries:
                fn = f.file
                if fn.startswith(opts["prefix"]):
                    fn = fn[len(opts["prefix"]):]
                ui.write('\t%s:%s->%s%s \n' % (
                        fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL',
                        '.'.join([str(x) for x in f.revision]),
                        ['', '(DEAD)'][f.dead]))
            ui.write('\n')

        # have we seen the start tag?
        if revisions and off:
            if revisions[0] == str(cs.id) or \
                revisions[0] in cs.tags:
                off = False

        # see if we reached the end tag
        if len(revisions) > 1 and not off:
            if revisions[1] == str(cs.id) or \
                revisions[1] in cs.tags:
                break
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo
new file mode 100644
index 0000000..fdf6d44
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py
new file mode 100644
index 0000000..9863eb8
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py
@@ -0,0 +1,200 @@
+# darcs.py - darcs support for the convert extension
+#
+# Copyright 2007-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from common import NoRepo, checktool, commandline, commit, converter_source
+from mercurial.i18n import _
+from mercurial import encoding, util
+import os, shutil, tempfile, re
+
+# The naming drift of ElementTree is fun!
+
+try:
+ from xml.etree.cElementTree import ElementTree, XMLParser
+except ImportError:
+ try:
+ from xml.etree.ElementTree import ElementTree, XMLParser
+ except ImportError:
+ try:
+ from elementtree.cElementTree import ElementTree, XMLParser
+ except ImportError:
+ try:
+ from elementtree.ElementTree import ElementTree, XMLParser
+ except ImportError:
+ ElementTree = None
+
class darcs_source(converter_source, commandline):
    '''Convert source reading history from a darcs repository.

    Patch metadata comes from ``darcs changes`` with XML output; file
    contents are obtained by incrementally pulling each patch into a
    scratch repository created under a temporary directory
    (self.tmppath).
    '''

    def __init__(self, ui, path, rev=None):
        converter_source.__init__(self, ui, path, rev=rev)
        commandline.__init__(self, ui, 'darcs')

        # check for _darcs, ElementTree so that we can easily skip
        # test-convert-darcs if ElementTree is not around
        if not os.path.exists(os.path.join(path, '_darcs')):
            raise NoRepo(_("%s does not look like a darcs repository") % path)

        checktool('darcs')
        version = self.run0('--version').splitlines()[0].strip()
        # NOTE(review): lexicographic version comparison; correct for the
        # 1.x/2.x range but would misorder a hypothetical '10.0'.
        if version < '2.1':
            raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') %
                             version)

        if ElementTree is None:
            raise util.Abort(_("Python ElementTree module is not available"))

        self.path = os.path.realpath(path)

        self.lastrev = None   # hash of the patch last pulled into tmppath
        self.changes = {}     # patch hash -> <patch> XML element
        self.parents = {}     # patch hash -> list of parent patch hashes
        self.tags = {}        # tag name -> patch hash

        # Check darcs repository format
        format = self.format()
        if format:
            if format in ('darcs-1.0', 'hashed'):
                raise NoRepo(_("%s repository format is unsupported, "
                               "please upgrade") % format)
        else:
            self.ui.warn(_('failed to detect repository format!'))

    def before(self):
        '''Create the scratch repository and load all patch metadata.'''
        self.tmppath = tempfile.mkdtemp(
            prefix='convert-' + os.path.basename(self.path) + '-')
        output, status = self.run('init', repodir=self.tmppath)
        self.checkexit(status)

        tree = self.xml('changes', xml_output=True, summary=True,
                        repodir=self.path)
        tagname = None
        child = None
        # Chain each patch to the one listed after it (presumably darcs
        # lists newest first, making the first entry the head — see
        # getheads); a 'TAG ' patch names the next non-tag patch.
        for elt in tree.findall('patch'):
            node = elt.get('hash')
            name = elt.findtext('name', '')
            if name.startswith('TAG '):
                tagname = name[4:].strip()
            elif tagname is not None:
                self.tags[tagname] = node
                tagname = None
            self.changes[node] = elt
            self.parents[child] = [node]
            child = node
        self.parents[child] = []

    def after(self):
        '''Remove the scratch repository.'''
        self.ui.debug('cleaning up %s\n' % self.tmppath)
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def recode(self, s, encoding=None):
        '''Recode s to the converter's output encoding.'''
        if isinstance(s, unicode):
            # XMLParser returns unicode objects for anything it can't
            # encode into ASCII. We convert them back to str to get
            # recode's normal conversion behavior.
            s = s.encode('latin-1')
        return super(darcs_source, self).recode(s, encoding)

    def xml(self, cmd, **kwargs):
        '''Run a darcs command and return the parsed XML root element
        of its output.'''
        # NOTE: darcs is currently encoding agnostic and will print
        # patch metadata byte-for-byte, even in the XML changelog.
        etree = ElementTree()
        # While we are decoding the XML as latin-1 to be as liberal as
        # possible, etree will still raise an exception if any
        # non-printable characters are in the XML changelog.
        parser = XMLParser(encoding='latin-1')
        fp = self._run(cmd, **kwargs)
        etree.parse(fp, parser=parser)
        self.checkexit(fp.close())
        return etree.getroot()

    def format(self):
        '''Return the repository format as a sorted, comma-joined
        string, or None if 'darcs show repo' reports no Format line.'''
        output, status = self.run('show', 'repo', no_files=True,
                                  repodir=self.path)
        self.checkexit(status)
        m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE)
        if not m:
            return None
        return ','.join(sorted(f.strip() for f in m.group(1).split(',')))

    def manifest(self):
        '''List the files currently present in the scratch repository.'''
        man = []
        output, status = self.run('show', 'files', no_directories=True,
                                  repodir=self.tmppath)
        self.checkexit(status)
        for line in output.split('\n'):
            # drop the first two characters of each line (presumably a
            # leading './' prefix — verify against darcs output)
            path = line[2:]
            if path:
                man.append(path)
        return man

    def getheads(self):
        '''The head is the parent list of the None sentinel, i.e. the
        first patch recorded by before().'''
        return self.parents[None]

    def getcommit(self, rev):
        '''Build a commit object from the stored XML metadata for rev.'''
        elt = self.changes[rev]
        date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y')
        desc = elt.findtext('name') + '\n' + elt.findtext('comment', '')
        # etree can return unicode objects for name, comment, and author,
        # so recode() is used to ensure str objects are emitted.
        return commit(author=self.recode(elt.get('author')),
                      date=util.datestr(date),
                      desc=self.recode(desc).strip(),
                      parents=self.parents[rev])

    def pull(self, rev):
        '''Pull patch rev into the scratch repository; on conflicts,
        revert local changes and accept the patch as-is.'''
        output, status = self.run('pull', self.path, all=True,
                                  match='hash %s' % rev,
                                  no_test=True, no_posthook=True,
                                  external_merge='/bin/false',
                                  repodir=self.tmppath)
        if status:
            if output.find('We have conflicts in') == -1:
                self.checkexit(status, output)
            output, status = self.run('revert', all=True, repodir=self.tmppath)
            self.checkexit(status, output)

    def getchanges(self, rev):
        '''Return (sorted changed files, copies) for rev and advance
        the scratch repository to that patch.'''
        copies = {}
        changes = []
        man = None
        for elt in self.changes[rev].find('summary').getchildren():
            if elt.tag in ('add_directory', 'remove_directory'):
                continue
            if elt.tag == 'move':
                if man is None:
                    man = self.manifest()
                source, dest = elt.get('from'), elt.get('to')
                if source in man:
                    # File move
                    changes.append((source, rev))
                    changes.append((dest, rev))
                    copies[dest] = source
                else:
                    # Directory move, deduce file moves from manifest
                    source = source + '/'
                    for f in man:
                        if not f.startswith(source):
                            continue
                        fdest = dest + '/' + f[len(source):]
                        changes.append((f, rev))
                        changes.append((fdest, rev))
                        copies[fdest] = f
            else:
                changes.append((elt.text.strip(), rev))
        self.pull(rev)
        self.lastrev = rev
        return sorted(changes), copies

    def getfile(self, name, rev):
        '''Return (data, mode) for file name at rev; only valid for the
        revision most recently passed to getchanges.'''
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))
        path = os.path.join(self.tmppath, name)
        data = open(path, 'rb').read()
        mode = os.lstat(path).st_mode
        # mode is 'x' when any execute bit is set, '' otherwise
        mode = (mode & 0111) and 'x' or ''
        return data, mode

    def gettags(self):
        '''Return the tag name -> patch hash mapping.'''
        return self.tags
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo
new file mode 100644
index 0000000..78b7568
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py
new file mode 100644
index 0000000..1064642
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py
@@ -0,0 +1,365 @@
+# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
+# Copyright 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+import shlex
+from mercurial.i18n import _
+from mercurial import util
+from common import SKIPREV, converter_source
+
def rpairs(name):
    """Yield (prefix, suffix) pairs for every '/' split point of name,
    from the longest prefix down to the shortest, then ('.', name)."""
    splits = [len(name)]
    while True:
        nxt = name.rfind('/', 0, splits[-1])
        if nxt == -1:
            break
        splits.append(nxt)
    for cut in splits:
        yield name[:cut], name[cut + 1:]
    yield '.', name
+
class filemapper(object):
    """Filter and rename file names while importing.

    Each incoming name maps to itself, to a renamed path, or to None
    (meaning: omit the file from the new repository).
    """

    def __init__(self, ui, path=None):
        self.ui = ui
        self.include = {}
        self.exclude = {}
        self.rename = {}
        if path and self.parse(path):
            raise util.Abort(_('errors in filemap'))

    def parse(self, path):
        """Parse the filemap file at path into the rule tables.

        Returns the number of errors encountered (0 on success).
        """
        errs = 0

        def check(name, mapping, listname):
            # Validate one path token; warn and return 1 on error.
            if not name:
                self.ui.warn(_('%s:%d: path to %s is missing\n') %
                             (lex.infile, lex.lineno, listname))
                return 1
            if name in mapping:
                self.ui.warn(_('%s:%d: %r already in %s list\n') %
                             (lex.infile, lex.lineno, name, listname))
                return 1
            if (name.startswith('/') or
                name.endswith('/') or
                '//' in name):
                self.ui.warn(_('%s:%d: superfluous / in %s %r\n') %
                             (lex.infile, lex.lineno, listname, name))
                return 1
            return 0

        lex = shlex.shlex(open(path), path, True)
        lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?'
        while True:
            cmd = lex.get_token()
            if not cmd:
                break
            if cmd == 'include':
                name = lex.get_token()
                errs += check(name, self.exclude, 'exclude')
                self.include[name] = name
            elif cmd == 'exclude':
                name = lex.get_token()
                errs += check(name, self.include, 'include')
                errs += check(name, self.rename, 'rename')
                self.exclude[name] = name
            elif cmd == 'rename':
                src = lex.get_token()
                dest = lex.get_token()
                errs += check(src, self.exclude, 'exclude')
                self.rename[src] = dest
            elif cmd == 'source':
                # nested filemap file
                errs += self.parse(lex.get_token())
            else:
                self.ui.warn(_('%s:%d: unknown directive %r\n') %
                             (lex.infile, lex.lineno, cmd))
                errs += 1
        return errs

    def lookup(self, name, mapping):
        """Match the longest path prefix of name present in mapping.

        Returns (mapped value, matched prefix, remaining suffix), or
        ('', name, '') when nothing matches.
        """
        for prefix, suffix in rpairs(name):
            if prefix in mapping:
                return mapping[prefix], prefix, suffix
        return '', name, ''

    def __call__(self, name):
        inc = self.lookup(name, self.include)[0] if self.include else name
        exc = self.lookup(name, self.exclude)[0] if self.exclude else ''
        # drop the file when it is excluded, or when an include list
        # exists and no include prefix beats the matched exclude
        if (not self.include and exc) or (len(inc) <= len(exc)):
            return None
        newpre, pre, suf = self.lookup(name, self.rename)
        if not newpre:
            return name
        if newpre == '.':
            return suf
        return newpre + '/' + suf if suf else newpre

    def active(self):
        """True when any include/exclude/rename rule is configured."""
        return bool(self.include or self.exclude or self.rename)
+
+# This class does two additional things compared to a regular source:
+#
+# - Filter and rename files. This is mostly wrapped by the filemapper
+# class above. We hide the original filename in the revision that is
+# returned by getchanges to be able to find things later in getfile.
+#
+# - Return only revisions that matter for the files we're interested in.
+# This involves rewriting the parents of the original revision to
+# create a graph that is restricted to those revisions.
+#
+# This set of revisions includes not only revisions that directly
+# touch files we're interested in, but also merges that merge two
+# or more interesting revisions.
+
class filemap_source(converter_source):
    """converter_source wrapper that applies a filemapper and prunes
    revisions that do not touch any interesting file, rewriting commit
    parents so the restricted graph stays connected."""

    def __init__(self, ui, baseconverter, filemap):
        super(filemap_source, self).__init__(ui)
        self.base = baseconverter
        self.filemapper = filemapper(ui, filemap)
        # commit objects cached by getcommit so getchanges can rewrite
        # their parents later
        self.commits = {}
        # if a revision rev has parent p in the original revision graph, then
        # rev will have parent self.parentmap[p] in the restricted graph.
        self.parentmap = {}
        # self.wantedancestors[rev] is the set of all ancestors of rev that
        # are in the restricted graph.
        self.wantedancestors = {}
        # list of (rev, wanted, arg) tuples in conversion order, filled
        # by setrevmap() on restart and appended to by getchanges()
        self.convertedorder = None
        # whether rebuild() has recomputed the wantedancestors data
        self._rebuilt = False
        self.origparents = {}
        # per-revision counts of children seen so far, used by
        # _discard() to free state once a revision can no longer be a
        # parent of anything still to convert
        self.children = {}
        self.seenchildren = {}

    def before(self):
        self.base.before()

    def after(self):
        self.base.after()

    def setrevmap(self, revmap):
        # rebuild our state to make things restartable
        #
        # To avoid calling getcommit for every revision that has already
        # been converted, we rebuild only the parentmap, delaying the
        # rebuild of wantedancestors until we need it (i.e. until a
        # merge).
        #
        # We assume the order argument lists the revisions in
        # topological order, so that we can infer which revisions were
        # wanted by previous runs.
        self._rebuilt = not revmap
        seen = {SKIPREV: SKIPREV}
        # all rebuilt entries share one placeholder set; the real sets
        # are recomputed lazily by rebuild()
        dummyset = set()
        converted = []
        for rev in revmap.order:
            mapped = revmap[rev]
            # a source rev was "wanted" iff it is the first one mapped
            # to this destination rev
            wanted = mapped not in seen
            if wanted:
                seen[mapped] = rev
                self.parentmap[rev] = rev
            else:
                self.parentmap[rev] = seen[mapped]
            self.wantedancestors[rev] = dummyset
            arg = seen[mapped]
            if arg == SKIPREV:
                arg = None
            converted.append((rev, wanted, arg))
        self.convertedorder = converted
        return self.base.setrevmap(revmap)

    def rebuild(self):
        """Recompute wantedancestors/parentmap from convertedorder.

        Deferred until the first merge is seen; returns True."""
        if self._rebuilt:
            return True
        self._rebuilt = True
        self.parentmap.clear()
        self.wantedancestors.clear()
        self.seenchildren.clear()
        # first pass: fetch original parents and count children so
        # _discard() knows when state can be freed
        for rev, wanted, arg in self.convertedorder:
            if rev not in self.origparents:
                self.origparents[rev] = self.getcommit(rev).parents
            if arg is not None:
                self.children[arg] = self.children.get(arg, 0) + 1

        # second pass: replay the wanted/not-wanted decisions in order
        for rev, wanted, arg in self.convertedorder:
            parents = self.origparents[rev]
            if wanted:
                self.mark_wanted(rev, parents)
            else:
                self.mark_not_wanted(rev, arg)
            self._discard(arg, *parents)

        return True

    def getheads(self):
        return self.base.getheads()

    def getcommit(self, rev):
        # We want to save a reference to the commit objects to be able
        # to rewrite their parents later on.
        c = self.commits[rev] = self.base.getcommit(rev)
        for p in c.parents:
            self.children[p] = self.children.get(p, 0) + 1
        return c

    def _discard(self, *revs):
        """Drop per-revision state once every child of a rev was seen."""
        for r in revs:
            if r is None:
                continue
            self.seenchildren[r] = self.seenchildren.get(r, 0) + 1
            if self.seenchildren[r] == self.children[r]:
                del self.wantedancestors[r]
                del self.parentmap[r]
                del self.seenchildren[r]
                if self._rebuilt:
                    del self.children[r]

    def wanted(self, rev, i):
        # Return True if we're directly interested in rev.
        #
        # i is an index selecting one of the parents of rev (if rev
        # has no parents, i is None). getchangedfiles will give us
        # the list of files that are different in rev and in the parent
        # indicated by i. If we're interested in any of these files,
        # we're interested in rev.
        try:
            files = self.base.getchangedfiles(rev, i)
        except NotImplementedError:
            raise util.Abort(_("source repository doesn't support --filemap"))
        for f in files:
            if self.filemapper(f):
                return True
        return False

    def mark_not_wanted(self, rev, p):
        # Mark rev as not interesting and update data structures.

        if p is None:
            # A root revision. Use SKIPREV to indicate that it doesn't
            # map to any revision in the restricted graph. Put SKIPREV
            # in the set of wanted ancestors to simplify code elsewhere
            self.parentmap[rev] = SKIPREV
            self.wantedancestors[rev] = set((SKIPREV,))
            return

        # Reuse the data from our parent.
        self.parentmap[rev] = self.parentmap[p]
        self.wantedancestors[rev] = self.wantedancestors[p]

    def mark_wanted(self, rev, parents):
        # Mark rev as wanted and update data structures.

        # rev will be in the restricted graph, so children of rev in
        # the original graph should still have rev as a parent in the
        # restricted graph.
        self.parentmap[rev] = rev

        # The set of wanted ancestors of rev is the union of the sets
        # of wanted ancestors of its parents. Plus rev itself.
        wrev = set()
        for p in parents:
            wrev.update(self.wantedancestors[p])
        wrev.add(rev)
        self.wantedancestors[rev] = wrev

    def getchanges(self, rev):
        parents = self.commits[rev].parents
        if len(parents) > 1:
            # merges need the full ancestor data; trigger the lazy rebuild
            self.rebuild()

        # To decide whether we're interested in rev we:
        #
        # - calculate what parents rev will have if it turns out we're
        #   interested in it.  If it's going to have more than 1 parent,
        #   we're interested in it.
        #
        # - otherwise, we'll compare it with the single parent we found.
        #   If any of the files we're interested in is different in the
        #   two revisions, we're interested in rev.

        # A parent p is interesting if its mapped version (self.parentmap[p]):
        # - is not SKIPREV
        # - is still not in the list of parents (we don't want duplicates)
        # - is not an ancestor of the mapped versions of the other parents
        mparents = []
        wp = None
        for i, p1 in enumerate(parents):
            mp1 = self.parentmap[p1]
            if mp1 == SKIPREV or mp1 in mparents:
                continue
            for p2 in parents:
                if p1 == p2 or mp1 == self.parentmap[p2]:
                    continue
                if mp1 in self.wantedancestors[p2]:
                    break
            else:
                mparents.append(mp1)
                wp = i

        if wp is None and parents:
            wp = 0

        self.origparents[rev] = parents

        closed = 'close' in self.commits[rev].extra

        if len(mparents) < 2 and not closed and not self.wanted(rev, wp):
            # We don't want this revision.
            # Update our state and tell the convert process to map this
            # revision to the same revision its parent was mapped to.
            p = None
            if parents:
                p = parents[wp]
            self.mark_not_wanted(rev, p)
            self.convertedorder.append((rev, False, p))
            self._discard(*parents)
            return self.parentmap[rev]

        # We want this revision.
        # Rewrite the parents of the commit object
        self.commits[rev].parents = mparents
        self.mark_wanted(rev, parents)
        self.convertedorder.append((rev, True, None))
        self._discard(*parents)

        # Get the real changes and do the filtering/mapping. To be
        # able to get the files later on in getfile, we hide the
        # original filename in the rev part of the return value.
        changes, copies = self.base.getchanges(rev)
        newnames = {}
        files = []
        for f, r in changes:
            newf = self.filemapper(f)
            if newf:
                files.append((newf, (f, r)))
                newnames[f] = newf

        ncopies = {}
        for c in copies:
            newc = self.filemapper(c)
            if newc:
                newsource = self.filemapper(copies[c])
                if newsource:
                    ncopies[newc] = newsource

        return files, ncopies

    def getfile(self, name, rev):
        # rev is the (original name, original rev) pair hidden by
        # getchanges above
        realname, realrev = rev
        return self.base.getfile(realname, realrev)

    def gettags(self):
        return self.base.gettags()

    def hasnativeorder(self):
        return self.base.hasnativeorder()

    def lookuprev(self, rev):
        return self.base.lookuprev(rev)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo
new file mode 100644
index 0000000..2ece523
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py
new file mode 100644
index 0000000..e973031
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py
@@ -0,0 +1,170 @@
+# git.py - git support for the convert extension
+#
+# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+import os
+from mercurial import util
+from mercurial.node import hex, nullid
+from mercurial.i18n import _
+
+from common import NoRepo, commit, converter_source, checktool
+
class convert_git(converter_source):
    """Convert-extension source that reads history from a git repository
    by driving the git command-line tools."""

    # Windows does not support GIT_DIR= construct while other systems
    # cannot remove environment variable. Just assume none have
    # both issues.
    if hasattr(os, 'unsetenv'):
        def gitopen(self, s):
            # run s with GIT_DIR pointing at our repository, restoring
            # the previous environment afterwards
            prevgitdir = os.environ.get('GIT_DIR')
            os.environ['GIT_DIR'] = self.path
            try:
                return util.popen(s, 'rb')
            finally:
                if prevgitdir is None:
                    del os.environ['GIT_DIR']
                else:
                    os.environ['GIT_DIR'] = prevgitdir
    else:
        def gitopen(self, s):
            # NOTE(review): self.path is interpolated into a shell
            # command line unquoted; paths with shell metacharacters
            # would break here
            return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb')

    def gitread(self, s):
        """Run git command s; return (output, exit status from close)."""
        fh = self.gitopen(s)
        data = fh.read()
        return data, fh.close()

    def __init__(self, ui, path, rev=None):
        super(convert_git, self).__init__(ui, path, rev=rev)

        # accept both a working copy (with .git inside) and a bare repo
        if os.path.isdir(path + "/.git"):
            path += "/.git"
        if not os.path.exists(path + "/objects"):
            raise NoRepo(_("%s does not look like a Git repository") % path)

        checktool('git', 'git')

        self.path = path

    def getheads(self):
        """Return the heads to convert: all branches/remotes, or just
        the revision given with --rev."""
        if not self.rev:
            heads, ret = self.gitread('git rev-parse --branches --remotes')
            heads = heads.splitlines()
        else:
            heads, ret = self.gitread("git rev-parse --verify %s" % self.rev)
            # strip the trailing newline from the single hash
            heads = [heads[:-1]]
        if ret:
            raise util.Abort(_('cannot retrieve git heads'))
        return heads

    def catfile(self, rev, type):
        """Return the raw contents of git object rev of the given type.

        Raises IOError for the null revision (file absent)."""
        if rev == hex(nullid):
            raise IOError()
        data, ret = self.gitread("git cat-file %s %s" % (type, rev))
        if ret:
            raise util.Abort(_('cannot read %r object at %s') % (type, rev))
        return data

    def getfile(self, name, rev):
        # mode was recorded by the preceding getchanges() call
        data = self.catfile(rev, "blob")
        mode = self.modecache[(name, rev)]
        return data, mode

    def getchanges(self, version):
        """Parse NUL-separated `git diff-tree` output into a list of
        (file, blob hash) changes; also fills self.modecache."""
        self.modecache = {}
        fh = self.gitopen("git diff-tree -z --root -m -r %s" % version)
        changes = []
        seen = set()
        entry = None
        # output alternates between a ':...' metadata entry and the
        # file name it applies to
        for l in fh.read().split('\x00'):
            if not entry:
                if not l.startswith(':'):
                    continue
                entry = l
                continue
            f = l
            if f not in seen:
                seen.add(f)
                entry = entry.split()
                h = entry[3]
                # entry[1] is the destination mode field
                p = (entry[1] == "100755")
                s = (entry[1] == "120000")
                self.modecache[(f, h)] = (p and "x") or (s and "l") or ""
                changes.append((f, h))
            entry = None
        if fh.close():
            raise util.Abort(_('cannot read changes in %s') % version)
        return (changes, {})

    def getcommit(self, version):
        """Build a commit object from `git cat-file commit` output."""
        c = self.catfile(version, "commit") # read the commit hash
        end = c.find("\n\n")
        message = c[end + 2:]
        message = self.recode(message)
        l = c[:end].splitlines()
        parents = []
        author = committer = None
        for e in l[1:]:
            n, v = e.split(" ", 1)
            if n == "author":
                p = v.split()
                tm, tz = p[-2:]
                author = " ".join(p[:-2])
                if author[0] == "<": author = author[1:-1]
                author = self.recode(author)
            if n == "committer":
                p = v.split()
                tm, tz = p[-2:]
                committer = " ".join(p[:-2])
                if committer[0] == "<": committer = committer[1:-1]
                committer = self.recode(committer)
            if n == "parent":
                parents.append(v)

        if committer and committer != author:
            message += "\ncommitter: %s\n" % committer
        # convert the '+HHMM'/'-HHMM' offset into seconds west of UTC
        tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:]
        tz = -int(tzs) * (int(tzh) * 3600 + int(tzm))
        date = tm + " " + str(tz)

        c = commit(parents=parents, date=date, author=author, desc=message,
                   rev=version)
        return c

    def gettags(self):
        """Return {tag name: node} from `git ls-remote --tags`."""
        tags = {}
        fh = self.gitopen('git ls-remote --tags "%s"' % self.path)
        prefix = 'refs/tags/'
        for line in fh:
            line = line.strip()
            # only keep dereferenced ('^{}') entries, i.e. the commits
            # annotated tags point at
            if not line.endswith("^{}"):
                continue
            node, tag = line.split(None, 1)
            if not tag.startswith(prefix):
                continue
            tag = tag[len(prefix):-3]
            tags[tag] = node
        if fh.close():
            raise util.Abort(_('cannot read tags from %s') % self.path)

        return tags

    def getchangedfiles(self, version, i):
        """Return the files that differ between version and its parent
        number i (all parents when i is None)."""
        changes = []
        if i is None:
            fh = self.gitopen("git diff-tree --root -m -r %s" % version)
            for l in fh:
                if "\t" not in l:
                    continue
                m, f = l[:-1].split("\t")
                changes.append(f)
        else:
            fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --'
                              % (version, version, i + 1))
            changes = [f.rstrip('\n') for f in fh]
        if fh.close():
            raise util.Abort(_('cannot read changes in %s') % version)

        return changes
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyo
new file mode 100644
index 0000000..3166318
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py
new file mode 100644
index 0000000..60cfede
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py
@@ -0,0 +1,338 @@
+# gnuarch.py - GNU Arch support for the convert extension
+#
+# Copyright 2008, 2009 Aleix Conchillo Flaque <aleix@member.fsf.org>
+# and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from common import NoRepo, commandline, commit, converter_source
+from mercurial.i18n import _
+from mercurial import encoding, util
+import os, shutil, tempfile, stat
+from email.Parser import Parser
+
class gnuarch_source(converter_source, commandline):
    """Convert-extension source for GNU Arch repositories, driven by
    the 'baz' or 'tla' command-line tool."""

    class gnuarch_rev(object):
        # Per-revision record filled in by _parsecatlog/_parsechangeset.
        def __init__(self, rev):
            self.rev = rev
            self.summary = ''
            self.date = None
            self.author = ''
            # revision this one continues (branch/tag origin), if any
            self.continuationof = None
            self.add_files = []
            self.mod_files = []
            self.del_files = []
            # {source: destination} maps for renames
            self.ren_files = {}
            self.ren_dirs = {}

    def __init__(self, ui, path, rev=None):
        super(gnuarch_source, self).__init__(ui, path, rev=rev)

        if not os.path.exists(os.path.join(path, '{arch}')):
            raise NoRepo(_("%s does not look like a GNU Arch repository")
                         % path)

        # Could use checktool, but we want to check for baz or tla.
        self.execmd = None
        if util.find_exe('baz'):
            self.execmd = 'baz'
        else:
            if util.find_exe('tla'):
                self.execmd = 'tla'
            else:
                raise util.Abort(_('cannot find a GNU Arch tool'))

        commandline.__init__(self, ui, self.execmd)

        self.path = os.path.realpath(path)
        self.tmppath = None

        self.treeversion = None
        self.lastrev = None
        self.changes = {}
        self.parents = {}
        self.tags = {}
        self.catlogparser = Parser()
        self.encoding = encoding.encoding
        self.archives = []

    def before(self):
        """Walk the tree-version history and populate self.changes and
        self.parents before conversion starts."""
        # Get registered archives
        self.archives = [i.rstrip('\n')
                         for i in self.runlines0('archives', '-n')]

        if self.execmd == 'tla':
            output = self.run0('tree-version', self.path)
        else:
            output = self.run0('tree-version', '-d', self.path)
        self.treeversion = output.strip()

        # Get name of temporary directory
        version = self.treeversion.split('/')
        self.tmppath = os.path.join(tempfile.gettempdir(),
                                    'hg-%s' % version[1])

        # Generate parents dictionary
        self.parents[None] = []
        treeversion = self.treeversion
        child = None
        while treeversion:
            self.ui.status(_('analyzing tree version %s...\n') % treeversion)

            archive = treeversion.split('/')[0]
            if archive not in self.archives:
                self.ui.status(_('tree analysis stopped because it points to '
                                 'an unregistered archive %s...\n') % archive)
                break

            # Get the complete list of revisions for that tree version
            output, status = self.runlines('revisions', '-r', '-f', treeversion)
            self.checkexit(status, 'failed retrieveing revisions for %s'
                           % treeversion)

            # No new iteration unless a revision has a continuation-of header
            treeversion = None

            for l in output:
                rev = l.strip()
                self.changes[rev] = self.gnuarch_rev(rev)
                self.parents[rev] = []

                # Read author, date and summary
                catlog, status = self.run('cat-log', '-d', self.path, rev)
                if status:
                    # fall back to the archive's copy of the log
                    catlog = self.run0('cat-archive-log', rev)
                self._parsecatlog(catlog, rev)

                # Populate the parents map
                self.parents[child].append(rev)

                # Keep track of the current revision as the child of the next
                # revision scanned
                child = rev

                # Check if we have to follow the usual incremental history
                # or if we have to 'jump' to a different treeversion given
                # by the continuation-of header.
                if self.changes[rev].continuationof:
                    treeversion = '--'.join(
                        self.changes[rev].continuationof.split('--')[:-1])
                    break

                # If we reached a base-0 revision w/o any continuation-of
                # header, it means the tree history ends here.
                if rev[-6:] == 'base-0':
                    break

    def after(self):
        self.ui.debug('cleaning up %s\n' % self.tmppath)
        shutil.rmtree(self.tmppath, ignore_errors=True)

    def getheads(self):
        # parents[None] holds the revisions with no child, i.e. heads
        return self.parents[None]

    def getfile(self, name, rev):
        """Return (data, mode) for name at rev; rev must be the one most
        recently synced by getchanges."""
        if rev != self.lastrev:
            raise util.Abort(_('internal calling inconsistency'))

        # Raise IOError if necessary (i.e. deleted files).
        if not os.path.lexists(os.path.join(self.tmppath, name)):
            raise IOError

        return self._getfile(name, rev)

    def getchanges(self, rev):
        """Sync the checkout to rev and return (sorted changes, copies)."""
        self._update(rev)
        changes = []
        copies = {}

        for f in self.changes[rev].add_files:
            changes.append((f, rev))

        for f in self.changes[rev].mod_files:
            changes.append((f, rev))

        for f in self.changes[rev].del_files:
            changes.append((f, rev))

        for src in self.changes[rev].ren_files:
            to = self.changes[rev].ren_files[src]
            changes.append((src, rev))
            changes.append((to, rev))
            copies[to] = src

        for src in self.changes[rev].ren_dirs:
            to = self.changes[rev].ren_dirs[src]
            # expand a directory rename into its per-file moves
            chgs, cps = self._rendirchanges(src, to)
            changes += [(f, rev) for f in chgs]
            copies.update(cps)

        self.lastrev = rev
        return sorted(set(changes)), copies

    def getcommit(self, rev):
        changes = self.changes[rev]
        return commit(author=changes.author, date=changes.date,
                      desc=changes.summary, parents=self.parents[rev], rev=rev)

    def gettags(self):
        return self.tags

    def _execute(self, cmd, *args, **kwargs):
        """Run an arch command silently; return its os.system status.

        kwargs is accepted for signature compatibility but unused."""
        cmdline = [self.execmd, cmd]
        cmdline += args
        cmdline = [util.shellquote(arg) for arg in cmdline]
        # discard both stdout and stderr
        cmdline += ['>', util.nulldev, '2>', util.nulldev]
        cmdline = util.quotecommand(' '.join(cmdline))
        self.ui.debug(cmdline, '\n')
        return os.system(cmdline)

    def _update(self, rev):
        """Bring self.tmppath to rev, replaying incrementally when
        possible and falling back to a fresh checkout."""
        self.ui.debug('applying revision %s...\n' % rev)
        changeset, status = self.runlines('replay', '-d', self.tmppath,
                                          rev)
        if status:
            # Something went wrong while merging (baz or tla
            # issue?), get latest revision and try from there
            shutil.rmtree(self.tmppath, ignore_errors=True)
            self._obtainrevision(rev)
        else:
            old_rev = self.parents[rev][0]
            self.ui.debug('computing changeset between %s and %s...\n'
                          % (old_rev, rev))
            self._parsechangeset(changeset, rev)

    def _getfile(self, name, rev):
        """Read name from the checkout; return (data, mode) where mode
        is 'l' for symlinks, 'x' for executables, '' otherwise."""
        mode = os.lstat(os.path.join(self.tmppath, name)).st_mode
        if stat.S_ISLNK(mode):
            data = os.readlink(os.path.join(self.tmppath, name))
            # st_mode is nonzero here, so this always yields 'l'
            mode = mode and 'l' or ''
        else:
            data = open(os.path.join(self.tmppath, name), 'rb').read()
            mode = (mode & 0111) and 'x' or ''
        return data, mode

    def _exclude(self, name):
        """True for GNU Arch control files/directories to be skipped."""
        exclude = ['{arch}', '.arch-ids', '.arch-inventory']
        for exc in exclude:
            if name.find(exc) != -1:
                return True
        return False

    def _readcontents(self, path):
        """Return all file names under path, skipping Arch metadata."""
        files = []
        contents = os.listdir(path)
        while len(contents) > 0:
            c = contents.pop()
            p = os.path.join(path, c)
            # os.walk could be used, but here we avoid internal GNU
            # Arch files and directories, thus saving a lot of time.
            if not self._exclude(p):
                if os.path.isdir(p):
                    contents += [os.path.join(c, f) for f in os.listdir(p)]
                else:
                    files.append(c)
        return files

    def _rendirchanges(self, src, dest):
        """Expand a directory rename into per-file (changes, copies)."""
        changes = []
        copies = {}
        files = self._readcontents(os.path.join(self.tmppath, dest))
        for f in files:
            s = os.path.join(src, f)
            d = os.path.join(dest, f)
            changes.append(s)
            changes.append(d)
            copies[d] = s
        return changes, copies

    def _obtainrevision(self, rev):
        """Do a full 'get' checkout of rev into self.tmppath and record
        every file it contains as added."""
        self.ui.debug('obtaining revision %s...\n' % rev)
        output = self._execute('get', rev, self.tmppath)
        self.checkexit(output)
        self.ui.debug('analyzing revision %s...\n' % rev)
        files = self._readcontents(self.tmppath)
        self.changes[rev].add_files += files

    def _stripbasepath(self, path):
        # drop a leading './' emitted by the arch tools
        if path.startswith('./'):
            return path[2:]
        return path

    def _parsecatlog(self, data, rev):
        """Fill date/author/summary/continuationof of self.changes[rev]
        from an RFC822-style cat-log; abort on parse failure."""
        try:
            catlog = self.catlogparser.parsestr(data)

            # Commit date
            self.changes[rev].date = util.datestr(
                util.strdate(catlog['Standard-date'],
                             '%Y-%m-%d %H:%M:%S'))

            # Commit author
            self.changes[rev].author = self.recode(catlog['Creator'])

            # Commit description
            self.changes[rev].summary = '\n\n'.join((catlog['Summary'],
                                                     catlog.get_payload()))
            self.changes[rev].summary = self.recode(self.changes[rev].summary)

            # Commit revision origin when dealing with a branch or tag
            if 'Continuation-of' in catlog:
                self.changes[rev].continuationof = self.recode(
                    catlog['Continuation-of'])
        except Exception:
            raise util.Abort(_('could not parse cat-log of %s') % rev)

    def _parsechangeset(self, data, rev):
        """Classify each line of a 'replay' changeset into the add/del/
        mod/rename lists of self.changes[rev]."""
        for l in data:
            l = l.strip()
            # Added file (ignore added directory)
            if l.startswith('A') and not l.startswith('A/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].add_files.append(file)
            # Deleted file (ignore deleted directory)
            elif l.startswith('D') and not l.startswith('D/'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].del_files.append(file)
            # Modified binary file
            elif l.startswith('Mb'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified link
            elif l.startswith('M->'):
                file = self._stripbasepath(l[3:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Modified file
            elif l.startswith('M'):
                file = self._stripbasepath(l[1:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed file (or link)
            elif l.startswith('=>'):
                # fields may be space- or tab-separated
                files = l[2:].strip().split(' ')
                if len(files) == 1:
                    files = l[2:].strip().split('\t')
                src = self._stripbasepath(files[0])
                dst = self._stripbasepath(files[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_files[src] = dst
            # Conversion from file to link or from link to file (modified)
            elif l.startswith('ch'):
                file = self._stripbasepath(l[2:].strip())
                if not self._exclude(file):
                    self.changes[rev].mod_files.append(file)
            # Renamed directory
            elif l.startswith('/>'):
                dirs = l[2:].strip().split(' ')
                if len(dirs) == 1:
                    dirs = l[2:].strip().split('\t')
                src = self._stripbasepath(dirs[0])
                dst = self._stripbasepath(dirs[1])
                if not self._exclude(src) and not self._exclude(dst):
                    self.changes[rev].ren_dirs[src] = dst
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyo
new file mode 100644
index 0000000..0e564ba
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py
new file mode 100644
index 0000000..183377d
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py
@@ -0,0 +1,376 @@
+# hg.py - hg backend for convert extension
+#
+# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+# Notes for hg->hg conversion:
+#
+# * Old versions of Mercurial didn't trim the whitespace from the ends
+# of commit messages, but new versions do. Changesets created by
+# those older versions, then converted, may thus have different
+# hashes for changesets that are otherwise identical.
+#
+# * Using "--config convert.hg.saverev=true" will make the source
+# identifier to be stored in the converted revision. This will cause
+# the converted revision to have a different identity than the
+# source.
+
+
+import os, time, cStringIO
+from mercurial.i18n import _
+from mercurial.node import bin, hex, nullid
+from mercurial import hg, util, context, error
+
+from common import NoRepo, commit, converter_source, converter_sink
+
+class mercurial_sink(converter_sink):
    def __init__(self, ui, path):
        """Open or create the destination Mercurial repository at path.

        Raises NoRepo if path exists but is not a local hg repository,
        or if a new repository cannot be created there.
        """
        converter_sink.__init__(self, ui, path)
        # Conversion knobs, all read from the [convert] config section.
        self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True)
        self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False)
        self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default')
        self.lastbranch = None
        if os.path.isdir(path) and len(os.listdir(path)) > 0:
            # Non-empty directory: must already be a local hg repository.
            try:
                self.repo = hg.repository(self.ui, path)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
            except error.RepoError, err:
                ui.traceback()
                raise NoRepo(err.args[0])
        else:
            # Missing or empty directory: initialize a fresh repository.
            try:
                ui.status(_('initializing destination %s repository\n') % path)
                self.repo = hg.repository(self.ui, path, create=True)
                if not self.repo.local():
                    raise NoRepo(_('%s is not a local Mercurial repository')
                                 % path)
                self.created.append(path)
            except error.RepoError:
                ui.traceback()
                raise NoRepo(_("could not create hg repository %s as sink")
                             % path)
        # Locks are taken in before() and dropped in after().
        self.lock = None
        self.wlock = None
        self.filemapmode = False
+
    def before(self):
        """Acquire repository locks before conversion starts."""
        self.ui.debug('run hg sink pre-conversion action\n')
        # Lock order matters: working-directory lock first, then store lock.
        self.wlock = self.repo.wlock()
        self.lock = self.repo.lock()
+
    def after(self):
        """Release the locks taken by before(), store lock first."""
        self.ui.debug('run hg sink post-conversion action\n')
        if self.lock:
            self.lock.release()
        if self.wlock:
            self.wlock.release()
+
+ def revmapfile(self):
+ return os.path.join(self.path, ".hg", "shamap")
+
+ def authorfile(self):
+ return os.path.join(self.path, ".hg", "authormap")
+
+ def getheads(self):
+ h = self.repo.changelog.heads()
+ return [hex(x) for x in h]
+
    def setbranch(self, branch, pbranches):
        """Switch the sink to a per-branch clone (hg.clonebranches mode).

        branch is the branch of the next commit, pbranches the
        (revision, branch) pairs of its parents. No-op unless
        convert.hg.clonebranches is set.
        """
        if not self.clonebranches:
            return

        setbranch = (branch != self.lastbranch)
        self.lastbranch = branch
        if not branch:
            branch = 'default'
        # Normalize empty parent branch names to 'default' as well.
        pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches]
        pbranch = pbranches and pbranches[0][1] or 'default'

        branchpath = os.path.join(self.path, branch)
        if setbranch:
            # Drop locks on the old clone, open (or create) the clone
            # for the new branch, then re-acquire locks on it.
            self.after()
            try:
                self.repo = hg.repository(self.ui, branchpath)
            except:
                self.repo = hg.repository(self.ui, branchpath, create=True)
            self.before()

        # pbranches may bring revisions from other branches (merge parents)
        # Make sure we have them, or pull them.
        missings = {}
        for b in pbranches:
            try:
                self.repo.lookup(b[0])
            except:
                missings.setdefault(b[1], []).append(b[0])

        if missings:
            self.after()
            for pbranch, heads in missings.iteritems():
                pbranchpath = os.path.join(self.path, pbranch)
                prepo = hg.repository(self.ui, pbranchpath)
                self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch))
                self.repo.pull(prepo, [prepo.lookup(h) for h in heads])
            self.before()
+
+ def _rewritetags(self, source, revmap, data):
+ fp = cStringIO.StringIO()
+ for line in data.splitlines():
+ s = line.split(' ', 1)
+ if len(s) != 2:
+ continue
+ revid = revmap.get(source.lookuprev(s[0]))
+ if not revid:
+ continue
+ fp.write('%s %s\n' % (revid, s[1]))
+ return fp.getvalue()
+
    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Create one changeset in the sink from a source commit.

        files is a list of (path, source revision) pairs, copies a
        {dst: src} map. Octopus merges (more than two parents) are
        flattened into a chain of two-parent "fixup" commits. Returns
        the hex id of the resulting (tip) changeset.
        """
        files = dict(files)
        def getfilectx(repo, memctx, f):
            v = files[f]
            data, mode = source.getfile(f, v)
            # .hgtags content must reference converted revision ids.
            if f == '.hgtags':
                data = self._rewritetags(source, revmap, data)
            return context.memfilectx(f, data, 'l' in mode, 'x' in mode,
                                      copies.get(f))

        # Deduplicate parents while preserving their order.
        pl = []
        for p in parents:
            if p not in pl:
                pl.append(p)
        parents = pl
        nparents = len(parents)
        if self.filemapmode and nparents == 1:
            m1node = self.repo.changelog.read(bin(parents[0]))[0]
            parent = parents[0]

        # Pad to at least two parents so the merge loop below always has
        # a (p1, p2) pair to work with.
        if len(parents) < 2:
            parents.append(nullid)
        if len(parents) < 2:
            parents.append(nullid)
        p2 = parents.pop(0)

        text = commit.desc
        extra = commit.extra.copy()
        if self.branchnames and commit.branch:
            extra['branch'] = commit.branch
        if commit.rev:
            extra['convert_revision'] = commit.rev

        # Each iteration merges one more parent; extra parents beyond
        # the second produce synthetic "octopus merge fixup" commits.
        while parents:
            p1 = p2
            p2 = parents.pop(0)
            ctx = context.memctx(self.repo, (p1, p2), text, files.keys(),
                                 getfilectx, commit.author, commit.date, extra)
            self.repo.commitctx(ctx)
            text = "(octopus merge fixup)\n"
            p2 = hex(self.repo.changelog.tip())

        if self.filemapmode and nparents == 1:
            # With a filemap, drop commits whose filtered manifest is
            # identical to the parent's.
            man = self.repo.manifest
            mnode = self.repo.changelog.read(bin(p2))[0]
            closed = 'close' in commit.extra
            if not closed and not man.cmp(m1node, man.revision(mnode)):
                self.ui.status(_("filtering out empty revision\n"))
                self.repo.rollback()
                return parent
        return p2
+
    def puttags(self, tags):
        """Commit .hgtags on the tags branch to match {tag: hex node}.

        Returns (new tip hex, tag parent hex), or (None, None) when the
        stored tags are already up to date.
        """
        try:
            parentctx = self.repo[self.tagsbranch]
            tagparent = parentctx.node()
        except error.RepoError:
            parentctx = None
            tagparent = nullid

        try:
            oldlines = sorted(parentctx['.hgtags'].data().splitlines(True))
        except:
            # No tags branch yet, or no .hgtags file on it (the bare
            # except also covers indexing parentctx when it is None).
            oldlines = []

        newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags])
        if newlines == oldlines:
            return None, None
        data = "".join(newlines)
        def getfilectx(repo, memctx, f):
            return context.memfilectx(f, data, False, False, None)

        self.ui.status(_("updating tags\n"))
        date = "%s 0" % int(time.mktime(time.gmtime()))
        extra = {'branch': self.tagsbranch}
        ctx = context.memctx(self.repo, (tagparent, None), "update tags",
                             [".hgtags"], getfilectx, "convert-repo", date,
                             extra)
        self.repo.commitctx(ctx)
        return hex(self.repo.changelog.tip()), hex(tagparent)
+
    def setfilemapmode(self, active):
        """Record whether a filemap is in use; enables empty-revision
        filtering in putcommit()."""
        self.filemapmode = active
+
+class mercurial_source(converter_source):
    def __init__(self, ui, path, rev=None):
        """Open the source Mercurial repository at path.

        Raises NoRepo when path is not a local hg repository. When
        convert.hg.startrev is set, conversion is restricted to that
        revision and its descendants.
        """
        converter_source.__init__(self, ui, path, rev)
        self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False)
        # Files skipped because their revlog is broken (hg.ignoreerrors).
        self.ignored = set()
        self.saverev = ui.configbool('convert', 'hg.saverev', False)
        try:
            self.repo = hg.repository(self.ui, path)
            # try to provoke an exception if this isn't really a hg
            # repo, but some other bogus compatible-looking url
            if not self.repo.local():
                raise error.RepoError()
        except error.RepoError:
            ui.traceback()
            raise NoRepo(_("%s is not a local Mercurial repository") % path)
        # One-entry changectx cache, see changectx().
        self.lastrev = None
        self.lastctx = None
        self._changescache = None
        self.convertfp = None
        # Restrict converted revisions to startrev descendants
        startnode = ui.config('convert', 'hg.startrev')
        if startnode is not None:
            try:
                startnode = self.repo.lookup(startnode)
            except error.RepoError:
                raise util.Abort(_('%s is not a valid start revision')
                                 % startnode)
            startrev = self.repo.changelog.rev(startnode)
            children = {startnode: 1}
            for rev in self.repo.changelog.descendants(startrev):
                children[self.repo.changelog.node(rev)] = 1
            # keep(node) decides whether a node takes part in the conversion.
            self.keep = children.__contains__
        else:
            self.keep = util.always
+
+ def changectx(self, rev):
+ if self.lastrev != rev:
+ self.lastctx = self.repo[rev]
+ self.lastrev = rev
+ return self.lastctx
+
+ def parents(self, ctx):
+ return [p for p in ctx.parents() if p and self.keep(p.node())]
+
+ def getheads(self):
+ if self.rev:
+ heads = [self.repo[self.rev].node()]
+ else:
+ heads = self.repo.heads()
+ return [hex(h) for h in heads if self.keep(h)]
+
    def getfile(self, name, rev):
        """Return (data, flags) for file name in changeset rev.

        Raises IOError when the file is absent from that revision, as
        the convert engine expects for deleted or renamed files.
        """
        try:
            fctx = self.changectx(rev)[name]
            return fctx.data(), fctx.flags()
        except error.LookupError, err:
            raise IOError(err)
+
    def getchanges(self, rev):
        """Return (sorted [(file, rev)] changes, {dst: src} copies) for rev.

        Root changesets report the whole manifest. Status against parent
        0 may come from the cache filled by getchangedfiles().
        """
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents:
            files = sorted(ctx.manifest())
            if self.ignoreerrors:
                # calling getcopies() is a simple way to detect missing
                # revlogs and populate self.ignored
                self.getcopies(ctx, parents, files)
            return [(f, rev) for f in files if f not in self.ignored], {}
        if self._changescache and self._changescache[0] == rev:
            m, a, r = self._changescache[1]
        else:
            m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3]
        # getcopies() detects missing revlogs early, run it before
        # filtering the changes.
        copies = self.getcopies(ctx, parents, m + a)
        changes = [(name, rev) for name in m + a + r
                   if name not in self.ignored]
        return sorted(changes), copies
+
    def getcopies(self, ctx, parents, files):
        """Return {dst: src} rename/copy info for files in ctx.

        Copies whose source is filtered out, missing from the parents,
        or broken (missing revlog) are skipped; with hg.ignoreerrors
        the broken files are also recorded in self.ignored.
        """
        copies = {}
        for name in files:
            if name in self.ignored:
                continue
            try:
                copysource, copynode = ctx.filectx(name).renamed()
                if copysource in self.ignored or not self.keep(copynode):
                    continue
                # Ignore copy sources not in parent revisions
                found = False
                for p in parents:
                    if copysource in p:
                        found = True
                        break
                if not found:
                    continue
                copies[name] = copysource
            except TypeError:
                # renamed() returned None: the file is not a copy.
                pass
            except error.LookupError, e:
                if not self.ignoreerrors:
                    raise
                self.ignored.add(name)
                self.ui.warn(_('ignoring: %s\n') % e)
        return copies
+
+ def getcommit(self, rev):
+ ctx = self.changectx(rev)
+ parents = [p.hex() for p in self.parents(ctx)]
+ if self.saverev:
+ crev = rev
+ else:
+ crev = None
+ return commit(author=ctx.user(), date=util.datestr(ctx.date()),
+ desc=ctx.description(), rev=crev, parents=parents,
+ branch=ctx.branch(), extra=ctx.extra(),
+ sortkey=ctx.rev())
+
+ def gettags(self):
+ tags = [t for t in self.repo.tagslist() if t[0] != 'tip']
+ return dict([(name, hex(node)) for name, node in tags
+ if self.keep(node)])
+
    def getchangedfiles(self, rev, i):
        """Files changed between parent i of rev and rev itself.

        With no parents and i None, the full manifest is returned.
        The parent-0 status triple is cached for a later getchanges().
        """
        ctx = self.changectx(rev)
        parents = self.parents(ctx)
        if not parents and i is None:
            i = 0
            changes = [], ctx.manifest().keys(), []
        else:
            i = i or 0
            changes = self.repo.status(parents[i].node(), ctx.node())[:3]
        changes = [[f for f in l if f not in self.ignored] for l in changes]

        if i == 0:
            self._changescache = (rev, changes)

        # modified + added + removed
        return changes[0] + changes[1] + changes[2]
+
    def converted(self, rev, destrev):
        """Append a destination->source mapping to the repo's shamap.

        The file handle is opened lazily on first use and kept for the
        rest of the conversion; each entry is flushed immediately.
        """
        if self.convertfp is None:
            self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'),
                                  'a')
        self.convertfp.write('%s %s\n' % (destrev, rev))
        self.convertfp.flush()
+
    def before(self):
        """Pre-conversion hook; hg sources need no setup beyond logging."""
        self.ui.debug('run hg source pre-conversion action\n')
+
    def after(self):
        """Post-conversion hook; hg sources need no teardown beyond logging."""
        self.ui.debug('run hg source post-conversion action\n')
+
    def hasnativeorder(self):
        # Mercurial revisions can be converted in the repository's own
        # (topological) order.
        return True
+
    def lookuprev(self, rev):
        """Resolve rev to a full hex node id, or None if unknown."""
        try:
            return hex(self.repo.lookup(rev))
        except error.RepoError:
            return None
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyo
new file mode 100644
index 0000000..633af5e
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py
new file mode 100644
index 0000000..151ddc5
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py
@@ -0,0 +1,227 @@
+# monotone.py - monotone support for the convert extension
+#
+# Copyright 2008, 2009 Mikkel Fahnoe Jorgensen <mikkel@dvide.com> and
+# others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+import os, re
+from mercurial import util
+from common import NoRepo, commit, converter_source, checktool
+from common import commandline
+from mercurial.i18n import _
+
+class monotone_source(converter_source, commandline):
    def __init__(self, ui, path=None, rev=None):
        """Probe path as a monotone workspace or database.

        Raises NoRepo unless path contains an _MTN workspace directory
        or is an SQLite database file (a monotone db), and the mtn tool
        can at least list the leaves.
        """
        converter_source.__init__(self, ui, path, rev)
        commandline.__init__(self, ui, 'mtn')

        self.ui = ui
        self.path = path

        norepo = NoRepo(_("%s does not look like a monotone repository")
                        % path)
        if not os.path.exists(os.path.join(path, '_MTN')):
            # Could be a monotone repository (SQLite db file)
            try:
                # NOTE(review): the file object is never closed
                # explicitly; relies on refcounting/GC to close it.
                header = file(path, 'rb').read(16)
            except:
                header = ''
            if header != 'SQLite format 3\x00':
                raise norepo

        # regular expressions for parsing monotone output
        space = r'\s*'
        name = r'\s+"((?:\\"|[^"])*)"\s*'
        value = name
        revision = r'\s+\[(\w+)\]\s*'
        lines = r'(?:.|\n)+'

        self.dir_re = re.compile(space + "dir" + name)
        self.file_re = re.compile(space + "file" + name +
                                  "content" + revision)
        self.add_file_re = re.compile(space + "add_file" + name +
                                      "content" + revision)
        self.patch_re = re.compile(space + "patch" + name +
                                   "from" + revision + "to" + revision)
        self.rename_re = re.compile(space + "rename" + name + "to" + name)
        self.delete_re = re.compile(space + "delete" + name)
        self.tag_re = re.compile(space + "tag" + name + "revision" +
                                 revision)
        self.cert_re = re.compile(lines + space + "name" + name +
                                  "value" + value)

        attr = space + "file" + lines + space + "attr" + space
        self.attr_execute_re = re.compile(attr + '"mtn:execute"' +
                                          space + '"true"')

        # cached data (filled lazily by mtnloadmanifest)
        self.manifest_rev = None
        self.manifest = None
        self.files = None
        self.dirs = None

        checktool('mtn', abort=False)

        # test if there are any revisions
        self.rev = None
        try:
            self.getheads()
        except:
            raise norepo
        self.rev = rev
+
    def mtnrun(self, *args, **kwargs):
        """Run 'mtn automate <args>' against this database; return stdout."""
        kwargs['d'] = self.path
        return self.run0('automate', *args, **kwargs)
+
    def mtnloadmanifest(self, rev):
        """Parse rev's manifest into self.files / self.dirs.

        Cached per revision via self.manifest_rev; repeated calls for
        the same rev are free.
        """
        if self.manifest_rev == rev:
            return
        self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n")
        self.manifest_rev = rev
        self.files = {}
        self.dirs = {}

        for e in self.manifest:
            m = self.file_re.match(e)
            if m:
                attr = ""
                name = m.group(1)
                node = m.group(2)
                if self.attr_execute_re.match(e):
                    attr += "x"
                # files maps name -> (content hash, flag string)
                self.files[name] = (node, attr)
            m = self.dir_re.match(e)
            if m:
                self.dirs[m.group(1)] = True
+
+ def mtnisfile(self, name, rev):
+ # a non-file could be a directory or a deleted or renamed file
+ self.mtnloadmanifest(rev)
+ return name in self.files
+
+ def mtnisdir(self, name, rev):
+ self.mtnloadmanifest(rev)
+ return name in self.dirs
+
    def mtngetcerts(self, rev):
        """Return rev's author/date/changelog/branch certs as a dict,
        with "<missing>" placeholders for absent certs."""
        certs = {"author":"<missing>", "date":"<missing>",
                 "changelog":"<missing>", "branch":"<missing>"}
        certlist = self.mtnrun("certs", rev)
        # mtn < 0.45:
        # key "test@selenic.com"
        # mtn >= 0.45:
        # key [ff58a7ffb771907c4ff68995eada1c4da068d328]
        certlist = re.split('\n\n key ["\[]', certlist)
        for e in certlist:
            m = self.cert_re.match(e)
            if m:
                name, value = m.groups()
                # Unescape the quoted cert value.
                value = value.replace(r'\"', '"')
                value = value.replace(r'\\', '\\')
                certs[name] = value
        # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306
        # and all times are stored in UTC
        certs["date"] = certs["date"].split('.')[0] + " UTC"
        return certs
+
+ # implement the converter_source interface:
+
+ def getheads(self):
+ if not self.rev:
+ return self.mtnrun("leaves").splitlines()
+ else:
+ return [self.rev]
+
    def getchanges(self, rev):
        """Return ([(file, rev)], {dst: src}) describing revision rev.

        Deletes and renames are reported as changes to the old names so
        the convert engine discovers removals via getfile's IOError;
        directory renames are expanded into per-file copies.
        """
        #revision = self.mtncmd("get_revision %s" % rev).split("\n\n")
        revision = self.mtnrun("get_revision", rev).split("\n\n")
        files = {}
        ignoremove = {}
        renameddirs = []
        copies = {}
        for e in revision:
            m = self.add_file_re.match(e)
            if m:
                files[m.group(1)] = rev
                ignoremove[m.group(1)] = rev
            m = self.patch_re.match(e)
            if m:
                files[m.group(1)] = rev
            # Delete/rename is handled later when the convert engine
            # discovers an IOError exception from getfile,
            # but only if we add the "from" file to the list of changes.
            m = self.delete_re.match(e)
            if m:
                files[m.group(1)] = rev
            m = self.rename_re.match(e)
            if m:
                toname = m.group(2)
                fromname = m.group(1)
                if self.mtnisfile(toname, rev):
                    ignoremove[toname] = 1
                    copies[toname] = fromname
                    files[toname] = rev
                    files[fromname] = rev
                elif self.mtnisdir(toname, rev):
                    renameddirs.append((fromname, toname))

        # Directory renames can be handled only once we have recorded
        # all new files
        for fromdir, todir in renameddirs:
            renamed = {}
            for tofile in self.files:
                if tofile in ignoremove:
                    continue
                if tofile.startswith(todir + '/'):
                    renamed[tofile] = fromdir + tofile[len(todir):]
                    # Avoid chained moves like:
                    #   d1(/a) => d3/d1(/a)
                    #   d2 => d3
                    ignoremove[tofile] = 1
            for tofile, fromfile in renamed.items():
                self.ui.debug (_("copying file in renamed directory "
                                 "from '%s' to '%s'")
                               % (fromfile, tofile), '\n')
                files[tofile] = rev
                copies[tofile] = fromfile
            for fromfile in renamed.values():
                files[fromfile] = rev

        return (files.items(), copies)
+
+ def getfile(self, name, rev):
+ if not self.mtnisfile(name, rev):
+ raise IOError() # file was deleted or renamed
+ try:
+ data = self.mtnrun("get_file_of", name, r=rev)
+ except:
+ raise IOError() # file was deleted or renamed
+ self.mtnloadmanifest(rev)
+ node, attr = self.files.get(name, (None, ""))
+ return data, attr
+
    def getcommit(self, rev):
        """Build a commit object from rev's monotone certs."""
        certs = self.mtngetcerts(rev)
        return commit(
            author=certs["author"],
            date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")),
            desc=certs["changelog"],
            rev=rev,
            parents=self.mtnrun("parents", rev).splitlines(),
            branch=certs["branch"])
+
+ def gettags(self):
+ tags = {}
+ for e in self.mtnrun("tags").split("\n\n"):
+ m = self.tag_re.match(e)
+ if m:
+ tags[m.group(1)] = m.group(2)
+ return tags
+
    def getchangedfiles(self, rev, i):
        """Unsupported: --filemap is not implemented for monotone."""
        # This function is only needed to support --filemap
        # ... and we don't support that
        raise NotImplementedError()
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyo
new file mode 100644
index 0000000..b51d266
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py
new file mode 100644
index 0000000..5d640ad
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py
@@ -0,0 +1,202 @@
+# Perforce source for convert extension.
+#
+# Copyright 2009, Frank Kingswood <frank@kingswood-consulting.co.uk>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+from mercurial import util
+from mercurial.i18n import _
+
+from common import commit, converter_source, checktool, NoRepo
+import marshal
+import re
+
def loaditer(f):
    """Yield the dictionary objects generated by p4.

    Iteration stops at the first falsy object or when the stream ends.
    """
    try:
        obj = marshal.load(f)
        while obj:
            yield obj
            obj = marshal.load(f)
    except EOFError:
        pass
+
+class p4_source(converter_source):
    def __init__(self, ui, path, rev=None):
        """Set up a p4 source for a depot path (//...) or a client name.

        Raises NoRepo for paths containing '/' without the depot
        prefix. The full changelist history is read eagerly by _parse().
        """
        super(p4_source, self).__init__(ui, path, rev=rev)

        if "/" in path and not path.startswith('//'):
            raise NoRepo(_('%s does not look like a P4 repository') % path)

        checktool('p4', abort=False)

        self.p4changes = {}
        self.heads = {}
        self.changeset = {}
        self.files = {}
        self.tags = {}
        self.lastbranch = {}
        self.parent = {}
        self.encoding = "latin_1"
        self.depotname = {} # mapping from local name to depot name
        # Parses p4 file types: optional modifier, base type, +flags.
        self.re_type = re.compile(
            "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)"
            "(\+\w+)?$")
        # RCS-style keywords expanded by p4, collapsed back on import.
        self.re_keywords = re.compile(
            r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)"
            r":[^$\n]*\$")
        self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$")

        self._parse(ui, path)
+
+ def _parse_view(self, path):
+ "Read changes affecting the path"
+ cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path)
+ stdout = util.popen(cmd, mode='rb')
+ for d in loaditer(stdout):
+ c = d.get("change", None)
+ if c:
+ self.p4changes[c] = True
+
    def _parse(self, ui, path):
        """Prepare the list of P4 filenames and revisions to import.

        Fills self.p4changes, self.changeset, self.files, self.depotname
        and self.heads by walking either a depot view or a client spec.
        """
        ui.status(_('reading p4 views\n'))

        # read client spec or view
        if "/" in path:
            self._parse_view(path)
            if path.startswith("//") and path.endswith("/..."):
                views = {path[:-3]:""}
            else:
                views = {"//": ""}
        else:
            # path is a client name: fetch its view mappings.
            cmd = 'p4 -G client -o %s' % util.shellquote(path)
            clientspec = marshal.load(util.popen(cmd, mode='rb'))

            views = {}
            for client in clientspec:
                if client.startswith("View"):
                    sview, cview = clientspec[client].split()
                    self._parse_view(sview)
                    if sview.endswith("...") and cview.endswith("..."):
                        sview = sview[:-3]
                        cview = cview[:-3]
                    # Strip the //client/ prefix from the local side.
                    cview = cview[2:]
                    cview = cview[cview.find("/") + 1:]
                    views[sview] = cview

        # list of changes that affect our source files
        self.p4changes = self.p4changes.keys()
        self.p4changes.sort(key=int)

        # list with depot pathnames, longest first
        vieworder = views.keys()
        vieworder.sort(key=len, reverse=True)

        # handle revision limiting
        startrev = self.ui.config('convert', 'p4.startrev', default=0)
        self.p4changes = [x for x in self.p4changes
                          if ((not startrev or int(x) >= int(startrev)) and
                              (not self.rev or int(x) <= int(self.rev)))]

        # now read the full changelists to get the list of file revisions
        ui.status(_('collecting p4 changelists\n'))
        lastid = None
        for change in self.p4changes:
            cmd = "p4 -G describe -s %s" % change
            stdout = util.popen(cmd, mode='rb')
            d = marshal.load(stdout)
            desc = self.recode(d["desc"])
            shortdesc = desc.split("\n", 1)[0]
            t = '%s %s' % (d["change"], repr(shortdesc)[1:-1])
            ui.status(util.ellipsis(t, 80) + '\n')

            # Changelists form a linear history: parent is the previous one.
            if lastid:
                parents = [lastid]
            else:
                parents = []

            date = (int(d["time"]), 0) # timezone not set
            c = commit(author=self.recode(d["user"]), date=util.datestr(date),
                       parents=parents, desc=desc, branch='',
                       extra={"p4": change})

            # Collect (local name, revision) for every depot file that
            # maps into one of the views; longest view prefix wins.
            files = []
            i = 0
            while ("depotFile%d" % i) in d and ("rev%d" % i) in d:
                oldname = d["depotFile%d" % i]
                filename = None
                for v in vieworder:
                    if oldname.startswith(v):
                        filename = views[v] + oldname[len(v):]
                        break
                if filename:
                    files.append((filename, d["rev%d" % i]))
                    self.depotname[filename] = oldname
                i += 1
            self.changeset[change] = c
            self.files[change] = files
            lastid = change

        if lastid:
            self.heads = [lastid]
+
    def getheads(self):
        """Heads of the conversion: the newest changelist found by
        _parse(), if any."""
        return self.heads
+
    def getfile(self, name, rev):
        """Return (contents, mode) for name#rev via 'p4 print'.

        mode is '' for plain files, 'x' for executables, 'l' for
        symlinks; expanded RCS keywords are collapsed back. Raises
        IOError on p4 errors or when no stat record was produced.
        """
        cmd = 'p4 -G print %s' \
            % util.shellquote("%s#%s" % (self.depotname[name], rev))
        stdout = util.popen(cmd, mode='rb')

        mode = None
        contents = ""
        keywords = None

        for d in loaditer(stdout):
            code = d["code"]
            data = d.get("data")

            if code == "error":
                raise IOError(d["generic"], data)

            elif code == "stat":
                p4type = self.re_type.match(d["type"])
                if p4type:
                    mode = ""
                    # Combine the type modifier and the +flags suffix.
                    flags = (p4type.group(1) or "") + (p4type.group(3) or "")
                    if "x" in flags:
                        mode = "x"
                    if p4type.group(2) == "symlink":
                        mode = "l"
                    if "ko" in flags:
                        keywords = self.re_keywords_old
                    elif "k" in flags:
                        keywords = self.re_keywords

            elif code == "text" or code == "binary":
                contents += data

        if mode is None:
            raise IOError(0, "bad stat")

        if keywords:
            # Collapse expanded keywords back to their bare "$Name$" form.
            contents = keywords.sub("$\\1$", contents)
        if mode == "l" and contents.endswith("\n"):
            # p4 prints symlink targets with a trailing newline; drop it.
            contents = contents[:-1]

        return contents, mode
+
    def getchanges(self, rev):
        """Files touched by changelist rev; no copy info is extracted
        from p4, so the copies map is always empty."""
        return self.files[rev], {}
+
    def getcommit(self, rev):
        """Commit object for changelist rev, prebuilt by _parse()."""
        return self.changeset[rev]
+
    def gettags(self):
        """Tag map; never populated for p4 sources, so always empty."""
        return self.tags
+
+ def getchangedfiles(self, rev, i):
+ return sorted([x[0] for x in self.files[rev]])
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyo
new file mode 100644
index 0000000..45f1e2a
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py
new file mode 100644
index 0000000..f2d26ad
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py
@@ -0,0 +1,1168 @@
+# Subversion 1.4/1.5 Python API backend
+#
+# Copyright(C) 2007 Daniel Holth et al
+
+import os
+import re
+import sys
+import cPickle as pickle
+import tempfile
+import urllib
+import urllib2
+
+from mercurial import strutil, util, encoding
+from mercurial.i18n import _
+
+# Subversion stuff. Works best with very recent Python SVN bindings
+# e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing
+# these bindings.
+
+from cStringIO import StringIO
+
+from common import NoRepo, MissingTool, commit, encodeargs, decodeargs
+from common import commandline, converter_source, converter_sink, mapfile
+
+try:
+ from svn.core import SubversionException, Pool
+ import svn
+ import svn.client
+ import svn.core
+ import svn.ra
+ import svn.delta
+ import transport
+ import warnings
+ warnings.filterwarnings('ignore',
+ module='svn.core',
+ category=DeprecationWarning)
+
+except ImportError:
+ pass
+
class SvnPathNotFound(Exception):
    """Raised when a path does not exist at the requested revision."""
    pass
+
def geturl(path):
    """Normalize a working-copy path or URL into a Subversion URL.

    Falls back to a hand-built file:// URL when the svn bindings cannot
    resolve the path, and returns the input unchanged as a last resort.
    """
    try:
        return svn.client.url_from_path(svn.core.svn_path_canonicalize(path))
    except SubversionException:
        pass
    if os.path.isdir(path):
        path = os.path.normpath(os.path.abspath(path))
        if os.name == 'nt':
            path = '/' + util.normpath(path)
        # Module URL is later compared with the repository URL returned
        # by svn API, which is UTF-8.
        path = encoding.tolocal(path)
        return 'file://%s' % urllib.quote(path)
    return path
+
def optrev(number):
    """Wrap an integer revision number in an svn_opt_revision_t value."""
    # Use a local name distinct from the function to avoid shadowing.
    rev = svn.core.svn_opt_revision_t()
    rev.kind = svn.core.svn_opt_revision_number
    rev.value.number = number
    return rev
+
class changedpath(object):
    """Plain-Python copy of an svn changed-path entry, detached from the
    svn memory pool so it survives pickling across processes."""
    def __init__(self, p):
        self.action = p.action
        self.copyfrom_path = p.copyfrom_path
        self.copyfrom_rev = p.copyfrom_rev
+
def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True,
                  strict_node_history=False):
    """Child-process half of the svn log fetcher.

    Streams (paths, revnum, author, date, message) pickles to fp,
    followed by one sentinel: None on success or interruption, or the
    svn error code on failure. Never returns: exits via os._exit(0).
    """
    protocol = -1
    def receiver(orig_paths, revnum, author, date, message, pool):
        if orig_paths is not None:
            for k, v in orig_paths.iteritems():
                # Detach entries from the svn pool so they pickle safely.
                orig_paths[k] = changedpath(v)
        pickle.dump((orig_paths, revnum, author, date, message),
                    fp, protocol)

    try:
        # Use an ra of our own so that our parent can consume
        # our results without confusing the server.
        t = transport.SvnRaTransport(url=url)
        svn.ra.get_log(t.ra, paths, start, end, limit,
                       discover_changed_paths,
                       strict_node_history,
                       receiver)
    except SubversionException, (inst, num):
        pickle.dump(num, fp, protocol)
    except IOError:
        # Caller may interrupt the iteration
        pickle.dump(None, fp, protocol)
    else:
        pickle.dump(None, fp, protocol)
    fp.close()
    # With large history, cleanup process goes crazy and suddenly
    # consumes *huge* amount of memory. The output file being closed,
    # there is no need for clean termination.
    os._exit(0)
+
def debugsvnlog(ui, **opts):
    """Fetch SVN log in a subprocess and channel them back to parent to
    avoid memory collection issues.
    """
    # Binary stdio: arguments arrive encoded on stdin and results are
    # streamed back as pickles on stdout (read by logstream).
    util.set_binary(sys.stdin)
    util.set_binary(sys.stdout)
    args = decodeargs(sys.stdin.read())
    get_log_child(sys.stdout, *args)
+
class logstream(object):
    """Interruptible revision log iterator.

    Reads the pickles produced by get_log_child from a pipe and yields
    each log entry; a pickled None marks a clean end of stream.
    """
    def __init__(self, stdout):
        self._stdout = stdout

    def __iter__(self):
        while True:
            try:
                item = pickle.load(self._stdout)
            except EOFError:
                raise util.Abort(_('Mercurial failed to run itself, check'
                                   ' hg executable is in PATH'))
            if item is None:
                # End-of-stream sentinel from the child process.
                break
            try:
                _paths, _revnum, _author, _date, _message = item
            except:
                # Anything that is not a 5-tuple is an error code
                # forwarded by the child.
                raise SubversionException("child raised exception", item)
            yield item

    def close(self):
        if self._stdout:
            self._stdout.close()
            self._stdout = None
+
+
+# Check to see if the given path is a local Subversion repo. Verify this by
+# looking for several svn-specific files and directories in the given
+# directory.
# Check to see if the given path is a local Subversion repo. Verify this by
# looking for several svn-specific files and directories in the given
# directory.
def filecheck(ui, path, proto):
    """Return True when path contains the svn repository layout."""
    required = ('locks', 'hooks', 'format', 'db')
    return all(os.path.exists(os.path.join(path, entry))
               for entry in required)
+
+# Check to see if a given path is the root of an svn repo over http. We verify
+# this by requesting a version-controlled URL we know can't exist and looking
+# for the svn-specific "not found" XML.
+def httpcheck(ui, path, proto):
+ try:
+ opener = urllib2.build_opener()
+ rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path))
+ data = rsp.read()
+ except urllib2.HTTPError, inst:
+ if inst.code != 404:
+ # Except for 404 we cannot know for sure this is not an svn repo
+ ui.warn(_('svn: cannot probe remote repository, assume it could '
+ 'be a subversion repository. Use --source-type if you '
+ 'know better.\n'))
+ return True
+ data = inst.fp.read()
+ except:
+ # Could be urllib2.URLError if the URL is invalid or anything else.
+ return False
+ return '<m:human-readable errcode="160013">' in data
+
# Dispatch table: URL scheme -> probe function used by issvnurl().
protomap = {'http': httpcheck,
            'https': httpcheck,
            'file': filecheck,
            }
def issvnurl(ui, url):
    """Best-effort check that url points at a Subversion repository.

    Splits off the scheme (defaulting to 'file' for plain paths) and
    probes successively shorter path prefixes with the scheme-specific
    checker from protomap.
    """
    try:
        proto, path = url.split('://', 1)
        if proto == 'file':
            path = urllib.url2pathname(path)
    except ValueError:
        proto = 'file'
        path = os.path.abspath(url)
    if proto == 'file':
        path = path.replace(os.sep, '/')
    check = protomap.get(proto, lambda *args: False)
    while '/' in path:
        if check(ui, path, proto):
            return True
        # Walk up one path component and retry.
        path = path.rsplit('/', 1)[0]
    return False
+
+# SVN conversion code stolen from bzr-svn and tailor
+#
+# Subversion looks like a versioned filesystem, branches structures
+# are defined by conventions and not enforced by the tool. First,
+# we define the potential branches (modules) as "trunk" and "branches"
+# children directories. Revisions are then identified by their
+# module and revision number (and a repository identifier).
+#
+# The revision graph is really a tree (or a forest). By default, a
+# revision parent is the previous revision in the same module. If the
+# module directory is copied/moved from another module then the
+# revision is the module root and its parent the source revision in
+# the parent module. A revision has at most one parent.
+#
+class svn_source(converter_source):
    def __init__(self, ui, url, rev=None):
        """Open a Subversion source at url, optionally pinned to rev.

        Raises NoRepo when url does not look like an svn repository,
        MissingTool when the Python bindings are absent or older than
        1.4, and util.Abort on invalid revision arguments.
        """
        super(svn_source, self).__init__(ui, url, rev=rev)

        if not (url.startswith('svn://') or url.startswith('svn+ssh://') or
                (os.path.exists(url) and
                 os.path.exists(os.path.join(url, '.svn'))) or
                issvnurl(ui, url)):
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % url)

        # SubversionException only exists if the svn import at module
        # load time succeeded.
        try:
            SubversionException
        except NameError:
            raise MissingTool(_('Subversion python bindings could not be loaded'))

        try:
            version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR
            if version < (1, 4):
                raise MissingTool(_('Subversion python bindings %d.%d found, '
                                    '1.4 or later required') % version)
        except AttributeError:
            raise MissingTool(_('Subversion python bindings are too old, 1.4 '
                                'or later required'))

        self.lastrevs = {}

        latest = None
        try:
            # Support file://path@rev syntax. Useful e.g. to convert
            # deleted branches.
            at = url.rfind('@')
            if at >= 0:
                latest = int(url[at + 1:])
                url = url[:at]
        except ValueError:
            pass
        self.url = geturl(url)
        self.encoding = 'UTF-8' # Subversion is always nominal UTF-8
        try:
            self.transport = transport.SvnRaTransport(url=self.url)
            self.ra = self.transport.ra
            self.ctx = self.transport.client
            self.baseurl = svn.ra.get_repos_root(self.ra)
            # Module is either empty or a repository path starting with
            # a slash and not ending with a slash.
            self.module = urllib.unquote(self.url[len(self.baseurl):])
            self.prevmodule = None
            self.rootmodule = self.module
            self.commits = {}
            self.paths = {}
            self.uuid = svn.ra.get_uuid(self.ra)
        except SubversionException:
            ui.traceback()
            raise NoRepo(_("%s does not look like a Subversion repository")
                         % self.url)

        if rev:
            try:
                latest = int(rev)
            except ValueError:
                raise util.Abort(_('svn: revision %s is not an integer') % rev)

        self.startrev = self.ui.config('convert', 'svn.startrev', default=0)
        try:
            self.startrev = int(self.startrev)
            if self.startrev < 0:
                self.startrev = 0
        except ValueError:
            raise util.Abort(_('svn: start revision %s is not an integer')
                             % self.startrev)

        self.head = self.latest(self.module, latest)
        if not self.head:
            raise util.Abort(_('no revision found in module %s')
                             % self.module)
        self.last_changed = self.revnum(self.head)

        self._changescache = None

        # Remember the working copy location, if any, for post-conversion use.
        if os.path.exists(os.path.join(url, '.svn/entries')):
            self.wc = url
        else:
            self.wc = None
        self.convertfp = None
+
+    def setrevmap(self, revmap):
+        """Prime self.lastrevs from an existing revision map.
+
+        For each module seen in revmap, record the highest Subversion
+        revision number already converted, so an incremental run knows
+        where fetching can stop.
+        """
+        lastrevs = {}
+        for revid in revmap.iterkeys():
+            uuid, module, revnum = self.revsplit(revid)
+            # keep only the maximum revnum seen per module
+            lastrevnum = lastrevs.setdefault(module, revnum)
+            if revnum > lastrevnum:
+                lastrevs[module] = revnum
+        self.lastrevs = lastrevs
+
+    def exists(self, path, optrev):
+        """Return True if path exists in the repository at optrev.
+
+        Implemented as an 'svn ls' on the joined URL; any
+        SubversionException is treated as "does not exist".
+        """
+        try:
+            svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path),
+                          optrev, False, self.ctx)
+            return True
+        except SubversionException:
+            return False
+
+ def getheads(self):
+
+ def isdir(path, revnum):
+ kind = self._checkpath(path, revnum)
+ return kind == svn.core.svn_node_dir
+
+ def getcfgpath(name, rev):
+ cfgpath = self.ui.config('convert', 'svn.' + name)
+ if cfgpath is not None and cfgpath.strip() == '':
+ return None
+ path = (cfgpath or name).strip('/')
+ if not self.exists(path, rev):
+ if cfgpath:
+ raise util.Abort(_('expected %s to be at %r, but not found')
+ % (name, path))
+ return None
+ self.ui.note(_('found %s at %r\n') % (name, path))
+ return path
+
+ rev = optrev(self.last_changed)
+ oldmodule = ''
+ trunk = getcfgpath('trunk', rev)
+ self.tags = getcfgpath('tags', rev)
+ branches = getcfgpath('branches', rev)
+
+ # If the project has a trunk or branches, we will extract heads
+ # from them. We keep the project root otherwise.
+ if trunk:
+ oldmodule = self.module or ''
+ self.module += '/' + trunk
+ self.head = self.latest(self.module, self.last_changed)
+ if not self.head:
+ raise util.Abort(_('no revision found in module %s')
+ % self.module)
+
+ # First head in the list is the module's head
+ self.heads = [self.head]
+ if self.tags is not None:
+ self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags'))
+
+ # Check if branches bring a few more heads to the list
+ if branches:
+ rpath = self.url.strip('/')
+ branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches),
+ rev, False, self.ctx)
+ for branch in branchnames.keys():
+ module = '%s/%s/%s' % (oldmodule, branches, branch)
+ if not isdir(module, self.last_changed):
+ continue
+ brevid = self.latest(module, self.last_changed)
+ if not brevid:
+ self.ui.note(_('ignoring empty branch %s\n') % branch)
+ continue
+ self.ui.note(_('found branch %s at %d\n') %
+ (branch, self.revnum(brevid)))
+ self.heads.append(brevid)
+
+ if self.startrev and self.heads:
+ if len(self.heads) > 1:
+ raise util.Abort(_('svn: start revision is not supported '
+ 'with more than one branch'))
+ revnum = self.revnum(self.heads[0])
+ if revnum < self.startrev:
+ raise util.Abort(
+ _('svn: no revision found after start revision %d')
+ % self.startrev)
+
+ return self.heads
+
+    def getchanges(self, rev):
+        """Return (files, copies) for revision rev.
+
+        files is a sorted list of (path, rev) pairs and copies maps
+        destination path -> source path.  Root revisions (no parents)
+        are expanded with a full recursive listing instead of a diff.
+        Side effect: sets self.removed, consumed later by getfile().
+        """
+        # Serve the answer primed by a preceding getchangedfiles() call.
+        if self._changescache and self._changescache[0] == rev:
+            return self._changescache[1]
+        self._changescache = None
+        (paths, parents) = self.paths[rev]
+        if parents:
+            files, self.removed, copies = self.expandpaths(rev, paths, parents)
+        else:
+            # Perform a full checkout on roots
+            uuid, module, revnum = self.revsplit(rev)
+            entries = svn.client.ls(self.baseurl + urllib.quote(module),
+                                    optrev(revnum), True, self.ctx)
+            files = [n for n, e in entries.iteritems()
+                     if e.kind == svn.core.svn_node_file]
+            copies = {}
+            self.removed = set()
+
+        files.sort()
+        files = zip(files, [rev] * len(files))
+
+        # caller caches the result, so free it here to release memory
+        del self.paths[rev]
+        return (files, copies)
+
+    def getchangedfiles(self, rev, i):
+        """Return the list of file names changed in rev (i is unused).
+
+        Caches the full getchanges() result so the immediately
+        following getchanges(rev) call does not recompute it.
+        """
+        changes = self.getchanges(rev)
+        self._changescache = (rev, changes)
+        return [f[0] for f in changes[0]]
+
+    def getcommit(self, rev):
+        """Return the commit object for rev, fetching and caching a
+        revision range on demand.  The entry is removed from
+        self.commits once returned (the caller caches it).
+        """
+        if rev not in self.commits:
+            uuid, module, revnum = self.revsplit(rev)
+            self.module = module
+            self.reparent(module)
+            # We assume that:
+            # - requests for revisions after "stop" come from the
+            #   revision graph backward traversal. Cache all of them
+            #   down to stop, they will be used eventually.
+            # - requests for revisions before "stop" come to get
+            #   isolated branches parents. Just fetch what is needed.
+            stop = self.lastrevs.get(module, 0)
+            if revnum < stop:
+                stop = revnum + 1
+            self._fetch_revisions(revnum, stop)
+        commit = self.commits[rev]
+        # caller caches the result, so free it here to release memory
+        del self.commits[rev]
+        return commit
+
+ def gettags(self):
+ tags = {}
+ if self.tags is None:
+ return tags
+
+ # svn tags are just a convention, project branches left in a
+ # 'tags' directory. There is no other relationship than
+ # ancestry, which is expensive to discover and makes them hard
+ # to update incrementally. Worse, past revisions may be
+ # referenced by tags far away in the future, requiring a deep
+ # history traversal on every calculation. Current code
+ # performs a single backward traversal, tracking moves within
+ # the tags directory (tag renaming) and recording a new tag
+ # everytime a project is copied from outside the tags
+ # directory. It also lists deleted tags, this behaviour may
+ # change in the future.
+ pendings = []
+ tagspath = self.tags
+ start = svn.ra.get_latest_revnum(self.ra)
+ stream = self._getlog([self.tags], start, self.startrev)
+ try:
+ for entry in stream:
+ origpaths, revnum, author, date, message = entry
+ copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e
+ in origpaths.iteritems() if e.copyfrom_path]
+ # Apply moves/copies from more specific to general
+ copies.sort(reverse=True)
+
+ srctagspath = tagspath
+ if copies and copies[-1][2] == tagspath:
+ # Track tags directory moves
+ srctagspath = copies.pop()[0]
+
+ for source, sourcerev, dest in copies:
+ if not dest.startswith(tagspath + '/'):
+ continue
+ for tag in pendings:
+ if tag[0].startswith(dest):
+ tagpath = source + tag[0][len(dest):]
+ tag[:2] = [tagpath, sourcerev]
+ break
+ else:
+ pendings.append([source, sourcerev, dest])
+
+ # Filter out tags with children coming from different
+ # parts of the repository like:
+ # /tags/tag.1 (from /trunk:10)
+ # /tags/tag.1/foo (from /branches/foo:12)
+ # Here/tags/tag.1 discarded as well as its children.
+ # It happens with tools like cvs2svn. Such tags cannot
+ # be represented in mercurial.
+ addeds = dict((p, e.copyfrom_path) for p, e
+ in origpaths.iteritems()
+ if e.action == 'A' and e.copyfrom_path)
+ badroots = set()
+ for destroot in addeds:
+ for source, sourcerev, dest in pendings:
+ if (not dest.startswith(destroot + '/')
+ or source.startswith(addeds[destroot] + '/')):
+ continue
+ badroots.add(destroot)
+ break
+
+ for badroot in badroots:
+ pendings = [p for p in pendings if p[2] != badroot
+ and not p[2].startswith(badroot + '/')]
+
+ # Tell tag renamings from tag creations
+ remainings = []
+ for source, sourcerev, dest in pendings:
+ tagname = dest.split('/')[-1]
+ if source.startswith(srctagspath):
+ remainings.append([source, sourcerev, tagname])
+ continue
+ if tagname in tags:
+ # Keep the latest tag value
+ continue
+ # From revision may be fake, get one with changes
+ try:
+ tagid = self.latest(source, sourcerev)
+ if tagid and tagname not in tags:
+ tags[tagname] = tagid
+ except SvnPathNotFound:
+ # It happens when we are following directories
+ # we assumed were copied with their parents
+ # but were really created in the tag
+ # directory.
+ pass
+ pendings = remainings
+ tagspath = srctagspath
+ finally:
+ stream.close()
+ return tags
+
+    def converted(self, rev, destrev):
+        """Record that svn revision rev was converted as destrev.
+
+        Appends a "destrev revnum" line to .svn/hg-shamap in the local
+        working copy; no-op when converting from a remote URL.
+        """
+        if not self.wc:
+            return
+        if self.convertfp is None:
+            # opened lazily, append mode, and kept open for the whole run
+            self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'),
+                                  'a')
+        self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev)))
+        self.convertfp.flush()
+
+    def revid(self, revnum, module=None):
+        """Build a revision id of the form svn:<uuid><module>@<revnum>."""
+        return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum)
+
+    def revnum(self, rev):
+        """Extract the integer revision number from a revision id."""
+        return int(rev.split('@')[-1])
+
+    def revsplit(self, rev):
+        """Split a revision id into (uuid, module, revnum).
+
+        Inverse of revid(): parses 'svn:<uuid>/<module>@<revnum>'.
+        module is '' or a path starting with '/'.
+        """
+        url, revnum = rev.rsplit('@', 1)
+        revnum = int(revnum)
+        parts = url.split('/', 1)
+        # drop the leading 'svn:' scheme prefix (4 characters)
+        uuid = parts.pop(0)[4:]
+        mod = ''
+        if parts:
+            mod = '/' + parts[0]
+        return uuid, mod, revnum
+
+ def latest(self, path, stop=0):
+ """Find the latest revid affecting path, up to stop. It may return
+ a revision in a different module, since a branch may be moved without
+ a change being reported. Return None if computed module does not
+ belong to rootmodule subtree.
+ """
+ if not path.startswith(self.rootmodule):
+ # Requests on foreign branches may be forbidden at server level
+ self.ui.debug('ignoring foreign branch %r\n' % path)
+ return None
+
+ if not stop:
+ stop = svn.ra.get_latest_revnum(self.ra)
+ try:
+ prevmodule = self.reparent('')
+ dirent = svn.ra.stat(self.ra, path.strip('/'), stop)
+ self.reparent(prevmodule)
+ except SubversionException:
+ dirent = None
+ if not dirent:
+ raise SvnPathNotFound(_('%s not found up to revision %d')
+ % (path, stop))
+
+ # stat() gives us the previous revision on this line of
+ # development, but it might be in *another module*. Fetch the
+ # log and detect renames down to the latest revision.
+ stream = self._getlog([path], stop, dirent.created_rev)
+ try:
+ for entry in stream:
+ paths, revnum, author, date, message = entry
+ if revnum <= dirent.created_rev:
+ break
+
+ for p in paths:
+ if not path.startswith(p) or not paths[p].copyfrom_path:
+ continue
+ newpath = paths[p].copyfrom_path + path[len(p):]
+ self.ui.debug("branch renamed from %s to %s at %d\n" %
+ (path, newpath, revnum))
+ path = newpath
+ break
+ finally:
+ stream.close()
+
+ if not path.startswith(self.rootmodule):
+ self.ui.debug('ignoring foreign branch %r\n' % path)
+ return None
+ return self.revid(dirent.created_rev, path)
+
+    def reparent(self, module):
+        """Reparent the svn transport and return the previous parent."""
+        if self.prevmodule == module:
+            # already parented there, avoid a server round-trip
+            return module
+        svnurl = self.baseurl + urllib.quote(module)
+        prevmodule = self.prevmodule
+        if prevmodule is None:
+            # first call: report the repository root as previous parent
+            prevmodule = ''
+        self.ui.debug("reparent to %s\n" % svnurl)
+        svn.ra.reparent(self.ra, svnurl)
+        self.prevmodule = module
+        return prevmodule
+
+ def expandpaths(self, rev, paths, parents):
+ changed, removed = set(), set()
+ copies = {}
+
+ new_module, revnum = self.revsplit(rev)[1:]
+ if new_module != self.module:
+ self.module = new_module
+ self.reparent(self.module)
+
+ for i, (path, ent) in enumerate(paths):
+ self.ui.progress(_('scanning paths'), i, item=path,
+ total=len(paths))
+ entrypath = self.getrelpath(path)
+
+ kind = self._checkpath(entrypath, revnum)
+ if kind == svn.core.svn_node_file:
+ changed.add(self.recode(entrypath))
+ if not ent.copyfrom_path or not parents:
+ continue
+ # Copy sources not in parent revisions cannot be
+ # represented, ignore their origin for now
+ pmodule, prevnum = self.revsplit(parents[0])[1:]
+ if ent.copyfrom_rev < prevnum:
+ continue
+ copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule)
+ if not copyfrom_path:
+ continue
+ self.ui.debug("copied to %s from %s@%s\n" %
+ (entrypath, copyfrom_path, ent.copyfrom_rev))
+ copies[self.recode(entrypath)] = self.recode(copyfrom_path)
+ elif kind == 0: # gone, but had better be a deleted *file*
+ self.ui.debug("gone from %s\n" % ent.copyfrom_rev)
+ pmodule, prevnum = self.revsplit(parents[0])[1:]
+ parentpath = pmodule + "/" + entrypath
+ fromkind = self._checkpath(entrypath, prevnum, pmodule)
+
+ if fromkind == svn.core.svn_node_file:
+ removed.add(self.recode(entrypath))
+ elif fromkind == svn.core.svn_node_dir:
+ oroot = parentpath.strip('/')
+ nroot = path.strip('/')
+ children = self._iterfiles(oroot, prevnum)
+ for childpath in children:
+ childpath = childpath.replace(oroot, nroot)
+ childpath = self.getrelpath("/" + childpath, pmodule)
+ if childpath:
+ removed.add(self.recode(childpath))
+ else:
+ self.ui.debug('unknown path in revision %d: %s\n' % \
+ (revnum, path))
+ elif kind == svn.core.svn_node_dir:
+ if ent.action == 'M':
+ # If the directory just had a prop change,
+ # then we shouldn't need to look for its children.
+ continue
+ if ent.action == 'R' and parents:
+ # If a directory is replacing a file, mark the previous
+ # file as deleted
+ pmodule, prevnum = self.revsplit(parents[0])[1:]
+ pkind = self._checkpath(entrypath, prevnum, pmodule)
+ if pkind == svn.core.svn_node_file:
+ removed.add(self.recode(entrypath))
+ elif pkind == svn.core.svn_node_dir:
+ # We do not know what files were kept or removed,
+ # mark them all as changed.
+ for childpath in self._iterfiles(pmodule, prevnum):
+ childpath = self.getrelpath("/" + childpath)
+ if childpath:
+ changed.add(self.recode(childpath))
+
+ for childpath in self._iterfiles(path, revnum):
+ childpath = self.getrelpath("/" + childpath)
+ if childpath:
+ changed.add(self.recode(childpath))
+
+ # Handle directory copies
+ if not ent.copyfrom_path or not parents:
+ continue
+ # Copy sources not in parent revisions cannot be
+ # represented, ignore their origin for now
+ pmodule, prevnum = self.revsplit(parents[0])[1:]
+ if ent.copyfrom_rev < prevnum:
+ continue
+ copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule)
+ if not copyfrompath:
+ continue
+ self.ui.debug("mark %s came from %s:%d\n"
+ % (path, copyfrompath, ent.copyfrom_rev))
+ children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev)
+ for childpath in children:
+ childpath = self.getrelpath("/" + childpath, pmodule)
+ if not childpath:
+ continue
+ copytopath = path + childpath[len(copyfrompath):]
+ copytopath = self.getrelpath(copytopath)
+ copies[self.recode(copytopath)] = self.recode(childpath)
+
+ self.ui.progress(_('scanning paths'), None)
+ changed.update(removed)
+ return (list(changed), removed, copies)
+
+ def _fetch_revisions(self, from_revnum, to_revnum):
+ if from_revnum < to_revnum:
+ from_revnum, to_revnum = to_revnum, from_revnum
+
+ self.child_cset = None
+
+ def parselogentry(orig_paths, revnum, author, date, message):
+ """Return the parsed commit object or None, and True if
+ the revision is a branch root.
+ """
+ self.ui.debug("parsing revision %d (%d changes)\n" %
+ (revnum, len(orig_paths)))
+
+ branched = False
+ rev = self.revid(revnum)
+ # branch log might return entries for a parent we already have
+
+ if rev in self.commits or revnum < to_revnum:
+ return None, branched
+
+ parents = []
+ # check whether this revision is the start of a branch or part
+ # of a branch renaming
+ orig_paths = sorted(orig_paths.iteritems())
+ root_paths = [(p, e) for p, e in orig_paths
+ if self.module.startswith(p)]
+ if root_paths:
+ path, ent = root_paths[-1]
+ if ent.copyfrom_path:
+ branched = True
+ newpath = ent.copyfrom_path + self.module[len(path):]
+ # ent.copyfrom_rev may not be the actual last revision
+ previd = self.latest(newpath, ent.copyfrom_rev)
+ if previd is not None:
+ prevmodule, prevnum = self.revsplit(previd)[1:]
+ if prevnum >= self.startrev:
+ parents = [previd]
+ self.ui.note(
+ _('found parent of branch %s at %d: %s\n') %
+ (self.module, prevnum, prevmodule))
+ else:
+ self.ui.debug("no copyfrom path, don't know what to do.\n")
+
+ paths = []
+ # filter out unrelated paths
+ for path, ent in orig_paths:
+ if self.getrelpath(path) is None:
+ continue
+ paths.append((path, ent))
+
+ # Example SVN datetime. Includes microseconds.
+ # ISO-8601 conformant
+ # '2007-01-04T17:35:00.902377Z'
+ date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"])
+
+ log = message and self.recode(message) or ''
+ author = author and self.recode(author) or ''
+ try:
+ branch = self.module.split("/")[-1]
+ if branch == 'trunk':
+ branch = ''
+ except IndexError:
+ branch = None
+
+ cset = commit(author=author,
+ date=util.datestr(date),
+ desc=log,
+ parents=parents,
+ branch=branch,
+ rev=rev)
+
+ self.commits[rev] = cset
+ # The parents list is *shared* among self.paths and the
+ # commit object. Both will be updated below.
+ self.paths[rev] = (paths, cset.parents)
+ if self.child_cset and not self.child_cset.parents:
+ self.child_cset.parents[:] = [rev]
+ self.child_cset = cset
+ return cset, branched
+
+ self.ui.note(_('fetching revision log for "%s" from %d to %d\n') %
+ (self.module, from_revnum, to_revnum))
+
+ try:
+ firstcset = None
+ lastonbranch = False
+ stream = self._getlog([self.module], from_revnum, to_revnum)
+ try:
+ for entry in stream:
+ paths, revnum, author, date, message = entry
+ if revnum < self.startrev:
+ lastonbranch = True
+ break
+ if not paths:
+ self.ui.debug('revision %d has no entries\n' % revnum)
+ # If we ever leave the loop on an empty
+ # revision, do not try to get a parent branch
+ lastonbranch = lastonbranch or revnum == 0
+ continue
+ cset, lastonbranch = parselogentry(paths, revnum, author,
+ date, message)
+ if cset:
+ firstcset = cset
+ if lastonbranch:
+ break
+ finally:
+ stream.close()
+
+ if not lastonbranch and firstcset and not firstcset.parents:
+ # The first revision of the sequence (the last fetched one)
+ # has invalid parents if not a branch root. Find the parent
+ # revision now, if any.
+ try:
+ firstrevnum = self.revnum(firstcset.rev)
+ if firstrevnum > 1:
+ latest = self.latest(self.module, firstrevnum - 1)
+ if latest:
+ firstcset.parents.append(latest)
+ except SvnPathNotFound:
+ pass
+ except SubversionException, (inst, num):
+ if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION:
+ raise util.Abort(_('svn: branch has no revision %s') % to_revnum)
+ raise
+
+    def getfile(self, file, rev):
+        """Return (data, mode) for file at rev.
+
+        mode is 'x' for executable, 'l' for symlink, '' otherwise.
+        Raises IOError when the file was removed in rev or does not
+        exist there; the caller treats that as a deletion.
+        """
+        # TODO: ra.get_file transmits the whole file instead of diffs.
+        if file in self.removed:
+            raise IOError()
+        mode = ''
+        try:
+            new_module, revnum = self.revsplit(rev)[1:]
+            if self.module != new_module:
+                self.module = new_module
+                self.reparent(self.module)
+            io = StringIO()
+            info = svn.ra.get_file(self.ra, file, revnum, io)
+            data = io.getvalue()
+            # ra.get_files() seems to keep a reference on the input buffer
+            # preventing collection. Release it explicitly.
+            io.close()
+            if isinstance(info, list):
+                info = info[-1]
+            # derive the hg mode flag from svn properties
+            mode = ("svn:executable" in info) and 'x' or ''
+            mode = ("svn:special" in info) and 'l' or mode
+        except SubversionException, e:
+            notfound = (svn.core.SVN_ERR_FS_NOT_FOUND,
+                        svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND)
+            if e.apr_err in notfound: # File not found
+                raise IOError()
+            raise
+        if mode == 'l':
+            # Subversion serializes a symlink as "link <target>"
+            link_prefix = "link "
+            if data.startswith(link_prefix):
+                data = data[len(link_prefix):]
+        return data, mode
+
+    def _iterfiles(self, path, revnum):
+        """Enumerate all files in path at revnum, recursively."""
+        path = path.strip('/')
+        pool = Pool()
+        rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/')
+        entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool)
+        # lazy generator of repository paths; directory entries filtered out
+        return ((path + '/' + p) for p, e in entries.iteritems()
+                if e.kind == svn.core.svn_node_file)
+
+    def getrelpath(self, path, module=None):
+        """Return path relative to module (default: self.module), or
+        None when path lies outside the tracked tree.
+        """
+        if module is None:
+            module = self.module
+        # Given the repository url of this wc, say
+        # "http://server/plone/CMFPlone/branches/Plone-2_0-branch"
+        # extract the "entry" portion (a relative path) from what
+        # svn log --xml says, ie
+        # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py"
+        # that is to say "tests/PloneTestCase.py"
+        if path.startswith(module):
+            relative = path.rstrip('/')[len(module):]
+            if relative.startswith('/'):
+                return relative[1:]
+            elif relative == '':
+                # path is the module itself
+                return relative
+
+        # The path is outside our tracked tree...
+        self.ui.debug('%r is not under %r, ignoring\n' % (path, module))
+        return None
+
+    def _checkpath(self, path, revnum, module=None):
+        """Return the svn node kind of path at revnum.
+
+        When module is given, path is resolved under that module after
+        temporarily reparenting the session to the repository root; the
+        previous parent is always restored.
+        """
+        if module is not None:
+            prevmodule = self.reparent('')
+            path = module + '/' + path
+        try:
+            # ra.check_path does not like leading slashes very much, it leads
+            # to PROPFIND subversion errors
+            return svn.ra.check_path(self.ra, path.strip('/'), revnum)
+        finally:
+            if module is not None:
+                self.reparent(prevmodule)
+
+    def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True,
+                strict_node_history=False):
+        """Return a stream of parsed svn log entries for paths between
+        revisions start and end.
+
+        The log is fetched by spawning a child 'hg debugsvnlog'
+        process; its stdout is wrapped by logstream().
+        """
+        # Normalize path names, svn >= 1.5 only wants paths relative to
+        # supplied URL
+        relpaths = []
+        for p in paths:
+            if not p.startswith('/'):
+                p = self.module + '/' + p
+            relpaths.append(p.strip('/'))
+        args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths,
+                strict_node_history]
+        # arguments are serialized and piped to the child on stdin
+        arg = encodeargs(args)
+        hgexe = util.hgexecutable()
+        cmd = '%s debugsvnlog' % util.shellquote(hgexe)
+        stdin, stdout = util.popen2(cmd)
+        stdin.write(arg)
+        try:
+            stdin.close()
+        except IOError:
+            raise util.Abort(_('Mercurial failed to run itself, check'
+                               ' hg executable is in PATH'))
+        return logstream(stdout)
+
+pre_revprop_change = '''#!/bin/sh
+
+REPOS="$1"
+REV="$2"
+USER="$3"
+PROPNAME="$4"
+ACTION="$5"
+
+if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi
+if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi
+if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi
+
+echo "Changing prohibited revision property" >&2
+exit 1
+'''
+
+class svn_sink(converter_sink, commandline):
+ commit_re = re.compile(r'Committed revision (\d+).', re.M)
+
+    def prerun(self):
+        # Run svn commands from inside the working copy.
+        if self.wc:
+            os.chdir(self.wc)
+
+    def postrun(self):
+        # Restore the directory we started from (saved in __init__).
+        if self.wc:
+            os.chdir(self.cwd)
+
+    def join(self, name):
+        """Return the path of name inside the working copy's .svn area."""
+        return os.path.join(self.wc, '.svn', name)
+
+    def revmapfile(self):
+        """Path of the source->sink revision map file."""
+        return self.join('hg-shamap')
+
+    def authorfile(self):
+        """Path of the author map file."""
+        return self.join('hg-authormap')
+
+ def __init__(self, ui, path):
+ converter_sink.__init__(self, ui, path)
+ commandline.__init__(self, ui, 'svn')
+ self.delete = []
+ self.setexec = []
+ self.delexec = []
+ self.copies = []
+ self.wc = None
+ self.cwd = os.getcwd()
+
+ path = os.path.realpath(path)
+
+ created = False
+ if os.path.isfile(os.path.join(path, '.svn', 'entries')):
+ self.wc = path
+ self.run0('update')
+ else:
+ wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc')
+
+ if os.path.isdir(os.path.dirname(path)):
+ if not os.path.exists(os.path.join(path, 'db', 'fs-type')):
+ ui.status(_('initializing svn repository %r\n') %
+ os.path.basename(path))
+ commandline(ui, 'svnadmin').run0('create', path)
+ created = path
+ path = util.normpath(path)
+ if not path.startswith('/'):
+ path = '/' + path
+ path = 'file://' + path
+
+ ui.status(_('initializing svn working copy %r\n')
+ % os.path.basename(wcpath))
+ self.run0('checkout', path, wcpath)
+
+ self.wc = wcpath
+ self.opener = util.opener(self.wc)
+ self.wopener = util.opener(self.wc)
+ self.childmap = mapfile(ui, self.join('hg-childmap'))
+ self.is_exec = util.checkexec(self.wc) and util.is_exec or None
+
+ if created:
+ hook = os.path.join(created, 'hooks', 'pre-revprop-change')
+ fp = open(hook, 'w')
+ fp.write(pre_revprop_change)
+ fp.close()
+ util.set_flags(hook, False, True)
+
+ xport = transport.SvnRaTransport(url=geturl(path))
+ self.uuid = svn.ra.get_uuid(xport.ra)
+
+    def wjoin(self, *names):
+        """Join names onto the working copy root."""
+        return os.path.join(self.wc, *names)
+
+    def putfile(self, filename, flags, data):
+        """Write data to filename in the working copy.
+
+        'l' in flags creates a symlink with data as its target.
+        Otherwise the file is written and its svn:executable property
+        is queued for update (setexec/delexec) to match 'x' in flags.
+        """
+        if 'l' in flags:
+            self.wopener.symlink(data, filename)
+        else:
+            try:
+                # replace a pre-existing symlink with a regular file
+                if os.path.islink(self.wjoin(filename)):
+                    os.unlink(filename)
+            except OSError:
+                pass
+            self.wopener(filename, 'w').write(data)
+
+            if self.is_exec:
+                was_exec = self.is_exec(self.wjoin(filename))
+            else:
+                # On filesystems not supporting execute-bit, there is no way
+                # to know if it is set but asking subversion. Setting it
+                # systematically is just as expensive and much simpler.
+                was_exec = 'x' not in flags
+
+            util.set_flags(self.wjoin(filename), False, 'x' in flags)
+            if was_exec:
+                if 'x' not in flags:
+                    self.delexec.append(filename)
+            else:
+                if 'x' in flags:
+                    self.setexec.append(filename)
+
+    def _copyfile(self, source, dest):
+        # SVN's copy command pukes if the destination file exists, but
+        # our copyfile method expects to record a copy that has
+        # already occurred. Cross the semantic gap.
+        wdest = self.wjoin(dest)
+        exists = os.path.lexists(wdest)
+        if exists:
+            # Park the existing destination in a temp name so 'svn copy'
+            # succeeds; it is restored over the copy result below.
+            fd, tempname = tempfile.mkstemp(
+                prefix='hg-copy-', dir=os.path.dirname(wdest))
+            os.close(fd)
+            os.unlink(tempname)
+            os.rename(wdest, tempname)
+        try:
+            self.run0('copy', source, dest)
+        finally:
+            if exists:
+                try:
+                    os.unlink(wdest)
+                except OSError:
+                    pass
+                os.rename(tempname, wdest)
+
+    def dirs_of(self, files):
+        """Return the set of directories containing or equal to files."""
+        dirs = set()
+        for f in files:
+            if os.path.isdir(self.wjoin(f)):
+                dirs.add(f)
+            # every ancestor directory of f, via the '/' positions
+            for i in strutil.rfindall(f, '/'):
+                dirs.add(f[:i])
+        return dirs
+
+    def add_dirs(self, files):
+        """'svn add' every ancestor directory of files not yet under
+        version control; return the list of directories added."""
+        add_dirs = [d for d in sorted(self.dirs_of(files))
+                    if not os.path.exists(self.wjoin(d, '.svn', 'entries'))]
+        if add_dirs:
+            self.xargs(add_dirs, 'add', non_recursive=True, quiet=True)
+        return add_dirs
+
+    def add_files(self, files):
+        """'svn add' the given files and return them unchanged."""
+        if files:
+            self.xargs(files, 'add', quiet=True)
+        return files
+
+ def tidy_dirs(self, names):
+ deleted = []
+ for d in sorted(self.dirs_of(names), reverse=True):
+ wd = self.wjoin(d)
+ if os.listdir(wd) == '.svn':
+ self.run0('delete', d)
+ deleted.append(d)
+ return deleted
+
+    def addchild(self, parent, child):
+        # Map a source revision id to the svn revision committed for it.
+        self.childmap[parent] = child
+
+    def revid(self, rev):
+        """Build the sink revision id for svn revision rev."""
+        return u"svn:%s@%s" % (self.uuid, rev)
+
+ def putcommit(self, files, copies, parents, commit, source, revmap):
+ # Apply changes to working copy
+ for f, v in files:
+ try:
+ data, mode = source.getfile(f, v)
+ except IOError:
+ self.delete.append(f)
+ else:
+ self.putfile(f, mode, data)
+ if f in copies:
+ self.copies.append([copies[f], f])
+ files = [f[0] for f in files]
+
+ for parent in parents:
+ try:
+ return self.revid(self.childmap[parent])
+ except KeyError:
+ pass
+ entries = set(self.delete)
+ files = frozenset(files)
+ entries.update(self.add_dirs(files.difference(entries)))
+ if self.copies:
+ for s, d in self.copies:
+ self._copyfile(s, d)
+ self.copies = []
+ if self.delete:
+ self.xargs(self.delete, 'delete')
+ self.delete = []
+ entries.update(self.add_files(files.difference(entries)))
+ entries.update(self.tidy_dirs(entries))
+ if self.delexec:
+ self.xargs(self.delexec, 'propdel', 'svn:executable')
+ self.delexec = []
+ if self.setexec:
+ self.xargs(self.setexec, 'propset', 'svn:executable', '*')
+ self.setexec = []
+
+ fd, messagefile = tempfile.mkstemp(prefix='hg-convert-')
+ fp = os.fdopen(fd, 'w')
+ fp.write(commit.desc)
+ fp.close()
+ try:
+ output = self.run0('commit',
+ username=util.shortuser(commit.author),
+ file=messagefile,
+ encoding='utf-8')
+ try:
+ rev = self.commit_re.search(output).group(1)
+ except AttributeError:
+ if not files:
+ return parents[0]
+ self.ui.warn(_('unexpected svn output:\n'))
+ self.ui.warn(output)
+ raise util.Abort(_('unable to cope with svn output'))
+ if commit.rev:
+ self.run('propset', 'hg:convert-rev', commit.rev,
+ revprop=True, revision=rev)
+ if commit.branch and commit.branch != 'default':
+ self.run('propset', 'hg:convert-branch', commit.branch,
+ revprop=True, revision=rev)
+ for parent in parents:
+ self.addchild(parent, rev)
+ return self.revid(rev)
+ finally:
+ os.unlink(messagefile)
+
+    def puttags(self, tags):
+        # Writing tags to Subversion is not implemented; warn and skip.
+        self.ui.warn(_('writing Subversion tags is not yet implemented\n'))
+        return None, None
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo
new file mode 100644
index 0000000..df675f8
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py
new file mode 100644
index 0000000..db68ede
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py
@@ -0,0 +1,128 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (C) 2007 Daniel Holth <dholth@fastmail.fm>
+# This is a stripped-down version of the original bzr-svn transport.py,
+# Copyright (C) 2006 Jelmer Vernooij <jelmer@samba.org>
+
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+from svn.core import SubversionException, Pool
+import svn.ra
+import svn.client
+import svn.core
+
+# Some older versions of the Python bindings need to be
+# explicitly initialized. But what we want to do probably
+# won't work worth a darn against those libraries anyway!
+svn.ra.initialize()
+
+svn_config = svn.core.svn_config_get_config(None)
+
+
+def _create_auth_baton(pool):
+    """Create a Subversion authentication baton.
+
+    Collects the standard file-based credential providers plus any
+    platform-specific ones available from the installed bindings.
+    """
+    import svn.client
+    # Give the client context baton a suite of authentication
+    # providers.
+    providers = [
+        svn.client.get_simple_provider(pool),
+        svn.client.get_username_provider(pool),
+        svn.client.get_ssl_client_cert_file_provider(pool),
+        svn.client.get_ssl_client_cert_pw_file_provider(pool),
+        svn.client.get_ssl_server_trust_file_provider(pool),
+    ]
+    # Platform-dependent authentication methods
+    getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider',
+                          None)
+    if getprovider:
+        # Available in svn >= 1.6
+        for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'):
+            for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'):
+                p = getprovider(name, type, pool)
+                if p:
+                    providers.append(p)
+    else:
+        # Older bindings: only the Windows simple provider may exist.
+        if hasattr(svn.client, 'get_windows_simple_provider'):
+            providers.append(svn.client.get_windows_simple_provider(pool))
+
+    return svn.core.svn_auth_open(providers, pool)
+
+class NotBranchError(SubversionException):
+    """Raised when a URL cannot be opened as a Subversion repository."""
+    pass
+
+class SvnRaTransport(object):
+    """
+    Open an ra connection to a Subversion repository.
+    """
+    def __init__(self, url="", ra=None):
+        self.pool = Pool()
+        self.svn_url = url
+        self.username = ''
+        self.password = ''
+
+        # Only Subversion 1.4 has reparent()
+        if ra is None or not hasattr(svn.ra, 'reparent'):
+            # Build a fresh client context and session.
+            self.client = svn.client.create_context(self.pool)
+            ab = _create_auth_baton(self.pool)
+            # NOTE(review): dead code -- username/password are always ''
+            # here; kept from the original bzr-svn transport this file
+            # was stripped down from.
+            if False:
+                svn.core.svn_auth_set_parameter(
+                    ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username)
+                svn.core.svn_auth_set_parameter(
+                    ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password)
+            self.client.auth_baton = ab
+            self.client.config = svn_config
+            try:
+                self.ra = svn.client.open_ra_session(
+                    self.svn_url.encode('utf8'),
+                    self.client, self.pool)
+            except SubversionException, (inst, num):
+                if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL,
+                           svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED,
+                           svn.core.SVN_ERR_BAD_URL):
+                    raise NotBranchError(url)
+                raise
+        else:
+            # Reuse the supplied session, repointing it at our URL.
+            self.ra = ra
+            svn.ra.reparent(self.ra, self.svn_url.encode('utf8'))
+
+    class Reporter(object):
+        # Thin wrapper around the svn.ra reporter2 callback pair.
+        def __init__(self, reporter_data):
+            self._reporter, self._baton = reporter_data
+
+        def set_path(self, path, revnum, start_empty, lock_token, pool=None):
+            svn.ra.reporter2_invoke_set_path(self._reporter, self._baton,
+                                             path, revnum, start_empty,
+                                             lock_token, pool)
+
+        def delete_path(self, path, pool=None):
+            svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton,
+                                                path, pool)
+
+        def link_path(self, path, url, revision, start_empty, lock_token,
+                      pool=None):
+            svn.ra.reporter2_invoke_link_path(self._reporter, self._baton,
+                                              path, url, revision, start_empty,
+                                              lock_token, pool)
+
+        def finish_report(self, pool=None):
+            svn.ra.reporter2_invoke_finish_report(self._reporter,
+                                                  self._baton, pool)
+
+        def abort_report(self, pool=None):
+            svn.ra.reporter2_invoke_abort_report(self._reporter,
+                                                 self._baton, pool)
+
+    def do_update(self, revnum, path, *args, **kwargs):
+        """Start an update report and return it wrapped in a Reporter."""
+        return self.Reporter(svn.ra.do_update(self.ra, revnum, path,
+                                              *args, **kwargs))
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo
new file mode 100644
index 0000000..ee1d3d1
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py
new file mode 100644
index 0000000..88294a7
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py
@@ -0,0 +1,272 @@
+"""automatically manage newlines in repository files
+
+This extension allows you to manage the type of line endings (CRLF or
+LF) that are used in the repository and in the local working
+directory. That way you can get CRLF line endings on Windows and LF on
+Unix/Mac, thereby letting everybody use their OS native line endings.
+
+The extension reads its configuration from a versioned ``.hgeol``
+configuration file every time you run an ``hg`` command. The
+``.hgeol`` file uses the same syntax as all other Mercurial
+configuration files. It uses two sections, ``[patterns]`` and
+``[repository]``.
+
+The ``[patterns]`` section specifies how line endings should be
+converted between the working copy and the repository. The format is
+specified by a file pattern. The first match is used, so put more
+specific patterns first. The available line endings are ``LF``,
+``CRLF``, and ``BIN``.
+
+Files with the declared format of ``CRLF`` or ``LF`` are always
+checked out and stored in the repository in that format and files
+declared to be binary (``BIN``) are left unchanged. Additionally,
+``native`` is an alias for checking out in the platform's default line
+ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on
+Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's
+default behaviour; it is only needed if you need to override a later,
+more general pattern.
+
+The optional ``[repository]`` section specifies the line endings to
+use for files stored in the repository. It has a single setting,
+``native``, which determines the storage line endings for files
+declared as ``native`` in the ``[patterns]`` section. It can be set to
+``LF`` or ``CRLF``. The default is ``LF``. For example, this means
+that on Windows, files configured as ``native`` (``CRLF`` by default)
+will be converted to ``LF`` when stored in the repository. Files
+declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section
+are always stored as-is in the repository.
+
+Example versioned ``.hgeol`` file::
+
+ [patterns]
+ **.py = native
+ **.vcproj = CRLF
+ **.txt = native
+ Makefile = LF
+ **.jpg = BIN
+
+ [repository]
+ native = LF
+
+.. note::
+ The rules will first apply when files are touched in the working
+ copy, e.g. by updating to null and back to tip to touch all files.
+
+The extension uses an optional ``[eol]`` section in your hgrc file
+(not the ``.hgeol`` file) for settings that control the overall
+behavior. There are two settings:
+
+- ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or
+ ``CRLF`` to override the default interpretation of ``native`` for
+ checkout. This can be used with :hg:`archive` on Unix, say, to
+ generate an archive where files have line endings for Windows.
+
+- ``eol.only-consistent`` (default True) can be set to False to make
+ the extension convert files with inconsistent EOLs. Inconsistent
+ means that there is both ``CRLF`` and ``LF`` present in the file.
+ Such files are normally not touched under the assumption that they
+ have mixed EOLs on purpose.
+
+The ``win32text.forbid*`` hooks provided by the win32text extension
+have been unified into a single hook named ``eol.hook``. The hook will
+lookup the expected line endings from the ``.hgeol`` file, which means
+you must migrate to a ``.hgeol`` file first before using the hook.
+
+See :hg:`help patterns` for more information about the glob patterns
+used.
+"""
+
+from mercurial.i18n import _
+from mercurial import util, config, extensions, match
+import re, os
+
# Matches a lone LF, i.e. a newline that is not preceded by a CR.
singlelf = re.compile('(^|[^\r])\n')
# Matches a single EOL: a LF, optionally preceded by a run of CRs (the
# CRs are dropped on substitution). Old Macintosh CR-only files are not
# supported, so a stray CR is an error.
eolre = re.compile('\r*\n')


def inconsistenteol(data):
    """Return a truthy value when data mixes CRLF and bare LF endings."""
    if '\r\n' not in data:
        return False
    return singlelf.search(data)
+
def tolf(s, params, ui, **kwargs):
    """Filter to convert to LF EOLs."""
    # Never touch binary data; optionally skip files that already mix
    # CRLF and LF (assumed to be mixed on purpose).
    convertible = not util.binary(s)
    if convertible and ui.configbool('eol', 'only-consistent', True):
        convertible = not inconsistenteol(s)
    if not convertible:
        return s
    return eolre.sub('\n', s)
+
def tocrlf(s, params, ui, **kwargs):
    """Filter to convert to CRLF EOLs."""
    # Never touch binary data; optionally skip files that already mix
    # CRLF and LF (assumed to be mixed on purpose).
    convertible = not util.binary(s)
    if convertible and ui.configbool('eol', 'only-consistent', True):
        convertible = not inconsistenteol(s)
    if not convertible:
        return s
    return eolre.sub('\r\n', s)
+
def isbinary(s, params):
    """Filter to do nothing with the file."""
    # Identity filter: content declared BIN passes through unmodified.
    return s
+
# Map filter names (registered with repo.adddatafilter() in reposetup and
# referenced from the [decode]/[encode] config sections) to functions.
filters = {
    'to-lf': tolf,
    'to-crlf': tocrlf,
    'is-binary': isbinary,
}
+
+
def hook(ui, repo, node, hooktype, **kwargs):
    """verify that files have expected EOLs"""
    # Collect every file touched from the first incoming changeset up to
    # the end of the changelog.
    candidates = set()
    for rev in xrange(repo[node].rev(), len(repo)):
        candidates.update(repo[rev].files())
    tipctx = repo['tip']
    for f in candidates:
        if f not in tipctx:
            continue
        for pattern, target in ui.configitems('encode'):
            if not match.match(repo.root, '', [pattern])(f):
                continue
            # Check the tip version of the file against the storage
            # EOL style implied by its encode filter.
            data = tipctx[f].data()
            if target == "to-lf" and "\r\n" in data:
                raise util.Abort(_("%s should not have CRLF line endings")
                                 % f)
            if target == "to-crlf" and singlelf.search(data):
                raise util.Abort(_("%s should not have LF line endings")
                                 % f)
+
+
def preupdate(ui, repo, hooktype, parent1, parent2):
    """pre-update hook: reload ``.hgeol`` as of the first target parent.

    Always returns False so the update is allowed to proceed.
    """
    # Refresh the decode/encode configuration before files are written.
    repo.readhgeol(parent1)
    return False
+
def uisetup(ui):
    # Register the preupdate hook so .hgeol is (re)read before every
    # working-directory update.
    ui.setconfig('hooks', 'preupdate.eol', preupdate)
+
def extsetup(ui):
    """Refuse to load alongside the win32text extension."""
    try:
        extensions.find('win32text')
    except KeyError:
        # win32text is not loaded: nothing to do.
        return
    raise util.Abort(_("the eol extension is incompatible with the "
                       "win32text extension"))
+
+
def reposetup(ui, repo):
    """Hook a local repository so the versioned .hgeol file drives EOL
    conversion.

    Registers the to-lf/to-crlf/is-binary data filters, forces
    ``patch.eol=auto``, and replaces ``repo.__class__`` with a subclass
    whose readhgeol() parses .hgeol into decode/encode rules and whose
    commitctx() refuses text files with inconsistent EOLs.
    """
    uisetup(repo.ui)
    #print "reposetup for", repo.root

    if not repo.local():
        return
    for name, fn in filters.iteritems():
        repo.adddatafilter(name, fn)

    ui.setconfig('patch', 'eol', 'auto')

    class eolrepo(repo.__class__):

        # Filters applied at checkout (decode) and commit (encode) time
        # per .hgeol style; the NATIVE entries are filled in by
        # readhgeol() from the eol.native / [repository] settings.
        _decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}
        _encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'}

        def readhgeol(self, node=None, data=None):
            """Parse .hgeol (from the working copy when node is None,
            else from that changeset) and install the resulting
            decode/encode configuration.

            Returns a matcher for the files whose EOL consistency we
            care about, or None when no .hgeol data is available.
            """
            if data is None:
                try:
                    if node is None:
                        data = self.wfile('.hgeol').read()
                    else:
                        data = self[node]['.hgeol'].data()
                except (IOError, LookupError):
                    return None

            # eol.native (default: os.linesep) decides what "native"
            # means at checkout time.
            if self.ui.config('eol', 'native', os.linesep) in ('LF', '\n'):
                self._decode['NATIVE'] = 'to-lf'
            else:
                self._decode['NATIVE'] = 'to-crlf'

            eol = config.config()
            # Our files should not be touched. The pattern must be
            # inserted first to override a '** = native' pattern.
            eol.set('patterns', '.hg*', 'BIN')
            # We can then parse the user's patterns.
            eol.parse('.hgeol', data)

            # [repository] native = ... sets the storage EOL style.
            if eol.get('repository', 'native') == 'CRLF':
                self._encode['NATIVE'] = 'to-crlf'
            else:
                self._encode['NATIVE'] = 'to-lf'

            for pattern, style in eol.items('patterns'):
                key = style.upper()
                try:
                    self.ui.setconfig('decode', pattern, self._decode[key])
                    self.ui.setconfig('encode', pattern, self._encode[key])
                except KeyError:
                    self.ui.warn(_("ignoring unknown EOL style '%s' from %s\n")
                                 % (style, eol.source('patterns', pattern)))

            include = []
            exclude = []
            for pattern, style in eol.items('patterns'):
                key = style.upper()
                if key == 'BIN':
                    exclude.append(pattern)
                else:
                    include.append(pattern)

            # This will match the files for which we need to care
            # about inconsistent newlines.
            return match.match(self.root, '', [], include, exclude)

        def _hgcleardirstate(self):
            # Recompute the EOL matcher and, when .hgeol changed since
            # the last run, force a lookup of every tracked file.
            self._eolfile = self.readhgeol() or self.readhgeol('tip')

            if not self._eolfile:
                self._eolfile = util.never
                return

            try:
                cachemtime = os.path.getmtime(self.join("eol.cache"))
            except OSError:
                cachemtime = 0

            try:
                eolmtime = os.path.getmtime(self.wjoin(".hgeol"))
            except OSError:
                eolmtime = 0

            if eolmtime > cachemtime:
                ui.debug("eol: detected change in .hgeol\n")
                # TODO: we could introduce a method for this in dirstate.
                wlock = None
                try:
                    wlock = self.wlock()
                    # Reset size/mtime so every entry is re-checked.
                    for f, e in self.dirstate._map.iteritems():
                        self.dirstate._map[f] = (e[0], e[1], -1, 0)
                    self.dirstate._dirty = True
                    # Touch the cache to update mtime. TODO: are we sure
                    # this is always enough to update the mtime, or
                    # should we write a bit to the file?
                    self.opener("eol.cache", "w").close()
                finally:
                    if wlock is not None:
                        wlock.release()

        def commitctx(self, ctx, error=False):
            # Refuse to commit text files that mix CRLF and bare LF.
            for f in sorted(ctx.added() + ctx.modified()):
                if not self._eolfile(f):
                    continue
                data = ctx[f].data()
                if util.binary(data):
                    # We should not abort here, since the user should
                    # be able to say "** = native" to automatically
                    # have all non-binary files taken care of.
                    continue
                if inconsistenteol(data):
                    # Interpolate after translation: the original code
                    # passed the already-interpolated string to _(),
                    # which can never match a message catalog entry.
                    raise util.Abort(_("inconsistent newline style "
                                       "in %s\n") % f)
            return super(eolrepo, self).commitctx(ctx, error)
    repo.__class__ = eolrepo
    repo._hgcleardirstate()
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo
new file mode 100644
index 0000000..cd15b4f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py
new file mode 100644
index 0000000..5cf9f03
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py
@@ -0,0 +1,325 @@
+# extdiff.py - external diff program support for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''command to allow external programs to compare revisions
+
+The extdiff Mercurial extension allows you to use external programs
+to compare revisions, or revision with working directory. The external
+diff programs are called with a configurable set of options and two
+non-option arguments: paths to directories containing snapshots of
+files to compare.
+
+The extdiff extension also allows you to configure new diff commands, so
+you do not need to type :hg:`extdiff -p kdiff3` always. ::
+
+ [extdiff]
+ # add new command that runs GNU diff(1) in 'context diff' mode
+ cdiff = gdiff -Nprc5
+ ## or the old way:
+ #cmd.cdiff = gdiff
+ #opts.cdiff = -Nprc5
+
+ # add new command called vdiff, runs kdiff3
+ vdiff = kdiff3
+
+ # add new command called meld, runs meld (no need to name twice)
+ meld =
+
+ # add new command called vimdiff, runs gvimdiff with DirDiff plugin
+ # (see http://www.vim.org/scripts/script.php?script_id=102) Non
+ # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
+ # your .vimrc
+ vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)'
+
+Tool arguments can include variables that are expanded at runtime::
+
+ $parent1, $plabel1 - filename, descriptive label of first parent
+ $child, $clabel - filename, descriptive label of child revision
+ $parent2, $plabel2 - filename, descriptive label of second parent
+ $parent is an alias for $parent1.
+
+The extdiff extension will look in your [diff-tools] and [merge-tools]
+sections for diff tool arguments, when none are specified in [extdiff].
+
+::
+
+ [extdiff]
+ kdiff3 =
+
+ [diff-tools]
+ kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
+
+You can use -I/-X and list of file or directory names like normal
+:hg:`diff` command. The extdiff extension makes snapshots of only
+needed files, so running the external diff program will actually be
+pretty fast (at least faster than having to compare the entire tree).
+'''
+
+from mercurial.i18n import _
+from mercurial.node import short, nullid
+from mercurial import cmdutil, util, commands, encoding
+import os, shlex, shutil, tempfile, re
+
def snapshot(ui, repo, files, node, tmproot):
    '''snapshot files as of some revision
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.

    Returns (dirname, fns_and_mtime) where dirname is the snapshot
    directory created under tmproot and fns_and_mtime lists
    (snapshot_path, working_path, mtime) triples for working-directory
    snapshots (node is None), so callers can detect in-tool edits.'''
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    if node is not None:
        # Make the directory name identify the revision being shown.
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    if node is not None:
        ui.note(_('making snapshot of %d files from rev %s\n') %
                (len(files), short(node)))
    else:
        ui.note(_('making snapshot of %d files from working directory\n') %
                (len(files)))
    wopener = util.opener(base)
    fns_and_mtime = []
    ctx = repo[node]
    for fn in files:
        wfn = util.pconvert(fn)
        if not wfn in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        ui.note('  %s\n' % wfn)
        dest = os.path.join(base, wfn)
        fctx = ctx[wfn]
        # Run the data through the repo's decode filters (EOL etc.)
        # so the snapshot looks like a checkout.
        data = repo.wwritedata(wfn, fctx.data())
        if 'l' in fctx.flags():
            wopener.symlink(data, wfn)
        else:
            wopener(wfn, 'w').write(data)
            if 'x' in fctx.flags():
                util.set_flags(dest, False, True)
        if node is None:
            # Remember the mtime so dodiff() can copy back files the
            # diff tool modified.
            fns_and_mtime.append((dest, repo.wjoin(fn),
                                  os.path.getmtime(dest)))
    return dirname, fns_and_mtime
+
def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
    '''Do the actual diff:

    - copy to a temp structure if diffing 2 internal revisions
    - copy to a temp structure if diffing working revision with
      another one and more than 1 file is changed
    - just invoke the diff for a single file in the working dir
    '''

    revs = opts.get('rev')
    change = opts.get('change')
    args = ' '.join(diffopts)
    # 3-way mode is enabled by the tool arguments mentioning $parent2.
    do3way = '$parent2' in args

    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        # --change: compare a revision against its parent(s).
        node2 = repo.lookup(change)
        node1a, node1b = repo.changelog.parents(node2)
    else:
        node1a, node2 = cmdutil.revpair(repo, revs)
        if not revs:
            # Working dir vs its parents: second parent may exist.
            node1b = repo.dirstate.parents()[1]
        else:
            node1b = nullid

    # Disable 3-way merge if there is only one parent
    if do3way:
        if node1b == nullid:
            do3way = False

    matcher = cmdutil.match(repo, pats, opts)
    mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
    if do3way:
        mod_b, add_b, rem_b = map(set,
                                  repo.status(node1b, node2, matcher)[:3])
    else:
        mod_b, add_b, rem_b = set(), set(), set()
    modadd = mod_a | add_a | mod_b | add_b
    common = modadd | rem_a | rem_b
    if not common:
        # Nothing differs: nothing to show.
        return 0

    tmproot = tempfile.mkdtemp(prefix='extdiff.')
    try:
        # Always make a copy of node1a (and node1b, if applicable)
        dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
        dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
        rev1a = '@%d' % repo[node1a].rev()
        if do3way:
            dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
            dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
            rev1b = '@%d' % repo[node1b].rev()
        else:
            dir1b = None
            rev1b = ''

        fns_and_mtime = []

        # If node2 is not the wc or there is >1 change, copy it
        dir2root = ''
        rev2 = ''
        if node2:
            dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
            rev2 = '@%d' % repo[node2].rev()
        elif len(common) > 1:
            # we only actually need to get the files to copy back to
            # the working dir in this case (because the other cases
            # are: diffing 2 revisions or single file -- in which case
            # the file is already directly passed to the diff tool).
            dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
        else:
            # This lets the diff tool open the changed file directly
            dir2 = ''
            dir2root = repo.root

        label1a = rev1a
        label1b = rev1b
        label2 = rev2

        # If only one change, diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        if len(common) == 1:
            common_file = util.localpath(common.pop())
            dir1a = os.path.join(dir1a, common_file)
            label1a = common_file + rev1a
            if not os.path.isfile(os.path.join(tmproot, dir1a)):
                dir1a = os.devnull
            if do3way:
                dir1b = os.path.join(dir1b, common_file)
                label1b = common_file + rev1b
                if not os.path.isfile(os.path.join(tmproot, dir1b)):
                    dir1b = os.devnull
            dir2 = os.path.join(dir2root, dir2, common_file)
            label2 = common_file + rev2

        # Function to quote file/dir names in the argument string.
        # When not operating in 3-way mode, an empty string is
        # returned for parent2
        replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
                       plabel1=label1a, plabel2=label1b,
                       clabel=label2, child=dir2)
        def quote(match):
            key = match.group()[1:]
            if not do3way and key == 'parent2':
                return ''
            return util.shellquote(replace[key])

        # Match parent2 first, so 'parent1?' will match both parent1 and parent
        regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel)'
        if not do3way and not re.search(regex, args):
            # No placeholders given: append the default pair.
            args += ' $parent1 $child'
        args = re.sub(regex, quote, args)
        cmdline = util.shellquote(diffcmd) + ' ' + args

        ui.debug('running %r in %s\n' % (cmdline, tmproot))
        util.system(cmdline, cwd=tmproot)

        # Copy back any working-dir snapshot file the tool modified.
        for copy_fn, working_fn, mtime in fns_and_mtime:
            if os.path.getmtime(copy_fn) != mtime:
                ui.debug('file changed while diffing. '
                         'Overwriting: %s (src: %s)\n'
                         % (working_fn, copy_fn))
                util.copyfile(copy_fn, working_fn)

        return 1
    finally:
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)
+
def extdiff(ui, repo, *pats, **opts):
    '''use external program to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    an external program. The default program used is diff, with
    default options "-Npru".

    To select a different program, use the -p/--program option. The
    program will be passed the names of two directories to compare. To
    pass additional options to the program, use -o/--option. These
    will be passed before the names of the directories to compare.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.'''
    program = opts.get('program')
    option = opts.get('option')
    if program:
        # An explicit -p program keeps whatever -o options were given.
        return dodiff(ui, repo, program, option, pats, opts)
    # Fall back to plain diff(1) with its default options.
    return dodiff(ui, repo, 'diff', option or ['-Npru'], pats, opts)
+
+cmdtable = {
+ "extdiff":
+ (extdiff,
+ [('p', 'program', '',
+ _('comparison program to run'), _('CMD')),
+ ('o', 'option', [],
+ _('pass option to comparison program'), _('OPT')),
+ ('r', 'rev', [],
+ _('revision'), _('REV')),
+ ('c', 'change', '',
+ _('change made by revision'), _('REV')),
+ ] + commands.walkopts,
+ _('hg extdiff [OPT]... [FILE]...')),
+ }
+
def uisetup(ui):
    # Turn every [extdiff] entry into a new command. Three spellings
    # are accepted: "cmd.foo = path" (+ optional "opts.foo = ..."),
    # and the short form "foo = path opts" or bare "foo =".
    for cmd, path in ui.configitems('extdiff'):
        if cmd.startswith('cmd.'):
            cmd = cmd[4:]
            if not path:
                path = cmd
            diffopts = ui.config('extdiff', 'opts.' + cmd, '')
            diffopts = diffopts and [diffopts] or []
        elif cmd.startswith('opts.'):
            # Handled together with the matching cmd. entry above.
            continue
        else:
            # command = path opts
            if path:
                diffopts = shlex.split(path)
                path = diffopts.pop(0)
            else:
                path, diffopts = cmd, []
        # look for diff arguments in [diff-tools] then [merge-tools]
        if diffopts == []:
            args = ui.config('diff-tools', cmd+'.diffargs') or \
                   ui.config('merge-tools', cmd+'.diffargs')
            if args:
                diffopts = shlex.split(args)
        def save(cmd, path, diffopts):
            '''use closure to save diff command to use'''
            def mydiff(ui, repo, *pats, **opts):
                return dodiff(ui, repo, path, diffopts + opts['option'],
                              pats, opts)
            doc = _('''\
use %(path)s to diff repository (or selected files)

    Show differences between revisions for the specified files, using
    the %(path)s program.

    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.\
''') % dict(path=util.uirepr(path))

            # We must translate the docstring right away since it is
            # used as a format string. The string will unfortunately
            # be translated again in commands.helpcmd and this will
            # fail when the docstring contains non-ASCII characters.
            # Decoding the string to a Unicode string here (using the
            # right encoding) prevents that.
            mydiff.__doc__ = doc.decode(encoding.encoding)
            return mydiff
        # Reuse extdiff's option list minus -p/--program.
        cmdtable[cmd] = (save(cmd, path, diffopts),
                         cmdtable['extdiff'][1][1:],
                         _('hg %s [OPTION]... [FILE]...') % cmd)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo
new file mode 100644
index 0000000..008d690
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py
new file mode 100644
index 0000000..b8e765f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py
@@ -0,0 +1,152 @@
+# fetch.py - pull and merge remote changes
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''pull, update and merge in one command'''
+
+from mercurial.i18n import _
+from mercurial.node import nullid, short
+from mercurial import commands, cmdutil, hg, util, url, error
+from mercurial.lock import release
+
def fetch(ui, repo, source='default', **opts):
    '''pull changes from a remote repository, merge new changes if needed.

    This finds all changes from the repository at the specified path
    or URL and adds them to the local repository.

    If the pulled changes add a new branch head, the head is
    automatically merged, and the result of the merge is committed.
    Otherwise, the working directory is updated to include the new
    changes.

    When a merge occurs, the newly pulled changes are assumed to be
    "authoritative". The head of the new changes is used as the first
    parent, with local changes as the second. To switch the merge
    order, use --switch-parent.

    See :hg:`help dates` for a list of formats valid for -d/--date.

    Returns 0 on success.
    '''

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    # Refuse to run unless the working dir is a clean checkout of the
    # current branch tip with no pending merge.
    parent, p2 = repo.dirstate.parents()
    branch = repo.dirstate.branch()
    branchnode = repo.branchtags().get(branch)
    if parent != branchnode:
        raise util.Abort(_('working dir not at branch tip '
                           '(use "hg update" to check out branch tip)'))

    if p2 != nullid:
        raise util.Abort(_('outstanding uncommitted merge'))

    wlock = lock = None
    try:
        wlock = repo.wlock()
        lock = repo.lock()
        mod, add, rem, del_ = repo.status()[:4]

        if mod or add or rem:
            raise util.Abort(_('outstanding uncommitted changes'))
        if del_:
            raise util.Abort(_('working directory is missing some files'))
        # Only topological heads count: drop heads that have children.
        bheads = repo.branchheads(branch)
        bheads = [head for head in bheads
                  if len(repo[head].children()) == 0]
        if len(bheads) > 1:
            raise util.Abort(_('multiple heads in this branch '
                               '(use "hg heads ." and "hg merge" to merge)'))

        other = hg.repository(hg.remoteui(repo, opts),
                              ui.expandpath(source))
        ui.status(_('pulling from %s\n') %
                  url.hidepassword(ui.expandpath(source)))
        revs = None
        if opts['rev']:
            try:
                revs = [other.lookup(rev) for rev in opts['rev']]
            except error.CapabilityError:
                err = _("Other repository doesn't support revision lookup, "
                        "so a rev cannot be specified.")
                raise util.Abort(err)

        # Are there any changes at all?
        modheads = repo.pull(other, heads=revs)
        if modheads == 0:
            return 0

        # Is this a simple fast-forward along the current branch?
        newheads = repo.branchheads(branch)
        newchildren = repo.changelog.nodesbetween([parent], newheads)[2]
        if len(newheads) == 1:
            if newchildren[0] != parent:
                return hg.clean(repo, newchildren[0])
            else:
                return 0

        # Are there more than one additional branch heads?
        newchildren = [n for n in newchildren if n != parent]
        newparent = parent
        if newchildren:
            # Advance to the first new child before merging.
            newparent = newchildren[0]
            hg.clean(repo, newparent)
        newheads = [n for n in newheads if n != newparent]
        if len(newheads) > 1:
            ui.status(_('not merging with %d other new branch heads '
                        '(use "hg heads ." and "hg merge" to merge them)\n') %
                      (len(newheads) - 1))
            return 1

        # Otherwise, let's merge.
        err = False
        if newheads:
            # By default, we consider the repository we're pulling
            # *from* as authoritative, so we merge our changes into
            # theirs.
            if opts['switch_parent']:
                firstparent, secondparent = newparent, newheads[0]
            else:
                firstparent, secondparent = newheads[0], newparent
            ui.status(_('updating to %d:%s\n') %
                      (repo.changelog.rev(firstparent),
                       short(firstparent)))
            hg.clean(repo, firstparent)
            ui.status(_('merging with %d:%s\n') %
                      (repo.changelog.rev(secondparent),
                       short(secondparent)))
            err = hg.merge(repo, secondparent, remind=False)

        if not err:
            # we don't translate commit messages
            message = (cmdutil.logmessage(opts) or
                       ('Automated merge with %s' %
                        url.removeauth(other.url())))
            editor = cmdutil.commiteditor
            if opts.get('force_editor') or opts.get('edit'):
                editor = cmdutil.commitforceeditor
            n = repo.commit(message, opts['user'], opts['date'],
                            editor=editor)
            ui.status(_('new changeset %d:%s merges remote changes '
                        'with local\n') % (repo.changelog.rev(n),
                                           short(n)))

        return err

    finally:
        release(lock, wlock)
+
# Command table wiring "hg fetch" to fetch() with its options.
cmdtable = {
    'fetch':
        (fetch,
         [('r', 'rev', [],
           _('a specific revision you would like to pull'), _('REV')),
          ('e', 'edit', None, _('edit commit message')),
          ('', 'force-editor', None,
           _('edit commit message (DEPRECATED)')),
          ('', 'switch-parent', None, _('switch parents when merging')),
         ] + commands.commitopts + commands.commitopts2 + commands.remoteopts,
         _('hg fetch [SOURCE]')),
}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo
new file mode 100644
index 0000000..c8529ef
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py
new file mode 100644
index 0000000..b13ec1e
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py
@@ -0,0 +1,288 @@
+# Copyright 2005, 2006 Benoit Boissinot <benoit.boissinot@ens-lyon.org>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''commands to sign and verify changesets'''
+
+import os, tempfile, binascii
+from mercurial import util, commands, match
+from mercurial import node as hgnode
+from mercurial.i18n import _
+
class gpg(object):
    """Thin wrapper around an external gpg executable.

    path is the gpg command; key, when given, is passed via
    --local-user when signing.
    """
    def __init__(self, path, key=None):
        self.path = path
        self.key = (key and " --local-user \"%s\"" % key) or ""

    def sign(self, data):
        # Produce a detached binary signature by piping data through gpg.
        gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key)
        return util.filter(data, gpgcmd)

    def verify(self, data, sig):
        """returns the good and bad signatures as (err, keys)"""
        sigfile = datafile = None
        try:
            # create temporary files
            fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig")
            fp = os.fdopen(fd, 'wb')
            fp.write(sig)
            fp.close()
            fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt")
            fp = os.fdopen(fd, 'wb')
            fp.write(data)
            fp.close()
            # --status-fd 1 makes gpg emit machine-readable [GNUPG:]
            # lines on stdout, parsed below.
            gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify "
                      "\"%s\" \"%s\"" % (self.path, sigfile, datafile))
            ret = util.filter("", gpgcmd)
        finally:
            # Best-effort cleanup of the temp files.
            for f in (sigfile, datafile):
                try:
                    if f:
                        os.unlink(f)
                # NOTE(review): bare except silently ignores any unlink
                # failure; deliberate best-effort, but OSError would be
                # the narrower catch.
                except:
                    pass
        keys = []
        key, fingerprint = None, None
        err = ""
        for l in ret.splitlines():
            # see DETAILS in the gnupg documentation
            # filter the logger output
            if not l.startswith("[GNUPG:]"):
                continue
            l = l[9:]
            if l.startswith("ERRSIG"):
                err = _("error while verifying signature")
                break
            elif l.startswith("VALIDSIG"):
                # fingerprint of the primary key
                fingerprint = l.split()[10]
            elif (l.startswith("GOODSIG") or
                  l.startswith("EXPSIG") or
                  l.startswith("EXPKEYSIG") or
                  l.startswith("BADSIG")):
                # A new signature record starts: flush the previous one.
                if key is not None:
                    keys.append(key + [fingerprint])
                key = l.split(" ", 2)
                fingerprint = None
        if err:
            return err, []
        if key is not None:
            keys.append(key + [fingerprint])
        return err, keys
+
def newgpg(ui, **opts):
    """create a new gpg instance"""
    # Command comes from [gpg] cmd; the key falls back from the -k
    # option to [gpg] key.
    path = ui.config("gpg", "cmd", "gpg")
    key = opts.get('key') or ui.config("gpg", "key", None)
    return gpg(path, key)
+
def sigwalk(repo):
    """
    walk over every sigs, yields a couple
    ((node, version, sig), (filename, linenumber))
    """
    def parsefile(fileiter, context):
        # Yield (fields, (context, lineno)) for every non-empty line.
        ln = 1
        for l in fileiter:
            if not l:
                continue
            yield (l.split(" ", 2), (context, ln))
            ln += 1

    # read the heads
    fl = repo.file(".hgsigs")
    for r in reversed(fl.heads()):
        fn = ".hgsigs|%s" % hgnode.short(r)
        for item in parsefile(fl.read(r).splitlines(), fn):
            yield item
    try:
        # read local signatures
        fn = "localsigs"
        for item in parsefile(repo.opener(fn), fn):
            yield item
    except IOError:
        # No localsigs file: nothing local to report.
        pass
+
def getkeys(ui, repo, mygpg, sigdata, context):
    """get the keys who signed a data

    Returns a list of (keyid, user, fingerprint) tuples for valid
    signatures, or None when verification failed outright.
    """
    fn, ln = context
    node, version, sig = sigdata
    prefix = "%s:%d" % (fn, ln)
    node = hgnode.bin(node)

    data = node2txt(repo, node, version)
    sig = binascii.a2b_base64(sig)
    err, keys = mygpg.verify(data, sig)
    if err:
        ui.warn("%s:%d %s\n" % (fn, ln, err))
        return None

    validkeys = []
    # warn for expired key and/or sigs
    for key in keys:
        if key[0] == "BADSIG":
            ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2]))
            continue
        if key[0] == "EXPSIG":
            ui.write(_("%s Note: Signature has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        elif key[0] == "EXPKEYSIG":
            ui.write(_("%s Note: This key has expired"
                       " (signed by: \"%s\")\n") % (prefix, key[2]))
        validkeys.append((key[1], key[2], key[3]))
    return validkeys
+
def sigs(ui, repo):
    """list signed changesets"""
    mygpg = newgpg(ui)
    # Map changelog rev -> list of signing keys.
    revs = {}

    for data, context in sigwalk(repo):
        node, version, sig = data
        fn, ln = context
        try:
            n = repo.lookup(node)
        except KeyError:
            ui.warn(_("%s:%d node does not exist\n") % (fn, ln))
            continue
        r = repo.changelog.rev(n)
        keys = getkeys(ui, repo, mygpg, data, context)
        if not keys:
            continue
        revs.setdefault(r, [])
        revs[r].extend(keys)
    # Newest revisions first.
    for rev in sorted(revs, reverse=True):
        for k in revs[rev]:
            r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev)))
            ui.write("%-30s %s\n" % (keystr(ui, k), r))
+
def check(ui, repo, rev):
    """verify all the signatures there may be for a particular revision"""
    mygpg = newgpg(ui)
    rev = repo.lookup(rev)
    hexrev = hgnode.hex(rev)
    keys = []

    # Gather the valid keys from every signature matching this revision.
    for data, context in sigwalk(repo):
        node, version, sig = data
        if node == hexrev:
            k = getkeys(ui, repo, mygpg, data, context)
            if k:
                keys.extend(k)

    if not keys:
        ui.write(_("No valid signature for %s\n") % hgnode.short(rev))
        return

    # print summary
    ui.write("%s is signed by:\n" % hgnode.short(rev))
    for key in keys:
        ui.write(" %s\n" % keystr(ui, key))
+
def keystr(ui, key):
    """associate a string to a key (username, comment)"""
    keyid, user, fingerprint = key
    # An optional human-readable comment may be configured per
    # fingerprint in the [gpg] section.
    comment = ui.config("gpg", fingerprint, None)
    return "%s (%s)" % (user, comment) if comment else user
+
def sign(ui, repo, *revs, **opts):
    """add a signature for the current or given revision

    If no revision is given, the parent of the working directory is used,
    or tip if no revision is checked out.

    See :hg:`help dates` for a list of formats valid for -d/--date.
    """

    mygpg = newgpg(ui, **opts)
    # Only signature format version "0" exists (see node2txt).
    sigver = "0"
    sigmessage = ""

    date = opts.get('date')
    if date:
        opts['date'] = util.parsedate(date)

    if revs:
        nodes = [repo.lookup(n) for n in revs]
    else:
        nodes = [node for node in repo.dirstate.parents()
                 if node != hgnode.nullid]
        if len(nodes) > 1:
            raise util.Abort(_('uncommitted merge - please provide a '
                               'specific revision'))
        if not nodes:
            nodes = [repo.changelog.tip()]

    for n in nodes:
        hexnode = hgnode.hex(n)
        ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n),
                                         hgnode.short(n)))
        # build data
        data = node2txt(repo, n, sigver)
        sig = mygpg.sign(data)
        if not sig:
            raise util.Abort(_("error while signing"))
        # Store the signature base64-encoded on a single .hgsigs line:
        # "<hexnode> <version> <base64sig>".
        sig = binascii.b2a_base64(sig)
        sig = sig.replace("\n", "")
        sigmessage += "%s %s %s\n" % (hexnode, sigver, sig)

    # write it
    if opts['local']:
        # --local: keep the signature out of history entirely.
        repo.opener("localsigs", "ab").write(sigmessage)
        return

    msigs = match.exact(repo.root, '', ['.hgsigs'])
    s = repo.status(match=msigs, unknown=True, ignored=True)[:6]
    if util.any(s) and not opts["force"]:
        raise util.Abort(_("working copy of .hgsigs is changed "
                           "(please commit .hgsigs manually "
                           "or use --force)"))

    repo.wfile(".hgsigs", "ab").write(sigmessage)

    if '.hgsigs' not in repo.dirstate:
        repo[None].add([".hgsigs"])

    if opts["no_commit"]:
        return

    message = opts['message']
    if not message:
        # we don't translate commit messages
        message = "\n".join(["Added signature for changeset %s"
                             % hgnode.short(n)
                             for n in nodes])
    try:
        repo.commit(message, opts['user'], opts['date'], match=msigs)
    except ValueError, inst:
        raise util.Abort(str(inst))
+
def node2txt(repo, node, ver):
    """map a manifest into some text"""
    # Version "0" is the only known signature format: the hex node
    # followed by a newline.
    if ver != "0":
        raise util.Abort(_("unknown signature version"))
    return "%s\n" % hgnode.hex(node)
+
+cmdtable = {
+ "sign":
+ (sign,
+ [('l', 'local', None, _('make the signature local')),
+ ('f', 'force', None, _('sign even if the sigfile is modified')),
+ ('', 'no-commit', None, _('do not commit the sigfile after signing')),
+ ('k', 'key', '',
+ _('the key id to sign with'), _('ID')),
+ ('m', 'message', '',
+ _('commit message'), _('TEXT')),
+ ] + commands.commitopts2,
+ _('hg sign [OPTION]... [REVISION]...')),
+ "sigcheck": (check, [], _('hg sigcheck REVISION')),
+ "sigs": (sigs, [], _('hg sigs')),
+}
+
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo
new file mode 100644
index 0000000..3d5d415
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py
new file mode 100644
index 0000000..a8eb805
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py
@@ -0,0 +1,337 @@
+# ASCII graph log extension for Mercurial
+#
+# Copyright 2007 Joel Rosdahl <joel@rosdahl.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''command to view revision graphs from a shell
+
+This extension adds a --graph option to the incoming, outgoing and log
+commands. When this options is given, an ASCII representation of the
+revision graph is also shown.
+'''
+
+import os
+from mercurial.cmdutil import revrange, show_changeset
+from mercurial.commands import templateopts
+from mercurial.i18n import _
+from mercurial.node import nullrev
+from mercurial import cmdutil, commands, extensions
+from mercurial import hg, util, graphmod
+
+ASCIIDATA = 'ASC'
+
+def asciiedges(seen, rev, parents):
+ """adds edge info to changelog DAG walk suitable for ascii()"""
+ if rev not in seen:
+ seen.append(rev)
+ nodeidx = seen.index(rev)
+
+ knownparents = []
+ newparents = []
+ for parent in parents:
+ if parent in seen:
+ knownparents.append(parent)
+ else:
+ newparents.append(parent)
+
+ ncols = len(seen)
+ seen[nodeidx:nodeidx + 1] = newparents
+ edges = [(nodeidx, seen.index(p)) for p in knownparents]
+
+ if len(newparents) > 0:
+ edges.append((nodeidx, nodeidx))
+ if len(newparents) > 1:
+ edges.append((nodeidx, nodeidx + 1))
+
+ nmorecols = len(seen) - ncols
+ return nodeidx, edges, ncols, nmorecols
+
+def fix_long_right_edges(edges):
+ for (i, (start, end)) in enumerate(edges):
+ if end > start:
+ edges[i] = (start, end + 1)
+
+def get_nodeline_edges_tail(
+ node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail):
+ if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0:
+ # Still going in the same non-vertical direction.
+ if n_columns_diff == -1:
+ start = max(node_index + 1, p_node_index)
+ tail = ["|", " "] * (start - node_index - 1)
+ tail.extend(["/", " "] * (n_columns - start))
+ return tail
+ else:
+ return ["\\", " "] * (n_columns - node_index - 1)
+ else:
+ return ["|", " "] * (n_columns - node_index - 1)
+
+def draw_edges(edges, nodeline, interline):
+ for (start, end) in edges:
+ if start == end + 1:
+ interline[2 * end + 1] = "/"
+ elif start == end - 1:
+ interline[2 * start + 1] = "\\"
+ elif start == end:
+ interline[2 * start] = "|"
+ else:
+ nodeline[2 * end] = "+"
+ if start > end:
+ (start, end) = (end, start)
+ for i in range(2 * start + 1, 2 * end):
+ if nodeline[i] != "+":
+ nodeline[i] = "-"
+
+def get_padding_line(ni, n_columns, edges):
+ line = []
+ line.extend(["|", " "] * ni)
+ if (ni, ni - 1) in edges or (ni, ni) in edges:
+ # (ni, ni - 1) (ni, ni)
+ # | | | | | | | |
+ # +---o | | o---+
+ # | | c | | c | |
+ # | |/ / | |/ /
+ # | | | | | |
+ c = "|"
+ else:
+ c = " "
+ line.extend([c, " "])
+ line.extend(["|", " "] * (n_columns - ni - 1))
+ return line
+
+def asciistate():
+ """returns the initial value for the "state" argument to ascii()"""
+ return [0, 0]
+
+def ascii(ui, state, type, char, text, coldata):
+ """prints an ASCII graph of the DAG
+
+ takes the following arguments (one call per node in the graph):
+
+ - ui to write to
+ - Somewhere to keep the needed state in (init to asciistate())
+ - Column of the current node in the set of ongoing edges.
+ - Type indicator of node data == ASCIIDATA.
+ - Payload: (char, lines):
+ - Character to use as node's symbol.
+ - List of lines to display as the node's text.
+ - Edges; a list of (col, next_col) indicating the edges between
+ the current node and its parents.
+ - Number of columns (ongoing edges) in the current revision.
+ - The difference between the number of columns (ongoing edges)
+ in the next revision and the number of columns (ongoing edges)
+ in the current revision. That is: -1 means one column removed;
+ 0 means no columns added or removed; 1 means one column added.
+ """
+
+ idx, edges, ncols, coldiff = coldata
+ assert -2 < coldiff < 2
+ if coldiff == -1:
+ # Transform
+ #
+ # | | | | | |
+ # o | | into o---+
+ # |X / |/ /
+ # | | | |
+ fix_long_right_edges(edges)
+
+ # add_padding_line says whether to rewrite
+ #
+ # | | | | | | | |
+ # | o---+ into | o---+
+ # | / / | | | # <--- padding line
+ # o | | | / /
+ # o | |
+ add_padding_line = (len(text) > 2 and coldiff == -1 and
+ [x for (x, y) in edges if x + 1 < y])
+
+ # fix_nodeline_tail says whether to rewrite
+ #
+ # | | o | | | | o | |
+ # | | |/ / | | |/ /
+ # | o | | into | o / / # <--- fixed nodeline tail
+ # | |/ / | |/ /
+ # o | | o | |
+ fix_nodeline_tail = len(text) <= 2 and not add_padding_line
+
+ # nodeline is the line containing the node character (typically o)
+ nodeline = ["|", " "] * idx
+ nodeline.extend([char, " "])
+
+ nodeline.extend(
+ get_nodeline_edges_tail(idx, state[1], ncols, coldiff,
+ state[0], fix_nodeline_tail))
+
+ # shift_interline is the line containing the non-vertical
+ # edges between this entry and the next
+ shift_interline = ["|", " "] * idx
+ if coldiff == -1:
+ n_spaces = 1
+ edge_ch = "/"
+ elif coldiff == 0:
+ n_spaces = 2
+ edge_ch = "|"
+ else:
+ n_spaces = 3
+ edge_ch = "\\"
+ shift_interline.extend(n_spaces * [" "])
+ shift_interline.extend([edge_ch, " "] * (ncols - idx - 1))
+
+ # draw edges from the current node to its parents
+ draw_edges(edges, nodeline, shift_interline)
+
+ # lines is the list of all graph lines to print
+ lines = [nodeline]
+ if add_padding_line:
+ lines.append(get_padding_line(idx, ncols, edges))
+ lines.append(shift_interline)
+
+ # make sure that there are as many graph lines as there are
+ # log strings
+ while len(text) < len(lines):
+ text.append("")
+ if len(lines) < len(text):
+ extra_interline = ["|", " "] * (ncols + coldiff)
+ while len(lines) < len(text):
+ lines.append(extra_interline)
+
+ # print lines
+ indentation_level = max(ncols, ncols + coldiff)
+ for (line, logstr) in zip(lines, text):
+ ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr)
+ ui.write(ln.rstrip() + '\n')
+
+ # ... and start over
+ state[0] = coldiff
+ state[1] = idx
+
+def get_revs(repo, rev_opt):
+ if rev_opt:
+ revs = revrange(repo, rev_opt)
+ if len(revs) == 0:
+ return (nullrev, nullrev)
+ return (max(revs), min(revs))
+ else:
+ return (len(repo) - 1, 0)
+
+def check_unsupported_flags(opts):
+ for op in ["follow", "follow_first", "date", "copies", "keyword", "remove",
+ "only_merges", "user", "branch", "only_branch", "prune",
+ "newest_first", "no_merges", "include", "exclude"]:
+ if op in opts and opts[op]:
+ raise util.Abort(_("--graph option is incompatible with --%s")
+ % op.replace("_", "-"))
+
+def generate(ui, dag, displayer, showparents, edgefn):
+ seen, state = [], asciistate()
+ for rev, type, ctx, parents in dag:
+ char = ctx.node() in showparents and '@' or 'o'
+ displayer.show(ctx)
+ lines = displayer.hunk.pop(rev).split('\n')[:-1]
+ displayer.flush(rev)
+ ascii(ui, state, type, char, lines, edgefn(seen, rev, parents))
+ displayer.close()
+
+def graphlog(ui, repo, path=None, **opts):
+ """show revision history alongside an ASCII revision graph
+
+ Print a revision history alongside a revision graph drawn with
+ ASCII characters.
+
+ Nodes printed as an @ character are parents of the working
+ directory.
+ """
+
+ check_unsupported_flags(opts)
+ limit = cmdutil.loglimit(opts)
+ start, stop = get_revs(repo, opts["rev"])
+ if start == nullrev:
+ return
+
+ if path:
+ path = util.canonpath(repo.root, os.getcwd(), path)
+ if path: # could be reset in canonpath
+ revdag = graphmod.filerevs(repo, path, start, stop, limit)
+ else:
+ if limit is not None:
+ stop = max(stop, start - limit + 1)
+ revdag = graphmod.revisions(repo, start, stop)
+
+ displayer = show_changeset(ui, repo, opts, buffered=True)
+ showparents = [ctx.node() for ctx in repo[None].parents()]
+ generate(ui, revdag, displayer, showparents, asciiedges)
+
+def graphrevs(repo, nodes, opts):
+ limit = cmdutil.loglimit(opts)
+ nodes.reverse()
+ if limit is not None:
+ nodes = nodes[:limit]
+ return graphmod.nodes(repo, nodes)
+
+def goutgoing(ui, repo, dest=None, **opts):
+ """show the outgoing changesets alongside an ASCII revision graph
+
+ Print the outgoing changesets alongside a revision graph drawn with
+ ASCII characters.
+
+ Nodes printed as an @ character are parents of the working
+ directory.
+ """
+
+ check_unsupported_flags(opts)
+ o = hg._outgoing(ui, repo, dest, opts)
+ if o is None:
+ return
+
+ revdag = graphrevs(repo, o, opts)
+ displayer = show_changeset(ui, repo, opts, buffered=True)
+ showparents = [ctx.node() for ctx in repo[None].parents()]
+ generate(ui, revdag, displayer, showparents, asciiedges)
+
+def gincoming(ui, repo, source="default", **opts):
+ """show the incoming changesets alongside an ASCII revision graph
+
+ Print the incoming changesets alongside a revision graph drawn with
+ ASCII characters.
+
+ Nodes printed as an @ character are parents of the working
+ directory.
+ """
+ def subreporecurse():
+ return 1
+
+ check_unsupported_flags(opts)
+ def display(other, chlist, displayer):
+ revdag = graphrevs(other, chlist, opts)
+ showparents = [ctx.node() for ctx in repo[None].parents()]
+ generate(ui, revdag, displayer, showparents, asciiedges)
+
+ hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True)
+
+def uisetup(ui):
+ '''Initialize the extension.'''
+ _wrapcmd(ui, 'log', commands.table, graphlog)
+ _wrapcmd(ui, 'incoming', commands.table, gincoming)
+ _wrapcmd(ui, 'outgoing', commands.table, goutgoing)
+
+def _wrapcmd(ui, cmd, table, wrapfn):
+ '''wrap the command'''
+ def graph(orig, *args, **kwargs):
+ if kwargs['graph']:
+ return wrapfn(*args, **kwargs)
+ return orig(*args, **kwargs)
+ entry = extensions.wrapcommand(table, cmd, graph)
+ entry[1].append(('G', 'graph', None, _("show the revision DAG")))
+
+cmdtable = {
+ "glog":
+ (graphlog,
+ [('l', 'limit', '',
+ _('limit number of changes displayed'), _('NUM')),
+ ('p', 'patch', False, _('show patch')),
+ ('r', 'rev', [],
+ _('show the specified revision or range'), _('REV')),
+ ] + templateopts,
+ _('hg glog [OPTION]... [FILE]')),
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo
new file mode 100644
index 0000000..2edc5f3
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py
new file mode 100644
index 0000000..4e72680
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py
@@ -0,0 +1,251 @@
+# Copyright (C) 2007-8 Brendan Cully <brendan@kublai.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""hooks for integrating with the CIA.vc notification service
+
+This is meant to be run as a changegroup or incoming hook. To
+configure it, set the following options in your hgrc::
+
+ [cia]
+ # your registered CIA user name
+ user = foo
+ # the name of the project in CIA
+ project = foo
+ # the module (subproject) (optional)
+ #module = foo
+ # Append a diffstat to the log message (optional)
+ #diffstat = False
+ # Template to use for log messages (optional)
+ #template = {desc}\\n{baseurl}/rev/{node}-- {diffstat}
+ # Style to use (optional)
+ #style = foo
+ # The URL of the CIA notification service (optional)
+ # You can use mailto: URLs to send by email, eg
+ # mailto:cia@cia.vc
+ # Make sure to set email.from if you do this.
+ #url = http://cia.vc/
+ # print message instead of sending it (optional)
+ #test = False
+
+ [hooks]
+ # one of these:
+ changegroup.cia = python:hgcia.hook
+ #incoming.cia = python:hgcia.hook
+
+ [web]
+ # If you want hyperlinks (optional)
+ baseurl = http://server/path/to/repo
+"""
+
+from mercurial.i18n import _
+from mercurial.node import bin, short
+from mercurial import cmdutil, patch, templater, util, mail
+import email.Parser
+
+import xmlrpclib
+from xml.sax import saxutils
+
+socket_timeout = 30 # seconds
+try:
+ # set a timeout for the socket so you don't have to wait so looooong
+ # when cia.vc is having problems. requires python >= 2.3:
+ import socket
+ socket.setdefaulttimeout(socket_timeout)
+except:
+ pass
+
+HGCIA_VERSION = '0.1'
+HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia'
+
+
+class ciamsg(object):
+ """ A CIA message """
+ def __init__(self, cia, ctx):
+ self.cia = cia
+ self.ctx = ctx
+ self.url = self.cia.url
+
+ def fileelem(self, path, uri, action):
+ if uri:
+ uri = ' uri=%s' % saxutils.quoteattr(uri)
+ return '<file%s action=%s>%s</file>' % (
+ uri, saxutils.quoteattr(action), saxutils.escape(path))
+
+ def fileelems(self):
+ n = self.ctx.node()
+ f = self.cia.repo.status(self.ctx.parents()[0].node(), n)
+ url = self.url or ''
+ elems = []
+ for path in f[0]:
+ uri = '%s/diff/%s/%s' % (url, short(n), path)
+ elems.append(self.fileelem(path, url and uri, 'modify'))
+ for path in f[1]:
+ # TODO: copy/rename ?
+ uri = '%s/file/%s/%s' % (url, short(n), path)
+ elems.append(self.fileelem(path, url and uri, 'add'))
+ for path in f[2]:
+ elems.append(self.fileelem(path, '', 'remove'))
+
+ return '\n'.join(elems)
+
+ def sourceelem(self, project, module=None, branch=None):
+ msg = ['<source>', '<project>%s</project>' % saxutils.escape(project)]
+ if module:
+ msg.append('<module>%s</module>' % saxutils.escape(module))
+ if branch:
+ msg.append('<branch>%s</branch>' % saxutils.escape(branch))
+ msg.append('</source>')
+
+ return '\n'.join(msg)
+
+ def diffstat(self):
+ class patchbuf(object):
+ def __init__(self):
+ self.lines = []
+ # diffstat is stupid
+ self.name = 'cia'
+ def write(self, data):
+ self.lines.append(data)
+ def close(self):
+ pass
+
+ n = self.ctx.node()
+ pbuf = patchbuf()
+ cmdutil.export(self.cia.repo, [n], fp=pbuf)
+ return patch.diffstat(pbuf.lines) or ''
+
+ def logmsg(self):
+ diffstat = self.cia.diffstat and self.diffstat() or ''
+ self.cia.ui.pushbuffer()
+ self.cia.templater.show(self.ctx, changes=self.ctx.changeset(),
+ url=self.cia.url, diffstat=diffstat)
+ return self.cia.ui.popbuffer()
+
+ def xml(self):
+ n = short(self.ctx.node())
+ src = self.sourceelem(self.cia.project, module=self.cia.module,
+ branch=self.ctx.branch())
+ # unix timestamp
+ dt = self.ctx.date()
+ timestamp = dt[0]
+
+ author = saxutils.escape(self.ctx.user())
+ rev = '%d:%s' % (self.ctx.rev(), n)
+ log = saxutils.escape(self.logmsg())
+
+ url = self.url and '<url>%s/rev/%s</url>' % (saxutils.escape(self.url),
+ n) or ''
+
+ msg = """
+<message>
+ <generator>
+ <name>Mercurial (hgcia)</name>
+ <version>%s</version>
+ <url>%s</url>
+ <user>%s</user>
+ </generator>
+ %s
+ <body>
+ <commit>
+ <author>%s</author>
+ <version>%s</version>
+ <log>%s</log>
+ %s
+ <files>%s</files>
+ </commit>
+ </body>
+ <timestamp>%d</timestamp>
+</message>
+""" % \
+ (HGCIA_VERSION, saxutils.escape(HGCIA_URL),
+ saxutils.escape(self.cia.user), src, author, rev, log, url,
+ self.fileelems(), timestamp)
+
+ return msg
+
+
+class hgcia(object):
+ """ CIA notification class """
+
+ deftemplate = '{desc}'
+ dstemplate = '{desc}\n-- \n{diffstat}'
+
+ def __init__(self, ui, repo):
+ self.ui = ui
+ self.repo = repo
+
+ self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc')
+ self.user = self.ui.config('cia', 'user')
+ self.project = self.ui.config('cia', 'project')
+ self.module = self.ui.config('cia', 'module')
+ self.diffstat = self.ui.configbool('cia', 'diffstat')
+ self.emailfrom = self.ui.config('email', 'from')
+ self.dryrun = self.ui.configbool('cia', 'test')
+ self.url = self.ui.config('web', 'baseurl')
+
+ style = self.ui.config('cia', 'style')
+ template = self.ui.config('cia', 'template')
+ if not template:
+ template = self.diffstat and self.dstemplate or self.deftemplate
+ template = templater.parsestring(template, quoted=False)
+ t = cmdutil.changeset_templater(self.ui, self.repo, False, None,
+ style, False)
+ t.use_template(template)
+ self.templater = t
+
+ def sendrpc(self, msg):
+ srv = xmlrpclib.Server(self.ciaurl)
+ res = srv.hub.deliver(msg)
+ if res is not True:
+ raise util.Abort(_('%s returned an error: %s') %
+ (self.ciaurl, res))
+
+ def sendemail(self, address, data):
+ p = email.Parser.Parser()
+ msg = p.parsestr(data)
+ msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
+ msg['To'] = address
+ msg['From'] = self.emailfrom
+ msg['Subject'] = 'DeliverXML'
+ msg['Content-type'] = 'text/xml'
+ msgtext = msg.as_string()
+
+ self.ui.status(_('hgcia: sending update to %s\n') % address)
+ mail.sendmail(self.ui, util.email(self.emailfrom),
+ [address], msgtext)
+
+
+def hook(ui, repo, hooktype, node=None, url=None, **kwargs):
+ """ send CIA notification """
+ def sendmsg(cia, ctx):
+ msg = ciamsg(cia, ctx).xml()
+ if cia.dryrun:
+ ui.write(msg)
+ elif cia.ciaurl.startswith('mailto:'):
+ if not cia.emailfrom:
+ raise util.Abort(_('email.from must be defined when '
+ 'sending by email'))
+ cia.sendemail(cia.ciaurl[7:], msg)
+ else:
+ cia.sendrpc(msg)
+
+ n = bin(node)
+ cia = hgcia(ui, repo)
+ if not cia.user:
+ ui.debug('cia: no user specified')
+ return
+ if not cia.project:
+ ui.debug('cia: no project specified')
+ return
+ if hooktype == 'changegroup':
+ start = repo.changelog.rev(n)
+ end = len(repo.changelog)
+ for rev in xrange(start, end):
+ n = repo.changelog.node(rev)
+ ctx = repo.changectx(n)
+ sendmsg(cia, ctx)
+ else:
+ ctx = repo.changectx(n)
+ sendmsg(cia, ctx)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo
new file mode 100644
index 0000000..2c5a2ee
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py
new file mode 100644
index 0000000..e8aae47
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py
@@ -0,0 +1,348 @@
+# Minimal support for git commands on an hg repository
+#
+# Copyright 2005, 2006 Chris Mason <mason@suse.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''browse the repository in a graphical way
+
+The hgk extension allows browsing the history of a repository in a
+graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not
+distributed with Mercurial.)
+
+hgk consists of two parts: a Tcl script that does the displaying and
+querying of information, and an extension to Mercurial named hgk.py,
+which provides hooks for hgk to get information. hgk can be found in
+the contrib directory, and the extension is shipped in the hgext
+repository, and needs to be enabled.
+
+The :hg:`view` command will launch the hgk Tcl script. For this command
+to work, hgk must be in your search path. Alternately, you can specify
+the path to hgk in your configuration file::
+
+ [hgk]
+ path=/location/of/hgk
+
+hgk can make use of the extdiff extension to visualize revisions.
+Assuming you had already configured extdiff vdiff command, just add::
+
+ [hgk]
+ vdiff=vdiff
+
+Revisions context menu will now display additional entries to fire
+vdiff on hovered and selected revisions.
+'''
+
+import os
+from mercurial import commands, util, patch, revlog, cmdutil
+from mercurial.node import nullid, nullrev, short
+from mercurial.i18n import _
+
+def difftree(ui, repo, node1=None, node2=None, *files, **opts):
+ """diff trees from two commits"""
+ def __difftree(repo, node1, node2, files=[]):
+ assert node2 is not None
+ mmap = repo[node1].manifest()
+ mmap2 = repo[node2].manifest()
+ m = cmdutil.match(repo, files)
+ modified, added, removed = repo.status(node1, node2, m)[:3]
+ empty = short(nullid)
+
+ for f in modified:
+ # TODO get file permissions
+ ui.write(":100664 100664 %s %s M\t%s\t%s\n" %
+ (short(mmap[f]), short(mmap2[f]), f, f))
+ for f in added:
+ ui.write(":000000 100664 %s %s N\t%s\t%s\n" %
+ (empty, short(mmap2[f]), f, f))
+ for f in removed:
+ ui.write(":100664 000000 %s %s D\t%s\t%s\n" %
+ (short(mmap[f]), empty, f, f))
+ ##
+
+ while True:
+ if opts['stdin']:
+ try:
+ line = raw_input().split(' ')
+ node1 = line[0]
+ if len(line) > 1:
+ node2 = line[1]
+ else:
+ node2 = None
+ except EOFError:
+ break
+ node1 = repo.lookup(node1)
+ if node2:
+ node2 = repo.lookup(node2)
+ else:
+ node2 = node1
+ node1 = repo.changelog.parents(node1)[0]
+ if opts['patch']:
+ if opts['pretty']:
+ catcommit(ui, repo, node2, "")
+ m = cmdutil.match(repo, files)
+ chunks = patch.diff(repo, node1, node2, match=m,
+ opts=patch.diffopts(ui, {'git': True}))
+ for chunk in chunks:
+ ui.write(chunk)
+ else:
+ __difftree(repo, node1, node2, files=files)
+ if not opts['stdin']:
+ break
+
+def catcommit(ui, repo, n, prefix, ctx=None):
+ nlprefix = '\n' + prefix
+ if ctx is None:
+ ctx = repo[n]
+ ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ??
+ for p in ctx.parents():
+ ui.write("parent %s\n" % p)
+
+ date = ctx.date()
+ description = ctx.description().replace("\0", "")
+ lines = description.splitlines()
+ if lines and lines[-1].startswith('committer:'):
+ committer = lines[-1].split(': ')[1].rstrip()
+ else:
+ committer = ctx.user()
+
+ ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1]))
+ ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1]))
+ ui.write("revision %d\n" % ctx.rev())
+ ui.write("branch %s\n\n" % ctx.branch())
+
+ if prefix != "":
+ ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip()))
+ else:
+ ui.write(description + "\n")
+ if prefix:
+ ui.write('\0')
+
+def base(ui, repo, node1, node2):
+ """output common ancestor information"""
+ node1 = repo.lookup(node1)
+ node2 = repo.lookup(node2)
+ n = repo.changelog.ancestor(node1, node2)
+ ui.write(short(n) + "\n")
+
+def catfile(ui, repo, type=None, r=None, **opts):
+ """cat a specific revision"""
+ # in stdin mode, every line except the commit is prefixed with two
+ # spaces. This way the our caller can find the commit without magic
+ # strings
+ #
+ prefix = ""
+ if opts['stdin']:
+ try:
+ (type, r) = raw_input().split(' ')
+ prefix = " "
+ except EOFError:
+ return
+
+ else:
+ if not type or not r:
+ ui.warn(_("cat-file: type or revision not supplied\n"))
+ commands.help_(ui, 'cat-file')
+
+ while r:
+ if type != "commit":
+ ui.warn(_("aborting hg cat-file only understands commits\n"))
+ return 1
+ n = repo.lookup(r)
+ catcommit(ui, repo, n, prefix)
+ if opts['stdin']:
+ try:
+ (type, r) = raw_input().split(' ')
+ except EOFError:
+ break
+ else:
+ break
+
+# git rev-tree is a confusing thing. You can supply a number of
+# commit sha1s on the command line, and it walks the commit history
+# telling you which commits are reachable from the supplied ones via
+# a bitmask based on arg position.
+# you can specify a commit to stop at by starting the sha1 with ^
+def revtree(ui, args, repo, full="tree", maxnr=0, parents=False):
+ def chlogwalk():
+ count = len(repo)
+ i = count
+ l = [0] * 100
+ chunk = 100
+ while True:
+ if chunk > i:
+ chunk = i
+ i = 0
+ else:
+ i -= chunk
+
+ for x in xrange(chunk):
+ if i + x >= count:
+ l[chunk - x:] = [0] * (chunk - x)
+ break
+ if full != None:
+ l[x] = repo[i + x]
+ l[x].changeset() # force reading
+ else:
+ l[x] = 1
+ for x in xrange(chunk - 1, -1, -1):
+ if l[x] != 0:
+ yield (i + x, full != None and l[x] or None)
+ if i == 0:
+ break
+
+ # calculate and return the reachability bitmask for sha
+ def is_reachable(ar, reachable, sha):
+ if len(ar) == 0:
+ return 1
+ mask = 0
+ for i in xrange(len(ar)):
+ if sha in reachable[i]:
+ mask |= 1 << i
+
+ return mask
+
+ reachable = []
+ stop_sha1 = []
+ want_sha1 = []
+ count = 0
+
+ # figure out which commits they are asking for and which ones they
+ # want us to stop on
+ for i, arg in enumerate(args):
+ if arg.startswith('^'):
+ s = repo.lookup(arg[1:])
+ stop_sha1.append(s)
+ want_sha1.append(s)
+ elif arg != 'HEAD':
+ want_sha1.append(repo.lookup(arg))
+
+ # calculate the graph for the supplied commits
+ for i, n in enumerate(want_sha1):
+ reachable.append(set())
+ visit = [n]
+ reachable[i].add(n)
+ while visit:
+ n = visit.pop(0)
+ if n in stop_sha1:
+ continue
+ for p in repo.changelog.parents(n):
+ if p not in reachable[i]:
+ reachable[i].add(p)
+ visit.append(p)
+ if p in stop_sha1:
+ continue
+
+ # walk the repository looking for commits that are in our
+ # reachability graph
+ for i, ctx in chlogwalk():
+ n = repo.changelog.node(i)
+ mask = is_reachable(want_sha1, reachable, n)
+ if mask:
+ parentstr = ""
+ if parents:
+ pp = repo.changelog.parents(n)
+ if pp[0] != nullid:
+ parentstr += " " + short(pp[0])
+ if pp[1] != nullid:
+ parentstr += " " + short(pp[1])
+ if not full:
+ ui.write("%s%s\n" % (short(n), parentstr))
+ elif full == "commit":
+ ui.write("%s%s\n" % (short(n), parentstr))
+ catcommit(ui, repo, n, ' ', ctx)
+ else:
+ (p1, p2) = repo.changelog.parents(n)
+ (h, h1, h2) = map(short, (n, p1, p2))
+ (i1, i2) = map(repo.changelog.rev, (p1, p2))
+
+ date = ctx.date()[0]
+ ui.write("%s %s:%s" % (date, h, mask))
+ mask = is_reachable(want_sha1, reachable, p1)
+ if i1 != nullrev and mask > 0:
+ ui.write("%s:%s " % (h1, mask)),
+ mask = is_reachable(want_sha1, reachable, p2)
+ if i2 != nullrev and mask > 0:
+ ui.write("%s:%s " % (h2, mask))
+ ui.write("\n")
+ if maxnr and count >= maxnr:
+ break
+ count += 1
+
+def revparse(ui, repo, *revs, **opts):
+ """parse given revisions"""
+ def revstr(rev):
+ if rev == 'HEAD':
+ rev = 'tip'
+ return revlog.hex(repo.lookup(rev))
+
+ for r in revs:
+ revrange = r.split(':', 1)
+ ui.write('%s\n' % revstr(revrange[0]))
+ if len(revrange) == 2:
+ ui.write('^%s\n' % revstr(revrange[1]))
+
+# git rev-list tries to order things by date, and has the ability to stop
+# at a given commit without walking the whole repo. TODO add the stop
+# parameter
+def revlist(ui, repo, *revs, **opts):
+ """print revisions"""
+ if opts['header']:
+ full = "commit"
+ else:
+ full = None
+ copy = [x for x in revs]
+ revtree(ui, copy, repo, full, opts['max_count'], opts['parents'])
+
+def config(ui, repo, **opts):
+ """print extension options"""
+ def writeopt(name, value):
+ ui.write('k=%s\nv=%s\n' % (name, value))
+
+ writeopt('vdiff', ui.config('hgk', 'vdiff', ''))
+
+
+def view(ui, repo, *etc, **opts):
+ "start interactive history viewer"
+ os.chdir(repo.root)
+ optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v])
+ cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc))
+ ui.debug("running %s\n" % cmd)
+ util.system(cmd)
+
+cmdtable = {
+ "^view":
+ (view,
+ [('l', 'limit', '',
+ _('limit number of changes displayed'), _('NUM'))],
+ _('hg view [-l LIMIT] [REVRANGE]')),
+ "debug-diff-tree":
+ (difftree,
+ [('p', 'patch', None, _('generate patch')),
+ ('r', 'recursive', None, _('recursive')),
+ ('P', 'pretty', None, _('pretty')),
+ ('s', 'stdin', None, _('stdin')),
+ ('C', 'copy', None, _('detect copies')),
+ ('S', 'search', "", _('search'))],
+ _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')),
+ "debug-cat-file":
+ (catfile,
+ [('s', 'stdin', None, _('stdin'))],
+ _('hg debug-cat-file [OPTION]... TYPE FILE')),
+ "debug-config":
+ (config, [], _('hg debug-config')),
+ "debug-merge-base":
+ (base, [], _('hg debug-merge-base REV REV')),
+ "debug-rev-parse":
+ (revparse,
+ [('', 'default', '', _('ignored'))],
+ _('hg debug-rev-parse REV')),
+ "debug-rev-list":
+ (revlist,
+ [('H', 'header', None, _('header')),
+ ('t', 'topo-order', None, _('topo-order')),
+ ('p', 'parents', None, _('parents')),
+ ('n', 'max-count', 0, _('max-count'))],
+ _('hg debug-rev-list [OPTION]... REV...')),
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo
new file mode 100644
index 0000000..97aa394
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py
new file mode 100644
index 0000000..55e3c18
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py
@@ -0,0 +1,61 @@
+# highlight - syntax highlighting in hgweb, based on Pygments
+#
+# Copyright 2008, 2009 Patrick Mezard <pmezard@gmail.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+# The original module was split in an interface and an implementation
+# file to defer pygments loading and speedup extension setup.
+
+"""syntax highlighting for hgweb (requires Pygments)
+
+It depends on the Pygments syntax highlighting library:
+http://pygments.org/
+
+There is a single configuration option::
+
+ [web]
+ pygments_style = <style>
+
+The default is 'colorful'.
+"""
+
+import highlight
+from mercurial.hgweb import webcommands, webutil, common
+from mercurial import extensions, encoding
+
+def filerevision_highlight(orig, web, tmpl, fctx):
+ mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
+ # only pygmentize for mimetype containing 'html' so we both match
+ # 'text/html' and possibly 'application/xhtml+xml' in the future
+ # so that we don't have to touch the extension when the mimetype
+ # for a template changes; also hgweb optimizes the case that a
+ # raw file is sent using rawfile() and doesn't call us, so we
+ # can't clash with the file's content-type here in case we
+ # pygmentize a html file
+ if 'html' in mt:
+ style = web.config('web', 'pygments_style', 'colorful')
+ highlight.pygmentize('fileline', fctx, style, tmpl)
+ return orig(web, tmpl, fctx)
+
+def annotate_highlight(orig, web, req, tmpl):
+ mt = ''.join(tmpl('mimetype', encoding=encoding.encoding))
+ if 'html' in mt:
+ fctx = webutil.filectx(web.repo, req)
+ style = web.config('web', 'pygments_style', 'colorful')
+ highlight.pygmentize('annotateline', fctx, style, tmpl)
+ return orig(web, req, tmpl)
+
+def generate_css(web, req, tmpl):
+ pg_style = web.config('web', 'pygments_style', 'colorful')
+ fmter = highlight.HtmlFormatter(style = pg_style)
+ req.respond(common.HTTP_OK, 'text/css')
+ return ['/* pygments_style = %s */\n\n' % pg_style, fmter.get_style_defs('')]
+
+def extsetup():
+ # monkeypatch in the new version
+ extensions.wrapfunction(webcommands, '_filerevision', filerevision_highlight)
+ extensions.wrapfunction(webcommands, 'annotate', annotate_highlight)
+ webcommands.highlightcss = generate_css
+ webcommands.__all__.append('highlightcss')
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyo
new file mode 100644
index 0000000..eb0ba63
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py
new file mode 100644
index 0000000..a8265cf
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py
@@ -0,0 +1,61 @@
+# highlight.py - highlight extension implementation file
+#
+# Copyright 2007-2009 Adam Hupp <adam@hupp.org> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+# The original module was split in an interface and an implementation
+# file to defer pygments loading and speedup extension setup.
+
+from mercurial import demandimport
+demandimport.ignore.extend(['pkgutil', 'pkg_resources', '__main__'])
+from mercurial import util, encoding
+
+from pygments import highlight
+from pygments.util import ClassNotFound
+from pygments.lexers import guess_lexer, guess_lexer_for_filename, TextLexer
+from pygments.formatters import HtmlFormatter
+
+SYNTAX_CSS = ('\n<link rel="stylesheet" href="{url}highlightcss" '
+ 'type="text/css" />')
+
+def pygmentize(field, fctx, style, tmpl):
+
+ # append a <link ...> to the syntax highlighting css
+ old_header = tmpl.load('header')
+ if SYNTAX_CSS not in old_header:
+ new_header = old_header + SYNTAX_CSS
+ tmpl.cache['header'] = new_header
+
+ text = fctx.data()
+ if util.binary(text):
+ return
+
+ # Pygments is best used with Unicode strings:
+ # <http://pygments.org/docs/unicode/>
+ text = text.decode(encoding.encoding, 'replace')
+
+ # To get multi-line strings right, we can't format line-by-line
+ try:
+ lexer = guess_lexer_for_filename(fctx.path(), text[:1024])
+ except (ClassNotFound, ValueError):
+ try:
+ lexer = guess_lexer(text[:1024])
+ except (ClassNotFound, ValueError):
+ lexer = TextLexer()
+
+ formatter = HtmlFormatter(style=style)
+
+ colorized = highlight(text, lexer, formatter)
+ # strip wrapping div
+ colorized = colorized[:colorized.find('\n</pre>')]
+ colorized = colorized[colorized.find('<pre>')+5:]
+ coloriter = (s.encode(encoding.encoding, 'replace')
+ for s in colorized.splitlines())
+
+ tmpl.filters['colorize'] = lambda x: coloriter.next()
+
+ oldl = tmpl.cache[field]
+ newl = oldl.replace('line|escape', 'line|colorize')
+ tmpl.cache[field] = newl
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyo
new file mode 100644
index 0000000..7d10f48
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py
new file mode 100644
index 0000000..60c4255
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py
@@ -0,0 +1,81 @@
+# interhg.py - interhg
+#
+# Copyright 2007 OHASHI Hideya <ohachige@gmail.com>
+#
+# Contributor(s):
+# Edward Lee <edward.lee@engineering.uiuc.edu>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''expand expressions into changelog and summaries
+
+This extension allows the use of a special syntax in summaries, which
+will be automatically expanded into links or any other arbitrary
+expression, much like InterWiki does.
+
+A few example patterns (link to bug tracking, etc.) that may be used
+in your hgrc::
+
+ [interhg]
+ issues = s!issue(\\d+)!<a href="http://bts/issue\\1">issue\\1</a>!
+ bugzilla = s!((?:bug|b=|(?=#?\\d{4,}))(?:\\s*#?)(\\d+))!<a..=\\2">\\1</a>!i
+ boldify = s!(^|\\s)#(\\d+)\\b! <b>#\\2</b>!
+'''
+
+import re
+from mercurial.hgweb import hgweb_mod
+from mercurial import templatefilters, extensions
+from mercurial.i18n import _
+
+interhg_table = []
+
+def uisetup(ui):
+ orig_escape = templatefilters.filters["escape"]
+
+ def interhg_escape(x):
+ escstr = orig_escape(x)
+ for regexp, format in interhg_table:
+ escstr = regexp.sub(format, escstr)
+ return escstr
+
+ templatefilters.filters["escape"] = interhg_escape
+
+def interhg_refresh(orig, self, *args, **kwargs):
+ interhg_table[:] = []
+ for key, pattern in self.repo.ui.configitems('interhg'):
+ # grab the delimiter from the character after the "s"
+ unesc = pattern[1]
+ delim = re.escape(unesc)
+
+ # identify portions of the pattern, taking care to avoid escaped
+ # delimiters. the replace format and flags are optional, but delimiters
+ # are required.
+ match = re.match(r'^s%s(.+)(?:(?<=\\\\)|(?<!\\))%s(.*)%s([ilmsux])*$'
+ % (delim, delim, delim), pattern)
+ if not match:
+ self.repo.ui.warn(_("interhg: invalid pattern for %s: %s\n")
+ % (key, pattern))
+ continue
+
+ # we need to unescape the delimiter for regexp and format
+ delim_re = re.compile(r'(?<!\\)\\%s' % delim)
+ regexp = delim_re.sub(unesc, match.group(1))
+ format = delim_re.sub(unesc, match.group(2))
+
+ # the pattern allows for 6 regexp flags, so set them if necessary
+ flagin = match.group(3)
+ flags = 0
+ if flagin:
+ for flag in flagin.upper():
+ flags |= re.__dict__[flag]
+
+ try:
+ regexp = re.compile(regexp, flags)
+ interhg_table.append((regexp, format))
+ except re.error:
+ self.repo.ui.warn(_("interhg: invalid regexp for %s: %s\n")
+ % (key, regexp))
+ return orig(self, *args, **kwargs)
+
+extensions.wrapfunction(hgweb_mod.hgweb, 'refresh', interhg_refresh)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyo
new file mode 100644
index 0000000..c5dd4d7
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py
new file mode 100644
index 0000000..9060714
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py
@@ -0,0 +1,649 @@
+# keyword.py - $Keyword$ expansion for Mercurial
+#
+# Copyright 2007-2010 Christian Ebert <blacktrash@gmx.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+# $Id$
+#
+# Keyword expansion hack against the grain of a DSCM
+#
+# There are many good reasons why this is not needed in a distributed
+# SCM, still it may be useful in very small projects based on single
+# files (like LaTeX packages), that are mostly addressed to an
+# audience not running a version control system.
+#
+# For in-depth discussion refer to
+# <http://mercurial.selenic.com/wiki/KeywordPlan>.
+#
+# Keyword expansion is based on Mercurial's changeset template mappings.
+#
+# Binary files are not touched.
+#
+# Files to act upon/ignore are specified in the [keyword] section.
+# Customized keyword template mappings in the [keywordmaps] section.
+#
+# Run "hg help keyword" and "hg kwdemo" to get info on configuration.
+
+'''expand keywords in tracked files
+
+This extension expands RCS/CVS-like or self-customized $Keywords$ in
+tracked text files selected by your configuration.
+
+Keywords are only expanded in local repositories and not stored in the
+change history. The mechanism can be regarded as a convenience for the
+current user or for archive distribution.
+
+Keywords expand to the changeset data pertaining to the latest change
+relative to the working directory parent of each file.
+
+Configuration is done in the [keyword], [keywordset] and [keywordmaps]
+sections of hgrc files.
+
+Example::
+
+ [keyword]
+ # expand keywords in every python file except those matching "x*"
+ **.py =
+ x* = ignore
+
+ [keywordset]
+ # prefer svn- over cvs-like default keywordmaps
+ svn = True
+
+.. note::
+ The more specific you are in your filename patterns the less you
+ lose speed in huge repositories.
+
+For [keywordmaps] template mapping and expansion demonstration and
+control run :hg:`kwdemo`. See :hg:`help templates` for a list of
+available templates and filters.
+
+Three additional date template filters are provided::
+
+ utcdate "2006/09/18 15:13:13"
+ svnutcdate "2006-09-18 15:13:13Z"
+ svnisodate "2006-09-18 08:13:13 -700 (Mon, 18 Sep 2006)"
+
+The default template mappings (view with :hg:`kwdemo -d`) can be
+replaced with customized keywords and templates. Again, run
+:hg:`kwdemo` to control the results of your configuration changes.
+
+Before changing/disabling active keywords, run :hg:`kwshrink` to avoid
+the risk of inadvertently storing expanded keywords in the change
+history.
+
+To force expansion after enabling it, or a configuration change, run
+:hg:`kwexpand`.
+
+Expansions spanning more than one line and incremental expansions,
+like CVS' $Log$, are not supported. A keyword template map "Log =
+{desc}" expands to the first line of the changeset description.
+'''
+
+from mercurial import commands, context, cmdutil, dispatch, filelog, extensions
+from mercurial import localrepo, match, patch, templatefilters, templater, util
+from mercurial.hgweb import webcommands
+from mercurial.i18n import _
+import os, re, shutil, tempfile
+
+commands.optionalrepo += ' kwdemo'
+
+# hg commands that do not act on keywords
+nokwcommands = ('add addremove annotate bundle export grep incoming init log'
+ ' outgoing push tip verify convert email glog')
+
+# hg commands that trigger expansion only when writing to working dir,
+# not when reading filelog, and unexpand when reading from working dir
+restricted = 'merge kwexpand kwshrink record qrecord resolve transplant'
+
+# names of extensions using dorecord
+recordextensions = 'record'
+
+# date like in cvs' $Date
+utcdate = lambda x: util.datestr((x[0], 0), '%Y/%m/%d %H:%M:%S')
+# date like in svn's $Date
+svnisodate = lambda x: util.datestr(x, '%Y-%m-%d %H:%M:%S %1%2 (%a, %d %b %Y)')
+# date like in svn's $Id
+svnutcdate = lambda x: util.datestr((x[0], 0), '%Y-%m-%d %H:%M:%SZ')
+
+# make keyword tools accessible
+kwtools = {'templater': None, 'hgcmd': ''}
+
+
+def _defaultkwmaps(ui):
+ '''Returns default keywordmaps according to keywordset configuration.'''
+ templates = {
+ 'Revision': '{node|short}',
+ 'Author': '{author|user}',
+ }
+ kwsets = ({
+ 'Date': '{date|utcdate}',
+ 'RCSfile': '{file|basename},v',
+ 'RCSFile': '{file|basename},v', # kept for backwards compatibility
+ # with hg-keyword
+ 'Source': '{root}/{file},v',
+ 'Id': '{file|basename},v {node|short} {date|utcdate} {author|user}',
+ 'Header': '{root}/{file},v {node|short} {date|utcdate} {author|user}',
+ }, {
+ 'Date': '{date|svnisodate}',
+ 'Id': '{file|basename},v {node|short} {date|svnutcdate} {author|user}',
+ 'LastChangedRevision': '{node|short}',
+ 'LastChangedBy': '{author|user}',
+ 'LastChangedDate': '{date|svnisodate}',
+ })
+ templates.update(kwsets[ui.configbool('keywordset', 'svn')])
+ return templates
+
+def _shrinktext(text, subfunc):
+ '''Helper for keyword expansion removal in text.
+ Depending on subfunc also returns number of substitutions.'''
+ return subfunc(r'$\1$', text)
+
+def _preselect(wstatus, changed):
+ '''Retrieves modfied and added files from a working directory state
+ and returns the subset of each contained in given changed files
+ retrieved from a change context.'''
+ modified, added = wstatus[:2]
+ modified = [f for f in modified if f in changed]
+ added = [f for f in added if f in changed]
+ return modified, added
+
+
+class kwtemplater(object):
+ '''
+ Sets up keyword templates, corresponding keyword regex, and
+ provides keyword substitution functions.
+ '''
+
+ def __init__(self, ui, repo, inc, exc):
+ self.ui = ui
+ self.repo = repo
+ self.match = match.match(repo.root, '', [], inc, exc)
+ self.restrict = kwtools['hgcmd'] in restricted.split()
+ self.record = False
+
+ kwmaps = self.ui.configitems('keywordmaps')
+ if kwmaps: # override default templates
+ self.templates = dict((k, templater.parsestring(v, False))
+ for k, v in kwmaps)
+ else:
+ self.templates = _defaultkwmaps(self.ui)
+ escaped = '|'.join(map(re.escape, self.templates.keys()))
+ self.re_kw = re.compile(r'\$(%s)\$' % escaped)
+ self.re_kwexp = re.compile(r'\$(%s): [^$\n\r]*? \$' % escaped)
+
+ templatefilters.filters.update({'utcdate': utcdate,
+ 'svnisodate': svnisodate,
+ 'svnutcdate': svnutcdate})
+
+ def substitute(self, data, path, ctx, subfunc):
+ '''Replaces keywords in data with expanded template.'''
+ def kwsub(mobj):
+ kw = mobj.group(1)
+ ct = cmdutil.changeset_templater(self.ui, self.repo,
+ False, None, '', False)
+ ct.use_template(self.templates[kw])
+ self.ui.pushbuffer()
+ ct.show(ctx, root=self.repo.root, file=path)
+ ekw = templatefilters.firstline(self.ui.popbuffer())
+ return '$%s: %s $' % (kw, ekw)
+ return subfunc(kwsub, data)
+
+ def expand(self, path, node, data):
+ '''Returns data with keywords expanded.'''
+ if not self.restrict and self.match(path) and not util.binary(data):
+ ctx = self.repo.filectx(path, fileid=node).changectx()
+ return self.substitute(data, path, ctx, self.re_kw.sub)
+ return data
+
+ def iskwfile(self, cand, ctx):
+ '''Returns subset of candidates which are configured for keyword
+ expansion are not symbolic links.'''
+ return [f for f in cand if self.match(f) and not 'l' in ctx.flags(f)]
+
+ def overwrite(self, ctx, candidates, lookup, expand, rekw=False):
+ '''Overwrites selected files expanding/shrinking keywords.'''
+ if self.restrict or lookup or self.record: # exclude kw_copy
+ candidates = self.iskwfile(candidates, ctx)
+ if not candidates:
+ return
+ kwcmd = self.restrict and lookup # kwexpand/kwshrink
+ if self.restrict or expand and lookup:
+ mf = ctx.manifest()
+ fctx = ctx
+ subn = (self.restrict or rekw) and self.re_kw.subn or self.re_kwexp.subn
+ msg = (expand and _('overwriting %s expanding keywords\n')
+ or _('overwriting %s shrinking keywords\n'))
+ for f in candidates:
+ if self.restrict:
+ data = self.repo.file(f).read(mf[f])
+ else:
+ data = self.repo.wread(f)
+ if util.binary(data):
+ continue
+ if expand:
+ if lookup:
+ fctx = self.repo.filectx(f, fileid=mf[f]).changectx()
+ data, found = self.substitute(data, f, fctx, subn)
+ elif self.restrict:
+ found = self.re_kw.search(data)
+ else:
+ data, found = _shrinktext(data, subn)
+ if found:
+ self.ui.note(msg % f)
+ self.repo.wwrite(f, data, ctx.flags(f))
+ if kwcmd:
+ self.repo.dirstate.normal(f)
+ elif self.record:
+ self.repo.dirstate.normallookup(f)
+
+ def shrink(self, fname, text):
+ '''Returns text with all keyword substitutions removed.'''
+ if self.match(fname) and not util.binary(text):
+ return _shrinktext(text, self.re_kwexp.sub)
+ return text
+
+ def shrinklines(self, fname, lines):
+ '''Returns lines with keyword substitutions removed.'''
+ if self.match(fname):
+ text = ''.join(lines)
+ if not util.binary(text):
+ return _shrinktext(text, self.re_kwexp.sub).splitlines(True)
+ return lines
+
+ def wread(self, fname, data):
+ '''If in restricted mode returns data read from wdir with
+ keyword substitutions removed.'''
+ return self.restrict and self.shrink(fname, data) or data
+
+class kwfilelog(filelog.filelog):
+ '''
+ Subclass of filelog to hook into its read, add, cmp methods.
+ Keywords are "stored" unexpanded, and processed on reading.
+ '''
+ def __init__(self, opener, kwt, path):
+ super(kwfilelog, self).__init__(opener, path)
+ self.kwt = kwt
+ self.path = path
+
+ def read(self, node):
+ '''Expands keywords when reading filelog.'''
+ data = super(kwfilelog, self).read(node)
+ if self.renamed(node):
+ return data
+ return self.kwt.expand(self.path, node, data)
+
+ def add(self, text, meta, tr, link, p1=None, p2=None):
+ '''Removes keyword substitutions when adding to filelog.'''
+ text = self.kwt.shrink(self.path, text)
+ return super(kwfilelog, self).add(text, meta, tr, link, p1, p2)
+
+ def cmp(self, node, text):
+ '''Removes keyword substitutions for comparison.'''
+ text = self.kwt.shrink(self.path, text)
+ return super(kwfilelog, self).cmp(node, text)
+
+def _status(ui, repo, kwt, *pats, **opts):
+ '''Bails out if [keyword] configuration is not active.
+ Returns status of working directory.'''
+ if kwt:
+ return repo.status(match=cmdutil.match(repo, pats, opts), clean=True,
+ unknown=opts.get('unknown') or opts.get('all'))
+ if ui.configitems('keyword'):
+ raise util.Abort(_('[keyword] patterns cannot match'))
+ raise util.Abort(_('no [keyword] patterns configured'))
+
+def _kwfwrite(ui, repo, expand, *pats, **opts):
+ '''Selects files and passes them to kwtemplater.overwrite.'''
+ wctx = repo[None]
+ if len(wctx.parents()) > 1:
+ raise util.Abort(_('outstanding uncommitted merge'))
+ kwt = kwtools['templater']
+ wlock = repo.wlock()
+ try:
+ status = _status(ui, repo, kwt, *pats, **opts)
+ modified, added, removed, deleted, unknown, ignored, clean = status
+ if modified or added or removed or deleted:
+ raise util.Abort(_('outstanding uncommitted changes'))
+ kwt.overwrite(wctx, clean, True, expand)
+ finally:
+ wlock.release()
+
+def demo(ui, repo, *args, **opts):
+ '''print [keywordmaps] configuration and an expansion example
+
+ Show current, custom, or default keyword template maps and their
+ expansions.
+
+ Extend the current configuration by specifying maps as arguments
+ and using -f/--rcfile to source an external hgrc file.
+
+ Use -d/--default to disable current configuration.
+
+ See :hg:`help templates` for information on templates and filters.
+ '''
+ def demoitems(section, items):
+ ui.write('[%s]\n' % section)
+ for k, v in sorted(items):
+ ui.write('%s = %s\n' % (k, v))
+
+ fn = 'demo.txt'
+ tmpdir = tempfile.mkdtemp('', 'kwdemo.')
+ ui.note(_('creating temporary repository at %s\n') % tmpdir)
+ repo = localrepo.localrepository(ui, tmpdir, True)
+ ui.setconfig('keyword', fn, '')
+
+ uikwmaps = ui.configitems('keywordmaps')
+ if args or opts.get('rcfile'):
+ ui.status(_('\n\tconfiguration using custom keyword template maps\n'))
+ if uikwmaps:
+ ui.status(_('\textending current template maps\n'))
+ if opts.get('default') or not uikwmaps:
+ ui.status(_('\toverriding default template maps\n'))
+ if opts.get('rcfile'):
+ ui.readconfig(opts.get('rcfile'))
+ if args:
+ # simulate hgrc parsing
+ rcmaps = ['[keywordmaps]\n'] + [a + '\n' for a in args]
+ fp = repo.opener('hgrc', 'w')
+ fp.writelines(rcmaps)
+ fp.close()
+ ui.readconfig(repo.join('hgrc'))
+ kwmaps = dict(ui.configitems('keywordmaps'))
+ elif opts.get('default'):
+ ui.status(_('\n\tconfiguration using default keyword template maps\n'))
+ kwmaps = _defaultkwmaps(ui)
+ if uikwmaps:
+ ui.status(_('\tdisabling current template maps\n'))
+ for k, v in kwmaps.iteritems():
+ ui.setconfig('keywordmaps', k, v)
+ else:
+ ui.status(_('\n\tconfiguration using current keyword template maps\n'))
+ kwmaps = dict(uikwmaps) or _defaultkwmaps(ui)
+
+ uisetup(ui)
+ reposetup(ui, repo)
+ ui.write('[extensions]\nkeyword =\n')
+ demoitems('keyword', ui.configitems('keyword'))
+ demoitems('keywordmaps', kwmaps.iteritems())
+ keywords = '$' + '$\n$'.join(sorted(kwmaps.keys())) + '$\n'
+ repo.wopener(fn, 'w').write(keywords)
+ repo[None].add([fn])
+ ui.note(_('\nkeywords written to %s:\n') % fn)
+ ui.note(keywords)
+ repo.dirstate.setbranch('demobranch')
+ for name, cmd in ui.configitems('hooks'):
+ if name.split('.', 1)[0].find('commit') > -1:
+ repo.ui.setconfig('hooks', name, '')
+ msg = _('hg keyword configuration and expansion example')
+ ui.note("hg ci -m '%s'\n" % msg)
+ repo.commit(text=msg)
+ ui.status(_('\n\tkeywords expanded\n'))
+ ui.write(repo.wread(fn))
+ shutil.rmtree(tmpdir, ignore_errors=True)
+
+def expand(ui, repo, *pats, **opts):
+ '''expand keywords in the working directory
+
+ Run after (re)enabling keyword expansion.
+
+ kwexpand refuses to run if given files contain local changes.
+ '''
+ # 3rd argument sets expansion to True
+ _kwfwrite(ui, repo, True, *pats, **opts)
+
+def files(ui, repo, *pats, **opts):
+ '''show files configured for keyword expansion
+
+ List which files in the working directory are matched by the
+ [keyword] configuration patterns.
+
+ Useful to prevent inadvertent keyword expansion and to speed up
+ execution by including only files that are actual candidates for
+ expansion.
+
+ See :hg:`help keyword` on how to construct patterns both for
+ inclusion and exclusion of files.
+
+ With -A/--all and -v/--verbose the codes used to show the status
+ of files are::
+
+ K = keyword expansion candidate
+ k = keyword expansion candidate (not tracked)
+ I = ignored
+ i = ignored (not tracked)
+ '''
+ kwt = kwtools['templater']
+ status = _status(ui, repo, kwt, *pats, **opts)
+ cwd = pats and repo.getcwd() or ''
+ modified, added, removed, deleted, unknown, ignored, clean = status
+ files = []
+ if not opts.get('unknown') or opts.get('all'):
+ files = sorted(modified + added + clean)
+ wctx = repo[None]
+ kwfiles = kwt.iskwfile(files, wctx)
+ kwunknown = kwt.iskwfile(unknown, wctx)
+ if not opts.get('ignore') or opts.get('all'):
+ showfiles = kwfiles, kwunknown
+ else:
+ showfiles = [], []
+ if opts.get('all') or opts.get('ignore'):
+ showfiles += ([f for f in files if f not in kwfiles],
+ [f for f in unknown if f not in kwunknown])
+ for char, filenames in zip('KkIi', showfiles):
+ fmt = (opts.get('all') or ui.verbose) and '%s %%s\n' % char or '%s\n'
+ for f in filenames:
+ ui.write(fmt % repo.pathto(f, cwd))
+
+def shrink(ui, repo, *pats, **opts):
+ '''revert expanded keywords in the working directory
+
+ Run before changing/disabling active keywords or if you experience
+ problems with :hg:`import` or :hg:`merge`.
+
+ kwshrink refuses to run if given files contain local changes.
+ '''
+ # 3rd argument sets expansion to False
+ _kwfwrite(ui, repo, False, *pats, **opts)
+
+
+def uisetup(ui):
+ ''' Monkeypatches dispatch._parse to retrieve user command.'''
+
+ def kwdispatch_parse(orig, ui, args):
+ '''Monkeypatch dispatch._parse to obtain running hg command.'''
+ cmd, func, args, options, cmdoptions = orig(ui, args)
+ kwtools['hgcmd'] = cmd
+ return cmd, func, args, options, cmdoptions
+
+ extensions.wrapfunction(dispatch, '_parse', kwdispatch_parse)
+
+def reposetup(ui, repo):
+ '''Sets up repo as kwrepo for keyword substitution.
+ Overrides file method to return kwfilelog instead of filelog
+ if file matches user configuration.
+ Wraps commit to overwrite configured files with updated
+ keyword substitutions.
+ Monkeypatches patch and webcommands.'''
+
+ try:
+ if (not repo.local() or kwtools['hgcmd'] in nokwcommands.split()
+ or '.hg' in util.splitpath(repo.root)
+ or repo._url.startswith('bundle:')):
+ return
+ except AttributeError:
+ pass
+
+ inc, exc = [], ['.hg*']
+ for pat, opt in ui.configitems('keyword'):
+ if opt != 'ignore':
+ inc.append(pat)
+ else:
+ exc.append(pat)
+ if not inc:
+ return
+
+ kwtools['templater'] = kwt = kwtemplater(ui, repo, inc, exc)
+
+ class kwrepo(repo.__class__):
+ def file(self, f):
+ if f[0] == '/':
+ f = f[1:]
+ return kwfilelog(self.sopener, kwt, f)
+
+ def wread(self, filename):
+ data = super(kwrepo, self).wread(filename)
+ return kwt.wread(filename, data)
+
+ def commit(self, *args, **opts):
+ # use custom commitctx for user commands
+ # other extensions can still wrap repo.commitctx directly
+ self.commitctx = self.kwcommitctx
+ try:
+ return super(kwrepo, self).commit(*args, **opts)
+ finally:
+ del self.commitctx
+
+ def kwcommitctx(self, ctx, error=False):
+ n = super(kwrepo, self).commitctx(ctx, error)
+ # no lock needed, only called from repo.commit() which already locks
+ if not kwt.record:
+ restrict = kwt.restrict
+ kwt.restrict = True
+ kwt.overwrite(self[n], sorted(ctx.added() + ctx.modified()),
+ False, True)
+ kwt.restrict = restrict
+ return n
+
+ def rollback(self, dryrun=False):
+ wlock = self.wlock()
+ try:
+ if not dryrun:
+ changed = self['.'].files()
+ ret = super(kwrepo, self).rollback(dryrun)
+ if not dryrun:
+ ctx = self['.']
+ modified, added = _preselect(self[None].status(), changed)
+ kwt.overwrite(ctx, modified, True, True)
+ kwt.overwrite(ctx, added, True, False)
+ return ret
+ finally:
+ wlock.release()
+
+ # monkeypatches
+ def kwpatchfile_init(orig, self, ui, fname, opener,
+ missing=False, eolmode=None):
+ '''Monkeypatch/wrap patch.patchfile.__init__ to avoid
+ rejects or conflicts due to expanded keywords in working dir.'''
+ orig(self, ui, fname, opener, missing, eolmode)
+ # shrink keywords read from working dir
+ self.lines = kwt.shrinklines(self.fname, self.lines)
+
+ def kw_diff(orig, repo, node1=None, node2=None, match=None, changes=None,
+ opts=None, prefix=''):
+ '''Monkeypatch patch.diff to avoid expansion.'''
+ kwt.restrict = True
+ return orig(repo, node1, node2, match, changes, opts, prefix)
+
+ def kwweb_skip(orig, web, req, tmpl):
+ '''Wraps webcommands.x turning off keyword expansion.'''
+ kwt.match = util.never
+ return orig(web, req, tmpl)
+
+ def kw_copy(orig, ui, repo, pats, opts, rename=False):
+ '''Wraps cmdutil.copy so that copy/rename destinations do not
+ contain expanded keywords.
+ Note that the source of a regular file destination may also be a
+ symlink:
+ hg cp sym x -> x is symlink
+ cp sym x; hg cp -A sym x -> x is file (maybe expanded keywords)
+ For the latter we have to follow the symlink to find out whether its
+ target is configured for expansion and we therefore must unexpand the
+ keywords in the destination.'''
+ orig(ui, repo, pats, opts, rename)
+ if opts.get('dry_run'):
+ return
+ wctx = repo[None]
+ cwd = repo.getcwd()
+
+ def haskwsource(dest):
+ '''Returns true if dest is a regular file and configured for
+ expansion or a symlink which points to a file configured for
+ expansion. '''
+ source = repo.dirstate.copied(dest)
+ if 'l' in wctx.flags(source):
+ source = util.canonpath(repo.root, cwd,
+ os.path.realpath(source))
+ return kwt.match(source)
+
+ candidates = [f for f in repo.dirstate.copies() if
+ not 'l' in wctx.flags(f) and haskwsource(f)]
+ kwt.overwrite(wctx, candidates, False, False)
+
+ def kw_dorecord(orig, ui, repo, commitfunc, *pats, **opts):
+ '''Wraps record.dorecord expanding keywords after recording.'''
+ wlock = repo.wlock()
+ try:
+ # record returns 0 even when nothing has changed
+ # therefore compare nodes before and after
+ kwt.record = True
+ ctx = repo['.']
+ wstatus = repo[None].status()
+ ret = orig(ui, repo, commitfunc, *pats, **opts)
+ recctx = repo['.']
+ if ctx != recctx:
+ modified, added = _preselect(wstatus, recctx.files())
+ kwt.restrict = False
+ kwt.overwrite(recctx, modified, False, True)
+ kwt.overwrite(recctx, added, False, True, True)
+ kwt.restrict = True
+ return ret
+ finally:
+ wlock.release()
+
+ repo.__class__ = kwrepo
+
+ def kwfilectx_cmp(orig, self, fctx):
+ # keyword affects data size, comparing wdir and filelog size does
+ # not make sense
+ if (fctx._filerev is None and
+ (self._repo._encodefilterpats or
+ kwt.match(fctx.path()) and not 'l' in fctx.flags()) or
+ self.size() == fctx.size()):
+ return self._filelog.cmp(self._filenode, fctx.data())
+ return True
+
+ extensions.wrapfunction(context.filectx, 'cmp', kwfilectx_cmp)
+ extensions.wrapfunction(patch.patchfile, '__init__', kwpatchfile_init)
+ extensions.wrapfunction(patch, 'diff', kw_diff)
+ extensions.wrapfunction(cmdutil, 'copy', kw_copy)
+ for c in 'annotate changeset rev filediff diff'.split():
+ extensions.wrapfunction(webcommands, c, kwweb_skip)
+ for name in recordextensions.split():
+ try:
+ record = extensions.find(name)
+ extensions.wrapfunction(record, 'dorecord', kw_dorecord)
+ except KeyError:
+ pass
+
+cmdtable = {
+ 'kwdemo':
+ (demo,
+ [('d', 'default', None, _('show default keyword template maps')),
+ ('f', 'rcfile', '',
+ _('read maps from rcfile'), _('FILE'))],
+ _('hg kwdemo [-d] [-f RCFILE] [TEMPLATEMAP]...')),
+ 'kwexpand': (expand, commands.walkopts,
+ _('hg kwexpand [OPTION]... [FILE]...')),
+ 'kwfiles':
+ (files,
+ [('A', 'all', None, _('show keyword status flags of all files')),
+ ('i', 'ignore', None, _('show files excluded from expansion')),
+ ('u', 'unknown', None, _('only show unknown (not tracked) files')),
+ ] + commands.walkopts,
+ _('hg kwfiles [OPTION]... [FILE]...')),
+ 'kwshrink': (shrink, commands.walkopts,
+ _('hg kwshrink [OPTION]... [FILE]...')),
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyo
new file mode 100644
index 0000000..e6e44ac
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py
new file mode 100644
index 0000000..5137089
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py
@@ -0,0 +1,3211 @@
+# mq.py - patch queues for mercurial
+#
+# Copyright 2005, 2006 Chris Mason <mason@suse.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''manage a stack of patches
+
+This extension lets you work with a stack of patches in a Mercurial
+repository. It manages two stacks of patches - all known patches, and
+applied patches (subset of known patches).
+
+Known patches are represented as patch files in the .hg/patches
+directory. Applied patches are both patch files and changesets.
+
+Common tasks (use :hg:`help command` for more details)::
+
+ create new patch qnew
+ import existing patch qimport
+
+ print patch series qseries
+ print applied patches qapplied
+
+ add known patch to applied stack qpush
+ remove patch from applied stack qpop
+ refresh contents of top applied patch qrefresh
+
+By default, mq will automatically use git patches when required to
+avoid losing file mode changes, copy records, binary files or empty
+files creations or deletions. This behaviour can be configured with::
+
+ [mq]
+ git = auto/keep/yes/no
+
+If set to 'keep', mq will obey the [diff] section configuration while
+preserving existing git patches upon qrefresh. If set to 'yes' or
+'no', mq will override the [diff] section and always generate git or
+regular patches, possibly losing data in the second case.
+
+You will by default be managing a patch queue named "patches". You can
+create other, independent patch queues with the :hg:`qqueue` command.
+'''
+
+from mercurial.i18n import _
+from mercurial.node import bin, hex, short, nullid, nullrev
+from mercurial.lock import release
+from mercurial import commands, cmdutil, hg, patch, util
+from mercurial import repair, extensions, url, error
+import os, sys, re, errno, shutil
+
# qclone can be run without an existing local repository
commands.norepo += " qclone"

# Patch names look like unix file names.
# They must be joinable with queue directory and result in the patch path.
normname = util.normpath
+
class statusentry(object):
    """One entry of the mq status file: a single applied patch.

    Pairs the changeset node with the patch name.  The repr form,
    '<hex-node>:<name>', is exactly the on-disk status file format
    (see queue.save_dirty / queue.applied).
    """

    def __init__(self, node, name):
        self.node = node
        self.name = name

    def __repr__(self):
        return ':'.join([hex(self.node), self.name])
+
class patchheader(object):
    """Parsed header (user/date/parent/message) of an mq patch file.

    Understands both plain mail-style headers (From:/Date:/Subject:) and
    '# HG changeset patch' export headers.  `comments` holds every
    non-diff line verbatim; `message` holds the commit message lines.
    """
    def __init__(self, pf, plainmode=False):
        # strip trailing diff-introduction lines off `lines`, in place
        def eatdiff(lines):
            while lines:
                l = lines[-1]
                if (l.startswith("diff -") or
                    l.startswith("Index:") or
                    l.startswith("===========")):
                    del lines[-1]
                else:
                    break
        # strip trailing blank lines off `lines`, in place
        def eatempty(lines):
            while lines:
                if not lines[-1].strip():
                    del lines[-1]
                else:
                    break

        message = []
        comments = []
        user = None
        date = None
        parent = None
        format = None       # parser state: None / "hgpatch" / "tag" / "tagdone"
        subject = None
        diffstart = 0       # 0: no diff seen, 1: saw '--- ', 2: diff confirmed

        for line in file(pf):
            line = line.rstrip()
            # a git diff header, or '+++ ' right after '--- ', marks the
            # start of the patch body: stop reading headers
            if (line.startswith('diff --git')
                or (diffstart and line.startswith('+++ '))):
                diffstart = 2
                break
            diffstart = 0 # reset
            if line.startswith("--- "):
                diffstart = 1
                continue
            elif format == "hgpatch":
                # parse values when importing the result of an hg export
                if line.startswith("# User "):
                    user = line[7:]
                elif line.startswith("# Date "):
                    date = line[7:]
                elif line.startswith("# Parent "):
                    parent = line[9:]
                elif not line.startswith("# ") and line:
                    # first non-header line: switch to plain message mode
                    message.append(line)
                    format = None
            elif line == '# HG changeset patch':
                message = []
                format = "hgpatch"
            elif (format != "tagdone" and (line.startswith("Subject: ") or
                                           line.startswith("subject: "))):
                subject = line[9:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("From: ") or
                                           line.startswith("from: "))):
                user = line[6:]
                format = "tag"
            elif (format != "tagdone" and (line.startswith("Date: ") or
                                           line.startswith("date: "))):
                date = line[6:]
                format = "tag"
            elif format == "tag" and line == "":
                # when looking for tags (subject: from: etc) they
                # end once you find a blank line in the source
                format = "tagdone"
            elif message or line:
                message.append(line)
            comments.append(line)

        eatdiff(message)
        eatdiff(comments)
        eatempty(message)
        eatempty(comments)

        # make sure message isn't empty
        if format and format.startswith("tag") and subject:
            message.insert(0, "")
            message.insert(0, subject)

        self.message = message
        self.comments = comments
        self.user = user
        self.date = date
        self.parent = parent
        self.haspatch = diffstart > 1
        self.plainmode = plainmode

    def setuser(self, user):
        """Set the patch user, rewriting or inserting the header line."""
        if not self.updateheader(['From: ', '# User '], user):
            try:
                # insert right after the '# HG changeset patch' marker
                patchheaderat = self.comments.index('# HG changeset patch')
                self.comments.insert(patchheaderat + 1, '# User ' + user)
            except ValueError:
                if self.plainmode or self._hasheader(['Date: ']):
                    self.comments = ['From: ' + user] + self.comments
                else:
                    tmp = ['# HG changeset patch', '# User ' + user, '']
                    self.comments = tmp + self.comments
        self.user = user

    def setdate(self, date):
        """Set the patch date, rewriting or inserting the header line."""
        if not self.updateheader(['Date: ', '# Date '], date):
            try:
                patchheaderat = self.comments.index('# HG changeset patch')
                self.comments.insert(patchheaderat + 1, '# Date ' + date)
            except ValueError:
                if self.plainmode or self._hasheader(['From: ']):
                    self.comments = ['Date: ' + date] + self.comments
                else:
                    tmp = ['# HG changeset patch', '# Date ' + date, '']
                    self.comments = tmp + self.comments
        self.date = date

    def setparent(self, parent):
        """Set the parent header; silently skipped for plain patches."""
        if not self.updateheader(['# Parent '], parent):
            try:
                patchheaderat = self.comments.index('# HG changeset patch')
                self.comments.insert(patchheaderat + 1, '# Parent ' + parent)
            except ValueError:
                pass
        self.parent = parent

    def setmessage(self, message):
        """Replace the commit message (a single string)."""
        if self.comments:
            self._delmsg()
        self.message = [message]
        self.comments += self.message

    def updateheader(self, prefixes, new):
        '''Update all references to a field in the patch header.
        Return whether the field is present.'''
        res = False
        for prefix in prefixes:
            for i in xrange(len(self.comments)):
                if self.comments[i].startswith(prefix):
                    self.comments[i] = prefix + new
                    res = True
                    break
        return res

    def _hasheader(self, prefixes):
        '''Check if a header starts with any of the given prefixes.'''
        for prefix in prefixes:
            for comment in self.comments:
                if comment.startswith(prefix):
                    return True
        return False

    def __str__(self):
        # rendered header block as written at the top of a patch file
        if not self.comments:
            return ''
        return '\n'.join(self.comments) + '\n\n'

    def _delmsg(self):
        '''Remove existing message, keeping the rest of the comments fields.
        If comments contains 'subject: ', message will prepend
        the field and a blank line.'''
        if self.message:
            subj = 'subject: ' + self.message[0].lower()
            for i in xrange(len(self.comments)):
                if subj == self.comments[i].lower():
                    del self.comments[i]
                    self.message = self.message[2:]
                    break
        ci = 0
        for mi in self.message:
            while mi != self.comments[ci]:
                ci += 1
            del self.comments[ci]
+
+class queue(object):
+ def __init__(self, ui, path, patchdir=None):
+ self.basepath = path
+ try:
+ fh = open(os.path.join(path, 'patches.queue'))
+ cur = fh.read().rstrip()
+ if not cur:
+ curpath = os.path.join(path, 'patches')
+ else:
+ curpath = os.path.join(path, 'patches-' + cur)
+ except IOError:
+ curpath = os.path.join(path, 'patches')
+ self.path = patchdir or curpath
+ self.opener = util.opener(self.path)
+ self.ui = ui
+ self.applied_dirty = 0
+ self.series_dirty = 0
+ self.added = []
+ self.series_path = "series"
+ self.status_path = "status"
+ self.guards_path = "guards"
+ self.active_guards = None
+ self.guards_dirty = False
+ # Handle mq.git as a bool with extended values
+ try:
+ gitmode = ui.configbool('mq', 'git', None)
+ if gitmode is None:
+ raise error.ConfigError()
+ self.gitmode = gitmode and 'yes' or 'no'
+ except error.ConfigError:
+ self.gitmode = ui.config('mq', 'git', 'auto').lower()
+ self.plainmode = ui.configbool('mq', 'plain', False)
+
    @util.propertycache
    def applied(self):
        # Lazily parse the status file: one "<hex node>:<patch name>"
        # line per applied patch, oldest first.
        if os.path.exists(self.join(self.status_path)):
            def parse(l):
                n, name = l.split(':', 1)
                return statusentry(bin(n), name)
            lines = self.opener(self.status_path).read().splitlines()
            return [parse(l) for l in lines]
        return []
+
    @util.propertycache
    def full_series(self):
        # Raw series file lines, including comments and guard annotations.
        if os.path.exists(self.join(self.series_path)):
            return self.opener(self.series_path).read().splitlines()
        return []
+
    @util.propertycache
    def series(self):
        # parse_series() assigns self.series directly, replacing this
        # property-cache slot; the seemingly recursive return then reads
        # the freshly parsed list.
        self.parse_series()
        return self.series
+
    @util.propertycache
    def series_guards(self):
        # Same self-replacement trick as `series`: parse_series() stores
        # self.series_guards, which shadows this property cache.
        self.parse_series()
        return self.series_guards
+
+ def invalidate(self):
+ for a in 'applied full_series series series_guards'.split():
+ if a in self.__dict__:
+ delattr(self, a)
+ self.applied_dirty = 0
+ self.series_dirty = 0
+ self.guards_dirty = False
+ self.active_guards = None
+
+ def diffopts(self, opts={}, patchfn=None):
+ diffopts = patch.diffopts(self.ui, opts)
+ if self.gitmode == 'auto':
+ diffopts.upgrade = True
+ elif self.gitmode == 'keep':
+ pass
+ elif self.gitmode in ('yes', 'no'):
+ diffopts.git = self.gitmode == 'yes'
+ else:
+ raise util.Abort(_('mq.git option can be auto/keep/yes/no'
+ ' got %s') % self.gitmode)
+ if patchfn:
+ diffopts = self.patchopts(diffopts, patchfn)
+ return diffopts
+
    def patchopts(self, diffopts, *patches):
        """Return a copy of input diff options with git set to true if
        referenced patch is a git patch and should be preserved as such.
        """
        diffopts = diffopts.copy()
        if not diffopts.git and self.gitmode == 'keep':
            for patchfn in patches:
                patchf = self.opener(patchfn, 'r')
                # if the patch was a git patch, refresh it as a git patch
                for line in patchf:
                    if line.startswith('diff --git'):
                        diffopts.git = True
                        break
                patchf.close()
        return diffopts
+
+ def join(self, *p):
+ return os.path.join(self.path, *p)
+
+ def find_series(self, patch):
+ def matchpatch(l):
+ l = l.split('#', 1)[0]
+ return l.strip() == patch
+ for index, l in enumerate(self.full_series):
+ if matchpatch(l):
+ return index
+ return None
+
+ guard_re = re.compile(r'\s?#([-+][^-+# \t\r\n\f][^# \t\r\n\f]*)')
+
    def parse_series(self):
        """Split full_series into self.series (patch names, in order)
        and self.series_guards (one guard list per patch)."""
        self.series = []
        self.series_guards = []
        for l in self.full_series:
            h = l.find('#')
            if h == -1:
                patch = l
                comment = ''
            elif h == 0:
                # whole line is a comment
                continue
            else:
                patch = l[:h]
                comment = l[h:]
            patch = patch.strip()
            if patch:
                if patch in self.series:
                    raise util.Abort(_('%s appears more than once in %s') %
                                     (patch, self.join(self.series_path)))
                self.series.append(patch)
                # guards live in the comment part, e.g. "#+foo #-bar"
                self.series_guards.append(self.guard_re.findall(comment))
+
+ def check_guard(self, guard):
+ if not guard:
+ return _('guard cannot be an empty string')
+ bad_chars = '# \t\r\n\f'
+ first = guard[0]
+ if first in '-+':
+ return (_('guard %r starts with invalid character: %r') %
+ (guard, first))
+ for c in bad_chars:
+ if c in guard:
+ return _('invalid character in guard %r: %r') % (guard, c)
+
+ def set_active(self, guards):
+ for guard in guards:
+ bad = self.check_guard(guard)
+ if bad:
+ raise util.Abort(bad)
+ guards = sorted(set(guards))
+ self.ui.debug('active guards: %s\n' % ' '.join(guards))
+ self.active_guards = guards
+ self.guards_dirty = True
+
    def active(self):
        """Return the list of valid active guards, reading the guards
        file lazily on first call; malformed entries are warned about
        and dropped."""
        if self.active_guards is None:
            self.active_guards = []
            try:
                guards = self.opener(self.guards_path).read().split()
            except IOError, err:
                if err.errno != errno.ENOENT:
                    raise
                # missing guards file simply means no active guards
                guards = []
            for i, guard in enumerate(guards):
                bad = self.check_guard(guard)
                if bad:
                    self.ui.warn('%s:%d: %s\n' %
                                 (self.join(self.guards_path), i + 1, bad))
                else:
                    self.active_guards.append(guard)
        return self.active_guards
+
    def set_guards(self, idx, guards):
        """Replace the guards of series entry `idx`; each guard must be
        of the form '+name' or '-name'."""
        for g in guards:
            if len(g) < 2:
                raise util.Abort(_('guard %r too short') % g)
            if g[0] not in '-+':
                raise util.Abort(_('guard %r starts with invalid char') % g)
            bad = self.check_guard(g[1:])
            if bad:
                raise util.Abort(bad)
        # strip the old guard annotations, then append the new ones
        drop = self.guard_re.sub('', self.full_series[idx])
        self.full_series[idx] = drop + ''.join([' #' + g for g in guards])
        self.parse_series()
        self.series_dirty = True
+
    def pushable(self, idx):
        """Return (pushable, reason) for the patch at `idx` (index or
        name), given the currently active guards."""
        if isinstance(idx, str):
            idx = self.series.index(idx)
        patchguards = self.series_guards[idx]
        if not patchguards:
            # unguarded patches are always pushable
            return True, None
        guards = self.active()
        # an active negative guard always blocks the patch
        exactneg = [g for g in patchguards if g[0] == '-' and g[1:] in guards]
        if exactneg:
            return False, exactneg[0]
        # with positive guards present, at least one must be active
        pos = [g for g in patchguards if g[0] == '+']
        exactpos = [g for g in pos if g[1:] in guards]
        if pos:
            if exactpos:
                return True, exactpos[0]
            return False, pos
        return True, ''
+
    def explain_pushable(self, idx, all_patches=False):
        """Tell the user why the patch at `idx` is or is not pushable."""
        # plain write when listing all patches, warning channel otherwise
        write = all_patches and self.ui.write or self.ui.warn
        if all_patches or self.ui.verbose:
            if isinstance(idx, str):
                idx = self.series.index(idx)
            pushable, why = self.pushable(idx)
            if all_patches and pushable:
                if why is None:
                    write(_('allowing %s - no guards in effect\n') %
                          self.series[idx])
                else:
                    if not why:
                        write(_('allowing %s - no matching negative guards\n') %
                              self.series[idx])
                    else:
                        write(_('allowing %s - guarded by %r\n') %
                              (self.series[idx], why))
            if not pushable:
                if why:
                    write(_('skipping %s - guarded by %r\n') %
                          (self.series[idx], why))
                else:
                    write(_('skipping %s - no matching guards\n') %
                          self.series[idx])
+
    def save_dirty(self):
        """Flush dirty in-memory state (status, series, guards) to disk
        and add newly created patch files to the queue repository."""
        def write_list(items, path):
            fp = self.opener(path, 'w')
            for i in items:
                fp.write("%s\n" % i)
            fp.close()
        if self.applied_dirty:
            # statusentry.__repr__ yields the on-disk "node:name" format
            write_list(map(str, self.applied), self.status_path)
        if self.series_dirty:
            write_list(self.full_series, self.series_path)
        if self.guards_dirty:
            write_list(self.active_guards, self.guards_path)
        if self.added:
            qrepo = self.qrepo()
            if qrepo:
                qrepo[None].add(f for f in self.added if f not in qrepo[None])
            self.added = []
+
    def removeundo(self, repo):
        """Delete the store's undo file so a later rollback cannot step
        past an mq operation; only warn on failure."""
        undo = repo.sjoin('undo')
        if not os.path.exists(undo):
            return
        try:
            os.unlink(undo)
        except OSError, inst:
            self.ui.warn(_('error removing undo: %s\n') % str(inst))
+
+ def printdiff(self, repo, diffopts, node1, node2=None, files=None,
+ fp=None, changes=None, opts={}):
+ stat = opts.get('stat')
+ m = cmdutil.match(repo, files, opts)
+ cmdutil.diffordiffstat(self.ui, repo, diffopts, node1, node2, m,
+ changes, stat, fp)
+
+ def mergeone(self, repo, mergeq, head, patch, rev, diffopts):
+ # first try just applying the patch
+ (err, n) = self.apply(repo, [patch], update_status=False,
+ strict=True, merge=rev)
+
+ if err == 0:
+ return (err, n)
+
+ if n is None:
+ raise util.Abort(_("apply failed for patch %s") % patch)
+
+ self.ui.warn(_("patch didn't work out, merging %s\n") % patch)
+
+ # apply failed, strip away that rev and merge.
+ hg.clean(repo, head)
+ self.strip(repo, [n], update=False, backup='strip')
+
+ ctx = repo[rev]
+ ret = hg.merge(repo, rev)
+ if ret:
+ raise util.Abort(_("update returned %d") % ret)
+ n = repo.commit(ctx.description(), ctx.user(), force=True)
+ if n is None:
+ raise util.Abort(_("repo commit failed"))
+ try:
+ ph = patchheader(mergeq.join(patch), self.plainmode)
+ except:
+ raise util.Abort(_("unable to read %s") % patch)
+
+ diffopts = self.patchopts(diffopts, patch)
+ patchf = self.opener(patch, "w")
+ comments = str(ph)
+ if comments:
+ patchf.write(comments)
+ self.printdiff(repo, diffopts, head, n, fp=patchf)
+ patchf.close()
+ self.removeundo(repo)
+ return (0, n)
+
    def qparents(self, repo, rev=None):
        """Return the revision the patch queue is based on.

        Without `rev`: the working directory's first parent, or for a
        merge the last applied node.  With `rev`: prefer whichever of
        its parents is itself an applied mq patch.
        """
        if rev is None:
            (p1, p2) = repo.dirstate.parents()
            if p2 == nullid:
                return p1
            if not self.applied:
                return None
            return self.applied[-1].node
        p1, p2 = repo.changelog.parents(rev)
        if p2 != nullid and p2 in [x.node for x in self.applied]:
            return p2
        return p1
+
    def mergepatch(self, repo, mergeq, series, diffopts):
        """Merge each patch of `series` from queue `mergeq` (qpush -m).
        Returns (err, headnode)."""
        if not self.applied:
            # each of the patches merged in will have two parents. This
            # can confuse the qrefresh, qdiff, and strip code because it
            # needs to know which parent is actually in the patch queue.
            # so, we insert a merge marker with only one parent. This way
            # the first patch in the queue is never a merge patch
            #
            pname = ".hg.patches.merge.marker"
            n = repo.commit('[mq]: merge marker', force=True)
            self.removeundo(repo)
            self.applied.append(statusentry(n, pname))
            self.applied_dirty = 1

        head = self.qparents(repo)

        for patch in series:
            patch = mergeq.lookup(patch, strict=True)
            if not patch:
                self.ui.warn(_("patch %s does not exist\n") % patch)
                return (1, None)
            pushable, reason = self.pushable(patch)
            if not pushable:
                self.explain_pushable(patch, all_patches=True)
                continue
            info = mergeq.isapplied(patch)
            if not info:
                self.ui.warn(_("patch %s is not applied\n") % patch)
                return (1, None)
            rev = info[1]
            err, head = self.mergeone(repo, mergeq, head, patch, rev, diffopts)
            if head:
                self.applied.append(statusentry(head, patch))
                self.applied_dirty = 1
            if err:
                return (err, head)
        self.save_dirty()
        return (0, head)
+
    def patch(self, repo, patchfile):
        '''Apply patchfile to the working directory.
        patchfile: name of patch file

        Returns (success, touched-files-dict, fuzz).'''
        files = {}
        try:
            fuzz = patch.patch(patchfile, self.ui, strip=1, cwd=repo.root,
                               files=files, eolmode=None)
        except Exception, inst:
            self.ui.note(str(inst) + '\n')
            if not self.ui.verbose:
                self.ui.warn(_("patch failed, unable to continue (try -v)\n"))
            return (False, files, False)

        return (True, files, fuzz)
+
    def apply(self, repo, series, list=False, update_status=True,
              strict=False, patchdir=None, merge=None, all_files=None):
        """Apply `series` under wlock/lock inside a transaction; abort
        the transaction and invalidate caches on any failure.
        Returns _apply's (err, node)."""
        wlock = lock = tr = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()
            tr = repo.transaction("qpush")
            try:
                ret = self._apply(repo, series, list, update_status,
                                  strict, patchdir, merge, all_files=all_files)
                tr.close()
                self.save_dirty()
                return ret
            except:
                # bare except on purpose: always abort the transaction,
                # then re-raise whatever happened
                try:
                    tr.abort()
                finally:
                    repo.invalidate()
                    repo.dirstate.invalidate()
                raise
        finally:
            release(tr, lock, wlock)
            self.removeundo(repo)
+
    def _apply(self, repo, series, list=False, update_status=True,
               strict=False, patchdir=None, merge=None, all_files=None):
        '''returns (error, hash)
        error = 1 for unable to read, 2 for patch failed, 3 for patch fuzz'''
        # TODO unify with commands.py
        if not patchdir:
            patchdir = self.path
        err = 0
        n = None
        for patchname in series:
            pushable, reason = self.pushable(patchname)
            if not pushable:
                self.explain_pushable(patchname, all_patches=True)
                continue
            self.ui.status(_("applying %s\n") % patchname)
            pf = os.path.join(patchdir, patchname)

            try:
                ph = patchheader(self.join(patchname), self.plainmode)
            except:
                # NOTE(review): bare except also hides KeyboardInterrupt;
                # any read/parse failure is reported as err = 1
                self.ui.warn(_("unable to read %s\n") % patchname)
                err = 1
                break

            message = ph.message
            if not message:
                # The commit message should not be translated
                message = "imported patch %s\n" % patchname
            else:
                if list:
                    # The commit message should not be translated
                    message.append("\nimported patch %s" % patchname)
                message = '\n'.join(message)

            if ph.haspatch:
                (patcherr, files, fuzz) = self.patch(repo, pf)
                if all_files is not None:
                    all_files.update(files)
                patcherr = not patcherr
            else:
                self.ui.warn(_("patch %s is empty\n") % patchname)
                patcherr, files, fuzz = 0, [], 0

            if merge and files:
                # Mark as removed/merged and update dirstate parent info
                removed = []
                merged = []
                for f in files:
                    if os.path.lexists(repo.wjoin(f)):
                        merged.append(f)
                    else:
                        removed.append(f)
                for f in removed:
                    repo.dirstate.remove(f)
                for f in merged:
                    repo.dirstate.merge(f)
                p1, p2 = repo.dirstate.parents()
                repo.dirstate.setparents(p1, merge)

            files = cmdutil.updatedir(self.ui, repo, files)
            match = cmdutil.matchfiles(repo, files or [])
            n = repo.commit(message, ph.user, ph.date, match=match, force=True)

            if n is None:
                raise util.Abort(_("repository commit failed"))

            if update_status:
                self.applied.append(statusentry(n, patchname))

            if patcherr:
                self.ui.warn(_("patch failed, rejects left in working dir\n"))
                err = 2
                break

            if fuzz and strict:
                self.ui.warn(_("fuzz found when applying patch, stopping\n"))
                err = 3
                break
        return (err, n)
+
    def _cleanup(self, patches, numrevs, keep=False):
        """Forget `patches`: remove their files (unless keep), drop the
        first `numrevs` applied entries, and rewrite the series."""
        if not keep:
            r = self.qrepo()
            if r:
                r[None].remove(patches, True)
            else:
                for p in patches:
                    os.unlink(self.join(p))

        if numrevs:
            del self.applied[:numrevs]
            self.applied_dirty = 1

        # delete highest indexes first so the earlier ones stay valid
        for i in sorted([self.find_series(p) for p in patches], reverse=True):
            del self.full_series[i]
        self.parse_series()
        self.series_dirty = 1
+
    def _revpatches(self, repo, revs):
        """Map ascending applied revisions `revs` to their patch names,
        checking that they form a prefix of the applied stack."""
        firstrev = repo[self.applied[0].node].rev()
        patches = []
        for i, rev in enumerate(revs):

            if rev < firstrev:
                raise util.Abort(_('revision %d is not managed') % rev)

            ctx = repo[rev]
            base = self.applied[i].node
            if ctx.node() != base:
                msg = _('cannot delete revision %d above applied patches')
                raise util.Abort(msg % rev)

            patch = self.applied[i].name
            # warn when the changeset still carries mq's placeholder message
            for fmt in ('[mq]: %s', 'imported patch %s'):
                if ctx.description() == fmt % patch:
                    msg = _('patch %s finalized without changeset message\n')
                    repo.ui.status(msg % patch)
                    break

            patches.append(patch)
        return patches
+
+ def finish(self, repo, revs):
+ patches = self._revpatches(repo, sorted(revs))
+ self._cleanup(patches, len(patches))
+
    def delete(self, repo, patches, opts):
        """qdelete: forget named `patches` and/or the finished revisions
        in opts['rev']; applied patches may never be deleted."""
        if not patches and not opts.get('rev'):
            raise util.Abort(_('qdelete requires at least one revision or '
                               'patch name'))

        realpatches = []
        for patch in patches:
            patch = self.lookup(patch, strict=True)
            info = self.isapplied(patch)
            if info:
                raise util.Abort(_("cannot delete applied patch %s") % patch)
            if patch not in self.series:
                raise util.Abort(_("patch %s not in series file") % patch)
            if patch not in realpatches:
                realpatches.append(patch)

        numrevs = 0
        if opts.get('rev'):
            if not self.applied:
                raise util.Abort(_('no patches applied'))
            revs = cmdutil.revrange(repo, opts.get('rev'))
            if len(revs) > 1 and revs[0] > revs[1]:
                revs.reverse()
            revpatches = self._revpatches(repo, revs)
            realpatches += revpatches
            numrevs = len(revpatches)

        self._cleanup(realpatches, numrevs, opts.get('keep'))
+
+ def check_toppatch(self, repo):
+ if self.applied:
+ top = self.applied[-1].node
+ patch = self.applied[-1].name
+ pp = repo.dirstate.parents()
+ if top not in pp:
+ raise util.Abort(_("working directory revision is not qtip"))
+ return top, patch
+ return None, None
+
+ def check_localchanges(self, repo, force=False, refresh=True):
+ m, a, r, d = repo.status()[:4]
+ if (m or a or r or d) and not force:
+ if refresh:
+ raise util.Abort(_("local changes found, refresh first"))
+ else:
+ raise util.Abort(_("local changes found"))
+ return m, a, r, d
+
    # patch names that would collide with mq's own control files
    _reserved = ('series', 'status', 'guards')
    def check_reserved_name(self, name):
        """Abort when `name` is unusable as a patch name: a reserved
        file, a .hg/.mq prefix, or containing '#' or ':'."""
        if (name in self._reserved or name.startswith('.hg')
            or name.startswith('.mq') or '#' in name or ':' in name):
            raise util.Abort(_('"%s" cannot be used as the name of a patch')
                             % name)
+
    def new(self, repo, patchfn, *pats, **opts):
        """options:
        msg: a string or a no-argument function returning a string

        Create patch `patchfn` (qnew): commit the selected working-dir
        changes and write the matching patch file, rolling both back on
        failure.
        """
        msg = opts.get('msg')
        user = opts.get('user')
        date = opts.get('date')
        if date:
            date = util.parsedate(date)
        diffopts = self.diffopts({'git': opts.get('git')})
        self.check_reserved_name(patchfn)
        if os.path.exists(self.join(patchfn)):
            if os.path.isdir(self.join(patchfn)):
                raise util.Abort(_('"%s" already exists as a directory')
                                 % patchfn)
            else:
                raise util.Abort(_('patch "%s" already exists') % patchfn)
        if opts.get('include') or opts.get('exclude') or pats:
            match = cmdutil.match(repo, pats, opts)
            # detect missing files in pats
            def badfn(f, msg):
                raise util.Abort('%s: %s' % (f, msg))
            match.bad = badfn
            m, a, r, d = repo.status(match=match)[:4]
        else:
            m, a, r, d = self.check_localchanges(repo, force=True)
            match = cmdutil.matchfiles(repo, m + a + r)
        if len(repo[None].parents()) > 1:
            raise util.Abort(_('cannot manage merge changesets'))
        commitfiles = m + a + r
        self.check_toppatch(repo)
        insert = self.full_series_end()
        wlock = repo.wlock()
        try:
            try:
                # if patch file write fails, abort early
                p = self.opener(patchfn, "w")
            except IOError, e:
                raise util.Abort(_('cannot write patch "%s": %s')
                                 % (patchfn, e.strerror))
            try:
                # write the header, plain or hg-export style
                if self.plainmode:
                    if user:
                        p.write("From: " + user + "\n")
                        if not date:
                            p.write("\n")
                    if date:
                        p.write("Date: %d %d\n\n" % date)
                else:
                    p.write("# HG changeset patch\n")
                    p.write("# Parent "
                            + hex(repo[None].parents()[0].node()) + "\n")
                    if user:
                        p.write("# User " + user + "\n")
                    if date:
                        p.write("# Date %s %s\n\n" % date)
                if hasattr(msg, '__call__'):
                    msg = msg()
                commitmsg = msg and msg or ("[mq]: %s" % patchfn)
                n = repo.commit(commitmsg, user, date, match=match, force=True)
                if n is None:
                    raise util.Abort(_("repo commit failed"))
                try:
                    self.full_series[insert:insert] = [patchfn]
                    self.applied.append(statusentry(n, patchfn))
                    self.parse_series()
                    self.series_dirty = 1
                    self.applied_dirty = 1
                    if msg:
                        msg = msg + "\n\n"
                        p.write(msg)
                    if commitfiles:
                        parent = self.qparents(repo, n)
                        chunks = patch.diff(repo, node1=parent, node2=n,
                                            match=match, opts=diffopts)
                        for chunk in chunks:
                            p.write(chunk)
                    p.close()
                    wlock.release()
                    wlock = None
                    r = self.qrepo()
                    if r:
                        r[None].add([patchfn])
                except:
                    # undo the commit if anything after it failed
                    repo.rollback()
                    raise
            except Exception:
                # remove the half-written patch file, then re-raise
                patchpath = self.join(patchfn)
                try:
                    os.unlink(patchpath)
                except:
                    self.ui.warn(_('error unlinking %s\n') % patchpath)
                raise
            self.removeundo(repo)
        finally:
            release(wlock)
+
    def strip(self, repo, revs, update=True, backup="all", force=None):
        """Remove revisions `revs` and descendants via repair.strip,
        optionally updating the working dir to the queue parent first."""
        wlock = lock = None
        try:
            wlock = repo.wlock()
            lock = repo.lock()

            if update:
                self.check_localchanges(repo, force=force, refresh=False)
                urev = self.qparents(repo, revs[0])
                hg.clean(repo, urev)
                repo.dirstate.write()

            self.removeundo(repo)
            for rev in revs:
                repair.strip(self.ui, repo, rev, backup)
            # strip may have unbundled a set of backed up revisions after
            # the actual strip
            self.removeundo(repo)
        finally:
            release(lock, wlock)
+
+ def isapplied(self, patch):
+ """returns (index, rev, patch)"""
+ for i, a in enumerate(self.applied):
+ if a.name == patch:
+ return (i, a.node, a.name)
+ return None
+
+ # if the exact patch name does not exist, we try a few
+ # variations. If strict is passed, we try only #1
+ #
+ # 1) a number to indicate an offset in the series file
+ # 2) a unique substring of the patch name was given
+ # 3) patchname[-+]num to indicate an offset in the series file
    def lookup(self, patch, strict=False):
        """Resolve a user-supplied patch identifier to a series name.

        Accepts an exact name, a series index, a unique substring
        (unless strict), qtip/qbase, or name-N/name+N series offsets.
        Raises util.Abort when nothing matches.
        """
        patch = patch and str(patch)

        def partial_name(s):
            # exact name, then unique substring, then qtip/qbase aliases
            if s in self.series:
                return s
            matches = [x for x in self.series if s in x]
            if len(matches) > 1:
                self.ui.warn(_('patch name "%s" is ambiguous:\n') % s)
                for m in matches:
                    self.ui.warn(' %s\n' % m)
                return None
            if matches:
                return matches[0]
            if self.series and self.applied:
                if s == 'qtip':
                    return self.series[self.series_end(True)-1]
                if s == 'qbase':
                    return self.series[0]
            return None

        if patch is None:
            return None
        if patch in self.series:
            return patch

        if not os.path.isfile(self.join(patch)):
            try:
                sno = int(patch)
            except (ValueError, OverflowError):
                pass
            else:
                # numeric lookup: index into the series (negative allowed)
                if -len(self.series) <= sno < len(self.series):
                    return self.series[sno]

            if not strict:
                res = partial_name(patch)
                if res:
                    return res
                minus = patch.rfind('-')
                if minus >= 0:
                    res = partial_name(patch[:minus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[minus + 1:] or 1)
                        except (ValueError, OverflowError):
                            pass
                        else:
                            if i - off >= 0:
                                return self.series[i - off]
                plus = patch.rfind('+')
                if plus >= 0:
                    res = partial_name(patch[:plus])
                    if res:
                        i = self.series.index(res)
                        try:
                            off = int(patch[plus + 1:] or 1)
                        except (ValueError, OverflowError):
                            pass
                        else:
                            if i + off < len(self.series):
                                return self.series[i + off]
        raise util.Abort(_("patch %s not in series") % patch)
+
    def push(self, repo, patch=None, force=False, list=False,
             mergeq=None, all=False, move=False):
        """qpush: apply patches up to and including `patch` (or just the
        next one, or all).  Returns 0 on success/no-op, non-zero on error.
        On exception the working directory is restored before re-raising.
        """
        diffopts = self.diffopts()
        wlock = repo.wlock()
        try:
            heads = []
            for b, ls in repo.branchmap().iteritems():
                heads += ls
            if not heads:
                heads = [nullid]
            if repo.dirstate.parents()[0] not in heads:
                self.ui.status(_("(working directory not at a head)\n"))

            if not self.series:
                self.ui.warn(_('no patches in series\n'))
                return 0

            patch = self.lookup(patch)
            # Suppose our series file is: A B C and the current 'top'
            # patch is B. qpush C should be performed (moving forward)
            # qpush B is a NOP (no change) qpush A is an error (can't
            # go backwards with qpush)
            if patch:
                info = self.isapplied(patch)
                if info:
                    if info[0] < len(self.applied) - 1:
                        raise util.Abort(
                            _("cannot push to a previous patch: %s") % patch)
                    self.ui.warn(
                        _('qpush: %s is already at the top\n') % patch)
                    return 0
                pushable, reason = self.pushable(patch)
                if not pushable:
                    if reason:
                        reason = _('guarded by %r') % reason
                    else:
                        reason = _('no matching guards')
                    self.ui.warn(_("cannot push '%s' - %s\n") % (patch, reason))
                    return 1
            elif all:
                patch = self.series[-1]
                if self.isapplied(patch):
                    self.ui.warn(_('all patches are currently applied\n'))
                    return 0

            # Following the above example, starting at 'top' of B:
            # qpush should be performed (pushes C), but a subsequent
            # qpush without an argument is an error (nothing to
            # apply). This allows a loop of "...while hg qpush..." to
            # work as it detects an error when done
            start = self.series_end()
            if start == len(self.series):
                self.ui.warn(_('patch series already fully applied\n'))
                return 1
            if not force:
                self.check_localchanges(repo)

            if move:
                # qpush --move: relocate `patch` to the front of the
                # unapplied part of the series before pushing
                if not patch:
                    raise util.Abort(_("please specify the patch to move"))
                for i, rpn in enumerate(self.full_series[start:]):
                    # strip markers for patch guards
                    if self.guard_re.split(rpn, 1)[0] == patch:
                        break
                index = start + i
                assert index < len(self.full_series)
                fullpatch = self.full_series[index]
                del self.full_series[index]
                self.full_series.insert(start, fullpatch)
                self.parse_series()
                self.series_dirty = 1

            self.applied_dirty = 1
            if start > 0:
                self.check_toppatch(repo)
            if not patch:
                patch = self.series[start]
                end = start + 1
            else:
                end = self.series.index(patch, start) + 1

            s = self.series[start:end]
            all_files = set()
            try:
                if mergeq:
                    ret = self.mergepatch(repo, mergeq, s, diffopts)
                else:
                    ret = self.apply(repo, s, list, all_files=all_files)
            except:
                # bare except on purpose: restore the working directory
                # and re-raise whatever went wrong
                self.ui.warn(_('cleaning up working directory...'))
                node = repo.dirstate.parents()[0]
                hg.revert(repo, node, None)
                # only remove unknown files that we know we touched or
                # created while patching
                for f in all_files:
                    if f not in repo.dirstate:
                        try:
                            util.unlink(repo.wjoin(f))
                        except OSError, inst:
                            if inst.errno != errno.ENOENT:
                                raise
                self.ui.warn(_('done\n'))
                raise

            if not self.applied:
                return ret[0]
            top = self.applied[-1].name
            if ret[0] and ret[0] > 1:
                msg = _("errors during apply, please fix and refresh %s\n")
                self.ui.write(msg % top)
            else:
                self.ui.write(_("now at: %s\n") % top)
            return ret[0]

        finally:
            wlock.release()
+
    def pop(self, repo, patch=None, force=False, update=True, all=False):
        """qpop: unapply patches from the top of the stack down to (but
        not including) `patch`, or just the top one, or all of them."""
        wlock = repo.wlock()
        try:
            if patch:
                # index, rev, patch
                info = self.isapplied(patch)
                if not info:
                    patch = self.lookup(patch)
                    info = self.isapplied(patch)
                if not info:
                    raise util.Abort(_("patch %s is not applied") % patch)

            if not self.applied:
                # Allow qpop -a to work repeatedly,
                # but not qpop without an argument
                self.ui.warn(_("no patches applied\n"))
                return not all

            if all:
                start = 0
            elif patch:
                start = info[0] + 1
            else:
                start = len(self.applied) - 1

            if start >= len(self.applied):
                self.ui.warn(_("qpop: %s is already at the top\n") % patch)
                return

            if not update:
                # still force an update if a dirstate parent is being popped
                parents = repo.dirstate.parents()
                rr = [x.node for x in self.applied]
                for p in parents:
                    if p in rr:
                        self.ui.warn(_("qpop: forcing dirstate update\n"))
                        update = True
            else:
                # skip the working-dir update when no popped patch is a parent
                parents = [p.node() for p in repo[None].parents()]
                needupdate = False
                for entry in self.applied[start:]:
                    if entry.node in parents:
                        needupdate = True
                        break
                update = needupdate

            if not force and update:
                self.check_localchanges(repo)

            self.applied_dirty = 1
            end = len(self.applied)
            rev = self.applied[start].node
            if update:
                top = self.check_toppatch(repo)[0]

            try:
                heads = repo.changelog.heads(rev)
            except error.LookupError:
                node = short(rev)
                raise util.Abort(_('trying to pop unknown node %s') % node)

            if heads != [self.applied[-1].node]:
                raise util.Abort(_("popping would remove a revision not "
                                   "managed by this patch queue"))

            # we know there are no local changes, so we can make a simplified
            # form of hg.update.
            if update:
                qp = self.qparents(repo, rev)
                ctx = repo[qp]
                m, a, r, d = repo.status(qp, top)[:4]
                if d:
                    raise util.Abort(_("deletions found between repo revs"))
                for f in a:
                    try:
                        util.unlink(repo.wjoin(f))
                    except OSError, e:
                        if e.errno != errno.ENOENT:
                            raise
                    repo.dirstate.forget(f)
                for f in m + r:
                    fctx = ctx[f]
                    repo.wwrite(f, fctx.data(), fctx.flags())
                    repo.dirstate.normal(f)
                repo.dirstate.setparents(qp, nullid)
            for patch in reversed(self.applied[start:end]):
                self.ui.status(_("popping %s\n") % patch.name)
            del self.applied[start:end]
            self.strip(repo, [rev], update=False, backup='strip')
            if self.applied:
                self.ui.write(_("now at: %s\n") % self.applied[-1].name)
            else:
                self.ui.write(_("patch queue now empty\n"))
        finally:
            wlock.release()
+
def diff(self, repo, pats, opts):
    """Print the diff of the top patch plus uncommitted changes."""
    top, patch = self.check_toppatch(repo)
    if not top:
        self.ui.write(_("no patches applied\n"))
        return
    qparent = self.qparents(repo, top)
    # --reverse swaps the comparison direction against the working dir
    if opts.get('reverse'):
        node1, node2 = None, qparent
    else:
        node1, node2 = qparent, None
    self.printdiff(repo, self.diffopts(opts, patch), node1, node2,
                   files=pats, opts=opts)
+
def refresh(self, repo, pats=None, **opts):
    """Rewrite the topmost applied patch from the working directory.

    Recomputes the patch content (optionally restricted by *pats*),
    rewrites the patch file atomically, then strips and re-commits the
    qtip changeset.  Returns 1 when no patches are applied.
    """
    if not self.applied:
        self.ui.write(_("no patches applied\n"))
        return 1
    msg = opts.get('msg', '').rstrip()
    newuser = opts.get('user')
    newdate = opts.get('date')
    if newdate:
        newdate = '%d %d' % util.parsedate(newdate)
    wlock = repo.wlock()

    try:
        self.check_toppatch(repo)
        (top, patchfn) = (self.applied[-1].node, self.applied[-1].name)
        # refuse to rewrite a changeset other revisions depend on
        if repo.changelog.heads(top) != [top]:
            raise util.Abort(_("cannot refresh a revision with children"))

        cparents = repo.changelog.parents(top)
        patchparent = self.qparents(repo, top)
        ph = patchheader(self.join(patchfn), self.plainmode)
        diffopts = self.diffopts({'git': opts.get('git')}, patchfn)
        if msg:
            ph.setmessage(msg)
        if newuser:
            ph.setuser(newuser)
        if newdate:
            ph.setdate(newdate)
        ph.setparent(hex(patchparent))

        # only commit new patch when write is complete
        patchf = self.opener(patchfn, 'w', atomictemp=True)

        comments = str(ph)
        if comments:
            patchf.write(comments)

        # update the dirstate in place, strip off the qtip commit
        # and then commit.
        #
        # this should really read:
        #   mm, dd, aa, aa2 = repo.status(tip, patchparent)[:4]
        # but we do it backwards to take advantage of manifest/chlog
        # caching against the next repo.status call
        mm, aa, dd, aa2 = repo.status(patchparent, top)[:4]
        changes = repo.changelog.read(top)
        man = repo.manifest.read(changes[0])
        aaa = aa[:]
        matchfn = cmdutil.match(repo, pats, opts)
        # in short mode, we only diff the files included in the
        # patch already plus specified files
        if opts.get('short'):
            # if amending a patch, we start with existing
            # files plus specified files - unfiltered
            match = cmdutil.matchfiles(repo, mm + aa + dd + matchfn.files())
            # filter with inc/exl options
            matchfn = cmdutil.match(repo, opts=opts)
        else:
            match = cmdutil.matchall(repo)
        m, a, r, d = repo.status(match=match)[:4]

        # we might end up with files that were added between
        # qtip and the dirstate parent, but then changed in the
        # local dirstate. in this case, we want them to only
        # show up in the added section
        for x in m:
            if x == '.hgsub' or x == '.hgsubstate':
                self.ui.warn(_('warning: not refreshing %s\n') % x)
                continue
            if x not in aa:
                mm.append(x)
        # we might end up with files added by the local dirstate that
        # were deleted by the patch. In this case, they should only
        # show up in the changed section.
        for x in a:
            if x == '.hgsub' or x == '.hgsubstate':
                self.ui.warn(_('warning: not adding %s\n') % x)
                continue
            if x in dd:
                del dd[dd.index(x)]
                mm.append(x)
            else:
                aa.append(x)
        # make sure any files deleted in the local dirstate
        # are not in the add or change column of the patch
        forget = []
        for x in d + r:
            if x == '.hgsub' or x == '.hgsubstate':
                self.ui.warn(_('warning: not removing %s\n') % x)
                continue
            if x in aa:
                del aa[aa.index(x)]
                forget.append(x)
                continue
            elif x in mm:
                del mm[mm.index(x)]
            dd.append(x)

        # deduplicate the merged modified/removed/added lists
        m = list(set(mm))
        r = list(set(dd))
        a = list(set(aa))
        c = [filter(matchfn, l) for l in (m, a, r)]
        match = cmdutil.matchfiles(repo, set(c[0] + c[1] + c[2]))
        chunks = patch.diff(repo, patchparent, match=match,
                            changes=c, opts=diffopts)
        for chunk in chunks:
            patchf.write(chunk)

        try:
            if diffopts.git or diffopts.upgrade:
                # git-style patches track copies; rebuild copy records
                copies = {}
                for dst in a:
                    src = repo.dirstate.copied(dst)
                    # during qfold, the source file for copies may
                    # be removed. Treat this as a simple add.
                    if src is not None and src in repo.dirstate:
                        copies.setdefault(src, []).append(dst)
                    repo.dirstate.add(dst)
                # remember the copies between patchparent and qtip
                for dst in aaa:
                    f = repo.file(dst)
                    src = f.renamed(man[dst])
                    if src:
                        copies.setdefault(src[0], []).extend(
                            copies.get(dst, []))
                        if dst in a:
                            copies[src[0]].append(dst)
                    # we can't copy a file created by the patch itself
                    if dst in copies:
                        del copies[dst]
                for src, dsts in copies.iteritems():
                    for dst in dsts:
                        repo.dirstate.copy(src, dst)
            else:
                for dst in a:
                    repo.dirstate.add(dst)
                # Drop useless copy information
                for f in list(repo.dirstate.copies()):
                    repo.dirstate.copy(None, f)
            for f in r:
                repo.dirstate.remove(f)
            # if the patch excludes a modified file, mark that
            # file with mtime=0 so status can see it.
            mm = []
            for i in xrange(len(m)-1, -1, -1):
                if not matchfn(m[i]):
                    mm.append(m[i])
                    del m[i]
            for f in m:
                repo.dirstate.normal(f)
            for f in mm:
                repo.dirstate.normallookup(f)
            for f in forget:
                repo.dirstate.forget(f)

            if not msg:
                if not ph.message:
                    message = "[mq]: %s\n" % patchfn
                else:
                    message = "\n".join(ph.message)
            else:
                message = msg

            user = ph.user or changes[1]

            # assumes strip can roll itself back if interrupted
            repo.dirstate.setparents(*cparents)
            self.applied.pop()
            self.applied_dirty = 1
            self.strip(repo, [top], update=False,
                       backup='strip')
        except:
            # broad on purpose: any failure must roll the dirstate back
            repo.dirstate.invalidate()
            raise

        try:
            # might be nice to attempt to roll back strip after this
            patchf.rename()
            n = repo.commit(message, user, ph.date, match=match,
                            force=True)
            self.applied.append(statusentry(n, patchfn))
        except:
            # commit failed after the strip: tell the user how to recover
            ctx = repo[cparents[0]]
            repo.dirstate.rebuild(ctx.node(), ctx.manifest())
            self.save_dirty()
            self.ui.warn(_('refresh interrupted while patch was popped! '
                           '(revert --all, qpush to recover)\n'))
            raise
    finally:
        wlock.release()
        self.removeundo(repo)
+
+ def init(self, repo, create=False):
+ if not create and os.path.isdir(self.path):
+ raise util.Abort(_("patch queue directory already exists"))
+ try:
+ os.mkdir(self.path)
+ except OSError, inst:
+ if inst.errno != errno.EEXIST or not create:
+ raise
+ if create:
+ return self.qrepo(create=True)
+
def unapplied(self, repo, patch=None):
    """Return [(index, name)] for pushable patches after *patch* (or qtip)."""
    if patch and patch not in self.series:
        raise util.Abort(_("patch %s is not in series file") % patch)
    if patch:
        first = self.series.index(patch) + 1
    else:
        first = self.series_end()
    result = []
    for idx in xrange(first, len(self.series)):
        pushable, reason = self.pushable(idx)
        if pushable:
            result.append((idx, self.series[idx]))
        # record why guarded patches are skipped (verbose output)
        self.explain_pushable(idx)
    return result
+
def qseries(self, repo, missing=None, start=0, length=None, status=None,
            summary=False):
    """Print series entries (or, with missing, stray files in the queue dir).

    status filters by state character ('A'/'U'/'G') unless verbose;
    summary appends the first line of each patch message.
    """
    def displayname(pfx, patchname, state):
        # one output line: optional prefix, colored name, optional summary
        if pfx:
            self.ui.write(pfx)
        if summary:
            ph = patchheader(self.join(patchname), self.plainmode)
            msg = ph.message and ph.message[0] or ''
            if self.ui.formatted():
                # truncate the summary to the remaining terminal width
                width = self.ui.termwidth() - len(pfx) - len(patchname) - 2
                if width > 0:
                    msg = util.ellipsis(msg, width)
                else:
                    msg = ''
            self.ui.write(patchname, label='qseries.' + state)
            self.ui.write(': ')
            self.ui.write(msg, label='qseries.message.' + state)
        else:
            self.ui.write(patchname, label='qseries.' + state)
        self.ui.write('\n')

    applied = set([p.name for p in self.applied])
    if length is None:
        length = len(self.series) - start
    if not missing:
        if self.ui.verbose:
            # width of the largest index printed, for alignment
            idxwidth = len(str(start + length - 1))
        for i in xrange(start, start + length):
            patch = self.series[i]
            if patch in applied:
                char, state = 'A', 'applied'
            elif self.pushable(i)[0]:
                char, state = 'U', 'unapplied'
            else:
                char, state = 'G', 'guarded'
            pfx = ''
            if self.ui.verbose:
                pfx = '%*d %s ' % (idxwidth, i, char)
            elif status and status != char:
                continue
            displayname(pfx, patch, state)
    else:
        # list files in the patch directory that series does not mention
        msng_list = []
        for root, dirs, files in os.walk(self.path):
            d = root[len(self.path) + 1:]
            for f in files:
                fl = os.path.join(d, f)
                if (fl not in self.series and
                    fl not in (self.status_path, self.series_path,
                               self.guards_path)
                    and not fl.startswith('.')):
                    msng_list.append(fl)
        for x in sorted(msng_list):
            pfx = self.ui.verbose and ('D ') or ''
            displayname(pfx, x, 'missing')
+
def issaveline(self, l):
    """Return True when entry *l* is a qsave marker entry."""
    marker = '.hg.patches.save.line'
    if l.name == marker:
        return True
+
def qrepo(self, create=False):
    """Return the versioned patch repository, or None when there is none."""
    ui = self.ui.copy()
    # the queue repo must not inherit the outer repo's default paths
    ui.setconfig('paths', 'default', '', overlay=False)
    ui.setconfig('paths', 'default-push', '', overlay=False)
    if not (create or os.path.isdir(self.join(".hg"))):
        return None
    return hg.repository(ui, path=self.path, create=create)
+
def restore(self, repo, rev, delete=None, qupdate=None):
    """Restore queue state recorded by qsave in changeset *rev*.

    Parses the saved series/applied data from the changeset description;
    with delete, strips the save changeset; with qupdate, updates the
    queue repository to the recorded parents.  Returns 1 on error.
    """
    desc = repo[rev].description().strip()
    lines = desc.splitlines()
    i = 0
    datastart = None
    series = []
    applied = []
    qpp = None
    for i, line in enumerate(lines):
        if line == 'Patch Data:':
            datastart = i + 1
        elif line.startswith('Dirstate:'):
            l = line.rstrip()
            l = l[10:].split(' ')
            qpp = [bin(x) for x in l]
        # fix: use identity comparison with None (was "!= None"),
        # consistent with the "is None" check below
        elif datastart is not None:
            # data section: "node:name" = applied, ":name" = series-only
            l = line.rstrip()
            n, name = l.split(':', 1)
            if n:
                applied.append(statusentry(bin(n), name))
            else:
                series.append(l)
    if datastart is None:
        self.ui.warn(_("No saved patch data found\n"))
        return 1
    self.ui.warn(_("restoring status: %s\n") % lines[0])
    self.full_series = series
    self.applied = applied
    self.parse_series()
    self.series_dirty = 1
    self.applied_dirty = 1
    heads = repo.changelog.heads()
    if delete:
        if rev not in heads:
            self.ui.warn(_("save entry has children, leaving it alone\n"))
        else:
            self.ui.warn(_("removing save entry %s\n") % short(rev))
            pp = repo.dirstate.parents()
            if rev in pp:
                update = True
            else:
                update = False
            self.strip(repo, [rev], update=update, backup='strip')
    if qpp:
        self.ui.warn(_("saved queue repository parents: %s %s\n") %
                     (short(qpp[0]), short(qpp[1])))
        if qupdate:
            self.ui.status(_("updating queue directory\n"))
            r = self.qrepo()
            if not r:
                self.ui.warn(_("Unable to load queue repository\n"))
                return 1
            hg.clean(r, qpp[0])
+
def save(self, repo, msg=None):
    """Record the current queue state as a special commit; 1 on failure."""
    if not self.applied:
        self.ui.warn(_("save: no patches applied, exiting\n"))
        return 1
    if self.issaveline(self.applied[-1]):
        self.ui.warn(_("status is already saved\n"))
        return 1

    if msg:
        msg = "hg patches: " + msg.rstrip('\r\n')
    else:
        msg = _("hg patches saved state")
    qrepo = self.qrepo()
    if qrepo:
        # also record the queue repository's dirstate parents
        pp = qrepo.dirstate.parents()
        msg += "\nDirstate: %s %s" % (hex(pp[0]), hex(pp[1]))
    msg += "\n\nPatch Data:\n"
    msg += ''.join('%s\n' % x for x in self.applied)
    msg += ''.join(':%s\n' % x for x in self.full_series)
    n = repo.commit(msg, force=True)
    if not n:
        self.ui.warn(_("repo commit failed\n"))
        return 1
    self.applied.append(statusentry(n, '.hg.patches.save.line'))
    self.applied_dirty = 1
    self.removeundo(repo)
+
def full_series_end(self):
    """Index in full_series just past the last applied patch (0 if none)."""
    if not self.applied:
        return 0
    last = self.applied[-1].name
    idx = self.find_series(last)
    # an applied patch missing from the series file ends the series
    if idx is None:
        return len(self.full_series)
    return idx + 1
+
def series_end(self, all_patches=False):
    """If all_patches is False, return the index of the next pushable patch
    in the series, or the series length. If all_patches is True, return the
    index of the first patch past the last applied one.
    """
    # local helper renamed so it no longer shadows the builtin next()
    def nextpushable(start):
        if all_patches or start >= len(self.series):
            return start
        for i in xrange(start, len(self.series)):
            p, reason = self.pushable(i)
            if p:
                break
            self.explain_pushable(i)
        return i
    if not self.applied:
        return nextpushable(0)
    last = self.applied[-1].name
    try:
        pos = self.series.index(last)
    except ValueError:
        # applied patch not in series: treat as empty
        return 0
    return nextpushable(pos + 1)
+
def appliedname(self, index):
    """Name of applied patch *index*, prefixed by series position if verbose."""
    name = self.applied[index].name
    if self.ui.verbose:
        return "%d %s" % (self.series.index(name), name)
    return name
+
def qimport(self, repo, files, patchname=None, rev=None, existing=None,
            force=None, git=False):
    """Import patch files (or existing revisions with *rev*) into the queue.

    files may contain '-' to read from stdin (requires patchname).
    existing registers files already in the patch directory, optionally
    renaming them to patchname.  force allows overwriting/duplicates.
    """
    def checkseries(patchname):
        if patchname in self.series:
            raise util.Abort(_('patch %s is already in the series file')
                             % patchname)
    def checkfile(patchname):
        if not force and os.path.exists(self.join(patchname)):
            raise util.Abort(_('patch "%s" already exists')
                             % patchname)

    if rev:
        if files:
            raise util.Abort(_('option "-r" not valid when importing '
                               'files'))
        rev = cmdutil.revrange(repo, rev)
        # import from newest to oldest so each becomes the new qbase
        rev.sort(reverse=True)
    if (len(files) > 1 or len(rev) > 1) and patchname:
        raise util.Abort(_('option "-n" not valid when importing multiple '
                           'patches'))
    if rev:
        # If mq patches are applied, we can only import revisions
        # that form a linear path to qbase.
        # Otherwise, they should form a linear path to a head.
        heads = repo.changelog.heads(repo.changelog.node(rev[-1]))
        if len(heads) > 1:
            raise util.Abort(_('revision %d is the root of more than one '
                               'branch') % rev[-1])
        if self.applied:
            base = repo.changelog.node(rev[0])
            if base in [n.node for n in self.applied]:
                raise util.Abort(_('revision %d is already managed')
                                 % rev[0])
            if heads != [self.applied[-1].node]:
                raise util.Abort(_('revision %d is not the parent of '
                                   'the queue') % rev[0])
            base = repo.changelog.rev(self.applied[0].node)
            lastparent = repo.changelog.parentrevs(base)[0]
        else:
            if heads != [repo.changelog.node(rev[0])]:
                raise util.Abort(_('revision %d has unmanaged children')
                                 % rev[0])
            lastparent = None

        diffopts = self.diffopts({'git': git})
        for r in rev:
            p1, p2 = repo.changelog.parentrevs(r)
            n = repo.changelog.node(r)
            if p2 != nullrev:
                raise util.Abort(_('cannot import merge revision %d') % r)
            # enforce linearity between consecutive imported revisions
            if lastparent and lastparent != r:
                raise util.Abort(_('revision %d is not the parent of %d')
                                 % (r, lastparent))
            lastparent = p1

            if not patchname:
                patchname = normname('%d.diff' % r)
            self.check_reserved_name(patchname)
            checkseries(patchname)
            checkfile(patchname)
            # prepend: the revision becomes the new bottom of the stack
            self.full_series.insert(0, patchname)

            patchf = self.opener(patchname, "w")
            cmdutil.export(repo, [n], fp=patchf, opts=diffopts)
            patchf.close()

            se = statusentry(n, patchname)
            self.applied.insert(0, se)

            self.added.append(patchname)
            patchname = None
        self.parse_series()
        self.applied_dirty = 1
        self.series_dirty = True

    for i, filename in enumerate(files):
        if existing:
            if filename == '-':
                raise util.Abort(_('-e is incompatible with import from -'))
            filename = normname(filename)
            self.check_reserved_name(filename)
            originpath = self.join(filename)
            if not os.path.isfile(originpath):
                raise util.Abort(_("patch %s does not exist") % filename)

            if patchname:
                self.check_reserved_name(patchname)
                checkfile(patchname)

                self.ui.write(_('renaming %s to %s\n')
                              % (filename, patchname))
                util.rename(originpath, self.join(patchname))
            else:
                patchname = filename

        else:
            try:
                if filename == '-':
                    if not patchname:
                        raise util.Abort(
                            _('need --name to import a patch from -'))
                    text = sys.stdin.read()
                else:
                    text = url.open(self.ui, filename).read()
            except (OSError, IOError):
                raise util.Abort(_("unable to read file %s") % filename)
            if not patchname:
                patchname = normname(os.path.basename(filename))
            self.check_reserved_name(patchname)
            checkfile(patchname)
            patchf = self.opener(patchname, "w")
            patchf.write(text)
        if not force:
            checkseries(patchname)
        if patchname not in self.series:
            # insert after the last applied patch, offset by loop index
            index = self.full_series_end() + i
            self.full_series[index:index] = [patchname]
        self.parse_series()
        self.series_dirty = True
        self.ui.warn(_("adding %s to series file\n") % patchname)
        self.added.append(patchname)
        patchname = None
+
def delete(ui, repo, *patches, **opts):
    """remove patches from queue

    The patches must not be applied, and at least one patch is required. With
    -k/--keep, the patch files are preserved in the patch directory.

    To stop managing a patch and move it into permanent history,
    use the :hg:`qfinish` command."""
    # delegate to the queue object, then persist its series/status files
    mq = repo.mq
    mq.delete(repo, patches, opts)
    mq.save_dirty()
    return 0
+
def applied(ui, repo, patch=None, **opts):
    """print the patches already applied

    Returns 0 on success."""

    mq = repo.mq

    if patch:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        end = mq.series.index(patch) + 1
    else:
        end = mq.series_end(True)

    if opts.get('last'):
        # show only the patch below the top one
        if not end:
            ui.write(_("no patches applied\n"))
            return 1
        if end == 1:
            ui.write(_("only one patch applied\n"))
            return 1
        start = end - 2
        end = 1
    else:
        start = 0

    mq.qseries(repo, length=end, start=start, status='A',
               summary=opts.get('summary'))
+
+
def unapplied(ui, repo, patch=None, **opts):
    """print the patches not yet applied

    Returns 0 on success."""

    mq = repo.mq
    if patch:
        if patch not in mq.series:
            raise util.Abort(_("patch %s is not in series file") % patch)
        start = mq.series.index(patch) + 1
    else:
        start = mq.series_end(True)

    if opts.get('first') and start == len(mq.series):
        ui.write(_("all patches applied\n"))
        return 1

    # --first limits the listing to a single entry
    length = 1 if opts.get('first') else None
    mq.qseries(repo, start=start, length=length, status='U',
               summary=opts.get('summary'))
+
def qimport(ui, repo, *filename, **opts):
    """import a patch

    The patch is inserted into the series after the last applied
    patch. If no patches have been applied, qimport prepends the patch
    to the series.

    The patch will have the same name as its source file unless you
    give it a new one with -n/--name.

    You can register an existing patch inside the patch directory with
    the -e/--existing flag.

    With -f/--force, an existing patch of the same name will be
    overwritten.

    An existing changeset may be placed under mq control with -r/--rev
    (e.g. qimport --rev tip -n patch will place tip under mq control).
    With -g/--git, patches imported with --rev will use the git diff
    format. See the diffs help topic for information on why this is
    important for preserving rename/copy information and permission
    changes.

    To import a patch from standard input, pass - as the patch file.
    When importing from standard input, a patch name must be specified
    using the --name flag.

    To import an existing patch while renaming it::

      hg qimport -e existing-patch -n new-name

    Returns 0 if import succeeded.
    """
    mq = repo.mq
    # persist queue state even when the import aborts partway
    try:
        mq.qimport(repo, filename, patchname=opts.get('name'),
                   existing=opts.get('existing'), force=opts.get('force'),
                   rev=opts.get('rev'), git=opts.get('git'))
    finally:
        mq.save_dirty()

    if opts.get('push') and not opts.get('rev'):
        return mq.push(repo, None)
    return 0
+
def qinit(ui, repo, create):
    """initialize a new queue repository

    This command also creates a series file for ordering patches, and
    an mq-specific .hgignore file in the queue repository, to exclude
    the status and guards files (these contain mostly transient state).

    Returns 0 if initialization succeeded."""
    mq = repo.mq
    qrepo = mq.init(repo, create)
    mq.save_dirty()
    if not qrepo:
        return 0
    # seed the new queue repo with .hgignore and an empty series file
    if not os.path.exists(qrepo.wjoin('.hgignore')):
        fp = qrepo.wopener('.hgignore', 'w')
        for line in ('^\\.hg\n', '^\\.mq\n', 'syntax: glob\n',
                     'status\n', 'guards\n'):
            fp.write(line)
        fp.close()
    if not os.path.exists(qrepo.wjoin('series')):
        qrepo.wopener('series', 'w').close()
    qrepo[None].add(['.hgignore', 'series'])
    commands.add(ui, qrepo)
    return 0
+
def init(ui, repo, **opts):
    """init a new queue repository (DEPRECATED)

    The queue repository is unversioned by default. If
    -c/--create-repo is specified, qinit will create a separate nested
    repository for patches (qinit -c may also be run later to convert
    an unversioned patch repository into a versioned one). You can use
    qcommit to commit changes to this queue repository.

    This command is deprecated. Without -c, it's implied by other relevant
    commands. With -c, use :hg:`init --mq` instead."""
    # thin wrapper: all the work happens in qinit()
    create = opts.get('create_repo')
    return qinit(ui, repo, create=create)
+
def clone(ui, source, dest=None, **opts):
    '''clone main and patch repository at same time

    If source is local, destination will have no patches applied. If
    source is remote, this command can not check if patches are
    applied in source, so cannot guarantee that patches are not
    applied in destination. If you clone remote repository, be sure
    before that it has no patches applied.

    Source patch repository is looked for in <src>/.hg/patches by
    default. Use -p <url> to change.

    The patch directory must be a nested Mercurial repository, as
    would be created by :hg:`init --mq`.

    Return 0 on success.
    '''
    def patchdir(repo):
        # conventional location of the nested patch repository
        url = repo.url()
        if url.endswith('/'):
            url = url[:-1]
        return url + '/.hg/patches'
    if dest is None:
        dest = hg.defaultdest(source)
    sr = hg.repository(hg.remoteui(ui, opts), ui.expandpath(source))
    if opts.get('patches'):
        patchespath = ui.expandpath(opts.get('patches'))
    else:
        patchespath = patchdir(sr)
    # fail early if the source has no versioned patch repository
    try:
        hg.repository(ui, patchespath)
    except error.RepoError:
        raise util.Abort(_('versioned patch repository not found'
                           ' (see init --mq)'))
    qbase, destrev = None, None
    if sr.local():
        if sr.mq.applied:
            qbase = sr.mq.applied[0].node
            if not hg.islocal(dest):
                # remote dest: restrict the clone to revisions below qbase
                heads = set(sr.heads())
                destrev = list(heads.difference(sr.heads(qbase)))
                destrev.append(sr.changelog.parents(qbase)[0])
    elif sr.capable('lookup'):
        # remote source: ask it for qbase so we can strip after cloning
        try:
            qbase = sr.lookup('qbase')
        except error.RepoError:
            pass
    ui.note(_('cloning main repository\n'))
    sr, dr = hg.clone(ui, sr.url(), dest,
                      pull=opts.get('pull'),
                      rev=destrev,
                      update=False,
                      stream=opts.get('uncompressed'))
    ui.note(_('cloning patch repository\n'))
    hg.clone(ui, opts.get('patches') or patchdir(sr), patchdir(dr),
             pull=opts.get('pull'), update=not opts.get('noupdate'),
             stream=opts.get('uncompressed'))
    if dr.local():
        if qbase:
            # ensure the destination starts with no patches applied
            ui.note(_('stripping applied patches from destination '
                      'repository\n'))
            dr.mq.strip(dr, [qbase], update=False, backup=None)
        if not opts.get('noupdate'):
            ui.note(_('updating destination repository\n'))
            hg.update(dr, dr.changelog.tip())
+
def commit(ui, repo, *pats, **opts):
    """commit changes in the queue repository (DEPRECATED)

    This command is deprecated; use :hg:`commit --mq` instead."""
    queue = repo.mq
    qrepo = queue.qrepo()
    if not qrepo:
        raise util.Abort('no queue repository')
    # commit inside the nested patch repository, not the main one
    commands.commit(qrepo.ui, qrepo, *pats, **opts)
+
def series(ui, repo, **opts):
    """print the entire series file

    Returns 0 on success."""
    mq = repo.mq
    mq.qseries(repo, missing=opts.get('missing'),
               summary=opts.get('summary'))
    return 0
+
def top(ui, repo, **opts):
    """print the name of the current patch

    Returns 0 on success."""
    mq = repo.mq
    if mq.applied:
        t = mq.series_end(True)
    else:
        t = 0
    if not t:
        ui.write(_("no patches applied\n"))
        return 1
    mq.qseries(repo, start=t - 1, length=1, status='A',
               summary=opts.get('summary'))
+
def next(ui, repo, **opts):
    """print the name of the next patch

    Returns 0 on success."""
    mq = repo.mq
    pos = mq.series_end()
    if pos == len(mq.series):
        ui.write(_("all patches applied\n"))
        return 1
    mq.qseries(repo, start=pos, length=1, summary=opts.get('summary'))
+
def prev(ui, repo, **opts):
    """print the name of the previous patch

    Returns 0 on success."""
    mq = repo.mq
    count = len(mq.applied)
    if not count:
        ui.write(_("no patches applied\n"))
        return 1
    if count == 1:
        ui.write(_("only one patch applied\n"))
        return 1
    mq.qseries(repo, start=count - 2, length=1, status='A',
               summary=opts.get('summary'))
+
def setupheaderopts(ui, opts):
    """Fill in opts['user']/opts['date'] from -U/-D when not given explicitly."""
    if opts.get('currentuser') and not opts.get('user'):
        opts['user'] = ui.username()
    if opts.get('currentdate') and not opts.get('date'):
        opts['date'] = "%d %d" % util.makedate()
+
def new(ui, repo, patch, *args, **opts):
    """create a new patch

    qnew creates a new patch on top of the currently-applied patch (if
    any). The patch will be initialized with any outstanding changes
    in the working directory. You may also use -I/--include,
    -X/--exclude, and/or a list of files after the patch name to add
    only changes to matching files to the new patch, leaving the rest
    as uncommitted modifications.

    -u/--user and -d/--date can be used to set the (given) user and
    date, respectively. -U/--currentuser and -D/--currentdate set user
    to current user and date to current date.

    -e/--edit, -m/--message or -l/--logfile set the patch header as
    well as the commit message. If none is specified, the header is
    empty and the commit message is '[mq]: PATCH'.

    Use the -g/--git option to keep the patch in the git extended diff
    format. Read the diffs help topic for more information on why this
    is important for preserving permission changes and copy/rename
    information.

    Returns 0 on successful creation of a new patch.
    """
    msg = cmdutil.logmessage(opts)
    def getmsg():
        # deferred: the editor is only launched if queue.new needs the text
        return ui.edit(msg, opts.get('user') or ui.username())
    q = repo.mq
    # opts['msg'] is consumed by queue.new: a string, or a callable for -e
    # (removed a redundant dead store that was immediately overwritten)
    if opts.get('edit'):
        opts['msg'] = getmsg
    else:
        opts['msg'] = msg
    setupheaderopts(ui, opts)
    q.new(repo, patch, *args, **opts)
    q.save_dirty()
    return 0
+
def refresh(ui, repo, *pats, **opts):
    """update the current patch

    If any file patterns are provided, the refreshed patch will
    contain only the modifications that match those patterns; the
    remaining modifications will remain in the working directory.

    If -s/--short is specified, files currently included in the patch
    will be refreshed just like matched files and remain in the patch.

    If -e/--edit is specified, Mercurial will start your configured editor for
    you to enter a message. In case qrefresh fails, you will find a backup of
    your message in ``.hg/last-message.txt``.

    hg add/remove/copy/rename work as usual, though you might want to
    use git-style patches (-g/--git or [diff] git=1) to track copies
    and renames. See the diffs help topic for more information on the
    git diff format.

    Returns 0 on success.
    """
    mq = repo.mq
    message = cmdutil.logmessage(opts)
    if opts.get('edit'):
        if not mq.applied:
            ui.write(_("no patches applied\n"))
            return 1
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))
        toppatch = mq.applied[-1].name
        ph = patchheader(mq.join(toppatch), mq.plainmode)
        message = ui.edit('\n'.join(ph.message), ph.user or ui.username())
        # We don't want to lose the patch message if qrefresh fails (issue2062)
        msgfile = repo.opener('last-message.txt', 'wb')
        msgfile.write(message)
        msgfile.close()
    setupheaderopts(ui, opts)
    ret = mq.refresh(repo, pats, msg=message, **opts)
    mq.save_dirty()
    return ret
+
def diff(ui, repo, *pats, **opts):
    """diff of the current patch and subsequent modifications

    Shows a diff which includes the current patch as well as any
    changes which have been made in the working directory since the
    last refresh (thus showing what the current patch would become
    after a qrefresh).

    Use :hg:`diff` if you only want to see the changes made since the
    last qrefresh, or :hg:`export qtip` if you want to see changes
    made by the current patch without including changes made since the
    qrefresh.

    Returns 0 on success.
    """
    mq = repo.mq
    mq.diff(repo, pats, opts)
    return 0
+
def fold(ui, repo, *files, **opts):
    """fold the named patches into the current patch

    Patches must not yet be applied. Each patch will be successively
    applied to the current patch in the order given. If all the
    patches apply successfully, the current patch will be refreshed
    with the new cumulative patch, and the folded patches will be
    deleted. With -k/--keep, the folded patch files will not be
    removed afterwards.

    The header for each folded patch will be concatenated with the
    current patch header, separated by a line of ``* * *``.

    Returns 0 on success."""

    q = repo.mq

    if not files:
        raise util.Abort(_('qfold requires at least one patch name'))
    if not q.check_toppatch(repo)[0]:
        raise util.Abort(_('no patches applied'))
    q.check_localchanges(repo)

    message = cmdutil.logmessage(opts)
    if opts.get('edit'):
        if message:
            raise util.Abort(_('option "-e" incompatible with "-m" or "-l"'))

    parent = q.lookup('qtip')
    patches = []
    messages = []
    for f in files:
        p = q.lookup(f)
        if p in patches or p == parent:
            ui.warn(_('Skipping already folded patch %s\n') % p)
            if p in patches:
                # fix: actually skip duplicates; previously the name was
                # appended again and the patch folded twice despite the
                # warning (p == parent still falls through to the
                # isapplied abort below, preserving that behavior)
                continue
        if q.isapplied(p):
            raise util.Abort(_('qfold cannot fold already applied patch %s') % p)
        patches.append(p)

    for p in patches:
        if not message:
            # collect each folded patch's header for the combined message
            ph = patchheader(q.join(p), q.plainmode)
            if ph.message:
                messages.append(ph.message)
        pf = q.join(p)
        (patchsuccess, files, fuzz) = q.patch(repo, pf)
        if not patchsuccess:
            raise util.Abort(_('error folding patch %s') % p)
        cmdutil.updatedir(ui, repo, files)

    if not message:
        # concatenate headers, separated by '* * *' lines
        ph = patchheader(q.join(parent), q.plainmode)
        message, user = ph.message, ph.user
        for msg in messages:
            message.append('* * *')
            message.extend(msg)
        message = '\n'.join(message)

    if opts.get('edit'):
        message = ui.edit(message, user or ui.username())

    diffopts = q.patchopts(q.diffopts(), *patches)
    q.refresh(repo, msg=message, git=diffopts.git)
    q.delete(repo, patches, opts)
    q.save_dirty()
+
def goto(ui, repo, patch, **opts):
    '''push or pop patches until named patch is at top of stack

    Returns 0 on success.'''
    mq = repo.mq
    patch = mq.lookup(patch)
    force = opts.get('force')
    # pop if the target is already applied, otherwise push up to it
    if mq.isapplied(patch):
        ret = mq.pop(repo, patch, force=force)
    else:
        ret = mq.push(repo, patch, force=force)
    mq.save_dirty()
    return ret
+
def guard(ui, repo, *args, **opts):
    '''set or print guards for a patch

    Guards control whether a patch can be pushed. A patch with no
    guards is always pushed. A patch with a positive guard ("+foo") is
    pushed only if the :hg:`qselect` command has activated it. A patch with
    a negative guard ("-foo") is never pushed if the :hg:`qselect` command
    has activated it.

    With no arguments, print the currently active guards.
    With arguments, set guards for the named patch.

    .. note::
       Specifying negative guards now requires '--'.

    To set guards on another patch::

      hg qguard other.patch -- +2.6.17 -stable

    Returns 0 on success.
    '''
    def status(idx):
        # print one series entry with its guards, colored by state
        guards = q.series_guards[idx] or ['unguarded']
        if q.series[idx] in applied:
            state = 'applied'
        elif q.pushable(idx)[0]:
            state = 'unapplied'
        else:
            state = 'guarded'
        label = 'qguard.patch qguard.%s qseries.%s' % (state, state)
        ui.write('%s: ' % ui.label(q.series[idx], label))

        for i, guard in enumerate(guards):
            if guard.startswith('+'):
                ui.write(guard, label='qguard.positive')
            elif guard.startswith('-'):
                ui.write(guard, label='qguard.negative')
            else:
                ui.write(guard, label='qguard.unguarded')
            if i != len(guards) - 1:
                ui.write(' ')
        ui.write('\n')
    q = repo.mq
    applied = set(p.name for p in q.applied)
    patch = None
    args = list(args)
    if opts.get('list'):
        if args or opts.get('none'):
            raise util.Abort(_('cannot mix -l/--list with options or arguments'))
        for i in xrange(len(q.series)):
            status(i)
        return
    # no patch named: the guards apply to the top patch.  NOTE:
    # args[0][0:1] in '-+' is also True for an empty first argument,
    # since '' is a substring of '-+'
    if not args or args[0][0:1] in '-+':
        if not q.applied:
            raise util.Abort(_('no patches applied'))
        patch = q.applied[-1].name
    if patch is None and args[0][0:1] not in '-+':
        patch = args.pop(0)
    if patch is None:
        raise util.Abort(_('no patch to work with'))
    if args or opts.get('none'):
        # set (or clear, with --none) the guards for the patch
        idx = q.find_series(patch)
        if idx is None:
            raise util.Abort(_('no patch named %s') % patch)
        q.set_guards(idx, args)
        q.save_dirty()
    else:
        status(q.series.index(q.lookup(patch)))
+
+def header(ui, repo, patch=None):
+ """print the header of the topmost or specified patch
+
+ Returns 0 on success."""
+ q = repo.mq
+
+ if patch:
+ patch = q.lookup(patch)
+ else:
+ if not q.applied:
+ ui.write(_('no patches applied\n'))
+ return 1
+ patch = q.lookup('qtip')
+ ph = patchheader(q.join(patch), q.plainmode)
+
+ ui.write('\n'.join(ph.message) + '\n')
+
+def lastsavename(path):
+ (directory, base) = os.path.split(path)
+ names = os.listdir(directory)
+ namere = re.compile("%s.([0-9]+)" % base)
+ maxindex = None
+ maxname = None
+ for f in names:
+ m = namere.match(f)
+ if m:
+ index = int(m.group(1))
+ if maxindex is None or index > maxindex:
+ maxindex = index
+ maxname = f
+ if maxname:
+ return (os.path.join(directory, maxname), maxindex)
+ return (None, None)
+
+def savename(path):
+ (last, index) = lastsavename(path)
+ if last is None:
+ index = 0
+ newpath = path + ".%d" % (index + 1)
+ return newpath
+
+def push(ui, repo, patch=None, **opts):
+ """push the next patch onto the stack
+
+ When -f/--force is applied, all local changes in patched files
+ will be lost.
+
+ Return 0 on succces.
+ """
+ q = repo.mq
+ mergeq = None
+
+ if opts.get('merge'):
+ if opts.get('name'):
+ newpath = repo.join(opts.get('name'))
+ else:
+ newpath, i = lastsavename(q.path)
+ if not newpath:
+ ui.warn(_("no saved queues found, please use -n\n"))
+ return 1
+ mergeq = queue(ui, repo.join(""), newpath)
+ ui.warn(_("merging with queue at: %s\n") % mergeq.path)
+ ret = q.push(repo, patch, force=opts.get('force'), list=opts.get('list'),
+ mergeq=mergeq, all=opts.get('all'), move=opts.get('move'))
+ return ret
+
+def pop(ui, repo, patch=None, **opts):
+ """pop the current patch off the stack
+
+ By default, pops off the top of the patch stack. If given a patch
+ name, keeps popping off patches until the named patch is at the
+ top of the stack.
+
+ Return 0 on success.
+ """
+ localupdate = True
+ if opts.get('name'):
+ q = queue(ui, repo.join(""), repo.join(opts.get('name')))
+ ui.warn(_('using patch queue: %s\n') % q.path)
+ localupdate = False
+ else:
+ q = repo.mq
+ ret = q.pop(repo, patch, force=opts.get('force'), update=localupdate,
+ all=opts.get('all'))
+ q.save_dirty()
+ return ret
+
+def rename(ui, repo, patch, name=None, **opts):
+ """rename a patch
+
+ With one argument, renames the current patch to PATCH1.
+ With two arguments, renames PATCH1 to PATCH2.
+
+ Returns 0 on success."""
+
+ q = repo.mq
+
+ if not name:
+ name = patch
+ patch = None
+
+ if patch:
+ patch = q.lookup(patch)
+ else:
+ if not q.applied:
+ ui.write(_('no patches applied\n'))
+ return
+ patch = q.lookup('qtip')
+ absdest = q.join(name)
+ if os.path.isdir(absdest):
+ name = normname(os.path.join(name, os.path.basename(patch)))
+ absdest = q.join(name)
+ if os.path.exists(absdest):
+ raise util.Abort(_('%s already exists') % absdest)
+
+ if name in q.series:
+ raise util.Abort(
+ _('A patch named %s already exists in the series file') % name)
+
+ ui.note(_('renaming %s to %s\n') % (patch, name))
+ i = q.find_series(patch)
+ guards = q.guard_re.findall(q.full_series[i])
+ q.full_series[i] = name + ''.join([' #' + g for g in guards])
+ q.parse_series()
+ q.series_dirty = 1
+
+ info = q.isapplied(patch)
+ if info:
+ q.applied[info[0]] = statusentry(info[1], name)
+ q.applied_dirty = 1
+
+ destdir = os.path.dirname(absdest)
+ if not os.path.isdir(destdir):
+ os.makedirs(destdir)
+ util.rename(q.join(patch), absdest)
+ r = q.qrepo()
+ if r and patch in r.dirstate:
+ wctx = r[None]
+ wlock = r.wlock()
+ try:
+ if r.dirstate[patch] == 'a':
+ r.dirstate.forget(patch)
+ r.dirstate.add(name)
+ else:
+ if r.dirstate[name] == 'r':
+ wctx.undelete([name])
+ wctx.copy(patch, name)
+ wctx.remove([patch], False)
+ finally:
+ wlock.release()
+
+ q.save_dirty()
+
+def restore(ui, repo, rev, **opts):
+ """restore the queue state saved by a revision (DEPRECATED)
+
+ This command is deprecated, use :hg:`rebase` instead."""
+ rev = repo.lookup(rev)
+ q = repo.mq
+ q.restore(repo, rev, delete=opts.get('delete'),
+ qupdate=opts.get('update'))
+ q.save_dirty()
+ return 0
+
+def save(ui, repo, **opts):
+ """save current queue state (DEPRECATED)
+
+ This command is deprecated, use :hg:`rebase` instead."""
+ q = repo.mq
+ message = cmdutil.logmessage(opts)
+ ret = q.save(repo, msg=message)
+ if ret:
+ return ret
+ q.save_dirty()
+ if opts.get('copy'):
+ path = q.path
+ if opts.get('name'):
+ newpath = os.path.join(q.basepath, opts.get('name'))
+ if os.path.exists(newpath):
+ if not os.path.isdir(newpath):
+ raise util.Abort(_('destination %s exists and is not '
+ 'a directory') % newpath)
+ if not opts.get('force'):
+ raise util.Abort(_('destination %s exists, '
+ 'use -f to force') % newpath)
+ else:
+ newpath = savename(path)
+ ui.warn(_("copy %s to %s\n") % (path, newpath))
+ util.copyfiles(path, newpath)
+ if opts.get('empty'):
+ try:
+ os.unlink(q.join(q.status_path))
+ except:
+ pass
+ return 0
+
+def strip(ui, repo, *revs, **opts):
+ """strip changesets and all their descendants from the repository
+
+ The strip command removes the specified changesets and all their
+ descendants. If the working directory has uncommitted changes,
+ the operation is aborted unless the --force flag is supplied.
+
+ If a parent of the working directory is stripped, then the working
+ directory will automatically be updated to the most recent
+ available ancestor of the stripped parent after the operation
+ completes.
+
+ Any stripped changesets are stored in ``.hg/strip-backup`` as a
+ bundle (see :hg:`help bundle` and :hg:`help unbundle`). They can
+ be restored by running :hg:`unbundle .hg/strip-backup/BUNDLE`,
+ where BUNDLE is the bundle file created by the strip. Note that
+ the local revision numbers will in general be different after the
+ restore.
+
+ Use the --no-backup option to discard the backup bundle once the
+ operation completes.
+
+ Return 0 on success.
+ """
+ backup = 'all'
+ if opts.get('backup'):
+ backup = 'strip'
+ elif opts.get('no_backup') or opts.get('nobackup'):
+ backup = 'none'
+
+ cl = repo.changelog
+ revs = set(cmdutil.revrange(repo, revs))
+ if not revs:
+ raise util.Abort(_('empty revision set'))
+
+ descendants = set(cl.descendants(*revs))
+ strippedrevs = revs.union(descendants)
+ roots = revs.difference(descendants)
+
+ update = False
+ # if one of the wdir parent is stripped we'll need
+ # to update away to an earlier revision
+ for p in repo.dirstate.parents():
+ if p != nullid and cl.rev(p) in strippedrevs:
+ update = True
+ break
+
+ rootnodes = set(cl.node(r) for r in roots)
+
+ q = repo.mq
+ if q.applied:
+ # refresh queue state if we're about to strip
+ # applied patches
+ if cl.rev(repo.lookup('qtip')) in strippedrevs:
+ q.applied_dirty = True
+ start = 0
+ end = len(q.applied)
+ for i, statusentry in enumerate(q.applied):
+ if statusentry.node in rootnodes:
+ # if one of the stripped roots is an applied
+ # patch, only part of the queue is stripped
+ start = i
+ break
+ del q.applied[start:end]
+ q.save_dirty()
+
+ revs = list(rootnodes)
+ if update and opts.get('keep'):
+ wlock = repo.wlock()
+ try:
+ urev = repo.mq.qparents(repo, revs[0])
+ repo.dirstate.rebuild(urev, repo[urev].manifest())
+ repo.dirstate.write()
+ update = False
+ finally:
+ wlock.release()
+
+ repo.mq.strip(repo, revs, backup=backup, update=update,
+ force=opts.get('force'))
+ return 0
+
+def select(ui, repo, *args, **opts):
+ '''set or print guarded patches to push
+
+ Use the :hg:`qguard` command to set or print guards on patch, then use
+ qselect to tell mq which guards to use. A patch will be pushed if
+ it has no guards or any positive guards match the currently
+ selected guard, but will not be pushed if any negative guards
+ match the current guard. For example::
+
+ qguard foo.patch -stable (negative guard)
+ qguard bar.patch +stable (positive guard)
+ qselect stable
+
+ This activates the "stable" guard. mq will skip foo.patch (because
+ it has a negative match) but push bar.patch (because it has a
+ positive match).
+
+ With no arguments, prints the currently active guards.
+ With one argument, sets the active guard.
+
+ Use -n/--none to deactivate guards (no other arguments needed).
+ When no guards are active, patches with positive guards are
+ skipped and patches with negative guards are pushed.
+
+ qselect can change the guards on applied patches. It does not pop
+ guarded patches by default. Use --pop to pop back to the last
+ applied patch that is not guarded. Use --reapply (which implies
+ --pop) to push back to the current patch afterwards, but skip
+ guarded patches.
+
+ Use -s/--series to print a list of all guards in the series file
+ (no other arguments needed). Use -v for more information.
+
+ Returns 0 on success.'''
+
+ q = repo.mq
+ guards = q.active()
+ if args or opts.get('none'):
+ old_unapplied = q.unapplied(repo)
+ old_guarded = [i for i in xrange(len(q.applied)) if
+ not q.pushable(i)[0]]
+ q.set_active(args)
+ q.save_dirty()
+ if not args:
+ ui.status(_('guards deactivated\n'))
+ if not opts.get('pop') and not opts.get('reapply'):
+ unapplied = q.unapplied(repo)
+ guarded = [i for i in xrange(len(q.applied))
+ if not q.pushable(i)[0]]
+ if len(unapplied) != len(old_unapplied):
+ ui.status(_('number of unguarded, unapplied patches has '
+ 'changed from %d to %d\n') %
+ (len(old_unapplied), len(unapplied)))
+ if len(guarded) != len(old_guarded):
+ ui.status(_('number of guarded, applied patches has changed '
+ 'from %d to %d\n') %
+ (len(old_guarded), len(guarded)))
+ elif opts.get('series'):
+ guards = {}
+ noguards = 0
+ for gs in q.series_guards:
+ if not gs:
+ noguards += 1
+ for g in gs:
+ guards.setdefault(g, 0)
+ guards[g] += 1
+ if ui.verbose:
+ guards['NONE'] = noguards
+ guards = guards.items()
+ guards.sort(key=lambda x: x[0][1:])
+ if guards:
+ ui.note(_('guards in series file:\n'))
+ for guard, count in guards:
+ ui.note('%2d ' % count)
+ ui.write(guard, '\n')
+ else:
+ ui.note(_('no guards in series file\n'))
+ else:
+ if guards:
+ ui.note(_('active guards:\n'))
+ for g in guards:
+ ui.write(g, '\n')
+ else:
+ ui.write(_('no active guards\n'))
+ reapply = opts.get('reapply') and q.applied and q.appliedname(-1)
+ popped = False
+ if opts.get('pop') or opts.get('reapply'):
+ for i in xrange(len(q.applied)):
+ pushable, reason = q.pushable(i)
+ if not pushable:
+ ui.status(_('popping guarded patches\n'))
+ popped = True
+ if i == 0:
+ q.pop(repo, all=True)
+ else:
+ q.pop(repo, i - 1)
+ break
+ if popped:
+ try:
+ if reapply:
+ ui.status(_('reapplying unguarded patches\n'))
+ q.push(repo, reapply)
+ finally:
+ q.save_dirty()
+
+def finish(ui, repo, *revrange, **opts):
+ """move applied patches into repository history
+
+ Finishes the specified revisions (corresponding to applied
+ patches) by moving them out of mq control into regular repository
+ history.
+
+ Accepts a revision range or the -a/--applied option. If --applied
+ is specified, all applied mq revisions are removed from mq
+ control. Otherwise, the given revisions must be at the base of the
+ stack of applied patches.
+
+ This can be especially useful if your changes have been applied to
+ an upstream repository, or if you are about to push your changes
+ to upstream.
+
+ Returns 0 on success.
+ """
+ if not opts.get('applied') and not revrange:
+ raise util.Abort(_('no revisions specified'))
+ elif opts.get('applied'):
+ revrange = ('qbase::qtip',) + revrange
+
+ q = repo.mq
+ if not q.applied:
+ ui.status(_('no patches applied\n'))
+ return 0
+
+ revs = cmdutil.revrange(repo, revrange)
+ q.finish(repo, revs)
+ q.save_dirty()
+ return 0
+
+def qqueue(ui, repo, name=None, **opts):
+ '''manage multiple patch queues
+
+ Supports switching between different patch queues, as well as creating
+ new patch queues and deleting existing ones.
+
+ Omitting a queue name or specifying -l/--list will show you the registered
+ queues - by default the "normal" patches queue is registered. The currently
+ active queue will be marked with "(active)".
+
+ To create a new queue, use -c/--create. The queue is automatically made
+ active, except in the case where there are applied patches from the
+ currently active queue in the repository. Then the queue will only be
+ created and switching will fail.
+
+ To delete an existing queue, use --delete. You cannot delete the currently
+ active queue.
+
+ Returns 0 on success.
+ '''
+
+ q = repo.mq
+
+ _defaultqueue = 'patches'
+ _allqueues = 'patches.queues'
+ _activequeue = 'patches.queue'
+
+ def _getcurrent():
+ cur = os.path.basename(q.path)
+ if cur.startswith('patches-'):
+ cur = cur[8:]
+ return cur
+
+ def _noqueues():
+ try:
+ fh = repo.opener(_allqueues, 'r')
+ fh.close()
+ except IOError:
+ return True
+
+ return False
+
+ def _getqueues():
+ current = _getcurrent()
+
+ try:
+ fh = repo.opener(_allqueues, 'r')
+ queues = [queue.strip() for queue in fh if queue.strip()]
+ if current not in queues:
+ queues.append(current)
+ except IOError:
+ queues = [_defaultqueue]
+
+ return sorted(queues)
+
+ def _setactive(name):
+ if q.applied:
+ raise util.Abort(_('patches applied - cannot set new queue active'))
+ _setactivenocheck(name)
+
+ def _setactivenocheck(name):
+ fh = repo.opener(_activequeue, 'w')
+ if name != 'patches':
+ fh.write(name)
+ fh.close()
+
+ def _addqueue(name):
+ fh = repo.opener(_allqueues, 'a')
+ fh.write('%s\n' % (name,))
+ fh.close()
+
+ def _queuedir(name):
+ if name == 'patches':
+ return repo.join('patches')
+ else:
+ return repo.join('patches-' + name)
+
+ def _validname(name):
+ for n in name:
+ if n in ':\\/.':
+ return False
+ return True
+
+ def _delete(name):
+ if name not in existing:
+ raise util.Abort(_('cannot delete queue that does not exist'))
+
+ current = _getcurrent()
+
+ if name == current:
+ raise util.Abort(_('cannot delete currently active queue'))
+
+ fh = repo.opener('patches.queues.new', 'w')
+ for queue in existing:
+ if queue == name:
+ continue
+ fh.write('%s\n' % (queue,))
+ fh.close()
+ util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
+
+ if not name or opts.get('list'):
+ current = _getcurrent()
+ for queue in _getqueues():
+ ui.write('%s' % (queue,))
+ if queue == current and not ui.quiet:
+ ui.write(_(' (active)\n'))
+ else:
+ ui.write('\n')
+ return
+
+ if not _validname(name):
+ raise util.Abort(
+ _('invalid queue name, may not contain the characters ":\\/."'))
+
+ existing = _getqueues()
+
+ if opts.get('create'):
+ if name in existing:
+ raise util.Abort(_('queue "%s" already exists') % name)
+ if _noqueues():
+ _addqueue(_defaultqueue)
+ _addqueue(name)
+ _setactive(name)
+ elif opts.get('rename'):
+ current = _getcurrent()
+ if name == current:
+ raise util.Abort(_('can\'t rename "%s" to its current name') % name)
+ if name in existing:
+ raise util.Abort(_('queue "%s" already exists') % name)
+
+ olddir = _queuedir(current)
+ newdir = _queuedir(name)
+
+ if os.path.exists(newdir):
+ raise util.Abort(_('non-queue directory "%s" already exists') %
+ newdir)
+
+ fh = repo.opener('patches.queues.new', 'w')
+ for queue in existing:
+ if queue == current:
+ fh.write('%s\n' % (name,))
+ if os.path.exists(olddir):
+ util.rename(olddir, newdir)
+ else:
+ fh.write('%s\n' % (queue,))
+ fh.close()
+ util.rename(repo.join('patches.queues.new'), repo.join(_allqueues))
+ _setactivenocheck(name)
+ elif opts.get('delete'):
+ _delete(name)
+ elif opts.get('purge'):
+ if name in existing:
+ _delete(name)
+ qdir = _queuedir(name)
+ if os.path.exists(qdir):
+ shutil.rmtree(qdir)
+ else:
+ if name not in existing:
+ raise util.Abort(_('use --create to create a new queue'))
+ _setactive(name)
+
+def reposetup(ui, repo):
+ class mqrepo(repo.__class__):
+ @util.propertycache
+ def mq(self):
+ return queue(self.ui, self.join(""))
+
+ def abort_if_wdir_patched(self, errmsg, force=False):
+ if self.mq.applied and not force:
+ parent = self.dirstate.parents()[0]
+ if parent in [s.node for s in self.mq.applied]:
+ raise util.Abort(errmsg)
+
+ def commit(self, text="", user=None, date=None, match=None,
+ force=False, editor=False, extra={}):
+ self.abort_if_wdir_patched(
+ _('cannot commit over an applied mq patch'),
+ force)
+
+ return super(mqrepo, self).commit(text, user, date, match, force,
+ editor, extra)
+
+ def push(self, remote, force=False, revs=None, newbranch=False):
+ if self.mq.applied and not force:
+ haspatches = True
+ if revs:
+ # Assume applied patches have no non-patch descendants
+ # and are not on remote already. If they appear in the
+ # set of resolved 'revs', bail out.
+ applied = set(e.node for e in self.mq.applied)
+ haspatches = bool([n for n in revs if n in applied])
+ if haspatches:
+ raise util.Abort(_('source has mq patches applied'))
+ return super(mqrepo, self).push(remote, force, revs, newbranch)
+
+ def _findtags(self):
+ '''augment tags from base class with patch tags'''
+ result = super(mqrepo, self)._findtags()
+
+ q = self.mq
+ if not q.applied:
+ return result
+
+ mqtags = [(patch.node, patch.name) for patch in q.applied]
+
+ if mqtags[-1][0] not in self.changelog.nodemap:
+ self.ui.warn(_('mq status file refers to unknown node %s\n')
+ % short(mqtags[-1][0]))
+ return result
+
+ mqtags.append((mqtags[-1][0], 'qtip'))
+ mqtags.append((mqtags[0][0], 'qbase'))
+ mqtags.append((self.changelog.parents(mqtags[0][0])[0], 'qparent'))
+ tags = result[0]
+ for patch in mqtags:
+ if patch[1] in tags:
+ self.ui.warn(_('Tag %s overrides mq patch of the same name\n')
+ % patch[1])
+ else:
+ tags[patch[1]] = patch[0]
+
+ return result
+
+ def _branchtags(self, partial, lrev):
+ q = self.mq
+ if not q.applied:
+ return super(mqrepo, self)._branchtags(partial, lrev)
+
+ cl = self.changelog
+ qbasenode = q.applied[0].node
+ if qbasenode not in cl.nodemap:
+ self.ui.warn(_('mq status file refers to unknown node %s\n')
+ % short(qbasenode))
+ return super(mqrepo, self)._branchtags(partial, lrev)
+
+ qbase = cl.rev(qbasenode)
+ start = lrev + 1
+ if start < qbase:
+ # update the cache (excluding the patches) and save it
+ ctxgen = (self[r] for r in xrange(lrev + 1, qbase))
+ self._updatebranchcache(partial, ctxgen)
+ self._writebranchcache(partial, cl.node(qbase - 1), qbase - 1)
+ start = qbase
+ # if start = qbase, the cache is as updated as it should be.
+ # if start > qbase, the cache includes (part of) the patches.
+ # we might as well use it, but we won't save it.
+
+ # update the cache up to the tip
+ ctxgen = (self[r] for r in xrange(start, len(cl)))
+ self._updatebranchcache(partial, ctxgen)
+
+ return partial
+
+ if repo.local():
+ repo.__class__ = mqrepo
+
+def mqimport(orig, ui, repo, *args, **kwargs):
+ if (hasattr(repo, 'abort_if_wdir_patched')
+ and not kwargs.get('no_commit', False)):
+ repo.abort_if_wdir_patched(_('cannot import over an applied patch'),
+ kwargs.get('force'))
+ return orig(ui, repo, *args, **kwargs)
+
+def mqinit(orig, ui, *args, **kwargs):
+ mq = kwargs.pop('mq', None)
+
+ if not mq:
+ return orig(ui, *args, **kwargs)
+
+ if args:
+ repopath = args[0]
+ if not hg.islocal(repopath):
+ raise util.Abort(_('only a local queue repository '
+ 'may be initialized'))
+ else:
+ repopath = cmdutil.findrepo(os.getcwd())
+ if not repopath:
+ raise util.Abort(_('there is no Mercurial repository here '
+ '(.hg not found)'))
+ repo = hg.repository(ui, repopath)
+ return qinit(ui, repo, True)
+
+def mqcommand(orig, ui, repo, *args, **kwargs):
+ """Add --mq option to operate on patch repository instead of main"""
+
+ # some commands do not like getting unknown options
+ mq = kwargs.pop('mq', None)
+
+ if not mq:
+ return orig(ui, repo, *args, **kwargs)
+
+ q = repo.mq
+ r = q.qrepo()
+ if not r:
+ raise util.Abort(_('no queue repository'))
+ return orig(r.ui, r, *args, **kwargs)
+
+def summary(orig, ui, repo, *args, **kwargs):
+ r = orig(ui, repo, *args, **kwargs)
+ q = repo.mq
+ m = []
+ a, u = len(q.applied), len(q.unapplied(repo))
+ if a:
+ m.append(ui.label(_("%d applied"), 'qseries.applied') % a)
+ if u:
+ m.append(ui.label(_("%d unapplied"), 'qseries.unapplied') % u)
+ if m:
+ ui.write("mq: %s\n" % ', '.join(m))
+ else:
+ ui.note(_("mq: (empty queue)\n"))
+ return r
+
+def uisetup(ui):
+ mqopt = [('', 'mq', None, _("operate on patch repository"))]
+
+ extensions.wrapcommand(commands.table, 'import', mqimport)
+ extensions.wrapcommand(commands.table, 'summary', summary)
+
+ entry = extensions.wrapcommand(commands.table, 'init', mqinit)
+ entry[1].extend(mqopt)
+
+ nowrap = set(commands.norepo.split(" ") + ['qrecord'])
+
+ def dotable(cmdtable):
+ for cmd in cmdtable.keys():
+ cmd = cmdutil.parsealiases(cmd)[0]
+ if cmd in nowrap:
+ continue
+ entry = extensions.wrapcommand(cmdtable, cmd, mqcommand)
+ entry[1].extend(mqopt)
+
+ dotable(commands.table)
+
+ for extname, extmodule in extensions.extensions():
+ if extmodule.__file__ != __file__:
+ dotable(getattr(extmodule, 'cmdtable', {}))
+
+seriesopts = [('s', 'summary', None, _('print first line of patch header'))]
+
+cmdtable = {
+ "qapplied":
+ (applied,
+ [('1', 'last', None, _('show only the last patch'))] + seriesopts,
+ _('hg qapplied [-1] [-s] [PATCH]')),
+ "qclone":
+ (clone,
+ [('', 'pull', None, _('use pull protocol to copy metadata')),
+ ('U', 'noupdate', None, _('do not update the new working directories')),
+ ('', 'uncompressed', None,
+ _('use uncompressed transfer (fast over LAN)')),
+ ('p', 'patches', '',
+ _('location of source patch repository'), _('REPO')),
+ ] + commands.remoteopts,
+ _('hg qclone [OPTION]... SOURCE [DEST]')),
+ "qcommit|qci":
+ (commit,
+ commands.table["^commit|ci"][1],
+ _('hg qcommit [OPTION]... [FILE]...')),
+ "^qdiff":
+ (diff,
+ commands.diffopts + commands.diffopts2 + commands.walkopts,
+ _('hg qdiff [OPTION]... [FILE]...')),
+ "qdelete|qremove|qrm":
+ (delete,
+ [('k', 'keep', None, _('keep patch file')),
+ ('r', 'rev', [],
+ _('stop managing a revision (DEPRECATED)'), _('REV'))],
+ _('hg qdelete [-k] [PATCH]...')),
+ 'qfold':
+ (fold,
+ [('e', 'edit', None, _('edit patch header')),
+ ('k', 'keep', None, _('keep folded patch files')),
+ ] + commands.commitopts,
+ _('hg qfold [-e] [-k] [-m TEXT] [-l FILE] PATCH...')),
+ 'qgoto':
+ (goto,
+ [('f', 'force', None, _('overwrite any local changes'))],
+ _('hg qgoto [OPTION]... PATCH')),
+ 'qguard':
+ (guard,
+ [('l', 'list', None, _('list all patches and guards')),
+ ('n', 'none', None, _('drop all guards'))],
+ _('hg qguard [-l] [-n] [PATCH] [-- [+GUARD]... [-GUARD]...]')),
+ 'qheader': (header, [], _('hg qheader [PATCH]')),
+ "qimport":
+ (qimport,
+ [('e', 'existing', None, _('import file in patch directory')),
+ ('n', 'name', '',
+ _('name of patch file'), _('NAME')),
+ ('f', 'force', None, _('overwrite existing files')),
+ ('r', 'rev', [],
+ _('place existing revisions under mq control'), _('REV')),
+ ('g', 'git', None, _('use git extended diff format')),
+ ('P', 'push', None, _('qpush after importing'))],
+ _('hg qimport [-e] [-n NAME] [-f] [-g] [-P] [-r REV]... FILE...')),
+ "^qinit":
+ (init,
+ [('c', 'create-repo', None, _('create queue repository'))],
+ _('hg qinit [-c]')),
+ "^qnew":
+ (new,
+ [('e', 'edit', None, _('edit commit message')),
+ ('f', 'force', None, _('import uncommitted changes (DEPRECATED)')),
+ ('g', 'git', None, _('use git extended diff format')),
+ ('U', 'currentuser', None, _('add "From: <current user>" to patch')),
+ ('u', 'user', '',
+ _('add "From: <USER>" to patch'), _('USER')),
+ ('D', 'currentdate', None, _('add "Date: <current date>" to patch')),
+ ('d', 'date', '',
+ _('add "Date: <DATE>" to patch'), _('DATE'))
+ ] + commands.walkopts + commands.commitopts,
+ _('hg qnew [-e] [-m TEXT] [-l FILE] PATCH [FILE]...')),
+ "qnext": (next, [] + seriesopts, _('hg qnext [-s]')),
+ "qprev": (prev, [] + seriesopts, _('hg qprev [-s]')),
+ "^qpop":
+ (pop,
+ [('a', 'all', None, _('pop all patches')),
+ ('n', 'name', '',
+ _('queue name to pop (DEPRECATED)'), _('NAME')),
+ ('f', 'force', None, _('forget any local changes to patched files'))],
+ _('hg qpop [-a] [-f] [PATCH | INDEX]')),
+ "^qpush":
+ (push,
+ [('f', 'force', None, _('apply on top of local changes')),
+ ('l', 'list', None, _('list patch name in commit text')),
+ ('a', 'all', None, _('apply all patches')),
+ ('m', 'merge', None, _('merge from another queue (DEPRECATED)')),
+ ('n', 'name', '',
+ _('merge queue name (DEPRECATED)'), _('NAME')),
+ ('', 'move', None, _('reorder patch series and apply only the patch'))],
+ _('hg qpush [-f] [-l] [-a] [--move] [PATCH | INDEX]')),
+ "^qrefresh":
+ (refresh,
+ [('e', 'edit', None, _('edit commit message')),
+ ('g', 'git', None, _('use git extended diff format')),
+ ('s', 'short', None,
+ _('refresh only files already in the patch and specified files')),
+ ('U', 'currentuser', None,
+ _('add/update author field in patch with current user')),
+ ('u', 'user', '',
+ _('add/update author field in patch with given user'), _('USER')),
+ ('D', 'currentdate', None,
+ _('add/update date field in patch with current date')),
+ ('d', 'date', '',
+ _('add/update date field in patch with given date'), _('DATE'))
+ ] + commands.walkopts + commands.commitopts,
+ _('hg qrefresh [-I] [-X] [-e] [-m TEXT] [-l FILE] [-s] [FILE]...')),
+ 'qrename|qmv':
+ (rename, [], _('hg qrename PATCH1 [PATCH2]')),
+ "qrestore":
+ (restore,
+ [('d', 'delete', None, _('delete save entry')),
+ ('u', 'update', None, _('update queue working directory'))],
+ _('hg qrestore [-d] [-u] REV')),
+ "qsave":
+ (save,
+ [('c', 'copy', None, _('copy patch directory')),
+ ('n', 'name', '',
+ _('copy directory name'), _('NAME')),
+ ('e', 'empty', None, _('clear queue status file')),
+ ('f', 'force', None, _('force copy'))] + commands.commitopts,
+ _('hg qsave [-m TEXT] [-l FILE] [-c] [-n NAME] [-e] [-f]')),
+ "qselect":
+ (select,
+ [('n', 'none', None, _('disable all guards')),
+ ('s', 'series', None, _('list all guards in series file')),
+ ('', 'pop', None, _('pop to before first guarded applied patch')),
+ ('', 'reapply', None, _('pop, then reapply patches'))],
+ _('hg qselect [OPTION]... [GUARD]...')),
+ "qseries":
+ (series,
+ [('m', 'missing', None, _('print patches not in series')),
+ ] + seriesopts,
+ _('hg qseries [-ms]')),
+ "strip":
+ (strip,
+ [('f', 'force', None, _('force removal of changesets even if the '
+ 'working directory has uncommitted changes')),
+ ('b', 'backup', None, _('bundle only changesets with local revision'
+ ' number greater than REV which are not'
+ ' descendants of REV (DEPRECATED)')),
+ ('n', 'no-backup', None, _('no backups')),
+ ('', 'nobackup', None, _('no backups (DEPRECATED)')),
+ ('k', 'keep', None, _("do not modify working copy during strip"))],
+ _('hg strip [-k] [-f] [-n] REV...')),
+ "qtop": (top, [] + seriesopts, _('hg qtop [-s]')),
+ "qunapplied":
+ (unapplied,
+ [('1', 'first', None, _('show only the first patch'))] + seriesopts,
+ _('hg qunapplied [-1] [-s] [PATCH]')),
+ "qfinish":
+ (finish,
+ [('a', 'applied', None, _('finish all applied changesets'))],
+ _('hg qfinish [-a] [REV]...')),
+ 'qqueue':
+ (qqueue,
+ [
+ ('l', 'list', False, _('list all available queues')),
+ ('c', 'create', False, _('create new queue')),
+ ('', 'rename', False, _('rename active queue')),
+ ('', 'delete', False, _('delete reference to queue')),
+ ('', 'purge', False, _('delete queue, and remove patch dir')),
+ ],
+ _('[OPTION] [QUEUE]')),
+}
+
+colortable = {'qguard.negative': 'red',
+ 'qguard.positive': 'yellow',
+ 'qguard.unguarded': 'green',
+ 'qseries.applied': 'blue bold underline',
+ 'qseries.guarded': 'black bold',
+ 'qseries.missing': 'red bold',
+ 'qseries.unapplied': 'black bold'}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyo
new file mode 100644
index 0000000..ed0654f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py
new file mode 100644
index 0000000..5ce2eee
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py
@@ -0,0 +1,316 @@
+# notify.py - email notifications for mercurial
+#
+# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''hooks for sending email notifications at commit/push time
+
+Subscriptions can be managed through a hgrc file. Default mode is to
+print messages to stdout, for testing and configuring.
+
+To use, configure the notify extension and enable it in hgrc like
+this::
+
+ [extensions]
+ notify =
+
+ [hooks]
+ # one email for each incoming changeset
+ incoming.notify = python:hgext.notify.hook
+ # batch emails when many changesets incoming at one time
+ changegroup.notify = python:hgext.notify.hook
+
+ [notify]
+ # config items go here
+
+Required configuration items::
+
+ config = /path/to/file # file containing subscriptions
+
+Optional configuration items::
+
+ test = True # print messages to stdout for testing
+ strip = 3 # number of slashes to strip for url paths
+ domain = example.com # domain to use if committer missing domain
+ style = ... # style file to use when formatting email
+ template = ... # template to use when formatting email
+ incoming = ... # template to use when run as incoming hook
+ changegroup = ... # template when run as changegroup hook
+ maxdiff = 300 # max lines of diffs to include (0=none, -1=all)
+ maxsubject = 67 # truncate subject line longer than this
+ diffstat = True # add a diffstat before the diff content
+ sources = serve # notify if source of incoming changes in this list
+ # (serve == ssh or http, push, pull, bundle)
+ merge = False # send notification for merges (default True)
+ [email]
+ from = user@host.com # email address to send as if none given
+ [web]
+ baseurl = http://hgserver/... # root of hg web site for browsing commits
+
+The notify config file has same format as a regular hgrc file. It has
+two sections so you can express subscriptions in whatever way is
+handier for you.
+
+::
+
+ [usersubs]
+ # key is subscriber email, value is ","-separated list of glob patterns
+ user@host = pattern
+
+ [reposubs]
+ # key is glob pattern, value is ","-separated list of subscriber emails
+ pattern = user@host
+
+Glob patterns are matched against path to repository root.
+
+If you like, you can put notify config file in repository that users
+can push changes to, they can manage their own subscriptions.
+'''
+
+from mercurial.i18n import _
+from mercurial import patch, cmdutil, templater, util, mail
+import email.Parser, email.Errors, fnmatch, socket, time
+
+# template for single changeset can include email headers.
+single_template = '''
+Subject: changeset in {webroot}: {desc|firstline|strip}
+From: {author}
+
+changeset {node|short} in {root}
+details: {baseurl}{webroot}?cmd=changeset;node={node|short}
+description:
+\t{desc|tabindent|strip}
+'''.lstrip()
+
+# template for multiple changesets should not contain email headers,
+# because only first set of headers will be used and result will look
+# strange.
+multiple_template = '''
+changeset {node|short} in {root}
+details: {baseurl}{webroot}?cmd=changeset;node={node|short}
+summary: {desc|firstline}
+'''
+
+deftemplates = {
+ 'changegroup': multiple_template,
+}
+
+class notifier(object):
+ '''email notification class.'''
+
+ def __init__(self, ui, repo, hooktype):
+ self.ui = ui
+ cfg = self.ui.config('notify', 'config')
+ if cfg:
+ self.ui.readconfig(cfg, sections=['usersubs', 'reposubs'])
+ self.repo = repo
+ self.stripcount = int(self.ui.config('notify', 'strip', 0))
+ self.root = self.strip(self.repo.root)
+ self.domain = self.ui.config('notify', 'domain')
+ self.test = self.ui.configbool('notify', 'test', True)
+ self.charsets = mail._charsets(self.ui)
+ self.subs = self.subscribers()
+ self.merge = self.ui.configbool('notify', 'merge', True)
+
+ mapfile = self.ui.config('notify', 'style')
+ template = (self.ui.config('notify', hooktype) or
+ self.ui.config('notify', 'template'))
+ self.t = cmdutil.changeset_templater(self.ui, self.repo,
+ False, None, mapfile, False)
+ if not mapfile and not template:
+ template = deftemplates.get(hooktype) or single_template
+ if template:
+ template = templater.parsestring(template, quoted=False)
+ self.t.use_template(template)
+
+ def strip(self, path):
+ '''strip leading slashes from local path, turn into web-safe path.'''
+
+ path = util.pconvert(path)
+ count = self.stripcount
+ while count > 0:
+ c = path.find('/')
+ if c == -1:
+ break
+ path = path[c + 1:]
+ count -= 1
+ return path
+
+ def fixmail(self, addr):
+ '''try to clean up email addresses.'''
+
+ addr = util.email(addr.strip())
+ if self.domain:
+ a = addr.find('@localhost')
+ if a != -1:
+ addr = addr[:a]
+ if '@' not in addr:
+ return addr + '@' + self.domain
+ return addr
+
+ def subscribers(self):
+ '''return list of email addresses of subscribers to this repo.'''
+ subs = set()
+ for user, pats in self.ui.configitems('usersubs'):
+ for pat in pats.split(','):
+ if fnmatch.fnmatch(self.repo.root, pat.strip()):
+ subs.add(self.fixmail(user))
+ for pat, users in self.ui.configitems('reposubs'):
+ if fnmatch.fnmatch(self.repo.root, pat):
+ for user in users.split(','):
+ subs.add(self.fixmail(user))
+ return [mail.addressencode(self.ui, s, self.charsets, self.test)
+ for s in sorted(subs)]
+
+ def url(self, path=None):
+ return self.ui.config('web', 'baseurl') + (path or self.root)
+
+ def node(self, ctx, **props):
+ '''format one changeset, unless it is a suppressed merge.'''
+ if not self.merge and len(ctx.parents()) > 1:
+ return False
+ self.t.show(ctx, changes=ctx.changeset(),
+ baseurl=self.ui.config('web', 'baseurl'),
+ root=self.repo.root, webroot=self.root, **props)
+ return True
+
+ def skipsource(self, source):
+ '''true if incoming changes from this source should be skipped.'''
+ ok_sources = self.ui.config('notify', 'sources', 'serve').split()
+ return source not in ok_sources
+
+ def send(self, ctx, count, data):
+ '''send message.'''
+
+ p = email.Parser.Parser()
+ try:
+ msg = p.parsestr(data)
+ except email.Errors.MessageParseError, inst:
+ raise util.Abort(inst)
+
+ # store sender and subject
+ sender, subject = msg['From'], msg['Subject']
+ del msg['From'], msg['Subject']
+
+ if not msg.is_multipart():
+ # create fresh mime message from scratch
+ # (multipart templates must take care of this themselves)
+ headers = msg.items()
+ payload = msg.get_payload()
+ # for notification prefer readability over data precision
+ msg = mail.mimeencode(self.ui, payload, self.charsets, self.test)
+ # reinstate custom headers
+ for k, v in headers:
+ msg[k] = v
+
+ msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2")
+
+ # try to make subject line exist and be useful
+ if not subject:
+ if count > 1:
+ subject = _('%s: %d new changesets') % (self.root, count)
+ else:
+ s = ctx.description().lstrip().split('\n', 1)[0].rstrip()
+ subject = '%s: %s' % (self.root, s)
+ maxsubject = int(self.ui.config('notify', 'maxsubject', 67))
+ if maxsubject:
+ subject = util.ellipsis(subject, maxsubject)
+ msg['Subject'] = mail.headencode(self.ui, subject,
+ self.charsets, self.test)
+
+ # try to make message have proper sender
+ if not sender:
+ sender = self.ui.config('email', 'from') or self.ui.username()
+ if '@' not in sender or '@localhost' in sender:
+ sender = self.fixmail(sender)
+ msg['From'] = mail.addressencode(self.ui, sender,
+ self.charsets, self.test)
+
+ msg['X-Hg-Notification'] = 'changeset %s' % ctx
+ if not msg['Message-Id']:
+ msg['Message-Id'] = ('<hg.%s.%s.%s@%s>' %
+ (ctx, int(time.time()),
+ hash(self.repo.root), socket.getfqdn()))
+ msg['To'] = ', '.join(self.subs)
+
+ msgtext = msg.as_string()
+ if self.test:
+ self.ui.write(msgtext)
+ if not msgtext.endswith('\n'):
+ self.ui.write('\n')
+ else:
+ self.ui.status(_('notify: sending %d subscribers %d changes\n') %
+ (len(self.subs), count))
+ mail.sendmail(self.ui, util.email(msg['From']),
+ self.subs, msgtext)
+
+ def diff(self, ctx, ref=None):
+
+ maxdiff = int(self.ui.config('notify', 'maxdiff', 300))
+ prev = ctx.parents()[0].node()
+ ref = ref and ref.node() or ctx.node()
+ chunks = patch.diff(self.repo, prev, ref, opts=patch.diffopts(self.ui))
+ difflines = ''.join(chunks).splitlines()
+
+ if self.ui.configbool('notify', 'diffstat', True):
+ s = patch.diffstat(difflines)
+ # s may be nil, don't include the header if it is
+ if s:
+ self.ui.write('\ndiffstat:\n\n%s' % s)
+
+ if maxdiff == 0:
+ return
+ elif maxdiff > 0 and len(difflines) > maxdiff:
+ msg = _('\ndiffs (truncated from %d to %d lines):\n\n')
+ self.ui.write(msg % (len(difflines), maxdiff))
+ difflines = difflines[:maxdiff]
+ elif difflines:
+ self.ui.write(_('\ndiffs (%d lines):\n\n') % len(difflines))
+
+ self.ui.write("\n".join(difflines))
+
+def hook(ui, repo, hooktype, node=None, source=None, **kwargs):
+ '''send email notifications to interested subscribers.
+
+ if used as changegroup hook, send one email for all changesets in
+ changegroup. else send one email per changeset.'''
+
+ n = notifier(ui, repo, hooktype)
+ ctx = repo[node]
+
+ if not n.subs:
+ ui.debug('notify: no subscribers to repository %s\n' % n.root)
+ return
+ if n.skipsource(source):
+ ui.debug('notify: changes have source "%s" - skipping\n' % source)
+ return
+
+ ui.pushbuffer()
+ data = ''
+ count = 0
+ if hooktype == 'changegroup':
+ start, end = ctx.rev(), len(repo)
+ for rev in xrange(start, end):
+ if n.node(repo[rev]):
+ count += 1
+ else:
+ data += ui.popbuffer()
+ ui.note(_('notify: suppressing notification for merge %d:%s\n') %
+ (rev, repo[rev].hex()[:12]))
+ ui.pushbuffer()
+ if count:
+ n.diff(ctx, repo['tip'])
+ else:
+ if not n.node(ctx):
+ ui.popbuffer()
+ ui.note(_('notify: suppressing notification for merge %d:%s\n') %
+ (ctx.rev(), ctx.hex()[:12]))
+ return
+ count += 1
+ n.diff(ctx)
+
+ data += ui.popbuffer()
+ if count:
+ n.send(ctx, count, data)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyo
new file mode 100644
index 0000000..087cf73
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py
new file mode 100644
index 0000000..6d73c34
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py
@@ -0,0 +1,113 @@
+# pager.py - display output using a pager
+#
+# Copyright 2008 David Soria Parra <dsp@php.net>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+# To load the extension, add it to your configuration file:
+#
+# [extension]
+# pager =
+#
+# Run "hg help pager" to get info on configuration.
+
+'''browse command output with an external pager
+
+To set the pager that should be used, set the application variable::
+
+ [pager]
+ pager = less -FRSX
+
+If no pager is set, the pager extensions uses the environment variable
+$PAGER. If neither pager.pager, nor $PAGER is set, no pager is used.
+
+If you notice "BROKEN PIPE" error messages, you can disable them by
+setting::
+
+ [pager]
+ quiet = True
+
+You can disable the pager for certain commands by adding them to the
+pager.ignore list::
+
+ [pager]
+ ignore = version, help, update
+
+You can also enable the pager only for certain commands using
+pager.attend. Below is the default list of commands to be paged::
+
+ [pager]
+ attend = annotate, cat, diff, export, glog, log, qdiff
+
+Setting pager.attend to an empty value will cause all commands to be
+paged.
+
+If pager.attend is present, pager.ignore will be ignored.
+
+To ignore global commands like :hg:`version` or :hg:`help`, you have
+to specify them in your user configuration file.
+
+The --pager=... option can also be used to control when the pager is
+used. Use a boolean value like yes, no, on, off, or use auto for
+normal behavior.
+'''
+
+import sys, os, signal, shlex, errno
+from mercurial import commands, dispatch, util, extensions
+from mercurial.i18n import _
+
+def _runpager(p):
+ if not hasattr(os, 'fork'):
+ sys.stderr = sys.stdout = util.popen(p, 'wb')
+ return
+ fdin, fdout = os.pipe()
+ pid = os.fork()
+ if pid == 0:
+ os.close(fdin)
+ os.dup2(fdout, sys.stdout.fileno())
+ os.dup2(fdout, sys.stderr.fileno())
+ os.close(fdout)
+ return
+ os.dup2(fdin, sys.stdin.fileno())
+ os.close(fdin)
+ os.close(fdout)
+ try:
+ os.execvp('/bin/sh', ['/bin/sh', '-c', p])
+ except OSError, e:
+ if e.errno == errno.ENOENT:
+ # no /bin/sh, try executing the pager directly
+ args = shlex.split(p)
+ os.execvp(args[0], args)
+ else:
+ raise
+
+def uisetup(ui):
+ if ui.plain():
+ return
+
+ def pagecmd(orig, ui, options, cmd, cmdfunc):
+ p = ui.config("pager", "pager", os.environ.get("PAGER"))
+ if p and sys.stdout.isatty() and '--debugger' not in sys.argv:
+ attend = ui.configlist('pager', 'attend', attended)
+ auto = options['pager'] == 'auto'
+ always = util.parsebool(options['pager'])
+ if (always or auto and
+ (cmd in attend or
+ (cmd not in ui.configlist('pager', 'ignore') and not attend))):
+ ui.setconfig('ui', 'formatted', ui.formatted())
+ ui.setconfig('ui', 'interactive', False)
+ _runpager(p)
+ if ui.configbool('pager', 'quiet'):
+ signal.signal(signal.SIGPIPE, signal.SIG_DFL)
+ return orig(ui, options, cmd, cmdfunc)
+
+ extensions.wrapfunction(dispatch, '_runcommand', pagecmd)
+
+def extsetup(ui):
+ commands.globalopts.append(
+ ('', 'pager', 'auto',
+ _("when to paginate (boolean, always, auto, or never)"),
+ _('TYPE')))
+
+attended = ['annotate', 'cat', 'diff', 'export', 'glog', 'log', 'qdiff']
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyo
new file mode 100644
index 0000000..0b020cf
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py
new file mode 100644
index 0000000..d66be24
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py
@@ -0,0 +1,96 @@
+# Mercurial extension to make it easy to refer to the parent of a revision
+#
+# Copyright (C) 2007 Alexis S. L. Carvalho <alexis@cecm.usp.br>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''interpret suffixes to refer to ancestor revisions
+
+This extension allows you to use git-style suffixes to refer to the
+ancestors of a specific revision.
+
+For example, if you can refer to a revision as "foo", then::
+
+ foo^N = Nth parent of foo
+ foo^0 = foo
+ foo^1 = first parent of foo
+ foo^2 = second parent of foo
+ foo^ = foo^1
+
+ foo~N = Nth first grandparent of foo
+ foo~0 = foo
+ foo~1 = foo^1 = foo^ = first parent of foo
+ foo~2 = foo^1^1 = foo^^ = first parent of first parent of foo
+'''
+from mercurial import error
+
+def reposetup(ui, repo):
+ if not repo.local():
+ return
+
+ class parentrevspecrepo(repo.__class__):
+ def lookup(self, key):
+ try:
+ _super = super(parentrevspecrepo, self)
+ return _super.lookup(key)
+ except error.RepoError:
+ pass
+
+ circ = key.find('^')
+ tilde = key.find('~')
+ if circ < 0 and tilde < 0:
+ raise
+ elif circ >= 0 and tilde >= 0:
+ end = min(circ, tilde)
+ else:
+ end = max(circ, tilde)
+
+ cl = self.changelog
+ base = key[:end]
+ try:
+ node = _super.lookup(base)
+ except error.RepoError:
+ # eek - reraise the first error
+ return _super.lookup(key)
+
+ rev = cl.rev(node)
+ suffix = key[end:]
+ i = 0
+ while i < len(suffix):
+ # foo^N => Nth parent of foo
+ # foo^0 == foo
+ # foo^1 == foo^ == 1st parent of foo
+ # foo^2 == 2nd parent of foo
+ if suffix[i] == '^':
+ j = i + 1
+ p = cl.parentrevs(rev)
+ if j < len(suffix) and suffix[j].isdigit():
+ j += 1
+ n = int(suffix[i + 1:j])
+ if n > 2 or n == 2 and p[1] == -1:
+ raise
+ else:
+ n = 1
+ if n:
+ rev = p[n - 1]
+ i = j
+ # foo~N => Nth first grandparent of foo
+ # foo~0 = foo
+ # foo~1 = foo^1 == foo^ == 1st parent of foo
+ # foo~2 = foo^1^1 == foo^^ == 1st parent of 1st parent of foo
+ elif suffix[i] == '~':
+ j = i + 1
+ while j < len(suffix) and suffix[j].isdigit():
+ j += 1
+ if j == i + 1:
+ raise
+ n = int(suffix[i + 1:j])
+ for k in xrange(n):
+ rev = cl.parentrevs(rev)[0]
+ i = j
+ else:
+ raise
+ return cl.node(rev)
+
+ repo.__class__ = parentrevspecrepo
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyo
new file mode 100644
index 0000000..3a5be89
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py
new file mode 100644
index 0000000..93ea4cb
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py
@@ -0,0 +1,553 @@
+# patchbomb.py - sending Mercurial changesets as patch emails
+#
+# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''command to send changesets as (a series of) patch emails
+
+The series is started off with a "[PATCH 0 of N]" introduction, which
+describes the series as a whole.
+
+Each patch email has a Subject line of "[PATCH M of N] ...", using the
+first line of the changeset description as the subject text. The
+message contains two or three body parts:
+
+- The changeset description.
+- [Optional] The result of running diffstat on the patch.
+- The patch itself, as generated by :hg:`export`.
+
+Each message refers to the first in the series using the In-Reply-To
+and References headers, so they will show up as a sequence in threaded
+mail and news readers, and in mail archives.
+
+To configure other defaults, add a section like this to your hgrc
+file::
+
+ [email]
+ from = My Name <my@email>
+ to = recipient1, recipient2, ...
+ cc = cc1, cc2, ...
+ bcc = bcc1, bcc2, ...
+ reply-to = address1, address2, ...
+
+Use ``[patchbomb]`` as configuration section name if you need to
+override global ``[email]`` address settings.
+
+Then you can use the :hg:`email` command to mail a series of
+changesets as a patchbomb.
+
+You can also either configure the method option in the email section
+to be a sendmail compatible mailer or fill out the [smtp] section so
+that the patchbomb extension can automatically send patchbombs
+directly from the commandline. See the [email] and [smtp] sections in
+hgrc(5) for details.
+'''
+
+import os, errno, socket, tempfile, cStringIO, time
+import email.MIMEMultipart, email.MIMEBase
+import email.Utils, email.Encoders, email.Generator
+from mercurial import cmdutil, commands, hg, mail, patch, util, discovery, url
+from mercurial.i18n import _
+from mercurial.node import bin
+
+def prompt(ui, prompt, default=None, rest=':'):
+ if not ui.interactive() and default is None:
+ raise util.Abort(_("%s Please enter a valid value" % (prompt + rest)))
+ if default:
+ prompt += ' [%s]' % default
+ prompt += rest
+ while True:
+ r = ui.prompt(prompt, default=default)
+ if r:
+ return r
+ if default is not None:
+ return default
+ ui.warn(_('Please enter a valid value.\n'))
+
+def introneeded(opts, number):
+ '''is an introductory message required?'''
+ return number > 1 or opts.get('intro') or opts.get('desc')
+
+def makepatch(ui, repo, patchlines, opts, _charsets, idx, total,
+ patchname=None):
+
+ desc = []
+ node = None
+ body = ''
+
+ for line in patchlines:
+ if line.startswith('#'):
+ if line.startswith('# Node ID'):
+ node = line.split()[-1]
+ continue
+ if line.startswith('diff -r') or line.startswith('diff --git'):
+ break
+ desc.append(line)
+
+ if not patchname and not node:
+ raise ValueError
+
+ if opts.get('attach'):
+ body = ('\n'.join(desc[1:]).strip() or
+ 'Patch subject is complete summary.')
+ body += '\n\n\n'
+
+ if opts.get('plain'):
+ while patchlines and patchlines[0].startswith('# '):
+ patchlines.pop(0)
+ if patchlines:
+ patchlines.pop(0)
+ while patchlines and not patchlines[0].strip():
+ patchlines.pop(0)
+
+ ds = patch.diffstat(patchlines)
+ if opts.get('diffstat'):
+ body += ds + '\n\n'
+
+ if opts.get('attach') or opts.get('inline'):
+ msg = email.MIMEMultipart.MIMEMultipart()
+ if body:
+ msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
+ p = mail.mimetextpatch('\n'.join(patchlines), 'x-patch', opts.get('test'))
+ binnode = bin(node)
+ # if node is mq patch, it will have the patch file's name as a tag
+ if not patchname:
+ patchtags = [t for t in repo.nodetags(binnode)
+ if t.endswith('.patch') or t.endswith('.diff')]
+ if patchtags:
+ patchname = patchtags[0]
+ elif total > 1:
+ patchname = cmdutil.make_filename(repo, '%b-%n.patch',
+ binnode, seqno=idx, total=total)
+ else:
+ patchname = cmdutil.make_filename(repo, '%b.patch', binnode)
+ disposition = 'inline'
+ if opts.get('attach'):
+ disposition = 'attachment'
+ p['Content-Disposition'] = disposition + '; filename=' + patchname
+ msg.attach(p)
+ else:
+ body += '\n'.join(patchlines)
+ msg = mail.mimetextpatch(body, display=opts.get('test'))
+
+ flag = ' '.join(opts.get('flag'))
+ if flag:
+ flag = ' ' + flag
+
+ subj = desc[0].strip().rstrip('. ')
+ if not introneeded(opts, total):
+ subj = '[PATCH%s] %s' % (flag, opts.get('subject') or subj)
+ else:
+ tlen = len(str(total))
+ subj = '[PATCH %0*d of %d%s] %s' % (tlen, idx, total, flag, subj)
+ msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
+ msg['X-Mercurial-Node'] = node
+ return msg, subj, ds
+
+def patchbomb(ui, repo, *revs, **opts):
+ '''send changesets by email
+
+ By default, diffs are sent in the format generated by
+ :hg:`export`, one per message. The series starts with a "[PATCH 0
+ of N]" introduction, which describes the series as a whole.
+
+ Each patch email has a Subject line of "[PATCH M of N] ...", using
+ the first line of the changeset description as the subject text.
+ The message contains two or three parts. First, the changeset
+ description.
+
+ With the -d/--diffstat option, if the diffstat program is
+ installed, the result of running diffstat on the patch is inserted.
+
+ Finally, the patch itself, as generated by :hg:`export`.
+
+ With the -d/--diffstat or -c/--confirm options, you will be presented
+ with a final summary of all messages and asked for confirmation before
+ the messages are sent.
+
+ By default the patch is included as text in the email body for
+ easy reviewing. Using the -a/--attach option will instead create
+ an attachment for the patch. With -i/--inline an inline attachment
+ will be created.
+
+ With -o/--outgoing, emails will be generated for patches not found
+ in the destination repository (or only those which are ancestors
+ of the specified revisions if any are provided)
+
+ With -b/--bundle, changesets are selected as for --outgoing, but a
+ single email containing a binary Mercurial bundle as an attachment
+ will be sent.
+
+ With -m/--mbox, instead of previewing each patchbomb message in a
+ pager or sending the messages directly, it will create a UNIX
+ mailbox file with the patch emails. This mailbox file can be
+ previewed with any mail user agent which supports UNIX mbox
+ files.
+
+ With -n/--test, all steps will run, but mail will not be sent.
+ You will be prompted for an email recipient address, a subject and
+ an introductory message describing the patches of your patchbomb.
+ Then when all is done, patchbomb messages are displayed. If the
+ PAGER environment variable is set, your pager will be fired up once
+ for each patchbomb message, so you can verify everything is alright.
+
+ Examples::
+
+ hg email -r 3000 # send patch 3000 only
+ hg email -r 3000 -r 3001 # send patches 3000 and 3001
+ hg email -r 3000:3005 # send patches 3000 through 3005
+ hg email 3000 # send patch 3000 (deprecated)
+
+ hg email -o # send all patches not in default
+ hg email -o DEST # send all patches not in DEST
+ hg email -o -r 3000 # send all ancestors of 3000 not in default
+ hg email -o -r 3000 DEST # send all ancestors of 3000 not in DEST
+
+ hg email -b # send bundle of all patches not in default
+ hg email -b DEST # send bundle of all patches not in DEST
+ hg email -b -r 3000 # bundle of all ancestors of 3000 not in default
+ hg email -b -r 3000 DEST # bundle of all ancestors of 3000 not in DEST
+
+ hg email -o -m mbox && # generate an mbox file...
+ mutt -R -f mbox # ... and view it with mutt
+ hg email -o -m mbox && # generate an mbox file ...
+ formail -s sendmail \\ # ... and use formail to send from the mbox
+ -bm -t < mbox # ... using sendmail
+
+ Before using this command, you will need to enable email in your
+ hgrc. See the [email] section in hgrc(5) for details.
+ '''
+
+ _charsets = mail._charsets(ui)
+
+ bundle = opts.get('bundle')
+ date = opts.get('date')
+ mbox = opts.get('mbox')
+ outgoing = opts.get('outgoing')
+ rev = opts.get('rev')
+ # internal option used by pbranches
+ patches = opts.get('patches')
+
+ def getoutgoing(dest, revs):
+ '''Return the revisions present locally but not in dest'''
+ dest = ui.expandpath(dest or 'default-push', dest or 'default')
+ dest, branches = hg.parseurl(dest)
+ revs, checkout = hg.addbranchrevs(repo, repo, branches, revs)
+ if revs:
+ revs = [repo.lookup(rev) for rev in revs]
+ other = hg.repository(hg.remoteui(repo, opts), dest)
+ ui.status(_('comparing with %s\n') % url.hidepassword(dest))
+ o = discovery.findoutgoing(repo, other)
+ if not o:
+ ui.status(_("no changes found\n"))
+ return []
+ o = repo.changelog.nodesbetween(o, revs)[0]
+ return [str(repo.changelog.rev(r)) for r in o]
+
+ def getpatches(revs):
+ for r in cmdutil.revrange(repo, revs):
+ output = cStringIO.StringIO()
+ cmdutil.export(repo, [r], fp=output,
+ opts=patch.diffopts(ui, opts))
+ yield output.getvalue().split('\n')
+
+ def getbundle(dest):
+ tmpdir = tempfile.mkdtemp(prefix='hg-email-bundle-')
+ tmpfn = os.path.join(tmpdir, 'bundle')
+ try:
+ commands.bundle(ui, repo, tmpfn, dest, **opts)
+ return open(tmpfn, 'rb').read()
+ finally:
+ try:
+ os.unlink(tmpfn)
+ except:
+ pass
+ os.rmdir(tmpdir)
+
+ if not (opts.get('test') or mbox):
+ # really sending
+ mail.validateconfig(ui)
+
+ if not (revs or rev or outgoing or bundle or patches):
+ raise util.Abort(_('specify at least one changeset with -r or -o'))
+
+ if outgoing and bundle:
+ raise util.Abort(_("--outgoing mode always on with --bundle;"
+ " do not re-specify --outgoing"))
+
+ if outgoing or bundle:
+ if len(revs) > 1:
+ raise util.Abort(_("too many destinations"))
+ dest = revs and revs[0] or None
+ revs = []
+
+ if rev:
+ if revs:
+ raise util.Abort(_('use only one form to specify the revision'))
+ revs = rev
+
+ if outgoing:
+ revs = getoutgoing(dest, rev)
+ if bundle:
+ opts['revs'] = revs
+
+ # start
+ if date:
+ start_time = util.parsedate(date)
+ else:
+ start_time = util.makedate()
+
+ def genmsgid(id):
+ return '<%s.%s@%s>' % (id[:20], int(start_time[0]), socket.getfqdn())
+
+ def getdescription(body, sender):
+ if opts.get('desc'):
+ body = open(opts.get('desc')).read()
+ else:
+ ui.write(_('\nWrite the introductory message for the '
+ 'patch series.\n\n'))
+ body = ui.edit(body, sender)
+ return body
+
+ def getpatchmsgs(patches, patchnames=None):
+ jumbo = []
+ msgs = []
+
+ ui.write(_('This patch series consists of %d patches.\n\n')
+ % len(patches))
+
+ name = None
+ for i, p in enumerate(patches):
+ jumbo.extend(p)
+ if patchnames:
+ name = patchnames[i]
+ msg = makepatch(ui, repo, p, opts, _charsets, i + 1,
+ len(patches), name)
+ msgs.append(msg)
+
+ if introneeded(opts, len(patches)):
+ tlen = len(str(len(patches)))
+
+ flag = ' '.join(opts.get('flag'))
+ if flag:
+ subj = '[PATCH %0*d of %d %s]' % (tlen, 0, len(patches), flag)
+ else:
+ subj = '[PATCH %0*d of %d]' % (tlen, 0, len(patches))
+ subj += ' ' + (opts.get('subject') or
+ prompt(ui, 'Subject: ', rest=subj))
+
+ body = ''
+ ds = patch.diffstat(jumbo)
+ if ds and opts.get('diffstat'):
+ body = '\n' + ds
+
+ body = getdescription(body, sender)
+ msg = mail.mimeencode(ui, body, _charsets, opts.get('test'))
+ msg['Subject'] = mail.headencode(ui, subj, _charsets,
+ opts.get('test'))
+
+ msgs.insert(0, (msg, subj, ds))
+ return msgs
+
+ def getbundlemsgs(bundle):
+ subj = (opts.get('subject')
+ or prompt(ui, 'Subject:', 'A bundle for your repository'))
+
+ body = getdescription('', sender)
+ msg = email.MIMEMultipart.MIMEMultipart()
+ if body:
+ msg.attach(mail.mimeencode(ui, body, _charsets, opts.get('test')))
+ datapart = email.MIMEBase.MIMEBase('application', 'x-mercurial-bundle')
+ datapart.set_payload(bundle)
+ bundlename = '%s.hg' % opts.get('bundlename', 'bundle')
+ datapart.add_header('Content-Disposition', 'attachment',
+ filename=bundlename)
+ email.Encoders.encode_base64(datapart)
+ msg.attach(datapart)
+ msg['Subject'] = mail.headencode(ui, subj, _charsets, opts.get('test'))
+ return [(msg, subj, None)]
+
+ sender = (opts.get('from') or ui.config('email', 'from') or
+ ui.config('patchbomb', 'from') or
+ prompt(ui, 'From', ui.username()))
+
+ if patches:
+ msgs = getpatchmsgs(patches, opts.get('patchnames'))
+ elif bundle:
+ msgs = getbundlemsgs(getbundle(dest))
+ else:
+ msgs = getpatchmsgs(list(getpatches(revs)))
+
+ showaddrs = []
+
+ def getaddrs(opt, prpt=None, default=None):
+ addrs = opts.get(opt.replace('-', '_'))
+ if opt != 'reply-to':
+ showaddr = '%s:' % opt.capitalize()
+ else:
+ showaddr = 'Reply-To:'
+
+ if addrs:
+ showaddrs.append('%s %s' % (showaddr, ', '.join(addrs)))
+ return mail.addrlistencode(ui, addrs, _charsets, opts.get('test'))
+
+ addrs = ui.config('email', opt) or ui.config('patchbomb', opt) or ''
+ if not addrs and prpt:
+ addrs = prompt(ui, prpt, default)
+
+ if addrs:
+ showaddrs.append('%s %s' % (showaddr, addrs))
+ return mail.addrlistencode(ui, [addrs], _charsets, opts.get('test'))
+
+ to = getaddrs('to', 'To')
+ cc = getaddrs('cc', 'Cc', '')
+ bcc = getaddrs('bcc')
+ replyto = getaddrs('reply-to')
+
+ if opts.get('diffstat') or opts.get('confirm'):
+ ui.write(_('\nFinal summary:\n\n'))
+ ui.write('From: %s\n' % sender)
+ for addr in showaddrs:
+ ui.write('%s\n' % addr)
+ for m, subj, ds in msgs:
+ ui.write('Subject: %s\n' % subj)
+ if ds:
+ ui.write(ds)
+ ui.write('\n')
+ if ui.promptchoice(_('are you sure you want to send (yn)?'),
+ (_('&Yes'), _('&No'))):
+ raise util.Abort(_('patchbomb canceled'))
+
+ ui.write('\n')
+
+ parent = opts.get('in_reply_to') or None
+ # angle brackets may be omitted, they're not semantically part of the msg-id
+ if parent is not None:
+ if not parent.startswith('<'):
+ parent = '<' + parent
+ if not parent.endswith('>'):
+ parent += '>'
+
+ first = True
+
+ sender_addr = email.Utils.parseaddr(sender)[1]
+ sender = mail.addressencode(ui, sender, _charsets, opts.get('test'))
+ sendmail = None
+ for i, (m, subj, ds) in enumerate(msgs):
+ try:
+ m['Message-Id'] = genmsgid(m['X-Mercurial-Node'])
+ except TypeError:
+ m['Message-Id'] = genmsgid('patchbomb')
+ if parent:
+ m['In-Reply-To'] = parent
+ m['References'] = parent
+ if first:
+ parent = m['Message-Id']
+ first = False
+
+ m['User-Agent'] = 'Mercurial-patchbomb/%s' % util.version()
+ m['Date'] = email.Utils.formatdate(start_time[0], localtime=True)
+
+ start_time = (start_time[0] + 1, start_time[1])
+ m['From'] = sender
+ m['To'] = ', '.join(to)
+ if cc:
+ m['Cc'] = ', '.join(cc)
+ if bcc:
+ m['Bcc'] = ', '.join(bcc)
+ if replyto:
+ m['Reply-To'] = ', '.join(replyto)
+ if opts.get('test'):
+ ui.status(_('Displaying '), subj, ' ...\n')
+ ui.flush()
+ if 'PAGER' in os.environ and not ui.plain():
+ fp = util.popen(os.environ['PAGER'], 'w')
+ else:
+ fp = ui
+ generator = email.Generator.Generator(fp, mangle_from_=False)
+ try:
+ generator.flatten(m, 0)
+ fp.write('\n')
+ except IOError, inst:
+ if inst.errno != errno.EPIPE:
+ raise
+ if fp is not ui:
+ fp.close()
+ elif mbox:
+ ui.status(_('Writing '), subj, ' ...\n')
+ ui.progress(_('writing'), i, item=subj, total=len(msgs))
+ fp = open(mbox, 'In-Reply-To' in m and 'ab+' or 'wb+')
+ generator = email.Generator.Generator(fp, mangle_from_=True)
+ # Should be time.asctime(), but Windows prints 2-characters day
+ # of month instead of one. Make them print the same thing.
+ date = time.strftime('%a %b %d %H:%M:%S %Y',
+ time.localtime(start_time[0]))
+ fp.write('From %s %s\n' % (sender_addr, date))
+ generator.flatten(m, 0)
+ fp.write('\n\n')
+ fp.close()
+ else:
+ if not sendmail:
+ sendmail = mail.connect(ui)
+ ui.status(_('Sending '), subj, ' ...\n')
+ ui.progress(_('sending'), i, item=subj, total=len(msgs))
+ # Exim does not remove the Bcc field
+ del m['Bcc']
+ fp = cStringIO.StringIO()
+ generator = email.Generator.Generator(fp, mangle_from_=False)
+ generator.flatten(m, 0)
+ sendmail(sender, to + bcc + cc, fp.getvalue())
+
+ ui.progress(_('writing'), None)
+ ui.progress(_('sending'), None)
+
+emailopts = [
+ ('a', 'attach', None, _('send patches as attachments')),
+ ('i', 'inline', None, _('send patches as inline attachments')),
+ ('', 'bcc', [], _('email addresses of blind carbon copy recipients')),
+ ('c', 'cc', [], _('email addresses of copy recipients')),
+ ('', 'confirm', None, _('ask for confirmation before sending')),
+ ('d', 'diffstat', None, _('add diffstat output to messages')),
+ ('', 'date', '', _('use the given date as the sending date')),
+ ('', 'desc', '', _('use the given file as the series description')),
+ ('f', 'from', '', _('email address of sender')),
+ ('n', 'test', None, _('print messages that would be sent')),
+ ('m', 'mbox', '',
+ _('write messages to mbox file instead of sending them')),
+ ('', 'reply-to', [], _('email addresses replies should be sent to')),
+ ('s', 'subject', '',
+ _('subject of first message (intro or single patch)')),
+ ('', 'in-reply-to', '',
+ _('message identifier to reply to')),
+ ('', 'flag', [], _('flags to add in subject prefixes')),
+ ('t', 'to', [], _('email addresses of recipients')),
+ ]
+
+
+cmdtable = {
+ "email":
+ (patchbomb,
+ [('g', 'git', None, _('use git extended diff format')),
+ ('', 'plain', None, _('omit hg patch header')),
+ ('o', 'outgoing', None,
+ _('send changes not found in the target repository')),
+ ('b', 'bundle', None,
+ _('send changes not in target as a binary bundle')),
+ ('', 'bundlename', 'bundle',
+ _('name of the bundle attachment file'), _('NAME')),
+ ('r', 'rev', [],
+ _('a revision to send'), _('REV')),
+ ('', 'force', None,
+ _('run even when remote repository is unrelated '
+ '(with -b/--bundle)')),
+ ('', 'base', [],
+ _('a base changeset to specify instead of a destination '
+ '(with -b/--bundle)'),
+ _('REV')),
+ ('', 'intro', None,
+ _('send an introduction email for a single patch')),
+ ] + emailopts + commands.remoteopts,
+ _('hg email [OPTION]... [DEST]...'))
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyo
new file mode 100644
index 0000000..57d9f7f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py
new file mode 100644
index 0000000..e25f4f8
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py
@@ -0,0 +1,206 @@
+# progress.py show progress bars for some actions
+#
+# Copyright (C) 2010 Augie Fackler <durin42@gmail.com>
+#
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the
+# Free Software Foundation; either version 2 of the License, or (at your
+# option) any later version.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
+# Public License for more details.
+#
+# You should have received a copy of the GNU General Public License along
+# with this program; if not, write to the Free Software Foundation, Inc.,
+# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+
+"""show progress bars for some actions
+
+This extension uses the progress information logged by hg commands
+to draw progress bars that are as informative as possible. Some progress
+bars only offer indeterminate information, while others have a definite
+end point.
+
+The following settings are available::
+
+ [progress]
+ delay = 3 # number of seconds (float) before showing the progress bar
+ refresh = 0.1 # time in seconds between refreshes of the progress bar
+ format = topic bar number # format of the progress bar
+ width = <none> # if set, the maximum width of the progress information
+ # (that is, min(width, term width) will be used)
+ clear-complete = True # clear the progress bar after it's done
+ disable = False # if true, don't show a progress bar
+ assume-tty = False # if true, ALWAYS show a progress bar, unless
+ # disable is given
+
+Valid entries for the format field are topic, bar, number, unit, and
+item. item defaults to the last 20 characters of the item, but this
+can be changed by adding either ``-<num>`` which would take the last
+num characters, or ``+<num>`` for the first num characters.
+"""
+
+import sys
+import time
+
+from mercurial import util
+
+def spacejoin(*args):
+ return ' '.join(s for s in args if s)
+
+def shouldprint(ui):
+ return (getattr(sys.stderr, 'isatty', None) and
+ (sys.stderr.isatty() or ui.configbool('progress', 'assume-tty')))
+
+class progbar(object):
+ def __init__(self, ui):
+ self.ui = ui
+ self.resetstate()
+
+ def resetstate(self):
+ self.topics = []
+ self.printed = False
+ self.lastprint = time.time() + float(self.ui.config(
+ 'progress', 'delay', default=3))
+ self.indetcount = 0
+ self.refresh = float(self.ui.config(
+ 'progress', 'refresh', default=0.1))
+ self.order = self.ui.configlist(
+ 'progress', 'format',
+ default=['topic', 'bar', 'number'])
+
+ def show(self, topic, pos, item, unit, total):
+ if not shouldprint(self.ui):
+ return
+ termwidth = self.width()
+ self.printed = True
+ head = ''
+ needprogress = False
+ tail = ''
+ for indicator in self.order:
+ add = ''
+ if indicator == 'topic':
+ add = topic
+ elif indicator == 'number':
+ if total:
+ add = ('% ' + str(len(str(total))) +
+ 's/%s') % (pos, total)
+ else:
+ add = str(pos)
+ elif indicator.startswith('item') and item:
+ slice = 'end'
+ if '-' in indicator:
+ wid = int(indicator.split('-')[1])
+ elif '+' in indicator:
+ slice = 'beginning'
+ wid = int(indicator.split('+')[1])
+ else:
+ wid = 20
+ if slice == 'end':
+ add = item[-wid:]
+ else:
+ add = item[:wid]
+ add += (wid - len(add)) * ' '
+ elif indicator == 'bar':
+ add = ''
+ needprogress = True
+ elif indicator == 'unit' and unit:
+ add = unit
+ if not needprogress:
+ head = spacejoin(head, add)
+ else:
+ tail = spacejoin(add, tail)
+ if needprogress:
+ used = 0
+ if head:
+ used += len(head) + 1
+ if tail:
+ used += len(tail) + 1
+ progwidth = termwidth - used - 3
+ if total and pos <= total:
+ amt = pos * progwidth // total
+ bar = '=' * (amt - 1)
+ if amt > 0:
+ bar += '>'
+ bar += ' ' * (progwidth - amt)
+ else:
+ progwidth -= 3
+ self.indetcount += 1
+ # mod the count by twice the width so we can make the
+ # cursor bounce between the right and left sides
+ amt = self.indetcount % (2 * progwidth)
+ amt -= progwidth
+ bar = (' ' * int(progwidth - abs(amt)) + '<=>' +
+ ' ' * int(abs(amt)))
+ prog = ''.join(('[', bar , ']'))
+ out = spacejoin(head, prog, tail)
+ else:
+ out = spacejoin(head, tail)
+ sys.stderr.write('\r' + out[:termwidth])
+ sys.stderr.flush()
+
+ def clear(self):
+ if not shouldprint(self.ui):
+ return
+ sys.stderr.write('\r%s\r' % (' ' * self.width()))
+
+ def complete(self):
+ if not shouldprint(self.ui):
+ return
+ if self.ui.configbool('progress', 'clear-complete', default=True):
+ self.clear()
+ else:
+ sys.stderr.write('\n')
+ sys.stderr.flush()
+
+ def width(self):
+ tw = self.ui.termwidth()
+ return min(int(self.ui.config('progress', 'width', default=tw)), tw)
+
+ def progress(self, topic, pos, item='', unit='', total=None):
+ if pos is None:
+ if self.topics and self.topics[-1] == topic and self.printed:
+ self.complete()
+ self.resetstate()
+ else:
+ if topic not in self.topics:
+ self.topics.append(topic)
+ now = time.time()
+ if (now - self.lastprint >= self.refresh
+ and topic == self.topics[-1]):
+ self.lastprint = now
+ self.show(topic, pos, item, unit, total)
+
+def uisetup(ui):
+ class progressui(ui.__class__):
+ _progbar = None
+
+ def progress(self, *args, **opts):
+ self._progbar.progress(*args, **opts)
+ return super(progressui, self).progress(*args, **opts)
+
+ def write(self, *args, **opts):
+ if self._progbar.printed:
+ self._progbar.clear()
+ return super(progressui, self).write(*args, **opts)
+
+ def write_err(self, *args, **opts):
+ if self._progbar.printed:
+ self._progbar.clear()
+ return super(progressui, self).write_err(*args, **opts)
+
+ # Apps that derive a class from ui.ui() can use
+ # setconfig('progress', 'disable', 'True') to disable this extension
+ if ui.configbool('progress', 'disable'):
+ return
+ if shouldprint(ui) and not ui.debugflag and not ui.quiet:
+ ui.__class__ = progressui
+ # we instantiate one globally shared progress bar to avoid
+ # competing progress bars when multiple UI objects get created
+ if not progressui._progbar:
+ progressui._progbar = progbar(ui)
+
+def reposetup(ui, repo):
+ uisetup(repo.ui)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyo
new file mode 100644
index 0000000..e0a83d4
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py
new file mode 100644
index 0000000..4a99ec3
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py
@@ -0,0 +1,111 @@
+# Copyright (C) 2006 - Marco Barisione <marco@barisione.org>
+#
+# This is a small extension for Mercurial (http://mercurial.selenic.com/)
+# that removes files not known to mercurial
+#
+# This program was inspired by the "cvspurge" script contained in CVS
+# utilities (http://www.red-bean.com/cvsutils/).
+#
+# For help on the usage of "hg purge" use:
+# hg help purge
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation; either version 2 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
+
+'''command to delete untracked files from the working directory'''
+
+from mercurial import util, commands, cmdutil
+from mercurial.i18n import _
+import os, stat
+
+def purge(ui, repo, *dirs, **opts):
+ '''removes files not tracked by Mercurial
+
+ Delete files not known to Mercurial. This is useful to test local
+ and uncommitted changes in an otherwise-clean source tree.
+
+ This means that purge will delete:
+
+ - Unknown files: files marked with "?" by :hg:`status`
+ - Empty directories: in fact Mercurial ignores directories unless
+ they contain files under source control management
+
+ But it will leave untouched:
+
+ - Modified and unmodified tracked files
+ - Ignored files (unless --all is specified)
+ - New files added to the repository (with :hg:`add`)
+
+ If directories are given on the command line, only files in these
+ directories are considered.
+
+ Be careful with purge, as you could irreversibly delete some files
+ you forgot to add to the repository. If you only want to print the
+ list of files that this program would delete, use the --print
+ option.
+ '''
+ act = not opts['print']
+ eol = '\n'
+ if opts['print0']:
+ eol = '\0'
+ act = False # --print0 implies --print
+
+ def remove(remove_func, name):
+ if act:
+ try:
+ remove_func(repo.wjoin(name))
+ except OSError:
+ m = _('%s cannot be removed') % name
+ if opts['abort_on_err']:
+ raise util.Abort(m)
+ ui.warn(_('warning: %s\n') % m)
+ else:
+ ui.write('%s%s' % (name, eol))
+
+ def removefile(path):
+ try:
+ os.remove(path)
+ except OSError:
+ # read-only files cannot be unlinked under Windows
+ s = os.stat(path)
+ if (s.st_mode & stat.S_IWRITE) != 0:
+ raise
+ os.chmod(path, stat.S_IMODE(s.st_mode) | stat.S_IWRITE)
+ os.remove(path)
+
+ directories = []
+ match = cmdutil.match(repo, dirs, opts)
+ match.dir = directories.append
+ status = repo.status(match=match, ignored=opts['all'], unknown=True)
+
+ for f in sorted(status[4] + status[5]):
+ ui.note(_('Removing file %s\n') % f)
+ remove(removefile, f)
+
+ for f in sorted(directories, reverse=True):
+ if match(f) and not os.listdir(repo.wjoin(f)):
+ ui.note(_('Removing directory %s\n') % f)
+ remove(os.rmdir, f)
+
+cmdtable = {
+ 'purge|clean':
+ (purge,
+ [('a', 'abort-on-err', None, _('abort if an error occurs')),
+ ('', 'all', None, _('purge ignored files too')),
+ ('p', 'print', None, _('print filenames instead of deleting them')),
+ ('0', 'print0', None, _('end filenames with NUL, for use with xargs'
+ ' (implies -p/--print)')),
+ ] + commands.walkopts,
+ _('hg purge [OPTION]... [DIR]...'))
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyo
new file mode 100644
index 0000000..69287ba
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py
new file mode 100644
index 0000000..7a43541
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py
@@ -0,0 +1,577 @@
+# rebase.py - rebasing feature for mercurial
+#
+# Copyright 2008 Stefano Tortarolo <stefano.tortarolo at gmail dot com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''command to move sets of revisions to a different ancestor
+
+This extension lets you rebase changesets in an existing Mercurial
+repository.
+
+For more information:
+http://mercurial.selenic.com/wiki/RebaseExtension
+'''
+
+from mercurial import hg, util, repair, merge, cmdutil, commands
+from mercurial import extensions, ancestor, copies, patch
+from mercurial.commands import templateopts
+from mercurial.node import nullrev
+from mercurial.lock import release
+from mercurial.i18n import _
+import os, errno
+
+nullmerge = -2
+
+def rebase(ui, repo, **opts):
+ """move changeset (and descendants) to a different branch
+
+ Rebase uses repeated merging to graft changesets from one part of
+ history (the source) onto another (the destination). This can be
+ useful for linearizing *local* changes relative to a master
+ development tree.
+
+ You should not rebase changesets that have already been shared
+ with others. Doing so will force everybody else to perform the
+ same rebase or they will end up with duplicated changesets after
+ pulling in your rebased changesets.
+
+ If you don't specify a destination changeset (``-d/--dest``),
+ rebase uses the tipmost head of the current named branch as the
+ destination. (The destination changeset is not modified by
+ rebasing, but new changesets are added as its descendants.)
+
+ You can specify which changesets to rebase in two ways: as a
+ "source" changeset or as a "base" changeset. Both are shorthand
+ for a topologically related set of changesets (the "source
+ branch"). If you specify source (``-s/--source``), rebase will
+ rebase that changeset and all of its descendants onto dest. If you
+ specify base (``-b/--base``), rebase will select ancestors of base
+ back to but not including the common ancestor with dest. Thus,
+ ``-b`` is less precise but more convenient than ``-s``: you can
+ specify any changeset in the source branch, and rebase will select
+ the whole branch. If you specify neither ``-s`` nor ``-b``, rebase
+ uses the parent of the working directory as the base.
+
+ By default, rebase recreates the changesets in the source branch
+ as descendants of dest and then destroys the originals. Use
+ ``--keep`` to preserve the original source changesets. Some
+ changesets in the source branch (e.g. merges from the destination
+ branch) may be dropped if they no longer contribute any change.
+
+ One result of the rules for selecting the destination changeset
+ and source branch is that, unlike ``merge``, rebase will do
+ nothing if you are at the latest (tipmost) head of a named branch
+ with two heads. You need to explicitly specify source and/or
+ destination (or ``update`` to the other head, if it's the head of
+ the intended source branch).
+
+ If a rebase is interrupted to manually resolve a merge, it can be
+ continued with --continue/-c or aborted with --abort/-a.
+
+ Returns 0 on success, 1 if nothing to rebase.
+ """
+ originalwd = target = None
+ external = nullrev
+ state = {}
+ skipped = set()
+ targetancestors = set()
+
+ lock = wlock = None
+ try:
+ lock = repo.lock()
+ wlock = repo.wlock()
+
+ # Validate input and define rebasing points
+ destf = opts.get('dest', None)
+ srcf = opts.get('source', None)
+ basef = opts.get('base', None)
+ contf = opts.get('continue')
+ abortf = opts.get('abort')
+ collapsef = opts.get('collapse', False)
+ extrafn = opts.get('extrafn')
+ keepf = opts.get('keep', False)
+ keepbranchesf = opts.get('keepbranches', False)
+ detachf = opts.get('detach', False)
+ # keepopen is not meant for use on the command line, but by
+ # other extensions
+ keepopen = opts.get('keepopen', False)
+
+ if contf or abortf:
+ if contf and abortf:
+ raise util.Abort(_('cannot use both abort and continue'))
+ if collapsef:
+ raise util.Abort(
+ _('cannot use collapse with continue or abort'))
+ if detachf:
+ raise util.Abort(_('cannot use detach with continue or abort'))
+ if srcf or basef or destf:
+ raise util.Abort(
+ _('abort and continue do not allow specifying revisions'))
+
+ (originalwd, target, state, skipped, collapsef, keepf,
+ keepbranchesf, external) = restorestatus(repo)
+ if abortf:
+ return abort(repo, originalwd, target, state)
+ else:
+ if srcf and basef:
+ raise util.Abort(_('cannot specify both a '
+ 'revision and a base'))
+ if detachf:
+ if not srcf:
+ raise util.Abort(
+ _('detach requires a revision to be specified'))
+ if basef:
+ raise util.Abort(_('cannot specify a base with detach'))
+
+ cmdutil.bail_if_changed(repo)
+ result = buildstate(repo, destf, srcf, basef, detachf)
+ if not result:
+ # Empty state built, nothing to rebase
+ ui.status(_('nothing to rebase\n'))
+ return 1
+ else:
+ originalwd, target, state = result
+ if collapsef:
+ targetancestors = set(repo.changelog.ancestors(target))
+ external = checkexternal(repo, state, targetancestors)
+
+ if keepbranchesf:
+ if extrafn:
+ raise util.Abort(_('cannot use both keepbranches and extrafn'))
+ def extrafn(ctx, extra):
+ extra['branch'] = ctx.branch()
+
+ # Rebase
+ if not targetancestors:
+ targetancestors = set(repo.changelog.ancestors(target))
+ targetancestors.add(target)
+
+ sortedstate = sorted(state)
+ total = len(sortedstate)
+ pos = 0
+ for rev in sortedstate:
+ pos += 1
+ if state[rev] == -1:
+ ui.progress(_("rebasing"), pos, ("%d:%s" % (rev, repo[rev])),
+ _('changesets'), total)
+ storestatus(repo, originalwd, target, state, collapsef, keepf,
+ keepbranchesf, external)
+ p1, p2 = defineparents(repo, rev, target, state,
+ targetancestors)
+ if len(repo.parents()) == 2:
+ repo.ui.debug('resuming interrupted rebase\n')
+ else:
+ stats = rebasenode(repo, rev, p1, p2, state)
+ if stats and stats[3] > 0:
+ raise util.Abort(_('unresolved conflicts (see hg '
+ 'resolve, then hg rebase --continue)'))
+ updatedirstate(repo, rev, target, p2)
+ if not collapsef:
+ newrev = concludenode(repo, rev, p1, p2, extrafn=extrafn)
+ else:
+ # Skip commit if we are collapsing
+ repo.dirstate.setparents(repo[p1].node())
+ newrev = None
+ # Update the state
+ if newrev is not None:
+ state[rev] = repo[newrev].rev()
+ else:
+ if not collapsef:
+ ui.note(_('no changes, revision %d skipped\n') % rev)
+ ui.debug('next revision set to %s\n' % p1)
+ skipped.add(rev)
+ state[rev] = p1
+
+ ui.progress(_('rebasing'), None)
+ ui.note(_('rebase merging completed\n'))
+
+ if collapsef and not keepopen:
+ p1, p2 = defineparents(repo, min(state), target,
+ state, targetancestors)
+ commitmsg = 'Collapsed revision'
+ for rebased in state:
+ if rebased not in skipped and state[rebased] != nullmerge:
+ commitmsg += '\n* %s' % repo[rebased].description()
+ commitmsg = ui.edit(commitmsg, repo.ui.username())
+ newrev = concludenode(repo, rev, p1, external, commitmsg=commitmsg,
+ extrafn=extrafn)
+
+ if 'qtip' in repo.tags():
+ updatemq(repo, state, skipped, **opts)
+
+ if not keepf:
+ # Remove no more useful revisions
+ rebased = [rev for rev in state if state[rev] != nullmerge]
+ if rebased:
+ if set(repo.changelog.descendants(min(rebased))) - set(state):
+ ui.warn(_("warning: new changesets detected "
+ "on source branch, not stripping\n"))
+ else:
+ # backup the old csets by default
+ repair.strip(ui, repo, repo[min(rebased)].node(), "all")
+
+ clearstatus(repo)
+ ui.note(_("rebase completed\n"))
+ if os.path.exists(repo.sjoin('undo')):
+ util.unlink(repo.sjoin('undo'))
+ if skipped:
+ ui.note(_("%d revisions have been skipped\n") % len(skipped))
+ finally:
+ release(lock, wlock)
+
+def rebasemerge(repo, rev, first=False):
+ 'return the correct ancestor'
+ oldancestor = ancestor.ancestor
+
+ def newancestor(a, b, pfunc):
+ if b == rev:
+ return repo[rev].parents()[0].rev()
+ return oldancestor(a, b, pfunc)
+
+ if not first:
+ ancestor.ancestor = newancestor
+ else:
+ repo.ui.debug("first revision, do not change ancestor\n")
+ try:
+ stats = merge.update(repo, rev, True, True, False)
+ return stats
+ finally:
+ ancestor.ancestor = oldancestor
+
+def checkexternal(repo, state, targetancestors):
+ """Check whether one or more external revisions need to be taken in
+ consideration. In the latter case, abort.
+ """
+ external = nullrev
+ source = min(state)
+ for rev in state:
+ if rev == source:
+ continue
+ # Check externals and fail if there are more than one
+ for p in repo[rev].parents():
+ if (p.rev() not in state
+ and p.rev() not in targetancestors):
+ if external != nullrev:
+ raise util.Abort(_('unable to collapse, there is more '
+ 'than one external parent'))
+ external = p.rev()
+ return external
+
+def updatedirstate(repo, rev, p1, p2):
+ """Keep track of renamed files in the revision that is going to be rebased
+ """
+ # Here we simulate the copies and renames in the source changeset
+ cop, diver = copies.copies(repo, repo[rev], repo[p1], repo[p2], True)
+ m1 = repo[rev].manifest()
+ m2 = repo[p1].manifest()
+ for k, v in cop.iteritems():
+ if k in m1:
+ if v in m1 or v in m2:
+ repo.dirstate.copy(v, k)
+ if v in m2 and v not in m1:
+ repo.dirstate.remove(v)
+
+def concludenode(repo, rev, p1, p2, commitmsg=None, extrafn=None):
+ 'Commit the changes and store useful information in extra'
+ try:
+ repo.dirstate.setparents(repo[p1].node(), repo[p2].node())
+ ctx = repo[rev]
+ if commitmsg is None:
+ commitmsg = ctx.description()
+ extra = {'rebase_source': ctx.hex()}
+ if extrafn:
+ extrafn(ctx, extra)
+ # Commit might fail if unresolved files exist
+ newrev = repo.commit(text=commitmsg, user=ctx.user(),
+ date=ctx.date(), extra=extra)
+ repo.dirstate.setbranch(repo[newrev].branch())
+ return newrev
+ except util.Abort:
+ # Invalidate the previous setparents
+ repo.dirstate.invalidate()
+ raise
+
+def rebasenode(repo, rev, p1, p2, state):
+ 'Rebase a single revision'
+ # Merge phase
+ # Update to target and merge it with local
+ if repo['.'].rev() != repo[p1].rev():
+ repo.ui.debug(" update to %d:%s\n" % (repo[p1].rev(), repo[p1]))
+ merge.update(repo, p1, False, True, False)
+ else:
+ repo.ui.debug(" already in target\n")
+ repo.dirstate.write()
+ repo.ui.debug(" merge against %d:%s\n" % (repo[rev].rev(), repo[rev]))
+ first = repo[rev].rev() == repo[min(state)].rev()
+ stats = rebasemerge(repo, rev, first)
+ return stats
+
+def defineparents(repo, rev, target, state, targetancestors):
+ 'Return the new parent relationship of the revision that will be rebased'
+ parents = repo[rev].parents()
+ p1 = p2 = nullrev
+
+ P1n = parents[0].rev()
+ if P1n in targetancestors:
+ p1 = target
+ elif P1n in state:
+ if state[P1n] == nullmerge:
+ p1 = target
+ else:
+ p1 = state[P1n]
+ else: # P1n external
+ p1 = target
+ p2 = P1n
+
+ if len(parents) == 2 and parents[1].rev() not in targetancestors:
+ P2n = parents[1].rev()
+ # interesting second parent
+ if P2n in state:
+ if p1 == target: # P1n in targetancestors or external
+ p1 = state[P2n]
+ else:
+ p2 = state[P2n]
+ else: # P2n external
+ if p2 != nullrev: # P1n external too => rev is a merged revision
+ raise util.Abort(_('cannot use revision %d as base, result '
+ 'would have 3 parents') % rev)
+ p2 = P2n
+ repo.ui.debug(" future parents are %d and %d\n" %
+ (repo[p1].rev(), repo[p2].rev()))
+ return p1, p2
+
+def isagitpatch(repo, patchname):
+ 'Return true if the given patch is in git format'
+ mqpatch = os.path.join(repo.mq.path, patchname)
+ for line in patch.linereader(file(mqpatch, 'rb')):
+ if line.startswith('diff --git'):
+ return True
+ return False
+
+def updatemq(repo, state, skipped, **opts):
+ 'Update rebased mq patches - finalize and then import them'
+ mqrebase = {}
+ mq = repo.mq
+ for p in mq.applied:
+ rev = repo[p.node].rev()
+ if rev in state:
+ repo.ui.debug('revision %d is an mq patch (%s), finalize it.\n' %
+ (rev, p.name))
+ mqrebase[rev] = (p.name, isagitpatch(repo, p.name))
+
+ if mqrebase:
+ mq.finish(repo, mqrebase.keys())
+
+ # We must start import from the newest revision
+ for rev in sorted(mqrebase, reverse=True):
+ if rev not in skipped:
+ name, isgit = mqrebase[rev]
+ repo.ui.debug('import mq patch %d (%s)\n' % (state[rev], name))
+ mq.qimport(repo, (), patchname=name, git=isgit,
+ rev=[str(state[rev])])
+ mq.save_dirty()
+
+def storestatus(repo, originalwd, target, state, collapse, keep, keepbranches,
+ external):
+ 'Store the current status to allow recovery'
+ f = repo.opener("rebasestate", "w")
+ f.write(repo[originalwd].hex() + '\n')
+ f.write(repo[target].hex() + '\n')
+ f.write(repo[external].hex() + '\n')
+ f.write('%d\n' % int(collapse))
+ f.write('%d\n' % int(keep))
+ f.write('%d\n' % int(keepbranches))
+ for d, v in state.iteritems():
+ oldrev = repo[d].hex()
+ newrev = repo[v].hex()
+ f.write("%s:%s\n" % (oldrev, newrev))
+ f.close()
+ repo.ui.debug('rebase status stored\n')
+
+def clearstatus(repo):
+ 'Remove the status files'
+ if os.path.exists(repo.join("rebasestate")):
+ util.unlink(repo.join("rebasestate"))
+
+def restorestatus(repo):
+ 'Restore a previously stored status'
+ try:
+ target = None
+ collapse = False
+ external = nullrev
+ state = {}
+ f = repo.opener("rebasestate")
+ for i, l in enumerate(f.read().splitlines()):
+ if i == 0:
+ originalwd = repo[l].rev()
+ elif i == 1:
+ target = repo[l].rev()
+ elif i == 2:
+ external = repo[l].rev()
+ elif i == 3:
+ collapse = bool(int(l))
+ elif i == 4:
+ keep = bool(int(l))
+ elif i == 5:
+ keepbranches = bool(int(l))
+ else:
+ oldrev, newrev = l.split(':')
+ state[repo[oldrev].rev()] = repo[newrev].rev()
+ skipped = set()
+ # recompute the set of skipped revs
+ if not collapse:
+ seen = set([target])
+ for old, new in sorted(state.items()):
+ if new != nullrev and new in seen:
+ skipped.add(old)
+ seen.add(new)
+ repo.ui.debug('computed skipped revs: %s\n' % skipped)
+ repo.ui.debug('rebase status resumed\n')
+ return (originalwd, target, state, skipped,
+ collapse, keep, keepbranches, external)
+ except IOError, err:
+ if err.errno != errno.ENOENT:
+ raise
+ raise util.Abort(_('no rebase in progress'))
+
+def abort(repo, originalwd, target, state):
+ 'Restore the repository to its original state'
+ if set(repo.changelog.descendants(target)) - set(state.values()):
+ repo.ui.warn(_("warning: new changesets detected on target branch, "
+ "can't abort\n"))
+ return -1
+ else:
+ # Strip from the first rebased revision
+ merge.update(repo, repo[originalwd].rev(), False, True, False)
+ rebased = filter(lambda x: x > -1 and x != target, state.values())
+ if rebased:
+ strippoint = min(rebased)
+ # no backup of rebased cset versions needed
+ repair.strip(repo.ui, repo, repo[strippoint].node())
+ clearstatus(repo)
+ repo.ui.warn(_('rebase aborted\n'))
+ return 0
+
+def buildstate(repo, dest, src, base, detach):
+ 'Define which revisions are going to be rebased and where'
+ targetancestors = set()
+ detachset = set()
+
+ if not dest:
+ # Destination defaults to the latest revision in the current branch
+ branch = repo[None].branch()
+ dest = repo[branch].rev()
+ else:
+ dest = repo[dest].rev()
+
+ # This check isn't strictly necessary, since mq detects commits over an
+ # applied patch. But it prevents messing up the working directory when
+ # a partially completed rebase is blocked by mq.
+ if 'qtip' in repo.tags() and (repo[dest].node() in
+ [s.node for s in repo.mq.applied]):
+ raise util.Abort(_('cannot rebase onto an applied mq patch'))
+
+ if src:
+ commonbase = repo[src].ancestor(repo[dest])
+ if commonbase == repo[src]:
+ raise util.Abort(_('source is ancestor of destination'))
+ if commonbase == repo[dest]:
+ raise util.Abort(_('source is descendant of destination'))
+ source = repo[src].rev()
+ if detach:
+ # We need to keep track of source's ancestors up to the common base
+ srcancestors = set(repo.changelog.ancestors(source))
+ baseancestors = set(repo.changelog.ancestors(commonbase.rev()))
+ detachset = srcancestors - baseancestors
+ detachset.discard(commonbase.rev())
+ else:
+ if base:
+ cwd = repo[base].rev()
+ else:
+ cwd = repo['.'].rev()
+
+ if cwd == dest:
+ repo.ui.debug('source and destination are the same\n')
+ return None
+
+ targetancestors = set(repo.changelog.ancestors(dest))
+ if cwd in targetancestors:
+ repo.ui.debug('source is ancestor of destination\n')
+ return None
+
+ cwdancestors = set(repo.changelog.ancestors(cwd))
+ if dest in cwdancestors:
+ repo.ui.debug('source is descendant of destination\n')
+ return None
+
+ cwdancestors.add(cwd)
+ rebasingbranch = cwdancestors - targetancestors
+ source = min(rebasingbranch)
+
+ repo.ui.debug('rebase onto %d starting from %d\n' % (dest, source))
+ state = dict.fromkeys(repo.changelog.descendants(source), nullrev)
+ state.update(dict.fromkeys(detachset, nullmerge))
+ state[source] = nullrev
+ return repo['.'].rev(), repo[dest].rev(), state
+
+def pullrebase(orig, ui, repo, *args, **opts):
+ 'Call rebase after pull if the latter has been invoked with --rebase'
+ if opts.get('rebase'):
+ if opts.get('update'):
+ del opts['update']
+ ui.debug('--update and --rebase are not compatible, ignoring '
+ 'the update flag\n')
+
+ cmdutil.bail_if_changed(repo)
+ revsprepull = len(repo)
+ origpostincoming = commands.postincoming
+ def _dummy(*args, **kwargs):
+ pass
+ commands.postincoming = _dummy
+ try:
+ orig(ui, repo, *args, **opts)
+ finally:
+ commands.postincoming = origpostincoming
+ revspostpull = len(repo)
+ if revspostpull > revsprepull:
+ rebase(ui, repo, **opts)
+ branch = repo[None].branch()
+ dest = repo[branch].rev()
+ if dest != repo['.'].rev():
+ # there was nothing to rebase we force an update
+ hg.update(repo, dest)
+ else:
+ orig(ui, repo, *args, **opts)
+
+def uisetup(ui):
+ 'Replace pull with a decorator to provide --rebase option'
+ entry = extensions.wrapcommand(commands.table, 'pull', pullrebase)
+ entry[1].append(('', 'rebase', None,
+ _("rebase working directory to branch head"))
+)
+
+cmdtable = {
+"rebase":
+ (rebase,
+ [
+ ('s', 'source', '',
+ _('rebase from the specified changeset'), _('REV')),
+ ('b', 'base', '',
+ _('rebase from the base of the specified changeset '
+ '(up to greatest common ancestor of base and dest)'),
+ _('REV')),
+ ('d', 'dest', '',
+ _('rebase onto the specified changeset'), _('REV')),
+ ('', 'collapse', False, _('collapse the rebased changesets')),
+ ('', 'keep', False, _('keep original changesets')),
+ ('', 'keepbranches', False, _('keep original branch names')),
+ ('', 'detach', False, _('force detaching of source from its original '
+ 'branch')),
+ ('c', 'continue', False, _('continue an interrupted rebase')),
+ ('a', 'abort', False, _('abort an interrupted rebase'))] +
+ templateopts,
+ _('hg rebase [-s REV | -b REV] [-d REV] [options]\n'
+ 'hg rebase {-a|-c}'))
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyo
new file mode 100644
index 0000000..c637b8f
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py
new file mode 100644
index 0000000..c515c26
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py
@@ -0,0 +1,569 @@
+# record.py
+#
+# Copyright 2007 Bryan O'Sullivan <bos@serpentine.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''commands to interactively select changes for commit/qrefresh'''
+
+from mercurial.i18n import gettext, _
+from mercurial import cmdutil, commands, extensions, hg, mdiff, patch
+from mercurial import util
+import copy, cStringIO, errno, os, re, tempfile
+
+lines_re = re.compile(r'@@ -(\d+),(\d+) \+(\d+),(\d+) @@\s*(.*)')
+
+def scanpatch(fp):
+ """like patch.iterhunks, but yield different events
+
+ - ('file', [header_lines + fromfile + tofile])
+ - ('context', [context_lines])
+ - ('hunk', [hunk_lines])
+ - ('range', (-start,len, +start,len, diffp))
+ """
+ lr = patch.linereader(fp)
+
+ def scanwhile(first, p):
+ """scan lr while predicate holds"""
+ lines = [first]
+ while True:
+ line = lr.readline()
+ if not line:
+ break
+ if p(line):
+ lines.append(line)
+ else:
+ lr.push(line)
+ break
+ return lines
+
+ while True:
+ line = lr.readline()
+ if not line:
+ break
+ if line.startswith('diff --git a/'):
+ def notheader(line):
+ s = line.split(None, 1)
+ return not s or s[0] not in ('---', 'diff')
+ header = scanwhile(line, notheader)
+ fromfile = lr.readline()
+ if fromfile.startswith('---'):
+ tofile = lr.readline()
+ header += [fromfile, tofile]
+ else:
+ lr.push(fromfile)
+ yield 'file', header
+ elif line[0] == ' ':
+ yield 'context', scanwhile(line, lambda l: l[0] in ' \\')
+ elif line[0] in '-+':
+ yield 'hunk', scanwhile(line, lambda l: l[0] in '-+\\')
+ else:
+ m = lines_re.match(line)
+ if m:
+ yield 'range', m.groups()
+ else:
+ raise patch.PatchError('unknown patch content: %r' % line)
+
+class header(object):
+ """patch header
+
+ XXX shoudn't we move this to mercurial/patch.py ?
+ """
+ diff_re = re.compile('diff --git a/(.*) b/(.*)$')
+ allhunks_re = re.compile('(?:index|new file|deleted file) ')
+ pretty_re = re.compile('(?:new file|deleted file) ')
+ special_re = re.compile('(?:index|new|deleted|copy|rename) ')
+
+ def __init__(self, header):
+ self.header = header
+ self.hunks = []
+
+ def binary(self):
+ for h in self.header:
+ if h.startswith('index '):
+ return True
+
+ def pretty(self, fp):
+ for h in self.header:
+ if h.startswith('index '):
+ fp.write(_('this modifies a binary file (all or nothing)\n'))
+ break
+ if self.pretty_re.match(h):
+ fp.write(h)
+ if self.binary():
+ fp.write(_('this is a binary file\n'))
+ break
+ if h.startswith('---'):
+ fp.write(_('%d hunks, %d lines changed\n') %
+ (len(self.hunks),
+ sum([max(h.added, h.removed) for h in self.hunks])))
+ break
+ fp.write(h)
+
+ def write(self, fp):
+ fp.write(''.join(self.header))
+
+ def allhunks(self):
+ for h in self.header:
+ if self.allhunks_re.match(h):
+ return True
+
+ def files(self):
+ fromfile, tofile = self.diff_re.match(self.header[0]).groups()
+ if fromfile == tofile:
+ return [fromfile]
+ return [fromfile, tofile]
+
+ def filename(self):
+ return self.files()[-1]
+
+ def __repr__(self):
+ return '<header %s>' % (' '.join(map(repr, self.files())))
+
+ def special(self):
+ for h in self.header:
+ if self.special_re.match(h):
+ return True
+
+def countchanges(hunk):
+ """hunk -> (n+,n-)"""
+ add = len([h for h in hunk if h[0] == '+'])
+ rem = len([h for h in hunk if h[0] == '-'])
+ return add, rem
+
+class hunk(object):
+ """patch hunk
+
+ XXX shouldn't we merge this with patch.hunk ?
+ """
+ maxcontext = 3
+
+ def __init__(self, header, fromline, toline, proc, before, hunk, after):
+ def trimcontext(number, lines):
+ delta = len(lines) - self.maxcontext
+ if False and delta > 0:
+ return number + delta, lines[:self.maxcontext]
+ return number, lines
+
+ self.header = header
+ self.fromline, self.before = trimcontext(fromline, before)
+ self.toline, self.after = trimcontext(toline, after)
+ self.proc = proc
+ self.hunk = hunk
+ self.added, self.removed = countchanges(self.hunk)
+
+ def write(self, fp):
+ delta = len(self.before) + len(self.after)
+ if self.after and self.after[-1] == '\\ No newline at end of file\n':
+ delta -= 1
+ fromlen = delta + self.removed
+ tolen = delta + self.added
+ fp.write('@@ -%d,%d +%d,%d @@%s\n' %
+ (self.fromline, fromlen, self.toline, tolen,
+ self.proc and (' ' + self.proc)))
+ fp.write(''.join(self.before + self.hunk + self.after))
+
+ pretty = write
+
+ def filename(self):
+ return self.header.filename()
+
+ def __repr__(self):
+ return '<hunk %r@%d>' % (self.filename(), self.fromline)
+
+def parsepatch(fp):
+ """patch -> [] of hunks """
+ class parser(object):
+ """patch parsing state machine"""
+ def __init__(self):
+ self.fromline = 0
+ self.toline = 0
+ self.proc = ''
+ self.header = None
+ self.context = []
+ self.before = []
+ self.hunk = []
+ self.stream = []
+
+ def addrange(self, limits):
+ fromstart, fromend, tostart, toend, proc = limits
+ self.fromline = int(fromstart)
+ self.toline = int(tostart)
+ self.proc = proc
+
+ def addcontext(self, context):
+ if self.hunk:
+ h = hunk(self.header, self.fromline, self.toline, self.proc,
+ self.before, self.hunk, context)
+ self.header.hunks.append(h)
+ self.stream.append(h)
+ self.fromline += len(self.before) + h.removed
+ self.toline += len(self.before) + h.added
+ self.before = []
+ self.hunk = []
+ self.proc = ''
+ self.context = context
+
+ def addhunk(self, hunk):
+ if self.context:
+ self.before = self.context
+ self.context = []
+ self.hunk = hunk
+
+ def newfile(self, hdr):
+ self.addcontext([])
+ h = header(hdr)
+ self.stream.append(h)
+ self.header = h
+
+ def finished(self):
+ self.addcontext([])
+ return self.stream
+
+ transitions = {
+ 'file': {'context': addcontext,
+ 'file': newfile,
+ 'hunk': addhunk,
+ 'range': addrange},
+ 'context': {'file': newfile,
+ 'hunk': addhunk,
+ 'range': addrange},
+ 'hunk': {'context': addcontext,
+ 'file': newfile,
+ 'range': addrange},
+ 'range': {'context': addcontext,
+ 'hunk': addhunk},
+ }
+
+ p = parser()
+
+ state = 'context'
+ for newstate, data in scanpatch(fp):
+ try:
+ p.transitions[state][newstate](p, data)
+ except KeyError:
+ raise patch.PatchError('unhandled transition: %s -> %s' %
+ (state, newstate))
+ state = newstate
+ return p.finished()
+
+def filterpatch(ui, chunks):
+ """Interactively filter patch chunks into applied-only chunks"""
+ chunks = list(chunks)
+ chunks.reverse()
+ seen = set()
+ def consumefile():
+ """fetch next portion from chunks until a 'header' is seen
+ NB: header == new-file mark
+ """
+ consumed = []
+ while chunks:
+ if isinstance(chunks[-1], header):
+ break
+ else:
+ consumed.append(chunks.pop())
+ return consumed
+
+ resp_all = [None] # this two are changed from inside prompt,
+ resp_file = [None] # so can't be usual variables
+ applied = {} # 'filename' -> [] of chunks
+ def prompt(query):
+ """prompt query, and process base inputs
+
+ - y/n for the rest of file
+ - y/n for the rest
+ - ? (help)
+ - q (quit)
+
+ Returns True/False and sets reps_all and resp_file as
+ appropriate.
+ """
+ if resp_all[0] is not None:
+ return resp_all[0]
+ if resp_file[0] is not None:
+ return resp_file[0]
+ while True:
+ resps = _('[Ynsfdaq?]')
+ choices = (_('&Yes, record this change'),
+ _('&No, skip this change'),
+ _('&Skip remaining changes to this file'),
+ _('Record remaining changes to this &file'),
+ _('&Done, skip remaining changes and files'),
+ _('Record &all changes to all remaining files'),
+ _('&Quit, recording no changes'),
+ _('&?'))
+ r = ui.promptchoice("%s %s" % (query, resps), choices)
+ ui.write("\n")
+ if r == 7: # ?
+ doc = gettext(record.__doc__)
+ c = doc.find('::') + 2
+ for l in doc[c:].splitlines():
+ if l.startswith(' '):
+ ui.write(l.strip(), '\n')
+ continue
+ elif r == 0: # yes
+ ret = True
+ elif r == 1: # no
+ ret = False
+ elif r == 2: # Skip
+ ret = resp_file[0] = False
+ elif r == 3: # file (Record remaining)
+ ret = resp_file[0] = True
+ elif r == 4: # done, skip remaining
+ ret = resp_all[0] = False
+ elif r == 5: # all
+ ret = resp_all[0] = True
+ elif r == 6: # quit
+ raise util.Abort(_('user quit'))
+ return ret
+ pos, total = 0, len(chunks) - 1
+ while chunks:
+ pos = total - len(chunks) + 1
+ chunk = chunks.pop()
+ if isinstance(chunk, header):
+ # new-file mark
+ resp_file = [None]
+ fixoffset = 0
+ hdr = ''.join(chunk.header)
+ if hdr in seen:
+ consumefile()
+ continue
+ seen.add(hdr)
+ if resp_all[0] is None:
+ chunk.pretty(ui)
+ r = prompt(_('examine changes to %s?') %
+ _(' and ').join(map(repr, chunk.files())))
+ if r:
+ applied[chunk.filename()] = [chunk]
+ if chunk.allhunks():
+ applied[chunk.filename()] += consumefile()
+ else:
+ consumefile()
+ else:
+ # new hunk
+ if resp_file[0] is None and resp_all[0] is None:
+ chunk.pretty(ui)
+ r = total == 1 and prompt(_('record this change to %r?') %
+ chunk.filename()) \
+ or prompt(_('record change %d/%d to %r?') %
+ (pos, total, chunk.filename()))
+ if r:
+ if fixoffset:
+ chunk = copy.copy(chunk)
+ chunk.toline += fixoffset
+ applied[chunk.filename()].append(chunk)
+ else:
+ fixoffset += chunk.removed - chunk.added
+ return sum([h for h in applied.itervalues()
+ if h[0].special() or len(h) > 1], [])
+
+def record(ui, repo, *pats, **opts):
+ '''interactively select changes to commit
+
+ If a list of files is omitted, all changes reported by :hg:`status`
+ will be candidates for recording.
+
+ See :hg:`help dates` for a list of formats valid for -d/--date.
+
+ You will be prompted for whether to record changes to each
+ modified file, and for files with multiple changes, for each
+ change to use. For each query, the following responses are
+ possible::
+
+ y - record this change
+ n - skip this change
+
+ s - skip remaining changes to this file
+ f - record remaining changes to this file
+
+ d - done, skip remaining changes and files
+ a - record all changes to all remaining files
+ q - quit, recording no changes
+
+ ? - display help
+
+ This command is not available when committing a merge.'''
+
+ dorecord(ui, repo, commands.commit, *pats, **opts)
+
+
+def qrecord(ui, repo, patch, *pats, **opts):
+ '''interactively record a new patch
+
+ See :hg:`help qnew` & :hg:`help record` for more information and
+ usage.
+ '''
+
+ try:
+ mq = extensions.find('mq')
+ except KeyError:
+ raise util.Abort(_("'mq' extension not loaded"))
+
+ def committomq(ui, repo, *pats, **opts):
+ mq.new(ui, repo, patch, *pats, **opts)
+
+ opts = opts.copy()
+ opts['force'] = True # always 'qnew -f'
+ dorecord(ui, repo, committomq, *pats, **opts)
+
+
+def dorecord(ui, repo, commitfunc, *pats, **opts):
+ if not ui.interactive():
+ raise util.Abort(_('running non-interactively, use commit instead'))
+
+ def recordfunc(ui, repo, message, match, opts):
+ """This is generic record driver.
+
+ Its job is to interactively filter local changes, and accordingly
+ prepare working dir into a state, where the job can be delegated to
+ non-interactive commit command such as 'commit' or 'qrefresh'.
+
+ After the actual job is done by non-interactive command, working dir
+ state is restored to original.
+
+ In the end we'll record interesting changes, and everything else will be
+ left in place, so the user can continue his work.
+ """
+
+ merge = len(repo[None].parents()) > 1
+ if merge:
+ raise util.Abort(_('cannot partially commit a merge '
+ '(use hg commit instead)'))
+
+ changes = repo.status(match=match)[:3]
+ diffopts = mdiff.diffopts(git=True, nodates=True)
+ chunks = patch.diff(repo, changes=changes, opts=diffopts)
+ fp = cStringIO.StringIO()
+ fp.write(''.join(chunks))
+ fp.seek(0)
+
+ # 1. filter patch, so we have intending-to apply subset of it
+ chunks = filterpatch(ui, parsepatch(fp))
+ del fp
+
+ contenders = set()
+ for h in chunks:
+ try:
+ contenders.update(set(h.files()))
+ except AttributeError:
+ pass
+
+ changed = changes[0] + changes[1] + changes[2]
+ newfiles = [f for f in changed if f in contenders]
+ if not newfiles:
+ ui.status(_('no changes to record\n'))
+ return 0
+
+ modified = set(changes[0])
+
+ # 2. backup changed files, so we can restore them in the end
+ backups = {}
+ backupdir = repo.join('record-backups')
+ try:
+ os.mkdir(backupdir)
+ except OSError, err:
+ if err.errno != errno.EEXIST:
+ raise
+ try:
+ # backup continues
+ for f in newfiles:
+ if f not in modified:
+ continue
+ fd, tmpname = tempfile.mkstemp(prefix=f.replace('/', '_')+'.',
+ dir=backupdir)
+ os.close(fd)
+ ui.debug('backup %r as %r\n' % (f, tmpname))
+ util.copyfile(repo.wjoin(f), tmpname)
+ backups[f] = tmpname
+
+ fp = cStringIO.StringIO()
+ for c in chunks:
+ if c.filename() in backups:
+ c.write(fp)
+ dopatch = fp.tell()
+ fp.seek(0)
+
+ # 3a. apply filtered patch to clean repo (clean)
+ if backups:
+ hg.revert(repo, repo.dirstate.parents()[0],
+ lambda key: key in backups)
+
+ # 3b. (apply)
+ if dopatch:
+ try:
+ ui.debug('applying patch\n')
+ ui.debug(fp.getvalue())
+ pfiles = {}
+ patch.internalpatch(fp, ui, 1, repo.root, files=pfiles,
+ eolmode=None)
+ cmdutil.updatedir(ui, repo, pfiles)
+ except patch.PatchError, err:
+ raise util.Abort(str(err))
+ del fp
+
+ # 4. We prepared working directory according to filtered patch.
+ # Now is the time to delegate the job to commit/qrefresh or the like!
+
+ # it is important to first chdir to repo root -- we'll call a
+ # highlevel command with list of pathnames relative to repo root
+ cwd = os.getcwd()
+ os.chdir(repo.root)
+ try:
+ commitfunc(ui, repo, *newfiles, **opts)
+ finally:
+ os.chdir(cwd)
+
+ return 0
+ finally:
+ # 5. finally restore backed-up files
+ try:
+ for realname, tmpname in backups.iteritems():
+ ui.debug('restoring %r to %r\n' % (tmpname, realname))
+ util.copyfile(tmpname, repo.wjoin(realname))
+ os.unlink(tmpname)
+ os.rmdir(backupdir)
+ except OSError:
+ pass
+
+ # wrap ui.write so diff output can be labeled/colorized
+ def wrapwrite(orig, *args, **kw):
+ label = kw.pop('label', '')
+ for chunk, l in patch.difflabel(lambda: args):
+ orig(chunk, label=label + l)
+ oldwrite = ui.write
+ extensions.wrapfunction(ui, 'write', wrapwrite)
+ try:
+ return cmdutil.commit(ui, repo, recordfunc, pats, opts)
+ finally:
+ ui.write = oldwrite
+
+cmdtable = {
+ "record":
+ (record,
+
+ # add commit options
+ commands.table['^commit|ci'][1],
+
+ _('hg record [OPTION]... [FILE]...')),
+}
+
+
+def uisetup(ui):
+ try:
+ mq = extensions.find('mq')
+ except KeyError:
+ return
+
+ qcmdtable = {
+ "qrecord":
+ (qrecord,
+
+ # add qnew options, except '--force'
+ [opt for opt in mq.cmdtable['^qnew'][1] if opt[1] != 'force'],
+
+ _('hg qrecord [OPTION]... PATCH [FILE]...')),
+ }
+
+ cmdtable.update(qcmdtable)
+
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyo
new file mode 100644
index 0000000..456b7d6
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py
new file mode 100644
index 0000000..77bd3c3
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py
@@ -0,0 +1,180 @@
+# Mercurial extension to provide 'hg relink' command
+#
+# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""recreates hardlinks between repository clones"""
+
+from mercurial import hg, util
+from mercurial.i18n import _
+import os, stat
+
+def relink(ui, repo, origin=None, **opts):
+ """recreate hardlinks between two repositories
+
+ When repositories are cloned locally, their data files will be
+ hardlinked so that they only use the space of a single repository.
+
+ Unfortunately, subsequent pulls into either repository will break
+ hardlinks for any files touched by the new changesets, even if
+ both repositories end up pulling the same changes.
+
+ Similarly, passing --rev to "hg clone" will fail to use any
+ hardlinks, falling back to a complete copy of the source
+ repository.
+
+ This command lets you recreate those hardlinks and reclaim that
+ wasted space.
+
+ This repository will be relinked to share space with ORIGIN, which
+ must be on the same local disk. If ORIGIN is omitted, looks for
+ "default-relink", then "default", in [paths].
+
+ Do not attempt any read operations on this repository while the
+ command is running. (Both repositories will be locked against
+ writes.)
+ """
+ if not hasattr(util, 'samefile') or not hasattr(util, 'samedevice'):
+ raise util.Abort(_('hardlinks are not supported on this system'))
+ src = hg.repository(
+ hg.remoteui(repo, opts),
+ ui.expandpath(origin or 'default-relink', origin or 'default'))
+ if not src.local():
+ raise util.Abort('must specify local origin repository')
+ ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
+ locallock = repo.lock()
+ try:
+ remotelock = src.lock()
+ try:
+ candidates = sorted(collect(src, ui))
+ targets = prune(candidates, src.store.path, repo.store.path, ui)
+ do_relink(src.store.path, repo.store.path, targets, ui)
+ finally:
+ remotelock.release()
+ finally:
+ locallock.release()
+
+def collect(src, ui):
+ seplen = len(os.path.sep)
+ candidates = []
+ live = len(src['tip'].manifest())
+ # Your average repository has some files which were deleted before
+ # the tip revision. We account for that by assuming that there are
+ # 3 tracked files for every 2 live files as of the tip version of
+ # the repository.
+ #
+ # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
+ total = live * 3 // 2
+ src = src.store.path
+ pos = 0
+ ui.status(_("tip has %d files, estimated total number of files: %s\n")
+ % (live, total))
+ for dirpath, dirnames, filenames in os.walk(src):
+ dirnames.sort()
+ relpath = dirpath[len(src) + seplen:]
+ for filename in sorted(filenames):
+ if not filename[-2:] in ('.d', '.i'):
+ continue
+ st = os.stat(os.path.join(dirpath, filename))
+ if not stat.S_ISREG(st.st_mode):
+ continue
+ pos += 1
+ candidates.append((os.path.join(relpath, filename), st))
+ ui.progress(_('collecting'), pos, filename, _('files'), total)
+
+ ui.progress(_('collecting'), None)
+ ui.status(_('collected %d candidate storage files\n') % len(candidates))
+ return candidates
+
+def prune(candidates, src, dst, ui):
+ def linkfilter(src, dst, st):
+ try:
+ ts = os.stat(dst)
+ except OSError:
+ # Destination doesn't have this file?
+ return False
+ if util.samefile(src, dst):
+ return False
+ if not util.samedevice(src, dst):
+ # No point in continuing
+ raise util.Abort(
+ _('source and destination are on different devices'))
+ if st.st_size != ts.st_size:
+ return False
+ return st
+
+ targets = []
+ total = len(candidates)
+ pos = 0
+ for fn, st in candidates:
+ pos += 1
+ srcpath = os.path.join(src, fn)
+ tgt = os.path.join(dst, fn)
+ ts = linkfilter(srcpath, tgt, st)
+ if not ts:
+ ui.debug(_('not linkable: %s\n') % fn)
+ continue
+ targets.append((fn, ts.st_size))
+ ui.progress(_('pruning'), pos, fn, _('files'), total)
+
+ ui.progress(_('pruning'), None)
+ ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
+ return targets
+
+def do_relink(src, dst, files, ui):
+ def relinkfile(src, dst):
+ bak = dst + '.bak'
+ os.rename(dst, bak)
+ try:
+ util.os_link(src, dst)
+ except OSError:
+ os.rename(bak, dst)
+ raise
+ os.remove(bak)
+
+ CHUNKLEN = 65536
+ relinked = 0
+ savedbytes = 0
+
+ pos = 0
+ total = len(files)
+ for f, sz in files:
+ pos += 1
+ source = os.path.join(src, f)
+ tgt = os.path.join(dst, f)
+ # Binary mode, so that read() works correctly, especially on Windows
+ sfp = file(source, 'rb')
+ dfp = file(tgt, 'rb')
+ sin = sfp.read(CHUNKLEN)
+ while sin:
+ din = dfp.read(CHUNKLEN)
+ if sin != din:
+ break
+ sin = sfp.read(CHUNKLEN)
+ sfp.close()
+ dfp.close()
+ if sin:
+ ui.debug(_('not linkable: %s\n') % f)
+ continue
+ try:
+ relinkfile(source, tgt)
+ ui.progress(_('relinking'), pos, f, _('files'), total)
+ relinked += 1
+ savedbytes += sz
+ except OSError, inst:
+ ui.warn('%s: %s\n' % (tgt, str(inst)))
+
+ ui.progress(_('relinking'), None)
+
+ ui.status(_('relinked %d files (%d bytes reclaimed)\n') %
+ (relinked, savedbytes))
+
+cmdtable = {
+ 'relink': (
+ relink,
+ [],
+ _('[ORIGIN]')
+ )
+}
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyo
new file mode 100644
index 0000000..61565b1
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py
new file mode 100644
index 0000000..ac9a8be
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py
@@ -0,0 +1,84 @@
+# Copyright 2009, Alexander Solovyov <piranha@piranha.org.ua>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+"""extend schemes with shortcuts to repository swarms
+
+This extension allows you to specify shortcuts for parent URLs with a
+lot of repositories to act like a scheme, for example::
+
+ [schemes]
+ py = http://code.python.org/hg/
+
+After that you can use it like::
+
+ hg clone py://trunk/
+
+Additionally there is support for some more complex schemas, for
+example used by Google Code::
+
+ [schemes]
+ gcode = http://{1}.googlecode.com/hg/
+
+The syntax is taken from Mercurial templates, and you have unlimited
+number of variables, starting with ``{1}`` and continuing with
+``{2}``, ``{3}`` and so on. This variables will receive parts of URL
+supplied, split by ``/``. Anything not specified as ``{part}`` will be
+just appended to an URL.
+
+For convenience, the extension adds these schemes by default::
+
+ [schemes]
+ py = http://hg.python.org/
+ bb = https://bitbucket.org/
+ bb+ssh = ssh://hg@bitbucket.org/
+ gcode = https://{1}.googlecode.com/hg/
+ kiln = https://{1}.kilnhg.com/Repo/
+
+You can override a predefined scheme by defining a new scheme with the
+same name.
+"""
+
+import re
+from mercurial import hg, templater
+
+
+class ShortRepository(object):
+ def __init__(self, url, scheme, templater):
+ self.scheme = scheme
+ self.templater = templater
+ self.url = url
+ try:
+ self.parts = max(map(int, re.findall(r'\{(\d+)\}', self.url)))
+ except ValueError:
+ self.parts = 0
+
+ def __repr__(self):
+ return '<ShortRepository: %s>' % self.scheme
+
+ def instance(self, ui, url, create):
+ url = url.split('://', 1)[1]
+ parts = url.split('/', self.parts)
+ if len(parts) > self.parts:
+ tail = parts[-1]
+ parts = parts[:-1]
+ else:
+ tail = ''
+ context = dict((str(i + 1), v) for i, v in enumerate(parts))
+ url = ''.join(self.templater.process(self.url, context)) + tail
+ return hg._lookup(url).instance(ui, url, create)
+
+schemes = {
+ 'py': 'http://hg.python.org/',
+ 'bb': 'https://bitbucket.org/',
+ 'bb+ssh': 'ssh://hg@bitbucket.org/',
+ 'gcode': 'https://{1}.googlecode.com/hg/',
+ 'kiln': 'https://{1}.kilnhg.com/Repo/'
+ }
+
+def extsetup(ui):
+ schemes.update(dict(ui.configitems('schemes')))
+ t = templater.engine(lambda x: x)
+ for scheme, url in schemes.items():
+ hg.schemes[scheme] = ShortRepository(url, scheme, t)
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyo
new file mode 100644
index 0000000..a089ec3
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py
new file mode 100644
index 0000000..cc33148
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py
@@ -0,0 +1,38 @@
+# Copyright 2006, 2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''share a common history between several working directories'''
+
+from mercurial.i18n import _
+from mercurial import hg, commands
+
+def share(ui, source, dest=None, noupdate=False):
+ """create a new shared repository
+
+ Initialize a new repository and working directory that shares its
+ history with another repository.
+
+ .. note::
+ using rollback or extensions that destroy/modify history (mq,
+ rebase, etc.) can cause considerable confusion with shared
+ clones. In particular, if two shared clones are both updated to
+ the same changeset, and one of them destroys that changeset
+ with rollback, the other clone will suddenly stop working: all
+ operations will fail with "abort: working directory has unknown
+ parent". The only known workaround is to use debugsetparents on
+ the broken clone to reset it to a changeset that still exists
+ (e.g. tip).
+ """
+
+ return hg.share(ui, source, dest, not noupdate)
+
+cmdtable = {
+ "share":
+ (share,
+ [('U', 'noupdate', None, _('do not create a working copy'))],
+ _('[-U] SOURCE [DEST]')),
+}
+
+commands.norepo += " share"
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyo
new file mode 100644
index 0000000..f4fca90
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py
new file mode 100644
index 0000000..4325d9b
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py
@@ -0,0 +1,630 @@
+# Patch transplanting extension for Mercurial
+#
+# Copyright 2006, 2007 Brendan Cully <brendan@kublai.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''command to transplant changesets from another branch
+
+This extension allows you to transplant patches from another branch.
+
+Transplanted patches are recorded in .hg/transplant/transplants, as a
+map from a changeset hash to its hash in the source repository.
+'''
+
+from mercurial.i18n import _
+import os, tempfile
+from mercurial import bundlerepo, cmdutil, hg, merge, match
+from mercurial import patch, revlog, util, error
+from mercurial import revset
+
+class transplantentry(object):
+ def __init__(self, lnode, rnode):
+ self.lnode = lnode
+ self.rnode = rnode
+
+class transplants(object):
+ def __init__(self, path=None, transplantfile=None, opener=None):
+ self.path = path
+ self.transplantfile = transplantfile
+ self.opener = opener
+
+ if not opener:
+ self.opener = util.opener(self.path)
+ self.transplants = {}
+ self.dirty = False
+ self.read()
+
+ def read(self):
+ abspath = os.path.join(self.path, self.transplantfile)
+ if self.transplantfile and os.path.exists(abspath):
+ for line in self.opener(self.transplantfile).read().splitlines():
+ lnode, rnode = map(revlog.bin, line.split(':'))
+ list = self.transplants.setdefault(rnode, [])
+ list.append(transplantentry(lnode, rnode))
+
+ def write(self):
+ if self.dirty and self.transplantfile:
+ if not os.path.isdir(self.path):
+ os.mkdir(self.path)
+ fp = self.opener(self.transplantfile, 'w')
+ for list in self.transplants.itervalues():
+ for t in list:
+ l, r = map(revlog.hex, (t.lnode, t.rnode))
+ fp.write(l + ':' + r + '\n')
+ fp.close()
+ self.dirty = False
+
+ def get(self, rnode):
+ return self.transplants.get(rnode) or []
+
+ def set(self, lnode, rnode):
+ list = self.transplants.setdefault(rnode, [])
+ list.append(transplantentry(lnode, rnode))
+ self.dirty = True
+
+ def remove(self, transplant):
+ list = self.transplants.get(transplant.rnode)
+ if list:
+ del list[list.index(transplant)]
+ self.dirty = True
+
+class transplanter(object):
+ def __init__(self, ui, repo):
+ self.ui = ui
+ self.path = repo.join('transplant')
+ self.opener = util.opener(self.path)
+ self.transplants = transplants(self.path, 'transplants',
+ opener=self.opener)
+
+ def applied(self, repo, node, parent):
+ '''returns True if a node is already an ancestor of parent
+ or has already been transplanted'''
+ if hasnode(repo, node):
+ if node in repo.changelog.reachable(parent, stop=node):
+ return True
+ for t in self.transplants.get(node):
+ # it might have been stripped
+ if not hasnode(repo, t.lnode):
+ self.transplants.remove(t)
+ return False
+ if t.lnode in repo.changelog.reachable(parent, stop=t.lnode):
+ return True
+ return False
+
+ def apply(self, repo, source, revmap, merges, opts={}):
+ '''apply the revisions in revmap one by one in revision order'''
+ revs = sorted(revmap)
+ p1, p2 = repo.dirstate.parents()
+ pulls = []
+ diffopts = patch.diffopts(self.ui, opts)
+ diffopts.git = True
+
+ lock = wlock = None
+ try:
+ wlock = repo.wlock()
+ lock = repo.lock()
+ for rev in revs:
+ node = revmap[rev]
+ revstr = '%s:%s' % (rev, revlog.short(node))
+
+ if self.applied(repo, node, p1):
+ self.ui.warn(_('skipping already applied revision %s\n') %
+ revstr)
+ continue
+
+ parents = source.changelog.parents(node)
+ if not opts.get('filter'):
+ # If the changeset parent is the same as the
+ # wdir's parent, just pull it.
+ if parents[0] == p1:
+ pulls.append(node)
+ p1 = node
+ continue
+ if pulls:
+ if source != repo:
+ repo.pull(source, heads=pulls)
+ merge.update(repo, pulls[-1], False, False, None)
+ p1, p2 = repo.dirstate.parents()
+ pulls = []
+
+ domerge = False
+ if node in merges:
+ # pulling all the merge revs at once would mean we
+ # couldn't transplant after the latest even if
+ # transplants before them fail.
+ domerge = True
+ if not hasnode(repo, node):
+ repo.pull(source, heads=[node])
+
+ if parents[1] != revlog.nullid:
+ self.ui.note(_('skipping merge changeset %s:%s\n')
+ % (rev, revlog.short(node)))
+ patchfile = None
+ else:
+ fd, patchfile = tempfile.mkstemp(prefix='hg-transplant-')
+ fp = os.fdopen(fd, 'w')
+ gen = patch.diff(source, parents[0], node, opts=diffopts)
+ for chunk in gen:
+ fp.write(chunk)
+ fp.close()
+
+ del revmap[rev]
+ if patchfile or domerge:
+ try:
+ n = self.applyone(repo, node,
+ source.changelog.read(node),
+ patchfile, merge=domerge,
+ log=opts.get('log'),
+ filter=opts.get('filter'))
+ if n and domerge:
+ self.ui.status(_('%s merged at %s\n') % (revstr,
+ revlog.short(n)))
+ elif n:
+ self.ui.status(_('%s transplanted to %s\n')
+ % (revlog.short(node),
+ revlog.short(n)))
+ finally:
+ if patchfile:
+ os.unlink(patchfile)
+ if pulls:
+ repo.pull(source, heads=pulls)
+ merge.update(repo, pulls[-1], False, False, None)
+ finally:
+ self.saveseries(revmap, merges)
+ self.transplants.write()
+ lock.release()
+ wlock.release()
+
+ def filter(self, filter, changelog, patchfile):
+ '''arbitrarily rewrite changeset before applying it'''
+
+ self.ui.status(_('filtering %s\n') % patchfile)
+ user, date, msg = (changelog[1], changelog[2], changelog[4])
+
+ fd, headerfile = tempfile.mkstemp(prefix='hg-transplant-')
+ fp = os.fdopen(fd, 'w')
+ fp.write("# HG changeset patch\n")
+ fp.write("# User %s\n" % user)
+ fp.write("# Date %d %d\n" % date)
+ fp.write(msg + '\n')
+ fp.close()
+
+ try:
+ util.system('%s %s %s' % (filter, util.shellquote(headerfile),
+ util.shellquote(patchfile)),
+ environ={'HGUSER': changelog[1]},
+ onerr=util.Abort, errprefix=_('filter failed'))
+ user, date, msg = self.parselog(file(headerfile))[1:4]
+ finally:
+ os.unlink(headerfile)
+
+ return (user, date, msg)
+
+ def applyone(self, repo, node, cl, patchfile, merge=False, log=False,
+ filter=None):
+ '''apply the patch in patchfile to the repository as a transplant'''
+ (manifest, user, (time, timezone), files, message) = cl[:5]
+ date = "%d %d" % (time, timezone)
+ extra = {'transplant_source': node}
+ if filter:
+ (user, date, message) = self.filter(filter, cl, patchfile)
+
+ if log:
+ # we don't translate messages inserted into commits
+ message += '\n(transplanted from %s)' % revlog.hex(node)
+
+ self.ui.status(_('applying %s\n') % revlog.short(node))
+ self.ui.note('%s %s\n%s\n' % (user, date, message))
+
+ if not patchfile and not merge:
+ raise util.Abort(_('can only omit patchfile if merging'))
+ if patchfile:
+ try:
+ files = {}
+ try:
+ patch.patch(patchfile, self.ui, cwd=repo.root,
+ files=files, eolmode=None)
+ if not files:
+ self.ui.warn(_('%s: empty changeset')
+ % revlog.hex(node))
+ return None
+ finally:
+ files = cmdutil.updatedir(self.ui, repo, files)
+ except Exception, inst:
+ seriespath = os.path.join(self.path, 'series')
+ if os.path.exists(seriespath):
+ os.unlink(seriespath)
+ p1 = repo.dirstate.parents()[0]
+ p2 = node
+ self.log(user, date, message, p1, p2, merge=merge)
+ self.ui.write(str(inst) + '\n')
+ raise util.Abort(_('fix up the merge and run '
+ 'hg transplant --continue'))
+ else:
+ files = None
+ if merge:
+ p1, p2 = repo.dirstate.parents()
+ repo.dirstate.setparents(p1, node)
+ m = match.always(repo.root, '')
+ else:
+ m = match.exact(repo.root, '', files)
+
+ n = repo.commit(message, user, date, extra=extra, match=m)
+ if not n:
+ # Crash here to prevent an unclear crash later, in
+ # transplants.write(). This can happen if patch.patch()
+ # does nothing but claims success or if repo.status() fails
+ # to report changes done by patch.patch(). These both
+ # appear to be bugs in other parts of Mercurial, but dying
+ # here, as soon as we can detect the problem, is preferable
+ # to silently dropping changesets on the floor.
+ raise RuntimeError('nothing committed after transplant')
+ if not merge:
+ self.transplants.set(n, node)
+
+ return n
+
+ def resume(self, repo, source, opts=None):
+ '''recover last transaction and apply remaining changesets'''
+ if os.path.exists(os.path.join(self.path, 'journal')):
+ n, node = self.recover(repo)
+ self.ui.status(_('%s transplanted as %s\n') % (revlog.short(node),
+ revlog.short(n)))
+ seriespath = os.path.join(self.path, 'series')
+ if not os.path.exists(seriespath):
+ self.transplants.write()
+ return
+ nodes, merges = self.readseries()
+ revmap = {}
+ for n in nodes:
+ revmap[source.changelog.rev(n)] = n
+ os.unlink(seriespath)
+
+ self.apply(repo, source, revmap, merges, opts)
+
+ def recover(self, repo):
+ '''commit working directory using journal metadata'''
+ node, user, date, message, parents = self.readlog()
+ merge = len(parents) == 2
+
+ if not user or not date or not message or not parents[0]:
+ raise util.Abort(_('transplant log file is corrupt'))
+
+ extra = {'transplant_source': node}
+ wlock = repo.wlock()
+ try:
+ p1, p2 = repo.dirstate.parents()
+ if p1 != parents[0]:
+ raise util.Abort(
+ _('working dir not at transplant parent %s') %
+ revlog.hex(parents[0]))
+ if merge:
+ repo.dirstate.setparents(p1, parents[1])
+ n = repo.commit(message, user, date, extra=extra)
+ if not n:
+ raise util.Abort(_('commit failed'))
+ if not merge:
+ self.transplants.set(n, node)
+ self.unlog()
+
+ return n, node
+ finally:
+ wlock.release()
+
+ def readseries(self):
+ nodes = []
+ merges = []
+ cur = nodes
+ for line in self.opener('series').read().splitlines():
+ if line.startswith('# Merges'):
+ cur = merges
+ continue
+ cur.append(revlog.bin(line))
+
+ return (nodes, merges)
+
+ def saveseries(self, revmap, merges):
+ if not revmap:
+ return
+
+ if not os.path.isdir(self.path):
+ os.mkdir(self.path)
+ series = self.opener('series', 'w')
+ for rev in sorted(revmap):
+ series.write(revlog.hex(revmap[rev]) + '\n')
+ if merges:
+ series.write('# Merges\n')
+ for m in merges:
+ series.write(revlog.hex(m) + '\n')
+ series.close()
+
+ def parselog(self, fp):
+ parents = []
+ message = []
+ node = revlog.nullid
+ inmsg = False
+ for line in fp.read().splitlines():
+ if inmsg:
+ message.append(line)
+ elif line.startswith('# User '):
+ user = line[7:]
+ elif line.startswith('# Date '):
+ date = line[7:]
+ elif line.startswith('# Node ID '):
+ node = revlog.bin(line[10:])
+ elif line.startswith('# Parent '):
+ parents.append(revlog.bin(line[9:]))
+ elif not line.startswith('# '):
+ inmsg = True
+ message.append(line)
+ return (node, user, date, '\n'.join(message), parents)
+
+ def log(self, user, date, message, p1, p2, merge=False):
+ '''journal changelog metadata for later recover'''
+
+ if not os.path.isdir(self.path):
+ os.mkdir(self.path)
+ fp = self.opener('journal', 'w')
+ fp.write('# User %s\n' % user)
+ fp.write('# Date %s\n' % date)
+ fp.write('# Node ID %s\n' % revlog.hex(p2))
+ fp.write('# Parent ' + revlog.hex(p1) + '\n')
+ if merge:
+ fp.write('# Parent ' + revlog.hex(p2) + '\n')
+ fp.write(message.rstrip() + '\n')
+ fp.close()
+
+ def readlog(self):
+ return self.parselog(self.opener('journal'))
+
+ def unlog(self):
+ '''remove changelog journal'''
+ absdst = os.path.join(self.path, 'journal')
+ if os.path.exists(absdst):
+ os.unlink(absdst)
+
+ def transplantfilter(self, repo, source, root):
+ def matchfn(node):
+ if self.applied(repo, node, root):
+ return False
+ if source.changelog.parents(node)[1] != revlog.nullid:
+ return False
+ extra = source.changelog.read(node)[5]
+ cnode = extra.get('transplant_source')
+ if cnode and self.applied(repo, cnode, root):
+ return False
+ return True
+
+ return matchfn
+
+def hasnode(repo, node):
+ try:
+ return repo.changelog.rev(node) != None
+ except error.RevlogError:
+ return False
+
+def browserevs(ui, repo, nodes, opts):
+ '''interactively transplant changesets'''
+ def browsehelp(ui):
+ ui.write(_('y: transplant this changeset\n'
+ 'n: skip this changeset\n'
+ 'm: merge at this changeset\n'
+ 'p: show patch\n'
+ 'c: commit selected changesets\n'
+ 'q: cancel transplant\n'
+ '?: show this help\n'))
+
+ displayer = cmdutil.show_changeset(ui, repo, opts)
+ transplants = []
+ merges = []
+ for node in nodes:
+ displayer.show(repo[node])
+ action = None
+ while not action:
+ action = ui.prompt(_('apply changeset? [ynmpcq?]:'))
+ if action == '?':
+ browsehelp(ui)
+ action = None
+ elif action == 'p':
+ parent = repo.changelog.parents(node)[0]
+ for chunk in patch.diff(repo, parent, node):
+ ui.write(chunk)
+ action = None
+ elif action not in ('y', 'n', 'm', 'c', 'q'):
+ ui.write(_('no such option\n'))
+ action = None
+ if action == 'y':
+ transplants.append(node)
+ elif action == 'm':
+ merges.append(node)
+ elif action == 'c':
+ break
+ elif action == 'q':
+ transplants = ()
+ merges = ()
+ break
+ displayer.close()
+ return (transplants, merges)
+
+def transplant(ui, repo, *revs, **opts):
+ '''transplant changesets from another branch
+
+ Selected changesets will be applied on top of the current working
+ directory with the log of the original changeset. If --log is
+ specified, log messages will have a comment appended of the form::
+
+ (transplanted from CHANGESETHASH)
+
+ You can rewrite the changelog message with the --filter option.
+ Its argument will be invoked with the current changelog message as
+ $1 and the patch as $2.
+
+ If --source/-s is specified, selects changesets from the named
+ repository. If --branch/-b is specified, selects changesets from
+ the branch holding the named revision, up to that revision. If
+ --all/-a is specified, all changesets on the branch will be
+ transplanted, otherwise you will be prompted to select the
+ changesets you want.
+
+ :hg:`transplant --branch REVISION --all` will rebase the selected
+ branch (up to the named revision) onto your current working
+ directory.
+
+ You can optionally mark selected transplanted changesets as merge
+ changesets. You will not be prompted to transplant any ancestors
+ of a merged transplant, and you can merge descendants of them
+ normally instead of transplanting them.
+
+ If no merges or revisions are provided, :hg:`transplant` will
+ start an interactive changeset browser.
+
+ If a changeset application fails, you can fix the merge by hand
+ and then resume where you left off by calling :hg:`transplant
+ --continue/-c`.
+ '''
+ def incwalk(repo, incoming, branches, match=util.always):
+ if not branches:
+ branches = None
+ for node in repo.changelog.nodesbetween(incoming, branches)[0]:
+ if match(node):
+ yield node
+
+ def transplantwalk(repo, root, branches, match=util.always):
+ if not branches:
+ branches = repo.heads()
+ ancestors = []
+ for branch in branches:
+ ancestors.append(repo.changelog.ancestor(root, branch))
+ for node in repo.changelog.nodesbetween(ancestors, branches)[0]:
+ if match(node):
+ yield node
+
+ def checkopts(opts, revs):
+ if opts.get('continue'):
+ if opts.get('branch') or opts.get('all') or opts.get('merge'):
+ raise util.Abort(_('--continue is incompatible with '
+ 'branch, all or merge'))
+ return
+ if not (opts.get('source') or revs or
+ opts.get('merge') or opts.get('branch')):
+ raise util.Abort(_('no source URL, branch tag or revision '
+ 'list provided'))
+ if opts.get('all'):
+ if not opts.get('branch'):
+ raise util.Abort(_('--all requires a branch revision'))
+ if revs:
+ raise util.Abort(_('--all is incompatible with a '
+ 'revision list'))
+
+ checkopts(opts, revs)
+
+ if not opts.get('log'):
+ opts['log'] = ui.config('transplant', 'log')
+ if not opts.get('filter'):
+ opts['filter'] = ui.config('transplant', 'filter')
+
+ tp = transplanter(ui, repo)
+
+ p1, p2 = repo.dirstate.parents()
+ if len(repo) > 0 and p1 == revlog.nullid:
+ raise util.Abort(_('no revision checked out'))
+ if not opts.get('continue'):
+ if p2 != revlog.nullid:
+ raise util.Abort(_('outstanding uncommitted merges'))
+ m, a, r, d = repo.status()[:4]
+ if m or a or r or d:
+ raise util.Abort(_('outstanding local changes'))
+
+ bundle = None
+ source = opts.get('source')
+ if source:
+ sourcerepo = ui.expandpath(source)
+ source = hg.repository(ui, sourcerepo)
+ source, incoming, bundle = bundlerepo.getremotechanges(ui, repo, source,
+ force=True)
+ else:
+ source = repo
+
+ try:
+ if opts.get('continue'):
+ tp.resume(repo, source, opts)
+ return
+
+ tf = tp.transplantfilter(repo, source, p1)
+ if opts.get('prune'):
+ prune = [source.lookup(r)
+ for r in cmdutil.revrange(source, opts.get('prune'))]
+ matchfn = lambda x: tf(x) and x not in prune
+ else:
+ matchfn = tf
+ branches = map(source.lookup, opts.get('branch', ()))
+ merges = map(source.lookup, opts.get('merge', ()))
+ revmap = {}
+ if revs:
+ for r in cmdutil.revrange(source, revs):
+ revmap[int(r)] = source.lookup(r)
+ elif opts.get('all') or not merges:
+ if source != repo:
+ alltransplants = incwalk(source, incoming, branches,
+ match=matchfn)
+ else:
+ alltransplants = transplantwalk(source, p1, branches,
+ match=matchfn)
+ if opts.get('all'):
+ revs = alltransplants
+ else:
+ revs, newmerges = browserevs(ui, source, alltransplants, opts)
+ merges.extend(newmerges)
+ for r in revs:
+ revmap[source.changelog.rev(r)] = r
+ for r in merges:
+ revmap[source.changelog.rev(r)] = r
+
+ tp.apply(repo, source, revmap, merges, opts)
+ finally:
+ if bundle:
+ source.close()
+ os.unlink(bundle)
+
+def revsettransplanted(repo, subset, x):
+ """``transplanted(set)``
+ Transplanted changesets in set.
+ """
+ if x:
+ s = revset.getset(repo, subset, x)
+ else:
+ s = subset
+ cs = set()
+ for r in xrange(0, len(repo)):
+ if repo[r].extra().get('transplant_source'):
+ cs.add(r)
+ return [r for r in s if r in cs]
+
+def extsetup(ui):
+ revset.symbols['transplanted'] = revsettransplanted
+
+cmdtable = {
+ "transplant":
+ (transplant,
+ [('s', 'source', '',
+ _('pull patches from REPO'), _('REPO')),
+ ('b', 'branch', [],
+ _('pull patches from branch BRANCH'), _('BRANCH')),
+ ('a', 'all', None, _('pull all changesets up to BRANCH')),
+ ('p', 'prune', [],
+ _('skip over REV'), _('REV')),
+ ('m', 'merge', [],
+ _('merge at REV'), _('REV')),
+ ('', 'log', None, _('append transplant info to log message')),
+ ('c', 'continue', None, _('continue last transplant session '
+ 'after repair')),
+ ('', 'filter', '',
+ _('filter changesets through command'), _('CMD'))],
+ _('hg transplant [-s REPO] [-b BRANCH [-a]] [-p REV] '
+ '[-m REV] [REV]...'))
+}
+
+# tell hggettext to extract docstrings from these functions:
+i18nfunctions = [revsettransplanted]
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyo
new file mode 100644
index 0000000..a41b8f0
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py
new file mode 100644
index 0000000..f83c43e
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py
@@ -0,0 +1,159 @@
+# win32mbcs.py -- MBCS filename support for Mercurial
+#
+# Copyright (c) 2008 Shun-ichi Goto <shunichi.goto@gmail.com>
+#
+# Version: 0.3
+# Author: Shun-ichi Goto <shunichi.goto@gmail.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+#
+
+'''allow the use of MBCS paths with problematic encodings
+
+Some MBCS encodings are not good for some path operations (i.e.
+splitting path, case conversion, etc.) with its encoded bytes. We call
+such a encoding (i.e. shift_jis and big5) as "problematic encoding".
+This extension can be used to fix the issue with those encodings by
+wrapping some functions to convert to Unicode string before path
+operation.
+
+This extension is useful for:
+
+- Japanese Windows users using shift_jis encoding.
+- Chinese Windows users using big5 encoding.
+- All users who use a repository with one of problematic encodings on
+ case-insensitive file system.
+
+This extension is not needed for:
+
+- Any user who use only ASCII chars in path.
+- Any user who do not use any of problematic encodings.
+
+Note that there are some limitations on using this extension:
+
+- You should use single encoding in one repository.
+- If the repository path ends with 0x5c, .hg/hgrc cannot be read.
+- win32mbcs is not compatible with fixutf8 extention.
+
+By default, win32mbcs uses encoding.encoding decided by Mercurial.
+You can specify the encoding by config option::
+
+ [win32mbcs]
+ encoding = sjis
+
+It is useful for the users who want to commit with UTF-8 log message.
+'''
+
+import os, sys
+from mercurial.i18n import _
+from mercurial import util, encoding
+
+_encoding = None # see extsetup
+
+def decode(arg):
+ if isinstance(arg, str):
+ uarg = arg.decode(_encoding)
+ if arg == uarg.encode(_encoding):
+ return uarg
+ raise UnicodeError("Not local encoding")
+ elif isinstance(arg, tuple):
+ return tuple(map(decode, arg))
+ elif isinstance(arg, list):
+ return map(decode, arg)
+ elif isinstance(arg, dict):
+ for k, v in arg.items():
+ arg[k] = decode(v)
+ return arg
+
+def encode(arg):
+ if isinstance(arg, unicode):
+ return arg.encode(_encoding)
+ elif isinstance(arg, tuple):
+ return tuple(map(encode, arg))
+ elif isinstance(arg, list):
+ return map(encode, arg)
+ elif isinstance(arg, dict):
+ for k, v in arg.items():
+ arg[k] = encode(v)
+ return arg
+
+def appendsep(s):
+ # ensure the path ends with os.sep, appending it if necessary.
+ try:
+ us = decode(s)
+ except UnicodeError:
+ us = s
+ if us and us[-1] not in ':/\\':
+ s += os.sep
+ return s
+
+def wrapper(func, args, kwds):
+ # check argument is unicode, then call original
+ for arg in args:
+ if isinstance(arg, unicode):
+ return func(*args, **kwds)
+
+ try:
+ # convert arguments to unicode, call func, then convert back
+ return encode(func(*decode(args), **decode(kwds)))
+ except UnicodeError:
+ raise util.Abort(_("[win32mbcs] filename conversion failed with"
+ " %s encoding\n") % (_encoding))
+
+def wrapperforlistdir(func, args, kwds):
+ # Ensure 'path' argument ends with os.sep to avoids
+ # misinterpreting last 0x5c of MBCS 2nd byte as path separator.
+ if args:
+ args = list(args)
+ args[0] = appendsep(args[0])
+ if 'path' in kwds:
+ kwds['path'] = appendsep(kwds['path'])
+ return func(*args, **kwds)
+
+def wrapname(name, wrapper):
+ module, name = name.rsplit('.', 1)
+ module = sys.modules[module]
+ func = getattr(module, name)
+ def f(*args, **kwds):
+ return wrapper(func, args, kwds)
+ try:
+ f.__name__ = func.__name__ # fail with python23
+ except Exception:
+ pass
+ setattr(module, name, f)
+
+# List of functions to be wrapped.
+# NOTE: os.path.dirname() and os.path.basename() are safe because
+# they use result of os.path.split()
+funcs = '''os.path.join os.path.split os.path.splitext
+ os.path.splitunc os.path.normpath os.path.normcase os.makedirs
+ mercurial.util.endswithsep mercurial.util.splitpath mercurial.util.checkcase
+ mercurial.util.fspath mercurial.util.pconvert mercurial.util.normpath'''
+
+# codec and alias names of sjis and big5 to be faked.
+problematic_encodings = '''big5 big5-tw csbig5 big5hkscs big5-hkscs
+ hkscs cp932 932 ms932 mskanji ms-kanji shift_jis csshiftjis shiftjis
+ sjis s_jis shift_jis_2004 shiftjis2004 sjis_2004 sjis2004
+ shift_jisx0213 shiftjisx0213 sjisx0213 s_jisx0213 950 cp950 ms950 '''
+
+def extsetup(ui):
+ # TODO: decide use of config section for this extension
+ if not os.path.supports_unicode_filenames:
+ ui.warn(_("[win32mbcs] cannot activate on this platform.\n"))
+ return
+ # determine encoding for filename
+ global _encoding
+ _encoding = ui.config('win32mbcs', 'encoding', encoding.encoding)
+ # fake is only for relevant environment.
+ if _encoding.lower() in problematic_encodings.split():
+ for f in funcs.split():
+ wrapname(f, wrapper)
+ wrapname("mercurial.osutil.listdir", wrapperforlistdir)
+ # Check sys.args manually instead of using ui.debug() because
+ # command line options is not yet applied when
+ # extensions.loadall() is called.
+ if '--debug' in sys.argv:
+ ui.write("[win32mbcs] activated with encoding: %s\n"
+ % _encoding)
+
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyo
new file mode 100644
index 0000000..43fac41
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py
new file mode 100644
index 0000000..82e6aed
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py
@@ -0,0 +1,170 @@
+# win32text.py - LF <-> CRLF/CR translation utilities for Windows/Mac users
+#
+# Copyright 2005, 2007-2009 Matt Mackall <mpm@selenic.com> and others
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''perform automatic newline conversion
+
+ Deprecation: The win32text extension requires each user to configure
+ the extension again and again for each clone since the configuration
+ is not copied when cloning.
+
+ We have therefore made the ``eol`` as an alternative. The ``eol``
+ uses a version controlled file for its configuration and each clone
+ will therefore use the right settings from the start.
+
+To perform automatic newline conversion, use::
+
+ [extensions]
+ win32text =
+ [encode]
+ ** = cleverencode:
+ # or ** = macencode:
+
+ [decode]
+ ** = cleverdecode:
+ # or ** = macdecode:
+
+If not doing conversion, to make sure you do not commit CRLF/CR by accident::
+
+ [hooks]
+ pretxncommit.crlf = python:hgext.win32text.forbidcrlf
+ # or pretxncommit.cr = python:hgext.win32text.forbidcr
+
+To do the same check on a server to prevent CRLF/CR from being
+pushed or pulled::
+
+ [hooks]
+ pretxnchangegroup.crlf = python:hgext.win32text.forbidcrlf
+ # or pretxnchangegroup.cr = python:hgext.win32text.forbidcr
+'''
+
+from mercurial.i18n import _
+from mercurial.node import short
+from mercurial import util
+import re
+
+# regexp for single LF without CR preceding.
+re_single_lf = re.compile('(^|[^\r])\n', re.MULTILINE)
+
+newlinestr = {'\r\n': 'CRLF', '\r': 'CR'}
+filterstr = {'\r\n': 'clever', '\r': 'mac'}
+
+def checknewline(s, newline, ui=None, repo=None, filename=None):
+ # warn if already has 'newline' in repository.
+ # it might cause unexpected eol conversion.
+ # see issue 302:
+ # http://mercurial.selenic.com/bts/issue302
+ if newline in s and ui and filename and repo:
+ ui.warn(_('WARNING: %s already has %s line endings\n'
+ 'and does not need EOL conversion by the win32text plugin.\n'
+ 'Before your next commit, please reconsider your '
+ 'encode/decode settings in \nMercurial.ini or %s.\n') %
+ (filename, newlinestr[newline], repo.join('hgrc')))
+
+def dumbdecode(s, cmd, **kwargs):
+ checknewline(s, '\r\n', **kwargs)
+ # replace single LF to CRLF
+ return re_single_lf.sub('\\1\r\n', s)
+
+def dumbencode(s, cmd):
+ return s.replace('\r\n', '\n')
+
+def macdumbdecode(s, cmd, **kwargs):
+ checknewline(s, '\r', **kwargs)
+ return s.replace('\n', '\r')
+
+def macdumbencode(s, cmd):
+ return s.replace('\r', '\n')
+
+def cleverdecode(s, cmd, **kwargs):
+ if not util.binary(s):
+ return dumbdecode(s, cmd, **kwargs)
+ return s
+
+def cleverencode(s, cmd):
+ if not util.binary(s):
+ return dumbencode(s, cmd)
+ return s
+
+def macdecode(s, cmd, **kwargs):
+ if not util.binary(s):
+ return macdumbdecode(s, cmd, **kwargs)
+ return s
+
+def macencode(s, cmd):
+ if not util.binary(s):
+ return macdumbencode(s, cmd)
+ return s
+
+_filters = {
+ 'dumbdecode:': dumbdecode,
+ 'dumbencode:': dumbencode,
+ 'cleverdecode:': cleverdecode,
+ 'cleverencode:': cleverencode,
+ 'macdumbdecode:': macdumbdecode,
+ 'macdumbencode:': macdumbencode,
+ 'macdecode:': macdecode,
+ 'macencode:': macencode,
+ }
+
+def forbidnewline(ui, repo, hooktype, node, newline, **kwargs):
+ halt = False
+ seen = set()
+ # we try to walk changesets in reverse order from newest to
+ # oldest, so that if we see a file multiple times, we take the
+ # newest version as canonical. this prevents us from blocking a
+ # changegroup that contains an unacceptable commit followed later
+ # by a commit that fixes the problem.
+ tip = repo['tip']
+ for rev in xrange(len(repo)-1, repo[node].rev()-1, -1):
+ c = repo[rev]
+ for f in c.files():
+ if f in seen or f not in tip or f not in c:
+ continue
+ seen.add(f)
+ data = c[f].data()
+ if not util.binary(data) and newline in data:
+ if not halt:
+ ui.warn(_('Attempt to commit or push text file(s) '
+ 'using %s line endings\n') %
+ newlinestr[newline])
+ ui.warn(_('in %s: %s\n') % (short(c.node()), f))
+ halt = True
+ if halt and hooktype == 'pretxnchangegroup':
+ crlf = newlinestr[newline].lower()
+ filter = filterstr[newline]
+ ui.warn(_('\nTo prevent this mistake in your local repository,\n'
+ 'add to Mercurial.ini or .hg/hgrc:\n'
+ '\n'
+ '[hooks]\n'
+ 'pretxncommit.%s = python:hgext.win32text.forbid%s\n'
+ '\n'
+ 'and also consider adding:\n'
+ '\n'
+ '[extensions]\n'
+ 'win32text =\n'
+ '[encode]\n'
+ '** = %sencode:\n'
+ '[decode]\n'
+ '** = %sdecode:\n') % (crlf, crlf, filter, filter))
+ return halt
+
+def forbidcrlf(ui, repo, hooktype, node, **kwargs):
+ return forbidnewline(ui, repo, hooktype, node, '\r\n', **kwargs)
+
+def forbidcr(ui, repo, hooktype, node, **kwargs):
+ return forbidnewline(ui, repo, hooktype, node, '\r', **kwargs)
+
+def reposetup(ui, repo):
+ if not repo.local():
+ return
+ for name, fn in _filters.iteritems():
+ repo.adddatafilter(name, fn)
+
+def extsetup(ui):
+ if ui.configbool('win32text', 'warn', True):
+ ui.warn(_("win32text is deprecated: "
+ "http://mercurial.selenic.com/wiki/Win32TextExtension\n"))
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyo
new file mode 100644
index 0000000..fe9072d
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py
new file mode 100644
index 0000000..acca5c2
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py
@@ -0,0 +1,1582 @@
+""" Multicast DNS Service Discovery for Python, v0.12
+ Copyright (C) 2003, Paul Scott-Murphy
+
+ This module provides a framework for the use of DNS Service Discovery
+ using IP multicast. It has been tested against the JRendezvous
+ implementation from <a href="http://strangeberry.com">StrangeBerry</a>,
+ and against the mDNSResponder from Mac OS X 10.3.8.
+
+ This library is free software; you can redistribute it and/or
+ modify it under the terms of the GNU Lesser General Public
+ License as published by the Free Software Foundation; either
+ version 2.1 of the License, or (at your option) any later version.
+
+ This library is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+ Lesser General Public License for more details.
+
+ You should have received a copy of the GNU Lesser General Public
+ License along with this library; if not, write to the Free Software
+ Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+
+"""
+
+"""0.12 update - allow selection of binding interface
+ typo fix - Thanks A. M. Kuchlingi
+ removed all use of word 'Rendezvous' - this is an API change"""
+
+"""0.11 update - correction to comments for addListener method
+ support for new record types seen from OS X
+ - IPv6 address
+ - hostinfo
+ ignore unknown DNS record types
+ fixes to name decoding
+ works alongside other processes using port 5353 (e.g. on Mac OS X)
+ tested against Mac OS X 10.3.2's mDNSResponder
+ corrections to removal of list entries for service browser"""
+
+"""0.10 update - Jonathon Paisley contributed these corrections:
+ always multicast replies, even when query is unicast
+ correct a pointer encoding problem
+ can now write records in any order
+ traceback shown on failure
+ better TXT record parsing
+ server is now separate from name
+ can cancel a service browser
+
+ modified some unit tests to accommodate these changes"""
+
+"""0.09 update - remove all records on service unregistration
+ fix DOS security problem with readName"""
+
+"""0.08 update - changed licensing to LGPL"""
+
+"""0.07 update - faster shutdown on engine
+ pointer encoding of outgoing names
+ ServiceBrowser now works
+ new unit tests"""
+
+"""0.06 update - small improvements with unit tests
+ added defined exception types
+ new style objects
+ fixed hostname/interface problem
+ fixed socket timeout problem
+ fixed addServiceListener() typo bug
+ using select() for socket reads
+ tested on Debian unstable with Python 2.2.2"""
+
+"""0.05 update - ensure case insensitivty on domain names
+ support for unicast DNS queries"""
+
+"""0.04 update - added some unit tests
+ added __ne__ adjuncts where required
+ ensure names end in '.local.'
+ timeout on receiving socket for clean shutdown"""
+
+__author__ = "Paul Scott-Murphy"
+__email__ = "paul at scott dash murphy dot com"
+__version__ = "0.12"
+
+import string
+import time
+import struct
+import socket
+import threading
+import select
+import traceback
+
+__all__ = ["Zeroconf", "ServiceInfo", "ServiceBrowser"]
+
+# hook for threads
+
+globals()['_GLOBAL_DONE'] = 0
+
+# Some timing constants
+
+_UNREGISTER_TIME = 125
+_CHECK_TIME = 175
+_REGISTER_TIME = 225
+_LISTENER_TIME = 200
+_BROWSER_TIME = 500
+
+# Some DNS constants
+
+_MDNS_ADDR = '224.0.0.251'
+_MDNS_PORT = 5353;
+_DNS_PORT = 53;
+_DNS_TTL = 60 * 60; # one hour default TTL
+
+_MAX_MSG_TYPICAL = 1460 # unused
+_MAX_MSG_ABSOLUTE = 8972
+
+_FLAGS_QR_MASK = 0x8000 # query response mask
+_FLAGS_QR_QUERY = 0x0000 # query
+_FLAGS_QR_RESPONSE = 0x8000 # response
+
+_FLAGS_AA = 0x0400 # Authorative answer
+_FLAGS_TC = 0x0200 # Truncated
+_FLAGS_RD = 0x0100 # Recursion desired
+_FLAGS_RA = 0x8000 # Recursion available
+
+_FLAGS_Z = 0x0040 # Zero
+_FLAGS_AD = 0x0020 # Authentic data
+_FLAGS_CD = 0x0010 # Checking disabled
+
+_CLASS_IN = 1
+_CLASS_CS = 2
+_CLASS_CH = 3
+_CLASS_HS = 4
+_CLASS_NONE = 254
+_CLASS_ANY = 255
+_CLASS_MASK = 0x7FFF
+_CLASS_UNIQUE = 0x8000
+
+_TYPE_A = 1
+_TYPE_NS = 2
+_TYPE_MD = 3
+_TYPE_MF = 4
+_TYPE_CNAME = 5
+_TYPE_SOA = 6
+_TYPE_MB = 7
+_TYPE_MG = 8
+_TYPE_MR = 9
+_TYPE_NULL = 10
+_TYPE_WKS = 11
+_TYPE_PTR = 12
+_TYPE_HINFO = 13
+_TYPE_MINFO = 14
+_TYPE_MX = 15
+_TYPE_TXT = 16
+_TYPE_AAAA = 28
+_TYPE_SRV = 33
+_TYPE_ANY = 255
+
+# Mapping constants to names
+
+_CLASSES = { _CLASS_IN : "in",
+ _CLASS_CS : "cs",
+ _CLASS_CH : "ch",
+ _CLASS_HS : "hs",
+ _CLASS_NONE : "none",
+ _CLASS_ANY : "any" }
+
+_TYPES = { _TYPE_A : "a",
+ _TYPE_NS : "ns",
+ _TYPE_MD : "md",
+ _TYPE_MF : "mf",
+ _TYPE_CNAME : "cname",
+ _TYPE_SOA : "soa",
+ _TYPE_MB : "mb",
+ _TYPE_MG : "mg",
+ _TYPE_MR : "mr",
+ _TYPE_NULL : "null",
+ _TYPE_WKS : "wks",
+ _TYPE_PTR : "ptr",
+ _TYPE_HINFO : "hinfo",
+ _TYPE_MINFO : "minfo",
+ _TYPE_MX : "mx",
+ _TYPE_TXT : "txt",
+ _TYPE_AAAA : "quada",
+ _TYPE_SRV : "srv",
+ _TYPE_ANY : "any" }
+
+# utility functions
+
+def currentTimeMillis():
+ """Current system time in milliseconds"""
+ return time.time() * 1000
+
+# Exceptions
+
+class NonLocalNameException(Exception):
+ pass
+
+class NonUniqueNameException(Exception):
+ pass
+
+class NamePartTooLongException(Exception):
+ pass
+
+class AbstractMethodException(Exception):
+ pass
+
+class BadTypeInNameException(Exception):
+ pass
+
+class BadDomainName(Exception):
+ def __init__(self, pos):
+ Exception.__init__(self, "at position %s" % pos)
+
+class BadDomainNameCircular(BadDomainName):
+ pass
+
+# implementation classes
+
+class DNSEntry(object):
+ """A DNS entry"""
+
+ def __init__(self, name, type, clazz):
+ self.key = string.lower(name)
+ self.name = name
+ self.type = type
+ self.clazz = clazz & _CLASS_MASK
+ self.unique = (clazz & _CLASS_UNIQUE) != 0
+
+ def __eq__(self, other):
+ """Equality test on name, type, and class"""
+ if isinstance(other, DNSEntry):
+ return self.name == other.name and self.type == other.type and self.clazz == other.clazz
+ return 0
+
+ def __ne__(self, other):
+ """Non-equality test"""
+ return not self.__eq__(other)
+
+ def getClazz(self, clazz):
+ """Class accessor"""
+ try:
+ return _CLASSES[clazz]
+ except:
+ return "?(%s)" % (clazz)
+
+ def getType(self, type):
+ """Type accessor"""
+ try:
+ return _TYPES[type]
+ except:
+ return "?(%s)" % (type)
+
+ def toString(self, hdr, other):
+ """String representation with additional information"""
+ result = "%s[%s,%s" % (hdr, self.getType(self.type), self.getClazz(self.clazz))
+ if self.unique:
+ result += "-unique,"
+ else:
+ result += ","
+ result += self.name
+ if other is not None:
+ result += ",%s]" % (other)
+ else:
+ result += "]"
+ return result
+
+class DNSQuestion(DNSEntry):
+ """A DNS question entry"""
+
+ def __init__(self, name, type, clazz):
+ if not name.endswith(".local."):
+ raise NonLocalNameException(name)
+ DNSEntry.__init__(self, name, type, clazz)
+
+ def answeredBy(self, rec):
+ """Returns true if the question is answered by the record"""
+ return self.clazz == rec.clazz and (self.type == rec.type or self.type == _TYPE_ANY) and self.name == rec.name
+
+ def __repr__(self):
+ """String representation"""
+ return DNSEntry.toString(self, "question", None)
+
+
class DNSRecord(DNSEntry):
    """A DNS entry that additionally carries a TTL and a creation time."""

    def __init__(self, name, type, clazz, ttl):
        DNSEntry.__init__(self, name, type, clazz)
        self.ttl = ttl
        self.created = currentTimeMillis()

    def __eq__(self, other):
        """Equal when *other* is a DNSRecord with matching name/type/class."""
        if not isinstance(other, DNSRecord):
            return 0
        return DNSEntry.__eq__(self, other)

    def suppressedBy(self, msg):
        """True if some answer in *msg* makes sending this record redundant."""
        for answer in msg.answers:
            if self.suppressedByAnswer(answer):
                return 1
        return 0

    def suppressedByAnswer(self, other):
        """True if *other* matches this record with more than half our TTL left."""
        if self == other and other.ttl > (self.ttl / 2):
            return 1
        return 0

    def getExpirationTime(self, percent):
        """Millisecond timestamp at which *percent* of the TTL has elapsed.

        percent * ttl * 10 == (percent / 100) * (ttl * 1000).
        """
        return self.created + percent * self.ttl * 10

    def getRemainingTTL(self, now):
        """Seconds of TTL remaining at time *now* (never negative)."""
        return max(0, (self.getExpirationTime(100) - now) / 1000)

    def isExpired(self, now):
        """True once the full TTL has elapsed."""
        return self.getExpirationTime(100) <= now

    def isStale(self, now):
        """True once at least half of the TTL has elapsed."""
        return self.getExpirationTime(50) <= now

    def resetTTL(self, other):
        """Adopt *other*'s TTL and creation time (record was re-announced)."""
        self.created = other.created
        self.ttl = other.ttl

    def write(self, out):
        """Abstract: subclasses serialize their payload into *out*."""
        raise AbstractMethodException

    def toString(self, other):
        """String representation including TTL bookkeeping."""
        arg = "%s/%s,%s" % (self.ttl, self.getRemainingTTL(currentTimeMillis()), other)
        return DNSEntry.toString(self, "record", arg)
+
class DNSAddress(DNSRecord):
    """A DNS address (A/AAAA) record holding a packed binary address."""

    def __init__(self, name, type, clazz, ttl, address):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.address = address

    def write(self, out):
        """Serialize the raw address bytes into an outgoing packet."""
        out.writeString(self.address, len(self.address))

    def __eq__(self, other):
        """Equality on the packed address only."""
        if not isinstance(other, DNSAddress):
            return 0
        return self.address == other.address

    def __repr__(self):
        """Dotted-quad form when the payload is a valid IPv4 address."""
        try:
            return socket.inet_ntoa(self.address)
        except:
            # Not 4 bytes (e.g. AAAA) — show the raw payload instead.
            return self.address
+
class DNSHinfo(DNSRecord):
    """A DNS host-information (HINFO) record: CPU and OS strings."""

    def __init__(self, name, type, clazz, ttl, cpu, os):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.cpu = cpu
        self.os = os

    def write(self, out):
        """Serialize CPU then OS into an outgoing packet."""
        out.writeString(self.cpu, len(self.cpu))
        out.writeString(self.os, len(self.os))

    def __eq__(self, other):
        """Equality on both the cpu and os fields."""
        if not isinstance(other, DNSHinfo):
            return 0
        return self.cpu == other.cpu and self.os == other.os

    def __repr__(self):
        """String representation"""
        return self.cpu + " " + self.os
+
class DNSPointer(DNSRecord):
    """A DNS pointer (PTR) record mapping a name to an alias."""

    def __init__(self, name, type, clazz, ttl, alias):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.alias = alias

    def write(self, out):
        """Serialize the alias as a (possibly compressed) domain name."""
        out.writeName(self.alias)

    def __eq__(self, other):
        """Equality on the alias only."""
        if not isinstance(other, DNSPointer):
            return 0
        return self.alias == other.alias

    def __repr__(self):
        """String representation"""
        return self.toString(self.alias)
+
class DNSText(DNSRecord):
    """A DNS text (TXT) record holding an opaque text payload."""

    def __init__(self, name, type, clazz, ttl, text):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.text = text

    def write(self, out):
        """Serialize the raw text into an outgoing packet."""
        out.writeString(self.text, len(self.text))

    def __eq__(self, other):
        """Equality on the text payload only."""
        if not isinstance(other, DNSText):
            return 0
        return self.text == other.text

    def __repr__(self):
        """Long payloads are elided to their first 7 characters."""
        shown = self.text if len(self.text) <= 10 else self.text[:7] + "..."
        return self.toString(shown)
+
class DNSService(DNSRecord):
    """A DNS service (SRV) record: priority, weight, port and target host."""

    def __init__(self, name, type, clazz, ttl, priority, weight, port, server):
        DNSRecord.__init__(self, name, type, clazz, ttl)
        self.priority = priority
        self.weight = weight
        self.port = port
        self.server = server

    def write(self, out):
        """Serialize in SRV wire order: priority, weight, port, target."""
        out.writeShort(self.priority)
        out.writeShort(self.weight)
        out.writeShort(self.port)
        out.writeName(self.server)

    def __eq__(self, other):
        """Equality on all four SRV payload fields."""
        if not isinstance(other, DNSService):
            return 0
        return (self.priority == other.priority
                and self.weight == other.weight
                and self.port == other.port
                and self.server == other.server)

    def __repr__(self):
        """String representation"""
        return self.toString("%s:%s" % (self.server, self.port))
+
class DNSIncoming(object):
    """Object representation of an incoming DNS packet.

    Parses the header, question section and answer/authority/additional
    sections of *data* on construction.  Fix over the original: the
    parameters/locals named ``len`` shadowed the builtin in readString,
    readUTF and readName; they are renamed (every in-file call passes
    them positionally, so callers are unaffected).
    """

    def __init__(self, data):
        """data: the raw bytes of one received packet."""
        self.offset = 0
        self.data = data
        self.questions = []
        self.answers = []
        self.numQuestions = 0
        self.numAnswers = 0
        self.numAuthorities = 0
        self.numAdditionals = 0

        self.readHeader()
        self.readQuestions()
        self.readOthers()

    def readHeader(self):
        """Read id, flags and the four section counts (12-byte header)."""
        format = '!HHHHHH'
        length = struct.calcsize(format)
        info = struct.unpack(format, self.data[self.offset:self.offset + length])
        self.offset += length

        self.id = info[0]
        self.flags = info[1]
        self.numQuestions = info[2]
        self.numAnswers = info[3]
        self.numAuthorities = info[4]
        self.numAdditionals = info[5]

    def readQuestions(self):
        """Read the question section; questions for non-.local. names are skipped."""
        format = '!HH'
        length = struct.calcsize(format)
        for i in range(0, self.numQuestions):
            name = self.readName()
            info = struct.unpack(format, self.data[self.offset:self.offset + length])
            self.offset += length

            try:
                question = DNSQuestion(name, info[0], info[1])
                self.questions.append(question)
            except NonLocalNameException:
                pass

    def readInt(self):
        """Read an unsigned 32-bit integer and advance the offset."""
        format = '!I'
        length = struct.calcsize(format)
        info = struct.unpack(format, self.data[self.offset:self.offset + length])
        self.offset += length
        return info[0]

    def readCharacterString(self):
        """Read a length-prefixed character string (1-byte length)."""
        length = ord(self.data[self.offset])
        self.offset += 1
        return self.readString(length)

    def readString(self, strlen):
        """Read *strlen* raw bytes and advance the offset."""
        format = '!' + str(strlen) + 's'
        length = struct.calcsize(format)
        info = struct.unpack(format, self.data[self.offset:self.offset + length])
        self.offset += length
        return info[0]

    def readUnsignedShort(self):
        """Read an unsigned 16-bit integer and advance the offset."""
        format = '!H'
        length = struct.calcsize(format)
        info = struct.unpack(format, self.data[self.offset:self.offset + length])
        self.offset += length
        return info[0]

    def readOthers(self):
        """Read the answers, authorities and additionals sections.

        All three sections share the same record layout, so they are
        parsed in one loop and collected into self.answers.
        """
        format = '!HHiH'
        length = struct.calcsize(format)
        n = self.numAnswers + self.numAuthorities + self.numAdditionals
        for i in range(0, n):
            domain = self.readName()
            # info = (type, class, ttl, rdata-length)
            info = struct.unpack(format, self.data[self.offset:self.offset + length])
            self.offset += length

            rec = None
            if info[0] == _TYPE_A:
                rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(4))
            elif info[0] == _TYPE_CNAME or info[0] == _TYPE_PTR:
                rec = DNSPointer(domain, info[0], info[1], info[2], self.readName())
            elif info[0] == _TYPE_TXT:
                rec = DNSText(domain, info[0], info[1], info[2], self.readString(info[3]))
            elif info[0] == _TYPE_SRV:
                rec = DNSService(domain, info[0], info[1], info[2],
                                 self.readUnsignedShort(), self.readUnsignedShort(),
                                 self.readUnsignedShort(), self.readName())
            elif info[0] == _TYPE_HINFO:
                rec = DNSHinfo(domain, info[0], info[1], info[2],
                               self.readCharacterString(), self.readCharacterString())
            elif info[0] == _TYPE_AAAA:
                rec = DNSAddress(domain, info[0], info[1], info[2], self.readString(16))
            else:
                # Unknown record type: skip its rdata.  Note that if the
                # skipped rdata contained compression targets, later names
                # may fail to parse — kept as best-effort, as before.
                self.offset += info[3]

            if rec is not None:
                self.answers.append(rec)

    def isQuery(self):
        """True if this packet's flags mark it as a query."""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_QUERY

    def isResponse(self):
        """True if this packet's flags mark it as a response."""
        return (self.flags & _FLAGS_QR_MASK) == _FLAGS_QR_RESPONSE

    def readUTF(self, offset, length):
        """Decode *length* bytes at *offset* as UTF-8 (does not move the offset)."""
        return self.data[offset:offset + length].decode('utf-8')

    def readName(self):
        """Read a (possibly compressed) domain name and advance the offset.

        Follows 0xC0 compression pointers; raises BadDomainNameCircular if
        a pointer chain fails to move strictly backwards, and BadDomainName
        for any other malformed label.
        """
        result = ''
        off = self.offset
        next = -1          # where to resume after the first pointer, if any
        first = off

        while 1:
            labellen = ord(self.data[off])
            off += 1
            if labellen == 0:
                break
            t = labellen & 0xC0
            if t == 0x00:
                # Plain label: append it plus the separating dot.
                result = ''.join((result, self.readUTF(off, labellen) + '.'))
                off += labellen
            elif t == 0xC0:
                # Compression pointer: remember resume point, then jump.
                if next < 0:
                    next = off + 1
                off = ((labellen & 0x3F) << 8) | ord(self.data[off])
                if off >= first:
                    raise BadDomainNameCircular(off)
                first = off
            else:
                raise BadDomainName(off)

        if next >= 0:
            self.offset = next
        else:
            self.offset = off

        return result
+
+
class DNSOutgoing(object):
    """Object representation of an outgoing DNS packet.

    Records and questions are queued up and serialized on the first call
    to packet(); self.names caches name -> offset for DNS compression.
    """

    def __init__(self, flags, multicast = 1):
        self.finished = 0
        self.id = 0
        self.multicast = multicast
        self.flags = flags
        self.names = {}      # name -> byte offset, for compression pointers
        self.data = []       # list of packed fragments, joined in packet()
        self.size = 12       # header is written last but counted up front

        self.questions = []
        self.answers = []
        self.authorities = []
        self.additionals = []

    def addQuestion(self, record):
        """Queue a question for the question section."""
        self.questions.append(record)

    def addAnswer(self, inp, record):
        """Queue an answer unless *inp* already makes it redundant."""
        if not record.suppressedBy(inp):
            self.addAnswerAtTime(record, 0)

    def addAnswerAtTime(self, record, now):
        """Queue an answer unless it has already expired at *now* (0 = skip check)."""
        if record is None:
            return
        if now == 0 or not record.isExpired(now):
            self.answers.append((record, now))

    def addAuthorativeAnswer(self, record):
        """Queue an authoritative answer."""
        self.authorities.append(record)

    def addAdditionalAnswer(self, record):
        """Queue an additional answer."""
        self.additionals.append(record)

    def writeByte(self, value):
        """Append one byte to the packet."""
        self.data.append(struct.pack('!c', chr(value)))
        self.size += 1

    def insertShort(self, index, value):
        """Insert an unsigned short at fragment position *index*."""
        self.data.insert(index, struct.pack('!H', value))
        self.size += 2

    def writeShort(self, value):
        """Append an unsigned short to the packet."""
        self.data.append(struct.pack('!H', value))
        self.size += 2

    def writeInt(self, value):
        """Append an unsigned 32-bit integer to the packet."""
        self.data.append(struct.pack('!I', int(value)))
        self.size += 4

    def writeString(self, value, length):
        """Append *length* raw bytes to the packet."""
        self.data.append(struct.pack('!' + str(length) + 's', value))
        self.size += length

    def writeUTF(self, s):
        """Append one length-prefixed UTF-8 label (max 64 bytes)."""
        utfstr = s.encode('utf-8')
        length = len(utfstr)
        if length > 64:
            raise NamePartTooLongException
        self.writeByte(length)
        self.writeString(utfstr, length)

    def writeName(self, name):
        """Append a domain name, using a compression pointer when possible."""
        if name in self.names:
            # Already written once: emit a 2-byte pointer to that offset.
            index = self.names[name]
            self.writeByte((index >> 8) | 0xC0)
            self.writeByte(index)
            return

        # First occurrence: record its offset, then write the labels.
        self.names[name] = self.size
        parts = name.split('.')
        if parts[-1] == '':
            parts = parts[:-1]
        for part in parts:
            self.writeUTF(part)
        self.writeByte(0)

    def writeQuestion(self, question):
        """Serialize one question: name, type, class."""
        self.writeName(question.name)
        self.writeShort(question.type)
        self.writeShort(question.clazz)

    def writeRecord(self, record, now):
        """Serialize one resource record (answer/authority/additional)."""
        self.writeName(record.name)
        self.writeShort(record.type)
        if record.unique and self.multicast:
            self.writeShort(record.clazz | _CLASS_UNIQUE)
        else:
            self.writeShort(record.clazz)
        if now == 0:
            self.writeInt(record.ttl)
        else:
            self.writeInt(record.getRemainingTTL(now))
        index = len(self.data)
        # The rdata-length short will be inserted before the payload,
        # so pre-count its two bytes while the payload is written.
        self.size += 2
        record.write(self)
        self.size -= 2

        length = len(''.join(self.data[index:]))
        self.insertShort(index, length)  # the rdata length we adjusted for

    def packet(self):
        """Return the packet's bytes; first call freezes the packet.

        No further parts should be added once this has been called.
        """
        if not self.finished:
            self.finished = 1
            for question in self.questions:
                self.writeQuestion(question)
            for answer, when in self.answers:
                self.writeRecord(answer, when)
            for authority in self.authorities:
                self.writeRecord(authority, 0)
            for additional in self.additionals:
                self.writeRecord(additional, 0)

            # Header fields, inserted in reverse so they end up in order.
            self.insertShort(0, len(self.additionals))
            self.insertShort(0, len(self.authorities))
            self.insertShort(0, len(self.answers))
            self.insertShort(0, len(self.questions))
            self.insertShort(0, self.flags)
            if self.multicast:
                self.insertShort(0, 0)
            else:
                self.insertShort(0, self.id)
        return ''.join(self.data)
+
+
class DNSCache(object):
    """A cache of DNS entries, keyed by entry.key (lists allow duplicates).

    Fixes over the original: the local name ``list`` no longer shadows the
    builtin; bare ``except:`` clauses are narrowed to the exceptions the
    dict/list operations actually raise; entries() no longer relies on the
    py2-only global ``reduce``.
    """

    def __init__(self):
        self.cache = {}  # key -> list of entries sharing that key

    def add(self, entry):
        """Adds an entry."""
        try:
            entries = self.cache[entry.key]
        except KeyError:
            entries = self.cache[entry.key] = []
        entries.append(entry)

    def remove(self, entry):
        """Removes an entry; silently ignores entries not in the cache."""
        try:
            self.cache[entry.key].remove(entry)
        except (KeyError, ValueError):
            pass

    def get(self, entry):
        """Gets an entry by key. Will return None if there is no
        matching entry."""
        try:
            entries = self.cache[entry.key]
            return entries[entries.index(entry)]
        except (KeyError, ValueError):
            return None

    def getByDetails(self, name, type, clazz):
        """Gets an entry by details. Will return None if there is
        no matching entry."""
        entry = DNSEntry(name, type, clazz)
        return self.get(entry)

    def entriesWithName(self, name):
        """Returns a list of entries whose key matches the name."""
        try:
            return self.cache[name]
        except KeyError:
            return []

    def entries(self):
        """Returns a list of all entries."""
        result = []
        for entries in self.cache.values():
            result.extend(entries)
        return result
+
+
class Engine(threading.Thread):
    """An engine wraps read access to sockets, allowing objects that
    need to receive data from sockets to be called back when the
    sockets are ready.

    A reader needs a handle_read() method, which is called when the socket
    it is interested in is ready for reading.

    Writers are not implemented here, because we only send short
    packets.
    """

    def __init__(self, zeroconf):
        threading.Thread.__init__(self)
        self.zeroconf = zeroconf
        self.readers = {}  # maps socket to reader
        self.timeout = 5
        self.condition = threading.Condition()
        self.start()

    def run(self):
        # Poll until the module-wide shutdown flag is raised.
        while not globals()['_GLOBAL_DONE']:
            sockets = self.getReaders()
            if not sockets:
                # No sockets to manage; sleep until the timeout elapses
                # or addReader()/delReader() notifies us.
                self.condition.acquire()
                self.condition.wait(self.timeout)
                self.condition.release()
                continue
            try:
                readable, _writable, _errored = select.select(sockets, [], [], self.timeout)
                for sock in readable:
                    try:
                        self.readers[sock].handle_read()
                    except:
                        # Suppress reader errors during shutdown only.
                        if not globals()['_GLOBAL_DONE']:
                            traceback.print_exc()
            except:
                # select() can fail when sockets are closed under us;
                # loop around and rebuild the reader list.
                pass

    def getReaders(self):
        """Snapshot the registered sockets under the lock."""
        self.condition.acquire()
        result = self.readers.keys()
        self.condition.release()
        return result

    def addReader(self, reader, socket):
        """Register *reader* for *socket* and wake the poll loop."""
        self.condition.acquire()
        self.readers[socket] = reader
        self.condition.notify()
        self.condition.release()

    def delReader(self, socket):
        """Unregister *socket* and wake the poll loop."""
        self.condition.acquire()
        del(self.readers[socket])
        self.condition.notify()
        self.condition.release()

    def notify(self):
        """Wake the poll loop without changing the reader set."""
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()
+
class Listener(object):
    """A Listener is used by this module to listen on the multicast
    group to which DNS messages are sent, allowing the implementation
    to cache information as it arrives.

    It requires registration with an Engine object in order to have
    the read() method called when a socket is availble for reading."""

    def __init__(self, zeroconf):
        self.zeroconf = zeroconf
        self.zeroconf.engine.addReader(self, self.zeroconf.socket)

    def handle_read(self):
        """Receive one packet and route it as a query or a response."""
        data, (addr, port) = self.zeroconf.socket.recvfrom(_MAX_MSG_ABSOLUTE)
        self.data = data
        msg = DNSIncoming(data)
        if not msg.isQuery():
            self.zeroconf.handleResponse(msg)
            return
        if port == _MDNS_PORT:
            # Multicast query: always answer via multicast.
            self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
        elif port == _DNS_PORT:
            # Legacy unicast query: reply via unicast and multicast.
            self.zeroconf.handleQuery(msg, addr, port)
            self.zeroconf.handleQuery(msg, _MDNS_ADDR, _MDNS_PORT)
+
+
class Reaper(threading.Thread):
    """Background thread that evicts expired entries from the cache."""

    def __init__(self, zeroconf):
        threading.Thread.__init__(self)
        self.zeroconf = zeroconf
        self.start()

    def run(self):
        while 1:
            # Wake every 10 seconds (or earlier if notified).
            self.zeroconf.wait(10 * 1000)
            if globals()['_GLOBAL_DONE']:
                return
            now = currentTimeMillis()
            for record in self.zeroconf.cache.entries():
                if not record.isExpired(now):
                    continue
                # Notify listeners before dropping the record.
                self.zeroconf.updateRecord(now, record)
                self.zeroconf.cache.remove(record)
+
+
class ServiceBrowser(threading.Thread):
    """Used to browse for a service of a specific type.

    The listener object will have its addService() and
    removeService() methods called when this browser
    discovers changes in the services availability."""

    def __init__(self, zeroconf, type, listener):
        """Creates a browser for a specific type"""
        threading.Thread.__init__(self)
        self.zeroconf = zeroconf
        self.type = type
        self.listener = listener
        self.services = {}          # lower-cased alias -> PTR record
        self.nextTime = currentTimeMillis()
        self.delay = _BROWSER_TIME
        self.list = []              # queued listener callbacks

        self.done = 0

        self.zeroconf.addListener(self, DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
        self.start()

    def updateRecord(self, zeroconf, now, record):
        """Callback invoked by Zeroconf when new information arrives.

        Updates information required by browser in the Zeroconf cache."""
        if record.type != _TYPE_PTR or record.name != self.type:
            return
        expired = record.isExpired(now)
        try:
            known = self.services[record.alias.lower()]
            if not expired:
                # Re-announcement of a known service: refresh its TTL.
                known.resetTTL(record)
            else:
                # Known service has expired: drop it and queue the
                # removeService notification.
                del(self.services[record.alias.lower()])
                callback = lambda x: self.listener.removeService(x, self.type, record.alias)
                self.list.append(callback)
                return
        except:
            # Previously unknown alias: remember it and queue the
            # addService notification (unless it arrived already dead).
            if not expired:
                self.services[record.alias.lower()] = record
                callback = lambda x: self.listener.addService(x, self.type, record.alias)
                self.list.append(callback)

        expires = record.getExpirationTime(75)
        if expires < self.nextTime:
            self.nextTime = expires

    def cancel(self):
        """Stop the browser thread at its next wakeup."""
        self.done = 1
        self.zeroconf.notifyAll()

    def run(self):
        while 1:
            event = None
            now = currentTimeMillis()
            if len(self.list) == 0 and self.nextTime > now:
                # Nothing queued and no query due yet: sleep until then.
                self.zeroconf.wait(self.nextTime - now)
            if globals()['_GLOBAL_DONE'] or self.done:
                return
            now = currentTimeMillis()

            if self.nextTime <= now:
                # Time to re-query; include known-good answers so
                # responders can suppress duplicates.
                out = DNSOutgoing(_FLAGS_QR_QUERY)
                out.addQuestion(DNSQuestion(self.type, _TYPE_PTR, _CLASS_IN))
                for record in self.services.values():
                    if not record.isExpired(now):
                        out.addAnswerAtTime(record, now)
                self.zeroconf.send(out)
                self.nextTime = now + self.delay
                # Exponential backoff, capped at 20 seconds.
                self.delay = min(20 * 1000, self.delay * 2)

            if len(self.list) > 0:
                event = self.list.pop(0)

            if event is not None:
                event(self.zeroconf)
+
+
class ServiceInfo(object):
    """Service information.

    Fixes over the original: the local name ``list`` in setProperties no
    longer shadows the builtin, and ``== None`` comparisons use ``is None``.
    """

    def __init__(self, type, name, address=None, port=None, weight=0, priority=0, properties=None, server=None):
        """Create a service description.

        type: fully qualified service type name
        name: fully qualified service name
        address: IP address as unsigned short, network byte order
        port: port that the service runs on
        weight: weight of the service
        priority: priority of the service
        properties: dictionary of properties (or a string holding the bytes for the text field)
        server: fully qualified name for service host (defaults to name)"""

        if not name.endswith(type):
            raise BadTypeInNameException
        self.type = type
        self.name = name
        self.address = address
        self.port = port
        self.weight = weight
        self.priority = priority
        if server:
            self.server = server
        else:
            self.server = name
        self.setProperties(properties)

    def setProperties(self, properties):
        """Sets properties and text of this info from a dictionary.

        Non-dict values are stored verbatim as the raw text field.
        """
        if not isinstance(properties, dict):
            self.text = properties
            return

        self.properties = properties
        items = []
        result = ''
        for key in properties:
            value = properties[key]
            # Encode each value: None/unknown -> empty, str verbatim,
            # int -> 'true'/'false'.
            if value is None:
                suffix = ''
            elif isinstance(value, str):
                suffix = value
            elif isinstance(value, int):
                suffix = 'true' if value else 'false'
            else:
                suffix = ''
            items.append('='.join((key, suffix)))
        for item in items:
            # Each key=value pair is length-prefixed with one byte.
            result = ''.join((result, struct.pack('!c', chr(len(item))), item))
        self.text = result

    def setText(self, text):
        """Sets properties and text given a raw TXT record payload."""
        self.text = text
        try:
            result = {}
            end = len(text)
            index = 0
            strs = []
            # Split the payload into its length-prefixed substrings.
            while index < end:
                length = ord(text[index])
                index += 1
                strs.append(text[index:index + length])
                index += length

            for s in strs:
                eindex = s.find('=')
                if eindex == -1:
                    # No equals sign at all
                    key = s
                    value = 0
                else:
                    key = s[:eindex]
                    value = s[eindex + 1:]
                    if value == 'true':
                        value = 1
                    elif value == 'false' or not value:
                        value = 0

                # Only update non-existent properties
                if key and result.get(key) is None:
                    result[key] = value

            self.properties = result
        except:
            # Best-effort parse of network data: report and degrade.
            traceback.print_exc()
            self.properties = None

    def getType(self):
        """Type accessor"""
        return self.type

    def getName(self):
        """Service name with the type suffix stripped, when present."""
        if self.type is not None and self.name.endswith("." + self.type):
            return self.name[:len(self.name) - len(self.type) - 1]
        return self.name

    def getAddress(self):
        """Address accessor"""
        return self.address

    def getPort(self):
        """Port accessor"""
        return self.port

    def getPriority(self):
        """Priority accessor"""
        return self.priority

    def getWeight(self):
        """Weight accessor"""
        return self.weight

    def getProperties(self):
        """Properties accessor"""
        return self.properties

    def getText(self):
        """Text accessor"""
        return self.text

    def getServer(self):
        """Server accessor"""
        return self.server

    def updateRecord(self, zeroconf, now, record):
        """Updates service information from a DNS record"""
        if record is None or record.isExpired(now):
            return
        if record.type == _TYPE_A:
            # Address records are keyed on the server host name.
            if record.name == self.server:
                self.address = record.address
        elif record.type == _TYPE_SRV:
            if record.name == self.name:
                self.server = record.server
                self.port = record.port
                self.weight = record.weight
                self.priority = record.priority
                # The server may have changed: re-resolve its address
                # from whatever A record the cache already holds.
                self.updateRecord(zeroconf, now,
                                  zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN))
        elif record.type == _TYPE_TXT:
            if record.name == self.name:
                self.setText(record.text)

    def request(self, zeroconf, timeout):
        """Returns true if the service could be discovered on the
        network, and updates this object with details discovered.
        """
        now = currentTimeMillis()
        delay = _LISTENER_TIME
        next = now + delay
        last = now + timeout
        result = 0
        try:
            zeroconf.addListener(self, DNSQuestion(self.name, _TYPE_ANY, _CLASS_IN))
            while self.server is None or self.address is None or self.text is None:
                if last <= now:
                    # Timed out before all three pieces arrived.
                    return 0
                if next <= now:
                    # (Re)issue SRV/TXT (and A, once the server is known)
                    # queries, with cached answers for suppression.
                    out = DNSOutgoing(_FLAGS_QR_QUERY)
                    out.addQuestion(DNSQuestion(self.name, _TYPE_SRV, _CLASS_IN))
                    out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_SRV, _CLASS_IN), now)
                    out.addQuestion(DNSQuestion(self.name, _TYPE_TXT, _CLASS_IN))
                    out.addAnswerAtTime(zeroconf.cache.getByDetails(self.name, _TYPE_TXT, _CLASS_IN), now)
                    if self.server is not None:
                        out.addQuestion(DNSQuestion(self.server, _TYPE_A, _CLASS_IN))
                        out.addAnswerAtTime(zeroconf.cache.getByDetails(self.server, _TYPE_A, _CLASS_IN), now)
                    zeroconf.send(out)
                    next = now + delay
                    delay = delay * 2

                zeroconf.wait(min(next, last) - now)
                now = currentTimeMillis()
            result = 1
        finally:
            zeroconf.removeListener(self)

        return result

    def __eq__(self, other):
        """Tests equality of service name"""
        if isinstance(other, ServiceInfo):
            return other.name == self.name
        return 0

    def __ne__(self, other):
        """Non-equality test"""
        return not self.__eq__(other)

    def __repr__(self):
        """String representation"""
        result = "service[%s,%s:%s," % (self.name, socket.inet_ntoa(self.getAddress()), self.port)
        if self.text is None:
            result += "None"
        else:
            if len(self.text) < 20:
                result += self.text
            else:
                result += self.text[:17] + "..."
        result += "]"
        return result
+
+
+class Zeroconf(object):
+ """Implementation of Zeroconf Multicast DNS Service Discovery
+
+ Supports registration, unregistration, queries and browsing.
+ """
+ def __init__(self, bindaddress=None):
+ """Creates an instance of the Zeroconf class, establishing
+ multicast communications, listening and reaping threads."""
+ globals()['_GLOBAL_DONE'] = 0
+ if bindaddress is None:
+ self.intf = socket.gethostbyname(socket.gethostname())
+ else:
+ self.intf = bindaddress
+ self.group = ('', _MDNS_PORT)
+ self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
+ try:
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+ except:
+ # SO_REUSEADDR should be equivalent to SO_REUSEPORT for
+ # multicast UDP sockets (p 731, "TCP/IP Illustrated,
+ # Volume 2"), but some BSD-derived systems require
+ # SO_REUSEPORT to be specified explicity. Also, not all
+ # versions of Python have SO_REUSEPORT available. So
+ # if you're on a BSD-based system, and haven't upgraded
+ # to Python 2.3 yet, you may find this library doesn't
+ # work as expected.
+ #
+ pass
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_TTL, 255)
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_LOOP, 1)
+ try:
+ self.socket.bind(self.group)
+ except:
+ # Some versions of linux raise an exception even though
+ # the SO_REUSE* options have been set, so ignore it
+ #
+ pass
+ #self.socket.setsockopt(socket.SOL_IP, socket.IP_MULTICAST_IF, socket.inet_aton(self.intf) + socket.inet_aton('0.0.0.0'))
+ self.socket.setsockopt(socket.SOL_IP, socket.IP_ADD_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
+
+ self.listeners = []
+ self.browsers = []
+ self.services = {}
+ self.servicetypes = {}
+
+ self.cache = DNSCache()
+
+ self.condition = threading.Condition()
+
+ self.engine = Engine(self)
+ self.listener = Listener(self)
+ self.reaper = Reaper(self)
+
+ def isLoopback(self):
+ return self.intf.startswith("127.0.0.1")
+
+ def isLinklocal(self):
+ return self.intf.startswith("169.254.")
+
+ def wait(self, timeout):
+ """Calling thread waits for a given number of milliseconds or
+ until notified."""
+ self.condition.acquire()
+ self.condition.wait(timeout/1000)
+ self.condition.release()
+
+ def notifyAll(self):
+ """Notifies all waiting threads"""
+ self.condition.acquire()
+ self.condition.notifyAll()
+ self.condition.release()
+
+ def getServiceInfo(self, type, name, timeout=3000):
+ """Returns network's service information for a particular
+ name and type, or None if no service matches by the timeout,
+ which defaults to 3 seconds."""
+ info = ServiceInfo(type, name)
+ if info.request(self, timeout):
+ return info
+ return None
+
+ def addServiceListener(self, type, listener):
+ """Adds a listener for a particular service type. This object
+ will then have its updateRecord method called when information
+ arrives for that type."""
+ self.removeServiceListener(listener)
+ self.browsers.append(ServiceBrowser(self, type, listener))
+
+ def removeServiceListener(self, listener):
+ """Removes a listener from the set that is currently listening."""
+ for browser in self.browsers:
+ if browser.listener == listener:
+ browser.cancel()
+ del(browser)
+
+ def registerService(self, info, ttl=_DNS_TTL):
+ """Registers service information to the network with a default TTL
+ of 60 seconds. Zeroconf will then respond to requests for
+ information for that service. The name of the service may be
+ changed if needed to make it unique on the network."""
+ self.checkService(info)
+ self.services[info.name.lower()] = info
+ if self.servicetypes.has_key(info.type):
+ self.servicetypes[info.type]+=1
+ else:
+ self.servicetypes[info.type]=1
+ now = currentTimeMillis()
+ nextTime = now
+ i = 0
+ while i < 3:
+ if now < nextTime:
+ self.wait(nextTime - now)
+ now = currentTimeMillis()
+ continue
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+ out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, ttl, info.name), 0)
+ out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, ttl, info.priority, info.weight, info.port, info.server), 0)
+ out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, ttl, info.text), 0)
+ if info.address:
+ out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, ttl, info.address), 0)
+ self.send(out)
+ i += 1
+ nextTime += _REGISTER_TIME
+
+ def unregisterService(self, info):
+ """Unregister a service."""
+ try:
+ del(self.services[info.name.lower()])
+ if self.servicetypes[info.type]>1:
+ self.servicetypes[info.type]-=1
+ else:
+ del self.servicetypes[info.type]
+ except:
+ pass
+ now = currentTimeMillis()
+ nextTime = now
+ i = 0
+ while i < 3:
+ if now < nextTime:
+ self.wait(nextTime - now)
+ now = currentTimeMillis()
+ continue
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+ out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0)
+ out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.name), 0)
+ out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0)
+ if info.address:
+ out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0)
+ self.send(out)
+ i += 1
+ nextTime += _UNREGISTER_TIME
+
+ def unregisterAllServices(self):
+ """Unregister all registered services."""
+ if len(self.services) > 0:
+ now = currentTimeMillis()
+ nextTime = now
+ i = 0
+ while i < 3:
+ if now < nextTime:
+ self.wait(nextTime - now)
+ now = currentTimeMillis()
+ continue
+ out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
+ for info in self.services.values():
+ out.addAnswerAtTime(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, 0, info.name), 0)
+ out.addAnswerAtTime(DNSService(info.name, _TYPE_SRV, _CLASS_IN, 0, info.priority, info.weight, info.port, info.server), 0)
+ out.addAnswerAtTime(DNSText(info.name, _TYPE_TXT, _CLASS_IN, 0, info.text), 0)
+ if info.address:
+ out.addAnswerAtTime(DNSAddress(info.server, _TYPE_A, _CLASS_IN, 0, info.address), 0)
+ self.send(out)
+ i += 1
+ nextTime += _UNREGISTER_TIME
+
def checkService(self, info):
    """Checks the network for a unique service name, modifying the
    ServiceInfo passed in if it is not unique.

    Probes up to three times, _CHECK_TIME milliseconds apart.  If a
    live cached PTR record already points at info.name, the name is
    disambiguated by embedding the address and port and the check
    restarts; a name that was already disambiguated (contains a '.')
    raises NonUniqueNameException.
    """
    now = currentTimeMillis()
    nextTime = now
    i = 0
    while i < 3:
        for record in self.cache.entriesWithName(info.type):
            if record.type == _TYPE_PTR and not record.isExpired(now) and record.alias == info.name:
                if (info.name.find('.') < 0):
                    # BUGFIX: info.port is an integer, so it must be
                    # converted with str() before concatenation -- the
                    # original code raised TypeError on this line.
                    # NOTE(review): info.address appears to be the packed
                    # inet_aton form, not dotted-quad -- kept as upstream
                    # had it; confirm before "fixing" further.
                    info.name = info.name + ".[" + info.address + ":" + str(info.port) + "]." + info.type
                    self.checkService(info)
                    return
                raise NonUniqueNameException
        if now < nextTime:
            self.wait(nextTime - now)
            now = currentTimeMillis()
            continue
        out = DNSOutgoing(_FLAGS_QR_QUERY | _FLAGS_AA)
        # Kept for debugging: last probe packet is inspectable.
        self.debug = out
        out.addQuestion(DNSQuestion(info.type, _TYPE_PTR, _CLASS_IN))
        out.addAuthorativeAnswer(DNSPointer(info.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, info.name))
        self.send(out)
        i += 1
        nextTime += _CHECK_TIME
+
def addListener(self, listener, question):
    """Register *listener* for *question*.

    The listener's updateRecord method is invoked immediately for
    every unexpired cached record that already answers the question;
    waiters are then notified.
    """
    now = currentTimeMillis()
    self.listeners.append(listener)
    if question is not None:
        answers = (rec for rec in self.cache.entriesWithName(question.name)
                   if question.answeredBy(rec) and not rec.isExpired(now))
        for rec in answers:
            listener.updateRecord(self, now, rec)
    self.notifyAll()
+
def removeListener(self, listener):
    """Removes a listener.

    Removing a listener that was never added remains a silent no-op,
    but only the expected ValueError from list.remove() is swallowed
    now -- the original bare ``except`` also hid unrelated bugs
    (e.g. a failure inside notifyAll).
    """
    try:
        self.listeners.remove(listener)
    except ValueError:
        # Listener was not registered; nothing to do.
        pass
    else:
        self.notifyAll()
+
def updateRecord(self, now, rec):
    """Used to notify listeners of new information that has updated
    a record."""
    for subscriber in self.listeners:
        subscriber.updateRecord(self, now, rec)
    self.notifyAll()
+
def handleResponse(self, msg):
    """Deal with incoming response packets. All answers
    are held in the cache, and listeners are notified.

    For each answer: an expired duplicate is evicted from the cache;
    a live duplicate refreshes the cached entry's TTL (and that
    cached instance, not the incoming copy, is what listeners see);
    a brand-new record is added.  Listeners are notified via
    updateRecord in every case.
    """
    now = currentTimeMillis()
    for record in msg.answers:
        expired = record.isExpired(now)
        if record in self.cache.entries():
            if expired:
                # Duplicate of a cached record that has expired: drop it.
                self.cache.remove(record)
            else:
                # Live duplicate: refresh the TTL on the cached entry and
                # rebind `record` so listeners get the cached instance.
                entry = self.cache.get(record)
                if entry is not None:
                    entry.resetTTL(record)
                    record = entry
        else:
            self.cache.add(record)

        self.updateRecord(now, record)
+
def handleQuery(self, msg, addr, port):
    """Deal with incoming query packets. Provides a response if
    possible.

    ``addr``/``port`` identify the sender; a reply is sent back via
    self.send() at the end only if at least one answer was added.
    """
    out = None

    # Support unicast client responses
    #
    if port != _MDNS_PORT:
        # Query did not come from the standard multicast port: reply
        # directly to the sender, echoing the questions back.
        out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA, 0)
        for question in msg.questions:
            out.addQuestion(question)

    for question in msg.questions:
        if question.type == _TYPE_PTR:
            if question.name == "_services._dns-sd._udp.local.":
                # Service-type enumeration: advertise every known type.
                for stype in self.servicetypes.keys():
                    if out is None:
                        out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                    out.addAnswer(msg, DNSPointer("_services._dns-sd._udp.local.", _TYPE_PTR, _CLASS_IN, _DNS_TTL, stype))
            # Point queries for a known service type at our instances.
            for service in self.services.values():
                if question.name == service.type:
                    if out is None:
                        out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)
                    out.addAnswer(msg, DNSPointer(service.type, _TYPE_PTR, _CLASS_IN, _DNS_TTL, service.name))
        else:
            try:
                if out is None:
                    out = DNSOutgoing(_FLAGS_QR_RESPONSE | _FLAGS_AA)

                # Answer A record queries for any service addresses we know
                if question.type == _TYPE_A or question.type == _TYPE_ANY:
                    for service in self.services.values():
                        if service.server == question.name.lower():
                            out.addAnswer(msg, DNSAddress(question.name, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address))

                # SRV/TXT answers only apply to a service registered
                # under exactly this name.
                service = self.services.get(question.name.lower(), None)
                if not service: continue

                if question.type == _TYPE_SRV or question.type == _TYPE_ANY:
                    out.addAnswer(msg, DNSService(question.name, _TYPE_SRV, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.priority, service.weight, service.port, service.server))
                if question.type == _TYPE_TXT or question.type == _TYPE_ANY:
                    out.addAnswer(msg, DNSText(question.name, _TYPE_TXT, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.text))
                if question.type == _TYPE_SRV:
                    # Piggy-back the matching address record so the
                    # client avoids a follow-up A query.
                    out.addAdditionalAnswer(DNSAddress(service.server, _TYPE_A, _CLASS_IN | _CLASS_UNIQUE, _DNS_TTL, service.address))
            except:
                # Deliberate best-effort: one malformed question must not
                # kill the responder; the traceback keeps it visible.
                traceback.print_exc()

    if out is not None and out.answers:
        out.id = msg.id
        self.send(out, addr, port)
+
def send(self, out, addr = _MDNS_ADDR, port = _MDNS_PORT):
    """Sends an outgoing packet.

    Transmission failures are still deliberately swallowed (sends can
    fail during a temporary loss of network connectivity), but the
    handler is narrowed from a bare ``except`` to ``socket.error`` so
    programming errors -- e.g. a bug inside out.packet() -- are no
    longer hidden.  Serialization happens before the try so only the
    actual send is guarded.
    """
    # This is a quick test to see if we can parse the packets we generate
    #temp = DNSIncoming(out.packet())
    data = out.packet()
    try:
        self.socket.sendto(data, 0, (addr, port))
    except socket.error:
        # Ignore this, it may be a temporary loss of network connection
        pass
+
def close(self):
    """Ends the background threads, and prevent this instance from
    servicing further queries.

    Guarded by the module-level _GLOBAL_DONE flag, so only the first
    call performs the teardown.
    """
    if globals()['_GLOBAL_DONE'] == 0:
        # NOTE(review): the engine/listener threads presumably poll this
        # module-level flag to terminate -- confirm against the rest of
        # the module.
        globals()['_GLOBAL_DONE'] = 1
        self.notifyAll()
        self.engine.notify()
        # Announce goodbye packets before tearing the socket down.
        self.unregisterAllServices()
        # Leave the mDNS multicast group, then close the socket.
        self.socket.setsockopt(socket.SOL_IP, socket.IP_DROP_MEMBERSHIP, socket.inet_aton(_MDNS_ADDR) + socket.inet_aton('0.0.0.0'))
        self.socket.close()
+
# Test a few module features, including service registration, service
# query (for Zoe), and service unregistration.

if __name__ == '__main__':
    # Manual smoke test (Python 2): register an HTTP service, query a
    # (likely absent) "ZOE" service, query our own registration, then
    # unregister and shut down.
    print "Multicast DNS Service Discovery for Python, version", __version__
    r = Zeroconf()
    print "1. Testing registration of a service..."
    desc = {'version':'0.10','a':'test value', 'b':'another value'}
    # Address is passed in packed form (inet_aton), port 1234,
    # weight 0, priority 0.
    info = ServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local.", socket.inet_aton("127.0.0.1"), 1234, 0, 0, desc)
    print " Registering service..."
    r.registerService(info)
    print " Registration done."
    print "2. Testing query of service information..."
    print " Getting ZOE service:", str(r.getServiceInfo("_http._tcp.local.", "ZOE._http._tcp.local."))
    print " Query done."
    print "3. Testing query of own service..."
    print " Getting self:", str(r.getServiceInfo("_http._tcp.local.", "My Service Name._http._tcp.local."))
    print " Query done."
    print "4. Testing unregister of service information..."
    r.unregisterService(info)
    print " Unregister done."
    r.close()

# no-check-code
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyo
new file mode 100644
index 0000000..339a003
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyo
Binary files differ
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py
new file mode 100644
index 0000000..60185c7
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py
@@ -0,0 +1,173 @@
+# zeroconf.py - zeroconf support for Mercurial
+#
+# Copyright 2005-2007 Matt Mackall <mpm@selenic.com>
+#
+# This software may be used and distributed according to the terms of the
+# GNU General Public License version 2 or any later version.
+
+'''discover and advertise repositories on the local network
+
+Zeroconf-enabled repositories will be announced in a network without
+the need to configure a server or a service. They can be discovered
+without knowing their actual IP address.
+
+To allow other people to discover your repository, run
+:hg:`serve` in your repository::
+
+ $ cd test
+ $ hg serve
+
+You can discover Zeroconf-enabled repositories by running
+:hg:`paths`::
+
+ $ hg paths
+ zc-test = http://example.com:8000/test
+'''
+
+import socket, time, os
+
+import Zeroconf
+from mercurial import ui, hg, encoding, util
+from mercurial import extensions
+from mercurial.hgweb import hgweb_mod
+from mercurial.hgweb import hgwebdir_mod
+
+# publish
+
+server = None
+localip = None
+
def getip():
    """Return this host's outward-facing IPv4 address as a string.

    Tries three strategies in order, falling through on failure;
    the worst case returns '127.0.0.1'.  Fixes over the original:
    the UDP sockets are always closed (they leaked before) and only
    socket errors are swallowed instead of every exception.
    """
    # finds external-facing interface without sending any packets (Linux)
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            # connect() on a UDP socket only selects a route; nothing
            # goes on the wire.
            s.connect(('1.0.0.1', 0))
            return s.getsockname()[0]
        finally:
            s.close()
    except socket.error:
        pass

    # Generic method, sometimes gives useless results
    dumbip = '127.0.0.1'
    try:
        dumbip = socket.gethostbyaddr(socket.gethostname())[2][0]
        if not dumbip.startswith('127.') and ':' not in dumbip:
            return dumbip
    except (socket.gaierror, socket.herror):
        dumbip = '127.0.0.1'

    # works elsewhere, but actually sends a packet
    try:
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect(('1.0.0.1', 1))
            return s.getsockname()[0]
        finally:
            s.close()
    except socket.error:
        pass

    return dumbip
+
def publish(name, desc, path, port):
    """Advertise one repository over Zeroconf, both for web browsers
    (_http._tcp) and for Mercurial clients (_hg._tcp).

    Lazily creates the module-level Zeroconf server on first use;
    bails out silently when only a loopback address is available.
    """
    global server, localip
    if not server:
        ip = getip()
        if ip.startswith('127.'):
            # if we have no internet connection, this can happen.
            return
        localip = socket.inet_aton(ip)
        server = Zeroconf.Zeroconf(ip)

    hostname = socket.gethostname().split('.')[0]
    host = hostname + ".local"
    name = "%s-%s" % (hostname, name)

    # Register the same service once per advertised protocol:
    # browsers first, then Mercurial clients.
    for svctype in ('_http._tcp.local.', '_hg._tcp.local.'):
        svc = Zeroconf.ServiceInfo(svctype,
                                   name + '.' + svctype,
                                   server = host,
                                   port = port,
                                   properties = {'description': desc,
                                                 'path': "/" + path},
                                   address = localip, weight = 0, priority = 0)
        server.registerService(svc)
+
class hgwebzc(hgweb_mod.hgweb):
    """hgweb subclass that additionally announces the repository via
    Zeroconf when it is instantiated."""
    def __init__(self, repo, name=None, baseui=None):
        super(hgwebzc, self).__init__(repo, name=name, baseui=baseui)
        # Fall back to the repository root's basename if no explicit
        # name was configured.
        name = self.reponame or os.path.basename(self.repo.root)
        path = self.repo.ui.config("web", "prefix", "").strip('/')
        desc = self.repo.ui.config("web", "description", name)
        publish(name, desc, path,
                util.getport(self.repo.ui.config("web", "port", 8000)))
+
class hgwebdirzc(hgwebdir_mod.hgwebdir):
    """hgwebdir subclass that announces every served repository via
    Zeroconf when the collection is instantiated."""
    def __init__(self, conf, baseui=None):
        super(hgwebdirzc, self).__init__(conf, baseui=baseui)
        prefix = self.ui.config("web", "prefix", "").strip('/') + '/'
        for repo, path in self.repos:
            u = self.ui.copy()
            # Per-repository hgrc may override web.description/web.port.
            u.readconfig(os.path.join(path, '.hg', 'hgrc'))
            name = os.path.basename(repo)
            # NOTE(review): `path` is rebound here from the filesystem
            # path to the published URL path -- readconfig above must
            # stay before this line.
            path = (prefix + repo).strip('/')
            desc = u.config('web', 'description', name)
            publish(name, desc, path, util.getport(u.config("web", "port", 8000)))
+
+# listen
+
class listener(object):
    """Accumulates services reported by a Zeroconf ServiceBrowser,
    keyed by repr() of the service name."""

    def __init__(self):
        self.found = {}

    def addService(self, server, type, name):
        # Resolve the full service details up front and remember them.
        self.found[repr(name)] = server.getServiceInfo(type, name)

    def removeService(self, server, type, name):
        # Forget the service if we ever saw it; unknown names are a no-op.
        self.found.pop(repr(name), None)
+
def getzcpaths():
    """Yield ("zc-<name>", url) pairs for repositories advertised on
    the local network.

    Browses _hg._tcp.local. for one second, closes the Zeroconf
    instance, then yields one entry per discovered service.  Yields
    nothing when only a loopback address is available.
    """
    ip = getip()
    if ip.startswith('127.'):
        return
    server = Zeroconf.Zeroconf(ip)
    l = listener()
    Zeroconf.ServiceBrowser(server, "_hg._tcp.local.", l)
    # Give the browser a moment to collect responses.
    time.sleep(1)
    server.close()
    for value in l.found.values():
        # Strip everything after the first label of the service name.
        name = value.name[:value.name.index('.')]
        url = "http://%s:%s%s" % (socket.inet_ntoa(value.address), value.port,
                                  value.properties.get("path", "/"))
        yield "zc-" + name, url
+
def config(orig, self, section, key, default=None, untrusted=False):
    """Wrapped ui.config: resolve "zc-*" keys in the "paths" section
    by browsing the network, delegating everything else (and any
    unmatched zc- key) to the original implementation."""
    if section != "paths" or not key.startswith("zc-"):
        return orig(self, section, key, default, untrusted)
    for zcname, zcpath in getzcpaths():
        if zcname == key:
            return zcpath
    return orig(self, section, key, default, untrusted)
+
def configitems(orig, self, section, untrusted=False):
    """Wrapped ui.configitems: append network-discovered repositories
    to the "paths" section; other sections pass through untouched."""
    items = orig(self, section, untrusted)
    if section != "paths":
        return items
    items += getzcpaths()
    return items
+
def defaultdest(orig, source):
    """Wrapped hg.defaultdest: when *source* is a URL we discovered
    over Zeroconf, use its symbolic "zc-" name as the destination;
    otherwise defer to the original implementation."""
    match = next((zcname for zcname, zcpath in getzcpaths()
                  if zcpath == source), None)
    if match is not None:
        return match.encode(encoding.encoding)
    return orig(source)
+
# Hook the extension in: wrap the ui config lookups so "zc-*" paths are
# resolved via the network, wrap defaultdest so clones of discovered
# URLs get their symbolic name, and swap in the announcing hgweb
# classes so serving a repository advertises it.
extensions.wrapfunction(ui.ui, 'config', config)
extensions.wrapfunction(ui.ui, 'configitems', configitems)
extensions.wrapfunction(hg, 'defaultdest', defaultdest)
hgweb_mod.hgweb = hgwebzc
hgwebdir_mod.hgwebdir = hgwebdirzc
diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyo
new file mode 100644
index 0000000..82e5bf4
--- /dev/null
+++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyo
Binary files differ