From 65411d01d448ff0cd4abd14eee14cf60b5f8fc20 Mon Sep 17 00:00:00 2001 From: Nishanth Amuluru Date: Sat, 8 Jan 2011 11:20:57 +0530 Subject: Added buildout stuff and made changes accordingly --HG-- rename : profile/management/__init__.py => eggs/djangorecipe-0.20-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/djangorecipe-0.20-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/infrae.subversion-1.4.5-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/infrae.subversion-1.4.5-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/py-1.4.0-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/py-1.4.0-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/zc.buildout-1.5.2-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/zc.buildout-1.5.2-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => eggs/zc.recipe.egg-1.3.2-py2.6.egg/EGG-INFO/dependency_links.txt rename : profile/management/__init__.py => eggs/zc.recipe.egg-1.3.2-py2.6.egg/EGG-INFO/not-zip-safe rename : profile/management/__init__.py => parts/django/Django.egg-info/dependency_links.txt rename : taskapp/models.py => parts/django/django/conf/app_template/models.py rename : taskapp/tests.py => parts/django/django/conf/app_template/tests.py rename : taskapp/views.py => parts/django/django/conf/app_template/views.py rename : taskapp/views.py => parts/django/django/contrib/gis/tests/geo3d/views.py rename : profile/management/__init__.py => parts/django/tests/modeltests/delete/__init__.py rename : profile/management/__init__.py => parts/django/tests/modeltests/files/__init__.py rename : profile/management/__init__.py => parts/django/tests/modeltests/invalid_models/__init__.py rename : profile/management/__init__.py => parts/django/tests/modeltests/m2m_signals/__init__.py rename : profile/management/__init__.py => parts/django/tests/modeltests/model_package/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/management/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/management/commands/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/bash_completion/models.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/delete_regress/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/file_storage/__init__.py rename : profile/management/__init__.py => parts/django/tests/regressiontests/max_lengths/__init__.py rename : profile/forms.py => pytask/profile/forms.py rename : profile/management/__init__.py => pytask/profile/management/__init__.py rename : profile/management/commands/seed_db.py => pytask/profile/management/commands/seed_db.py rename : profile/models.py => pytask/profile/models.py rename : profile/templatetags/user_tags.py => pytask/profile/templatetags/user_tags.py rename : taskapp/tests.py => pytask/profile/tests.py rename : profile/urls.py => 
pytask/profile/urls.py rename : profile/utils.py => pytask/profile/utils.py rename : profile/views.py => pytask/profile/views.py rename : static/css/base.css => pytask/static/css/base.css rename : taskapp/tests.py => pytask/taskapp/tests.py rename : taskapp/views.py => pytask/taskapp/views.py rename : templates/base.html => pytask/templates/base.html rename : templates/profile/browse_notifications.html => pytask/templates/profile/browse_notifications.html rename : templates/profile/edit.html => pytask/templates/profile/edit.html rename : templates/profile/view.html => pytask/templates/profile/view.html rename : templates/profile/view_notification.html => pytask/templates/profile/view_notification.html rename : templates/registration/activate.html => pytask/templates/registration/activate.html rename : templates/registration/activation_email.txt => pytask/templates/registration/activation_email.txt rename : templates/registration/activation_email_subject.txt => pytask/templates/registration/activation_email_subject.txt rename : templates/registration/logged_out.html => pytask/templates/registration/logged_out.html rename : templates/registration/login.html => pytask/templates/registration/login.html rename : templates/registration/logout.html => pytask/templates/registration/logout.html rename : templates/registration/password_change_done.html => pytask/templates/registration/password_change_done.html rename : templates/registration/password_change_form.html => pytask/templates/registration/password_change_form.html rename : templates/registration/password_reset_complete.html => pytask/templates/registration/password_reset_complete.html rename : templates/registration/password_reset_confirm.html => pytask/templates/registration/password_reset_confirm.html rename : templates/registration/password_reset_done.html => pytask/templates/registration/password_reset_done.html rename : templates/registration/password_reset_email.html => pytask/templates/registration/password_reset_email.html rename : templates/registration/password_reset_form.html => pytask/templates/registration/password_reset_form.html rename : templates/registration/registration_complete.html => pytask/templates/registration/registration_complete.html rename : templates/registration/registration_form.html => pytask/templates/registration/registration_form.html rename : utils.py => pytask/utils.py --- .../hgext/__init__.py | 1 + .../hgext/__init__.pyo | Bin 0 -> 186 bytes .../hgext/acl.py | 250 ++ .../hgext/acl.pyo | Bin 0 -> 8875 bytes .../hgext/bookmarks.py | 579 ++++ .../hgext/bookmarks.pyo | Bin 0 -> 21456 bytes .../hgext/bugzilla.py | 441 +++ .../hgext/bugzilla.pyo | Bin 0 -> 18525 bytes .../hgext/children.py | 45 + .../hgext/children.pyo | Bin 0 -> 1514 bytes .../hgext/churn.py | 198 ++ .../hgext/churn.pyo | Bin 0 -> 9040 bytes .../hgext/color.py | 319 ++ .../hgext/color.pyo | Bin 0 -> 11558 bytes .../hgext/convert/__init__.py | 321 ++ .../hgext/convert/__init__.pyo | Bin 0 -> 14008 bytes .../hgext/convert/bzr.py | 260 ++ .../hgext/convert/bzr.pyo | Bin 0 -> 9581 bytes .../hgext/convert/common.py | 389 +++ .../hgext/convert/common.pyo | Bin 0 -> 20100 bytes .../hgext/convert/convcmd.py | 434 +++ .../hgext/convert/convcmd.pyo | Bin 0 -> 16933 bytes .../hgext/convert/cvs.py | 271 ++ .../hgext/convert/cvs.pyo | Bin 0 -> 9466 bytes .../hgext/convert/cvsps.py | 847 ++++++ .../hgext/convert/cvsps.pyo | Bin 0 -> 21195 bytes .../hgext/convert/darcs.py | 200 ++ .../hgext/convert/darcs.pyo | Bin 0 -> 8694 bytes 
.../hgext/convert/filemap.py | 365 +++ .../hgext/convert/filemap.pyo | Bin 0 -> 11449 bytes .../hgext/convert/git.py | 170 ++ .../hgext/convert/git.pyo | Bin 0 -> 7015 bytes .../hgext/convert/gnuarch.py | 338 +++ .../hgext/convert/gnuarch.pyo | Bin 0 -> 12415 bytes .../hgext/convert/hg.py | 376 +++ .../hgext/convert/hg.pyo | Bin 0 -> 16042 bytes .../hgext/convert/monotone.py | 227 ++ .../hgext/convert/monotone.pyo | Bin 0 -> 7808 bytes .../hgext/convert/p4.py | 202 ++ .../hgext/convert/p4.pyo | Bin 0 -> 7333 bytes .../hgext/convert/subversion.py | 1168 +++++++ .../hgext/convert/subversion.pyo | Bin 0 -> 39371 bytes .../hgext/convert/transport.py | 128 + .../hgext/convert/transport.pyo | Bin 0 -> 5744 bytes .../hgext/eol.py | 272 ++ .../hgext/eol.pyo | Bin 0 -> 10766 bytes .../hgext/extdiff.py | 325 ++ .../hgext/extdiff.pyo | Bin 0 -> 10985 bytes .../hgext/fetch.py | 152 + .../hgext/fetch.pyo | Bin 0 -> 5046 bytes .../hgext/gpg.py | 288 ++ .../hgext/gpg.pyo | Bin 0 -> 10444 bytes .../hgext/graphlog.py | 337 ++ .../hgext/graphlog.pyo | Bin 0 -> 12442 bytes .../hgext/hgcia.py | 251 ++ .../hgext/hgcia.pyo | Bin 0 -> 10391 bytes .../hgext/hgk.py | 348 +++ .../hgext/hgk.pyo | Bin 0 -> 11815 bytes .../hgext/highlight/__init__.py | 61 + .../hgext/highlight/__init__.pyo | Bin 0 -> 2545 bytes .../hgext/highlight/highlight.py | 61 + .../hgext/highlight/highlight.pyo | Bin 0 -> 2358 bytes .../hgext/interhg.py | 81 + .../hgext/interhg.pyo | Bin 0 -> 2746 bytes .../hgext/keyword.py | 649 ++++ .../hgext/keyword.pyo | Bin 0 -> 29160 bytes .../hgext/mq.py | 3211 ++++++++++++++++++++ .../hgext/mq.pyo | Bin 0 -> 112419 bytes .../hgext/notify.py | 316 ++ .../hgext/notify.pyo | Bin 0 -> 11753 bytes .../hgext/pager.py | 113 + .../hgext/pager.pyo | Bin 0 -> 4114 bytes .../hgext/parentrevspec.py | 96 + .../hgext/parentrevspec.pyo | Bin 0 -> 2626 bytes .../hgext/patchbomb.py | 553 ++++ .../hgext/patchbomb.pyo | Bin 0 -> 20701 bytes .../hgext/progress.py | 206 ++ .../hgext/progress.pyo | Bin 0 -> 8302 bytes .../hgext/purge.py | 111 + .../hgext/purge.pyo | Bin 0 -> 3694 bytes .../hgext/rebase.py | 577 ++++ .../hgext/rebase.pyo | Bin 0 -> 20920 bytes .../hgext/record.py | 569 ++++ .../hgext/record.pyo | Bin 0 -> 22073 bytes .../hgext/relink.py | 180 ++ .../hgext/relink.pyo | Bin 0 -> 6226 bytes .../hgext/schemes.py | 84 + .../hgext/schemes.pyo | Bin 0 -> 3984 bytes .../hgext/share.py | 38 + .../hgext/share.pyo | Bin 0 -> 1595 bytes .../hgext/transplant.py | 630 ++++ .../hgext/transplant.pyo | Bin 0 -> 23926 bytes .../hgext/win32mbcs.py | 159 + .../hgext/win32mbcs.pyo | Bin 0 -> 5770 bytes .../hgext/win32text.py | 170 ++ .../hgext/win32text.pyo | Bin 0 -> 7010 bytes .../hgext/zeroconf/Zeroconf.py | 1582 ++++++++++ .../hgext/zeroconf/Zeroconf.pyo | Bin 0 -> 66077 bytes .../hgext/zeroconf/__init__.py | 173 ++ .../hgext/zeroconf/__init__.pyo | Bin 0 -> 7506 bytes 100 files changed, 19122 insertions(+) create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py create mode 100644 
eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py create 
mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/highlight.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/interhg.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/keyword.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/mq.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/notify.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/pager.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/parentrevspec.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/patchbomb.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/progress.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/purge.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/rebase.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/record.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/relink.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/schemes.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/share.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/transplant.pyo create mode 100644 
eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32mbcs.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/win32text.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/Zeroconf.pyo create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.py create mode 100644 eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/zeroconf/__init__.pyo (limited to 'eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext') diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py new file mode 100644 index 0000000..fdffa2a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.py @@ -0,0 +1 @@ +# placeholder diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo new file mode 100644 index 0000000..96afc1a Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/__init__.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py new file mode 100644 index 0000000..a50fa72 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.py @@ -0,0 +1,250 @@ +# acl.py - changeset access control for mercurial +# +# Copyright 2006 Vadim Gelfer +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''hooks for controlling repository access + +This hook makes it possible to allow or deny write access to given +branches and paths of a repository when receiving incoming changesets +via pretxnchangegroup and pretxncommit. + +The authorization is matched based on the local user name on the +system where the hook runs, and not the committer of the original +changeset (since the latter is merely informative). + +The acl hook is best used along with a restricted shell like hgsh, +preventing authenticating users from doing anything other than pushing +or pulling. The hook is not safe to use if users have interactive +shell access, as they can then disable the hook. Nor is it safe if +remote users share an account, because then there is no way to +distinguish them. + +The order in which access checks are performed is: + +1) Deny list for branches (section ``acl.deny.branches``) +2) Allow list for branches (section ``acl.allow.branches``) +3) Deny list for paths (section ``acl.deny``) +4) Allow list for paths (section ``acl.allow``) + +The allow and deny sections take key-value pairs. + +Branch-based Access Control +........................... + +Use the ``acl.deny.branches`` and ``acl.allow.branches`` sections to +have branch-based access control. Keys in these sections can be +either: + +- a branch name, or +- an asterisk, to match any branch; + +The corresponding values can be either: + +- a comma-separated list containing users and groups, or +- an asterisk, to match anyone; + +Path-based Access Control +......................... + +Use the ``acl.deny`` and ``acl.allow`` sections to have path-based +access control. Keys in these sections accept a subtree pattern (with +a glob syntax by default). 
The corresponding values follow the same +syntax as the other sections above. + +Groups +...... + +Group names must be prefixed with an ``@`` symbol. Specifying a group +name has the same effect as specifying all the users in that group. + +You can define group members in the ``acl.groups`` section. +If a group name is not defined there, and Mercurial is running under +a Unix-like system, the list of users will be taken from the OS. +Otherwise, an exception will be raised. + +Example Configuration +..................... + +:: + + [hooks] + + # Use this if you want to check access restrictions at commit time + pretxncommit.acl = python:hgext.acl.hook + + # Use this if you want to check access restrictions for pull, push, + # bundle and serve. + pretxnchangegroup.acl = python:hgext.acl.hook + + [acl] + # Allow or deny access for incoming changes only if their source is + # listed here, let them pass otherwise. Source is "serve" for all + # remote access (http or ssh), "push", "pull" or "bundle" when the + # related commands are run locally. + # Default: serve + sources = serve + + [acl.deny.branches] + + # Everyone is denied to the frozen branch: + frozen-branch = * + + # A bad user is denied on all branches: + * = bad-user + + [acl.allow.branches] + + # A few users are allowed on branch-a: + branch-a = user-1, user-2, user-3 + + # Only one user is allowed on branch-b: + branch-b = user-1 + + # The super user is allowed on any branch: + * = super-user + + # Everyone is allowed on branch-for-tests: + branch-for-tests = * + + [acl.deny] + # This list is checked first. If a match is found, acl.allow is not + # checked. All users are granted access if acl.deny is not present. + # Format for both lists: glob pattern = user, ..., @group, ... + + # To match everyone, use an asterisk for the user: + # my/glob/pattern = * + + # user6 will not have write access to any file: + ** = user6 + + # Group "hg-denied" will not have write access to any file: + ** = @hg-denied + + # Nobody will be able to change "DONT-TOUCH-THIS.txt", despite + # everyone being able to change all other files. See below. + src/main/resources/DONT-TOUCH-THIS.txt = * + + [acl.allow] + # if acl.allow is not present, all users are allowed by default + # empty acl.allow = no users allowed + + # User "doc_writer" has write access to any file under the "docs" + # folder: + docs/** = doc_writer + + # User "jack" and group "designers" have write access to any file + # under the "images" folder: + images/** = jack, @designers + + # Everyone (except for "user6" - see acl.deny above) will have write + # access to any file under the "resources" folder (except for 1 + # file. 
See acl.deny): + src/main/resources/** = * + + .hgtags = release_engineer + +''' + +from mercurial.i18n import _ +from mercurial import util, match +import getpass, urllib + +def _getusers(ui, group): + + # First, try to use group definition from section [acl.groups] + hgrcusers = ui.configlist('acl.groups', group) + if hgrcusers: + return hgrcusers + + ui.debug('acl: "%s" not defined in [acl.groups]\n' % group) + # If no users found in group definition, get users from OS-level group + try: + return util.groupmembers(group) + except KeyError: + raise util.Abort(_("group '%s' is undefined") % group) + +def _usermatch(ui, user, usersorgroups): + + if usersorgroups == '*': + return True + + for ug in usersorgroups.replace(',', ' ').split(): + if user == ug or ug.find('@') == 0 and user in _getusers(ui, ug[1:]): + return True + + return False + +def buildmatch(ui, repo, user, key): + '''return tuple of (match function, list enabled).''' + if not ui.has_section(key): + ui.debug('acl: %s not enabled\n' % key) + return None + + pats = [pat for pat, users in ui.configitems(key) + if _usermatch(ui, user, users)] + ui.debug('acl: %s enabled, %d entries for user %s\n' % + (key, len(pats), user)) + + if not repo: + if pats: + return lambda b: '*' in pats or b in pats + return lambda b: False + + if pats: + return match.match(repo.root, '', pats) + return match.exact(repo.root, '', []) + + +def hook(ui, repo, hooktype, node=None, source=None, **kwargs): + if hooktype not in ['pretxnchangegroup', 'pretxncommit']: + raise util.Abort(_('config error - hook type "%s" cannot stop ' + 'incoming changesets nor commits') % hooktype) + if (hooktype == 'pretxnchangegroup' and + source not in ui.config('acl', 'sources', 'serve').split()): + ui.debug('acl: changes have source "%s" - skipping\n' % source) + return + + user = None + if source == 'serve' and 'url' in kwargs: + url = kwargs['url'].split(':') + if url[0] == 'remote' and url[1].startswith('http'): + user = urllib.unquote(url[3]) + + if user is None: + user = getpass.getuser() + + cfg = ui.config('acl', 'config') + if cfg: + ui.readconfig(cfg, sections = ['acl.groups', 'acl.allow.branches', + 'acl.deny.branches', 'acl.allow', 'acl.deny']) + + allowbranches = buildmatch(ui, None, user, 'acl.allow.branches') + denybranches = buildmatch(ui, None, user, 'acl.deny.branches') + allow = buildmatch(ui, repo, user, 'acl.allow') + deny = buildmatch(ui, repo, user, 'acl.deny') + + for rev in xrange(repo[node], len(repo)): + ctx = repo[rev] + branch = ctx.branch() + if denybranches and denybranches(branch): + raise util.Abort(_('acl: user "%s" denied on branch "%s"' + ' (changeset "%s")') + % (user, branch, ctx)) + if allowbranches and not allowbranches(branch): + raise util.Abort(_('acl: user "%s" not allowed on branch "%s"' + ' (changeset "%s")') + % (user, branch, ctx)) + ui.debug('acl: branch access granted: "%s" on branch "%s"\n' + % (ctx, branch)) + + for f in ctx.files(): + if deny and deny(f): + ui.debug('acl: user %s denied on %s\n' % (user, f)) + raise util.Abort(_('acl: access denied for changeset %s') % ctx) + if allow and not allow(f): + ui.debug('acl: user %s not allowed on %s\n' % (user, f)) + raise util.Abort(_('acl: access denied for changeset %s') % ctx) + ui.debug('acl: allowing changeset %s\n' % ctx) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo new file mode 100644 index 0000000..8001bf2 Binary files /dev/null and 
b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/acl.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py new file mode 100644 index 0000000..1ebbc7a --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.py @@ -0,0 +1,579 @@ +# Mercurial extension to provide the 'hg bookmark' command +# +# Copyright 2008 David Soria Parra +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''track a line of development with movable markers + +Bookmarks are local movable markers to changesets. Every bookmark +points to a changeset identified by its hash. If you commit a +changeset that is based on a changeset that has a bookmark on it, the +bookmark shifts to the new changeset. + +It is possible to use bookmark names in every revision lookup (e.g. +:hg:`merge`, :hg:`update`). + +By default, when several bookmarks point to the same changeset, they +will all move forward together. It is possible to obtain a more +git-like experience by adding the following configuration option to +your configuration file:: + + [bookmarks] + track.current = True + +This will cause Mercurial to track the bookmark that you are currently +using, and only update it. This is similar to git's approach to +branching. +''' + +from mercurial.i18n import _ +from mercurial.node import nullid, nullrev, bin, hex, short +from mercurial import util, commands, repair, extensions, pushkey, hg, url +from mercurial import revset +import os + +def write(repo): + '''Write bookmarks + + Write the given bookmark => hash dictionary to the .hg/bookmarks file + in a format equal to those of localtags. + + We also store a backup of the previous state in undo.bookmarks that + can be copied back on rollback. + ''' + refs = repo._bookmarks + + try: + bms = repo.opener('bookmarks').read() + except IOError: + bms = None + if bms is not None: + repo.opener('undo.bookmarks', 'w').write(bms) + + if repo._bookmarkcurrent not in refs: + setcurrent(repo, None) + wlock = repo.wlock() + try: + file = repo.opener('bookmarks', 'w', atomictemp=True) + for refspec, node in refs.iteritems(): + file.write("%s %s\n" % (hex(node), refspec)) + file.rename() + + # touch 00changelog.i so hgweb reloads bookmarks (no lock needed) + try: + os.utime(repo.sjoin('00changelog.i'), None) + except OSError: + pass + + finally: + wlock.release() + +def setcurrent(repo, mark): + '''Set the name of the bookmark that we are currently on + + Set the name of the bookmark that we are on (hg update ). + The name is recorded in .hg/bookmarks.current + ''' + current = repo._bookmarkcurrent + if current == mark: + return + + refs = repo._bookmarks + + # do not update if we do update to a rev equal to the current bookmark + if (mark and mark not in refs and + current and refs[current] == repo.changectx('.').node()): + return + if mark not in refs: + mark = '' + wlock = repo.wlock() + try: + file = repo.opener('bookmarks.current', 'w', atomictemp=True) + file.write(mark) + file.rename() + finally: + wlock.release() + repo._bookmarkcurrent = mark + +def bookmark(ui, repo, mark=None, rev=None, force=False, delete=False, rename=None): + '''track a line of development with movable markers + + Bookmarks are pointers to certain commits that move when + committing. Bookmarks are local. They can be renamed, copied and + deleted. 
It is possible to use bookmark names in :hg:`merge` and + :hg:`update` to merge and update respectively to a given bookmark. + + You can use :hg:`bookmark NAME` to set a bookmark on the working + directory's parent revision with the given name. If you specify + a revision using -r REV (where REV may be an existing bookmark), + the bookmark is assigned to that revision. + + Bookmarks can be pushed and pulled between repositories (see :hg:`help + push` and :hg:`help pull`). This requires the bookmark extension to be + enabled for both the local and remote repositories. + ''' + hexfn = ui.debugflag and hex or short + marks = repo._bookmarks + cur = repo.changectx('.').node() + + if rename: + if rename not in marks: + raise util.Abort(_("a bookmark of this name does not exist")) + if mark in marks and not force: + raise util.Abort(_("a bookmark of the same name already exists")) + if mark is None: + raise util.Abort(_("new bookmark name required")) + marks[mark] = marks[rename] + del marks[rename] + if repo._bookmarkcurrent == rename: + setcurrent(repo, mark) + write(repo) + return + + if delete: + if mark is None: + raise util.Abort(_("bookmark name required")) + if mark not in marks: + raise util.Abort(_("a bookmark of this name does not exist")) + if mark == repo._bookmarkcurrent: + setcurrent(repo, None) + del marks[mark] + write(repo) + return + + if mark != None: + if "\n" in mark: + raise util.Abort(_("bookmark name cannot contain newlines")) + mark = mark.strip() + if not mark: + raise util.Abort(_("bookmark names cannot consist entirely of " + "whitespace")) + if mark in marks and not force: + raise util.Abort(_("a bookmark of the same name already exists")) + if ((mark in repo.branchtags() or mark == repo.dirstate.branch()) + and not force): + raise util.Abort( + _("a bookmark cannot have the name of an existing branch")) + if rev: + marks[mark] = repo.lookup(rev) + else: + marks[mark] = repo.changectx('.').node() + setcurrent(repo, mark) + write(repo) + return + + if mark is None: + if rev: + raise util.Abort(_("bookmark name required")) + if len(marks) == 0: + ui.status(_("no bookmarks set\n")) + else: + for bmark, n in marks.iteritems(): + if ui.configbool('bookmarks', 'track.current'): + current = repo._bookmarkcurrent + if bmark == current and n == cur: + prefix, label = '*', 'bookmarks.current' + else: + prefix, label = ' ', '' + else: + if n == cur: + prefix, label = '*', 'bookmarks.current' + else: + prefix, label = ' ', '' + + if ui.quiet: + ui.write("%s\n" % bmark, label=label) + else: + ui.write(" %s %-25s %d:%s\n" % ( + prefix, bmark, repo.changelog.rev(n), hexfn(n)), + label=label) + return + +def _revstostrip(changelog, node): + srev = changelog.rev(node) + tostrip = [srev] + saveheads = [] + for r in xrange(srev, len(changelog)): + parents = changelog.parentrevs(r) + if parents[0] in tostrip or parents[1] in tostrip: + tostrip.append(r) + if parents[1] != nullrev: + for p in parents: + if p not in tostrip and p > srev: + saveheads.append(p) + return [r for r in tostrip if r not in saveheads] + +def strip(oldstrip, ui, repo, node, backup="all"): + """Strip bookmarks if revisions are stripped using + the mercurial.strip method. 
This usually happens during + qpush and qpop""" + revisions = _revstostrip(repo.changelog, node) + marks = repo._bookmarks + update = [] + for mark, n in marks.iteritems(): + if repo.changelog.rev(n) in revisions: + update.append(mark) + oldstrip(ui, repo, node, backup) + if len(update) > 0: + for m in update: + marks[m] = repo.changectx('.').node() + write(repo) + +def reposetup(ui, repo): + if not repo.local(): + return + + class bookmark_repo(repo.__class__): + + @util.propertycache + def _bookmarks(self): + '''Parse .hg/bookmarks file and return a dictionary + + Bookmarks are stored as {HASH}\\s{NAME}\\n (localtags format) values + in the .hg/bookmarks file. + Read the file and return a (name=>nodeid) dictionary + ''' + try: + bookmarks = {} + for line in self.opener('bookmarks'): + sha, refspec = line.strip().split(' ', 1) + bookmarks[refspec] = self.changelog.lookup(sha) + except: + pass + return bookmarks + + @util.propertycache + def _bookmarkcurrent(self): + '''Get the current bookmark + + If we use gittishsh branches we have a current bookmark that + we are on. This function returns the name of the bookmark. It + is stored in .hg/bookmarks.current + ''' + mark = None + if os.path.exists(self.join('bookmarks.current')): + file = self.opener('bookmarks.current') + # No readline() in posixfile_nt, reading everything is cheap + mark = (file.readlines() or [''])[0] + if mark == '': + mark = None + file.close() + return mark + + def rollback(self, *args): + if os.path.exists(self.join('undo.bookmarks')): + util.rename(self.join('undo.bookmarks'), self.join('bookmarks')) + return super(bookmark_repo, self).rollback(*args) + + def lookup(self, key): + if key in self._bookmarks: + key = self._bookmarks[key] + return super(bookmark_repo, self).lookup(key) + + def _bookmarksupdate(self, parents, node): + marks = self._bookmarks + update = False + if ui.configbool('bookmarks', 'track.current'): + mark = self._bookmarkcurrent + if mark and marks[mark] in parents: + marks[mark] = node + update = True + else: + for mark, n in marks.items(): + if n in parents: + marks[mark] = node + update = True + if update: + write(self) + + def commitctx(self, ctx, error=False): + """Add a revision to the repository and + move the bookmark""" + wlock = self.wlock() # do both commit and bookmark with lock held + try: + node = super(bookmark_repo, self).commitctx(ctx, error) + if node is None: + return None + parents = self.changelog.parents(node) + if parents[1] == nullid: + parents = (parents[0],) + + self._bookmarksupdate(parents, node) + return node + finally: + wlock.release() + + def pull(self, remote, heads=None, force=False): + result = super(bookmark_repo, self).pull(remote, heads, force) + + self.ui.debug("checking for updated bookmarks\n") + rb = remote.listkeys('bookmarks') + changed = False + for k in rb.keys(): + if k in self._bookmarks: + nr, nl = rb[k], self._bookmarks[k] + if nr in self: + cr = self[nr] + cl = self[nl] + if cl.rev() >= cr.rev(): + continue + if cr in cl.descendants(): + self._bookmarks[k] = cr.node() + changed = True + self.ui.status(_("updating bookmark %s\n") % k) + else: + self.ui.warn(_("not updating divergent" + " bookmark %s\n") % k) + if changed: + write(repo) + + return result + + def push(self, remote, force=False, revs=None, newbranch=False): + result = super(bookmark_repo, self).push(remote, force, revs, + newbranch) + + self.ui.debug("checking for updated bookmarks\n") + rb = remote.listkeys('bookmarks') + for k in rb.keys(): + if k in self._bookmarks: + nr, nl = 
rb[k], self._bookmarks[k] + if nr in self: + cr = self[nr] + cl = self[nl] + if cl in cr.descendants(): + r = remote.pushkey('bookmarks', k, nr, nl) + if r: + self.ui.status(_("updating bookmark %s\n") % k) + else: + self.ui.warn(_('updating bookmark %s' + ' failed!\n') % k) + + return result + + def addchangegroup(self, *args, **kwargs): + parents = self.dirstate.parents() + + result = super(bookmark_repo, self).addchangegroup(*args, **kwargs) + if result > 1: + # We have more heads than before + return result + node = self.changelog.tip() + + self._bookmarksupdate(parents, node) + return result + + def _findtags(self): + """Merge bookmarks with normal tags""" + (tags, tagtypes) = super(bookmark_repo, self)._findtags() + tags.update(self._bookmarks) + return (tags, tagtypes) + + if hasattr(repo, 'invalidate'): + def invalidate(self): + super(bookmark_repo, self).invalidate() + for attr in ('_bookmarks', '_bookmarkcurrent'): + if attr in self.__dict__: + delattr(self, attr) + + repo.__class__ = bookmark_repo + +def listbookmarks(repo): + # We may try to list bookmarks on a repo type that does not + # support it (e.g., statichttprepository). + if not hasattr(repo, '_bookmarks'): + return {} + + d = {} + for k, v in repo._bookmarks.iteritems(): + d[k] = hex(v) + return d + +def pushbookmark(repo, key, old, new): + w = repo.wlock() + try: + marks = repo._bookmarks + if hex(marks.get(key, '')) != old: + return False + if new == '': + del marks[key] + else: + if new not in repo: + return False + marks[key] = repo[new].node() + write(repo) + return True + finally: + w.release() + +def pull(oldpull, ui, repo, source="default", **opts): + # translate bookmark args to rev args for actual pull + if opts.get('bookmark'): + # this is an unpleasant hack as pull will do this internally + source, branches = hg.parseurl(ui.expandpath(source), + opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), source) + rb = other.listkeys('bookmarks') + + for b in opts['bookmark']: + if b not in rb: + raise util.Abort(_('remote bookmark %s not found!') % b) + opts.setdefault('rev', []).append(b) + + result = oldpull(ui, repo, source, **opts) + + # update specified bookmarks + if opts.get('bookmark'): + for b in opts['bookmark']: + # explicit pull overrides local bookmark if any + ui.status(_("importing bookmark %s\n") % b) + repo._bookmarks[b] = repo[rb[b]].node() + write(repo) + + return result + +def push(oldpush, ui, repo, dest=None, **opts): + dopush = True + if opts.get('bookmark'): + dopush = False + for b in opts['bookmark']: + if b in repo._bookmarks: + dopush = True + opts.setdefault('rev', []).append(b) + + result = 0 + if dopush: + result = oldpush(ui, repo, dest, **opts) + + if opts.get('bookmark'): + # this is an unpleasant hack as push will do this internally + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest, opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), dest) + rb = other.listkeys('bookmarks') + for b in opts['bookmark']: + # explicit push overrides remote bookmark if any + if b in repo._bookmarks: + ui.status(_("exporting bookmark %s\n") % b) + new = repo[b].hex() + elif b in rb: + ui.status(_("deleting remote bookmark %s\n") % b) + new = '' # delete + else: + ui.warn(_('bookmark %s does not exist on the local ' + 'or remote repository!\n') % b) + return 2 + old = rb.get(b, '') + r = other.pushkey('bookmarks', b, old, new) + if not r: + ui.warn(_('updating bookmark %s failed!\n') % b) + if not result: + result = 
2 + + return result + +def diffbookmarks(ui, repo, remote): + ui.status(_("searching for changed bookmarks\n")) + + lmarks = repo.listkeys('bookmarks') + rmarks = remote.listkeys('bookmarks') + + diff = sorted(set(rmarks) - set(lmarks)) + for k in diff: + ui.write(" %-25s %s\n" % (k, rmarks[k][:12])) + + if len(diff) <= 0: + ui.status(_("no changed bookmarks found\n")) + return 1 + return 0 + +def incoming(oldincoming, ui, repo, source="default", **opts): + if opts.get('bookmarks'): + source, branches = hg.parseurl(ui.expandpath(source), opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), source) + ui.status(_('comparing with %s\n') % url.hidepassword(source)) + return diffbookmarks(ui, repo, other) + else: + return oldincoming(ui, repo, source, **opts) + +def outgoing(oldoutgoing, ui, repo, dest=None, **opts): + if opts.get('bookmarks'): + dest = ui.expandpath(dest or 'default-push', dest or 'default') + dest, branches = hg.parseurl(dest, opts.get('branch')) + other = hg.repository(hg.remoteui(repo, opts), dest) + ui.status(_('comparing with %s\n') % url.hidepassword(dest)) + return diffbookmarks(ui, other, repo) + else: + return oldoutgoing(ui, repo, dest, **opts) + +def uisetup(ui): + extensions.wrapfunction(repair, "strip", strip) + if ui.configbool('bookmarks', 'track.current'): + extensions.wrapcommand(commands.table, 'update', updatecurbookmark) + + entry = extensions.wrapcommand(commands.table, 'pull', pull) + entry[1].append(('B', 'bookmark', [], + _("bookmark to import"), + _('BOOKMARK'))) + entry = extensions.wrapcommand(commands.table, 'push', push) + entry[1].append(('B', 'bookmark', [], + _("bookmark to export"), + _('BOOKMARK'))) + entry = extensions.wrapcommand(commands.table, 'incoming', incoming) + entry[1].append(('B', 'bookmarks', False, + _("compare bookmark"))) + entry = extensions.wrapcommand(commands.table, 'outgoing', outgoing) + entry[1].append(('B', 'bookmarks', False, + _("compare bookmark"))) + + pushkey.register('bookmarks', pushbookmark, listbookmarks) + +def updatecurbookmark(orig, ui, repo, *args, **opts): + '''Set the current bookmark + + If the user updates to a bookmark we update the .hg/bookmarks.current + file. + ''' + res = orig(ui, repo, *args, **opts) + rev = opts['rev'] + if not rev and len(args) > 0: + rev = args[0] + setcurrent(repo, rev) + return res + +def bmrevset(repo, subset, x): + """``bookmark([name])`` + The named bookmark or all bookmarks. 
+ """ + # i18n: "bookmark" is a keyword + args = revset.getargs(x, 0, 1, _('bookmark takes one or no arguments')) + if args: + bm = revset.getstring(args[0], + # i18n: "bookmark" is a keyword + _('the argument to bookmark must be a string')) + bmrev = listbookmarks(repo).get(bm, None) + if bmrev: + bmrev = repo.changelog.rev(bin(bmrev)) + return [r for r in subset if r == bmrev] + bms = set([repo.changelog.rev(bin(r)) for r in listbookmarks(repo).values()]) + return [r for r in subset if r in bms] + +def extsetup(ui): + revset.symbols['bookmark'] = bmrevset + +cmdtable = { + "bookmarks": + (bookmark, + [('f', 'force', False, _('force')), + ('r', 'rev', '', _('revision'), _('REV')), + ('d', 'delete', False, _('delete a given bookmark')), + ('m', 'rename', '', _('rename a given bookmark'), _('NAME'))], + _('hg bookmarks [-f] [-d] [-m NAME] [-r REV] [NAME]')), +} + +colortable = {'bookmarks.current': 'green'} + +# tell hggettext to extract docstrings from these functions: +i18nfunctions = [bmrevset] diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo new file mode 100644 index 0000000..9cad1f2 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bookmarks.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py new file mode 100644 index 0000000..de72e91 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.py @@ -0,0 +1,441 @@ +# bugzilla.py - bugzilla integration for mercurial +# +# Copyright 2006 Vadim Gelfer +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''hooks for integrating with the Bugzilla bug tracker + +This hook extension adds comments on bugs in Bugzilla when changesets +that refer to bugs by Bugzilla ID are seen. The hook does not change +bug status. + +The hook updates the Bugzilla database directly. Only Bugzilla +installations using MySQL are supported. + +The hook relies on a Bugzilla script to send bug change notification +emails. That script changes between Bugzilla versions; the +'processmail' script used prior to 2.18 is replaced in 2.18 and +subsequent versions by 'config/sendbugmail.pl'. Note that these will +be run by Mercurial as the user pushing the change; you will need to +ensure the Bugzilla install file permissions are set appropriately. + +The extension is configured through three different configuration +sections. These keys are recognized in the [bugzilla] section: + +host + Hostname of the MySQL server holding the Bugzilla database. + +db + Name of the Bugzilla database in MySQL. Default 'bugs'. + +user + Username to use to access MySQL server. Default 'bugs'. + +password + Password to use to access MySQL server. + +timeout + Database connection timeout (seconds). Default 5. + +version + Bugzilla version. Specify '3.0' for Bugzilla versions 3.0 and later, + '2.18' for Bugzilla versions from 2.18 and '2.16' for versions prior + to 2.18. + +bzuser + Fallback Bugzilla user name to record comments with, if changeset + committer cannot be found as a Bugzilla user. + +bzdir + Bugzilla install directory. Used by default notify. Default + '/var/www/html/bugzilla'. + +notify + The command to run to get Bugzilla to send bug change notification + emails. 
Substitutes from a map with 3 keys, 'bzdir', 'id' (bug id) + and 'user' (committer bugzilla email). Default depends on version; + from 2.18 it is "cd %(bzdir)s && perl -T contrib/sendbugmail.pl + %(id)s %(user)s". + +regexp + Regular expression to match bug IDs in changeset commit message. + Must contain one "()" group. The default expression matches 'Bug + 1234', 'Bug no. 1234', 'Bug number 1234', 'Bugs 1234,5678', 'Bug + 1234 and 5678' and variations thereof. Matching is case insensitive. + +style + The style file to use when formatting comments. + +template + Template to use when formatting comments. Overrides style if + specified. In addition to the usual Mercurial keywords, the + extension specifies:: + + {bug} The Bugzilla bug ID. + {root} The full pathname of the Mercurial repository. + {webroot} Stripped pathname of the Mercurial repository. + {hgweb} Base URL for browsing Mercurial repositories. + + Default 'changeset {node|short} in repo {root} refers ' + 'to bug {bug}.\\ndetails:\\n\\t{desc|tabindent}' + +strip + The number of slashes to strip from the front of {root} to produce + {webroot}. Default 0. + +usermap + Path of file containing Mercurial committer ID to Bugzilla user ID + mappings. If specified, the file should contain one mapping per + line, "committer"="Bugzilla user". See also the [usermap] section. + +The [usermap] section is used to specify mappings of Mercurial +committer ID to Bugzilla user ID. See also [bugzilla].usermap. +"committer"="Bugzilla user" + +Finally, the [web] section supports one entry: + +baseurl + Base URL for browsing Mercurial repositories. Reference from + templates as {hgweb}. + +Activating the extension:: + + [extensions] + bugzilla = + + [hooks] + # run bugzilla hook on every change pulled or pushed in here + incoming.bugzilla = python:hgext.bugzilla.hook + +Example configuration: + +This example configuration is for a collection of Mercurial +repositories in /var/local/hg/repos/ used with a local Bugzilla 3.2 +installation in /opt/bugzilla-3.2. :: + + [bugzilla] + host=localhost + password=XYZZY + version=3.0 + bzuser=unknown@domain.com + bzdir=/opt/bugzilla-3.2 + template=Changeset {node|short} in {root|basename}. + {hgweb}/{webroot}/rev/{node|short}\\n + {desc}\\n + strip=5 + + [web] + baseurl=http://dev.domain.com/hg + + [usermap] + user@emaildomain.com=user.name@bugzilladomain.com + +Commits add a comment to the Bugzilla bug record of the form:: + + Changeset 3b16791d6642 in repository-name. + http://dev.domain.com/hg/repository-name/rev/3b16791d6642 + + Changeset commit comment. Bug 1234. 
+''' + +from mercurial.i18n import _ +from mercurial.node import short +from mercurial import cmdutil, templater, util +import re, time + +MySQLdb = None + +def buglist(ids): + return '(' + ','.join(map(str, ids)) + ')' + +class bugzilla_2_16(object): + '''support for bugzilla version 2.16.''' + + def __init__(self, ui): + self.ui = ui + host = self.ui.config('bugzilla', 'host', 'localhost') + user = self.ui.config('bugzilla', 'user', 'bugs') + passwd = self.ui.config('bugzilla', 'password') + db = self.ui.config('bugzilla', 'db', 'bugs') + timeout = int(self.ui.config('bugzilla', 'timeout', 5)) + usermap = self.ui.config('bugzilla', 'usermap') + if usermap: + self.ui.readconfig(usermap, sections=['usermap']) + self.ui.note(_('connecting to %s:%s as %s, password %s\n') % + (host, db, user, '*' * len(passwd))) + self.conn = MySQLdb.connect(host=host, user=user, passwd=passwd, + db=db, connect_timeout=timeout) + self.cursor = self.conn.cursor() + self.longdesc_id = self.get_longdesc_id() + self.user_ids = {} + self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s" + + def run(self, *args, **kwargs): + '''run a query.''' + self.ui.note(_('query: %s %s\n') % (args, kwargs)) + try: + self.cursor.execute(*args, **kwargs) + except MySQLdb.MySQLError: + self.ui.note(_('failed query: %s %s\n') % (args, kwargs)) + raise + + def get_longdesc_id(self): + '''get identity of longdesc field''' + self.run('select fieldid from fielddefs where name = "longdesc"') + ids = self.cursor.fetchall() + if len(ids) != 1: + raise util.Abort(_('unknown database schema')) + return ids[0][0] + + def filter_real_bug_ids(self, ids): + '''filter not-existing bug ids from list.''' + self.run('select bug_id from bugs where bug_id in %s' % buglist(ids)) + return sorted([c[0] for c in self.cursor.fetchall()]) + + def filter_unknown_bug_ids(self, node, ids): + '''filter bug ids from list that already refer to this changeset.''' + + self.run('''select bug_id from longdescs where + bug_id in %s and thetext like "%%%s%%"''' % + (buglist(ids), short(node))) + unknown = set(ids) + for (id,) in self.cursor.fetchall(): + self.ui.status(_('bug %d already knows about changeset %s\n') % + (id, short(node))) + unknown.discard(id) + return sorted(unknown) + + def notify(self, ids, committer): + '''tell bugzilla to send mail.''' + + self.ui.status(_('telling bugzilla to send mail:\n')) + (user, userid) = self.get_bugzilla_user(committer) + for id in ids: + self.ui.status(_(' bug %s\n') % id) + cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify) + bzdir = self.ui.config('bugzilla', 'bzdir', '/var/www/html/bugzilla') + try: + # Backwards-compatible with old notify string, which + # took one string. This will throw with a new format + # string. 
+ cmd = cmdfmt % id + except TypeError: + cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user} + self.ui.note(_('running notify command %s\n') % cmd) + fp = util.popen('(%s) 2>&1' % cmd) + out = fp.read() + ret = fp.close() + if ret: + self.ui.warn(out) + raise util.Abort(_('bugzilla notify command %s') % + util.explain_exit(ret)[0]) + self.ui.status(_('done\n')) + + def get_user_id(self, user): + '''look up numeric bugzilla user id.''' + try: + return self.user_ids[user] + except KeyError: + try: + userid = int(user) + except ValueError: + self.ui.note(_('looking up user %s\n') % user) + self.run('''select userid from profiles + where login_name like %s''', user) + all = self.cursor.fetchall() + if len(all) != 1: + raise KeyError(user) + userid = int(all[0][0]) + self.user_ids[user] = userid + return userid + + def map_committer(self, user): + '''map name of committer to bugzilla user name.''' + for committer, bzuser in self.ui.configitems('usermap'): + if committer.lower() == user.lower(): + return bzuser + return user + + def get_bugzilla_user(self, committer): + '''see if committer is a registered bugzilla user. Return + bugzilla username and userid if so. If not, return default + bugzilla username and userid.''' + user = self.map_committer(committer) + try: + userid = self.get_user_id(user) + except KeyError: + try: + defaultuser = self.ui.config('bugzilla', 'bzuser') + if not defaultuser: + raise util.Abort(_('cannot find bugzilla user id for %s') % + user) + userid = self.get_user_id(defaultuser) + user = defaultuser + except KeyError: + raise util.Abort(_('cannot find bugzilla user id for %s or %s') % + (user, defaultuser)) + return (user, userid) + + def add_comment(self, bugid, text, committer): + '''add comment to bug. try adding comment as committer of + changeset, otherwise as default bugzilla user.''' + (user, userid) = self.get_bugzilla_user(committer) + now = time.strftime('%Y-%m-%d %H:%M:%S') + self.run('''insert into longdescs + (bug_id, who, bug_when, thetext) + values (%s, %s, %s, %s)''', + (bugid, userid, now, text)) + self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid) + values (%s, %s, %s, %s)''', + (bugid, userid, now, self.longdesc_id)) + self.conn.commit() + +class bugzilla_2_18(bugzilla_2_16): + '''support for bugzilla 2.18 series.''' + + def __init__(self, ui): + bugzilla_2_16.__init__(self, ui) + self.default_notify = \ + "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s" + +class bugzilla_3_0(bugzilla_2_18): + '''support for bugzilla 3.0 series.''' + + def __init__(self, ui): + bugzilla_2_18.__init__(self, ui) + + def get_longdesc_id(self): + '''get identity of longdesc field''' + self.run('select id from fielddefs where name = "longdesc"') + ids = self.cursor.fetchall() + if len(ids) != 1: + raise util.Abort(_('unknown database schema')) + return ids[0][0] + +class bugzilla(object): + # supported versions of bugzilla. different versions have + # different schemas. 
+ _versions = { + '2.16': bugzilla_2_16, + '2.18': bugzilla_2_18, + '3.0': bugzilla_3_0 + } + + _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*' + r'((?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)') + + _bz = None + + def __init__(self, ui, repo): + self.ui = ui + self.repo = repo + + def bz(self): + '''return object that knows how to talk to bugzilla version in + use.''' + + if bugzilla._bz is None: + bzversion = self.ui.config('bugzilla', 'version') + try: + bzclass = bugzilla._versions[bzversion] + except KeyError: + raise util.Abort(_('bugzilla version %s not supported') % + bzversion) + bugzilla._bz = bzclass(self.ui) + return bugzilla._bz + + def __getattr__(self, key): + return getattr(self.bz(), key) + + _bug_re = None + _split_re = None + + def find_bug_ids(self, ctx): + '''find valid bug ids that are referred to in changeset + comments and that do not already have references to this + changeset.''' + + if bugzilla._bug_re is None: + bugzilla._bug_re = re.compile( + self.ui.config('bugzilla', 'regexp', bugzilla._default_bug_re), + re.IGNORECASE) + bugzilla._split_re = re.compile(r'\D+') + start = 0 + ids = set() + while True: + m = bugzilla._bug_re.search(ctx.description(), start) + if not m: + break + start = m.end() + for id in bugzilla._split_re.split(m.group(1)): + if not id: + continue + ids.add(int(id)) + if ids: + ids = self.filter_real_bug_ids(ids) + if ids: + ids = self.filter_unknown_bug_ids(ctx.node(), ids) + return ids + + def update(self, bugid, ctx): + '''update bugzilla bug with reference to changeset.''' + + def webroot(root): + '''strip leading prefix of repo root and turn into + url-safe path.''' + count = int(self.ui.config('bugzilla', 'strip', 0)) + root = util.pconvert(root) + while count > 0: + c = root.find('/') + if c == -1: + break + root = root[c + 1:] + count -= 1 + return root + + mapfile = self.ui.config('bugzilla', 'style') + tmpl = self.ui.config('bugzilla', 'template') + t = cmdutil.changeset_templater(self.ui, self.repo, + False, None, mapfile, False) + if not mapfile and not tmpl: + tmpl = _('changeset {node|short} in repo {root} refers ' + 'to bug {bug}.\ndetails:\n\t{desc|tabindent}') + if tmpl: + tmpl = templater.parsestring(tmpl, quoted=False) + t.use_template(tmpl) + self.ui.pushbuffer() + t.show(ctx, changes=ctx.changeset(), + bug=str(bugid), + hgweb=self.ui.config('web', 'baseurl'), + root=self.repo.root, + webroot=webroot(self.repo.root)) + data = self.ui.popbuffer() + self.add_comment(bugid, data, util.email(ctx.user())) + +def hook(ui, repo, hooktype, node=None, **kwargs): + '''add comment to bugzilla for each changeset that refers to a + bugzilla bug id. 
only add a comment once per bug, so same change + seen multiple times does not fill bug with duplicate data.''' + try: + import MySQLdb as mysql + global MySQLdb + MySQLdb = mysql + except ImportError, err: + raise util.Abort(_('python mysql support not available: %s') % err) + + if node is None: + raise util.Abort(_('hook type %s does not pass a changeset id') % + hooktype) + try: + bz = bugzilla(ui, repo) + ctx = repo[node] + ids = bz.find_bug_ids(ctx) + if ids: + for id in ids: + bz.update(id, ctx) + bz.notify(ids, util.email(ctx.user())) + except MySQLdb.MySQLError, err: + raise util.Abort(_('database error: %s') % err.args[1]) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo new file mode 100644 index 0000000..b4bfa04 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/bugzilla.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py new file mode 100644 index 0000000..da2fe9c --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.py @@ -0,0 +1,45 @@ +# Mercurial extension to provide the 'hg children' command +# +# Copyright 2007 by Intevation GmbH +# +# Author(s): +# Thomas Arendsen Hein +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to display child changesets''' + +from mercurial import cmdutil +from mercurial.commands import templateopts +from mercurial.i18n import _ + + +def children(ui, repo, file_=None, **opts): + """show the children of the given or working directory revision + + Print the children of the working directory's revisions. If a + revision is given via -r/--rev, the children of that revision will + be printed. If a file argument is given, revision in which the + file was last changed (after the working directory revision or the + argument to --rev if given) is printed. + """ + rev = opts.get('rev') + if file_: + ctx = repo.filectx(file_, changeid=rev) + else: + ctx = repo[rev] + + displayer = cmdutil.show_changeset(ui, repo, opts) + for cctx in ctx.children(): + displayer.show(cctx) + displayer.close() + +cmdtable = { + "children": + (children, + [('r', 'rev', '', + _('show children of the specified revision'), _('REV')), + ] + templateopts, + _('hg children [-r REV] [FILE]')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo new file mode 100644 index 0000000..05aecd2 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/children.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py new file mode 100644 index 0000000..32e481f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.py @@ -0,0 +1,198 @@ +# churn.py - create a graph of revisions count grouped by template +# +# Copyright 2006 Josef "Jeff" Sipek +# Copyright 2008 Alexander Solovyov +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +'''command to display statistics about repository history''' + +from mercurial.i18n import _ +from mercurial import patch, cmdutil, util, templater, commands +import os +import time, datetime + +def maketemplater(ui, repo, tmpl): + tmpl = templater.parsestring(tmpl, quoted=False) + try: + t = cmdutil.changeset_templater(ui, repo, False, None, None, False) + except SyntaxError, inst: + raise util.Abort(inst.args[0]) + t.use_template(tmpl) + return t + +def changedlines(ui, repo, ctx1, ctx2, fns): + added, removed = 0, 0 + fmatch = cmdutil.matchfiles(repo, fns) + diff = ''.join(patch.diff(repo, ctx1.node(), ctx2.node(), fmatch)) + for l in diff.split('\n'): + if l.startswith("+") and not l.startswith("+++ "): + added += 1 + elif l.startswith("-") and not l.startswith("--- "): + removed += 1 + return (added, removed) + +def countrate(ui, repo, amap, *pats, **opts): + """Calculate stats""" + if opts.get('dateformat'): + def getkey(ctx): + t, tz = ctx.date() + date = datetime.datetime(*time.gmtime(float(t) - tz)[:6]) + return date.strftime(opts['dateformat']) + else: + tmpl = opts.get('template', '{author|email}') + tmpl = maketemplater(ui, repo, tmpl) + def getkey(ctx): + ui.pushbuffer() + tmpl.show(ctx) + return ui.popbuffer() + + state = {'count': 0} + rate = {} + df = False + if opts.get('date'): + df = util.matchdate(opts['date']) + + m = cmdutil.match(repo, pats, opts) + def prep(ctx, fns): + rev = ctx.rev() + if df and not df(ctx.date()[0]): # doesn't match date format + return + + key = getkey(ctx) + key = amap.get(key, key) # alias remap + key = key.strip() # ignore leading and trailing spaces + if opts.get('changesets'): + rate[key] = (rate.get(key, (0,))[0] + 1, 0) + else: + parents = ctx.parents() + if len(parents) > 1: + ui.note(_('Revision %d is a merge, ignoring...\n') % (rev,)) + return + + ctx1 = parents[0] + lines = changedlines(ui, repo, ctx1, ctx, fns) + rate[key] = [r + l for r, l in zip(rate.get(key, (0, 0)), lines)] + + state['count'] += 1 + ui.progress(_('analyzing'), state['count'], total=len(repo)) + + for ctx in cmdutil.walkchangerevs(repo, m, opts, prep): + continue + + ui.progress(_('analyzing'), None) + + return rate + + +def churn(ui, repo, *pats, **opts): + '''histogram of changes to the repository + + This command will display a histogram representing the number + of changed lines or revisions, grouped according to the given + template. The default template will group changes by author. + The --dateformat option may be used to group the results by + date instead. + + Statistics are based on the number of changed lines, or + alternatively the number of matching revisions if the + --changesets option is specified. + + Examples:: + + # display count of changed lines for every committer + hg churn -t '{author|email}' + + # display daily activity graph + hg churn -f '%H' -s -c + + # display activity of developers by month + hg churn -f '%Y-%m' -s -c + + # display count of lines changed in every year + hg churn -f '%Y' -s + + It is possible to map alternate email addresses to a main address + by providing a file using the following format:: + + = + + Such a file may be specified with the --aliases option, otherwise + a .hgchurn file will be looked for in the working directory root. 
+ ''' + def pad(s, l): + return (s + " " * l)[:l] + + amap = {} + aliases = opts.get('aliases') + if not aliases and os.path.exists(repo.wjoin('.hgchurn')): + aliases = repo.wjoin('.hgchurn') + if aliases: + for l in open(aliases, "r"): + try: + alias, actual = l.split('=' in l and '=' or None, 1) + amap[alias.strip()] = actual.strip() + except ValueError: + l = l.strip() + if l: + ui.warn(_("skipping malformed alias: %s\n" % l)) + continue + + rate = countrate(ui, repo, amap, *pats, **opts).items() + if not rate: + return + + sortkey = ((not opts.get('sort')) and (lambda x: -sum(x[1])) or None) + rate.sort(key=sortkey) + + # Be careful not to have a zero maxcount (issue833) + maxcount = float(max(sum(v) for k, v in rate)) or 1.0 + maxname = max(len(k) for k, v in rate) + + ttywidth = ui.termwidth() + ui.debug("assuming %i character terminal\n" % ttywidth) + width = ttywidth - maxname - 2 - 2 - 2 + + if opts.get('diffstat'): + width -= 15 + def format(name, diffstat): + added, removed = diffstat + return "%s %15s %s%s\n" % (pad(name, maxname), + '+%d/-%d' % (added, removed), + ui.label('+' * charnum(added), + 'diffstat.inserted'), + ui.label('-' * charnum(removed), + 'diffstat.deleted')) + else: + width -= 6 + def format(name, count): + return "%s %6d %s\n" % (pad(name, maxname), sum(count), + '*' * charnum(sum(count))) + + def charnum(count): + return int(round(count * width / maxcount)) + + for name, count in rate: + ui.write(format(name, count)) + + +cmdtable = { + "churn": + (churn, + [('r', 'rev', [], + _('count rate for the specified revision or range'), _('REV')), + ('d', 'date', '', + _('count rate for revisions matching date spec'), _('DATE')), + ('t', 'template', '{author|email}', + _('template to group changesets'), _('TEMPLATE')), + ('f', 'dateformat', '', + _('strftime-compatible format for grouping by date'), _('FORMAT')), + ('c', 'changesets', False, _('count rate by number of changesets')), + ('s', 'sort', False, _('sort by key (default: sort by count)')), + ('', 'diffstat', False, _('display added/removed lines separately')), + ('', 'aliases', '', + _('file with email aliases'), _('FILE')), + ] + commands.walkopts, + _("hg churn [-d DATE] [-r REV] [--aliases FILE] [FILE]")), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo new file mode 100644 index 0000000..90d9a2d Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/churn.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py new file mode 100644 index 0000000..df78f8d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.py @@ -0,0 +1,319 @@ +# color.py color output for the status and qseries commands +# +# Copyright (C) 2007 Kevin Christen +# +# This program is free software; you can redistribute it and/or modify it +# under the terms of the GNU General Public License as published by the +# Free Software Foundation; either version 2 of the License, or (at your +# option) any later version. +# +# This program is distributed in the hope that it will be useful, but +# WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General +# Public License for more details. 
+# +# You should have received a copy of the GNU General Public License along +# with this program; if not, write to the Free Software Foundation, Inc., +# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. + +'''colorize output from some commands + +This extension modifies the status and resolve commands to add color to their +output to reflect file status, the qseries command to add color to reflect +patch status (applied, unapplied, missing), and to diff-related +commands to highlight additions, removals, diff headers, and trailing +whitespace. + +Other effects in addition to color, like bold and underlined text, are +also available. Effects are rendered with the ECMA-48 SGR control +function (aka ANSI escape codes). This module also provides the +render_text function, which can be used to add effects to any text. + +Default effects may be overridden from your configuration file:: + + [color] + status.modified = blue bold underline red_background + status.added = green bold + status.removed = red bold blue_background + status.deleted = cyan bold underline + status.unknown = magenta bold underline + status.ignored = black bold + + # 'none' turns off all effects + status.clean = none + status.copied = none + + qseries.applied = blue bold underline + qseries.unapplied = black bold + qseries.missing = red bold + + diff.diffline = bold + diff.extended = cyan bold + diff.file_a = red bold + diff.file_b = green bold + diff.hunk = magenta + diff.deleted = red + diff.inserted = green + diff.changed = white + diff.trailingwhitespace = bold red_background + + resolve.unresolved = red bold + resolve.resolved = green bold + + bookmarks.current = green + + branches.active = none + branches.closed = black bold + branches.current = green + branches.inactive = none + +The color extension will try to detect whether to use ANSI codes or +Win32 console APIs, unless it is made explicit:: + + [color] + mode = ansi + +Any value other than 'ansi', 'win32', or 'auto' will disable color. + +''' + +import os + +from mercurial import commands, dispatch, extensions, ui as uimod, util +from mercurial.i18n import _ + +# start and stop parameters for effects +_effects = {'none': 0, 'black': 30, 'red': 31, 'green': 32, 'yellow': 33, + 'blue': 34, 'magenta': 35, 'cyan': 36, 'white': 37, 'bold': 1, + 'italic': 3, 'underline': 4, 'inverse': 7, + 'black_background': 40, 'red_background': 41, + 'green_background': 42, 'yellow_background': 43, + 'blue_background': 44, 'purple_background': 45, + 'cyan_background': 46, 'white_background': 47} + +_styles = {'grep.match': 'red bold', + 'branches.active': 'none', + 'branches.closed': 'black bold', + 'branches.current': 'green', + 'branches.inactive': 'none', + 'diff.changed': 'white', + 'diff.deleted': 'red', + 'diff.diffline': 'bold', + 'diff.extended': 'cyan bold', + 'diff.file_a': 'red bold', + 'diff.file_b': 'green bold', + 'diff.hunk': 'magenta', + 'diff.inserted': 'green', + 'diff.trailingwhitespace': 'bold red_background', + 'diffstat.deleted': 'red', + 'diffstat.inserted': 'green', + 'log.changeset': 'yellow', + 'resolve.resolved': 'green bold', + 'resolve.unresolved': 'red bold', + 'status.added': 'green bold', + 'status.clean': 'none', + 'status.copied': 'none', + 'status.deleted': 'cyan bold underline', + 'status.ignored': 'black bold', + 'status.modified': 'blue bold', + 'status.removed': 'red bold', + 'status.unknown': 'magenta bold underline'} + + +def render_effects(text, effects): + 'Wrap text in commands to turn on each effect.' 
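    # Worked example against the _effects table above: for sample text
    # 'added' with label effects 'green bold', the start sequence becomes
    # '\033[0;32;1m' ('none', 'green', 'bold') and the stop sequence is
    # '\033[0m', so render_effects('added', 'green bold') returns
    # '\033[0;32;1madded\033[0m'.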
+ if not text: + return text + start = [str(_effects[e]) for e in ['none'] + effects.split()] + start = '\033[' + ';'.join(start) + 'm' + stop = '\033[' + str(_effects['none']) + 'm' + return ''.join([start, text, stop]) + +def extstyles(): + for name, ext in extensions.extensions(): + _styles.update(getattr(ext, 'colortable', {})) + +def configstyles(ui): + for status, cfgeffects in ui.configitems('color'): + if '.' not in status: + continue + cfgeffects = ui.configlist('color', status) + if cfgeffects: + good = [] + for e in cfgeffects: + if e in _effects: + good.append(e) + else: + ui.warn(_("ignoring unknown color/effect %r " + "(configured in color.%s)\n") + % (e, status)) + _styles[status] = ' '.join(good) + +class colorui(uimod.ui): + def popbuffer(self, labeled=False): + if labeled: + return ''.join(self.label(a, label) for a, label + in self._buffers.pop()) + return ''.join(a for a, label in self._buffers.pop()) + + _colormode = 'ansi' + def write(self, *args, **opts): + label = opts.get('label', '') + if self._buffers: + self._buffers[-1].extend([(str(a), label) for a in args]) + elif self._colormode == 'win32': + for a in args: + win32print(a, super(colorui, self).write, **opts) + else: + return super(colorui, self).write( + *[self.label(str(a), label) for a in args], **opts) + + def write_err(self, *args, **opts): + label = opts.get('label', '') + if self._colormode == 'win32': + for a in args: + win32print(a, super(colorui, self).write_err, **opts) + else: + return super(colorui, self).write_err( + *[self.label(str(a), label) for a in args], **opts) + + def label(self, msg, label): + effects = [] + for l in label.split(): + s = _styles.get(l, '') + if s: + effects.append(s) + effects = ''.join(effects) + if effects: + return '\n'.join([render_effects(s, effects) + for s in msg.split('\n')]) + return msg + + +def uisetup(ui): + if ui.plain(): + return + mode = ui.config('color', 'mode', 'auto') + if mode == 'auto': + if os.name == 'nt' and 'TERM' not in os.environ: + # looks line a cmd.exe console, use win32 API or nothing + mode = w32effects and 'win32' or 'none' + else: + mode = 'ansi' + if mode == 'win32': + if w32effects is None: + # only warn if color.mode is explicitly set to win32 + ui.warn(_('win32console not found, please install pywin32\n')) + return + _effects.update(w32effects) + elif mode != 'ansi': + return + def colorcmd(orig, ui_, opts, cmd, cmdfunc): + coloropt = opts['color'] + auto = coloropt == 'auto' + always = util.parsebool(coloropt) + if (always or + (always is None and + (auto and (os.environ.get('TERM') != 'dumb' and ui_.formatted())))): + colorui._colormode = mode + colorui.__bases__ = (ui_.__class__,) + ui_.__class__ = colorui + extstyles() + configstyles(ui_) + return orig(ui_, opts, cmd, cmdfunc) + extensions.wrapfunction(dispatch, '_runcommand', colorcmd) + +def extsetup(ui): + commands.globalopts.append( + ('', 'color', 'auto', + # i18n: 'always', 'auto', and 'never' are keywords and should + # not be translated + _("when to colorize (boolean, always, auto, or never)"), + _('TYPE'))) + +try: + import re, pywintypes, win32console as win32c + + # http://msdn.microsoft.com/en-us/library/ms682088%28VS.85%29.aspx + w32effects = { + 'none': -1, + 'black': 0, + 'red': win32c.FOREGROUND_RED, + 'green': win32c.FOREGROUND_GREEN, + 'yellow': win32c.FOREGROUND_RED | win32c.FOREGROUND_GREEN, + 'blue': win32c.FOREGROUND_BLUE, + 'magenta': win32c.FOREGROUND_BLUE | win32c.FOREGROUND_RED, + 'cyan': win32c.FOREGROUND_BLUE | win32c.FOREGROUND_GREEN, + 'white': 
(win32c.FOREGROUND_RED | win32c.FOREGROUND_GREEN | + win32c.FOREGROUND_BLUE), + 'bold': win32c.FOREGROUND_INTENSITY, + 'black_background': 0x100, # unused value > 0x0f + 'red_background': win32c.BACKGROUND_RED, + 'green_background': win32c.BACKGROUND_GREEN, + 'yellow_background': win32c.BACKGROUND_RED | win32c.BACKGROUND_GREEN, + 'blue_background': win32c.BACKGROUND_BLUE, + 'purple_background': win32c.BACKGROUND_BLUE | win32c.BACKGROUND_RED, + 'cyan_background': win32c.BACKGROUND_BLUE | win32c.BACKGROUND_GREEN, + 'white_background': (win32c.BACKGROUND_RED | win32c.BACKGROUND_GREEN | + win32c.BACKGROUND_BLUE), + 'bold_background': win32c.BACKGROUND_INTENSITY, + 'underline': win32c.COMMON_LVB_UNDERSCORE, # double-byte charsets only + 'inverse': win32c.COMMON_LVB_REVERSE_VIDEO, # double-byte charsets only + } + + passthrough = set([win32c.FOREGROUND_INTENSITY, + win32c.BACKGROUND_INTENSITY, + win32c.COMMON_LVB_UNDERSCORE, + win32c.COMMON_LVB_REVERSE_VIDEO]) + + try: + stdout = win32c.GetStdHandle(win32c.STD_OUTPUT_HANDLE) + if stdout is None: + raise ImportError() + origattr = stdout.GetConsoleScreenBufferInfo()['Attributes'] + except pywintypes.error: + # stdout may be defined but not support + # GetConsoleScreenBufferInfo(), when called from subprocess or + # redirected. + raise ImportError() + ansire = re.compile('\033\[([^m]*)m([^\033]*)(.*)', re.MULTILINE | re.DOTALL) + + def win32print(text, orig, **opts): + label = opts.get('label', '') + attr = origattr + + def mapcolor(val, attr): + if val == -1: + return origattr + elif val in passthrough: + return attr | val + elif val > 0x0f: + return (val & 0x70) | (attr & 0x8f) + else: + return (val & 0x07) | (attr & 0xf8) + + # determine console attributes based on labels + for l in label.split(): + style = _styles.get(l, '') + for effect in style.split(): + attr = mapcolor(w32effects[effect], attr) + + # hack to ensure regexp finds data + if not text.startswith('\033['): + text = '\033[m' + text + + # Look for ANSI-like codes embedded in text + m = re.match(ansire, text) + while m: + for sattr in m.group(1).split(';'): + if sattr: + attr = mapcolor(int(sattr), attr) + stdout.SetConsoleTextAttribute(attr) + orig(m.group(2), **opts) + m = re.match(ansire, m.group(3)) + + # Explicity reset original attributes + stdout.SetConsoleTextAttribute(origattr) + +except ImportError: + w32effects = None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo new file mode 100644 index 0000000..2f131ee Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/color.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py new file mode 100644 index 0000000..be7aca5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.py @@ -0,0 +1,321 @@ +# convert.py Foreign SCM converter +# +# Copyright 2005-2007 Matt Mackall +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''import revisions from foreign VCS repositories into Mercurial''' + +import convcmd +import cvsps +import subversion +from mercurial import commands +from mercurial.i18n import _ + +# Commands definition was moved elsewhere to ease demandload job. + +def convert(ui, src, dest=None, revmapfile=None, **opts): + """convert a foreign SCM repository to a Mercurial one. 
+ + Accepted source formats [identifiers]: + + - Mercurial [hg] + - CVS [cvs] + - Darcs [darcs] + - git [git] + - Subversion [svn] + - Monotone [mtn] + - GNU Arch [gnuarch] + - Bazaar [bzr] + - Perforce [p4] + + Accepted destination formats [identifiers]: + + - Mercurial [hg] + - Subversion [svn] (history on branches is not preserved) + + If no revision is given, all revisions will be converted. + Otherwise, convert will only import up to the named revision + (given in a format understood by the source). + + If no destination directory name is specified, it defaults to the + basename of the source with ``-hg`` appended. If the destination + repository doesn't exist, it will be created. + + By default, all sources except Mercurial will use --branchsort. + Mercurial uses --sourcesort to preserve original revision numbers + order. Sort modes have the following effects: + + --branchsort convert from parent to child revision when possible, + which means branches are usually converted one after + the other. It generates more compact repositories. + + --datesort sort revisions by date. Converted repositories have + good-looking changelogs but are often an order of + magnitude larger than the same ones generated by + --branchsort. + + --sourcesort try to preserve source revisions order, only + supported by Mercurial sources. + + If isn't given, it will be put in a default location + (/.hg/shamap by default). The is a simple text file + that maps each source commit ID to the destination ID for that + revision, like so:: + + + + If the file doesn't exist, it's automatically created. It's + updated on each commit copied, so :hg:`convert` can be interrupted + and can be run repeatedly to copy new commits. + + The authormap is a simple text file that maps each source commit + author to a destination commit author. It is handy for source SCMs + that use unix logins to identify authors (eg: CVS). One line per + author mapping and the line format is:: + + source author = destination author + + Empty lines and lines starting with a ``#`` are ignored. + + The filemap is a file that allows filtering and remapping of files + and directories. Each line can contain one of the following + directives:: + + include path/to/file-or-dir + + exclude path/to/file-or-dir + + rename path/to/source path/to/destination + + Comment lines start with ``#``. A specified path matches if it + equals the full relative name of a file or one of its parent + directories. The ``include`` or ``exclude`` directive with the + longest matching path applies, so line order does not matter. + + The ``include`` directive causes a file, or all files under a + directory, to be included in the destination repository, and the + exclusion of all other files and directories not explicitly + included. The ``exclude`` directive causes files or directories to + be omitted. The ``rename`` directive renames a file or directory if + it is converted. To rename from a subdirectory into the root of + the repository, use ``.`` as the path to rename to. + + The splicemap is a file that allows insertion of synthetic + history, letting you specify the parents of a revision. This is + useful if you want to e.g. give a Subversion merge two parents, or + graft two disconnected series of history together. 
Each entry + contains a key, followed by a space, followed by one or two + comma-separated values:: + + key parent1, parent2 + + The key is the revision ID in the source + revision control system whose parents should be modified (same + format as a key in .hg/shamap). The values are the revision IDs + (in either the source or destination revision control system) that + should be used as the new parents for that node. For example, if + you have merged "release-1.0" into "trunk", then you should + specify the revision on "trunk" as the first parent and the one on + the "release-1.0" branch as the second. + + The branchmap is a file that allows you to rename a branch when it is + being brought in from whatever external repository. When used in + conjunction with a splicemap, it allows for a powerful combination + to help fix even the most badly mismanaged repositories and turn them + into nicely structured Mercurial repositories. The branchmap contains + lines of the form:: + + original_branch_name new_branch_name + + where "original_branch_name" is the name of the branch in the + source repository, and "new_branch_name" is the name of the branch + is the destination repository. No whitespace is allowed in the + branch names. This can be used to (for instance) move code in one + repository from "default" to a named branch. + + Mercurial Source + '''''''''''''''' + + --config convert.hg.ignoreerrors=False (boolean) + ignore integrity errors when reading. Use it to fix Mercurial + repositories with missing revlogs, by converting from and to + Mercurial. + --config convert.hg.saverev=False (boolean) + store original revision ID in changeset (forces target IDs to + change) + --config convert.hg.startrev=0 (hg revision identifier) + convert start revision and its descendants + + CVS Source + '''''''''' + + CVS source will use a sandbox (i.e. a checked-out copy) from CVS + to indicate the starting point of what will be converted. Direct + access to the repository files is not needed, unless of course the + repository is :local:. The conversion uses the top level directory + in the sandbox to find the CVS repository, and then uses CVS rlog + commands to find files to convert. This means that unless a + filemap is given, all files under the starting directory will be + converted, and that any directory reorganization in the CVS + sandbox is ignored. + + The options shown are the defaults. + + --config convert.cvsps.cache=True (boolean) + Set to False to disable remote log caching, for testing and + debugging purposes. + --config convert.cvsps.fuzz=60 (integer) + Specify the maximum time (in seconds) that is allowed between + commits with identical user and log message in a single + changeset. When very large files were checked in as part of a + changeset then the default may not be long enough. + --config convert.cvsps.mergeto='{{mergetobranch ([-\\w]+)}}' + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + insert a dummy revision merging the branch on which this log + message occurs to the branch indicated in the regex. + --config convert.cvsps.mergefrom='{{mergefrombranch ([-\\w]+)}}' + Specify a regular expression to which commit log messages are + matched. If a match occurs, then the conversion process will + add the most recent revision on the branch indicated in the + regex as the second parent of the changeset. + --config hook.cvslog + Specify a Python function to be called at the end of gathering + the CVS log. 
The function is passed a list with the log entries, + and can modify the entries in-place, or add or delete them. + --config hook.cvschangesets + Specify a Python function to be called after the changesets + are calculated from the the CVS log. The function is passed + a list with the changeset entries, and can modify the changesets + in-place, or add or delete them. + + An additional "debugcvsps" Mercurial command allows the builtin + changeset merging code to be run without doing a conversion. Its + parameters and output are similar to that of cvsps 2.1. Please see + the command help for more details. + + Subversion Source + ''''''''''''''''' + + Subversion source detects classical trunk/branches/tags layouts. + By default, the supplied "svn://repo/path/" source URL is + converted as a single branch. If "svn://repo/path/trunk" exists it + replaces the default branch. If "svn://repo/path/branches" exists, + its subdirectories are listed as possible branches. If + "svn://repo/path/tags" exists, it is looked for tags referencing + converted branches. Default "trunk", "branches" and "tags" values + can be overridden with following options. Set them to paths + relative to the source URL, or leave them blank to disable auto + detection. + + --config convert.svn.branches=branches (directory name) + specify the directory containing branches + --config convert.svn.tags=tags (directory name) + specify the directory containing tags + --config convert.svn.trunk=trunk (directory name) + specify the name of the trunk branch + + Source history can be retrieved starting at a specific revision, + instead of being integrally converted. Only single branch + conversions are supported. + + --config convert.svn.startrev=0 (svn revision number) + specify start Subversion revision. + + Perforce Source + ''''''''''''''' + + The Perforce (P4) importer can be given a p4 depot path or a + client specification as source. It will convert all files in the + source to a flat Mercurial repository, ignoring labels, branches + and integrations. Note that when a depot path is given you then + usually should specify a target directory, because otherwise the + target may be named ...-hg. + + It is possible to limit the amount of source history to be + converted by specifying an initial Perforce revision. + + --config convert.p4.startrev=0 (perforce changelist number) + specify initial Perforce revision. + + Mercurial Destination + ''''''''''''''''''''' + + --config convert.hg.clonebranches=False (boolean) + dispatch source branches in separate clones. + --config convert.hg.tagsbranch=default (branch name) + tag revisions branch name + --config convert.hg.usebranchnames=True (boolean) + preserve branch names + + """ + return convcmd.convert(ui, src, dest, revmapfile, **opts) + +def debugsvnlog(ui, **opts): + return subversion.debugsvnlog(ui, **opts) + +def debugcvsps(ui, *args, **opts): + '''create changeset information from CVS + + This command is intended as a debugging tool for the CVS to + Mercurial converter, and can be used as a direct replacement for + cvsps. 
+ + Hg debugcvsps reads the CVS rlog for current directory (or any + named directory) in the CVS repository, and converts the log to a + series of changesets based on matching commit log entries and + dates.''' + return cvsps.debugcvsps(ui, *args, **opts) + +commands.norepo += " convert debugsvnlog debugcvsps" + +cmdtable = { + "convert": + (convert, + [('', 'authors', '', + _('username mapping filename (DEPRECATED, use --authormap instead)'), + _('FILE')), + ('s', 'source-type', '', + _('source repository type'), _('TYPE')), + ('d', 'dest-type', '', + _('destination repository type'), _('TYPE')), + ('r', 'rev', '', + _('import up to target revision REV'), _('REV')), + ('A', 'authormap', '', + _('remap usernames using this file'), _('FILE')), + ('', 'filemap', '', + _('remap file names using contents of file'), _('FILE')), + ('', 'splicemap', '', + _('splice synthesized history into place'), _('FILE')), + ('', 'branchmap', '', + _('change branch names while converting'), _('FILE')), + ('', 'branchsort', None, _('try to sort changesets by branches')), + ('', 'datesort', None, _('try to sort changesets by date')), + ('', 'sourcesort', None, _('preserve source changesets order'))], + _('hg convert [OPTION]... SOURCE [DEST [REVMAP]]')), + "debugsvnlog": + (debugsvnlog, + [], + 'hg debugsvnlog'), + "debugcvsps": + (debugcvsps, + [ + # Main options shared with cvsps-2.1 + ('b', 'branches', [], _('only return changes on specified branches')), + ('p', 'prefix', '', _('prefix to remove from file names')), + ('r', 'revisions', [], + _('only return changes after or between specified tags')), + ('u', 'update-cache', None, _("update cvs log cache")), + ('x', 'new-cache', None, _("create new cvs log cache")), + ('z', 'fuzz', 60, _('set commit time fuzz in seconds')), + ('', 'root', '', _('specify cvsroot')), + # Options specific to builtin cvsps + ('', 'parents', '', _('show parent changesets')), + ('', 'ancestors', '', _('show current changeset in ancestor branches')), + # Options that are ignored for compatibility with cvsps-2.1 + ('A', 'cvs-direct', None, _('ignored for compatibility')), + ], + _('hg debugcvsps [OPTION]... [PATH]...')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo new file mode 100644 index 0000000..892b438 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/__init__.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py new file mode 100644 index 0000000..cc16258 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.py @@ -0,0 +1,260 @@ +# bzr.py - bzr support for the convert extension +# +# Copyright 2008, 2009 Marek Kubica and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +# This module is for handling 'bzr', that was formerly known as Bazaar-NG; +# it cannot access 'bar' repositories, but they were never used very much + +import os +from mercurial import demandimport +# these do not work with demandimport, blacklist +demandimport.ignore.extend([ + 'bzrlib.transactions', + 'bzrlib.urlutils', + 'ElementPath', + ]) + +from mercurial.i18n import _ +from mercurial import util +from common import NoRepo, commit, converter_source + +try: + # bazaar imports + from bzrlib import branch, revision, errors + from bzrlib.revisionspec import RevisionSpec +except ImportError: + pass + +supportedkinds = ('file', 'symlink') + +class bzr_source(converter_source): + """Reads Bazaar repositories by using the Bazaar Python libraries""" + + def __init__(self, ui, path, rev=None): + super(bzr_source, self).__init__(ui, path, rev=rev) + + if not os.path.exists(os.path.join(path, '.bzr')): + raise NoRepo(_('%s does not look like a Bazaar repository') + % path) + + try: + # access bzrlib stuff + branch + except NameError: + raise NoRepo(_('Bazaar modules could not be loaded')) + + path = os.path.abspath(path) + self._checkrepotype(path) + self.branch = branch.Branch.open(path) + self.sourcerepo = self.branch.repository + self._parentids = {} + + def _checkrepotype(self, path): + # Lightweight checkouts detection is informational but probably + # fragile at API level. It should not terminate the conversion. + try: + from bzrlib import bzrdir + dir = bzrdir.BzrDir.open_containing(path)[0] + try: + tree = dir.open_workingtree(recommend_upgrade=False) + branch = tree.branch + except (errors.NoWorkingTree, errors.NotLocalUrl): + tree = None + branch = dir.open_branch() + if (tree is not None and tree.bzrdir.root_transport.base != + branch.bzrdir.root_transport.base): + self.ui.warn(_('warning: lightweight checkouts may cause ' + 'conversion failures, try with a regular ' + 'branch instead.\n')) + except: + self.ui.note(_('bzr source type could not be determined\n')) + + def before(self): + """Before the conversion begins, acquire a read lock + for all the operations that might need it. Fortunately + read locks don't block other reads or writes to the + repository, so this shouldn't have any impact on the usage of + the source repository. 
+ + The alternative would be locking on every operation that + needs locks (there are currently two: getting the file and + getting the parent map) and releasing immediately after, + but this approach can take even 40% longer.""" + self.sourcerepo.lock_read() + + def after(self): + self.sourcerepo.unlock() + + def getheads(self): + if not self.rev: + return [self.branch.last_revision()] + try: + r = RevisionSpec.from_string(self.rev) + info = r.in_history(self.branch) + except errors.BzrError: + raise util.Abort(_('%s is not a valid revision in current branch') + % self.rev) + return [info.rev_id] + + def getfile(self, name, rev): + revtree = self.sourcerepo.revision_tree(rev) + fileid = revtree.path2id(name.decode(self.encoding or 'utf-8')) + kind = None + if fileid is not None: + kind = revtree.kind(fileid) + if kind not in supportedkinds: + # the file is not available anymore - was deleted + raise IOError(_('%s is not available in %s anymore') % + (name, rev)) + mode = self._modecache[(name, rev)] + if kind == 'symlink': + target = revtree.get_symlink_target(fileid) + if target is None: + raise util.Abort(_('%s.%s symlink has no target') + % (name, rev)) + return target, mode + else: + sio = revtree.get_file(fileid) + return sio.read(), mode + + def getchanges(self, version): + # set up caches: modecache and revtree + self._modecache = {} + self._revtree = self.sourcerepo.revision_tree(version) + # get the parentids from the cache + parentids = self._parentids.pop(version) + # only diff against first parent id + prevtree = self.sourcerepo.revision_tree(parentids[0]) + return self._gettreechanges(self._revtree, prevtree) + + def getcommit(self, version): + rev = self.sourcerepo.get_revision(version) + # populate parent id cache + if not rev.parent_ids: + parents = [] + self._parentids[version] = (revision.NULL_REVISION,) + else: + parents = self._filterghosts(rev.parent_ids) + self._parentids[version] = parents + + return commit(parents=parents, + date='%d %d' % (rev.timestamp, -rev.timezone), + author=self.recode(rev.committer), + # bzr returns bytestrings or unicode, depending on the content + desc=self.recode(rev.message), + rev=version) + + def gettags(self): + if not self.branch.supports_tags(): + return {} + tagdict = self.branch.tags.get_tag_dict() + bytetags = {} + for name, rev in tagdict.iteritems(): + bytetags[self.recode(name)] = rev + return bytetags + + def getchangedfiles(self, rev, i): + self._modecache = {} + curtree = self.sourcerepo.revision_tree(rev) + if i is not None: + parentid = self._parentids[rev][i] + else: + # no parent id, get the empty revision + parentid = revision.NULL_REVISION + + prevtree = self.sourcerepo.revision_tree(parentid) + changes = [e[0] for e in self._gettreechanges(curtree, prevtree)[0]] + return changes + + def _gettreechanges(self, current, origin): + revid = current._revision_id + changes = [] + renames = {} + for (fileid, paths, changed_content, versioned, parent, name, + kind, executable) in current.iter_changes(origin): + + if paths[0] == u'' or paths[1] == u'': + # ignore changes to tree root + continue + + # bazaar tracks directories, mercurial does not, so + # we have to rename the directory contents + if kind[1] == 'directory': + if kind[0] not in (None, 'directory'): + # Replacing 'something' with a directory, record it + # so it can be removed. 
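                        # Recording (old path, revid) is how the removal
                        # surfaces later: getfile() above finds no supported
                        # kind for that path in this revision and raises
                        # IOError - the convention converter_source.getfile()
                        # in common.py documents for deleted files.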
+ changes.append((self.recode(paths[0]), revid)) + + if None not in paths and paths[0] != paths[1]: + # neither an add nor an delete - a move + # rename all directory contents manually + subdir = origin.inventory.path2id(paths[0]) + # get all child-entries of the directory + for name, entry in origin.inventory.iter_entries(subdir): + # hg does not track directory renames + if entry.kind == 'directory': + continue + frompath = self.recode(paths[0] + '/' + name) + topath = self.recode(paths[1] + '/' + name) + # register the files as changed + changes.append((frompath, revid)) + changes.append((topath, revid)) + # add to mode cache + mode = ((entry.executable and 'x') + or (entry.kind == 'symlink' and 's') + or '') + self._modecache[(topath, revid)] = mode + # register the change as move + renames[topath] = frompath + + # no futher changes, go to the next change + continue + + # we got unicode paths, need to convert them + path, topath = [self.recode(part) for part in paths] + + if topath is None: + # file deleted + changes.append((path, revid)) + continue + + # renamed + if path and path != topath: + renames[topath] = path + changes.append((path, revid)) + + # populate the mode cache + kind, executable = [e[1] for e in (kind, executable)] + mode = ((executable and 'x') or (kind == 'symlink' and 'l') + or '') + self._modecache[(topath, revid)] = mode + changes.append((topath, revid)) + + return changes, renames + + def _filterghosts(self, ids): + """Filters out ghost revisions which hg does not support, see + + """ + parentmap = self.sourcerepo.get_parent_map(ids) + parents = tuple([parent for parent in ids if parent in parentmap]) + return parents + + def recode(self, s, encoding=None): + """This version of recode tries to encode unicode to bytecode, + and preferably using the UTF-8 codec. + Other types than Unicode are silently returned, this is by + intention, e.g. the None-type is not going to be encoded but instead + just passed through + """ + if not encoding: + encoding = self.encoding or 'utf-8' + + if isinstance(s, unicode): + return s.encode(encoding) + else: + # leave it alone + return s diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo new file mode 100644 index 0000000..ab47e99 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/bzr.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py new file mode 100644 index 0000000..fb3865f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.py @@ -0,0 +1,389 @@ +# common.py - common code for the convert extension +# +# Copyright 2005-2009 Matt Mackall and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
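# The encodeargs()/decodeargs() helpers below round-trip arbitrary Python
# values through pickle plus newline-free base64, so a structured value can
# travel as a single plain string.  A hypothetical round trip:
#
#   decodeargs(encodeargs(['log', 42])) == ['log', 42]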
+ +import base64, errno +import os +import cPickle as pickle +from mercurial import util +from mercurial.i18n import _ + +def encodeargs(args): + def encodearg(s): + lines = base64.encodestring(s) + lines = [l.splitlines()[0] for l in lines] + return ''.join(lines) + + s = pickle.dumps(args) + return encodearg(s) + +def decodeargs(s): + s = base64.decodestring(s) + return pickle.loads(s) + +class MissingTool(Exception): + pass + +def checktool(exe, name=None, abort=True): + name = name or exe + if not util.find_exe(exe): + exc = abort and util.Abort or MissingTool + raise exc(_('cannot find required "%s" tool') % name) + +class NoRepo(Exception): + pass + +SKIPREV = 'SKIP' + +class commit(object): + def __init__(self, author, date, desc, parents, branch=None, rev=None, + extra={}, sortkey=None): + self.author = author or 'unknown' + self.date = date or '0 0' + self.desc = desc + self.parents = parents + self.branch = branch + self.rev = rev + self.extra = extra + self.sortkey = sortkey + +class converter_source(object): + """Conversion source interface""" + + def __init__(self, ui, path=None, rev=None): + """Initialize conversion source (or raise NoRepo("message") + exception if path is not a valid repository)""" + self.ui = ui + self.path = path + self.rev = rev + + self.encoding = 'utf-8' + + def before(self): + pass + + def after(self): + pass + + def setrevmap(self, revmap): + """set the map of already-converted revisions""" + pass + + def getheads(self): + """Return a list of this repository's heads""" + raise NotImplementedError() + + def getfile(self, name, rev): + """Return a pair (data, mode) where data is the file content + as a string and mode one of '', 'x' or 'l'. rev is the + identifier returned by a previous call to getchanges(). Raise + IOError to indicate that name was deleted in rev. + """ + raise NotImplementedError() + + def getchanges(self, version): + """Returns a tuple of (files, copies). + + files is a sorted list of (filename, id) tuples for all files + changed between version and its first parent returned by + getcommit(). id is the source revision id of the file. + + copies is a dictionary of dest: source + """ + raise NotImplementedError() + + def getcommit(self, version): + """Return the commit object for version""" + raise NotImplementedError() + + def gettags(self): + """Return the tags as a dictionary of name: revision + + Tag names must be UTF-8 strings. + """ + raise NotImplementedError() + + def recode(self, s, encoding=None): + if not encoding: + encoding = self.encoding or 'utf-8' + + if isinstance(s, unicode): + return s.encode("utf-8") + try: + return s.decode(encoding).encode("utf-8") + except: + try: + return s.decode("latin-1").encode("utf-8") + except: + return s.decode(encoding, "replace").encode("utf-8") + + def getchangedfiles(self, rev, i): + """Return the files changed by rev compared to parent[i]. + + i is an index selecting one of the parents of rev. The return + value should be the list of files that are different in rev and + this parent. + + If rev has no parents, i is None. + + This function is only needed to support --filemap + """ + raise NotImplementedError() + + def converted(self, rev, sinkrev): + '''Notify the source that a revision has been converted.''' + pass + + def hasnativeorder(self): + """Return true if this source has a meaningful, native revision + order. For instance, Mercurial revisions are store sequentially + while there is no such global ordering with Darcs. 
+ """ + return False + + def lookuprev(self, rev): + """If rev is a meaningful revision reference in source, return + the referenced identifier in the same format used by getcommit(). + return None otherwise. + """ + return None + +class converter_sink(object): + """Conversion sink (target) interface""" + + def __init__(self, ui, path): + """Initialize conversion sink (or raise NoRepo("message") + exception if path is not a valid repository) + + created is a list of paths to remove if a fatal error occurs + later""" + self.ui = ui + self.path = path + self.created = [] + + def getheads(self): + """Return a list of this repository's heads""" + raise NotImplementedError() + + def revmapfile(self): + """Path to a file that will contain lines + source_rev_id sink_rev_id + mapping equivalent revision identifiers for each system.""" + raise NotImplementedError() + + def authorfile(self): + """Path to a file that will contain lines + srcauthor=dstauthor + mapping equivalent authors identifiers for each system.""" + return None + + def putcommit(self, files, copies, parents, commit, source, revmap): + """Create a revision with all changed files listed in 'files' + and having listed parents. 'commit' is a commit object + containing at a minimum the author, date, and message for this + changeset. 'files' is a list of (path, version) tuples, + 'copies' is a dictionary mapping destinations to sources, + 'source' is the source repository, and 'revmap' is a mapfile + of source revisions to converted revisions. Only getfile() and + lookuprev() should be called on 'source'. + + Note that the sink repository is not told to update itself to + a particular revision (or even what that revision would be) + before it receives the file data. + """ + raise NotImplementedError() + + def puttags(self, tags): + """Put tags into sink. + + tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string. + Return a pair (tag_revision, tag_parent_revision), or (None, None) + if nothing was changed. + """ + raise NotImplementedError() + + def setbranch(self, branch, pbranches): + """Set the current branch name. Called before the first putcommit + on the branch. + branch: branch name for subsequent commits + pbranches: (converted parent revision, parent branch) tuples""" + pass + + def setfilemapmode(self, active): + """Tell the destination that we're using a filemap + + Some converter_sources (svn in particular) can claim that a file + was changed in a revision, even if there was no change. This method + tells the destination that we're using a filemap and that it should + filter empty revisions. 
+ """ + pass + + def before(self): + pass + + def after(self): + pass + + +class commandline(object): + def __init__(self, ui, command): + self.ui = ui + self.command = command + + def prerun(self): + pass + + def postrun(self): + pass + + def _cmdline(self, cmd, *args, **kwargs): + cmdline = [self.command, cmd] + list(args) + for k, v in kwargs.iteritems(): + if len(k) == 1: + cmdline.append('-' + k) + else: + cmdline.append('--' + k.replace('_', '-')) + try: + if len(k) == 1: + cmdline.append('' + v) + else: + cmdline[-1] += '=' + v + except TypeError: + pass + cmdline = [util.shellquote(arg) for arg in cmdline] + if not self.ui.debugflag: + cmdline += ['2>', util.nulldev] + cmdline += ['<', util.nulldev] + cmdline = ' '.join(cmdline) + return cmdline + + def _run(self, cmd, *args, **kwargs): + cmdline = self._cmdline(cmd, *args, **kwargs) + self.ui.debug('running: %s\n' % (cmdline,)) + self.prerun() + try: + return util.popen(cmdline) + finally: + self.postrun() + + def run(self, cmd, *args, **kwargs): + fp = self._run(cmd, *args, **kwargs) + output = fp.read() + self.ui.debug(output) + return output, fp.close() + + def runlines(self, cmd, *args, **kwargs): + fp = self._run(cmd, *args, **kwargs) + output = fp.readlines() + self.ui.debug(''.join(output)) + return output, fp.close() + + def checkexit(self, status, output=''): + if status: + if output: + self.ui.warn(_('%s error:\n') % self.command) + self.ui.warn(output) + msg = util.explain_exit(status)[0] + raise util.Abort('%s %s' % (self.command, msg)) + + def run0(self, cmd, *args, **kwargs): + output, status = self.run(cmd, *args, **kwargs) + self.checkexit(status, output) + return output + + def runlines0(self, cmd, *args, **kwargs): + output, status = self.runlines(cmd, *args, **kwargs) + self.checkexit(status, ''.join(output)) + return output + + def getargmax(self): + if '_argmax' in self.__dict__: + return self._argmax + + # POSIX requires at least 4096 bytes for ARG_MAX + self._argmax = 4096 + try: + self._argmax = os.sysconf("SC_ARG_MAX") + except: + pass + + # Windows shells impose their own limits on command line length, + # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes + # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for + # details about cmd.exe limitations. + + # Since ARG_MAX is for command line _and_ environment, lower our limit + # (and make happy Windows shells while doing this). 
+ + self._argmax = self._argmax / 2 - 1 + return self._argmax + + def limit_arglist(self, arglist, cmd, *args, **kwargs): + limit = self.getargmax() - len(self._cmdline(cmd, *args, **kwargs)) + bytes = 0 + fl = [] + for fn in arglist: + b = len(fn) + 3 + if bytes + b < limit or len(fl) == 0: + fl.append(fn) + bytes += b + else: + yield fl + fl = [fn] + bytes = b + if fl: + yield fl + + def xargs(self, arglist, cmd, *args, **kwargs): + for l in self.limit_arglist(arglist, cmd, *args, **kwargs): + self.run0(cmd, *(list(args) + l), **kwargs) + +class mapfile(dict): + def __init__(self, ui, path): + super(mapfile, self).__init__() + self.ui = ui + self.path = path + self.fp = None + self.order = [] + self._read() + + def _read(self): + if not self.path: + return + try: + fp = open(self.path, 'r') + except IOError, err: + if err.errno != errno.ENOENT: + raise + return + for i, line in enumerate(fp): + try: + key, value = line.splitlines()[0].rsplit(' ', 1) + except ValueError: + raise util.Abort( + _('syntax error in %s(%d): key/value pair expected') + % (self.path, i + 1)) + if key not in self: + self.order.append(key) + super(mapfile, self).__setitem__(key, value) + fp.close() + + def __setitem__(self, key, value): + if self.fp is None: + try: + self.fp = open(self.path, 'a') + except IOError, err: + raise util.Abort(_('could not open map file %r: %s') % + (self.path, err.strerror)) + self.fp.write('%s %s\n' % (key, value)) + self.fp.flush() + super(mapfile, self).__setitem__(key, value) + + def close(self): + if self.fp: + self.fp.close() + self.fp = None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo new file mode 100644 index 0000000..de20000 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/common.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py new file mode 100644 index 0000000..ac91b41 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.py @@ -0,0 +1,434 @@ +# convcmd - convert extension commands definition +# +# Copyright 2005-2007 Matt Mackall +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
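# Sources and sinks are looked up by short type name in the
# source_converters and sink_converters tables below; convertsource() and
# convertsink() probe every entry when no --source-type or --dest-type is
# given.  For example, a hypothetical invocation
#
#   hg convert --source-type git ../project-git ../project-hg
#
# would pick convert_git (with its default 'branchsort' mode) from
# source_converters, while the sink is probed in table order.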
+ +from common import NoRepo, MissingTool, SKIPREV, mapfile +from cvs import convert_cvs +from darcs import darcs_source +from git import convert_git +from hg import mercurial_source, mercurial_sink +from subversion import svn_source, svn_sink +from monotone import monotone_source +from gnuarch import gnuarch_source +from bzr import bzr_source +from p4 import p4_source +import filemap + +import os, shutil +from mercurial import hg, util, encoding +from mercurial.i18n import _ + +orig_encoding = 'ascii' + +def recode(s): + if isinstance(s, unicode): + return s.encode(orig_encoding, 'replace') + else: + return s.decode('utf-8').encode(orig_encoding, 'replace') + +source_converters = [ + ('cvs', convert_cvs, 'branchsort'), + ('git', convert_git, 'branchsort'), + ('svn', svn_source, 'branchsort'), + ('hg', mercurial_source, 'sourcesort'), + ('darcs', darcs_source, 'branchsort'), + ('mtn', monotone_source, 'branchsort'), + ('gnuarch', gnuarch_source, 'branchsort'), + ('bzr', bzr_source, 'branchsort'), + ('p4', p4_source, 'branchsort'), + ] + +sink_converters = [ + ('hg', mercurial_sink), + ('svn', svn_sink), + ] + +def convertsource(ui, path, type, rev): + exceptions = [] + if type and type not in [s[0] for s in source_converters]: + raise util.Abort(_('%s: invalid source repository type') % type) + for name, source, sortmode in source_converters: + try: + if not type or name == type: + return source(ui, path, rev), sortmode + except (NoRepo, MissingTool), inst: + exceptions.append(inst) + if not ui.quiet: + for inst in exceptions: + ui.write("%s\n" % inst) + raise util.Abort(_('%s: missing or unsupported repository') % path) + +def convertsink(ui, path, type): + if type and type not in [s[0] for s in sink_converters]: + raise util.Abort(_('%s: invalid destination repository type') % type) + for name, sink in sink_converters: + try: + if not type or name == type: + return sink(ui, path) + except NoRepo, inst: + ui.note(_("convert: %s\n") % inst) + raise util.Abort(_('%s: unknown repository type') % path) + +class progresssource(object): + def __init__(self, ui, source, filecount): + self.ui = ui + self.source = source + self.filecount = filecount + self.retrieved = 0 + + def getfile(self, file, rev): + self.retrieved += 1 + self.ui.progress(_('getting files'), self.retrieved, + item=file, total=self.filecount) + return self.source.getfile(file, rev) + + def lookuprev(self, rev): + return self.source.lookuprev(rev) + + def close(self): + self.ui.progress(_('getting files'), None) + +class converter(object): + def __init__(self, ui, source, dest, revmapfile, opts): + + self.source = source + self.dest = dest + self.ui = ui + self.opts = opts + self.commitcache = {} + self.authors = {} + self.authorfile = None + + # Record converted revisions persistently: maps source revision + # ID to target revision ID (both strings). (This is how + # incremental conversions work.) 
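        # The revision map holds one "source_rev_id sink_rev_id" pair per
        # line (see converter_sink.revmapfile() in common.py), e.g. a
        # hypothetical entry:
        #
        #   a7c3b1d2e4f5a6b7c8d9e0f1a2b3c4d5e6f7a8b9 0f1e2d3c4b5a69788796a5b4c3d2e1f0a9b8c7d6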
+ self.map = mapfile(ui, revmapfile) + + # Read first the dst author map if any + authorfile = self.dest.authorfile() + if authorfile and os.path.exists(authorfile): + self.readauthormap(authorfile) + # Extend/Override with new author map if necessary + if opts.get('authormap'): + self.readauthormap(opts.get('authormap')) + self.authorfile = self.dest.authorfile() + + self.splicemap = mapfile(ui, opts.get('splicemap')) + self.branchmap = mapfile(ui, opts.get('branchmap')) + + def walktree(self, heads): + '''Return a mapping that identifies the uncommitted parents of every + uncommitted changeset.''' + visit = heads + known = set() + parents = {} + while visit: + n = visit.pop(0) + if n in known or n in self.map: + continue + known.add(n) + self.ui.progress(_('scanning'), len(known), unit=_('revisions')) + commit = self.cachecommit(n) + parents[n] = [] + for p in commit.parents: + parents[n].append(p) + visit.append(p) + self.ui.progress(_('scanning'), None) + + return parents + + def toposort(self, parents, sortmode): + '''Return an ordering such that every uncommitted changeset is + preceeded by all its uncommitted ancestors.''' + + def mapchildren(parents): + """Return a (children, roots) tuple where 'children' maps parent + revision identifiers to children ones, and 'roots' is the list of + revisions without parents. 'parents' must be a mapping of revision + identifier to its parents ones. + """ + visit = parents.keys() + seen = set() + children = {} + roots = [] + + while visit: + n = visit.pop(0) + if n in seen: + continue + seen.add(n) + # Ensure that nodes without parents are present in the + # 'children' mapping. + children.setdefault(n, []) + hasparent = False + for p in parents[n]: + if not p in self.map: + visit.append(p) + hasparent = True + children.setdefault(p, []).append(n) + if not hasparent: + roots.append(n) + + return children, roots + + # Sort functions are supposed to take a list of revisions which + # can be converted immediately and pick one + + def makebranchsorter(): + """If the previously converted revision has a child in the + eligible revisions list, pick it. Return the list head + otherwise. Branch sort attempts to minimize branch + switching, which is harmful for Mercurial backend + compression. 
+ """ + prev = [None] + def picknext(nodes): + next = nodes[0] + for n in nodes: + if prev[0] in parents[n]: + next = n + break + prev[0] = next + return next + return picknext + + def makesourcesorter(): + """Source specific sort.""" + keyfn = lambda n: self.commitcache[n].sortkey + def picknext(nodes): + return sorted(nodes, key=keyfn)[0] + return picknext + + def makedatesorter(): + """Sort revisions by date.""" + dates = {} + def getdate(n): + if n not in dates: + dates[n] = util.parsedate(self.commitcache[n].date) + return dates[n] + + def picknext(nodes): + return min([(getdate(n), n) for n in nodes])[1] + + return picknext + + if sortmode == 'branchsort': + picknext = makebranchsorter() + elif sortmode == 'datesort': + picknext = makedatesorter() + elif sortmode == 'sourcesort': + picknext = makesourcesorter() + else: + raise util.Abort(_('unknown sort mode: %s') % sortmode) + + children, actives = mapchildren(parents) + + s = [] + pendings = {} + while actives: + n = picknext(actives) + actives.remove(n) + s.append(n) + + # Update dependents list + for c in children.get(n, []): + if c not in pendings: + pendings[c] = [p for p in parents[c] if p not in self.map] + try: + pendings[c].remove(n) + except ValueError: + raise util.Abort(_('cycle detected between %s and %s') + % (recode(c), recode(n))) + if not pendings[c]: + # Parents are converted, node is eligible + actives.insert(0, c) + pendings[c] = None + + if len(s) != len(parents): + raise util.Abort(_("not all revisions were sorted")) + + return s + + def writeauthormap(self): + authorfile = self.authorfile + if authorfile: + self.ui.status(_('Writing author map file %s\n') % authorfile) + ofile = open(authorfile, 'w+') + for author in self.authors: + ofile.write("%s=%s\n" % (author, self.authors[author])) + ofile.close() + + def readauthormap(self, authorfile): + afile = open(authorfile, 'r') + for line in afile: + + line = line.strip() + if not line or line.startswith('#'): + continue + + try: + srcauthor, dstauthor = line.split('=', 1) + except ValueError: + msg = _('Ignoring bad line in author map file %s: %s\n') + self.ui.warn(msg % (authorfile, line.rstrip())) + continue + + srcauthor = srcauthor.strip() + dstauthor = dstauthor.strip() + if self.authors.get(srcauthor) in (None, dstauthor): + msg = _('mapping author %s to %s\n') + self.ui.debug(msg % (srcauthor, dstauthor)) + self.authors[srcauthor] = dstauthor + continue + + m = _('overriding mapping for author %s, was %s, will be %s\n') + self.ui.status(m % (srcauthor, self.authors[srcauthor], dstauthor)) + + afile.close() + + def cachecommit(self, rev): + commit = self.source.getcommit(rev) + commit.author = self.authors.get(commit.author, commit.author) + commit.branch = self.branchmap.get(commit.branch, commit.branch) + self.commitcache[rev] = commit + return commit + + def copy(self, rev): + commit = self.commitcache[rev] + + changes = self.source.getchanges(rev) + if isinstance(changes, basestring): + if changes == SKIPREV: + dest = SKIPREV + else: + dest = self.map[changes] + self.map[rev] = dest + return + files, copies = changes + pbranches = [] + if commit.parents: + for prev in commit.parents: + if prev not in self.commitcache: + self.cachecommit(prev) + pbranches.append((self.map[prev], + self.commitcache[prev].branch)) + self.dest.setbranch(commit.branch, pbranches) + try: + parents = self.splicemap[rev].replace(',', ' ').split() + self.ui.status(_('spliced in %s as parents of %s\n') % + (parents, rev)) + parents = [self.map.get(p, p) for p in parents] + 
except KeyError: + parents = [b[0] for b in pbranches] + source = progresssource(self.ui, self.source, len(files)) + newnode = self.dest.putcommit(files, copies, parents, commit, + source, self.map) + source.close() + self.source.converted(rev, newnode) + self.map[rev] = newnode + + def convert(self, sortmode): + try: + self.source.before() + self.dest.before() + self.source.setrevmap(self.map) + self.ui.status(_("scanning source...\n")) + heads = self.source.getheads() + parents = self.walktree(heads) + self.ui.status(_("sorting...\n")) + t = self.toposort(parents, sortmode) + num = len(t) + c = None + + self.ui.status(_("converting...\n")) + for i, c in enumerate(t): + num -= 1 + desc = self.commitcache[c].desc + if "\n" in desc: + desc = desc.splitlines()[0] + # convert log message to local encoding without using + # tolocal() because the encoding.encoding convert() + # uses is 'utf-8' + self.ui.status("%d %s\n" % (num, recode(desc))) + self.ui.note(_("source: %s\n") % recode(c)) + self.ui.progress(_('converting'), i, unit=_('revisions'), + total=len(t)) + self.copy(c) + self.ui.progress(_('converting'), None) + + tags = self.source.gettags() + ctags = {} + for k in tags: + v = tags[k] + if self.map.get(v, SKIPREV) != SKIPREV: + ctags[k] = self.map[v] + + if c and ctags: + nrev, tagsparent = self.dest.puttags(ctags) + if nrev and tagsparent: + # write another hash correspondence to override the previous + # one so we don't end up with extra tag heads + tagsparents = [e for e in self.map.iteritems() + if e[1] == tagsparent] + if tagsparents: + self.map[tagsparents[0][0]] = nrev + + self.writeauthormap() + finally: + self.cleanup() + + def cleanup(self): + try: + self.dest.after() + finally: + self.source.after() + self.map.close() + +def convert(ui, src, dest=None, revmapfile=None, **opts): + global orig_encoding + orig_encoding = encoding.encoding + encoding.encoding = 'UTF-8' + + # support --authors as an alias for --authormap + if not opts.get('authormap'): + opts['authormap'] = opts.get('authors') + + if not dest: + dest = hg.defaultdest(src) + "-hg" + ui.status(_("assuming destination %s\n") % dest) + + destc = convertsink(ui, dest, opts.get('dest_type')) + + try: + srcc, defaultsort = convertsource(ui, src, opts.get('source_type'), + opts.get('rev')) + except Exception: + for path in destc.created: + shutil.rmtree(path, True) + raise + + sortmodes = ('branchsort', 'datesort', 'sourcesort') + sortmode = [m for m in sortmodes if opts.get(m)] + if len(sortmode) > 1: + raise util.Abort(_('more than one sort mode specified')) + sortmode = sortmode and sortmode[0] or defaultsort + if sortmode == 'sourcesort' and not srcc.hasnativeorder(): + raise util.Abort(_('--sourcesort is not supported by this data source')) + + fmap = opts.get('filemap') + if fmap: + srcc = filemap.filemap_source(ui, srcc, fmap) + destc.setfilemapmode(True) + + if not revmapfile: + try: + revmapfile = destc.revmapfile() + except: + revmapfile = os.path.join(destc, "map") + + c = converter(ui, srcc, destc, revmapfile, opts) + c.convert(sortmode) + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo new file mode 100644 index 0000000..15f040a Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/convcmd.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py new file mode 100644 index 
0000000..501fae2 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.py @@ -0,0 +1,271 @@ +# cvs.py: CVS conversion code inspired by hg-cvs-import and git-cvsimport +# +# Copyright 2005-2009 Matt Mackall and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import os, re, socket, errno +from cStringIO import StringIO +from mercurial import encoding, util +from mercurial.i18n import _ + +from common import NoRepo, commit, converter_source, checktool +import cvsps + +class convert_cvs(converter_source): + def __init__(self, ui, path, rev=None): + super(convert_cvs, self).__init__(ui, path, rev=rev) + + cvs = os.path.join(path, "CVS") + if not os.path.exists(cvs): + raise NoRepo(_("%s does not look like a CVS checkout") % path) + + checktool('cvs') + + self.changeset = None + self.files = {} + self.tags = {} + self.lastbranch = {} + self.socket = None + self.cvsroot = open(os.path.join(cvs, "Root")).read()[:-1] + self.cvsrepo = open(os.path.join(cvs, "Repository")).read()[:-1] + self.encoding = encoding.encoding + + self._connect() + + def _parse(self): + if self.changeset is not None: + return + self.changeset = {} + + maxrev = 0 + if self.rev: + # TODO: handle tags + try: + # patchset number? + maxrev = int(self.rev) + except ValueError: + raise util.Abort(_('revision %s is not a patchset number') + % self.rev) + + d = os.getcwd() + try: + os.chdir(self.path) + id = None + + cache = 'update' + if not self.ui.configbool('convert', 'cvsps.cache', True): + cache = None + db = cvsps.createlog(self.ui, cache=cache) + db = cvsps.createchangeset(self.ui, db, + fuzz=int(self.ui.config('convert', 'cvsps.fuzz', 60)), + mergeto=self.ui.config('convert', 'cvsps.mergeto', None), + mergefrom=self.ui.config('convert', 'cvsps.mergefrom', None)) + + for cs in db: + if maxrev and cs.id > maxrev: + break + id = str(cs.id) + cs.author = self.recode(cs.author) + self.lastbranch[cs.branch] = id + cs.comment = self.recode(cs.comment) + date = util.datestr(cs.date) + self.tags.update(dict.fromkeys(cs.tags, id)) + + files = {} + for f in cs.entries: + files[f.file] = "%s%s" % ('.'.join([str(x) + for x in f.revision]), + ['', '(DEAD)'][f.dead]) + + # add current commit to set + c = commit(author=cs.author, date=date, + parents=[str(p.id) for p in cs.parents], + desc=cs.comment, branch=cs.branch or '') + self.changeset[id] = c + self.files[id] = files + + self.heads = self.lastbranch.values() + finally: + os.chdir(d) + + def _connect(self): + root = self.cvsroot + conntype = None + user, host = None, None + cmd = ['cvs', 'server'] + + self.ui.status(_("connecting to %s\n") % root) + + if root.startswith(":pserver:"): + root = root[9:] + m = re.match(r'(?:(.*?)(?::(.*?))?@)?([^:\/]*)(?::(\d*))?(.*)', + root) + if m: + conntype = "pserver" + user, passw, serv, port, root = m.groups() + if not user: + user = "anonymous" + if not port: + port = 2401 + else: + port = int(port) + format0 = ":pserver:%s@%s:%s" % (user, serv, root) + format1 = ":pserver:%s@%s:%d%s" % (user, serv, port, root) + + if not passw: + passw = "A" + cvspass = os.path.expanduser("~/.cvspass") + try: + pf = open(cvspass) + for line in pf.read().splitlines(): + part1, part2 = line.split(' ', 1) + if part1 == '/1': + # /1 :pserver:user@example.com:2401/cvsroot/foo Ah 0: + data = fp.read(min(count, chunksize)) + if not data: + raise util.Abort(_("%d bytes missing from remote file") + % count) + count -= len(data) + 
output.write(data) + return output.getvalue() + + self._parse() + if rev.endswith("(DEAD)"): + raise IOError + + args = ("-N -P -kk -r %s --" % rev).split() + args.append(self.cvsrepo + '/' + name) + for x in args: + self.writep.write("Argument %s\n" % x) + self.writep.write("Directory .\n%s\nco\n" % self.realroot) + self.writep.flush() + + data = "" + mode = None + while 1: + line = self.readp.readline() + if line.startswith("Created ") or line.startswith("Updated "): + self.readp.readline() # path + self.readp.readline() # entries + mode = self.readp.readline()[:-1] + count = int(self.readp.readline()[:-1]) + data = chunkedread(self.readp, count) + elif line.startswith(" "): + data += line[1:] + elif line.startswith("M "): + pass + elif line.startswith("Mbinary "): + count = int(self.readp.readline()[:-1]) + data = chunkedread(self.readp, count) + else: + if line == "ok\n": + if mode is None: + raise util.Abort(_('malformed response from CVS')) + return (data, "x" in mode and "x" or "") + elif line.startswith("E "): + self.ui.warn(_("cvs server: %s\n") % line[2:]) + elif line.startswith("Remove"): + self.readp.readline() + else: + raise util.Abort(_("unknown CVS response: %s") % line) + + def getchanges(self, rev): + self._parse() + return sorted(self.files[rev].iteritems()), {} + + def getcommit(self, rev): + self._parse() + return self.changeset[rev] + + def gettags(self): + self._parse() + return self.tags + + def getchangedfiles(self, rev, i): + self._parse() + return sorted(self.files[rev]) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo new file mode 100644 index 0000000..d73fe3f Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvs.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py new file mode 100644 index 0000000..1519d41 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.py @@ -0,0 +1,847 @@ +# Mercurial built-in replacement for cvsps. +# +# Copyright 2008, Frank Kingswood +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import os +import re +import cPickle as pickle +from mercurial import util +from mercurial.i18n import _ +from mercurial import hook + +class logentry(object): + '''Class logentry has the following attributes: + .author - author name as CVS knows it + .branch - name of branch this revision is on + .branches - revision tuple of branches starting at this revision + .comment - commit message + .date - the commit date as a (time, tz) tuple + .dead - true if file revision is dead + .file - Name of file + .lines - a tuple (+lines, -lines) or None + .parent - Previous revision of this entry + .rcs - name of file as returned from CVS + .revision - revision number as tuple + .tags - list of tags on the file + .synthetic - is this a synthetic "file ... added on ..." revision? 
+ .mergepoint- the branch that has been merged from + (if present in rlog output) + .branchpoints- the branches that start at the current entry + ''' + def __init__(self, **entries): + self.synthetic = False + self.__dict__.update(entries) + + def __repr__(self): + return "<%s at 0x%x: %s %s>" % (self.__class__.__name__, + id(self), + self.file, + ".".join(map(str, self.revision))) + +class logerror(Exception): + pass + +def getrepopath(cvspath): + """Return the repository path from a CVS path. + + >>> getrepopath('/foo/bar') + '/foo/bar' + >>> getrepopath('c:/foo/bar') + 'c:/foo/bar' + >>> getrepopath(':pserver:10/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:10c:/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:c:/foo/bar') + 'c:/foo/bar' + >>> getrepopath(':pserver:truc@foo.bar:/foo/bar') + '/foo/bar' + >>> getrepopath(':pserver:truc@foo.bar:c:/foo/bar') + 'c:/foo/bar' + """ + # According to CVS manual, CVS paths are expressed like: + # [:method:][[user][:password]@]hostname[:[port]]/path/to/repository + # + # Unfortunately, Windows absolute paths start with a drive letter + # like 'c:' making it harder to parse. Here we assume that drive + # letters are only one character long and any CVS component before + # the repository path is at least 2 characters long, and use this + # to disambiguate. + parts = cvspath.split(':') + if len(parts) == 1: + return parts[0] + # Here there is an ambiguous case if we have a port number + # immediately followed by a Windows driver letter. We assume this + # never happens and decide it must be CVS path component, + # therefore ignoring it. + if len(parts[-2]) > 1: + return parts[-1].lstrip('0123456789') + return parts[-2] + ':' + parts[-1] + +def createlog(ui, directory=None, root="", rlog=True, cache=None): + '''Collect the CVS rlog''' + + # Because we store many duplicate commit log messages, reusing strings + # saves a lot of memory and pickle storage space. + _scache = {} + def scache(s): + "return a shared version of a string" + return _scache.setdefault(s, s) + + ui.status(_('collecting CVS rlog\n')) + + log = [] # list of logentry objects containing the CVS state + + # patterns to match in CVS (r)log output, by state of use + re_00 = re.compile('RCS file: (.+)$') + re_01 = re.compile('cvs \\[r?log aborted\\]: (.+)$') + re_02 = re.compile('cvs (r?log|server): (.+)\n$') + re_03 = re.compile("(Cannot access.+CVSROOT)|" + "(can't create temporary directory.+)$") + re_10 = re.compile('Working file: (.+)$') + re_20 = re.compile('symbolic names:') + re_30 = re.compile('\t(.+): ([\\d.]+)$') + re_31 = re.compile('----------------------------$') + re_32 = re.compile('=======================================' + '======================================$') + re_50 = re.compile('revision ([\\d.]+)(\s+locked by:\s+.+;)?$') + re_60 = re.compile(r'date:\s+(.+);\s+author:\s+(.+);\s+state:\s+(.+?);' + r'(\s+lines:\s+(\+\d+)?\s+(-\d+)?;)?' 
+ r'(.*mergepoint:\s+([^;]+);)?') + re_70 = re.compile('branches: (.+);$') + + file_added_re = re.compile(r'file [^/]+ was (initially )?added on branch') + + prefix = '' # leading path to strip of what we get from CVS + + if directory is None: + # Current working directory + + # Get the real directory in the repository + try: + prefix = open(os.path.join('CVS','Repository')).read().strip() + directory = prefix + if prefix == ".": + prefix = "" + except IOError: + raise logerror(_('not a CVS sandbox')) + + if prefix and not prefix.endswith(os.sep): + prefix += os.sep + + # Use the Root file in the sandbox, if it exists + try: + root = open(os.path.join('CVS','Root')).read().strip() + except IOError: + pass + + if not root: + root = os.environ.get('CVSROOT', '') + + # read log cache if one exists + oldlog = [] + date = None + + if cache: + cachedir = os.path.expanduser('~/.hg.cvsps') + if not os.path.exists(cachedir): + os.mkdir(cachedir) + + # The cvsps cache pickle needs a uniquified name, based on the + # repository location. The address may have all sort of nasties + # in it, slashes, colons and such. So here we take just the + # alphanumerics, concatenated in a way that does not mix up the + # various components, so that + # :pserver:user@server:/path + # and + # /pserver/user/server/path + # are mapped to different cache file names. + cachefile = root.split(":") + [directory, "cache"] + cachefile = ['-'.join(re.findall(r'\w+', s)) for s in cachefile if s] + cachefile = os.path.join(cachedir, + '.'.join([s for s in cachefile if s])) + + if cache == 'update': + try: + ui.note(_('reading cvs log cache %s\n') % cachefile) + oldlog = pickle.load(open(cachefile)) + ui.note(_('cache has %d log entries\n') % len(oldlog)) + except Exception, e: + ui.note(_('error reading cache: %r\n') % e) + + if oldlog: + date = oldlog[-1].date # last commit date as a (time,tz) tuple + date = util.datestr(date, '%Y/%m/%d %H:%M:%S %1%2') + + # build the CVS commandline + cmd = ['cvs', '-q'] + if root: + cmd.append('-d%s' % root) + p = util.normpath(getrepopath(root)) + if not p.endswith('/'): + p += '/' + if prefix: + # looks like normpath replaces "" by "." 
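# The cache naming scheme a few lines above can be shown in isolation: every
# component of the CVS root plus the module directory is reduced to its
# alphanumeric runs, so ':pserver:user@host:/path' and '/pserver/user/host/path'
# end up as different, filesystem-safe names. A standalone sketch
# (cachefilename and the default cachedir are illustrative, not the names
# used by the extension):
import os, re

def cachefilename(root, directory, cachedir='/tmp/.hg.cvsps'):
    parts = root.split(':') + [directory, 'cache']
    parts = ['-'.join(re.findall(r'\w+', s)) for s in parts if s]
    return os.path.join(cachedir, '.'.join([s for s in parts if s]))

# cachefilename(':pserver:user@example.com:/cvsroot', 'mymodule')
# -> '/tmp/.hg.cvsps/pserver.user-example-com.cvsroot.mymodule.cache'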
+ prefix = p + util.normpath(prefix) + else: + prefix = p + cmd.append(['log', 'rlog'][rlog]) + if date: + # no space between option and date string + cmd.append('-d>%s' % date) + cmd.append(directory) + + # state machine begins here + tags = {} # dictionary of revisions on current file with their tags + branchmap = {} # mapping between branch names and revision numbers + state = 0 + store = False # set when a new record can be appended + + cmd = [util.shellquote(arg) for arg in cmd] + ui.note(_("running %s\n") % (' '.join(cmd))) + ui.debug("prefix=%r directory=%r root=%r\n" % (prefix, directory, root)) + + pfp = util.popen(' '.join(cmd)) + peek = pfp.readline() + while True: + line = peek + if line == '': + break + peek = pfp.readline() + if line.endswith('\n'): + line = line[:-1] + #ui.debug('state=%d line=%r\n' % (state, line)) + + if state == 0: + # initial state, consume input until we see 'RCS file' + match = re_00.match(line) + if match: + rcs = match.group(1) + tags = {} + if rlog: + filename = util.normpath(rcs[:-2]) + if filename.startswith(prefix): + filename = filename[len(prefix):] + if filename.startswith('/'): + filename = filename[1:] + if filename.startswith('Attic/'): + filename = filename[6:] + else: + filename = filename.replace('/Attic/', '/') + state = 2 + continue + state = 1 + continue + match = re_01.match(line) + if match: + raise logerror(match.group(1)) + match = re_02.match(line) + if match: + raise logerror(match.group(2)) + if re_03.match(line): + raise logerror(line) + + elif state == 1: + # expect 'Working file' (only when using log instead of rlog) + match = re_10.match(line) + assert match, _('RCS file must be followed by working file') + filename = util.normpath(match.group(1)) + state = 2 + + elif state == 2: + # expect 'symbolic names' + if re_20.match(line): + branchmap = {} + state = 3 + + elif state == 3: + # read the symbolic names and store as tags + match = re_30.match(line) + if match: + rev = [int(x) for x in match.group(2).split('.')] + + # Convert magic branch number to an odd-numbered one + revn = len(rev) + if revn > 3 and (revn % 2) == 0 and rev[-2] == 0: + rev = rev[:-2] + rev[-1:] + rev = tuple(rev) + + if rev not in tags: + tags[rev] = [] + tags[rev].append(match.group(1)) + branchmap[match.group(1)] = match.group(2) + + elif re_31.match(line): + state = 5 + elif re_32.match(line): + state = 0 + + elif state == 4: + # expecting '------' separator before first revision + if re_31.match(line): + state = 5 + else: + assert not re_32.match(line), _('must have at least ' + 'some revisions') + + elif state == 5: + # expecting revision number and possibly (ignored) lock indication + # we create the logentry here from values stored in states 0 to 4, + # as this state is re-entered for subsequent revisions of a file. 
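# CVS stores a branch tag as a "magic branch number" with a zero inserted
# before the last component (1.2.0.4 is the branch rooted at revision 1.2),
# and the state-3 code above strips that zero so tags can be compared with
# real revision tuples. The same transformation on its own (unmagic is an
# illustrative name):
def unmagic(revstr):
    rev = [int(x) for x in revstr.split('.')]
    if len(rev) > 3 and len(rev) % 2 == 0 and rev[-2] == 0:
        rev = rev[:-2] + rev[-1:]
    return tuple(rev)

# unmagic('1.2.0.4') -> (1, 2, 4); unmagic('1.2') -> (1, 2)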
+ match = re_50.match(line) + assert match, _('expected revision number') + e = logentry(rcs=scache(rcs), file=scache(filename), + revision=tuple([int(x) for x in match.group(1).split('.')]), + branches=[], parent=None) + state = 6 + + elif state == 6: + # expecting date, author, state, lines changed + match = re_60.match(line) + assert match, _('revision must be followed by date line') + d = match.group(1) + if d[2] == '/': + # Y2K + d = '19' + d + + if len(d.split()) != 3: + # cvs log dates always in GMT + d = d + ' UTC' + e.date = util.parsedate(d, ['%y/%m/%d %H:%M:%S', + '%Y/%m/%d %H:%M:%S', + '%Y-%m-%d %H:%M:%S']) + e.author = scache(match.group(2)) + e.dead = match.group(3).lower() == 'dead' + + if match.group(5): + if match.group(6): + e.lines = (int(match.group(5)), int(match.group(6))) + else: + e.lines = (int(match.group(5)), 0) + elif match.group(6): + e.lines = (0, int(match.group(6))) + else: + e.lines = None + + if match.group(7): # cvsnt mergepoint + myrev = match.group(8).split('.') + if len(myrev) == 2: # head + e.mergepoint = 'HEAD' + else: + myrev = '.'.join(myrev[:-2] + ['0', myrev[-2]]) + branches = [b for b in branchmap if branchmap[b] == myrev] + assert len(branches) == 1, 'unknown branch: %s' % e.mergepoint + e.mergepoint = branches[0] + else: + e.mergepoint = None + e.comment = [] + state = 7 + + elif state == 7: + # read the revision numbers of branches that start at this revision + # or store the commit log message otherwise + m = re_70.match(line) + if m: + e.branches = [tuple([int(y) for y in x.strip().split('.')]) + for x in m.group(1).split(';')] + state = 8 + elif re_31.match(line) and re_50.match(peek): + state = 5 + store = True + elif re_32.match(line): + state = 0 + store = True + else: + e.comment.append(line) + + elif state == 8: + # store commit log message + if re_31.match(line): + state = 5 + store = True + elif re_32.match(line): + state = 0 + store = True + else: + e.comment.append(line) + + # When a file is added on a branch B1, CVS creates a synthetic + # dead trunk revision 1.1 so that the branch has a root. + # Likewise, if you merge such a file to a later branch B2 (one + # that already existed when the file was added on B1), CVS + # creates a synthetic dead revision 1.1.x.1 on B2. Don't drop + # these revisions now, but mark them synthetic so + # createchangeset() can take care of them. + if (store and + e.dead and + e.revision[-1] == 1 and # 1.1 or 1.1.x.1 + len(e.comment) == 1 and + file_added_re.match(e.comment[0])): + ui.debug('found synthetic revision in %s: %r\n' + % (e.rcs, e.comment[0])) + e.synthetic = True + + if store: + # clean up the results and save in the log. 
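# State 6 above normalizes CVS dates before handing them to util.parsedate:
# two-digit years get a '19' century prefix and dates without an explicit
# timezone field are treated as UTC, since cvs log always reports GMT. Just
# that normalization step, pulled out for illustration:
def normalize_cvs_date(d):
    if d[2] == '/':              # two-digit year, e.g. '99/12/31 23:59:59'
        d = '19' + d
    if len(d.split()) != 3:      # no timezone field present
        d = d + ' UTC'
    return d

# normalize_cvs_date('99/12/31 23:59:59') -> '1999/12/31 23:59:59 UTC'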
+ store = False + e.tags = sorted([scache(x) for x in tags.get(e.revision, [])]) + e.comment = scache('\n'.join(e.comment)) + + revn = len(e.revision) + if revn > 3 and (revn % 2) == 0: + e.branch = tags.get(e.revision[:-1], [None])[0] + else: + e.branch = None + + # find the branches starting from this revision + branchpoints = set() + for branch, revision in branchmap.iteritems(): + revparts = tuple([int(i) for i in revision.split('.')]) + if len(revparts) < 2: # bad tags + continue + if revparts[-2] == 0 and revparts[-1] % 2 == 0: + # normal branch + if revparts[:-2] == e.revision: + branchpoints.add(branch) + elif revparts == (1, 1, 1): # vendor branch + if revparts in e.branches: + branchpoints.add(branch) + e.branchpoints = branchpoints + + log.append(e) + + if len(log) % 100 == 0: + ui.status(util.ellipsis('%d %s' % (len(log), e.file), 80)+'\n') + + log.sort(key=lambda x: (x.rcs, x.revision)) + + # find parent revisions of individual files + versions = {} + for e in log: + branch = e.revision[:-1] + p = versions.get((e.rcs, branch), None) + if p is None: + p = e.revision[:-2] + e.parent = p + versions[(e.rcs, branch)] = e.revision + + # update the log cache + if cache: + if log: + # join up the old and new logs + log.sort(key=lambda x: x.date) + + if oldlog and oldlog[-1].date >= log[0].date: + raise logerror(_('log cache overlaps with new log entries,' + ' re-run without cache.')) + + log = oldlog + log + + # write the new cachefile + ui.note(_('writing cvs log cache %s\n') % cachefile) + pickle.dump(log, open(cachefile, 'w')) + else: + log = oldlog + + ui.status(_('%d log entries\n') % len(log)) + + hook.hook(ui, None, "cvslog", True, log=log) + + return log + + +class changeset(object): + '''Class changeset has the following attributes: + .id - integer identifying this changeset (list index) + .author - author name as CVS knows it + .branch - name of branch this changeset is on, or None + .comment - commit message + .date - the commit date as a (time,tz) tuple + .entries - list of logentry objects in this changeset + .parents - list of one or two parent changesets + .tags - list of tags on this changeset + .synthetic - from synthetic revision "file ... added on branch ..." + .mergepoint- the branch that has been merged from + (if present in rlog output) + .branchpoints- the branches that start at the current entry + ''' + def __init__(self, **entries): + self.synthetic = False + self.__dict__.update(entries) + + def __repr__(self): + return "<%s at 0x%x: %s>" % (self.__class__.__name__, + id(self), + getattr(self, 'id', "(no id)")) + +def createchangeset(ui, log, fuzz=60, mergefrom=None, mergeto=None): + '''Convert log into changesets.''' + + ui.status(_('creating changesets\n')) + + # Merge changesets + + log.sort(key=lambda x: (x.comment, x.author, x.branch, x.date)) + + changesets = [] + files = set() + c = None + for i, e in enumerate(log): + + # Check if log entry belongs to the current changeset or not. + + # Since CVS is file centric, two different file revisions with + # different branchpoints should be treated as belonging to two + # different changesets (and the ordering is important and not + # honoured by cvsps at this point). + # + # Consider the following case: + # foo 1.1 branchpoints: [MYBRANCH] + # bar 1.1 branchpoints: [MYBRANCH, MYBRANCH2] + # + # Here foo is part only of MYBRANCH, but not MYBRANCH2, e.g. 
a + # later version of foo may be in MYBRANCH2, so foo should be the + # first changeset and bar the next and MYBRANCH and MYBRANCH2 + # should both start off of the bar changeset. No provisions are + # made to ensure that this is, in fact, what happens. + if not (c and + e.comment == c.comment and + e.author == c.author and + e.branch == c.branch and + (not hasattr(e, 'branchpoints') or + not hasattr (c, 'branchpoints') or + e.branchpoints == c.branchpoints) and + ((c.date[0] + c.date[1]) <= + (e.date[0] + e.date[1]) <= + (c.date[0] + c.date[1]) + fuzz) and + e.file not in files): + c = changeset(comment=e.comment, author=e.author, + branch=e.branch, date=e.date, entries=[], + mergepoint=getattr(e, 'mergepoint', None), + branchpoints=getattr(e, 'branchpoints', set())) + changesets.append(c) + files = set() + if len(changesets) % 100 == 0: + t = '%d %s' % (len(changesets), repr(e.comment)[1:-1]) + ui.status(util.ellipsis(t, 80) + '\n') + + c.entries.append(e) + files.add(e.file) + c.date = e.date # changeset date is date of latest commit in it + + # Mark synthetic changesets + + for c in changesets: + # Synthetic revisions always get their own changeset, because + # the log message includes the filename. E.g. if you add file3 + # and file4 on a branch, you get four log entries and three + # changesets: + # "File file3 was added on branch ..." (synthetic, 1 entry) + # "File file4 was added on branch ..." (synthetic, 1 entry) + # "Add file3 and file4 to fix ..." (real, 2 entries) + # Hence the check for 1 entry here. + c.synthetic = len(c.entries) == 1 and c.entries[0].synthetic + + # Sort files in each changeset + + for c in changesets: + def pathcompare(l, r): + 'Mimic cvsps sorting order' + l = l.split('/') + r = r.split('/') + nl = len(l) + nr = len(r) + n = min(nl, nr) + for i in range(n): + if i + 1 == nl and nl < nr: + return -1 + elif i + 1 == nr and nl > nr: + return +1 + elif l[i] < r[i]: + return -1 + elif l[i] > r[i]: + return +1 + return 0 + def entitycompare(l, r): + return pathcompare(l.file, r.file) + + c.entries.sort(entitycompare) + + # Sort changesets by date + + def cscmp(l, r): + d = sum(l.date) - sum(r.date) + if d: + return d + + # detect vendor branches and initial commits on a branch + le = {} + for e in l.entries: + le[e.rcs] = e.revision + re = {} + for e in r.entries: + re[e.rcs] = e.revision + + d = 0 + for e in l.entries: + if re.get(e.rcs, None) == e.parent: + assert not d + d = 1 + break + + for e in r.entries: + if le.get(e.rcs, None) == e.parent: + assert not d + d = -1 + break + + return d + + changesets.sort(cscmp) + + # Collect tags + + globaltags = {} + for c in changesets: + for e in c.entries: + for tag in e.tags: + # remember which is the latest changeset to have this tag + globaltags[tag] = c + + for c in changesets: + tags = set() + for e in c.entries: + tags.update(e.tags) + # remember tags only if this is the latest changeset to have it + c.tags = sorted(tag for tag in tags if globaltags[tag] is c) + + # Find parent changesets, handle {{mergetobranch BRANCHNAME}} + # by inserting dummy changesets with two parents, and handle + # {{mergefrombranch BRANCHNAME}} by setting two parents. 
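# The {{mergetobranch ...}} and {{mergefrombranch ...}} markers mentioned
# above are, by default, the regular expressions compiled just below and are
# simply searched for in the commit message. A minimal check of how such a
# marker is picked up (the commit text is made up):
import re

mergeto = re.compile(r'{{mergetobranch ([-\w]+)}}')
mergefrom = re.compile(r'{{mergefrombranch ([-\w]+)}}')

m = mergefrom.search('fix frobnicator\n\n{{mergefrombranch RELEASE-1-0}}')
# m.group(1) -> 'RELEASE-1-0', i.e. the branch this changeset was merged from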
+ + if mergeto is None: + mergeto = r'{{mergetobranch ([-\w]+)}}' + if mergeto: + mergeto = re.compile(mergeto) + + if mergefrom is None: + mergefrom = r'{{mergefrombranch ([-\w]+)}}' + if mergefrom: + mergefrom = re.compile(mergefrom) + + versions = {} # changeset index where we saw any particular file version + branches = {} # changeset index where we saw a branch + n = len(changesets) + i = 0 + while i < n: + c = changesets[i] + + for f in c.entries: + versions[(f.rcs, f.revision)] = i + + p = None + if c.branch in branches: + p = branches[c.branch] + else: + # first changeset on a new branch + # the parent is a changeset with the branch in its + # branchpoints such that it is the latest possible + # commit without any intervening, unrelated commits. + + for candidate in xrange(i): + if c.branch not in changesets[candidate].branchpoints: + if p is not None: + break + continue + p = candidate + + c.parents = [] + if p is not None: + p = changesets[p] + + # Ensure no changeset has a synthetic changeset as a parent. + while p.synthetic: + assert len(p.parents) <= 1, \ + _('synthetic changeset cannot have multiple parents') + if p.parents: + p = p.parents[0] + else: + p = None + break + + if p is not None: + c.parents.append(p) + + if c.mergepoint: + if c.mergepoint == 'HEAD': + c.mergepoint = None + c.parents.append(changesets[branches[c.mergepoint]]) + + if mergefrom: + m = mergefrom.search(c.comment) + if m: + m = m.group(1) + if m == 'HEAD': + m = None + try: + candidate = changesets[branches[m]] + except KeyError: + ui.warn(_("warning: CVS commit message references " + "non-existent branch %r:\n%s\n") + % (m, c.comment)) + if m in branches and c.branch != m and not candidate.synthetic: + c.parents.append(candidate) + + if mergeto: + m = mergeto.search(c.comment) + if m: + try: + m = m.group(1) + if m == 'HEAD': + m = None + except: + m = None # if no group found then merge to HEAD + if m in branches and c.branch != m: + # insert empty changeset for merge + cc = changeset( + author=c.author, branch=m, date=c.date, + comment='convert-repo: CVS merge from branch %s' + % c.branch, + entries=[], tags=[], + parents=[changesets[branches[m]], c]) + changesets.insert(i + 1, cc) + branches[m] = i + 1 + + # adjust our loop counters now we have inserted a new entry + n += 1 + i += 2 + continue + + branches[c.branch] = i + i += 1 + + # Drop synthetic changesets (safe now that we have ensured no other + # changesets can have them as parents). + i = 0 + while i < len(changesets): + if changesets[i].synthetic: + del changesets[i] + else: + i += 1 + + # Number changesets + + for i, c in enumerate(changesets): + c.id = i + 1 + + ui.status(_('%d changeset entries\n') % len(changesets)) + + hook.hook(ui, None, "cvschangesets", True, changesets=changesets) + + return changesets + + +def debugcvsps(ui, *args, **opts): + '''Read CVS rlog for current directory or named path in + repository, and convert the log to changesets based on matching + commit log entries and dates. 
+ ''' + if opts["new_cache"]: + cache = "write" + elif opts["update_cache"]: + cache = "update" + else: + cache = None + + revisions = opts["revisions"] + + try: + if args: + log = [] + for d in args: + log += createlog(ui, d, root=opts["root"], cache=cache) + else: + log = createlog(ui, root=opts["root"], cache=cache) + except logerror, e: + ui.write("%r\n"%e) + return + + changesets = createchangeset(ui, log, opts["fuzz"]) + del log + + # Print changesets (optionally filtered) + + off = len(revisions) + branches = {} # latest version number in each branch + ancestors = {} # parent branch + for cs in changesets: + + if opts["ancestors"]: + if cs.branch not in branches and cs.parents and cs.parents[0].id: + ancestors[cs.branch] = (changesets[cs.parents[0].id - 1].branch, + cs.parents[0].id) + branches[cs.branch] = cs.id + + # limit by branches + if opts["branches"] and (cs.branch or 'HEAD') not in opts["branches"]: + continue + + if not off: + # Note: trailing spaces on several lines here are needed to have + # bug-for-bug compatibility with cvsps. + ui.write('---------------------\n') + ui.write('PatchSet %d \n' % cs.id) + ui.write('Date: %s\n' % util.datestr(cs.date, + '%Y/%m/%d %H:%M:%S %1%2')) + ui.write('Author: %s\n' % cs.author) + ui.write('Branch: %s\n' % (cs.branch or 'HEAD')) + ui.write('Tag%s: %s \n' % (['', 's'][len(cs.tags) > 1], + ','.join(cs.tags) or '(none)')) + branchpoints = getattr(cs, 'branchpoints', None) + if branchpoints: + ui.write('Branchpoints: %s \n' % ', '.join(branchpoints)) + if opts["parents"] and cs.parents: + if len(cs.parents) > 1: + ui.write('Parents: %s\n' % + (','.join([str(p.id) for p in cs.parents]))) + else: + ui.write('Parent: %d\n' % cs.parents[0].id) + + if opts["ancestors"]: + b = cs.branch + r = [] + while b: + b, c = ancestors[b] + r.append('%s:%d:%d' % (b or "HEAD", c, branches[b])) + if r: + ui.write('Ancestors: %s\n' % (','.join(r))) + + ui.write('Log:\n') + ui.write('%s\n\n' % cs.comment) + ui.write('Members: \n') + for f in cs.entries: + fn = f.file + if fn.startswith(opts["prefix"]): + fn = fn[len(opts["prefix"]):] + ui.write('\t%s:%s->%s%s \n' % ( + fn, '.'.join([str(x) for x in f.parent]) or 'INITIAL', + '.'.join([str(x) for x in f.revision]), + ['', '(DEAD)'][f.dead])) + ui.write('\n') + + # have we seen the start tag? + if revisions and off: + if revisions[0] == str(cs.id) or \ + revisions[0] in cs.tags: + off = False + + # see if we reached the end tag + if len(revisions) > 1 and not off: + if revisions[1] == str(cs.id) or \ + revisions[1] in cs.tags: + break diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo new file mode 100644 index 0000000..fdf6d44 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/cvsps.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py new file mode 100644 index 0000000..9863eb8 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.py @@ -0,0 +1,200 @@ +# darcs.py - darcs support for the convert extension +# +# Copyright 2007-2009 Matt Mackall and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from common import NoRepo, checktool, commandline, commit, converter_source +from mercurial.i18n import _ +from mercurial import encoding, util +import os, shutil, tempfile, re + +# The naming drift of ElementTree is fun! + +try: + from xml.etree.cElementTree import ElementTree, XMLParser +except ImportError: + try: + from xml.etree.ElementTree import ElementTree, XMLParser + except ImportError: + try: + from elementtree.cElementTree import ElementTree, XMLParser + except ImportError: + try: + from elementtree.ElementTree import ElementTree, XMLParser + except ImportError: + ElementTree = None + +class darcs_source(converter_source, commandline): + def __init__(self, ui, path, rev=None): + converter_source.__init__(self, ui, path, rev=rev) + commandline.__init__(self, ui, 'darcs') + + # check for _darcs, ElementTree so that we can easily skip + # test-convert-darcs if ElementTree is not around + if not os.path.exists(os.path.join(path, '_darcs')): + raise NoRepo(_("%s does not look like a darcs repository") % path) + + checktool('darcs') + version = self.run0('--version').splitlines()[0].strip() + if version < '2.1': + raise util.Abort(_('darcs version 2.1 or newer needed (found %r)') % + version) + + if ElementTree is None: + raise util.Abort(_("Python ElementTree module is not available")) + + self.path = os.path.realpath(path) + + self.lastrev = None + self.changes = {} + self.parents = {} + self.tags = {} + + # Check darcs repository format + format = self.format() + if format: + if format in ('darcs-1.0', 'hashed'): + raise NoRepo(_("%s repository format is unsupported, " + "please upgrade") % format) + else: + self.ui.warn(_('failed to detect repository format!')) + + def before(self): + self.tmppath = tempfile.mkdtemp( + prefix='convert-' + os.path.basename(self.path) + '-') + output, status = self.run('init', repodir=self.tmppath) + self.checkexit(status) + + tree = self.xml('changes', xml_output=True, summary=True, + repodir=self.path) + tagname = None + child = None + for elt in tree.findall('patch'): + node = elt.get('hash') + name = elt.findtext('name', '') + if name.startswith('TAG '): + tagname = name[4:].strip() + elif tagname is not None: + self.tags[tagname] = node + tagname = None + self.changes[node] = elt + self.parents[child] = [node] + child = node + self.parents[child] = [] + + def after(self): + self.ui.debug('cleaning up %s\n' % self.tmppath) + shutil.rmtree(self.tmppath, ignore_errors=True) + + def recode(self, s, encoding=None): + if isinstance(s, unicode): + # XMLParser returns unicode objects for anything it can't + # encode into ASCII. We convert them back to str to get + # recode's normal conversion behavior. + s = s.encode('latin-1') + return super(darcs_source, self).recode(s, encoding) + + def xml(self, cmd, **kwargs): + # NOTE: darcs is currently encoding agnostic and will print + # patch metadata byte-for-byte, even in the XML changelog. + etree = ElementTree() + # While we are decoding the XML as latin-1 to be as liberal as + # possible, etree will still raise an exception if any + # non-printable characters are in the XML changelog. 
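# A tiny synthetic 'darcs changes' changelog, parsed the same way as in the
# xml() helper here, shows what before() extracts from each <patch> element:
# the hash attribute, the name text, and the 'TAG ' prefix used to fill
# self.tags. The document below is made up; the parse reuses the
# ElementTree/XMLParser names imported at the top of this module.
from cStringIO import StringIO

data = ('<changelog>'
        '<patch hash="20110108-abcdef" author="user@example.com">'
        '<name>TAG 1.0</name></patch>'
        '<patch hash="20110107-012345" author="user@example.com">'
        '<name>fix frobnicator</name></patch>'
        '</changelog>')
tree = ElementTree()
tree.parse(StringIO(data), parser=XMLParser(encoding='latin-1'))
for elt in tree.getroot().findall('patch'):
    node, name = elt.get('hash'), elt.findtext('name', '')
    # a name starting with 'TAG ' marks the next patch as the tagged revision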
+ parser = XMLParser(encoding='latin-1') + fp = self._run(cmd, **kwargs) + etree.parse(fp, parser=parser) + self.checkexit(fp.close()) + return etree.getroot() + + def format(self): + output, status = self.run('show', 'repo', no_files=True, + repodir=self.path) + self.checkexit(status) + m = re.search(r'^\s*Format:\s*(.*)$', output, re.MULTILINE) + if not m: + return None + return ','.join(sorted(f.strip() for f in m.group(1).split(','))) + + def manifest(self): + man = [] + output, status = self.run('show', 'files', no_directories=True, + repodir=self.tmppath) + self.checkexit(status) + for line in output.split('\n'): + path = line[2:] + if path: + man.append(path) + return man + + def getheads(self): + return self.parents[None] + + def getcommit(self, rev): + elt = self.changes[rev] + date = util.strdate(elt.get('local_date'), '%a %b %d %H:%M:%S %Z %Y') + desc = elt.findtext('name') + '\n' + elt.findtext('comment', '') + # etree can return unicode objects for name, comment, and author, + # so recode() is used to ensure str objects are emitted. + return commit(author=self.recode(elt.get('author')), + date=util.datestr(date), + desc=self.recode(desc).strip(), + parents=self.parents[rev]) + + def pull(self, rev): + output, status = self.run('pull', self.path, all=True, + match='hash %s' % rev, + no_test=True, no_posthook=True, + external_merge='/bin/false', + repodir=self.tmppath) + if status: + if output.find('We have conflicts in') == -1: + self.checkexit(status, output) + output, status = self.run('revert', all=True, repodir=self.tmppath) + self.checkexit(status, output) + + def getchanges(self, rev): + copies = {} + changes = [] + man = None + for elt in self.changes[rev].find('summary').getchildren(): + if elt.tag in ('add_directory', 'remove_directory'): + continue + if elt.tag == 'move': + if man is None: + man = self.manifest() + source, dest = elt.get('from'), elt.get('to') + if source in man: + # File move + changes.append((source, rev)) + changes.append((dest, rev)) + copies[dest] = source + else: + # Directory move, deduce file moves from manifest + source = source + '/' + for f in man: + if not f.startswith(source): + continue + fdest = dest + '/' + f[len(source):] + changes.append((f, rev)) + changes.append((fdest, rev)) + copies[fdest] = f + else: + changes.append((elt.text.strip(), rev)) + self.pull(rev) + self.lastrev = rev + return sorted(changes), copies + + def getfile(self, name, rev): + if rev != self.lastrev: + raise util.Abort(_('internal calling inconsistency')) + path = os.path.join(self.tmppath, name) + data = open(path, 'rb').read() + mode = os.lstat(path).st_mode + mode = (mode & 0111) and 'x' or '' + return data, mode + + def gettags(self): + return self.tags diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo new file mode 100644 index 0000000..78b7568 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/darcs.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py new file mode 100644 index 0000000..1064642 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.py @@ -0,0 +1,365 @@ +# Copyright 2007 Bryan O'Sullivan +# Copyright 2007 Alexis S. L. Carvalho +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
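# The map language parsed by the code below has include/exclude/rename (and
# 'source') directives; a worked example with a throwaway map file (the
# paths, the dummy ui object, and the temporary file are all illustrative):
import tempfile
from hgext.convert.filemap import filemapper

class dummyui(object):
    def warn(self, msg):
        pass

fp = tempfile.NamedTemporaryFile(suffix='.fmap', delete=False)
fp.write("include subdir\nexclude subdir/generated\nrename subdir .\n")
fp.close()

fm = filemapper(dummyui(), fp.name)
# fm('subdir/x.py')           -> 'x.py'   (renamed to the destination root)
# fm('subdir/generated/y.py') -> None     (excluded)
# fm('other.txt')             -> None     (not covered by the include list)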
+ +import shlex +from mercurial.i18n import _ +from mercurial import util +from common import SKIPREV, converter_source + +def rpairs(name): + e = len(name) + while e != -1: + yield name[:e], name[e + 1:] + e = name.rfind('/', 0, e) + yield '.', name + +class filemapper(object): + '''Map and filter filenames when importing. + A name can be mapped to itself, a new name, or None (omit from new + repository).''' + + def __init__(self, ui, path=None): + self.ui = ui + self.include = {} + self.exclude = {} + self.rename = {} + if path: + if self.parse(path): + raise util.Abort(_('errors in filemap')) + + def parse(self, path): + errs = 0 + def check(name, mapping, listname): + if not name: + self.ui.warn(_('%s:%d: path to %s is missing\n') % + (lex.infile, lex.lineno, listname)) + return 1 + if name in mapping: + self.ui.warn(_('%s:%d: %r already in %s list\n') % + (lex.infile, lex.lineno, name, listname)) + return 1 + if (name.startswith('/') or + name.endswith('/') or + '//' in name): + self.ui.warn(_('%s:%d: superfluous / in %s %r\n') % + (lex.infile, lex.lineno, listname, name)) + return 1 + return 0 + lex = shlex.shlex(open(path), path, True) + lex.wordchars += '!@#$%^&*()-=+[]{}|;:,./<>?' + cmd = lex.get_token() + while cmd: + if cmd == 'include': + name = lex.get_token() + errs += check(name, self.exclude, 'exclude') + self.include[name] = name + elif cmd == 'exclude': + name = lex.get_token() + errs += check(name, self.include, 'include') + errs += check(name, self.rename, 'rename') + self.exclude[name] = name + elif cmd == 'rename': + src = lex.get_token() + dest = lex.get_token() + errs += check(src, self.exclude, 'exclude') + self.rename[src] = dest + elif cmd == 'source': + errs += self.parse(lex.get_token()) + else: + self.ui.warn(_('%s:%d: unknown directive %r\n') % + (lex.infile, lex.lineno, cmd)) + errs += 1 + cmd = lex.get_token() + return errs + + def lookup(self, name, mapping): + for pre, suf in rpairs(name): + try: + return mapping[pre], pre, suf + except KeyError: + pass + return '', name, '' + + def __call__(self, name): + if self.include: + inc = self.lookup(name, self.include)[0] + else: + inc = name + if self.exclude: + exc = self.lookup(name, self.exclude)[0] + else: + exc = '' + if (not self.include and exc) or (len(inc) <= len(exc)): + return None + newpre, pre, suf = self.lookup(name, self.rename) + if newpre: + if newpre == '.': + return suf + if suf: + return newpre + '/' + suf + return newpre + return name + + def active(self): + return bool(self.include or self.exclude or self.rename) + +# This class does two additional things compared to a regular source: +# +# - Filter and rename files. This is mostly wrapped by the filemapper +# class above. We hide the original filename in the revision that is +# returned by getchanges to be able to find things later in getfile. +# +# - Return only revisions that matter for the files we're interested in. +# This involves rewriting the parents of the original revision to +# create a graph that is restricted to those revisions. +# +# This set of revisions includes not only revisions that directly +# touch files we're interested in, but also merges that merge two +# or more interesting revisions. 
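# The graph restriction described in the comment above can be pictured with
# a toy DAG: every revision is mapped to its nearest wanted ancestor, so
# unwanted revisions drop out and their children are reparented past them.
# A deliberately simplified sketch of that idea (the real class below also
# tracks wanted ancestors so merges are handled correctly, and supports
# restarting from a saved revision map):
def restrict(order, parents, wanted):
    """order: revisions listed parents-first; parents: {rev: [parent revs]};
    wanted: revisions to keep. Returns {rev: rewritten parent list}."""
    nearest = {}                 # rev -> nearest wanted ancestor, or None
    out = {}
    for rev in order:
        keep = [nearest[p] for p in parents[rev] if nearest.get(p)]
        if rev in wanted:
            out[rev] = sorted(set(keep))
            nearest[rev] = rev
        else:
            nearest[rev] = keep and keep[0] or None
    return out

# restrict(['a', 'b', 'c'], {'a': [], 'b': ['a'], 'c': ['b']}, set(['a', 'c']))
# -> {'a': [], 'c': ['a']}   ('b' is skipped and 'c' is reparented onto 'a')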
+ +class filemap_source(converter_source): + def __init__(self, ui, baseconverter, filemap): + super(filemap_source, self).__init__(ui) + self.base = baseconverter + self.filemapper = filemapper(ui, filemap) + self.commits = {} + # if a revision rev has parent p in the original revision graph, then + # rev will have parent self.parentmap[p] in the restricted graph. + self.parentmap = {} + # self.wantedancestors[rev] is the set of all ancestors of rev that + # are in the restricted graph. + self.wantedancestors = {} + self.convertedorder = None + self._rebuilt = False + self.origparents = {} + self.children = {} + self.seenchildren = {} + + def before(self): + self.base.before() + + def after(self): + self.base.after() + + def setrevmap(self, revmap): + # rebuild our state to make things restartable + # + # To avoid calling getcommit for every revision that has already + # been converted, we rebuild only the parentmap, delaying the + # rebuild of wantedancestors until we need it (i.e. until a + # merge). + # + # We assume the order argument lists the revisions in + # topological order, so that we can infer which revisions were + # wanted by previous runs. + self._rebuilt = not revmap + seen = {SKIPREV: SKIPREV} + dummyset = set() + converted = [] + for rev in revmap.order: + mapped = revmap[rev] + wanted = mapped not in seen + if wanted: + seen[mapped] = rev + self.parentmap[rev] = rev + else: + self.parentmap[rev] = seen[mapped] + self.wantedancestors[rev] = dummyset + arg = seen[mapped] + if arg == SKIPREV: + arg = None + converted.append((rev, wanted, arg)) + self.convertedorder = converted + return self.base.setrevmap(revmap) + + def rebuild(self): + if self._rebuilt: + return True + self._rebuilt = True + self.parentmap.clear() + self.wantedancestors.clear() + self.seenchildren.clear() + for rev, wanted, arg in self.convertedorder: + if rev not in self.origparents: + self.origparents[rev] = self.getcommit(rev).parents + if arg is not None: + self.children[arg] = self.children.get(arg, 0) + 1 + + for rev, wanted, arg in self.convertedorder: + parents = self.origparents[rev] + if wanted: + self.mark_wanted(rev, parents) + else: + self.mark_not_wanted(rev, arg) + self._discard(arg, *parents) + + return True + + def getheads(self): + return self.base.getheads() + + def getcommit(self, rev): + # We want to save a reference to the commit objects to be able + # to rewrite their parents later on. + c = self.commits[rev] = self.base.getcommit(rev) + for p in c.parents: + self.children[p] = self.children.get(p, 0) + 1 + return c + + def _discard(self, *revs): + for r in revs: + if r is None: + continue + self.seenchildren[r] = self.seenchildren.get(r, 0) + 1 + if self.seenchildren[r] == self.children[r]: + del self.wantedancestors[r] + del self.parentmap[r] + del self.seenchildren[r] + if self._rebuilt: + del self.children[r] + + def wanted(self, rev, i): + # Return True if we're directly interested in rev. + # + # i is an index selecting one of the parents of rev (if rev + # has no parents, i is None). getchangedfiles will give us + # the list of files that are different in rev and in the parent + # indicated by i. If we're interested in any of these files, + # we're interested in rev. 
+ try: + files = self.base.getchangedfiles(rev, i) + except NotImplementedError: + raise util.Abort(_("source repository doesn't support --filemap")) + for f in files: + if self.filemapper(f): + return True + return False + + def mark_not_wanted(self, rev, p): + # Mark rev as not interesting and update data structures. + + if p is None: + # A root revision. Use SKIPREV to indicate that it doesn't + # map to any revision in the restricted graph. Put SKIPREV + # in the set of wanted ancestors to simplify code elsewhere + self.parentmap[rev] = SKIPREV + self.wantedancestors[rev] = set((SKIPREV,)) + return + + # Reuse the data from our parent. + self.parentmap[rev] = self.parentmap[p] + self.wantedancestors[rev] = self.wantedancestors[p] + + def mark_wanted(self, rev, parents): + # Mark rev ss wanted and update data structures. + + # rev will be in the restricted graph, so children of rev in + # the original graph should still have rev as a parent in the + # restricted graph. + self.parentmap[rev] = rev + + # The set of wanted ancestors of rev is the union of the sets + # of wanted ancestors of its parents. Plus rev itself. + wrev = set() + for p in parents: + wrev.update(self.wantedancestors[p]) + wrev.add(rev) + self.wantedancestors[rev] = wrev + + def getchanges(self, rev): + parents = self.commits[rev].parents + if len(parents) > 1: + self.rebuild() + + # To decide whether we're interested in rev we: + # + # - calculate what parents rev will have if it turns out we're + # interested in it. If it's going to have more than 1 parent, + # we're interested in it. + # + # - otherwise, we'll compare it with the single parent we found. + # If any of the files we're interested in is different in the + # the two revisions, we're interested in rev. + + # A parent p is interesting if its mapped version (self.parentmap[p]): + # - is not SKIPREV + # - is still not in the list of parents (we don't want duplicates) + # - is not an ancestor of the mapped versions of the other parents + mparents = [] + wp = None + for i, p1 in enumerate(parents): + mp1 = self.parentmap[p1] + if mp1 == SKIPREV or mp1 in mparents: + continue + for p2 in parents: + if p1 == p2 or mp1 == self.parentmap[p2]: + continue + if mp1 in self.wantedancestors[p2]: + break + else: + mparents.append(mp1) + wp = i + + if wp is None and parents: + wp = 0 + + self.origparents[rev] = parents + + closed = 'close' in self.commits[rev].extra + + if len(mparents) < 2 and not closed and not self.wanted(rev, wp): + # We don't want this revision. + # Update our state and tell the convert process to map this + # revision to the same revision its parent as mapped to. + p = None + if parents: + p = parents[wp] + self.mark_not_wanted(rev, p) + self.convertedorder.append((rev, False, p)) + self._discard(*parents) + return self.parentmap[rev] + + # We want this revision. + # Rewrite the parents of the commit object + self.commits[rev].parents = mparents + self.mark_wanted(rev, parents) + self.convertedorder.append((rev, True, None)) + self._discard(*parents) + + # Get the real changes and do the filtering/mapping. To be + # able to get the files later on in getfile, we hide the + # original filename in the rev part of the return value. 
+ changes, copies = self.base.getchanges(rev) + newnames = {} + files = [] + for f, r in changes: + newf = self.filemapper(f) + if newf: + files.append((newf, (f, r))) + newnames[f] = newf + + ncopies = {} + for c in copies: + newc = self.filemapper(c) + if newc: + newsource = self.filemapper(copies[c]) + if newsource: + ncopies[newc] = newsource + + return files, ncopies + + def getfile(self, name, rev): + realname, realrev = rev + return self.base.getfile(realname, realrev) + + def gettags(self): + return self.base.gettags() + + def hasnativeorder(self): + return self.base.hasnativeorder() + + def lookuprev(self, rev): + return self.base.lookuprev(rev) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo new file mode 100644 index 0000000..2ece523 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/filemap.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py new file mode 100644 index 0000000..e973031 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.py @@ -0,0 +1,170 @@ +# git.py - git support for the convert extension +# +# Copyright 2005-2009 Matt Mackall and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +import os +from mercurial import util +from mercurial.node import hex, nullid +from mercurial.i18n import _ + +from common import NoRepo, commit, converter_source, checktool + +class convert_git(converter_source): + # Windows does not support GIT_DIR= construct while other systems + # cannot remove environment variable. Just assume none have + # both issues. 
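# The two gitopen() variants just below differ only in how GIT_DIR reaches
# the child process: either the variable is set and restored around the
# popen call, or it is prefixed on the shell command line. The save/restore
# idea on its own (with_env is an illustrative helper, not part of the
# extension):
import os

def with_env(name, value, func):
    saved = os.environ.get(name)
    os.environ[name] = value
    try:
        return func()
    finally:
        if saved is None:
            del os.environ[name]
        else:
            os.environ[name] = saved

# with_env('GIT_DIR', '/path/to/repo/.git',
#          lambda: os.system('git rev-parse HEAD'))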
+ if hasattr(os, 'unsetenv'): + def gitopen(self, s): + prevgitdir = os.environ.get('GIT_DIR') + os.environ['GIT_DIR'] = self.path + try: + return util.popen(s, 'rb') + finally: + if prevgitdir is None: + del os.environ['GIT_DIR'] + else: + os.environ['GIT_DIR'] = prevgitdir + else: + def gitopen(self, s): + return util.popen('GIT_DIR=%s %s' % (self.path, s), 'rb') + + def gitread(self, s): + fh = self.gitopen(s) + data = fh.read() + return data, fh.close() + + def __init__(self, ui, path, rev=None): + super(convert_git, self).__init__(ui, path, rev=rev) + + if os.path.isdir(path + "/.git"): + path += "/.git" + if not os.path.exists(path + "/objects"): + raise NoRepo(_("%s does not look like a Git repository") % path) + + checktool('git', 'git') + + self.path = path + + def getheads(self): + if not self.rev: + heads, ret = self.gitread('git rev-parse --branches --remotes') + heads = heads.splitlines() + else: + heads, ret = self.gitread("git rev-parse --verify %s" % self.rev) + heads = [heads[:-1]] + if ret: + raise util.Abort(_('cannot retrieve git heads')) + return heads + + def catfile(self, rev, type): + if rev == hex(nullid): + raise IOError() + data, ret = self.gitread("git cat-file %s %s" % (type, rev)) + if ret: + raise util.Abort(_('cannot read %r object at %s') % (type, rev)) + return data + + def getfile(self, name, rev): + data = self.catfile(rev, "blob") + mode = self.modecache[(name, rev)] + return data, mode + + def getchanges(self, version): + self.modecache = {} + fh = self.gitopen("git diff-tree -z --root -m -r %s" % version) + changes = [] + seen = set() + entry = None + for l in fh.read().split('\x00'): + if not entry: + if not l.startswith(':'): + continue + entry = l + continue + f = l + if f not in seen: + seen.add(f) + entry = entry.split() + h = entry[3] + p = (entry[1] == "100755") + s = (entry[1] == "120000") + self.modecache[(f, h)] = (p and "x") or (s and "l") or "" + changes.append((f, h)) + entry = None + if fh.close(): + raise util.Abort(_('cannot read changes in %s') % version) + return (changes, {}) + + def getcommit(self, version): + c = self.catfile(version, "commit") # read the commit hash + end = c.find("\n\n") + message = c[end + 2:] + message = self.recode(message) + l = c[:end].splitlines() + parents = [] + author = committer = None + for e in l[1:]: + n, v = e.split(" ", 1) + if n == "author": + p = v.split() + tm, tz = p[-2:] + author = " ".join(p[:-2]) + if author[0] == "<": author = author[1:-1] + author = self.recode(author) + if n == "committer": + p = v.split() + tm, tz = p[-2:] + committer = " ".join(p[:-2]) + if committer[0] == "<": committer = committer[1:-1] + committer = self.recode(committer) + if n == "parent": + parents.append(v) + + if committer and committer != author: + message += "\ncommitter: %s\n" % committer + tzs, tzh, tzm = tz[-5:-4] + "1", tz[-4:-2], tz[-2:] + tz = -int(tzs) * (int(tzh) * 3600 + int(tzm)) + date = tm + " " + str(tz) + + c = commit(parents=parents, date=date, author=author, desc=message, + rev=version) + return c + + def gettags(self): + tags = {} + fh = self.gitopen('git ls-remote --tags "%s"' % self.path) + prefix = 'refs/tags/' + for line in fh: + line = line.strip() + if not line.endswith("^{}"): + continue + node, tag = line.split(None, 1) + if not tag.startswith(prefix): + continue + tag = tag[len(prefix):-3] + tags[tag] = node + if fh.close(): + raise util.Abort(_('cannot read tags from %s') % self.path) + + return tags + + def getchangedfiles(self, version, i): + changes = [] + if i is None: + fh = 
self.gitopen("git diff-tree --root -m -r %s" % version) + for l in fh: + if "\t" not in l: + continue + m, f = l[:-1].split("\t") + changes.append(f) + else: + fh = self.gitopen('git diff-tree --name-only --root -r %s "%s^%s" --' + % (version, version, i + 1)) + changes = [f.rstrip('\n') for f in fh] + if fh.close(): + raise util.Abort(_('cannot read changes in %s') % version) + + return changes diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyo new file mode 100644 index 0000000..3166318 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/git.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py new file mode 100644 index 0000000..60cfede --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.py @@ -0,0 +1,338 @@ +# gnuarch.py - GNU Arch support for the convert extension +# +# Copyright 2008, 2009 Aleix Conchillo Flaque +# and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +from common import NoRepo, commandline, commit, converter_source +from mercurial.i18n import _ +from mercurial import encoding, util +import os, shutil, tempfile, stat +from email.Parser import Parser + +class gnuarch_source(converter_source, commandline): + + class gnuarch_rev(object): + def __init__(self, rev): + self.rev = rev + self.summary = '' + self.date = None + self.author = '' + self.continuationof = None + self.add_files = [] + self.mod_files = [] + self.del_files = [] + self.ren_files = {} + self.ren_dirs = {} + + def __init__(self, ui, path, rev=None): + super(gnuarch_source, self).__init__(ui, path, rev=rev) + + if not os.path.exists(os.path.join(path, '{arch}')): + raise NoRepo(_("%s does not look like a GNU Arch repository") + % path) + + # Could use checktool, but we want to check for baz or tla. 
+ self.execmd = None + if util.find_exe('baz'): + self.execmd = 'baz' + else: + if util.find_exe('tla'): + self.execmd = 'tla' + else: + raise util.Abort(_('cannot find a GNU Arch tool')) + + commandline.__init__(self, ui, self.execmd) + + self.path = os.path.realpath(path) + self.tmppath = None + + self.treeversion = None + self.lastrev = None + self.changes = {} + self.parents = {} + self.tags = {} + self.catlogparser = Parser() + self.encoding = encoding.encoding + self.archives = [] + + def before(self): + # Get registered archives + self.archives = [i.rstrip('\n') + for i in self.runlines0('archives', '-n')] + + if self.execmd == 'tla': + output = self.run0('tree-version', self.path) + else: + output = self.run0('tree-version', '-d', self.path) + self.treeversion = output.strip() + + # Get name of temporary directory + version = self.treeversion.split('/') + self.tmppath = os.path.join(tempfile.gettempdir(), + 'hg-%s' % version[1]) + + # Generate parents dictionary + self.parents[None] = [] + treeversion = self.treeversion + child = None + while treeversion: + self.ui.status(_('analyzing tree version %s...\n') % treeversion) + + archive = treeversion.split('/')[0] + if archive not in self.archives: + self.ui.status(_('tree analysis stopped because it points to ' + 'an unregistered archive %s...\n') % archive) + break + + # Get the complete list of revisions for that tree version + output, status = self.runlines('revisions', '-r', '-f', treeversion) + self.checkexit(status, 'failed retrieveing revisions for %s' + % treeversion) + + # No new iteration unless a revision has a continuation-of header + treeversion = None + + for l in output: + rev = l.strip() + self.changes[rev] = self.gnuarch_rev(rev) + self.parents[rev] = [] + + # Read author, date and summary + catlog, status = self.run('cat-log', '-d', self.path, rev) + if status: + catlog = self.run0('cat-archive-log', rev) + self._parsecatlog(catlog, rev) + + # Populate the parents map + self.parents[child].append(rev) + + # Keep track of the current revision as the child of the next + # revision scanned + child = rev + + # Check if we have to follow the usual incremental history + # or if we have to 'jump' to a different treeversion given + # by the continuation-of header. + if self.changes[rev].continuationof: + treeversion = '--'.join( + self.changes[rev].continuationof.split('--')[:-1]) + break + + # If we reached a base-0 revision w/o any continuation-of + # header, it means the tree history ends here. + if rev[-6:] == 'base-0': + break + + def after(self): + self.ui.debug('cleaning up %s\n' % self.tmppath) + shutil.rmtree(self.tmppath, ignore_errors=True) + + def getheads(self): + return self.parents[None] + + def getfile(self, name, rev): + if rev != self.lastrev: + raise util.Abort(_('internal calling inconsistency')) + + # Raise IOError if necessary (i.e. deleted files). 
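+ # The convert engine treats an IOError from getfile() as "this file does
+ # not exist in this revision", which is how deletions are reported back.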
+ if not os.path.lexists(os.path.join(self.tmppath, name)): + raise IOError + + return self._getfile(name, rev) + + def getchanges(self, rev): + self._update(rev) + changes = [] + copies = {} + + for f in self.changes[rev].add_files: + changes.append((f, rev)) + + for f in self.changes[rev].mod_files: + changes.append((f, rev)) + + for f in self.changes[rev].del_files: + changes.append((f, rev)) + + for src in self.changes[rev].ren_files: + to = self.changes[rev].ren_files[src] + changes.append((src, rev)) + changes.append((to, rev)) + copies[to] = src + + for src in self.changes[rev].ren_dirs: + to = self.changes[rev].ren_dirs[src] + chgs, cps = self._rendirchanges(src, to) + changes += [(f, rev) for f in chgs] + copies.update(cps) + + self.lastrev = rev + return sorted(set(changes)), copies + + def getcommit(self, rev): + changes = self.changes[rev] + return commit(author=changes.author, date=changes.date, + desc=changes.summary, parents=self.parents[rev], rev=rev) + + def gettags(self): + return self.tags + + def _execute(self, cmd, *args, **kwargs): + cmdline = [self.execmd, cmd] + cmdline += args + cmdline = [util.shellquote(arg) for arg in cmdline] + cmdline += ['>', util.nulldev, '2>', util.nulldev] + cmdline = util.quotecommand(' '.join(cmdline)) + self.ui.debug(cmdline, '\n') + return os.system(cmdline) + + def _update(self, rev): + self.ui.debug('applying revision %s...\n' % rev) + changeset, status = self.runlines('replay', '-d', self.tmppath, + rev) + if status: + # Something went wrong while merging (baz or tla + # issue?), get latest revision and try from there + shutil.rmtree(self.tmppath, ignore_errors=True) + self._obtainrevision(rev) + else: + old_rev = self.parents[rev][0] + self.ui.debug('computing changeset between %s and %s...\n' + % (old_rev, rev)) + self._parsechangeset(changeset, rev) + + def _getfile(self, name, rev): + mode = os.lstat(os.path.join(self.tmppath, name)).st_mode + if stat.S_ISLNK(mode): + data = os.readlink(os.path.join(self.tmppath, name)) + mode = mode and 'l' or '' + else: + data = open(os.path.join(self.tmppath, name), 'rb').read() + mode = (mode & 0111) and 'x' or '' + return data, mode + + def _exclude(self, name): + exclude = ['{arch}', '.arch-ids', '.arch-inventory'] + for exc in exclude: + if name.find(exc) != -1: + return True + return False + + def _readcontents(self, path): + files = [] + contents = os.listdir(path) + while len(contents) > 0: + c = contents.pop() + p = os.path.join(path, c) + # os.walk could be used, but here we avoid internal GNU + # Arch files and directories, thus saving a lot time. 
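+ # _exclude() filters out GNU Arch control entries ('{arch}', '.arch-ids',
+ # '.arch-inventory') while the tree is walked with an explicit work list.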
+ if not self._exclude(p): + if os.path.isdir(p): + contents += [os.path.join(c, f) for f in os.listdir(p)] + else: + files.append(c) + return files + + def _rendirchanges(self, src, dest): + changes = [] + copies = {} + files = self._readcontents(os.path.join(self.tmppath, dest)) + for f in files: + s = os.path.join(src, f) + d = os.path.join(dest, f) + changes.append(s) + changes.append(d) + copies[d] = s + return changes, copies + + def _obtainrevision(self, rev): + self.ui.debug('obtaining revision %s...\n' % rev) + output = self._execute('get', rev, self.tmppath) + self.checkexit(output) + self.ui.debug('analyzing revision %s...\n' % rev) + files = self._readcontents(self.tmppath) + self.changes[rev].add_files += files + + def _stripbasepath(self, path): + if path.startswith('./'): + return path[2:] + return path + + def _parsecatlog(self, data, rev): + try: + catlog = self.catlogparser.parsestr(data) + + # Commit date + self.changes[rev].date = util.datestr( + util.strdate(catlog['Standard-date'], + '%Y-%m-%d %H:%M:%S')) + + # Commit author + self.changes[rev].author = self.recode(catlog['Creator']) + + # Commit description + self.changes[rev].summary = '\n\n'.join((catlog['Summary'], + catlog.get_payload())) + self.changes[rev].summary = self.recode(self.changes[rev].summary) + + # Commit revision origin when dealing with a branch or tag + if 'Continuation-of' in catlog: + self.changes[rev].continuationof = self.recode( + catlog['Continuation-of']) + except Exception: + raise util.Abort(_('could not parse cat-log of %s') % rev) + + def _parsechangeset(self, data, rev): + for l in data: + l = l.strip() + # Added file (ignore added directory) + if l.startswith('A') and not l.startswith('A/'): + file = self._stripbasepath(l[1:].strip()) + if not self._exclude(file): + self.changes[rev].add_files.append(file) + # Deleted file (ignore deleted directory) + elif l.startswith('D') and not l.startswith('D/'): + file = self._stripbasepath(l[1:].strip()) + if not self._exclude(file): + self.changes[rev].del_files.append(file) + # Modified binary file + elif l.startswith('Mb'): + file = self._stripbasepath(l[2:].strip()) + if not self._exclude(file): + self.changes[rev].mod_files.append(file) + # Modified link + elif l.startswith('M->'): + file = self._stripbasepath(l[3:].strip()) + if not self._exclude(file): + self.changes[rev].mod_files.append(file) + # Modified file + elif l.startswith('M'): + file = self._stripbasepath(l[1:].strip()) + if not self._exclude(file): + self.changes[rev].mod_files.append(file) + # Renamed file (or link) + elif l.startswith('=>'): + files = l[2:].strip().split(' ') + if len(files) == 1: + files = l[2:].strip().split('\t') + src = self._stripbasepath(files[0]) + dst = self._stripbasepath(files[1]) + if not self._exclude(src) and not self._exclude(dst): + self.changes[rev].ren_files[src] = dst + # Conversion from file to link or from link to file (modified) + elif l.startswith('ch'): + file = self._stripbasepath(l[2:].strip()) + if not self._exclude(file): + self.changes[rev].mod_files.append(file) + # Renamed directory + elif l.startswith('/>'): + dirs = l[2:].strip().split(' ') + if len(dirs) == 1: + dirs = l[2:].strip().split('\t') + src = self._stripbasepath(dirs[0]) + dst = self._stripbasepath(dirs[1]) + if not self._exclude(src) and not self._exclude(dst): + self.changes[rev].ren_dirs[src] = dst diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyo new file 
mode 100644 index 0000000..0e564ba Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/gnuarch.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py new file mode 100644 index 0000000..183377d --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.py @@ -0,0 +1,376 @@ +# hg.py - hg backend for convert extension +# +# Copyright 2005-2009 Matt Mackall and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +# Notes for hg->hg conversion: +# +# * Old versions of Mercurial didn't trim the whitespace from the ends +# of commit messages, but new versions do. Changesets created by +# those older versions, then converted, may thus have different +# hashes for changesets that are otherwise identical. +# +# * Using "--config convert.hg.saverev=true" will make the source +# identifier to be stored in the converted revision. This will cause +# the converted revision to have a different identity than the +# source. + + +import os, time, cStringIO +from mercurial.i18n import _ +from mercurial.node import bin, hex, nullid +from mercurial import hg, util, context, error + +from common import NoRepo, commit, converter_source, converter_sink + +class mercurial_sink(converter_sink): + def __init__(self, ui, path): + converter_sink.__init__(self, ui, path) + self.branchnames = ui.configbool('convert', 'hg.usebranchnames', True) + self.clonebranches = ui.configbool('convert', 'hg.clonebranches', False) + self.tagsbranch = ui.config('convert', 'hg.tagsbranch', 'default') + self.lastbranch = None + if os.path.isdir(path) and len(os.listdir(path)) > 0: + try: + self.repo = hg.repository(self.ui, path) + if not self.repo.local(): + raise NoRepo(_('%s is not a local Mercurial repository') + % path) + except error.RepoError, err: + ui.traceback() + raise NoRepo(err.args[0]) + else: + try: + ui.status(_('initializing destination %s repository\n') % path) + self.repo = hg.repository(self.ui, path, create=True) + if not self.repo.local(): + raise NoRepo(_('%s is not a local Mercurial repository') + % path) + self.created.append(path) + except error.RepoError: + ui.traceback() + raise NoRepo(_("could not create hg repository %s as sink") + % path) + self.lock = None + self.wlock = None + self.filemapmode = False + + def before(self): + self.ui.debug('run hg sink pre-conversion action\n') + self.wlock = self.repo.wlock() + self.lock = self.repo.lock() + + def after(self): + self.ui.debug('run hg sink post-conversion action\n') + if self.lock: + self.lock.release() + if self.wlock: + self.wlock.release() + + def revmapfile(self): + return os.path.join(self.path, ".hg", "shamap") + + def authorfile(self): + return os.path.join(self.path, ".hg", "authormap") + + def getheads(self): + h = self.repo.changelog.heads() + return [hex(x) for x in h] + + def setbranch(self, branch, pbranches): + if not self.clonebranches: + return + + setbranch = (branch != self.lastbranch) + self.lastbranch = branch + if not branch: + branch = 'default' + pbranches = [(b[0], b[1] and b[1] or 'default') for b in pbranches] + pbranch = pbranches and pbranches[0][1] or 'default' + + branchpath = os.path.join(self.path, branch) + if setbranch: + self.after() + try: + self.repo = hg.repository(self.ui, branchpath) + except: + self.repo = hg.repository(self.ui, branchpath, create=True) + 
self.before() + + # pbranches may bring revisions from other branches (merge parents) + # Make sure we have them, or pull them. + missings = {} + for b in pbranches: + try: + self.repo.lookup(b[0]) + except: + missings.setdefault(b[1], []).append(b[0]) + + if missings: + self.after() + for pbranch, heads in missings.iteritems(): + pbranchpath = os.path.join(self.path, pbranch) + prepo = hg.repository(self.ui, pbranchpath) + self.ui.note(_('pulling from %s into %s\n') % (pbranch, branch)) + self.repo.pull(prepo, [prepo.lookup(h) for h in heads]) + self.before() + + def _rewritetags(self, source, revmap, data): + fp = cStringIO.StringIO() + for line in data.splitlines(): + s = line.split(' ', 1) + if len(s) != 2: + continue + revid = revmap.get(source.lookuprev(s[0])) + if not revid: + continue + fp.write('%s %s\n' % (revid, s[1])) + return fp.getvalue() + + def putcommit(self, files, copies, parents, commit, source, revmap): + + files = dict(files) + def getfilectx(repo, memctx, f): + v = files[f] + data, mode = source.getfile(f, v) + if f == '.hgtags': + data = self._rewritetags(source, revmap, data) + return context.memfilectx(f, data, 'l' in mode, 'x' in mode, + copies.get(f)) + + pl = [] + for p in parents: + if p not in pl: + pl.append(p) + parents = pl + nparents = len(parents) + if self.filemapmode and nparents == 1: + m1node = self.repo.changelog.read(bin(parents[0]))[0] + parent = parents[0] + + if len(parents) < 2: + parents.append(nullid) + if len(parents) < 2: + parents.append(nullid) + p2 = parents.pop(0) + + text = commit.desc + extra = commit.extra.copy() + if self.branchnames and commit.branch: + extra['branch'] = commit.branch + if commit.rev: + extra['convert_revision'] = commit.rev + + while parents: + p1 = p2 + p2 = parents.pop(0) + ctx = context.memctx(self.repo, (p1, p2), text, files.keys(), + getfilectx, commit.author, commit.date, extra) + self.repo.commitctx(ctx) + text = "(octopus merge fixup)\n" + p2 = hex(self.repo.changelog.tip()) + + if self.filemapmode and nparents == 1: + man = self.repo.manifest + mnode = self.repo.changelog.read(bin(p2))[0] + closed = 'close' in commit.extra + if not closed and not man.cmp(m1node, man.revision(mnode)): + self.ui.status(_("filtering out empty revision\n")) + self.repo.rollback() + return parent + return p2 + + def puttags(self, tags): + try: + parentctx = self.repo[self.tagsbranch] + tagparent = parentctx.node() + except error.RepoError: + parentctx = None + tagparent = nullid + + try: + oldlines = sorted(parentctx['.hgtags'].data().splitlines(True)) + except: + oldlines = [] + + newlines = sorted([("%s %s\n" % (tags[tag], tag)) for tag in tags]) + if newlines == oldlines: + return None, None + data = "".join(newlines) + def getfilectx(repo, memctx, f): + return context.memfilectx(f, data, False, False, None) + + self.ui.status(_("updating tags\n")) + date = "%s 0" % int(time.mktime(time.gmtime())) + extra = {'branch': self.tagsbranch} + ctx = context.memctx(self.repo, (tagparent, None), "update tags", + [".hgtags"], getfilectx, "convert-repo", date, + extra) + self.repo.commitctx(ctx) + return hex(self.repo.changelog.tip()), hex(tagparent) + + def setfilemapmode(self, active): + self.filemapmode = active + +class mercurial_source(converter_source): + def __init__(self, ui, path, rev=None): + converter_source.__init__(self, ui, path, rev) + self.ignoreerrors = ui.configbool('convert', 'hg.ignoreerrors', False) + self.ignored = set() + self.saverev = ui.configbool('convert', 'hg.saverev', False) + try: + self.repo = 
hg.repository(self.ui, path) + # try to provoke an exception if this isn't really a hg + # repo, but some other bogus compatible-looking url + if not self.repo.local(): + raise error.RepoError() + except error.RepoError: + ui.traceback() + raise NoRepo(_("%s is not a local Mercurial repository") % path) + self.lastrev = None + self.lastctx = None + self._changescache = None + self.convertfp = None + # Restrict converted revisions to startrev descendants + startnode = ui.config('convert', 'hg.startrev') + if startnode is not None: + try: + startnode = self.repo.lookup(startnode) + except error.RepoError: + raise util.Abort(_('%s is not a valid start revision') + % startnode) + startrev = self.repo.changelog.rev(startnode) + children = {startnode: 1} + for rev in self.repo.changelog.descendants(startrev): + children[self.repo.changelog.node(rev)] = 1 + self.keep = children.__contains__ + else: + self.keep = util.always + + def changectx(self, rev): + if self.lastrev != rev: + self.lastctx = self.repo[rev] + self.lastrev = rev + return self.lastctx + + def parents(self, ctx): + return [p for p in ctx.parents() if p and self.keep(p.node())] + + def getheads(self): + if self.rev: + heads = [self.repo[self.rev].node()] + else: + heads = self.repo.heads() + return [hex(h) for h in heads if self.keep(h)] + + def getfile(self, name, rev): + try: + fctx = self.changectx(rev)[name] + return fctx.data(), fctx.flags() + except error.LookupError, err: + raise IOError(err) + + def getchanges(self, rev): + ctx = self.changectx(rev) + parents = self.parents(ctx) + if not parents: + files = sorted(ctx.manifest()) + if self.ignoreerrors: + # calling getcopies() is a simple way to detect missing + # revlogs and populate self.ignored + self.getcopies(ctx, parents, files) + return [(f, rev) for f in files if f not in self.ignored], {} + if self._changescache and self._changescache[0] == rev: + m, a, r = self._changescache[1] + else: + m, a, r = self.repo.status(parents[0].node(), ctx.node())[:3] + # getcopies() detects missing revlogs early, run it before + # filtering the changes. 
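+ # m, a and r are the modified, added and removed file lists from
+ # repo.status(); getcopies() only needs to inspect m + a.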
+ copies = self.getcopies(ctx, parents, m + a) + changes = [(name, rev) for name in m + a + r + if name not in self.ignored] + return sorted(changes), copies + + def getcopies(self, ctx, parents, files): + copies = {} + for name in files: + if name in self.ignored: + continue + try: + copysource, copynode = ctx.filectx(name).renamed() + if copysource in self.ignored or not self.keep(copynode): + continue + # Ignore copy sources not in parent revisions + found = False + for p in parents: + if copysource in p: + found = True + break + if not found: + continue + copies[name] = copysource + except TypeError: + pass + except error.LookupError, e: + if not self.ignoreerrors: + raise + self.ignored.add(name) + self.ui.warn(_('ignoring: %s\n') % e) + return copies + + def getcommit(self, rev): + ctx = self.changectx(rev) + parents = [p.hex() for p in self.parents(ctx)] + if self.saverev: + crev = rev + else: + crev = None + return commit(author=ctx.user(), date=util.datestr(ctx.date()), + desc=ctx.description(), rev=crev, parents=parents, + branch=ctx.branch(), extra=ctx.extra(), + sortkey=ctx.rev()) + + def gettags(self): + tags = [t for t in self.repo.tagslist() if t[0] != 'tip'] + return dict([(name, hex(node)) for name, node in tags + if self.keep(node)]) + + def getchangedfiles(self, rev, i): + ctx = self.changectx(rev) + parents = self.parents(ctx) + if not parents and i is None: + i = 0 + changes = [], ctx.manifest().keys(), [] + else: + i = i or 0 + changes = self.repo.status(parents[i].node(), ctx.node())[:3] + changes = [[f for f in l if f not in self.ignored] for l in changes] + + if i == 0: + self._changescache = (rev, changes) + + return changes[0] + changes[1] + changes[2] + + def converted(self, rev, destrev): + if self.convertfp is None: + self.convertfp = open(os.path.join(self.path, '.hg', 'shamap'), + 'a') + self.convertfp.write('%s %s\n' % (destrev, rev)) + self.convertfp.flush() + + def before(self): + self.ui.debug('run hg source pre-conversion action\n') + + def after(self): + self.ui.debug('run hg source post-conversion action\n') + + def hasnativeorder(self): + return True + + def lookuprev(self, rev): + try: + return hex(self.repo.lookup(rev)) + except error.RepoError: + return None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyo new file mode 100644 index 0000000..633af5e Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/hg.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py new file mode 100644 index 0000000..151ddc5 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.py @@ -0,0 +1,227 @@ +# monotone.py - monotone support for the convert extension +# +# Copyright 2008, 2009 Mikkel Fahnoe Jorgensen and +# others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
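+# The monotone source below drives the 'mtn' command line client through
+# 'mtn automate ...' and accepts either a checked-out workspace (containing
+# an _MTN directory) or a monotone SQLite database file as its path.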
+ +import os, re +from mercurial import util +from common import NoRepo, commit, converter_source, checktool +from common import commandline +from mercurial.i18n import _ + +class monotone_source(converter_source, commandline): + def __init__(self, ui, path=None, rev=None): + converter_source.__init__(self, ui, path, rev) + commandline.__init__(self, ui, 'mtn') + + self.ui = ui + self.path = path + + norepo = NoRepo(_("%s does not look like a monotone repository") + % path) + if not os.path.exists(os.path.join(path, '_MTN')): + # Could be a monotone repository (SQLite db file) + try: + header = file(path, 'rb').read(16) + except: + header = '' + if header != 'SQLite format 3\x00': + raise norepo + + # regular expressions for parsing monotone output + space = r'\s*' + name = r'\s+"((?:\\"|[^"])*)"\s*' + value = name + revision = r'\s+\[(\w+)\]\s*' + lines = r'(?:.|\n)+' + + self.dir_re = re.compile(space + "dir" + name) + self.file_re = re.compile(space + "file" + name + + "content" + revision) + self.add_file_re = re.compile(space + "add_file" + name + + "content" + revision) + self.patch_re = re.compile(space + "patch" + name + + "from" + revision + "to" + revision) + self.rename_re = re.compile(space + "rename" + name + "to" + name) + self.delete_re = re.compile(space + "delete" + name) + self.tag_re = re.compile(space + "tag" + name + "revision" + + revision) + self.cert_re = re.compile(lines + space + "name" + name + + "value" + value) + + attr = space + "file" + lines + space + "attr" + space + self.attr_execute_re = re.compile(attr + '"mtn:execute"' + + space + '"true"') + + # cached data + self.manifest_rev = None + self.manifest = None + self.files = None + self.dirs = None + + checktool('mtn', abort=False) + + # test if there are any revisions + self.rev = None + try: + self.getheads() + except: + raise norepo + self.rev = rev + + def mtnrun(self, *args, **kwargs): + kwargs['d'] = self.path + return self.run0('automate', *args, **kwargs) + + def mtnloadmanifest(self, rev): + if self.manifest_rev == rev: + return + self.manifest = self.mtnrun("get_manifest_of", rev).split("\n\n") + self.manifest_rev = rev + self.files = {} + self.dirs = {} + + for e in self.manifest: + m = self.file_re.match(e) + if m: + attr = "" + name = m.group(1) + node = m.group(2) + if self.attr_execute_re.match(e): + attr += "x" + self.files[name] = (node, attr) + m = self.dir_re.match(e) + if m: + self.dirs[m.group(1)] = True + + def mtnisfile(self, name, rev): + # a non-file could be a directory or a deleted or renamed file + self.mtnloadmanifest(rev) + return name in self.files + + def mtnisdir(self, name, rev): + self.mtnloadmanifest(rev) + return name in self.dirs + + def mtngetcerts(self, rev): + certs = {"author":"", "date":"", + "changelog":"", "branch":""} + certlist = self.mtnrun("certs", rev) + # mtn < 0.45: + # key "test@selenic.com" + # mtn >= 0.45: + # key [ff58a7ffb771907c4ff68995eada1c4da068d328] + certlist = re.split('\n\n key ["\[]', certlist) + for e in certlist: + m = self.cert_re.match(e) + if m: + name, value = m.groups() + value = value.replace(r'\"', '"') + value = value.replace(r'\\', '\\') + certs[name] = value + # Monotone may have subsecond dates: 2005-02-05T09:39:12.364306 + # and all times are stored in UTC + certs["date"] = certs["date"].split('.')[0] + " UTC" + return certs + + # implement the converter_source interface: + + def getheads(self): + if not self.rev: + return self.mtnrun("leaves").splitlines() + else: + return [self.rev] + + def getchanges(self, rev): + #revision 
= self.mtncmd("get_revision %s" % rev).split("\n\n") + revision = self.mtnrun("get_revision", rev).split("\n\n") + files = {} + ignoremove = {} + renameddirs = [] + copies = {} + for e in revision: + m = self.add_file_re.match(e) + if m: + files[m.group(1)] = rev + ignoremove[m.group(1)] = rev + m = self.patch_re.match(e) + if m: + files[m.group(1)] = rev + # Delete/rename is handled later when the convert engine + # discovers an IOError exception from getfile, + # but only if we add the "from" file to the list of changes. + m = self.delete_re.match(e) + if m: + files[m.group(1)] = rev + m = self.rename_re.match(e) + if m: + toname = m.group(2) + fromname = m.group(1) + if self.mtnisfile(toname, rev): + ignoremove[toname] = 1 + copies[toname] = fromname + files[toname] = rev + files[fromname] = rev + elif self.mtnisdir(toname, rev): + renameddirs.append((fromname, toname)) + + # Directory renames can be handled only once we have recorded + # all new files + for fromdir, todir in renameddirs: + renamed = {} + for tofile in self.files: + if tofile in ignoremove: + continue + if tofile.startswith(todir + '/'): + renamed[tofile] = fromdir + tofile[len(todir):] + # Avoid chained moves like: + # d1(/a) => d3/d1(/a) + # d2 => d3 + ignoremove[tofile] = 1 + for tofile, fromfile in renamed.items(): + self.ui.debug (_("copying file in renamed directory " + "from '%s' to '%s'") + % (fromfile, tofile), '\n') + files[tofile] = rev + copies[tofile] = fromfile + for fromfile in renamed.values(): + files[fromfile] = rev + + return (files.items(), copies) + + def getfile(self, name, rev): + if not self.mtnisfile(name, rev): + raise IOError() # file was deleted or renamed + try: + data = self.mtnrun("get_file_of", name, r=rev) + except: + raise IOError() # file was deleted or renamed + self.mtnloadmanifest(rev) + node, attr = self.files.get(name, (None, "")) + return data, attr + + def getcommit(self, rev): + certs = self.mtngetcerts(rev) + return commit( + author=certs["author"], + date=util.datestr(util.strdate(certs["date"], "%Y-%m-%dT%H:%M:%S")), + desc=certs["changelog"], + rev=rev, + parents=self.mtnrun("parents", rev).splitlines(), + branch=certs["branch"]) + + def gettags(self): + tags = {} + for e in self.mtnrun("tags").split("\n\n"): + m = self.tag_re.match(e) + if m: + tags[m.group(1)] = m.group(2) + return tags + + def getchangedfiles(self, rev, i): + # This function is only needed to support --filemap + # ... and we don't support that + raise NotImplementedError() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyo new file mode 100644 index 0000000..b51d266 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/monotone.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py new file mode 100644 index 0000000..5d640ad --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.py @@ -0,0 +1,202 @@ +# Perforce source for convert extension. +# +# Copyright 2009, Frank Kingswood +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +from mercurial import util +from mercurial.i18n import _ + +from common import commit, converter_source, checktool, NoRepo +import marshal +import re + +def loaditer(f): + "Yield the dictionary objects generated by p4" + try: + while True: + d = marshal.load(f) + if not d: + break + yield d + except EOFError: + pass + +class p4_source(converter_source): + def __init__(self, ui, path, rev=None): + super(p4_source, self).__init__(ui, path, rev=rev) + + if "/" in path and not path.startswith('//'): + raise NoRepo(_('%s does not look like a P4 repository') % path) + + checktool('p4', abort=False) + + self.p4changes = {} + self.heads = {} + self.changeset = {} + self.files = {} + self.tags = {} + self.lastbranch = {} + self.parent = {} + self.encoding = "latin_1" + self.depotname = {} # mapping from local name to depot name + self.re_type = re.compile( + "([a-z]+)?(text|binary|symlink|apple|resource|unicode|utf\d+)" + "(\+\w+)?$") + self.re_keywords = re.compile( + r"\$(Id|Header|Date|DateTime|Change|File|Revision|Author)" + r":[^$\n]*\$") + self.re_keywords_old = re.compile("\$(Id|Header):[^$\n]*\$") + + self._parse(ui, path) + + def _parse_view(self, path): + "Read changes affecting the path" + cmd = 'p4 -G changes -s submitted %s' % util.shellquote(path) + stdout = util.popen(cmd, mode='rb') + for d in loaditer(stdout): + c = d.get("change", None) + if c: + self.p4changes[c] = True + + def _parse(self, ui, path): + "Prepare list of P4 filenames and revisions to import" + ui.status(_('reading p4 views\n')) + + # read client spec or view + if "/" in path: + self._parse_view(path) + if path.startswith("//") and path.endswith("/..."): + views = {path[:-3]:""} + else: + views = {"//": ""} + else: + cmd = 'p4 -G client -o %s' % util.shellquote(path) + clientspec = marshal.load(util.popen(cmd, mode='rb')) + + views = {} + for client in clientspec: + if client.startswith("View"): + sview, cview = clientspec[client].split() + self._parse_view(sview) + if sview.endswith("...") and cview.endswith("..."): + sview = sview[:-3] + cview = cview[:-3] + cview = cview[2:] + cview = cview[cview.find("/") + 1:] + views[sview] = cview + + # list of changes that affect our source files + self.p4changes = self.p4changes.keys() + self.p4changes.sort(key=int) + + # list with depot pathnames, longest first + vieworder = views.keys() + vieworder.sort(key=len, reverse=True) + + # handle revision limiting + startrev = self.ui.config('convert', 'p4.startrev', default=0) + self.p4changes = [x for x in self.p4changes + if ((not startrev or int(x) >= int(startrev)) and + (not self.rev or int(x) <= int(self.rev)))] + + # now read the full changelists to get the list of file revisions + ui.status(_('collecting p4 changelists\n')) + lastid = None + for change in self.p4changes: + cmd = "p4 -G describe -s %s" % change + stdout = util.popen(cmd, mode='rb') + d = marshal.load(stdout) + desc = self.recode(d["desc"]) + shortdesc = desc.split("\n", 1)[0] + t = '%s %s' % (d["change"], repr(shortdesc)[1:-1]) + ui.status(util.ellipsis(t, 80) + '\n') + + if lastid: + parents = [lastid] + else: + parents = [] + + date = (int(d["time"]), 0) # timezone not set + c = commit(author=self.recode(d["user"]), date=util.datestr(date), + parents=parents, desc=desc, branch='', + extra={"p4": change}) + + files = [] + i = 0 + while ("depotFile%d" % i) in d and ("rev%d" % i) in d: + oldname = d["depotFile%d" % i] + filename = None + for v in vieworder: + if oldname.startswith(v): + filename = views[v] + oldname[len(v):] + break + if filename: + 
files.append((filename, d["rev%d" % i])) + self.depotname[filename] = oldname + i += 1 + self.changeset[change] = c + self.files[change] = files + lastid = change + + if lastid: + self.heads = [lastid] + + def getheads(self): + return self.heads + + def getfile(self, name, rev): + cmd = 'p4 -G print %s' \ + % util.shellquote("%s#%s" % (self.depotname[name], rev)) + stdout = util.popen(cmd, mode='rb') + + mode = None + contents = "" + keywords = None + + for d in loaditer(stdout): + code = d["code"] + data = d.get("data") + + if code == "error": + raise IOError(d["generic"], data) + + elif code == "stat": + p4type = self.re_type.match(d["type"]) + if p4type: + mode = "" + flags = (p4type.group(1) or "") + (p4type.group(3) or "") + if "x" in flags: + mode = "x" + if p4type.group(2) == "symlink": + mode = "l" + if "ko" in flags: + keywords = self.re_keywords_old + elif "k" in flags: + keywords = self.re_keywords + + elif code == "text" or code == "binary": + contents += data + + if mode is None: + raise IOError(0, "bad stat") + + if keywords: + contents = keywords.sub("$\\1$", contents) + if mode == "l" and contents.endswith("\n"): + contents = contents[:-1] + + return contents, mode + + def getchanges(self, rev): + return self.files[rev], {} + + def getcommit(self, rev): + return self.changeset[rev] + + def gettags(self): + return self.tags + + def getchangedfiles(self, rev, i): + return sorted([x[0] for x in self.files[rev]]) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyo new file mode 100644 index 0000000..45f1e2a Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/p4.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py new file mode 100644 index 0000000..f2d26ad --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.py @@ -0,0 +1,1168 @@ +# Subversion 1.4/1.5 Python API backend +# +# Copyright(C) 2007 Daniel Holth et al + +import os +import re +import sys +import cPickle as pickle +import tempfile +import urllib +import urllib2 + +from mercurial import strutil, util, encoding +from mercurial.i18n import _ + +# Subversion stuff. Works best with very recent Python SVN bindings +# e.g. SVN 1.5 or backports. Thanks to the bzr folks for enhancing +# these bindings. + +from cStringIO import StringIO + +from common import NoRepo, MissingTool, commit, encodeargs, decodeargs +from common import commandline, converter_source, converter_sink, mapfile + +try: + from svn.core import SubversionException, Pool + import svn + import svn.client + import svn.core + import svn.ra + import svn.delta + import transport + import warnings + warnings.filterwarnings('ignore', + module='svn.core', + category=DeprecationWarning) + +except ImportError: + pass + +class SvnPathNotFound(Exception): + pass + +def geturl(path): + try: + return svn.client.url_from_path(svn.core.svn_path_canonicalize(path)) + except SubversionException: + pass + if os.path.isdir(path): + path = os.path.normpath(os.path.abspath(path)) + if os.name == 'nt': + path = '/' + util.normpath(path) + # Module URL is later compared with the repository URL returned + # by svn API, which is UTF-8. 
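+ # When the svn bindings cannot resolve the path directly, a file:// URL
+ # is built by hand from the normalized absolute path, quoted with
+ # urllib.quote().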
+ path = encoding.tolocal(path) + return 'file://%s' % urllib.quote(path) + return path + +def optrev(number): + optrev = svn.core.svn_opt_revision_t() + optrev.kind = svn.core.svn_opt_revision_number + optrev.value.number = number + return optrev + +class changedpath(object): + def __init__(self, p): + self.copyfrom_path = p.copyfrom_path + self.copyfrom_rev = p.copyfrom_rev + self.action = p.action + +def get_log_child(fp, url, paths, start, end, limit=0, discover_changed_paths=True, + strict_node_history=False): + protocol = -1 + def receiver(orig_paths, revnum, author, date, message, pool): + if orig_paths is not None: + for k, v in orig_paths.iteritems(): + orig_paths[k] = changedpath(v) + pickle.dump((orig_paths, revnum, author, date, message), + fp, protocol) + + try: + # Use an ra of our own so that our parent can consume + # our results without confusing the server. + t = transport.SvnRaTransport(url=url) + svn.ra.get_log(t.ra, paths, start, end, limit, + discover_changed_paths, + strict_node_history, + receiver) + except SubversionException, (inst, num): + pickle.dump(num, fp, protocol) + except IOError: + # Caller may interrupt the iteration + pickle.dump(None, fp, protocol) + else: + pickle.dump(None, fp, protocol) + fp.close() + # With large history, cleanup process goes crazy and suddenly + # consumes *huge* amount of memory. The output file being closed, + # there is no need for clean termination. + os._exit(0) + +def debugsvnlog(ui, **opts): + """Fetch SVN log in a subprocess and channel them back to parent to + avoid memory collection issues. + """ + util.set_binary(sys.stdin) + util.set_binary(sys.stdout) + args = decodeargs(sys.stdin.read()) + get_log_child(sys.stdout, *args) + +class logstream(object): + """Interruptible revision log iterator.""" + def __init__(self, stdout): + self._stdout = stdout + + def __iter__(self): + while True: + try: + entry = pickle.load(self._stdout) + except EOFError: + raise util.Abort(_('Mercurial failed to run itself, check' + ' hg executable is in PATH')) + try: + orig_paths, revnum, author, date, message = entry + except: + if entry is None: + break + raise SubversionException("child raised exception", entry) + yield entry + + def close(self): + if self._stdout: + self._stdout.close() + self._stdout = None + + +# Check to see if the given path is a local Subversion repo. Verify this by +# looking for several svn-specific files and directories in the given +# directory. +def filecheck(ui, path, proto): + for x in ('locks', 'hooks', 'format', 'db'): + if not os.path.exists(os.path.join(path, x)): + return False + return True + +# Check to see if a given path is the root of an svn repo over http. We verify +# this by requesting a version-controlled URL we know can't exist and looking +# for the svn-specific "not found" XML. +def httpcheck(ui, path, proto): + try: + opener = urllib2.build_opener() + rsp = opener.open('%s://%s/!svn/ver/0/.svn' % (proto, path)) + data = rsp.read() + except urllib2.HTTPError, inst: + if inst.code != 404: + # Except for 404 we cannot know for sure this is not an svn repo + ui.warn(_('svn: cannot probe remote repository, assume it could ' + 'be a subversion repository. Use --source-type if you ' + 'know better.\n')) + return True + data = inst.fp.read() + except: + # Could be urllib2.URLError if the URL is invalid or anything else. 
+ return False + return '' in data + +protomap = {'http': httpcheck, + 'https': httpcheck, + 'file': filecheck, + } +def issvnurl(ui, url): + try: + proto, path = url.split('://', 1) + if proto == 'file': + path = urllib.url2pathname(path) + except ValueError: + proto = 'file' + path = os.path.abspath(url) + if proto == 'file': + path = path.replace(os.sep, '/') + check = protomap.get(proto, lambda *args: False) + while '/' in path: + if check(ui, path, proto): + return True + path = path.rsplit('/', 1)[0] + return False + +# SVN conversion code stolen from bzr-svn and tailor +# +# Subversion looks like a versioned filesystem, branches structures +# are defined by conventions and not enforced by the tool. First, +# we define the potential branches (modules) as "trunk" and "branches" +# children directories. Revisions are then identified by their +# module and revision number (and a repository identifier). +# +# The revision graph is really a tree (or a forest). By default, a +# revision parent is the previous revision in the same module. If the +# module directory is copied/moved from another module then the +# revision is the module root and its parent the source revision in +# the parent module. A revision has at most one parent. +# +class svn_source(converter_source): + def __init__(self, ui, url, rev=None): + super(svn_source, self).__init__(ui, url, rev=rev) + + if not (url.startswith('svn://') or url.startswith('svn+ssh://') or + (os.path.exists(url) and + os.path.exists(os.path.join(url, '.svn'))) or + issvnurl(ui, url)): + raise NoRepo(_("%s does not look like a Subversion repository") + % url) + + try: + SubversionException + except NameError: + raise MissingTool(_('Subversion python bindings could not be loaded')) + + try: + version = svn.core.SVN_VER_MAJOR, svn.core.SVN_VER_MINOR + if version < (1, 4): + raise MissingTool(_('Subversion python bindings %d.%d found, ' + '1.4 or later required') % version) + except AttributeError: + raise MissingTool(_('Subversion python bindings are too old, 1.4 ' + 'or later required')) + + self.lastrevs = {} + + latest = None + try: + # Support file://path@rev syntax. Useful e.g. to convert + # deleted branches. + at = url.rfind('@') + if at >= 0: + latest = int(url[at + 1:]) + url = url[:at] + except ValueError: + pass + self.url = geturl(url) + self.encoding = 'UTF-8' # Subversion is always nominal UTF-8 + try: + self.transport = transport.SvnRaTransport(url=self.url) + self.ra = self.transport.ra + self.ctx = self.transport.client + self.baseurl = svn.ra.get_repos_root(self.ra) + # Module is either empty or a repository path starting with + # a slash and not ending with a slash. 
+ self.module = urllib.unquote(self.url[len(self.baseurl):]) + self.prevmodule = None + self.rootmodule = self.module + self.commits = {} + self.paths = {} + self.uuid = svn.ra.get_uuid(self.ra) + except SubversionException: + ui.traceback() + raise NoRepo(_("%s does not look like a Subversion repository") + % self.url) + + if rev: + try: + latest = int(rev) + except ValueError: + raise util.Abort(_('svn: revision %s is not an integer') % rev) + + self.startrev = self.ui.config('convert', 'svn.startrev', default=0) + try: + self.startrev = int(self.startrev) + if self.startrev < 0: + self.startrev = 0 + except ValueError: + raise util.Abort(_('svn: start revision %s is not an integer') + % self.startrev) + + self.head = self.latest(self.module, latest) + if not self.head: + raise util.Abort(_('no revision found in module %s') + % self.module) + self.last_changed = self.revnum(self.head) + + self._changescache = None + + if os.path.exists(os.path.join(url, '.svn/entries')): + self.wc = url + else: + self.wc = None + self.convertfp = None + + def setrevmap(self, revmap): + lastrevs = {} + for revid in revmap.iterkeys(): + uuid, module, revnum = self.revsplit(revid) + lastrevnum = lastrevs.setdefault(module, revnum) + if revnum > lastrevnum: + lastrevs[module] = revnum + self.lastrevs = lastrevs + + def exists(self, path, optrev): + try: + svn.client.ls(self.url.rstrip('/') + '/' + urllib.quote(path), + optrev, False, self.ctx) + return True + except SubversionException: + return False + + def getheads(self): + + def isdir(path, revnum): + kind = self._checkpath(path, revnum) + return kind == svn.core.svn_node_dir + + def getcfgpath(name, rev): + cfgpath = self.ui.config('convert', 'svn.' + name) + if cfgpath is not None and cfgpath.strip() == '': + return None + path = (cfgpath or name).strip('/') + if not self.exists(path, rev): + if cfgpath: + raise util.Abort(_('expected %s to be at %r, but not found') + % (name, path)) + return None + self.ui.note(_('found %s at %r\n') % (name, path)) + return path + + rev = optrev(self.last_changed) + oldmodule = '' + trunk = getcfgpath('trunk', rev) + self.tags = getcfgpath('tags', rev) + branches = getcfgpath('branches', rev) + + # If the project has a trunk or branches, we will extract heads + # from them. We keep the project root otherwise. 
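+ # self.heads is built as [trunk head] plus one head per directory found
+ # under the configured branches path; empty branches are skipped with a
+ # note.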
+ if trunk: + oldmodule = self.module or '' + self.module += '/' + trunk + self.head = self.latest(self.module, self.last_changed) + if not self.head: + raise util.Abort(_('no revision found in module %s') + % self.module) + + # First head in the list is the module's head + self.heads = [self.head] + if self.tags is not None: + self.tags = '%s/%s' % (oldmodule , (self.tags or 'tags')) + + # Check if branches bring a few more heads to the list + if branches: + rpath = self.url.strip('/') + branchnames = svn.client.ls(rpath + '/' + urllib.quote(branches), + rev, False, self.ctx) + for branch in branchnames.keys(): + module = '%s/%s/%s' % (oldmodule, branches, branch) + if not isdir(module, self.last_changed): + continue + brevid = self.latest(module, self.last_changed) + if not brevid: + self.ui.note(_('ignoring empty branch %s\n') % branch) + continue + self.ui.note(_('found branch %s at %d\n') % + (branch, self.revnum(brevid))) + self.heads.append(brevid) + + if self.startrev and self.heads: + if len(self.heads) > 1: + raise util.Abort(_('svn: start revision is not supported ' + 'with more than one branch')) + revnum = self.revnum(self.heads[0]) + if revnum < self.startrev: + raise util.Abort( + _('svn: no revision found after start revision %d') + % self.startrev) + + return self.heads + + def getchanges(self, rev): + if self._changescache and self._changescache[0] == rev: + return self._changescache[1] + self._changescache = None + (paths, parents) = self.paths[rev] + if parents: + files, self.removed, copies = self.expandpaths(rev, paths, parents) + else: + # Perform a full checkout on roots + uuid, module, revnum = self.revsplit(rev) + entries = svn.client.ls(self.baseurl + urllib.quote(module), + optrev(revnum), True, self.ctx) + files = [n for n, e in entries.iteritems() + if e.kind == svn.core.svn_node_file] + copies = {} + self.removed = set() + + files.sort() + files = zip(files, [rev] * len(files)) + + # caller caches the result, so free it here to release memory + del self.paths[rev] + return (files, copies) + + def getchangedfiles(self, rev, i): + changes = self.getchanges(rev) + self._changescache = (rev, changes) + return [f[0] for f in changes[0]] + + def getcommit(self, rev): + if rev not in self.commits: + uuid, module, revnum = self.revsplit(rev) + self.module = module + self.reparent(module) + # We assume that: + # - requests for revisions after "stop" come from the + # revision graph backward traversal. Cache all of them + # down to stop, they will be used eventually. + # - requests for revisions before "stop" come to get + # isolated branches parents. Just fetch what is needed. + stop = self.lastrevs.get(module, 0) + if revnum < stop: + stop = revnum + 1 + self._fetch_revisions(revnum, stop) + commit = self.commits[rev] + # caller caches the result, so free it here to release memory + del self.commits[rev] + return commit + + def gettags(self): + tags = {} + if self.tags is None: + return tags + + # svn tags are just a convention, project branches left in a + # 'tags' directory. There is no other relationship than + # ancestry, which is expensive to discover and makes them hard + # to update incrementally. Worse, past revisions may be + # referenced by tags far away in the future, requiring a deep + # history traversal on every calculation. Current code + # performs a single backward traversal, tracking moves within + # the tags directory (tag renaming) and recording a new tag + # everytime a project is copied from outside the tags + # directory. 
It also lists deleted tags, this behaviour may + # change in the future. + pendings = [] + tagspath = self.tags + start = svn.ra.get_latest_revnum(self.ra) + stream = self._getlog([self.tags], start, self.startrev) + try: + for entry in stream: + origpaths, revnum, author, date, message = entry + copies = [(e.copyfrom_path, e.copyfrom_rev, p) for p, e + in origpaths.iteritems() if e.copyfrom_path] + # Apply moves/copies from more specific to general + copies.sort(reverse=True) + + srctagspath = tagspath + if copies and copies[-1][2] == tagspath: + # Track tags directory moves + srctagspath = copies.pop()[0] + + for source, sourcerev, dest in copies: + if not dest.startswith(tagspath + '/'): + continue + for tag in pendings: + if tag[0].startswith(dest): + tagpath = source + tag[0][len(dest):] + tag[:2] = [tagpath, sourcerev] + break + else: + pendings.append([source, sourcerev, dest]) + + # Filter out tags with children coming from different + # parts of the repository like: + # /tags/tag.1 (from /trunk:10) + # /tags/tag.1/foo (from /branches/foo:12) + # Here/tags/tag.1 discarded as well as its children. + # It happens with tools like cvs2svn. Such tags cannot + # be represented in mercurial. + addeds = dict((p, e.copyfrom_path) for p, e + in origpaths.iteritems() + if e.action == 'A' and e.copyfrom_path) + badroots = set() + for destroot in addeds: + for source, sourcerev, dest in pendings: + if (not dest.startswith(destroot + '/') + or source.startswith(addeds[destroot] + '/')): + continue + badroots.add(destroot) + break + + for badroot in badroots: + pendings = [p for p in pendings if p[2] != badroot + and not p[2].startswith(badroot + '/')] + + # Tell tag renamings from tag creations + remainings = [] + for source, sourcerev, dest in pendings: + tagname = dest.split('/')[-1] + if source.startswith(srctagspath): + remainings.append([source, sourcerev, tagname]) + continue + if tagname in tags: + # Keep the latest tag value + continue + # From revision may be fake, get one with changes + try: + tagid = self.latest(source, sourcerev) + if tagid and tagname not in tags: + tags[tagname] = tagid + except SvnPathNotFound: + # It happens when we are following directories + # we assumed were copied with their parents + # but were really created in the tag + # directory. + pass + pendings = remainings + tagspath = srctagspath + finally: + stream.close() + return tags + + def converted(self, rev, destrev): + if not self.wc: + return + if self.convertfp is None: + self.convertfp = open(os.path.join(self.wc, '.svn', 'hg-shamap'), + 'a') + self.convertfp.write('%s %d\n' % (destrev, self.revnum(rev))) + self.convertfp.flush() + + def revid(self, revnum, module=None): + return 'svn:%s%s@%s' % (self.uuid, module or self.module, revnum) + + def revnum(self, rev): + return int(rev.split('@')[-1]) + + def revsplit(self, rev): + url, revnum = rev.rsplit('@', 1) + revnum = int(revnum) + parts = url.split('/', 1) + uuid = parts.pop(0)[4:] + mod = '' + if parts: + mod = '/' + parts[0] + return uuid, mod, revnum + + def latest(self, path, stop=0): + """Find the latest revid affecting path, up to stop. It may return + a revision in a different module, since a branch may be moved without + a change being reported. Return None if computed module does not + belong to rootmodule subtree. 
+ """ + if not path.startswith(self.rootmodule): + # Requests on foreign branches may be forbidden at server level + self.ui.debug('ignoring foreign branch %r\n' % path) + return None + + if not stop: + stop = svn.ra.get_latest_revnum(self.ra) + try: + prevmodule = self.reparent('') + dirent = svn.ra.stat(self.ra, path.strip('/'), stop) + self.reparent(prevmodule) + except SubversionException: + dirent = None + if not dirent: + raise SvnPathNotFound(_('%s not found up to revision %d') + % (path, stop)) + + # stat() gives us the previous revision on this line of + # development, but it might be in *another module*. Fetch the + # log and detect renames down to the latest revision. + stream = self._getlog([path], stop, dirent.created_rev) + try: + for entry in stream: + paths, revnum, author, date, message = entry + if revnum <= dirent.created_rev: + break + + for p in paths: + if not path.startswith(p) or not paths[p].copyfrom_path: + continue + newpath = paths[p].copyfrom_path + path[len(p):] + self.ui.debug("branch renamed from %s to %s at %d\n" % + (path, newpath, revnum)) + path = newpath + break + finally: + stream.close() + + if not path.startswith(self.rootmodule): + self.ui.debug('ignoring foreign branch %r\n' % path) + return None + return self.revid(dirent.created_rev, path) + + def reparent(self, module): + """Reparent the svn transport and return the previous parent.""" + if self.prevmodule == module: + return module + svnurl = self.baseurl + urllib.quote(module) + prevmodule = self.prevmodule + if prevmodule is None: + prevmodule = '' + self.ui.debug("reparent to %s\n" % svnurl) + svn.ra.reparent(self.ra, svnurl) + self.prevmodule = module + return prevmodule + + def expandpaths(self, rev, paths, parents): + changed, removed = set(), set() + copies = {} + + new_module, revnum = self.revsplit(rev)[1:] + if new_module != self.module: + self.module = new_module + self.reparent(self.module) + + for i, (path, ent) in enumerate(paths): + self.ui.progress(_('scanning paths'), i, item=path, + total=len(paths)) + entrypath = self.getrelpath(path) + + kind = self._checkpath(entrypath, revnum) + if kind == svn.core.svn_node_file: + changed.add(self.recode(entrypath)) + if not ent.copyfrom_path or not parents: + continue + # Copy sources not in parent revisions cannot be + # represented, ignore their origin for now + pmodule, prevnum = self.revsplit(parents[0])[1:] + if ent.copyfrom_rev < prevnum: + continue + copyfrom_path = self.getrelpath(ent.copyfrom_path, pmodule) + if not copyfrom_path: + continue + self.ui.debug("copied to %s from %s@%s\n" % + (entrypath, copyfrom_path, ent.copyfrom_rev)) + copies[self.recode(entrypath)] = self.recode(copyfrom_path) + elif kind == 0: # gone, but had better be a deleted *file* + self.ui.debug("gone from %s\n" % ent.copyfrom_rev) + pmodule, prevnum = self.revsplit(parents[0])[1:] + parentpath = pmodule + "/" + entrypath + fromkind = self._checkpath(entrypath, prevnum, pmodule) + + if fromkind == svn.core.svn_node_file: + removed.add(self.recode(entrypath)) + elif fromkind == svn.core.svn_node_dir: + oroot = parentpath.strip('/') + nroot = path.strip('/') + children = self._iterfiles(oroot, prevnum) + for childpath in children: + childpath = childpath.replace(oroot, nroot) + childpath = self.getrelpath("/" + childpath, pmodule) + if childpath: + removed.add(self.recode(childpath)) + else: + self.ui.debug('unknown path in revision %d: %s\n' % \ + (revnum, path)) + elif kind == svn.core.svn_node_dir: + if ent.action == 'M': + # If the directory just had a 
prop change, + # then we shouldn't need to look for its children. + continue + if ent.action == 'R' and parents: + # If a directory is replacing a file, mark the previous + # file as deleted + pmodule, prevnum = self.revsplit(parents[0])[1:] + pkind = self._checkpath(entrypath, prevnum, pmodule) + if pkind == svn.core.svn_node_file: + removed.add(self.recode(entrypath)) + elif pkind == svn.core.svn_node_dir: + # We do not know what files were kept or removed, + # mark them all as changed. + for childpath in self._iterfiles(pmodule, prevnum): + childpath = self.getrelpath("/" + childpath) + if childpath: + changed.add(self.recode(childpath)) + + for childpath in self._iterfiles(path, revnum): + childpath = self.getrelpath("/" + childpath) + if childpath: + changed.add(self.recode(childpath)) + + # Handle directory copies + if not ent.copyfrom_path or not parents: + continue + # Copy sources not in parent revisions cannot be + # represented, ignore their origin for now + pmodule, prevnum = self.revsplit(parents[0])[1:] + if ent.copyfrom_rev < prevnum: + continue + copyfrompath = self.getrelpath(ent.copyfrom_path, pmodule) + if not copyfrompath: + continue + self.ui.debug("mark %s came from %s:%d\n" + % (path, copyfrompath, ent.copyfrom_rev)) + children = self._iterfiles(ent.copyfrom_path, ent.copyfrom_rev) + for childpath in children: + childpath = self.getrelpath("/" + childpath, pmodule) + if not childpath: + continue + copytopath = path + childpath[len(copyfrompath):] + copytopath = self.getrelpath(copytopath) + copies[self.recode(copytopath)] = self.recode(childpath) + + self.ui.progress(_('scanning paths'), None) + changed.update(removed) + return (list(changed), removed, copies) + + def _fetch_revisions(self, from_revnum, to_revnum): + if from_revnum < to_revnum: + from_revnum, to_revnum = to_revnum, from_revnum + + self.child_cset = None + + def parselogentry(orig_paths, revnum, author, date, message): + """Return the parsed commit object or None, and True if + the revision is a branch root. + """ + self.ui.debug("parsing revision %d (%d changes)\n" % + (revnum, len(orig_paths))) + + branched = False + rev = self.revid(revnum) + # branch log might return entries for a parent we already have + + if rev in self.commits or revnum < to_revnum: + return None, branched + + parents = [] + # check whether this revision is the start of a branch or part + # of a branch renaming + orig_paths = sorted(orig_paths.iteritems()) + root_paths = [(p, e) for p, e in orig_paths + if self.module.startswith(p)] + if root_paths: + path, ent = root_paths[-1] + if ent.copyfrom_path: + branched = True + newpath = ent.copyfrom_path + self.module[len(path):] + # ent.copyfrom_rev may not be the actual last revision + previd = self.latest(newpath, ent.copyfrom_rev) + if previd is not None: + prevmodule, prevnum = self.revsplit(previd)[1:] + if prevnum >= self.startrev: + parents = [previd] + self.ui.note( + _('found parent of branch %s at %d: %s\n') % + (self.module, prevnum, prevmodule)) + else: + self.ui.debug("no copyfrom path, don't know what to do.\n") + + paths = [] + # filter out unrelated paths + for path, ent in orig_paths: + if self.getrelpath(path) is None: + continue + paths.append((path, ent)) + + # Example SVN datetime. Includes microseconds. 
+ # ISO-8601 conformant + # '2007-01-04T17:35:00.902377Z' + date = util.parsedate(date[:19] + " UTC", ["%Y-%m-%dT%H:%M:%S"]) + + log = message and self.recode(message) or '' + author = author and self.recode(author) or '' + try: + branch = self.module.split("/")[-1] + if branch == 'trunk': + branch = '' + except IndexError: + branch = None + + cset = commit(author=author, + date=util.datestr(date), + desc=log, + parents=parents, + branch=branch, + rev=rev) + + self.commits[rev] = cset + # The parents list is *shared* among self.paths and the + # commit object. Both will be updated below. + self.paths[rev] = (paths, cset.parents) + if self.child_cset and not self.child_cset.parents: + self.child_cset.parents[:] = [rev] + self.child_cset = cset + return cset, branched + + self.ui.note(_('fetching revision log for "%s" from %d to %d\n') % + (self.module, from_revnum, to_revnum)) + + try: + firstcset = None + lastonbranch = False + stream = self._getlog([self.module], from_revnum, to_revnum) + try: + for entry in stream: + paths, revnum, author, date, message = entry + if revnum < self.startrev: + lastonbranch = True + break + if not paths: + self.ui.debug('revision %d has no entries\n' % revnum) + # If we ever leave the loop on an empty + # revision, do not try to get a parent branch + lastonbranch = lastonbranch or revnum == 0 + continue + cset, lastonbranch = parselogentry(paths, revnum, author, + date, message) + if cset: + firstcset = cset + if lastonbranch: + break + finally: + stream.close() + + if not lastonbranch and firstcset and not firstcset.parents: + # The first revision of the sequence (the last fetched one) + # has invalid parents if not a branch root. Find the parent + # revision now, if any. + try: + firstrevnum = self.revnum(firstcset.rev) + if firstrevnum > 1: + latest = self.latest(self.module, firstrevnum - 1) + if latest: + firstcset.parents.append(latest) + except SvnPathNotFound: + pass + except SubversionException, (inst, num): + if num == svn.core.SVN_ERR_FS_NO_SUCH_REVISION: + raise util.Abort(_('svn: branch has no revision %s') % to_revnum) + raise + + def getfile(self, file, rev): + # TODO: ra.get_file transmits the whole file instead of diffs. + if file in self.removed: + raise IOError() + mode = '' + try: + new_module, revnum = self.revsplit(rev)[1:] + if self.module != new_module: + self.module = new_module + self.reparent(self.module) + io = StringIO() + info = svn.ra.get_file(self.ra, file, revnum, io) + data = io.getvalue() + # ra.get_files() seems to keep a reference on the input buffer + # preventing collection. Release it explicitely. 
+ io.close() + if isinstance(info, list): + info = info[-1] + mode = ("svn:executable" in info) and 'x' or '' + mode = ("svn:special" in info) and 'l' or mode + except SubversionException, e: + notfound = (svn.core.SVN_ERR_FS_NOT_FOUND, + svn.core.SVN_ERR_RA_DAV_PATH_NOT_FOUND) + if e.apr_err in notfound: # File not found + raise IOError() + raise + if mode == 'l': + link_prefix = "link " + if data.startswith(link_prefix): + data = data[len(link_prefix):] + return data, mode + + def _iterfiles(self, path, revnum): + """Enumerate all files in path at revnum, recursively.""" + path = path.strip('/') + pool = Pool() + rpath = '/'.join([self.baseurl, urllib.quote(path)]).strip('/') + entries = svn.client.ls(rpath, optrev(revnum), True, self.ctx, pool) + return ((path + '/' + p) for p, e in entries.iteritems() + if e.kind == svn.core.svn_node_file) + + def getrelpath(self, path, module=None): + if module is None: + module = self.module + # Given the repository url of this wc, say + # "http://server/plone/CMFPlone/branches/Plone-2_0-branch" + # extract the "entry" portion (a relative path) from what + # svn log --xml says, ie + # "/CMFPlone/branches/Plone-2_0-branch/tests/PloneTestCase.py" + # that is to say "tests/PloneTestCase.py" + if path.startswith(module): + relative = path.rstrip('/')[len(module):] + if relative.startswith('/'): + return relative[1:] + elif relative == '': + return relative + + # The path is outside our tracked tree... + self.ui.debug('%r is not under %r, ignoring\n' % (path, module)) + return None + + def _checkpath(self, path, revnum, module=None): + if module is not None: + prevmodule = self.reparent('') + path = module + '/' + path + try: + # ra.check_path does not like leading slashes very much, it leads + # to PROPFIND subversion errors + return svn.ra.check_path(self.ra, path.strip('/'), revnum) + finally: + if module is not None: + self.reparent(prevmodule) + + def _getlog(self, paths, start, end, limit=0, discover_changed_paths=True, + strict_node_history=False): + # Normalize path names, svn >= 1.5 only wants paths relative to + # supplied URL + relpaths = [] + for p in paths: + if not p.startswith('/'): + p = self.module + '/' + p + relpaths.append(p.strip('/')) + args = [self.baseurl, relpaths, start, end, limit, discover_changed_paths, + strict_node_history] + arg = encodeargs(args) + hgexe = util.hgexecutable() + cmd = '%s debugsvnlog' % util.shellquote(hgexe) + stdin, stdout = util.popen2(cmd) + stdin.write(arg) + try: + stdin.close() + except IOError: + raise util.Abort(_('Mercurial failed to run itself, check' + ' hg executable is in PATH')) + return logstream(stdout) + +pre_revprop_change = '''#!/bin/sh + +REPOS="$1" +REV="$2" +USER="$3" +PROPNAME="$4" +ACTION="$5" + +if [ "$ACTION" = "M" -a "$PROPNAME" = "svn:log" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-branch" ]; then exit 0; fi +if [ "$ACTION" = "A" -a "$PROPNAME" = "hg:convert-rev" ]; then exit 0; fi + +echo "Changing prohibited revision property" >&2 +exit 1 +''' + +class svn_sink(converter_sink, commandline): + commit_re = re.compile(r'Committed revision (\d+).', re.M) + + def prerun(self): + if self.wc: + os.chdir(self.wc) + + def postrun(self): + if self.wc: + os.chdir(self.cwd) + + def join(self, name): + return os.path.join(self.wc, '.svn', name) + + def revmapfile(self): + return self.join('hg-shamap') + + def authorfile(self): + return self.join('hg-authormap') + + def __init__(self, ui, path): + converter_sink.__init__(self, ui, path) + 
commandline.__init__(self, ui, 'svn') + self.delete = [] + self.setexec = [] + self.delexec = [] + self.copies = [] + self.wc = None + self.cwd = os.getcwd() + + path = os.path.realpath(path) + + created = False + if os.path.isfile(os.path.join(path, '.svn', 'entries')): + self.wc = path + self.run0('update') + else: + wcpath = os.path.join(os.getcwd(), os.path.basename(path) + '-wc') + + if os.path.isdir(os.path.dirname(path)): + if not os.path.exists(os.path.join(path, 'db', 'fs-type')): + ui.status(_('initializing svn repository %r\n') % + os.path.basename(path)) + commandline(ui, 'svnadmin').run0('create', path) + created = path + path = util.normpath(path) + if not path.startswith('/'): + path = '/' + path + path = 'file://' + path + + ui.status(_('initializing svn working copy %r\n') + % os.path.basename(wcpath)) + self.run0('checkout', path, wcpath) + + self.wc = wcpath + self.opener = util.opener(self.wc) + self.wopener = util.opener(self.wc) + self.childmap = mapfile(ui, self.join('hg-childmap')) + self.is_exec = util.checkexec(self.wc) and util.is_exec or None + + if created: + hook = os.path.join(created, 'hooks', 'pre-revprop-change') + fp = open(hook, 'w') + fp.write(pre_revprop_change) + fp.close() + util.set_flags(hook, False, True) + + xport = transport.SvnRaTransport(url=geturl(path)) + self.uuid = svn.ra.get_uuid(xport.ra) + + def wjoin(self, *names): + return os.path.join(self.wc, *names) + + def putfile(self, filename, flags, data): + if 'l' in flags: + self.wopener.symlink(data, filename) + else: + try: + if os.path.islink(self.wjoin(filename)): + os.unlink(filename) + except OSError: + pass + self.wopener(filename, 'w').write(data) + + if self.is_exec: + was_exec = self.is_exec(self.wjoin(filename)) + else: + # On filesystems not supporting execute-bit, there is no way + # to know if it is set but asking subversion. Setting it + # systematically is just as expensive and much simpler. + was_exec = 'x' not in flags + + util.set_flags(self.wjoin(filename), False, 'x' in flags) + if was_exec: + if 'x' not in flags: + self.delexec.append(filename) + else: + if 'x' in flags: + self.setexec.append(filename) + + def _copyfile(self, source, dest): + # SVN's copy command pukes if the destination file exists, but + # our copyfile method expects to record a copy that has + # already occurred. Cross the semantic gap. 
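        # In practice the gap is crossed below by moving any existing
        # destination aside to a temporary name, letting "svn copy"
        # create it (so the copy is recorded by Subversion), and then
        # putting the original working-copy contents back over the
        # freshly copied file.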
+ wdest = self.wjoin(dest) + exists = os.path.lexists(wdest) + if exists: + fd, tempname = tempfile.mkstemp( + prefix='hg-copy-', dir=os.path.dirname(wdest)) + os.close(fd) + os.unlink(tempname) + os.rename(wdest, tempname) + try: + self.run0('copy', source, dest) + finally: + if exists: + try: + os.unlink(wdest) + except OSError: + pass + os.rename(tempname, wdest) + + def dirs_of(self, files): + dirs = set() + for f in files: + if os.path.isdir(self.wjoin(f)): + dirs.add(f) + for i in strutil.rfindall(f, '/'): + dirs.add(f[:i]) + return dirs + + def add_dirs(self, files): + add_dirs = [d for d in sorted(self.dirs_of(files)) + if not os.path.exists(self.wjoin(d, '.svn', 'entries'))] + if add_dirs: + self.xargs(add_dirs, 'add', non_recursive=True, quiet=True) + return add_dirs + + def add_files(self, files): + if files: + self.xargs(files, 'add', quiet=True) + return files + + def tidy_dirs(self, names): + deleted = [] + for d in sorted(self.dirs_of(names), reverse=True): + wd = self.wjoin(d) + if os.listdir(wd) == '.svn': + self.run0('delete', d) + deleted.append(d) + return deleted + + def addchild(self, parent, child): + self.childmap[parent] = child + + def revid(self, rev): + return u"svn:%s@%s" % (self.uuid, rev) + + def putcommit(self, files, copies, parents, commit, source, revmap): + # Apply changes to working copy + for f, v in files: + try: + data, mode = source.getfile(f, v) + except IOError: + self.delete.append(f) + else: + self.putfile(f, mode, data) + if f in copies: + self.copies.append([copies[f], f]) + files = [f[0] for f in files] + + for parent in parents: + try: + return self.revid(self.childmap[parent]) + except KeyError: + pass + entries = set(self.delete) + files = frozenset(files) + entries.update(self.add_dirs(files.difference(entries))) + if self.copies: + for s, d in self.copies: + self._copyfile(s, d) + self.copies = [] + if self.delete: + self.xargs(self.delete, 'delete') + self.delete = [] + entries.update(self.add_files(files.difference(entries))) + entries.update(self.tidy_dirs(entries)) + if self.delexec: + self.xargs(self.delexec, 'propdel', 'svn:executable') + self.delexec = [] + if self.setexec: + self.xargs(self.setexec, 'propset', 'svn:executable', '*') + self.setexec = [] + + fd, messagefile = tempfile.mkstemp(prefix='hg-convert-') + fp = os.fdopen(fd, 'w') + fp.write(commit.desc) + fp.close() + try: + output = self.run0('commit', + username=util.shortuser(commit.author), + file=messagefile, + encoding='utf-8') + try: + rev = self.commit_re.search(output).group(1) + except AttributeError: + if not files: + return parents[0] + self.ui.warn(_('unexpected svn output:\n')) + self.ui.warn(output) + raise util.Abort(_('unable to cope with svn output')) + if commit.rev: + self.run('propset', 'hg:convert-rev', commit.rev, + revprop=True, revision=rev) + if commit.branch and commit.branch != 'default': + self.run('propset', 'hg:convert-branch', commit.branch, + revprop=True, revision=rev) + for parent in parents: + self.addchild(parent, rev) + return self.revid(rev) + finally: + os.unlink(messagefile) + + def puttags(self, tags): + self.ui.warn(_('writing Subversion tags is not yet implemented\n')) + return None, None diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo new file mode 100644 index 0000000..df675f8 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/subversion.pyo differ diff --git 
a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py new file mode 100644 index 0000000..db68ede --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.py @@ -0,0 +1,128 @@ +# -*- coding: utf-8 -*- + +# Copyright (C) 2007 Daniel Holth +# This is a stripped-down version of the original bzr-svn transport.py, +# Copyright (C) 2006 Jelmer Vernooij + +# This program is free software; you can redistribute it and/or modify +# it under the terms of the GNU General Public License as published by +# the Free Software Foundation; either version 2 of the License, or +# (at your option) any later version. + +# This program is distributed in the hope that it will be useful, +# but WITHOUT ANY WARRANTY; without even the implied warranty of +# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +# GNU General Public License for more details. + +# You should have received a copy of the GNU General Public License +# along with this program; if not, write to the Free Software +# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA + +from svn.core import SubversionException, Pool +import svn.ra +import svn.client +import svn.core + +# Some older versions of the Python bindings need to be +# explicitly initialized. But what we want to do probably +# won't work worth a darn against those libraries anyway! +svn.ra.initialize() + +svn_config = svn.core.svn_config_get_config(None) + + +def _create_auth_baton(pool): + """Create a Subversion authentication baton. """ + import svn.client + # Give the client context baton a suite of authentication + # providers.h + providers = [ + svn.client.get_simple_provider(pool), + svn.client.get_username_provider(pool), + svn.client.get_ssl_client_cert_file_provider(pool), + svn.client.get_ssl_client_cert_pw_file_provider(pool), + svn.client.get_ssl_server_trust_file_provider(pool), + ] + # Platform-dependant authentication methods + getprovider = getattr(svn.core, 'svn_auth_get_platform_specific_provider', + None) + if getprovider: + # Available in svn >= 1.6 + for name in ('gnome_keyring', 'keychain', 'kwallet', 'windows'): + for type in ('simple', 'ssl_client_cert_pw', 'ssl_server_trust'): + p = getprovider(name, type, pool) + if p: + providers.append(p) + else: + if hasattr(svn.client, 'get_windows_simple_provider'): + providers.append(svn.client.get_windows_simple_provider(pool)) + + return svn.core.svn_auth_open(providers, pool) + +class NotBranchError(SubversionException): + pass + +class SvnRaTransport(object): + """ + Open an ra connection to a Subversion repository. 
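    When an existing ra session is passed in (and the bindings support
    reparenting), it is reused and simply reparented to the given url;
    otherwise a new client context, authentication baton and ra session
    are created.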
+ """ + def __init__(self, url="", ra=None): + self.pool = Pool() + self.svn_url = url + self.username = '' + self.password = '' + + # Only Subversion 1.4 has reparent() + if ra is None or not hasattr(svn.ra, 'reparent'): + self.client = svn.client.create_context(self.pool) + ab = _create_auth_baton(self.pool) + if False: + svn.core.svn_auth_set_parameter( + ab, svn.core.SVN_AUTH_PARAM_DEFAULT_USERNAME, self.username) + svn.core.svn_auth_set_parameter( + ab, svn.core.SVN_AUTH_PARAM_DEFAULT_PASSWORD, self.password) + self.client.auth_baton = ab + self.client.config = svn_config + try: + self.ra = svn.client.open_ra_session( + self.svn_url.encode('utf8'), + self.client, self.pool) + except SubversionException, (inst, num): + if num in (svn.core.SVN_ERR_RA_ILLEGAL_URL, + svn.core.SVN_ERR_RA_LOCAL_REPOS_OPEN_FAILED, + svn.core.SVN_ERR_BAD_URL): + raise NotBranchError(url) + raise + else: + self.ra = ra + svn.ra.reparent(self.ra, self.svn_url.encode('utf8')) + + class Reporter(object): + def __init__(self, reporter_data): + self._reporter, self._baton = reporter_data + + def set_path(self, path, revnum, start_empty, lock_token, pool=None): + svn.ra.reporter2_invoke_set_path(self._reporter, self._baton, + path, revnum, start_empty, lock_token, pool) + + def delete_path(self, path, pool=None): + svn.ra.reporter2_invoke_delete_path(self._reporter, self._baton, + path, pool) + + def link_path(self, path, url, revision, start_empty, lock_token, + pool=None): + svn.ra.reporter2_invoke_link_path(self._reporter, self._baton, + path, url, revision, start_empty, lock_token, + pool) + + def finish_report(self, pool=None): + svn.ra.reporter2_invoke_finish_report(self._reporter, + self._baton, pool) + + def abort_report(self, pool=None): + svn.ra.reporter2_invoke_abort_report(self._reporter, + self._baton, pool) + + def do_update(self, revnum, path, *args, **kwargs): + return self.Reporter(svn.ra.do_update(self.ra, revnum, path, + *args, **kwargs)) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo new file mode 100644 index 0000000..ee1d3d1 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/convert/transport.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py new file mode 100644 index 0000000..88294a7 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.py @@ -0,0 +1,272 @@ +"""automatically manage newlines in repository files + +This extension allows you to manage the type of line endings (CRLF or +LF) that are used in the repository and in the local working +directory. That way you can get CRLF line endings on Windows and LF on +Unix/Mac, thereby letting everybody use their OS native line endings. + +The extension reads its configuration from a versioned ``.hgeol`` +configuration file every time you run an ``hg`` command. The +``.hgeol`` file use the same syntax as all other Mercurial +configuration files. It uses two sections, ``[patterns]`` and +``[repository]``. + +The ``[patterns]`` section specifies how line endings should be +converted between the working copy and the repository. The format is +specified by a file pattern. The first match is used, so put more +specific patterns first. The available line endings are ``LF``, +``CRLF``, and ``BIN``. 
+ +Files with the declared format of ``CRLF`` or ``LF`` are always +checked out and stored in the repository in that format and files +declared to be binary (``BIN``) are left unchanged. Additionally, +``native`` is an alias for checking out in the platform's default line +ending: ``LF`` on Unix (including Mac OS X) and ``CRLF`` on +Windows. Note that ``BIN`` (do nothing to line endings) is Mercurial's +default behaviour; it is only needed if you need to override a later, +more general pattern. + +The optional ``[repository]`` section specifies the line endings to +use for files stored in the repository. It has a single setting, +``native``, which determines the storage line endings for files +declared as ``native`` in the ``[patterns]`` section. It can be set to +``LF`` or ``CRLF``. The default is ``LF``. For example, this means +that on Windows, files configured as ``native`` (``CRLF`` by default) +will be converted to ``LF`` when stored in the repository. Files +declared as ``LF``, ``CRLF``, or ``BIN`` in the ``[patterns]`` section +are always stored as-is in the repository. + +Example versioned ``.hgeol`` file:: + + [patterns] + **.py = native + **.vcproj = CRLF + **.txt = native + Makefile = LF + **.jpg = BIN + + [repository] + native = LF + +.. note:: + The rules will first apply when files are touched in the working + copy, e.g. by updating to null and back to tip to touch all files. + +The extension uses an optional ``[eol]`` section in your hgrc file +(not the ``.hgeol`` file) for settings that control the overall +behavior. There are two settings: + +- ``eol.native`` (default ``os.linesep``) can be set to ``LF`` or + ``CRLF`` to override the default interpretation of ``native`` for + checkout. This can be used with :hg:`archive` on Unix, say, to + generate an archive where files have line endings for Windows. + +- ``eol.only-consistent`` (default True) can be set to False to make + the extension convert files with inconsistent EOLs. Inconsistent + means that there is both ``CRLF`` and ``LF`` present in the file. + Such files are normally not touched under the assumption that they + have mixed EOLs on purpose. + +The ``win32text.forbid*`` hooks provided by the win32text extension +have been unified into a single hook named ``eol.hook``. The hook will +lookup the expected line endings from the ``.hgeol`` file, which means +you must migrate to a ``.hgeol`` file first before using the hook. + +See :hg:`help patterns` for more information about the glob patterns +used. +""" + +from mercurial.i18n import _ +from mercurial import util, config, extensions, match +import re, os + +# Matches a lone LF, i.e., one that is not part of CRLF. +singlelf = re.compile('(^|[^\r])\n') +# Matches a single EOL which can either be a CRLF where repeated CR +# are removed or a LF. We do not care about old Machintosh files, so a +# stray CR is an error. 
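# For illustration, how the pattern above and the one defined just below
# behave on small byte strings:
#
#     >>> bool(singlelf.search('a\r\nb\n'))      # a lone LF is present
#     True
#     >>> eolre.sub('\r\n', 'a\r\nb\n')          # normalize to CRLF
#     'a\r\nb\r\n'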
+eolre = re.compile('\r*\n') + + +def inconsistenteol(data): + return '\r\n' in data and singlelf.search(data) + +def tolf(s, params, ui, **kwargs): + """Filter to convert to LF EOLs.""" + if util.binary(s): + return s + if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s): + return s + return eolre.sub('\n', s) + +def tocrlf(s, params, ui, **kwargs): + """Filter to convert to CRLF EOLs.""" + if util.binary(s): + return s + if ui.configbool('eol', 'only-consistent', True) and inconsistenteol(s): + return s + return eolre.sub('\r\n', s) + +def isbinary(s, params): + """Filter to do nothing with the file.""" + return s + +filters = { + 'to-lf': tolf, + 'to-crlf': tocrlf, + 'is-binary': isbinary, +} + + +def hook(ui, repo, node, hooktype, **kwargs): + """verify that files have expected EOLs""" + files = set() + for rev in xrange(repo[node].rev(), len(repo)): + files.update(repo[rev].files()) + tip = repo['tip'] + for f in files: + if f not in tip: + continue + for pattern, target in ui.configitems('encode'): + if match.match(repo.root, '', [pattern])(f): + data = tip[f].data() + if target == "to-lf" and "\r\n" in data: + raise util.Abort(_("%s should not have CRLF line endings") + % f) + elif target == "to-crlf" and singlelf.search(data): + raise util.Abort(_("%s should not have LF line endings") + % f) + + +def preupdate(ui, repo, hooktype, parent1, parent2): + #print "preupdate for %s: %s -> %s" % (repo.root, parent1, parent2) + repo.readhgeol(parent1) + return False + +def uisetup(ui): + ui.setconfig('hooks', 'preupdate.eol', preupdate) + +def extsetup(ui): + try: + extensions.find('win32text') + raise util.Abort(_("the eol extension is incompatible with the " + "win32text extension")) + except KeyError: + pass + + +def reposetup(ui, repo): + uisetup(repo.ui) + #print "reposetup for", repo.root + + if not repo.local(): + return + for name, fn in filters.iteritems(): + repo.adddatafilter(name, fn) + + ui.setconfig('patch', 'eol', 'auto') + + class eolrepo(repo.__class__): + + _decode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} + _encode = {'LF': 'to-lf', 'CRLF': 'to-crlf', 'BIN': 'is-binary'} + + def readhgeol(self, node=None, data=None): + if data is None: + try: + if node is None: + data = self.wfile('.hgeol').read() + else: + data = self[node]['.hgeol'].data() + except (IOError, LookupError): + return None + + if self.ui.config('eol', 'native', os.linesep) in ('LF', '\n'): + self._decode['NATIVE'] = 'to-lf' + else: + self._decode['NATIVE'] = 'to-crlf' + + eol = config.config() + # Our files should not be touched. The pattern must be + # inserted first override a '** = native' pattern. + eol.set('patterns', '.hg*', 'BIN') + # We can then parse the user's patterns. + eol.parse('.hgeol', data) + + if eol.get('repository', 'native') == 'CRLF': + self._encode['NATIVE'] = 'to-crlf' + else: + self._encode['NATIVE'] = 'to-lf' + + for pattern, style in eol.items('patterns'): + key = style.upper() + try: + self.ui.setconfig('decode', pattern, self._decode[key]) + self.ui.setconfig('encode', pattern, self._encode[key]) + except KeyError: + self.ui.warn(_("ignoring unknown EOL style '%s' from %s\n") + % (style, eol.source('patterns', pattern))) + + include = [] + exclude = [] + for pattern, style in eol.items('patterns'): + key = style.upper() + if key == 'BIN': + exclude.append(pattern) + else: + include.append(pattern) + + # This will match the files for which we need to care + # about inconsistent newlines. 
+ return match.match(self.root, '', [], include, exclude) + + def _hgcleardirstate(self): + self._eolfile = self.readhgeol() or self.readhgeol('tip') + + if not self._eolfile: + self._eolfile = util.never + return + + try: + cachemtime = os.path.getmtime(self.join("eol.cache")) + except OSError: + cachemtime = 0 + + try: + eolmtime = os.path.getmtime(self.wjoin(".hgeol")) + except OSError: + eolmtime = 0 + + if eolmtime > cachemtime: + ui.debug("eol: detected change in .hgeol\n") + # TODO: we could introduce a method for this in dirstate. + wlock = None + try: + wlock = self.wlock() + for f, e in self.dirstate._map.iteritems(): + self.dirstate._map[f] = (e[0], e[1], -1, 0) + self.dirstate._dirty = True + # Touch the cache to update mtime. TODO: are we sure this + # always enought to update the mtime, or should we write a + # bit to the file? + self.opener("eol.cache", "w").close() + finally: + if wlock is not None: + wlock.release() + + def commitctx(self, ctx, error=False): + for f in sorted(ctx.added() + ctx.modified()): + if not self._eolfile(f): + continue + data = ctx[f].data() + if util.binary(data): + # We should not abort here, since the user should + # be able to say "** = native" to automatically + # have all non-binary files taken care of. + continue + if inconsistenteol(data): + raise util.Abort(_("inconsistent newline style " + "in %s\n" % f)) + return super(eolrepo, self).commitctx(ctx, error) + repo.__class__ = eolrepo + repo._hgcleardirstate() diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo new file mode 100644 index 0000000..cd15b4f Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/eol.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py new file mode 100644 index 0000000..5cf9f03 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.py @@ -0,0 +1,325 @@ +# extdiff.py - external diff program support for mercurial +# +# Copyright 2006 Vadim Gelfer +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to allow external programs to compare revisions + +The extdiff Mercurial extension allows you to use external programs +to compare revisions, or revision with working directory. The external +diff programs are called with a configurable set of options and two +non-option arguments: paths to directories containing snapshots of +files to compare. + +The extdiff extension also allows to configure new diff commands, so +you do not need to type :hg:`extdiff -p kdiff3` always. 
:: + + [extdiff] + # add new command that runs GNU diff(1) in 'context diff' mode + cdiff = gdiff -Nprc5 + ## or the old way: + #cmd.cdiff = gdiff + #opts.cdiff = -Nprc5 + + # add new command called vdiff, runs kdiff3 + vdiff = kdiff3 + + # add new command called meld, runs meld (no need to name twice) + meld = + + # add new command called vimdiff, runs gvimdiff with DirDiff plugin + # (see http://www.vim.org/scripts/script.php?script_id=102) Non + # English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in + # your .vimrc + vimdiff = gvim -f '+next' '+execute "DirDiff" argv(0) argv(1)' + +Tool arguments can include variables that are expanded at runtime:: + + $parent1, $plabel1 - filename, descriptive label of first parent + $child, $clabel - filename, descriptive label of child revision + $parent2, $plabel2 - filename, descriptive label of second parent + $parent is an alias for $parent1. + +The extdiff extension will look in your [diff-tools] and [merge-tools] +sections for diff tool arguments, when none are specified in [extdiff]. + +:: + + [extdiff] + kdiff3 = + + [diff-tools] + kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child + +You can use -I/-X and list of file or directory names like normal +:hg:`diff` command. The extdiff extension makes snapshots of only +needed files, so running the external diff program will actually be +pretty fast (at least faster than having to compare the entire tree). +''' + +from mercurial.i18n import _ +from mercurial.node import short, nullid +from mercurial import cmdutil, util, commands, encoding +import os, shlex, shutil, tempfile, re + +def snapshot(ui, repo, files, node, tmproot): + '''snapshot files as of some revision + if not using snapshot, -I/-X does not work and recursive diff + in tools like kdiff3 and meld displays too many files.''' + dirname = os.path.basename(repo.root) + if dirname == "": + dirname = "root" + if node is not None: + dirname = '%s.%s' % (dirname, short(node)) + base = os.path.join(tmproot, dirname) + os.mkdir(base) + if node is not None: + ui.note(_('making snapshot of %d files from rev %s\n') % + (len(files), short(node))) + else: + ui.note(_('making snapshot of %d files from working directory\n') % + (len(files))) + wopener = util.opener(base) + fns_and_mtime = [] + ctx = repo[node] + for fn in files: + wfn = util.pconvert(fn) + if not wfn in ctx: + # File doesn't exist; could be a bogus modify + continue + ui.note(' %s\n' % wfn) + dest = os.path.join(base, wfn) + fctx = ctx[wfn] + data = repo.wwritedata(wfn, fctx.data()) + if 'l' in fctx.flags(): + wopener.symlink(data, wfn) + else: + wopener(wfn, 'w').write(data) + if 'x' in fctx.flags(): + util.set_flags(dest, False, True) + if node is None: + fns_and_mtime.append((dest, repo.wjoin(fn), os.path.getmtime(dest))) + return dirname, fns_and_mtime + +def dodiff(ui, repo, diffcmd, diffopts, pats, opts): + '''Do the actuall diff: + + - copy to a temp structure if diffing 2 internal revisions + - copy to a temp structure if diffing working revision with + another one and more than 1 file is changed + - just invoke the diff for a single file in the working dir + ''' + + revs = opts.get('rev') + change = opts.get('change') + args = ' '.join(diffopts) + do3way = '$parent2' in args + + if revs and change: + msg = _('cannot specify --rev and --change at the same time') + raise util.Abort(msg) + elif change: + node2 = repo.lookup(change) + node1a, node1b = repo.changelog.parents(node2) + else: + node1a, node2 = cmdutil.revpair(repo, revs) + if not revs: + 
node1b = repo.dirstate.parents()[1] + else: + node1b = nullid + + # Disable 3-way merge if there is only one parent + if do3way: + if node1b == nullid: + do3way = False + + matcher = cmdutil.match(repo, pats, opts) + mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3]) + if do3way: + mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3]) + else: + mod_b, add_b, rem_b = set(), set(), set() + modadd = mod_a | add_a | mod_b | add_b + common = modadd | rem_a | rem_b + if not common: + return 0 + + tmproot = tempfile.mkdtemp(prefix='extdiff.') + try: + # Always make a copy of node1a (and node1b, if applicable) + dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a) + dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0] + rev1a = '@%d' % repo[node1a].rev() + if do3way: + dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b) + dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0] + rev1b = '@%d' % repo[node1b].rev() + else: + dir1b = None + rev1b = '' + + fns_and_mtime = [] + + # If node2 in not the wc or there is >1 change, copy it + dir2root = '' + rev2 = '' + if node2: + dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0] + rev2 = '@%d' % repo[node2].rev() + elif len(common) > 1: + #we only actually need to get the files to copy back to + #the working dir in this case (because the other cases + #are: diffing 2 revisions or single file -- in which case + #the file is already directly passed to the diff tool). + dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot) + else: + # This lets the diff tool open the changed file directly + dir2 = '' + dir2root = repo.root + + label1a = rev1a + label1b = rev1b + label2 = rev2 + + # If only one change, diff the files instead of the directories + # Handle bogus modifies correctly by checking if the files exist + if len(common) == 1: + common_file = util.localpath(common.pop()) + dir1a = os.path.join(dir1a, common_file) + label1a = common_file + rev1a + if not os.path.isfile(os.path.join(tmproot, dir1a)): + dir1a = os.devnull + if do3way: + dir1b = os.path.join(dir1b, common_file) + label1b = common_file + rev1b + if not os.path.isfile(os.path.join(tmproot, dir1b)): + dir1b = os.devnull + dir2 = os.path.join(dir2root, dir2, common_file) + label2 = common_file + rev2 + + # Function to quote file/dir names in the argument string. + # When not operating in 3-way mode, an empty string is + # returned for parent2 + replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b, + plabel1=label1a, plabel2=label1b, + clabel=label2, child=dir2) + def quote(match): + key = match.group()[1:] + if not do3way and key == 'parent2': + return '' + return util.shellquote(replace[key]) + + # Match parent2 first, so 'parent1?' will match both parent1 and parent + regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel)' + if not do3way and not re.search(regex, args): + args += ' $parent1 $child' + args = re.sub(regex, quote, args) + cmdline = util.shellquote(diffcmd) + ' ' + args + + ui.debug('running %r in %s\n' % (cmdline, tmproot)) + util.system(cmdline, cwd=tmproot) + + for copy_fn, working_fn, mtime in fns_and_mtime: + if os.path.getmtime(copy_fn) != mtime: + ui.debug('file changed while diffing. 
' + 'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn)) + util.copyfile(copy_fn, working_fn) + + return 1 + finally: + ui.note(_('cleaning up temp directory\n')) + shutil.rmtree(tmproot) + +def extdiff(ui, repo, *pats, **opts): + '''use external program to diff repository (or selected files) + + Show differences between revisions for the specified files, using + an external program. The default program used is diff, with + default options "-Npru". + + To select a different program, use the -p/--program option. The + program will be passed the names of two directories to compare. To + pass additional options to the program, use -o/--option. These + will be passed before the names of the directories to compare. + + When two revision arguments are given, then changes are shown + between those revisions. If only one revision is specified then + that revision is compared to the working directory, and, when no + revisions are specified, the working directory files are compared + to its parent.''' + program = opts.get('program') + option = opts.get('option') + if not program: + program = 'diff' + option = option or ['-Npru'] + return dodiff(ui, repo, program, option, pats, opts) + +cmdtable = { + "extdiff": + (extdiff, + [('p', 'program', '', + _('comparison program to run'), _('CMD')), + ('o', 'option', [], + _('pass option to comparison program'), _('OPT')), + ('r', 'rev', [], + _('revision'), _('REV')), + ('c', 'change', '', + _('change made by revision'), _('REV')), + ] + commands.walkopts, + _('hg extdiff [OPT]... [FILE]...')), + } + +def uisetup(ui): + for cmd, path in ui.configitems('extdiff'): + if cmd.startswith('cmd.'): + cmd = cmd[4:] + if not path: + path = cmd + diffopts = ui.config('extdiff', 'opts.' + cmd, '') + diffopts = diffopts and [diffopts] or [] + elif cmd.startswith('opts.'): + continue + else: + # command = path opts + if path: + diffopts = shlex.split(path) + path = diffopts.pop(0) + else: + path, diffopts = cmd, [] + # look for diff arguments in [diff-tools] then [merge-tools] + if diffopts == []: + args = ui.config('diff-tools', cmd+'.diffargs') or \ + ui.config('merge-tools', cmd+'.diffargs') + if args: + diffopts = shlex.split(args) + def save(cmd, path, diffopts): + '''use closure to save diff command to use''' + def mydiff(ui, repo, *pats, **opts): + return dodiff(ui, repo, path, diffopts + opts['option'], + pats, opts) + doc = _('''\ +use %(path)s to diff repository (or selected files) + + Show differences between revisions for the specified files, using + the %(path)s program. + + When two revision arguments are given, then changes are shown + between those revisions. If only one revision is specified then + that revision is compared to the working directory, and, when no + revisions are specified, the working directory files are compared + to its parent.\ +''') % dict(path=util.uirepr(path)) + + # We must translate the docstring right away since it is + # used as a format string. The string will unfortunately + # be translated again in commands.helpcmd and this will + # fail when the docstring contains non-ASCII characters. + # Decoding the string to a Unicode string here (using the + # right encoding) prevents that. + mydiff.__doc__ = doc.decode(encoding.encoding) + return mydiff + cmdtable[cmd] = (save(cmd, path, diffopts), + cmdtable['extdiff'][1][1:], + _('hg %s [OPTION]... 
[FILE]...') % cmd) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo new file mode 100644 index 0000000..008d690 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/extdiff.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py new file mode 100644 index 0000000..b8e765f --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.py @@ -0,0 +1,152 @@ +# fetch.py - pull and merge remote changes +# +# Copyright 2006 Vadim Gelfer +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''pull, update and merge in one command''' + +from mercurial.i18n import _ +from mercurial.node import nullid, short +from mercurial import commands, cmdutil, hg, util, url, error +from mercurial.lock import release + +def fetch(ui, repo, source='default', **opts): + '''pull changes from a remote repository, merge new changes if needed. + + This finds all changes from the repository at the specified path + or URL and adds them to the local repository. + + If the pulled changes add a new branch head, the head is + automatically merged, and the result of the merge is committed. + Otherwise, the working directory is updated to include the new + changes. + + When a merge occurs, the newly pulled changes are assumed to be + "authoritative". The head of the new changes is used as the first + parent, with local changes as the second. To switch the merge + order, use --switch-parent. + + See :hg:`help dates` for a list of formats valid for -d/--date. + + Returns 0 on success. + ''' + + date = opts.get('date') + if date: + opts['date'] = util.parsedate(date) + + parent, p2 = repo.dirstate.parents() + branch = repo.dirstate.branch() + branchnode = repo.branchtags().get(branch) + if parent != branchnode: + raise util.Abort(_('working dir not at branch tip ' + '(use "hg update" to check out branch tip)')) + + if p2 != nullid: + raise util.Abort(_('outstanding uncommitted merge')) + + wlock = lock = None + try: + wlock = repo.wlock() + lock = repo.lock() + mod, add, rem, del_ = repo.status()[:4] + + if mod or add or rem: + raise util.Abort(_('outstanding uncommitted changes')) + if del_: + raise util.Abort(_('working directory is missing some files')) + bheads = repo.branchheads(branch) + bheads = [head for head in bheads if len(repo[head].children()) == 0] + if len(bheads) > 1: + raise util.Abort(_('multiple heads in this branch ' + '(use "hg heads ." and "hg merge" to merge)')) + + other = hg.repository(hg.remoteui(repo, opts), + ui.expandpath(source)) + ui.status(_('pulling from %s\n') % + url.hidepassword(ui.expandpath(source))) + revs = None + if opts['rev']: + try: + revs = [other.lookup(rev) for rev in opts['rev']] + except error.CapabilityError: + err = _("Other repository doesn't support revision lookup, " + "so a rev cannot be specified.") + raise util.Abort(err) + + # Are there any changes at all? + modheads = repo.pull(other, heads=revs) + if modheads == 0: + return 0 + + # Is this a simple fast-forward along the current branch? + newheads = repo.branchheads(branch) + newchildren = repo.changelog.nodesbetween([parent], newheads)[2] + if len(newheads) == 1: + if newchildren[0] != parent: + return hg.clean(repo, newchildren[0]) + else: + return 0 + + # Are there more than one additional branch heads? 
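        # The code below picks one of the new children as the working
        # directory target; if more than one other head would still remain
        # on the branch, fetch refuses to merge automatically and reports
        # how many heads were left untouched.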
+ newchildren = [n for n in newchildren if n != parent] + newparent = parent + if newchildren: + newparent = newchildren[0] + hg.clean(repo, newparent) + newheads = [n for n in newheads if n != newparent] + if len(newheads) > 1: + ui.status(_('not merging with %d other new branch heads ' + '(use "hg heads ." and "hg merge" to merge them)\n') % + (len(newheads) - 1)) + return 1 + + # Otherwise, let's merge. + err = False + if newheads: + # By default, we consider the repository we're pulling + # *from* as authoritative, so we merge our changes into + # theirs. + if opts['switch_parent']: + firstparent, secondparent = newparent, newheads[0] + else: + firstparent, secondparent = newheads[0], newparent + ui.status(_('updating to %d:%s\n') % + (repo.changelog.rev(firstparent), + short(firstparent))) + hg.clean(repo, firstparent) + ui.status(_('merging with %d:%s\n') % + (repo.changelog.rev(secondparent), short(secondparent))) + err = hg.merge(repo, secondparent, remind=False) + + if not err: + # we don't translate commit messages + message = (cmdutil.logmessage(opts) or + ('Automated merge with %s' % + url.removeauth(other.url()))) + editor = cmdutil.commiteditor + if opts.get('force_editor') or opts.get('edit'): + editor = cmdutil.commitforceeditor + n = repo.commit(message, opts['user'], opts['date'], editor=editor) + ui.status(_('new changeset %d:%s merges remote changes ' + 'with local\n') % (repo.changelog.rev(n), + short(n))) + + return err + + finally: + release(lock, wlock) + +cmdtable = { + 'fetch': + (fetch, + [('r', 'rev', [], + _('a specific revision you would like to pull'), _('REV')), + ('e', 'edit', None, _('edit commit message')), + ('', 'force-editor', None, _('edit commit message (DEPRECATED)')), + ('', 'switch-parent', None, _('switch parents when merging')), + ] + commands.commitopts + commands.commitopts2 + commands.remoteopts, + _('hg fetch [SOURCE]')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo new file mode 100644 index 0000000..c8529ef Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/fetch.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py new file mode 100644 index 0000000..b13ec1e --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.py @@ -0,0 +1,288 @@ +# Copyright 2005, 2006 Benoit Boissinot +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. 
+ +'''commands to sign and verify changesets''' + +import os, tempfile, binascii +from mercurial import util, commands, match +from mercurial import node as hgnode +from mercurial.i18n import _ + +class gpg(object): + def __init__(self, path, key=None): + self.path = path + self.key = (key and " --local-user \"%s\"" % key) or "" + + def sign(self, data): + gpgcmd = "%s --sign --detach-sign%s" % (self.path, self.key) + return util.filter(data, gpgcmd) + + def verify(self, data, sig): + """ returns of the good and bad signatures""" + sigfile = datafile = None + try: + # create temporary files + fd, sigfile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".sig") + fp = os.fdopen(fd, 'wb') + fp.write(sig) + fp.close() + fd, datafile = tempfile.mkstemp(prefix="hg-gpg-", suffix=".txt") + fp = os.fdopen(fd, 'wb') + fp.write(data) + fp.close() + gpgcmd = ("%s --logger-fd 1 --status-fd 1 --verify " + "\"%s\" \"%s\"" % (self.path, sigfile, datafile)) + ret = util.filter("", gpgcmd) + finally: + for f in (sigfile, datafile): + try: + if f: + os.unlink(f) + except: + pass + keys = [] + key, fingerprint = None, None + err = "" + for l in ret.splitlines(): + # see DETAILS in the gnupg documentation + # filter the logger output + if not l.startswith("[GNUPG:]"): + continue + l = l[9:] + if l.startswith("ERRSIG"): + err = _("error while verifying signature") + break + elif l.startswith("VALIDSIG"): + # fingerprint of the primary key + fingerprint = l.split()[10] + elif (l.startswith("GOODSIG") or + l.startswith("EXPSIG") or + l.startswith("EXPKEYSIG") or + l.startswith("BADSIG")): + if key is not None: + keys.append(key + [fingerprint]) + key = l.split(" ", 2) + fingerprint = None + if err: + return err, [] + if key is not None: + keys.append(key + [fingerprint]) + return err, keys + +def newgpg(ui, **opts): + """create a new gpg instance""" + gpgpath = ui.config("gpg", "cmd", "gpg") + gpgkey = opts.get('key') + if not gpgkey: + gpgkey = ui.config("gpg", "key", None) + return gpg(gpgpath, gpgkey) + +def sigwalk(repo): + """ + walk over every sigs, yields a couple + ((node, version, sig), (filename, linenumber)) + """ + def parsefile(fileiter, context): + ln = 1 + for l in fileiter: + if not l: + continue + yield (l.split(" ", 2), (context, ln)) + ln += 1 + + # read the heads + fl = repo.file(".hgsigs") + for r in reversed(fl.heads()): + fn = ".hgsigs|%s" % hgnode.short(r) + for item in parsefile(fl.read(r).splitlines(), fn): + yield item + try: + # read local signatures + fn = "localsigs" + for item in parsefile(repo.opener(fn), fn): + yield item + except IOError: + pass + +def getkeys(ui, repo, mygpg, sigdata, context): + """get the keys who signed a data""" + fn, ln = context + node, version, sig = sigdata + prefix = "%s:%d" % (fn, ln) + node = hgnode.bin(node) + + data = node2txt(repo, node, version) + sig = binascii.a2b_base64(sig) + err, keys = mygpg.verify(data, sig) + if err: + ui.warn("%s:%d %s\n" % (fn, ln , err)) + return None + + validkeys = [] + # warn for expired key and/or sigs + for key in keys: + if key[0] == "BADSIG": + ui.write(_("%s Bad signature from \"%s\"\n") % (prefix, key[2])) + continue + if key[0] == "EXPSIG": + ui.write(_("%s Note: Signature has expired" + " (signed by: \"%s\")\n") % (prefix, key[2])) + elif key[0] == "EXPKEYSIG": + ui.write(_("%s Note: This key has expired" + " (signed by: \"%s\")\n") % (prefix, key[2])) + validkeys.append((key[1], key[2], key[3])) + return validkeys + +def sigs(ui, repo): + """list signed changesets""" + mygpg = newgpg(ui) + revs = {} + + for 
data, context in sigwalk(repo): + node, version, sig = data + fn, ln = context + try: + n = repo.lookup(node) + except KeyError: + ui.warn(_("%s:%d node does not exist\n") % (fn, ln)) + continue + r = repo.changelog.rev(n) + keys = getkeys(ui, repo, mygpg, data, context) + if not keys: + continue + revs.setdefault(r, []) + revs[r].extend(keys) + for rev in sorted(revs, reverse=True): + for k in revs[rev]: + r = "%5d:%s" % (rev, hgnode.hex(repo.changelog.node(rev))) + ui.write("%-30s %s\n" % (keystr(ui, k), r)) + +def check(ui, repo, rev): + """verify all the signatures there may be for a particular revision""" + mygpg = newgpg(ui) + rev = repo.lookup(rev) + hexrev = hgnode.hex(rev) + keys = [] + + for data, context in sigwalk(repo): + node, version, sig = data + if node == hexrev: + k = getkeys(ui, repo, mygpg, data, context) + if k: + keys.extend(k) + + if not keys: + ui.write(_("No valid signature for %s\n") % hgnode.short(rev)) + return + + # print summary + ui.write("%s is signed by:\n" % hgnode.short(rev)) + for key in keys: + ui.write(" %s\n" % keystr(ui, key)) + +def keystr(ui, key): + """associate a string to a key (username, comment)""" + keyid, user, fingerprint = key + comment = ui.config("gpg", fingerprint, None) + if comment: + return "%s (%s)" % (user, comment) + else: + return user + +def sign(ui, repo, *revs, **opts): + """add a signature for the current or given revision + + If no revision is given, the parent of the working directory is used, + or tip if no revision is checked out. + + See :hg:`help dates` for a list of formats valid for -d/--date. + """ + + mygpg = newgpg(ui, **opts) + sigver = "0" + sigmessage = "" + + date = opts.get('date') + if date: + opts['date'] = util.parsedate(date) + + if revs: + nodes = [repo.lookup(n) for n in revs] + else: + nodes = [node for node in repo.dirstate.parents() + if node != hgnode.nullid] + if len(nodes) > 1: + raise util.Abort(_('uncommitted merge - please provide a ' + 'specific revision')) + if not nodes: + nodes = [repo.changelog.tip()] + + for n in nodes: + hexnode = hgnode.hex(n) + ui.write(_("Signing %d:%s\n") % (repo.changelog.rev(n), + hgnode.short(n))) + # build data + data = node2txt(repo, n, sigver) + sig = mygpg.sign(data) + if not sig: + raise util.Abort(_("error while signing")) + sig = binascii.b2a_base64(sig) + sig = sig.replace("\n", "") + sigmessage += "%s %s %s\n" % (hexnode, sigver, sig) + + # write it + if opts['local']: + repo.opener("localsigs", "ab").write(sigmessage) + return + + msigs = match.exact(repo.root, '', ['.hgsigs']) + s = repo.status(match=msigs, unknown=True, ignored=True)[:6] + if util.any(s) and not opts["force"]: + raise util.Abort(_("working copy of .hgsigs is changed " + "(please commit .hgsigs manually " + "or use --force)")) + + repo.wfile(".hgsigs", "ab").write(sigmessage) + + if '.hgsigs' not in repo.dirstate: + repo[None].add([".hgsigs"]) + + if opts["no_commit"]: + return + + message = opts['message'] + if not message: + # we don't translate commit messages + message = "\n".join(["Added signature for changeset %s" + % hgnode.short(n) + for n in nodes]) + try: + repo.commit(message, opts['user'], opts['date'], match=msigs) + except ValueError, inst: + raise util.Abort(str(inst)) + +def node2txt(repo, node, ver): + """map a manifest into some text""" + if ver == "0": + return "%s\n" % hgnode.hex(node) + else: + raise util.Abort(_("unknown signature version")) + +cmdtable = { + "sign": + (sign, + [('l', 'local', None, _('make the signature local')), + ('f', 'force', None, _('sign 
even if the sigfile is modified')), + ('', 'no-commit', None, _('do not commit the sigfile after signing')), + ('k', 'key', '', + _('the key id to sign with'), _('ID')), + ('m', 'message', '', + _('commit message'), _('TEXT')), + ] + commands.commitopts2, + _('hg sign [OPTION]... [REVISION]...')), + "sigcheck": (check, [], _('hg sigcheck REVISION')), + "sigs": (sigs, [], _('hg sigs')), +} + diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo new file mode 100644 index 0000000..3d5d415 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/gpg.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py new file mode 100644 index 0000000..a8eb805 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.py @@ -0,0 +1,337 @@ +# ASCII graph log extension for Mercurial +# +# Copyright 2007 Joel Rosdahl +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''command to view revision graphs from a shell + +This extension adds a --graph option to the incoming, outgoing and log +commands. When this options is given, an ASCII representation of the +revision graph is also shown. +''' + +import os +from mercurial.cmdutil import revrange, show_changeset +from mercurial.commands import templateopts +from mercurial.i18n import _ +from mercurial.node import nullrev +from mercurial import cmdutil, commands, extensions +from mercurial import hg, util, graphmod + +ASCIIDATA = 'ASC' + +def asciiedges(seen, rev, parents): + """adds edge info to changelog DAG walk suitable for ascii()""" + if rev not in seen: + seen.append(rev) + nodeidx = seen.index(rev) + + knownparents = [] + newparents = [] + for parent in parents: + if parent in seen: + knownparents.append(parent) + else: + newparents.append(parent) + + ncols = len(seen) + seen[nodeidx:nodeidx + 1] = newparents + edges = [(nodeidx, seen.index(p)) for p in knownparents] + + if len(newparents) > 0: + edges.append((nodeidx, nodeidx)) + if len(newparents) > 1: + edges.append((nodeidx, nodeidx + 1)) + + nmorecols = len(seen) - ncols + return nodeidx, edges, ncols, nmorecols + +def fix_long_right_edges(edges): + for (i, (start, end)) in enumerate(edges): + if end > start: + edges[i] = (start, end + 1) + +def get_nodeline_edges_tail( + node_index, p_node_index, n_columns, n_columns_diff, p_diff, fix_tail): + if fix_tail and n_columns_diff == p_diff and n_columns_diff != 0: + # Still going in the same non-vertical direction. 
+ if n_columns_diff == -1: + start = max(node_index + 1, p_node_index) + tail = ["|", " "] * (start - node_index - 1) + tail.extend(["/", " "] * (n_columns - start)) + return tail + else: + return ["\\", " "] * (n_columns - node_index - 1) + else: + return ["|", " "] * (n_columns - node_index - 1) + +def draw_edges(edges, nodeline, interline): + for (start, end) in edges: + if start == end + 1: + interline[2 * end + 1] = "/" + elif start == end - 1: + interline[2 * start + 1] = "\\" + elif start == end: + interline[2 * start] = "|" + else: + nodeline[2 * end] = "+" + if start > end: + (start, end) = (end, start) + for i in range(2 * start + 1, 2 * end): + if nodeline[i] != "+": + nodeline[i] = "-" + +def get_padding_line(ni, n_columns, edges): + line = [] + line.extend(["|", " "] * ni) + if (ni, ni - 1) in edges or (ni, ni) in edges: + # (ni, ni - 1) (ni, ni) + # | | | | | | | | + # +---o | | o---+ + # | | c | | c | | + # | |/ / | |/ / + # | | | | | | + c = "|" + else: + c = " " + line.extend([c, " "]) + line.extend(["|", " "] * (n_columns - ni - 1)) + return line + +def asciistate(): + """returns the initial value for the "state" argument to ascii()""" + return [0, 0] + +def ascii(ui, state, type, char, text, coldata): + """prints an ASCII graph of the DAG + + takes the following arguments (one call per node in the graph): + + - ui to write to + - Somewhere to keep the needed state in (init to asciistate()) + - Column of the current node in the set of ongoing edges. + - Type indicator of node data == ASCIIDATA. + - Payload: (char, lines): + - Character to use as node's symbol. + - List of lines to display as the node's text. + - Edges; a list of (col, next_col) indicating the edges between + the current node and its parents. + - Number of columns (ongoing edges) in the current revision. + - The difference between the number of columns (ongoing edges) + in the next revision and the number of columns (ongoing edges) + in the current revision. That is: -1 means one column removed; + 0 means no columns added or removed; 1 means one column added. 
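
    For example, coldata == (1, [(1, 0)], 2, -1) describes a node drawn in
    column 1 with a single edge back to column 0, two ongoing columns in
    this revision, and one column fewer in the next one.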
+ """ + + idx, edges, ncols, coldiff = coldata + assert -2 < coldiff < 2 + if coldiff == -1: + # Transform + # + # | | | | | | + # o | | into o---+ + # |X / |/ / + # | | | | + fix_long_right_edges(edges) + + # add_padding_line says whether to rewrite + # + # | | | | | | | | + # | o---+ into | o---+ + # | / / | | | # <--- padding line + # o | | | / / + # o | | + add_padding_line = (len(text) > 2 and coldiff == -1 and + [x for (x, y) in edges if x + 1 < y]) + + # fix_nodeline_tail says whether to rewrite + # + # | | o | | | | o | | + # | | |/ / | | |/ / + # | o | | into | o / / # <--- fixed nodeline tail + # | |/ / | |/ / + # o | | o | | + fix_nodeline_tail = len(text) <= 2 and not add_padding_line + + # nodeline is the line containing the node character (typically o) + nodeline = ["|", " "] * idx + nodeline.extend([char, " "]) + + nodeline.extend( + get_nodeline_edges_tail(idx, state[1], ncols, coldiff, + state[0], fix_nodeline_tail)) + + # shift_interline is the line containing the non-vertical + # edges between this entry and the next + shift_interline = ["|", " "] * idx + if coldiff == -1: + n_spaces = 1 + edge_ch = "/" + elif coldiff == 0: + n_spaces = 2 + edge_ch = "|" + else: + n_spaces = 3 + edge_ch = "\\" + shift_interline.extend(n_spaces * [" "]) + shift_interline.extend([edge_ch, " "] * (ncols - idx - 1)) + + # draw edges from the current node to its parents + draw_edges(edges, nodeline, shift_interline) + + # lines is the list of all graph lines to print + lines = [nodeline] + if add_padding_line: + lines.append(get_padding_line(idx, ncols, edges)) + lines.append(shift_interline) + + # make sure that there are as many graph lines as there are + # log strings + while len(text) < len(lines): + text.append("") + if len(lines) < len(text): + extra_interline = ["|", " "] * (ncols + coldiff) + while len(lines) < len(text): + lines.append(extra_interline) + + # print lines + indentation_level = max(ncols, ncols + coldiff) + for (line, logstr) in zip(lines, text): + ln = "%-*s %s" % (2 * indentation_level, "".join(line), logstr) + ui.write(ln.rstrip() + '\n') + + # ... and start over + state[0] = coldiff + state[1] = idx + +def get_revs(repo, rev_opt): + if rev_opt: + revs = revrange(repo, rev_opt) + if len(revs) == 0: + return (nullrev, nullrev) + return (max(revs), min(revs)) + else: + return (len(repo) - 1, 0) + +def check_unsupported_flags(opts): + for op in ["follow", "follow_first", "date", "copies", "keyword", "remove", + "only_merges", "user", "branch", "only_branch", "prune", + "newest_first", "no_merges", "include", "exclude"]: + if op in opts and opts[op]: + raise util.Abort(_("--graph option is incompatible with --%s") + % op.replace("_", "-")) + +def generate(ui, dag, displayer, showparents, edgefn): + seen, state = [], asciistate() + for rev, type, ctx, parents in dag: + char = ctx.node() in showparents and '@' or 'o' + displayer.show(ctx) + lines = displayer.hunk.pop(rev).split('\n')[:-1] + displayer.flush(rev) + ascii(ui, state, type, char, lines, edgefn(seen, rev, parents)) + displayer.close() + +def graphlog(ui, repo, path=None, **opts): + """show revision history alongside an ASCII revision graph + + Print a revision history alongside a revision graph drawn with + ASCII characters. + + Nodes printed as an @ character are parents of the working + directory. 
+ """ + + check_unsupported_flags(opts) + limit = cmdutil.loglimit(opts) + start, stop = get_revs(repo, opts["rev"]) + if start == nullrev: + return + + if path: + path = util.canonpath(repo.root, os.getcwd(), path) + if path: # could be reset in canonpath + revdag = graphmod.filerevs(repo, path, start, stop, limit) + else: + if limit is not None: + stop = max(stop, start - limit + 1) + revdag = graphmod.revisions(repo, start, stop) + + displayer = show_changeset(ui, repo, opts, buffered=True) + showparents = [ctx.node() for ctx in repo[None].parents()] + generate(ui, revdag, displayer, showparents, asciiedges) + +def graphrevs(repo, nodes, opts): + limit = cmdutil.loglimit(opts) + nodes.reverse() + if limit is not None: + nodes = nodes[:limit] + return graphmod.nodes(repo, nodes) + +def goutgoing(ui, repo, dest=None, **opts): + """show the outgoing changesets alongside an ASCII revision graph + + Print the outgoing changesets alongside a revision graph drawn with + ASCII characters. + + Nodes printed as an @ character are parents of the working + directory. + """ + + check_unsupported_flags(opts) + o = hg._outgoing(ui, repo, dest, opts) + if o is None: + return + + revdag = graphrevs(repo, o, opts) + displayer = show_changeset(ui, repo, opts, buffered=True) + showparents = [ctx.node() for ctx in repo[None].parents()] + generate(ui, revdag, displayer, showparents, asciiedges) + +def gincoming(ui, repo, source="default", **opts): + """show the incoming changesets alongside an ASCII revision graph + + Print the incoming changesets alongside a revision graph drawn with + ASCII characters. + + Nodes printed as an @ character are parents of the working + directory. + """ + def subreporecurse(): + return 1 + + check_unsupported_flags(opts) + def display(other, chlist, displayer): + revdag = graphrevs(other, chlist, opts) + showparents = [ctx.node() for ctx in repo[None].parents()] + generate(ui, revdag, displayer, showparents, asciiedges) + + hg._incoming(display, subreporecurse, ui, repo, source, opts, buffered=True) + +def uisetup(ui): + '''Initialize the extension.''' + _wrapcmd(ui, 'log', commands.table, graphlog) + _wrapcmd(ui, 'incoming', commands.table, gincoming) + _wrapcmd(ui, 'outgoing', commands.table, goutgoing) + +def _wrapcmd(ui, cmd, table, wrapfn): + '''wrap the command''' + def graph(orig, *args, **kwargs): + if kwargs['graph']: + return wrapfn(*args, **kwargs) + return orig(*args, **kwargs) + entry = extensions.wrapcommand(table, cmd, graph) + entry[1].append(('G', 'graph', None, _("show the revision DAG"))) + +cmdtable = { + "glog": + (graphlog, + [('l', 'limit', '', + _('limit number of changes displayed'), _('NUM')), + ('p', 'patch', False, _('show patch')), + ('r', 'rev', [], + _('show the specified revision or range'), _('REV')), + ] + templateopts, + _('hg glog [OPTION]... 
[FILE]')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo new file mode 100644 index 0000000..2edc5f3 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/graphlog.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py new file mode 100644 index 0000000..4e72680 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.py @@ -0,0 +1,251 @@ +# Copyright (C) 2007-8 Brendan Cully +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +"""hooks for integrating with the CIA.vc notification service + +This is meant to be run as a changegroup or incoming hook. To +configure it, set the following options in your hgrc:: + + [cia] + # your registered CIA user name + user = foo + # the name of the project in CIA + project = foo + # the module (subproject) (optional) + #module = foo + # Append a diffstat to the log message (optional) + #diffstat = False + # Template to use for log messages (optional) + #template = {desc}\\n{baseurl}/rev/{node}-- {diffstat} + # Style to use (optional) + #style = foo + # The URL of the CIA notification service (optional) + # You can use mailto: URLs to send by email, eg + # mailto:cia@cia.vc + # Make sure to set email.from if you do this. + #url = http://cia.vc/ + # print message instead of sending it (optional) + #test = False + + [hooks] + # one of these: + changegroup.cia = python:hgcia.hook + #incoming.cia = python:hgcia.hook + + [web] + # If you want hyperlinks (optional) + baseurl = http://server/path/to/repo +""" + +from mercurial.i18n import _ +from mercurial.node import bin, short +from mercurial import cmdutil, patch, templater, util, mail +import email.Parser + +import xmlrpclib +from xml.sax import saxutils + +socket_timeout = 30 # seconds +try: + # set a timeout for the socket so you don't have to wait so looooong + # when cia.vc is having problems. requires python >= 2.3: + import socket + socket.setdefaulttimeout(socket_timeout) +except: + pass + +HGCIA_VERSION = '0.1' +HGCIA_URL = 'http://hg.kublai.com/mercurial/hgcia' + + +class ciamsg(object): + """ A CIA message """ + def __init__(self, cia, ctx): + self.cia = cia + self.ctx = ctx + self.url = self.cia.url + + def fileelem(self, path, uri, action): + if uri: + uri = ' uri=%s' % saxutils.quoteattr(uri) + return '%s' % ( + uri, saxutils.quoteattr(action), saxutils.escape(path)) + + def fileelems(self): + n = self.ctx.node() + f = self.cia.repo.status(self.ctx.parents()[0].node(), n) + url = self.url or '' + elems = [] + for path in f[0]: + uri = '%s/diff/%s/%s' % (url, short(n), path) + elems.append(self.fileelem(path, url and uri, 'modify')) + for path in f[1]: + # TODO: copy/rename ? 
+ uri = '%s/file/%s/%s' % (url, short(n), path) + elems.append(self.fileelem(path, url and uri, 'add')) + for path in f[2]: + elems.append(self.fileelem(path, '', 'remove')) + + return '\n'.join(elems) + + def sourceelem(self, project, module=None, branch=None): + msg = ['', '%s' % saxutils.escape(project)] + if module: + msg.append('%s' % saxutils.escape(module)) + if branch: + msg.append('%s' % saxutils.escape(branch)) + msg.append('') + + return '\n'.join(msg) + + def diffstat(self): + class patchbuf(object): + def __init__(self): + self.lines = [] + # diffstat is stupid + self.name = 'cia' + def write(self, data): + self.lines.append(data) + def close(self): + pass + + n = self.ctx.node() + pbuf = patchbuf() + cmdutil.export(self.cia.repo, [n], fp=pbuf) + return patch.diffstat(pbuf.lines) or '' + + def logmsg(self): + diffstat = self.cia.diffstat and self.diffstat() or '' + self.cia.ui.pushbuffer() + self.cia.templater.show(self.ctx, changes=self.ctx.changeset(), + url=self.cia.url, diffstat=diffstat) + return self.cia.ui.popbuffer() + + def xml(self): + n = short(self.ctx.node()) + src = self.sourceelem(self.cia.project, module=self.cia.module, + branch=self.ctx.branch()) + # unix timestamp + dt = self.ctx.date() + timestamp = dt[0] + + author = saxutils.escape(self.ctx.user()) + rev = '%d:%s' % (self.ctx.rev(), n) + log = saxutils.escape(self.logmsg()) + + url = self.url and '%s/rev/%s' % (saxutils.escape(self.url), + n) or '' + + msg = """ + + + Mercurial (hgcia) + %s + %s + %s + + %s + + + %s + %s + %s + %s + %s + + + %d + +""" % \ + (HGCIA_VERSION, saxutils.escape(HGCIA_URL), + saxutils.escape(self.cia.user), src, author, rev, log, url, + self.fileelems(), timestamp) + + return msg + + +class hgcia(object): + """ CIA notification class """ + + deftemplate = '{desc}' + dstemplate = '{desc}\n-- \n{diffstat}' + + def __init__(self, ui, repo): + self.ui = ui + self.repo = repo + + self.ciaurl = self.ui.config('cia', 'url', 'http://cia.vc') + self.user = self.ui.config('cia', 'user') + self.project = self.ui.config('cia', 'project') + self.module = self.ui.config('cia', 'module') + self.diffstat = self.ui.configbool('cia', 'diffstat') + self.emailfrom = self.ui.config('email', 'from') + self.dryrun = self.ui.configbool('cia', 'test') + self.url = self.ui.config('web', 'baseurl') + + style = self.ui.config('cia', 'style') + template = self.ui.config('cia', 'template') + if not template: + template = self.diffstat and self.dstemplate or self.deftemplate + template = templater.parsestring(template, quoted=False) + t = cmdutil.changeset_templater(self.ui, self.repo, False, None, + style, False) + t.use_template(template) + self.templater = t + + def sendrpc(self, msg): + srv = xmlrpclib.Server(self.ciaurl) + res = srv.hub.deliver(msg) + if res is not True: + raise util.Abort(_('%s returned an error: %s') % + (self.ciaurl, res)) + + def sendemail(self, address, data): + p = email.Parser.Parser() + msg = p.parsestr(data) + msg['Date'] = util.datestr(format="%a, %d %b %Y %H:%M:%S %1%2") + msg['To'] = address + msg['From'] = self.emailfrom + msg['Subject'] = 'DeliverXML' + msg['Content-type'] = 'text/xml' + msgtext = msg.as_string() + + self.ui.status(_('hgcia: sending update to %s\n') % address) + mail.sendmail(self.ui, util.email(self.emailfrom), + [address], msgtext) + + +def hook(ui, repo, hooktype, node=None, url=None, **kwargs): + """ send CIA notification """ + def sendmsg(cia, ctx): + msg = ciamsg(cia, ctx).xml() + if cia.dryrun: + ui.write(msg) + elif cia.ciaurl.startswith('mailto:'): 
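As a rough sketch of the mailto: delivery branch handled at this point in the hook, an hgrc along these lines (all values are placeholders; the option names are the ones listed in the module docstring above) routes the notification through sendemail() instead of XML-RPC:

    [cia]
    user = ciauser
    project = someproject
    # a mailto: URL makes hgcia deliver the XML message by mail
    url = mailto:cia@cia.vc

    [email]
    from = hg@example.com

    [hooks]
    changegroup.cia = python:hgcia.hook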
+ if not cia.emailfrom: + raise util.Abort(_('email.from must be defined when ' + 'sending by email')) + cia.sendemail(cia.ciaurl[7:], msg) + else: + cia.sendrpc(msg) + + n = bin(node) + cia = hgcia(ui, repo) + if not cia.user: + ui.debug('cia: no user specified') + return + if not cia.project: + ui.debug('cia: no project specified') + return + if hooktype == 'changegroup': + start = repo.changelog.rev(n) + end = len(repo.changelog) + for rev in xrange(start, end): + n = repo.changelog.node(rev) + ctx = repo.changectx(n) + sendmsg(cia, ctx) + else: + ctx = repo.changectx(n) + sendmsg(cia, ctx) diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo new file mode 100644 index 0000000..2c5a2ee Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgcia.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py new file mode 100644 index 0000000..e8aae47 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.py @@ -0,0 +1,348 @@ +# Minimal support for git commands on an hg repository +# +# Copyright 2005, 2006 Chris Mason +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. + +'''browse the repository in a graphical way + +The hgk extension allows browsing the history of a repository in a +graphical way. It requires Tcl/Tk version 8.4 or later. (Tcl/Tk is not +distributed with Mercurial.) + +hgk consists of two parts: a Tcl script that does the displaying and +querying of information, and an extension to Mercurial named hgk.py, +which provides hooks for hgk to get information. hgk can be found in +the contrib directory, and the extension is shipped in the hgext +repository, and needs to be enabled. + +The :hg:`view` command will launch the hgk Tcl script. For this command +to work, hgk must be in your search path. Alternately, you can specify +the path to hgk in your configuration file:: + + [hgk] + path=/location/of/hgk + +hgk can make use of the extdiff extension to visualize revisions. +Assuming you had already configured extdiff vdiff command, just add:: + + [hgk] + vdiff=vdiff + +Revisions context menu will now display additional entries to fire +vdiff on hovered and selected revisions. 
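For illustration, a minimal hgrc wiring this up might look as follows. The [hgk] keys are the ones documented above; the [extensions] and [extdiff] entries are assumptions reflecting the usual way those extensions are enabled, and kdiff3 is just a placeholder diff tool:

    [extensions]
    hgk =
    extdiff =

    [extdiff]
    # defines an "hg vdiff" command backed by an external diff tool
    cmd.vdiff = kdiff3

    [hgk]
    path = /location/of/hgk
    vdiff = vdiff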
+''' + +import os +from mercurial import commands, util, patch, revlog, cmdutil +from mercurial.node import nullid, nullrev, short +from mercurial.i18n import _ + +def difftree(ui, repo, node1=None, node2=None, *files, **opts): + """diff trees from two commits""" + def __difftree(repo, node1, node2, files=[]): + assert node2 is not None + mmap = repo[node1].manifest() + mmap2 = repo[node2].manifest() + m = cmdutil.match(repo, files) + modified, added, removed = repo.status(node1, node2, m)[:3] + empty = short(nullid) + + for f in modified: + # TODO get file permissions + ui.write(":100664 100664 %s %s M\t%s\t%s\n" % + (short(mmap[f]), short(mmap2[f]), f, f)) + for f in added: + ui.write(":000000 100664 %s %s N\t%s\t%s\n" % + (empty, short(mmap2[f]), f, f)) + for f in removed: + ui.write(":100664 000000 %s %s D\t%s\t%s\n" % + (short(mmap[f]), empty, f, f)) + ## + + while True: + if opts['stdin']: + try: + line = raw_input().split(' ') + node1 = line[0] + if len(line) > 1: + node2 = line[1] + else: + node2 = None + except EOFError: + break + node1 = repo.lookup(node1) + if node2: + node2 = repo.lookup(node2) + else: + node2 = node1 + node1 = repo.changelog.parents(node1)[0] + if opts['patch']: + if opts['pretty']: + catcommit(ui, repo, node2, "") + m = cmdutil.match(repo, files) + chunks = patch.diff(repo, node1, node2, match=m, + opts=patch.diffopts(ui, {'git': True})) + for chunk in chunks: + ui.write(chunk) + else: + __difftree(repo, node1, node2, files=files) + if not opts['stdin']: + break + +def catcommit(ui, repo, n, prefix, ctx=None): + nlprefix = '\n' + prefix + if ctx is None: + ctx = repo[n] + ui.write("tree %s\n" % short(ctx.changeset()[0])) # use ctx.node() instead ?? + for p in ctx.parents(): + ui.write("parent %s\n" % p) + + date = ctx.date() + description = ctx.description().replace("\0", "") + lines = description.splitlines() + if lines and lines[-1].startswith('committer:'): + committer = lines[-1].split(': ')[1].rstrip() + else: + committer = ctx.user() + + ui.write("author %s %s %s\n" % (ctx.user(), int(date[0]), date[1])) + ui.write("committer %s %s %s\n" % (committer, int(date[0]), date[1])) + ui.write("revision %d\n" % ctx.rev()) + ui.write("branch %s\n\n" % ctx.branch()) + + if prefix != "": + ui.write("%s%s\n" % (prefix, description.replace('\n', nlprefix).strip())) + else: + ui.write(description + "\n") + if prefix: + ui.write('\0') + +def base(ui, repo, node1, node2): + """output common ancestor information""" + node1 = repo.lookup(node1) + node2 = repo.lookup(node2) + n = repo.changelog.ancestor(node1, node2) + ui.write(short(n) + "\n") + +def catfile(ui, repo, type=None, r=None, **opts): + """cat a specific revision""" + # in stdin mode, every line except the commit is prefixed with two + # spaces. This way the our caller can find the commit without magic + # strings + # + prefix = "" + if opts['stdin']: + try: + (type, r) = raw_input().split(' ') + prefix = " " + except EOFError: + return + + else: + if not type or not r: + ui.warn(_("cat-file: type or revision not supplied\n")) + commands.help_(ui, 'cat-file') + + while r: + if type != "commit": + ui.warn(_("aborting hg cat-file only understands commits\n")) + return 1 + n = repo.lookup(r) + catcommit(ui, repo, n, prefix) + if opts['stdin']: + try: + (type, r) = raw_input().split(' ') + except EOFError: + break + else: + break + +# git rev-tree is a confusing thing. 
You can supply a number of +# commit sha1s on the command line, and it walks the commit history +# telling you which commits are reachable from the supplied ones via +# a bitmask based on arg position. +# you can specify a commit to stop at by starting the sha1 with ^ +def revtree(ui, args, repo, full="tree", maxnr=0, parents=False): + def chlogwalk(): + count = len(repo) + i = count + l = [0] * 100 + chunk = 100 + while True: + if chunk > i: + chunk = i + i = 0 + else: + i -= chunk + + for x in xrange(chunk): + if i + x >= count: + l[chunk - x:] = [0] * (chunk - x) + break + if full != None: + l[x] = repo[i + x] + l[x].changeset() # force reading + else: + l[x] = 1 + for x in xrange(chunk - 1, -1, -1): + if l[x] != 0: + yield (i + x, full != None and l[x] or None) + if i == 0: + break + + # calculate and return the reachability bitmask for sha + def is_reachable(ar, reachable, sha): + if len(ar) == 0: + return 1 + mask = 0 + for i in xrange(len(ar)): + if sha in reachable[i]: + mask |= 1 << i + + return mask + + reachable = [] + stop_sha1 = [] + want_sha1 = [] + count = 0 + + # figure out which commits they are asking for and which ones they + # want us to stop on + for i, arg in enumerate(args): + if arg.startswith('^'): + s = repo.lookup(arg[1:]) + stop_sha1.append(s) + want_sha1.append(s) + elif arg != 'HEAD': + want_sha1.append(repo.lookup(arg)) + + # calculate the graph for the supplied commits + for i, n in enumerate(want_sha1): + reachable.append(set()) + visit = [n] + reachable[i].add(n) + while visit: + n = visit.pop(0) + if n in stop_sha1: + continue + for p in repo.changelog.parents(n): + if p not in reachable[i]: + reachable[i].add(p) + visit.append(p) + if p in stop_sha1: + continue + + # walk the repository looking for commits that are in our + # reachability graph + for i, ctx in chlogwalk(): + n = repo.changelog.node(i) + mask = is_reachable(want_sha1, reachable, n) + if mask: + parentstr = "" + if parents: + pp = repo.changelog.parents(n) + if pp[0] != nullid: + parentstr += " " + short(pp[0]) + if pp[1] != nullid: + parentstr += " " + short(pp[1]) + if not full: + ui.write("%s%s\n" % (short(n), parentstr)) + elif full == "commit": + ui.write("%s%s\n" % (short(n), parentstr)) + catcommit(ui, repo, n, ' ', ctx) + else: + (p1, p2) = repo.changelog.parents(n) + (h, h1, h2) = map(short, (n, p1, p2)) + (i1, i2) = map(repo.changelog.rev, (p1, p2)) + + date = ctx.date()[0] + ui.write("%s %s:%s" % (date, h, mask)) + mask = is_reachable(want_sha1, reachable, p1) + if i1 != nullrev and mask > 0: + ui.write("%s:%s " % (h1, mask)), + mask = is_reachable(want_sha1, reachable, p2) + if i2 != nullrev and mask > 0: + ui.write("%s:%s " % (h2, mask)) + ui.write("\n") + if maxnr and count >= maxnr: + break + count += 1 + +def revparse(ui, repo, *revs, **opts): + """parse given revisions""" + def revstr(rev): + if rev == 'HEAD': + rev = 'tip' + return revlog.hex(repo.lookup(rev)) + + for r in revs: + revrange = r.split(':', 1) + ui.write('%s\n' % revstr(revrange[0])) + if len(revrange) == 2: + ui.write('^%s\n' % revstr(revrange[1])) + +# git rev-list tries to order things by date, and has the ability to stop +# at a given commit without walking the whole repo. 
TODO add the stop +# parameter +def revlist(ui, repo, *revs, **opts): + """print revisions""" + if opts['header']: + full = "commit" + else: + full = None + copy = [x for x in revs] + revtree(ui, copy, repo, full, opts['max_count'], opts['parents']) + +def config(ui, repo, **opts): + """print extension options""" + def writeopt(name, value): + ui.write('k=%s\nv=%s\n' % (name, value)) + + writeopt('vdiff', ui.config('hgk', 'vdiff', '')) + + +def view(ui, repo, *etc, **opts): + "start interactive history viewer" + os.chdir(repo.root) + optstr = ' '.join(['--%s %s' % (k, v) for k, v in opts.iteritems() if v]) + cmd = ui.config("hgk", "path", "hgk") + " %s %s" % (optstr, " ".join(etc)) + ui.debug("running %s\n" % cmd) + util.system(cmd) + +cmdtable = { + "^view": + (view, + [('l', 'limit', '', + _('limit number of changes displayed'), _('NUM'))], + _('hg view [-l LIMIT] [REVRANGE]')), + "debug-diff-tree": + (difftree, + [('p', 'patch', None, _('generate patch')), + ('r', 'recursive', None, _('recursive')), + ('P', 'pretty', None, _('pretty')), + ('s', 'stdin', None, _('stdin')), + ('C', 'copy', None, _('detect copies')), + ('S', 'search', "", _('search'))], + _('hg git-diff-tree [OPTION]... NODE1 NODE2 [FILE]...')), + "debug-cat-file": + (catfile, + [('s', 'stdin', None, _('stdin'))], + _('hg debug-cat-file [OPTION]... TYPE FILE')), + "debug-config": + (config, [], _('hg debug-config')), + "debug-merge-base": + (base, [], _('hg debug-merge-base REV REV')), + "debug-rev-parse": + (revparse, + [('', 'default', '', _('ignored'))], + _('hg debug-rev-parse REV')), + "debug-rev-list": + (revlist, + [('H', 'header', None, _('header')), + ('t', 'topo-order', None, _('topo-order')), + ('p', 'parents', None, _('parents')), + ('n', 'max-count', 0, _('max-count'))], + _('hg debug-rev-list [OPTION]... REV...')), +} diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo new file mode 100644 index 0000000..97aa394 Binary files /dev/null and b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/hgk.pyo differ diff --git a/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py new file mode 100644 index 0000000..55e3c18 --- /dev/null +++ b/eggs/mercurial-1.7.3-py2.6-linux-x86_64.egg/hgext/highlight/__init__.py @@ -0,0 +1,61 @@ +# highlight - syntax highlighting in hgweb, based on Pygments +# +# Copyright 2008, 2009 Patrick Mezard and others +# +# This software may be used and distributed according to the terms of the +# GNU General Public License version 2 or any later version. +# +# The original module was split in an interface and an implementation +# file to defer pygments loading and speedup extension setup. + +"""syntax highlighting for hgweb (requires Pygments) + +It depends on the Pygments syntax highlighting library: +http://pygments.org/ + +There is a single configuration option:: + + [web] + pygments_style =