#!/usr/bin/env python
#
# Copyright (c) 2012 Felipe Contreras
#
# Inspired by Rocco Rutte's hg-fast-export
# Just copy to your ~/bin, or anywhere in your $PATH.
# Then you can clone with:
# git clone hg::/path/to/mercurial/repo/
#
# For remote repositories a local clone is stored in
# "$GIT_DIR/hg/origin/clone/.hg/".
from __future__ import print_function

from mercurial import hg, ui, bookmarks, context, encoding
from mercurial import node, error, extensions, discovery, util
from mercurial import changegroup
from mercurial import exchange

import re
import sys
import os
import json
import shutil
import subprocess
import atexit
import hashlib
import time as ptime

if sys.version_info[0] >= 3:
    from urllib.parse import urlparse, urljoin
    from urllib.parse import quote as urlquote, unquote as urlunquote
    ferr = sys.stderr.buffer
    fin = sys.stdin.buffer
else:
    from urlparse import urlparse, urljoin
    from urllib import quote as urlquote, unquote as urlunquote
    # Hack to set default encoding
    reload(sys)
    sys.setdefaultencoding('utf-8')
    ferr = sys.stderr
    fin = sys.stdin
#
# If you want to see Mercurial revisions as Git commit notes:
# git config core.notesRef refs/notes/hg
#
# If you are not in hg-git-compat mode and want to disable the tracking of
# named branches:
# git config --global remote-hg.track-branches false
#
# If you want the equivalent of hg's clone/pull --insecure option:
# git config --global remote-hg.insecure true
#
# If you want to switch to hg-git compatibility mode:
# git config --global remote-hg.hg-git-compat true
#
# git:
# Sensible defaults for git.
# hg bookmarks are exported as git branches, hg branches are prefixed
# with 'branches/', HEAD is a special case.
#
# hg:
# Emulate hg-git.
# Only hg bookmarks are exported as git branches.
# Commits are modified to preserve hg information and allow bidirectionality.
#

NAME_RE = re.compile('^([^<>]+)')
AUTHOR_RE = re.compile('^([^<>]+?)? ?[<>]([^<>]*)(?:$|>)')
EMAIL_RE = re.compile(r'([^ \t<>]+@[^ \t<>]+)')
AUTHOR_HG_RE = re.compile('^(.*?) ?<(.*?)(?:>(.*))?$')
RAW_AUTHOR_RE = re.compile('^(\w+) (?:(.+)? )?<(.*)> (\d+) ([+-]\d+)')

VERSION = 2
def print(arg):
    os.write(1, arg.encode() + b'\n')

def print_data(data):
    os.write(1, b'data %d\n' % len(data))
    os.write(1, data)

def debug(*args):
    sys.stderr.write('DEBUG: %s\n' % repr(args))

def die(msg, *args):
    sys.stderr.write('ERROR: %s\n' % (msg % args))
    sys.exit(1)

def warn(msg, *args):
    sys.stderr.write('WARNING: %s\n' % (msg % args))

def gitmode(flags):
    return b'l' in flags and '120000' or b'x' in flags and '100755' or '100644'

def gittz(tz):
    tz /= 60.0
    return '%+03d%02d' % (tz / -60, tz % 60)

def hgtz(tz):
    sign = -1 if tz[0] == '-' else 1
    return sign * (int(tz[-4:-2]) * 60 + int(tz[-2:])) * -60

def hgmode(mode):
    m = { '100755': 'x', '120000': 'l' }
    return m.get(mode, '')

def hghex(n):
    return node.hex(n)

def hgbin(n):
    return node.bin(n)

def hgref(ref):
    return ref.replace('___', ' ').encode()

def gitref(ref):
    return ref.decode().replace(' ', '___')

def check_version(*check):
    if not hg_version:
        return True
    return hg_version >= check

def get_config(config):
    cmd = ['git', 'config', '--get', config]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    output, _ = process.communicate()
    return output.decode()

def get_config_bool(config, default=False):
    value = get_config(config).rstrip('\n')
    if value == "true":
        return True
    elif value == "false":
        return False
    else:
        return default
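# Marks persists the mapping between Mercurial revisions and fast-import
# marks, plus the last exported tip per ref, as JSON at the given path so
# that incremental imports and exports can resume where they left off.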
class Marks:

    def __init__(self, path):
        self.path = path
        self.clear()
        self.load()

    def clear(self):
        self.tips = {}
        self.marks = {}
        self.rev_marks = {}
        self.last_mark = 0
        self.version = VERSION

    def load(self):
        if not os.path.exists(self.path):
            return
        tmp = json.load(open(self.path))
        self.tips = tmp['tips']
        self.marks = tmp['marks']
        self.last_mark = tmp['last-mark']
        self.version = tmp['version']
        for rev, mark in self.marks.items():
            self.rev_marks[mark] = rev

    def dict(self):
        return { 'tips': self.tips, 'marks': self.marks,
                 'last-mark': self.last_mark, 'version': self.version }

    def store(self):
        json.dump(self.dict(), open(self.path, 'w'))

    def __str__(self):
        return str(self.dict())

    def from_rev(self, rev):
        return self.marks[rev]

    def to_rev(self, mark):
        return self.rev_marks[mark]

    def next_mark(self):
        self.last_mark += 1
        return self.last_mark

    def get_mark(self, rev):
        self.last_mark += 1
        self.marks[rev.decode()] = self.last_mark
        return self.last_mark

    def new_mark(self, rev, mark):
        rev = rev.decode()
        self.marks[rev] = mark
        self.rev_marks[mark] = rev
        self.last_mark = mark

    def is_marked(self, rev):
        return rev.decode() in self.marks

    def get_tip(self, branch):
        return self.tips[branch].encode()

    def set_tip(self, branch, tip):
        self.tips[branch] = tip.decode()
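# Parser reads the remote-helper/fast-export command stream from stdin one
# line at a time and provides small helpers to extract marks, data blocks,
# and author lines from it.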
class Parser:

    def __init__(self, repo):
        self.repo = repo
        self.line = self.get_line()

    def get_line(self):
        return fin.readline().decode().strip()

    def __getitem__(self, i):
        return self.line.split()[i]

    def check(self, word):
        return self.line.startswith(word)

    def each_block(self, separator):
        while self.line != separator:
            yield self.line
            self.line = self.get_line()

    def __iter__(self):
        return self.each_block('')

    def next(self):
        self.line = self.get_line()
        if self.line == 'done':
            self.line = None

    def get_mark(self):
        i = self.line.index(':') + 1
        return int(self.line[i:])

    def get_data(self):
        if not self.check('data'):
            return None
        i = self.line.index(' ') + 1
        size = int(self.line[i:])
        return fin.read(size)

    def get_author(self):
        ex = None
        m = RAW_AUTHOR_RE.match(self.line)
        if not m:
            return None
        _, name, email, date, tz = m.groups()
        if name and 'ext:' in name:
            m = re.match('^(.+?) ext:\((.+)\)$', name)
            if m:
                name = m.group(1)
                ex = urlunquote(m.group(2))
        if email != bad_mail:
            if name:
                user = '%s <%s>' % (name, email)
            else:
                user = '<%s>' % (email)
        else:
            user = name
        if ex:
            user += ex
        return (user, int(date), hgtz(tz))
def fix_file_path(path):
    path = os.path.normpath(path.decode())
    if os.path.isabs(path):
        path = os.path.relpath(path, '/')
    if os.sep == '/':
        return path
    # Git for Windows expects forward slashes
    return path.replace(os.sep, '/')
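# Emit a blob for the given file revision (only once per hg filenode, cached
# in 'filenodes') and return the (mode, mark, path) triple used for the
# commit's 'M' lines.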
def export_file(ctx, fname):
    f = ctx.filectx(fname)
    fid = node.hex(f.filenode())
    if fid in filenodes:
        mark = filenodes[fid]
    else:
        mark = marks.next_mark()
        filenodes[fid] = mark
        print("blob")
        print("mark :%u" % mark)
        print_data(f.data())
    return gitmode(f.flags()), mark, fix_file_path(fname)

def get_filechanges(repo, ctx, parents, files):
    if not parents:
        return files, []
    else:
        stat = parents[0].status(ctx)
        return stat.modified + stat.added, stat.removed

def fixup_user_git(user):
    name = mail = None
    user = user.replace('"', '')
    m = AUTHOR_RE.match(user)
    if m:
        name = m.group(1)
        mail = m.group(2).strip()
    else:
        m = EMAIL_RE.match(user)
        if m:
            mail = m.group(1)
        else:
            m = NAME_RE.match(user)
            if m:
                name = m.group(1).strip()
    return (name, mail)

def fixup_user_hg(user):
    def sanitize(name):
        # stole this from hg-git
        return re.sub('[<>\n]', '?', name.lstrip('< ').rstrip('> '))

    m = AUTHOR_HG_RE.match(user)
    if m:
        name = sanitize(m.group(1))
        mail = sanitize(m.group(2))
        ex = m.group(3)
        if ex:
            name += ' ext:(' + urlquote(ex) + ')'
    else:
        name = sanitize(user)
        if '@' in user:
            mail = name
        else:
            mail = None
    return (name, mail)

def fixup_user(user):
    if mode == 'git':
        name, mail = fixup_user_git(user)
    else:
        name, mail = fixup_user_hg(user)
    if not name:
        name = bad_name
    if not mail:
        mail = bad_mail
    return '%s <%s>' % (name, mail)
def updatebookmarks(repo, peer):
    remotemarks = peer.listkeys(b'bookmarks')
    localmarks = repo._bookmarks
    if not remotemarks:
        return
    changes = { k: hgbin(v) for k, v in remotemarks.items() }
    wlock = tr = None
    try:
        wlock = repo.wlock()
        tr = repo.transaction(b'bookmark')
        if check_version(4, 3):
            localmarks.applychanges(repo, tr, changes.items())
        else:
            localmarks.update(changes)
        tr.close()
    finally:
        tr.release()
        wlock.release()

def get_repo(url, alias):
    global peer

    if check_version(4, 1):
        myui = ui.ui.load()
    else:
        myui = ui.ui()
    myui.setconfig(b'ui', b'interactive', b'off')
    myui.fout = ferr

    if get_config_bool('remote-hg.insecure'):
        myui.setconfig(b'web', b'cacerts', b'')

    extensions.loadall(myui)

    url = url.encode()
    if hg.islocal(url) and not os.environ.get('GIT_REMOTE_HG_TEST_REMOTE'):
        repo = hg.repository(myui, url)
        if not os.path.exists(dirname):
            os.makedirs(dirname)
    else:
        shared_path = os.path.join(gitdir, 'hg')

        # setup shared repo (if not there)
        try:
            hg.peer(myui, {}, shared_path.encode(), create=True)
        except error.RepoError:
            pass

        if not os.path.exists(dirname):
            os.makedirs(dirname)

        local_path = os.path.join(dirname, 'clone')
        if check_version(4, 2):
            if not os.path.exists(local_path):
                hg.share(myui, shared_path.encode(), local_path.encode(), update=False, relative=True)
        else:
            if not os.path.exists(local_path):
                hg.share(myui, shared_path.encode(), local_path.encode(), update=False)
            else:
                # make sure the shared path is always up-to-date
                hg_path = os.path.join(shared_path, '.hg')
                util.writefile(os.path.join(local_path, '.hg', 'sharedpath').encode(), hg_path.encode())

        repo = hg.repository(myui, local_path.encode())
        try:
            peer = hg.peer(repo.ui, {}, url)
        except:
            die('Repository error')

        exchange.pull(repo, peer, heads=None, force=True)
        updatebookmarks(repo, peer)

    return repo
def rev_to_mark(rev):
    return marks.from_rev(rev.hex().decode())

def mark_to_rev(mark):
    return marks.to_rev(mark).encode()
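# Export every changeset from the last exported tip of this ref up to 'head'
# as a fast-import stream: file blobs, then the commit itself (with an
# '--HG--' trailer carrying branch/rename/extra metadata in hg-git compat
# mode), and finally a commit on refs/notes/hg recording the Mercurial sha1
# of each exported revision.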
def export_ref(repo, name, kind, head):
    ename = '%s/%s' % (kind, name)
    try:
        tip = marks.get_tip(ename)
        tip = repo[tip]
    except (KeyError, error.RepoLookupError):
        tip = repo[-1]

    # tip..head
    revs = repo.changelog.findmissingrevs(common={tip.rev()}, heads={head.rev()})
    total = len(revs)
    tip = tip.rev()

    for rev in revs:
        c = repo[rev]
        node = c.node()

        if marks.is_marked(c.hex()):
            continue

        manifest, user, (time, tz), files, desc, extra = repo.changelog.read(node)
        rev_branch = extra[b'branch'].decode()

        user = user.decode()
        author = "%s %d %s" % (fixup_user(user), time, gittz(tz))
        if b'committer' in extra:
            try:
                cuser, ctime, ctz = extra[b'committer'].decode().rsplit(' ', 2)
                committer = "%s %s %s" % (fixup_user(cuser), ctime, gittz(int(ctz)))
            except ValueError:
                cuser = extra[b'committer'].decode()
                committer = "%s %d %s" % (fixup_user(cuser), time, gittz(tz))
        else:
            committer = author

        parents = [p for p in c.parents() if p.rev() != -1]
        modified, removed = get_filechanges(repo, c, parents, files)

        desc += b'\n'

        if mode == 'hg':
            extra_msg = ''
            if rev_branch != 'default':
                extra_msg += 'branch : %s\n' % rev_branch
            renames = []
            for f in c.files():
                if f not in c.manifest():
                    continue
                rename = c.filectx(f).renamed()
                if rename:
                    renames.append((rename[0], f))
            for e in renames:
                extra_msg += (b"rename : %s => %s\n" % e).decode()
            for key, value in extra.items():
                key, value = key.decode(), value.decode()
                if key in ('author', 'committer', 'encoding', 'message', 'branch', 'hg-git'):
                    continue
                else:
                    extra_msg += "extra : %s : %s\n" % (key, urlquote(value))
            if extra_msg:
                desc += b'\n--HG--\n' + extra_msg.encode()

        modified_final = [export_file(c, fname) for fname in modified]

        if len(parents) == 0 and rev:
            print('reset %s/%s' % (prefix, ename))

        print("commit %s/%s" % (prefix, ename))
        print("mark :%d" % (marks.get_mark(c.hex())))
        print("author %s" % (author))
        print("committer %s" % (committer))
        print_data(desc)

        if len(parents) > 0:
            print("from :%s" % (rev_to_mark(parents[0])))
            if len(parents) > 1:
                print("merge :%s" % (rev_to_mark(parents[1])))

        for f in removed:
            print("D %s" % (fix_file_path(f)))
        for f in modified_final:
            print("M %s :%u %s" % f)
        print("")

        progress = (rev - tip)
        if (progress % 100 == 0):
            print("progress revision %d '%s' (%d/%d)" % (rev, name, progress, total))

    # make sure the ref is updated
    print("reset %s/%s" % (prefix, ename))
    print("from :%u" % rev_to_mark(head))
    print("")

    pending_revs = set(revs) - notes
    if pending_revs:
        ref = "refs/notes/hg"
        print("commit %s" % ref)
        print("mark :%d" % marks.next_mark())
        print("committer remote-hg <> %d %s" % (ptime.time(), gittz(ptime.timezone)))
        desc = "Notes for %s\n" % (name)
        print("data %d" % (len(desc)))
        print(desc)
        if tip != -1:
            print("from %s^0" % ref)
        for rev in pending_revs:
            notes.add(rev)
            c = repo[rev]
            print("N inline :%u" % rev_to_mark(c))
            msg = c.hex()
            print_data(msg)
        print("")

    marks.set_tip(ename, head.hex())
def export_tag(repo, tag):
    node = repo.tags().get(hgref(tag))
    export_ref(repo, tag, 'tags', repo[node])

def export_bookmark(repo, bmark):
    head = bmarks[hgref(bmark)]
    export_ref(repo, bmark, 'bookmarks', head)

def export_branch(repo, branch):
    tip = get_branch_tip(repo, branch)
    head = repo[tip]
    export_ref(repo, branch, 'branches', head)

def export_head(repo):
    export_ref(repo, g_head[0], 'bookmarks', g_head[1])
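# Remote-helper handshake: advertise import/export support, the refspecs that
# map hg branches/bookmarks/tags into this remote's private namespace, and
# the marks files used for incremental transfers.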
def do_capabilities(parser):
    print("import")
    print("export")
    print("refspec refs/heads/branches/*:%s/branches/*" % prefix)
    print("refspec refs/heads/*:%s/bookmarks/*" % prefix)
    print("refspec refs/tags/*:%s/tags/*" % prefix)

    path = os.path.join(dirname, 'marks-git')

    if os.path.exists(path):
        print("*import-marks %s" % path)
    print("*export-marks %s" % path)
    print("option")

    print("")
def branch_tip(branch):
    return branches[branch][-1]

def get_branch_tip(repo, branch):
    heads = branches.get(hgref(branch), None)
    if not heads:
        return None
    # verify there's only one head
    if (len(heads) > 1):
        warn("Branch '%s' has more than one head, consider merging" % branch)
        return branch_tip(hgref(branch))
    return heads[0]

def list_head(repo, cur):
    global g_head, fake_bmark

    if b'default' not in branches:
        # empty repo
        return

    node = repo[branch_tip(b'default')]
    head = b'master' if b'master' not in bmarks else b'default'
    fake_bmark = head
    bmarks[head] = node

    head = gitref(head)
    print("@refs/heads/%s HEAD" % head)
    g_head = (head, node)
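# Answer the 'list' command: print one '?' line per exportable ref
# (bookmarks, open named branches when tracking is enabled, and tags), plus
# the '@' line that tells git which ref HEAD should point to.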
def do_list(parser):
    repo = parser.repo
    for bmark, node in bookmarks.listbookmarks(repo).items():
        bmarks[bmark] = repo[node]

    cur = repo.dirstate.branch()
    orig = peer if peer else repo

    if check_version(5, 1):
        items = orig.branchmap().items()
    else:
        items = orig.branchmap().iteritems()

    for branch, heads in items:
        # only open heads
        try:
            heads = [h for h in heads if b'close' not in repo.changelog.read(h)[5]]
            if heads:
                branches[branch] = heads
        except error.LookupError:
            branches[branch] = heads

    list_head(repo, cur)

    if track_branches:
        for branch in branches:
            print("? refs/heads/branches/%s" % gitref(branch))

    for bmark in bmarks:
        if bmarks[bmark].hex() != b'0' * 40:
            print("? refs/heads/%s" % gitref(bmark))

    for tag, node in repo.tagslist():
        if tag == b'tip':
            continue
        print("? refs/tags/%s" % gitref(tag))

    print("")
def do_import(parser):
    repo = parser.repo

    path = os.path.join(dirname, 'marks-git')

    print("feature done")
    if os.path.exists(path):
        print("feature import-marks=%s" % path)
    print("feature export-marks=%s" % path)
    print("feature force")
    sys.stdout.flush()

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    # lets get all the import lines
    while parser.check('import'):
        ref = parser[1]

        if (ref == 'HEAD'):
            export_head(repo)
        elif ref.startswith('refs/heads/branches/'):
            branch = ref[len('refs/heads/branches/'):]
            export_branch(repo, branch)
        elif ref.startswith('refs/heads/'):
            bmark = ref[len('refs/heads/'):]
            export_bookmark(repo, bmark)
        elif ref.startswith('refs/tags/'):
            tag = ref[len('refs/tags/'):]
            export_tag(repo, tag)

        parser.next()

    encoding.encoding = tmp

    print('done')
def parse_blob(parser):
    parser.next()
    mark = parser.get_mark()
    parser.next()
    data = parser.get_data()
    blob_marks[mark] = data
    parser.next()

def get_merge_files(repo, p1, p2, files):
    for e in repo[p1].files():
        if e not in files:
            if e not in repo[p1].manifest():
                continue
            f = { 'ctx': repo[p1][e] }
            files[e] = f

def c_style_unescape(string):
    if string[0] == string[-1] == '"':
        return string.encode().decode('unicode_escape').encode('latin-1')[1:-1]
    return string.encode()

def make_memfilectx(repo, changectx, path, data, islink=False, isexec=False, copied=None):
    if check_version(4, 5):
        return context.memfilectx(repo, changectx, path, data, islink, isexec, copied)
    else:
        return context.memfilectx(repo, path, data, islink, isexec, copied)
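# Turn one fast-export 'commit' block into a Mercurial changeset: collect the
# file modifications/deletions, map 'from'/'merge' marks to parent revisions,
# recover branch/rename/extra metadata from the '--HG--' trailer in hg-git
# compat mode, and commit a memctx, recording the new node in 'parsed_refs'
# and in the marks table.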
def parse_commit(parser):
    from_mark = merge_mark = None

    ref = parser[1]
    parser.next()

    commit_mark = parser.get_mark()
    parser.next()
    author = parser.get_author()
    parser.next()
    committer = parser.get_author()
    parser.next()
    data = parser.get_data().decode()
    parser.next()

    if parser.check('from'):
        from_mark = parser.get_mark()
        parser.next()

    if parser.check('merge'):
        merge_mark = parser.get_mark()
        parser.next()
        if parser.check('merge'):
            die('octopus merges are not supported yet')

    # fast-export adds an extra newline
    if data[-1] == '\n':
        data = data[:-1]

    files = {}
    for line in parser:
        if parser.check('M'):
            t, m, mark_ref, path = line.split(' ', 3)
            mark = int(mark_ref[1:])
            f = { 'mode': hgmode(m), 'data': blob_marks[mark] }
        elif parser.check('D'):
            t, path = line.split(' ', 1)
            f = { 'deleted': True }
        else:
            die('Unknown file command: %s' % line)
        path = c_style_unescape(path)
        files[path] = f

    # only export the commits if we are on an internal proxy repo
    if dry_run and not peer:
        parsed_refs[ref] = None
        return

    def getfilectx(repo, memctx, f):
        of = files[f]
        if 'deleted' in of:
            return None
        if 'ctx' in of:
            if mode == 'hg':
                # force the creation of a new filelog
                ctx = of['ctx']
                is_exec = ctx.isexec()
                is_link = ctx.islink()
                return make_memfilectx(repo, memctx, f, ctx.data(), is_link, is_exec)
            else:
                return of['ctx']
        is_exec = of['mode'] == 'x'
        is_link = of['mode'] == 'l'
        rename = of.get('rename', None)
        return make_memfilectx(repo, memctx, f, of['data'], is_link, is_exec, rename)

    repo = parser.repo

    user, date, tz = author
    extra = {}

    if committer != author:
        extra[b'committer'] = ("%s %u %u" % committer).encode()

    if from_mark:
        p1 = mark_to_rev(from_mark)
    else:
        p1 = b'0' * 40

    if merge_mark:
        p2 = mark_to_rev(merge_mark)
    else:
        p2 = b'0' * 40

    #
    # If files changed from any of the parents, hg wants to know, but in git if
    # nothing changed from the first parent, nothing changed.
    #
    if merge_mark:
        get_merge_files(repo, p1, p2, files)

    # Check if the ref is supposed to be a named branch
    if ref.startswith('refs/heads/branches/'):
        branch = ref[len('refs/heads/branches/'):]
        extra[b'branch'] = hgref(branch)

    if mode == 'hg':
        i = data.find('\n--HG--\n')
        if i >= 0:
            tmp = data[i + len('\n--HG--\n'):].strip()
            for k, v in [e.encode().split(b' : ', 1) for e in tmp.split('\n')]:
                if k == b'rename':
                    old, new = v.split(b' => ', 1)
                    files[new]['rename'] = old
                elif k == b'branch':
                    extra[k] = v
                elif k == b'extra':
                    ek, ev = v.split(b' : ', 1)
                    extra[ek] = urlunquote(ev).encode()
            data = data[:i]

    ctx = context.memctx(repo, (p1, p2), data.encode(),
                         files.keys(), getfilectx,
                         user.encode(), (date, tz), extra)

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    node = hghex(repo.commitctx(ctx))

    encoding.encoding = tmp

    parsed_refs[ref] = node
    marks.new_mark(node, commit_mark)
def parse_reset(parser):
    ref = parser[1]
    parser.next()
    # ugh
    if parser.check('commit'):
        parse_commit(parser)
        return
    if not parser.check('from'):
        return
    from_mark = parser.get_mark()
    parser.next()

    try:
        rev = mark_to_rev(from_mark)
    except KeyError:
        rev = None
    parsed_refs[ref] = rev

def parse_tag(parser):
    name = parser[1]
    parser.next()
    from_mark = parser.get_mark()
    parser.next()
    tagger = parser.get_author()
    parser.next()
    data = parser.get_data()
    parser.next()

    try:
        rev = mark_to_rev(from_mark)
    except KeyError:
        rev = None
    parsed_refs['refs/tags/' + name] = rev
    parsed_tags[name.encode()] = (tagger, data)
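# In plain git mode, tags pushed from git are recorded by committing an
# updated .hgtags file on the tagged changeset's branch; this builds that
# commit and returns the new node and its branch.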
def write_tag(repo, tag, node, msg, author):
    branch = repo[node].branch()
    tip = branch_tip(branch)
    tip = repo[tip]

    def getfilectx(repo, memctx, f):
        try:
            fctx = tip.filectx(f)
            data = fctx.data()
        except error.LookupError:
            data = b""
        content = data + b"%s %s\n" % (node, tag)
        return make_memfilectx(repo, memctx, f, content)

    p1 = tip.hex()
    p2 = b'0' * 40
    if author:
        user, date, tz = author
        user = user.encode()
        date_tz = (date, tz)
    else:
        cmd = ['git', 'var', 'GIT_COMMITTER_IDENT']
        process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
        output, _ = process.communicate()
        m = re.match(b'^.* <.*>', output)
        if m:
            user = m.group(0)
        else:
            user = repo.ui.username()
        date_tz = None

    ctx = context.memctx(repo, (p1, p2), msg,
                         [b'.hgtags'], getfilectx,
                         user, date_tz, {b'branch': branch})

    tmp = encoding.encoding
    encoding.encoding = 'utf-8'

    tagnode = repo.commitctx(ctx)

    encoding.encoding = tmp

    return (tagnode, branch)
def checkheads_bmark(repo, ref, ctx):
    bmark = ref[len('refs/heads/'):].encode()
    if bmark not in bmarks:
        # new bmark
        return True

    ctx_old = bmarks[bmark]
    ctx_new = ctx

    if not ctx.rev():
        print("error %s unknown" % ref)
        return False

    if check_version(4, 7):
        isancestorrev = repo.changelog.isancestorrev
    else:
        isancestorrev = repo.changelog.descendant

    if not isancestorrev(ctx_old.rev(), ctx_new.rev()):
        if force_push:
            print("ok %s forced update" % ref)
        else:
            print("error %s non-fast forward" % ref)
            return False

    return True
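# Reject non-fast-forward updates: each pushed head must descend from one of
# the remote's existing heads for its branch (and each bookmark from its old
# position), unless the push is forced.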
def checkheads(repo, remote, p_revs):
    remotemap = remote.branchmap()
    if not remotemap:
        # empty repo
        return True

    new = {}
    ret = True

    for node, ref in p_revs.items():
        ctx = repo[node]
        branch = ctx.branch()
        if branch not in remotemap:
            # new branch
            continue
        if not ref.startswith('refs/heads/branches'):
            if ref.startswith('refs/heads/'):
                if not checkheads_bmark(repo, ref, ctx):
                    ret = False
            # only check branches
            continue
        new.setdefault(branch, []).append(ctx.rev())

    for branch, heads in new.items():
        old = [repo.changelog.rev(x) for x in remotemap[branch]]
        for rev in heads:
            ancestors = repo.changelog.ancestors([rev], stoprev=min(old))
            found = False
            for x in old:
                if x in ancestors:
                    found = True
                    break
            if found:
                continue
            node = repo.changelog.node(rev)
            ref = p_revs[node]
            if force_push:
                print("ok %s forced update" % ref)
            else:
                print("error %s non-fast forward" % ref)
                ret = False

    return ret
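# Compute the outgoing changesets against the remote, run the fast-forward
# checks, and send everything as a single changegroup bundle via unbundle().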
def push(repo, remote, parsed_refs, p_revs):
    if hasattr(remote, 'canpush') and not remote.canpush():
        print("error cannot push")

    if not p_revs:
        # nothing to push
        return

    force = force_push

    fci = discovery.findcommonincoming
    if check_version(4, 5):
        commoninc = fci(repo, remote, force=force, ancestorsof=list(p_revs))
    else:
        commoninc = fci(repo, remote, force=force)
    common, _, remoteheads = commoninc

    fco = discovery.findcommonoutgoing
    outgoing = fco(repo, remote, onlyheads=list(p_revs), commoninc=commoninc, force=force)

    if not checkheads(repo, remote, p_revs):
        return False

    if not outgoing.missing:
        # Nothing to push
        return True

    if check_version(4, 4):
        cg = changegroup.makechangegroup(repo, outgoing, b'01', b'push')
    else:
        cg = changegroup.getchangegroup(repo, b'push', outgoing)

    if force:
        remoteheads = [b'force']
    return remote.unbundle(cg, remoteheads, b'push')
def check_tip(ref, kind, name, heads):
    try:
        ename = '%s/%s' % (kind, name)
        tip = marks.get_tip(ename)
    except KeyError:
        return True
    else:
        return tip in heads

def bookmark_is_fake(bmark, real_bmarks):
    return bmark == fake_bmark or \
           (bmark == b'master' and bmark not in real_bmarks)
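# Push path: consume the fast-export stream from git, create the
# corresponding Mercurial changesets and tags, then update remote (or local)
# bookmarks and report 'ok'/'error' per ref. A dry run only validates heads.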
def do_export(parser):
    p_bmarks = []
    p_revs = {}
    ok_refs = []

    parser.next()

    for line in parser.each_block('done'):
        if parser.check('blob'):
            parse_blob(parser)
        elif parser.check('commit'):
            parse_commit(parser)
        elif parser.check('reset'):
            parse_reset(parser)
        elif parser.check('tag'):
            parse_tag(parser)
        elif parser.check('feature'):
            pass
        else:
            die('unhandled export command: %s' % line)

    need_fetch = False

    for ref, node in parsed_refs.items():
        bnode = hgbin(node) if node else None
        if ref.startswith('refs/heads/branches'):
            g_branch = ref[len('refs/heads/branches/'):]
            branch = g_branch.encode()
            if branch in branches and bnode in branches[branch]:
                # up to date
                continue

            if peer:
                remotemap = peer.branchmap()
                if remotemap and branch in remotemap:
                    heads = [hghex(e) for e in remotemap[branch]]
                    if not check_tip(ref, 'branches', g_branch, heads):
                        print("error %s fetch first" % ref)
                        need_fetch = True
                        continue

            p_revs[bnode] = ref
            ok_refs.append(ref)
        elif ref.startswith('refs/heads/'):
            g_bmark = ref[len('refs/heads/'):]
            bmark = g_bmark.encode()
            new = node
            old = bmarks[bmark].hex() if bmark in bmarks else b''

            if old == new:
                continue

            if bookmark_is_fake(bmark, parser.repo._bookmarks):
                ok_refs.append(ref)
            else:
                p_bmarks.append((ref, bmark, old, new))

            if peer:
                remote_old = peer.listkeys(b'bookmarks').get(bmark)
                if remote_old:
                    if not check_tip(ref, 'bookmarks', g_bmark, remote_old):
                        print("error %s fetch first" % ref)
                        need_fetch = True
                        continue

            p_revs[bnode] = ref
        elif ref.startswith('refs/tags/'):
            if dry_run:
                ok_refs.append(ref)
                continue
            tag = ref[len('refs/tags/'):]
            tag = hgref(tag)
            author, msg = parsed_tags.get(tag, (None, None))
            if mode == 'git':
                if not msg:
                    msg = b'Added tag %s for changeset %s' % (tag, node[:12])
                tagnode, branch = write_tag(parser.repo, tag, node, msg, author)
                p_revs[tagnode] = 'refs/heads/branches/' + gitref(branch)
            else:
                vfs = parser.repo.vfs
                fp = vfs(b'localtags', b'a')
                fp.write(b'%s %s\n' % (node, tag))
                fp.close()
            p_revs[bnode] = ref
            ok_refs.append(ref)
        else:
            # transport-helper/fast-export bugs
            continue

    if need_fetch:
        print("")
        return

    if dry_run:
        if not peer or checkheads(parser.repo, peer, p_revs):
            for ref, bmark, old, new in p_bmarks:
                print("ok %s" % ref)
            for ref in ok_refs:
                print("ok %s" % ref)
        print("")
        return

    if peer:
        if not push(parser.repo, peer, parsed_refs, p_revs):
            # do not update bookmarks
            print("")
            return

        # update remote bookmarks
        remote_bmarks = peer.listkeys(b'bookmarks')
        for ref, bmark, old, new in p_bmarks:
            if force_push:
                old = remote_bmarks.get(bmark, b'')
            if peer.pushkey(b'bookmarks', bmark, old, new):
                print("ok %s" % ref)
            else:
                print("error %s" % ref)
    else:
        # update local bookmarks
        for ref, bmark, old, new in p_bmarks:
            if bookmarks.pushbookmark(parser.repo, bmark, old, new):
                print("ok %s" % ref)
            else:
                print("error %s" % ref)

    # update rest of the refs
    for ref in ok_refs:
        print("ok %s" % ref)

    print("")
def do_option(parser):
    global dry_run, force_push
    _, key, value = parser.line.split(' ')
    if key == 'dry-run':
        dry_run = (value == 'true')
        print('ok')
    elif key == 'force':
        force_push = (value == 'true')
        print('ok')
    else:
        print('unsupported')

def fix_path(alias, repo, orig_url):
    url = urlparse(orig_url, 'file')
    if url.scheme != 'file' or os.path.isabs(os.path.expanduser(url.path)):
        return
    cmd = ['git', 'config', 'remote.%s.url' % alias, "hg::%s" % os.path.abspath(orig_url)]
    subprocess.call(cmd)
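# Entry point for the remote helper: git invokes it with the remote name (or
# URL) and the URL as arguments, then drives it through the capabilities,
# list, import, export, and option commands on stdin.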
def main(args):
    global prefix, gitdir, dirname, branches, bmarks
    global marks, blob_marks, parsed_refs
    global peer, mode, bad_mail, bad_name
    global track_branches, force_push, is_tmp
    global parsed_tags
    global filenodes
    global fake_bmark, hg_version
    global dry_run
    global notes, alias

    marks = None
    is_tmp = False
    gitdir = os.environ.get('GIT_DIR', None)

    if len(args) < 3:
        die('Not enough arguments.')

    if not gitdir:
        die('GIT_DIR not set')

    alias = args[1]
    url = args[2]
    peer = None

    hg_git_compat = get_config_bool('remote-hg.hg-git-compat')
    track_branches = get_config_bool('remote-hg.track-branches', True)
    force_push = False

    if hg_git_compat:
        mode = 'hg'
        bad_mail = 'none@none'
        bad_name = ''
    else:
        mode = 'git'
        bad_mail = 'unknown'
        bad_name = 'Unknown'

    if alias[4:] == url:
        is_tmp = True
        alias = hashlib.sha1(alias.encode()).hexdigest()

    dirname = os.path.join(gitdir, 'hg', alias)
    branches = {}
    bmarks = {}
    blob_marks = {}
    parsed_refs = {}
    parsed_tags = {}
    filenodes = {}
    fake_bmark = None
    try:
        version, _, extra = util.version().partition('+')
        version = list(int(e) for e in version.split('.'))
        if extra:
            version[1] += 1
        hg_version = tuple(version)
    except:
        hg_version = None
    dry_run = False
    notes = set()

    repo = get_repo(url, alias)
    prefix = 'refs/hg/%s' % alias

    if not is_tmp:
        fix_path(alias, peer or repo, url)

    marks_path = os.path.join(dirname, 'marks-hg')
    marks = Marks(marks_path)

    parser = Parser(repo)
    for line in parser:
        if parser.check('capabilities'):
            do_capabilities(parser)
        elif parser.check('list'):
            do_list(parser)
        elif parser.check('import'):
            do_import(parser)
        elif parser.check('export'):
            do_export(parser)
        elif parser.check('option'):
            do_option(parser)
        else:
            die('unhandled command: %s' % line)
        sys.stdout.flush()

    marks.store()

def bye():
    if is_tmp:
        shutil.rmtree(dirname)

atexit.register(bye)
sys.exit(main(sys.argv))