#!/usr/bin/env python
#
# Copyright (c) 2012 Felipe Contreras
#
#
# Just copy to your ~/bin, or anywhere in your $PATH.
# Then you can clone with:
# % git clone bzr::/path/to/bzr/repo/or/url
#
# For example:
# % git clone bzr::$HOME/myrepo
# or
# % git clone bzr::lp:myrepo
#
# If you want to specify which branches you want to track (per repo):
# % git config remote.origin.bzr-branches 'trunk, devel, test'
#
# Where 'origin' is the name of the remote whose branches you want to
# specify.
#

import sys

import bzrlib
if hasattr(bzrlib, "initialize"):
    bzrlib.initialize()

import bzrlib.plugin
bzrlib.plugin.load_plugins()

import bzrlib.generate_ids
import bzrlib.transport
import bzrlib.errors
import bzrlib.ui
import bzrlib.urlutils
import bzrlib.branch

import os
import json
import re
import StringIO
import atexit, shutil, hashlib, urlparse, subprocess

NAME_RE = re.compile('^([^<>]+)')
AUTHOR_RE = re.compile('^([^<>]+?)? ?[<>]([^<>]*)(?:$|>)')
EMAIL_RE = re.compile(r'([^ \t<>]+@[^ \t<>]+)')
RAW_AUTHOR_RE = re.compile(r'^(\w+) (.+)? <(.*)> (\d+) ([+-]\d+)')

def die(msg, *args):
    sys.stderr.write('ERROR: %s\n' % (msg % args))
    sys.exit(1)

def warn(msg, *args):
    sys.stderr.write('WARNING: %s\n' % (msg % args))

def gittz(tz):
    return '%+03d%02d' % (tz / 3600, tz % 3600 / 60)

def get_config(config):
    cmd = ['git', 'config', '--get', config]
    process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    output, _ = process.communicate()
    return output

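# Persistent state shared with git fast-import/fast-export: maps bzr revision
# ids to marks (and back), and remembers the last exported tip of each branch.
# The data is stored as JSON next to the git repository (see main()).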
class Marks:

    def __init__(self, path):
        self.path = path
        self.tips = {}
        self.marks = {}
        self.rev_marks = {}
        self.last_mark = 0
        self.load()

    def load(self):
        if not os.path.exists(self.path):
            return

        tmp = json.load(open(self.path))
        self.tips = tmp['tips']
        self.marks = tmp['marks']
        self.last_mark = tmp['last-mark']

        for rev, mark in self.marks.iteritems():
            self.rev_marks[mark] = rev

    def dict(self):
        return { 'tips': self.tips, 'marks': self.marks, 'last-mark' : self.last_mark }

    def store(self):
        json.dump(self.dict(), open(self.path, 'w'))

    def __str__(self):
        return str(self.dict())

    def from_rev(self, rev):
        return self.marks[rev]

    def to_rev(self, mark):
        return str(self.rev_marks[mark])

    def next_mark(self):
        self.last_mark += 1
        return self.last_mark

    def get_mark(self, rev):
        self.last_mark += 1
        self.marks[rev] = self.last_mark
        return self.last_mark

    def is_marked(self, rev):
        return rev in self.marks

    def new_mark(self, rev, mark):
        self.marks[rev] = mark
        self.rev_marks[mark] = rev
        self.last_mark = mark

    def get_tip(self, branch):
        try:
            return str(self.tips[branch])
        except KeyError:
            return None

    def set_tip(self, branch, tip):
        self.tips[branch] = tip

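# Minimal line-oriented parser for the command stream that git's remote-helper
# machinery feeds us on stdin (one command per line, blocks terminated by an
# empty line or 'done').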
class Parser:

    def __init__(self, repo):
        self.repo = repo
        self.line = self.get_line()

    def get_line(self):
        return sys.stdin.readline().strip()

    def __getitem__(self, i):
        return self.line.split()[i]

    def check(self, word):
        return self.line.startswith(word)

    def each_block(self, separator):
        while self.line != separator:
            yield self.line
            self.line = self.get_line()

    def __iter__(self):
        return self.each_block('')

    def next(self):
        self.line = self.get_line()
        if self.line == 'done':
            self.line = None

    def get_mark(self):
        i = self.line.index(':') + 1
        return int(self.line[i:])

    def get_data(self):
        if not self.check('data'):
            return None
        i = self.line.index(' ') + 1
        size = int(self.line[i:])
        return sys.stdin.read(size)

    def get_author(self):
        m = RAW_AUTHOR_RE.match(self.line)
        if not m:
            return None
        _, name, email, date, tz = m.groups()
        name = name.decode('utf-8')
        committer = '%s <%s>' % (name, email)
        tz = int(tz)
        tz = ((tz / 100) * 3600) + ((tz % 100) * 60)
        return (committer, int(date), tz)

def rev_to_mark(rev):
    return marks.from_rev(rev)

def mark_to_rev(mark):
    return marks.to_rev(mark)

def fixup_user(user):
    name = mail = None
    user = user.replace('"', '')
    m = AUTHOR_RE.match(user)
    if m:
        name = m.group(1)
        mail = m.group(2).strip()
    else:
        m = EMAIL_RE.match(user)
        if m:
            mail = m.group(1)
        else:
            m = NAME_RE.match(user)
            if m:
                name = m.group(1).strip()

    if not name:
        name = 'unknown'
    if not mail:
        mail = 'Unknown'

    return '%s <%s>' % (name, mail)

def get_filechanges(cur, prev):
    modified = {}
    removed = {}

    changes = cur.changes_from(prev)

    def u(s):
        return s.encode('utf-8')

    for path, fid, kind in changes.added:
        modified[u(path)] = fid
    for path, fid, kind in changes.removed:
        removed[u(path)] = None
    for path, fid, kind, mod, _ in changes.modified:
        modified[u(path)] = fid
    for oldpath, newpath, fid, kind, mod, _ in changes.renamed:
        removed[u(oldpath)] = None
        if kind == 'directory':
            lst = cur.list_files(from_dir=newpath, recursive=True)
            for path, file_class, kind, fid, entry in lst:
                if kind != 'directory':
                    modified[u(newpath + '/' + path)] = fid
        else:
            modified[u(newpath)] = fid

    return modified, removed

def export_files(tree, files):
    final = []
    for path, fid in files.iteritems():
        kind = tree.kind(fid)

        h = tree.get_file_sha1(fid)

        if kind == 'symlink':
            d = tree.get_symlink_target(fid)
            mode = '120000'
        elif kind == 'file':

            if tree.is_executable(fid):
                mode = '100755'
            else:
                mode = '100644'

            # is the blob already exported?
            if h in filenodes:
                mark = filenodes[h]
                final.append((mode, mark, path))
                continue

            d = tree.get_file_text(fid)
        elif kind == 'directory':
            continue
        else:
            die("Unhandled kind '%s' for path '%s'" % (kind, path))

        mark = marks.next_mark()
        filenodes[h] = mark

        print "blob"
        print "mark :%u" % mark
        print "data %d" % len(d)
        print d

        final.append((mode, mark, path))

    return final

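# Walk the bzr branch from the last exported tip and write a git fast-import
# stream (blobs, commits, and a final reset of the ref) to stdout.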
def export_branch(repo, name):
    ref = '%s/heads/%s' % (prefix, name)
    tip = marks.get_tip(name)

    branch = get_remote_branch(name)
    repo = branch.repository

    branch.lock_read()
    revs = branch.iter_merge_sorted_revisions(None, tip, 'exclude', 'forward')
    try:
        tip_revno = branch.revision_id_to_revno(tip)
        last_revno, _ = branch.last_revision_info()
        total = last_revno - tip_revno
    except bzrlib.errors.NoSuchRevision:
        tip_revno = 0
        total = 0

    for revid, _, seq, _ in revs:

        if marks.is_marked(revid):
            continue

        rev = repo.get_revision(revid)
        revno = seq[0]

        parents = rev.parent_ids
        time = rev.timestamp
        tz = rev.timezone
        committer = rev.committer.encode('utf-8')
        committer = "%s %u %s" % (fixup_user(committer), time, gittz(tz))
        authors = rev.get_apparent_authors()
        if authors:
            author = authors[0].encode('utf-8')
            author = "%s %u %s" % (fixup_user(author), time, gittz(tz))
        else:
            author = committer
        msg = rev.message.encode('utf-8')

        msg += '\n'

        if len(parents) == 0:
            parent = bzrlib.revision.NULL_REVISION
        else:
            parent = parents[0]

        cur_tree = repo.revision_tree(revid)
        prev = repo.revision_tree(parent)
        modified, removed = get_filechanges(cur_tree, prev)

        modified_final = export_files(cur_tree, modified)

        if len(parents) == 0:
            print 'reset %s' % ref

        print "commit %s" % ref
        print "mark :%d" % (marks.get_mark(revid))
        print "author %s" % (author)
        print "committer %s" % (committer)
        print "data %d" % (len(msg))
        print msg

        for i, p in enumerate(parents):
            try:
                m = rev_to_mark(p)
            except KeyError:
                # ghost?
                continue
            if i == 0:
                print "from :%s" % m
            else:
                print "merge :%s" % m

        for f in removed:
            print "D %s" % (f,)
        for f in modified_final:
            print "M %s :%u %s" % f
        print

        if len(seq) > 1:
            # let's skip branch revisions from the progress report
            continue

        progress = (revno - tip_revno)
        if (progress % 100 == 0):
            if total:
                print "progress revision %d '%s' (%d/%d)" % (revno, name, progress, total)
            else:
                print "progress revision %d '%s' (%d)" % (revno, name, progress)

    branch.unlock()

    revid = branch.last_revision()

    # make sure the ref is updated
    print "reset %s" % ref
    print "from :%u" % rev_to_mark(revid)
    print

    marks.set_tip(name, revid)

def export_tag(repo, name):
    ref = '%s/tags/%s' % (prefix, name)
    print "reset %s" % ref
    print "from :%u" % rev_to_mark(tags[name])
    print

def do_import(parser):
    repo = parser.repo
    path = os.path.join(dirname, 'marks-git')

    print "feature done"
    if os.path.exists(path):
        print "feature import-marks=%s" % path
    print "feature export-marks=%s" % path
    print "feature force"
    sys.stdout.flush()

    while parser.check('import'):
        ref = parser[1]
        if ref.startswith('refs/heads/'):
            name = ref[len('refs/heads/'):]
            export_branch(repo, name)
        if ref.startswith('refs/tags/'):
            name = ref[len('refs/tags/'):]
            export_tag(repo, name)
        parser.next()

    print 'done'

    sys.stdout.flush()

def parse_blob(parser):
    parser.next()
    mark = parser.get_mark()
    parser.next()
    data = parser.get_data()
    blob_marks[mark] = data
    parser.next()

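# A tree-like object built from the blobs and file commands of an incoming git
# commit; it provides just enough of the bzr tree interface (iter_changes,
# get_file_with_stat, ...) for the commit builder to record the new revision.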
class CustomTree():

    def __init__(self, branch, revid, parents, files):
        self.updates = {}
        self.branch = branch

        def copy_tree(revid):
            files = files_cache[revid] = {}
            branch.lock_read()
            tree = branch.repository.revision_tree(revid)
            try:
                for path, entry in tree.iter_entries_by_dir():
                    files[path] = [entry.file_id, None]
            finally:
                branch.unlock()
            return files

        if len(parents) == 0:
            self.base_id = bzrlib.revision.NULL_REVISION
            self.base_files = {}
        else:
            self.base_id = parents[0]
            self.base_files = files_cache.get(self.base_id, None)
            if not self.base_files:
                self.base_files = copy_tree(self.base_id)

        self.files = files_cache[revid] = self.base_files.copy()
        self.rev_files = {}

        for path, data in self.files.iteritems():
            fid, mark = data
            self.rev_files[fid] = [path, mark]

        for path, f in files.iteritems():
            fid, mark = self.files.get(path, [None, None])
            if not fid:
                fid = bzrlib.generate_ids.gen_file_id(path)
            f['path'] = path
            self.rev_files[fid] = [path, mark]
            self.updates[fid] = f

    def last_revision(self):
        return self.base_id

    def iter_changes(self):
        changes = []

        def get_parent(dirname, basename):
            parent_fid, mark = self.base_files.get(dirname, [None, None])
            if parent_fid:
                return parent_fid
            parent_fid, mark = self.files.get(dirname, [None, None])
            if parent_fid:
                return parent_fid
            if basename == '':
                return None
            fid = bzrlib.generate_ids.gen_file_id(path)
            add_entry(fid, dirname, 'directory')
            return fid

        def add_entry(fid, path, kind, mode=None):
            dirname, basename = os.path.split(path)
            parent_fid = get_parent(dirname, basename)

            executable = False
            if mode == '100755':
                executable = True
            elif mode == '120000':
                kind = 'symlink'

            change = (fid,
                    (None, path),
                    True,
                    (False, True),
                    (None, parent_fid),
                    (None, basename),
                    (None, kind),
                    (None, executable))
            self.files[path] = [change[0], None]
            changes.append(change)

        def update_entry(fid, path, kind, mode=None):
            dirname, basename = os.path.split(path)
            parent_fid = get_parent(dirname, basename)

            executable = False
            if mode == '100755':
                executable = True
            elif mode == '120000':
                kind = 'symlink'

            change = (fid,
                    (path, path),
                    True,
                    (True, True),
                    (None, parent_fid),
                    (None, basename),
                    (None, kind),
                    (None, executable))
            self.files[path] = [change[0], None]
            changes.append(change)

        def remove_entry(fid, path, kind):
            dirname, basename = os.path.split(path)
            parent_fid = get_parent(dirname, basename)
            change = (fid,
                    (path, None),
                    True,
                    (True, False),
                    (parent_fid, None),
                    (None, None),
                    (None, None),
                    (None, None))
            del self.files[path]
            changes.append(change)

        for fid, f in self.updates.iteritems():
            path = f['path']

            if 'deleted' in f:
                remove_entry(fid, path, 'file')
                continue

            if path in self.base_files:
                update_entry(fid, path, 'file', f['mode'])
            else:
                add_entry(fid, path, 'file', f['mode'])

            self.files[path][1] = f['mark']
            self.rev_files[fid][1] = f['mark']

        return changes

    def get_content(self, file_id):
        path, mark = self.rev_files[file_id]
        if mark:
            return blob_marks[mark]

        # last resort
        tree = self.branch.repository.revision_tree(self.base_id)
        return tree.get_file_text(file_id)

    def get_file_with_stat(self, file_id, path=None):
        content = self.get_content(file_id)
        return (StringIO.StringIO(content), None)

    def get_symlink_target(self, file_id):
        return self.get_content(file_id)

    def id2path(self, file_id):
        path, mark = self.rev_files[file_id]
        return path

def c_style_unescape(string):
    if string[0] == string[-1] == '"':
        return string.decode('string-escape')[1:-1]
    return string

def parse_commit(parser):
    parents = []

    ref = parser[1]
    parser.next()

    if ref.startswith('refs/heads/'):
        name = ref[len('refs/heads/'):]
        branch = get_remote_branch(name)
    else:
        die('unknown ref')

    commit_mark = parser.get_mark()
    parser.next()
    author = parser.get_author()
    parser.next()
    committer = parser.get_author()
    parser.next()
    data = parser.get_data()
    parser.next()
    if parser.check('from'):
        parents.append(parser.get_mark())
        parser.next()
    while parser.check('merge'):
        parents.append(parser.get_mark())
        parser.next()

    # fast-export adds an extra newline
    if data[-1] == '\n':
        data = data[:-1]

    files = {}

    for line in parser:
        if parser.check('M'):
            t, m, mark_ref, path = line.split(' ', 3)
            mark = int(mark_ref[1:])
            f = { 'mode' : m, 'mark' : mark }
        elif parser.check('D'):
            t, path = line.split(' ', 1)
            f = { 'deleted' : True }
        else:
            die('Unknown file command: %s' % line)
        path = c_style_unescape(path).decode('utf-8')
        files[path] = f

    committer, date, tz = committer
    author, _, _ = author
    parents = [mark_to_rev(p) for p in parents]
    revid = bzrlib.generate_ids.gen_revision_id(committer, date)
    props = {}
    props['branch-nick'] = branch.nick
    props['authors'] = author

    mtree = CustomTree(branch, revid, parents, files)
    changes = mtree.iter_changes()

    branch.lock_write()
    try:
        builder = branch.get_commit_builder(parents, None, date, tz, committer, props, revid)
        try:
            list(builder.record_iter_changes(mtree, mtree.last_revision(), changes))
            builder.finish_inventory()
            builder.commit(data.decode('utf-8', 'replace'))
        except Exception, e:
            builder.abort()
            raise
    finally:
        branch.unlock()

    parsed_refs[ref] = revid
    marks.new_mark(revid, commit_mark)

def parse_reset(parser):
    ref = parser[1]
    parser.next()

    # ugh
    if parser.check('commit'):
        parse_commit(parser)
        return
    if not parser.check('from'):
        return
    from_mark = parser.get_mark()
    parser.next()

    parsed_refs[ref] = mark_to_rev(from_mark)

def do_export(parser):
    parser.next()

    for line in parser.each_block('done'):
        if parser.check('blob'):
            parse_blob(parser)
        elif parser.check('commit'):
            parse_commit(parser)
        elif parser.check('reset'):
            parse_reset(parser)
        elif parser.check('tag'):
            pass
        elif parser.check('feature'):
            pass
        else:
            die('unhandled export command: %s' % line)

    for ref, revid in parsed_refs.iteritems():
        if ref.startswith('refs/heads/'):
            name = ref[len('refs/heads/'):]
            branch = get_remote_branch(name)
            branch.generate_revision_history(revid, marks.get_tip(name))

            if name in peers:
                peer = bzrlib.branch.Branch.open(peers[name],
                                                 possible_transports=transports)
                try:
                    peer.bzrdir.push_branch(branch, revision_id=revid,
                                            overwrite=force)
                except bzrlib.errors.DivergedBranches:
                    print "error %s non-fast forward" % ref
                    continue

            try:
                wt = branch.bzrdir.open_workingtree()
                wt.update()
            except bzrlib.errors.NoWorkingTree:
                pass
        elif ref.startswith('refs/tags/'):
            # TODO: implement tag push
            print "error %s pushing tags not supported" % ref
            continue
        else:
            # transport-helper/fast-export bugs
            continue

        print "ok %s" % ref

    print

def do_capabilities(parser):
    print "import"
    print "export"
    print "refspec refs/heads/*:%s/heads/*" % prefix
    print "refspec refs/tags/*:%s/tags/*" % prefix

    path = os.path.join(dirname, 'marks-git')

    if os.path.exists(path):
        print "*import-marks %s" % path
    print "*export-marks %s" % path

    print "option"
    print

class InvalidOptionValue(Exception):
    pass

def get_bool_option(val):
    if val == 'true':
        return True
    elif val == 'false':
        return False
    else:
        raise InvalidOptionValue()

def do_option(parser):
    global force
    opt, val = parser[1:3]
    try:
        if opt == 'force':
            force = get_bool_option(val)
            print 'ok'
        else:
            print 'unsupported'
    except InvalidOptionValue:
        print "error '%s' is not a valid value for option '%s'" % (val, opt)

def ref_is_valid(name):
    return not True in [c in name for c in '~^: \\']

def do_list(parser):
    master_branch = None

    for name in branches:
        if not master_branch:
            master_branch = name
        print "? refs/heads/%s" % name

    branch = get_remote_branch(master_branch)
    branch.lock_read()
    for tag, revid in branch.tags.get_tag_dict().items():
        try:
            branch.revision_id_to_dotted_revno(revid)
        except bzrlib.errors.NoSuchRevision:
            continue
        if not ref_is_valid(tag):
            continue
        print "? refs/tags/%s" % tag
        tags[tag] = revid
    branch.unlock()

    print "@refs/heads/%s HEAD" % master_branch
    print

def clone(path, remote_branch):
    try:
        bdir = bzrlib.bzrdir.BzrDir.create(path, possible_transports=transports)
    except bzrlib.errors.AlreadyControlDirError:
        bdir = bzrlib.bzrdir.BzrDir.open(path, possible_transports=transports)
    repo = bdir.find_repository()
    repo.fetch(remote_branch.repository)
    return remote_branch.sprout(bdir, repository=repo)

def get_remote_branch(name):
    remote_branch = bzrlib.branch.Branch.open(branches[name],
                                              possible_transports=transports)
    if isinstance(remote_branch.bzrdir.root_transport, bzrlib.transport.local.LocalTransport):
        return remote_branch

    branch_path = os.path.join(dirname, 'clone', name)

    try:
        branch = bzrlib.branch.Branch.open(branch_path,
                                           possible_transports=transports)
    except bzrlib.errors.NotBranchError:
        # clone
        branch = clone(branch_path, remote_branch)
    else:
        # pull
        try:
            branch.pull(remote_branch, overwrite=True)
        except bzrlib.errors.DivergedBranches:
            # use remote branch for now
            return remote_branch

    return branch

def find_branches(repo):
    transport = repo.bzrdir.root_transport

    for fn in transport.iter_files_recursive():
        if not fn.endswith('.bzr/branch-format'):
            continue

        name = subdir = fn[:-len('/.bzr/branch-format')]
        name = name if name != '' else 'master'
        name = name.replace('/', '+')

        try:
            cur = transport.clone(subdir)
            branch = bzrlib.branch.Branch.open_from_transport(cur)
        except bzrlib.errors.NotBranchError:
            continue
        else:
            yield name, branch.base

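# Open the remote bzr location, set up a shared repository under $GIT_DIR/bzr,
# and work out which branches to track (from the remote.<alias>.bzr-branches
# or remote-bzr.branches config, or by scanning the location).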
def get_repo(url, alias):
    normal_url = bzrlib.urlutils.normalize_url(url)
    origin = bzrlib.bzrdir.BzrDir.open(url, possible_transports=transports)
    is_local = isinstance(origin.transport, bzrlib.transport.local.LocalTransport)

    shared_path = os.path.join(gitdir, 'bzr')
    try:
        shared_dir = bzrlib.bzrdir.BzrDir.open(shared_path,
                                               possible_transports=transports)
    except bzrlib.errors.NotBranchError:
        shared_dir = bzrlib.bzrdir.BzrDir.create(shared_path,
                                                 possible_transports=transports)
    try:
        shared_repo = shared_dir.open_repository()
    except bzrlib.errors.NoRepositoryPresent:
        shared_repo = shared_dir.create_repository(shared=True)

    if not is_local:
        clone_path = os.path.join(dirname, 'clone')
        if not os.path.exists(clone_path):
            os.mkdir(clone_path)
        else:
            # check and remove old organization
            try:
                bdir = bzrlib.bzrdir.BzrDir.open(clone_path,
                                                 possible_transports=transports)
                bdir.destroy_repository()
            except bzrlib.errors.NotBranchError:
                pass
            except bzrlib.errors.NoRepositoryPresent:
                pass

    wanted = get_config('remote.%s.bzr-branches' % alias).rstrip().split(', ')
    # stupid python
    wanted = [e for e in wanted if e]
    if not wanted:
        wanted = get_config('remote-bzr.branches').rstrip().split(', ')
        # stupid python
        wanted = [e for e in wanted if e]

    if not wanted:
        try:
            repo = origin.open_repository()
            if not repo.bzrdir.root_transport.listable():
                # this repository is not usable for us
                raise bzrlib.errors.NoRepositoryPresent(repo.bzrdir)
        except bzrlib.errors.NoRepositoryPresent:
            wanted = ['master']

    if wanted:
        def list_wanted(url, wanted):
            for name in wanted:
                subdir = name if name != 'master' else ''
                yield name, bzrlib.urlutils.join(url, subdir)

        branch_list = list_wanted(url, wanted)
    else:
        branch_list = find_branches(repo)

    for name, url in branch_list:
        if not is_local:
            peers[name] = url
        branches[name] = url

    return origin

def fix_path(alias, orig_url):
    url = urlparse.urlparse(orig_url, 'file')
    if url.scheme != 'file' or os.path.isabs(url.path):
        return
    abs_url = urlparse.urljoin("%s/" % os.getcwd(), orig_url)
    cmd = ['git', 'config', 'remote.%s.url' % alias, "bzr::%s" % abs_url]
    subprocess.call(cmd)

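# Remote-helper entry point: git invokes this script as 'git-remote-bzr
# <alias> <url>' and drives it with capabilities/list/import/export/option
# commands read from stdin.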
def main(args):
    global marks, prefix, gitdir, dirname
    global tags, filenodes
    global blob_marks
    global parsed_refs
    global files_cache
    global is_tmp
    global branches, peers
    global transports
    global force

    marks = None
    is_tmp = False
    gitdir = os.environ.get('GIT_DIR', None)

    if len(args) < 3:
        die('Not enough arguments.')

    if not gitdir:
        die('GIT_DIR not set')

    alias = args[1]
    url = args[2]

    tags = {}
    filenodes = {}
    blob_marks = {}
    parsed_refs = {}
    files_cache = {}
    branches = {}
    peers = {}
    transports = []
    force = False

    if alias[5:] == url:
        is_tmp = True
        alias = hashlib.sha1(alias).hexdigest()

    prefix = 'refs/bzr/%s' % alias
    dirname = os.path.join(gitdir, 'bzr', alias)

    if not is_tmp:
        fix_path(alias, url)

    if not os.path.exists(dirname):
        os.makedirs(dirname)

    if hasattr(bzrlib.ui.ui_factory, 'be_quiet'):
        bzrlib.ui.ui_factory.be_quiet(True)

    repo = get_repo(url, alias)

    marks_path = os.path.join(dirname, 'marks-int')
    marks = Marks(marks_path)

    parser = Parser(repo)
    for line in parser:
        if parser.check('capabilities'):
            do_capabilities(parser)
        elif parser.check('list'):
            do_list(parser)
        elif parser.check('import'):
            do_import(parser)
        elif parser.check('export'):
            do_export(parser)
        elif parser.check('option'):
            do_option(parser)
        else:
            die('unhandled command: %s' % line)
        sys.stdout.flush()

def bye():
    if not marks:
        return
    if not is_tmp:
        marks.store()
    else:
        shutil.rmtree(dirname)

atexit.register(bye)
sys.exit(main(sys.argv))