#!/usr/bin/env python3

# Copyright (c) 2007, 2008 Rocco Rutte <pdmef@gmx.net> and others.
# License: MIT <http://www.opensource.org/licenses/mit-license.php>

from hg2git import setup_repo,fixup_user,get_branch,get_changeset
from hg2git import load_cache,save_cache,get_git_sha1,set_default_branch,set_origin_name
from optparse import OptionParser
import re
import sys
import os
from binascii import hexlify
import pluginloader

# silly regex to catch Signed-off-by lines in log message
sob_re=re.compile(b'^Signed-[Oo]ff-[Bb]y: (.+)$')
# insert 'checkpoint' command after this many commits or none at all if 0
cfg_checkpoint_count=0
# write some progress message every this many file contents written
cfg_export_boundary=1000

subrepo_cache={}
submodule_mappings=None

# True if fast export should automatically try to sanitize
# author/branch/tag names.
auto_sanitize = None

def gitmode(flags):
  return b'l' in flags and b'120000' or b'x' in flags and b'100755' or b'100644'
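
# Illustration: gitmode(b'l') == b'120000' (symlink), gitmode(b'x') == b'100755'
# (executable) and gitmode(b'') == b'100644' (regular file).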

def wr_no_nl(msg=b''):
  assert isinstance(msg, bytes)
  if msg:
    sys.stdout.buffer.write(msg)

def wr(msg=b''):
  wr_no_nl(msg + b'\n')
  #map(lambda x: sys.stderr.write('\t[%s]\n' % x),msg.split('\n'))

def wr_data(data):
  wr(b'data %d' % (len(data)))
  wr(data)
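
# Illustration: wr_data(b'hello') emits the two fast-import lines
#   data 5
#   hello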

def checkpoint(count):
  count=count+1
  if cfg_checkpoint_count>0 and count%cfg_checkpoint_count==0:
    sys.stderr.buffer.write(b"Checkpoint after %d commits\n" % count)
    wr(b'checkpoint')
    wr()
  return count

def revnum_to_revref(rev, old_marks):
  """Convert an hg revnum to a git-fast-import rev reference (an SHA1
  or a mark)"""
  return old_marks.get(rev) or b':%d' % (rev+1)
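
# Illustration: on a fresh export old_marks is empty, so hg revision 0 becomes
# the mark b':1'; on incremental runs old_marks maps already exported revisions
# to the SHA1s read from the marks file.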

def get_filechanges(repo,revision,parents,files):
  """Given some repository and revision, find all changed/deleted files."""
  if not parents:
    # first revision: feed in full manifest
    return files,[]
  else:
    # take the changes from the first parent
    f=repo.status(parents[0],revision)
    return f.modified+f.added,f.removed

def get_author(logmessage,committer,authors):
  """As git distinguishes between the author and committer of a patch, try to
  extract the author by detecting Signed-off-by lines.

  This walks from the end of the log message towards the top skipping
  empty lines. Upon the first non-empty line, it walks all Signed-off-by
  lines upwards to find the first one. For that (if found), it extracts
  authorship information the usual way (authors table, cleaning, etc.)

  If no Signed-off-by line is found, this defaults to the committer.

  This may sound stupid (and it somehow is), but in log messages we
  accidentally may have lines in the middle starting with
  "Signed-off-by: foo" and thus matching our detection regex. Prevent
  that."""

  loglines=logmessage.split(b'\n')
  i=len(loglines)
  # from tail walk to top skipping empty lines
  while i>=0:
    i-=1
    if len(loglines[i].strip())==0: continue
    break
  if i>=0:
    # walk further upwards to find first sob line, store in 'first'
    first=None
    while i>=0:
      m=sob_re.match(loglines[i])
      if m==None: break
      first=m
      i-=1
    # if the last non-empty line matches our Signed-off-by regex: extract username
    if first!=None:
      r=fixup_user(first.group(1),authors)
      return r
  return committer
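
# Illustration (names are made up): for a log message ending in
#   Signed-off-by: Jane Doe <jane@example.com>
# the git author becomes "Jane Doe <jane@example.com>" (after the authors map
# is applied), while the committer stays the Mercurial changeset user.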

def remove_gitmodules(ctx):
  """Removes all submodules of ctx parents"""
  # Removing all submodules coming from all parents is safe, as the submodules
  # of the current commit will be re-added below. A possible optimization would
  # be to only remove the submodules of the first parent.
  for parent_ctx in ctx.parents():
    for submodule in parent_ctx.substate.keys():
      wr(b'D %s' % submodule)
  wr(b'D .gitmodules')

def refresh_git_submodule(name,subrepo_info):
  wr(b'M 160000 %s %s' % (subrepo_info[1],name))
  sys.stderr.buffer.write(
    b"Adding/updating submodule %s, revision %s\n" % (name, subrepo_info[1])
  )
  return b'[submodule "%s"]\n\tpath = %s\n\turl = %s\n' % (name, name, subrepo_info[0])

def refresh_hg_submodule(name,subrepo_info):
  gitRepoLocation=submodule_mappings[name] + b"/.git"

  # Populate the cache to map mercurial revision to git revision
  if not name in subrepo_cache:
    subrepo_cache[name]=(load_cache(gitRepoLocation+b"/hg2git-mapping"),
                         load_cache(gitRepoLocation+b"/hg2git-marks",
                                    lambda s: int(s)-1))

  (mapping_cache,marks_cache)=subrepo_cache[name]
  subrepo_hash=subrepo_info[1]
  if subrepo_hash in mapping_cache:
    revnum=mapping_cache[subrepo_hash]
    gitSha=marks_cache[int(revnum)]
    wr(b'M 160000 %s %s' % (gitSha,name))
    sys.stderr.buffer.write(
      b"Adding/updating submodule %s, revision %s->%s\n"
      % (name, subrepo_hash, gitSha)
    )
    return b'[submodule "%s"]\n\tpath = %s\n\turl = %s\n' % (name,name,
                                                             submodule_mappings[name])
  else:
    sys.stderr.buffer.write(
      b"Warning: Could not find hg revision %s for %s in git %s\n"
      % (subrepo_hash, name, gitRepoLocation,)
    )
    return b''

def refresh_gitmodules(ctx):
  """Updates list of ctx submodules according to .hgsubstate file"""
  remove_gitmodules(ctx)
  gitmodules=b""
  # Create the .gitmodules file and all submodules
  for name,subrepo_info in ctx.substate.items():
    if subrepo_info[2]==b'git':
      gitmodules+=refresh_git_submodule(name,subrepo_info)
    elif submodule_mappings and name in submodule_mappings:
      gitmodules+=refresh_hg_submodule(name,subrepo_info)

  if len(gitmodules):
    wr(b'M 100644 inline .gitmodules')
    wr_data(gitmodules)

def export_file_contents(ctx,manifest,files,hgtags,encoding='',plugins={}):
  count=0
  max=len(files)
  is_submodules_refreshed=False
  for file in files:
    if not is_submodules_refreshed and (file==b'.hgsub' or file==b'.hgsubstate'):
      is_submodules_refreshed=True
      refresh_gitmodules(ctx)
    # Skip .hgtags files. They only get us in trouble.
    if not hgtags and file == b".hgtags":
      sys.stderr.buffer.write(b'Skip %s\n' % file)
      continue
    if encoding:
      filename=file.decode(encoding).encode('utf8')
    else:
      filename=file
    if b'.git' in filename.split(b'/'): # Even on Windows, the path separator is / here.
      sys.stderr.buffer.write(
        b'Ignoring file %s which cannot be tracked by git\n' % filename
      )
      continue
    file_ctx=ctx.filectx(file)
    d=file_ctx.data()

    if plugins and plugins['file_data_filters']:
      file_data = {'filename':filename,'file_ctx':file_ctx,'data':d}
      for filter in plugins['file_data_filters']:
        filter(file_data)
      d=file_data['data']
      filename=file_data['filename']
      file_ctx=file_data['file_ctx']

    if d is not None:
      wr(b'M %s inline %s' % (gitmode(manifest.flags(file)),
                              strip_leading_slash(filename)))
      wr(b'data %d' % len(d)) # had some trouble with size()
      wr(d)
    count+=1
    if count%cfg_export_boundary==0:
      sys.stderr.buffer.write(b'Exported %d/%d files\n' % (count,max))
  if max>cfg_export_boundary:
    sys.stderr.buffer.write(b'Exported %d/%d files\n' % (count,max))

def sanitize_name(name,what="branch", mapping={}):
  """Sanitize input roughly according to git-check-ref-format(1)"""

  # NOTE: Do not update this transform to work around
  # incompatibilities on your platform. If you change it and it starts
  # modifying names which previously were not touched it will break
  # preexisting setups which are doing incremental imports.

  # Fast-export tries to not inflict arbitrary naming policy on the
  # user; instead it aims to provide mechanisms allowing the user to
  # apply their own policy. Therefore do not add a transform which can
  # already be implemented with the -B and -T options to mangle branch
  # and tag names. If you have a source repository where this is too
  # much work to do manually, write a tool that does it for you.

  def dot(name):
    if not name: return name
    if name[0:1] == b'.': return b'_'+name[1:]
    return name

  if not auto_sanitize:
    return mapping.get(name,name)
  n=mapping.get(name,name)
  p=re.compile(b'([\\[ ~^:?\\\\*]|\\.\\.)')
  n=p.sub(b'_', n)
  if n[-1:] in (b'/', b'.'): n=n[:-1]+b'_'
  n=b'/'.join([dot(s) for s in n.split(b'/')])
  p=re.compile(b'_+')
  n=p.sub(b'_', n)

  if n!=name:
    sys.stderr.buffer.write(
      b'Warning: sanitized %s [%s] to [%s]\n' % (what.encode(), name, n)
    )
  return n
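
# Illustration (with auto_sanitize enabled): a branch named b'bug fix.. v1.'
# is exported as b'bug_fix_v1_': spaces, '..' and the trailing '.' are replaced
# with '_', and runs of '_' are collapsed.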

def strip_leading_slash(filename):
  if filename[0:1] == b'/':
    return filename[1:]
  return filename

def export_commit(ui,repo,revision,old_marks,max,count,authors,
                  branchesmap,sob,brmap,hgtags,encoding='',fn_encoding='',
                  plugins={}):
  def get_branchname(name):
    if name in brmap:
      return brmap[name]
    n=sanitize_name(name, "branch", branchesmap)
    brmap[name]=n
    return n

  ctx=repo[revision]

  if ctx.hidden():
    return count

  (_,user,(time,timezone),files,desc,branch,extra)=get_changeset(ui,repo,revision,authors,encoding)

  branch=get_branchname(branch)

  parents = [p for p in repo.changelog.parentrevs(revision) if p >= 0]
  author = get_author(desc,user,authors)
  hg_hash=ctx.hex()

  if plugins and plugins['commit_message_filters']:
    commit_data = {'branch': branch, 'parents': parents,
                   'author': author, 'desc': desc,
                   'revision': revision, 'hg_hash': hg_hash,
                   'committer': user, 'extra': extra}
    for filter in plugins['commit_message_filters']:
      filter(commit_data)
    branch = commit_data['branch']
    parents = commit_data['parents']
    author = commit_data['author']
    user = commit_data['committer']
    desc = commit_data['desc'] + b'\n'

  if len(parents)==0 and revision != 0:
    wr(b'reset refs/heads/%s' % branch)

  wr(b'commit refs/heads/%s' % branch)
  wr(b'mark :%d' % (revision+1))
  if sob:
    wr(b'author %s %d %s' % (author,time,timezone))
  wr(b'committer %s %d %s' % (user,time,timezone))
  wr_data(desc)

  man=ctx.manifest()

  if not parents:
    type='full'
  else:
    wr(b'from %s' % revnum_to_revref(parents[0], old_marks))
    if len(parents) == 1:
      type='simple delta'
    else: # a merge with two parents
      wr(b'merge %s' % revnum_to_revref(parents[1], old_marks))
      type='thorough delta'

  modified,removed=get_filechanges(repo,revision,parents,files)

  sys.stderr.buffer.write(
    b'%s: Exporting %s revision %d/%d with %d/%d modified/removed files\n'
    % (branch, type.encode(), revision + 1, max, len(modified), len(removed))
  )

  for file in removed:
    if fn_encoding:
      filename=file.decode(fn_encoding).encode('utf8')
    else:
      filename=file

    if plugins and plugins['file_data_filters']:
      file_data = {'filename':filename, 'file_ctx':None, 'data':None}
      for filter in plugins['file_data_filters']:
        filter(file_data)
      filename=file_data['filename']

    filename=strip_leading_slash(filename)
    if filename==b'.hgsub':
      remove_gitmodules(ctx)
    wr(b'D %s' % filename)

  export_file_contents(ctx,man,modified,hgtags,fn_encoding,plugins)
  wr()

  return checkpoint(count)
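
# The stream emitted for an ordinary (non-root, non-merge) changeset looks
# roughly like this (values are illustrative; the author line only appears
# when -s is given):
#   commit refs/heads/default
#   mark :42
#   committer John Doe <john@example.com> 1234567890 +0000
#   data <length of log message>
#   <log message>
#   from :41
#   D removed/file.txt
#   M 100644 inline some/file.txt
#   data <file size>
#   <file contents>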

def export_note(ui,repo,revision,count,authors,encoding,is_first):
  ctx = repo[revision]

  if ctx.hidden():
    return count

  (_,user,(time,timezone),_,_,_,_)=get_changeset(ui,repo,revision,authors,encoding)

  wr(b'commit refs/notes/hg')
  wr(b'committer %s %d %s' % (user,time,timezone))
  wr(b'data 0')
  if is_first:
    wr(b'from refs/notes/hg^0')
  wr(b'N inline :%d' % (revision+1))
  hg_hash=ctx.hex()
  wr_data(hg_hash)
  wr()
  return checkpoint(count)
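
# Illustration: with --hg-hash each exported commit (referenced by its mark
# :rev+1) gets a git note under refs/notes/hg whose content is the Mercurial
# changeset hash.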

def export_tags(ui,repo,old_marks,mapping_cache,count,authors,tagsmap):
  l=repo.tagslist()
  for tag,node in l:
    # Remap the tag name
    tag=sanitize_name(tag,"tag",tagsmap)
    # ignore latest revision
    if tag==b'tip': continue
    # ignore tags to nodes that are missing (ie, 'in the future')
    if hexlify(node) not in mapping_cache:
      sys.stderr.buffer.write(b'Tag %s refers to unseen node %s\n' % (tag, hexlify(node)))
      continue

    rev=int(mapping_cache[hexlify(node)])

    ref=revnum_to_revref(rev, old_marks)
    if ref==None:
      sys.stderr.buffer.write(
        b'Failed to find reference for creating tag %s at r%d\n' % (tag, rev)
      )
      continue
    sys.stderr.buffer.write(b'Exporting tag [%s] at [hg r%d] [git %s]\n' % (tag, rev, ref))
    wr(b'reset refs/tags/%s' % tag)
    wr(b'from %s' % ref)
    wr()
    count=checkpoint(count)
  return count
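
# Illustration: a tag v1.0 pointing at an exported changeset is emitted as
#   reset refs/tags/v1.0
#   from :<mark or SHA1>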

def load_mapping(name, filename, mapping_is_raw):
  raw_regexp=re.compile(b'^([^=]+)[ ]*=[ ]*(.+)$')
  string_regexp=b'"(((\\.)|(\\")|[^"])*)"'
  quoted_regexp=re.compile(b'^'+string_regexp+b'[ ]*=[ ]*'+string_regexp+b'$')

  def parse_raw_line(line):
    m=raw_regexp.match(line)
    if m==None:
      return None
    return (m.group(1).strip(), m.group(2).strip())

  def process_unicode_escape_sequences(s):
    # Replace unicode escape sequences in the otherwise UTF8-encoded bytestring s with
    # the UTF8-encoded characters they represent. We need to do an additional
    # .decode('utf8').encode('ascii', 'backslashreplace') to convert any non-ascii
    # characters into their escape sequences so that the subsequent
    # .decode('unicode-escape') succeeds:
    return (
      s.decode('utf8')
       .encode('ascii', 'backslashreplace')
       .decode('unicode-escape')
       .encode('utf8')
    )

  def parse_quoted_line(line):
    m=quoted_regexp.match(line)
    if m==None:
      return
    return (process_unicode_escape_sequences(m.group(1)),
            process_unicode_escape_sequences(m.group(5)))

  cache={}
  if not os.path.exists(filename):
    sys.stderr.write('Could not open mapping file [%s]\n' % (filename))
    return cache
  f=open(filename,'rb')
  l=0
  a=0
  for line in f.readlines():
    l+=1
    line=line.strip()
    if l==1 and line[0:1]==b'#' and line==b'# quoted-escaped-strings':
      continue
    elif line==b'' or line[0:1]==b'#':
      continue
    m=parse_raw_line(line) if mapping_is_raw else parse_quoted_line(line)
    if m==None:
      sys.stderr.write('Invalid file format in [%s], line %d\n' % (filename,l))
      continue
    # put key:value in cache, key without ^:
    cache[m[0]]=m[1]
    a+=1
  f.close()
  sys.stderr.write('Loaded %d %s\n' % (a, name))
  return cache
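
# Mapping files use one of two line formats (examples are illustrative):
#   default (quoted, with escape sequences):  "bob" = "Bob Smith <bob@example.com>"
#   with --mappings-are-raw:                  bob=Bob Smith <bob@example.com>
# An optional first line reading '# quoted-escaped-strings' is accepted and
# skipped; other lines starting with '#' and blank lines are ignored.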

def branchtip(repo, heads):
  '''return the tipmost branch head in heads'''
  tip = heads[-1]
  for h in reversed(heads):
    if 'close' not in repo.changelog.read(h)[5]:
      tip = h
      break
  return tip

def verify_heads(ui,repo,cache,force,ignore_unnamed_heads,branchesmap):
  branches={}
  for bn, heads in repo.branchmap().iteritems():
    branches[bn] = branchtip(repo, heads)
  l=[(-repo.changelog.rev(n), n, t) for t, n in branches.items()]
  l.sort()

  # get list of hg's branches to verify, don't take all git has
  for _,_,b in l:
    b=get_branch(b)
    sanitized_name=sanitize_name(b,"branch",branchesmap)
    sha1=get_git_sha1(sanitized_name)
    c=cache.get(sanitized_name)
    if not c and sha1:
      sys.stderr.buffer.write(
        b'Error: Branch [%s] already exists and was not created by hg-fast-export, '
        b'export would overwrite unrelated branch\n' % b)
      if not force: return False
    elif sha1!=c:
      sys.stderr.buffer.write(
        b'Error: Branch [%s] modified outside hg-fast-export:'
        b'\n%s (repo) != %s (cache)\n' % (b, b'<None>' if sha1 is None else sha1, c)
      )
      if not force: return False

  # verify that branch has exactly one head
  t={}
  unnamed_heads=False
  for h in repo.filtered(b'visible').heads():
    branch=get_branch(repo[h].branch())
    if t.get(branch,False):
      sys.stderr.buffer.write(
        b'Error: repository has an unnamed head: hg r%d\n'
        % repo.changelog.rev(h)
      )
      unnamed_heads=True
      if not force and not ignore_unnamed_heads: return False
    t[branch]=True
  if unnamed_heads and not force and not ignore_unnamed_heads: return False
  return True

def hg2git(repourl,m,marksfile,mappingfile,headsfile,tipfile,
           authors={},branchesmap={},tagsmap={},
           sob=False,force=False,ignore_unnamed_heads=False,hgtags=False,notes=False,encoding='',fn_encoding='',
           plugins={}):
  def check_cache(filename, contents):
    if len(contents) == 0:
      sys.stderr.write('Warning: %s does not contain any data, this will probably make an incremental import fail\n' % filename)

  _max=int(m)

  old_marks=load_cache(marksfile,lambda s: int(s)-1)
  mapping_cache=load_cache(mappingfile)
  heads_cache=load_cache(headsfile)
  state_cache=load_cache(tipfile)

  if len(state_cache) != 0:
    for (name, data) in [(marksfile, old_marks),
                         (mappingfile, mapping_cache),
                         (headsfile, state_cache)]:
      check_cache(name, data)

  ui,repo=setup_repo(repourl)

  if not verify_heads(ui,repo,heads_cache,force,ignore_unnamed_heads,branchesmap):
    return 1

  try:
    tip=repo.changelog.count()
  except AttributeError:
    tip=len(repo)

  min=int(state_cache.get(b'tip',0))
  max=_max
  if _max<0 or max>tip:
    max=tip

  for rev in range(0,max):
    ctx=repo[rev]
    if ctx.hidden():
      continue
    mapping_cache[ctx.hex()] = b"%d" % rev

  if submodule_mappings:
    # Make sure that all mercurial submodules are registered in the submodule-mappings file
    for rev in range(0,max):
      ctx=repo[rev]
      if ctx.hidden():
        continue
      if ctx.substate:
        for key in ctx.substate:
          if ctx.substate[key][2]=='hg' and key not in submodule_mappings:
            sys.stderr.write("Error: %s not found in submodule-mappings\n" % (key))
            return 1

  c=0
  brmap={}
  for rev in range(min,max):
    c=export_commit(ui,repo,rev,old_marks,max,c,authors,branchesmap,
                    sob,brmap,hgtags,encoding,fn_encoding,
                    plugins)
  if notes:
    for rev in range(min,max):
      c=export_note(ui,repo,rev,c,authors, encoding, rev == min and min != 0)

  state_cache[b'tip']=max
  state_cache[b'repo']=repourl
  save_cache(tipfile,state_cache)
  save_cache(mappingfile,mapping_cache)

  c=export_tags(ui,repo,old_marks,mapping_cache,c,authors,tagsmap)

  sys.stderr.write('Issued %d commands\n' % c)

  return 0

if __name__=='__main__':
  def bail(parser,opt):
    sys.stderr.write('Error: No %s option given\n' % opt)
    parser.print_help()
    sys.exit(2)

  parser=OptionParser()

  parser.add_option("-n", "--no-auto-sanitize",action="store_false",
      dest="auto_sanitize",default=True,
      help="Do not perform built-in (broken in many cases) sanitizing of names")
  parser.add_option("-m","--max",type="int",dest="max",
      help="Maximum hg revision to import")
  parser.add_option("--mapping",dest="mappingfile",
      help="File to read last run's hg-to-git SHA1 mapping")
  parser.add_option("--marks",dest="marksfile",
      help="File to read git-fast-import's marks from")
  parser.add_option("--heads",dest="headsfile",
      help="File to read last run's git heads from")
  parser.add_option("--status",dest="statusfile",
      help="File to read status from")
  parser.add_option("-r","--repo",dest="repourl",
      help="URL of repo to import")
  parser.add_option("-s",action="store_true",dest="sob",
      default=False,help="Enable parsing Signed-off-by lines")
  parser.add_option("--hgtags",action="store_true",dest="hgtags",
      default=False,help="Enable exporting .hgtags files")
  parser.add_option("-A","--authors",dest="authorfile",
      help="Read authormap from AUTHORFILE")
  parser.add_option("-B","--branches",dest="branchesfile",
      help="Read branch map from BRANCHESFILE")
  parser.add_option("-T","--tags",dest="tagsfile",
      help="Read tags map from TAGSFILE")
  parser.add_option("-f","--force",action="store_true",dest="force",
      default=False,help="Ignore validation errors by force, implies --ignore-unnamed-heads")
  parser.add_option("--ignore-unnamed-heads",action="store_true",dest="ignore_unnamed_heads",
      default=False,help="Ignore unnamed head errors")
  parser.add_option("-M","--default-branch",dest="default_branch",
      help="Set the default branch")
  parser.add_option("-o","--origin",dest="origin_name",
      help="use <name> as namespace to track upstream")
  parser.add_option("--hg-hash",action="store_true",dest="notes",
      default=False,help="Annotate commits with the hg hash as git notes in the hg namespace")
  parser.add_option("-e",dest="encoding",
      help="Assume commit and author strings retrieved from Mercurial are encoded in <encoding>")
  parser.add_option("--fe",dest="fn_encoding",
      help="Assume file names from Mercurial are encoded in <filename_encoding>")
  parser.add_option("--mappings-are-raw",dest="raw_mappings", default=False,
      help="Assume mappings are raw <key>=<value> lines")
  parser.add_option("--filter-contents",dest="filter_contents",
      help="Pipe contents of each exported file through FILTER_CONTENTS <file-path> <hg-hash> <is-binary>")
  parser.add_option("--plugin-path", type="string", dest="pluginpath",
      help="Additional search path for plugins")
  parser.add_option("--plugin", action="append", type="string", dest="plugins",
      help="Add a plugin with the given init string <name=init>")
  parser.add_option("--subrepo-map", type="string", dest="subrepo_map",
      help="Provide a mapping file between the subrepository name and the submodule name")

  (options,args)=parser.parse_args()

  m=-1
  auto_sanitize = options.auto_sanitize
  if options.max!=None: m=options.max

  if options.marksfile==None: bail(parser,'--marks')
  if options.mappingfile==None: bail(parser,'--mapping')
  if options.headsfile==None: bail(parser,'--heads')
  if options.statusfile==None: bail(parser,'--status')
  if options.repourl==None: bail(parser,'--repo')

  if options.subrepo_map:
    if not os.path.exists(options.subrepo_map):
      sys.stderr.write('Subrepo mapping file not found %s\n'
                       % options.subrepo_map)
      sys.exit(1)
    submodule_mappings=load_mapping('subrepo mappings',
                                    options.subrepo_map,False)

  a={}
  if options.authorfile!=None:
    a=load_mapping('authors', options.authorfile, options.raw_mappings)

  b={}
  if options.branchesfile!=None:
    b=load_mapping('branches', options.branchesfile, options.raw_mappings)

  t={}
  if options.tagsfile!=None:
    t=load_mapping('tags', options.tagsfile, options.raw_mappings)

  if options.default_branch!=None:
    set_default_branch(options.default_branch)

  if options.origin_name!=None:
    set_origin_name(options.origin_name)

  encoding=''
  if options.encoding!=None:
    encoding=options.encoding

  fn_encoding=encoding
  if options.fn_encoding!=None:
    fn_encoding=options.fn_encoding

  plugins=[]
  if options.plugins!=None:
    plugins+=options.plugins

  if options.filter_contents!=None:
    plugins+=['shell_filter_file_contents='+options.filter_contents]

  plugins_dict={}
  plugins_dict['commit_message_filters']=[]
  plugins_dict['file_data_filters']=[]

  if plugins and options.pluginpath:
    sys.stderr.write('Using additional plugin path: ' + options.pluginpath + '\n')

  for plugin in plugins:
    split = plugin.split('=')
    name, opts = split[0], '='.join(split[1:])
    i = pluginloader.get_plugin(name,options.pluginpath)
    sys.stderr.write('Loaded plugin ' + i['name'] + ' from path: ' + i['path'] + ' with opts: ' + opts + '\n')
    plugin = pluginloader.load_plugin(i).build_filter(opts)
    if hasattr(plugin,'file_data_filter') and callable(plugin.file_data_filter):
      plugins_dict['file_data_filters'].append(plugin.file_data_filter)
    if hasattr(plugin, 'commit_message_filter') and callable(plugin.commit_message_filter):
      plugins_dict['commit_message_filters'].append(plugin.commit_message_filter)

  sys.exit(hg2git(options.repourl,m,options.marksfile,options.mappingfile,
                  options.headsfile, options.statusfile,
                  authors=a,branchesmap=b,tagsmap=t,
                  sob=options.sob,force=options.force,
                  ignore_unnamed_heads=options.ignore_unnamed_heads,
                  hgtags=options.hgtags,
                  notes=options.notes,encoding=encoding,fn_encoding=fn_encoding,
                  plugins=plugins_dict))