Mercurial and Git clients can push and pull from this alias URL to interact with this repository. You can change to which repository an alias points by going to the Aliases link on the project page.
# Copyright 2009-2010 Gregory P. Ward
# Copyright 2010-2011 Fog Creek Software
# Copyright 2010-2011 Unity Technologies
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

'''High-level command functions: lfadd() et. al, plus the cmdtable.'''

import os
import shutil

from mercurial import util, match as match_, hg, node, context, error
from mercurial.i18n import _

import lfutil
import basestore

# -- Commands ----------------------------------------------------------
def lfconvert(ui, src, dest, *pats, **opts):
    '''Convert a normal repository to a largefiles repository

    Convert source repository creating an identical repository, except that all
    files that match the patterns given, or are over the given size will be
    added as largefiles. The size used to determine whether or not to track a
    file as a largefile is the size of the first version of the file. After
    running this command you will need to make sure that largefiles is enabled
    anywhere you intend to push the new repository.'''

    if opts['tonormal']:
        tolfile = False
    else:
        tolfile = True
        size = opts['size']
        if not size:
            # fall back to the configured size; int() raises TypeError on
            # None (size never configured) and ValueError on a non-integer
            size = ui.config(lfutil.longname, 'size', default=None)
            try:
                size = int(size)
            except ValueError:
                raise util.Abort(_('largefiles.size must be integer, was %s\n')
                                 % size)
            except TypeError:
                raise util.Abort(_('size must be specified'))

    try:
        rsrc = hg.repository(ui, src)
        if not rsrc.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % src)
    except error.RepoError as err:
        ui.traceback()
        raise util.Abort(err.args[0])
    if os.path.exists(dest):
        if not os.path.isdir(dest):
            raise util.Abort(_('destination %s already exists') % dest)
        elif os.listdir(dest):
            raise util.Abort(_('destination %s is not empty') % dest)
    try:
        ui.status(_('initializing destination %s\n') % dest)
        rdst = hg.repository(ui, dest, create=True)
        if not rdst.local():
            raise util.Abort(_('%s is not a local Mercurial repo') % dest)
    except error.RepoError:
        ui.traceback()
        raise util.Abort(_('%s is not a repo') % dest)

    # Lock destination to prevent modification while it is converted to.
    # Don't need to lock src because we are just reading from its history
    # which can't change.  Acquired BEFORE the try so that a failure to
    # lock doesn't leave dst_lock unbound in the finally clause.
    dst_lock = rdst.lock()
    try:
        # Get a list of all changesets in the source.  The easy way to do this
        # is to simply walk the changelog, using changelog.nodesbewteen().
        # Take a look at mercurial/revlog.py:639 for more details.
        # Use a generator instead of a list to decrease memory usage
        ctxs = (rsrc[ctx] for ctx in rsrc.changelog.nodesbetween(None,
            rsrc.heads())[0])
        revmap = {node.nullid: node.nullid}
        if tolfile:
            lfiles = set()
            normalfiles = set()
            if not pats:
                pats = ui.config(lfutil.longname, 'patterns', default=())
                if pats:
                    pats = pats.split(' ')
            if pats:
                matcher = match_.match(rsrc.root, '', list(pats))
            else:
                matcher = None

            lfiletohash = {}
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _lfconvert_addchangeset(rsrc, rdst, ctx, revmap,
                    lfiles, normalfiles, matcher, size, lfiletohash)
            ui.progress(_('converting revisions'), None)

            if os.path.exists(rdst.wjoin(lfutil.shortname)):
                shutil.rmtree(rdst.wjoin(lfutil.shortname))

            # remove the working-copy largefiles written as a side effect of
            # the conversion; prune any directories left empty by that
            for f in lfiletohash.keys():
                if os.path.isfile(rdst.wjoin(f)):
                    os.unlink(rdst.wjoin(f))
                try:
                    os.removedirs(os.path.dirname(rdst.wjoin(f)))
                except OSError:
                    # best-effort: directory not empty (or already gone)
                    pass
        else:
            for ctx in ctxs:
                ui.progress(_('converting revisions'), ctx.rev(),
                    unit=_('revision'), total=rsrc['tip'].rev())
                _addchangeset(ui, rsrc, rdst, ctx, revmap)
            ui.progress(_('converting revisions'), None)
    except:
        # we failed, remove the new directory
        shutil.rmtree(rdst.root)
        raise
    finally:
        dst_lock.release()

def _addchangeset(ui, rsrc, rdst, ctx, revmap):
    '''Recommit changeset ctx into rdst unmodified (largefiles-to-normal
    direction), converting standins back to normal files and remapping
    parents and .hgtags entries through revmap.'''
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    def getfilectx(repo, memctx, f):
        if lfutil.standin(f) in files:
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            try:
                fctx = ctx.filectx(lfutil.standin(f))
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = lfutil.splitstandin(renamed[0])

            # the standin's content is the hash of the real largefile;
            # fetch the actual data from the local store
            hash = fctx.data().strip()
            path = lfutil.findfile(rsrc, hash)
            ### TODO: What if the file is not cached?
            data = ''
            fd = None
            try:
                fd = open(path, 'rb')
                data = fd.read()
            finally:
                if fd:
                    fd.close()
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]
            data = fctx.data()
            if f == '.hgtags':
                # rewrite tag targets through revmap; .hgtags lines are
                # "<hex node> <tag name>"
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    dstfiles = []
    for file in files:
        if lfutil.isstandin(file):
            dstfiles.append(lfutil.splitstandin(file))
        else:
            dstfiles.append(file)
    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _lfconvert_addchangeset(rsrc, rdst, ctx, revmap, lfiles, normalfiles,
        matcher, size, lfiletohash):
    '''Recommit changeset ctx into rdst, converting files that match the
    patterns (or exceed size) into largefile standins.  lfiles/normalfiles
    and lfiletohash accumulate state across changesets.'''
    # Convert src parents to dst parents
    parents = []
    for p in ctx.parents():
        parents.append(revmap[p.node()])
    while len(parents) < 2:
        parents.append(node.nullid)

    # Generate list of changed files
    files = set(ctx.files())
    if node.nullid not in parents:
        mc = ctx.manifest()
        mp1 = ctx.parents()[0].manifest()
        mp2 = ctx.parents()[1].manifest()
        files |= (set(mp1) | set(mp2)) - set(mc)
        for f in mc:
            if mc[f] != mp1.get(f, None) or mc[f] != mp2.get(f, None):
                files.add(f)

    dstfiles = []
    for f in files:
        if f not in lfiles and f not in normalfiles:
            islfile = _islfile(f, ctx, matcher, size)
            # If this file was renamed or copied then copy
            # the lfileness of its predecessor
            if f in ctx.manifest():
                fctx = ctx.filectx(f)
                renamed = fctx.renamed()
                renamedlfile = renamed and renamed[0] in lfiles
                islfile |= renamedlfile
                if 'l' in fctx.flags():
                    if renamedlfile:
                        raise util.Abort(
                            _('Renamed/copied largefile %s becomes symlink')
                            % f)
                    islfile = False
            if islfile:
                lfiles.add(f)
            else:
                normalfiles.add(f)

        if f in lfiles:
            dstfiles.append(lfutil.standin(f))
            # lfile in manifest if it has not been removed/renamed
            if f in ctx.manifest():
                if 'l' in ctx.filectx(f).flags():
                    if renamed and renamed[0] in lfiles:
                        raise util.Abort(_('largefile %s becomes symlink') % f)

                # lfile was modified, update standins
                fullpath = rdst.wjoin(f)
                lfutil.createdir(os.path.dirname(fullpath))
                m = util.sha1('')
                m.update(ctx[f].data())
                hash = m.hexdigest()
                if f not in lfiletohash or lfiletohash[f] != hash:
                    # fd must pre-exist so the finally clause can't hit a
                    # NameError when open() itself fails
                    fd = None
                    try:
                        fd = open(fullpath, 'wb')
                        fd.write(ctx[f].data())
                    finally:
                        if fd:
                            fd.close()
                    executable = 'x' in ctx[f].flags()
                    os.chmod(fullpath, lfutil.getmode(executable))
                    lfutil.writestandin(rdst, lfutil.standin(f), hash,
                        executable)
                    lfiletohash[f] = hash
        else:
            # normal file
            dstfiles.append(f)

    def getfilectx(repo, memctx, f):
        if lfutil.isstandin(f):
            # if the file isn't in the manifest then it was removed
            # or renamed, raise IOError to indicate this
            srcfname = lfutil.splitstandin(f)
            try:
                fctx = ctx.filectx(srcfname)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                # standin is always a lfile because lfileness
                # doesn't change after rename or copy
                renamed = lfutil.standin(renamed[0])

            return context.memfilectx(f, lfiletohash[srcfname], 'l' in
                fctx.flags(), 'x' in fctx.flags(), renamed)
        else:
            try:
                fctx = ctx.filectx(f)
            except error.LookupError:
                raise IOError()
            renamed = fctx.renamed()
            if renamed:
                renamed = renamed[0]

            data = fctx.data()
            if f == '.hgtags':
                # rewrite tag targets through revmap; .hgtags lines are
                # "<hex node> <tag name>"
                newdata = []
                for line in data.splitlines():
                    id, name = line.split(' ', 1)
                    newdata.append('%s %s\n' % (node.hex(revmap[node.bin(id)]),
                        name))
                data = ''.join(newdata)
            return context.memfilectx(f, data, 'l' in fctx.flags(),
                                      'x' in fctx.flags(), renamed)

    # Commit
    mctx = context.memctx(rdst, parents, ctx.description(), dstfiles,
                          getfilectx, ctx.user(), ctx.date(), ctx.extra())
    ret = rdst.commitctx(mctx)
    rdst.dirstate.setparents(ret)
    revmap[ctx.node()] = rdst.changelog.tip()

def _islfile(file, ctx, matcher, size):
    '''A file is a lfile if it matches a pattern or is over
    the given size (in megabytes).'''
    # Never store hgtags or hgignore as lfiles
    if file == '.hgtags' or file == '.hgignore' or file == '.hgsigs':
        return False
    if matcher and matcher(file):
        return True
    try:
        return ctx.filectx(file).size() >= size * 1024 * 1024
    except error.LookupError:
        # file was removed in this changeset
        return False

def uploadlfiles(ui, rsrc, rdst, files):
    '''upload largefiles to the central store'''
    # Don't upload locally. All largefiles are in the system wide cache
    # so the other repo can just get them from there.
    if not files or rdst.local():
        return

    store = basestore._openstore(rsrc, rdst, put=True)

    at = 0
    files = filter(lambda h: not store.exists(h), files)
    for hash in files:
        ui.progress(_('uploading largefiles'), at, unit='largefile',
                    total=len(files))
        source = lfutil.findfile(rsrc, hash)
        if not source:
            raise util.Abort(_('Missing largefile %s needs to be uploaded')
                             % hash)
        # XXX check for errors here
        store.put(source, hash)
        at += 1
    ui.progress(_('uploading largefiles'), None)

def verifylfiles(ui, repo, all=False, contents=False):
    '''Verify that every big file revision in the current changeset
    exists in the central store.  With --contents, also verify that
    the contents of each big file revision are correct (SHA-1 hash
    matches the revision ID).  With --all, check every changeset in
    this repository.'''
    if all:
        # Pass a list to the function rather than an iterator because we know a
        # list will work.
        revs = range(len(repo))
    else:
        revs = ['.']

    store = basestore._openstore(repo)
    return store.verify(revs, contents=contents)

def cachelfiles(ui, repo, node):
    '''cachelfiles ensures that all largefiles needed by the specified revision
    are present in the repository's largefile cache.

    returns a tuple (cached, missing).  cached is the list of files downloaded
    by this operation; missing is the list of files that were needed but could
    not be found.'''
    lfiles = lfutil.listlfiles(repo, node)
    toget = []

    for lfile in lfiles:
        # the standin's content is the expected hash of the largefile
        expectedhash = repo[node][lfutil.standin(lfile)].data().strip()
        # if it exists and its hash matches, it might have been locally
        # modified before updating and the user chose 'local'. in this case,
        # it will not be in any store, so don't look for it.
        if ((not os.path.exists(repo.wjoin(lfile))
             or expectedhash != lfutil.hashfile(repo.wjoin(lfile)))
                and not lfutil.findfile(repo, expectedhash)):
            toget.append((lfile, expectedhash))

    if toget:
        store = basestore._openstore(repo)
        ret = store.get(toget)
        return ret

    return ([], [])

def updatelfiles(ui, repo, filelist=None, printmessage=True):
    '''Update working-copy largefiles from their standins, restricted to
    filelist when given; reports counts unless printmessage is False.'''
    wlock = repo.wlock()
    try:
        lfdirstate = lfutil.openlfdirstate(ui, repo)
        lfiles = set(lfutil.listlfiles(repo)) | set(lfdirstate)

        if filelist is not None:
            lfiles = [f for f in lfiles if f in filelist]

        printed = False
        if printmessage and lfiles:
            ui.status(_('getting changed largefiles\n'))
            printed = True
            cachelfiles(ui, repo, '.')

        updated, removed = 0, 0
        for i in map(lambda f: _updatelfile(repo, lfdirstate, f), lfiles):
            # increment the appropriate counter according to _updatelfile's
            # return value
            updated += i > 0 and i or 0
            removed -= i < 0 and i or 0
            if printmessage and (removed or updated) and not printed:
                ui.status(_('getting changed largefiles\n'))
                printed = True

        lfdirstate.write()
        if printed and printmessage:
            ui.status(_('%d largefiles updated, %d removed\n') % (updated,
                removed))
    finally:
        wlock.release()

def _updatelfile(repo, lfdirstate, lfile):
    '''updates a single largefile and copies the state of its standin from
    the repository's dirstate to its state in the lfdirstate.

    returns 1 if the file was modified, -1 if the file was removed, 0 if the
    file was unchanged, and None if the needed largefile was missing from the
    cache.'''
    ret = 0
    abslfile = repo.wjoin(lfile)
    absstandin = repo.wjoin(lfutil.standin(lfile))
    if os.path.exists(absstandin):
        if os.path.exists(absstandin + '.orig'):
            # preserve the user's pre-merge copy of the largefile too
            shutil.copyfile(abslfile, abslfile + '.orig')
        expecthash = lfutil.readstandin(repo, lfile)
        if expecthash != '' and \
                (not os.path.exists(abslfile) or
                 expecthash != lfutil.hashfile(abslfile)):
            if not lfutil.copyfromcache(repo, expecthash, lfile):
                return None # don't try to set the mode or update the dirstate
            ret = 1
        mode = os.stat(absstandin).st_mode
        if mode != os.stat(abslfile).st_mode:
            os.chmod(abslfile, mode)
            ret = 1
    else:
        if os.path.exists(abslfile):
            os.unlink(abslfile)
            ret = -1
    state = repo.dirstate[lfutil.standin(lfile)]
    if state == 'n':
        lfdirstate.normal(lfile)
    elif state == 'r':
        lfdirstate.remove(lfile)
    elif state == 'a':
        lfdirstate.add(lfile)
    elif state == '?':
        try:
            # Mercurial >= 1.9
            lfdirstate.drop(lfile)
        except AttributeError:
            # Mercurial <= 1.8
            lfdirstate.forget(lfile)
    return ret

# -- hg commands declarations ------------------------------------------------

cmdtable = {
    'lfconvert': (lfconvert,
                  [('s', 'size', 0, 'All files over this size (in megabytes) '
                    'will be considered largefiles. This can also be specified '
                    'in your hgrc as [largefiles].size.'),
                   ('', 'tonormal', False,
                    'Convert from a largefiles repo to a normal repo')],
                  _('hg lfconvert SOURCE DEST [FILE ...]')),
    }
Attach a Trello Card
Add a tag
Your session has expired
You are no longer logged in. Please log in and try your request again.
Filter RSS Feed
This RSS feed URL allows you to see the contents of your current filter using any feed reader.
This link includes a special authentication token. If you share the URL with anyone else, they can see this RSS feed's activity. You can disable these tokens when needed.
Your current filter is unsaved; changing it won't affect this RSS feed.