mercurial/streamclone.py
changeset 28473:61d1a3cc6e1c
parent    27897:2fdbf22a1b63
child     28532:ed75909c4c67
comparing 28472:70d3dc05e118 with 28473:61d1a3cc6e1c
--- a/mercurial/streamclone.py
+++ b/mercurial/streamclone.py
@@ -268,15 +268,16 @@
 
         # This is where we'll add compression in the future.
         assert compression == 'UN'
 
         seen = 0
-        repo.ui.progress(_('bundle'), 0, total=bytecount)
+        repo.ui.progress(_('bundle'), 0, total=bytecount, unit=_('bytes'))
 
         for chunk in it:
             seen += len(chunk)
-            repo.ui.progress(_('bundle'), seen, total=bytecount)
+            repo.ui.progress(_('bundle'), seen, total=bytecount,
+                             unit=_('bytes'))
             yield chunk
 
         repo.ui.progress(_('bundle'), None)
 
     return requirements, gen()
@@ -292,11 +293,11 @@
     """
     with repo.lock():
        repo.ui.status(_('%d files to transfer, %s of data\n') %
                       (filecount, util.bytecount(bytecount)))
         handled_bytes = 0
-        repo.ui.progress(_('clone'), 0, total=bytecount)
+        repo.ui.progress(_('clone'), 0, total=bytecount, unit=_('bytes'))
         start = time.time()
 
         with repo.transaction('clone'):
             with repo.svfs.backgroundclosing(repo.ui, expectedcount=filecount):
                 for i in xrange(filecount):
@@ -315,11 +316,11 @@
                     path = store.decodedir(name)
                     with repo.svfs(path, 'w', backgroundclose=True) as ofp:
                         for chunk in util.filechunkiter(fp, limit=size):
                             handled_bytes += len(chunk)
                             repo.ui.progress(_('clone'), handled_bytes,
-                                             total=bytecount)
+                                             total=bytecount, unit=_('bytes'))
                             ofp.write(chunk)
 
         # Writing straight to files circumvented the inmemory caches
         repo.invalidate()
 
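
For reference, a minimal sketch (not part of this changeset) of the progress pattern the patch moves to. It assumes the Mercurial 3.7-era API ui.progress(topic, pos, item='', unit='', total=None), where unit labels what the position counts; the copybytes helper below is hypothetical and only illustrates the call pattern.

from mercurial.i18n import _

def copybytes(ui, chunks, bytecount):
    # Hypothetical helper mirroring the patched code: report progress in
    # bytes by passing unit=_('bytes') alongside the running total.
    seen = 0
    ui.progress(_('bundle'), 0, total=bytecount, unit=_('bytes'))
    for chunk in chunks:
        seen += len(chunk)
        ui.progress(_('bundle'), seen, total=bytecount, unit=_('bytes'))
        yield chunk
    # Passing None as the position closes out the 'bundle' progress topic.
    ui.progress(_('bundle'), None)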