comparison hgext/remotefilelog/shallowrepo.py @ 43076:2372284d9457

formatting: blacken the codebase

This is using my patch to black (https://github.com/psf/black/pull/826) so we
don't un-wrap collection literals.

Done with:

  hg files 'set:**.py - mercurial/thirdparty/** - "contrib/python-zstandard/**"' | xargs black -S

# skip-blame mass-reformatting only

# no-check-commit reformats foo_bar functions

Differential Revision: https://phab.mercurial-scm.org/D6971
author Augie Fackler <augie@google.com>
date Sun, 06 Oct 2019 09:45:02 -0400
parents 2c74337e6483
children 687b865b95ad
comparison
43075:57875cf423c9 vs. 43076:2372284d9457
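Every change below is a purely mechanical reformatting applied by black (run with -S, so string quotes are left untouched, and with the patch referenced in the commit message applied so that already-wrapped collection literals are not collapsed). As a quick orientation, the dominant pattern is illustrated here with the reportpackmetrics() call from makepackstores() in the first hunk, shown without its surrounding indentation: a call whose continuation line was aligned under the opening parenthesis is either joined onto a single line, when it fits, or exploded with the arguments indented one level and the closing parenthesis on its own line.

# Before: continuation line aligned under the opening parenthesis.
shallowutil.reportpackmetrics(repo.ui, 'filestore', packcontentstore,
                              packmetadatastore)

# After: black keeps a call on one line when it fits; otherwise it indents the
# arguments one level and places the closing parenthesis on its own line.
shallowutil.reportpackmetrics(
    repo.ui, 'filestore', packcontentstore, packmetadatastore
)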
--- hgext/remotefilelog/shallowrepo.py (43075:57875cf423c9)
+++ hgext/remotefilelog/shallowrepo.py (43076:2372284d9457)
@@ -41,93 +41,123 @@
     if not os.path.exists(localpath):
         os.makedirs(localpath)
 
     # Instantiate local data stores
     localcontent = contentstore.remotefilelogcontentstore(
-        repo, localpath, repo.name, shared=False)
+        repo, localpath, repo.name, shared=False
+    )
     localmetadata = metadatastore.remotefilelogmetadatastore(
-        repo, localpath, repo.name, shared=False)
+        repo, localpath, repo.name, shared=False
+    )
     return localcontent, localmetadata
+
 
 def makecachestores(repo):
     """Typically machine-wide, cache of remote data; can be discarded."""
     # Instantiate shared cache stores
     cachepath = shallowutil.getcachepath(repo.ui)
     cachecontent = contentstore.remotefilelogcontentstore(
-        repo, cachepath, repo.name, shared=True)
+        repo, cachepath, repo.name, shared=True
+    )
     cachemetadata = metadatastore.remotefilelogmetadatastore(
-        repo, cachepath, repo.name, shared=True)
+        repo, cachepath, repo.name, shared=True
+    )
 
     repo.sharedstore = cachecontent
     repo.shareddatastores.append(cachecontent)
     repo.sharedhistorystores.append(cachemetadata)
 
     return cachecontent, cachemetadata
+
 
 def makeremotestores(repo, cachecontent, cachemetadata):
     """These stores fetch data from a remote server."""
     # Instantiate remote stores
     repo.fileservice = fileserverclient.fileserverclient(repo)
     remotecontent = contentstore.remotecontentstore(
-        repo.ui, repo.fileservice, cachecontent)
+        repo.ui, repo.fileservice, cachecontent
+    )
     remotemetadata = metadatastore.remotemetadatastore(
-        repo.ui, repo.fileservice, cachemetadata)
+        repo.ui, repo.fileservice, cachemetadata
+    )
     return remotecontent, remotemetadata
+
 
 def makepackstores(repo):
     """Packs are more efficient (to read from) cache stores."""
     # Instantiate pack stores
-    packpath = shallowutil.getcachepackpath(repo,
-                                            constants.FILEPACK_CATEGORY)
+    packpath = shallowutil.getcachepackpath(repo, constants.FILEPACK_CATEGORY)
     packcontentstore = datapack.datapackstore(repo.ui, packpath)
     packmetadatastore = historypack.historypackstore(repo.ui, packpath)
 
     repo.shareddatastores.append(packcontentstore)
     repo.sharedhistorystores.append(packmetadatastore)
-    shallowutil.reportpackmetrics(repo.ui, 'filestore', packcontentstore,
-                                  packmetadatastore)
+    shallowutil.reportpackmetrics(
+        repo.ui, 'filestore', packcontentstore, packmetadatastore
+    )
     return packcontentstore, packmetadatastore
+
 
 def makeunionstores(repo):
     """Union stores iterate the other stores and return the first result."""
     repo.shareddatastores = []
     repo.sharedhistorystores = []
 
     packcontentstore, packmetadatastore = makepackstores(repo)
     cachecontent, cachemetadata = makecachestores(repo)
     localcontent, localmetadata = makelocalstores(repo)
-    remotecontent, remotemetadata = makeremotestores(repo, cachecontent,
-                                                     cachemetadata)
+    remotecontent, remotemetadata = makeremotestores(
+        repo, cachecontent, cachemetadata
+    )
 
     # Instantiate union stores
     repo.contentstore = contentstore.unioncontentstore(
-        packcontentstore, cachecontent,
-        localcontent, remotecontent, writestore=localcontent)
+        packcontentstore,
+        cachecontent,
+        localcontent,
+        remotecontent,
+        writestore=localcontent,
+    )
     repo.metadatastore = metadatastore.unionmetadatastore(
-        packmetadatastore, cachemetadata, localmetadata, remotemetadata,
-        writestore=localmetadata)
+        packmetadatastore,
+        cachemetadata,
+        localmetadata,
+        remotemetadata,
+        writestore=localmetadata,
+    )
 
     fileservicedatawrite = cachecontent
     fileservicehistorywrite = cachemetadata
-    repo.fileservice.setstore(repo.contentstore, repo.metadatastore,
-                              fileservicedatawrite, fileservicehistorywrite)
-    shallowutil.reportpackmetrics(repo.ui, 'filestore',
-                                  packcontentstore, packmetadatastore)
+    repo.fileservice.setstore(
+        repo.contentstore,
+        repo.metadatastore,
+        fileservicedatawrite,
+        fileservicehistorywrite,
+    )
+    shallowutil.reportpackmetrics(
+        repo.ui, 'filestore', packcontentstore, packmetadatastore
+    )
+
 
 def wraprepo(repo):
     class shallowrepository(repo.__class__):
         @util.propertycache
         def name(self):
             return self.ui.config('remotefilelog', 'reponame')
 
         @util.propertycache
         def fallbackpath(self):
-            path = repo.ui.config("remotefilelog", "fallbackpath",
-                                  repo.ui.config('paths', 'default'))
+            path = repo.ui.config(
+                "remotefilelog",
+                "fallbackpath",
+                repo.ui.config('paths', 'default'),
+            )
             if not path:
-                raise error.Abort("no remotefilelog server "
-                                  "configured - is your .hg/hgrc trusted?")
+                raise error.Abort(
+                    "no remotefilelog server "
+                    "configured - is your .hg/hgrc trusted?"
+                )
 
             return path
 
         def maybesparsematch(self, *revs, **kwargs):
             '''
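The makeunionstores() docstring above says that union stores iterate the other stores and return the first result. A minimal sketch of that fallthrough idea, assuming a simple get() interface that raises KeyError on a miss (an assumption for illustration; the real unioncontentstore and unionmetadatastore interfaces are richer), looks like this:

# Illustrative sketch only; the get()/KeyError interface is assumed and is not
# remotefilelog's actual store API.
class unionstore(object):
    def __init__(self, *stores, **kwargs):
        self.stores = stores
        # mirrors the writestore=... argument seen in makeunionstores()
        self.writestore = kwargs.get('writestore')

    def get(self, name, node):
        # iterate the underlying stores and return the first result
        for store in self.stores:
            try:
                return store.get(name, node)
            except KeyError:
                continue
        raise KeyError((name, node))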
@@ -155,12 +185,13 @@
 
         def filectx(self, path, *args, **kwargs):
             if self.shallowmatch(path):
                 return remotefilectx.remotefilectx(self, path, *args, **kwargs)
             else:
-                return super(shallowrepository, self).filectx(path, *args,
-                                                              **kwargs)
+                return super(shallowrepository, self).filectx(
+                    path, *args, **kwargs
+                )
 
         @localrepo.unfilteredmethod
         def commitctx(self, ctx, error=False, origctx=None):
             """Add a new revision to current repository.
             Revision information is passed via the context argument.
@@ -176,46 +207,61 @@
                 for f in ctx.modified() + ctx.added():
                     fparent1 = m1.get(f, nullid)
                     if fparent1 != nullid:
                         files.append((f, hex(fparent1)))
                 self.fileservice.prefetch(files)
-            return super(shallowrepository, self).commitctx(ctx,
-                                                            error=error,
-                                                            origctx=origctx)
+            return super(shallowrepository, self).commitctx(
+                ctx, error=error, origctx=origctx
+            )
 
-        def backgroundprefetch(self, revs, base=None, repack=False, pats=None,
-                               opts=None, ensurestart=False):
+        def backgroundprefetch(
+            self,
+            revs,
+            base=None,
+            repack=False,
+            pats=None,
+            opts=None,
+            ensurestart=False,
+        ):
             """Runs prefetch in background with optional repack
             """
             cmd = [procutil.hgexecutable(), '-R', repo.origroot, 'prefetch']
             if repack:
                 cmd.append('--repack')
             if revs:
                 cmd += ['-r', revs]
             # We know this command will find a binary, so don't block
             # on it starting.
-            procutil.runbgcommand(cmd, encoding.environ,
-                                  ensurestart=ensurestart)
+            procutil.runbgcommand(
+                cmd, encoding.environ, ensurestart=ensurestart
+            )
 
         def prefetch(self, revs, base=None, pats=None, opts=None):
             """Prefetches all the necessary file revisions for the given revs
             Optionally runs repack in background
             """
-            with repo._lock(repo.svfs, 'prefetchlock', True, None, None,
-                            _('prefetching in %s') % repo.origroot):
+            with repo._lock(
+                repo.svfs,
+                'prefetchlock',
+                True,
+                None,
+                None,
+                _('prefetching in %s') % repo.origroot,
+            ):
                 self._prefetch(revs, base, pats, opts)
 
         def _prefetch(self, revs, base=None, pats=None, opts=None):
             fallbackpath = self.fallbackpath
             if fallbackpath:
                 # If we know a rev is on the server, we should fetch the server
                 # version of those files, since our local file versions might
                 # become obsolete if the local commits are stripped.
                 localrevs = repo.revs('outgoing(%s)', fallbackpath)
                 if base is not None and base != nullrev:
-                    serverbase = list(repo.revs('first(reverse(::%s) - %ld)',
-                                                base, localrevs))
+                    serverbase = list(
+                        repo.revs('first(reverse(::%s) - %ld)', base, localrevs)
+                    )
                     if serverbase:
                         base = serverbase[0]
             else:
                 localrevs = repo
 
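backgroundprefetch() in the hunk above does not prefetch in-process; it assembles an hg command line and hands it to procutil.runbgcommand(). With hypothetical values standing in for procutil.hgexecutable() and repo.origroot, and with repack enabled and a revset given, the assembled list corresponds to roughly:

# Hypothetical values; at runtime procutil.hgexecutable() and repo.origroot
# supply the real executable path and repository root.
cmd = ['/usr/bin/hg', '-R', '/home/user/repo', 'prefetch', '--repack', '-r', 'master']
# i.e. approximately: hg -R /home/user/repo prefetch --repack -r master,
# started in the background by procutil.runbgcommand(cmd, encoding.environ, ...).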
@@ -288,15 +334,18 @@
 
     repo.shallowmatch = match.always()
 
     makeunionstores(repo)
 
-    repo.includepattern = repo.ui.configlist("remotefilelog", "includepattern",
-                                             None)
-    repo.excludepattern = repo.ui.configlist("remotefilelog", "excludepattern",
-                                             None)
+    repo.includepattern = repo.ui.configlist(
+        "remotefilelog", "includepattern", None
+    )
+    repo.excludepattern = repo.ui.configlist(
+        "remotefilelog", "excludepattern", None
+    )
     if not util.safehasattr(repo, 'connectionpool'):
         repo.connectionpool = connectionpool.connectionpool(repo)
 
     if repo.includepattern or repo.excludepattern:
-        repo.shallowmatch = match.match(repo.root, '', None,
-            repo.includepattern, repo.excludepattern)
+        repo.shallowmatch = match.match(
+            repo.root, '', None, repo.includepattern, repo.excludepattern
+        )