87 command = eh.command |
87 command = eh.command |
88 configtable = eh.configtable |
88 configtable = eh.configtable |
89 templatekeyword = eh.templatekeyword |
89 templatekeyword = eh.templatekeyword |
90 |
90 |
91 # developer config: phabricator.batchsize |
91 # developer config: phabricator.batchsize |
92 eh.configitem(b'phabricator', b'batchsize', |
92 eh.configitem( |
93 default=12, |
93 b'phabricator', b'batchsize', default=12, |
94 ) |
94 ) |
95 eh.configitem(b'phabricator', b'callsign', |
95 eh.configitem( |
96 default=None, |
96 b'phabricator', b'callsign', default=None, |
97 ) |
97 ) |
98 eh.configitem(b'phabricator', b'curlcmd', |
98 eh.configitem( |
99 default=None, |
99 b'phabricator', b'curlcmd', default=None, |
100 ) |
100 ) |
101 # developer config: phabricator.repophid |
101 # developer config: phabricator.repophid |
102 eh.configitem(b'phabricator', b'repophid', |
102 eh.configitem( |
103 default=None, |
103 b'phabricator', b'repophid', default=None, |
104 ) |
104 ) |
105 eh.configitem(b'phabricator', b'url', |
105 eh.configitem( |
106 default=None, |
106 b'phabricator', b'url', default=None, |
107 ) |
107 ) |
108 eh.configitem(b'phabsend', b'confirm', |
108 eh.configitem( |
109 default=False, |
109 b'phabsend', b'confirm', default=False, |
110 ) |
110 ) |
111 |
111 |
112 colortable = { |
112 colortable = { |
113 b'phabricator.action.created': b'green', |
113 b'phabricator.action.created': b'green', |
114 b'phabricator.action.skipped': b'magenta', |
114 b'phabricator.action.skipped': b'magenta', |
117 b'phabricator.drev': b'bold', |
117 b'phabricator.drev': b'bold', |
118 b'phabricator.node': b'', |
118 b'phabricator.node': b'', |
119 } |
119 } |
120 |
120 |
121 _VCR_FLAGS = [ |
121 _VCR_FLAGS = [ |
122 (b'', b'test-vcr', b'', |
122 ( |
123 _(b'Path to a vcr file. If nonexistent, will record a new vcr transcript' |
123 b'', |
124 b', otherwise will mock all http requests using the specified vcr file.' |
124 b'test-vcr', |
125 b' (ADVANCED)' |
125 b'', |
126 )), |
126 _( |
|
127 b'Path to a vcr file. If nonexistent, will record a new vcr transcript' |
|
128 b', otherwise will mock all http requests using the specified vcr file.' |
|
129 b' (ADVANCED)' |
|
130 ), |
|
131 ), |
127 ] |
132 ] |
|
133 |
128 |
134 |
129 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False): |
135 def vcrcommand(name, flags, spec, helpcategory=None, optionalrepo=False): |
130 fullflags = flags + _VCR_FLAGS |
136 fullflags = flags + _VCR_FLAGS |
|
137 |
131 def hgmatcher(r1, r2): |
138 def hgmatcher(r1, r2): |
132 if r1.uri != r2.uri or r1.method != r2.method: |
139 if r1.uri != r2.uri or r1.method != r2.method: |
133 return False |
140 return False |
134 r1params = r1.body.split(b'&') |
141 r1params = r1.body.split(b'&') |
135 r2params = r2.body.split(b'&') |
142 r2params = r2.body.split(b'&') |
136 return set(r1params) == set(r2params) |
143 return set(r1params) == set(r2params) |
137 |
144 |
138 def sanitiserequest(request): |
145 def sanitiserequest(request): |
139 request.body = re.sub( |
146 request.body = re.sub( |
140 br'cli-[a-z0-9]+', |
147 br'cli-[a-z0-9]+', br'cli-hahayouwish', request.body |
141 br'cli-hahayouwish', |
|
142 request.body |
|
143 ) |
148 ) |
144 return request |
149 return request |
145 |
150 |
146 def sanitiseresponse(response): |
151 def sanitiseresponse(response): |
147 if r'set-cookie' in response[r'headers']: |
152 if r'set-cookie' in response[r'headers']: |
151 def decorate(fn): |
156 def decorate(fn): |
152 def inner(*args, **kwargs): |
157 def inner(*args, **kwargs): |
153 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None)) |
158 cassette = pycompat.fsdecode(kwargs.pop(r'test_vcr', None)) |
154 if cassette: |
159 if cassette: |
155 import hgdemandimport |
160 import hgdemandimport |
|
161 |
156 with hgdemandimport.deactivated(): |
162 with hgdemandimport.deactivated(): |
157 import vcr as vcrmod |
163 import vcr as vcrmod |
158 import vcr.stubs as stubs |
164 import vcr.stubs as stubs |
|
165 |
159 vcr = vcrmod.VCR( |
166 vcr = vcrmod.VCR( |
160 serializer=r'json', |
167 serializer=r'json', |
161 before_record_request=sanitiserequest, |
168 before_record_request=sanitiserequest, |
162 before_record_response=sanitiseresponse, |
169 before_record_response=sanitiseresponse, |
163 custom_patches=[ |
170 custom_patches=[ |
164 (urlmod, r'httpconnection', |
171 ( |
165 stubs.VCRHTTPConnection), |
172 urlmod, |
166 (urlmod, r'httpsconnection', |
173 r'httpconnection', |
167 stubs.VCRHTTPSConnection), |
174 stubs.VCRHTTPConnection, |
168 ]) |
175 ), |
|
176 ( |
|
177 urlmod, |
|
178 r'httpsconnection', |
|
179 stubs.VCRHTTPSConnection, |
|
180 ), |
|
181 ], |
|
182 ) |
169 vcr.register_matcher(r'hgmatcher', hgmatcher) |
183 vcr.register_matcher(r'hgmatcher', hgmatcher) |
170 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']): |
184 with vcr.use_cassette(cassette, match_on=[r'hgmatcher']): |
171 return fn(*args, **kwargs) |
185 return fn(*args, **kwargs) |
172 return fn(*args, **kwargs) |
186 return fn(*args, **kwargs) |
|
187 |
173 inner.__name__ = fn.__name__ |
188 inner.__name__ = fn.__name__ |
174 inner.__doc__ = fn.__doc__ |
189 inner.__doc__ = fn.__doc__ |
175 return command(name, fullflags, spec, helpcategory=helpcategory, |
190 return command( |
176 optionalrepo=optionalrepo)(inner) |
191 name, |
|
192 fullflags, |
|
193 spec, |
|
194 helpcategory=helpcategory, |
|
195 optionalrepo=optionalrepo, |
|
196 )(inner) |
|
197 |
177 return decorate |
198 return decorate |
|
199 |
178 |
200 |
179 def urlencodenested(params): |
201 def urlencodenested(params): |
180 """like urlencode, but works with nested parameters. |
202 """like urlencode, but works with nested parameters. |
181 |
203 |
182 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be |
204 For example, if params is {'a': ['b', 'c'], 'd': {'e': 'f'}}, it will be |
183 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to |
205 flattened to {'a[0]': 'b', 'a[1]': 'c', 'd[e]': 'f'} and then passed to |
184 urlencode. Note: the encoding is consistent with PHP's http_build_query. |
206 urlencode. Note: the encoding is consistent with PHP's http_build_query. |
185 """ |
207 """ |
186 flatparams = util.sortdict() |
208 flatparams = util.sortdict() |
|
209 |
187 def process(prefix, obj): |
210 def process(prefix, obj): |
188 if isinstance(obj, bool): |
211 if isinstance(obj, bool): |
189 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form |
212 obj = {True: b'true', False: b'false'}[obj] # Python -> PHP form |
190 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)] |
213 lister = lambda l: [(b'%d' % k, v) for k, v in enumerate(l)] |
191 items = {list: lister, dict: lambda x: x.items()}.get(type(obj)) |
214 items = {list: lister, dict: lambda x: x.items()}.get(type(obj)) |
195 for k, v in items(obj): |
218 for k, v in items(obj): |
196 if prefix: |
219 if prefix: |
197 process(b'%s[%s]' % (prefix, k), v) |
220 process(b'%s[%s]' % (prefix, k), v) |
198 else: |
221 else: |
199 process(k, v) |
222 process(k, v) |
|
223 |
200 process(b'', params) |
224 process(b'', params) |
201 return util.urlreq.urlencode(flatparams) |
225 return util.urlreq.urlencode(flatparams) |
|
226 |
202 |
227 |
203 def readurltoken(ui): |
228 def readurltoken(ui): |
204 """return conduit url, token and make sure they exist |
229 """return conduit url, token and make sure they exist |
205 |
230 |
206 Currently read from [auth] config section. In the future, it might |
231 Currently read from [auth] config section. In the future, it might |
207 make sense to read from .arcconfig and .arcrc as well. |
232 make sense to read from .arcconfig and .arcrc as well. |
208 """ |
233 """ |
209 url = ui.config(b'phabricator', b'url') |
234 url = ui.config(b'phabricator', b'url') |
210 if not url: |
235 if not url: |
211 raise error.Abort(_(b'config %s.%s is required') |
236 raise error.Abort( |
212 % (b'phabricator', b'url')) |
237 _(b'config %s.%s is required') % (b'phabricator', b'url') |
|
238 ) |
213 |
239 |
214 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user) |
240 res = httpconnectionmod.readauthforuri(ui, url, util.url(url).user) |
215 token = None |
241 token = None |
216 |
242 |
217 if res: |
243 if res: |
235 params = params.copy() |
263 params = params.copy() |
236 params[b'api.token'] = token |
264 params[b'api.token'] = token |
237 data = urlencodenested(params) |
265 data = urlencodenested(params) |
238 curlcmd = ui.config(b'phabricator', b'curlcmd') |
266 curlcmd = ui.config(b'phabricator', b'curlcmd') |
239 if curlcmd: |
267 if curlcmd: |
240 sin, sout = procutil.popen2(b'%s -d @- %s' |
268 sin, sout = procutil.popen2( |
241 % (curlcmd, procutil.shellquote(url))) |
269 b'%s -d @- %s' % (curlcmd, procutil.shellquote(url)) |
|
270 ) |
242 sin.write(data) |
271 sin.write(data) |
243 sin.close() |
272 sin.close() |
244 body = sout.read() |
273 body = sout.read() |
245 else: |
274 else: |
246 urlopener = urlmod.opener(ui, authinfo) |
275 urlopener = urlmod.opener(ui, authinfo) |
247 request = util.urlreq.request(pycompat.strurl(url), data=data) |
276 request = util.urlreq.request(pycompat.strurl(url), data=data) |
248 with contextlib.closing(urlopener.open(request)) as rsp: |
277 with contextlib.closing(urlopener.open(request)) as rsp: |
249 body = rsp.read() |
278 body = rsp.read() |
250 ui.debug(b'Conduit Response: %s\n' % body) |
279 ui.debug(b'Conduit Response: %s\n' % body) |
251 parsed = pycompat.rapply( |
280 parsed = pycompat.rapply( |
252 lambda x: encoding.unitolocal(x) if isinstance(x, pycompat.unicode) |
281 lambda x: encoding.unitolocal(x) |
|
282 if isinstance(x, pycompat.unicode) |
253 else x, |
283 else x, |
254 # json.loads only accepts bytes from py3.6+ |
284 # json.loads only accepts bytes from py3.6+ |
255 json.loads(encoding.unifromlocal(body)) |
285 json.loads(encoding.unifromlocal(body)), |
256 ) |
286 ) |
257 if parsed.get(b'error_code'): |
287 if parsed.get(b'error_code'): |
258 msg = (_(b'Conduit Error (%s): %s') |
288 msg = _(b'Conduit Error (%s): %s') % ( |
259 % (parsed[b'error_code'], parsed[b'error_info'])) |
289 parsed[b'error_code'], |
|
290 parsed[b'error_info'], |
|
291 ) |
260 raise error.Abort(msg) |
292 raise error.Abort(msg) |
261 return parsed[b'result'] |
293 return parsed[b'result'] |
|
294 |
262 |
295 |
263 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True) |
296 @vcrcommand(b'debugcallconduit', [], _(b'METHOD'), optionalrepo=True) |
264 def debugcallconduit(ui, repo, name): |
297 def debugcallconduit(ui, repo, name): |
265 """call Conduit API |
298 """call Conduit API |
266 |
299 |
268 to stdout as a JSON blob. |
301 to stdout as a JSON blob. |
269 """ |
302 """ |
270 # json.loads only accepts bytes from 3.6+ |
303 # json.loads only accepts bytes from 3.6+ |
271 rawparams = encoding.unifromlocal(ui.fin.read()) |
304 rawparams = encoding.unifromlocal(ui.fin.read()) |
272 # json.loads only returns unicode strings |
305 # json.loads only returns unicode strings |
273 params = pycompat.rapply(lambda x: |
306 params = pycompat.rapply( |
274 encoding.unitolocal(x) if isinstance(x, pycompat.unicode) else x, |
307 lambda x: encoding.unitolocal(x) |
275 json.loads(rawparams) |
308 if isinstance(x, pycompat.unicode) |
|
309 else x, |
|
310 json.loads(rawparams), |
276 ) |
311 ) |
277 # json.dumps only accepts unicode strings |
312 # json.dumps only accepts unicode strings |
278 result = pycompat.rapply(lambda x: |
313 result = pycompat.rapply( |
279 encoding.unifromlocal(x) if isinstance(x, bytes) else x, |
314 lambda x: encoding.unifromlocal(x) if isinstance(x, bytes) else x, |
280 callconduit(ui, name, params) |
315 callconduit(ui, name, params), |
281 ) |
316 ) |
282 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': ')) |
317 s = json.dumps(result, sort_keys=True, indent=2, separators=(u',', u': ')) |
283 ui.write(b'%s\n' % encoding.unitolocal(s)) |
318 ui.write(b'%s\n' % encoding.unitolocal(s)) |
|
319 |
284 |
320 |
285 def getrepophid(repo): |
321 def getrepophid(repo): |
286 """given callsign, return repository PHID or None""" |
322 """given callsign, return repository PHID or None""" |
287 # developer config: phabricator.repophid |
323 # developer config: phabricator.repophid |
288 repophid = repo.ui.config(b'phabricator', b'repophid') |
324 repophid = repo.ui.config(b'phabricator', b'repophid') |
289 if repophid: |
325 if repophid: |
290 return repophid |
326 return repophid |
291 callsign = repo.ui.config(b'phabricator', b'callsign') |
327 callsign = repo.ui.config(b'phabricator', b'callsign') |
292 if not callsign: |
328 if not callsign: |
293 return None |
329 return None |
294 query = callconduit(repo.ui, b'diffusion.repository.search', |
330 query = callconduit( |
295 {b'constraints': {b'callsigns': [callsign]}}) |
331 repo.ui, |
|
332 b'diffusion.repository.search', |
|
333 {b'constraints': {b'callsigns': [callsign]}}, |
|
334 ) |
296 if len(query[b'data']) == 0: |
335 if len(query[b'data']) == 0: |
297 return None |
336 return None |
298 repophid = query[b'data'][0][b'phid'] |
337 repophid = query[b'data'][0][b'phid'] |
299 repo.ui.setconfig(b'phabricator', b'repophid', repophid) |
338 repo.ui.setconfig(b'phabricator', b'repophid', repophid) |
300 return repophid |
339 return repophid |
301 |
340 |
|
341 |
302 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z') |
342 _differentialrevisiontagre = re.compile(br'\AD([1-9][0-9]*)\Z') |
303 _differentialrevisiondescre = re.compile( |
343 _differentialrevisiondescre = re.compile( |
304 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) |
344 br'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M |
|
345 ) |
|
346 |
305 |
347 |
306 def getoldnodedrevmap(repo, nodelist): |
348 def getoldnodedrevmap(repo, nodelist): |
307 """find previous nodes that has been sent to Phabricator |
349 """find previous nodes that has been sent to Phabricator |
308 |
350 |
309 return {node: (oldnode, Differential diff, Differential Revision ID)} |
351 return {node: (oldnode, Differential diff, Differential Revision ID)} |
345 |
387 |
346 # Double check if tags are genuine by collecting all old nodes from |
388 # Double check if tags are genuine by collecting all old nodes from |
347 # Phabricator, and expect precursors overlap with it. |
389 # Phabricator, and expect precursors overlap with it. |
348 if toconfirm: |
390 if toconfirm: |
349 drevs = [drev for force, precs, drev in toconfirm.values()] |
391 drevs = [drev for force, precs, drev in toconfirm.values()] |
350 alldiffs = callconduit(unfi.ui, b'differential.querydiffs', |
392 alldiffs = callconduit( |
351 {b'revisionIDs': drevs}) |
393 unfi.ui, b'differential.querydiffs', {b'revisionIDs': drevs} |
352 getnode = lambda d: bin( |
394 ) |
353 getdiffmeta(d).get(b'node', b'')) or None |
395 getnode = lambda d: bin(getdiffmeta(d).get(b'node', b'')) or None |
354 for newnode, (force, precset, drev) in toconfirm.items(): |
396 for newnode, (force, precset, drev) in toconfirm.items(): |
355 diffs = [d for d in alldiffs.values() |
397 diffs = [ |
356 if int(d[b'revisionID']) == drev] |
398 d for d in alldiffs.values() if int(d[b'revisionID']) == drev |
|
399 ] |
357 |
400 |
358 # "precursors" as known by Phabricator |
401 # "precursors" as known by Phabricator |
359 phprecset = set(getnode(d) for d in diffs) |
402 phprecset = set(getnode(d) for d in diffs) |
360 |
403 |
361 # Ignore if precursors (Phabricator and local repo) do not overlap, |
404 # Ignore if precursors (Phabricator and local repo) do not overlap, |
362 # and force is not set (when commit message says nothing) |
405 # and force is not set (when commit message says nothing) |
363 if not force and not bool(phprecset & precset): |
406 if not force and not bool(phprecset & precset): |
364 tagname = b'D%d' % drev |
407 tagname = b'D%d' % drev |
365 tags.tag(repo, tagname, nullid, message=None, user=None, |
408 tags.tag( |
366 date=None, local=True) |
409 repo, |
367 unfi.ui.warn(_(b'D%s: local tag removed - does not match ' |
410 tagname, |
368 b'Differential history\n') % drev) |
411 nullid, |
|
412 message=None, |
|
413 user=None, |
|
414 date=None, |
|
415 local=True, |
|
416 ) |
|
417 unfi.ui.warn( |
|
418 _( |
|
419 b'D%s: local tag removed - does not match ' |
|
420 b'Differential history\n' |
|
421 ) |
|
422 % drev |
|
423 ) |
369 continue |
424 continue |
370 |
425 |
371 # Find the last node using Phabricator metadata, and make sure it |
426 # Find the last node using Phabricator metadata, and make sure it |
372 # exists in the repo |
427 # exists in the repo |
373 oldnode = lastdiff = None |
428 oldnode = lastdiff = None |
400 diff = callconduit(repo.ui, b'differential.createrawdiff', params) |
458 diff = callconduit(repo.ui, b'differential.createrawdiff', params) |
401 if not diff: |
459 if not diff: |
402 raise error.Abort(_(b'cannot create diff for %s') % ctx) |
460 raise error.Abort(_(b'cannot create diff for %s') % ctx) |
403 return diff |
461 return diff |
404 |
462 |
|
463 |
405 def writediffproperties(ctx, diff): |
464 def writediffproperties(ctx, diff): |
406 """write metadata to diff so patches could be applied losslessly""" |
465 """write metadata to diff so patches could be applied losslessly""" |
407 params = { |
466 params = { |
408 b'diff_id': diff[b'id'], |
467 b'diff_id': diff[b'id'], |
409 b'name': b'hg:meta', |
468 b'name': b'hg:meta', |
410 b'data': templatefilters.json({ |
469 b'data': templatefilters.json( |
411 b'user': ctx.user(), |
470 { |
412 b'date': b'%d %d' % ctx.date(), |
471 b'user': ctx.user(), |
413 b'branch': ctx.branch(), |
472 b'date': b'%d %d' % ctx.date(), |
414 b'node': ctx.hex(), |
473 b'branch': ctx.branch(), |
415 b'parent': ctx.p1().hex(), |
474 b'node': ctx.hex(), |
416 }), |
475 b'parent': ctx.p1().hex(), |
|
476 } |
|
477 ), |
417 } |
478 } |
418 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params) |
479 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params) |
419 |
480 |
420 params = { |
481 params = { |
421 b'diff_id': diff[b'id'], |
482 b'diff_id': diff[b'id'], |
422 b'name': b'local:commits', |
483 b'name': b'local:commits', |
423 b'data': templatefilters.json({ |
484 b'data': templatefilters.json( |
424 ctx.hex(): { |
485 { |
425 b'author': stringutil.person(ctx.user()), |
486 ctx.hex(): { |
426 b'authorEmail': stringutil.email(ctx.user()), |
487 b'author': stringutil.person(ctx.user()), |
427 b'time': int(ctx.date()[0]), |
488 b'authorEmail': stringutil.email(ctx.user()), |
428 b'commit': ctx.hex(), |
489 b'time': int(ctx.date()[0]), |
429 b'parents': [ctx.p1().hex()], |
490 b'commit': ctx.hex(), |
430 b'branch': ctx.branch(), |
491 b'parents': [ctx.p1().hex()], |
431 }, |
492 b'branch': ctx.branch(), |
432 }), |
493 }, |
|
494 } |
|
495 ), |
433 } |
496 } |
434 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params) |
497 callconduit(ctx.repo().ui, b'differential.setdiffproperty', params) |
435 |
498 |
436 def createdifferentialrevision(ctx, revid=None, parentrevphid=None, |
499 |
437 oldnode=None, olddiff=None, actions=None, |
500 def createdifferentialrevision( |
438 comment=None): |
501 ctx, |
|
502 revid=None, |
|
503 parentrevphid=None, |
|
504 oldnode=None, |
|
505 olddiff=None, |
|
506 actions=None, |
|
507 comment=None, |
|
508 ): |
439 """create or update a Differential Revision |
509 """create or update a Differential Revision |
440 |
510 |
441 If revid is None, create a new Differential Revision, otherwise update |
511 If revid is None, create a new Differential Revision, otherwise update |
442 revid. If parentrevphid is not None, set it as a dependency. |
512 revid. If parentrevphid is not None, set it as a dependency. |
443 |
513 |
468 diff = olddiff |
538 diff = olddiff |
469 writediffproperties(ctx, diff) |
539 writediffproperties(ctx, diff) |
470 |
540 |
471 # Set the parent Revision every time, so commit re-ordering is picked-up |
541 # Set the parent Revision every time, so commit re-ordering is picked-up |
472 if parentrevphid: |
542 if parentrevphid: |
473 transactions.append({b'type': b'parents.set', |
543 transactions.append( |
474 b'value': [parentrevphid]}) |
544 {b'type': b'parents.set', b'value': [parentrevphid]} |
|
545 ) |
475 |
546 |
476 if actions: |
547 if actions: |
477 transactions += actions |
548 transactions += actions |
478 |
549 |
479 # Parse commit message and update related fields. |
550 # Parse commit message and update related fields. |
480 desc = ctx.description() |
551 desc = ctx.description() |
481 info = callconduit(repo.ui, b'differential.parsecommitmessage', |
552 info = callconduit( |
482 {b'corpus': desc}) |
553 repo.ui, b'differential.parsecommitmessage', {b'corpus': desc} |
|
554 ) |
483 for k, v in info[b'fields'].items(): |
555 for k, v in info[b'fields'].items(): |
484 if k in [b'title', b'summary', b'testPlan']: |
556 if k in [b'title', b'summary', b'testPlan']: |
485 transactions.append({b'type': k, b'value': v}) |
557 transactions.append({b'type': k, b'value': v}) |
486 |
558 |
487 params = {b'transactions': transactions} |
559 params = {b'transactions': transactions} |
504 # some names here. |
577 # some names here. |
505 data = result[b'data'] |
578 data = result[b'data'] |
506 resolved = set(entry[b'fields'][b'username'].lower() for entry in data) |
579 resolved = set(entry[b'fields'][b'username'].lower() for entry in data) |
507 unresolved = set(names) - resolved |
580 unresolved = set(names) - resolved |
508 if unresolved: |
581 if unresolved: |
509 raise error.Abort(_(b'unknown username: %s') |
582 raise error.Abort( |
510 % b' '.join(sorted(unresolved))) |
583 _(b'unknown username: %s') % b' '.join(sorted(unresolved)) |
|
584 ) |
511 return [entry[b'phid'] for entry in data] |
585 return [entry[b'phid'] for entry in data] |
512 |
586 |
513 @vcrcommand(b'phabsend', |
587 |
514 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')), |
588 @vcrcommand( |
515 (b'', b'amend', True, _(b'update commit messages')), |
589 b'phabsend', |
516 (b'', b'reviewer', [], _(b'specify reviewers')), |
590 [ |
517 (b'', b'blocker', [], _(b'specify blocking reviewers')), |
591 (b'r', b'rev', [], _(b'revisions to send'), _(b'REV')), |
518 (b'm', b'comment', b'', |
592 (b'', b'amend', True, _(b'update commit messages')), |
519 _(b'add a comment to Revisions with new/updated Diffs')), |
593 (b'', b'reviewer', [], _(b'specify reviewers')), |
520 (b'', b'confirm', None, _(b'ask for confirmation before sending'))], |
594 (b'', b'blocker', [], _(b'specify blocking reviewers')), |
521 _(b'REV [OPTIONS]'), |
595 ( |
522 helpcategory=command.CATEGORY_IMPORT_EXPORT) |
596 b'm', |
|
597 b'comment', |
|
598 b'', |
|
599 _(b'add a comment to Revisions with new/updated Diffs'), |
|
600 ), |
|
601 (b'', b'confirm', None, _(b'ask for confirmation before sending')), |
|
602 ], |
|
603 _(b'REV [OPTIONS]'), |
|
604 helpcategory=command.CATEGORY_IMPORT_EXPORT, |
|
605 ) |
523 def phabsend(ui, repo, *revs, **opts): |
606 def phabsend(ui, repo, *revs, **opts): |
524 """upload changesets to Phabricator |
607 """upload changesets to Phabricator |
525 |
608 |
526 If there are multiple revisions specified, they will be send as a stack |
609 If there are multiple revisions specified, they will be send as a stack |
527 with a linear dependencies relationship using the order specified by the |
610 with a linear dependencies relationship using the order specified by the |
592 # Get Differential Revision ID |
675 # Get Differential Revision ID |
593 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None)) |
676 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None)) |
594 if oldnode != ctx.node() or opts.get(b'amend'): |
677 if oldnode != ctx.node() or opts.get(b'amend'): |
595 # Create or update Differential Revision |
678 # Create or update Differential Revision |
596 revision, diff = createdifferentialrevision( |
679 revision, diff = createdifferentialrevision( |
597 ctx, revid, lastrevphid, oldnode, olddiff, actions, |
680 ctx, |
598 opts.get(b'comment')) |
681 revid, |
|
682 lastrevphid, |
|
683 oldnode, |
|
684 olddiff, |
|
685 actions, |
|
686 opts.get(b'comment'), |
|
687 ) |
599 diffmap[ctx.node()] = diff |
688 diffmap[ctx.node()] = diff |
600 newrevid = int(revision[b'object'][b'id']) |
689 newrevid = int(revision[b'object'][b'id']) |
601 newrevphid = revision[b'object'][b'phid'] |
690 newrevphid = revision[b'object'][b'phid'] |
602 if revid: |
691 if revid: |
603 action = b'updated' |
692 action = b'updated' |
607 # Create a local tag to note the association, if commit message |
696 # Create a local tag to note the association, if commit message |
608 # does not have it already |
697 # does not have it already |
609 m = _differentialrevisiondescre.search(ctx.description()) |
698 m = _differentialrevisiondescre.search(ctx.description()) |
610 if not m or int(m.group(r'id')) != newrevid: |
699 if not m or int(m.group(r'id')) != newrevid: |
611 tagname = b'D%d' % newrevid |
700 tagname = b'D%d' % newrevid |
612 tags.tag(repo, tagname, ctx.node(), message=None, user=None, |
701 tags.tag( |
613 date=None, local=True) |
702 repo, |
|
703 tagname, |
|
704 ctx.node(), |
|
705 message=None, |
|
706 user=None, |
|
707 date=None, |
|
708 local=True, |
|
709 ) |
614 else: |
710 else: |
615 # Nothing changed. But still set "newrevphid" so the next revision |
711 # Nothing changed. But still set "newrevphid" so the next revision |
616 # could depend on this one and "newrevid" for the summary line. |
712 # could depend on this one and "newrevid" for the summary line. |
617 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid'] |
713 newrevphid = querydrev(repo, b'%d' % revid)[0][b'phid'] |
618 newrevid = revid |
714 newrevid = revid |
619 action = b'skipped' |
715 action = b'skipped' |
620 |
716 |
621 actiondesc = ui.label( |
717 actiondesc = ui.label( |
622 {b'created': _(b'created'), |
718 { |
623 b'skipped': _(b'skipped'), |
719 b'created': _(b'created'), |
624 b'updated': _(b'updated')}[action], |
720 b'skipped': _(b'skipped'), |
625 b'phabricator.action.%s' % action) |
721 b'updated': _(b'updated'), |
|
722 }[action], |
|
723 b'phabricator.action.%s' % action, |
|
724 ) |
626 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev') |
725 drevdesc = ui.label(b'D%d' % newrevid, b'phabricator.drev') |
627 nodedesc = ui.label(bytes(ctx), b'phabricator.node') |
726 nodedesc = ui.label(bytes(ctx), b'phabricator.node') |
628 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc') |
727 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc') |
629 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, |
728 ui.write( |
630 desc)) |
729 _(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, desc) |
|
730 ) |
631 drevids.append(newrevid) |
731 drevids.append(newrevid) |
632 lastrevphid = newrevphid |
732 lastrevphid = newrevphid |
633 |
733 |
634 # Update commit messages and remove tags |
734 # Update commit messages and remove tags |
635 if opts.get(b'amend'): |
735 if opts.get(b'amend'): |
636 unfi = repo.unfiltered() |
736 unfi = repo.unfiltered() |
637 drevs = callconduit(ui, b'differential.query', {b'ids': drevids}) |
737 drevs = callconduit(ui, b'differential.query', {b'ids': drevids}) |
638 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'): |
738 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'): |
639 wnode = unfi[b'.'].node() |
739 wnode = unfi[b'.'].node() |
640 mapping = {} # {oldnode: [newnode]} |
740 mapping = {} # {oldnode: [newnode]} |
641 for i, rev in enumerate(revs): |
741 for i, rev in enumerate(revs): |
642 old = unfi[rev] |
742 old = unfi[rev] |
643 drevid = drevids[i] |
743 drevid = drevids[i] |
644 drev = [d for d in drevs if int(d[b'id']) == drevid][0] |
744 drev = [d for d in drevs if int(d[b'id']) == drevid][0] |
645 newdesc = getdescfromdrev(drev) |
745 newdesc = getdescfromdrev(drev) |
646 # Make sure commit message contain "Differential Revision" |
746 # Make sure commit message contain "Differential Revision" |
647 if old.description() != newdesc: |
747 if old.description() != newdesc: |
648 if old.phase() == phases.public: |
748 if old.phase() == phases.public: |
649 ui.warn(_("warning: not updating public commit %s\n") |
749 ui.warn( |
650 % scmutil.formatchangeid(old)) |
750 _("warning: not updating public commit %s\n") |
|
751 % scmutil.formatchangeid(old) |
|
752 ) |
651 continue |
753 continue |
652 parents = [ |
754 parents = [ |
653 mapping.get(old.p1().node(), (old.p1(),))[0], |
755 mapping.get(old.p1().node(), (old.p1(),))[0], |
654 mapping.get(old.p2().node(), (old.p2(),))[0], |
756 mapping.get(old.p2().node(), (old.p2(),))[0], |
655 ] |
757 ] |
656 new = context.metadataonlyctx( |
758 new = context.metadataonlyctx( |
657 repo, old, parents=parents, text=newdesc, |
759 repo, |
658 user=old.user(), date=old.date(), extra=old.extra()) |
760 old, |
|
761 parents=parents, |
|
762 text=newdesc, |
|
763 user=old.user(), |
|
764 date=old.date(), |
|
765 extra=old.extra(), |
|
766 ) |
659 |
767 |
660 newnode = new.commit() |
768 newnode = new.commit() |
661 |
769 |
662 mapping[old.node()] = [newnode] |
770 mapping[old.node()] = [newnode] |
663 # Update diff property |
771 # Update diff property |
668 except util.urlerr.urlerror: |
776 except util.urlerr.urlerror: |
669 ui.warn(b'Failed to update metadata for D%s\n' % drevid) |
777 ui.warn(b'Failed to update metadata for D%s\n' % drevid) |
670 # Remove local tags since it's no longer necessary |
778 # Remove local tags since it's no longer necessary |
671 tagname = b'D%d' % drevid |
779 tagname = b'D%d' % drevid |
672 if tagname in repo.tags(): |
780 if tagname in repo.tags(): |
673 tags.tag(repo, tagname, nullid, message=None, user=None, |
781 tags.tag( |
674 date=None, local=True) |
782 repo, |
|
783 tagname, |
|
784 nullid, |
|
785 message=None, |
|
786 user=None, |
|
787 date=None, |
|
788 local=True, |
|
789 ) |
675 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True) |
790 scmutil.cleanupnodes(repo, mapping, b'phabsend', fixphase=True) |
676 if wnode in mapping: |
791 if wnode in mapping: |
677 unfi.setparents(mapping[wnode][0]) |
792 unfi.setparents(mapping[wnode][0]) |
678 |
793 |
|
794 |
679 # Map from "hg:meta" keys to header understood by "hg import". The order is |
795 # Map from "hg:meta" keys to header understood by "hg import". The order is |
680 # consistent with "hg export" output. |
796 # consistent with "hg export" output. |
681 _metanamemap = util.sortdict([(b'user', b'User'), (b'date', b'Date'), |
797 _metanamemap = util.sortdict( |
682 (b'branch', b'Branch'), (b'node', b'Node ID'), |
798 [ |
683 (b'parent', b'Parent ')]) |
799 (b'user', b'User'), |
|
800 (b'date', b'Date'), |
|
801 (b'branch', b'Branch'), |
|
802 (b'node', b'Node ID'), |
|
803 (b'parent', b'Parent '), |
|
804 ] |
|
805 ) |
|
806 |
684 |
807 |
685 def _confirmbeforesend(repo, revs, oldmap): |
808 def _confirmbeforesend(repo, revs, oldmap): |
686 url, token = readurltoken(repo.ui) |
809 url, token = readurltoken(repo.ui) |
687 ui = repo.ui |
810 ui = repo.ui |
688 for rev in revs: |
811 for rev in revs: |
692 if drevid: |
815 if drevid: |
693 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev') |
816 drevdesc = ui.label(b'D%s' % drevid, b'phabricator.drev') |
694 else: |
817 else: |
695 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev') |
818 drevdesc = ui.label(_(b'NEW'), b'phabricator.drev') |
696 |
819 |
697 ui.write(_(b'%s - %s: %s\n') |
820 ui.write( |
698 % (drevdesc, |
821 _(b'%s - %s: %s\n') |
699 ui.label(bytes(ctx), b'phabricator.node'), |
822 % ( |
700 ui.label(desc, b'phabricator.desc'))) |
823 drevdesc, |
701 |
824 ui.label(bytes(ctx), b'phabricator.node'), |
702 if ui.promptchoice(_(b'Send the above changes to %s (yn)?' |
825 ui.label(desc, b'phabricator.desc'), |
703 b'$$ &Yes $$ &No') % url): |
826 ) |
|
827 ) |
|
828 |
|
829 if ui.promptchoice( |
|
830 _(b'Send the above changes to %s (yn)?' b'$$ &Yes $$ &No') % url |
|
831 ): |
704 return False |
832 return False |
705 |
833 |
706 return True |
834 return True |
707 |
835 |
# Differential Revision status names we understand, in the normalized form
# produced by _getstatusname() (lower-cased, spaces stripped).
_knownstatusnames = set(
    b'accepted needsreview needsrevision closed abandoned'.split()
)
|
844 |
710 |
845 |
def _getstatusname(drev):
    """get normalized status name from a Differential Revision"""
    name = drev[b'statusName']
    return name.replace(b' ', b'').lower()
714 |
849 |
|
850 |
# Small language to specify differential revisions. Support symbols: (), :X,
# +, -, and &.
717 |
853 |
# Parsing table for the revision-spec mini-language, in the shape expected
# by Mercurial's generic parser:
# token-type: (binding-strength, primary, prefix, infix, suffix)
_elements = {
    b'(': (12, None, (b'group', 1, b')'), None, None),
    b':': (8, None, (b'ancestors', 8), None, None),
    b'&': (5, None, None, (b'and_', 5), None),
    b'+': (4, None, None, (b'add', 4), None),
    b'-': (4, None, None, (b'sub', 4), None),
    b')': (0, None, None, None, None),
    b'symbol': (0, b'symbol', None, None, None),
    b'end': (0, None, None, None, None),
}
729 |
865 |
|
866 |
def _tokenize(text):
    """Tokenize a differential revision spec into (type, value, pos) triples.

    Token types are b'symbol' (a maximal run of non-special bytes), any one
    of the special bytes in b'():+-&', and a final b'end' marker.  Spaces
    separate tokens but produce none themselves.
    """
    view = memoryview(text)  # zero-copy slice
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        symbol = b''.join(
            itertools.takewhile(
                lambda ch: ch not in special, pycompat.iterbytestr(view[pos:])
            )
        )
        if symbol:
            yield (b'symbol', symbol, pos)
            pos += len(symbol)
        else:  # special char, ignore space
            # Slice rather than index: on Python 3, indexing a bytes object
            # yields an int, which never equals b' ' and would leak an int
            # token type to the parser.  Slicing yields bytes on both
            # Python 2 and 3.
            if text[pos : pos + 1] != b' ':
                yield (text[pos : pos + 1], None, pos)
            pos += 1
    yield (b'end', None, pos)
|
886 |
746 |
887 |
def _parse(text):
    """Parse a revision-spec string into an AST, rejecting trailing input."""
    tree, consumed = parser.parser(_elements).parse(_tokenize(text))
    if consumed != len(text):
        raise error.ParseError(b'invalid token', consumed)
    return tree
|
893 |
752 |
894 |
def _parsedrev(symbol):
    """str -> int or None, ex. 'D45' -> 45; '12' -> 12; 'x' -> None"""
    # Strip an optional b'D' prefix, then accept only pure digit strings.
    body = symbol[1:] if symbol.startswith(b'D') else symbol
    if body.isdigit():
        return int(body)
    return None
|
901 |
759 |
902 |
760 def _prefetchdrevs(tree): |
903 def _prefetchdrevs(tree): |
761 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch""" |
904 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch""" |
762 drevs = set() |
905 drevs = set() |
763 ancestordrevs = set() |
906 ancestordrevs = set() |
1004 header += b'# %s %s\n' % (_metanamemap[k], meta[k]) |
1159 header += b'# %s %s\n' % (_metanamemap[k], meta[k]) |
1005 |
1160 |
1006 content = b'%s%s\n%s' % (header, desc, body) |
1161 content = b'%s%s\n%s' % (header, desc, body) |
1007 write(content) |
1162 write(content) |
1008 |
1163 |
1009 @vcrcommand(b'phabread', |
1164 |
1010 [(b'', b'stack', False, _(b'read dependencies'))], |
1165 @vcrcommand( |
1011 _(b'DREVSPEC [OPTIONS]'), |
1166 b'phabread', |
1012 helpcategory=command.CATEGORY_IMPORT_EXPORT) |
1167 [(b'', b'stack', False, _(b'read dependencies'))], |
|
1168 _(b'DREVSPEC [OPTIONS]'), |
|
1169 helpcategory=command.CATEGORY_IMPORT_EXPORT, |
|
1170 ) |
1013 def phabread(ui, repo, spec, **opts): |
1171 def phabread(ui, repo, spec, **opts): |
1014 """print patches from Phabricator suitable for importing |
1172 """print patches from Phabricator suitable for importing |
1015 |
1173 |
1016 DREVSPEC could be a Differential Revision identity, like ``D123``, or just |
1174 DREVSPEC could be a Differential Revision identity, like ``D123``, or just |
1017 the number ``123``. It could also have common operators like ``+``, ``-``, |
1175 the number ``123``. It could also have common operators like ``+``, ``-``, |
1033 if opts.get(b'stack'): |
1191 if opts.get(b'stack'): |
1034 spec = b':(%s)' % spec |
1192 spec = b':(%s)' % spec |
1035 drevs = querydrev(repo, spec) |
1193 drevs = querydrev(repo, spec) |
1036 readpatch(repo, drevs, ui.write) |
1194 readpatch(repo, drevs, ui.write) |
1037 |
1195 |
1038 @vcrcommand(b'phabupdate', |
1196 |
1039 [(b'', b'accept', False, _(b'accept revisions')), |
1197 @vcrcommand( |
1040 (b'', b'reject', False, _(b'reject revisions')), |
1198 b'phabupdate', |
1041 (b'', b'abandon', False, _(b'abandon revisions')), |
1199 [ |
1042 (b'', b'reclaim', False, _(b'reclaim revisions')), |
1200 (b'', b'accept', False, _(b'accept revisions')), |
1043 (b'm', b'comment', b'', _(b'comment on the last revision')), |
1201 (b'', b'reject', False, _(b'reject revisions')), |
1044 ], _(b'DREVSPEC [OPTIONS]'), |
1202 (b'', b'abandon', False, _(b'abandon revisions')), |
1045 helpcategory=command.CATEGORY_IMPORT_EXPORT) |
1203 (b'', b'reclaim', False, _(b'reclaim revisions')), |
|
1204 (b'm', b'comment', b'', _(b'comment on the last revision')), |
|
1205 ], |
|
1206 _(b'DREVSPEC [OPTIONS]'), |
|
1207 helpcategory=command.CATEGORY_IMPORT_EXPORT, |
|
1208 ) |
1046 def phabupdate(ui, repo, spec, **opts): |
1209 def phabupdate(ui, repo, spec, **opts): |
1047 """update Differential Revision in batch |
1210 """update Differential Revision in batch |
1048 |
1211 |
1049 DREVSPEC selects revisions. See :hg:`help phabread` for its usage. |
1212 DREVSPEC selects revisions. See :hg:`help phabread` for its usage. |
1050 """ |
1213 """ |
1060 drevs = querydrev(repo, spec) |
1223 drevs = querydrev(repo, spec) |
1061 for i, drev in enumerate(drevs): |
1224 for i, drev in enumerate(drevs): |
1062 if i + 1 == len(drevs) and opts.get(b'comment'): |
1225 if i + 1 == len(drevs) and opts.get(b'comment'): |
1063 actions.append({b'type': b'comment', b'value': opts[b'comment']}) |
1226 actions.append({b'type': b'comment', b'value': opts[b'comment']}) |
1064 if actions: |
1227 if actions: |
1065 params = {b'objectIdentifier': drev[b'phid'], |
1228 params = { |
1066 b'transactions': actions} |
1229 b'objectIdentifier': drev[b'phid'], |
|
1230 b'transactions': actions, |
|
1231 } |
1067 callconduit(ui, b'differential.revision.edit', params) |
1232 callconduit(ui, b'differential.revision.edit', params) |
|
1233 |
1068 |
1234 |
@eh.templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    # Prefer the Differential URL embedded in the commit description.
    match = _differentialrevisiondescre.search(ctx.description())
    if match:
        return templateutil.hybriddict(
            {b'url': match.group(r'url'), b'id': b"D%s" % match.group(r'id'),}
        )
    # Otherwise fall back to a local tag of the form D<number>, if any.
    for tag in ctx.repo().nodetags(ctx.node()):
        if not _differentialrevisiontagre.match(tag):
            continue
        url = ctx.repo().ui.config(b'phabricator', b'url')
        if not url.endswith(b'/'):
            url += b'/'
        url += tag
        return templateutil.hybriddict({b'url': url, b'id': tag,})
    return None