75 |
75 |
76 configtable = {} |
76 configtable = {} |
77 configitem = registrar.configitem(configtable) |
77 configitem = registrar.configitem(configtable) |
78 |
78 |
79 # developer config: phabricator.batchsize |
79 # developer config: phabricator.batchsize |
80 configitem('phabricator', 'batchsize', |
80 configitem(b'phabricator', b'batchsize', |
81 default=12, |
81 default=12, |
82 ) |
82 ) |
83 configitem('phabricator', 'callsign', |
83 configitem(b'phabricator', b'callsign', |
84 default=None, |
84 default=None, |
85 ) |
85 ) |
86 configitem('phabricator', 'curlcmd', |
86 configitem(b'phabricator', b'curlcmd', |
87 default=None, |
87 default=None, |
88 ) |
88 ) |
89 # developer config: phabricator.repophid |
89 # developer config: phabricator.repophid |
90 configitem('phabricator', 'repophid', |
90 configitem(b'phabricator', b'repophid', |
91 default=None, |
91 default=None, |
92 ) |
92 ) |
93 configitem('phabricator', 'url', |
93 configitem(b'phabricator', b'url', |
94 default=None, |
94 default=None, |
95 ) |
95 ) |
96 configitem('phabsend', 'confirm', |
96 configitem(b'phabsend', b'confirm', |
97 default=False, |
97 default=False, |
98 ) |
98 ) |
99 |
99 |
100 colortable = { |
100 colortable = { |
101 'phabricator.action.created': 'green', |
101 b'phabricator.action.created': b'green', |
102 'phabricator.action.skipped': 'magenta', |
102 b'phabricator.action.skipped': b'magenta', |
103 'phabricator.action.updated': 'magenta', |
103 b'phabricator.action.updated': b'magenta', |
104 'phabricator.desc': '', |
104 b'phabricator.desc': b'', |
105 'phabricator.drev': 'bold', |
105 b'phabricator.drev': b'bold', |
106 'phabricator.node': '', |
106 b'phabricator.node': b'', |
107 } |
107 } |
108 |
108 |
109 def urlencodenested(params): |
109 def urlencodenested(params): |
110 """like urlencode, but works with nested parameters. |
110 """like urlencode, but works with nested parameters. |
111 |
111 |
119 if items is None: |
119 if items is None: |
120 flatparams[prefix] = obj |
120 flatparams[prefix] = obj |
121 else: |
121 else: |
122 for k, v in items(obj): |
122 for k, v in items(obj): |
123 if prefix: |
123 if prefix: |
124 process('%s[%s]' % (prefix, k), v) |
124 process(b'%s[%s]' % (prefix, k), v) |
125 else: |
125 else: |
126 process(k, v) |
126 process(k, v) |
127 process('', params) |
127 process(b'', params) |
128 return util.urlreq.urlencode(flatparams) |
128 return util.urlreq.urlencode(flatparams) |
129 |
129 |
130 printed_token_warning = False |
130 printed_token_warning = False |
131 |
131 |
132 def readlegacytoken(repo, url): |
132 def readlegacytoken(repo, url): |
133 """Transitional support for old phabricator tokens. |
133 """Transitional support for old phabricator tokens. |
134 |
134 |
135 Remove before the 4.7 release. |
135 Remove before the 4.7 release. |
136 """ |
136 """ |
137 groups = {} |
137 groups = {} |
138 for key, val in repo.ui.configitems('phabricator.auth'): |
138 for key, val in repo.ui.configitems(b'phabricator.auth'): |
139 if '.' not in key: |
139 if b'.' not in key: |
140 repo.ui.warn(_("ignoring invalid [phabricator.auth] key '%s'\n") |
140 repo.ui.warn(_(b"ignoring invalid [phabricator.auth] key '%s'\n") |
141 % key) |
141 % key) |
142 continue |
142 continue |
143 group, setting = key.rsplit('.', 1) |
143 group, setting = key.rsplit(b'.', 1) |
144 groups.setdefault(group, {})[setting] = val |
144 groups.setdefault(group, {})[setting] = val |
145 |
145 |
146 token = None |
146 token = None |
147 for group, auth in groups.iteritems(): |
147 for group, auth in groups.iteritems(): |
148 if url != auth.get('url'): |
148 if url != auth.get(b'url'): |
149 continue |
149 continue |
150 token = auth.get('token') |
150 token = auth.get(b'token') |
151 if token: |
151 if token: |
152 break |
152 break |
153 |
153 |
154 global printed_token_warning |
154 global printed_token_warning |
155 |
155 |
156 if token and not printed_token_warning: |
156 if token and not printed_token_warning: |
157 printed_token_warning = True |
157 printed_token_warning = True |
158 repo.ui.warn(_('phabricator.auth.token is deprecated - please ' |
158 repo.ui.warn(_(b'phabricator.auth.token is deprecated - please ' |
159 'migrate to auth.phabtoken.\n')) |
159 b'migrate to auth.phabtoken.\n')) |
160 return token |
160 return token |
161 |
161 |
162 def readurltoken(repo): |
162 def readurltoken(repo): |
163 """return conduit url, token and make sure they exist |
163 """return conduit url, token and make sure they exist |
164 |
164 |
165 Currently read from [auth] config section. In the future, it might |
165 Currently read from [auth] config section. In the future, it might |
166 make sense to read from .arcconfig and .arcrc as well. |
166 make sense to read from .arcconfig and .arcrc as well. |
167 """ |
167 """ |
168 url = repo.ui.config('phabricator', 'url') |
168 url = repo.ui.config(b'phabricator', b'url') |
169 if not url: |
169 if not url: |
170 raise error.Abort(_('config %s.%s is required') |
170 raise error.Abort(_(b'config %s.%s is required') |
171 % ('phabricator', 'url')) |
171 % (b'phabricator', b'url')) |
172 |
172 |
173 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user) |
173 res = httpconnectionmod.readauthforuri(repo.ui, url, util.url(url).user) |
174 token = None |
174 token = None |
175 |
175 |
176 if res: |
176 if res: |
177 group, auth = res |
177 group, auth = res |
178 |
178 |
179 repo.ui.debug("using auth.%s.* for authentication\n" % group) |
179 repo.ui.debug(b"using auth.%s.* for authentication\n" % group) |
180 |
180 |
181 token = auth.get('phabtoken') |
181 token = auth.get(b'phabtoken') |
182 |
182 |
183 if not token: |
183 if not token: |
184 token = readlegacytoken(repo, url) |
184 token = readlegacytoken(repo, url) |
185 if not token: |
185 if not token: |
186 raise error.Abort(_('Can\'t find conduit token associated to %s') |
186 raise error.Abort(_(b'Can\'t find conduit token associated to %s') |
187 % (url,)) |
187 % (url,)) |
188 |
188 |
189 return url, token |
189 return url, token |
190 |
190 |
191 def callconduit(repo, name, params): |
191 def callconduit(repo, name, params): |
192 """call Conduit API, params is a dict. return json.loads result, or None""" |
192 """call Conduit API, params is a dict. return json.loads result, or None""" |
193 host, token = readurltoken(repo) |
193 host, token = readurltoken(repo) |
194 url, authinfo = util.url('/'.join([host, 'api', name])).authinfo() |
194 url, authinfo = util.url(b'/'.join([host, b'api', name])).authinfo() |
195 repo.ui.debug('Conduit Call: %s %s\n' % (url, params)) |
195 repo.ui.debug(b'Conduit Call: %s %s\n' % (url, params)) |
196 params = params.copy() |
196 params = params.copy() |
197 params['api.token'] = token |
197 params[b'api.token'] = token |
198 data = urlencodenested(params) |
198 data = urlencodenested(params) |
199 curlcmd = repo.ui.config('phabricator', 'curlcmd') |
199 curlcmd = repo.ui.config(b'phabricator', b'curlcmd') |
200 if curlcmd: |
200 if curlcmd: |
201 sin, sout = procutil.popen2('%s -d @- %s' |
201 sin, sout = procutil.popen2(b'%s -d @- %s' |
202 % (curlcmd, procutil.shellquote(url))) |
202 % (curlcmd, procutil.shellquote(url))) |
203 sin.write(data) |
203 sin.write(data) |
204 sin.close() |
204 sin.close() |
205 body = sout.read() |
205 body = sout.read() |
206 else: |
206 else: |
207 urlopener = urlmod.opener(repo.ui, authinfo) |
207 urlopener = urlmod.opener(repo.ui, authinfo) |
208 request = util.urlreq.request(url, data=data) |
208 request = util.urlreq.request(url, data=data) |
209 body = urlopener.open(request).read() |
209 body = urlopener.open(request).read() |
210 repo.ui.debug('Conduit Response: %s\n' % body) |
210 repo.ui.debug(b'Conduit Response: %s\n' % body) |
211 parsed = json.loads(body) |
211 parsed = json.loads(body) |
212 if parsed.get(r'error_code'): |
212 if parsed.get(r'error_code'): |
213 msg = (_('Conduit Error (%s): %s') |
213 msg = (_(b'Conduit Error (%s): %s') |
214 % (parsed[r'error_code'], parsed[r'error_info'])) |
214 % (parsed[r'error_code'], parsed[r'error_info'])) |
215 raise error.Abort(msg) |
215 raise error.Abort(msg) |
216 return parsed[r'result'] |
216 return parsed[r'result'] |
217 |
217 |
218 @command('debugcallconduit', [], _('METHOD')) |
218 @command(b'debugcallconduit', [], _(b'METHOD')) |
219 def debugcallconduit(ui, repo, name): |
219 def debugcallconduit(ui, repo, name): |
220 """call Conduit API |
220 """call Conduit API |
221 |
221 |
222 Call parameters are read from stdin as a JSON blob. Result will be written |
222 Call parameters are read from stdin as a JSON blob. Result will be written |
223 to stdout as a JSON blob. |
223 to stdout as a JSON blob. |
224 """ |
224 """ |
225 params = json.loads(ui.fin.read()) |
225 params = json.loads(ui.fin.read()) |
226 result = callconduit(repo, name, params) |
226 result = callconduit(repo, name, params) |
227 s = json.dumps(result, sort_keys=True, indent=2, separators=(',', ': ')) |
227 s = json.dumps(result, sort_keys=True, indent=2, separators=(b',', b': ')) |
228 ui.write('%s\n' % s) |
228 ui.write(b'%s\n' % s) |
229 |
229 |
230 def getrepophid(repo): |
230 def getrepophid(repo): |
231 """given callsign, return repository PHID or None""" |
231 """given callsign, return repository PHID or None""" |
232 # developer config: phabricator.repophid |
232 # developer config: phabricator.repophid |
233 repophid = repo.ui.config('phabricator', 'repophid') |
233 repophid = repo.ui.config(b'phabricator', b'repophid') |
234 if repophid: |
234 if repophid: |
235 return repophid |
235 return repophid |
236 callsign = repo.ui.config('phabricator', 'callsign') |
236 callsign = repo.ui.config(b'phabricator', b'callsign') |
237 if not callsign: |
237 if not callsign: |
238 return None |
238 return None |
239 query = callconduit(repo, 'diffusion.repository.search', |
239 query = callconduit(repo, b'diffusion.repository.search', |
240 {'constraints': {'callsigns': [callsign]}}) |
240 {b'constraints': {b'callsigns': [callsign]}}) |
241 if len(query[r'data']) == 0: |
241 if len(query[r'data']) == 0: |
242 return None |
242 return None |
243 repophid = encoding.strtolocal(query[r'data'][0][r'phid']) |
243 repophid = encoding.strtolocal(query[r'data'][0][r'phid']) |
244 repo.ui.setconfig('phabricator', 'repophid', repophid) |
244 repo.ui.setconfig(b'phabricator', b'repophid', repophid) |
245 return repophid |
245 return repophid |
246 |
246 |
247 _differentialrevisiontagre = re.compile('\AD([1-9][0-9]*)\Z') |
247 _differentialrevisiontagre = re.compile(b'\AD([1-9][0-9]*)\Z') |
248 _differentialrevisiondescre = re.compile( |
248 _differentialrevisiondescre = re.compile( |
249 '^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) |
249 b'^Differential Revision:\s*(?P<url>(?:.*)D(?P<id>[1-9][0-9]*))$', re.M) |
250 |
250 |
251 def getoldnodedrevmap(repo, nodelist): |
251 def getoldnodedrevmap(repo, nodelist): |
252 """find previous nodes that has been sent to Phabricator |
252 """find previous nodes that has been sent to Phabricator |
253 |
253 |
254 return {node: (oldnode, Differential diff, Differential Revision ID)} |
254 return {node: (oldnode, Differential diff, Differential Revision ID)} |
285 continue |
285 continue |
286 |
286 |
287 # Check commit message |
287 # Check commit message |
288 m = _differentialrevisiondescre.search(ctx.description()) |
288 m = _differentialrevisiondescre.search(ctx.description()) |
289 if m: |
289 if m: |
290 toconfirm[node] = (1, set(precnodes), int(m.group('id'))) |
290 toconfirm[node] = (1, set(precnodes), int(m.group(b'id'))) |
291 |
291 |
292 # Double check if tags are genuine by collecting all old nodes from |
292 # Double check if tags are genuine by collecting all old nodes from |
293 # Phabricator, and expect precursors overlap with it. |
293 # Phabricator, and expect precursors overlap with it. |
294 if toconfirm: |
294 if toconfirm: |
295 drevs = [drev for force, precs, drev in toconfirm.values()] |
295 drevs = [drev for force, precs, drev in toconfirm.values()] |
296 alldiffs = callconduit(unfi, 'differential.querydiffs', |
296 alldiffs = callconduit(unfi, b'differential.querydiffs', |
297 {'revisionIDs': drevs}) |
297 {b'revisionIDs': drevs}) |
298 getnode = lambda d: bin(encoding.unitolocal( |
298 getnode = lambda d: bin(encoding.unitolocal( |
299 getdiffmeta(d).get(r'node', ''))) or None |
299 getdiffmeta(d).get(r'node', b''))) or None |
300 for newnode, (force, precset, drev) in toconfirm.items(): |
300 for newnode, (force, precset, drev) in toconfirm.items(): |
301 diffs = [d for d in alldiffs.values() |
301 diffs = [d for d in alldiffs.values() |
302 if int(d[r'revisionID']) == drev] |
302 if int(d[r'revisionID']) == drev] |
303 |
303 |
304 # "precursors" as known by Phabricator |
304 # "precursors" as known by Phabricator |
305 phprecset = set(getnode(d) for d in diffs) |
305 phprecset = set(getnode(d) for d in diffs) |
306 |
306 |
307 # Ignore if precursors (Phabricator and local repo) do not overlap, |
307 # Ignore if precursors (Phabricator and local repo) do not overlap, |
308 # and force is not set (when commit message says nothing) |
308 # and force is not set (when commit message says nothing) |
309 if not force and not bool(phprecset & precset): |
309 if not force and not bool(phprecset & precset): |
310 tagname = 'D%d' % drev |
310 tagname = b'D%d' % drev |
311 tags.tag(repo, tagname, nullid, message=None, user=None, |
311 tags.tag(repo, tagname, nullid, message=None, user=None, |
312 date=None, local=True) |
312 date=None, local=True) |
313 unfi.ui.warn(_('D%s: local tag removed - does not match ' |
313 unfi.ui.warn(_(b'D%s: local tag removed - does not match ' |
314 'Differential history\n') % drev) |
314 b'Differential history\n') % drev) |
315 continue |
315 continue |
316 |
316 |
317 # Find the last node using Phabricator metadata, and make sure it |
317 # Find the last node using Phabricator metadata, and make sure it |
318 # exists in the repo |
318 # exists in the repo |
319 oldnode = lastdiff = None |
319 oldnode = lastdiff = None |
338 def creatediff(ctx): |
338 def creatediff(ctx): |
339 """create a Differential Diff""" |
339 """create a Differential Diff""" |
340 repo = ctx.repo() |
340 repo = ctx.repo() |
341 repophid = getrepophid(repo) |
341 repophid = getrepophid(repo) |
342 # Create a "Differential Diff" via "differential.createrawdiff" API |
342 # Create a "Differential Diff" via "differential.createrawdiff" API |
343 params = {'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))} |
343 params = {b'diff': getdiff(ctx, mdiff.diffopts(git=True, context=32767))} |
344 if repophid: |
344 if repophid: |
345 params['repositoryPHID'] = repophid |
345 params[b'repositoryPHID'] = repophid |
346 diff = callconduit(repo, 'differential.createrawdiff', params) |
346 diff = callconduit(repo, b'differential.createrawdiff', params) |
347 if not diff: |
347 if not diff: |
348 raise error.Abort(_('cannot create diff for %s') % ctx) |
348 raise error.Abort(_(b'cannot create diff for %s') % ctx) |
349 return diff |
349 return diff |
350 |
350 |
351 def writediffproperties(ctx, diff): |
351 def writediffproperties(ctx, diff): |
352 """write metadata to diff so patches could be applied losslessly""" |
352 """write metadata to diff so patches could be applied losslessly""" |
353 params = { |
353 params = { |
354 'diff_id': diff[r'id'], |
354 b'diff_id': diff[r'id'], |
355 'name': 'hg:meta', |
355 b'name': b'hg:meta', |
356 'data': json.dumps({ |
356 b'data': json.dumps({ |
357 'user': ctx.user(), |
357 b'user': ctx.user(), |
358 'date': '%d %d' % ctx.date(), |
358 b'date': b'%d %d' % ctx.date(), |
359 'node': ctx.hex(), |
359 b'node': ctx.hex(), |
360 'parent': ctx.p1().hex(), |
360 b'parent': ctx.p1().hex(), |
361 }), |
361 }), |
362 } |
362 } |
363 callconduit(ctx.repo(), 'differential.setdiffproperty', params) |
363 callconduit(ctx.repo(), b'differential.setdiffproperty', params) |
364 |
364 |
365 params = { |
365 params = { |
366 'diff_id': diff[r'id'], |
366 b'diff_id': diff[r'id'], |
367 'name': 'local:commits', |
367 b'name': b'local:commits', |
368 'data': json.dumps({ |
368 b'data': json.dumps({ |
369 ctx.hex(): { |
369 ctx.hex(): { |
370 'author': stringutil.person(ctx.user()), |
370 b'author': stringutil.person(ctx.user()), |
371 'authorEmail': stringutil.email(ctx.user()), |
371 b'authorEmail': stringutil.email(ctx.user()), |
372 'time': ctx.date()[0], |
372 b'time': ctx.date()[0], |
373 }, |
373 }, |
374 }), |
374 }), |
375 } |
375 } |
376 callconduit(ctx.repo(), 'differential.setdiffproperty', params) |
376 callconduit(ctx.repo(), b'differential.setdiffproperty', params) |
377 |
377 |
378 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None, |
378 def createdifferentialrevision(ctx, revid=None, parentrevid=None, oldnode=None, |
379 olddiff=None, actions=None): |
379 olddiff=None, actions=None): |
380 """create or update a Differential Revision |
380 """create or update a Differential Revision |
381 |
381 |
410 # Use a temporary summary to set dependency. There might be better ways but |
410 # Use a temporary summary to set dependency. There might be better ways but |
411 # I cannot find them for now. But do not do that if we are updating an |
411 # I cannot find them for now. But do not do that if we are updating an |
412 # existing revision (revid is not None) since that introduces visible |
412 # existing revision (revid is not None) since that introduces visible |
413 # churns (someone edited "Summary" twice) on the web page. |
413 # churns (someone edited "Summary" twice) on the web page. |
414 if parentrevid and revid is None: |
414 if parentrevid and revid is None: |
415 summary = 'Depends on D%s' % parentrevid |
415 summary = b'Depends on D%s' % parentrevid |
416 transactions += [{'type': 'summary', 'value': summary}, |
416 transactions += [{b'type': b'summary', b'value': summary}, |
417 {'type': 'summary', 'value': ' '}] |
417 {b'type': b'summary', b'value': b' '}] |
418 |
418 |
419 if actions: |
419 if actions: |
420 transactions += actions |
420 transactions += actions |
421 |
421 |
422 # Parse commit message and update related fields. |
422 # Parse commit message and update related fields. |
423 desc = ctx.description() |
423 desc = ctx.description() |
424 info = callconduit(repo, 'differential.parsecommitmessage', |
424 info = callconduit(repo, b'differential.parsecommitmessage', |
425 {'corpus': desc}) |
425 {b'corpus': desc}) |
426 for k, v in info[r'fields'].items(): |
426 for k, v in info[r'fields'].items(): |
427 if k in ['title', 'summary', 'testPlan']: |
427 if k in [b'title', b'summary', b'testPlan']: |
428 transactions.append({'type': k, 'value': v}) |
428 transactions.append({b'type': k, b'value': v}) |
429 |
429 |
430 params = {'transactions': transactions} |
430 params = {b'transactions': transactions} |
431 if revid is not None: |
431 if revid is not None: |
432 # Update an existing Differential Revision |
432 # Update an existing Differential Revision |
433 params['objectIdentifier'] = revid |
433 params[b'objectIdentifier'] = revid |
434 |
434 |
435 revision = callconduit(repo, 'differential.revision.edit', params) |
435 revision = callconduit(repo, b'differential.revision.edit', params) |
436 if not revision: |
436 if not revision: |
437 raise error.Abort(_('cannot create revision for %s') % ctx) |
437 raise error.Abort(_(b'cannot create revision for %s') % ctx) |
438 |
438 |
439 return revision, diff |
439 return revision, diff |
440 |
440 |
441 def userphids(repo, names): |
441 def userphids(repo, names): |
442 """convert user names to PHIDs""" |
442 """convert user names to PHIDs""" |
443 query = {'constraints': {'usernames': names}} |
443 query = {b'constraints': {b'usernames': names}} |
444 result = callconduit(repo, 'user.search', query) |
444 result = callconduit(repo, b'user.search', query) |
445 # username not found is not an error of the API. So check if we have missed |
445 # username not found is not an error of the API. So check if we have missed |
446 # some names here. |
446 # some names here. |
447 data = result[r'data'] |
447 data = result[r'data'] |
448 resolved = set(entry[r'fields'][r'username'] for entry in data) |
448 resolved = set(entry[r'fields'][r'username'] for entry in data) |
449 unresolved = set(names) - resolved |
449 unresolved = set(names) - resolved |
450 if unresolved: |
450 if unresolved: |
451 raise error.Abort(_('unknown username: %s') |
451 raise error.Abort(_(b'unknown username: %s') |
452 % ' '.join(sorted(unresolved))) |
452 % b' '.join(sorted(unresolved))) |
453 return [entry[r'phid'] for entry in data] |
453 return [entry[r'phid'] for entry in data] |
454 |
454 |
455 @command('phabsend', |
455 @command(b'phabsend', |
456 [('r', 'rev', [], _('revisions to send'), _('REV')), |
456 [(b'r', b'rev', [], _(b'revisions to send'), _(b'REV')), |
457 ('', 'amend', True, _('update commit messages')), |
457 (b'', b'amend', True, _(b'update commit messages')), |
458 ('', 'reviewer', [], _('specify reviewers')), |
458 (b'', b'reviewer', [], _(b'specify reviewers')), |
459 ('', 'confirm', None, _('ask for confirmation before sending'))], |
459 (b'', b'confirm', None, _(b'ask for confirmation before sending'))], |
460 _('REV [OPTIONS]')) |
460 _(b'REV [OPTIONS]')) |
461 def phabsend(ui, repo, *revs, **opts): |
461 def phabsend(ui, repo, *revs, **opts): |
462 """upload changesets to Phabricator |
462 """upload changesets to Phabricator |
463 |
463 |
464 If there are multiple revisions specified, they will be send as a stack |
464 If there are multiple revisions specified, they will be send as a stack |
465 with a linear dependencies relationship using the order specified by the |
465 with a linear dependencies relationship using the order specified by the |
483 confirm = true |
483 confirm = true |
484 |
484 |
485 phabsend will check obsstore and the above association to decide whether to |
485 phabsend will check obsstore and the above association to decide whether to |
486 update an existing Differential Revision, or create a new one. |
486 update an existing Differential Revision, or create a new one. |
487 """ |
487 """ |
488 revs = list(revs) + opts.get('rev', []) |
488 revs = list(revs) + opts.get(b'rev', []) |
489 revs = scmutil.revrange(repo, revs) |
489 revs = scmutil.revrange(repo, revs) |
490 |
490 |
491 if not revs: |
491 if not revs: |
492 raise error.Abort(_('phabsend requires at least one changeset')) |
492 raise error.Abort(_(b'phabsend requires at least one changeset')) |
493 if opts.get('amend'): |
493 if opts.get(b'amend'): |
494 cmdutil.checkunfinished(repo) |
494 cmdutil.checkunfinished(repo) |
495 |
495 |
496 # {newnode: (oldnode, olddiff, olddrev} |
496 # {newnode: (oldnode, olddiff, olddrev} |
497 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs]) |
497 oldmap = getoldnodedrevmap(repo, [repo[r].node() for r in revs]) |
498 |
498 |
499 confirm = ui.configbool('phabsend', 'confirm') |
499 confirm = ui.configbool(b'phabsend', b'confirm') |
500 confirm |= bool(opts.get('confirm')) |
500 confirm |= bool(opts.get(b'confirm')) |
501 if confirm: |
501 if confirm: |
502 confirmed = _confirmbeforesend(repo, revs, oldmap) |
502 confirmed = _confirmbeforesend(repo, revs, oldmap) |
503 if not confirmed: |
503 if not confirmed: |
504 raise error.Abort(_('phabsend cancelled')) |
504 raise error.Abort(_(b'phabsend cancelled')) |
505 |
505 |
506 actions = [] |
506 actions = [] |
507 reviewers = opts.get('reviewer', []) |
507 reviewers = opts.get(b'reviewer', []) |
508 if reviewers: |
508 if reviewers: |
509 phids = userphids(repo, reviewers) |
509 phids = userphids(repo, reviewers) |
510 actions.append({'type': 'reviewers.add', 'value': phids}) |
510 actions.append({b'type': b'reviewers.add', b'value': phids}) |
511 |
511 |
512 drevids = [] # [int] |
512 drevids = [] # [int] |
513 diffmap = {} # {newnode: diff} |
513 diffmap = {} # {newnode: diff} |
514 |
514 |
515 # Send patches one by one so we know their Differential Revision IDs and |
515 # Send patches one by one so we know their Differential Revision IDs and |
516 # can provide dependency relationship |
516 # can provide dependency relationship |
517 lastrevid = None |
517 lastrevid = None |
518 for rev in revs: |
518 for rev in revs: |
519 ui.debug('sending rev %d\n' % rev) |
519 ui.debug(b'sending rev %d\n' % rev) |
520 ctx = repo[rev] |
520 ctx = repo[rev] |
521 |
521 |
522 # Get Differential Revision ID |
522 # Get Differential Revision ID |
523 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None)) |
523 oldnode, olddiff, revid = oldmap.get(ctx.node(), (None, None, None)) |
524 if oldnode != ctx.node() or opts.get('amend'): |
524 if oldnode != ctx.node() or opts.get(b'amend'): |
525 # Create or update Differential Revision |
525 # Create or update Differential Revision |
526 revision, diff = createdifferentialrevision( |
526 revision, diff = createdifferentialrevision( |
527 ctx, revid, lastrevid, oldnode, olddiff, actions) |
527 ctx, revid, lastrevid, oldnode, olddiff, actions) |
528 diffmap[ctx.node()] = diff |
528 diffmap[ctx.node()] = diff |
529 newrevid = int(revision[r'object'][r'id']) |
529 newrevid = int(revision[r'object'][r'id']) |
530 if revid: |
530 if revid: |
531 action = 'updated' |
531 action = b'updated' |
532 else: |
532 else: |
533 action = 'created' |
533 action = b'created' |
534 |
534 |
535 # Create a local tag to note the association, if commit message |
535 # Create a local tag to note the association, if commit message |
536 # does not have it already |
536 # does not have it already |
537 m = _differentialrevisiondescre.search(ctx.description()) |
537 m = _differentialrevisiondescre.search(ctx.description()) |
538 if not m or int(m.group('id')) != newrevid: |
538 if not m or int(m.group(b'id')) != newrevid: |
539 tagname = 'D%d' % newrevid |
539 tagname = b'D%d' % newrevid |
540 tags.tag(repo, tagname, ctx.node(), message=None, user=None, |
540 tags.tag(repo, tagname, ctx.node(), message=None, user=None, |
541 date=None, local=True) |
541 date=None, local=True) |
542 else: |
542 else: |
543 # Nothing changed. But still set "newrevid" so the next revision |
543 # Nothing changed. But still set "newrevid" so the next revision |
544 # could depend on this one. |
544 # could depend on this one. |
545 newrevid = revid |
545 newrevid = revid |
546 action = 'skipped' |
546 action = b'skipped' |
547 |
547 |
548 actiondesc = ui.label( |
548 actiondesc = ui.label( |
549 {'created': _('created'), |
549 {b'created': _(b'created'), |
550 'skipped': _('skipped'), |
550 b'skipped': _(b'skipped'), |
551 'updated': _('updated')}[action], |
551 b'updated': _(b'updated')}[action], |
552 'phabricator.action.%s' % action) |
552 b'phabricator.action.%s' % action) |
553 drevdesc = ui.label('D%s' % newrevid, 'phabricator.drev') |
553 drevdesc = ui.label(b'D%s' % newrevid, b'phabricator.drev') |
554 nodedesc = ui.label(bytes(ctx), 'phabricator.node') |
554 nodedesc = ui.label(bytes(ctx), b'phabricator.node') |
555 desc = ui.label(ctx.description().split('\n')[0], 'phabricator.desc') |
555 desc = ui.label(ctx.description().split(b'\n')[0], b'phabricator.desc') |
556 ui.write(_('%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, |
556 ui.write(_(b'%s - %s - %s: %s\n') % (drevdesc, actiondesc, nodedesc, |
557 desc)) |
557 desc)) |
558 drevids.append(newrevid) |
558 drevids.append(newrevid) |
559 lastrevid = newrevid |
559 lastrevid = newrevid |
560 |
560 |
561 # Update commit messages and remove tags |
561 # Update commit messages and remove tags |
562 if opts.get('amend'): |
562 if opts.get(b'amend'): |
563 unfi = repo.unfiltered() |
563 unfi = repo.unfiltered() |
564 drevs = callconduit(repo, 'differential.query', {'ids': drevids}) |
564 drevs = callconduit(repo, b'differential.query', {b'ids': drevids}) |
565 with repo.wlock(), repo.lock(), repo.transaction('phabsend'): |
565 with repo.wlock(), repo.lock(), repo.transaction(b'phabsend'): |
566 wnode = unfi['.'].node() |
566 wnode = unfi[b'.'].node() |
567 mapping = {} # {oldnode: [newnode]} |
567 mapping = {} # {oldnode: [newnode]} |
568 for i, rev in enumerate(revs): |
568 for i, rev in enumerate(revs): |
569 old = unfi[rev] |
569 old = unfi[rev] |
570 drevid = drevids[i] |
570 drevid = drevids[i] |
571 drev = [d for d in drevs if int(d[r'id']) == drevid][0] |
571 drev = [d for d in drevs if int(d[r'id']) == drevid][0] |
578 ] |
578 ] |
579 new = context.metadataonlyctx( |
579 new = context.metadataonlyctx( |
580 repo, old, parents=parents, text=newdesc, |
580 repo, old, parents=parents, text=newdesc, |
581 user=old.user(), date=old.date(), extra=old.extra()) |
581 user=old.user(), date=old.date(), extra=old.extra()) |
582 |
582 |
583 overrides = {('phases', 'new-commit'): old.phase()} |
583 overrides = {(b'phases', b'new-commit'): old.phase()} |
584 with ui.configoverride(overrides, 'phabsend'): |
584 with ui.configoverride(overrides, b'phabsend'): |
585 newnode = new.commit() |
585 newnode = new.commit() |
586 |
586 |
587 mapping[old.node()] = [newnode] |
587 mapping[old.node()] = [newnode] |
588 # Update diff property |
588 # Update diff property |
589 writediffproperties(unfi[newnode], diffmap[old.node()]) |
589 writediffproperties(unfi[newnode], diffmap[old.node()]) |
590 # Remove local tags since it's no longer necessary |
590 # Remove local tags since it's no longer necessary |
591 tagname = 'D%d' % drevid |
591 tagname = b'D%d' % drevid |
592 if tagname in repo.tags(): |
592 if tagname in repo.tags(): |
593 tags.tag(repo, tagname, nullid, message=None, user=None, |
593 tags.tag(repo, tagname, nullid, message=None, user=None, |
594 date=None, local=True) |
594 date=None, local=True) |
595 scmutil.cleanupnodes(repo, mapping, 'phabsend') |
595 scmutil.cleanupnodes(repo, mapping, b'phabsend') |
596 if wnode in mapping: |
596 if wnode in mapping: |
597 unfi.setparents(mapping[wnode][0]) |
597 unfi.setparents(mapping[wnode][0]) |
598 |
598 |
599 # Map from "hg:meta" keys to header understood by "hg import". The order is |
599 # Map from "hg:meta" keys to header understood by "hg import". The order is |
600 # consistent with "hg export" output. |
600 # consistent with "hg export" output. |
601 _metanamemap = util.sortdict([(r'user', 'User'), (r'date', 'Date'), |
601 _metanamemap = util.sortdict([(r'user', b'User'), (r'date', b'Date'), |
602 (r'node', 'Node ID'), (r'parent', 'Parent ')]) |
602 (r'node', b'Node ID'), (r'parent', b'Parent ')]) |
603 |
603 |
def _confirmbeforesend(repo, revs, oldmap):
    """List the changes about to be sent to Phabricator and prompt the user.

    Returns True to proceed with the send, False if the user answered "No".
    """
    url, token = readurltoken(repo)
    ui = repo.ui
    for rev in revs:
        ctx = repo[rev]
        desc = ctx.description().splitlines()[0]
        oldnode, olddiff, drevid = oldmap.get(ctx.node(), (None, None, None))
        # Show the existing Differential Revision id, or NEW for a first send.
        name = (b'D%s' % drevid) if drevid else _(b'NEW')
        drevdesc = ui.label(name, b'phabricator.drev')
        nodedesc = ui.label(bytes(ctx), b'phabricator.node')
        descdesc = ui.label(desc, b'phabricator.desc')
        ui.write(_(b'%s - %s: %s\n') % (drevdesc, nodedesc, descdesc))

    prompt = _(b'Send the above changes to %s (yn)?'
               b'$$ &Yes $$ &No') % url
    # promptchoice returns 0 for Yes; anything else means abort.
    return not ui.promptchoice(prompt)
625 |
626 |
# Status names Phabricator may report, normalized the same way as
# _getstatusname() output (lowercase, no spaces).
_knownstatusnames = set(
    b'accepted needsreview needsrevision closed abandoned'.split())
628 |
629 |
def _getstatusname(drev):
    """Return the normalized status name of a Differential Revision dict.

    Normalization strips spaces and lowercases, e.g. the reported status
    'Needs Review' becomes b'needsreview'.
    """
    raw = drev[r'statusName']
    return raw.replace(b' ', b'').lower()
632 |
633 |
# Small language to specify differential revisions. Supported symbols: (),
# :X, +, and -.

_elements = {
    # token-type: (binding-strength, primary, prefix, infix, suffix)
    b'(':      (12, None,      (b'group', 1, b')'), None,         None),
    b':':      (8,  None,      (b'ancestors', 8),   None,         None),
    b'&':      (5,  None,      None,                (b'and_', 5), None),
    b'+':      (4,  None,      None,                (b'add', 4),  None),
    b'-':      (4,  None,      None,                (b'sub', 4),  None),
    b')':      (0,  None,      None,                None,         None),
    b'symbol': (0,  b'symbol', None,                None,         None),
    b'end':    (0,  None,      None,                None,         None),
}
647 |
648 |
def _tokenize(text):
    """Split a DREVSPEC bytes string into (token-type, value, position) tuples.

    Yields (b'symbol', <bytes>, pos) for each maximal run of non-special
    bytes, (<special-byte>, None, pos) for single special characters (bare
    spaces are silently skipped), and a final (b'end', None, pos) sentinel.
    """
    special = b'():+-& '
    pos = 0
    length = len(text)
    while pos < length:
        # Scan the longest run of non-special bytes starting at pos. Use
        # length-1 slices (text[i:i + 1]) rather than indexing: on Python 3
        # indexing bytes yields an int, so the previous memoryview-based
        # b''.join(...) raised TypeError and the int comparison against
        # b' ' could never match.
        end = pos
        while end < length and text[end:end + 1] not in special:
            end += 1
        if end > pos:
            yield (b'symbol', text[pos:end], pos)
            pos = end
        else:
            ch = text[pos:pos + 1]
            if ch != b' ':  # special char; spaces are ignored
                yield (ch, None, pos)
            pos += 1
    yield (b'end', None, pos)
664 |
665 |
def _parse(text):
    """Parse a DREVSPEC bytes string into a prefix tree.

    Raises ParseError if the whole input cannot be consumed.
    """
    drevparser = parser.parser(_elements)
    tree, pos = drevparser.parse(_tokenize(text))
    if pos != len(text):
        raise error.ParseError(b'invalid token', pos)
    return tree
670 |
671 |
def _parsedrev(symbol):
    """bytes -> int or None, e.g. b'D45' -> 45; b'12' -> 12; b'x' -> None."""
    # Strip an optional leading b'D', then accept only pure digit strings.
    digits = symbol[1:] if symbol.startswith(b'D') else symbol
    if digits.isdigit():
        return int(digits)
    return None
677 |
678 |
678 def _prefetchdrevs(tree): |
679 def _prefetchdrevs(tree): |
679 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch""" |
680 """return ({single-drev-id}, {ancestor-drev-id}) to prefetch""" |
680 drevs = set() |
681 drevs = set() |
681 ancestordrevs = set() |
682 ancestordrevs = set() |
682 op = tree[0] |
683 op = tree[0] |
683 if op == 'symbol': |
684 if op == b'symbol': |
684 r = _parsedrev(tree[1]) |
685 r = _parsedrev(tree[1]) |
685 if r: |
686 if r: |
686 drevs.add(r) |
687 drevs.add(r) |
687 elif op == 'ancestors': |
688 elif op == b'ancestors': |
688 r, a = _prefetchdrevs(tree[1]) |
689 r, a = _prefetchdrevs(tree[1]) |
689 drevs.update(r) |
690 drevs.update(r) |
690 ancestordrevs.update(r) |
691 ancestordrevs.update(r) |
691 ancestordrevs.update(a) |
692 ancestordrevs.update(a) |
692 else: |
693 else: |
883 write is usually ui.write. drevs is what "querydrev" returns, results of |
885 write is usually ui.write. drevs is what "querydrev" returns, results of |
884 "differential.query". |
886 "differential.query". |
885 """ |
887 """ |
886 # Prefetch hg:meta property for all diffs |
888 # Prefetch hg:meta property for all diffs |
887 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs)) |
889 diffids = sorted(set(max(int(v) for v in drev[r'diffs']) for drev in drevs)) |
888 diffs = callconduit(repo, 'differential.querydiffs', {'ids': diffids}) |
890 diffs = callconduit(repo, b'differential.querydiffs', {b'ids': diffids}) |
889 |
891 |
890 # Generate patch for each drev |
892 # Generate patch for each drev |
891 for drev in drevs: |
893 for drev in drevs: |
892 repo.ui.note(_('reading D%s\n') % drev[r'id']) |
894 repo.ui.note(_(b'reading D%s\n') % drev[r'id']) |
893 |
895 |
894 diffid = max(int(v) for v in drev[r'diffs']) |
896 diffid = max(int(v) for v in drev[r'diffs']) |
895 body = callconduit(repo, 'differential.getrawdiff', {'diffID': diffid}) |
897 body = callconduit(repo, b'differential.getrawdiff', |
|
898 {b'diffID': diffid}) |
896 desc = getdescfromdrev(drev) |
899 desc = getdescfromdrev(drev) |
897 header = '# HG changeset patch\n' |
900 header = b'# HG changeset patch\n' |
898 |
901 |
899 # Try to preserve metadata from hg:meta property. Write hg patch |
902 # Try to preserve metadata from hg:meta property. Write hg patch |
900 # headers that can be read by the "import" command. See patchheadermap |
903 # headers that can be read by the "import" command. See patchheadermap |
901 # and extract in mercurial/patch.py for supported headers. |
904 # and extract in mercurial/patch.py for supported headers. |
902 meta = getdiffmeta(diffs[str(diffid)]) |
905 meta = getdiffmeta(diffs[str(diffid)]) |
903 for k in _metanamemap.keys(): |
906 for k in _metanamemap.keys(): |
904 if k in meta: |
907 if k in meta: |
905 header += '# %s %s\n' % (_metanamemap[k], meta[k]) |
908 header += b'# %s %s\n' % (_metanamemap[k], meta[k]) |
906 |
909 |
907 content = '%s%s\n%s' % (header, desc, body) |
910 content = b'%s%s\n%s' % (header, desc, body) |
908 write(encoding.unitolocal(content)) |
911 write(encoding.unitolocal(content)) |
909 |
912 |
910 @command('phabread', |
913 @command(b'phabread', |
911 [('', 'stack', False, _('read dependencies'))], |
914 [(b'', b'stack', False, _(b'read dependencies'))], |
912 _('DREVSPEC [OPTIONS]')) |
915 _(b'DREVSPEC [OPTIONS]')) |
913 def phabread(ui, repo, spec, **opts): |
916 def phabread(ui, repo, spec, **opts): |
914 """print patches from Phabricator suitable for importing |
917 """print patches from Phabricator suitable for importing |
915 |
918 |
916 DREVSPEC could be a Differential Revision identity, like ``D123``, or just |
919 DREVSPEC could be a Differential Revision identity, like ``D123``, or just |
917 the number ``123``. It could also have common operators like ``+``, ``-``, |
920 the number ``123``. It could also have common operators like ``+``, ``-``, |
927 stack up to D9. |
930 stack up to D9. |
928 |
931 |
929 If --stack is given, follow dependencies information and read all patches. |
932 If --stack is given, follow dependencies information and read all patches. |
930 It is equivalent to the ``:`` operator. |
933 It is equivalent to the ``:`` operator. |
931 """ |
934 """ |
932 if opts.get('stack'): |
935 if opts.get(b'stack'): |
933 spec = ':(%s)' % spec |
936 spec = b':(%s)' % spec |
934 drevs = querydrev(repo, spec) |
937 drevs = querydrev(repo, spec) |
935 readpatch(repo, drevs, ui.write) |
938 readpatch(repo, drevs, ui.write) |
936 |
939 |
937 @command('phabupdate', |
940 @command(b'phabupdate', |
938 [('', 'accept', False, _('accept revisions')), |
941 [(b'', b'accept', False, _(b'accept revisions')), |
939 ('', 'reject', False, _('reject revisions')), |
942 (b'', b'reject', False, _(b'reject revisions')), |
940 ('', 'abandon', False, _('abandon revisions')), |
943 (b'', b'abandon', False, _(b'abandon revisions')), |
941 ('', 'reclaim', False, _('reclaim revisions')), |
944 (b'', b'reclaim', False, _(b'reclaim revisions')), |
942 ('m', 'comment', '', _('comment on the last revision')), |
945 (b'm', b'comment', b'', _(b'comment on the last revision')), |
943 ], _('DREVSPEC [OPTIONS]')) |
946 ], _(b'DREVSPEC [OPTIONS]')) |
944 def phabupdate(ui, repo, spec, **opts): |
947 def phabupdate(ui, repo, spec, **opts): |
945 """update Differential Revision in batch |
948 """update Differential Revision in batch |
946 |
949 |
947 DREVSPEC selects revisions. See :hg:`help phabread` for its usage. |
950 DREVSPEC selects revisions. See :hg:`help phabread` for its usage. |
948 """ |
951 """ |
949 flags = [n for n in 'accept reject abandon reclaim'.split() if opts.get(n)] |
952 flags = [n for n in b'accept reject abandon reclaim'.split() if opts.get(n)] |
950 if len(flags) > 1: |
953 if len(flags) > 1: |
951 raise error.Abort(_('%s cannot be used together') % ', '.join(flags)) |
954 raise error.Abort(_(b'%s cannot be used together') % b', '.join(flags)) |
952 |
955 |
953 actions = [] |
956 actions = [] |
954 for f in flags: |
957 for f in flags: |
955 actions.append({'type': f, 'value': 'true'}) |
958 actions.append({b'type': f, b'value': b'true'}) |
956 |
959 |
957 drevs = querydrev(repo, spec) |
960 drevs = querydrev(repo, spec) |
958 for i, drev in enumerate(drevs): |
961 for i, drev in enumerate(drevs): |
959 if i + 1 == len(drevs) and opts.get('comment'): |
962 if i + 1 == len(drevs) and opts.get(b'comment'): |
960 actions.append({'type': 'comment', 'value': opts['comment']}) |
963 actions.append({b'type': b'comment', b'value': opts[b'comment']}) |
961 if actions: |
964 if actions: |
962 params = {'objectIdentifier': drev[r'phid'], |
965 params = {b'objectIdentifier': drev[r'phid'], |
963 'transactions': actions} |
966 b'transactions': actions} |
964 callconduit(repo, 'differential.revision.edit', params) |
967 callconduit(repo, b'differential.revision.edit', params) |
965 |
968 |
templatekeyword = registrar.templatekeyword()

@templatekeyword(b'phabreview', requires={b'ctx'})
def template_review(context, mapping):
    """:phabreview: Object describing the review for this changeset.
    Has attributes `url` and `id`.
    """
    ctx = context.resource(mapping, b'ctx')
    m = _differentialrevisiondescre.search(ctx.description())
    if m:
        # Named-group lookups need str (r'') names even on a bytes pattern
        # under Python 3, and bytes objects have no .format() method, so
        # build the b'D<id>' identifier with %-formatting (PEP 461).
        return {
            b'url': m.group(r'url'),
            b'id': b'D%s' % m.group(r'id'),
        }