changeset 28911:35da19348143

templater: add function to parse whole string as template expression

This will be a parser of template aliases, and it can also be used for processing quoted string templates in map files. That's why this function isn't defined in the upcoming _aliasrules class.
author Yuya Nishihara <yuya@tcha.org>
date Sun, 27 Mar 2016 20:29:03 +0900
parents 1203159c8928
children 867d6ba2353d
files mercurial/templater.py
diffstat 1 files changed, 30 insertions(+), 4 deletions(-)
--- a/mercurial/templater.py	Tue Mar 29 17:27:34 2016 +0900
+++ b/mercurial/templater.py	Sun Mar 27 20:29:03 2016 +0900
@@ -40,7 +40,9 @@
     "end": (0, None, None, None, None),
 }
 
-def tokenize(program, start, end):
+def tokenize(program, start, end, term=None):
+    """Parse a template expression into a stream of tokens, which must end
+    with term if specified"""
     pos = start
     while pos < end:
         c = program[pos]
@@ -127,13 +129,15 @@
             sym = program[s:pos]
             yield ('symbol', sym, s)
             pos -= 1
-        elif c == '}':
+        elif c == term:
             yield ('end', None, pos + 1)
             return
         else:
             raise error.ParseError(_("syntax error"), pos)
         pos += 1
-    raise error.ParseError(_("unterminated template expansion"), start)
+    if term:
+        raise error.ParseError(_("unterminated template expansion"), start)
+    yield ('end', None, pos)
 
 def _parsetemplate(tmpl, start, stop, quote=''):
     r"""
@@ -171,7 +175,7 @@
         if c == quote:
             return parsed, n + 1
 
-        parseres, pos = p.parse(tokenize(tmpl, n + 1, stop))
+        parseres, pos = p.parse(tokenize(tmpl, n + 1, stop, '}'))
         parsed.append(parseres)
 
     if quote:
@@ -218,6 +222,28 @@
     assert pos == len(tmpl), 'unquoted template should be consumed'
     return _unnesttemplatelist(('template', parsed))
 
+def _parseexpr(expr):
+    """Parse a template expression into tree
+
+    >>> _parseexpr('"foo"')
+    ('string', 'foo')
+    >>> _parseexpr('foo(bar)')
+    ('func', ('symbol', 'foo'), ('symbol', 'bar'))
+    >>> _parseexpr('foo(')
+    Traceback (most recent call last):
+      ...
+    ParseError: ('not a prefix: end', 4)
+    >>> _parseexpr('"foo" "bar"')
+    Traceback (most recent call last):
+      ...
+    ParseError: ('invalid token', 7)
+    """
+    p = parser.parser(elements)
+    tree, pos = p.parse(tokenize(expr, 0, len(expr)))
+    if pos != len(expr):
+        raise error.ParseError(_('invalid token'), pos)
+    return _unnesttemplatelist(tree)
+
 def prettyformat(tree):
     return parser.prettyformat(tree, ('integer', 'string', 'symbol'))
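
A minimal usage sketch (not part of the changeset) of how the optional term argument changes tokenize(), assuming "from mercurial import templater" works, i.e. a Mercurial source tree containing this revision is on the Python path; the expected token streams follow from the code in the hunks above.

    from mercurial import templater

    # With term='}', the token stream must be closed by a brace, as when
    # _parsetemplate parses a "{...}" expansion:
    print(list(templater.tokenize("rev}", 0, 4, '}')))
    # expected: [('symbol', 'rev', 0), ('end', None, 4)]

    # Without term, the stream is terminated implicitly at the end of the
    # string, which is what the new _parseexpr() relies on:
    print(list(templater.tokenize("rev", 0, 3)))
    # expected: [('symbol', 'rev', 0), ('end', None, 3)]

The doctests added to _parseexpr() above show the corresponding whole-string behaviour: a single expression parses to a tree, while trailing input past the first expression raises ParseError('invalid token', pos).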