Commit b67f8a31 authored by adam j hartz's avatar adam j hartz

replace !(...) and ![...] with ?(...) and ?[...]

parent 2b72c684
......@@ -42,7 +42,7 @@ TAKO_TOKENS = {
'/', '//', '%', '**', '|', '&', '~', '^', '>>', '<<', '<', '<=', '>', '>=',
'==', '!=', '->', '=', '+=', '-=', '*=', '/=', '%=', '**=', '>>=', '<<=',
'&=', '^=', '|=', '//=', ',', ';', ':', '?', '??', '$(', '${', '$[', '..',
'...', '![', '!(', '@(', '@$(', '@'
'...', '![', '!(', '@(', '@$(', '@', '?(', '?[',
}
......
......@@ -60,8 +60,7 @@ _op_map = {
'&=': 'AMPERSANDEQUAL', '^=': 'XOREQUAL', '|=': 'PIPEEQUAL',
'//=': 'DOUBLEDIVEQUAL',
# extra tako operators
'?': 'QUESTION', '??': 'DOUBLE_QUESTION', '@$': 'ATDOLLAR',
'&': 'AMPERSAND',
'@$': 'ATDOLLAR', '&': 'AMPERSAND',
}
for (op, type) in _op_map.items():
token_map[(tokenize.OP, op)] = type
......@@ -223,6 +222,8 @@ _make_matcher_handler('{', 'LBRACE', True, '}')
_make_matcher_handler('$(', 'DOLLAR_LPAREN', False, ')')
_make_matcher_handler('$[', 'DOLLAR_LBRACKET', False, ']')
_make_matcher_handler('${', 'DOLLAR_LBRACE', True, '}')
_make_matcher_handler('?(', 'QUESTION_LPAREN', False, ')')
_make_matcher_handler('?[', 'QUESTION_LBRACKET', False, ']')
_make_matcher_handler('!(', 'BANG_LPAREN', False, ')')
_make_matcher_handler('![', 'BANG_LBRACKET', False, ']')
_make_matcher_handler('@(', 'AT_LPAREN', True, ')')
......@@ -359,6 +360,8 @@ class Lexer(object):
'LBRACKET', 'RBRACKET', # [ ]
'LBRACE', 'RBRACE', # { }
'AT_LPAREN', # @(
'QUESTION_LPAREN', # ?(
'QUESTION_LBRACKET', # ?[
'BANG_LPAREN', # !(
'BANG_LBRACKET', # ![
'DOLLAR_LPAREN', # $(
......
......@@ -252,7 +252,8 @@ class BaseParser(object):
'for', 'colon', 'import', 'except', 'nonlocal', 'global',
'yield', 'from', 'raise', 'with', 'dollar_lparen',
'dollar_lbrace', 'dollar_lbracket', 'try',
'bang_lparen', 'bang_lbracket']
'bang_lparen', 'bang_lbracket', 'question_lparen',
'question_lbracket']
for rule in tok_rules:
self._tok_rule(rule)
......@@ -279,9 +280,9 @@ class BaseParser(object):
'LSHIFT', 'RSHIFT', 'IOREDIRECT', 'SEARCHPATH', 'INDENT',
'DEDENT', 'LPAREN', 'RPAREN', 'LBRACE', 'RBRACE',
'LBRACKET', 'RBRACKET', 'AT_LPAREN', 'BANG_LPAREN',
'BANG_LBRACKET', 'DOLLAR_LPAREN', 'DOLLAR_LBRACE',
'DOLLAR_LBRACKET', 'ATDOLLAR_LPAREN', 'AMPERSAND', 'DOLLAR',
'DOLLAR_NAME'}
'BANG_LBRACKET', 'QUESTION_LPAREN', 'QUESTION_LBRACKET',
'DOLLAR_LPAREN', 'DOLLAR_LBRACE', 'DOLLAR_LBRACKET',
'ATDOLLAR_LPAREN', 'AMPERSAND', 'DOLLAR', 'DOLLAR_NAME'}
ts = '\n | '.join(sorted(toks))
doc = 'subproc_arg_part : ' + ts + '\n'
self.p_subproc_arg_part.__func__.__doc__ = doc
......@@ -1792,8 +1793,14 @@ class BaseParser(object):
| dollar_lparen_tok subproc RPAREN
| bang_lparen_tok subproc RPAREN
| bang_lbracket_tok subproc RBRACKET
| question_lparen_tok subproc RPAREN
| question_lbracket_tok subproc RBRACKET
| dollar_lbracket_tok subproc RBRACKET
"""
if p[1].value.startswith('!'):
print('WARNING: !(...) and ![...] are deprecated (to be removed '
'in a later version). Use ?(...) and ?[...] instead.',
file=sys.stderr)
p[0] = self._dollar_rules(p)
def p_string_literal(self, p):
......@@ -1879,12 +1886,6 @@ class BaseParser(object):
"""
p[0] = [p[2]]
def p_trailer_quest(self, p):
"""trailer : DOUBLE_QUESTION
| QUESTION
"""
p[0] = [p[1]]
def p_subscriptlist(self, p):
"""subscriptlist : subscript comma_subscript_list_opt comma_opt"""
p1, p2 = p[1], p[2]
......@@ -2142,10 +2143,10 @@ class BaseParser(object):
elif p1 == '$(':
p0 = tako_call('__tako_subproc_captured_stdout__', p2,
lineno=lineno, col=col)
elif p1 == '!(':
elif p1 == '!(' or p1 == '?(':
p0 = tako_call('__tako_subproc_captured_object__', p2,
lineno=lineno, col=col)
elif p1 == '![':
elif p1 == '![' or p1 == '?[':
p0 = tako_call('__tako_subproc_captured_hiddenobject__', p2,
lineno=lineno, col=col)
elif p1 == '$[':
......
......@@ -92,22 +92,22 @@ ATEQUAL = N_TOKENS
tok_name[N_TOKENS] = 'ATEQUAL'
N_TOKENS += 1
_tako_tokens = {
'?': 'QUESTION',
'@=': 'ATEQUAL',
'@$': 'ATDOLLAR',
'||': 'DOUBLEPIPE',
'&&': 'DOUBLEAMPER',
'@(': 'ATLPAREN',
'?(': 'QUESTIONLPAREN',
'?[': 'QUESTIONLBRACKET',
'!(': 'BANGLPAREN',
'![': 'BANGLBRACKET',
'$(': 'DOLLARLPAREN',
'$[': 'DOLLARLBRACKET',
'${': 'DOLLARLBRACE',
'??': 'DOUBLEQUESTION',
'@$(': 'ATDOLLARLPAREN',
}
additional_parenlevs = frozenset({'@(', '!(', '![', '$(', '$[', '${', '@$('})
additional_parenlevs = frozenset({'@(', '!(', '![', '$(', '$[', '${', '@$(', '?(', '?['})
for k, v in _tako_tokens.items():
exec('%s = N_TOKENS' % v)
......@@ -251,8 +251,9 @@ _redir_check = {'{}>'.format(i) for i in _redir_names}.union(_redir_check)
_redir_check = {'{}>>'.format(i) for i in _redir_names}.union(_redir_check)
_redir_check = frozenset(_redir_check)
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"!=", r"//=?", r"->",
r"@\$\(?", r'\|\|', '&&', r'@\(', r'!\(', r'!\[', r'\$\(',
r'\$\[', '\${', r'\?\?', r'\?', AUGASSIGN_OPS, r"~")
r"@\$\(?", r'\|\|', '&&', r'@\(', r'!\(', r'!\[', r'\?\(',
r'\?\[', r'\$\(', r'\$\[', '\${',
AUGASSIGN_OPS, r"~")
Bracket = '[][(){}]'
Special = group(r'\r?\n', r'\.\.\.', r'[:;.,@]')
......
......@@ -296,7 +296,7 @@ def subproc_toks(line, mincol=-1, maxcol=None, lexer=None, returnline=False):
return # handle comment lines
beg, end = toks[0].lexpos, (toks[-1].lexpos + end_offset)
end = len(line[:end].rstrip())
rtn = '![' + line[beg:end] + ']'
rtn = '?[' + line[beg:end] + ']'
if returnline:
rtn = line[:beg] + rtn + line[end:]
return rtn
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment