import os
import regex
-def generate_flex(_ast, _element, home_dir, skel_file, out_file):
+def generate_flex(_xast, _element, home_dir, skel_file, out_file):
# generate group_ref_data which emulates the old way where
# start = even, end = odd, remaining bits = flex rule index,
# ignoring user-defined groups by putting start = end = -1:
group_ref_data = []
- for i in range(len(_ast.flex_rules)):
+ for i in range(len(_xast.flex_rules)):
group_ref_data.extend(
- [(-1, -1) for j in range(len(_ast.flex_rules[i].groups0))] +
+ [(-1, -1) for j in range(len(_xast.flex_rules[i].groups0))] +
[(i * 2, i * 2 + 1)] +
- [(-1, -1) for j in range(len(_ast.flex_rules[i].groups1))]
+ [(-1, -1) for j in range(len(_xast.flex_rules[i].groups1))]
)
- _nfa = _ast.to_nfa(group_ref_data)
+ _nfa = _xast.to_nfa(group_ref_data)
# end of buffer expression (do here because only necessary for flex)
eob_regex = regex.RegexGroup(children = [regex.RegexEmpty()])
eob_groups = []
- eob_regex.post_process(eob_groups, caseless = _ast[0].caseless)
+ eob_regex.post_process(eob_groups, caseless = _xast[0].caseless)
assert len(eob_groups) == 1
eob_regex.add_to_nfa(
_nfa,
- [(len(_ast.flex_rules) * 2, len(_ast.flex_rules) * 2 + 1)]
+ [(len(_xast.flex_rules) * 2, len(_xast.flex_rules) * 2 + 1)]
)
_flex_dfa = _nfa.to_dfa().to_flex_dfa()
skel_file = os.path.join(home_dir, 'skel/skel_flex.c')
if out_file is None:
out_file = (
- _ast[0].outfile
- if len(_ast[0].outfile) else
- 'lex.{0:s}.c'.format(_ast[0].prefix)
+ _xast[0].outfile
+ if len(_xast[0].outfile) else
+ 'lex.{0:s}.c'.format(_xast[0].prefix)
)
with open(skel_file, 'r') as fin:
with open(out_file, 'w+') as fout:
{0:s}/* GENERATE END */
'''.format(
''
- if _ast[0].prefix == 'yy' else
+ if _xast[0].prefix == 'yy' else
''.join(
[
'#define yy{0:s} {1:s}{2:s}\n'.format(
i,
- _ast[0].prefix,
+ _xast[0].prefix,
i
)
for i in [
{0:s}/* GENERATE END */
'''.format(
''
- if _ast[0].yywrap else
+ if _xast[0].yywrap else
'''#define {0:s}wrap() (/*CONSTCOND*/1)
#define YY_SKIP_YYWRAP
'''.format(
- _ast[0].prefix
+ _xast[0].prefix
)
)
)
}};
/* GENERATE END */
'''.format(
- len(_ast.actions_text),
+ len(_xast.actions_text),
','.join(
[
'\n\t{0:s}'.format(
'''/* GENERATE SECTION1 BEGIN */
{0:s}/* GENERATE END */
'''.format(
- ''.join([i.get_text() for i in _ast[0].code_blocks_text])
+ ''.join([i.get_text() for i in _xast[0].code_blocks_text])
)
)
elif line == '/* GENERATE STARTCONDDECL */\n':
''.join(
[
'#define {0:s} {1:d}\n'.format(
- _ast.start_conditions[i].name,
+ _xast.start_conditions[i].name,
i
)
- for i in range(len(_ast.start_conditions))
+ for i in range(len(_xast.start_conditions))
]
)
)
'''/* GENERATE SECTION2INITIAL BEGIN */
{0:s}/* GENERATE END */
'''.format(
- ''.join([i.get_text() for i in _ast[1].code_blocks_text])
+ ''.join([i.get_text() for i in _xast[1].code_blocks_text])
)
)
elif line == '/* GENERATE SECTION2 */\n':
eof_action_to_start_conditions = [
[
j
- for j in range(len(_ast.start_conditions))
- if _ast.start_conditions[j].eof_action == i
+ for j in range(len(_xast.start_conditions))
+ if _xast.start_conditions[j].eof_action == i
]
- for i in range(len(_ast.eof_actions_text))
+ for i in range(len(_xast.eof_actions_text))
]
#print('eof_action_to_start_conditions', eof_action_to_start_conditions)
fout.write(
{1:s} YY_BREAK
'''.format(
i,
- _ast.actions_text[i].get_text()
+ _xast.actions_text[i].get_text()
)
- for i in range(len(_ast.actions_text))
+ for i in range(len(_xast.actions_text))
]
),
''.join(
''.join(
[
'\t\t\tcase YY_STATE_EOF({0:s}):\n'.format(
- _ast.start_conditions[j].name
+ _xast.start_conditions[j].name
)
for j in eof_action_to_start_conditions[i]
]
),
- _ast.eof_actions_text[i].get_text()
+ _xast.eof_actions_text[i].get_text()
)
- for i in range(len(_ast.eof_actions_text))
+ for i in range(len(_xast.eof_actions_text))
if len(eof_action_to_start_conditions[i]) > 0
]
)
'''/* GENERATE SECTION3 BEGIN */
{0:s}/* GENERATE END */
'''.format(
- '' if len(_ast) < 3 else _ast[2].get_text()
+ '' if len(_xast) < 3 else _xast[2].get_text()
)
)
else:
- if _ast[0].prefix != 'yy':
- line = line.replace('yywrap', '{0:s}wrap'.format(_ast[0].prefix))
+ if _xast[0].prefix != 'yy':
+ line = line.replace('yywrap', '{0:s}wrap'.format(_xast[0].prefix))
fout.write(line)
line = fin.readline()
return ''.join(lines)
# note: these routines are literally the same, but conceptually different,
-# because ast.Text and regex.Text are different and unrelated base classes
-def ast_text_to_python(ast_text, indent):
- return text_to_python(ast_text.get_text(), indent)
+# because xast.Text and regex.Text are different and unrelated base classes
+def xast_text_to_python(xast_text, indent):
+ return text_to_python(xast_text.get_text(), indent)
def regex_text_to_python(regex_text, indent):
return text_to_python(regex_text.get_text(), indent)
-def generate_py(_ast, _element, home_dir, skel_file, out_file):
+def generate_py(_xast, _element, home_dir, skel_file, out_file):
# generate group action function names (ref_data) and body text
group_ref_data = []
group_rules_text = []
group_actions_text = []
- for i in _ast.flex_rules:
+ for i in _xast.flex_rules:
group_ref_data.extend(
[
(
yy_groups_by_name = {{}}
yy_action = yy_action{0:d}
'''.format(
- _ast.default_action
+ _xast.default_action
)
)
)
- _dfa = _ast.to_nfa(group_ref_data).to_dfa()
+ _dfa = _xast.to_nfa(group_ref_data).to_dfa()
if skel_file is None:
skel_file = os.path.join(
)
if out_file is None:
out_file = (
- _ast[0].outfile
- if len(_ast[0].outfile) else
- 'lex_{0:s}.py'.format(_ast[0].prefix)
+ _xast[0].outfile
+ if len(_xast[0].outfile) else
+ 'lex_{0:s}.py'.format(_xast[0].prefix)
)
with open(skel_file, 'r') as fin:
with open(out_file, 'w+') as fout:
'''.format(
''.join(
[
- ast_text_to_python(i, '')
- for i in _ast[0].code_blocks_text
+ xast_text_to_python(i, '')
+ for i in _xast[0].code_blocks_text
]
)
)
''.join(
[
'{0:s} = {1:d}\n'.format(
- _ast.start_conditions[i].name,
+ _xast.start_conditions[i].name,
i
)
- for i in range(len(_ast.start_conditions))
+ for i in range(len(_xast.start_conditions))
]
)
)
{1:s} raise YYContinue()
'''.format(
i,
- ast_text_to_python(_ast.actions_text[i], ' ')
+ xast_text_to_python(_xast.actions_text[i], ' ')
)
- for i in range(len(_ast.actions_text))
+ for i in range(len(_xast.actions_text))
]
),
''.join(
{1:s} return 0
'''.format(
i,
- ast_text_to_python(_ast.eof_actions_text[i], ' ')
+ xast_text_to_python(_xast.eof_actions_text[i], ' ')
)
- for i in range(len(_ast.eof_actions_text))
+ for i in range(len(_xast.eof_actions_text))
]
),
','.join(
[
'\n yy_eof_action{0:d}'.format(i.eof_action)
- for i in _ast.start_conditions
+ for i in _xast.start_conditions
]
)
)
'''.format(
''.join(
[
- ast_text_to_python(i, ' ')
- for i in _ast[1].code_blocks_text
+ xast_text_to_python(i, ' ')
+ for i in _xast[1].code_blocks_text
]
)
)
'''# GENERATE SECTION3 BEGIN
{0:s}# GENERATE END
'''.format(
- '' if len(_ast) < 3 else ast_text_to_python(_ast[2], '')
+ '' if len(_xast) < 3 else xast_text_to_python(_xast[2], '')
)
)
else:
- #if _ast[0].prefix != 'yy':
- # line = line.replace('yywrap', '{0:s}wrap'.format(_ast[0].prefix))
+ #if _xast[0].prefix != 'yy':
+ # line = line.replace('yywrap', '{0:s}wrap'.format(_xast[0].prefix))
fout.write(line)
line = fin.readline()
import sys
# GENERATE SECTION1 BEGIN
-import ast
+import xast
import regex
import state
import y_tab
yy_push_state(CODEBLOCK)
indented_code = True
#add_action(yytext)
- markup_push(ast.AST.Section1Or2.CodeBlock)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action1():
#add_action('/*[' '[')
#add_action('[' '[')
yy_push_state(CODEBLOCK)
indented_code = False
- markup_push(ast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action6():
global brace_start_line, brace_depth
#add_action(']' ']')
#if not indented_code:
# line_directive_out(None, 0)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.Section1Or2.CodeBlock
+ markup_pop() # xast.AST.Section1Or2.CodeBlock
raise YYContinue()
def yy_action39():
#add_action(yytext)
#if not indented_code:
# line_directive_out(None, 0)
markup_flush(len(yytext))
- markup_pop() # ast.AST.Text
- markup_pop() # ast.AST.Section1Or2.CodeBlock
+ markup_pop() # xast.AST.Text
+ markup_pop() # xast.AST.Section1Or2.CodeBlock
raise YYContinue()
def yy_action42():
global brace_depth
yy_push_state(CODEBLOCK)
indented_code = True
#add_action(yytext)
- markup_push(ast.AST.Section1Or2.CodeBlock)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action132():
global indented_code
yy_push_state(CODEBLOCK)
indented_code = True
#add_action(yytext)
- markup_push(ast.AST.Section1Or2.CodeBlock)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action133():
#add_action(yytext)
bracelevel = 1
BEGIN(PERCENT_BRACE_ACTION)
markup_flush(len(yytext) - 2)
- markup_push(ast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
markup_flush(2)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action137():
if not (state._sf_stk[-1] & 4):
bracelevel = 1
BEGIN(PERCENT_BRACE_ACTION)
markup_flush(len(yytext) - 2)
- markup_push(ast.AST.Section2.Rule.Action)
+ markup_push(xast.AST.Section2.Rule.Action)
doing_rule_action = True
state.in_rule = False
markup_flush(2)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action143():
if state._sf_stk[-1] & 4:
doing_rule_action = True
state.in_rule = False
markup_flush(len(yytext))
- markup_push(ast.AST.Section2.Rule.Action)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section2.Rule.Action)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action147():
global bracelevel, doing_rule_action
doing_rule_action = True
state.in_rule = False
markup_flush(len(yytext))
- markup_push(ast.AST.Section2.Rule.Action)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section2.Rule.Action)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action148():
markup_flush(len(yytext) - 7)
def yy_action220():
global bracelevel
bracelevel = 0
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
raise YYContinue()
def yy_action221():
#add_action(yytext)
markup_flush(len(yytext))
markup_pop() # as below
if doing_codeblock:
- # ast.AST.Section1Or2.CodeBlock
+ # xast.AST.Section1Or2.CodeBlock
doing_codeblock = False
else:
- # ast.AST.Section2.Rule.Action
+ # xast.AST.Section2.Rule.Action
return ord('\n')
raise YYContinue()
def yy_action226():
doing_rule_action = False
#add_action('\tYY_BREAK]' ']\n')
markup_flush(len(yytext))
- markup_pop() # ast.AST.Text
- markup_pop() # ast.AST.Section2.Rule.Action
+ markup_pop() # xast.AST.Text
+ markup_pop() # xast.AST.Section2.Rule.Action
return ord('\n')
raise YYContinue()
def yy_action234():
markup_flush(len(yytext))
if doing_rule_action:
doing_rule_action = False # Nick added, error in the original?
- markup_pop() # ast.AST.Text
- markup_pop() # ast.AST.Section2.Rule.Action
+ markup_pop() # xast.AST.Text
+ markup_pop() # xast.AST.Section2.Rule.Action
return '\n'
else:
BEGIN(ACTION)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options
+ xast.AST.Section1.Options
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Array
+ xast.AST.Section1.Options.Array
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options
+ xast.AST.Section1.Options
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Array, value = True
+ xast.AST.Section1.Options.Array, value = True
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Name
+ xast.AST.Name
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.SevenBit
+ xast.AST.Section1.Options.SevenBit
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.SevenBit
+ xast.AST.Section1.Options.SevenBit
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Align
+ xast.AST.Section1.Options.Align
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.AlwaysInteractive
+ xast.AST.Section1.Options.AlwaysInteractive
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Array
+ xast.AST.Section1.Options.Array
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Backup
+ xast.AST.Section1.Options.Backup
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Interactive
+ xast.AST.Section1.Options.Interactive
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.BisonBridge
+ xast.AST.Section1.Options.BisonBridge
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.BisonLocations
+ xast.AST.Section1.Options.BisonLocations
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.CPlusPlus
+ xast.AST.Section1.Options.CPlusPlus
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Caseless
+ xast.AST.Section1.Options.Caseless
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Caseless
+ xast.AST.Section1.Options.Caseless
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Debug
+ xast.AST.Section1.Options.Debug
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Default
+ xast.AST.Section1.Options.Default
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.ECS
+ xast.AST.Section1.Options.ECS
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Fast
+ xast.AST.Section1.Options.Fast
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Full
+ xast.AST.Section1.Options.Full
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Input
+ xast.AST.Section1.Options.Input
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Interactive
+ xast.AST.Section1.Options.Interactive
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.LexCompat
+ xast.AST.Section1.Options.LexCompat
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.PosixCompat
+ xast.AST.Section1.Options.PosixCompat
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Line
+ xast.AST.Section1.Options.Line
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Main
+ xast.AST.Section1.Options.Main
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.MetaECS
+ xast.AST.Section1.Options.MetaECS
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.NeverInteractive
+ xast.AST.Section1.Options.NeverInteractive
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.PerfReport
+ xast.AST.Section1.Options.PerfReport
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Array
+ xast.AST.Section1.Options.Array
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Read
+ xast.AST.Section1.Options.Read
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Reentrant
+ xast.AST.Section1.Options.Reentrant
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Reject
+ xast.AST.Section1.Options.Reject
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Stack
+ xast.AST.Section1.Options.Stack
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.StdInit
+ xast.AST.Section1.Options.StdInit
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.StdOut
+ xast.AST.Section1.Options.StdOut
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.UniStd
+ xast.AST.Section1.Options.UniStd
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Unput
+ xast.AST.Section1.Options.Unput
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Verbose
+ xast.AST.Section1.Options.Verbose
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.Warn
+ xast.AST.Section1.Options.Warn
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYLineNo
+ xast.AST.Section1.Options.YYLineNo
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYMore
+ xast.AST.Section1.Options.YYMore
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYWrap
+ xast.AST.Section1.Options.YYWrap
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYPushState
+ xast.AST.Section1.Options.YYPushState
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYPopState
+ xast.AST.Section1.Options.YYPopState
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYTopState
+ xast.AST.Section1.Options.YYTopState
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYScanBuffer
+ xast.AST.Section1.Options.YYScanBuffer
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYScanBytes
+ xast.AST.Section1.Options.YYScanBytes
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYScanString
+ xast.AST.Section1.Options.YYScanString
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYAlloc
+ xast.AST.Section1.Options.YYAlloc
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYRealloc
+ xast.AST.Section1.Options.YYRealloc
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYFree
+ xast.AST.Section1.Options.YYFree
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetDebug
+ xast.AST.Section1.Options.YYGetDebug
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYSetDebug
+ xast.AST.Section1.Options.YYSetDebug
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetExtra
+ xast.AST.Section1.Options.YYGetExtra
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYSetExtra
+ xast.AST.Section1.Options.YYSetExtra
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetLeng
+ xast.AST.Section1.Options.YYGetLeng
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetText
+ xast.AST.Section1.Options.YYGetText
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetLineNo
+ xast.AST.Section1.Options.YYGetLineNo
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYSetLineNo
+ xast.AST.Section1.Options.YYSetLineNo
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetIn
+ xast.AST.Section1.Options.YYGetIn
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYSetIn
+ xast.AST.Section1.Options.YYSetIn
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetOut
+ xast.AST.Section1.Options.YYGetOut
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYSetOut
+ xast.AST.Section1.Options.YYSetOut
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetLVal
+ xast.AST.Section1.Options.YYGetLVal
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYSetLVal
+ xast.AST.Section1.Options.YYSetLVal
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYGetLLoc
+ xast.AST.Section1.Options.YYGetLLoc
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.YYSetLLoc
+ xast.AST.Section1.Options.YYSetLLoc
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section1.Options.TablesVerify
+ xast.AST.Section1.Options.TablesVerify
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.String
+ xast.AST.String
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text
+ xast.AST.Text
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Section2.Rule.Action, continued = True
+ xast.AST.Section2.Rule.Action, continued = True
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Name
+ xast.AST.Name
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
%{
- import ast
+ import xast
import regex
import state
%%
goal
- : %space (?E{ast.AST.Section1}initlex sect1 %space) sect1end (?E{ast.AST.Section2}sect2 %space) sect2end initforrule
+ : %space (?E{xast.AST.Section1}initlex sect1 %space) sect1end (?E{xast.AST.Section2}sect2 %space) sect2end initforrule
{
#def_rule = None
#pat = cclinit()
;
sect1
- : sect1 %space (?E{ast.AST.Section1.StartConditions, exclusive = xcluflg} startconddecl namelist1)
- | sect1 %space (?E{ast.AST.Section1.Options}options)
+ : sect1 %space (?E{xast.AST.Section1.StartConditions, exclusive = xcluflg} startconddecl namelist1)
+ | sect1 %space (?E{xast.AST.Section1.Options}options)
|
| error
{
/* Nick */
sect2end
- : SECTEND (?E{ast.AST.Section3}%space)
+ : SECTEND (?E{xast.AST.Section3}%space)
|
;
;
option
- : %space (?E{ast.AST.Section1.Options.OutFile}TOK_OUTFILE '=' NAME)
+ : %space (?E{xast.AST.Section1.Options.OutFile}TOK_OUTFILE '=' NAME)
{
#outfilename = xstrdup(nmstr)
#did_outfilename = 1
}
- | %space (?E{ast.AST.Section1.Options.ExtraType}TOK_EXTRA_TYPE '=' NAME)
+ | %space (?E{xast.AST.Section1.Options.ExtraType}TOK_EXTRA_TYPE '=' NAME)
{
#extra_type = xstrdup(nmstr)
}
- | %space (?E{ast.AST.Section1.Options.Prefix}TOK_PREFIX '=' NAME)
+ | %space (?E{xast.AST.Section1.Options.Prefix}TOK_PREFIX '=' NAME)
{
#prefix = xstrdup(nmstr)
#if strchr(prefix, ord('[')) or strchr(prefix, ord(']')):
# flexerror('Prefix must not contain [ or ]')
}
- | %space (?E{ast.AST.Section1.Options.YYClass}TOK_YYCLASS '=' NAME)
+ | %space (?E{xast.AST.Section1.Options.YYClass}TOK_YYCLASS '=' NAME)
{
#yyclass = xstrdup(nmstr)
}
- | %space (?E{ast.AST.Section1.Options.HeaderFile}TOK_HEADER_FILE '=' NAME)
+ | %space (?E{xast.AST.Section1.Options.HeaderFile}TOK_HEADER_FILE '=' NAME)
{
#headerfilename = xstrdup(nmstr)
}
- | %space (?E{ast.AST.Section1.Options.TablesFile}TOK_TABLES_FILE '=' NAME)
+ | %space (?E{xast.AST.Section1.Options.TablesFile}TOK_TABLES_FILE '=' NAME)
{
#tablesext = True
#tablesfilename = xstrdup(nmstr)
;
sect2
- : sect2 %space (?E{ast.AST.Section2.Rule}scon initforrule flexrule '\n')
+ : sect2 %space (?E{xast.AST.Section2.Rule}scon initforrule flexrule '\n')
{
#scon_stk_ptr = $2
}
- | sect2 %space (?E{ast.AST.Section2.CompoundRule}scon '{' sect2 '}')
+ | sect2 %space (?E{xast.AST.Section2.CompoundRule}scon '{' sect2 '}')
{
#scon_stk_ptr = $2
}
;
flexrule
- : %space (?E{ast.AST.Section2.Rule.FLexRule, bol = True}'^' rule)
+ : %space (?E{xast.AST.Section2.Rule.FLexRule, bol = True}'^' rule)
{
#pat = $2
#finish_rule(pat, variable_trail_rule, headcnt, trailcnt, previous_continued_action)
# if performance_report > 1:
# pinpoint_message('\'^\' operator results in sub-optimal performance')
}
- | %space (?E{ast.AST.Section2.Rule.FLexRule}rule)
+ | %space (?E{xast.AST.Section2.Rule.FLexRule}rule)
{
#pat = $1
#finish_rule(pat, variable_trail_rule, headcnt, trailcnt, previous_continued_action)
# scset[i] = mkbranch(scset[i], pat)
# i += 1
}
- | %space (?E{ast.AST.Section2.Rule.EOFRule}EOF_OP)
+ | %space (?E{xast.AST.Section2.Rule.EOFRule}EOF_OP)
{
#if scon_stk_ptr > 0:
# build_eof_action()
;
scon
- : %space (?E{ast.AST.Section2.StartConditions}'<' scon_stk_ptr namelist2 '>')
+ : %space (?E{xast.AST.Section2.StartConditions}'<' scon_stk_ptr namelist2 '>')
{
#$$ = $2
}
- | %space (?E{ast.AST.Section2.StartConditions, wildcard = True}'<' '*' '>')
+ | %space (?E{xast.AST.Section2.StartConditions, wildcard = True}'<' '*' '>')
{
#$$ = scon_stk_ptr
#i = 1
# scon_stk[++scon_stk_ptr] = i
# i += 1
}
- | %space (?E{ast.AST.Section2.StartConditions})
+ | %space (?E{xast.AST.Section2.StartConditions})
{
#$$ = scon_stk_ptr
}
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
-import ast
+import xast
import element
import generate_flex
import generate_py
with open(in_file) as fin:
if in_file[-4:] == '.xml':
- _ast = element.deserialize(fin, ast.factory)
+ _xast = element.deserialize(fin, xast.factory)
else:
import lex_yy
import state
import y_tab
state.infilename = in_file
lex_yy.yyin = fin
- _ast = y_tab.yyparse(ast.AST)
-#element.serialize(_ast, 'a.xml', 'utf-8')
-#_ast = element.deserialize('a.xml', ast.factory, 'utf-8')
-_ast.post_process()
-#element.serialize(_ast, 'b.xml', 'utf-8')
-#_ast = element.deserialize('b.xml', ast.factory, 'utf-8')
+ _xast = y_tab.yyparse(xast.AST)
+#element.serialize(_xast, 'a.xml', 'utf-8')
+#_xast = element.deserialize('a.xml', xast.factory, 'utf-8')
+_xast.post_process()
+#element.serialize(_xast, 'b.xml', 'utf-8')
+#_xast = element.deserialize('b.xml', xast.factory, 'utf-8')
(generate_py.generate_py if python else generate_flex.generate_flex)(
- _ast,
+ _xast,
_element,
home_dir,
skel_file,
/* scan.l - scanner for flex input -*-C-*- */
%{
- import ast
+ import xast
import regex
import state
import y_tab
yy_push_state(CODEBLOCK)
indented_code = True
#add_action(yytext)
- markup_push(ast.AST.Section1Or2.CodeBlock)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Text)
}
^"/*" {
#add_action('/*[' '[')
#add_action('[' '[')
yy_push_state(CODEBLOCK)
indented_code = False
- markup_push(ast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
^"%top"[[:blank:]]*"{"[[:blank:]]*{NL} {
global brace_start_line, brace_depth
return y_tab.SECTEND
}
- ^(?E{ast.AST.Section1.Options}(?E{ast.AST.Section1.Options.Array}"%pointer".*)){NL} {
+ ^(?E{xast.AST.Section1.Options}(?E{xast.AST.Section1.Options.Array}"%pointer".*)){NL} {
#yytext_is_array = False
state.linenum += 1
}
- ^(?E{ast.AST.Section1.Options}(?E{ast.AST.Section1.Options.Array, value = True}"%array".*)){NL} {
+ ^(?E{xast.AST.Section1.Options}(?E{xast.AST.Section1.Options.Array, value = True}"%array".*)){NL} {
#yytext_is_array = True
state.linenum += 1
}
BEGIN(PICKUPDEF)
}
- (?E{ast.AST.Name}{SCNAME}) {
+ (?E{xast.AST.Name}{SCNAME}) {
state.nmstr = yytext
return y_tab.NAME
}
#add_action(']' ']')
#if not indented_code:
# line_directive_out(None, 0)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.Section1Or2.CodeBlock
+ markup_pop() # xast.AST.Section1Or2.CodeBlock
}
[^\n%\[\]]* #add_action(yytext)
. #add_action(yytext)
#if not indented_code:
# line_directive_out(None, 0)
markup_flush(len(yytext))
- markup_pop() # ast.AST.Text
- markup_pop() # ast.AST.Section1Or2.CodeBlock
+ markup_pop() # xast.AST.Text
+ markup_pop() # xast.AST.Section1Or2.CodeBlock
}
}
option_sense = not option_sense
}*/
- (?E{ast.AST.Section1.Options.SevenBit}("no"*)7bit) {
+ (?E{xast.AST.Section1.Options.SevenBit}("no"*)7bit) {
option_sense = (len(yy_groups[2]) & 2) == 0
#csize = 128 if option_sense else 256
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.SevenBit}("no"*)8bit) {
+ (?E{xast.AST.Section1.Options.SevenBit}("no"*)8bit) {
option_sense = (len(yy_groups[2]) & 2) == 0
#csize = 256 if option_sense else 128
yy_element_token[0].value = not option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Align}("no"*)align) {
+ (?E{xast.AST.Section1.Options.Align}("no"*)align) {
option_sense = (len(yy_groups[2]) & 2) == 0
#long_align = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.AlwaysInteractive}("no"*)always-interactive) {
+ (?E{xast.AST.Section1.Options.AlwaysInteractive}("no"*)always-interactive) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_ALWAYS_INTERACTIVE', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Array}("no"*)array) {
+ (?E{xast.AST.Section1.Options.Array}("no"*)array) {
option_sense = (len(yy_groups[2]) & 2) == 0
#yytext_is_array = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Backup}("no"*)backup) {
+ (?E{xast.AST.Section1.Options.Backup}("no"*)backup) {
option_sense = (len(yy_groups[2]) & 2) == 0
#backing_up_report = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Interactive}("no"*)batch) {
+ (?E{xast.AST.Section1.Options.Interactive}("no"*)batch) {
option_sense = (len(yy_groups[2]) & 2) == 0
#interactive = not option_sense
yy_element_token[0].value = not option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.BisonBridge}("no"*)bison-bridge) {
+ (?E{xast.AST.Section1.Options.BisonBridge}("no"*)bison-bridge) {
option_sense = (len(yy_groups[2]) & 2) == 0
#bison_bridge_lval = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.BisonLocations}("no"*)bison-locations) {
+ (?E{xast.AST.Section1.Options.BisonLocations}("no"*)bison-locations) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if bison_bridge_lloc = option_sense:
# bison_bridge_lval = True
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.CPlusPlus}("no"*)"c++") {
+ (?E{xast.AST.Section1.Options.CPlusPlus}("no"*)"c++") {
option_sense = (len(yy_groups[1]) & 2) == 0
#C_plus_plus = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Caseless}("no"*)caseful|case-sensitive) {
+ (?E{xast.AST.Section1.Options.Caseless}("no"*)caseful|case-sensitive) {
option_sense = (len(yy_groups[2]) & 2) == 0
if not option_sense:
state._sf_stk[-1] |= 1
yy_element_token[0].value = not option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Caseless}("no"*)caseless|case-insensitive) {
+ (?E{xast.AST.Section1.Options.Caseless}("no"*)caseless|case-insensitive) {
option_sense = (len(yy_groups[2]) & 2) == 0
if option_sense:
state._sf_stk[-1] |= 1
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Debug}("no"*)debug) {
+ (?E{xast.AST.Section1.Options.Debug}("no"*)debug) {
option_sense = (len(yy_groups[2]) & 2) == 0
#ddebug = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Default}("no"*)default) {
+ (?E{xast.AST.Section1.Options.Default}("no"*)default) {
option_sense = (len(yy_groups[2]) & 2) == 0
#spprdflt = not option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.ECS}("no"*)ecs) {
+ (?E{xast.AST.Section1.Options.ECS}("no"*)ecs) {
option_sense = (len(yy_groups[2]) & 2) == 0
#useecs = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Fast}("no"*)fast) {
+ (?E{xast.AST.Section1.Options.Fast}("no"*)fast) {
option_sense = (len(yy_groups[2]) & 2) == 0
#useecs = usemecs = False
#use_read = fullspd = True
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Full}("no"*)full) {
+ (?E{xast.AST.Section1.Options.Full}("no"*)full) {
option_sense = (len(yy_groups[2]) & 2) == 0
#useecs = usemecs = False
#use_read = fulltbl = True
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Input}("no"*)input) {
+ (?E{xast.AST.Section1.Options.Input}("no"*)input) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# action_define('YY_NO_INPUT', 1)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Interactive}("no"*)interactive) {
+ (?E{xast.AST.Section1.Options.Interactive}("no"*)interactive) {
option_sense = (len(yy_groups[2]) & 2) == 0
#interactive = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.LexCompat}("no"*)lex-compat) {
+ (?E{xast.AST.Section1.Options.LexCompat}("no"*)lex-compat) {
option_sense = (len(yy_groups[2]) & 2) == 0
state.lex_compat = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.PosixCompat}("no"*)posix-compat) {
+ (?E{xast.AST.Section1.Options.PosixCompat}("no"*)posix-compat) {
option_sense = (len(yy_groups[2]) & 2) == 0
state.posix_compat = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Line}("no"*)line) {
+ (?E{xast.AST.Section1.Options.Line}("no"*)line) {
option_sense = (len(yy_groups[2]) & 2) == 0
#gen_line_dirs = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Main}("no"*)main) {
+ (?E{xast.AST.Section1.Options.Main}("no"*)main) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_MAIN', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.MetaECS}("no"*)meta-ecs) {
+ (?E{xast.AST.Section1.Options.MetaECS}("no"*)meta-ecs) {
option_sense = (len(yy_groups[2]) & 2) == 0
#usemecs = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.NeverInteractive}("no"*)never-interactive) {
+ (?E{xast.AST.Section1.Options.NeverInteractive}("no"*)never-interactive) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NEVER_INTERACTIVE', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.PerfReport}("no"*)perf-report) {
+ (?E{xast.AST.Section1.Options.PerfReport}("no"*)perf-report) {
option_sense = (len(yy_groups[2]) & 2) == 0
#performance_report += 1 if option_sense else -1
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Array}("no"*)pointer) {
+ (?E{xast.AST.Section1.Options.Array}("no"*)pointer) {
option_sense = (len(yy_groups[2]) & 2) == 0
#yytext_is_array = not option_sense
yy_element_token[0].value = not option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Read}("no"*)read) {
+ (?E{xast.AST.Section1.Options.Read}("no"*)read) {
option_sense = (len(yy_groups[2]) & 2) == 0
#use_read = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Reentrant}("no"*)reentrant) {
+ (?E{xast.AST.Section1.Options.Reentrant}("no"*)reentrant) {
option_sense = (len(yy_groups[2]) & 2) == 0
#reentrant = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Reject}("no"*)reject) {
+ (?E{xast.AST.Section1.Options.Reject}("no"*)reject) {
option_sense = (len(yy_groups[2]) & 2) == 0
#reject_really_used = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Stack}("no"*)stack) {
+ (?E{xast.AST.Section1.Options.Stack}("no"*)stack) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_STACK_USED', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.StdInit}("no"*)stdinit) {
+ (?E{xast.AST.Section1.Options.StdInit}("no"*)stdinit) {
option_sense = (len(yy_groups[2]) & 2) == 0
#do_stdinit = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.StdOut}("no"*)stdout) {
+ (?E{xast.AST.Section1.Options.StdOut}("no"*)stdout) {
option_sense = (len(yy_groups[2]) & 2) == 0
#use_stdout = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.UniStd}("no"*)unistd) {
+ (?E{xast.AST.Section1.Options.UniStd}("no"*)unistd) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# action_define('YY_NO_UNISTD_H', 1)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Unput}("no"*)unput) {
+ (?E{xast.AST.Section1.Options.Unput}("no"*)unput) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_UNPUT', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Verbose}("no"*)verbose) {
+ (?E{xast.AST.Section1.Options.Verbose}("no"*)verbose) {
option_sense = (len(yy_groups[2]) & 2) == 0
#printstats = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.Warn}("no"*)warn) {
+ (?E{xast.AST.Section1.Options.Warn}("no"*)warn) {
option_sense = (len(yy_groups[2]) & 2) == 0
#nowarn = not option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYLineNo}("no"*)yylineno) {
+ (?E{xast.AST.Section1.Options.YYLineNo}("no"*)yylineno) {
option_sense = (len(yy_groups[2]) & 2) == 0
#do_yylineno = option_sense
#if option_sense:
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYMore}("no"*)yymore) {
+ (?E{xast.AST.Section1.Options.YYMore}("no"*)yymore) {
option_sense = (len(yy_groups[2]) & 2) == 0
#yymore_really_used = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYWrap}("no"*)yywrap) {
+ (?E{xast.AST.Section1.Options.YYWrap}("no"*)yywrap) {
option_sense = (len(yy_groups[2]) & 2) == 0
#do_yywrap = option_sense
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYPushState}("no"*)yy_push_state) {
+ (?E{xast.AST.Section1.Options.YYPushState}("no"*)yy_push_state) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_PUSH_STATE', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYPopState}("no"*)yy_pop_state) {
+ (?E{xast.AST.Section1.Options.YYPopState}("no"*)yy_pop_state) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_POP_STATE', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYTopState}("no"*)yy_top_state) {
+ (?E{xast.AST.Section1.Options.YYTopState}("no"*)yy_top_state) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_TOP_STATE', None)
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYScanBuffer}("no"*)yy_scan_buffer) {
+ (?E{xast.AST.Section1.Options.YYScanBuffer}("no"*)yy_scan_buffer) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SCAN_BUFFER', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYScanBytes}("no"*)yy_scan_bytes) {
+ (?E{xast.AST.Section1.Options.YYScanBytes}("no"*)yy_scan_bytes) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SCAN_BYTES', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYScanString}("no"*)yy_scan_string) {
+ (?E{xast.AST.Section1.Options.YYScanString}("no"*)yy_scan_string) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SCAN_STRING', None)
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYAlloc}("no"*)yyalloc) {
+ (?E{xast.AST.Section1.Options.YYAlloc}("no"*)yyalloc) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_FLEX_ALLOC', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYRealloc}("no"*)yyrealloc) {
+ (?E{xast.AST.Section1.Options.YYRealloc}("no"*)yyrealloc) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_FLEX_REALLOC', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYFree}("no"*)yyfree) {
+ (?E{xast.AST.Section1.Options.YYFree}("no"*)yyfree) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_FLEX_FREE', None)
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetDebug}("no"*)yyget_debug) {
+ (?E{xast.AST.Section1.Options.YYGetDebug}("no"*)yyget_debug) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_DEBUG', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYSetDebug}("no"*)yyset_debug) {
+ (?E{xast.AST.Section1.Options.YYSetDebug}("no"*)yyset_debug) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SET_DEBUG', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetExtra}("no"*)yyget_extra) {
+ (?E{xast.AST.Section1.Options.YYGetExtra}("no"*)yyget_extra) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_EXTRA', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYSetExtra}("no"*)yyset_extra) {
+ (?E{xast.AST.Section1.Options.YYSetExtra}("no"*)yyset_extra) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SET_EXTRA', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetLeng}("no"*)yyget_leng) {
+ (?E{xast.AST.Section1.Options.YYGetLeng}("no"*)yyget_leng) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_LENG', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetText}("no"*)yyget_text) {
+ (?E{xast.AST.Section1.Options.YYGetText}("no"*)yyget_text) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_TEXT', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetLineNo}("no"*)yyget_lineno) {
+ (?E{xast.AST.Section1.Options.YYGetLineNo}("no"*)yyget_lineno) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_LINENO', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYSetLineNo}("no"*)yyset_lineno) {
+ (?E{xast.AST.Section1.Options.YYSetLineNo}("no"*)yyset_lineno) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SET_LINENO', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetIn}("no"*)yyget_in) {
+ (?E{xast.AST.Section1.Options.YYGetIn}("no"*)yyget_in) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_IN', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYSetIn}("no"*)yyset_in) {
+ (?E{xast.AST.Section1.Options.YYSetIn}("no"*)yyset_in) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SET_IN', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetOut}("no"*)yyget_out) {
+ (?E{xast.AST.Section1.Options.YYGetOut}("no"*)yyget_out) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_OUT', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYSetOut}("no"*)yyset_out) {
+ (?E{xast.AST.Section1.Options.YYSetOut}("no"*)yyset_out) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SET_OUT', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetLVal}("no"*)yyget_lval) {
+ (?E{xast.AST.Section1.Options.YYGetLVal}("no"*)yyget_lval) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_LVAL', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYSetLVal}("no"*)yyset_lval) {
+ (?E{xast.AST.Section1.Options.YYSetLVal}("no"*)yyset_lval) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SET_LVAL', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYGetLLoc}("no"*)yyget_lloc) {
+ (?E{xast.AST.Section1.Options.YYGetLLoc}("no"*)yyget_lloc) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_GET_LLOC', None)
yy_element_token[0].value = option_sense
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.Section1.Options.YYSetLLoc}("no"*)yyset_lloc) {
+ (?E{xast.AST.Section1.Options.YYSetLLoc}("no"*)yyset_lloc) {
option_sense = (len(yy_groups[2]) & 2) == 0
#if not option_sense:
# buf_m4_define(&m4defs_buf, 'M4' '_YY_NO_SET_LLOC', None)
yyclass return y_tab.TOK_YYCLASS
header(-file)? return y_tab.TOK_HEADER_FILE
tables-file return y_tab.TOK_TABLES_FILE
- (?E{ast.AST.Section1.Options.TablesVerify}("no"*)tables-verify) {
+ (?E{xast.AST.Section1.Options.TablesVerify}("no"*)tables-verify) {
option_sense = (len(yy_groups[2]) & 2) == 0
#tablesverify = option_sense
#if not tablesext and option_sense:
return y_tab.TOK_OPTION_OTHER # Nick
}
- (?E{ast.AST.String}\"(?E{ast.AST.Text}[^"\n]*)\") {
+ (?E{xast.AST.String}\"(?E{xast.AST.Text}[^"\n]*)\") {
state.nmstr = yytext[1:-1]
return y_tab.NAME
}
yy_push_state(CODEBLOCK)
indented_code = True
#add_action(yytext)
- markup_push(ast.AST.Section1Or2.CodeBlock)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Text)
}
^{NOT_WS}.* {
yy_push_state(CODEBLOCK)
indented_code = True
#add_action(yytext)
- markup_push(ast.AST.Section1Or2.CodeBlock)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Text)
}
. #add_action(yytext)
bracelevel = 1
BEGIN(PERCENT_BRACE_ACTION)
markup_flush(len(yytext) - 2)
- markup_push(ast.AST.Section1Or2.CodeBlock)
+ markup_push(xast.AST.Section1Or2.CodeBlock)
markup_flush(2)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
^{OPTWS}"<" {
bracelevel = 1
BEGIN(PERCENT_BRACE_ACTION)
markup_flush(len(yytext) - 2)
- markup_push(ast.AST.Section2.Rule.Action)
+ markup_push(xast.AST.Section2.Rule.Action)
doing_rule_action = True
state.in_rule = False
markup_flush(2)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
- {WS}(?E{ast.AST.Section2.Rule.Action, continued = True}"|".*{NL}) {
+ {WS}(?E{xast.AST.Section2.Rule.Action, continued = True}"|".*{NL}) {
if state._sf_stk[-1] & 4:
markup_yyless(yytext.index('|'))
else:
doing_rule_action = True
state.in_rule = False
markup_flush(len(yytext))
- markup_push(ast.AST.Section2.Rule.Action)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section2.Rule.Action)
+ markup_push(xast.AST.Text)
}
{OPTWS}{NL} {
doing_rule_action = True
state.in_rule = False
markup_flush(len(yytext))
- markup_push(ast.AST.Section2.Rule.Action)
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Section2.Rule.Action)
+ markup_push(xast.AST.Text)
}
^{OPTWS}"<<EOF>>" |
BEGIN(CARETISBOL)
return ord('>')
}
- (?E{ast.AST.Name}{SCNAME}) {
+ (?E{xast.AST.Name}{SCNAME}) {
state.nmstr = yytext
return y_tab.NAME
}
{OPTWS}"%}".* {
global bracelevel
bracelevel = 0
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
}
<ACTION>"/*" {
markup_flush(len(yytext))
markup_pop() # as below
if doing_codeblock:
- # ast.AST.Section1Or2.CodeBlock
+ # xast.AST.Section1Or2.CodeBlock
doing_codeblock = False
else:
- # ast.AST.Section2.Rule.Action
+ # xast.AST.Section2.Rule.Action
return ord('\n')
}
}
doing_rule_action = False
#add_action('\tYY_BREAK]' ']\n')
markup_flush(len(yytext))
- markup_pop() # ast.AST.Text
- markup_pop() # ast.AST.Section2.Rule.Action
+ markup_pop() # xast.AST.Text
+ markup_pop() # xast.AST.Section2.Rule.Action
return ord('\n')
}
. #add_action(yytext)
markup_flush(len(yytext))
if doing_rule_action:
doing_rule_action = False # Nick added, error in the original?
- markup_pop() # ast.AST.Text
- markup_pop() # ast.AST.Section2.Rule.Action
+ markup_pop() # xast.AST.Text
+ markup_pop() # xast.AST.Section2.Rule.Action
return '\n'
else:
BEGIN(ACTION)
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
name_to_start_condition,
all_start_conditions,
inclusive_start_conditions
continued_action = False
for i in self:
continued_action = i.post_process(
- _ast,
+ _xast,
self,
name_to_start_condition,
all_start_conditions,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
for i in self:
name = i.get_text()
assert name not in name_to_start_condition
- name_to_start_condition[name] = len(_ast.start_conditions)
- all_start_conditions.add(len(_ast.start_conditions))
+ name_to_start_condition[name] = len(_xast.start_conditions)
+ all_start_conditions.add(len(_xast.start_conditions))
if not self.exclusive:
- inclusive_start_conditions.add(len(_ast.start_conditions))
- _ast.start_conditions.append(
+ inclusive_start_conditions.add(len(_xast.start_conditions))
+ _xast.start_conditions.append(
AST.StartCondition(
name = name,
exclusive = self.exclusive,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
name_to_start_condition,
all_start_conditions,
inclusive_start_conditions
self.yywrap = True
AST.Section1Or2.post_process(
self,
- _ast,
+ _xast,
name_to_start_condition,
all_start_conditions,
inclusive_start_conditions
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
)
for i in self[1:]:
continued_action = i.post_process(
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
assert not continued_action
if len(start_conditions) == 0:
for i in all_start_conditions:
- if _ast.start_conditions[i].eof_action == 0:
- _ast.start_conditions[i].eof_action = (
- len(_ast.eof_actions_text)
+ if _xast.start_conditions[i].eof_action == 0:
+ _xast.start_conditions[i].eof_action = (
+ len(_xast.eof_actions_text)
)
else:
for i in start_conditions:
- assert _ast.start_conditions[i].eof_action == 0
- _ast.start_conditions[i].eof_action = (
- len(_ast.eof_actions_text)
+ assert _xast.start_conditions[i].eof_action == 0
+ _xast.start_conditions[i].eof_action = (
+ len(_xast.eof_actions_text)
)
assert not action.continued
- _ast.eof_actions_text.append(action[0])
+ _xast.eof_actions_text.append(action[0])
return False # continued_action
class FLexRule(element.Element):
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
inclusive_start_conditions
):
if not self.bol:
- _ast.start_conditions[i].rules.append(self)
- _ast.start_conditions[i].bol_rules.append(self)
+ _xast.start_conditions[i].rules.append(self)
+ _xast.start_conditions[i].bol_rules.append(self)
self.groups0 = []
self[0].post_process(
self.groups0,
- caseless = _ast[0].caseless
+ caseless = _xast[0].caseless
) # regex
self.groups1 = []
self[1].post_process(
self.groups1,
- caseless = _ast[0].caseless
+ caseless = _xast[0].caseless
) # trailing context regex
- self.action = len(_ast.actions_text)
+ self.action = len(_xast.actions_text)
if action.continued:
continued_action = True
else:
- _ast.actions_text.append(action[0])
+ _xast.actions_text.append(action[0])
continued_action = False
#def to_text(node):
# return ''.join(
# )
#text = '{0:s}/{1:s}'.format(to_text(self[0]), to_text(self[1]))
#element.set_text(
- # _ast.actions_text[-1],
+ # _xast.actions_text[-1],
# 0,
# 'fprintf(stderr, "%d >>>%s<<< {0:s}\\n", yy_start, yytext);\n{1:s}'.format(
# text.replace('\\', '\\\\').replace('"', '\\"').replace('%', '%%'),
- # element.get_text(_ast.actions_text[-1], 0)
+ # element.get_text(_xast.actions_text[-1], 0)
# )
#)
- _ast.flex_rules.append(self)
- self.group_index = _ast.n_groups
- _ast.n_groups += len(self.groups0) + 1 + len(self.groups1)
+ _xast.flex_rules.append(self)
+ self.group_index = _xast.n_groups
+ _xast.n_groups += len(self.groups0) + 1 + len(self.groups1)
return continued_action
# GENERATE ELEMENT() BEGIN
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
name_to_start_condition[i.get_text()]
)
return self[1].post_process(
- _ast,
+ _xast,
section,
name_to_start_condition,
all_start_conditions,
self.last_column = last_column
# GENERATE SECTION1 BEGIN
-import ast
+import xast
import regex
import state
children = [
element.concatenate(
yy_element_stack[-6:-3],
- ast.AST.Section2
+ xast.AST.Section2
)
]
)
children = [
element.concatenate(
yy_element_stack[-9:-5],
- ast.AST.Section1
+ xast.AST.Section1
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.StartConditions, exclusive = xcluflg
+ xast.AST.Section1.StartConditions, exclusive = xcluflg
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.Options
+ xast.AST.Section1.Options
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section3
+ xast.AST.Section3
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1.Options.OutFile
+ xast.AST.Section1.Options.OutFile
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1.Options.ExtraType
+ xast.AST.Section1.Options.ExtraType
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1.Options.Prefix
+ xast.AST.Section1.Options.Prefix
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1.Options.YYClass
+ xast.AST.Section1.Options.YYClass
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1.Options.HeaderFile
+ xast.AST.Section1.Options.HeaderFile
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1.Options.TablesFile
+ xast.AST.Section1.Options.TablesFile
)
]
)
children = [
element.concatenate(
yy_element_stack[-7:],
- ast.AST.Section2.Rule
+ xast.AST.Section2.Rule
)
]
)
children = [
element.concatenate(
yy_element_stack[-7:],
- ast.AST.Section2.CompoundRule
+ xast.AST.Section2.CompoundRule
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section2.Rule.FLexRule, bol = True
+ xast.AST.Section2.Rule.FLexRule, bol = True
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section2.Rule.FLexRule
+ xast.AST.Section2.Rule.FLexRule
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section2.Rule.EOFRule
+ xast.AST.Section2.Rule.EOFRule
)
]
)
children = [
element.concatenate(
yy_element_stack[-7:],
- ast.AST.Section2.StartConditions
+ xast.AST.Section2.StartConditions
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section2.StartConditions, wildcard = True
+ xast.AST.Section2.StartConditions, wildcard = True
)
]
)
children = [
element.concatenate(
yy_element_stack[len(yy_element_stack):],
- ast.AST.Section2.StartConditions
+ xast.AST.Section2.StartConditions
)
]
)
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
-import ast
+import xast
import numpy
import os
}
def generate_bison(
- _ast,
+ _xast,
_element,
home_dir,
skel_file,
out_file,
defines_file = None
):
- _lr1dfa = _ast.to_lr1().to_lalr1()
+ _lr1dfa = _xast.to_lr1().to_lalr1()
# generate translate table for terminal symbols
# this undoes yacc/bison's rather wasteful mapping of 0x00..0xff to literal
2, # '$undefined'
numpy.int16
)
- for i in _ast.symbols:
- if i._type == ast.AST.Symbol.TYPE_TERMINAL:
+ for i in _xast.symbols:
+ if i._type == xast.AST.Symbol.TYPE_TERMINAL:
for j in range(0, len(i.character_set), 2):
translate_terminals[
i.character_set[j]:i.character_set[j + 1]
-1,
numpy.int16
)
- for i in _ast.symbols:
- if i._type == ast.AST.Symbol.TYPE_NONTERMINAL:
+ for i in _xast.symbols:
+ if i._type == xast.AST.Symbol.TYPE_NONTERMINAL:
for j in range(0, len(i.character_set), 2):
translate_nonterminals[
i.character_set[j] - 1:i.character_set[j + 1] - 1
#define YYPURE {0:d}
/* GENERATE END */
'''.format(
- _ast[0].api_pure
+ _xast[0].api_pure
).replace('YY', type_prefix if is_header else 'YY').replace('yy', name_prefix if is_header else 'yy') # hack
)
elif line == '/* GENERATE TYPEPREFIX */\n':
[
'#define YY{0:s} {1:s}{2:s}\n'.format(
i,
- _ast[0].type_prefix,
+ _xast[0].type_prefix,
i
)
for i in (
['STYPE'] +
- (['LTYPE'] if _ast[0].locations else [])
+ (['LTYPE'] if _xast[0].locations else [])
)
]
)
)
- if _ast[0].type_prefix != 'YY' else
+ if _xast[0].type_prefix != 'YY' else
''
)
)
[
'#define yy{0:s} {1:s}{2:s}\n'.format(
i,
- _ast[0].name_prefix,
+ _xast[0].name_prefix,
i
)
for i in (
['parse', 'lex', 'error', 'debug', 'nerrs'] +
(
[]
- if _ast[0].api_pure else
+ if _xast[0].api_pure else
['lval', 'char'] +
- (['lloc'] if _ast[0].locations else [])
+ (['lloc'] if _xast[0].locations else [])
)
)
]
)
)
- if _ast[0].name_prefix != 'yy' else
+ if _xast[0].name_prefix != 'yy' else
''
)
)
''.join(
[
'{0:s}\n'.format(i.get_text())
- for i in _ast.top_text
+ for i in _xast.top_text
]
)
)
''.join(
[
'{0:s}\n'.format(i.get_text())
- for i in _ast.before_union_text
+ for i in _xast.before_union_text
]
)
)
#endif
/* GENERATE END */
'''.format(
- int(_ast[0].error_verbose)
+ int(_xast[0].error_verbose)
)
)
elif line == '/* GENERATE SECTION1REQUIRES */\n':
''.join(
[
'{0:s}\n'.format(i.get_text())
- for i in _ast.requires_text
+ for i in _xast.requires_text
]
)
)
#endif
/* GENERATE END */
'''.format(
- int(_ast[0].debug)
+ int(_xast[0].debug)
).replace('YY', type_prefix).replace('yy', name_prefix) # hack
)
elif line == '/* GENERATE TOKENSEQUAL */\n':
','.join(
[
'\n {0:s} = {1:d}'.format(i.name, i.character_set[0])
- for i in _ast.symbols[3:]
+ for i in _xast.symbols[3:]
if (
- i._type == ast.AST.Symbol.TYPE_TERMINAL and
+ i._type == xast.AST.Symbol.TYPE_TERMINAL and
len(i.name)
)
]
''.join(
[
'#define {0:s} {1:d}\n'.format(i.name, i.character_set[0])
- for i in _ast.symbols[3:]
+ for i in _xast.symbols[3:]
if (
- i._type == ast.AST.Symbol.TYPE_TERMINAL and
+ i._type == xast.AST.Symbol.TYPE_TERMINAL and
len(i.name)
)
]
''.join(
[
'{0:s}\n'.format(i.get_text())
- for i in _ast.union_text
+ for i in _xast.union_text
]
)
)
- if len(_ast.union_text) else
+ if len(_xast.union_text) else
'''typedef int YYSTYPE;
'''
).replace('YY', type_prefix).replace('yy', name_prefix) # hack
# define YYLTYPE_IS_TRIVIAL 1
#endif
'''
- if _ast[0].locations else
+ if _xast[0].locations else
''
).replace('YY', type_prefix).replace('yy', name_prefix) # hack
)
''.join(
[
'{0:s}\n'.format(i.get_text())
- for i in _ast.after_union_text
+ for i in _xast.after_union_text
]
)
)
'\\\\x{0:02x}'.format(i.character_set[0])
)
)
- for i in _ast.symbols
- if i._type == ast.AST.Symbol.TYPE_TERMINAL
+ for i in _xast.symbols
+ if i._type == xast.AST.Symbol.TYPE_TERMINAL
] +
[
'"{0:s}"'.format(i.name)
- for i in _ast.symbols
- if i._type == ast.AST.Symbol.TYPE_NONTERMINAL
+ for i in _xast.symbols
+ if i._type == xast.AST.Symbol.TYPE_NONTERMINAL
] +
['"$@{0:d}"'.format(i) for i in range(n_midrule_actions)] +
['YY_NULLPTR']
]
),
# YYERROR_VERBOSE (strangely the defined value is repeated)
- int(_ast[0].error_verbose),
+ int(_xast[0].error_verbose),
# yytname
','.join(
['\n {0:s}'.format(', '.join(i)) for i in yytname_lines]
''.join(
[
'{0:s}\n'.format(i.get_text())
- for i in _ast.initial_action_text
+ for i in _xast.initial_action_text
]
).replace('(yyval)', '(yylval').replace('(yyloc)', '(yylloc)') # hack
)
'''/* GENERATE SECTION3 BEGIN */
{0:s}/*GENERATE END */
'''.format(
- '' if len(_ast) < 3 else _ast[2].get_text()
+ '' if len(_xast) < 3 else _xast[2].get_text()
)
)
else:
generate(
skel_file,
out_file,
- _ast[0].type_prefix,
- _ast[0].type_prefix,
- _ast[0].name_prefix,
+ _xast[0].type_prefix,
+ _xast[0].type_prefix,
+ _xast[0].name_prefix,
False
)
if defines_file is not None:
skel_file[:-2] if skel_file[-2:] == '.c' else skel_file
),
defines_file,
- _ast[0].type_prefix,
- _ast[0].type_prefix,
- _ast[0].name_prefix,
+ _xast[0].type_prefix,
+ _xast[0].type_prefix,
+ _xast[0].name_prefix,
True
)
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
-import ast
+import xast
import os
import wrap_repr
-def ast_text_to_python(ast_text, indent):
- text = ast_text.get_text(True) # python
+def xast_text_to_python(xast_text, indent):
+ text = xast_text.get_text(True) # python
text_strip = text.strip()
if text_strip[:1] == '{' and text_strip[-1:] == '}':
text = text_strip[1:-1]
return ''.join(lines)
def generate_py(
- _ast,
+ _xast,
_element,
home_dir,
skel_file,
out_file,
defines_file = None
):
- _lr1dfa = _ast.to_lr1().to_lalr1()
+ _lr1dfa = _xast.to_lr1().to_lalr1()
assert _lr1dfa.eof_terminal == 0
actions = [i for _, i in _lr1dfa.productions]
#print('actions', actions)
'''.format(
''.join(
[
- ast_text_to_python(i, '')
- for i in _ast.requires_text
+ xast_text_to_python(i, '')
+ for i in _xast.requires_text
]
),
''.join(
[
- ast_text_to_python(i, '')
- for i in _ast.before_union_text
+ xast_text_to_python(i, '')
+ for i in _xast.before_union_text
]
),
''.join(
[
- ast_text_to_python(i, '')
- for i in _ast.after_union_text
+ xast_text_to_python(i, '')
+ for i in _xast.after_union_text
]
)
)
''.join(
[
'{0:s} = {1:d}\n'.format(i.name, i.character_set[0])
- for i in _ast.symbols[3:]
+ for i in _xast.symbols[3:]
if (
- i._type == ast.AST.Symbol.TYPE_TERMINAL and
+ i._type == xast.AST.Symbol.TYPE_TERMINAL and
len(i.name)
)
]
YYERROR_VERBOSE = {1:s}
{2:s}{3:s}{4:s}# GENERATE END
'''.format(
- 'True' if _ast[0].debug else 'False',
- 'True' if _ast[0].error_verbose else 'False',
+ 'True' if _xast[0].debug else 'False',
+ 'True' if _xast[0].error_verbose else 'False',
''.join(
[
'''def yy_action{0:d}():
str(pos1) if pos1 else '',
str(pos0) if pos0 else 'len(yy_element_stack)',
str(pos1) if pos1 else '',
- ast_text_to_python(factory_text, ' ')
+ xast_text_to_python(factory_text, ' ')
)
for pos0, pos1, factory_text in actions[i][1] # groups
]
(
' pass\n'
if actions[i][4] is None else # last_action
- ast_text_to_python(actions[i][4], ' ')
+ xast_text_to_python(actions[i][4], ' ')
)
)
for i in range(len(actions))
'''.format(
''.join(
[
- ast_text_to_python(i, ' ')
- for i in _ast.initial_action_text
+ xast_text_to_python(i, ' ')
+ for i in _xast.initial_action_text
]
).replace('(yyval)', '(yylval').replace('(yyloc)', '(yylloc)') # hack
)
'''# GENERATE SECTION3 BEGIN
{0:s}# GENERATE END
'''.format(
- '' if len(_ast) < 3 else ast_text_to_python(_ast[2], '')
+ '' if len(_xast) < 3 else xast_text_to_python(_xast[2], '')
)
)
else:
import sys
# GENERATE SECTION1 BEGIN
-import ast
+import xast
import state
import y_tab
token_start = y_tab.yylloc.start
BEGIN(SC_ESCAPED_CHARACTER)
markup_push(element.Element)
- markup_push(ast.AST.Char)
+ markup_push(xast.AST.Char)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action70():
global token_start
token_start = y_tab.yylloc.start
BEGIN(SC_ESCAPED_STRING)
markup_push(element.Element)
- markup_push(ast.AST.String)
+ markup_push(xast.AST.String)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action71():
global code_start
code_start = y_tab.yylloc.start
BEGIN(SC_PROLOGUE)
markup_push(element.Element)
- markup_push(ast.AST.Section1.Prologue)
+ markup_push(xast.AST.Section1.Prologue)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action72():
global nesting, code_start
BEGIN(SC_BRACED_CODE)
markup_push(element.Element)
# new way, includes braces, wrapped by <AST_Production_Action> later
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
markup_flush(len(yytext))
# old way
- #markup_push(ast.AST.BracedCode)
+ #markup_push(xast.AST.BracedCode)
#markup_flush(len(yytext))
- #markup_push(ast.AST.Text)
+ #markup_push(xast.AST.Text)
# to here
raise YYContinue()
def yy_action73():
code_start = y_tab.yylloc.start
BEGIN(SC_PREDICATE)
markup_push(element.Element)
- markup_push(ast.AST.BracedPredicate)
+ markup_push(xast.AST.BracedPredicate)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action74():
global nesting, code_start
BEGIN(SC_ELEMENT_GROUP)
markup_push(element.Element)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action75():
return ord(')')
token_start = y_tab.yylloc.start
BEGIN(SC_TAG)
markup_push(element.Element)
- markup_push(ast.AST.TagRef)
+ markup_push(xast.AST.TagRef)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
raise YYContinue()
def yy_action79():
global percent_percent_count
y_tab.yylloc.start = token_start
y_tab.yylval = gram_last_string
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.String
+ markup_pop() # xast.AST.String
markup_pop_token() # element.Element
return y_tab.STRING
raise YYContinue()
y_tab.yylval = ord(gram_last_string[0])
#del obstack_for_string[:]
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.Char
+ markup_pop() # xast.AST.Char
markup_pop_token() # element.Element
return y_tab.CHAR
raise YYContinue()
y_tab.yylval = gram_last_string
#del obstack_for_string[:]
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.TagRef
+ markup_pop() # xast.AST.TagRef
markup_pop_token() # element.Element
return y_tab.TAG
obstack_for_string.append(yytext)
BEGIN(INITIAL)
# new way, includes braces, wrapped by <AST_Production_Action> later
markup_flush(len(yytext))
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
# old way
- #markup_pop() # ast.AST.Text
+ #markup_pop() # xast.AST.Text
#markup_flush(len(yytext))
- #markup_pop() # ast.AST.BracedCode
+ #markup_pop() # xast.AST.BracedCode
# to here
markup_pop_token() # element.Element
return y_tab.BRACED_CODE
y_tab.yylloc.start = code_start
y_tab.yylval = gram_last_string
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.BracedPredicate
+ markup_pop() # xast.AST.BracedPredicate
markup_pop_token() # element.Element
return y_tab.BRACED_PREDICATE
else:
y_tab.yylval = gram_last_string
#del obstack_for_string[:]
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
markup_pop_token() # element.Element
return ord('(')
y_tab.yylloc.start = code_start
y_tab.yylval = gram_last_string
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.Section1.Prologue
+ markup_pop() # xast.AST.Section1.Prologue
markup_pop_token() # element.Element
return y_tab.PROLOGUE
raise YYContinue()
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.ID
+ xast.AST.ID
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = int(yy_groups[2], 8)
+ xast.AST.Text.Escape, character = int(yy_groups[2], 8)
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = int(yy_groups[2], 16)
+ xast.AST.Text.Escape, character = int(yy_groups[2], 16)
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = 7
+ xast.AST.Text.Escape, character = 7
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = 8
+ xast.AST.Text.Escape, character = 8
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = 12
+ xast.AST.Text.Escape, character = 12
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = 10
+ xast.AST.Text.Escape, character = 10
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = 13
+ xast.AST.Text.Escape, character = 13
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = 9
+ xast.AST.Text.Escape, character = 9
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = 11
+ xast.AST.Text.Escape, character = 11
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = ord(yy_groups[2])
+ xast.AST.Text.Escape, character = ord(yy_groups[2])
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
yy_group_stack[-1],
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.Escape, character = int(('' if yy_groups[3] is None else yy_groups[3]) + yy_groups[4], 16)
+ xast.AST.Text.Escape, character = int(('' if yy_groups[3] is None else yy_groups[3]) + yy_groups[4], 16)
)
yy_element_stack[-1].append(
(yy_group_stack[-1], yy_group_stack[-2], yy_element)
import sys
# GENERATE SECTION1 BEGIN
-import ast
+import xast
sc_context = -1
# GENERATE END
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.StackReference,
+ xast.AST.Text.StackReference,
tag_name = '' if yy_groups[2] is None else yy_groups[2][1:-1],
index = int(yy_groups[3])
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.StackLocation,
+ xast.AST.Text.StackLocation,
index = int(yy_groups[2])
)
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.ValueReference,
+ xast.AST.Text.ValueReference,
tag_name = '' if yy_groups[2] is None else yy_groups[2][1:-1]
)
yy_group_stack[-2],
yy_element_stack.pop(),
- ast.AST.Text.ValueLocation
+ xast.AST.Text.ValueLocation
)
yy_element_stack[-1].append(
assert numpy.all(action_table != 0)
action_table[action_table == 1] = len(self.states) << 1
action_table[action_table == -1] = 0
- mask = (action_table & 1).astype(numpy.bool)
+ mask = (action_table & 1).astype(numpy.bool)
action_table >>= 1
action_table[mask] = -action_table[mask]
assert numpy.all(goto_table != 0)
-1,
dtype = numpy.int16
)[numpy.newaxis, numpy.newaxis, :]
- ).astype(numpy.int16),
+ ).astype(numpy.int16),
1
)
],
len(self.states),
dtype = numpy.int16
)[numpy.newaxis, numpy.newaxis, :]
- ).astype(numpy.int16),
+ ).astype(numpy.int16),
1
)
],
%code requires
{
- import ast
+ import xast
import element
import lex_yy_code
import state
%%
input
- : %space (?E{ast.AST.Section1}prologue_declarations) "%%" %space (?E{ast.AST.Section2}grammar) epilogue.opt
+ : %space (?E{xast.AST.Section1}prologue_declarations) "%%" %space (?E{xast.AST.Section2}grammar) epilogue.opt
;
/*------------------------------------.
| "%{...%}"
| %space (?E{
(
- ast.AST.Section1.PureParser
+ xast.AST.Section1.PureParser
if $1 == 'api.pure' else
- ast.AST.Section1.Locations
+ xast.AST.Section1.Locations
if $1 == 'locations' else
- ast.AST.Section1.Debug
+ xast.AST.Section1.Debug
if $1 == 'parse.trace' else
element.Element
)
}"%<flag>")
- | %space (?E{ast.AST.Section1.Define}"%define" variable value)
- | %space (?E{ast.AST.Section1.Defines}"%defines")
- | %space (?E{ast.AST.Section1.Defines}"%defines" STRING)
- | %space (?E{ast.AST.Section1.ErrorVerbose}"%error-verbose")
- | %space (?E{ast.AST.Section1.Expect, value = $2}"%expect" INT)
- | %space (?E{ast.AST.Section1.ExpectRR, value = $2}"%expect-rr" INT)
- | %space (?E{ast.AST.Section1.FilePrefix}"%file-prefix" STRING)
- | %space (?E{ast.AST.Section1.GLRParser}"%glr-parser")
- | %space (?E{ast.AST.Section1.InitialAction}"%initial-action" "{...}")
+ | %space (?E{xast.AST.Section1.Define}"%define" variable value)
+ | %space (?E{xast.AST.Section1.Defines}"%defines")
+ | %space (?E{xast.AST.Section1.Defines}"%defines" STRING)
+ | %space (?E{xast.AST.Section1.ErrorVerbose}"%error-verbose")
+ | %space (?E{xast.AST.Section1.Expect, value = $2}"%expect" INT)
+ | %space (?E{xast.AST.Section1.ExpectRR, value = $2}"%expect-rr" INT)
+ | %space (?E{xast.AST.Section1.FilePrefix}"%file-prefix" STRING)
+ | %space (?E{xast.AST.Section1.GLRParser}"%glr-parser")
+ | %space (?E{xast.AST.Section1.InitialAction}"%initial-action" "{...}")
{
lex_yy_code.yyin = None
lex_yy_code.yy_buffer_stack = [lex_yy_code.YYBufferState()]
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput($2)
lex_yy_code.sc_context = lex_yy_code.SC_SYMBOL_ACTION # CODE_PROPS_SYMBOL_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1.InitialAction)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1.InitialAction)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
}
- | %space (?E{ast.AST.Section1.Language}"%language" STRING)
- | %space (?E{ast.AST.Section1.NamePrefix}"%name-prefix" STRING)
- | %space (?E{ast.AST.Section1.Lines, value = False}"%no-lines")
- | %space (?E{ast.AST.Section1.NonDeterministicParser}"%nondeterministic-parser")
- | %space (?E{ast.AST.Section1.Output}"%output" STRING)
- | %space (?E{ast.AST.Section1.Param}"%param" params)
- | %space (?E{ast.AST.Section1.Require}"%require" STRING)
- | %space (?E{ast.AST.Section1.Skeleton}"%skeleton" STRING)
- | %space (?E{ast.AST.Section1.TokenTable}"%token-table")
- | %space (?E{ast.AST.Section1.Verbose}"%verbose")
- | %space (?E{ast.AST.Section1.YACC}"%yacc")
+ | %space (?E{xast.AST.Section1.Language}"%language" STRING)
+ | %space (?E{xast.AST.Section1.NamePrefix}"%name-prefix" STRING)
+ | %space (?E{xast.AST.Section1.Lines, value = False}"%no-lines")
+ | %space (?E{xast.AST.Section1.NonDeterministicParser}"%nondeterministic-parser")
+ | %space (?E{xast.AST.Section1.Output}"%output" STRING)
+ | %space (?E{xast.AST.Section1.Param}"%param" params)
+ | %space (?E{xast.AST.Section1.Require}"%require" STRING)
+ | %space (?E{xast.AST.Section1.Skeleton}"%skeleton" STRING)
+ | %space (?E{xast.AST.Section1.TokenTable}"%token-table")
+ | %space (?E{xast.AST.Section1.Verbose}"%verbose")
+ | %space (?E{xast.AST.Section1.YACC}"%yacc")
| /*FIXME: Err? What is this horror doing here? */ ";"
;
: precedence_declaration
| symbol_declaration
/* ) ) should be )) */
- | %space (?E{ast.AST.Section1Or2.Start}"%start" %space (?E{ast.AST.SymbolRef}symbol) )
- | %space (?E{ast.AST.Section1Or2.CodeProps, _type = $1}code_props_type "{...}" generic_symlist)
+ | %space (?E{xast.AST.Section1Or2.Start}"%start" %space (?E{xast.AST.SymbolRef}symbol) )
+ | %space (?E{xast.AST.Section1Or2.CodeProps, _type = $1}code_props_type "{...}" generic_symlist)
{
lex_yy_code.yyin = None
lex_yy_code.yy_buffer_stack = [lex_yy_code.YYBufferState()]
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput($2)
lex_yy_code.sc_context = lex_yy_code.SC_SYMBOL_ACTION # CODE_PROPS_SYMBOL_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.CodeProps)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.CodeProps)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
}
- | %space (?E{ast.AST.Section1Or2.DefaultPrec, value = True}"%default-prec")
- | %space (?E{ast.AST.Section1Or2.DefaultPrec}"%no-default-prec")
+ | %space (?E{xast.AST.Section1Or2.DefaultPrec, value = True}"%default-prec")
+ | %space (?E{xast.AST.Section1Or2.DefaultPrec}"%no-default-prec")
/* the BracedCode should be removed altogether, put here for compatibility */
- | %space (?E{ast.AST.Section1Or2.Code}"%code" (?E{ast.AST.ID}) %space (?E{ast.AST.BracedCode}"{...}") )
+ | %space (?E{xast.AST.Section1Or2.Code}"%code" (?E{xast.AST.ID}) %space (?E{xast.AST.BracedCode}"{...}") )
{
# most egregious (leftover from when we used to run code scanner on it):
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.Code)
- assert isinstance(yy_element_stack[-1][0][1], ast.AST.BracedCode)
- assert isinstance(yy_element_stack[-1][0][1][0], ast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.Code)
+ assert isinstance(yy_element_stack[-1][0][1], xast.AST.BracedCode)
+ assert isinstance(yy_element_stack[-1][0][1][0], xast.AST.Text)
element.set_text(
yy_element_stack[-1][0][1][0],
0,
)
}
/* the BracedCode should be removed altogether, put here for compatibility */
- | %space (?E{ast.AST.Section1Or2.Code}"%code" ID %space (?E{ast.AST.BracedCode}"{...}") )
+ | %space (?E{xast.AST.Section1Or2.Code}"%code" ID %space (?E{xast.AST.BracedCode}"{...}") )
{
# most egregious (leftover from when we used to run code scanner on it):
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.Code)
- assert isinstance(yy_element_stack[-1][0][1], ast.AST.BracedCode)
- assert isinstance(yy_element_stack[-1][0][1][0], ast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.Code)
+ assert isinstance(yy_element_stack[-1][0][1], xast.AST.BracedCode)
+ assert isinstance(yy_element_stack[-1][0][1][0], xast.AST.Text)
element.set_text(
yy_element_stack[-1][0][1][0],
0,
%token PERCENT_UNION "%union";
union_name
- : (?E{ast.AST.ID}%empty)
+ : (?E{xast.AST.ID}%empty)
| ID
;
grammar_declaration
/* the BracedCode should be removed altogether, put here for compatibility */
- : %space (?E{ast.AST.Section1Or2.Union}"%union" union_name %space (?E{ast.AST.BracedCode}"{...}") )
+ : %space (?E{xast.AST.Section1Or2.Union}"%union" union_name %space (?E{xast.AST.BracedCode}"{...}") )
{
# most egregious (leftover from when we used to run code scanner on it):
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.Union)
- assert isinstance(yy_element_stack[-1][0][1], ast.AST.BracedCode)
- assert isinstance(yy_element_stack[-1][0][1][0], ast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.Union)
+ assert isinstance(yy_element_stack[-1][0][1], xast.AST.BracedCode)
+ assert isinstance(yy_element_stack[-1][0][1][0], xast.AST.Text)
element.set_text(
yy_element_stack[-1][0][1][0],
0,
;
symbol_declaration
- : %space (?E{ast.AST.Section1Or2.NTerm}"%nterm" symbol_defs.1)
- | %space (?E{ast.AST.Section1Or2.Token}"%token" symbol_defs.1)
- | %space (?E{ast.AST.Section1Or2.Type}"%type" TAG symbols.1)
+ : %space (?E{xast.AST.Section1Or2.NTerm}"%nterm" symbol_defs.1)
+ | %space (?E{xast.AST.Section1Or2.Token}"%token" symbol_defs.1)
+ | %space (?E{xast.AST.Section1Or2.Type}"%type" TAG symbols.1)
;
precedence_declaration
- : %space (?E{ast.AST.Section1Or2.Precedence, _type = ($1 & 3) - 1}precedence_declarator tag.opt symbols.prec)
+ : %space (?E{xast.AST.Section1Or2.Precedence, _type = ($1 & 3) - 1}precedence_declarator tag.opt symbols.prec)
;
precedence_declarator
;
symbol.prec
- : %space (?E{ast.AST.SymbolRef}symbol)
- | %space (?E{ast.AST.SymbolRef, user_token = $2}symbol INT)
+ : %space (?E{xast.AST.SymbolRef}symbol)
+ | %space (?E{xast.AST.SymbolRef, user_token = $2}symbol INT)
;
/* One or more symbols to be %typed. */
symbols.1
- : %space (?E{ast.AST.SymbolRef}symbol)
- | symbols.1 %space (?E{ast.AST.SymbolRef}symbol)
+ : %space (?E{xast.AST.SymbolRef}symbol)
+ | symbols.1 %space (?E{xast.AST.SymbolRef}symbol)
;
generic_symlist
;
generic_symlist_item
- : %space (?E{ast.AST.SymbolRef}symbol)
+ : %space (?E{xast.AST.SymbolRef}symbol)
| tag
;
/* One token definition. */
symbol_def
: TAG
- | %space (?E{ast.AST.SymbolRef}id)
- | %space (?E{ast.AST.SymbolRef, user_token = $2}id INT)
- | %space (?E{ast.AST.SymbolRef}id string_as_id)
- | %space (?E{ast.AST.SymbolRef, user_token = $2}id INT string_as_id)
+ | %space (?E{xast.AST.SymbolRef}id)
+ | %space (?E{xast.AST.SymbolRef, user_token = $2}id INT)
+ | %space (?E{xast.AST.SymbolRef}id string_as_id)
+ | %space (?E{xast.AST.SymbolRef, user_token = $2}id INT string_as_id)
;
/* One or more symbol definitions. */
;
rules
- : %space (?E{ast.AST.Section2.Rules}(?E{ast.AST.SymbolRef}id_colon) named_ref.opt rhses.1)
+ : %space (?E{xast.AST.Section2.Rules}(?E{xast.AST.SymbolRef}id_colon) named_ref.opt rhses.1)
;
rhses.1
- : %space (?E{ast.AST.Production}rhs)
- | rhses.1 "|" %space (?E{ast.AST.Production}rhs)
+ : %space (?E{xast.AST.Production}rhs)
+ | rhses.1 "|" %space (?E{xast.AST.Production}rhs)
| rhses.1 ";"
;
yy_element_stack[-1] = lex_yy.yy_element_space
lex_yy.yy_element_space = element.Element()
}
- | rhs %space (?E{ast.AST.Production.SymbolRef}(?E{ast.AST.SymbolRef}symbol) named_ref.opt)
- | rhs %space (?E{ast.AST.Production.Action}"{...}" named_ref.opt)
+ | rhs %space (?E{xast.AST.Production.SymbolRef}(?E{xast.AST.SymbolRef}symbol) named_ref.opt)
+ | rhs %space (?E{xast.AST.Production.Action}"{...}" named_ref.opt)
{
lex_yy_code.yyin = None
lex_yy_code.yy_buffer_stack = [lex_yy_code.YYBufferState()]
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput($2)
lex_yy_code.sc_context = lex_yy_code.SC_RULE_ACTION # CODE_PROPS_RULE_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Production.Action)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Production.Action)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
{
#grammar_current_rule_action_append($2, @2, None, True)
}
- | rhs %space (?E{ast.AST.Production.Empty}"%empty")
+ | rhs %space (?E{xast.AST.Production.Empty}"%empty")
/* ) ) should be )) */
- | rhs %space (?E{ast.AST.Production.Prec}"%prec" %space (?E{ast.AST.SymbolRef}symbol) )
- | rhs %space (?E{ast.AST.Production.DPrec, value = $3}"%dprec" INT)
- | rhs %space (?E{ast.AST.Production.Merge}"%merge" TAG)
+ | rhs %space (?E{xast.AST.Production.Prec}"%prec" %space (?E{xast.AST.SymbolRef}symbol) )
+ | rhs %space (?E{xast.AST.Production.DPrec, value = $3}"%dprec" INT)
+ | rhs %space (?E{xast.AST.Production.Merge}"%merge" TAG)
/* Nick extra rules for element groups */
- | rhs %space (?E{ast.AST.Production.GroupElement}'(' rhs ')')
+ | rhs %space (?E{xast.AST.Production.GroupElement}'(' rhs ')')
{
lex_yy_code.yyin = None
lex_yy_code.yy_buffer_stack = [lex_yy_code.YYBufferState()]
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput($2[4:-1])
lex_yy_code.sc_context = lex_yy_code.SC_RULE_ACTION # CODE_PROPS_RULE_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Production.GroupElement)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Production.GroupElement)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
}
/* Nick added %space */
- | rhs %space (?E{ast.AST.Production.Space}"%space")
+ | rhs %space (?E{xast.AST.Production.Space}"%space")
;
named_ref.opt
epilogue.opt
: %empty
- | "%%" (?E{ast.AST.Section3}EPILOGUE %space)
+ | "%%" (?E{xast.AST.Section3}EPILOGUE %space)
;
%%
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
-import ast
+import xast
import element
import generate_bison
import generate_py
with open(in_file) as fin:
if in_file[-4:] == '.xml':
- _ast = element.deserialize(fin, ast.factory)
+ _xast = element.deserialize(fin, xast.factory)
else:
import lex_yy
import state
import y_tab
state.infilename = in_file
lex_yy.yyin = fin
- _ast = y_tab.yyparse(ast.AST)
-#element.serialize(_ast, 'a.xml', 'utf-8')
-#_ast = element.deserialize('a.xml', ast.factory, 'utf-8')
-_ast.post_process()
-#element.serialize(_ast, 'b.xml', 'utf-8')
-#_ast = element.deserialize('b.xml', ast.factory, 'utf-8')
+ _xast = y_tab.yyparse(xast.AST)
+#element.serialize(_xast, 'a.xml', 'utf-8')
+#_xast = element.deserialize('a.xml', xast.factory, 'utf-8')
+_xast.post_process()
+#element.serialize(_xast, 'b.xml', 'utf-8')
+#_xast = element.deserialize('b.xml', xast.factory, 'utf-8')
(generate_py.generate_py if python else generate_bison.generate_bison)(
- _ast,
+ _xast,
_element,
home_dir,
skel_file,
%option debug nodefault noinput nounput noyywrap never-interactive
%{
- import ast
+ import xast
sc_context = -1
%}
<SC_RULE_ACTION>
{
(?E{
- ast.AST.Text.StackReference,
+ xast.AST.Text.StackReference,
tag_name = '' if yy_groups[2] is None else yy_groups[2][1:-1],
index = int(yy_groups[3])
}"$"("<"{tag}">")?(-?[0-9]+|{id}|"["{id}"]"))
(?E{
- ast.AST.Text.StackLocation,
+ xast.AST.Text.StackLocation,
index = int(yy_groups[2])
}"@"(-?[0-9]+|{id}|"["{id}"]"))
}
<SC_RULE_ACTION,SC_SYMBOL_ACTION>
{
(?E{
- ast.AST.Text.ValueReference,
+ xast.AST.Text.ValueReference,
tag_name = '' if yy_groups[2] is None else yy_groups[2][1:-1]
}"$"("<"{tag}">")?"$")
(?E{
- ast.AST.Text.ValueLocation
+ xast.AST.Text.ValueLocation
}"@$")
}
%option debug nodefault noinput noyywrap never-interactive
%{
- import ast
+ import xast
import state
import y_tab
"|" return y_tab.PIPE
";" return y_tab.SEMICOLON
- (?E{ast.AST.ID}{id}) {
+ (?E{xast.AST.ID}{id}) {
global id_loc, bracketed_id_str
y_tab.yylval = yytext
id_loc = y_tab.yylloc
token_start = y_tab.yylloc.start
BEGIN(SC_ESCAPED_CHARACTER)
markup_push(element.Element)
- markup_push(ast.AST.Char)
+ markup_push(xast.AST.Char)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
/* Strings. */
token_start = y_tab.yylloc.start
BEGIN(SC_ESCAPED_STRING)
markup_push(element.Element)
- markup_push(ast.AST.String)
+ markup_push(xast.AST.String)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
/* Prologue. */
code_start = y_tab.yylloc.start
BEGIN(SC_PROLOGUE)
markup_push(element.Element)
- markup_push(ast.AST.Section1.Prologue)
+ markup_push(xast.AST.Section1.Prologue)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
/* Code in between braces. */
BEGIN(SC_BRACED_CODE)
markup_push(element.Element)
# new way, includes braces, wrapped by <AST_Production_Action> later
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
markup_flush(len(yytext))
# old way
- #markup_push(ast.AST.BracedCode)
+ #markup_push(xast.AST.BracedCode)
#markup_flush(len(yytext))
- #markup_push(ast.AST.Text)
+ #markup_push(xast.AST.Text)
# to here
}
code_start = y_tab.yylloc.start
BEGIN(SC_PREDICATE)
markup_push(element.Element)
- markup_push(ast.AST.BracedPredicate)
+ markup_push(xast.AST.BracedPredicate)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
/* Nick extra rules for element groups */
BEGIN(SC_ELEMENT_GROUP)
markup_push(element.Element)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
")" return ord(')')
/* A type. */
- "(?E{ast.AST.TagRef}<(?E{ast.AST.Text}*)>)" {
+ "(?E{xast.AST.TagRef}<(?E{xast.AST.Text}*)>)" {
return y_tab.TAG_ANY
}
- "(?E{ast.AST.TagRef}<(?E{ast.AST.Text})>)" {
+ "(?E{xast.AST.TagRef}<(?E{xast.AST.Text})>)" {
return y_tab.TAG_NONE
}
"<" {
token_start = y_tab.yylloc.start
BEGIN(SC_TAG)
markup_push(element.Element)
- markup_push(ast.AST.TagRef)
+ markup_push(xast.AST.TagRef)
markup_flush(len(yytext))
- markup_push(ast.AST.Text)
+ markup_push(xast.AST.Text)
}
"%%" {
y_tab.yylloc.start = token_start
y_tab.yylval = gram_last_string
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.String
+ markup_pop() # xast.AST.String
markup_pop_token() # element.Element
return y_tab.STRING
}
y_tab.yylval = ord(gram_last_string[0])
#del obstack_for_string[:]
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.Char
+ markup_pop() # xast.AST.Char
markup_pop_token() # element.Element
return y_tab.CHAR
}
y_tab.yylval = gram_last_string
#del obstack_for_string[:]
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.TagRef
+ markup_pop() # xast.AST.TagRef
markup_pop_token() # element.Element
return y_tab.TAG
obstack_for_string.append(yytext)
<SC_ESCAPED_STRING,SC_ESCAPED_CHARACTER>
{
- (?E{ast.AST.Text.Escape, character = int(yy_groups[2], 8)}\\([0-7]{1,3})) {
+ (?E{xast.AST.Text.Escape, character = int(yy_groups[2], 8)}\\([0-7]{1,3})) {
c = int(yytext + 1, 8)
if not c or 0x7f * 2 + 1 < c:
state.complain(y_tab.yylloc, state.complaint, 'invalid number after \\-escape: {0:s}'.format(yytext[1:]))
obstack_for_string.append(chr(c))
}
- (?E{ast.AST.Text.Escape, character = int(yy_groups[2], 16)}\\x([0-9abcdefABCDEF]+)) {
+ (?E{xast.AST.Text.Escape, character = int(yy_groups[2], 16)}\\x([0-9abcdefABCDEF]+)) {
c = int(yytext + 2, 16)
if not c or 0x7f * 2 + 1 < c:
state.complain(y_tab.yylloc, state.complaint, 'invalid number after \\-escape: {0:s}'.format(yytext[1:]))
obstack_for_string.append(chr(c))
}
- (?E{ast.AST.Text.Escape, character = 7}\\a) {
+ (?E{xast.AST.Text.Escape, character = 7}\\a) {
obstack_for_string.append('\a')
}
- (?E{ast.AST.Text.Escape, character = 8}\\b) {
+ (?E{xast.AST.Text.Escape, character = 8}\\b) {
obstack_for_string.append('\b')
}
- (?E{ast.AST.Text.Escape, character = 12}\\f) {
+ (?E{xast.AST.Text.Escape, character = 12}\\f) {
obstack_for_string.append('\f')
}
- (?E{ast.AST.Text.Escape, character = 10}\\n) {
+ (?E{xast.AST.Text.Escape, character = 10}\\n) {
obstack_for_string.append('\n')
}
- (?E{ast.AST.Text.Escape, character = 13}\\r) {
+ (?E{xast.AST.Text.Escape, character = 13}\\r) {
obstack_for_string.append('\r')
}
- (?E{ast.AST.Text.Escape, character = 9}\\t) {
+ (?E{xast.AST.Text.Escape, character = 9}\\t) {
obstack_for_string.append('\t')
}
- (?E{ast.AST.Text.Escape, character = 11}\\v) {
+ (?E{xast.AST.Text.Escape, character = 11}\\v) {
obstack_for_string.append('\v')
}
/* \\[\"\'?\\] would be shorter, but it confuses xgettext. */
- (?E{ast.AST.Text.Escape, character = ord(yy_groups[2])}\\("\""|"'"|"?"|"\\")) {
+ (?E{xast.AST.Text.Escape, character = ord(yy_groups[2])}\\("\""|"'"|"?"|"\\")) {
obstack_for_string.append(yytext[1])
}
- (?E{ast.AST.Text.Escape, character = int(('' if yy_groups[3] is None else yy_groups[3]) + yy_groups[4], 16)}\\(u|U([0-9abcdefABCDEF]{4}))([0-9abcdefABCDEF]{4})) {
+ (?E{xast.AST.Text.Escape, character = int(('' if yy_groups[3] is None else yy_groups[3]) + yy_groups[4], 16)}\\(u|U([0-9abcdefABCDEF]{4}))([0-9abcdefABCDEF]{4})) {
c = convert_ucn_to_byte(yytext)
if c <= 0:
state.complain(y_tab.yylloc, state.complaint, 'invalid number after \\-escape: {0:s}'.format(yytext[1:]))
BEGIN(INITIAL)
# new way, includes braces, wrapped by <AST_Production_Action> later
markup_flush(len(yytext))
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
# old way
- #markup_pop() # ast.AST.Text
+ #markup_pop() # xast.AST.Text
#markup_flush(len(yytext))
- #markup_pop() # ast.AST.BracedCode
+ #markup_pop() # xast.AST.BracedCode
# to here
markup_pop_token() # element.Element
return y_tab.BRACED_CODE
y_tab.yylloc.start = code_start
y_tab.yylval = gram_last_string
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.BracedPredicate
+ markup_pop() # xast.AST.BracedPredicate
markup_pop_token() # element.Element
return y_tab.BRACED_PREDICATE
else:
y_tab.yylval = gram_last_string
#del obstack_for_string[:]
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
markup_pop_token() # element.Element
return ord('(')
y_tab.yylloc.start = code_start
y_tab.yylval = gram_last_string
BEGIN(INITIAL)
- markup_pop() # ast.AST.Text
+ markup_pop() # xast.AST.Text
markup_flush(len(yytext))
- markup_pop() # ast.AST.Section1.Prologue
+ markup_pop() # xast.AST.Section1.Prologue
markup_pop_token() # element.Element
return y_tab.PROLOGUE
}
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
production,
character_to_symbol,
raise NotImplementedError
def add_to_symbols(
self,
- _ast,
+ _xast,
production,
last_action,
_lr1,
return last_action
def add_to_groups(
self,
- _ast,
+ _xast,
production,
groups,
first_action,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
production,
character_to_symbol,
has_space,
last_grouped_pos
):
- _ast.n_productions += int(last_action) # midrule action production
+ _xast.n_productions += int(last_action) # midrule action production
production.n_symbols += int(last_action) # midrule action symbol
return True, has_space, last_grouped_pos
def add_to_symbols(
self,
- _ast,
+ _xast,
production,
last_action,
_lr1,
return self[0]
def add_to_groups(
self,
- _ast,
+ _xast,
production,
groups,
first_action,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
production,
character_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
production,
character_to_symbol,
):
assert production.precedence_terminal == -1
_, production.precedence_terminal = self[0].post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
production,
character_to_symbol,
has_space,
last_grouped_pos
):
- _ast.n_productions += int(last_action) # midrule action production
+ _xast.n_productions += int(last_action) # midrule action production
production.n_symbols += int(last_action) + 1 # midrule action symbol
_, self.symbol = self[0].post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
-1, # _tag
-1 # precedence
)
- if _ast.symbols[self.symbol]._type == AST.Symbol.TYPE_TERMINAL:
+ if _xast.symbols[self.symbol]._type == AST.Symbol.TYPE_TERMINAL:
production.last_terminal = self.symbol
self.has_space = has_space
return False, False, last_grouped_pos
def add_to_symbols(
self,
- _ast,
+ _xast,
production,
last_action,
_lr1,
)
)
symbols.append(
- (_ast.symbols[self.symbol].character_set, [])
- if _ast.symbols[self.symbol]._type == AST.Symbol.TYPE_TERMINAL else
- ([], _ast.symbols[self.symbol].character_set)
+ (_xast.symbols[self.symbol].character_set, [])
+ if _xast.symbols[self.symbol]._type == AST.Symbol.TYPE_TERMINAL else
+ ([], _xast.symbols[self.symbol].character_set)
)
tag_names.append(
''
- if _ast.symbols[self.symbol]._tag == -1 else
- _ast.tags[_ast.symbols[self.symbol]._tag].name
+ if _xast.symbols[self.symbol]._tag == -1 else
+ _xast.tags[_xast.symbols[self.symbol]._tag].name
)
return None
def add_to_groups(
self,
- _ast,
+ _xast,
production,
groups,
first_action,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
production,
character_to_symbol,
):
for i in self[1:]:
last_action, has_space, last_grouped_pos = i.post_process(
- _ast,
+ _xast,
section,
production,
character_to_symbol,
return last_action, has_space, last_grouped_pos
def add_to_symbols(
self,
- _ast,
+ _xast,
production,
last_action,
_lr1,
i.offset = -production.n_symbols
for i in self[1:]:
last_action = i.add_to_symbols(
- _ast,
+ _xast,
production,
last_action,
_lr1,
return last_action
def add_to_groups(
self,
- _ast,
+ _xast,
production,
groups,
first_action,
pos1 = pos
for i in self[:0:-1]:
first_action, pos, last_grouped_pos = i.add_to_groups(
- _ast,
+ _xast,
production,
groups,
first_action,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
production,
character_to_symbol,
return last_action, True, last_grouped_pos
def add_to_groups(
self,
- _ast,
+ _xast,
production,
groups,
first_action,
def post_process(
self,
- _ast,
+ _xast,
section,
lhs_nonterminal,
character_to_symbol,
last_grouped_pos = 0
for i in self:
last_action, has_space, last_grouped_pos = i.post_process(
- _ast,
+ _xast,
section,
self,
character_to_symbol,
#print('last_grouped_pos', last_grouped_pos)
#print('self.n_symbols', self.n_symbols)
- i = _ast.symbols[self.lhs_nonterminal]
- if len(i.character_set) and i.character_set[-1] == _ast.n_productions:
- i.character_set[-1] = _ast.n_productions + 1
+ i = _xast.symbols[self.lhs_nonterminal]
+ if len(i.character_set) and i.character_set[-1] == _xast.n_productions:
+ i.character_set[-1] = _xast.n_productions + 1
else:
- i.character_set.extend([_ast.n_productions, _ast.n_productions + 1])
- _ast.n_productions += 1
- _ast.productions.append(self)
+ i.character_set.extend([_xast.n_productions, _xast.n_productions + 1])
+ _xast.n_productions += 1
+ _xast.productions.append(self)
- def add_to_lr1(self, _ast, _lr1):
+ def add_to_lr1(self, _xast, _lr1):
last_action = None
symbols = []
tag_names = []
for i in self:
last_action = i.add_to_symbols(
- _ast,
+ _xast,
self,
last_action,
_lr1,
):
i.tag_name = (
''
- if _ast.symbols[self.lhs_nonterminal]._tag == -1 else
- _ast.tags[_ast.symbols[self.lhs_nonterminal]._tag].name
+ if _xast.symbols[self.lhs_nonterminal]._tag == -1 else
+ _xast.tags[_xast.symbols[self.lhs_nonterminal]._tag].name
)
# go backwards collecting negative indices of element group start/end
last_grouped_pos = pos # says last position where a group has started
for i in self[::-1]:
first_action, pos, last_grouped_pos = i.add_to_groups(
- _ast,
+ _xast,
self,
groups,
first_action,
)
precedence = (
- _ast.symbols[self.precedence_terminal].precedence
+ _xast.symbols[self.precedence_terminal].precedence
if self.precedence_terminal != -1 else
- _ast.symbols[self.last_terminal].precedence
+ _xast.symbols[self.last_terminal].precedence
if self.last_terminal != -1 else
-1
)
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
name = self[0].get_text()
if len(name) == 0: # do the same as Section1.Prologue
(
- _ast.before_union_text
- if len(_ast.union_text) == 0 else
- _ast.after_union_text
+ _xast.before_union_text
+ if len(_xast.union_text) == 0 else
+ _xast.after_union_text
).append(self[1][0])
elif name == 'top':
- _ast.top_text.append(self[1][0])
+ _xast.top_text.append(self[1][0])
elif name == 'requires':
- _ast.requires_text.append(self[1][0])
+ _xast.requires_text.append(self[1][0])
else:
assert False
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
):
for i in self[1:]:
_, j = i.post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
-1, # _tag
-1 # precedence
)
- tag_or_symbol = _ast.symbols[j] if j >= 0 else _ast.tags[~j]
+ tag_or_symbol = _xast.symbols[j] if j >= 0 else _xast.tags[~j]
assert tag_or_symbol.code_props[self._type] is None
tag_or_symbol.code_props[self._type] = self[0]
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
_tag = -1
for i in self:
_tag, _ = i.post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
_tag = -1
for i in self:
_tag, _ = i.post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
name_to_tag,
AST.Symbol.TYPE_TERMINAL, # _type
_tag,
- len(_ast.associativities) # precedence
+ len(_xast.associativities) # precedence
)
- _ast.associativities.append(self._type)
+ _xast.associativities.append(self._type)
class Start(Item):
# GENERATE ELEMENT() BEGIN
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
string_to_symbol,
name_to_tag
):
- _, _ast.start_nonterminal = self[0].post_process(
- _ast,
+ _, _xast.start_nonterminal = self[0].post_process(
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
_tag = -1
for i in self:
_tag, _ = i.post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
_tag = -1
for i in self:
_tag, _ = i.post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
):
name = self[0].get_text()
if len(name):
- assert len(_ast.union_name) == 0
- _ast.union_name = name
- _ast.union_text.append(self[1][0])
+ assert len(_xast.union_name) == 0
+ _xast.union_name = name
+ _xast.union_text.append(self[1][0])
# GENERATE ELEMENT() BEGIN
def __init__(
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
character_to_symbol,
name_to_symbol,
string_to_symbol,
):
for i in self:
i.post_process(
- _ast,
+ _xast,
self,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
name_to_tag
):
assert isinstance(self[0], AST.Text) # temporary
- _ast.initial_action_text.append(self[0])
+ _xast.initial_action_text.append(self[0])
class Language(Item):
# GENERATE ELEMENT() BEGIN
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
name_to_tag
):
(
- _ast.before_union_text
- if len(_ast.union_text) == 0 else
- _ast.after_union_text
+ _xast.before_union_text
+ if len(_xast.union_text) == 0 else
+ _xast.after_union_text
).append(self[0])
class PureParser(Item):
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
character_to_symbol,
name_to_symbol,
string_to_symbol,
self.verbose = False
AST.Section1Or2.post_process(
self,
- _ast,
+ _xast,
character_to_symbol,
name_to_symbol,
string_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
name_to_tag
):
_, lhs_nonterminal = self[0].post_process(
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
-1, # _tag
-1 # precedence
)
- if _ast.first_nonterminal == -1:
- _ast.first_nonterminal = lhs_nonterminal
+ if _xast.first_nonterminal == -1:
+ _xast.first_nonterminal = lhs_nonterminal
for i in self[1:]:
i.post_process(
- _ast,
+ _xast,
section,
lhs_nonterminal,
character_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
if name in name_to_tag:
_tag = name_to_tag[name]
else:
- _tag = len(_ast.tags)
+ _tag = len(_xast.tags)
name_to_tag[name] = _tag
- _ast.tags.append(
+ _xast.tags.append(
AST.Tag(name = name, code_props = [None, None])
)
return (_tag, ~_tag)
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
section,
character_to_symbol,
name_to_symbol,
if character in character_to_symbol:
symbol = character_to_symbol[character]
else:
- symbol = len(_ast.symbols)
+ symbol = len(_xast.symbols)
character_to_symbol[character] = symbol
- _ast.symbols.append(
+ _xast.symbols.append(
AST.Symbol(
code_props = [None, None],
_type = AST.Symbol.TYPE_TERMINAL,
# it have internal token number 0 (and apparently they don't)
symbol = 0
name_to_symbol[name] = symbol
- assert _ast.symbols[symbol].name == '$end'
- _ast.symbols[symbol] = AST.Symbol(
+ assert _xast.symbols[symbol].name == '$end'
+ _xast.symbols[symbol] = AST.Symbol(
name = name,
code_props = [None, None],
character_set = []
)
else:
- symbol = len(_ast.symbols)
+ symbol = len(_xast.symbols)
name_to_symbol[name] = symbol
- _ast.symbols.append(
+ _xast.symbols.append(
AST.Symbol(
name = name,
code_props = [None, None],
# insert information from the SymbolRef element
if self.user_token != -1:
- assert len(_ast.symbols[symbol].character_set) == 0
- _ast.symbols[symbol].character_set = (
+ assert len(_xast.symbols[symbol].character_set) == 0
+ _xast.symbols[symbol].character_set = (
[self.user_token, self.user_token + 1]
)
if len(self) >= 2:
# insert information from the calling contexxt
if _type != -1:
- if _ast.symbols[symbol]._type == -1:
- _ast.symbols[symbol]._type = _type
+ if _xast.symbols[symbol]._type == -1:
+ _xast.symbols[symbol]._type = _type
else:
- assert _ast.symbols[symbol]._type == _type
+ assert _xast.symbols[symbol]._type == _type
if _tag != -1:
- assert _ast.symbols[symbol]._tag == -1
- _ast.symbols[symbol]._tag = _tag
+ assert _xast.symbols[symbol]._tag == -1
+ _xast.symbols[symbol]._tag = _tag
if precedence != -1:
- assert _ast.symbols[symbol].precedence == -1
- _ast.symbols[symbol].precedence = precedence
+ assert _xast.symbols[symbol].precedence == -1
+ _xast.symbols[symbol].precedence = precedence
return (_tag, symbol)
# GENERATE END
def post_process(
self,
- _ast,
+ _xast,
character_to_symbol,
name_to_symbol,
string_to_symbol,
self.last_column = last_column
# GENERATE SECTION1 BEGIN
-import ast
+import xast
import element
import lex_yy_code
import state
children = [
element.concatenate(
yy_element_stack[-3:-2],
- ast.AST.Section2
+ xast.AST.Section2
)
]
)
children = [
element.concatenate(
yy_element_stack[-7:-6],
- ast.AST.Section1
+ xast.AST.Section1
)
]
)
element.concatenate(
yy_element_stack[-1:],
(
- ast.AST.Section1.PureParser
+ xast.AST.Section1.PureParser
if (yystack[-2][1]) == 'api.pure' else
- ast.AST.Section1.Locations
+ xast.AST.Section1.Locations
if (yystack[-2][1]) == 'locations' else
- ast.AST.Section1.Debug
+ xast.AST.Section1.Debug
if (yystack[-2][1]) == 'parse.trace' else
element.Element
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1.Define
+ xast.AST.Section1.Define
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.Defines
+ xast.AST.Section1.Defines
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.Defines
+ xast.AST.Section1.Defines
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.ErrorVerbose
+ xast.AST.Section1.ErrorVerbose
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.Expect, value = (yystack[-2][1])
+ xast.AST.Section1.Expect, value = (yystack[-2][1])
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.ExpectRR, value = (yystack[-2][1])
+ xast.AST.Section1.ExpectRR, value = (yystack[-2][1])
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.FilePrefix
+ xast.AST.Section1.FilePrefix
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.GLRParser
+ xast.AST.Section1.GLRParser
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.InitialAction
+ xast.AST.Section1.InitialAction
)
]
)
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput((yystack[-2][1]))
lex_yy_code.sc_context = lex_yy_code.SC_SYMBOL_ACTION # CODE_PROPS_SYMBOL_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1.InitialAction)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1.InitialAction)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.Language
+ xast.AST.Section1.Language
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.NamePrefix
+ xast.AST.Section1.NamePrefix
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.Lines, value = False
+ xast.AST.Section1.Lines, value = False
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.NonDeterministicParser
+ xast.AST.Section1.NonDeterministicParser
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.Output
+ xast.AST.Section1.Output
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.Param
+ xast.AST.Section1.Param
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.Require
+ xast.AST.Section1.Require
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1.Skeleton
+ xast.AST.Section1.Skeleton
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.TokenTable
+ xast.AST.Section1.TokenTable
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.Verbose
+ xast.AST.Section1.Verbose
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1.YACC
+ xast.AST.Section1.YACC
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1Or2.Start
+ xast.AST.Section1Or2.Start
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1Or2.CodeProps, _type = (yystack[-4][1])
+ xast.AST.Section1Or2.CodeProps, _type = (yystack[-4][1])
)
]
)
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput((yystack[-3][1]))
lex_yy_code.sc_context = lex_yy_code.SC_SYMBOL_ACTION # CODE_PROPS_SYMBOL_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.CodeProps)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.CodeProps)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1Or2.DefaultPrec, value = True
+ xast.AST.Section1Or2.DefaultPrec, value = True
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Section1Or2.DefaultPrec
+ xast.AST.Section1Or2.DefaultPrec
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.BracedCode
+ xast.AST.BracedCode
)
]
)
children = [
element.concatenate(
yy_element_stack[-2:-2],
- ast.AST.ID
+ xast.AST.ID
)
]
)
children = [
element.concatenate(
yy_element_stack[-4:],
- ast.AST.Section1Or2.Code
+ xast.AST.Section1Or2.Code
)
]
)
]
# most egregious (leftover from when we used to run code scanner on it):
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.Code)
- assert isinstance(yy_element_stack[-1][0][1], ast.AST.BracedCode)
- assert isinstance(yy_element_stack[-1][0][1][0], ast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.Code)
+ assert isinstance(yy_element_stack[-1][0][1], xast.AST.BracedCode)
+ assert isinstance(yy_element_stack[-1][0][1][0], xast.AST.Text)
element.set_text(
yy_element_stack[-1][0][1][0],
0,
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.BracedCode
+ xast.AST.BracedCode
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1Or2.Code
+ xast.AST.Section1Or2.Code
)
]
)
]
# most egregious (leftover from when we used to run code scanner on it):
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.Code)
- assert isinstance(yy_element_stack[-1][0][1], ast.AST.BracedCode)
- assert isinstance(yy_element_stack[-1][0][1][0], ast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.Code)
+ assert isinstance(yy_element_stack[-1][0][1], xast.AST.BracedCode)
+ assert isinstance(yy_element_stack[-1][0][1][0], xast.AST.Text)
element.set_text(
yy_element_stack[-1][0][1][0],
0,
children = [
element.concatenate(
yy_element_stack[len(yy_element_stack):],
- ast.AST.ID
+ xast.AST.ID
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.BracedCode
+ xast.AST.BracedCode
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1Or2.Union
+ xast.AST.Section1Or2.Union
)
]
)
]
# most egregious (leftover from when we used to run code scanner on it):
- assert isinstance(yy_element_stack[-1][0], ast.AST.Section1Or2.Union)
- assert isinstance(yy_element_stack[-1][0][1], ast.AST.BracedCode)
- assert isinstance(yy_element_stack[-1][0][1][0], ast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Section1Or2.Union)
+ assert isinstance(yy_element_stack[-1][0][1], xast.AST.BracedCode)
+ assert isinstance(yy_element_stack[-1][0][1][0], xast.AST.Text)
element.set_text(
yy_element_stack[-1][0][1][0],
0,
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1Or2.NTerm
+ xast.AST.Section1Or2.NTerm
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section1Or2.Token
+ xast.AST.Section1Or2.Token
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1Or2.Type
+ xast.AST.Section1Or2.Type
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section1Or2.Precedence, _type = ((yystack[-4][1]) & 3) - 1
+ xast.AST.Section1Or2.Precedence, _type = ((yystack[-4][1]) & 3) - 1
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.SymbolRef, user_token = (yystack[-2][1])
+ xast.AST.SymbolRef, user_token = (yystack[-2][1])
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.SymbolRef, user_token = (yystack[-2][1])
+ xast.AST.SymbolRef, user_token = (yystack[-2][1])
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.SymbolRef, user_token = (yystack[-3][1])
+ xast.AST.SymbolRef, user_token = (yystack[-3][1])
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:-4],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Section2.Rules
+ xast.AST.Section2.Rules
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Production
+ xast.AST.Production
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Production
+ xast.AST.Production
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:-2],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Production.SymbolRef
+ xast.AST.Production.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Production.Action
+ xast.AST.Production.Action
)
]
)
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput((yystack[-3][1]))
lex_yy_code.sc_context = lex_yy_code.SC_RULE_ACTION # CODE_PROPS_RULE_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Production.Action)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Production.Action)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Production.Empty
+ xast.AST.Production.Empty
)
]
)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.SymbolRef
+ xast.AST.SymbolRef
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Production.Prec
+ xast.AST.Production.Prec
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Production.DPrec, value = (yystack[-2][1])
+ xast.AST.Production.DPrec, value = (yystack[-2][1])
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Production.Merge
+ xast.AST.Production.Merge
)
]
)
children = [
element.concatenate(
yy_element_stack[-5:],
- ast.AST.Production.GroupElement
+ xast.AST.Production.GroupElement
)
]
)
lex_yy_code.yytext = '' # fool unput()
lex_yy_code.unput((yystack[-4][1])[4:-1])
lex_yy_code.sc_context = lex_yy_code.SC_RULE_ACTION # CODE_PROPS_RULE_ACTION
- lex_yy_code.yylex(ast.AST.Text)
- assert isinstance(yy_element_stack[-1][0], ast.AST.Production.GroupElement)
- assert isinstance(yy_element_stack[-1][0][0], ast.AST.Text)
+ lex_yy_code.yylex(xast.AST.Text)
+ assert isinstance(yy_element_stack[-1][0], xast.AST.Production.GroupElement)
+ assert isinstance(yy_element_stack[-1][0][0], xast.AST.Text)
tail = element.get_text(yy_element_stack[-1][0], 1)
yy_element_stack[-1][0][0] = lex_yy_code.yy_element_space
element.set_text(yy_element_stack[-1][0], 1, tail)
children = [
element.concatenate(
yy_element_stack[-1:],
- ast.AST.Production.Space
+ xast.AST.Production.Space
)
]
)
children = [
element.concatenate(
yy_element_stack[-3:],
- ast.AST.Section3
+ xast.AST.Section3
)
]
)
--- /dev/null
+#!/bin/sh
+
+# note that both ast and _ast are reserved module names in recent Python
+# so we will use x prefix for disambiguation instead of _ as we usually do
+
+for i in bootstrap_pilex bootstrap_piyacc
+do
+ sed -e 's/ast/xast/g; s/\([A-Za-z]\)xast/\1ast/g' -i $i/*.py $i/*.l $i/*.y
+ git mv $i/ast.py $i/xast.py
+done