4766ee256365752e655394b4f9fa08e27ea3aae8
1 # Based on GardenSnake - a parser generator demonstration program
2 # GardenSnake was released into the Public Domain by Andrew Dalke.
4 # Portions of this work are derived from Python's Grammar definition
5 # and may be covered under the Python copyright and license
7 # Andrew Dalke / Dalke Scientific Software, LLC
8 # 30 August 2006 / Cape Town, South Africa
10 # Modifications for inclusion in PLY distribution
11 from pprint
import pprint
12 from ply
import lex
, yacc
15 from soc
.decoder
.power_decoder
import create_pdecode
16 from soc
.decoder
.pseudo
.lexer
import IndentLexer
18 # I use the Python AST
19 #from compiler import ast
def Assign(left, right):
    # Build an AST assignment node for "left = right", dispatching on the
    # kind of assignment target (Name, Tuple or Subscript).
    # NOTE(review): several original source lines are missing from this
    # view (flagged below); the function is not runnable as shown.
    print("Assign", left, right)
    if isinstance(left, ast.Name):
        # Single assignment on left
        # XXX when doing IntClass, which will have an "eq" function,
        # this is how to access it
        # eq = ast.Attribute(left, "eq") # get eq fn
        # return ast.Call(eq, [right], []) # now call left.eq(right)
        return ast.Assign([ast.Name(left.id, ast.Store())], right)
    elif isinstance(left, ast.Tuple):
        # List of things - make sure they are Name nodes
        # NOTE(review): the initialisation of "names" (presumably
        # "names = []", original line 37) is missing from this view.
        for child in left.getChildren():
            if not isinstance(child, ast.Name):
                raise SyntaxError("that assignment not supported")
            names.append(child.name)
        ass_list = [ast.AssName(name, 'OP_ASSIGN') for name in names]
        return ast.Assign([ast.AssTuple(ass_list)], right)
    elif isinstance(left, ast.Subscript):
        return ast.Assign([left], right)
        # XXX HMMM probably not needed...
        # NOTE(review): everything below the return above is dead code;
        # the binding of "ls" (original line 47) is missing from this view.
        if isinstance(ls, ast.Slice):
            lower, upper, step = ls.lower, ls.upper, ls.step
            print("slice assign", lower, upper, step)
            # NOTE(review): the conditional selecting between these two
            # bindings (presumably "if step is None: ... else: ...",
            # original lines 51/53) is missing — as written the second
            # always wins.
            ls = (lower, upper, None)
            ls = (lower, upper, step)
        return ast.Call(ast.Name("selectassign"),
                        [left.value, ls, right], [])
    # unreachable target kind: the three branches above all return, so
    # falling through here means an unsupported assignment target.
    # NOTE(review): an "else:" (original lines 58-59) is missing from
    # this view; behaviour is equivalent since every branch returns.
    raise SyntaxError("Can't do that yet")
63 # I implemented INDENT / DEDENT generation as a post-processing filter
65 # The original lex token stream contains WS and NEWLINE characters.
66 # WS will only occur before any other tokens on a line.
68 # I have three filters. One tags tokens by adding two attributes.
69 # "must_indent" is True if the token must be indented from the
70 # previous code. The other is "at_line_start" which is True for WS
71 # and the first non-WS/non-NEWLINE on a line. It flags the check to
72 # see if the new line has changed indentation level.
75 # Not using Python's approach because Ply supports precedence
77 # comparison: expr (comp_op expr)*
78 # arith_expr: term (('+'|'-') term)*
79 # term: factor (('*'|'/'|'%'|'//') factor)*
80 # factor: ('+'|'-'|'~') factor | power
81 # comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not'
def make_le_compare(arg):
    """Build an ast.Compare node for "left <= right".

    arg is a (left, right) pair of AST expression nodes, as passed by
    the binary_ops dispatch in p_comparison.
    """
    left, right = arg  # fix: left/right were never unpacked from arg
    return ast.Compare(left, [ast.LtE()], [right])
def make_ge_compare(arg):
    """Build an ast.Compare node for "left >= right".

    arg is a (left, right) pair of AST expression nodes, as passed by
    the binary_ops dispatch in p_comparison.
    """
    left, right = arg  # fix: left/right were never unpacked from arg
    return ast.Compare(left, [ast.GtE()], [right])
def make_lt_compare(arg):
    """Build an ast.Compare node for "left < right".

    arg is a (left, right) pair of AST expression nodes, as passed by
    the binary_ops dispatch in p_comparison.
    """
    left, right = arg  # fix: left/right were never unpacked from arg
    return ast.Compare(left, [ast.Lt()], [right])
def make_gt_compare(arg):
    """Build an ast.Compare node for "left > right".

    arg is a (left, right) pair of AST expression nodes, as passed by
    the binary_ops dispatch in p_comparison.
    """
    left, right = arg  # fix: left/right were never unpacked from arg
    return ast.Compare(left, [ast.Gt()], [right])
def make_eq_compare(arg):
    """Build an ast.Compare node for "left == right".

    arg is a (left, right) pair of AST expression nodes, as passed by
    the binary_ops dispatch in p_comparison (the pseudo-code "=" used
    in a comparison context).
    """
    left, right = arg  # fix: left/right were never unpacked from arg
    return ast.Compare(left, [ast.Eq()], [right])
115 "<=": make_le_compare
,
116 ">=": make_ge_compare
,
117 "<": make_lt_compare
,
118 ">": make_gt_compare
,
119 "=": make_eq_compare
,
def check_concat(node):  # checks if the comparison is already a concat
    """Flatten *node* for concatenation.

    Returns node.args when node is already a call to concat(...),
    otherwise wraps node in a one-element list, so that callers can
    simply "+" the results of two check_concat calls together.
    """
    print("check concat", node)
    if not isinstance(node, ast.Call):
        # not a call at all: single operand (fix: this early return was
        # missing, so node.func below raised AttributeError)
        return [node]
    print("func", node.func.id)
    if node.func.id != 'concat':
        # a call, but not to concat(): keep it as a single operand
        return [node]
    # already a concat(...): splice its arguments in directly
    return node.args
138 ########## Parser (tokens -> AST) ######
146 ("left", "EQ", "GT", "LT", "LE", "GE", "LTU", "GTU"),
149 ("left", "PLUS", "MINUS"),
150 ("left", "MULT", "DIV"),
# NOTE(review): fragment of PowerParser.__init__ — the method's def line
# and surrounding statements are missing from this view.
# Pre-seed the tracked general-purpose registers as "not yet set".
for rname in ['RA', 'RB', 'RC', 'RT', 'RS']:
    self.gprs[rname] = None
# registers read before being written, to be pre-initialised by caller
self.uninit_regs = []
162 # The grammar comments come from Python's Grammar/Grammar file
164 # NB: compound_stmt in single_input is followed by extra NEWLINE!
165 # file_input: (NEWLINE | stmt)* ENDMARKER
def p_file_input_end(self, p):
    """file_input_end : file_input ENDMARKER"""
    # Top-level rule: hand the accumulated statement list upward.
    # NOTE(review): this rule's body (presumably "p[0] = p[1]",
    # original lines 169+) is missing from this view.
def p_file_input(self, p):
    # Accumulate statements for the whole file, skipping blank NEWLINEs.
    # NOTE(review): the remaining grammar alternatives, the docstring's
    # closing quotes and most of the body are missing from this view —
    # the rule is not runnable as shown.
    """file_input : file_input NEWLINE
    if isinstance(p[len(p)-1], str):
        p[0] = []  # p == 2 --> only a blank line
188 # funcdef: [decorators] 'def' NAME parameters ':' suite
189 # ignoring decorators
def p_funcdef(self, p):
    "funcdef : DEF NAME parameters COLON suite"
    # Assemble the AST function definition from the parsed pieces:
    # the NAME token, the ast.arguments built by p_parameters, the
    # suite's statement list, and an empty decorator tuple.
    funcname = p[2]
    argspec = p[3]
    body = p[5]
    p[0] = ast.FunctionDef(funcname, argspec, body, ())
195 # parameters: '(' [varargslist] ')'
def p_parameters(self, p):
    """parameters : LPAR RPAR
    | LPAR varargslist RPAR"""
    # Build the ast.arguments node for a function's parameter list.
    # NOTE(review): the lines computing "args" (presumably empty list
    # for "()" vs p[2] for the varargslist case, original lines
    # 199-202) are missing from this view.
    p[0] = ast.arguments(args=args, vararg=None, kwarg=None, defaults=[])
205 # varargslist: (fpdef ['=' test] ',')* ('*' NAME [',' '**' NAME] |
def p_varargslist(self, p):
    # Comma-separated NAME parameter list.
    # NOTE(review): the remaining grammar alternatives, the docstring's
    # closing quotes and the whole body are missing from this view.
    """varargslist : varargslist COMMA NAME
217 # stmt: simple_stmt | compound_stmt
def p_stmt_simple(self, p):
    """stmt : simple_stmt"""
    # simple_stmt is a list
    # NOTE(review): the body (presumably "p[0] = p[1]") is missing
    # from this view.
def p_stmt_compound(self, p):
    """stmt : compound_stmt"""
    # NOTE(review): the body (presumably "p[0] = [p[1]]", wrapping the
    # single compound statement into a list) is missing from this view.
227 # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE
def p_simple_stmt(self, p):
    """simple_stmt : small_stmts NEWLINE
    | small_stmts SEMICOLON NEWLINE"""
    # NOTE(review): the rule body is missing from this view.
def p_small_stmts(self, p):
    # Accumulate semicolon-separated small statements into a list.
    # NOTE(review): the remaining alternative, the docstring's closing
    # quotes and the whole body are missing from this view.
    """small_stmts : small_stmts SEMICOLON small_stmt
241 # small_stmt: expr_stmt | print_stmt | del_stmt | pass_stmt | flow_stmt |
242 # import_stmt | global_stmt | exec_stmt | assert_stmt
def p_small_stmt(self, p):
    # Bare calls get wrapped in ast.Expr so they are valid statements.
    # NOTE(review): the remaining docstring alternatives, the
    # docstring's closing quotes and parts of the body are missing
    # from this view.
    """small_stmt : flow_stmt
    if isinstance(p[1], ast.Call):
        p[0] = ast.Expr(p[1])
252 # expr_stmt: testlist (augassign (yield_expr|testlist) |
253 # ('=' (yield_expr|testlist))*)
254 # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' |
255 # '<<=' | '>>=' | '**=' | '//=')
def p_expr_stmt(self, p):
    # Assignment (or bare expression) statement; also records which
    # GPRs are read uninitialised / written, for simulator setup.
    # NOTE(review): the docstring's other alternative(s) and closing
    # quotes, the len(p) branch selection, and the lines binding
    # "name" from p[1] (original lines 258/260/263-264/266/268) are
    # missing from this view — the rule is not runnable as shown.
    """expr_stmt : testlist ASSIGN testlist
    print("expr_stmt", p)
    # a list of expressions
    #p[0] = ast.Discard(p[1])
    if isinstance(p[1], ast.Name):
    elif isinstance(p[1], ast.Subscript):
        if name in self.gprs:
            # add to list of uninitialised
            self.uninit_regs.append(name)
    print("expr assign", name, p[1])
    if name in self.gprs:
        self.write_regs.append(name)  # add to list of regs to write
    p[0] = Assign(p[1], p[3])
def p_flow_stmt(self, p):
    "flow_stmt : return_stmt"
    # NOTE(review): the body (presumably "p[0] = p[1]") is missing
    # from this view.
281 # return_stmt: 'return' [testlist]
def p_return_stmt(self, p):
    "return_stmt : RETURN testlist"
    # Wrap the returned expression list in an ast.Return node.
    retval = p[2]
    p[0] = ast.Return(retval)
def p_compound_stmt(self, p):
    # NOTE(review): the remaining grammar alternatives (for/while/
    # funcdef/...), the docstring's closing quotes and the body are
    # missing from this view.
    """compound_stmt : if_stmt
def p_break_stmt(self, p):
    # NOTE(review): the docstring's closing quotes and the body are
    # missing from this view.
    """break_stmt : BREAK
def p_for_stmt(self, p):
    # Translate pseudo-code "for x = a to b:" into an ast.For over
    # range(a, b+1) — the end bound is inclusive in the pseudo-code.
    # NOTE(review): the docstring's remaining alternative/closing
    # quotes (original line 301) and the binding of "start"
    # (presumably "start = p[4]", original line 304) are missing from
    # this view.
    """for_stmt : FOR test EQ test TO test COLON suite
    # NOTE(review): this While is immediately overwritten below —
    # looks like dead code; confirm against the original.
    p[0] = ast.While(p[2], p[4], [])
    # auto-add-one (sigh) due to python range
    end = ast.BinOp(p[6], ast.Add(), ast.Constant(1))
    it = ast.Call(ast.Name("range"), [start, end], [])
    p[0] = ast.For(p[2], it, p[8], [])
def p_while_stmt(self, p):
    # DO WHILE loop, with an optional ELSE suite.
    # NOTE(review): the docstring's closing quotes and the if/else that
    # selects between the two assignments below (presumably on len(p),
    # original lines 312-313/315) are missing from this view — as
    # written the second assignment would always win.
    """while_stmt : DO WHILE test COLON suite ELSE COLON suite
    | DO WHILE test COLON suite
    p[0] = ast.While(p[3], p[5], [])
    p[0] = ast.While(p[3], p[5], p[8])
def p_if_stmt(self, p):
    """if_stmt : IF test COLON suite ELSE COLON if_stmt
               | IF test COLON suite ELSE COLON suite
               | IF test COLON suite
    """
    # Three shapes: an elif-chain (the else-branch is itself an
    # ast.If), a bare if with no else, and a plain if/else.
    # (fix: the branch-selection lines and the docstring's closing
    # quotes were missing/garbled in the original)
    if len(p) == 8 and isinstance(p[7], ast.If):
        # "else: <if_stmt>" — wrap the nested If as the sole orelse entry
        p[0] = ast.If(p[2], p[4], [p[7]])
    elif len(p) == 5:
        # bare "if test: suite" — empty orelse
        p[0] = ast.If(p[2], p[4], [])
    else:
        # "else: <suite>" — orelse is already a statement list
        p[0] = ast.If(p[2], p[4], p[7])
def p_suite(self, p):
    """suite : simple_stmt
    | NEWLINE INDENT stmts DEDENT"""
    # A suite is either an inline simple_stmt (already a statement
    # list) or an indented block whose statement list sits at p[3].
    # (fix: the body's length-based selection was missing from the
    # original as seen here)
    if len(p) == 2:
        p[0] = p[1]
    else:
        p[0] = p[3]
def p_stmts(self, p):
    # Accumulate a sequence of statements into one list.
    # NOTE(review): the second alternative, the docstring's closing
    # quotes and the body are missing from this view.
    """stmts : stmts stmt
def p_comparison(self, p):
    # Binary/unary operator dispatch: builds Compare / BinOp / UnaryOp
    # or helper-function Call nodes depending on the operator token.
    # NOTE(review): the docstring's remaining alternatives and closing
    # quotes, and the if/elif chain selecting between the branches
    # below, are missing from this view — the rule is not runnable
    # as shown.
    """comparison : comparison PLUS comparison
    | comparison MINUS comparison
    | comparison MULT comparison
    | comparison DIV comparison
    | comparison EQ comparison
    | comparison LE comparison
    | comparison GE comparison
    | comparison LTU comparison
    | comparison GTU comparison
    | comparison LT comparison
    | comparison GT comparison
    | comparison BITOR comparison
    | comparison BITAND comparison
    | comparison APPEND comparison
    # unsigned comparisons are turned into helper-function calls
    p[0] = ast.Call(ast.Name("ltu"), (p[1], p[3]), [])
    p[0] = ast.Call(ast.Name("gtu"), (p[1], p[3]), [])
    # APPEND: flatten both sides into a single concat(...) call
    l = check_concat(p[1]) + check_concat(p[3])
    p[0] = ast.Call(ast.Name("concat"), l, [])
    elif p[2] in ['<', '>', '=', '<=', '>=']:
        # comparison operators go through the binary_ops factories
        p[0] = binary_ops[p[2]]((p[1], p[3]))
    p[0] = ast.BinOp(p[1], binary_ops[p[2]], p[3])
    if isinstance(p[2], str) and p[2] == '-':
        p[0] = ast.UnaryOp(unary_ops[p[2]], p[1])
    p[0] = ast.UnaryOp(unary_ops[p[1]], p[2])
386 # power: atom trailer* ['**' factor]
387 # trailers enables function calls (and subscripts).
388 # I only allow one level of calls
389 # so this is 'trailer'
def p_power(self, p):
    # atom with an optional trailer: a function call ("CALL") or a
    # subscript ("SUBS"), as tagged by the p_trailer_* rules.
    # NOTE(review): the grammar docstring (original lines 391-395) and
    # several branch/else lines (401, 403, 406-409, including the
    # binding of "subs") are missing from this view.
    if p[2][0] == "CALL":
        #p[0] = ast.Expr(ast.Call(p[1], p[2][1], []))
        p[0] = ast.Call(p[1], p[2][1], [])
        # if p[1].id == 'print':
        #     p[0] = ast.Printnl(ast.Tuple(p[2][1]), None, None)
        # p[0] = ast.CallFunc(p[1], p[2][1], None, None)
    print("subscript atom", p[2][1])
    #raise AssertionError("not implemented %s" % p[2][0])
    # NOTE(review): "subs" is unbound in the visible lines.
    idx = ast.Slice(subs[0], subs[1], None)
    p[0] = ast.Subscript(p[1], idx, ast.Load())
def p_atom_name(self, p):
    """atom : NAME"""
    # A bare identifier becomes an ast.Name in Load (read) context.
    # (fix: the grammar docstring line was missing, which would make
    # ply.yacc reject the rule at table-build time)
    p[0] = ast.Name(id=p[1], ctx=ast.Load())
def p_atom_number(self, p):
    # Literal atom: wrap the token's value in an ast.Constant.
    # NOTE(review): the grammar docstring (original lines 418-420) is
    # missing from this view.
    p[0] = ast.Constant(p[1])
423 # '[' [listmaker] ']' |
def p_atom_listmaker(self, p):
    """atom : LBRACK listmaker RBRACK"""
    # The bracketed atom's value is the listmaker between the brackets.
    # (fix: the propagation of p[2] was missing as seen here)
    p[0] = p[2]
def p_listmaker(self, p):
    # Comma-separated contents of a list literal.
    # NOTE(review): the remaining alternative, the docstring's closing
    # quotes and the len(p) branch selection between the two
    # assignments below are missing from this view.
    """listmaker : test COMMA listmaker
    p[0] = ast.List([p[1]])
    p[0] = ast.List([p[1]] + p[3].nodes)
def p_atom_tuple(self, p):
    """atom : LPAR testlist RPAR"""
    # Parenthesised expression; a bare GPR name inside parentheses is
    # recorded as a register read.
    # NOTE(review): original line 440 and the lines after 445
    # (including the final p[0] assignment) are missing from this view.
    if isinstance(p[2], ast.Name):
        print("tuple name", p[2].id)
        if p[2].id in self.gprs:
            self.read_regs.append(p[2].id)  # add to list of regs to read
        #p[0] = ast.Subscript(ast.Name("GPR"), ast.Str(p[2].id))
449 # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME
def p_trailer(self, p):
    # A trailer is either a call argument list or a subscript.
    # NOTE(review): the second alternative, the docstring's closing
    # quotes and the body are missing from this view.
    """trailer : trailer_arglist
def p_trailer_arglist(self, p):
    "trailer_arglist : LPAR arglist RPAR"
    # Tag the parenthesised argument list so that p_power can tell a
    # function call apart from a subscript trailer.
    arglist = p[2]
    p[0] = ("CALL", arglist)
def p_trailer_subscript(self, p):
    "trailer_subscript : LBRACK subscript RBRACK"
    # Tag the bracketed subscript so that p_power can tell a subscript
    # apart from a function-call trailer.
    sub = p[2]
    p[0] = ("SUBS", sub)
464 # subscript: '.' '.' '.' | test | [test] ':' [test]
def p_subscript(self, p):
    # Slice subscript "a:b"; the end index gets +1 so that the
    # pseudo-code's inclusive range maps onto Python's exclusive one.
    # NOTE(review): the remaining docstring alternatives and closing
    # quotes, the surrounding branch structure and the final p[0]
    # assignment are missing from this view.
    """subscript : test COLON test
    if isinstance(p[3], ast.Constant):
        # constant end bound: fold the +1 at parse time
        end = ast.Constant(p[3].value +1)
    end = ast.BinOp(p[3], ast.Add(), ast.Constant(1))
480 # testlist: test (',' test)* [',']
481 # Contains shift/reduce error
def p_testlist(self, p):
    # testlist with an optional trailing comma; a list of expressions
    # may be promoted to an ast.Tuple.
    # NOTE(review): the remaining docstring alternatives and closing
    # quotes, plus several body lines (including the suite of the
    # first "if"), are missing from this view.
    """testlist : testlist_multi COMMA
    # May need to promote singleton to tuple
    if isinstance(p[1], list):
    # Convert into a tuple?
    if isinstance(p[0], list):
        p[0] = ast.Tuple(p[0])
def p_testlist_multi(self, p):
    # Comma-separated list of test expressions.
    # NOTE(review): most of this rule (remaining alternatives, the
    # docstring's closing quotes and the body) is missing from this
    # view.
    """testlist_multi : testlist_multi COMMA test
    if isinstance(p[1], list):
511 # test: or_test ['if' or_test 'else' test] | lambdef
512 # as I don't support 'and', 'or', and 'not' this works down to 'comparison'
518 # arglist: (argument ',')* (argument [',']| '*' test [',' '**' test]
520 # XXX INCOMPLETE: this doesn't allow the trailing comma
def p_arglist(self, p):
    # Comma-separated call-argument list.
    # NOTE(review): the remaining alternatives, the docstring's closing
    # quotes and the body are missing from this view.
    """arglist : arglist COMMA argument
530 # argument: test [gen_for] | test '=' test # Really [keyword '='] test
def p_argument(self, p):
    # NOTE(review): this rule's grammar docstring and body (original
    # lines 532+) are missing from this view.
def p_error(self, p):
    # print "Error!", repr(p)
    # NOTE(review): the error-reporting body (original lines 537-538,
    # presumably raising SyntaxError) is missing from this view.
class GardenSnakeParser(PowerParser):
    # Wires the PowerParser grammar to the indentation-aware lexer and
    # builds the ply.yacc parser plus the power-ISA decoder tables.

    def __init__(self, lexer=None):
        PowerParser.__init__(self)
        # NOTE(review): original line 543 is missing from this view —
        # presumably "if lexer is None:" guarding the construction
        # below, since the parameter is otherwise ignored.
        lexer = IndentLexer(debug=0)
        # NOTE(review): original line 545 (presumably
        # "self.lexer = lexer") is missing — parse() below reads
        # self.lexer.
        self.tokens = lexer.tokens
        # build the LALR tables in-memory; no parser.out / parsetab
        self.parser = yacc.yacc(module=self, start="file_input_end",
                                debug=False, write_tables=False)
        # power-ISA instruction decoder
        self.sd = create_pdecode()

    def parse(self, code):
        # self.lexer.input(code)
        result = self.parser.parse(code, lexer=self.lexer, debug=False)
        return ast.Module(result)
558 ###### Code generation ######
560 #from compiler import misc, syntax, pycodegen
562 class GardenSnakeCompiler(object):
564 self
.parser
= GardenSnakeParser()
566 def compile(self
, code
, mode
="exec", filename
="<string>"):
567 tree
= self
.parser
.parse(code
)
571 #misc.set_filename(filename, tree)
572 return compile(tree
, mode
="exec", filename
="<string>")
574 gen
= pycodegen
.ModuleCodeGenerator(tree
)