compiler.py
# Danial Erfanian - 97110155
# Mohamad Namdar - 97106302

import sys
from copy import copy
from anytree import RenderTree
from py_minus.code_gen import CodeGenerator
from py_minus.parser import Parser
from py_minus.scanner import Scanner
from py_minus.utils import KEYWORDS, TokenType


def write_tokens(recognized_tokens):
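    # Write the recognized tokens to tokens.txt, grouping tokens from the same
    # source line onto one numbered output line: "lineno. (TYPE, lexeme) ...".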
with open("tokens.txt", "w+") as f:
last_line = -1
for lineno, token in recognized_tokens:
if last_line < lineno:
if last_line != -1:
f.write('\n')
f.write(f"{lineno + 1}. ")
last_line = lineno
f.write(f"({token[0].name}, {token[1]}) ")


def write_symbol_table(symbols):
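    # Dump the symbol table to symbol_table.txt as a numbered list, one symbol per line.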
with open("symbol_table.txt", "w+") as f:
for idx, symbol in enumerate(symbols):
f.write(f"{idx + 1}. {symbol}\n")


def write_lexical_errors(lexical_errors):
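    # Write lexical errors to lexical_errors.txt, grouped by source line number,
    # or a placeholder message when no lexical errors were found.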
with open("lexical_errors.txt", "w+") as f:
first_line = True
last_line = -1
if len(lexical_errors) == 0:
f.write('There is no lexical error.')
for lineno, error in lexical_errors:
if last_line < lineno:
if not first_line:
f.write('\n')
first_line = False
f.write(f"{lineno + 1}.")
last_line = lineno
f.write(f" {error}")


def write_syntax_errors(errors):
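    # Write syntax errors to syntax_errors.txt. Each error is a tuple whose first
    # element selects the message: 1 = illegal token, 2/3 = missing symbol, 4 = unexpected EOF.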
with open("syntax_errors.txt", "w+") as f:
if len(errors) == 0:
f.write("There is no syntax error.")
for error in errors:
f.write(f"#{error[1].lineno + 1} : ")
if error[0] == 3:
f.write(f"syntax error, missing {error[-1]}\n")
elif error[0] == 1:
if error[1].token_type in [TokenType.SYMBOL, TokenType.KEYWORD]:
arg = error[1].lexim
else:
arg = error[1].token_type.name
f.write(f"syntax error, illegal {arg}\n")
elif error[0] == 2:
f.write(f"syntax error, missing {error[-1].name}\n")
            # TODO: what happened to that line?
            elif error[0] == 4:
                f.write("syntax error, Unexpected EOF")
else:
assert False


def write_parse_tree(tree):
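    # Render the parse tree to parse_tree.txt using anytree's RenderTree.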
with open("parse_tree.txt", "w+") as f:
for pre, fill, node in RenderTree(tree):
f.write("%s%s\n" % (pre, node.name))


def write_output(gen):
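    # Write the code generator's output to output.txt.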
with open("output.txt", "w+") as f:
gen.print(f)


def write_semantic_errors(gen):
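    # Write the semantic errors reported by the code generator to semantic_errors.txt.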
with open("semantic_errors.txt", "w+") as f:
gen.print_semantic_errors(f)


if __name__ == '__main__':
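    # Driver: scan and parse input.txt, then write every output artifact.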
recognized_tokens = []
symbols = copy(KEYWORDS)
prefix = ""
# prefix = "PA3-Testcases/T9/"
# print("expected.txt".center(40, "-"))
# with open(prefix + "expected.txt", "r") as f:
# print(''.join(f.readlines()))
# print("out result".center(40, "-"))
#
# with open(prefix + "input.txt", "r") as f:
# assert "recursive" not in f.readline()
scanner = Scanner(prefix + "input.txt", symbols)
code_gen = CodeGenerator()
parser = Parser(scanner, code_gen)
parser.parse()
# parser.print_tree()
# print(parser.syntaxError)
# code_gen.print()
code_gen.print_semantic_errors(sys.stdout)
write_semantic_errors(code_gen)
    write_output(code_gen)
# write_parse_tree(parser.parseTree)
write_syntax_errors(parser.syntaxError)
write_symbol_table(symbols)
write_tokens(recognized_tokens)
write_lexical_errors(scanner.lexical_errors)
# op = Optimizer(code_gen)
# op.optimize_code()
# write_optimized_code(op)