// A parser and evaluator for the WHILE language
//
import matcher._
import parser._

// some regular expressions
val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
val DIGIT = RANGE("0123456789")
val ID = SEQ(SYM, STAR(ALT(SYM, DIGIT)))
val NUM = PLUS(DIGIT)
val KEYWORD = ALTS("skip", "while", "do", "if", "then", "else", "true", "false", "write")
val SEMI: Rexp = ";"
val OP: Rexp = ALTS(":=", "=", "-", "+", "*", "!=", "<", ">")
val WHITESPACE = PLUS(RANGE(" \n"))
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val BEGIN: Rexp = "{"
val END: Rexp = "}"
val COMMENT = SEQS("/*", NOT(SEQS(STAR(ALLC), "*/", STAR(ALLC))), "*/")
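
// For example, ID matches identifiers such as "x", "foo2" or "start",
// while NUM matches digit strings such as "42". The NOT(...) inside
// COMMENT rules out any "*/" in the comment body, so COMMENT matches
// exactly one unnested "/* ... */" block.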

// tokens for classifying the strings that have been recognised
abstract class Token
case object T_WHITESPACE extends Token
case object T_COMMENT extends Token
case object T_SEMI extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case object T_BEGIN extends Token
case object T_END extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(s: String) extends Token
case class T_KWD(s: String) extends Token

// lexing rules: KEYWORD is listed before ID so that, assuming the
// tokenizer breaks ties in rule order, strings like "while" are
// classified as T_KWD rather than T_ID
val lexing_rules: List[(Rexp, List[Char] => Token)] =
  List((KEYWORD, (s) => T_KWD(s.mkString)),
       (ID, (s) => T_ID(s.mkString)),
       (OP, (s) => T_OP(s.mkString)),
       (NUM, (s) => T_NUM(s.mkString)),
       (SEMI, (s) => T_SEMI),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (BEGIN, (s) => T_BEGIN),
       (END, (s) => T_END),
       (WHITESPACE, (s) => T_WHITESPACE),
       (COMMENT, (s) => T_COMMENT))

// the tokenizer; the second argument lists the token classes
// (whitespace and comments) that are dropped from the token stream
val Tok = Tokenizer(lexing_rules, List(T_WHITESPACE, T_COMMENT))

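// A small sketch of the tokenizer in action (using Tok.fromString, which is
// also used in eval_test below); the expected output is an assumption about
// matcher.scala, not something checked here:
//
//   Tok.fromString("x := 5 + 3")
//   // expected: List(T_ID("x"), T_OP(":="), T_NUM("5"), T_OP("+"), T_NUM("3"))
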
// the abstract syntax trees
abstract class Stmt
abstract class AExp
abstract class BExp
type Block = List[Stmt]
case object Skip extends Stmt
case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt
case class While(b: BExp, bl: Block) extends Stmt
case class Assign(s: String, a: AExp) extends Stmt
case class Write(s: String) extends Stmt

case class Var(s: String) extends AExp
case class Num(i: Int) extends AExp
case class Aop(o: String, a1: AExp, a2: AExp) extends AExp

case object True extends BExp
case object False extends BExp
case class Bop(o: String, a1: AExp, a2: AExp) extends BExp

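// A hand-written illustration of these constructors (the name example_ast is
// introduced here purely for illustration): the two-statement program
// "x := 3; x := x + 1" corresponds to
val example_ast: Block =
  List(Assign("x", Num(3)),
       Assign("x", Aop("+", Var("x"), Num(1))))
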
// atomic parsers
case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t::ts if (t == tok) => Set((t, ts))
    case _ => Set ()
  }
}
implicit def token2tparser(t: Token): TokParser = TokParser(t)

case object NumParser extends Parser[List[Token], Int] {
  def parse(ts: List[Token]) = ts match {
    case T_NUM(s)::ts => Set((s.toInt, ts))
    case _ => Set ()
  }
}

case object IdParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = ts match {
    case T_ID(s)::ts => Set((s, ts))
    case _ => Set ()
  }
}

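// For instance, these atomic parsers consume exactly one token:
//
//   NumParser.parse(List(T_NUM("42")))        // => Set((42, Nil))
//   IdParser.parse(List(T_ID("x"), T_SEMI))   // => Set(("x", List(T_SEMI)))
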
// arithmetic expressions (note that "+" and "-" associate to the
// right with this grammar)
lazy val AExp: Parser[List[Token], AExp] =
  (T ~ T_OP("+") ~ AExp) ==> { case ((x, y), z) => Aop("+", x, z): AExp } ||
  (T ~ T_OP("-") ~ AExp) ==> { case ((x, y), z) => Aop("-", x, z): AExp } || T
lazy val T: Parser[List[Token], AExp] =
  (F ~ T_OP("*") ~ T) ==> { case ((x, y), z) => Aop("*", x, z): AExp } || F
lazy val F: Parser[List[Token], AExp] =
  (T_LPAREN ~> AExp <~ T_RPAREN) ||
  IdParser ==> Var ||
  NumParser ==> Num

// boolean expressions; ">" is translated into "<" with the arguments swapped
lazy val BExp: Parser[List[Token], BExp] =
  (T_KWD("true") ==> ((_) => True: BExp)) ||
  (T_KWD("false") ==> ((_) => False: BExp)) ||
  (T_LPAREN ~> BExp <~ T_RPAREN) ||
  (AExp ~ T_OP("=") ~ AExp) ==> { case ((x, y), z) => Bop("=", x, z): BExp } ||
  (AExp ~ T_OP("!=") ~ AExp) ==> { case ((x, y), z) => Bop("!=", x, z): BExp } ||
  (AExp ~ T_OP("<") ~ AExp) ==> { case ((x, y), z) => Bop("<", x, z): BExp } ||
  (AExp ~ T_OP(">") ~ AExp) ==> { case ((x, y), z) => Bop("<", z, x): BExp }

lazy val Stmt: Parser[List[Token], Stmt] =
  (T_KWD("skip") ==> ((_) => Skip: Stmt)) ||
  (IdParser ~ T_OP(":=") ~ AExp) ==> { case ((x, y), z) => Assign(x, z): Stmt } ||
  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Block ~ T_KWD("else") ~ Block) ==>
    { case (((((x, y), z), u), v), w) => If(y, u, w): Stmt } ||
  (T_KWD("while") ~ BExp ~ T_KWD("do") ~ Block) ==> { case (((x, y), z), w) => While(y, w) } ||
  (T_KWD("write") ~ IdParser) ==> { case (x, y) => Write(y) }

lazy val Stmts: Parser[List[Token], Block] =
  (Stmt ~ T_SEMI ~ Stmts) ==> { case ((x, y), z) => x :: z : Block } ||
  (Stmt ==> ((s) => List(s) : Block))

lazy val Block: Parser[List[Token], Block] =
  (T_BEGIN ~> Stmts <~ T_END) ||
  (Stmt ==> ((s) => List(s)))
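
// A quick sketch of parsing a token list into an AST (Tok.fromString and
// parse_single are used in the same way by eval_prog/eval_test below); the
// name example_parse is introduced only for this illustration:
val example_parse: Block = Stmts.parse_single(Tok.fromString("x := 3; write x"))
// expected: List(Assign("x", Num(3)), Write("x"))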

// interpreter
type Env = Map[String, Int]

// there is no case for ">": the parser rewrites a > b into b < a
def eval_bexp(b: BExp, env: Env) : Boolean = b match {
  case True => true
  case False => false
  case Bop("=", a1, a2) => eval_aexp(a1, env) == eval_aexp(a2, env)
  case Bop("!=", a1, a2) => !(eval_aexp(a1, env) == eval_aexp(a2, env))
  case Bop("<", a1, a2) => eval_aexp(a1, env) < eval_aexp(a2, env)
}

def eval_aexp(a: AExp, env : Env) : Int = a match {
  case Num(i) => i
  case Var(s) => env(s)
  case Aop("+", a1, a2) => eval_aexp(a1, env) + eval_aexp(a2, env)
  case Aop("-", a1, a2) => eval_aexp(a1, env) - eval_aexp(a2, env)
  case Aop("*", a1, a2) => eval_aexp(a1, env) * eval_aexp(a2, env)
}

def eval_stmt(s: Stmt, env: Env) : Env = s match {
  case Skip => env
  case Assign(x, a) => env + (x -> eval_aexp(a, env))
  case If(b, bl1, bl2) => if (eval_bexp(b, env)) eval_bl(bl1, env) else eval_bl(bl2, env)
  case While(b, bl) =>
    if (eval_bexp(b, env)) eval_stmt(While(b, bl), eval_bl(bl, env))
    else env
  case Write(x) => { println(env(x)); env }
}

def eval_bl(bl: Block, env: Env) : Env = bl match {
  case Nil => env
  case s::bl => eval_bl(bl, eval_stmt(s, env))
}

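// Evaluating the illustrative AST from above in the empty environment;
// after "x := 3; x := x + 1" the variable x should hold 4:
val example_env: Env = eval_bl(example_ast, Map.empty)
// expected: Map("x" -> 4)
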
def eval_prog(name: String) : Env = {
  val tks = Tok.fromFile(name)
  val ast = Stmts.parse_single(tks)
  eval_bl(ast, Map.empty)
}


// examples

//eval_prog("loops.while")
eval_prog("fib.while")

// measures the average time (in seconds) of i evaluations of code
def time_needed[T](i: Int, code: => T) = {
  val start = System.nanoTime()
  for (j <- 1 to i) code
  val end = System.nanoTime()
  (end - start)/(i * 1.0e9)
}


val test_prog = """
start := XXX;
x := start;
y := start;
z := start;
while 0 < x do {
  while 0 < y do {
    while 0 < z do {
      z := z - 1
    };
    z := start;
    y := y - 1
  };
  y := start;
  x := x - 1
}
"""


// substitutes XXX in test_prog by n and measures how long the nested
// loops take to evaluate
def eval_test(n: Int) : Unit = {
  val tks = Tok.fromString(test_prog.replaceAllLiterally("XXX", n.toString))
  val ast = Stmts.parse_single(tks)
  println(n + " " + time_needed(2, eval_bl(ast, Map.empty)))
}

List(1, 200, 400, 600, 800, 1000, 1200, 1400, 1600).foreach(eval_test)