Diff of the simplification and tokenising code: unchanged lines are shown as context, deleted and replaced lines are marked with - and +, and the old/new line numbers appear in the hunk headers.
@@ -170,14 +170,10 @@
       case (ONE, _) => (r2s, F_SEQ_Empty1(f1s, f2s))
       case (_, ONE) => (r1s, F_SEQ_Empty2(f1s, f2s))
       case _ => (SEQ(r1s,r2s), F_SEQ(f1s, f2s))
     }
   }
-  case RECD(x, r1) => {
-    val (r1s, f1s) = simp(r1)
-    (RECD(x, r1s), F_RECD(f1s))
-  }
   case r => (r, F_ID)
 }

 // lexing functions including simplification
 def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
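
The hunk above drops the RECD clause from simp, so RECD regular expressions now fall through to the catch-all case r => (r, F_ID) and are left unsimplified. The body of lex_simp itself is outside this diff; as a rough sketch only, assuming the usual derivative-based lexer in which der, nullable, mkeps and inj are defined earlier in the file, it would proceed along these lines:

def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
  // sketch, not the file's actual code: at the end of the input a value
  // can be built only if the remaining regex is nullable
  case Nil =>
    if (nullable(r)) mkeps(r) else throw new Exception("lexing error")
  // otherwise simplify the derivative, lex the rest, and rectify the
  // resulting value with the function returned by simp before injecting c
  case c::cs => {
    val (r_simp, f_simp) = simp(der(c, r))
    inj(r, c, f_simp(lex_simp(r_simp, cs)))
  }
}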
@@ -255,11 +251,11 @@
   case ("n", s) => T_NUM(s.toInt)
   case ("k", s) => T_KWD(s)
   case ("str", s) => T_STR(s)
 }

-// filters out all un-interesting token
+// filters out all un-interesting tokens
 def tokenise(s: String) : List[Token] =
   lexing_simp(WHILE_REGS, s).collect(token)


 def serialise[T](fname: String, data: T) = {
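
For context, tokenise (unchanged here apart from the fixed comment) runs the simplifying lexer over the whole input and keeps only those (tag, lexeme) pairs on which the partial function token is defined, so anything else, such as whitespace, is dropped by collect. A hedged usage sketch, assuming WHILE_REGS and the remaining Token classes are defined elsewhere in the file:

// sketch only: the exact tokens produced depend on WHILE_REGS and the
// full definition of `token`; with the rules shown above, a lexeme
// tagged "n" is converted to T_NUM
val toks: List[Token] = tokenise("42")
// toks would then contain T_NUM(42), assuming "n" is the tag WHILE_REGS
// assigns to number literals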