import scala.language.implicitConversions
import scala.language.reflectiveCalls
import scala.util._
import scala.annotation.tailrec
import scala.sys.process._

abstract class Rexp
case object NULL extends Rexp
case object EMPTY extends Rexp
case class CHAR(c: Char) extends Rexp
case class ALT(r1: Rexp, r2: Rexp) extends Rexp
case class RANGE(cs: List[Char]) extends Rexp
case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
case class PLUS(r: Rexp) extends Rexp
case class STAR(r: Rexp) extends Rexp
case class NTIMES(r: Rexp, n: Int) extends Rexp
case class NUPTOM(r: Rexp, n: Int, m: Int) extends Rexp

object RANGE {
  def apply(s: String) : RANGE = RANGE(s.toList)
}
def NMTIMES(r: Rexp, n: Int, m: Int) = {
  if (m < n) throw new IllegalArgumentException("the number m cannot be smaller than n.")
  else NUPTOM(r, n, m - n)
}

case class NOT(r: Rexp) extends Rexp
case class OPT(r: Rexp) extends Rexp

// some convenience for typing in regular expressions
def charlist2rexp(s : List[Char]) : Rexp = s match {
  case Nil => EMPTY
  case c::Nil => CHAR(c)
  case c::s => SEQ(CHAR(c), charlist2rexp(s))
}
implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)

implicit def RexpOps (r: Rexp) = new {
  def | (s: Rexp) = ALT(r, s)
  def % = STAR(r)
  def ~ (s: Rexp) = SEQ(r, s)
}

implicit def stringOps (s: String) = new {
  def | (r: Rexp) = ALT(s, r)
  def | (r: String) = ALT(s, r)
  def % = STAR(s)
  def ~ (r: Rexp) = SEQ(s, r)
  def ~ (r: String) = SEQ(s, r)
}
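// Illustrative only (not part of the original code): with the implicits above,
// regular expressions can be written in an almost textbook-like notation, e.g.
//
//   val r_example: Rexp = ("a" ~ "b".%) | "cd"
//
// which stands for ALT(SEQ(CHAR('a'), STAR(CHAR('b'))), SEQ(CHAR('c'), CHAR('d'))).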
// nullable function: tests whether the regular
// expression can recognise the empty string
def nullable (r: Rexp) : Boolean = r match {
  case NULL => false
  case EMPTY => true
  case CHAR(_) => false
  case ALT(r1, r2) => nullable(r1) || nullable(r2)
  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
  case STAR(_) => true
  case PLUS(r) => nullable(r)
  case NTIMES(r, i) => if (i == 0) true else nullable(r)
  case NUPTOM(r, i, j) => if (i == 0) true else nullable(r)
  case RANGE(_) => false
  case NOT(r) => !(nullable(r))
  case OPT(_) => true
}

// derivative of a regular expression w.r.t. a character
def der (c: Char, r: Rexp) : Rexp = r match {
  case NULL => NULL
  case EMPTY => NULL
  case CHAR(d) => if (c == d) EMPTY else NULL
  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
  case SEQ(r1, r2) =>
    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
    else SEQ(der(c, r1), r2)
  case STAR(r) => SEQ(der(c, r), STAR(r))
  case PLUS(r) => SEQ(der(c, r), STAR(r))
  case NTIMES(r, i) =>
    if (i == 0) NULL else der(c, SEQ(r, NTIMES(r, i - 1)))
  case NUPTOM(r, i, j) =>
    if (i == 0 && j == 0) NULL else
    if (i == 0) ALT(der(c, NTIMES(r, j)), der(c, NUPTOM(r, 0, j - 1)))
    else der(c, SEQ(r, NUPTOM(r, i - 1, j)))
  case RANGE(cs) => if (cs contains c) EMPTY else NULL
  case NOT(r) => NOT(der(c, r))
  case OPT(r) => der(c, r)
}
// zeroable function: tests whether the regular expression
// cannot match any string at all (used below to discard lexing rules)
def zeroable (r: Rexp) : Boolean = r match {
  case NULL => true
  case EMPTY => false
  case CHAR(_) => false
  case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
  case SEQ(r1, r2) => zeroable(r1) || zeroable(r2)
  case STAR(_) => false
  case PLUS(r) => zeroable(r)
  case NTIMES(r, i) => if (i == 0) false else zeroable(r)
  case NUPTOM(r, i, j) => if (i == 0) false else zeroable(r)
  case RANGE(_) => false
  case NOT(r) => false  // sound approximation: NOT(r) is only zeroable if r matches
                        // every string, which this syntactic check cannot decide
  case OPT(_) => false
}
// derivative w.r.t. a string (iterates der)
def ders (s: List[Char], r: Rexp) : Rexp = s match {
  case Nil => r
  case c::s => ders(s, der(c, r))
}
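// Illustrative helper (not in the original code, and not used below): a regular
// expression r matches a string s precisely when the derivative of r w.r.t. s
// is nullable.
def matches(r: Rexp, s: String) : Boolean = nullable(ders(s.toList, r))
// for example:
//   matches("ab" | "ac", "ac")    gives true
//   matches(NTIMES("a", 3), "aa") gives false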
// regular expressions for the While language
val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
val DIGIT = RANGE("0123456789")
val ID = SYM ~ (SYM | DIGIT).%
val NUM = PLUS(DIGIT)
// "true" and "false" are included so that the boolean parser below sees them as keywords
val KEYWORD : Rexp = "if" | "then" | "else" | "read" | "write" | "def" | "true" | "false"
val SEMI: Rexp = ";"
val COMMA: Rexp = ","
val OP: Rexp = ":=" | "==" | "-" | "+" | "*" | "!=" | "<" | ">" | "%" | "=" | "/"
val WHITESPACE = PLUS(" " | "\n" | "\t")
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val BEGIN: Rexp = "{"
val END: Rexp = "}"
val ALL = SYM | DIGIT | OP | " " | ":" | ";" | "\"" | "=" | "," | "(" | ")"
val ALL2 = ALL | "\n"
val COMMENT2 = ("/*" ~ NOT(ALL.% ~ "*/" ~ ALL.%) ~ "*/")
val COMMENT = ("/*" ~ ALL2.% ~ "*/") | ("//" ~ ALL.% ~ "\n")
val STRING = "\"" ~ ALL.% ~ "\""
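// Illustrative checks (not part of the original code), using the matches helper
// sketched above:
//   matches(ID, "x_1")        gives true
//   matches(NUM, "42")        gives true
//   matches(NUM, "4a2")       gives false
//   matches(KEYWORD, "while") gives false   ("while" is not in the keyword list)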
// tokens for the While language
abstract class Token
case object T_WHITESPACE extends Token
case object T_SEMI extends Token
case object T_COMMA extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case object T_BEGIN extends Token
case object T_END extends Token
case object T_COMMENT extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(s: String) extends Token
case class T_KWD(s: String) extends Token
case class T_STRING(s: String) extends Token
case class T_ERR(s: String) extends Token // special error token


type TokenFun = String => Token
type LexRules = List[(Rexp, TokenFun)]
val While_lexing_rules: LexRules =
  List((KEYWORD, (s) => T_KWD(s)),
       (ID, (s) => T_ID(s)),
       (COMMENT, (s) => T_COMMENT),
       (OP, (s) => T_OP(s)),
       (NUM, (s) => T_NUM(s)),
       (SEMI, (s) => T_SEMI),
       (COMMA, (s) => T_COMMA),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (BEGIN, (s) => T_BEGIN),
       (END, (s) => T_END),
       (STRING, (s) => T_STRING(s.drop(1).dropRight(1))),
       (WHITESPACE, (s) => T_WHITESPACE))
// munch: takes derivatives of all rules until every rule is zeroable; it records
// the longest prefix for which some rule is nullable (longest match), with ties
// broken by the order of the rules in the list
@tailrec
def munch(s: List[Char],
          pos: Int,
          rs: LexRules,
          last: Option[(Int, TokenFun)]): Option[(Int, TokenFun)] = {
  rs match {
    case Nil => last
    case rs if (s.length <= pos) => last
    case rs => {
      val ders = rs.map({ case (r, tf) => (der(s(pos), r), tf) })
      val rs_nzero = ders.filterNot({ case (r, _) => zeroable(r) })
      val rs_nulls = ders.filter({ case (r, _) => nullable(r) })
      val new_last = if (rs_nulls != Nil) Some((pos, rs_nulls.head._2)) else last
      munch(s, 1 + pos, rs_nzero, new_last)
    }
  }
}

// iterates the munching function and returns a Token list
def tokenize(s: String, rs: LexRules) : List[Token] = munch(s.toList, 0, rs, None) match {
  case None if (s == "") => Nil
  case None => List(T_ERR(s"Lexing error: $s"))
  case Some((n, tf)) => {
    val (head, tail) = s.splitAt(n + 1)
    tf(head)::tokenize(tail, rs)
  }
}

def tokenizer(s: String) : List[Token] =
  tokenize(s, While_lexing_rules).filter {
    case T_ERR(s) => { println(s); sys.exit(-1) }
    case T_WHITESPACE => false
    case T_COMMENT => false
    case _ => true
  }
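// Illustrative only (not one of the original tests): lexing a small string,
// e.g.
//   tokenizer("if x == 2 then x + 1 else 0")
// yields
//   List(T_KWD(if), T_ID(x), T_OP(==), T_NUM(2), T_KWD(then),
//        T_ID(x), T_OP(+), T_NUM(1), T_KWD(else), T_NUM(0))
// with the whitespace tokens already filtered out.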
def fromFile(name: String) : String =
  io.Source.fromFile(name).mkString

// tokenizer tests
//println(tokenizer(fromFile("loops.while")).mkString("\n"))
//println(tokenizer(fromFile("fib.while")).mkString("\n"))
//println(tokenizer(fromFile("collatz.while")).mkString("\n"))
//println(tokenizer(fromFile("defs.rec")).mkString("\n"))

// Parser - Abstract syntax trees
abstract class Exp
abstract class BExp
abstract class Decl

case class Def(name: String, args: List[String], body: Exp) extends Decl
case class Main(e: Exp) extends Decl

case class Call(name: String, args: List[Exp]) extends Exp
case class If(a: BExp, e1: Exp, e2: Exp) extends Exp
case class Read(s: String) extends Exp
case class Write(s: String) extends Exp
case class WriteS(s: String) extends Exp
case class Var(s: String) extends Exp
case class Num(i: Int) extends Exp
case class Aop(o: String, a1: Exp, a2: Exp) extends Exp

case object True extends BExp
case object False extends BExp
case class Bop(o: String, a1: Exp, a2: Exp) extends BExp

// Parser combinators
abstract class Parser[I <% Seq[_], T] {
  def parse(ts: I): Set[(T, I)]

  def parse_all(ts: I) : Set[T] =
    for ((head, tail) <- parse(ts); if (tail.isEmpty)) yield head

  def parse_single(ts: I) : T = parse_all(ts).toList match {
    case List(t) => t
    case _ => { println("Parse Error"); sys.exit(-1) }
  }
}

class SeqParser[I <% Seq[_], T, S](p: => Parser[I, T], q: => Parser[I, S]) extends Parser[I, (T, S)] {
  def parse(sb: I) =
    for ((head1, tail1) <- p.parse(sb);
         (head2, tail2) <- q.parse(tail1)) yield ((head1, head2), tail2)
}

class AltParser[I <% Seq[_], T](p: => Parser[I, T], q: => Parser[I, T]) extends Parser[I, T] {
  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)
}

class FunParser[I <% Seq[_], T, S](p: => Parser[I, T], f: T => S) extends Parser[I, S] {
  def parse(sb: I) =
    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
}

case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t::ts if (t == tok) => Set((t, ts))
    case _ => Set()
  }
}

implicit def token2tparser(t: Token) = TokParser(t)

case object NumParser extends Parser[List[Token], Int] {
  def parse(ts: List[Token]) = ts match {
    case T_NUM(s)::ts => Set((s.toInt, ts))
    case _ => Set()
  }
}

case object IdParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = ts match {
    case T_ID(s)::ts => Set((s, ts))
    case _ => Set()
  }
}

case object StringParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = ts match {
    case T_STRING(s)::ts => Set((s, ts))
    case _ => Set()
  }
}

implicit def ParserOps[I <% Seq[_], T](p: Parser[I, T]) = new {
  def || (q : => Parser[I, T]) = new AltParser[I, T](p, q)
  def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f)
  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
}
implicit def TokOps(t: Token) = new {
  def || (q : => Parser[List[Token], Token]) = new AltParser[List[Token], Token](t, q)
  def ==>[S] (f: => Token => S) = new FunParser[List[Token], Token, S](t, f)
  def ~[S] (q : => Parser[List[Token], S]) = new SeqParser[List[Token], Token, S](t, q)
}

def ListParser[I <% Seq[_], T, S](p: => Parser[I, T], q: => Parser[I, S]): Parser[I, List[T]] = {
  (p ~ q ~ ListParser(p, q)) ==> { case ((x, y), z) => x :: z : List[T] } ||
  (p ==> ((s) => List(s)))
}
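// Illustrative only: the combinators above compose parsers over token lists.
// For example, a (hypothetical, not used below) parser for a parenthesised
// identifier could be written as
//
//   lazy val ParenId: Parser[List[Token], String] =
//     (T_LPAREN ~ IdParser ~ T_RPAREN) ==> { case ((_, s), _) => s }
//
// so that ParenId.parse_all(tokenizer("(foo)")) gives Set("foo").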
// arithmetic expressions
lazy val Exp: Parser[List[Token], Exp] =
  (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN) ==>
    { case (((x, y), z), w) => Call(x, z): Exp } ||
  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp) ==>
    { case (((((x, y), z), u), v), w) => If(y, u, w): Exp } ||
  (T ~ T_OP("+") ~ Exp) ==> { case ((x, y), z) => Aop("+", x, z): Exp } ||
  (T ~ T_OP("-") ~ Exp) ==> { case ((x, y), z) => Aop("-", x, z): Exp } || T
lazy val T: Parser[List[Token], Exp] =
  (F ~ T_OP("*") ~ T) ==> { case ((x, y), z) => Aop("*", x, z): Exp } ||
  (F ~ T_OP("/") ~ T) ==> { case ((x, y), z) => Aop("/", x, z): Exp } ||
  (F ~ T_OP("%") ~ T) ==> { case ((x, y), z) => Aop("%", x, z): Exp } || F
lazy val F: Parser[List[Token], Exp] =
  (T_LPAREN ~ Exp ~ T_RPAREN) ==> { case ((x, y), z) => y: Exp } ||
  IdParser ==> { case x => Var(x): Exp } ||
  NumParser ==> { case x => Num(x): Exp }

// boolean expressions
lazy val BExp: Parser[List[Token], BExp] =
  (Exp ~ T_OP("==") ~ Exp) ==> { case ((x, y), z) => Bop("==", x, z): BExp } ||
  (Exp ~ T_OP("!=") ~ Exp) ==> { case ((x, y), z) => Bop("!=", x, z): BExp } ||
  (Exp ~ T_OP("<") ~ Exp) ==> { case ((x, y), z) => Bop("<", x, z): BExp } ||
334 (Exp ~ T_OP(">") ~ Exp) ==> { case ((x, y), z) => Bop("<", z, x): BExp } || |
|
  (T_KWD("true") ==> ((_) => True)) ||
  (T_KWD("false") ==> ((_) => False: BExp))

lazy val Defn: Parser[List[Token], Decl] =
  (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(IdParser, T_COMMA) ~ T_RPAREN ~ T_OP("=") ~ Exp) ==>
    { case ((((((x, y), z), w), u), v), r) => Def(y, w, r): Decl }

lazy val Prog: Parser[List[Token], List[Decl]] =
  (Defn ~ T_SEMI ~ Prog) ==> { case ((x, y), z) => x :: z : List[Decl] } ||
  (Exp ==> ((s) => List(Main(s)) : List[Decl]))

// parser examples

val p11 = """def zero(x) = 0"""
val p11_toks = tokenizer(p11)
val p11_ast = Defn.parse_all(p11_toks)
//println(p11_toks)
//println(p11_ast)


val p12_toks = tokenizer(fromFile("defs.rec"))
val p12_ast = Prog.parse_all(p12_toks)
//println(p12_toks.mkString(","))
//println(p12_ast)



// compiler - built-in functions
// copied from http://www.ceng.metu.edu.tr/courses/ceng444/link/jvm-cpm.html
//

val beginning = """
.class public XXX.XXX
.super java/lang/Object

.method public <init>()V
aload_0
invokenonvirtual java/lang/Object/<init>()V
return
.end method

.method public static write(I)V
.limit locals 5
.limit stack 5
iload 0
getstatic java/lang/System/out Ljava/io/PrintStream;
swap
invokevirtual java/io/PrintStream/println(I)V
return
.end method

.method public static writes(Ljava/lang/String;)V
.limit stack 2
.limit locals 2
getstatic java/lang/System/out Ljava/io/PrintStream;
aload 0
invokevirtual java/io/PrintStream/println(Ljava/lang/String;)V
return
.end method

.method public static read()I
.limit locals 10
.limit stack 10

ldc 0
istore 1 ; this will hold our final integer
Label1:
getstatic java/lang/System/in Ljava/io/InputStream;
invokevirtual java/io/InputStream/read()I
istore 2
iload 2
ldc 10 ; the newline delimiter
isub
ifeq Label2
iload 2
ldc 32 ; the space delimiter
isub
ifeq Label2

iload 2
ldc 48 ; we have our digit in ASCII, have to subtract it from 48
isub
ldc 10
iload 1
imul
iadd
istore 1
goto Label1
Label2:
;when we come here we have our integer computed in Local Variable 1
iload 1
ireturn
.end method

.method public static main([Ljava/lang/String;)V
.limit locals 200
.limit stack 200

"""

val ending = """

return

.end method
"""

// for generating new labels
var counter = -1

def Fresh(x: String) = {
  counter += 1
  x ++ "_" ++ counter.toString()
}

type Mem = Map[String, String]
type Instrs = List[String]

def compile_exp(a: Exp, env : Mem) : Instrs = a match {
  case Num(i) => List("ldc " + i.toString + "\n")
  case Var(s) => List("iload " + env(s) + "\n")
  case Aop("+", a1, a2) => compile_exp(a1, env) ++ compile_exp(a2, env) ++ List("iadd\n")
  case Aop("-", a1, a2) => compile_exp(a1, env) ++ compile_exp(a2, env) ++ List("isub\n")
  case Aop("*", a1, a2) => compile_exp(a1, env) ++ compile_exp(a2, env) ++ List("imul\n")
  case If(b, a1, a2) => {
    val if_else = Fresh("If_else")
    val if_end = Fresh("If_end")
    compile_bexp(b, env, if_else) ++
    compile_exp(a1, env) ++
    List("goto " + if_end + "\n") ++
    List("\n" + if_else + ":\n\n") ++
    compile_exp(a2, env) ++
    List("\n" + if_end + ":\n\n")
  }
  case Call(n, args) =>
    args.flatMap(a => compile_exp(a, env)) ++
    // one "I" in the method descriptor per integer argument (not a fixed "(I)I")
    List("invokestatic XXX/XXX/" + n + "(" + ("I" * args.length) + ")I\n")
}
def compile_bexp(b: BExp, env : Mem, jmp: String) : Instrs = b match {
  case True => Nil
  case False => List("goto " + jmp + "\n")
  case Bop("==", a1, a2) =>   // the parser produces "==", not "="
    compile_exp(a1, env) ++ compile_exp(a2, env) ++ List("if_icmpne " + jmp + "\n")
  case Bop("!=", a1, a2) =>
    compile_exp(a1, env) ++ compile_exp(a2, env) ++ List("if_icmpeq " + jmp + "\n")
  case Bop("<", a1, a2) =>
    compile_exp(a1, env) ++ compile_exp(a2, env) ++ List("if_icmpge " + jmp + "\n")
}
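// Illustrative only: compiling a small expression by hand, e.g.
//   compile_exp(Aop("+", Num(1), Num(2)), Map())
// produces the instruction list
//   List("ldc 1\n", "ldc 2\n", "iadd\n")
// and an If-expression additionally wraps the two branches with the jump
// emitted by compile_bexp and two fresh labels.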
def compile_decl(d: Decl) : Instrs = d match {
  case Def(name, args, a) => Nil   // function definitions are not compiled in this version
  case Main(a) => compile_exp(a, Map())
}

def compile(class_name: String, input: String) : String = {
  val tks = tokenizer(input)
  val ast = Prog.parse_single(tks)
  val instructions = ast.flatMap(compile_decl).mkString
  // wrap the instructions in the class/method boilerplate defined above
  (beginning ++ instructions ++ ending).replaceAllLiterally("XXX", class_name)
}


def compile_file(file_name: String) = {
  val class_name = file_name.split('.')(0)
  val output = compile(class_name, fromFile(file_name))
  val fw = new java.io.FileWriter(class_name + ".j")
  fw.write(output)
  fw.close()
}

def time_needed[T](i: Int, code: => T) = {
  val start = System.nanoTime()
  for (j <- 1 to i) code
  val end = System.nanoTime()
  (end - start) / (i * 1.0e9)
}
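// Illustrative only: e.g. time_needed(3, tokenizer(fromFile("defs.rec")))
// gives the average running time in seconds over three runs.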
def compile_run(file_name: String) : Unit = {
  val class_name = file_name.split('.')(0)
  compile_file(file_name)
  println(fromFile(class_name + ".j"))
  //val test = ("java -jar jvm/jasmin-2.4/jasmin.jar " + class_name + ".j").!!
  //("java " + class_name + "/" + class_name).!
}


// examples
//println(compile("test", p9))
//compile_run("loops.while")
compile_run("defs.rec")
//compile_run("test.while")