diff -r 47f86885d481 -r e85600529ca5 scala/i.scala
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/scala/i.scala Sat Jun 15 09:11:11 2013 -0400
@@ -0,0 +1,492 @@
+
+// regular expressions including NOT
+abstract class Rexp
+
+case object NULL extends Rexp
+case object EMPTY extends Rexp
+case object ALLC extends Rexp // recognises any character
+case class CHAR(c: Char) extends Rexp
+case class ALT(r1: Rexp, r2: Rexp) extends Rexp
+case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
+case class STAR(r: Rexp) extends Rexp
+case class NOT(r: Rexp) extends Rexp // negation of a regular expression
+
+
+// nullable function: tests whether the regular
+// expression can recognise the empty string
+def nullable (r: Rexp) : Boolean = r match {
+  case NULL => false
+  case EMPTY => true
+  case ALLC => false
+  case CHAR(_) => false
+  case ALT(r1, r2) => nullable(r1) || nullable(r2)
+  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
+  case STAR(_) => true
+  case NOT(r) => !(nullable(r))
+}
+
+// tests whether a regular expression
+// cannot recognise more
+def no_more (r: Rexp) : Boolean = r match {
+  case NULL => true
+  case EMPTY => false
+  case ALLC => false
+  case CHAR(_) => false
+  case ALT(r1, r2) => no_more(r1) && no_more(r2)
+  case SEQ(r1, r2) => if (nullable(r1)) (no_more(r1) && no_more(r2)) else no_more(r1)
+  case STAR(_) => false
+  case NOT(r) => !(no_more(r))
+}
+
+
+// derivative of a regular expression w.r.t. a character
+def der (c: Char, r: Rexp) : Rexp = r match {
+  case NULL => NULL
+  case EMPTY => NULL
+  case ALLC => EMPTY
+  case CHAR(d) => if (c == d) EMPTY else NULL
+  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
+  case SEQ(r1, r2) =>
+    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
+    else SEQ(der(c, r1), r2)
+  case STAR(r) => SEQ(der(c, r), STAR(r))
+  case NOT(r) => NOT(der (c, r))
+}
+
+// regular expression for specifying
+// ranges of characters
+def Range(s : List[Char]) : Rexp = s match {
+  case Nil => NULL
+  case c::Nil => CHAR(c)
+  case c::s => ALT(CHAR(c), Range(s))
+}
+def RANGE(s: String) = Range(s.toList)
+
+
+// one or more
+def PLUS(r: Rexp) = SEQ(r, STAR(r))
+
+// many alternatives
+def Alts(rs: List[Rexp]) : Rexp = rs match {
+  case Nil => NULL
+  case r::Nil => r
+  case r::rs => ALT(r, Alts(rs))
+}
+def ALTS(rs: Rexp*) = Alts(rs.toList)
+
+// repetitions
+def Seqs(rs: List[Rexp]) : Rexp = rs match {
+  case Nil => NULL
+  case r::Nil => r
+  case r::rs => SEQ(r, Seqs(rs))
+}
+def SEQS(rs: Rexp*) = Seqs(rs.toList)
+
+// some convenience for typing in regular expressions
+def charlist2rexp(s : List[Char]) : Rexp = s match {
+  case Nil => EMPTY
+  case c::Nil => CHAR(c)
+  case c::s => SEQ(CHAR(c), charlist2rexp(s))
+}
+implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)
+
+
+type Rule[T] = (Rexp, List[Char] => T)
+
+case class Tokenizer[T](rules: List[Rule[T]], excl: List[T] = Nil) {
+
+  def munch(r: Rexp, action: List[Char] => T, s: List[Char], t: List[Char]) : Option[(List[Char], T)] =
+    s match {
+      case Nil if (nullable(r)) => Some(Nil, action(t))
+      case Nil => None
+      case c::s if (no_more(der (c, r)) && nullable(r)) => Some(c::s, action(t))
+      case c::s if (no_more(der (c, r))) => None
+      case c::s => munch(der (c, r), action, s, t ::: List(c))
+    }
+
+  def one_token(s: List[Char]) : Either[(List[Char], T), String] = {
+    val somes = rules.map { (r) => munch(r._1, r._2, s, Nil) }.flatten
+    if (somes == Nil) Right(s.mkString)
+    else Left(somes sortBy (_._1.length) head)
+  }
+
+  def tokenize(cs: List[Char]) : List[T] = cs match {
+    case Nil => Nil
+    case _ => one_token(cs) match {
+      case Left((rest, token)) => token :: tokenize(rest)
+      case Right(s) => { println("Cannot tokenize: \"" + s + "\""); Nil }
+    }
+  }
+
+  def fromString(s: String) : List[T] =
+    tokenize(s.toList).filterNot(excl.contains(_))
+
+  def fromFile(name: String) : List[T] =
+    fromString(io.Source.fromFile(name).mkString)
+
+}
+
+
+// parser combinators with input type I and return type T
+
+abstract class Parser[I <% Seq[_], T] {
+  def parse(ts: I): Set[(T, I)]
+
+  def parse_all(ts: I) : Set[T] =
+    for ((head, tail) <- parse(ts); if (tail.isEmpty)) yield head
+
+  def parse_single(ts: I) : T = parse_all(ts).toList match {
+    case t::Nil => t
+    case _ => { println ("Parse Error") ; sys.exit(-1) }
+  }
+
+  def || (right : => Parser[I, T]) : Parser[I, T] = new AltParser(this, right)
+  def ==>[S] (f: => T => S) : Parser [I, S] = new FunParser(this, f)
+  def ~[S] (right : => Parser[I, S]) : Parser[I, (T, S)] = new SeqParser(this, right)
+  def ~>[S] (right : => Parser[I, S]) : Parser[I, S] = this ~ right ==> (_._2)
+  def <~[S] (right : => Parser[I, S]) : Parser[I, T] = this ~ right ==> (_._1)
+}
+
+class SeqParser[I <% Seq[_], T, S](p: => Parser[I, T], q: => Parser[I, S]) extends Parser[I, (T, S)] {
+  def parse(sb: I) =
+    for ((head1, tail1) <- p.parse(sb);
+         (head2, tail2) <- q.parse(tail1)) yield ((head1, head2), tail2)
+}
+
+class AltParser[I <% Seq[_], T](p: => Parser[I, T], q: => Parser[I, T]) extends Parser[I, T] {
+  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)
+}
+
+class FunParser[I <% Seq[_], T, S](p: => Parser[I, T], f: T => S) extends Parser[I, S] {
+  def parse(sb: I) =
+    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
+}
+
+
+// A parser and compiler for the while language
+//
+//:load matcher.scala
+//:load parser3.scala
+
+// some regular expressions
+val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
+val DIGIT = RANGE("0123456789")
+val ID = SEQ(SYM, STAR(ALT(SYM, DIGIT)))
+val NUM = PLUS(DIGIT)
+val KEYWORD = ALTS("skip", "while", "do", "if", "then", "else", "true", "false", "write")
+val SEMI: Rexp = ";"
+val OP: Rexp = ALTS(":=", "=", "-", "+", "*", "!=", "<", ">")
+val WHITESPACE = PLUS(RANGE(" \n"))
+val RPAREN: Rexp = ")"
+val LPAREN: Rexp = "("
+val BEGIN: Rexp = "{"
+val END: Rexp = "}"
+val COMMENT = SEQS("/*", NOT(SEQS(STAR(ALLC), "*/", STAR(ALLC))), "*/")
+
+// tokens for classifying the strings that have been recognised
+abstract class Token
+case object T_WHITESPACE extends Token
+case object T_COMMENT extends Token
+case object T_SEMI extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case object T_BEGIN extends Token
+case object T_END extends Token
+case class T_ID(s: String) extends Token
+case class T_OP(s: String) extends Token
+case class T_NUM(s: String) extends Token
+case class T_KWD(s: String) extends Token
+
+val lexing_rules: List[Rule[Token]] =
+  List((KEYWORD, (s) => T_KWD(s.mkString)),
+       (ID, (s) => T_ID(s.mkString)),
+       (OP, (s) => T_OP(s.mkString)),
+       (NUM, (s) => T_NUM(s.mkString)),
+       (SEMI, (s) => T_SEMI),
+       (LPAREN, (s) => T_LPAREN),
+       (RPAREN, (s) => T_RPAREN),
+       (BEGIN, (s) => T_BEGIN),
+       (END, (s) => T_END),
+       (WHITESPACE, (s) => T_WHITESPACE),
+       (COMMENT, (s) => T_COMMENT))
+
+// the tokenizer
+val Tok = Tokenizer(lexing_rules, List(T_WHITESPACE, T_COMMENT))
+
+// the abstract syntax trees
+abstract class Stmt
+abstract class AExp
+abstract class BExp
+type Block = List[Stmt]
+case object Skip extends Stmt
+case class If(a: BExp, bl1: Block, bl2: Block) 
extends Stmt
+case class While(b: BExp, bl: Block) extends Stmt
+case class Assign(s: String, a: AExp) extends Stmt
+case class Write(s: String) extends Stmt
+
+case class Var(s: String) extends AExp
+case class Num(i: Int) extends AExp
+case class Aop(o: String, a1: AExp, a2: AExp) extends AExp
+
+case object True extends BExp
+case object False extends BExp
+case class Relop(o: String, a1: AExp, a2: AExp) extends BExp
+
+// atomic parsers
+case class TokParser(tok: Token) extends Parser[List[Token], Token] {
+  def parse(ts: List[Token]) = ts match {
+    case t::ts if (t == tok) => Set((t, ts))
+    case _ => Set ()
+  }
+}
+implicit def token2tparser(t: Token) = TokParser(t)
+
+case object NumParser extends Parser[List[Token], Int] {
+  def parse(ts: List[Token]) = ts match {
+    case T_NUM(s)::ts => Set((s.toInt, ts))
+    case _ => Set ()
+  }
+}
+
+case object IdParser extends Parser[List[Token], String] {
+  def parse(ts: List[Token]) = ts match {
+    case T_ID(s)::ts => Set((s, ts))
+    case _ => Set ()
+  }
+}
+
+
+// arithmetic expressions
+lazy val AExp: Parser[List[Token], AExp] =
+  (T ~ T_OP("+") ~ AExp) ==> { case ((x, y), z) => Aop("+", x, z): AExp } ||
+  (T ~ T_OP("-") ~ AExp) ==> { case ((x, y), z) => Aop("-", x, z): AExp } || T
+lazy val T: Parser[List[Token], AExp] =
+  (F ~ T_OP("*") ~ T) ==> { case ((x, y), z) => Aop("*", x, z): AExp } || F
+lazy val F: Parser[List[Token], AExp] =
+  (T_LPAREN ~> AExp <~ T_RPAREN) ||
+  IdParser ==> Var ||
+  NumParser ==> Num
+
+// boolean expressions
+lazy val BExp: Parser[List[Token], BExp] =
+  (T_KWD("true") ==> ((_) => True: BExp)) ||
+  (T_KWD("false") ==> ((_) => False: BExp)) ||
+  (T_LPAREN ~> BExp <~ T_RPAREN) ||
+  (AExp ~ T_OP("=") ~ AExp) ==> { case ((x, y), z) => Relop("=", x, z): BExp } ||
+  (AExp ~ T_OP("!=") ~ AExp) ==> { case ((x, y), z) => Relop("!=", x, z): BExp } ||
+  (AExp ~ T_OP("<") ~ AExp) ==> { case ((x, y), z) => Relop("<", x, z): BExp } ||
+  (AExp ~ T_OP(">") ~ AExp) ==> { case ((x, y), z) => Relop("<", z, x): BExp }
+
+lazy val Stmt: Parser[List[Token], Stmt] =
+  (T_KWD("skip") ==> ((_) => Skip: Stmt)) ||
+  (IdParser ~ T_OP(":=") ~ AExp) ==> { case ((x, y), z) => Assign(x, z): Stmt } ||
+  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Block ~ T_KWD("else") ~ Block) ==>
+    { case (((((x,y),z),u),v),w) => If(y, u, w): Stmt } ||
+  (T_KWD("while") ~ BExp ~ T_KWD("do") ~ Block) ==> { case (((x, y), z), w) => While(y, w) } ||
+  (T_KWD("write") ~ IdParser) ==> { case (x, y) => Write(y) }
+
+lazy val Stmts: Parser[List[Token], Block] =
+  (Stmt ~ T_SEMI ~ Stmts) ==> { case ((x, y), z) => x :: z : Block } ||
+  (Stmt ==> ((s) => List(s) : Block))
+
+lazy val Block: Parser[List[Token], Block] =
+  (T_BEGIN ~> Stmts <~ T_END) ||
+  (Stmt ==> ((s) => List(s)))
+
+// compiler
+val beginning = """
+.class public XXX.XXX
+.super java/lang/Object
+
+.method public <init>()V
+   aload_0
+   invokenonvirtual java/lang/Object/<init>()V
+   return
+.end method
+
+.method public static write(I)V
+    .limit locals 5
+    .limit stack 5
+    iload 0
+    getstatic java/lang/System/out Ljava/io/PrintStream;
+    swap
+    invokevirtual java/io/PrintStream/println(I)V
+    return
+.end method
+
+
+.method public static main([Ljava/lang/String;)V
+   .limit locals 200
+   .limit stack 200
+
+"""
+
+val ending = """
+
+   return
+
+.end method
+"""
+
+// for generating new labels
+var counter = -1
+
+def Fresh(x: String) = {
+  counter += 1
+  x ++ "_" ++ counter.toString()
+}
+
+type Env = Map[String, String]
+type Instrs = List[String]
+
+def compile_aexp(a: AExp, env : Env) : Instrs = a match {
+  case Num(i) => 
List("ldc " + i.toString + "\n") + case Var(s) => List("iload " + env(s) + "\n") + case Aop("+", a1, a2) => compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("iadd\n") + case Aop("-", a1, a2) => compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("isub\n") + case Aop("*", a1, a2) => compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("imul\n") +} + +def compile_bexp(b: BExp, env : Env, jmp: String) : Instrs = b match { + case True => Nil + case False => List("goto " + jmp + "\n") + case Relop("=", a1, a2) => + compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("if_icmpne " + jmp + "\n") + case Relop("!=", a1, a2) => + compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("if_icmpeq " + jmp + "\n") + case Relop("<", a1, a2) => + compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("if_icmpge " + jmp + "\n") +} + + +def compile_stmt(s: Stmt, env: Env) : (Instrs, Env) = s match { + case Skip => (Nil, env) + case Assign(x, a) => { + val index = if (env.isDefinedAt(x)) env(x) else env.keys.size.toString + (compile_aexp(a, env) ++ + List("istore " + index + "\n"), env + (x -> index)) + } + case If(b, bl1, bl2) => { + val if_else = Fresh("If_else") + val if_end = Fresh("If_end") + val (instrs1, env1) = compile_bl(bl1, env) + val (instrs2, env2) = compile_bl(bl2, env1) + (compile_bexp(b, env, if_else) ++ + instrs1 ++ + List("goto " + if_end + "\n") ++ + List("\n" + if_else + ":\n\n") ++ + instrs2 ++ + List("\n" + if_end + ":\n\n"), env2) + } + case While(b, bl) => { + val loop_begin = Fresh("Loop_begin") + val loop_end = Fresh("Loop_end") + val (instrs1, env1) = compile_bl(bl, env) + (List("\n" + loop_begin + ":\n\n") ++ + compile_bexp(b, env, loop_end) ++ + instrs1 ++ + List("goto " + loop_begin + "\n") ++ + List("\n" + loop_end + ":\n\n"), env1) + } + case Write(x) => + (List("iload " + env(x) + "\n" + "invokestatic XXX/XXX/write(I)V\n"), env) +} + +def compile_bl(bl: Block, env: Env) : (Instrs, Env) = bl match { + case Nil => (Nil, env) + case s::bl => { + val (instrs1, env1) = compile_stmt(s, env) + val (instrs2, env2) = compile_bl(bl, env1) + (instrs1 ++ instrs2, env2) + } +} + +def compile(input: String) : String = { + val class_name = input.split('.')(0) + val tks = Tok.fromFile(input) + val ast = Stmts.parse_single(tks) + val instructions = compile_bl(ast, Map.empty)._1 + (beginning ++ instructions.mkString ++ ending).replaceAllLiterally("XXX", class_name) +} + + +def compile_to(input: String, output: String) = { + val fw = new java.io.FileWriter(output) + fw.write(compile(input)) + fw.close() +} + +// +val tks = Tok.fromString("x := x + 1") +val ast = Stmt.parse_single(tks) +println(compile_stmt(ast, Map("x" -> "n"))._1.mkString) + + + +//examples + +compile_to("loops.while", "loops.j") +//compile_to("fib.while", "fib.j") + + +// testing cases for time measurements +/* +def time_needed[T](i: Int, code: => T) = { + val start = System.nanoTime() + for (j <- 1 to i) code + val end = System.nanoTime() + (end - start)/(i * 1.0e9) +} + +// for testing +import scala.sys.process._ + +val test_prog = """ +start := XXX; +x := start; +y := start; +z := start; +while 0 < x do { + while 0 < y do { + while 0 < z do { + z := z - 1 + }; + z := start; + y := y - 1 + }; + y := start; + x := x - 1 +}; +write x; +write y; +write z +""" + + +def compile_test(n: Int) : Unit = { + val class_name = "LOOP" + val tks = Tok.fromString(test_prog.replaceAllLiterally("XXX", n.toString)) + val ast = Stmts.parse_single(tks) + val instructions = compile_bl(ast, Map.empty)._1 + val assembly = (beginning 
++ instructions.mkString ++ ending).replaceAllLiterally("XXX", class_name)
+  val fw = new java.io.FileWriter(class_name + ".j")
+  fw.write(assembly)
+  fw.close()
+  val test = ("java -jar jvm/jasmin-2.4/jasmin.jar " + class_name + ".j").!!
+  println(n + " " + time_needed(2, ("java " + class_name + "/" + class_name).!!))
+}
+
+List(1, 5000, 10000, 50000, 100000, 250000, 500000, 750000, 1000000).map(compile_test(_))
+
+
+
+// java bytecode assembler
+//
+// java -jar jvm/jasmin-2.4/jasmin.jar loops.j
+
+*/
+
+
+
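
Editor's note (not part of the patch above): the two sketches below are usage examples only. They assume the definitions from i.scala are in scope, for instance after :load i.scala in the Scala REPL; the While program, the class name "test" and the sample comment strings are made up for illustration.

// Sketch 1: a derivative-based matcher built from der and nullable.
// A regular expression r matches a string s iff the iterated derivative
// of r by the characters of s is nullable.
def matches(r: Rexp, s: String) : Boolean =
  nullable(s.toList.foldLeft(r)((r1, c) => der(c, r1)))

matches(COMMENT, "/* an example comment */")   // expected: true
matches(COMMENT, "/* not closed")              // expected: false

// Sketch 2: the full pipeline -- lex a While program with Tok, parse the
// token list with Stmts, and compile the resulting AST to Jasmin assembly.
val prog = """x := 5; y := 1; while 0 < x do { y := y * x; x := x - 1 }; write y"""

// lexing yields a List[Token]; whitespace and comments are already filtered out
val toks = Tok.fromString(prog)

// parse_single expects exactly one complete parse of the whole token list
val tree = Stmts.parse_single(toks)

// compile_bl threads an environment mapping variable names to JVM local slots
val code = (beginning ++ compile_bl(tree, Map.empty)._1.mkString ++ ending)
             .replaceAllLiterally("XXX", "test")

// the resulting assembly can be written to test.j and assembled with
//   java -jar jvm/jasmin-2.4/jasmin.jar test.j
println(code)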