// compile.scala
// author: Christian Urban <christian dot urban at kcl dot ac dot uk>

// A parser and compiler for the WHILE language
// 
//:load matcher.scala
//:load parser3.scala

// some regular expressions
val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
val DIGIT = RANGE("0123456789")
val ID = SEQ(SYM, STAR(ALT(SYM, DIGIT))) 
val NUM = PLUS(DIGIT)
val KEYWORD = ALTS("skip", "while", "do", "if", "then", "else", "true", "false", "write") 
val SEMI: Rexp = ";"
val OP: Rexp = ALTS(":=", "=", "!=", "-", "+", "*", "!", "<", ">")
val WHITESPACE = PLUS(RANGE(" \n"))
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val BEGIN: Rexp = "{"
val END: Rexp = "}"
val COMMENT = SEQS("/*", NOT(SEQS(STAR(ALLC), "*/", STAR(ALLC))), "*/")

// tokens for classifying the strings that have been recognised
abstract class Token
case object T_WHITESPACE extends Token
case object T_COMMENT extends Token
case object T_SEMI extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case object T_BEGIN extends Token
case object T_END extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(s: String) extends Token
case class T_KWD(s: String) extends Token

val lexing_rules: List[Rule[Token]] = 
  List((KEYWORD, (s) => T_KWD(s.mkString)),
       (ID, (s) => T_ID(s.mkString)),
       (OP, (s) => T_OP(s.mkString)),
       (NUM, (s) => T_NUM(s.mkString)),
       (SEMI, (s) => T_SEMI),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (BEGIN, (s) => T_BEGIN),
       (END, (s) => T_END),
       (WHITESPACE, (s) => T_WHITESPACE),
       (COMMENT, (s) => T_COMMENT))

// the tokenizer
val Tok = Tokenizer(lexing_rules, List(T_WHITESPACE, T_COMMENT))
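
// a usage sketch, commented out because it assumes the Tokenizer from
// matcher.scala also offers a fromString method (only fromFile is used below):
//
//   Tok.fromString("if x < 3 then skip else x := x + 1")
//
// should yield the token list
//   List(T_KWD("if"), T_ID("x"), T_OP("<"), T_NUM("3"), T_KWD("then"),
//        T_KWD("skip"), T_KWD("else"), T_ID("x"), T_OP(":="),
//        T_ID("x"), T_OP("+"), T_NUM("1"))
// with whitespace and comments already filtered out.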

// the abstract syntax trees
abstract class Stmt
abstract class AExp
abstract class BExp 
type Block = List[Stmt]
case object Skip extends Stmt
case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt
case class While(b: BExp, bl: Block) extends Stmt
case class Assign(s: String, a: AExp) extends Stmt
case class Write(s: String) extends Stmt

case class Var(s: String) extends AExp
case class Num(i: Int) extends AExp
case class Aop(o: String, a1: AExp, a2: AExp) extends AExp

case object True extends BExp
case object False extends BExp
case class Bop(o: String, a1: AExp, a2: AExp) extends BExp
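
// a concrete tree for illustration only: the statement  x := x + 1
// is represented as
val example_stmt: Stmt = Assign("x", Aop("+", Var("x"), Num(1)))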

// atomic parsers
case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t::ts if (t == tok) => Set((t, ts)) 
    case _ => Set ()
  }
}
implicit def token2tparser(t: Token) = TokParser(t)

case object NumParser extends Parser[List[Token], Int] {
  def parse(ts: List[Token]) = ts match {
    case T_NUM(s)::ts => Set((s.toInt, ts)) 
    case _ => Set ()
  }
}

case object IdParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = ts match {
    case T_ID(s)::ts => Set((s, ts)) 
    case _ => Set ()
  }
}
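
// for example, NumParser consumes a single T_NUM token and returns the
// corresponding Int, leaving the rest of the token list untouched:
//   NumParser.parse(List(T_NUM("42"), T_SEMI)) == Set((42, List(T_SEMI)))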


// arithmetic expressions
lazy val AExp: Parser[List[Token], AExp] = 
  (T ~ T_OP("+") ~ AExp) ==> { case ((x, y), z) => Aop("+", x, z): AExp } ||
  (T ~ T_OP("-") ~ AExp) ==> { case ((x, y), z) => Aop("-", x, z): AExp } || T  
lazy val T: Parser[List[Token], AExp] = 
  (F ~ T_OP("*") ~ T) ==> { case ((x, y), z) => Aop("*", x, z): AExp } || F
lazy val F: Parser[List[Token], AExp] = 
  (T_LPAREN ~> AExp <~ T_RPAREN) || 
  IdParser ==> Var || 
  NumParser ==> Num
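
// the three grammar levels (AExp, T, F) encode the usual precedences:
// * binds tighter than + and -, and both levels associate to the right, so
//   2 + 3 * 4  is parsed as  Aop("+", Num(2), Aop("*", Num(3), Num(4)))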

// boolean expressions
lazy val BExp: Parser[List[Token], BExp] = 
  (T_KWD("true") ==> ((_) => True: BExp)) || 
  (T_KWD("false") ==> ((_) => False: BExp)) ||
  (T_LPAREN ~> BExp <~ T_RPAREN) ||
  (AExp ~ T_OP("=") ~ AExp) ==> { case ((x, y), z) => Bop("=", x, z): BExp } || 
  (AExp ~ T_OP("!=") ~ AExp) ==> { case ((x, y), z) => Bop("!=", x, z): BExp } || 
  (AExp ~ T_OP("<") ~ AExp) ==> { case ((x, y), z) => Bop("<", x, z): BExp } || 
  (AExp ~ T_OP(">") ~ AExp) ==> { case ((x, y), z) => Bop("<", z, x): BExp } 

lazy val Stmt: Parser[List[Token], Stmt] =
  (T_KWD("skip") ==> ((_) => Skip: Stmt)) ||
  (IdParser ~ T_OP(":=") ~ AExp) ==> { case ((x, y), z) => Assign(x, z): Stmt } ||
  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Block ~ T_KWD("else") ~ Block) ==>
    { case (((((x,y),z),u),v),w) => If(y, u, w): Stmt } ||
  (T_KWD("while") ~ BExp ~ T_KWD("do") ~ Block) ==> { case (((x, y), z), w) => While(y, w) } || 
  (T_KWD("write") ~ IdParser) ==> { case (x, y) => Write(y) } 

lazy val Stmts: Parser[List[Token], Block] =
  (Stmt ~ T_SEMI ~ Stmts) ==> { case ((x, y), z) => x :: z : Block } ||
  (Stmt ==> ((s) => List(s) : Block))

lazy val Block: Parser[List[Token], Block] =
  (T_BEGIN ~> Stmts <~ T_END) || 
  (Stmt ==> ((s) => List(s)))
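
// a parsing sketch: the program
//   x := 5; while x > 0 do { write x; x := x - 1 }
// should correspond, via Stmts.parse_single on its token list, to the block
//   List(Assign("x", Num(5)),
//        While(Bop("<", Num(0), Var("x")),
//              List(Write("x"), Assign("x", Aop("-", Var("x"), Num(1))))))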

// compiler targeting the JVM: boilerplate for the generated Jasmin
// assembler file (class header, a write method and the main method)
val beginning = """
.class public examples/HelloWorld
.super java/lang/Object

.method public <init>()V
   aload_0
   invokenonvirtual java/lang/Object/<init>()V
   return
.end method

.method public static write(I)V 
    .limit locals 5 
    .limit stack 5 
    iload 0 
    getstatic java/lang/System/out Ljava/io/PrintStream; 
    swap 
    invokevirtual java/io/PrintStream/println(I)V 
    return 
.end method


.method public static main([Ljava/lang/String;)V
   .limit locals 200
   .limit stack 200

"""

val ending = """

   return

.end method
"""

// for generating new labels
var counter = -1

def Fresh(x: String) = {
  counter += 1
  x ++ "_" ++ counter.toString()
}
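
// e.g. the first call Fresh("If_else") returns "If_else_0", the next call
// (with any prefix) ends in "_1", and so on, so generated labels are unique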

type Env = Map[String, String]

def compile_aexp(a: AExp, env : Env) : List[String] = a match {
  case Num(i) => List("ldc " + i.toString + "\n")
  case Var(s) => List("iload " + env(s) + "\n")
  case Aop("+", a1, a2) => compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("iadd\n")
  case Aop("-", a1, a2) => compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("isub\n")
  case Aop("*", a1, a2) => compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("imul\n")
}
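
// for instance, compiling  1 + 2 * x  (with x stored in local variable 0)
// produces the stack-machine code
//   compile_aexp(Aop("+", Num(1), Aop("*", Num(2), Var("x"))), Map("x" -> "0"))
//   == List("ldc 1\n", "ldc 2\n", "iload 0\n", "imul\n", "iadd\n")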

def compile_bexp(b: BExp, env : Env, jmp: String) : List[String] = b match {
  case True => Nil
  case False => List("goto " + jmp + "\n")
  case Bop("=", a1, a2) => 
    compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("if_icmpne " + jmp + "\n")
  case Bop("!=", a1, a2) => 
    compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("if_icmpeq " + jmp + "\n")
  case Bop("<", a1, a2) => 
    compile_aexp(a1, env) ++ compile_aexp(a2, env) ++ List("if_icmpge " + jmp + "\n")
}
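
// compile_bexp emits code that falls through when the boolean is true and
// jumps to the label jmp when it is false; e.g.
//   compile_bexp(Bop("<", Var("x"), Num(0)), Map("x" -> "0"), "Loop_end_0")
//   == List("iload 0\n", "ldc 0\n", "if_icmpge Loop_end_0\n")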


def compile_stmt(s: Stmt, env: Env) : (List[String], Env) = s match {
  case Skip => (Nil, env)
  case Assign(x, a) => {
    val index = if (env.isDefinedAt(x)) env(x) else env.keys.size.toString
    (compile_aexp(a, env) ++ List("istore " + index + "\n"), env + (x -> index))
  } 
  case If(b, bl1, bl2) => {
    val if_else = Fresh("If_else")
    val if_end = Fresh("If_end")
    val (instrs1, env1) = compile_bl(bl1, env)
    val (instrs2, env2) = compile_bl(bl2, env1)
    (compile_bexp(b, env, if_else) ++
     instrs1 ++
     List("goto " + if_end + "\n") ++
     List("\n" + if_else + ":\n\n") ++
     instrs2 ++
     List("\n" + if_end + ":\n\n"), env2)
  }
  case While(b, bl) => {
    val loop_begin = Fresh("Loop_begin")
    val loop_end = Fresh("Loop_end")
    val (instrs1, env1) = compile_bl(bl, env)
    (List("\n" + loop_begin + ":\n\n") ++
     compile_bexp(b, env, loop_end) ++
     instrs1 ++
     List("goto " + loop_begin + "\n") ++
     List("\n" + loop_end + ":\n\n"), env1)
  }
  case Write(x) => 
    (List("iload " + env(x) + "\n" + "invokestatic examples/HelloWorld/write(I)V\n"), env)
}
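
// note on the environment: Assign allocates a fresh JVM local-variable index
// (the current size of the map) the first time a variable is assigned and
// re-uses that index afterwards; the updated map is threaded through the
// rest of the compilation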

def compile_bl(bl: Block, env: Env) : (List[String], Env) = bl match {
  case Nil => (Nil, env)
  case s::bl => {
    val (instrs1, env1) = compile_stmt(s, env)
    val (instrs2, env2) = compile_bl(bl, env1)
    (instrs1 ++ instrs2, env2)
  }
}

def compile(name: String) : String = {
  val tks = Tok.fromFile(name)
  val ast = Stmts.parse_single(tks)
  val instructions = compile_bl(ast, Map.empty)._1
  beginning ++ instructions.mkString ++ ending
}
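
// the result is Jasmin assembler source rather than an executable; a possible
// workflow (assumption: the Jasmin assembler jar is available and the output
// has been written to a file loops.j):
//
//   java -jar jasmin.jar loops.j    // assembles the class examples/HelloWorld
//   java examples/HelloWorld        // runs it (class file must be on the classpath)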


//examples

println(compile("loops.while"))
//println(compile("fib.while"))