//:load matcher.scala
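// (assumption: besides matcher.scala, which presumably provides RANGE, STAR,
//  ALT, PLUS, SEQ, ALTS, Rexp, Rule and Tokenizer, a parser-combinator file
//  defining Parser, ~, ==>, ||, ~>, <~ and parse_all also needs to be loaded)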
// some regular expressions
val SYM = RANGE("""ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_""")
val DIGIT = RANGE("0123456789")
val ID = SEQ(SYM, STAR(ALT(SYM, DIGIT)))
val NUM = PLUS(DIGIT)
val KEYWORD = ALTS("skip", "while", "do", "if", "then", "else", "begin", "end", "true", "false")
val SEMI: Rexp = ";"
val OP: Rexp = ALTS(":=", "=", "+", "-", "*")
val WHITESPACE = PLUS(RANGE(" \n"))
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val BEGIN: Rexp = "{"
val END: Rexp = "}"
// for classifying the strings that have been recognised
abstract class Token
case object T_WHITESPACE extends Token
case object T_SEMI extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case object T_BEGIN extends Token
case object T_END extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(s: String) extends Token
case class T_KWD(s: String) extends Token
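// the lexing rules; the keyword rule is listed before the identifier rule so
// that, assuming the tokenizer tries the rules in the given order, "if",
// "while" etc. are lexed as keywords rather than as identifiers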
val lexing_rules: List[Rule[Token]] =
  List((KEYWORD, (s) => T_KWD(s.mkString)),
       (ID, (s) => T_ID(s.mkString)),
       (OP, (s) => T_OP(s.mkString)),
       (NUM, (s) => T_NUM(s.mkString)),
       (SEMI, (s) => T_SEMI),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (BEGIN, (s) => T_BEGIN),
       (END, (s) => T_END),
       (WHITESPACE, (s) => T_WHITESPACE))
// the tokenizer
val Tok = Tokenizer(lexing_rules, List(T_WHITESPACE))
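// e.g. Tok.fromString("x := 5") should yield the tokens T_ID(x), T_OP(:=),
// T_NUM(5), with the whitespace tokens already filtered out (assuming the
// second argument of Tokenizer lists the token classes to be discarded)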
// the abstract syntax trees
abstract class Stmt
abstract class AExp
abstract class BExp
type Block = List[Stmt]
case object Skip extends Stmt
case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt
case class While(b: BExp, bl: Block) extends Stmt
case class Assign(s: String, a: AExp) extends Stmt
case class Var(s: String) extends AExp
case class Num(i: Int) extends AExp
case class Aop(o: String, a1: AExp, a2: AExp) extends AExp
case object True extends BExp
case object False extends BExp
case class Bop(o: String, a1: AExp, a2: AExp) extends BExp
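// for example, the statement  x := 5 + y  corresponds to the tree
//   Assign("x", Aop("+", Num(5), Var("y")))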
case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t::ts if (t == tok) => Set((t, ts))
    case _ => Set()
  }
}
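// the implicit conversion below lets a plain Token be written directly in the
// grammar rules and be treated as a parser recognising exactly that token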
import scala.language.implicitConversions
implicit def token2tparser(t: Token): Parser[List[Token], Token] = TokParser(t)
case object NumParser extends Parser[List[Token], Int] {
  def parse(ts: List[Token]) = ts match {
    case T_NUM(s)::ts => Set((s.toInt, ts))
    case _ => Set()
  }
}
case object IdParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = ts match {
    case T_ID(s)::ts => Set((s, ts))
    case _ => Set()
  }
}
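// the grammar for arithmetic expressions: the AExp/T/F layering gives * a
// higher binding power than + and -, and keeping the recursive calls on the
// right avoids left-recursion (at the price of + and - associating to the right)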
lazy val AExp: Parser[List[Token], AExp] =
  (T ~ T_OP("+") ~ AExp) ==> { case ((x, y), z) => Aop("+", x, z): AExp } ||
  (T ~ T_OP("-") ~ AExp) ==> { case ((x, y), z) => Aop("-", x, z): AExp } || T
lazy val T: Parser[List[Token], AExp] =
  (F ~ T_OP("*") ~ T) ==> { case ((x, y), z) => Aop("*", x, z): AExp } || F
lazy val F: Parser[List[Token], AExp] =
  (T_LPAREN ~> AExp <~ T_RPAREN) ||
  IdParser ==> ((s) => Var(s)) ||
  NumParser ==> ((i) => Num(i))
lazy val BExp: Parser[List[Token], BExp] =
  (AExp ~ T_OP("=") ~ AExp) ==> { case ((x, y), z) => Bop("=", x, z): BExp } ||
  (T_KWD("true") ==> ((_) => True: BExp)) ||
  (T_KWD("false") ==> ((_) => False: BExp))
lazy val Stmt: Parser[List[Token], Stmt] =
  (T_KWD("skip") ==> ((_) => Skip: Stmt)) ||
  (IdParser ~ T_OP(":=") ~ AExp) ==> { case ((x, y), z) => Assign(x, z): Stmt } ||
  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Block ~ T_KWD("else") ~ Block) ==>
    { case (((((x, y), z), u), v), w) => If(y, u, w): Stmt } ||
  // while-loops, completing the While constructor and the while/do keywords above
  (T_KWD("while") ~ BExp ~ T_KWD("do") ~ Block) ==>
    { case (((x, y), z), w) => While(y, w): Stmt }
lazy val Stmts: Parser[List[Token], Block] =
  (Stmt ~ T_SEMI ~ Stmts) ==> { case ((x, y), z) => (x :: z): Block } ||
  (Stmt ==> ((s) => List(s): Block))
lazy val Block: Parser[List[Token], Block] =
  (T_BEGIN ~> Stmts <~ T_END) ||
  (Stmt ==> ((s) => List(s)))
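// some example strings: each is tokenized, parsed and printed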
val p1 = "x := 5"
val p1_toks = Tok.fromString(p1)
val p1_ast = Block.parse_all(p1_toks)
println(p1_toks)
println(p1_ast)
val p2 = "5 = 6"
val p2_toks = Tok.fromString(p2)
val p2_ast = BExp.parse_all(p2_toks)
println(p2_toks)
println(p2_ast)
val p2a = "true"
val p2a_toks = Tok.fromString(p2a)
val p2a_ast = BExp.parse_all(p2a_toks)
println(p2a_toks)
println(p2a_ast)
val p3 = "if true then skip else skip"
val p3_toks = Tok.fromString(p3)
val p3_ast = Stmt.parse_all(p3_toks)
println(p3_toks)
println(p3_ast)
val p3a = "if true then x := 5 else x := 10"
val p3a_toks = Tok.fromString(p3a)
val p3a_ast = Stmt.parse_all(p3a_toks)
println(p3a_toks)
println(p3a_ast)
val p3b = "if false then x := 5 else x := 10"
val p3b_toks = Tok.fromString(p3b)
val p3b_ast = Stmt.parse_all(p3b_toks)
println(p3b_toks)
println(p3b_ast)
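// an interpreter for the language: an environment maps variable names to
// their current integer values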
type Env = Map[String, Int]
def eval_bexp(b: BExp, env: Env) : Boolean = b match {
  case True => true
  case False => false
  case Bop("=", a1, a2) => eval_aexp(a1, env) == eval_aexp(a2, env)
}
def eval_aexp(a: AExp, env: Env) : Int = a match {
  case Num(i) => i
  case Var(s) => env(s)
  case Aop("+", a1, a2) => eval_aexp(a1, env) + eval_aexp(a2, env)
  case Aop("-", a1, a2) => eval_aexp(a1, env) - eval_aexp(a2, env)
  case Aop("*", a1, a2) => eval_aexp(a1, env) * eval_aexp(a2, env)
}
def eval_stmt(s: Stmt, env: Env) : Env = s match {
  case Skip => env
  case Assign(x, a) => env + (x -> eval_aexp(a, env))
  case If(b, bl1, bl2) => if (eval_bexp(b, env)) eval_bl(bl1, env) else eval_bl(bl2, env)
  // while-loops: re-evaluate the loop as long as the test is true
  case While(b, bl) =>
    if (eval_bexp(b, env)) eval_stmt(While(b, bl), eval_bl(bl, env)) else env
}
def eval_bl(bl: Block, env: Env) : Env = bl match {
  case Nil => env
  case s::bl => eval_bl(bl, eval_stmt(s, env))
}
//println(eval_stmt(p3a_ast.head, Map()))
//println(eval_stmt(p3b_ast.head, Map()))
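// a slightly bigger end-to-end test: lex and parse a whole program as a
// statement sequence (p4 and the variable names below are new to this example)
val p4 = "x := 3; y := x * 4; if y = 12 then z := 1 else z := 0"
val p4_toks = Tok.fromString(p4)
val p4_ast = Stmts.parse_all(p4_toks)
println(p4_toks)
println(p4_ast)
// expected final environment: Map(x -> 3, y -> 12, z -> 1)
//println(eval_bl(p4_ast.head, Map()))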