diff -r cc3f7908b942 -r e6868bd2942b while.scala
--- a/while.scala	Wed Nov 21 07:28:28 2012 +0000
+++ b/while.scala	Wed Nov 21 09:04:11 2012 +0000
@@ -1,12 +1,13 @@
 //:load matcher.scala
+//:load parser3.scala
 
 // some regular expressions
-val SYM = RANGE("""ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvwxyz_""")
+val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVXYZabcdefghijklmnopqrstuvwxyz_")
 val DIGIT = RANGE("0123456789")
 val ID = SEQ(SYM, STAR(ALT(SYM, DIGIT)))
 val NUM = PLUS(DIGIT)
-val KEYWORD = ALTS("skip", "while", "do", "if", "then", "else", "begin", "end", "true", "false")
+val KEYWORD = ALTS("skip", "while", "do", "if", "then", "else", "true", "false")
 val SEMI: Rexp = ";"
 val OP: Rexp = ALTS(":=", "=", "+", "-", "*")
 val WHITESPACE = PLUS(RANGE(" \n"))
@@ -15,7 +16,7 @@
 val BEGIN: Rexp = "{"
 val END: Rexp = "}"
 
-// for classifying the strings that have been recognised
+// tokens for classifying the strings that have been recognised
 abstract class Token
 case object T_WHITESPACE extends Token
 case object T_SEMI extends Token
@@ -28,7 +29,6 @@
 case class T_NUM(s: String) extends Token
 case class T_KWD(s: String) extends Token
 
-
 val lexing_rules: List[Rule[Token]] =
   List((KEYWORD, (s) => T_KWD(s.mkString)),
        (ID, (s) => T_ID(s.mkString)),
@@ -53,14 +53,16 @@
 case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt
 case class While(b: BExp, bl: Block) extends Stmt
 case class Assign(s: String, a: AExp) extends Stmt
+
 case class Var(s: String) extends AExp
 case class Num(i: Int) extends AExp
 case class Aop(o: String, a1: AExp, a2: AExp) extends AExp
+
 case object True extends BExp
 case object False extends BExp
 case class Bop(o: String, a1: AExp, a2: AExp) extends BExp
 
-
+// atomic parsers
 case class TokParser(tok: Token) extends Parser[List[Token], Token] {
   def parse(ts: List[Token]) = ts match {
     case t::ts if (t == tok) => Set((t, ts))
@@ -91,12 +93,12 @@
   (F ~ T_OP("*") ~ T) ==> { case ((x, y), z) => Aop("*", x, z): AExp } || F
 lazy val F: Parser[List[Token], AExp] =
   (T_LPAREN ~> AExp <~ T_RPAREN) ||
-  IdParser ==> ((s) => Var(s)) ||
-  NumParser ==> ((i) => Num(i))
+  IdParser ==> Var ||
+  NumParser ==> Num
 
 lazy val BExp: Parser[List[Token], BExp] =
   (AExp ~ T_OP("=") ~ AExp) ==> { case ((x, y), z) => Bop("=", x, z): BExp } ||
-  (T_KWD("true") ==> ((_) => True: BExp)) ||
+  (T_KWD("true") ==> ((_) => True)) ||
   (T_KWD("false") ==> ((_) => False: BExp))
 
 lazy val Stmt: Parser[List[Token], Stmt] =
@@ -113,43 +115,58 @@
   (T_BEGIN ~> Stmts <~ T_END) ||
   (Stmt ==> ((s) => List(s)))
 
+
+// examples
 val p1 = "x := 5"
 val p1_toks = Tok.fromString(p1)
 val p1_ast = Block.parse_all(p1_toks)
 println(p1_toks)
 println(p1_ast)
 
+val p1a = "{ x := 5; y := 8}"
+val p1a_toks = Tok.fromString(p1a)
+val p1a_ast = Block.parse_all(p1a_toks)
+println(p1a_ast)
+
 val p2 = "5 = 6"
 val p2_toks = Tok.fromString(p2)
 val p2_ast = BExp.parse_all(p2_toks)
-println(p2_toks)
 println(p2_ast)
 
 val p2a = "true"
 val p2a_toks = Tok.fromString(p2a)
 val p2a_ast = BExp.parse_all(p2a_toks)
-println(p2a_toks)
 println(p2a_ast)
 
 val p3 = "if true then skip else skip"
 val p3_toks = Tok.fromString(p3)
 val p3_ast = Stmt.parse_all(p3_toks)
-println(p3_toks)
 println(p3_ast)
 
val p3a = "if true then x := 5 else x := 10"
 val p3a_toks = Tok.fromString(p3a)
 val p3a_ast = Stmt.parse_all(p3a_toks)
-println(p3a_toks)
 println(p3a_ast)
 
 val p3b = "if false then x := 5 else x := 10"
 val p3b_toks = Tok.fromString(p3b)
 val p3b_ast = Stmt.parse_all(p3b_toks)
-println(p3b_toks)
 println(p3b_ast)
 
+val p4 = """{ x := 5;
+             y := 3;
+             r := 0;
+             while y = 0 do {
+                r := r + x;
+                y := y - 1
+             }
+           }"""
+val p4_toks = Tok.fromString(p4)
+val p4_ast = Block.parse_all(p4_toks)
+println(p4_ast)
 
+
+// interpreter
 type Env = Map[String, Int]
 
 def eval_bexp(b: BExp, env: Env) : Boolean = b match {
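For reference, a minimal sketch of how the interpreter section that begins here could be completed over the AST above. This is not taken from the changeset; it assumes a Skip statement constructor and a Block that is a list of statements (neither is shown in the hunks above), and that the file's eval_bexp covers the True, False and Bop("=", _, _) cases. The names eval_aexp, eval_stmt and eval_bl are illustrative only.

// Illustrative sketch only -- not part of the changeset above.
// Assumes: case object Skip extends Stmt, type Block = List[Stmt],
// and eval_bexp handling True, False and Bop("=", _, _).
def eval_aexp(a: AExp, env: Env) : Int = a match {
  case Num(i) => i
  case Var(s) => env(s)
  case Aop("+", a1, a2) => eval_aexp(a1, env) + eval_aexp(a2, env)
  case Aop("-", a1, a2) => eval_aexp(a1, env) - eval_aexp(a2, env)
  case Aop("*", a1, a2) => eval_aexp(a1, env) * eval_aexp(a2, env)
}

def eval_stmt(s: Stmt, env: Env) : Env = s match {
  case Skip => env
  case Assign(x, a) => env + (x -> eval_aexp(a, env))
  case If(b, bl1, bl2) =>
    if (eval_bexp(b, env)) eval_bl(bl1, env) else eval_bl(bl2, env)
  case While(b, bl) =>
    if (eval_bexp(b, env)) eval_stmt(While(b, bl), eval_bl(bl, env)) else env
}

def eval_bl(bl: Block, env: Env) : Env = bl match {
  case Nil => env
  case s::rest => eval_bl(rest, eval_stmt(s, env))
}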