diff -r 54a483a33763 -r 02ef5c3abc51 solution/cw4/parser.sc
--- a/solution/cw4/parser.sc	Fri Oct 28 09:08:13 2022 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,162 +0,0 @@
-// CW3
-
-import $file.lexer
-import lexer._
-
-
-case class ~[+A, +B](_1: A, _2: B)
-type IsSeq[A] = A => Seq[_]
-
-abstract class Parser[I : IsSeq, T] {
-  def parse(ts: I): Set[(T, I)]
-
-  def parse_all(ts: I) : Set[T] =
-    for ((head, tail) <- parse(ts); if tail.isEmpty) yield head
-}
-
-class SeqParser[I : IsSeq, T, S](p: => Parser[I, T], q: => Parser[I, S]) extends Parser[I, ~[T, S]] {
-  def parse(sb: I) =
-    for ((head1, tail1) <- p.parse(sb);
-         (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2)
-}
-
-class AltParser[I : IsSeq, T](p: => Parser[I, T], q: => Parser[I, T]) extends Parser[I, T] {
-  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)
-}
-
-class FunParser[I : IsSeq, T, S](p: => Parser[I, T], f: T => S) extends Parser[I, S] {
-  def parse(sb: I) =
-    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
-}
-
-// New parser that takes as input a list of tokens
-case class TokenListParser(ts: List[Token]) extends Parser[List[Token], List[Token]] {
-    def parse(tsb: List[Token]) = {
-        val (prefix, suffix) = tsb.splitAt(ts.length)
-        if (prefix == ts) Set((prefix, suffix)) else Set()
-    }
-}
-
-// Implicit definitions to go from a token
-// or a list of tokens to a TokenListParser
-implicit def token2parser(t: Token) = TokenListParser(List(t))
-implicit def tokenList2parser(ts: List[Token]) = TokenListParser(ts)
-
-implicit def ParserOps[I : IsSeq, T](p: Parser[I, T]) = new {
-  def || (q : => Parser[I, T]) = new AltParser[I, T](p, q)
-  def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f)
-  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
-}
-
-implicit def TokenOps(t: Token) = new {
-    def || (q : => Parser[List[Token], List[Token]]) = new AltParser[List[Token], List[Token]](List(t), q)
-    def || (qs : List[Token]) = new AltParser[List[Token], List[Token]](List(t), qs)
-    def ==>[S] (f: => List[Token] => S) = new FunParser[List[Token], List[Token], S](List(t), f)
-    def ~[S](q : => Parser[List[Token], S]) =
-        new SeqParser[List[Token], List[Token], S](List(t), q)
-    def ~ (qs : List[Token]) =
-        new SeqParser[List[Token], List[Token], List[Token]](List(t), qs)
-}
-
-implicit def TokenListOps(ts: List[Token]) = new {
-    def || (q : => Parser[List[Token], List[Token]]) = new AltParser[List[Token], List[Token]](ts, q)
-    def || (qs : List[Token]) = new AltParser[List[Token], List[Token]](ts, qs)
-    def ==>[S] (f: => List[Token] => S) = new FunParser[List[Token], List[Token], S](ts, f)
-    def ~[S](q : => Parser[List[Token], S]) =
-        new SeqParser[List[Token], List[Token], S](ts, q)
-    def ~ (qs : List[Token]) =
-        new SeqParser[List[Token], List[Token], List[Token]](ts, qs)
-}
-
-// Abstract Syntax Trees
-abstract class Stmt
-abstract class AExp
-abstract class BExp
-
-type Block = List[Stmt]
-
-case object Skip extends Stmt
-case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt
-case class While(b: BExp, bl: Block) extends Stmt
-case class Assign(s: String, a: AExp) extends Stmt
-case class Read(s: String) extends Stmt
-case class WriteId(s: String) extends Stmt  // for printing values of variables
-case class WriteString(s: String) extends Stmt  // for printing words
-case class For(counter: String, lower: AExp, upper: AExp, code: Block) extends Stmt
-
-
-case class Var(s: String) extends AExp
-case class Num(i: Int) extends AExp
-case class Aop(o: String, a1: AExp, a2: AExp) extends AExp
-
-case object True extends BExp
-case object False extends BExp
-case class Bop(o: String, a1: AExp, a2: AExp) extends BExp
-case class And(b1: BExp, b2: BExp) extends BExp
-case class Or(b1: BExp, b2: BExp) extends BExp
-
-case class IdParser() extends Parser[List[Token], String] {
-    def parse(tsb: List[Token]) = tsb match {
-        case T_ID(id) :: rest => Set((id, rest))
-        case _ => Set()
-    }
-}
-
-case class NumParser() extends Parser[List[Token], Int] {
-    def parse(tsb: List[Token]) = tsb match {
-        case T_NUM(n) :: rest => Set((n, rest))
-        case _ => Set()
-    }
-}
-
-case class StringParser() extends Parser[List[Token], String] {
-    def parse(tsb: List[Token]) = tsb match {
-        case T_STRING(s) :: rest => Set((s, rest))
-        case _ => Set()
-    }
-}
-
-// WHILE Language Parsing
-lazy val AExp: Parser[List[Token], AExp] =
-  (Te ~ T_OP("+") ~ AExp) ==> { case x ~ _ ~ z => Aop("+", x, z): AExp } ||
-  (Te ~ T_OP("-") ~ AExp) ==> { case x ~ _ ~ z => Aop("-", x, z): AExp } || Te
-lazy val Te: Parser[List[Token], AExp] =
-  (Fa ~ T_OP("*") ~ Te) ==> { case x ~ _ ~ z => Aop("*", x, z): AExp } ||
-  (Fa ~ T_OP("/") ~ Te) ==> { case x ~ _ ~ z => Aop("/", x, z): AExp } ||
-  (Fa ~ T_OP("%") ~ Te) ==> { case x ~ _ ~ z => Aop("%", x, z): AExp } || Fa
-lazy val Fa: Parser[List[Token], AExp] =
-   (T_PAREN("(") ~ AExp ~ T_PAREN(")")) ==> { case _ ~ y ~ _ => y } ||
-   IdParser() ==> Var ||
-   NumParser() ==> Num
-
-lazy val BExp: Parser[List[Token], BExp] =
-   (AExp ~ T_OP("==") ~ AExp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } ||
-   (AExp ~ T_OP("!=") ~ AExp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } ||
-   (AExp ~ T_OP("<") ~ AExp) ==> { case x ~ _ ~ z => Bop("<", x, z): BExp } ||
-   (AExp ~ T_OP(">") ~ AExp) ==> { case x ~ _ ~ z => Bop(">", x, z): BExp } ||
-   (T_PAREN("(") ~ BExp ~ List(T_PAREN(")"), T_OP("&&")) ~ BExp) ==> { case _ ~ y ~ _ ~ v => And(y, v): BExp } ||
-   (T_PAREN("(") ~ BExp ~ List(T_PAREN(")"), T_OP("||")) ~ BExp) ==> { case _ ~ y ~ _ ~ v => Or(y, v): BExp } ||
-   (T_KEYWORD("true") ==> (_ => True: BExp )) ||
-   (T_KEYWORD("false") ==> (_ => False: BExp )) ||
-   (T_PAREN("(") ~ BExp ~ T_PAREN(")")) ==> { case _ ~ x ~ _ => x }
-
-lazy val Stmt: Parser[List[Token], Stmt] =
-    T_KEYWORD("skip") ==> (_ => Skip: Stmt) ||
-    (IdParser() ~ T_OP(":=") ~ AExp) ==> { case id ~ _ ~ z => Assign(id, z): Stmt } ||
-    (T_KEYWORD("if") ~ BExp ~ T_KEYWORD("then") ~ Block ~ T_KEYWORD("else") ~ Block) ==> { case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w): Stmt } ||
-    (T_KEYWORD("while") ~ BExp ~ T_KEYWORD("do") ~ Block) ==> { case _ ~ y ~ _ ~ w => While(y, w) : Stmt } ||
-    (T_KEYWORD("read") ~ IdParser()) ==> { case _ ~ id => Read(id): Stmt} ||
-    (T_KEYWORD("write") ~ IdParser()) ==> { case _ ~ id => WriteId(id): Stmt} ||
-    (T_KEYWORD("write") ~ StringParser()) ==> { case _ ~ s => WriteString(s): Stmt} ||
-    (T_KEYWORD("for") ~ IdParser() ~ T_OP(":=") ~ AExp ~ T_KEYWORD("upto") ~ AExp ~ T_KEYWORD("do") ~ Block) ==> {
-        case _ ~ id ~ _ ~ lower ~ _ ~ upper ~ _ ~ blck => For(id, lower, upper, blck): Stmt
-    }
-
-lazy val Stmts: Parser[List[Token], Block] =
-    (Stmt ~ T_SEMI ~ Stmts) ==> { case x ~ _ ~ z => x :: z : Block } ||
-    (Stmt ==> (s => List(s) : Block))
-
-lazy val Block: Parser[List[Token], Block] =
-    (T_PAREN("{") ~ Stmts ~ T_PAREN("}")) ==> { case x ~ y ~ z => y} ||
-    (Stmt ==> (s => List(s)))
-
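
For reference, a minimal sketch of how the combinators removed above would be driven from an Ammonite script; it assumes the tokenise function exported by lexer.sc has type String => List[Token], and the WHILE program is made up for illustration:

// Hypothetical driver script, not part of this changeset.
import $file.lexer, $file.parser
import lexer._, parser._

// A made-up WHILE program exercising read, if/else, write and skip.
val program = "read n; if n > 0 then { write n } else { skip }"

// parse_all keeps only the parses that consume the whole token list,
// so the result is a Set[Block]; it is empty if the program does not parse.
val asts = Stmts.parse_all(tokenise(program))
println(asts)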