diff -r d59bcff69998 -r b5b1bc0a603b solution/cw5/fun_parser.sc
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/solution/cw5/fun_parser.sc	Wed Dec 15 19:00:01 2021 +0000
@@ -0,0 +1,217 @@
+// A parser for the Fun language
+//================================
+//
+// call with
+//
+//   amm fun_parser.sc fact.fun
+//
+//   amm fun_parser.sc defs.fun
+//
+// this will generate a parse-tree from a list
+// of tokens
+
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+
+import $file.fun_tokens, fun_tokens._
+
+
+// Parser combinators
+//   (the type parameter I needs to be of Seq-type)
+//
+abstract class Parser[I, T](implicit ev: I => Seq[_]) {
+  def parse(ts: I): Set[(T, I)]
+
+  // returns the first parse result that consumed the whole input;
+  // otherwise prints a parse error and exits
+  def parse_single(ts: I) : T =
+    parse(ts).partition(_._2.isEmpty) match {
+      case (good, _) if !good.isEmpty => good.head._1
+      case (good, err) if err.isEmpty => {
+        println(s"Parse Error\n $good \n $err") ; sys.exit(-1) }
+      case (_, err) => {
+        println(s"Parse Error\n${err.minBy(_._2.length)}") ; sys.exit(-1) }
+    }
+}
+
+// convenience for writing grammar rules
+case class ~[+A, +B](_1: A, _2: B)
+
+// sequence parser: p followed by q
+class SeqParser[I, T, S](p: => Parser[I, T],
+                         q: => Parser[I, S])(implicit ev: I => Seq[_]) extends Parser[I, ~[T, S]] {
+  def parse(sb: I) =
+    for ((head1, tail1) <- p.parse(sb);
+         (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2)
+}
+
+// alternative parser: p or q
+class AltParser[I, T](p: => Parser[I, T],
+                      q: => Parser[I, T])(implicit ev: I => Seq[_]) extends Parser[I, T] {
+  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)
+}
+
+// semantic-action parser: applies f to the results of p
+class FunParser[I, T, S](p: => Parser[I, T],
+                         f: T => S)(implicit ev: I => Seq[_]) extends Parser[I, S] {
+  def parse(sb: I) =
+    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
+}
+
+// convenient combinators
+implicit def ParserOps[I, T](p: Parser[I, T])(implicit ev: I => Seq[_]) = new {
+  def || (q : => Parser[I, T]) = new AltParser[I, T](p, q)
+  def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f)
+  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
+}
+
+// one or more p's separated by q's
+def ListParser[I, T, S](p: => Parser[I, T],
+                        q: => Parser[I, S])(implicit ev: I => Seq[_]): Parser[I, List[T]] = {
+  (p ==> ((s) => List(s))) ||
+  (p ~ q ~ ListParser(p, q)) ==> { case x ~ _ ~ z => x :: z : List[T] }
+}
+
+// parser for a single, given token
+case class TokParser(tok: Token) extends Parser[List[Token], Token] {
+  def parse(ts: List[Token]) = ts match {
+    case t::ts if (t == tok) => Set((t, ts))
+    case _ => Set()
+  }
+}
+
+implicit def token2tparser(t: Token) = TokParser(t)
+
+implicit def TokOps(t: Token) = new {
+  def || (q : => Parser[List[Token], Token]) = new AltParser[List[Token], Token](t, q)
+  def ==>[S] (f: => Token => S) = new FunParser[List[Token], Token, S](t, f)
+  def ~[S] (q : => Parser[List[Token], S]) = new SeqParser[List[Token], Token, S](t, q)
+}
+
+case object EmptyParser extends Parser[List[Token], String] {
+  def parse(ts: List[Token]) = Set(("", ts))
+}
+
+case object NumParser extends Parser[List[Token], Int] {
+  def parse(ts: List[Token]) = ts match {
+    case T_NUM(n)::ts => Set((n, ts))
+    case _ => Set()
+  }
+}
+
+case object FNumParser extends Parser[List[Token], Float] {
+  def parse(ts: List[Token]) = ts match {
+    case T_FNUM(x)::ts => Set((x, ts))
+    case _ => Set()
+  }
+}
+
+case object IdParser extends Parser[List[Token], String] {
+  def parse(ts: List[Token]) = ts match {
+    case T_ID(s)::ts => Set((s, ts))
+    case _ => Set()
+  }
+}
+
+case object CharConstParser extends Parser[List[Token], Int] {
+  def parse(ts: List[Token]) = ts match {
+    case T_CHR(c)::ts => Set((c, ts))
+    case _ => Set()
+  }
+}
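+
+// Illustrative sketch (not part of the original file): assuming the token
+// constructors T_NUM and T_OP from fun_tokens (as used below), the atomic
+// parsers above compose with ~, || and ==> into small grammars, for example
+//
+//   val PlusParser =
+//     (NumParser ~ T_OP("+") ~ NumParser) ==> { case x ~ _ ~ y => x + y }
+//
+//   PlusParser.parse(List(T_NUM(1), T_OP("+"), T_NUM(2)))
+//     // evaluates to Set((3, Nil)), i.e. result 3 with no tokens left over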
} +} + +case object TyParser extends Parser[List[Token], String] { + def parse(ts: List[Token]) = ts match { + case T_TY(s)::ts => Set((s, ts)) + case _ => Set () + } +} + + +// Abstract syntax trees for the Fun language +abstract class Exp +abstract class BExp +abstract class Decl + +case class Def(name: String, args: List[(String, String)], ty: String, body: Exp) extends Decl +case class Main(e: Exp) extends Decl +case class Const(name: String, v: Int) extends Decl +case class FConst(name: String, x: Float) extends Decl + +case class Call(name: String, args: List[Exp]) extends Exp +case class If(a: BExp, e1: Exp, e2: Exp) extends Exp +case class Var(s: String) extends Exp +case class Num(i: Int) extends Exp // integer numbers +case class FNum(i: Float) extends Exp // floating numbers +case class ChConst(c: Int) extends Exp // char constant +case class Aop(o: String, a1: Exp, a2: Exp) extends Exp +case class Sequence(e1: Exp, e2: Exp) extends Exp +case class Bop(o: String, a1: Exp, a2: Exp) extends BExp + + +// arithmetic expressions (there needs to be an F in the SEMICOLON case) +lazy val Exp: Parser[List[Token], Exp] = + (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp) ==> + { case _ ~ x ~ _ ~ y ~ _ ~ z => If(x, y, z): Exp } || + (F ~ T_SEMI ~ Exp) ==> { case x ~ _ ~ y => Sequence(x, y): Exp } || L +lazy val L: Parser[List[Token], Exp] = + (T ~ T_OP("+") ~ Exp) ==> { case x ~ _ ~ z => Aop("+", x, z): Exp } || + (T ~ T_OP("-") ~ Exp) ==> { case x ~ _ ~ z => Aop("-", x, z): Exp } || T +lazy val T: Parser[List[Token], Exp] = + (F ~ T_OP("*") ~ T) ==> { case x ~ _ ~ z => Aop("*", x, z): Exp } || + (F ~ T_OP("/") ~ T) ==> { case x ~ _ ~ z => Aop("/", x, z): Exp } || + (F ~ T_OP("%") ~ T) ==> { case x ~ _ ~ z => Aop("%", x, z): Exp } || F +lazy val F: Parser[List[Token], Exp] = + (IdParser ~ T_LPAREN ~ T_RPAREN) ==> + { case x ~ _ ~ _ => Call(x, Nil): Exp } || + (IdParser ~ T_LPAREN ~ T_RPAREN) ==> { case x ~ _ ~ _ => Call(x, Nil): Exp } || + (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN) ==> { case x ~ _ ~ z ~ _ => Call(x, z): Exp } || + (T_LPAREN ~ Exp ~ T_RPAREN) ==> { case _ ~ y ~ _ => y: Exp } || + IdParser ==> { case x => Var(x): Exp } || + NumParser ==> { case x => Num(x): Exp } || + CharConstParser ==> { case x => ChConst(x): Exp } || + FNumParser ==> { case x => FNum(x): Exp } + +// boolean expressions +lazy val BExp: Parser[List[Token], BExp] = + (Exp ~ T_OP("==") ~ Exp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } || + (Exp ~ T_OP("!=") ~ Exp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } || + (Exp ~ T_OP("<") ~ Exp) ==> { case x ~ _ ~ z => Bop("<", x, z): BExp } || + (Exp ~ T_OP(">") ~ Exp) ==> { case x ~ _ ~ z => Bop("<", z, x): BExp } || + (Exp ~ T_OP("<=") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", x, z): BExp } || + (Exp ~ T_OP("=>") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", z, x): BExp } || + (T_LPAREN ~ BExp ~ T_RPAREN) ==> { case _ ~ b ~ _ => b : BExp } + +lazy val Arg : Parser[List[Token], (String, String)] = + (IdParser ~ T_COLON ~ TyParser) ==> { case x ~ _ ~ ty => (x, ty) } + +lazy val Defn: Parser[List[Token], Decl] = { + (T_KWD("def") ~ IdParser ~ T_LPAREN ~ T_RPAREN ~ T_COLON ~ TyParser ~ T_OP("=") ~ Exp) ==> + { case _ ~ y ~ _ ~ _ ~ _~ ty ~ _ ~ r => Def(y, Nil, ty, r): Decl } || + (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(Arg, T_COMMA) ~ T_RPAREN ~ T_COLON ~ TyParser ~ T_OP("=") ~ Exp) ==> + { case _ ~ y ~ _ ~ w ~ _ ~ _~ ty ~ _ ~ r => Def(y, w, ty, r): Decl } +} + +lazy val Const_decl: Parser[List[Token], Decl] = + (T_KWD("val") 
+
+lazy val Const_decl: Parser[List[Token], Decl] =
+  (T_KWD("val") ~ Arg ~ T_OP("=") ~ NumParser) ==>
+    { case _ ~ x ~ _ ~ v => Const(x._1, v): Decl } ||
+  (T_KWD("val") ~ Arg ~ T_OP("=") ~ FNumParser) ==>
+    { case _ ~ x ~ _ ~ v => FConst(x._1, v): Decl }
+
+lazy val Prog: Parser[List[Token], List[Decl]] =
+  (Defn ~ T_SEMI ~ Prog) ==> { case x ~ _ ~ z => x :: z : List[Decl] } ||
+  (Const_decl ~ T_SEMI ~ Prog) ==> { case x ~ _ ~ z => x :: z : List[Decl] } ||
+  (Exp ==> ((s) => List(Main(s)) : List[Decl]))
+
+
+
+// Reading tokens and writing parse trees
+
+import ammonite.ops._
+
+def parse_tks(tks: List[Token]) : List[Decl] = {
+  //println(Prog.parse(tks))
+  Prog.parse_single(tks)
+}
+
+//@doc("Parses a file.")
+@main
+def main(fname: String) : Unit = {
+  val tks = tokenise(os.read(os.pwd / fname))
+  println(parse_tks(tks))
+}
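+
+// Illustrative sketch (not part of the original file): for an input whose
+// tokens are just T_NUM(1), T_OP("+"), T_NUM(2), the Prog parser falls
+// through to its Exp case and parse_tks should return
+//
+//   List(Main(Aop("+", Num(1), Num(2))))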