--- a/solutions/cw3/parser.sc Wed May 29 13:25:30 2024 +0100
+++ b/solutions/cw3/parser.sc Thu Sep 19 15:47:33 2024 +0100
@@ -1,42 +1,47 @@
// CW3
-import $file.lexer
-import lexer._
-
+//> using file project.scala
+//> using file lexer.sc
+import lexer.*
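+
+// the infix ~ case class packages the two results of a sequence parse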
case class ~[+A, +B](x: A, y: B)
// parser combinators
+// the input type must be reducible to a Seq (hence the type alias below)
+type IsSeq[I] = I => Seq[?]
-abstract class Parser[I, T](using is: I => Seq[_]) {
- def parse(in: I): Set[(T, I)]
+abstract class Parser[I, T](using is: IsSeq[I]) {
+  def parse(in: I): Set[(T, I)]
def parse_all(in: I) : Set[T] =
- for ((hd, tl) <- parse(in);
+ for ((hd, tl) <- parse(in);
if is(tl).isEmpty) yield hd
}
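+// Illustration (hypothetical values, not CW3 code): if some parser P gave
+//   P.parse(List(t1, t2)) == Set((r1, List(t2)), (r2, Nil))
+// then P.parse_all(List(t1, t2)) == Set(r2): only results whose
+// remainder is empty survive the is(tl).isEmpty filter.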
+
// alternative parser
-class AltParser[I, T](p: => Parser[I, T],
- q: => Parser[I, T])(using I => Seq[_]) extends Parser[I, T] {
- def parse(in: I) = p.parse(in) ++ q.parse(in)
+class AltParser[I : IsSeq, T](p: => Parser[I, T],
+ q: => Parser[I, T]) extends Parser[I, T] {
+ def parse(in: I) = p.parse(in) ++ q.parse(in)
}
// sequence parser
-class SeqParser[I, T, S](p: => Parser[I, T],
- q: => Parser[I, S])(using I => Seq[_]) extends Parser[I, ~[T, S]] {
- def parse(in: I) =
- for ((hd1, tl1) <- p.parse(in);
+class SeqParser[I : IsSeq, T, S](p: => Parser[I, T],
+ q: => Parser[I, S]) extends Parser[I, ~[T, S]] {
+ def parse(in: I) =
+ for ((hd1, tl1) <- p.parse(in);
(hd2, tl2) <- q.parse(tl1)) yield (new ~(hd1, hd2), tl2)
}
// map parser
-class MapParser[I, T, S](p: => Parser[I, T],
- f: T => S)(using I => Seq[_]) extends Parser[I, S] {
+class MapParser[I : IsSeq, T, S](p: => Parser[I, T],
+ f: T => S) extends Parser[I, S] {
def parse(in: I) = for ((hd, tl) <- p.parse(in)) yield (f(hd), tl)
}
+
/*
// atomic parser for (particular) strings
case class StrParser(s: String) extends Parser[String, String] {
@@ -46,15 +51,15 @@
}
}
-extension (sc: StringContext)
+extension (sc: StringContext)
def p(args: Any*) = StrParser(sc.s(args:_*))
*/
// more convenient syntax for parser combinators
-extension [I, T](p: Parser[I, T])(using I => Seq[_]) {
- def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q)
- def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
- def map[S](f: => T => S) = new MapParser[I, T, S](p, f)
+extension [I : IsSeq, T](p: Parser[I, T]) {
+ def ||(q : => Parser[I, T]) = AltParser[I, T](p, q)
+ def ~[S] (q : => Parser[I, S]) = SeqParser[I, T, S](p, q)
+ def map[S](f: => T => S) = MapParser[I, T, S](p, f)
}
// New parser that takes as input a list of tokens
@@ -63,7 +68,7 @@
    // an atomic parser for a single, given token
if (!in.isEmpty && in.head == t) Set((t, in.tail)) else Set()
}
-}
+}
case class TokenListParser(ts: List[Token]) extends Parser[List[Token], List[Token]] {
def parse(tsb: List[Token]) = {
@@ -72,16 +77,14 @@
}
}
-// Implicit definitions to go from a token
+// Implicit definitions to go from a token
// or a list of tokens to a TokenListParser
-implicit def token2parser(t: Token) : Parser[List[Token], Token] =
+implicit def token2parser(t: Token) : Parser[List[Token], Token] =
TokenParser(t)
extension (t: Token) {
- def || (q : => Parser[List[Token], Token]) =
- new AltParser[List[Token], Token](t, q)
- def ~[S](q : => Parser[List[Token], S]) =
- new SeqParser[List[Token], Token, S](t, q)
+ def || (q : => Parser[List[Token], Token]) = AltParser[List[Token], Token](t, q)
+ def ~[S](q : => Parser[List[Token], S]) = SeqParser[List[Token], Token, S](t, q)
}
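+
+// A small sketch (not part of the coursework) of how the combinator
+// syntax composes: ToyPlus is a hypothetical parser that counts a
+// non-empty run of "+" operator tokens.
+lazy val ToyPlus: Parser[List[Token], Int] =
+  (T_OP("+") ~ ToyPlus).map{ case _ ~ n => n + 1 } ||
+  token2parser(T_OP("+")).map(_ => 1)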
@@ -133,26 +136,26 @@
// WHILE Language Parsing
-lazy val AExp: Parser[List[Token], AExp] =
+lazy val AExp: Parser[List[Token], AExp] =
(Te ~ T_OP("+") ~ AExp).map{ case x ~ _ ~ z => Aop("+", x, z): AExp } ||
(Te ~ T_OP("-") ~ AExp).map{ case x ~ _ ~ z => Aop("-", x, z): AExp } || Te
-lazy val Te: Parser[List[Token], AExp] =
- (Fa ~ T_OP("*") ~ Te).map{ case x ~ _ ~ z => Aop("*", x, z): AExp } ||
- (Fa ~ T_OP("/") ~ Te).map{ case x ~ _ ~ z => Aop("/", x, z): AExp } ||
- (Fa ~ T_OP("%") ~ Te).map{ case x ~ _ ~ z => Aop("%", x, z): AExp } || Fa
-lazy val Fa: Parser[List[Token], AExp] =
- (T_PAREN("(") ~ AExp ~ T_PAREN(")")).map{ case _ ~ y ~ _ => y } ||
- IdParser().map{Var(_)} ||
+lazy val Te: Parser[List[Token], AExp] =
+ (Fa ~ T_OP("*") ~ Te).map{ case x ~ _ ~ z => Aop("*", x, z): AExp } ||
+ (Fa ~ T_OP("/") ~ Te).map{ case x ~ _ ~ z => Aop("/", x, z): AExp } ||
+ (Fa ~ T_OP("%") ~ Te).map{ case x ~ _ ~ z => Aop("%", x, z): AExp } || Fa
+lazy val Fa: Parser[List[Token], AExp] =
+ (T_PAREN("(") ~ AExp ~ T_PAREN(")")).map{ case _ ~ y ~ _ => y } ||
+ IdParser().map{Var(_)} ||
NumParser().map{Num(_)}
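+
+// Operator precedence falls out of the layering: AExp handles + and -,
+// Te handles *, / and %, and Fa the atoms, so "2 + 3 * 4" should parse
+// to Aop("+", Num(2), Aop("*", Num(3), Num(4))) (assuming the CW3 lexer).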
-lazy val BExp: Parser[List[Token], BExp] =
- (AExp ~ T_OP("==") ~ AExp).map{ case x ~ _ ~ z => Bop("==", x, z): BExp } ||
- (AExp ~ T_OP("!=") ~ AExp).map{ case x ~ _ ~ z => Bop("!=", x, z): BExp } ||
- (AExp ~ T_OP("<") ~ AExp).map{ case x ~ _ ~ z => Bop("<", x, z): BExp } ||
+lazy val BExp: Parser[List[Token], BExp] =
+ (AExp ~ T_OP("==") ~ AExp).map{ case x ~ _ ~ z => Bop("==", x, z): BExp } ||
+ (AExp ~ T_OP("!=") ~ AExp).map{ case x ~ _ ~ z => Bop("!=", x, z): BExp } ||
+ (AExp ~ T_OP("<") ~ AExp).map{ case x ~ _ ~ z => Bop("<", x, z): BExp } ||
(AExp ~ T_OP(">") ~ AExp).map{ case x ~ _ ~ z => Bop(">", x, z): BExp } ||
(T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("&&") ~ BExp).map{ case _ ~ y ~ _ ~ _ ~ v => And(y, v): BExp } ||
(T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("||") ~ BExp).map{ case _ ~ y ~ _ ~ _ ~ v => Or(y, v): BExp } ||
- (T_KEYWORD("true").map(_ => True: BExp )) ||
+ (T_KEYWORD("true").map(_ => True: BExp )) ||
(T_KEYWORD("false").map(_ => False: BExp )) ||
(T_PAREN("(") ~ BExp ~ T_PAREN(")")).map{ case _ ~ x ~ _ => x }
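+
+// Note the grammar requires parentheses around the left operand of &&
+// and ||, e.g. "(2 < 3) && (4 < 5)" should parse to
+// And(Bop("<", Num(2), Num(3)), Bop("<", Num(4), Num(5))).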
@@ -163,7 +166,7 @@
(T_KEYWORD("while") ~ BExp ~ T_KEYWORD("do") ~ Block).map{ case _ ~ y ~ _ ~ w => While(y, w) : Stmt } ||
(T_KEYWORD("read") ~ IdParser()).map{ case _ ~ id => Read(id): Stmt} ||
(T_KEYWORD("write") ~ IdParser()).map{ case _ ~ id => WriteId(id): Stmt} ||
- (T_KEYWORD("write") ~ StringParser()).map{ case _ ~ s => WriteString(s): Stmt} ||
+ (T_KEYWORD("write") ~ StringParser()).map{ case _ ~ s => WriteString(s): Stmt} ||
(T_KEYWORD("write") ~ T_PAREN("(") ~ IdParser() ~ T_PAREN(")")).map{ case _ ~ _ ~ id ~ _ => WriteId(id): Stmt} ||
(T_KEYWORD("write") ~ T_PAREN("(") ~ StringParser() ~ T_PAREN(")")).map{ case _ ~ _ ~ s ~ _ => WriteString(s): Stmt}
@@ -219,8 +222,8 @@
def eval_stmt(s: Stmt, env: Env) : Env = s match {
case Skip => env
case Assign(x, a) => env + (x -> eval_aexp(a, env))
- case If(b, bl1, bl2) => if (eval_bexp(b, env)) eval_bl(bl1, env) else eval_bl(bl2, env)
- case While(b, bl) =>
+ case If(b, bl1, bl2) => if (eval_bexp(b, env)) eval_bl(bl1, env) else eval_bl(bl2, env)
+ case While(b, bl) =>
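+    // unfold the loop once: evaluate the body, then re-evaluate the While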
if (eval_bexp(b, env)) eval_stmt(While(b, bl), eval_bl(bl, env))
else env
@@ -230,10 +233,10 @@
env
}
- case Read(x) => {
- println("Enter an integer and press ENTER:") ;
+    case Read(x) => {
+      println("Enter an integer and press ENTER:")
      val n = scala.io.StdIn.readInt() // note: does not work when using the REPL
- eval_stmt(Assign(x, Num(n)), env)
+ eval_stmt(Assign(x, Num(n)), env)
}
}
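+
+// e.g. eval_stmt(Assign("x", Num(3)), Map()) should yield Map("x" -> 3)
+// (assuming Env = Map[String, Int], as elsewhere in CW3)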
@@ -251,7 +254,7 @@
-@main
+//@main   // disabled: the script calls run/test directly at the bottom
def run(file: String) = {
val contents = os.read(os.pwd / file)
println(s"Lex $file: ")
@@ -262,7 +265,7 @@
println(eval(Stmts.parse_all(tokenise(contents)).head))
}
-@main
+//@main
def test(file: String) = {
val contents = os.read(os.pwd / file)
println(s"Lex $file: ")
@@ -279,3 +282,6 @@
println("Time taken in seconds: ")
println((end - start)/(1.0e9))
*/
+
+test("primes.while")
+run("primes.while")
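+
+// To execute the script (assuming scala-cli is installed and
+// primes.while is in the current directory):
+//
+//   scala-cli parser.sc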