# HG changeset patch # User Christian Urban # Date 1694974377 -3600 # Node ID 53f08d873e0969b26f7880223b221c516bb840be # Parent 53e7da9f372a9ddb7cf7b236fa80cbae7359a562 updated diff -r 53e7da9f372a -r 53f08d873e09 cws/build.sc --- a/cws/build.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/cws/build.sc Sun Sep 17 19:12:57 2023 +0100 @@ -1,10 +1,10 @@ #!/usr/bin/env amm val files = Seq("cw01.tex", - "cw02.tex", - "cw03.tex", - "cw04.tex", - "cw05.tex") + "cw02.tex", + "cw03.tex", + "cw04.tex", + "cw05.tex") val pdf_files = files.map(s => s.stripSuffix("tex") ++ "pdf") @@ -13,8 +13,8 @@ def make() = { for (f <- files) { println(s"Processing $f ...") - os.proc("xelatex", f).call(stdout = os.Inherit, stdin = os.Inherit) - os.proc("xelatex", f).call(stdout = os.Inherit, stdin = os.Inherit) + os.proc("lualatex", f).call(stdout = os.Inherit, stdin = os.Inherit) + os.proc("lualatex", f).call(stdout = os.Inherit, stdin = os.Inherit) } } diff -r 53e7da9f372a -r 53f08d873e09 cws/cw01.pdf Binary file cws/cw01.pdf has changed diff -r 53e7da9f372a -r 53f08d873e09 cws/cw02.pdf Binary file cws/cw02.pdf has changed diff -r 53e7da9f372a -r 53f08d873e09 cws/cw03.pdf Binary file cws/cw03.pdf has changed diff -r 53e7da9f372a -r 53f08d873e09 cws/cw04.pdf Binary file cws/cw04.pdf has changed diff -r 53e7da9f372a -r 53f08d873e09 cws/cw05.pdf Binary file cws/cw05.pdf has changed diff -r 53e7da9f372a -r 53f08d873e09 cws/upload --- a/cws/upload Fri Sep 15 10:49:33 2023 +0100 +++ b/cws/upload Sun Sep 17 19:12:57 2023 +0100 @@ -8,5 +8,10 @@ done -scp $fls k1192855@bastion:public_html/cfl/cws/ +scp $fls k1192855@bastion:public_html/cfl/cws + +#hg commit -m "updated jars" + +# prevent PDF from being copied +###gs -o output.pdf -dNoOutputFonts -sDEVICE=pdfwrite main_cw01.pdf diff -r 53e7da9f372a -r 53f08d873e09 progs/matcher/re1.sc --- a/progs/matcher/re1.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/progs/matcher/re1.sc Sun Sep 17 19:12:57 2023 +0100 @@ -1,6 +1,6 @@ // A simple matcher for basic regular expressions // -// Call the test cases with X = {1,2,3} +// Call the testcases with X = {1,2,3} // // amm re1.sc testX // @@ -19,9 +19,9 @@ case class SEQ(r1: Rexp, r2: Rexp) extends Rexp // sequence case class STAR(r: Rexp) extends Rexp // star + // nullable function: tests whether a regular -// expression can recognise the empty string - +// expression can recognise the empty string def nullable(r: Rexp) : Boolean = r match { case ZERO => false case ONE => true @@ -54,13 +54,12 @@ nullable(ders(s.toList, r)) +// some examples from the homework val r = SEQ(CHAR('b'), CHAR('c')) matcher(r, "ac") -// some examples from the homework - -val r = STAR(ALT(SEQ(CHAR('a'), CHAR('b')), CHAR('b'))) +val r1 = STAR(ALT(SEQ(CHAR('a'), CHAR('b')), CHAR('b'))) der('a', r) der('b', r) der('c', r) @@ -71,6 +70,9 @@ der('z', der('y', der('x', r2))) +// Test Cases +//============ + // the optional regular expression (one or zero times) def OPT(r: Rexp) = ALT(r, ONE) // r + 1 @@ -81,10 +83,6 @@ case n => SEQ(r, NTIMES(r, n - 1)) } - -// Test Cases -//============ - // the evil regular expression (a?){n} a{n} def EVIL1(n: Int) = SEQ(NTIMES(OPT(CHAR('a')), n), NTIMES(CHAR('a'), n)) diff -r 53e7da9f372a -r 53f08d873e09 progs/matcher/re2.sc --- a/progs/matcher/re2.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/progs/matcher/re2.sc Sun Sep 17 19:12:57 2023 +0100 @@ -1,6 +1,8 @@ -// A Version with an explicit n-times regular expression; +// A Version of the matcher with an explicit +// n-times regular expression +// // this keeps the size of the regular expression in 
the -// EVIL1 test-case quite small +// EVIL1 testcase small // // call the test cases with X = {1,2} // @@ -31,7 +33,6 @@ case NTIMES(r, n) => if (n == 0) true else nullable(r) } - def der(c: Char, r: Rexp) : Rexp = r match { case ZERO => ZERO case ONE => ZERO @@ -54,13 +55,13 @@ nullable(ders(s.toList, r)) +// Test Cases + // the optional regular expression: one or zero times // this regular expression is still defined in terms of ALT def OPT(r: Rexp) = ALT(r, ONE) -// Test Cases - // evil regular expressions def EVIL1(n: Int) = SEQ(NTIMES(OPT(CHAR('a')), n), NTIMES(CHAR('a'), n)) val EVIL2 = SEQ(STAR(STAR(CHAR('a'))), CHAR('b')) @@ -90,7 +91,7 @@ def test2() = { println("Test (a*)* b") - for (i <- 0 to 30 by 2) { + for (i <- 0 to 22 by 2) { println(f"$i: ${time_needed(1, matcher(EVIL2, "a" * i))}%.5f") } } diff -r 53e7da9f372a -r 53f08d873e09 progs/matcher/re3.sc --- a/progs/matcher/re3.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/progs/matcher/re3.sc Sun Sep 17 19:12:57 2023 +0100 @@ -1,4 +1,6 @@ -// A version with simplification of derivatives; +// A version of the matcher with simplification +// of derivatives +// // this keeps the regular expressions small, which // is good for the run-time // @@ -48,6 +50,7 @@ if (i == 0) ZERO else SEQ(der(c, r), NTIMES(r, i - 1)) } +// simplification def simp(r: Rexp) : Rexp = r match { case ALT(r1, r2) => (simp(r1), simp(r2)) match { case (ZERO, r2s) => r2s @@ -64,26 +67,23 @@ case r => r } - - // the derivative w.r.t. a string (iterates der) def ders(s: List[Char], r: Rexp) : Rexp = s match { case Nil => r case c::s => ders(s, simp(der(c, r))) } - // the main matcher function def matcher(r: Rexp, s: String) : Boolean = nullable(ders(s.toList, r)) +// Test Cases +//============ + // one or zero def OPT(r: Rexp) = ALT(r, ONE) - -// Test Cases - // evil regular expressions: (a?){n} a{n} and (a*)* b def EVIL1(n: Int) = SEQ(NTIMES(OPT(CHAR('a')), n), NTIMES(CHAR('a'), n)) val EVIL2 = SEQ(STAR(STAR(CHAR('a'))), CHAR('b')) diff -r 53e7da9f372a -r 53f08d873e09 progs/parser-combinators/comb1.sc --- a/progs/parser-combinators/comb1.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/progs/parser-combinators/comb1.sc Sun Sep 17 19:12:57 2023 +0100 @@ -89,12 +89,12 @@ extension [I, T](p: Parser[I, T])(using I => Seq[_]) { def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q) def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q) - def mapp[S](f: => T => S) = new MapParser[I, T, S](p, f) + def map[S](f: => T => S) = new MapParser[I, T, S](p, f) } def toU(s: String) = s.map(_.toUpper) -(p"ELSE").mapp(toU(_)).parse("ELSEifthen") +(p"ELSE").map(toU(_)).parse("ELSEifthen") // these implicits allow us to use an infix notation for // sequences and alternatives; we also can write the usual @@ -109,8 +109,8 @@ // A parser for palindromes (just returns them as string) lazy val Pal : Parser[String, String] = { - (p"a" ~ Pal ~ p"a").mapp{ case ((x, y), z) => s"$x$y$z" } || - (p"b" ~ Pal ~ p"b").mapp{ case ((x, y), z) => s"$x$y$z" } || + (p"a" ~ Pal ~ p"a").map{ case ((x, y), z) => s"$x$y$z" } || + (p"b" ~ Pal ~ p"b").map{ case ((x, y), z) => s"$x$y$z" } || p"a" || p"b" || p"" } @@ -126,7 +126,7 @@ // // (transforms '(' -> '{' , ')' -> '}' ) lazy val P : Parser[String, String] = { - (p"(" ~ P ~ p")" ~ P).mapp{ case (((_, x), _), y) => "{" + x + "}" + y } || + (p"(" ~ P ~ p")" ~ P).map{ case (((_, x), _), y) => "{" + x + "}" + y } || p"" } @@ -224,32 +224,32 @@ // a problem with the arithmetic expression parser: it // gets very slow with deeply nested parentheses 
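//==========================================================================
// Editor's aside (not part of the changeset): the slowness observed below
// comes from the unmerged clauses of E -- (T ~ p"+" ~ E) and
// (T ~ p"-" ~ E) each re-parse the same T, so every extra level of
// parentheses roughly doubles the work; the merged E2 further down avoids
// this. A timing helper in the style of the time_needed function used in
// the re2.sc tests (its definition is not part of this patch, so this is
// only a sketch):

def time_needed[T](i: Int, code: => T) = {
  val start = System.nanoTime()
  for (j <- 1 to i) code          // evaluate the by-name argument i times
  val end = System.nanoTime()
  (end - start) / (i * 1.0e9)     // average time in seconds
}

// possible usage, with E from comb1.sc in scope:
//   for (i <- 1 to 9) {
//     val s = "(" * i + "1" + ")" * i
//     println(f"$i: ${time_needed(1, E.parse(s))}%.5f secs")
//   }
//==========================================================================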
-println("Runtime problem") +println("A runtime problem") println(E.parse("1")) println(E.parse("(1)")) println(E.parse("((1))")) println(E.parse("(((1)))")) println(E.parse("((((1))))")) -//println(E.parse("((((((1))))))")) -//println(E.parse("(((((((1)))))))")) +println(E.parse("((((((1))))))")) +println(E.parse("(((((((1)))))))")) //println(E.parse("((((((((1))))))))")) -// faster because of merge +// faster because of merge in the +/- case lazy val E2: Parser[String, Int] = { - (T2 ~ (p"+" || p"-") ~ E2).mapp[Int]{ case ((x, y), z) => if (y == "+") x + z else x - z} || T2 } + (T2 ~ (p"+" || p"-") ~ E2).map[Int]{ case ((x, y), z) => if (y == "+") x + z else x - z} || T2 } lazy val T2: Parser[String, Int] = { - (F2 ~ p"*" ~ T2).mapp[Int]{ case ((x, _), z) => x * z } || F2 } + (F2 ~ p"*" ~ T2).map[Int]{ case ((x, _), z) => x * z } || F2 } lazy val F2: Parser[String, Int] = { - (p"(" ~ E2 ~ p")").mapp[Int]{ case ((_, y), _) => y } || NumParserInt } + (p"(" ~ E2 ~ p")").map[Int]{ case ((_, y), _) => y } || NumParserInt } -println("Runtime problem") +println("mitigated by merging clauses") println(E2.parse("1")) println(E2.parse("(1)")) println(E2.parse("((1))")) println(E2.parse("(((1)))")) println(E2.parse("((((1))))")) -//println(E2.parse("((((((1))))))")) -//println(E2.parse("(((((((1)))))))")) -//println(E2.parse("((((((((1))))))))")) \ No newline at end of file +println(E2.parse("((((((1))))))")) +println(E2.parse("(((((((1)))))))")) +println(E2.parse("((((((((1))))))))")) \ No newline at end of file diff -r 53e7da9f372a -r 53f08d873e09 progs/parser-combinators/comb2.sc --- a/progs/parser-combinators/comb2.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/progs/parser-combinators/comb2.sc Sun Sep 17 19:12:57 2023 +0100 @@ -43,7 +43,7 @@ } // map parser -class MapParser[I, T, S](p: => Parser[I, T], +class maparser[I, T, S](p: => Parser[I, T], f: T => S)(using I => Seq[_]) extends Parser[I, S] { def parse(in: I) = for ((hd, tl) <- p.parse(in)) yield (f(hd), tl) } @@ -93,7 +93,7 @@ extension [I, T](p: Parser[I, T])(using I => Seq[_]) { def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q) def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q) - def mapp[S](f: => T => S) = new MapParser[I, T, S](p, f) + def map[S](f: => T => S) = new maparser[I, T, S](p, f) } @@ -125,47 +125,47 @@ // arithmetic expressions lazy val AExp: Parser[String, AExp] = - (Te ~ p"+" ~ AExp).mapp[AExp]{ case x ~ _ ~ z => Aop("+", x, z) } || - (Te ~ p"-" ~ AExp).mapp[AExp]{ case x ~ _ ~ z => Aop("-", x, z) } || Te + (Te ~ p"+" ~ AExp).map[AExp]{ case x ~ _ ~ z => Aop("+", x, z) } || + (Te ~ p"-" ~ AExp).map[AExp]{ case x ~ _ ~ z => Aop("-", x, z) } || Te lazy val Te: Parser[String, AExp] = - (Fa ~ p"*" ~ Te).mapp[AExp]{ case x ~ _ ~ z => Aop("*", x, z) } || - (Fa ~ p"/" ~ Te).mapp[AExp]{ case x ~ _ ~ z => Aop("/", x, z) } || Fa + (Fa ~ p"*" ~ Te).map[AExp]{ case x ~ _ ~ z => Aop("*", x, z) } || + (Fa ~ p"/" ~ Te).map[AExp]{ case x ~ _ ~ z => Aop("/", x, z) } || Fa lazy val Fa: Parser[String, AExp] = - (p"(" ~ AExp ~ p")").mapp{ case _ ~ y ~ _ => y } || - IdParser.mapp(Var) || - NumParser.mapp(Num) + (p"(" ~ AExp ~ p")").map{ case _ ~ y ~ _ => y } || + IdParser.map(Var) || + NumParser.map(Num) // boolean expressions with some simple nesting lazy val BExp: Parser[String, BExp] = - (AExp ~ p"==" ~ AExp).mapp[BExp]{ case x ~ _ ~ z => Bop("==", x, z) } || - (AExp ~ p"!=" ~ AExp).mapp[BExp]{ case x ~ _ ~ z => Bop("!=", x, z) } || - (AExp ~ p"<" ~ AExp).mapp[BExp]{ case x ~ _ ~ z => Bop("<", x, z) } || - (AExp ~ p">" ~ 
AExp).mapp[BExp]{ case x ~ _ ~ z => Bop(">", x, z) } || - (p"(" ~ BExp ~ p")" ~ p"&&" ~ BExp).mapp[BExp]{ case _ ~ y ~ _ ~ _ ~ v => And(y, v) } || - (p"(" ~ BExp ~ p")" ~ p"||" ~ BExp).mapp[BExp]{ case _ ~ y ~ _ ~ _ ~ v => Or(y, v) } || - (p"true".mapp[BExp]{ _ => True }) || - (p"false".mapp[BExp]{ _ => False }) || - (p"(" ~ BExp ~ p")").mapp[BExp]{ case _ ~ x ~ _ => x } + (AExp ~ p"==" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop("==", x, z) } || + (AExp ~ p"!=" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop("!=", x, z) } || + (AExp ~ p"<" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop("<", x, z) } || + (AExp ~ p">" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop(">", x, z) } || + (p"(" ~ BExp ~ p")" ~ p"&&" ~ BExp).map[BExp]{ case _ ~ y ~ _ ~ _ ~ v => And(y, v) } || + (p"(" ~ BExp ~ p")" ~ p"||" ~ BExp).map[BExp]{ case _ ~ y ~ _ ~ _ ~ v => Or(y, v) } || + (p"true".map[BExp]{ _ => True }) || + (p"false".map[BExp]{ _ => False }) || + (p"(" ~ BExp ~ p")").map[BExp]{ case _ ~ x ~ _ => x } // a single statement lazy val Stmt: Parser[String, Stmt] = - ((p"skip".mapp[Stmt]{_ => Skip }) || - (IdParser ~ p":=" ~ AExp).mapp[Stmt]{ case x ~ _ ~ z => Assign(x, z) } || - (p"write(" ~ IdParser ~ p")").mapp[Stmt]{ case _ ~ y ~ _ => Write(y) } || + ((p"skip".map[Stmt]{_ => Skip }) || + (IdParser ~ p":=" ~ AExp).map[Stmt]{ case x ~ _ ~ z => Assign(x, z) } || + (p"write(" ~ IdParser ~ p")").map[Stmt]{ case _ ~ y ~ _ => Write(y) } || (p"if" ~ BExp ~ p"then" ~ Block ~ p"else" ~ Block) - .mapp[Stmt]{ case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w) } || - (p"while" ~ BExp ~ p"do" ~ Block).mapp[Stmt]{ case _ ~ y ~ _ ~ w => While(y, w) }) + .map[Stmt]{ case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w) } || + (p"while" ~ BExp ~ p"do" ~ Block).map[Stmt]{ case _ ~ y ~ _ ~ w => While(y, w) }) // statements lazy val Stmts: Parser[String, Block] = - (Stmt ~ p";" ~ Stmts).mapp[Block]{ case x ~ _ ~ z => x :: z } || - (Stmt.mapp[Block]{ s => List(s) }) + (Stmt ~ p";" ~ Stmts).map[Block]{ case x ~ _ ~ z => x :: z } || + (Stmt.map[Block]{ s => List(s) }) // blocks (enclosed in curly braces) lazy val Block: Parser[String, Block] = - ((p"{" ~ Stmts ~ p"}").mapp{ case _ ~ y ~ _ => y } || - (Stmt.mapp(s => List(s)))) + ((p"{" ~ Stmts ~ p"}").map{ case _ ~ y ~ _ => y } || + (Stmt.map(s => List(s)))) // Examples diff -r 53e7da9f372a -r 53f08d873e09 solutions/cw1/cw1.scala --- a/solutions/cw1/cw1.scala Fri Sep 15 10:49:33 2023 +0100 +++ b/solutions/cw1/cw1.scala Sun Sep 17 19:12:57 2023 +0100 @@ -1,6 +1,6 @@ // CW 1 -import scala.language.implicitConversions -import scala.language.reflectiveCalls +//import scala.language.implicitConversions +//import scala.language.reflectiveCalls abstract class Rexp case object ZERO extends Rexp // matches nothing diff -r 53e7da9f372a -r 53f08d873e09 solutions/cw1/matcher.sc --- a/solutions/cw1/matcher.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/solutions/cw1/matcher.sc Sun Sep 17 19:12:57 2023 +0100 @@ -1,114 +1,17 @@ -// CW1 -abstract class Rexp -case object ZERO extends Rexp -case object ONE extends Rexp -case class CHAR(c: Char) extends Rexp -case class ALT(r1: Rexp, r2: Rexp) extends Rexp -case class SEQ(r1: Rexp, r2: Rexp) extends Rexp -case class STAR(r: Rexp) extends Rexp - -// extended Rexps -case class RANGE(s: Set[Char]) extends Rexp -case class PLUS(r: Rexp) extends Rexp -case class OPTIONAL(r: Rexp) extends Rexp -case class NTIMES(r: Rexp, n: Int) extends Rexp -case class UPTO(r: Rexp, n: Int) extends Rexp -case class FROM(r: Rexp, n: Int) extends Rexp -case class BETWEEN(r: Rexp, n: Int, m: Int) extends Rexp -case class 
NOT(r: Rexp) extends Rexp - -// functions -case class CFUN(f: Char => Boolean) extends Rexp - -// abbreviations -def FCHAR(c: Char) = CFUN((a: Char) => a == c) -def FSET(s: Set[Char]) = CFUN((a: Char) => s.contains(a)) -val FALL = CFUN(_ => true) - -def nullable (r: Rexp) : Boolean = r match { - case ZERO => false - case ONE => true - case CHAR(_) => false - case ALT(r1, r2) => nullable(r1) || nullable(r2) - case SEQ(r1, r2) => nullable(r1) && nullable(r2) - case STAR(_) => true - - case RANGE(_) => false - case PLUS(r1) => nullable(r1) - case OPTIONAL(_) => true - case NTIMES(r1, i) => if (i == 0) true else nullable(r1) - case UPTO(_, _) => true - case FROM(r1, i) => if (i == 0) true else nullable(r1) - case BETWEEN(r1, i, _) => if (i == 0) true else nullable(r1) - case NOT(r1) => !nullable(r1) - - case CFUN(f) => false -} - +println("===========================") -def der (c: Char, r: Rexp) : Rexp = r match { - case ZERO => ZERO - case ONE => ZERO - case CHAR(d) => if (c == d) ONE else ZERO - case ALT(r1, r2) => ALT(der(c, r1), der(c, r2)) - case SEQ(r1, r2) => - if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2)) - else SEQ(der(c, r1), r2) - case STAR(r1) => SEQ(der(c, r1), STAR(r1)) - - case RANGE(s) => - if (s.contains(c)) ONE else ZERO - case PLUS(r1) => SEQ(der(c, r1), STAR(r1)) - case OPTIONAL(r1) => der(c, r1) - case NTIMES(r, i) => - if (i == 0) ZERO else SEQ(der(c, r), NTIMES(r, i - 1)) - case UPTO(r1, i) => - if (i == 0) ZERO else SEQ(der(c, r1), UPTO(r1, i - 1)) - case FROM(r1, i) => - if (i == 0) SEQ(der(c, r1), FROM(r1, 0)) else - SEQ(der(c, r1), FROM(r1, i - 1)) - case BETWEEN(r1, i, j) => - if (i == 0) { - if (j == 0) ZERO else SEQ(der(c, r1), BETWEEN(r1, 0, j - 1)) - } else SEQ(der(c, r1), BETWEEN(r1, i - 1, j - 1)) - case NOT(r1) => NOT(der(c, r1)) +//import $file.cw1 +//import cw1._ +import CW1._ - case CFUN(f) => if (f(c)) ONE else ZERO -} - - -def simp(r: Rexp) : Rexp = r match { - case ALT(r1, r2) => (simp(r1), simp(r2)) match { - case (ZERO, r2s) => r2s - case (r1s, ZERO) => r1s - case (r1s, r2s) => if (r1s == r2s) r1s else ALT (r1s, r2s) - } - case SEQ(r1, r2) => (simp(r1), simp(r2)) match { - case (ZERO, _) => ZERO - case (_, ZERO) => ZERO - case (ONE, r2s) => r2s - case (r1s, ONE) => r1s - case (r1s, r2s) => SEQ(r1s, r2s) - } - case r => r -} - -def ders(s: List[Char], r: Rexp) : Rexp = s match { - case Nil => r - case c::s => ders(s, simp(der(c, r))) -} - -def matcher(r: Rexp, s: String) : Boolean = nullable(ders(s.toList, r)) - - - +/* val Regex31 = NTIMES(CHAR('a'),3) val Regex32 = NTIMES(OPTIONAL(CHAR('a')),3) val Regex33 = UPTO(CHAR('a'),3) val Regex34 = UPTO(OPTIONAL(CHAR('a')),3) -val Regex35 = BETWEEN(CHAR('a'),3,5) -val Regex36 = BETWEEN(OPTIONAL(CHAR('a')),3,5) +val Regex35 = NMTIMES(CHAR('a'),3,5) +val Regex36 = NMTIMES(OPTIONAL(CHAR('a')),3,5) val string31 = "" val string32 = "a" val string33 = "aa" @@ -323,3 +226,4 @@ matcher(PLUS(PLUS(Q7r1)), Q7str7) matcher(PLUS(PLUS(Q7r2)), Q7str7) +*/ \ No newline at end of file diff -r 53e7da9f372a -r 53f08d873e09 solutions/cw2/lexer.sc --- a/solutions/cw2/lexer.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/solutions/cw2/lexer.sc Sun Sep 17 19:12:57 2023 +0100 @@ -37,13 +37,14 @@ implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList) -implicit def RexpOps(r: Rexp) = new { +extension (r: Rexp) { + def ~ (s: Rexp) = SEQ(r, s) + def % = STAR(r) def | (s: Rexp) = ALT(r, s) - def % = STAR(r) - def ~ (s: Rexp) = SEQ(r, s) } -implicit def stringOps(s: String) = new { + +extension (s: String) { def | (r: 
Rexp) = ALT(s, r) def | (r: String) = ALT(s, r) def % = STAR(s) diff -r 53e7da9f372a -r 53f08d873e09 solutions/cw3/lexer.sc --- a/solutions/cw3/lexer.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/solutions/cw3/lexer.sc Sun Sep 17 19:12:57 2023 +0100 @@ -37,13 +37,13 @@ implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList) -implicit def RexpOps(r: Rexp) = new { +extension (r: Rexp) { def | (s: Rexp) = ALT(r, s) def % = STAR(r) def ~ (s: Rexp) = SEQ(r, s) } -implicit def stringOps(s: String) = new { +extension (s: String) { def | (r: Rexp) = ALT(s, r) def | (r: String) = ALT(s, r) def % = STAR(s) @@ -82,7 +82,7 @@ case RECD(_, r1) => der(c, r1) case RANGE(s) => if (s.contains(c)) ONE else ZERO case PLUS(r1) => SEQ(der(c, r1), STAR(r1)) - case OPTIONAL(r1) => ALT(der(c, r1), ZERO) + case OPTIONAL(r1) => der(c, r1) case NTIMES(r, i) => if (i == 0) ZERO else SEQ(der(c, r), NTIMES(r, i - 1)) } @@ -119,7 +119,7 @@ case RECD(x, r) => Rec(x, mkeps(r)) case PLUS(r) => Stars(List(mkeps(r))) // the first copy must match the empty string - case OPTIONAL(r) => Right(Empty) + case OPTIONAL(r) => if (nullable(r)) Stars(List(mkeps(r))) else Stars(Nil) case NTIMES(r, i) => Stars(List.fill(i)(mkeps(r))) } @@ -136,7 +136,7 @@ case (RANGE(_), Empty) => Chr(c) case (PLUS(r), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs) - case (OPTIONAL(r), Left(v1)) => Left(inj(r, c, v1)) + case (OPTIONAL(r), Left(v1)) => Stars(List(inj(r, c, v1))) case (NTIMES(r, n), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs) } @@ -201,18 +201,18 @@ // Language specific code val KEYWORD : Rexp = "while" | "if" | "then" | "else" | "do" | "for" | "to" | "true" | "false" | "read" | "write" | "skip" val OP : Rexp = "+" | "-" | "*" | "%" | "/" | "==" | "!=" | ">" | "<" | ">=" | "<=" | ":=" | "&&" | "||" -val LET: Rexp = RANGE(('A' to 'Z').toSet ++ ('a' to 'z')) -val SYM : Rexp = (LET | RANGE(Set('.', '_', '>', '<', '=', ';', ',', ':'))) +val LET: Rexp = RANGE(('A' to 'Z').toSet ++ ('a' to 'z').toSet) +val SYM : Rexp = RANGE(Set('.', '_', '>', '<', '=', ';', ',', ':', ')', '(')) val PARENS : Rexp = "(" | "{" | ")" | "}" val SEMI : Rexp = ";" val WHITESPACE : Rexp = PLUS(" ") | "\n" | "\t" | "\r" val DIGIT : Rexp = RANGE(('0' to '9').toSet) val DIGIT1 : Rexp = RANGE(('1' to '9').toSet) -val STRING : Rexp = "\"" ~ (SYM | " " | "\\n" | DIGIT).% ~ "\"" +val STRING : Rexp = "\"" ~ (LET | SYM | DIGIT | " " | "\\n").% ~ "\"" val ID : Rexp = LET ~ (LET | "_" | DIGIT).% val NUM : Rexp = "0" | (DIGIT1 ~ DIGIT.%) val EOL : Rexp = "\n" | "\r\n" -val COMMENT : Rexp = "//" ~ (SYM | PARENS | " " | DIGIT).% ~ EOL +val COMMENT : Rexp = "//" ~ (LET | SYM | PARENS | " " | DIGIT).% ~ EOL val WHILE_REGS = (("k" $ KEYWORD) | ("o" $ OP) | @@ -224,7 +224,18 @@ ("n" $ NUM) | ("c" $ COMMENT)).% -// Token + + +def esc(raw: String): String = { + import scala.reflect.runtime.universe._ + Literal(Constant(raw)).toString +} + +def escape(tks: List[(String, String)]) = + tks.map{ case (s1, s2) => (s1, esc(s2))} + + +// Tokens abstract class Token extends Serializable case class T_KEYWORD(s: String) extends Token case class T_OP(s: String) extends Token @@ -247,21 +258,3 @@ // Tokenise def tokenise(s: String) : List[Token] = lexing_simp(WHILE_REGS, s).collect(token) - - -val fact = """ -write "Input n please"; -read n; -write "The factors of n are"; -f := 2; -while n != 1 do { - while (n / f) * f == n do { - write f; - n := n / f - }; - f := f + 1 -} -""" - -//println(tokenise(fact)) - diff -r 53e7da9f372a -r 53f08d873e09 solutions/cw3/parser.sc --- 
a/solutions/cw3/parser.sc Fri Sep 15 10:49:33 2023 +0100 +++ b/solutions/cw3/parser.sc Sun Sep 17 19:12:57 2023 +0100 @@ -4,32 +4,67 @@ import lexer._ -case class ~[+A, +B](_1: A, _2: B) -type IsSeq[A] = A => Seq[_] +case class ~[+A, +B](x: A, y: B) + +// parser combinators + +abstract class Parser[I, T](using is: I => Seq[_]) { + def parse(in: I): Set[(T, I)] + + def parse_all(in: I) : Set[T] = + for ((hd, tl) <- parse(in); + if is(tl).isEmpty) yield hd +} -abstract class Parser[I : IsSeq, T] { - def parse(ts: I): Set[(T, I)] +// alternative parser +class AltParser[I, T](p: => Parser[I, T], + q: => Parser[I, T])(using I => Seq[_]) extends Parser[I, T] { + def parse(in: I) = p.parse(in) ++ q.parse(in) +} - def parse_all(ts: I) : Set[T] = - for ((head, tail) <- parse(ts); if tail.isEmpty) yield head +// sequence parser +class SeqParser[I, T, S](p: => Parser[I, T], + q: => Parser[I, S])(using I => Seq[_]) extends Parser[I, ~[T, S]] { + def parse(in: I) = + for ((hd1, tl1) <- p.parse(in); + (hd2, tl2) <- q.parse(tl1)) yield (new ~(hd1, hd2), tl2) } -class SeqParser[I : IsSeq, T, S](p: => Parser[I, T], q: => Parser[I, S]) extends Parser[I, ~[T, S]] { - def parse(sb: I) = - for ((head1, tail1) <- p.parse(sb); - (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2) +// map parser +class MapParser[I, T, S](p: => Parser[I, T], + f: T => S)(using I => Seq[_]) extends Parser[I, S] { + def parse(in: I) = for ((hd, tl) <- p.parse(in)) yield (f(hd), tl) } -class AltParser[I : IsSeq, T](p: => Parser[I, T], q: => Parser[I, T]) extends Parser[I, T] { - def parse(sb: I) = p.parse(sb) ++ q.parse(sb) + +/* +// atomic parser for (particular) strings +case class StrParser(s: String) extends Parser[String, String] { + def parse(sb: String) = { + val (prefix, suffix) = sb.splitAt(s.length) + if (prefix == s) Set((prefix, suffix)) else Set() + } } -class FunParser[I : IsSeq, T, S](p: => Parser[I, T], f: T => S) extends Parser[I, S] { - def parse(sb: I) = - for ((head, tail) <- p.parse(sb)) yield (f(head), tail) +extension (sc: StringContext) + def p(args: Any*) = StrParser(sc.s(args:_*)) +*/ + +// more convenient syntax for parser combinators +extension [I, T](p: Parser[I, T])(using I => Seq[_]) { + def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q) + def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q) + def map[S](f: => T => S) = new MapParser[I, T, S](p, f) } // New parser that takes as input a list of tokens +case class TokenParser(t: Token) extends Parser[List[Token], Token] { + def parse(in: List[Token]) = { + // an example of an atomic parser for characters + if (!in.isEmpty && in.head == t) Set((t, in.tail)) else Set() + } +} + case class TokenListParser(ts: List[Token]) extends Parser[List[Token], List[Token]] { def parse(tsb: List[Token]) = { val (prefix, suffix) = tsb.splitAt(ts.length) @@ -39,34 +74,16 @@ // Implicit definitions to go from a token // or a list of tokens to a TokenListParser -implicit def token2parser(t: Token) = TokenListParser(List(t)) -implicit def tokenList2parser(ts: List[Token]) = TokenListParser(ts) +implicit def token2parser(t: Token) : Parser[List[Token], Token] = + TokenParser(t) -implicit def ParserOps[I : IsSeq, T](p: Parser[I, T]) = new { - def || (q : => Parser[I, T]) = new AltParser[I, T](p, q) - def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f) - def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q) +extension (t: Token) { + def || (q : => Parser[List[Token], Token]) = + new AltParser[List[Token], Token](t, q) + def ~[S](q : 
=> Parser[List[Token], S]) = + new SeqParser[List[Token], Token, S](t, q) } -implicit def TokenOps(t: Token) = new { - def || (q : => Parser[List[Token], List[Token]]) = new AltParser[List[Token], List[Token]](List(t), q) - def || (qs : List[Token]) = new AltParser[List[Token], List[Token]](List(t), qs) - def ==>[S] (f: => List[Token] => S) = new FunParser[List[Token], List[Token], S](List(t), f) - def ~[S](q : => Parser[List[Token], S]) = - new SeqParser[List[Token], List[Token], S](List(t), q) - def ~ (qs : List[Token]) = - new SeqParser[List[Token], List[Token], List[Token]](List(t), qs) -} - -implicit def TokenListOps(ts: List[Token]) = new { - def || (q : => Parser[List[Token], List[Token]]) = new AltParser[List[Token], List[Token]](ts, q) - def || (qs : List[Token]) = new AltParser[List[Token], List[Token]](ts, qs) - def ==>[S] (f: => List[Token] => S) = new FunParser[List[Token], List[Token], S](ts, f) - def ~[S](q : => Parser[List[Token], S]) = - new SeqParser[List[Token], List[Token], S](ts, q) - def ~ (qs : List[Token]) = - new SeqParser[List[Token], List[Token], List[Token]](ts, qs) -} // Abstract Syntax Trees abstract class Stmt @@ -114,48 +131,49 @@ } } + // WHILE Language Parsing lazy val AExp: Parser[List[Token], AExp] = - (Te ~ T_OP("+") ~ AExp) ==> { case x ~ _ ~ z => Aop("+", x, z): AExp } || - (Te ~ T_OP("-") ~ AExp) ==> { case x ~ _ ~ z => Aop("-", x, z): AExp } || Te + (Te ~ T_OP("+") ~ AExp).map{ case x ~ _ ~ z => Aop("+", x, z): AExp } || + (Te ~ T_OP("-") ~ AExp).map{ case x ~ _ ~ z => Aop("-", x, z): AExp } || Te lazy val Te: Parser[List[Token], AExp] = - (Fa ~ T_OP("*") ~ Te) ==> { case x ~ _ ~ z => Aop("*", x, z): AExp } || - (Fa ~ T_OP("/") ~ Te) ==> { case x ~ _ ~ z => Aop("/", x, z): AExp } || - (Fa ~ T_OP("%") ~ Te) ==> { case x ~ _ ~ z => Aop("%", x, z): AExp } || Fa + (Fa ~ T_OP("*") ~ Te).map{ case x ~ _ ~ z => Aop("*", x, z): AExp } || + (Fa ~ T_OP("/") ~ Te).map{ case x ~ _ ~ z => Aop("/", x, z): AExp } || + (Fa ~ T_OP("%") ~ Te).map{ case x ~ _ ~ z => Aop("%", x, z): AExp } || Fa lazy val Fa: Parser[List[Token], AExp] = - (T_PAREN("(") ~ AExp ~ T_PAREN(")")) ==> { case _ ~ y ~ _ => y } || - IdParser() ==> Var || - NumParser() ==> Num + (T_PAREN("(") ~ AExp ~ T_PAREN(")")).map{ case _ ~ y ~ _ => y } || + IdParser().map{Var(_)} || + NumParser().map{Num(_)} lazy val BExp: Parser[List[Token], BExp] = - (AExp ~ T_OP("==") ~ AExp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } || - (AExp ~ T_OP("!=") ~ AExp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } || - (AExp ~ T_OP("<") ~ AExp) ==> { case x ~ _ ~ z => Bop("<", x, z): BExp } || - (AExp ~ T_OP(">") ~ AExp) ==> { case x ~ _ ~ z => Bop(">", x, z): BExp } || - (T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("&&") ~ BExp) ==> { case _ ~ y ~ _ ~ _ ~ v => And(y, v): BExp } || - (T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("||") ~ BExp) ==> { case _ ~ y ~ _ ~ _ ~ v => Or(y, v): BExp } || - (T_KEYWORD("true") ==> (_ => True: BExp )) || - (T_KEYWORD("false") ==> (_ => False: BExp )) || - (T_PAREN("(") ~ BExp ~ T_PAREN(")")) ==> { case _ ~ x ~ _ => x } + (AExp ~ T_OP("==") ~ AExp).map{ case x ~ _ ~ z => Bop("==", x, z): BExp } || + (AExp ~ T_OP("!=") ~ AExp).map{ case x ~ _ ~ z => Bop("!=", x, z): BExp } || + (AExp ~ T_OP("<") ~ AExp).map{ case x ~ _ ~ z => Bop("<", x, z): BExp } || + (AExp ~ T_OP(">") ~ AExp).map{ case x ~ _ ~ z => Bop(">", x, z): BExp } || + (T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("&&") ~ BExp).map{ case _ ~ y ~ _ ~ _ ~ v => And(y, v): BExp } || + (T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("||") ~ 
BExp).map{ case _ ~ y ~ _ ~ _ ~ v => Or(y, v): BExp } || + (T_KEYWORD("true").map(_ => True: BExp )) || + (T_KEYWORD("false").map(_ => False: BExp )) || + (T_PAREN("(") ~ BExp ~ T_PAREN(")")).map{ case _ ~ x ~ _ => x } lazy val Stmt: Parser[List[Token], Stmt] = - T_KEYWORD("skip") ==> (_ => Skip: Stmt) || - (IdParser() ~ T_OP(":=") ~ AExp) ==> { case id ~ _ ~ z => Assign(id, z): Stmt } || - (T_KEYWORD("if") ~ BExp ~ T_KEYWORD("then") ~ Block ~ T_KEYWORD("else") ~ Block) ==> { case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w): Stmt } || - (T_KEYWORD("while") ~ BExp ~ T_KEYWORD("do") ~ Block) ==> { case _ ~ y ~ _ ~ w => While(y, w) : Stmt } || - (T_KEYWORD("read") ~ IdParser()) ==> { case _ ~ id => Read(id): Stmt} || - (T_KEYWORD("write") ~ IdParser()) ==> { case _ ~ id => WriteId(id): Stmt} || - (T_KEYWORD("write") ~ StringParser()) ==> { case _ ~ s => WriteString(s): Stmt} || - (T_KEYWORD("write") ~ T_PAREN("(") ~ IdParser() ~ T_PAREN(")")) ==> { case _ ~ _ ~ id ~ _ => WriteId(id): Stmt} || - (T_KEYWORD("write") ~ T_PAREN("(") ~ StringParser() ~ T_PAREN(")")) ==> { case _ ~ _ ~ s ~ _ => WriteString(s): Stmt} + T_KEYWORD("skip").map(_ => Skip: Stmt) || + (IdParser() ~ T_OP(":=") ~ AExp).map{ case id ~ _ ~ z => Assign(id, z): Stmt } || + (T_KEYWORD("if") ~ BExp ~ T_KEYWORD("then") ~ Block ~ T_KEYWORD("else") ~ Block).map{ case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w): Stmt } || + (T_KEYWORD("while") ~ BExp ~ T_KEYWORD("do") ~ Block).map{ case _ ~ y ~ _ ~ w => While(y, w) : Stmt } || + (T_KEYWORD("read") ~ IdParser()).map{ case _ ~ id => Read(id): Stmt} || + (T_KEYWORD("write") ~ IdParser()).map{ case _ ~ id => WriteId(id): Stmt} || + (T_KEYWORD("write") ~ StringParser()).map{ case _ ~ s => WriteString(s): Stmt} || + (T_KEYWORD("write") ~ T_PAREN("(") ~ IdParser() ~ T_PAREN(")")).map{ case _ ~ _ ~ id ~ _ => WriteId(id): Stmt} || + (T_KEYWORD("write") ~ T_PAREN("(") ~ StringParser() ~ T_PAREN(")")).map{ case _ ~ _ ~ s ~ _ => WriteString(s): Stmt} lazy val Stmts: Parser[List[Token], Block] = - (Stmt ~ T_SEMI ~ Stmts) ==> { case x ~ _ ~ z => x :: z : Block } || - (Stmt ==> (s => List(s) : Block)) + (Stmt ~ T_SEMI ~ Stmts).map{ case x ~ _ ~ z => x :: z : Block } || + (Stmt.map(s => List(s) : Block)) lazy val Block: Parser[List[Token], Block] = - (T_PAREN("{") ~ Stmts ~ T_PAREN("}")) ==> { case x ~ y ~ z => y} || - (Stmt ==> (s => List(s))) + (T_PAREN("{") ~ Stmts ~ T_PAREN("}")).map{ case x ~ y ~ z => y} || + (Stmt.map(s => List(s))) // Testing with programs 2 & 3 diff -r 53e7da9f372a -r 53f08d873e09 solutions/cw3/parser2.sc --- a/solutions/cw3/parser2.sc Fri Sep 15 10:49:33 2023 +0100 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000 @@ -1,255 +0,0 @@ -// CW3 - -import $file.lexer -import lexer._ - - -case class ~[+A, +B](_1: A, _2: B) -type IsSeq[A] = A => Seq[_] - -abstract class Parser[I : IsSeq, T] { - def parse(ts: I): Set[(T, I)] - - def parse_all(ts: I) : Set[T] = - for ((head, tail) <- parse(ts); if tail.isEmpty) yield head -} - -class SeqParser[I : IsSeq, T, S](p: => Parser[I, T], q: => Parser[I, S]) extends Parser[I, ~[T, S]] { - def parse(sb: I) = - for ((head1, tail1) <- p.parse(sb); - (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2) -} - -class AltParser[I : IsSeq, T](p: => Parser[I, T], q: => Parser[I, T]) extends Parser[I, T] { - def parse(sb: I) = p.parse(sb) ++ q.parse(sb) -} - -class FunParser[I : IsSeq, T, S](p: => Parser[I, T], f: T => S) extends Parser[I, S] { - def parse(sb: I) = - for ((head, tail) <- p.parse(sb)) yield (f(head), tail) -} - -// New parser that takes as 
input a list of tokens -case class TokenListParser(ts: List[Token]) extends Parser[List[Token], List[Token]] { - def parse(tsb: List[Token]) = { - val (prefix, suffix) = tsb.splitAt(ts.length) - if (prefix == ts) Set((prefix, suffix)) else Set() - } -} - -// Implicit definitions to go from a token -// or a list of tokens to a TokenListParser -implicit def token2parser(t: Token) = TokenListParser(List(t)) -implicit def tokenList2parser(ts: List[Token]) = TokenListParser(ts) - -implicit def ParserOps[I : IsSeq, T](p: Parser[I, T]) = new { - def || (q : => Parser[I, T]) = new AltParser[I, T](p, q) - def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f) - def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q) -} - -implicit def TokenOps(t: Token) = new { - def || (q : => Parser[List[Token], List[Token]]) = new AltParser[List[Token], List[Token]](List(t), q) - def || (qs : List[Token]) = new AltParser[List[Token], List[Token]](List(t), qs) - def ==>[S] (f: => List[Token] => S) = new FunParser[List[Token], List[Token], S](List(t), f) - def ~[S](q : => Parser[List[Token], S]) = - new SeqParser[List[Token], List[Token], S](List(t), q) - def ~ (qs : List[Token]) = - new SeqParser[List[Token], List[Token], List[Token]](List(t), qs) -} - -implicit def TokenListOps(ts: List[Token]) = new { - def || (q : => Parser[List[Token], List[Token]]) = new AltParser[List[Token], List[Token]](ts, q) - def || (qs : List[Token]) = new AltParser[List[Token], List[Token]](ts, qs) - def ==>[S] (f: => List[Token] => S) = new FunParser[List[Token], List[Token], S](ts, f) - def ~[S](q : => Parser[List[Token], S]) = - new SeqParser[List[Token], List[Token], S](ts, q) - def ~ (qs : List[Token]) = - new SeqParser[List[Token], List[Token], List[Token]](ts, qs) -} - -// Abstract Syntax Trees -abstract class Stmt -abstract class AExp -abstract class BExp - -type Block = List[Stmt] - -case object Skip extends Stmt -case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt -case class While(b: BExp, bl: Block) extends Stmt -case class Assign(s: String, a: AExp) extends Stmt -case class Read(s: String) extends Stmt -case class WriteId(s: String) extends Stmt // for printing values of variables -case class WriteString(s: String) extends Stmt // for printing words - -case class Var(s: String) extends AExp -case class Num(i: Int) extends AExp -case class Aop(o: String, a1: AExp, a2: AExp) extends AExp - -case object True extends BExp -case object False extends BExp -case class Bop(o: String, a1: AExp, a2: AExp) extends BExp -case class And(b1: BExp, b2: BExp) extends BExp -case class Or(b1: BExp, b2: BExp) extends BExp - -case class IdParser() extends Parser[List[Token], String] { - def parse(tsb: List[Token]) = tsb match { - case T_ID(id) :: rest => Set((id, rest)) - case _ => Set() - } -} - -case class NumParser() extends Parser[List[Token], Int] { - def parse(tsb: List[Token]) = tsb match { - case T_NUM(n) :: rest => Set((n, rest)) - case _ => Set() - } -} - -case class StringParser() extends Parser[List[Token], String] { - def parse(tsb: List[Token]) = tsb match { - case T_STRING(s) :: rest => Set((s, rest)) - case _ => Set() - } -} - -case class TokParser(s: String) extends Parser[List[Token], String] { - def parse(tsb: List[Token]) = tsb match { - case T_OP(o) :: rest if s == o => Set((o, rest)) - case T_KWD(k) :: rest if s == k => Set((k, rest)) - case _ => Set() - } -} - -implicit def parser_interpolation(sc: StringContext) = new { - def p(args: Any*) = TokParser(sc.s(args:_*)) -} - - -// WHILE Language Parsing 
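//==========================================================================
// Editor's aside (not part of the changeset): the AExp/Te/Fa layering used
// in parser.sc above, and in this deleted parser2.sc, encodes operator
// precedence in the grammar itself -- AExp handles + and - last, Te binds
// *, / and % more tightly, and Fa covers numbers, variables and
// parenthesised expressions. A minimal sketch of the effect, assuming the
// parsers and the tokeniser from this changeset are in scope:

println(AExp.parse_all(tokenise("2 + 3 * 4")))
// expected: Set(Aop("+", Num(2), Aop("*", Num(3), Num(4))))

// Because AExp recurses on the right (Te ~ T_OP("-") ~ AExp), the binary
// operators associate to the right; e.g. 1 - 2 - 3 parses as
// Aop("-", Num(1), Aop("-", Num(2), Num(3))).
//==========================================================================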
-lazy val AExp: Parser[List[Token], AExp] = - (Te ~ T_OP("+") ~ AExp) ==> { case x ~ _ ~ z => Aop("+", x, z): AExp } || - (Te ~ T_OP("-") ~ AExp) ==> { case x ~ _ ~ z => Aop("-", x, z): AExp } || Te -lazy val Te: Parser[List[Token], AExp] = - (Fa ~ T_OP("*") ~ Te) ==> { case x ~ _ ~ z => Aop("*", x, z): AExp } || - (Fa ~ T_OP("/") ~ Te) ==> { case x ~ _ ~ z => Aop("/", x, z): AExp } || - (Fa ~ T_OP("%") ~ Te) ==> { case x ~ _ ~ z => Aop("%", x, z): AExp } || Fa -lazy val Fa: Parser[List[Token], AExp] = - (T_PAREN("(") ~ AExp ~ T_PAREN(")")) ==> { case _ ~ y ~ _ => y } || - IdParser() ==> Var || - NumParser() ==> Num - -lazy val BExp: Parser[List[Token], BExp] = - (AExp ~ T_OP("==") ~ AExp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } || - (AExp ~ T_OP("!=") ~ AExp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } || - (AExp ~ T_OP("<") ~ AExp) ==> { case x ~ _ ~ z => Bop("<", x, z): BExp } || - (AExp ~ T_OP(">") ~ AExp) ==> { case x ~ _ ~ z => Bop(">", x, z): BExp } || - (T_PAREN("(") ~ BExp ~ List(T_PAREN(")"), T_OP("&&")) ~ BExp) ==> { case _ ~ y ~ _ ~ v => And(y, v): BExp } || - (T_PAREN("(") ~ BExp ~ List(T_PAREN(")"), T_OP("||")) ~ BExp) ==> { case _ ~ y ~ _ ~ v => Or(y, v): BExp } || - (T_KEYWORD("true") ==> (_ => True: BExp )) || - (T_KEYWORD("false") ==> (_ => False: BExp )) || - (T_PAREN("(") ~ BExp ~ T_PAREN(")")) ==> { case _ ~ x ~ _ => x } - -lazy val Stmt: Parser[List[Token], Stmt] = - T_KEYWORD("skip") ==> (_ => Skip: Stmt) || - (IdParser() ~ T_OP(":=") ~ AExp) ==> { case id ~ _ ~ z => Assign(id, z): Stmt } || - (T_KEYWORD("if") ~ BExp ~ T_KEYWORD("then") ~ Block ~ T_KEYWORD("else") ~ Block) ==> { case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w): Stmt } || - (T_KEYWORD("while") ~ BExp ~ T_KEYWORD("do") ~ Block) ==> { case _ ~ y ~ _ ~ w => While(y, w) : Stmt } || - (T_KEYWORD("read") ~ IdParser()) ==> { case _ ~ id => Read(id): Stmt} || - (T_KEYWORD("write") ~ IdParser()) ==> { case _ ~ id => WriteId(id): Stmt} || - (T_KEYWORD("write") ~ StringParser()) ==> { case _ ~ s => WriteString(s): Stmt} - -lazy val Stmts: Parser[List[Token], Block] = - (Stmt ~ T_SEMI ~ Stmts) ==> { case x ~ _ ~ z => x :: z : Block } || - (Stmt ==> (s => List(s) : Block)) - -lazy val Block: Parser[List[Token], Block] = - (T_PAREN("{") ~ Stmts ~ T_PAREN("}")) ==> { case x ~ y ~ z => y} || - (Stmt ==> (s => List(s))) - -// Testing with programs 2 & 3 - -println("Fibonacci") -println(Stmts.parse_all(tokenise(os.read(os.pwd / "fib.while")))) - -println("Loops") -println(Stmts.parse_all(tokenise(os.read(os.pwd / "loops.while")))) - -println("Collatz") -println(Stmts.parse_all(tokenise(os.read(os.pwd / "collatz2.while")))) - - -// Interpreter - -// Environment to store values of variables -type Env = Map[String, Int] - -def eval_aexp(a: AExp, env: Env) : Int = a match { - case Num(i) => i - case Var(s) => env(s) - case Aop("+", a1, a2) => eval_aexp(a1, env) + eval_aexp(a2, env) - case Aop("-", a1, a2) => eval_aexp(a1, env) - eval_aexp(a2, env) - case Aop("*", a1, a2) => eval_aexp(a1, env) * eval_aexp(a2, env) - case Aop("/", a1, a2) => eval_aexp(a1, env) / eval_aexp(a2, env) - case Aop("%", a1, a2) => eval_aexp(a1, env) % eval_aexp(a2, env) -} - -def eval_bexp(b: BExp, env: Env) : Boolean = b match { - case True => true - case False => false - case Bop("==", a1, a2) => eval_aexp(a1, env) == eval_aexp(a2, env) - case Bop("!=", a1, a2) => !(eval_aexp(a1, env) == eval_aexp(a2, env)) - case Bop(">", a1, a2) => eval_aexp(a1, env) > eval_aexp(a2, env) - case Bop("<", a1, a2) => eval_aexp(a1, env) < eval_aexp(a2, env) - 
case And(b1, b2) => eval_bexp(b1, env) && eval_bexp(b2, env) - case Or(b1, b2) => eval_bexp(b1, env) || eval_bexp(b2, env) -} - -// Import needed to take int as input from the user -import scala.io.StdIn.readInt - -def eval_stmt(s: Stmt, env: Env) : Env = s match { - case Skip => env - case Assign(x, a) => env + (x -> eval_aexp(a, env)) - case If(b, bl1, bl2) => if (eval_bexp(b, env)) eval_bl(bl1, env) else eval_bl(bl2, env) - case While(b, bl) => - if (eval_bexp(b, env)) eval_stmt(While(b, bl), eval_bl(bl, env)) - else env - - case WriteId(x) => { print(env(x)) ; env } - case WriteString(x) => { - print(x.replaceAll("\"", "").replaceAll("""\\n""", "\n")) ; - env - } - - case Read(x) => { - println("Enter an integer and press ENTER:") ; - val n = readInt() ; // Note: Does not work when using the REPL - eval_stmt(Assign(x, Num(n)), env) - } -} - -def eval_bl(bl: Block, env: Env) : Env = bl match { - case Nil => env - case s::bl => eval_bl(bl, eval_stmt(s, env)) -} - -def eval(bl: Block) : Env = eval_bl(bl, Map()) - -println("Factors eval") -println(eval(Stmts.parse_all(tokenise(os.read(os.pwd / "factors.while"))).head)) - -println("Primes eval") -println(eval(Stmts.parse_all(tokenise(os.read(os.pwd / "primes.while"))).head)) - - -println("Collatz2 eval") -println(eval(Stmts.parse_all(tokenise(os.read(os.pwd / "collatz2.while"))).head)) - -println("Loops eval") -val start = System.nanoTime() -println(eval(Stmts.parse_all(tokenise(os.read(os.pwd / "loops.while"))).head)) -val end = System.nanoTime() -println("Time taken in seconds: ") -println((end - start)/(1.0e9))
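//==========================================================================
// Editor's aside (not part of the changeset): a minimal end-to-end sketch,
// assuming lexer.sc and parser.sc from this changeset are in scope. The
// WHILE program below is made up for illustration; it sums the numbers
// 10 down to 1.

val prog = """sum := 0; n := 10; while n > 0 do { sum := sum + n; n := n - 1 }"""

// tokenise comes from lexer.sc; Stmts and eval from parser.sc
println(eval(Stmts.parse_all(tokenise(prog)).head))
// expected final environment (Map ordering may vary): Map(sum -> 55, n -> 0)
//==========================================================================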