--- a/progs/parser-combinators/c.sc Sun Nov 15 00:33:16 2020 +0000
+++ b/progs/parser-combinators/c.sc Mon Nov 16 01:49:24 2020 +0000
@@ -1,249 +1,229 @@
-// A parser and interpreter for the While language
-//
+// Parser Combinators: Simple Version
+//====================================
+//
+// Call with
+//
+// amm comb1.sc
-import scala.language.implicitConversions
-import scala.language.reflectiveCalls
-
-
-// more convenience for the semantic actions later on
-case class ~[+A, +B](_1: A, _2: B)
+
+// Note that in the lectures I did not show the implicit type
+// constraint I : IsSeq, which requires the input type 'I' to be
+// a sequence.
type IsSeq[A] = A => Seq[_]
-abstract class Parser[I : IsSeq, T] {
- def parse(ts: I): Set[(T, I)]
-
- def parse_all(ts: I) : Set[T] =
- for ((head, tail) <- parse(ts); if tail.isEmpty) yield head
-}
+abstract class Parser[I : IsSeq, T] {
+ def parse(in: I): Set[(T, I)]
-class SeqParser[I : IsSeq, T, S](p: => Parser[I, T], q: => Parser[I, S]) extends Parser[I, ~[T, S]] {
- def parse(sb: I) =
- for ((head1, tail1) <- p.parse(sb);
- (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2)
-}
-
-class AltParser[I : IsSeq, T](p: => Parser[I, T], q: => Parser[I, T]) extends Parser[I, T] {
- def parse(sb: I) = p.parse(sb) ++ q.parse(sb)
+ def parse_all(in: I) : Set[T] =
+ for ((hd, tl) <- parse(in);
+ if tl.isEmpty) yield hd
}
-class MapParser[I : IsSeq, T, S](p: => Parser[I, T], f: T => S) extends Parser[I, S] {
- def parse(sb: I) =
- for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
+// parser combinators
+
+// alternative parser
+class AltParser[I : IsSeq, T](p: => Parser[I, T],
+ q: => Parser[I, T]) extends Parser[I, T] {
+ def parse(in: I) = p.parse(in) ++ q.parse(in)
}
-case class StrParser(s: String) extends Parser[String, String] {
- def parse(sb: String) = {
- val (prefix, suffix) = sb.splitAt(s.length)
- if (prefix == s) Set((prefix, suffix)) else Set()
- }
+// sequence parser
+class SeqParser[I : IsSeq, T, S](p: => Parser[I, T],
+ q: => Parser[I, S]) extends Parser[I, (T, S)] {
+ def parse(in: I) =
+ for ((hd1, tl1) <- p.parse(in);
+ (hd2, tl2) <- q.parse(tl1)) yield ((hd1, hd2), tl2)
}
-case object NumParser extends Parser[String, Int] {
- val reg = "[0-9]+".r
- def parse(sb: String) = reg.findPrefixOf(sb) match {
- case None => Set()
- case Some(s) => {
- val (head, tail) = sb.splitAt(s.length)
- Set((head.toInt, tail))
- }
- }
+// map parser
+class MapParser[I : IsSeq, T, S](p: => Parser[I, T],
+ f: T => S) extends Parser[I, S] {
+ def parse(in: I) = for ((hd, tl) <- p.parse(in)) yield (f(hd), tl)
}
-implicit def parser_interpolation(sc: StringContext) = new {
- def p(args: Any*) = StrParser(sc.s(args:_*))
+
+// an example of an atomic parser for characters
+case class CharParser(c: Char) extends Parser[String, Char] {
+ def parse(in: String) =
+ if (in != "" && in.head == c) Set((c, in.tail)) else Set()
}
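+
+// a small usage example: a matching character is consumed,
+// anything else results in the empty set of parses
+CharParser('a').parse("abc")   // Set(('a', "bc"))
+CharParser('a').parse("bc")    // Set()
+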
-// this string interpolation allows us to write
-// things like the following for a StrParser
-//
-// p"<_some_string_>"
-//
-// instead of StrParser(<_some_string_>)
-
-
-implicit def ParserOps[I : IsSeq, T](p: Parser[I, T]) = new {
- def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q)
- def ~[S](q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
- def map[S](f: => T => S) = new MapParser[I, T, S](p, f)
-}
-
-// these implicits allow us to use infic notation for
-// sequences and alternatives; we also can write map
-// for a parser
-
-// the abstract syntax trees for the WHILE language
-abstract class Stmt
-abstract class AExp
-abstract class BExp
-
-type Block = List[Stmt]
-
-case object Skip extends Stmt
-case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt
-case class While(b: BExp, bl: Block) extends Stmt
-case class Assign(s: String, a: AExp) extends Stmt
-case class Write(s: String) extends Stmt
-
+// an atomic parser for parsing strings according to a regex
+import scala.util.matching.Regex
-case class Var(s: String) extends AExp
-case class Num(i: Int) extends AExp
-case class Aop(o: String, a1: AExp, a2: AExp) extends AExp
-
-case object True extends BExp
-case object False extends BExp
-case class Bop(o: String, a1: AExp, a2: AExp) extends BExp
-case class And(b1: BExp, b2: BExp) extends BExp
-case class Or(b1: BExp, b2: BExp) extends BExp
-
-case object IdParser extends Parser[String, String] {
- val reg = "[a-z][a-z,0-9]*".r
- def parse(sb: String) = reg.findPrefixOf(sb) match {
+case class RegexParser(reg: Regex) extends Parser[String, String] {
+ def parse(in: String) = reg.findPrefixMatchOf(in) match {
case None => Set()
- case Some(s) => Set(sb.splitAt(s.length))
+ case Some(m) => Set((m.matched, m.after.toString))
}
}
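+
+// a small usage example: the regex is only matched at the start
+// of the input, and the rest is returned unconsumed
+RegexParser("[0-9]+".r).parse("123abc")   // Set(("123", "abc"))
+RegexParser("[0-9]+".r).parse("abc")      // Set()
+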
-// arithmetic expressions
-lazy val AExp: Parser[String, AExp] =
- (Te ~ p"+" ~ AExp).map[AExp]{ case x ~ _ ~ z => Aop("+", x, z) } ||
- (Te ~ p"-" ~ AExp).map[AExp]{ case x ~ _ ~ z => Aop("-", x, z) } || Te
-lazy val Te: Parser[String, AExp] =
- (Fa ~ p"*" ~ Te).map[AExp]{ case x ~ _ ~ z => Aop("*", x, z) } ||
- (Fa ~ p"/" ~ Te).map[AExp]{ case x ~ _ ~ z => Aop("/", x, z) } || Fa
-lazy val Fa: Parser[String, AExp] =
- (p"(" ~ AExp ~ p")").map{ case _ ~ y ~ _ => y } ||
- IdParser.map(Var) ||
- NumParser.map(Num)
+// atomic parsers for numbers and "verbatim" strings
+val NumParser = RegexParser("[0-9]+".r)
+def StrParser(s: String) = RegexParser(Regex.quote(s).r)
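+
+// for example: NumParser still returns the digits as a String,
+// and StrParser matches a literal string (Regex.quote escapes
+// any special regex characters in it)
+NumParser.parse("123abc")       // Set(("123", "abc"))
+StrParser("if").parse("if x")   // Set(("if", " x"))
+StrParser("(").parse("(1)")     // Set(("(", "1)"))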
+
-// boolean expressions with some simple nesting
-lazy val BExp: Parser[String, BExp] =
- (AExp ~ p"==" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop("==", x, z) } ||
- (AExp ~ p"!=" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop("!=", x, z) } ||
- (AExp ~ p"<" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop("<", x, z) } ||
- (AExp ~ p">" ~ AExp).map[BExp]{ case x ~ _ ~ z => Bop(">", x, z) } ||
- (p"(" ~ BExp ~ p")" ~ p"&&" ~ BExp).map[BExp]{ case _ ~ y ~ _ ~ _ ~ v => And(y, v) } ||
- (p"(" ~ BExp ~ p")" ~ p"||" ~ BExp).map[BExp]{ case _ ~ y ~ _ ~ _ ~ v => Or(y, v) } ||
- (p"true".map[BExp]{ _ => True }) ||
- (p"false".map[BExp]{ _ => False }) ||
- (p"(" ~ BExp ~ p")").map[BExp]{ case _ ~ x ~ _ => x }
-// statement / statements
-lazy val Stmt: Parser[String, Stmt] =
- ((p"skip".map[Stmt]{_ => Skip }) ||
- (IdParser ~ p":=" ~ AExp).map[Stmt]{ case x ~ _ ~ z => Assign(x, z) } ||
- (p"write(" ~ IdParser ~ p")").map[Stmt]{ case _ ~ y ~ _ => Write(y) } ||
- (p"if" ~ BExp ~ p"then" ~ Block ~ p"else" ~ Block)
- .map[Stmt]{ case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w) } ||
- (p"while" ~ BExp ~ p"do" ~ Block).map[Stmt]{ case _ ~ y ~ _ ~ w => While(y, w) })
-
-lazy val Stmts: Parser[String, Block] =
- (Stmt ~ p";" ~ Stmts).map[Block]{ case x ~ _ ~ z => x :: z } ||
- (Stmt.map[Block]{ s => List(s) })
+// NumParserInt transforms a "string integer" into a proper Int
+// (needs "new" because MapParser is not a case class)
-// blocks (enclosed in curly braces)
-lazy val Block: Parser[String, Block] =
- ((p"{" ~ Stmts ~ p"}").map{ case _ ~ y ~ _ => y } ||
- (Stmt.map(s => List(s))))
+val NumParserInt = new MapParser(NumParser, (s: String) => s.toInt)
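+
+// for example:
+NumParserInt.parse("123abc")   // Set((123, "abc"))
+NumParserInt.parse("abc")      // Set()
+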
-Stmts.parse_all("x2:=5+3;")
-Block.parse_all("{x:=5;y:=8}")
-Block.parse_all("if(false)then{x:=5}else{x:=10}")
+// the following string interpolation allows us to write
+// StrParser(_some_string_) more conveniently as
+//
+// p"<_some_string_>"
+
+implicit def parser_interpolation(sc: StringContext) = new {
+ def p(args: Any*) = StrParser(sc.s(args:_*))
+}
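+
+// for example, the following two parsers behave in the same way:
+StrParser("while").parse("while done")   // Set(("while", " done"))
+(p"while").parse("while done")           // Set(("while", " done"))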
+
-val fib = """n := 10;
- minus1 := 0;
- minus2 := 1;
- temp := 0;
- while (n > 0) do {
- temp := minus2;
- minus2 := minus1 + minus2;
- minus1 := temp;
- n := n - 1
- };
- result := minus2""".replaceAll("\\s+", "")
+// more convenient syntax for parser combinators
+implicit def ParserOps[I : IsSeq, T](p: Parser[I, T]) = new {
+ def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q)
+ def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
+ def map[S](f: => T => S) = new MapParser[I, T, S](p, f)
+}
-Stmts.parse_all(fib)
+// these implicits allow us to use an infix notation for
+// sequences and alternatives; we can also write the usual
+// map for a MapParser
+
+
+// with this, NumParserInt can now be written more conveniently
+// as:
+
+val NumParserInt2 = NumParser.map(_.toInt)
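+
+// the infix operators can be used in the same way, for example:
+(p"a" ~ p"b").parse("abc")    // Set((("a", "b"), "c"))
+(p"a" || p"b").parse("bcd")   // Set(("b", "cd"))
+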
-// an interpreter for the WHILE language
-type Env = Map[String, Int]
+// A parser for palindromes (just returns them as a string)
+lazy val Pal : Parser[String, String] = {
+ (p"a" ~ Pal ~ p"a").map{ case ((x, y), z) => s"$x$y$z" } ||
+ (p"b" ~ Pal ~ p"b").map{ case ((x, y), z) => s"$x$y$z" } ||
+ p"a" || p"b" || p""
+}
+
+// examples
+Pal.parse_all("abaaaba")
+Pal.parse("abaaaba")
+
+println("Palindrome: " + Pal.parse_all("abaaaba"))
-def eval_aexp(a: AExp, env: Env) : Int = a match {
- case Num(i) => i
- case Var(s) => env(s)
- case Aop("+", a1, a2) => eval_aexp(a1, env) + eval_aexp(a2, env)
- case Aop("-", a1, a2) => eval_aexp(a1, env) - eval_aexp(a2, env)
- case Aop("*", a1, a2) => eval_aexp(a1, env) * eval_aexp(a2, env)
- case Aop("/", a1, a2) => eval_aexp(a1, env) / eval_aexp(a2, env)
-}
+// A parser for well-nested parentheses
+//
+// P ::= ( P ) P | epsilon
+//
+// (transforms '(' -> '{' , ')' -> '}' )
+lazy val P : Parser[String, String] = {
+ (p"(" ~ P ~ p")" ~ P).map{ case (((_, x), _), y) => "{" + x + "}" + y } ||
+ p""
+}
+
+println(P.parse_all("(((()()))())"))
+println(P.parse_all("(((()()))()))"))
+println(P.parse_all(")("))
+println(P.parse_all("()"))
+
+// A parser for arithmetic expressions (Terms and Factors)
-def eval_bexp(b: BExp, env: Env) : Boolean = b match {
- case True => true
- case False => false
- case Bop("==", a1, a2) => eval_aexp(a1, env) == eval_aexp(a2, env)
- case Bop("!=", a1, a2) => !(eval_aexp(a1, env) == eval_aexp(a2, env))
- case Bop(">", a1, a2) => eval_aexp(a1, env) > eval_aexp(a2, env)
- case Bop("<", a1, a2) => eval_aexp(a1, env) < eval_aexp(a2, env)
- case And(b1, b2) => eval_bexp(b1, env) && eval_bexp(b2, env)
- case Or(b1, b2) => eval_bexp(b1, env) || eval_bexp(b2, env)
-}
+lazy val E: Parser[String, Int] = {
+ (T ~ p"+" ~ E).map{ case ((x, _), z) => x + z } ||
+ (T ~ p"-" ~ E).map{ case ((x, _), z) => x - z } || T }
+lazy val T: Parser[String, Int] = {
+ (F ~ p"*" ~ T).map{ case ((x, _), z) => x * z } || F }
+lazy val F: Parser[String, Int] = {
+ (p"(" ~ E ~ p")").map{ case ((_, y), _) => y } || NumParserInt }
-def eval_stmt(s: Stmt, env: Env) : Env = s match {
- case Skip => env
- case Assign(x, a) => env + (x -> eval_aexp(a, env))
- case If(b, bl1, bl2) => if (eval_bexp(b, env)) eval_bl(bl1, env) else eval_bl(bl2, env)
- case While(b, bl) =>
- if (eval_bexp(b, env)) eval_stmt(While(b, bl), eval_bl(bl, env))
- else env
- case Write(x) => { println(env(x)) ; env }
-}
+println(E.parse_all("1+3+4"))
+println(E.parse("1+3+4"))
+println(E.parse_all("4*2+3"))
+println(E.parse_all("4*(2+3)"))
+println(E.parse_all("(4)*((2+3))"))
+println(E.parse_all("4/2+3"))
+println(E.parse("1 + 2 * 3"))
+println(E.parse_all("(1+2)+3"))
+println(E.parse_all("1+2+3"))
+
-def eval_bl(bl: Block, env: Env) : Env = bl match {
- case Nil => env
- case s::bl => eval_bl(bl, eval_stmt(s, env))
-}
+// with parser combinators (and other parsing algorithms)
+// no left-recursion is allowed, otherwise they will loop forever
-def eval(bl: Block) : Env = eval_bl(bl, Map())
+lazy val EL: Parser[String, Int] =
+ ((EL ~ p"+" ~ EL).map{ case ((x, y), z) => x + z} ||
+ (EL ~ p"*" ~ EL).map{ case ((x, y), z) => x * z} ||
+ (p"(" ~ EL ~ p")").map{ case ((x, y), z) => y} ||
+ NumParserInt)
-// parse + evaluate fib program; then lookup what is
-// stored under the variable result
-println(eval(Stmts.parse_all(fib).head)("result"))
+// this will run forever:
+//println(EL.parse_all("1+2+3"))
-// more examles
+// non-ambiguous vs ambiguous grammars
+
+// ambiguous
+lazy val S : Parser[String, String] =
+ (p"1" ~ S ~ S).map{ case ((x, y), z) => x + y + z } || p""
-// calculate and print all factors bigger
-// than 1 and smaller than n
-println("Factors")
+//println(time(S.parse("1" * 10)))
+//println(time(S.parse_all("1" * 10)))
+
+// non-ambiguous
+lazy val U : Parser[String, String] =
+ (p"1" ~ U).map{ case (x, y) => x + y } || p""
-val factors =
- """n := 12;
- f := 2;
- while (f < n / 2 + 1) do {
- if ((n / f) * f == n) then { write(f) } else { skip };
- f := f + 1
- }""".replaceAll("\\s+", "")
+//println(time(U.parse("1" * 10)))
+//println(time(U.parse_all("1" * 10)))
+println(U.parse("1" * 25))
+
+U.parse("11")
+U.parse("11111")
+U.parse("11011")
-eval(Stmts.parse_all(factors).head)
+U.parse_all("1" * 100)
+U.parse_all("1" * 100 + "0")
+
+// you can see the difference in the second example
+//S.parse_all("1" * 100) // succeeds
+//S.parse_all("1" * 100 + "0") // fails
+
-// calculate all prime numbers up to a number
-println("Primes")
+// A variant which counts how many 1s are parsed
+lazy val UCount : Parser[String, Int] =
+ (p"1" ~ UCount).map{ case (_, y) => y + 1 } || p"".map{ _ => 0 }
+
+println(UCount.parse("11111"))
+println(UCount.parse_all("11111"))
+
+// Two single character parsers
+lazy val One : Parser[String, String] = p"a"
+lazy val Two : Parser[String, String] = p"b"
+
+One.parse("a")
+One.parse("aaa")
-val primes =
- """end := 100;
- n := 2;
- while (n < end) do {
- f := 2;
- tmp := 0;
- while ((f < n / 2 + 1) && (tmp == 0)) do {
- if ((n / f) * f == n) then { tmp := 1 } else { skip };
- f := f + 1
- };
- if (tmp == 0) then { write(n) } else { skip };
- n := n + 1
- }""".replaceAll("\\s+", "")
+// note how the pairs nest to the left with sequence parsers
+(One ~ One).parse("aaa")
+(One ~ One ~ One).parse("aaa")
+(One ~ One ~ One ~ One).parse("aaaa")
+
+(One || Two).parse("aaa")
+
+
-eval(Stmts.parse_all(primes).head)
+// a problem with the arithmetic expression parser: it
+// gets very slow with deeply nested parentheses
+
+println("Runtime problem")
+println(E.parse("1"))
+println(E.parse("(1)"))
+println(E.parse("((1))"))
+//println(E.parse("(((1)))"))
+//println(E.parse("((((1))))"))
+//println(E.parse("((((((1))))))"))
+//println(E.parse("(((((((1)))))))"))
+//println(E.parse("((((((((1)))))))"))