progs/fun/fun_parser.sc
changeset 955 47acfd7f9096
parent 870 739039774cee
child 961 c0600f8b6427
--- a/progs/fun/fun_parser.sc	Sat Nov 11 10:08:33 2023 +0000
+++ b/progs/fun/fun_parser.sc	Fri Nov 17 20:06:43 2023 +0000
@@ -4,7 +4,7 @@
 // call with 
 //
 //     amm fun_parser.sc fact.fun
-//
+
 //     amm fun_parser.sc defs.fun
 //
 // this will generate a parse-tree from a list
@@ -19,49 +19,66 @@
 // Parser combinators
 //    type parameter I needs to be of Seq-type
 //
-abstract class Parser[I, T](implicit ev: I => Seq[_]) {
+type IsSeq[I] = I => Seq[_]
+
+/*
+abstract class Parser[I, T](using is: I => Seq[_])  {
+  def parse(in: I): Set[(T, I)]  
+
+  def parse_all(in: I) : Set[T] =
+    for ((hd, tl) <- parse(in); 
+        if is(tl).isEmpty) yield hd
+}
+*/
+
+
+abstract class Parser[I, T](using is: I => Seq[_]) {
   def parse(ts: I): Set[(T, I)]
 
   def parse_single(ts: I) : T = 
-    parse(ts).partition(_._2.isEmpty) match {
+    parse(ts).partition(p => is(p._2).isEmpty) match {
       case (good, _) if !good.isEmpty => good.head._1
-      case (_, err) => { 
-	println (s"Parse Error\n${err.minBy(_._2.length)}") ; sys.exit(-1) }
+      case (_, err) => { println (s"Parse Error\n${err.minBy(p => is(p._2).length)}") ; sys.exit(-1) }
     }
 }
 
 // convenience for writing grammar rules
 case class ~[+A, +B](_1: A, _2: B)
 
-class SeqParser[I, T, S](p: => Parser[I, T], 
-                         q: => Parser[I, S])(implicit ev: I => Seq[_]) extends Parser[I, ~[T, S]] {
-  def parse(sb: I) = 
-    for ((head1, tail1) <- p.parse(sb); 
-         (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2)
+// parser combinators
+
+// alternative parser
+class AltParser[I : IsSeq, T](p: => Parser[I, T], 
+                              q: => Parser[I, T]) extends Parser[I, T] {
+  def parse(in: I) = p.parse(in) ++ q.parse(in)   
 }
 
-class AltParser[I, T](p: => Parser[I, T], 
-                      q: => Parser[I, T])(implicit ev: I => Seq[_]) extends Parser[I, T] {
-  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)   
+// sequence parser
+class SeqParser[I : IsSeq, T, S](p: => Parser[I, T], 
+                                 q: => Parser[I, S]) extends Parser[I, ~[T, S]] {
+  def parse(in: I) = 
+    for ((hd1, tl1) <- p.parse(in); 
+         (hd2, tl2) <- q.parse(tl1)) yield (new ~(hd1, hd2), tl2)
 }
 
-class FunParser[I, T, S](p: => Parser[I, T], 
-                         f: T => S)(implicit ev: I => Seq[_]) extends Parser[I, S] {
-  def parse(sb: I) = 
-    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
+// map parser
+class MapParser[I : IsSeq, T, S](p: => Parser[I, T], 
+                                 f: T => S) extends Parser[I, S] {
+  def parse(in: I) = for ((hd, tl) <- p.parse(in)) yield (f(hd), tl)
 }
 
-// convenient combinators
-implicit def ParserOps[I, T](p: Parser[I, T])(implicit ev: I => Seq[_]) = new {
-  def || (q : => Parser[I, T]) = new AltParser[I, T](p, q)
-  def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f)
+
+
+// more convenient syntax for parser combinators
+extension [I : IsSeq, T](p: Parser[I, T]) {
+  def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q)
   def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
+  def map[S](f: => T => S) = new MapParser[I, T, S](p, f)
 }
 
-def ListParser[I, T, S](p: => Parser[I, T], 
-                        q: => Parser[I, S])(implicit ev: I => Seq[_]): Parser[I, List[T]] = {
-  (p ~ q ~ ListParser(p, q)) ==> { case x ~ _ ~ z => x :: z : List[T] } ||
-  (p ==> ((s) => List(s)))
+def ListParser[I, T, S](p: => Parser[I, T], q: => Parser[I, S])(using is: I => Seq[_]): Parser[I, List[T]] = {
+  (p ~ q ~ ListParser(p, q)).map{ case (x:T) ~ (y:S) ~ (z:List[T]) => x :: z } ||
+  (p.map[List[T]]{s => List(s)})
 }
 
 case class TokParser(tok: Token) extends Parser[List[Token], Token] {
@@ -71,11 +88,12 @@
   }
 }
 
-implicit def token2tparser(t: Token) = TokParser(t)
+implicit def token2tparser(t: Token) : Parser[List[Token], Token] = TokParser(t)
+
 
-implicit def TokOps(t: Token) = new {
+extension (t: Token) {
   def || (q : => Parser[List[Token], Token]) = new AltParser[List[Token], Token](t, q)
-  def ==>[S] (f: => Token => S) = new FunParser[List[Token], Token, S](t, f)
+  def map[S] (f: => Token => S) = new MapParser[List[Token], Token, S](t, f)
   def ~[S](q : => Parser[List[Token], S]) = new SeqParser[List[Token], Token, S](t, q)
 }
 
@@ -118,41 +136,39 @@
 
 // arithmetic expressions
 lazy val Exp: Parser[List[Token], Exp] = 
-  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp) ==>
-    { case _ ~ x ~ _ ~ y ~ _ ~ z => If(x, y, z): Exp } ||
-  (M ~ T_SEMI ~ Exp) ==> { case x ~ _ ~ y => Sequence(x, y): Exp } || M
+  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp).map{ case _ ~ x ~ _ ~ y ~ _ ~ z => If(x, y, z): Exp } ||
+  (M ~ T_SEMI ~ Exp).map{ case x ~ _ ~ y => Sequence(x, y): Exp } || M
 lazy val M: Parser[List[Token], Exp] =
-  (T_KWD("write") ~ L) ==> { case _ ~ y => Write(y): Exp } || L
+  (T_KWD("write") ~ L).map{ case _ ~ y => Write(y): Exp } || L
 lazy val L: Parser[List[Token], Exp] = 
-  (T ~ T_OP("+") ~ Exp) ==> { case x ~ _ ~ z => Aop("+", x, z): Exp } ||
-  (T ~ T_OP("-") ~ Exp) ==> { case x ~ _ ~ z => Aop("-", x, z): Exp } || T  
+  (T ~ T_OP("+") ~ Exp).map{ case x ~ _ ~ z => Aop("+", x, z): Exp } ||
+  (T ~ T_OP("-") ~ Exp).map{ case x ~ _ ~ z => Aop("-", x, z): Exp } || T  
 lazy val T: Parser[List[Token], Exp] = 
-  (F ~ T_OP("*") ~ T) ==> { case x ~ _ ~ z => Aop("*", x, z): Exp } || 
-  (F ~ T_OP("/") ~ T) ==> { case x ~ _ ~ z => Aop("/", x, z): Exp } || 
-  (F ~ T_OP("%") ~ T) ==> { case x ~ _ ~ z => Aop("%", x, z): Exp } || F
+  (F ~ T_OP("*") ~ T).map{ case x ~ _ ~ z => Aop("*", x, z): Exp } || 
+  (F ~ T_OP("/") ~ T).map{ case x ~ _ ~ z => Aop("/", x, z): Exp } || 
+  (F ~ T_OP("%") ~ T).map{ case x ~ _ ~ z => Aop("%", x, z): Exp } || F
 lazy val F: Parser[List[Token], Exp] = 
-  (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN) ==> 
+  (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN).map
     { case x ~ _ ~ z ~ _ => Call(x, z): Exp } ||
-  (T_LPAREN ~ Exp ~ T_RPAREN) ==> { case _ ~ y ~ _ => y: Exp } || 
-  IdParser ==> { case x => Var(x): Exp } || 
-  NumParser ==> { case x => Num(x): Exp }
+  (T_LPAREN ~ Exp ~ T_RPAREN).map{ case _ ~ y ~ _ => y: Exp } || 
+  IdParser.map{ case x => Var(x): Exp } || 
+  NumParser.map{ case x => Num(x): Exp }
 
 // boolean expressions
 lazy val BExp: Parser[List[Token], BExp] = 
-  (Exp ~ T_OP("==") ~ Exp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } || 
-  (Exp ~ T_OP("!=") ~ Exp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } || 
-  (Exp ~ T_OP("<") ~ Exp)  ==> { case x ~ _ ~ z => Bop("<",  x, z): BExp } || 
-  (Exp ~ T_OP(">") ~ Exp)  ==> { case x ~ _ ~ z => Bop("<",  z, x): BExp } || 
-  (Exp ~ T_OP("<=") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", x, z): BExp } || 
-  (Exp ~ T_OP("=>") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", z, x): BExp }  
+  (Exp ~ T_OP("==") ~ Exp).map{ case x ~ _ ~ z => Bop("==", x, z): BExp } || 
+  (Exp ~ T_OP("!=") ~ Exp).map{ case x ~ _ ~ z => Bop("!=", x, z): BExp } || 
+  (Exp ~ T_OP("<") ~ Exp) .map{ case x ~ _ ~ z => Bop("<",  x, z): BExp } || 
+  (Exp ~ T_OP(">") ~ Exp) .map{ case x ~ _ ~ z => Bop("<",  z, x): BExp } || 
+  (Exp ~ T_OP("<=") ~ Exp).map{ case x ~ _ ~ z => Bop("<=", x, z): BExp } || 
+  (Exp ~ T_OP("=>") ~ Exp).map{ case x ~ _ ~ z => Bop("<=", z, x): BExp }  
 
 lazy val Defn: Parser[List[Token], Decl] =
-   (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(IdParser, T_COMMA) ~ T_RPAREN ~ T_OP("=") ~ Exp) ==>
-     { case _ ~ y ~ _ ~ w ~ _ ~ _ ~ r => Def(y, w, r): Decl }
+   (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(IdParser, T_COMMA) ~ T_RPAREN ~ T_OP("=") ~ Exp).map{ case _ ~ y ~ _ ~ w ~ _ ~ _ ~ r => Def(y, w, r): Decl }
 
 lazy val Prog: Parser[List[Token], List[Decl]] =
-  (Defn ~ T_SEMI ~ Prog) ==> { case x ~ _ ~ z => x :: z : List[Decl] } ||
-  (Exp ==> ((s) => List(Main(s)) : List[Decl]))
+  (Defn ~ T_SEMI ~ Prog).map{ case x ~ _ ~ z => x :: z : List[Decl] } ||
+  (Exp.map((s) => List(Main(s)) : List[Decl]))
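
For illustration, a minimal usage sketch of the combinators above (assuming the Parser class and the ||, ~ and map extensions are in scope; ElemParser is a hypothetical atomic parser introduced only for this example):

// hypothetical atomic parser: accepts a single given Int at the head of the input
case class ElemParser(i: Int) extends Parser[List[Int], Int] {
  def parse(in: List[Int]) = in match {
    case hd :: tl if hd == i => Set((hd, tl))
    case _ => Set()
  }
}

// either a 1 followed by a 2 (results added together), or a single 1
val AddOrSingle : Parser[List[Int], Int] =
  (ElemParser(1) ~ ElemParser(2)).map{ case x ~ y => x + y } ||
  ElemParser(1)

// AddOrSingle.parse_single(List(1, 2))   // ==> 3
// AddOrSingle.parse(List(1))             // ==> Set((1, List()))

The grammar rules Exp, BExp, Defn and Prog above are built in exactly this way, just over List[Token] instead of List[Int].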