progs/fun/fun_parser.scala
// A parser for the Fun language
//================================
//
// call with
//
//     scala fun_parser.scala fact.tks
//
//     scala fun_parser.scala defs.tks
//
// this will generate a .prs file that can be deserialised back
// into a list of declarations

object Fun_Parser {

import scala.language.implicitConversions
import scala.language.reflectiveCalls
import scala.util._
import java.io._

// tokens (read back from the .tks files produced by the lexer)
abstract class Token extends Serializable
case object T_SEMI extends Token
case object T_COMMA extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(n: Int) extends Token
case class T_KWD(s: String) extends Token
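
// For illustration (a sketch, not part of the original file): the call
// expression  fact(10 + 1)  would presumably be tokenised as
//
//   List(T_ID("fact"), T_LPAREN, T_NUM(10), T_OP("+"), T_NUM(1), T_RPAREN)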
       

// Parser combinators
//    type parameter I needs to be of Seq-type
//
abstract class Parser[I, T](implicit ev: I => Seq[_]) {
  def parse(ts: I): Set[(T, I)]

  // returns the first parse that consumes the whole input; if there is
  // none, prints the "best" partial parse (least input left) and exits
  def parse_single(ts: I) : T =
    parse(ts).partition(_._2.isEmpty) match {
      case (good, _) if !good.isEmpty => good.head._1
      case (_, err) => {
        println(s"Parse Error\n${err.minBy(_._2.length)}") ; sys.exit(-1) }
    }
}


// convenience for writing grammar rules
case class ~[+A, +B](_1: A, _2: B)

class SeqParser[I, T, S](p: => Parser[I, T],
                         q: => Parser[I, S])(implicit ev: I => Seq[_]) extends Parser[I, ~[T, S]] {
  def parse(sb: I) =
    for ((head1, tail1) <- p.parse(sb);
         (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2)
}

class AltParser[I, T](p: => Parser[I, T],
                      q: => Parser[I, T])(implicit ev: I => Seq[_]) extends Parser[I, T] {
  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)
}

class FunParser[I, T, S](p: => Parser[I, T],
                         f: T => S)(implicit ev: I => Seq[_]) extends Parser[I, S] {
  def parse(sb: I) =
    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
}

// convenient combinators
implicit def ParserOps[I, T](p: Parser[I, T])(implicit ev: I => Seq[_]) = new {
  def || (q : => Parser[I, T]) = new AltParser[I, T](p, q)
  def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f)
  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
}


// parses a non-empty list of p's separated by q's (e.g. comma-separated arguments)
def ListParser[I, T, S](p: => Parser[I, T],
                        q: => Parser[I, S])(implicit ev: I => Seq[_]): Parser[I, List[T]] = {
  (p ~ q ~ ListParser(p, q)) ==> { case x ~ _ ~ z => x :: z : List[T] } ||
  (p ==> ((s) => List(s)))
}

// parser that matches one given token
case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t::ts if (t == tok) => Set((t, ts))
    case _ => Set ()
  }
}

implicit def token2tparser(t: Token) = TokParser(t)

implicit def TokOps(t: Token) = new {
  def || (q : => Parser[List[Token], Token]) = new AltParser[List[Token], Token](t, q)
  def ==>[S] (f: => Token => S) = new FunParser[List[Token], Token, S](t, f)
  def ~[S](q : => Parser[List[Token], S]) = new SeqParser[List[Token], Token, S](t, q)
}

case object NumParser extends Parser[List[Token], Int] {
  def parse(ts: List[Token]) = ts match {
    case T_NUM(n)::ts => Set((n, ts))
    case _ => Set ()
  }
}

case object IdParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = ts match {
    case T_ID(s)::ts => Set((s, ts))
    case _ => Set ()
  }
}
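
// A small check of how the combinators fit together (illustrative sketch,
// not part of the original file): the tokens for "1, 2, 3" can be parsed
// with ListParser; the complete parse appears alongside shorter prefixes.
//
//   ListParser(NumParser, T_COMMA).parse(
//     List(T_NUM(1), T_COMMA, T_NUM(2), T_COMMA, T_NUM(3)))
//   // contains (List(1, 2, 3), Nil); parse_single returns List(1, 2, 3)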
       


// Abstract syntax trees for the Fun language
abstract class Exp extends Serializable
abstract class BExp extends Serializable
abstract class Decl extends Serializable

case class Def(name: String, args: List[String], body: Exp) extends Decl
case class Main(e: Exp) extends Decl

case class Call(name: String, args: List[Exp]) extends Exp
case class If(a: BExp, e1: Exp, e2: Exp) extends Exp
case class Write(e: Exp) extends Exp
case class Var(s: String) extends Exp
case class Num(i: Int) extends Exp
case class Aop(o: String, a1: Exp, a2: Exp) extends Exp
case class Sequence(e1: Exp, e2: Exp) extends Exp
case class Bop(o: String, a1: Exp, a2: Exp) extends BExp
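
// For instance (illustrative only), the declaration
//
//   def fact(n) = if n == 0 then 1 else n * fact(n - 1)
//
// corresponds to the tree
//
//   Def("fact", List("n"),
//       If(Bop("==", Var("n"), Num(0)),
//          Num(1),
//          Aop("*", Var("n"), Call("fact", List(Aop("-", Var("n"), Num(1)))))))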
       


// Grammar Rules for the Fun language

// arithmetic expressions
lazy val Exp: Parser[List[Token], Exp] =
  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp) ==>
    { case _ ~ x ~ _ ~ y ~ _ ~ z => If(x, y, z): Exp } ||
  (M ~ T_SEMI ~ Exp) ==> { case x ~ _ ~ y => Sequence(x, y): Exp } || M
lazy val M: Parser[List[Token], Exp] =
  (T_KWD("write") ~ L) ==> { case _ ~ y => Write(y): Exp } || L
lazy val L: Parser[List[Token], Exp] =
  (T ~ T_OP("+") ~ Exp) ==> { case x ~ _ ~ z => Aop("+", x, z): Exp } ||
  (T ~ T_OP("-") ~ Exp) ==> { case x ~ _ ~ z => Aop("-", x, z): Exp } || T
lazy val T: Parser[List[Token], Exp] =
  (F ~ T_OP("*") ~ T) ==> { case x ~ _ ~ z => Aop("*", x, z): Exp } ||
  (F ~ T_OP("/") ~ T) ==> { case x ~ _ ~ z => Aop("/", x, z): Exp } ||
  (F ~ T_OP("%") ~ T) ==> { case x ~ _ ~ z => Aop("%", x, z): Exp } || F
lazy val F: Parser[List[Token], Exp] =
  (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN) ==>
    { case x ~ _ ~ z ~ _ => Call(x, z): Exp } ||
  (T_LPAREN ~ Exp ~ T_RPAREN) ==> { case _ ~ y ~ _ => y: Exp } ||
  IdParser ==> { case x => Var(x): Exp } ||
  NumParser ==> { case x => Num(x): Exp }

// boolean expressions
// (">" and ">=" are reduced to "<" and "<=" with the arguments swapped)
lazy val BExp: Parser[List[Token], BExp] =
  (Exp ~ T_OP("==") ~ Exp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } ||
  (Exp ~ T_OP("!=") ~ Exp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } ||
  (Exp ~ T_OP("<") ~ Exp)  ==> { case x ~ _ ~ z => Bop("<",  x, z): BExp } ||
  (Exp ~ T_OP(">") ~ Exp)  ==> { case x ~ _ ~ z => Bop("<",  z, x): BExp } ||
  (Exp ~ T_OP("<=") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", x, z): BExp } ||
  (Exp ~ T_OP(">=") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", z, x): BExp }


lazy val Defn: Parser[List[Token], Decl] =
   (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(IdParser, T_COMMA) ~ T_RPAREN ~ T_OP("=") ~ Exp) ==>
     { case _ ~ y ~ _ ~ w ~ _ ~ _ ~ r => Def(y, w, r): Decl }

lazy val Prog: Parser[List[Token], List[Decl]] =
  (Defn ~ T_SEMI ~ Prog) ==> { case x ~ _ ~ z => x :: z : List[Decl] } ||
  (Exp ==> ((s) => List(Main(s)) : List[Decl]))
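
// A quick end-to-end sanity check (sketch, not part of the original file):
// the tokens for "def id(x) = x; id(1)" parse to one Def and one Main
// declaration.
//
//   val tks = List(T_KWD("def"), T_ID("id"), T_LPAREN, T_ID("x"), T_RPAREN,
//                  T_OP("="), T_ID("x"), T_SEMI,
//                  T_ID("id"), T_LPAREN, T_NUM(1), T_RPAREN)
//   Prog.parse_single(tks)
//   // => List(Def("id", List("x"), Var("x")), Main(Call("id", List(Num(1)))))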
       


// Reading tokens and writing parse trees

def serialise[T](fname: String, data: T) = {
  import scala.util.Using
  Using(new ObjectOutputStream(new FileOutputStream(fname))) {
    out => out.writeObject(data)
  }
}

def deserialise[T](fname: String) : Try[T] = {
  import scala.util.Using
  Using(new ObjectInputStream(new FileInputStream(fname))) {
    in => in.readObject.asInstanceOf[T]
  }
}
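
// A round-trip sketch (hypothetical file name, not part of the original file):
//
//   serialise("test.prs", List[Decl](Main(Num(1))))
//   deserialise[List[Decl]]("test.prs")
//   // => Success(List(Main(Num(1))))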
       


def main(args: Array[String]) : Unit = {
  val fname = args(0)
  val pname = fname.stripSuffix(".tks") ++ ".prs"
  val tks = deserialise[List[Token]](fname).getOrElse(Nil)
  serialise(pname, Prog.parse_single(tks))

  // testing whether read-back is working
  //val ptree = deserialise[List[Decl]](pname).get
  //println(s"Reading back from ${pname}:\n${ptree.mkString("\n")}")
}

}