// A parser for the Fun language
//================================
//
// call with
//
//   amm fun_parser.sc fact.fun
//
//   amm fun_parser.sc defs.fun
//
// this will generate a parse-tree from a list
// of tokens


import $file.fun_tokens, fun_tokens._

import scala.language.implicitConversions
import scala.language.reflectiveCalls


// Parser combinators
//    type parameter I needs to be of Seq-type
//
type IsSeq[I] = I => Seq[?]


abstract class Parser[I, T](using is: I => Seq[?]) {
  def parse(ts: I): Set[(T, I)]

  def parse_single(ts: I) : T =
    parse(ts).partition(p => is(p._2).isEmpty) match {
      case (good, _) if !good.isEmpty => good.head._1
      case (_, err) => { println(s"Parse Error\n${err.minBy(p => is(p._2).length)}") ; sys.exit(-1) }
    }
}
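
// Note on the two entry points above: parse returns every possible
// (result, unconsumed input) pair, while parse_single keeps only the parses
// that consume the whole input (and reports the attempt with the least
// leftover input if there is none). A small sketch, using the NumParser
// defined further below:
//
//   NumParser.parse(List(T_NUM(3), T_SEMI))   // => Set((3, List(T_SEMI)))
//   NumParser.parse_single(List(T_NUM(3)))    // => 3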

// convenience for writing grammar rules
case class ~[+A, +B](_1: A, _2: B)

// parser combinators

// alternative parser
class AltParser[I : IsSeq, T](p: => Parser[I, T],
                              q: => Parser[I, T]) extends Parser[I, T] {
  def parse(in: I) = p.parse(in) ++ q.parse(in)
}

// sequence parser
class SeqParser[I : IsSeq, T, S](p: => Parser[I, T],
                                 q: => Parser[I, S]) extends Parser[I, ~[T, S]] {
  def parse(in: I) =
    for ((hd1, tl1) <- p.parse(in);
         (hd2, tl2) <- q.parse(tl1)) yield (new ~(hd1, hd2), tl2)
}

// map parser
class MapParser[I : IsSeq, T, S](p: => Parser[I, T],
                                 f: T => S) extends Parser[I, S] {
  def parse(in: I) = for ((hd, tl) <- p.parse(in)) yield (f(hd), tl)
}


// more convenient syntax for parser combinators
extension [I : IsSeq, T](p: Parser[I, T]) {
  def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q)
  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
  def map[S](f: => T => S) = new MapParser[I, T, S](p, f)
}
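
// With this syntax, grammar rules can be written almost verbatim. A minimal
// sketch (the concrete parsers NumParser and IdParser are defined further
// below): a parser for an identifier followed by a number, returned as a pair.
//
//   val IdNum : Parser[List[Token], (String, Int)] =
//     (IdParser ~ NumParser).map{ case x ~ n => (x, n) }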

def ListParser[I, T, S](p: => Parser[I, T], q: => Parser[I, S])(using is: I => Seq[?]): Parser[I, List[T]] = {
  (p ~ q ~ ListParser(p, q)).map{ case (x:T) ~ (y:S) ~ (z:List[T]) => x :: z } ||
  (p.map[List[T]]{s => List(s)})
}
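
// ListParser(p, q) recognises one or more p's separated by q's and collects
// the p-results in a list (the q-results, for example commas, are thrown
// away); it is used below for argument lists, as in ListParser(Exp, T_COMMA).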

case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t::ts if (t == tok) => Set((t, ts))
    case _ => Set()
  }
}

implicit def token2tparser(t: Token) : Parser[List[Token], Token] = TokParser(t)


extension (t: Token) {
  def || (q : => Parser[List[Token], Token]) = new AltParser[List[Token], Token](t, q)
  def map[S] (f: => Token => S) = new MapParser[List[Token], Token, S](t, f)
  def ~[S](q : => Parser[List[Token], S]) = new SeqParser[List[Token], Token, S](t, q)
}

case object NumParser extends Parser[List[Token], Int] {
  def parse(ts: List[Token]) = ts match {
    case T_NUM(n)::ts => Set((n, ts))
    case _ => Set()
  }
}

case object IdParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = ts match {
    case T_ID(s)::ts => Set((s, ts))
    case _ => Set()
  }
}
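
// A small example of how the pieces above compose (a sketch; the name NumList
// is illustrative and the tokens T_LPAREN, T_RPAREN, T_COMMA, T_NUM come from
// fun_tokens): a parser for a parenthesised, comma-separated list of numbers.
val NumList : Parser[List[Token], List[Int]] =
  (T_LPAREN ~ ListParser(NumParser, T_COMMA) ~ T_RPAREN).map{ case _ ~ ns ~ _ => ns }
//
//   NumList.parse_single(List(T_LPAREN, T_NUM(1), T_COMMA, T_NUM(2), T_RPAREN))
//     // => List(1, 2)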



// Abstract syntax trees for the Fun language
abstract class Exp extends Serializable
abstract class BExp extends Serializable
abstract class Decl extends Serializable

case class Def(name: String, args: List[String], body: Exp) extends Decl
case class Main(e: Exp) extends Decl

case class Call(name: String, args: List[Exp]) extends Exp
case class If(a: BExp, e1: Exp, e2: Exp) extends Exp
case class Write(e: Exp) extends Exp
case class Var(s: String) extends Exp
case class Num(i: Int) extends Exp
case class Aop(o: String, a1: Exp, a2: Exp) extends Exp
case class Sequence(e1: Exp, e2: Exp) extends Exp
case class Bop(o: String, a1: Exp, a2: Exp) extends BExp
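
// For example, the Fun expression  fact(n - 1)  is represented by the tree
//
//   Call("fact", List(Aop("-", Var("n"), Num(1))))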



// Grammar Rules for the Fun language

// arithmetic expressions
lazy val Exp: Parser[List[Token], Exp] =
  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp).map{ case _ ~ x ~ _ ~ y ~ _ ~ z => If(x, y, z): Exp } ||
  (M ~ T_SEMI ~ Exp).map{ case x ~ _ ~ y => Sequence(x, y): Exp } || M
lazy val M: Parser[List[Token], Exp] =
  (T_KWD("write") ~ L).map{ case _ ~ y => Write(y): Exp } || L
lazy val L: Parser[List[Token], Exp] =
  (T ~ T_OP("+") ~ Exp).map{ case x ~ _ ~ z => Aop("+", x, z): Exp } ||
  (T ~ T_OP("-") ~ Exp).map{ case x ~ _ ~ z => Aop("-", x, z): Exp } || T
lazy val T: Parser[List[Token], Exp] =
  (F ~ T_OP("*") ~ T).map{ case x ~ _ ~ z => Aop("*", x, z): Exp } ||
  (F ~ T_OP("/") ~ T).map{ case x ~ _ ~ z => Aop("/", x, z): Exp } ||
  (F ~ T_OP("%") ~ T).map{ case x ~ _ ~ z => Aop("%", x, z): Exp } || F
lazy val F: Parser[List[Token], Exp] =
  (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN).map
    { case x ~ _ ~ z ~ _ => Call(x, z): Exp } ||
  (T_LPAREN ~ Exp ~ T_RPAREN).map{ case _ ~ y ~ _ => y: Exp } ||
  IdParser.map{ case x => Var(x): Exp } ||
  NumParser.map{ case x => Num(x): Exp }

// boolean expressions
lazy val BExp: Parser[List[Token], BExp] =
  (Exp ~ T_OP("==") ~ Exp).map{ case x ~ _ ~ z => Bop("==", x, z): BExp } ||
  (Exp ~ T_OP("!=") ~ Exp).map{ case x ~ _ ~ z => Bop("!=", x, z): BExp } ||
  (Exp ~ T_OP("<") ~ Exp) .map{ case x ~ _ ~ z => Bop("<", x, z): BExp } ||
  (Exp ~ T_OP(">") ~ Exp) .map{ case x ~ _ ~ z => Bop("<", z, x): BExp } ||
  (Exp ~ T_OP("<=") ~ Exp).map{ case x ~ _ ~ z => Bop("<=", x, z): BExp } ||
  (Exp ~ T_OP("=>") ~ Exp).map{ case x ~ _ ~ z => Bop("<=", z, x): BExp }

lazy val Defn: Parser[List[Token], Decl] =
  (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(IdParser, T_COMMA) ~ T_RPAREN ~ T_OP("=") ~ Exp).map{ case _ ~ y ~ _ ~ w ~ _ ~ _ ~ r => Def(y, w, r): Decl }

lazy val Prog: Parser[List[Token], List[Decl]] =
  (Defn ~ T_SEMI ~ Prog).map{ case x ~ _ ~ z => x :: z : List[Decl] } ||
  (Exp.map((s) => List(Main(s)) : List[Decl]))
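
// For instance, a two-declaration program such as
//
//   def id(x) = x;
//   id(42)
//
// should parse to
//
//   List(Def("id", List("x"), Var("x")), Main(Call("id", List(Num(42)))))
//
// (a sketch; the exact token stream depends on the tokeniser in fun_tokens).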



// Reading tokens and Writing parse trees

def parse_tks(tks: List[Token]) : List[Decl] =
  Prog.parse_single(tks)


@main
def main(fname: String) : Unit = {
  val tks = tokenise(os.read(os.pwd / fname))
  println(parse_tks(tks))
}