// CW3

import scala.language.implicitConversions
import $file.lexer, lexer._

case class ~[+A, +B](x: A, y: B)

// parser combinators

abstract class Parser[I, T](using is: I => Seq[?]) {
  def parse(in: I): Set[(T, I)]

  def parse_all(in: I) : Set[T] =
    for ((hd, tl) <- parse(in);
         if is(tl).isEmpty) yield hd
}
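
// parse returns all possible (result, unconsumed input) pairs;
// parse_all keeps only the results of parses that consume the input completely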

// alternative parser
class AltParser[I, T](p: => Parser[I, T],
                      q: => Parser[I, T])(using I => Seq[?]) extends Parser[I, T] {
  def parse(in: I) = p.parse(in) ++ q.parse(in)
}

// sequence parser
class SeqParser[I, T, S](p: => Parser[I, T],
                         q: => Parser[I, S])(using I => Seq[?]) extends Parser[I, ~[T, S]] {
  def parse(in: I) =
    for ((hd1, tl1) <- p.parse(in);
         (hd2, tl2) <- q.parse(tl1)) yield (new ~(hd1, hd2), tl2)
}

// map parser
class MapParser[I, T, S](p: => Parser[I, T],
                         f: T => S)(using I => Seq[?]) extends Parser[I, S] {
  def parse(in: I) = for ((hd, tl) <- p.parse(in)) yield (f(hd), tl)
}

// more convenient syntax for parser combinators
extension [I, T](p: Parser[I, T])(using I => Seq[?]) {
  def ||(q : => Parser[I, T]) = new AltParser[I, T](p, q)
  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
  def map[S](f: => T => S) = new MapParser[I, T, S](p, f)
}
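
/* A small usage sketch of the combinators (not part of the coursework;
   CharParser and the String-to-Seq evidence below are hypothetical helpers):

given stringAsSeq: (String => Seq[Char]) = _.toList

// an atomic parser recognising one given character at the front of a String
case class CharParser(c: Char) extends Parser[String, Char] {
  def parse(in: String) =
    if (in.nonEmpty && in.head == c) Set((c, in.tail)) else Set()
}

// (CharParser('a') ~ CharParser('b')).map{ case x ~ y => s"$x$y" }.parse_all("ab")
// ==> Set("ab")
*/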

/*
// atomic parser for (particular) strings
case class StrParser(s: String) extends Parser[String, String] {
  def parse(sb: String) = {
    val (prefix, suffix) = sb.splitAt(s.length)
    if (prefix == s) Set((prefix, suffix)) else Set()
  }
}

extension (sc: StringContext)
  def p(args: Any*) = StrParser(sc.s(args:_*))
*/

// an atomic parser for a single, particular token
case class TokenParser(t: Token) extends Parser[List[Token], Token] {
  def parse(in: List[Token]) = {
    if (!in.isEmpty && in.head == t) Set((t, in.tail)) else Set()
  }
}

case class TokenListParser(ts: List[Token]) extends Parser[List[Token], List[Token]] {
  def parse(tsb: List[Token]) = {
    val (prefix, suffix) = tsb.splitAt(ts.length)
    if (prefix == ts) Set((prefix, suffix)) else Set()
  }
}
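
/* For example, a single token can be parsed off the front of a token list:
   TokenParser(T_KEYWORD("if")).parse(List(T_KEYWORD("if"), T_KEYWORD("then")))
   ==> Set((T_KEYWORD("if"), List(T_KEYWORD("then")))) */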

// Implicit definition to go from a token to a TokenParser
given Conversion[Token, Parser[List[Token], Token]] = (t => TokenParser(t))
//implicit def token2parser(t: Token) : Parser[List[Token], Token] =
//  TokenParser(t)

/*
extension (t: Token) {
  def || (q : => Parser[List[Token], Token]) =
    new AltParser[List[Token], Token](t, q)
  def ~[S](q : => Parser[List[Token], S]) =
    new SeqParser[List[Token], Token, S](t, q)
}
*/
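
// With this conversion a bare token can stand for a parser in the grammar
// rules below, e.g. T_OP("+") in (Te ~ T_OP("+") ~ AExp).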

// Abstract Syntax Trees
abstract class Stmt
abstract class AExp
abstract class BExp

type Block = List[Stmt]

case object Skip extends Stmt
case class If(a: BExp, bl1: Block, bl2: Block) extends Stmt
case class While(b: BExp, bl: Block) extends Stmt
case class Assign(s: String, a: AExp) extends Stmt
case class Read(s: String) extends Stmt
case class WriteId(s: String) extends Stmt      // for printing values of variables
case class WriteString(s: String) extends Stmt  // for printing words
case class For(counter: String, lower: AExp, upper: AExp, code: Block) extends Stmt
case object Break extends Stmt

case class Var(s: String) extends AExp
case class Num(i: Int) extends AExp
case class Aop(o: String, a1: AExp, a2: AExp) extends AExp

case object True extends BExp
case object False extends BExp
case class Bop(o: String, a1: AExp, a2: AExp) extends BExp
case class And(b1: BExp, b2: BExp) extends BExp
case class Or(b1: BExp, b2: BExp) extends BExp
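
// For example, "if x == 0 then skip else x := x - 1" corresponds to the AST
//   If(Bop("==", Var("x"), Num(0)), List(Skip),
//      List(Assign("x", Aop("-", Var("x"), Num(1)))))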

case class IdParser() extends Parser[List[Token], String] {
  def parse(tsb: List[Token]) = tsb match {
    case T_ID(id) :: rest => Set((id, rest))
    case _ => Set()
  }
}

case class NumParser() extends Parser[List[Token], Int] {
  def parse(tsb: List[Token]) = tsb match {
    case T_NUM(n) :: rest => Set((n, rest))
    case _ => Set()
  }
}

case class StringParser() extends Parser[List[Token], String] {
  def parse(tsb: List[Token]) = tsb match {
    case T_STRING(s) :: rest => Set((s, rest))
    case _ => Set()
  }
}
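
// For example, IdParser() strips off the T_ID constructor:
//   IdParser().parse(List(T_ID("x"))) ==> Set(("x", Nil))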

// WHILE Language Parsing
lazy val AExp: Parser[List[Token], AExp] =
  (Te ~ T_OP("+") ~ AExp).map{ case x ~ _ ~ z => Aop("+", x, z): AExp } ||
  (Te ~ T_OP("-") ~ AExp).map{ case x ~ _ ~ z => Aop("-", x, z): AExp } || Te
lazy val Te: Parser[List[Token], AExp] =
  (Fa ~ T_OP("*") ~ Te).map{ case x ~ _ ~ z => Aop("*", x, z): AExp } ||
  (Fa ~ T_OP("/") ~ Te).map{ case x ~ _ ~ z => Aop("/", x, z): AExp } ||
  (Fa ~ T_OP("%") ~ Te).map{ case x ~ _ ~ z => Aop("%", x, z): AExp } || Fa
lazy val Fa: Parser[List[Token], AExp] =
  (T_PAREN("(") ~ AExp ~ T_PAREN(")")).map{ case _ ~ y ~ _ => y } ||
  IdParser().map{Var(_)} ||
  NumParser().map{Num(_)}
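
// The three levels AExp / Te / Fa give *, / and % higher precedence than
// + and -; e.g. the tokens for "2 + 3 * 4" parse to
//   Aop("+", Num(2), Aop("*", Num(3), Num(4)))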

lazy val BExp: Parser[List[Token], BExp] =
  (AExp ~ T_OP("==") ~ AExp).map{ case x ~ _ ~ z => Bop("==", x, z): BExp } ||
  (AExp ~ T_OP("!=") ~ AExp).map{ case x ~ _ ~ z => Bop("!=", x, z): BExp } ||
  (AExp ~ T_OP("<") ~ AExp).map{ case x ~ _ ~ z => Bop("<", x, z): BExp } ||
  (AExp ~ T_OP(">") ~ AExp).map{ case x ~ _ ~ z => Bop(">", x, z): BExp } ||
  (AExp ~ T_OP("<=") ~ AExp).map{ case x ~ _ ~ z => Bop("<=", x, z): BExp } ||
  (AExp ~ T_OP("=>") ~ AExp).map{ case x ~ _ ~ z => Bop("=>", x, z): BExp } ||
  (T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("&&") ~ BExp).map{ case _ ~ y ~ _ ~ _ ~ v => And(y, v): BExp } ||
  (T_PAREN("(") ~ BExp ~ T_PAREN(")") ~ T_OP("||") ~ BExp).map{ case _ ~ y ~ _ ~ _ ~ v => Or(y, v): BExp } ||
  (T_KEYWORD("true").map(_ => True: BExp)) ||
  (T_KEYWORD("false").map(_ => False: BExp)) ||
  (T_PAREN("(") ~ BExp ~ T_PAREN(")")).map{ case _ ~ x ~ _ => x }
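
// Note the bracketing: && and || expect their left operand in parentheses,
// e.g. "(x == 1) && x < 10"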

lazy val Stmt: Parser[List[Token], Stmt] =
  T_KEYWORD("skip").map(_ => Skip: Stmt) ||
  T_KEYWORD("break").map(_ => Break: Stmt) ||
  (IdParser() ~ T_OP(":=") ~ AExp).map{ case id ~ _ ~ z => Assign(id, z): Stmt } ||
  (T_KEYWORD("if") ~ BExp ~ T_KEYWORD("then") ~ Block ~ T_KEYWORD("else") ~ Block).map{ case _ ~ y ~ _ ~ u ~ _ ~ w => If(y, u, w): Stmt } ||
  (T_KEYWORD("while") ~ BExp ~ T_KEYWORD("do") ~ Block).map{ case _ ~ y ~ _ ~ w => While(y, w): Stmt } ||
  (T_KEYWORD("for") ~ IdParser() ~ T_OP(":=") ~ AExp ~ T_KEYWORD("upto") ~ AExp ~ T_KEYWORD("do") ~ Block).map{
    case _ ~ id ~ _ ~ low ~ _ ~ high ~ _ ~ bl => For(id, low, high, bl): Stmt } ||
  (T_KEYWORD("read") ~ IdParser()).map{ case _ ~ id => Read(id): Stmt } ||
  (T_KEYWORD("write") ~ IdParser()).map{ case _ ~ id => WriteId(id): Stmt } ||
  (T_KEYWORD("write") ~ StringParser()).map{ case _ ~ s => WriteString(s): Stmt } ||
  (T_KEYWORD("write") ~ T_PAREN("(") ~ IdParser() ~ T_PAREN(")")).map{ case _ ~ _ ~ id ~ _ => WriteId(id): Stmt } ||
  (T_KEYWORD("write") ~ T_PAREN("(") ~ StringParser() ~ T_PAREN(")")).map{ case _ ~ _ ~ s ~ _ => WriteString(s): Stmt }
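
// For example, the tokens for "write (x)" parse to WriteId("x") and those
// for "x := 3" to Assign("x", Num(3))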

lazy val Stmts: Parser[List[Token], Block] =
  (Stmt ~ T_SEMI ~ Stmts).map{ case x ~ _ ~ z => x :: z : Block } ||
  (Stmt.map(s => List(s) : Block))

lazy val Block: Parser[List[Token], Block] =
  (T_PAREN("{") ~ Stmts ~ T_PAREN("}")).map{ case _ ~ y ~ _ => y } ||
  (Stmt.map(s => List(s)))
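
/* A final usage sketch (assuming lexer.sc provides a tokenise function from
   strings to token lists, as in the previous coursework):

   Stmts.parse_all(tokenise("x := 5; while x > 0 do x := x - 1"))

   should yield a singleton set containing the corresponding two-statement
   Block. */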