// A tokeniser for the Fun language

object Fun_Tokens {

import scala.language.implicitConversions
import scala.language.reflectiveCalls

// regular expressions, extended with records for tokenisation
abstract class Rexp
case object ZERO extends Rexp
case object ONE extends Rexp
case class CHAR(c: Char) extends Rexp
case class ALT(r1: Rexp, r2: Rexp) extends Rexp
case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
case class STAR(r: Rexp) extends Rexp
case class RECD(x: String, r: Rexp) extends Rexp

// values, recording how a regular expression matched a string
abstract class Val
case object Empty extends Val
case class Chr(c: Char) extends Val
case class Sequ(v1: Val, v2: Val) extends Val
case class Left(v: Val) extends Val
case class Right(v: Val) extends Val
case class Stars(vs: List[Val]) extends Val
case class Rec(x: String, v: Val) extends Val

// some convenience for typing in regular expressions
def charlist2rexp(s : List[Char]): Rexp = s match {
  case Nil => ONE
  case c::Nil => CHAR(c)
  case c::s => SEQ(CHAR(c), charlist2rexp(s))
}
implicit def string2rexp(s : String) : Rexp =
  charlist2rexp(s.toList)

implicit def RexpOps(r: Rexp) = new {
  def | (s: Rexp) = ALT(r, s)
  def % = STAR(r)
  def ~ (s: Rexp) = SEQ(r, s)
}

implicit def stringOps(s: String) = new {
  def | (r: Rexp) = ALT(s, r)
  def | (r: String) = ALT(s, r)
  def % = STAR(s)
  def ~ (r: Rexp) = SEQ(s, r)
  def ~ (r: String) = SEQ(s, r)
  def $ (r: Rexp) = RECD(s, r)
}

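// An illustrative regular expression using the notation above (the name
// and the regex are ours, not part of the original lexer): strings are
// implicitly coerced to Rexps, ~ builds sequences, | alternatives,
// % the Kleene star, and $ attaches a record label that env/tokenise
// later report.
val EXAMPLE_REG: Rexp = ("k" $ "if") | ("i" $ (("x" | "y") ~ ("x" | "y" | "0").%))
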
// nullable: tests whether a regular expression
// can recognise the empty string
def nullable (r: Rexp) : Boolean = r match {
  case ZERO => false
  case ONE => true
  case CHAR(_) => false
  case ALT(r1, r2) => nullable(r1) || nullable(r2)
  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
  case STAR(_) => true
  case RECD(_, r1) => nullable(r1)
}

// the derivative of a regular expression w.r.t. a character
def der (c: Char, r: Rexp) : Rexp = r match {
  case ZERO => ZERO
  case ONE => ZERO
  case CHAR(d) => if (c == d) ONE else ZERO
  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
  case SEQ(r1, r2) =>
    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
    else SEQ(der(c, r1), r2)
  case STAR(r) => SEQ(der(c, r), STAR(r))
  case RECD(_, r1) => der(c, r1)
}

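// A small sanity check for der (ours, not part of the original file):
// the derivative of the regex for "if" w.r.t. 'i' is the regex that
// still has to match "f"; after a further derivative w.r.t. 'f' the
// result is nullable, i.e. "if" has been fully matched.
def der_example() : Unit = {
  assert(der('i', "if") == SEQ(ONE, CHAR('f')))
  assert(nullable(der('f', der('i', "if"))))
}
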
// extracts a string from a value
def flatten(v: Val) : String = v match {
  case Empty => ""
  case Chr(c) => c.toString
  case Left(v) => flatten(v)
  case Right(v) => flatten(v)
  case Sequ(v1, v2) => flatten(v1) + flatten(v2)
  case Stars(vs) => vs.map(flatten).mkString
  case Rec(_, v) => flatten(v)
}

// extracts an environment from a value;
// used for tokenising a string
def env(v: Val) : List[(String, String)] = v match {
  case Empty => Nil
  case Chr(c) => Nil
  case Left(v) => env(v)
  case Right(v) => env(v)
  case Sequ(v1, v2) => env(v1) ::: env(v2)
  case Stars(vs) => vs.flatMap(env)
  case Rec(x, v) => (x, flatten(v))::env(v)
}

// The injection part of the lexer

// mkeps constructs a value for how a nullable regular expression
// can match the empty string
def mkeps(r: Rexp) : Val = r match {
  case ONE => Empty
  case ALT(r1, r2) =>
    if (nullable(r1)) Left(mkeps(r1)) else Right(mkeps(r2))
  case SEQ(r1, r2) => Sequ(mkeps(r1), mkeps(r2))
  case STAR(r) => Stars(Nil)
  case RECD(x, r) => Rec(x, mkeps(r))
}

// inj injects the character c back into the value v, reversing
// one derivative step
def inj(r: Rexp, c: Char, v: Val) : Val = (r, v) match {
  case (STAR(r), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
  case (SEQ(r1, r2), Sequ(v1, v2)) => Sequ(inj(r1, c, v1), v2)
  case (SEQ(r1, r2), Left(Sequ(v1, v2))) => Sequ(inj(r1, c, v1), v2)
  case (SEQ(r1, r2), Right(v2)) => Sequ(mkeps(r1), inj(r2, c, v2))
  case (ALT(r1, r2), Left(v1)) => Left(inj(r1, c, v1))
  case (ALT(r1, r2), Right(v2)) => Right(inj(r2, c, v2))
  case (CHAR(d), Empty) => Chr(c)
  case (RECD(x, r1), _) => Rec(x, inj(r1, c, v))
  case _ => { println ("Injection error") ; sys.exit(-1) }
}

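// A tiny illustrative check (ours, not part of the original file):
// lexing the single character 'i' against CHAR('i') first derives to
// ONE, mkeps then produces Empty, and inj puts the character back.
def inj_example() : Unit = {
  assert(inj(CHAR('i'), 'i', mkeps(der('i', CHAR('i')))) == Chr('i'))
}
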
// some "rectification" functions for simplification
def F_ID(v: Val): Val = v
def F_RIGHT(f: Val => Val) = (v:Val) => Right(f(v))
def F_LEFT(f: Val => Val) = (v:Val) => Left(f(v))
def F_ALT(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
  case Right(v) => Right(f2(v))
  case Left(v) => Left(f1(v))
}
def F_SEQ(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
  case Sequ(v1, v2) => Sequ(f1(v1), f2(v2))
}
def F_SEQ_Empty1(f1: Val => Val, f2: Val => Val) =
  (v:Val) => Sequ(f1(Empty), f2(v))
def F_SEQ_Empty2(f1: Val => Val, f2: Val => Val) =
  (v:Val) => Sequ(f1(v), f2(Empty))
def F_RECD(f: Val => Val) = (v:Val) => v match {
  case Rec(x, v) => Rec(x, f(v))
}
def F_ERROR(v: Val): Val = throw new Exception("error")

// simplification of regular expressions: returns the simplified
// regular expression together with a rectification function that
// translates values of the simplified regex back into values of
// the original regex
def simp(r: Rexp): (Rexp, Val => Val) = r match {
  case ALT(r1, r2) => {
    val (r1s, f1s) = simp(r1)
    val (r2s, f2s) = simp(r2)
    (r1s, r2s) match {
      case (ZERO, _) => (r2s, F_RIGHT(f2s))
      case (_, ZERO) => (r1s, F_LEFT(f1s))
      case _ => if (r1s == r2s) (r1s, F_LEFT(f1s))
                else (ALT (r1s, r2s), F_ALT(f1s, f2s))
    }
  }
  case SEQ(r1, r2) => {
    val (r1s, f1s) = simp(r1)
    val (r2s, f2s) = simp(r2)
    (r1s, r2s) match {
      case (ZERO, _) => (ZERO, F_ERROR)
      case (_, ZERO) => (ZERO, F_ERROR)
      case (ONE, _) => (r2s, F_SEQ_Empty1(f1s, f2s))
      case (_, ONE) => (r1s, F_SEQ_Empty2(f1s, f2s))
      case _ => (SEQ(r1s, r2s), F_SEQ(f1s, f2s))
    }
  }
  case RECD(x, r1) => {
    val (r1s, f1s) = simp(r1)
    (RECD(x, r1s), F_RECD(f1s))
  }
  case r => (r, F_ID)
}

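// A small sanity check for simp (ours, not part of the original file):
// ALT(ZERO, ONE) simplifies to ONE, and the returned rectification
// function re-wraps a value for ONE as a Right value of the original
// alternative.
def simp_example() : Unit = {
  val (r_s, f_s) = simp(ALT(ZERO, ONE))
  assert(r_s == ONE)
  assert(f_s(Empty) == Right(Empty))
}
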
// lexing functions including simplification
def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
  case Nil =>
    if (nullable(r)) mkeps(r) else { println ("Lexing Error") ; sys.exit(-1) }
  case c::cs => {
    val (r_simp, f_simp) = simp(der(c, r))
    inj(r, c, f_simp(lex_simp(r_simp, cs)))
  }
}

def lexing_simp(r: Rexp, s: String) = env(lex_simp(r, s.toList))

// The Lexing Rules for the Fun Language

def PLUS(r: Rexp) = r ~ r.%

val SYM = "a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" |
          "l" | "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" |
          "w" | "x" | "y" | "z" | "T" | "_"
val DIGIT = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
val ID = SYM ~ (SYM | DIGIT).%
val NUM = PLUS(DIGIT)
val KEYWORD : Rexp = "if" | "then" | "else" | "write" | "def"
val SEMI: Rexp = ";"
val OP: Rexp = "=" | "==" | "-" | "+" | "*" | "!=" | "<" | ">" | "<=" | ">=" | "%" | "/"
val WHITESPACE = PLUS(" " | "\n" | "\t")
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val COMMA: Rexp = ","
val ALL = SYM | DIGIT | OP | " " | ":" | ";" | "\"" | "=" | "," | "(" | ")"
val ALL2 = ALL | "\n"
val COMMENT = ("/*" ~ ALL2.% ~ "*/") | ("//" ~ ALL.% ~ "\n")

val WHILE_REGS = (("k" $ KEYWORD) |
                  ("i" $ ID) |
                  ("o" $ OP) |
                  ("n" $ NUM) |
                  ("s" $ SEMI) |
                  ("c" $ COMMA) |
                  ("pl" $ LPAREN) |
                  ("pr" $ RPAREN) |
                  ("w" $ (WHITESPACE | COMMENT))).%

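// An illustrative call (ours, not part of the original file): lexing a
// small string against WHILE_REGS yields the (label, lexeme) pairs
// collected by env; "if" should be classified as a keyword rather than
// an identifier, since KEYWORD is the first of the alternatives.
def lexing_example() : List[(String, String)] =
  lexing_simp(WHILE_REGS, "if x")   // expected: List(("k","if"), ("w"," "), ("i","x"))
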
// The tokens for the Fun language

import java.io._

abstract class Token extends Serializable
case object T_SEMI extends Token
case object T_COMMA extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(n: Int) extends Token
case class T_KWD(s: String) extends Token

// translates (label, lexeme) pairs into tokens; pairs labelled "w"
// (whitespace and comments) are not covered and are therefore
// dropped by collect in tokenise below
val token : PartialFunction[(String, String), Token] = {
  case ("k", s) => T_KWD(s)
  case ("i", s) => T_ID(s)
  case ("o", s) => T_OP(s)
  case ("n", s) => T_NUM(s.toInt)
  case ("s", _) => T_SEMI
  case ("c", _) => T_COMMA
  case ("pl", _) => T_LPAREN
  case ("pr", _) => T_RPAREN
}

def tokenise(s: String) : List[Token] =
  lexing_simp(WHILE_REGS, s).collect(token)

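// An illustrative run (ours, not part of the original file): whitespace
// and comments carry the "w" label, which `token` does not cover, so
// they disappear from the token list. The call below should yield
// List(T_KWD("if"), T_ID("x"), T_KWD("then"), T_NUM(1), T_KWD("else"), T_NUM(2)).
def tokenise_example() : List[Token] =
  tokenise("if x then 1 else 2")
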
// serialises the token list to a file
def serialise[T](fname: String, data: T) = {
  val out = new ObjectOutputStream(new FileOutputStream(fname))
  out.writeObject(data)
  out.close
}

// reads the .fun file given as command-line argument and writes
// the serialised token list to the corresponding .tks file
def main(args: Array[String]) = {
  val fname = args(0)
  val file = io.Source.fromFile(fname).mkString
  val tks = fname.stripSuffix(".fun") ++ ".tks"
  serialise(tks, tokenise(file))
}

}