@@ -231,11 +231,11 @@
 import scala.reflect.runtime.universe._
   Literal(Constant(raw)).toString
 }
 
 def escape(tks: List[(String, String)]) =
-  tks.map{ case (s1, s2) => (s1, esc(s2))}
+  tks.map{ case (s1, s2) => (esc(s1), esc(s2))}
 
 
 // Tokens
 abstract class Token extends Serializable
 case class T_KEYWORD(s: String) extends Token
@@ -255,7 +255,13 @@
   case ("i", s) => T_ID(s)
   case ("n", s) => T_NUM(s.toInt)
 }
 
 // Tokenise
-def tokenise(s: String) : List[Token] =
-  lexing_simp(WHILE_REGS, s).collect(token)
+def tokenise(s: String) = //: List[Token] =
+  escape(lexing_simp(WHILE_REGS, s)).filter{p => p._1 != "\"w\""}//.collect(token)
+
+
+
+
+println(tokenise(os.read(os.pwd / "primes.while")))
+
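
A note on why the new tokenise filters on the escaped tag rather than on plain "w": the revised escape now applies esc to the tag s1 as well as to the lexed string s2, and esc renders a raw string as a Scala string literal, quotes included, so the whitespace tag comes back as the three-character string consisting of a quote, a w and a quote. The sketch below illustrates this under two assumptions: that esc is completed as shown at the top of the diff (a Scala 2 setup with scala-reflect available), and that the sample pairs ("i", "x"), ("w", " ") and ("n", "42") are invented stand-ins for real lexer output.

import scala.reflect.runtime.universe._

// Assumed completion of the esc function shown at the top of the diff:
// render a raw string as a Scala string literal (adds surrounding quotes
// and escapes special characters).
def esc(raw: String): String =
  Literal(Constant(raw)).toString

// escape as in the new version: escape both the tag and the lexed string.
def escape(tks: List[(String, String)]): List[(String, String)] =
  tks.map{ case (s1, s2) => (esc(s1), esc(s2)) }

// Hypothetical lexer output, invented for illustration only.
val sample = List(("i", "x"), ("w", " "), ("n", "42"))

println(escape(sample))
// List(("i","x"), ("w"," "), ("n","42"))
// -- the quote marks are now characters inside the strings themselves

// The whitespace filter therefore has to compare against "\"w\""
// (quote, w, quote), not against plain "w".
println(escape(sample).filter(_._1 != "\"w\""))
// List(("i","x"), ("n","42"))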