--- a/progs/tokenise.scala Wed Oct 02 02:09:48 2019 +0100
+++ b/progs/tokenise.scala Wed Oct 02 14:05:36 2019 +0100
@@ -6,6 +6,11 @@
// scala tokenise.scala fib.while
//
// scala tokenise.scala loops.while
+//
+// this will generate a .tks file that can be deserialised back
+// into a list of tokens
+// you can add -Xno-patmat-analysis in order to get rid of the
+// match-not-exhaustive warning
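//
// for example (an illustrative invocation, assuming a Scala 2 compiler / runner):
//
// scala -Xno-patmat-analysis tokenise.scala fib.while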
object Tokenise {
@@ -188,8 +193,10 @@
env(lex_simp(r, s.toList))
-// The Lexing Rules for the Fun Language
+// The Lexing Rules for the WHILE Language
+// inefficient representations for some extended regular
+// expressions
def PLUS(r: Rexp) = r ~ r.%
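// for instance, PLUS(r) contains two copies of r, so n nested
// applications of PLUS blow the expression up exponentially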
def Range(s : List[Char]) : Rexp = s match {
@@ -223,8 +230,8 @@
-// Generating tokens for the WHILE language
-// and serialising them into a .tks file
+// Generate tokens for the WHILE language
+// and serialise them into a .tks file
import java.io._
@@ -238,6 +245,7 @@
case class T_KWD(s: String) extends Token
case class T_STR(s: String) extends Token
+// transforms (label, string) pairs into tokens
val token : PartialFunction[(String, String), Token] = {
case ("s", _) => T_SEMI
case ("p", "{") => T_LPAREN
@@ -249,21 +257,23 @@
case ("str", s) => T_STR(s)
}
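// for example, using only the cases shown above (the second component
// of a pair is whatever string the lexer matched for that record):
//
//   token(("s", ";"))          // ==> T_SEMI
//   token(("str", "hello"))    // ==> T_STR("hello")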
+// filters out all uninteresting tokens
def tokenise(s: String) : List[Token] =
lexing_simp(WHILE_REGS, s).collect(token)
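// collect applies the partial function only where it is defined, so
// records without a token case (for example whitespace or comments,
// assuming WHILE_REGS generates such records) are silently dropped:
//
//   tokenise("skip;")   // e.g. List(T_KWD("skip"), T_SEMI), assuming
//                       // "skip" is lexed as a keyword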
def serialise[T](fname: String, data: T) = {
- val out = new ObjectOutputStream(new FileOutputStream(fname))
- out.writeObject(data)
- out.close
+ import scala.util.Using
+ Using(new ObjectOutputStream(new FileOutputStream(fname))) {
+ out => out.writeObject(data)
+ }
}
-def main(args: Array[String]) = {
+def main(args: Array[String]) : Unit = {
val fname = args(0)
+ val tname = fname.stripSuffix(".while") ++ ".tks"
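  // e.g. "fib.while" is turned into "fib.tks"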
val file = io.Source.fromFile(fname).mkString
- val tks = fname.stripSuffix(".while") ++ ".tks"
- serialise(tks, tokenise(file))
+ serialise(tname, tokenise(file))
}
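
// a minimal sketch of the reverse direction mentioned at the top of the
// file: reading a .tks file back into a list of tokens; the name
// deserialise and the example file "fib.tks" are illustrative only, and,
// like serialise above, the sketch relies on scala.util.Using to close
// the stream even if readObject throws
def deserialise[T](fname: String) : T = {
  import scala.util.Using
  Using(new ObjectInputStream(new FileInputStream(fname))) {
    in => in.readObject.asInstanceOf[T]
  }.get
}

// for example:  val tks = deserialise[List[Token]]("fib.tks")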