progs/fun_tokens.scala
changeset 645:30943d5491b6
parent 644:b4f5714485e1
child 655:3d04ee04966d
--- a/progs/fun_tokens.scala	Wed Oct 02 02:09:48 2019 +0100
+++ b/progs/fun_tokens.scala	Wed Oct 02 14:05:36 2019 +0100
@@ -1,5 +1,16 @@
-// A tokeniser for the fun language
-
+// A tokeniser for the Fun language
+//==================================
+//
+// call with 
+//
+//     scala fun_tokens.scala fact.fun
+//
+//     scala fun_tokens.scala defs.fun
+//
+// this will generate a .tks file that can be deserialised back
+// into a list of tokens;
+// you can add -Xno-patmat-analysis in order to get rid of the
+// match-not-exhaustive warning
 
 object Fun_Tokens {
 
@@ -200,7 +211,7 @@
 val COMMENT = ("/*" ~ ALL2.% ~ "*/") | ("//" ~ ALL.% ~ "\n")
 
 
-val WHILE_REGS = (("k" $ KEYWORD) | 
+val FUN_REGS = (("k" $ KEYWORD) | 
                   ("i" $ ID) | 
                   ("o" $ OP) | 
                   ("n" $ NUM) | 
@@ -239,19 +250,20 @@
 
 
 def tokenise(s: String) : List[Token] = 
-  lexing_simp(WHILE_REGS, s).collect(token)
+  lexing_simp(FUN_REGS, s).collect(token)
 
 def serialise[T](fname: String, data: T) = {
-  val out = new ObjectOutputStream(new FileOutputStream(fname))
-  out.writeObject(data)
-  out.close
+  import scala.util.Using
+  Using(new ObjectOutputStream(new FileOutputStream(fname))) {
+    out => out.writeObject(data)
+  }
 }
 
-def main(args: Array[String]) = {
+def main(args: Array[String]) : Unit = {
   val fname = args(0)
+  val tname = fname.stripSuffix(".fun") ++ ".tks"
   val file = io.Source.fromFile(fname).mkString
-  val tks = fname.stripSuffix(".fun") ++ ".tks"
-  serialise(tks, tokenise(file))
+  serialise(tname, tokenise(file))
 }
 
 }
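
The .tks file written by the new Using-based serialise can be read back
with a matching deserialise helper. A minimal sketch, not part of this
changeset, assuming Scala 2.13+ (for scala.util.Using) and that the same
Token definitions are on the classpath when reading:

    import java.io._
    import scala.util.Using

    def deserialise[T](fname: String) : T =
      Using(new ObjectInputStream(new FileInputStream(fname))) {
        in => in.readObject.asInstanceOf[T]
      }.get

    // for example, after tokenising fact.fun:
    //   val tks = deserialise[List[Token]]("fact.tks")

Like serialise above, this closes the stream even when readObject throws,
since Using releases the resource on both success and failure.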