progs/tokenise.scala
changeset 644 b4f5714485e1
parent 642 064afa8fc1d9
child 645 30943d5491b6
--- a/progs/tokenise.scala	Tue Oct 01 23:49:39 2019 +0100
+++ b/progs/tokenise.scala	Wed Oct 02 02:09:48 2019 +0100
@@ -1,8 +1,13 @@
-// A simple lexer inspired by work of Sulzmann & Lu
-//==================================================
+// A simple tokeniser based on the Sulzmann & Lu algorithm
+//=========================================================
+//
+// call with
+//
+//     scala tokenise.scala fib.while
+//
+//     scala tokenise.scala loops.while
 
-
-object Lexer {
+object Tokenise {
 
 import scala.language.implicitConversions    
 import scala.language.reflectiveCalls
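
Aside from the rename of Lexer to Tokenise and the new usage header, the Sulzmann & Lu lexing core is untouched. For orientation: the records in the rule set below (for example "w" $ WHITESPACE) make the lexer return (label, lexeme) pairs, the List[(String, String)] that the removed escape code and the serialiser operate on. A purely illustrative value of that shape, where every label except "w" is a guess:

    // illustrative only: tokeniser output as (label, lexeme) pairs;
    // "w" (whitespace) is the one label visible in this changeset,
    // the other labels are hypothetical
    val tks: List[(String, String)] =
      List(("k", "while"), ("i", "n"), ("o", ">"), ("n", "0"))
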
@@ -217,49 +222,9 @@
                   ("w" $ WHITESPACE)).%
 
 
-// escapes strings and prints them out as "", "\n" and so on
-def esc(raw: String): String = {
-  import scala.reflect.runtime.universe._
-  Literal(Constant(raw)).toString
-}
-
-def escape(tks: List[(String, String)]) =
-  tks.map{ case (s1, s2) => (s1, esc(s2))}
-
-val prog2 = """
-write "Fib";
-read n;
-minus1 := 0;
-minus2 := 1;
-while n > 0 do {
-  temp := minus2;
-  minus2 := minus1 + minus2;
-  minus1 := temp;
-  n := n - 1
-};
-write "Result";
-write minus2
-"""
-
-val prog3 = """
-start := 1000;
-x := start;
-y := start;
-z := start;
-while 0 < x do {
- while 0 < y do {
-  while 0 < z do {
-    z := z - 1
-  };
-  z := start;
-  y := y - 1
- };     
- y := start;
- x := x - 1
-}
-"""
 
 // Generating tokens for the WHILE language
+// and serialising them into a .tks file
 
 import java.io._
 
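The definition of serialise falls between these hunks and is unchanged by this changeset. A minimal sketch of a plausible shape, assuming it uses plain Java object serialisation via the java.io._ import above; this body is a guess, not part of the diff:

    // sketch only: write any serialisable value to a file
    import java.io._

    def serialise[T](fname: String, data: T): Unit = {
      val out = new ObjectOutputStream(new FileOutputStream(fname))
      out.writeObject(data)
      out.close()
    }
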
@@ -295,7 +260,10 @@
 }
 
 def main(args: Array[String]) = {
-  serialise("/tmp/nflx", tokenise(prog3))
+  val fname = args(0)
+  val file = io.Source.fromFile(fname).mkString
+  val tks = fname.stripSuffix(".while") ++ ".tks"
+  serialise(tks, tokenise(file))
 }
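
On the consumer side, a .tks file written by this main can presumably be read back with the mirror image of serialise. A hedged sketch, assuming the same Java object serialisation and that tokenise yields the List[(String, String)] pairs seen in the removed escape code; deserialise is not part of this file:

    // hypothetical consumer: read a token list back from a .tks file
    import java.io._

    def deserialise[T](fname: String): T = {
      val in = new ObjectInputStream(new FileInputStream(fname))
      val data = in.readObject.asInstanceOf[T]
      in.close()
      data
    }

    // e.g.  val tks = deserialise[List[(String, String)]]("fib.tks")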