updated
author Christian Urban <urbanc@in.tum.de>
date Wed, 02 Oct 2019 02:09:48 +0100
changeset 644 b4f5714485e1
parent 643 08375ca3874e
child 645 30943d5491b6
progs/detokenise.scala
progs/fun.scala
progs/fun_parser.scala
progs/fun_tokens.scala
progs/tokenise.scala
--- a/progs/detokenise.scala	Tue Oct 01 23:49:39 2019 +0100
+++ b/progs/detokenise.scala	Wed Oct 02 02:09:48 2019 +0100
@@ -1,8 +1,13 @@
-// A simple lexer inspired by work of Sulzmann & Lu
-//==================================================
+// Detokenising the output of the Tokeniser
+//===========================================
+//
+// call with 
+//
+//     scala detokenise.scala fib.tks
+//
+//     scala detokenise.scala loops.tks
 
-
-object Delexer {
+object Detokenise {
 
 import java.io._
 
@@ -24,7 +29,9 @@
 }
 
 def main(args: Array[String]) = {
-  println("TEST\n" ++ deserialise[List[Token]]("/tmp/nflx").mkString)  
+  val fname = args(0)
+  val tks = deserialise[List[Token]](fname)
+  println(s"Reading back from ${fname}:\n${tks.mkString("\n")}")  
 }
 
 
--- a/progs/fun.scala	Tue Oct 01 23:49:39 2019 +0100
+++ b/progs/fun.scala	Wed Oct 02 02:09:48 2019 +0100
@@ -1,324 +1,14 @@
 // A Small Compiler for a Simple Functional Language
-// (includes a lexer and a parser)
-
-import scala.language.implicitConversions    
-import scala.language.reflectiveCalls 
-
-abstract class Rexp 
-case object ZERO extends Rexp
-case object ONE extends Rexp
-case class CHAR(c: Char) extends Rexp
-case class ALT(r1: Rexp, r2: Rexp) extends Rexp 
-case class SEQ(r1: Rexp, r2: Rexp) extends Rexp 
-case class STAR(r: Rexp) extends Rexp 
-case class RECD(x: String, r: Rexp) extends Rexp
-  
-abstract class Val
-case object Empty extends Val
-case class Chr(c: Char) extends Val
-case class Sequ(v1: Val, v2: Val) extends Val
-case class Left(v: Val) extends Val
-case class Right(v: Val) extends Val
-case class Stars(vs: List[Val]) extends Val
-case class Rec(x: String, v: Val) extends Val
-   
-// some convenience for typing in regular expressions
-def charlist2rexp(s : List[Char]): Rexp = s match {
-  case Nil => ONE
-  case c::Nil => CHAR(c)
-  case c::s => SEQ(CHAR(c), charlist2rexp(s))
-}
-implicit def string2rexp(s : String) : Rexp = 
-  charlist2rexp(s.toList)
-
-implicit def RexpOps(r: Rexp) = new {
-  def | (s: Rexp) = ALT(r, s)
-  def % = STAR(r)
-  def ~ (s: Rexp) = SEQ(r, s)
-}
-
-implicit def stringOps(s: String) = new {
-  def | (r: Rexp) = ALT(s, r)
-  def | (r: String) = ALT(s, r)
-  def % = STAR(s)
-  def ~ (r: Rexp) = SEQ(s, r)
-  def ~ (r: String) = SEQ(s, r)
-  def $ (r: Rexp) = RECD(s, r)
-}
-
-def nullable (r: Rexp) : Boolean = r match {
-  case ZERO => false
-  case ONE => true
-  case CHAR(_) => false
-  case ALT(r1, r2) => nullable(r1) || nullable(r2)
-  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
-  case STAR(_) => true
-  case RECD(_, r1) => nullable(r1)
-}
+// (uses an external tokeniser and parser)
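+//
+// call with
+//
+//     scala fun.scala fact
+//
+// (assumes a fact.prs file has been generated by fun_parser.scala)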
 
-def der (c: Char, r: Rexp) : Rexp = r match {
-  case ZERO => ZERO
-  case ONE => ZERO
-  case CHAR(d) => if (c == d) ONE else ZERO
-  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
-  case SEQ(r1, r2) => 
-    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
-    else SEQ(der(c, r1), r2)
-  case STAR(r) => SEQ(der(c, r), STAR(r))
-  case RECD(_, r1) => der(c, r1)
-}
-
-
-// extracts a string from value
-def flatten(v: Val) : String = v match {
-  case Empty => ""
-  case Chr(c) => c.toString
-  case Left(v) => flatten(v)
-  case Right(v) => flatten(v)
-  case Sequ(v1, v2) => flatten(v1) + flatten(v2)
-  case Stars(vs) => vs.map(flatten).mkString
-  case Rec(_, v) => flatten(v)
-}
-
-// extracts an environment from a value;
-// used for tokenise a string
-def env(v: Val) : List[(String, String)] = v match {
-  case Empty => Nil
-  case Chr(c) => Nil
-  case Left(v) => env(v)
-  case Right(v) => env(v)
-  case Sequ(v1, v2) => env(v1) ::: env(v2)
-  case Stars(vs) => vs.flatMap(env)
-  case Rec(x, v) => (x, flatten(v))::env(v)
-}
-
-// The Injection Part of the lexer
-
-def mkeps(r: Rexp) : Val = r match {
-  case ONE => Empty
-  case ALT(r1, r2) => 
-    if (nullable(r1)) Left(mkeps(r1)) else Right(mkeps(r2))
-  case SEQ(r1, r2) => Sequ(mkeps(r1), mkeps(r2))
-  case STAR(r) => Stars(Nil)
-  case RECD(x, r) => Rec(x, mkeps(r))
-}
-
-def inj(r: Rexp, c: Char, v: Val) : Val = (r, v) match {
-  case (STAR(r), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
-  case (SEQ(r1, r2), Sequ(v1, v2)) => Sequ(inj(r1, c, v1), v2)
-  case (SEQ(r1, r2), Left(Sequ(v1, v2))) => Sequ(inj(r1, c, v1), v2)
-  case (SEQ(r1, r2), Right(v2)) => Sequ(mkeps(r1), inj(r2, c, v2))
-  case (ALT(r1, r2), Left(v1)) => Left(inj(r1, c, v1))
-  case (ALT(r1, r2), Right(v2)) => Right(inj(r2, c, v2))
-  case (CHAR(d), Empty) => Chr(c) 
-  case (RECD(x, r1), _) => Rec(x, inj(r1, c, v))
-  case _ => { println ("Injection error") ; sys.exit(-1) } 
-}
-
-// some "rectification" functions for simplification
-def F_ID(v: Val): Val = v
-def F_RIGHT(f: Val => Val) = (v:Val) => Right(f(v))
-def F_LEFT(f: Val => Val) = (v:Val) => Left(f(v))
-def F_ALT(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
-  case Right(v) => Right(f2(v))
-  case Left(v) => Left(f1(v))
-}
-def F_SEQ(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
-  case Sequ(v1, v2) => Sequ(f1(v1), f2(v2))
-}
-def F_SEQ_Empty1(f1: Val => Val, f2: Val => Val) = 
-  (v:Val) => Sequ(f1(Empty), f2(v))
-def F_SEQ_Empty2(f1: Val => Val, f2: Val => Val) = 
-  (v:Val) => Sequ(f1(v), f2(Empty))
-def F_RECD(f: Val => Val) = (v:Val) => v match {
-  case Rec(x, v) => Rec(x, f(v))
-}
-def F_ERROR(v: Val): Val = throw new Exception("error")
+import java.io._
 
-def simp(r: Rexp): (Rexp, Val => Val) = r match {
-  case ALT(r1, r2) => {
-    val (r1s, f1s) = simp(r1)
-    val (r2s, f2s) = simp(r2)
-    (r1s, r2s) match {
-      case (ZERO, _) => (r2s, F_RIGHT(f2s))
-      case (_, ZERO) => (r1s, F_LEFT(f1s))
-      case _ => if (r1s == r2s) (r1s, F_LEFT(f1s))
-                else (ALT (r1s, r2s), F_ALT(f1s, f2s)) 
-    }
-  }
-  case SEQ(r1, r2) => {
-    val (r1s, f1s) = simp(r1)
-    val (r2s, f2s) = simp(r2)
-    (r1s, r2s) match {
-      case (ZERO, _) => (ZERO, F_ERROR)
-      case (_, ZERO) => (ZERO, F_ERROR)
-      case (ONE, _) => (r2s, F_SEQ_Empty1(f1s, f2s))
-      case (_, ONE) => (r1s, F_SEQ_Empty2(f1s, f2s))
-      case _ => (SEQ(r1s,r2s), F_SEQ(f1s, f2s))
-    }
-  }
-  case RECD(x, r1) => {
-    val (r1s, f1s) = simp(r1)
-    (RECD(x, r1s), F_RECD(f1s))
-  }
-  case r => (r, F_ID)
-}
-
-// lexing functions including simplification
-def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
-  case Nil => if (nullable(r)) mkeps(r) else { println ("Lexing Error") ; sys.exit(-1) } 
-  case c::cs => {
-    val (r_simp, f_simp) = simp(der(c, r))
-    inj(r, c, f_simp(lex_simp(r_simp, cs)))
-  }
-}
-
-def lexing_simp(r: Rexp, s: String) = env(lex_simp(r, s.toList))
-
-
-// The Lexing Rules for the Fun Language
-
-def PLUS(r: Rexp) = r ~ r.%
-
-val SYM = "a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" | 
-          "l" | "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" | 
-          "w" | "x" | "y" | "z" | "T" | "_"
-val DIGIT = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
-val ID = SYM ~ (SYM | DIGIT).% 
-val NUM = PLUS(DIGIT)
-val KEYWORD : Rexp = "if" | "then" | "else" | "write" | "def"
-val SEMI: Rexp = ";"
-val OP: Rexp = "=" | "==" | "-" | "+" | "*" | "!=" | "<" | ">" | "<=" | ">=" | "%" | "/"
-val WHITESPACE = PLUS(" " | "\n" | "\t")
-val RPAREN: Rexp = ")"
-val LPAREN: Rexp = "("
-val COMMA: Rexp = ","
-val ALL = SYM | DIGIT | OP | " " | ":" | ";" | "\"" | "=" | "," | "(" | ")"
-val ALL2 = ALL | "\n"
-val COMMENT = ("/*" ~ ALL2.% ~ "*/") | ("//" ~ ALL.% ~ "\n")
-
-
-val WHILE_REGS = (("k" $ KEYWORD) | 
-                  ("i" $ ID) | 
-                  ("o" $ OP) | 
-                  ("n" $ NUM) | 
-                  ("s" $ SEMI) | 
-                  ("c" $ COMMA) |
-                  ("pl" $ LPAREN) |
-                  ("pr" $ RPAREN) |
-                  ("w" $ (WHITESPACE | COMMENT))).%
-
-
-
-// The tokens for the Fun language
-
-abstract class Token
-case object T_SEMI extends Token
-case object T_COMMA extends Token
-case object T_LPAREN extends Token
-case object T_RPAREN extends Token
-case class T_ID(s: String) extends Token
-case class T_OP(s: String) extends Token
-case class T_NUM(n: Int) extends Token
-case class T_KWD(s: String) extends Token
+object Compiler {
 
-val token : PartialFunction[(String, String), Token] = {
-  case ("k", s) => T_KWD(s)
-  case ("i", s) => T_ID(s)
-  case ("o", s) => T_OP(s)
-  case ("n", s) => T_NUM(s.toInt)
-  case ("s", _) => T_SEMI
-  case ("c", _) => T_COMMA
-  case ("pl", _) => T_LPAREN
-  case ("pr", _) => T_RPAREN
-}
-
-
-def tokenise(s: String) : List[Token] = 
-  lexing_simp(WHILE_REGS, s).collect(token)
-
-
-
-// Parser combinators
-abstract class Parser[I, T](implicit ev: I => Seq[_]) {
-  def parse(ts: I): Set[(T, I)]
-
-  def parse_all(ts: I) : Set[T] =
-    for ((head, tail) <- parse(ts); if (tail.isEmpty)) yield head
-
-  def parse_single(ts: I) : T = parse_all(ts).toList match {
-    case List(t) => t
-    case _ => { println ("Parse Error\n") ; sys.exit(-1) }
-  }
-}
-
-case class ~[+A, +B](_1: A, _2: B)
-
-class SeqParser[I, T, S](p: => Parser[I, T], 
-                         q: => Parser[I, S])(implicit ev: I => Seq[_]) extends Parser[I, ~[T, S]] {
-  def parse(sb: I) = 
-    for ((head1, tail1) <- p.parse(sb); 
-         (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2)
-}
-
-class AltParser[I, T](p: => Parser[I, T], 
-                      q: => Parser[I, T])(implicit ev: I => Seq[_]) extends Parser[I, T] {
-  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)   
-}
-
-class FunParser[I, T, S](p: => Parser[I, T], 
-                         f: T => S)(implicit ev: I => Seq[_]) extends Parser[I, S] {
-  def parse(sb: I) = 
-    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
-}
-
-implicit def ParserOps[I, T](p: Parser[I, T])(implicit ev: I => Seq[_]) = new {
-  def || (q : => Parser[I, T]) = new AltParser[I, T](p, q)
-  def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f)
-  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
-}
-
-def ListParser[I, T, S](p: => Parser[I, T], 
-                        q: => Parser[I, S])(implicit ev: I => Seq[_]): Parser[I, List[T]] = {
-  (p ~ q ~ ListParser(p, q)) ==> { case x ~ _ ~ z => x :: z : List[T] } ||
-  (p ==> ((s) => List(s)))
-}
-
-case class TokParser(tok: Token) extends Parser[List[Token], Token] {
-  def parse(ts: List[Token]) = ts match {
-    case t::ts if (t == tok) => Set((t, ts)) 
-    case _ => Set ()
-  }
-}
-
-implicit def token2tparser(t: Token) = TokParser(t)
-
-implicit def TokOps(t: Token) = new {
-  def || (q : => Parser[List[Token], Token]) = new AltParser[List[Token], Token](t, q)
-  def ==>[S] (f: => Token => S) = new FunParser[List[Token], Token, S](t, f)
-  def ~[S](q : => Parser[List[Token], S]) = new SeqParser[List[Token], Token, S](t, q)
-}
-
-case object NumParser extends Parser[List[Token], Int] {
-  def parse(ts: List[Token]) = ts match {
-    case T_NUM(n)::ts => Set((n, ts)) 
-    case _ => Set ()
-  }
-}
-
-case object IdParser extends Parser[List[Token], String] {
-  def parse(ts: List[Token]) = ts match {
-    case T_ID(s)::ts => Set((s, ts)) 
-    case _ => Set ()
-  }
-}
-
-
-
-// Abstract syntax trees for Fun
-abstract class Exp
-abstract class BExp 
-abstract class Decl
+// Abstract syntax trees for the Fun language
+abstract class Exp extends Serializable 
+abstract class BExp extends Serializable 
+abstract class Decl extends Serializable 
 
 case class Def(name: String, args: List[String], body: Exp) extends Decl
 case class Main(e: Exp) extends Decl
@@ -333,48 +23,6 @@
 case class Bop(o: String, a1: Exp, a2: Exp) extends BExp
 
 
-
-// Grammar Rules for Fun
-
-// arithmetic expressions
-lazy val Exp: Parser[List[Token], Exp] = 
-  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp) ==>
-    { case _ ~ x ~ _ ~ y ~ _ ~ z => If(x, y, z): Exp } ||
-  (M ~ T_SEMI ~ Exp) ==> { case x ~ _ ~ y => Sequence(x, y): Exp } || M
-lazy val M: Parser[List[Token], Exp] =
-  (T_KWD("write") ~ L) ==> { case _ ~ y => Write(y): Exp } || L
-lazy val L: Parser[List[Token], Exp] = 
-  (T ~ T_OP("+") ~ Exp) ==> { case x ~ _ ~ z => Aop("+", x, z): Exp } ||
-  (T ~ T_OP("-") ~ Exp) ==> { case x ~ _ ~ z => Aop("-", x, z): Exp } || T  
-lazy val T: Parser[List[Token], Exp] = 
-  (F ~ T_OP("*") ~ T) ==> { case x ~ _ ~ z => Aop("*", x, z): Exp } || 
-  (F ~ T_OP("/") ~ T) ==> { case x ~ _ ~ z => Aop("/", x, z): Exp } || 
-  (F ~ T_OP("%") ~ T) ==> { case x ~ _ ~ z => Aop("%", x, z): Exp } || F
-lazy val F: Parser[List[Token], Exp] = 
-  (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN) ==> 
-    { case x ~ _ ~ z ~ _ => Call(x, z): Exp } ||
-  (T_LPAREN ~ Exp ~ T_RPAREN) ==> { case _ ~ y ~ _ => y: Exp } || 
-  IdParser ==> { case x => Var(x): Exp } || 
-  NumParser ==> { case x => Num(x): Exp }
-
-// boolean expressions
-lazy val BExp: Parser[List[Token], BExp] = 
-  (Exp ~ T_OP("==") ~ Exp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } || 
-  (Exp ~ T_OP("!=") ~ Exp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } || 
-  (Exp ~ T_OP("<") ~ Exp) ==> { case x ~ _ ~ z => Bop("<", x, z): BExp } || 
-  (Exp ~ T_OP(">") ~ Exp) ==> { case x ~ _ ~ z => Bop("<", z, x): BExp } || 
-  (Exp ~ T_OP("<=") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", x, z): BExp } || 
-  (Exp ~ T_OP("=>") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", z, x): BExp }  
-
-lazy val Defn: Parser[List[Token], Decl] =
-   (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(IdParser, T_COMMA) ~ T_RPAREN ~ T_OP("=") ~ Exp) ==>
-     { case _ ~ y ~ _ ~ w ~ _ ~ _ ~ r => Def(y, w, r): Decl }
-
-lazy val Prog: Parser[List[Token], List[Decl]] =
-  (Defn ~ T_SEMI ~ Prog) ==> { case x ~ _ ~ z => x :: z : List[Decl] } ||
-  (Exp ==> ((s) => List(Main(s)) : List[Decl]))
-
-
 // compiler - built-in functions 
 // copied from http://www.ceng.metu.edu.tr/courses/ceng444/link/jvm-cpm.html
 //
@@ -410,6 +58,7 @@
   case Aop(_, a1, a2) => max_stack_exp(a1) + max_stack_exp(a2)
   case Sequence(e1, e2) => List(max_stack_exp(e1), max_stack_exp(e2)).max
 }
+
 def max_stack_bexp(e: BExp): Int = e match {
   case Bop(_, a1, a2) => max_stack_exp(a1) + max_stack_exp(a2)
 }
@@ -516,23 +165,29 @@
   (end - start)/(i * 1.0e9)
 }
 
-def compile(class_name: String, input: String) : String = {
-  val tks = tokenise(input)
-  val ast = Prog.parse_single(tks)
+def deserialise[T](fname: String) : T = {
+  val in = new ObjectInputStream(new FileInputStream(fname))
+  val data = in.readObject.asInstanceOf[T]
+  in.close
+  data
+}
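+// (e.g. deserialise[List[Decl]]("fact.prs") reads back the parse
+// tree written by fun_parser.scala)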
+
+
+def compile(class_name: String) : String = {
+  val ast = deserialise[List[Decl]](class_name ++ ".prs") 
   val instructions = ast.map(compile_decl).mkString
   (library + instructions).replaceAllLiterally("XXX", class_name)
 }
 
-def compile_file(class_name: String) = {
-  val input = io.Source.fromFile(s"${class_name}.fun").mkString
-  val output = compile(class_name, input)
+def compile_to_file(class_name: String) = {
+  val output = compile(class_name)
   scala.tools.nsc.io.File(s"${class_name}.j").writeAll(output)
 }
 
 import scala.sys.process._
 
 def compile_run(class_name: String) : Unit = {
-  compile_file(class_name)
+  compile_to_file(class_name)
   (s"java -jar jvm/jasmin-2.4/jasmin.jar ${class_name}.j").!!
   println("Time: " + time_needed(2, (s"java ${class_name}/${class_name}").!))
 }
@@ -541,4 +196,10 @@
 // some examples of .fun files
 //compile_file("fact")
 //compile_run("defs")
-compile_run("fact")
+//compile_run("fact")
+
+def main(args: Array[String]) = 
+   compile_run(args(0))
+
+
+}
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/progs/fun_parser.scala	Wed Oct 02 02:09:48 2019 +0100
@@ -0,0 +1,189 @@
+// A Small Compiler for a Simple Functional Language
+// (the parser part of the compiler)
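+//
+// call with
+//
+//     scala fun_parser.scala fact.tks
+//
+// (the .tks file is produced by fun_tokens.scala)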
+
+object Fun_Parser {
+
+import scala.language.implicitConversions    
+import scala.language.reflectiveCalls
+import java.io._
+
+abstract class Token extends Serializable 
+case object T_SEMI extends Token
+case object T_COMMA extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case class T_ID(s: String) extends Token
+case class T_OP(s: String) extends Token
+case class T_NUM(n: Int) extends Token
+case class T_KWD(s: String) extends Token
+
+
+// Parser combinators
+//    the type parameter I needs to be convertible to a Seq
+//
+abstract class Parser[I, T](implicit ev: I => Seq[_]) {
+  def parse(ts: I): Set[(T, I)]
+
+  def parse_all(ts: I) : Set[T] =
+    for ((head, tail) <- parse(ts); if (tail.isEmpty)) yield head
+
+  def parse_single(ts: I) : T = parse_all(ts).toList match {
+    case List(t) => t
+    case _ => { println ("Parse Error\n") ; sys.exit(-1) }
+  }
+}
+
+// convenience for writing grammar rules
+case class ~[+A, +B](_1: A, _2: B)
+
+class SeqParser[I, T, S](p: => Parser[I, T], 
+                         q: => Parser[I, S])(implicit ev: I => Seq[_]) extends Parser[I, ~[T, S]] {
+  def parse(sb: I) = 
+    for ((head1, tail1) <- p.parse(sb); 
+         (head2, tail2) <- q.parse(tail1)) yield (new ~(head1, head2), tail2)
+}
+
+class AltParser[I, T](p: => Parser[I, T], 
+                      q: => Parser[I, T])(implicit ev: I => Seq[_]) extends Parser[I, T] {
+  def parse(sb: I) = p.parse(sb) ++ q.parse(sb)   
+}
+
+class FunParser[I, T, S](p: => Parser[I, T], 
+                         f: T => S)(implicit ev: I => Seq[_]) extends Parser[I, S] {
+  def parse(sb: I) = 
+    for ((head, tail) <- p.parse(sb)) yield (f(head), tail)
+}
+
+// convenient combinators
+implicit def ParserOps[I, T](p: Parser[I, T])(implicit ev: I => Seq[_]) = new {
+  def || (q : => Parser[I, T]) = new AltParser[I, T](p, q)
+  def ==>[S] (f: => T => S) = new FunParser[I, T, S](p, f)
+  def ~[S] (q : => Parser[I, S]) = new SeqParser[I, T, S](p, q)
+}
+
+def ListParser[I, T, S](p: => Parser[I, T], 
+                        q: => Parser[I, S])(implicit ev: I => Seq[_]): Parser[I, List[T]] = {
+  (p ~ q ~ ListParser(p, q)) ==> { case x ~ _ ~ z => x :: z : List[T] } ||
+  (p ==> ((s) => List(s)))
+}
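+// e.g. ListParser(IdParser, T_COMMA) recognises non-empty,
+// comma-separated lists of identifiers; used for Call and Defn below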
+
+case class TokParser(tok: Token) extends Parser[List[Token], Token] {
+  def parse(ts: List[Token]) = ts match {
+    case t::ts if (t == tok) => Set((t, ts)) 
+    case _ => Set ()
+  }
+}
+
+implicit def token2tparser(t: Token) = TokParser(t)
+
+implicit def TokOps(t: Token) = new {
+  def || (q : => Parser[List[Token], Token]) = new AltParser[List[Token], Token](t, q)
+  def ==>[S] (f: => Token => S) = new FunParser[List[Token], Token, S](t, f)
+  def ~[S](q : => Parser[List[Token], S]) = new SeqParser[List[Token], Token, S](t, q)
+}
+
+case object NumParser extends Parser[List[Token], Int] {
+  def parse(ts: List[Token]) = ts match {
+    case T_NUM(n)::ts => Set((n, ts)) 
+    case _ => Set ()
+  }
+}
+
+case object IdParser extends Parser[List[Token], String] {
+  def parse(ts: List[Token]) = ts match {
+    case T_ID(s)::ts => Set((s, ts)) 
+    case _ => Set ()
+  }
+}
+
+
+
+// Abstract syntax trees for the Fun language
+abstract class Exp extends Serializable 
+abstract class BExp extends Serializable 
+abstract class Decl extends Serializable 
+
+case class Def(name: String, args: List[String], body: Exp) extends Decl
+case class Main(e: Exp) extends Decl
+
+case class Call(name: String, args: List[Exp]) extends Exp
+case class If(a: BExp, e1: Exp, e2: Exp) extends Exp
+case class Write(e: Exp) extends Exp
+case class Var(s: String) extends Exp
+case class Num(i: Int) extends Exp
+case class Aop(o: String, a1: Exp, a2: Exp) extends Exp
+case class Sequence(e1: Exp, e2: Exp) extends Exp
+case class Bop(o: String, a1: Exp, a2: Exp) extends BExp
+
+
+
+// Grammar Rules for the Fun language
+
+// arithmetic expressions
+lazy val Exp: Parser[List[Token], Exp] = 
+  (T_KWD("if") ~ BExp ~ T_KWD("then") ~ Exp ~ T_KWD("else") ~ Exp) ==>
+    { case _ ~ x ~ _ ~ y ~ _ ~ z => If(x, y, z): Exp } ||
+  (M ~ T_SEMI ~ Exp) ==> { case x ~ _ ~ y => Sequence(x, y): Exp } || M
+lazy val M: Parser[List[Token], Exp] =
+  (T_KWD("write") ~ L) ==> { case _ ~ y => Write(y): Exp } || L
+lazy val L: Parser[List[Token], Exp] = 
+  (T ~ T_OP("+") ~ Exp) ==> { case x ~ _ ~ z => Aop("+", x, z): Exp } ||
+  (T ~ T_OP("-") ~ Exp) ==> { case x ~ _ ~ z => Aop("-", x, z): Exp } || T  
+lazy val T: Parser[List[Token], Exp] = 
+  (F ~ T_OP("*") ~ T) ==> { case x ~ _ ~ z => Aop("*", x, z): Exp } || 
+  (F ~ T_OP("/") ~ T) ==> { case x ~ _ ~ z => Aop("/", x, z): Exp } || 
+  (F ~ T_OP("%") ~ T) ==> { case x ~ _ ~ z => Aop("%", x, z): Exp } || F
+lazy val F: Parser[List[Token], Exp] = 
+  (IdParser ~ T_LPAREN ~ ListParser(Exp, T_COMMA) ~ T_RPAREN) ==> 
+    { case x ~ _ ~ z ~ _ => Call(x, z): Exp } ||
+  (T_LPAREN ~ Exp ~ T_RPAREN) ==> { case _ ~ y ~ _ => y: Exp } || 
+  IdParser ==> { case x => Var(x): Exp } || 
+  NumParser ==> { case x => Num(x): Exp }
+
+// boolean expressions
+lazy val BExp: Parser[List[Token], BExp] = 
+  (Exp ~ T_OP("==") ~ Exp) ==> { case x ~ _ ~ z => Bop("==", x, z): BExp } || 
+  (Exp ~ T_OP("!=") ~ Exp) ==> { case x ~ _ ~ z => Bop("!=", x, z): BExp } || 
+  (Exp ~ T_OP("<") ~ Exp) ==> { case x ~ _ ~ z => Bop("<", x, z): BExp } || 
+  (Exp ~ T_OP(">") ~ Exp) ==> { case x ~ _ ~ z => Bop("<", z, x): BExp } || 
+  (Exp ~ T_OP("<=") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", x, z): BExp } || 
+  (Exp ~ T_OP("=>") ~ Exp) ==> { case x ~ _ ~ z => Bop("<=", z, x): BExp }  
+
+lazy val Defn: Parser[List[Token], Decl] =
+   (T_KWD("def") ~ IdParser ~ T_LPAREN ~ ListParser(IdParser, T_COMMA) ~ T_RPAREN ~ T_OP("=") ~ Exp) ==>
+     { case _ ~ y ~ _ ~ w ~ _ ~ _ ~ r => Def(y, w, r): Decl }
+
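+// a program is a sequence of definitions separated by semicolons,
+// terminated by a single "main" expression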
+lazy val Prog: Parser[List[Token], List[Decl]] =
+  (Defn ~ T_SEMI ~ Prog) ==> { case x ~ _ ~ z => x :: z : List[Decl] } ||
+  (Exp ==> ((s) => List(Main(s)) : List[Decl]))
+
+
+
+// Reading tokens and writing parse trees
+
+def serialise[T](fname: String, data: T) = {
+  val out = new ObjectOutputStream(new FileOutputStream(fname))
+  out.writeObject(data)
+  out.close
+}
+
+def deserialise[T](fname: String) : T = {
+  val in = new ObjectInputStream(new FileInputStream(fname))
+  val data = in.readObject.asInstanceOf[T]
+  in.close
+  data
+}
+
+
+def main(args: Array[String]) = {
+  val fname = args(0)
+  val pname = fname.stripSuffix(".tks") ++ ".prs"
+  val tks = deserialise[List[Token]](fname)
+  serialise(pname, Prog.parse_single(tks))
+
+  // testing whether read-back is working
+  //val ptree = deserialise[List[Decl]](pname)
+  //println(s"Reading back from ${pname}:\n${ptree.mkString("\n")}")  
+}
+
+}
\ No newline at end of file
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/progs/fun_tokens.scala	Wed Oct 02 02:09:48 2019 +0100
@@ -0,0 +1,258 @@
+// A tokeniser for the Fun language
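+//
+// call with
+//
+//     scala fun_tokens.scala fact.fun
+//
+//     scala fun_tokens.scala defs.fun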
+
+
+object Fun_Tokens {
+
+import scala.language.implicitConversions    
+import scala.language.reflectiveCalls 
+
+abstract class Rexp 
+case object ZERO extends Rexp
+case object ONE extends Rexp
+case class CHAR(c: Char) extends Rexp
+case class ALT(r1: Rexp, r2: Rexp) extends Rexp 
+case class SEQ(r1: Rexp, r2: Rexp) extends Rexp 
+case class STAR(r: Rexp) extends Rexp 
+case class RECD(x: String, r: Rexp) extends Rexp
+  
+abstract class Val
+case object Empty extends Val
+case class Chr(c: Char) extends Val
+case class Sequ(v1: Val, v2: Val) extends Val
+case class Left(v: Val) extends Val
+case class Right(v: Val) extends Val
+case class Stars(vs: List[Val]) extends Val
+case class Rec(x: String, v: Val) extends Val
+   
+// some convenience for typing in regular expressions
+def charlist2rexp(s : List[Char]): Rexp = s match {
+  case Nil => ONE
+  case c::Nil => CHAR(c)
+  case c::s => SEQ(CHAR(c), charlist2rexp(s))
+}
+implicit def string2rexp(s : String) : Rexp = 
+  charlist2rexp(s.toList)
+
+implicit def RexpOps(r: Rexp) = new {
+  def | (s: Rexp) = ALT(r, s)
+  def % = STAR(r)
+  def ~ (s: Rexp) = SEQ(r, s)
+}
+
+implicit def stringOps(s: String) = new {
+  def | (r: Rexp) = ALT(s, r)
+  def | (r: String) = ALT(s, r)
+  def % = STAR(s)
+  def ~ (r: Rexp) = SEQ(s, r)
+  def ~ (r: String) = SEQ(s, r)
+  def $ (r: Rexp) = RECD(s, r)
+}
+
+def nullable (r: Rexp) : Boolean = r match {
+  case ZERO => false
+  case ONE => true
+  case CHAR(_) => false
+  case ALT(r1, r2) => nullable(r1) || nullable(r2)
+  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
+  case STAR(_) => true
+  case RECD(_, r1) => nullable(r1)
+}
+
+def der (c: Char, r: Rexp) : Rexp = r match {
+  case ZERO => ZERO
+  case ONE => ZERO
+  case CHAR(d) => if (c == d) ONE else ZERO
+  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
+  case SEQ(r1, r2) => 
+    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
+    else SEQ(der(c, r1), r2)
+  case STAR(r) => SEQ(der(c, r), STAR(r))
+  case RECD(_, r1) => der(c, r1)
+}
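+// e.g. der('a', SEQ(CHAR('a'), CHAR('b'))) == SEQ(ONE, CHAR('b')),
+// which matches just the remaining string "b"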
+
+
+// extracts a string from a value
+def flatten(v: Val) : String = v match {
+  case Empty => ""
+  case Chr(c) => c.toString
+  case Left(v) => flatten(v)
+  case Right(v) => flatten(v)
+  case Sequ(v1, v2) => flatten(v1) + flatten(v2)
+  case Stars(vs) => vs.map(flatten).mkString
+  case Rec(_, v) => flatten(v)
+}
+
+// extracts an environment from a value;
+// used for tokenising a string
+def env(v: Val) : List[(String, String)] = v match {
+  case Empty => Nil
+  case Chr(c) => Nil
+  case Left(v) => env(v)
+  case Right(v) => env(v)
+  case Sequ(v1, v2) => env(v1) ::: env(v2)
+  case Stars(vs) => vs.flatMap(env)
+  case Rec(x, v) => (x, flatten(v))::env(v)
+}
+
+// The Injection Part of the lexer
+
+def mkeps(r: Rexp) : Val = r match {
+  case ONE => Empty
+  case ALT(r1, r2) => 
+    if (nullable(r1)) Left(mkeps(r1)) else Right(mkeps(r2))
+  case SEQ(r1, r2) => Sequ(mkeps(r1), mkeps(r2))
+  case STAR(r) => Stars(Nil)
+  case RECD(x, r) => Rec(x, mkeps(r))
+}
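+// mkeps constructs the value for how a nullable regular expression
+// matches the empty string, preferring left alternatives in ALTs;
+// e.g. mkeps(STAR(CHAR('a'))) == Stars(Nil)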
+
+def inj(r: Rexp, c: Char, v: Val) : Val = (r, v) match {
+  case (STAR(r), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
+  case (SEQ(r1, r2), Sequ(v1, v2)) => Sequ(inj(r1, c, v1), v2)
+  case (SEQ(r1, r2), Left(Sequ(v1, v2))) => Sequ(inj(r1, c, v1), v2)
+  case (SEQ(r1, r2), Right(v2)) => Sequ(mkeps(r1), inj(r2, c, v2))
+  case (ALT(r1, r2), Left(v1)) => Left(inj(r1, c, v1))
+  case (ALT(r1, r2), Right(v2)) => Right(inj(r2, c, v2))
+  case (CHAR(d), Empty) => Chr(c) 
+  case (RECD(x, r1), _) => Rec(x, inj(r1, c, v))
+  case _ => { println ("Injection error") ; sys.exit(-1) } 
+}
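+// inj reverses one derivative step: from a value showing how
+// der(c, r) matches a string s, it constructs a value showing
+// how r matches c::s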
+
+// some "rectification" functions for simplification
+def F_ID(v: Val): Val = v
+def F_RIGHT(f: Val => Val) = (v:Val) => Right(f(v))
+def F_LEFT(f: Val => Val) = (v:Val) => Left(f(v))
+def F_ALT(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
+  case Right(v) => Right(f2(v))
+  case Left(v) => Left(f1(v))
+}
+def F_SEQ(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
+  case Sequ(v1, v2) => Sequ(f1(v1), f2(v2))
+}
+def F_SEQ_Empty1(f1: Val => Val, f2: Val => Val) = 
+  (v:Val) => Sequ(f1(Empty), f2(v))
+def F_SEQ_Empty2(f1: Val => Val, f2: Val => Val) = 
+  (v:Val) => Sequ(f1(v), f2(Empty))
+def F_RECD(f: Val => Val) = (v:Val) => v match {
+  case Rec(x, v) => Rec(x, f(v))
+}
+def F_ERROR(v: Val): Val = throw new Exception("error")
+
+def simp(r: Rexp): (Rexp, Val => Val) = r match {
+  case ALT(r1, r2) => {
+    val (r1s, f1s) = simp(r1)
+    val (r2s, f2s) = simp(r2)
+    (r1s, r2s) match {
+      case (ZERO, _) => (r2s, F_RIGHT(f2s))
+      case (_, ZERO) => (r1s, F_LEFT(f1s))
+      case _ => if (r1s == r2s) (r1s, F_LEFT(f1s))
+                else (ALT (r1s, r2s), F_ALT(f1s, f2s)) 
+    }
+  }
+  case SEQ(r1, r2) => {
+    val (r1s, f1s) = simp(r1)
+    val (r2s, f2s) = simp(r2)
+    (r1s, r2s) match {
+      case (ZERO, _) => (ZERO, F_ERROR)
+      case (_, ZERO) => (ZERO, F_ERROR)
+      case (ONE, _) => (r2s, F_SEQ_Empty1(f1s, f2s))
+      case (_, ONE) => (r1s, F_SEQ_Empty2(f1s, f2s))
+      case _ => (SEQ(r1s,r2s), F_SEQ(f1s, f2s))
+    }
+  }
+  case RECD(x, r1) => {
+    val (r1s, f1s) = simp(r1)
+    (RECD(x, r1s), F_RECD(f1s))
+  }
+  case r => (r, F_ID)
+}
+
+// lexing functions including simplification
+def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
+  case Nil => if (nullable(r)) mkeps(r) else { println ("Lexing Error") ; sys.exit(-1) } 
+  case c::cs => {
+    val (r_simp, f_simp) = simp(der(c, r))
+    inj(r, c, f_simp(lex_simp(r_simp, cs)))
+  }
+}
+
+def lexing_simp(r: Rexp, s: String) = env(lex_simp(r, s.toList))
+
+
+// The Lexing Rules for the Fun Language
+
+def PLUS(r: Rexp) = r ~ r.%
+
+val SYM = "a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" | 
+          "l" | "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" | 
+          "w" | "x" | "y" | "z" | "T" | "_"
+val DIGIT = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
+val ID = SYM ~ (SYM | DIGIT).% 
+val NUM = PLUS(DIGIT)
+val KEYWORD : Rexp = "if" | "then" | "else" | "write" | "def"
+val SEMI: Rexp = ";"
+val OP: Rexp = "=" | "==" | "-" | "+" | "*" | "!=" | "<" | ">" | "<=" | ">=" | "%" | "/"
+val WHITESPACE = PLUS(" " | "\n" | "\t")
+val RPAREN: Rexp = ")"
+val LPAREN: Rexp = "("
+val COMMA: Rexp = ","
+val ALL = SYM | DIGIT | OP | " " | ":" | ";" | "\"" | "=" | "," | "(" | ")"
+val ALL2 = ALL | "\n"
+val COMMENT = ("/*" ~ ALL2.% ~ "*/") | ("//" ~ ALL.% ~ "\n")
+
+
+val WHILE_REGS = (("k" $ KEYWORD) | 
+                  ("i" $ ID) | 
+                  ("o" $ OP) | 
+                  ("n" $ NUM) | 
+                  ("s" $ SEMI) | 
+                  ("c" $ COMMA) |
+                  ("pl" $ LPAREN) |
+                  ("pr" $ RPAREN) |
+                  ("w" $ (WHITESPACE | COMMENT))).%
+
+
+
+// The tokens for the Fun language
+
+import java.io._
+
+abstract class Token extends Serializable 
+case object T_SEMI extends Token
+case object T_COMMA extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case class T_ID(s: String) extends Token
+case class T_OP(s: String) extends Token
+case class T_NUM(n: Int) extends Token
+case class T_KWD(s: String) extends Token
+
+val token : PartialFunction[(String, String), Token] = {
+  case ("k", s) => T_KWD(s)
+  case ("i", s) => T_ID(s)
+  case ("o", s) => T_OP(s)
+  case ("n", s) => T_NUM(s.toInt)
+  case ("s", _) => T_SEMI
+  case ("c", _) => T_COMMA
+  case ("pl", _) => T_LPAREN
+  case ("pr", _) => T_RPAREN
+}
+
+
+def tokenise(s: String) : List[Token] = 
+  lexing_simp(WHILE_REGS, s).collect(token)
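+// e.g. tokenise("if x") gives List(T_KWD("if"), T_ID("x"));
+// whitespace and comments carry the tag "w" and are filtered
+// out by collect, since token is not defined for them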
+
+def serialise[T](fname: String, data: T) = {
+  val out = new ObjectOutputStream(new FileOutputStream(fname))
+  out.writeObject(data)
+  out.close
+}
+
+def main(args: Array[String]) = {
+  val fname = args(0)
+  val file = io.Source.fromFile(fname).mkString
+  val tks = fname.stripSuffix(".fun") ++ ".tks"
+  serialise(tks, tokenise(file))
+}
+
+
+}
\ No newline at end of file
--- a/progs/tokenise.scala	Tue Oct 01 23:49:39 2019 +0100
+++ b/progs/tokenise.scala	Wed Oct 02 02:09:48 2019 +0100
@@ -1,8 +1,13 @@
-// A simple lexer inspired by work of Sulzmann & Lu
-//==================================================
+// A simple tokeniser based on the Sulzmann & Lu algorithm
+//=========================================================
+// 
+// call with 
+//
+//     scala tokenise.scala fib.while
+//
+//     scala tokenise.scala loops.while
 
-
-object Lexer {
+object Tokenise {
 
 import scala.language.implicitConversions    
 import scala.language.reflectiveCalls
@@ -217,49 +222,9 @@
                   ("w" $ WHITESPACE)).%
 
 
-// escapes strings and prints them out as "", "\n" and so on
-def esc(raw: String): String = {
-  import scala.reflect.runtime.universe._
-  Literal(Constant(raw)).toString
-}
-
-def escape(tks: List[(String, String)]) =
-  tks.map{ case (s1, s2) => (s1, esc(s2))}
-
-val prog2 = """
-write "Fib";
-read n;
-minus1 := 0;
-minus2 := 1;
-while n > 0 do {
-  temp := minus2;
-  minus2 := minus1 + minus2;
-  minus1 := temp;
-  n := n - 1
-};
-write "Result";
-write minus2
-"""
-
-val prog3 = """
-start := 1000;
-x := start;
-y := start;
-z := start;
-while 0 < x do {
- while 0 < y do {
-  while 0 < z do {
-    z := z - 1
-  };
-  z := start;
-  y := y - 1
- };     
- y := start;
- x := x - 1
-}
-"""
 
 // Generating tokens for the WHILE language
+// and serialising them into a .tks file
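+// (the .tks file can be read back and printed with detokenise.scala)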
 
 import java.io._
 
@@ -295,7 +260,10 @@
 }
 
 def main(args: Array[String]) = {
-  serialise("/tmp/nflx", tokenise(prog3))
+  val fname = args(0)
+  val file = io.Source.fromFile(fname).mkString
+  val tks = fname.stripSuffix(".while") ++ ".tks"
+  serialise(tks, tokenise(file))
 }