diff -r 1062a9512e79 -r 064afa8fc1d9 progs/detokenise.scala
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/progs/detokenise.scala	Tue Oct 01 23:40:25 2019 +0100
@@ -0,0 +1,31 @@
+// A simple lexer inspired by work of Sulzmann & Lu
+//==================================================
+
+
+object Delexer {
+
+import java.io._
+
+abstract class Token extends Serializable
+case object T_SEMI extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case class T_ID(s: String) extends Token
+case class T_OP(s: String) extends Token
+case class T_NUM(n: Int) extends Token
+case class T_KWD(s: String) extends Token
+case class T_STR(s: String) extends Token
+
+def deserialise[T](fname: String) : T = {
+  val in = new ObjectInputStream(new FileInputStream(fname))
+  val data = in.readObject.asInstanceOf[T]
+  in.close
+  data
+}
+
+def main(args: Array[String]) = {
+  println("TEST\n" ++ deserialise[List[Token]]("/tmp/nflx").mkString)
+}
+
+
+}
\ No newline at end of file
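
Note: the committed file only covers the reading side; the token list in /tmp/nflx would have to be written beforehand by a matching serialiser using the same java.io object streams. A minimal sketch of such a counterpart is below (the Serialiser object, the serialise helper, and the example call are illustrative assumptions, not part of the diff):

// Hypothetical counterpart to Delexer.deserialise: writes a value to disk
// with standard Java object serialisation so deserialise can read it back.
import java.io._

object Serialiser {

  def serialise[T](v: T, fname: String) : Unit = {
    val out = new ObjectOutputStream(new FileOutputStream(fname))
    out.writeObject(v)
    out.close()
  }

  // Example use (tks would be a List[Token] produced by a lexer):
  //   serialise(tks, "/tmp/nflx")
}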