// progs/detokenise.scala
// author Christian Urban <urbanc@in.tum.de>
// Tue, 01 Oct 2019 23:40:25 +0100
// changeset 642 064afa8fc1d9
// child 644 b4f5714485e1
// permissions -rw-r--r--
// updated

// A simple detokeniser for the lexer inspired by work of Sulzmann & Lu
//=====================================================================


object Delexer {

import java.io._

// Token datatype: must structurally match the classes used by the
// tokeniser that serialised the token list, otherwise deserialisation
// fails with a ClassNotFoundException/InvalidClassException.
abstract class Token extends Serializable 
case object T_SEMI extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(n: Int) extends Token
case class T_KWD(s: String) extends Token
case class T_STR(s: String) extends Token

// Reads one serialised value of type T from the file `fname`.
// The cast via asInstanceOf is unavoidable with Java serialisation;
// a wrong T surfaces as a ClassCastException at the call site.
// The stream is now closed even when readObject throws (the original
// leaked the file handle on failure), and close() keeps its parens
// since it is a side-effecting call.
def deserialise[T](fname: String) : T = {
  val in = new ObjectInputStream(new FileInputStream(fname))
  try in.readObject.asInstanceOf[T]
  finally in.close()
}

// Entry point: deserialises a token list from /tmp/nflx and prints it.
def main(args: Array[String]) : Unit = {
  println("TEST\n" ++ deserialise[List[Token]]("/tmp/nflx").mkString)  
}


}