progs/detokenise.scala
changeset 644 b4f5714485e1
parent 642 064afa8fc1d9
child 645 30943d5491b6
equal deleted inserted replaced
643:08375ca3874e 644:b4f5714485e1
     1 // A simple lexer inspired by work of Sulzmann & Lu
     1 // Detokenising the output of Tokeniser
     2 //==================================================
     2 //=====================================
       
     3 //
       
     4 // call with 
       
     5 //
       
     6 //     scala detokenise.scala fib.tks
       
     7 //
       
     8 //     scala detokenise.scala loops.tks
     3 
     9 
     4 
    10 object Detokenise {
     5 object Delexer {
       
     6 
    11 
     7 import java.io._
    12 import java.io._
     8 
    13 
// Root of the token ADT. Tokens extend Serializable so they can be
// written to / read back from a file by the (de)serialise helpers in
// this file. NOTE(review): each line appears twice below because this
// chunk is a changeset diff view — both revisions carry the same code.
     9 abstract class Token extends Serializable 
    14 abstract class Token extends Serializable 
    10 case object T_SEMI extends Token
    15 case object T_SEMI extends Token
// Tail of the deserialise helper — its opening lines are not visible in
// this diff chunk, so only the cleanup and return are shown here.
// NOTE(review): `in` is presumably an object-input stream opened at the
// start of the method — confirm against the full file.
// `in.close` releases the underlying file handle; `data` (the value read
// earlier) is the method's result. Duplicated lines = old/new revisions.
    22   in.close
    27   in.close
    23   data
    28   data
    24 }
    29 }
    25 
    30 
    26 def main(args: Array[String]) = {
    31 def main(args: Array[String]) = {
    27   println("TEST\n" ++ deserialise[List[Token]]("/tmp/nflx").mkString)  
    32   val fname = args(0)
       
    33   val tks = deserialise[List[Token]](fname)
       
    34   println(s"Reading back from ${fname}:\n${tks.mkString("\n")}")  
    28 }
    35 }
    29 
    36 
    30 
    37 
    31 }
    38 }