diff -r 08375ca3874e -r b4f5714485e1 progs/detokenise.scala
--- a/progs/detokenise.scala	Tue Oct 01 23:49:39 2019 +0100
+++ b/progs/detokenise.scala	Wed Oct 02 02:09:48 2019 +0100
@@ -1,8 +1,13 @@
-// A simple lexer inspired by work of Sulzmann & Lu
-//==================================================
+// Detokenising the output of the Tokeniser
+//==========================================
+//
+// call with
+//
+// scala detokenise.scala fib.tks
+//
+// scala detokenise.scala loops.tks
-
-object Delexer {
+object Detokenise {
 
 import java.io._
 
@@ -24,7 +29,9 @@
 }
 
 def main(args: Array[String]) = {
-  println("TEST\n" ++ deserialise[List[Token]]("/tmp/nflx").mkString)
+  val fname = args(0)
+  val tks = deserialise[List[Token]](fname)
+  println(s"Reading back from ${fname}:\n${tks.mkString("\n")}")
 }
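
The new main reads a .tks file name from the command line and feeds it to a deserialise helper whose definition lies outside the changed hunks. A minimal sketch of how such a helper could look, assuming it wraps Java's built-in object serialisation (the object name and main below are illustrative, not taken from the repository):

// deserialise_sketch.scala
import java.io._

object DeserialiseSketch {

  // read back a value of type T that was previously written
  // to a file with an ObjectOutputStream
  def deserialise[T](fname: String): T = {
    val in = new ObjectInputStream(new FileInputStream(fname))
    val data = in.readObject.asInstanceOf[T]
    in.close()
    data
  }

  def main(args: Array[String]): Unit = {
    // e.g.  scala deserialise_sketch.scala fib.tks
    val fname = args(0)
    println(s"Reading back from ${fname}:\n${deserialise[Any](fname)}")
  }
}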