progs/fun_tokens.scala
changeset 655:3d04ee04966d
parent 645:30943d5491b6
   case ("pl", _) => T_LPAREN
   case ("pr", _) => T_RPAREN
 }
 
 
-def tokenise(s: String) : List[Token] = 
-  lexing_simp(FUN_REGS, s).collect(token)
+def tokenise(s: String) : List[Token] = {
+  val tks = lexing_simp(FUN_REGS, s).collect(token)
+  if (tks.length != 0) tks
+  else { println (s"Tokenise Error") ; sys.exit(-1) }
+}
 
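For context (an editorial sketch, not part of the changeset): after this change, tokenise no longer hands an empty token list to later stages when the lexer matches nothing in the input; it prints "Tokenise Error" and terminates. The helper below illustrates the intended use of tokenise together with serialise from this file; the name lex_and_save and its parameters are illustrative assumptions.

// hypothetical helper, not part of fun_tokens.scala
def lex_and_save(prog: String, fname: String) = {
  val tks = tokenise(prog)   // exits with "Tokenise Error" if no tokens are produced
  serialise(fname, tks)      // writes the token list via an ObjectOutputStream
}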
 def serialise[T](fname: String, data: T) = {
   import scala.util.Using
   Using(new ObjectOutputStream(new FileOutputStream(fname))) {
     out => out.writeObject(data)