@@ -247,11 +247,15 @@
   val tks = lexing_simp(FUN_REGS, s).collect(token)
   if (tks.length != 0) tks
   else { println (s"Tokenise Error") ; sys.exit(-1) }
 }
 
-import ammonite.ops._
+// pre-2.5.0 ammonite
+// import ammonite.ops._
+
+// post 2.5.0 ammonite
+import $ivy.`com.lihaoyi::os-lib:0.8.0`
 
 //@doc("Tokenising a file.")
 @main
 def main(fname: String) = {
   println(tokenise(os.read(os.pwd / fname)))
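
A note on the change above: the comments in the new version indicate that `ammonite.ops` is no longer available from Ammonite 2.5.0 onwards, so the script now fetches the standalone os-lib library through an `$ivy` magic import instead; the `os.read(os.pwd / fname)` call in `main` is identical on both sides of the diff, so only the import line moves. The sketch below is a minimal, self-contained version of the post-2.5.0 setup, assuming the file is run as an Ammonite script; the script name `tokens.sc` and the invocation are assumptions, and `tokenise` is omitted because it is defined earlier in the real file.

// Minimal sketch of the post-2.5.0 setup (assumed file name: tokens.sc).
// The $ivy magic import makes Ammonite fetch os-lib, which provides the
// os.read / os.pwd calls used by the script.
import $ivy.`com.lihaoyi::os-lib:0.8.0`

@main
def main(fname: String) = {
  // read the named file relative to the current working directory;
  // the real script passes the contents on to tokenise instead
  println(os.read(os.pwd / fname))
}

With Ammonite installed, something like `amm tokens.sc somefile.fun` (names assumed) would print the raw file contents, where the real script prints the token list.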