updated

author      Christian Urban <urbanc@in.tum.de>
date        Wed, 02 Oct 2019 14:05:36 +0100
changeset   645 30943d5491b6
parent      644 b4f5714485e1
child       646 2abd285c66d1

files:
progs/detokenise.scala
progs/fun.scala
progs/fun_parser.scala
progs/fun_tokens.scala
progs/tokenise.scala
--- a/progs/detokenise.scala	Wed Oct 02 02:09:48 2019 +0100
+++ b/progs/detokenise.scala	Wed Oct 02 14:05:36 2019 +0100
@@ -10,6 +10,7 @@
 object Detokenise {
 
 import java.io._
+import scala.util._ 
 
 abstract class Token extends Serializable 
 case object T_SEMI extends Token
@@ -21,16 +22,16 @@
 case class T_KWD(s: String) extends Token
 case class T_STR(s: String) extends Token
 
-def deserialise[T](fname: String) : T = {
-  val in = new ObjectInputStream(new FileInputStream(fname))
-  val data = in.readObject.asInstanceOf[T]
-  in.close
-  data
+def deserialise[T](fname: String) : Try[T] = {
+  import scala.util.Using
+  Using(new ObjectInputStream(new FileInputStream(fname))) {
+    in => in.readObject.asInstanceOf[T]
+  }
 }
 
 def main(args: Array[String]) = {
   val fname = args(0)
-  val tks = deserialise[List[Token]](fname)
+  val tks = deserialise[List[Token]](fname).getOrElse(Nil)
   println(s"Reading back from ${fname}:\n${tks.mkString("\n")}")  
 }
 
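The change above swaps a manually closed ObjectInputStream for scala.util.Using, which closes the stream even when readObject throws. A minimal self-contained sketch of the pattern, assuming Scala 2.13+ (where Using was introduced); the List[String] payload is just for illustration:

import java.io._
import scala.util.{Try, Using}

def deserialise[T](fname: String): Try[T] =
  Using(new ObjectInputStream(new FileInputStream(fname))) {
    in => in.readObject.asInstanceOf[T]
  }

// a failed read surfaces as Failure(_) instead of a leaked handle;
// callers choose a fallback, as main does with getOrElse(Nil)
val tks = deserialise[List[String]]("example.tks").getOrElse(Nil)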
--- a/progs/fun.scala	Wed Oct 02 02:09:48 2019 +0100
+++ b/progs/fun.scala	Wed Oct 02 14:05:36 2019 +0100
@@ -1,10 +1,26 @@
 // A Small Compiler for a Simple Functional Language
 // (includes an external lexer and parser)
+//
+// call with 
+//
+//     scala fun.scala fact
+//
+//     scala fun.scala defs
+//
+// this will generate a .j file and run the jasmin
+// assembler (installed at jvm/jasmin-2.4/jasmin.jar);
+// it runs the resulting JVM class file twice for timing
+// purposes.
 
-import java.io._
+
+
 
 object Compiler {
 
+import java.io._  
+import scala.util._
+import scala.sys.process._
+
 // Abstract syntax trees for the Fun language
 abstract class Exp extends Serializable 
 abstract class BExp extends Serializable 
@@ -165,16 +181,15 @@
   (end - start)/(i * 1.0e9)
 }
 
-def deserialise[T](fname: String) : T = {
-  val in = new ObjectInputStream(new FileInputStream(fname))
-  val data = in.readObject.asInstanceOf[T]
-  in.close
-  data
+def deserialise[T](fname: String) : Try[T] = {
+  import scala.util.Using
+  Using(new ObjectInputStream(new FileInputStream(fname))) {
+    in => in.readObject.asInstanceOf[T]
+  }
 }
 
-
 def compile(class_name: String) : String = {
-  val ast = deserialise[List[Decl]](class_name ++ ".prs") 
+  val ast = deserialise[List[Decl]](class_name ++ ".prs").getOrElse(Nil) 
   val instructions = ast.map(compile_decl).mkString
   (library + instructions).replaceAllLiterally("XXX", class_name)
 }
@@ -184,9 +199,7 @@
   scala.tools.nsc.io.File(s"${class_name}.j").writeAll(output)
 }
 
-import scala.sys.process._
-
-def compile_run(class_name: String) : Unit = {
+def compile_and_run(class_name: String) : Unit = {
   compile_to_file(class_name)
   (s"java -jar jvm/jasmin-2.4/jasmin.jar ${class_name}.j").!!
   println("Time: " + time_needed(2, (s"java ${class_name}/${class_name}").!))
@@ -194,12 +207,13 @@
 
 
 // some examples of .fun files
-//compile_file("fact")
-//compile_run("defs")
-//compile_run("fact")
+//compile_to_file("fact")
+//compile_and_run("fact")
+//compile_and_run("defs")
+
 
 def main(args: Array[String]) = 
-   compile_run(args(0))
+   compile_and_run(args(0))
 
 
 }
\ No newline at end of file
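For context on compile_and_run: scala.sys.process (now imported at the top of the object) lets a plain String run as an external command. A small sketch of the two call styles used above; the helper name assemble_and_run is made up here, and the jasmin path is the one hard-wired in the header comment:

import scala.sys.process._

def assemble_and_run(class_name: String): Unit = {
  // .!! runs the command, returns its stdout, throws on a non-zero exit
  (s"java -jar jvm/jasmin-2.4/jasmin.jar ${class_name}.j").!!
  // .! inherits stdout/stderr and returns the exit code
  (s"java ${class_name}/${class_name}").!
}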
--- a/progs/fun_parser.scala	Wed Oct 02 02:09:48 2019 +0100
+++ b/progs/fun_parser.scala	Wed Oct 02 14:05:36 2019 +0100
@@ -1,10 +1,20 @@
-// A Small Compiler for a Simple Functional Language
-// (includes a lexer and a parser)
+// A parser for the Fun language
+//================================
+//
+// call with 
+//
+//     scala fun_parser.scala fact.tks
+//
+//     scala fun_parser.scala defs.tks
+//
+// this will generate a .prs file that can be deserialised back
+// into a list of declarations
 
 object Fun_Parser {
 
 import scala.language.implicitConversions    
 import scala.language.reflectiveCalls
+import scala.util._ 
 import java.io._
 
 abstract class Token extends Serializable 
@@ -162,28 +172,29 @@
 // Reading tokens and Writing parse trees
 
 def serialise[T](fname: String, data: T) = {
-  val out = new ObjectOutputStream(new FileOutputStream(fname))
-  out.writeObject(data)
-  out.close
+  import scala.util.Using
+  Using(new ObjectOutputStream(new FileOutputStream(fname))) {
+    out => out.writeObject(data)
+  }
 }
 
-def deserialise[T](fname: String) : T = {
-  val in = new ObjectInputStream(new FileInputStream(fname))
-  val data = in.readObject.asInstanceOf[T]
-  in.close
-  data
+def deserialise[T](fname: String) : Try[T] = {
+  import scala.util.Using
+  Using(new ObjectInputStream(new FileInputStream(fname))) {
+    in => in.readObject.asInstanceOf[T]
+  }
 }
 
 
-def main(args: Array[String]) = {
+def main(args: Array[String]) : Unit = {
   val fname = args(0)
   val pname = fname.stripSuffix(".tks") ++ ".prs"
-  val tks = deserialise[List[Token]](fname)
+  val tks = deserialise[List[Token]](fname).getOrElse(Nil)
   serialise(pname, Prog.parse_single(tks))
 
   // testing whether read-back is working
-  //val ptree = deserialise[List[Decl]](pname)
+  //val ptree = deserialise[List[Decl]](pname).get
   //println(s"Reading back from ${pname}:\n${ptree.mkString("\n")}")  
 }
 
-}
\ No newline at end of file
+}
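One subtlety of the rewritten serialise: wrapped in Using it now returns Try[Unit] rather than Unit, so a failed write becomes a value instead of an uncaught exception. A sketch under the same Scala 2.13+ assumption (the explicit result type is added here for clarity):

import java.io._
import scala.util.{Failure, Success, Try, Using}

def serialise[T](fname: String, data: T): Try[Unit] =
  Using(new ObjectOutputStream(new FileOutputStream(fname))) {
    out => out.writeObject(data)
  }

// main above discards the result; a caller that cares can inspect it
serialise("example.prs", List("d1", "d2")) match {
  case Success(_) => println("written")
  case Failure(e) => println(s"write failed: $e")
}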
--- a/progs/fun_tokens.scala	Wed Oct 02 02:09:48 2019 +0100
+++ b/progs/fun_tokens.scala	Wed Oct 02 14:05:36 2019 +0100
@@ -1,5 +1,16 @@
-// A tokeniser for the fun language
-
+// A tokeniser for the Fun language
+//==================================
+//
+// call with 
+//
+//     scala fun_tokens.scala fact.fun
+//
+//     scala fun_tokens.scala defs.fun
+//
+// this will generate a .tks file that can be deserialised back
+// into a list of tokens;
+// you can add -Xno-patmat-analysis to suppress the
+// match-not-exhaustive warning
 
 object Fun_Tokens {
 
@@ -200,7 +211,7 @@
 val COMMENT = ("/*" ~ ALL2.% ~ "*/") | ("//" ~ ALL.% ~ "\n")
 
 
-val WHILE_REGS = (("k" $ KEYWORD) | 
+val FUN_REGS = (("k" $ KEYWORD) | 
                   ("i" $ ID) | 
                   ("o" $ OP) | 
                   ("n" $ NUM) | 
@@ -239,19 +250,20 @@
 
 
 def tokenise(s: String) : List[Token] = 
-  lexing_simp(WHILE_REGS, s).collect(token)
+  lexing_simp(FUN_REGS, s).collect(token)
 
 def serialise[T](fname: String, data: T) = {
-  val out = new ObjectOutputStream(new FileOutputStream(fname))
-  out.writeObject(data)
-  out.close
+  import scala.util.Using
+  Using(new ObjectOutputStream(new FileOutputStream(fname))) {
+    out => out.writeObject(data)
+  }
 }
 
-def main(args: Array[String]) = {
+def main(args: Array[String]) : Unit = {
   val fname = args(0)
+  val tname = fname.stripSuffix(".fun") ++ ".tks"
   val file = io.Source.fromFile(fname).mkString
-  val tks = fname.stripSuffix(".fun") ++ ".tks"
-  serialise(tks, tokenise(file))
+  serialise(tname, tokenise(file))
 }
 
 
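Both tokenisers build on collect with a PartialFunction: lexer output pairs whose tag has no matching case are silently dropped rather than raising a MatchError. A toy sketch of the idiom; the token classes and pairs below are invented for illustration:

sealed abstract class Token
case class T_ID(s: String) extends Token
case class T_NUM(n: Int) extends Token

val token: PartialFunction[(String, String), Token] = {
  case ("i", s) => T_ID(s)
  case ("n", s) => T_NUM(s.toInt)
}

// the whitespace pair tagged "w" vanishes under collect
val pairs = List(("i", "x"), ("w", " "), ("n", "42"))
val toks  = pairs.collect(token)   // List(T_ID(x), T_NUM(42))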
--- a/progs/tokenise.scala	Wed Oct 02 02:09:48 2019 +0100
+++ b/progs/tokenise.scala	Wed Oct 02 14:05:36 2019 +0100
@@ -6,6 +6,11 @@
 //     scala tokenise.scala fib.while
 //
 //     scala tokenise.scala loops.while
+//
+// this will generate a .tks file that can be deserialised back
+// into a list of tokens;
+// you can add -Xno-patmat-analysis to suppress the
+// match-not-exhaustive warning
 
 object Tokenise {
 
@@ -188,8 +193,10 @@
   env(lex_simp(r, s.toList))
 
 
-// The Lexing Rules for the Fun Language
+// The Lexing Rules for the WHILE Language
 
+// inefficient representations for some extended regular
+// expressions
 def PLUS(r: Rexp) = r ~ r.%
 
 def Range(s : List[Char]) : Rexp = s match {
@@ -223,8 +230,8 @@
 
 
 
-// Generating tokens for the WHILE language
-// and serialising them into a .tks file
+// Generate tokens for the WHILE language
+// and serialise them into a .tks file
 
 import java.io._
 
@@ -238,6 +245,7 @@
 case class T_KWD(s: String) extends Token
 case class T_STR(s: String) extends Token
 
+// transforms (tag, lexeme) pairs into tokens
 val token : PartialFunction[(String, String), Token] = {
   case ("s", _) => T_SEMI
   case ("p", "{") => T_LPAREN
@@ -249,21 +257,23 @@
   case ("str", s) => T_STR(s)
 }
 
+// filters out all uninteresting tokens
 def tokenise(s: String) : List[Token] = 
   lexing_simp(WHILE_REGS, s).collect(token)
 
 
 def serialise[T](fname: String, data: T) = {
-  val out = new ObjectOutputStream(new FileOutputStream(fname))
-  out.writeObject(data)
-  out.close
+  import scala.util.Using
+  Using(new ObjectOutputStream(new FileOutputStream(fname))) {
+    out => out.writeObject(data)
+  }
 }
 
-def main(args: Array[String]) = {
+def main(args: Array[String]) : Unit = {
   val fname = args(0)
+  val tname = fname.stripSuffix(".while") ++ ".tks"
   val file = io.Source.fromFile(fname).mkString
-  val tks = fname.stripSuffix(".while") ++ ".tks"
-  serialise(tks, tokenise(file))
+  serialise(tname, tokenise(file))
 }
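One loose end the commit keeps: io.Source.fromFile(fname).mkString in main never closes the underlying reader. The same Using discipline would cover reading too; a sketch, assuming Scala 2.13+ where Source is Closeable and Using.resource rethrows instead of returning Try:

import scala.util.Using

def slurp(fname: String): String =
  Using.resource(io.Source.fromFile(fname))(_.mkString)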