changeset:   164:6c1d214c39ef
parent:      163:89d6d89d9844
child:       165:66b699c80479
author:      Christian Urban <christian dot urban at kcl dot ac dot uk>
date:        Sun, 27 Oct 2013 14:17:55 +0000
summary:     added progs
files:       progs/token.scala progs/token2.scala
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/progs/token.scala	Sun Oct 27 14:17:55 2013 +0000
@@ -0,0 +1,147 @@
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+import scala.annotation.tailrec
+
+sealed abstract class Rexp
+
+case object NULL extends Rexp
+case object EMPTY extends Rexp
+case class CHAR(c: Char) extends Rexp
+case class ALT(r1: Rexp, r2: Rexp) extends Rexp
+case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
+case class STAR(r: Rexp) extends Rexp
+
+def charlist2rexp(s : List[Char]) : Rexp = s match {
+  case Nil => EMPTY
+  case c::Nil => CHAR(c)
+  case c::s => SEQ(CHAR(c), charlist2rexp(s))
+}
+implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)
+
+
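+// some convenience for typing in regular expressions: the operators
+// |, ~ and % can be used directly on Rexps and on strings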
+implicit def RexpOps(r: Rexp) = new {
+  def | (s: Rexp) = ALT(r, s)
+  def % = STAR(r)
+  def ~ (s: Rexp) = SEQ(r, s)
+}
+
+implicit def stringOps(s: String) = new {
+  def | (r: Rexp) = ALT(s, r)
+  def | (r: String) = ALT(s, r)
+  def % = STAR(s)
+  def ~ (r: Rexp) = SEQ(s, r)
+  def ~ (r: String) = SEQ(s, r)
+}
+
+def Range(s : List[Char]) : Rexp = s match {
+  case Nil => NULL
+  case c::Nil => CHAR(c)
+  case c::s => ALT(CHAR(c), Range(s))
+}
+def RANGE(s: String) = Range(s.toList)
+
+def PLUS(r: Rexp) = SEQ(r, STAR(r))
+
+val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
+val DIGIT = RANGE("0123456789")
+val ID = SYM ~ (SYM | DIGIT).% 
+val NUM = PLUS(DIGIT)
+val KEYWORD : Rexp = "skip" | "while" | "do" | "if" | "then" | "else" | "read" | "write" 
+val SEMI: Rexp = ";"
+val OP: Rexp = ":=" | "=" | "-" | "+" | "*" | "!=" | "<" | ">"
+val WHITESPACE = PLUS(RANGE(" \n"))
+val RPAREN: Rexp = ")"
+val LPAREN: Rexp = "("
+val BEGIN: Rexp = "{"
+val END: Rexp = "}"
+
+// the regular expressions for the tokens, ranked by priority
+// (their position in the list)
+val regs: List[Rexp] = 
+  List(KEYWORD, ID, OP, NUM, SEMI, LPAREN, RPAREN, BEGIN, END, WHITESPACE)
+
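+// tests whether a regular expression can match the empty string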
+def nullable (r: Rexp) : Boolean = r match {
+  case NULL => false
+  case EMPTY => true
+  case CHAR(_) => false
+  case ALT(r1, r2) => nullable(r1) || nullable(r2)
+  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
+  case STAR(_) => true
+}
+
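+// tests whether a regular expression cannot match any string at all;
+// munch uses this to discard rules that can never match again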
+def zeroable (r: Rexp) : Boolean = r match {
+  case NULL => true
+  case EMPTY => false
+  case CHAR(_) => false
+  case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
+  case SEQ(r1, r2) => zeroable(r1) || zeroable(r2)
+  case STAR(_) => false
+}
+
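+// the derivative of a regular expression w.r.t. a character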
+def der (c: Char, r: Rexp) : Rexp = r match {
+  case NULL => NULL
+  case EMPTY => NULL  
+  case CHAR(d) => if (c == d) EMPTY else NULL
+  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
+  case SEQ(r1, r2) => 
+    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
+    else SEQ(der(c, r1), r2)
+  case STAR(r) => SEQ(der(c, r), STAR(r))
+}
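+
+// a small sketch, not in the original file: whole-string matching by
+// iterated derivatives, showing how der and nullable fit together
+// (the name `matches` is our own addition)
+def matches(r: Rexp, s: String) : Boolean =
+  nullable(s.toList.foldLeft(r)((r1, c) => der(c, r1)))
+
+// e.g. matches(NUM, "42") and matches(KEYWORD, "while") give true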
+
+// returns the position (number of characters munched) of the
+// last Some-element in the stack; 0 if there is none
+def last(stack: List[Option[Rexp]]) : Int = stack match {
+  case Nil => 0
+  case None::stack => last(stack)
+  case Some(r)::stack => 1 + stack.length
+}
+
+// maximal munch: calculates derivatives until all of them are
+// zeroable and returns the length of the longest match
+@tailrec
+def munch(cs: List[Char], rs: List[Rexp], stack: List[Option[Rexp]]) : Int = (cs, rs) match {
+  case (_, Nil) => last(stack)
+  case (Nil, _) => last(stack)
+  case (c::cs, rs) => {
+    val ds = rs.map(der(c, _))
+    val rs_nzero = ds.filterNot(zeroable(_))
+    val rs_nulls = ds.filter(nullable(_))
+    val opt = rs_nulls.headOption
+    munch(cs, rs_nzero, opt::stack)
+  }
+}
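+
+// for example (our own illustration): munch("if2".toList, regs, Nil)
+// returns 3, since the longest match is the identifier "if2" rather
+// than the keyword "if"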
+
+// iterates the munching function and prints 
+// out the component strings
+@tailrec
+def tokenize(s: String, rs: List[Rexp]) : Unit = munch(s.toList, rs, Nil) match {
+  case 0 if (s == "") => println("EOF")
+  case 0 => println(s"Lexing error: $s")
+  case n => {
+    val (head, tail) = s.splitAt(n)
+    print(s"|${head.replaceAll("\n","Ret")}|")
+    tokenize(tail, rs)
+  }
+}
+
+val test_prog = """
+start := XXX;
+x := start;
+y := start;
+z := start;
+while 0 < x do {
+ while 0 < y do {
+  while 0 < z do {
+    z := z - 1
+  };
+  z := start;
+  y := y - 1
+ };     
+ y := start;
+ x := x - 1
+};
+write x;
+write y;
+write z
+"""
+
+tokenize(test_prog, regs)
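+
+// running this script prints the components of test_prog between
+// |...| bars, with newlines rendered as "Ret", followed by "EOF"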
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/progs/token2.scala	Sun Oct 27 14:17:55 2013 +0000
@@ -0,0 +1,172 @@
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+import scala.annotation.tailrec
+
+sealed abstract class Rexp
+
+case object NULL extends Rexp
+case object EMPTY extends Rexp
+case class CHAR(c: Char) extends Rexp
+case class ALT(r1: Rexp, r2: Rexp) extends Rexp
+case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
+case class STAR(r: Rexp) extends Rexp
+
+def charlist2rexp(s : List[Char]) : Rexp = s match {
+  case Nil => EMPTY
+  case c::Nil => CHAR(c)
+  case c::s => SEQ(CHAR(c), charlist2rexp(s))
+}
+implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)
+
+
+implicit def RexpOps(r: Rexp) = new {
+  def | (s: Rexp) = ALT(r, s)
+  def % = STAR(r)
+  def ~ (s: Rexp) = SEQ(r, s)
+}
+
+implicit def stringOps(s: String) = new {
+  def | (r: Rexp) = ALT(s, r)
+  def | (r: String) = ALT(s, r)
+  def % = STAR(s)
+  def ~ (r: Rexp) = SEQ(s, r)
+  def ~ (r: String) = SEQ(s, r)
+}
+
+def Range(s : List[Char]) : Rexp = s match {
+  case Nil => NULL
+  case c::Nil => CHAR(c)
+  case c::s => ALT(CHAR(c), Range(s))
+}
+def RANGE(s: String) = Range(s.toList)
+
+def PLUS(r: Rexp) = SEQ(r, STAR(r))
+
+val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
+val DIGIT = RANGE("0123456789")
+val ID = SYM ~ (SYM | DIGIT).% 
+val NUM = PLUS(DIGIT)
+val KEYWORD : Rexp = "skip" | "while" | "do" | "if" | "then" | "else" | "read" | "write" 
+val SEMI: Rexp = ";"
+val OP: Rexp = ":=" | "=" | "-" | "+" | "*" | "!=" | "<" | ">"
+val WHITESPACE = PLUS(RANGE(" \n"))
+val RPAREN: Rexp = ")"
+val LPAREN: Rexp = "("
+val BEGIN: Rexp = "{"
+val END: Rexp = "}"
+
+abstract class Token
+case object T_WHITESPACE extends Token
+case object T_SEMI extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case object T_BEGIN extends Token
+case object T_END extends Token
+case class T_ID(s: String) extends Token
+case class T_OP(s: String) extends Token
+case class T_NUM(s: String) extends Token
+case class T_KWD(s: String) extends Token
+case class T_ERR(s: String) extends Token // special error token
+
+type TokenFun = String => Token
+type LexRules = List[(Rexp, TokenFun)]
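+
+// the rules are ordered by priority: when two rules match the same
+// longest prefix, the one earlier in the list wins (e.g. KEYWORD
+// beats ID, so "while" is lexed as T_KWD("while"), not T_ID("while"))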
+val lexing_rules: LexRules = 
+  List((KEYWORD, (s) => T_KWD(s)),
+       (ID, (s) => T_ID(s)),
+       (OP, (s) => T_OP(s)),
+       (NUM, (s) => T_NUM(s)),
+       (SEMI, (s) => T_SEMI),
+       (LPAREN, (s) => T_LPAREN),
+       (RPAREN, (s) => T_RPAREN),
+       (BEGIN, (s) => T_BEGIN),
+       (END, (s) => T_END),
+       (WHITESPACE, (s) => T_WHITESPACE))
+
+
+def nullable (r: Rexp) : Boolean = r match {
+  case NULL => false
+  case EMPTY => true
+  case CHAR(_) => false
+  case ALT(r1, r2) => nullable(r1) || nullable(r2)
+  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
+  case STAR(_) => true
+}
+
+def zeroable (r: Rexp) : Boolean = r match {
+  case NULL => true
+  case EMPTY => false
+  case CHAR(_) => false
+  case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
+  case SEQ(r1, r2) => zeroable(r1) || zeroable(r2)
+  case STAR(_) => false
+}
+
+def der (c: Char, r: Rexp) : Rexp = r match {
+  case NULL => NULL
+  case EMPTY => NULL  
+  case CHAR(d) => if (c == d) EMPTY else NULL
+  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
+  case SEQ(r1, r2) => 
+    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
+    else SEQ(der(c, r1), r2)
+  case STAR(r) => SEQ(der(c, r), STAR(r))
+}
+
+// returns the position (number of characters munched) and the token
+// function of the last Some-element in the stack; None if there is none
+def last(stack: List[Option[TokenFun]]) : Option[(Int, TokenFun)] = stack match {
+  case Nil => None
+  case None::stack => last(stack)
+  case Some(tf)::stack => Some(1 + stack.length, tf)
+}
+
+// maximal munch: calculates derivatives until all of them are
+// zeroable and returns the longest match with its token function
+@tailrec
+def munch(cs: List[Char], 
+          rs: LexRules, 
+          stack: List[Option[TokenFun]]) : Option[(Int, TokenFun)] = (cs, rs) match {
+  case (_, Nil) => last(stack)
+  case (Nil, _) => last(stack)
+  case (c::cs, rs) => {
+    val ds = rs.map({case (r, tf) => (der(c, r), tf)})
+    val rs_nzero = ds.filterNot({case (r, _) => zeroable(r)})
+    val rs_nulls = ds.filter({case (r, _) => nullable(r)})
+    val opt = rs_nulls.headOption.map(_._2)
+    munch(cs, rs_nzero, opt::stack)
+  }
+}
+
+// iterates the munching function and returns a Token list
+def tokenize(s: String, rs: LexRules) : List[Token] = munch(s.toList, rs, Nil) match {
+  case None if (s == "") => Nil
+  case None => List(T_ERR(s"Lexing error: $s"))
+  case Some((n, tf)) => {
+    val (head, tail) = s.splitAt(n)
+    tf(head)::tokenize(tail, rs)
+  }
+}
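+
+// for example (our own illustration):
+//   tokenize("read n;", lexing_rules)
+// gives List(T_KWD("read"), T_WHITESPACE, T_ID("n"), T_SEMI)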
+
+val test_prog = """
+start := XXX;
+x := start;
+y := start;
+z := start;
+while 0 < x do {
+ while 0 < y do {
+  while 0 < z do {
+    z := z - 1
+  };
+  z := start;
+  y := y - 1
+ };     
+ y := start;
+ x := x - 1
+};
+write x;
+write y;
+write z
+"""
+
+println(tokenize(test_prog, lexing_rules).mkString("\n"))
+