tuned
author      Christian Urban <urbanc@in.tum.de>
date        Thu, 11 Oct 2012 10:58:18 +0100
changeset 29 774007c4b1b3
parent 28 f63ba92a7d78
child 30 0e971bd4403d
files: regexp.scala regexp2.scala regexp3.scala
--- a/regexp.scala	Wed Oct 10 14:08:49 2012 +0100
+++ b/regexp.scala	Thu Oct 11 10:58:18 2012 +0100
@@ -9,9 +9,13 @@
 case class STAR(r: Rexp) extends Rexp
 
 // some convenience for typing in regular expressions
-implicit def string2rexp(s : String) : Rexp = {
-  s.foldRight (EMPTY: Rexp) ( (c, r) => SEQ(CHAR(c), r) )
+def charlist2rexp(s : List[Char]) : Rexp = s match {
+  case Nil => EMPTY
+  case c::Nil => CHAR(c)
+  case c::s => SEQ(CHAR(c), charlist2rexp(s))
 }
+implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)
+
 
 // for example
 println(STAR("abc"))
@@ -99,3 +103,4 @@
 println(matcher("cab" ,"cab"))
 println(matcher(STAR("a"),"aaa"))
 
+
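
The hunk above replaces the fold-based implicit conversion with charlist2rexp,
which avoids tacking a trailing EMPTY onto every non-empty string. A minimal
standalone sketch of the difference (the relevant Rexp constructors are
restated so the snippet runs on its own; old_string2rexp is just a name given
here to the replaced definition):

  abstract class Rexp
  case object EMPTY extends Rexp
  case class CHAR(c: Char) extends Rexp
  case class SEQ(r1: Rexp, r2: Rexp) extends Rexp

  // old conversion: folds from the right, always ending in EMPTY
  def old_string2rexp(s: String): Rexp =
    s.foldRight(EMPTY: Rexp)((c, r) => SEQ(CHAR(c), r))

  // new conversion: no superfluous EMPTY for non-empty strings
  def charlist2rexp(s: List[Char]): Rexp = s match {
    case Nil => EMPTY
    case c :: Nil => CHAR(c)
    case c :: cs => SEQ(CHAR(c), charlist2rexp(cs))
  }

  println(old_string2rexp("ab"))        // SEQ(CHAR(a),SEQ(CHAR(b),EMPTY))
  println(charlist2rexp("ab".toList))   // SEQ(CHAR(a),CHAR(b))
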
--- a/regexp2.scala	Wed Oct 10 14:08:49 2012 +0100
+++ b/regexp2.scala	Thu Oct 11 10:58:18 2012 +0100
@@ -1,4 +1,5 @@
-// regular expressions
+
+// regular expressions including NOT
 abstract class Rexp
 
 case object NULL extends Rexp
@@ -32,15 +33,15 @@
 }
 
 // tests whether a regular expression 
-// recognises nothing
-def zeroable (r: Rexp) : Boolean = r match {
+// cannot recognise any more characters
+def no_more (r: Rexp) : Boolean = r match {
   case NULL => true
   case EMPTY => false
   case CHAR(_) => false
-  case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
-  case SEQ(r1, r2) => if (nullable(r1)) (zeroable(r1) && zeroable(r2)) else zeroable(r1)
+  case ALT(r1, r2) => no_more(r1) && no_more(r2)
+  case SEQ(r1, r2) => if (nullable(r1)) (no_more(r1) && no_more(r2)) else no_more(r1)
   case STAR(_) => false
-  case NOT(r) => !(zeroable(r))
+  case NOT(r) => !(no_more(r))
 }
 
 
@@ -57,15 +58,6 @@
   case NOT(r) => NOT(der (c, r))
 }
 
-// derivative w.r.t. a string (iterates der)
-def ders (s: List[Char], r: Rexp) : Rexp = s match {
-  case Nil => r
-  case c::s => ders(s, der(c, r))
-}
-
-// main matcher function
-def matcher(r: Rexp, s: String) : Boolean = nullable(ders(s.toList, r))
-
 
 // regular expression for specifying 
 // ranges of characters
@@ -83,66 +75,49 @@
 val LOWERCASE = RANGE("abcdefghijklmnopqrstuvwxyz".toList)
 val UPPERCASE = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZ".toList)
 val LETTER = ALT(LOWERCASE, UPPERCASE)
-val DIGITS = RANGE("0123456789".toList)
-val NONZERODIGITS = RANGE("123456789".toList)
+val DIGIT = RANGE("0123456789".toList)
+val NONZERODIGIT = RANGE("123456789".toList)
 
-val IDENT = SEQ(LETTER, STAR(ALT(LETTER,DIGITS)))
-val NUMBER = ALT(SEQ(NONZERODIGITS, STAR(DIGITS)), "0")
+val IDENT = SEQ(LETTER, STAR(ALT(LETTER,DIGIT)))
+val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")
 val WHITESPACE = RANGE(" \n".toList)
 val WHITESPACES = PLUS(WHITESPACE)
 
-val ALL = ALT(ALT(LETTER, DIGITS), WHITESPACE)
-
+val ALL = ALT(ALT(LETTER, DIGIT), WHITESPACE)
 val COMMENT = SEQ(SEQ("/*", NOT(SEQ(SEQ(STAR(ALL), "*/"), STAR(ALL)))), "*/")
 
-println(matcher(NUMBER, "0"))
-println(matcher(NUMBER, "01"))
-println(matcher(NUMBER, "123450"))
-
-println(matcher(SEQ(STAR("a"), STAR("b")), "bbaaa"))
-println(matcher(ALT(STAR("a"), STAR("b")), ""))
-println(matcher("abc", ""))
-println(matcher(STAR(ALT(EMPTY, "a")), ""))
-println(matcher(STAR(EMPTY), "a"))
-println(matcher("cab","cab"))
-println(matcher(STAR("a"),"aaa"))
-println(matcher("cab" ,"cab"))
-println(matcher(STAR("a"),"aaa"))
-
-println(matcher(COMMENT, "/* */"))
-println(matcher(COMMENT, "/* foobar comment */"))
-println(matcher(COMMENT, "/* test */ test */"))
 
 // an example list of regular expressions
-val regs: List[Rexp]=  List("if", "then", "else", "+", IDENT, NUMBER, WHITESPACES) 
+val regs: List[Rexp] = List("if", "then", "else", "+", IDENT, NUMBER, WHITESPACES, COMMENT)
+
 
 def error (s: String) = throw new IllegalArgumentException ("Could not lex " + s)
 
-def munch(r: Rexp, s: List[Char], t: List[Char]) : Option[(List[Char], List[Char])] = s match {
-  case Nil if (nullable(r)) => Some(Nil, t)
-  case Nil => None
-  case c::s if (zeroable(der (c, r)) && nullable(r)) => Some(c::s, t)
-  case c::s if (zeroable(der (c, r))) => None
-  case c::s => munch(der (c, r), s, t ::: List(c))
-}
+def munch(r: Rexp, s: List[Char], t: List[Char]) : Option[(List[Char], List[Char])] = 
+  s match {
+    case Nil if (nullable(r)) => Some(Nil, t)
+    case Nil => None
+    case c::s if (no_more(der (c, r)) && nullable(r)) => Some(c::s, t)
+    case c::s if (no_more(der (c, r))) => None
+    case c::s => munch(der (c, r), s, t ::: List(c))
+  }
 
-def lex_one (regs: List[Rexp], s: List[Char]) : (List[Char], List[Char]) = {
+def one_string (regs: List[Rexp], s: List[Char]) : (List[Char], List[Char]) = {
  val somes = regs.map { munch(_, s, Nil) } .flatten
  if (somes == Nil) error(s.mkString) else (somes sortBy (_._1.length) head)
 }
 
-def lex_all (regs: List[Rexp], s: List[Char]) : List[String] = s match {
+def tokenize (regs: List[Rexp], s: List[Char]) : List[String] = s match {
   case Nil => Nil
-  case _ => lex_one(regs, s) match {
-    case (rest, s) => s.mkString :: lex_all(regs, rest) 
+  case _ => one_string(regs, s) match {
+    case (rest, s) => s.mkString :: tokenize(regs, rest) 
   }
 }
 
-val regs: List[Rexp]=  List("if", "then", "else", "+", IDENT, NUMBER, WHITESPACES) 
-
-println(lex_all(regs, "if true then 42 else +".toList))
-println(lex_all(regs, "ifff if     34 34".toList))
-println(lex_all(regs, "ifff +if+     34 34".toList))
-println(lex_all(regs, "1+x+3+4+foo".toList))
-println(lex_all(regs, "ifff $ if 34".toList))
-
+// examples
+println(tokenize(regs, "if true then then 42 else +".toList))
+println(tokenize(regs, "if+true+then+then+42+else +".toList))
+println(tokenize(regs, "ifff if     34 34".toList))
+println(tokenize(regs, "/*ifff if */ hhjj /*34 */".toList))
+println(tokenize(regs, "/* if true then */ then 42 else +".toList))
+//println(tokenize(regs, "ifff $ if 34".toList)) // causes an error because of the symbol $
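
With ders and matcher removed, regexp2.scala now drives everything through
munch. A self-contained sketch of how no_more implements the maximal-munch
rule (the core definitions from above are restated, without NOT and RANGE, so
the snippet runs on its own): lexing a token stops exactly when the
derivative can no longer match anything.

  abstract class Rexp
  case object NULL extends Rexp
  case object EMPTY extends Rexp
  case class CHAR(c: Char) extends Rexp
  case class ALT(r1: Rexp, r2: Rexp) extends Rexp
  case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
  case class STAR(r: Rexp) extends Rexp

  def nullable(r: Rexp): Boolean = r match {
    case NULL => false
    case EMPTY => true
    case CHAR(_) => false
    case ALT(r1, r2) => nullable(r1) || nullable(r2)
    case SEQ(r1, r2) => nullable(r1) && nullable(r2)
    case STAR(_) => true
  }

  def der(c: Char, r: Rexp): Rexp = r match {
    case NULL => NULL
    case EMPTY => NULL
    case CHAR(d) => if (c == d) EMPTY else NULL
    case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
    case SEQ(r1, r2) =>
      if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
      else SEQ(der(c, r1), r2)
    case STAR(r1) => SEQ(der(c, r1), STAR(r1))
  }

  // true if the derivative is stuck: no string can be matched any more
  def no_more(r: Rexp): Boolean = r match {
    case NULL => true
    case EMPTY => false
    case CHAR(_) => false
    case ALT(r1, r2) => no_more(r1) && no_more(r2)
    case SEQ(r1, r2) =>
      if (nullable(r1)) (no_more(r1) && no_more(r2)) else no_more(r1)
    case STAR(_) => false
  }

  // munch as above: returns the unconsumed rest and the munched prefix
  def munch(r: Rexp, s: List[Char], t: List[Char]): Option[(List[Char], List[Char])] =
    s match {
      case Nil if nullable(r) => Some((Nil, t))
      case Nil => None
      case c :: cs if no_more(der(c, r)) && nullable(r) => Some((c :: cs, t))
      case c :: cs if no_more(der(c, r)) => None
      case c :: cs => munch(der(c, r), cs, t ::: List(c))
    }

  // "aa" is munched greedily off "aab"; the 'b' is left for the next token
  println(munch(STAR(CHAR('a')), "aab".toList, Nil))
  // => Some((List(b),List(a, a)))
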
--- a/regexp3.scala	Wed Oct 10 14:08:49 2012 +0100
+++ b/regexp3.scala	Thu Oct 11 10:58:18 2012 +0100
@@ -1,4 +1,5 @@
-// regular expressions
+
+// regular expressions including NOT
 abstract class Rexp
 
 case object NULL extends Rexp
@@ -26,22 +27,21 @@
   case EMPTY => true
   case CHAR(_) => false
   case ALT(r1, r2) => nullable(r1) || nullable(r2)
-  case SEQ(r1, r2) => if (nullable(r1)) (zeroable(r1) && zeroable(r2)) else zeroable(r1)
+  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
   case STAR(_) => true
   case NOT(r) => !(nullable(r))
 }
 
 // tests whether a regular expression 
-// recognises nothing
-def zeroable (r: Rexp) : Boolean = r match {
+// cannot recognise any more characters
+def no_more (r: Rexp) : Boolean = r match {
   case NULL => true
   case EMPTY => false
   case CHAR(_) => false
-  case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
-  case SEQ(r1, r2) => if (nullable(r1)) (zeroable(r1) && zeroable(r2)) else zeroable(r1)
-  //case SEQ(r1, r2) => zeroable(r1) || zeroable(r2)
+  case ALT(r1, r2) => no_more(r1) && no_more(r2)
+  case SEQ(r1, r2) => if (nullable(r1)) (no_more(r1) && no_more(r2)) else no_more(r1)
   case STAR(_) => false
-  case NOT(r) => !(zeroable(r))
+  case NOT(r) => !(no_more(r))
 }
 
 
@@ -58,16 +58,6 @@
   case NOT(r) => NOT(der (c, r))
 }
 
-// derivative w.r.t. a string (iterates der)
-def ders (s: List[Char], r: Rexp) : Rexp = s match {
-  case Nil => r
-  case c::s => ders(s, der(c, r))
-}
-
-// main matcher function
-def matcher(r: Rexp, s: String) : Boolean = nullable(ders(s.toList, r))
-
-
 // regular expression for specifying 
 // ranges of characters
 def RANGE(s : List[Char]) : Rexp = s match {
@@ -76,71 +66,38 @@
   case c::s => ALT(CHAR(c), RANGE(s))
 }
 
-//one or more
+// one or more
 def PLUS(r: Rexp) = SEQ(r, STAR(r))
 
-
-//some regular expressions
+// some regular expressions
 val LOWERCASE = RANGE("abcdefghijklmnopqrstuvwxyz".toList)
 val UPPERCASE = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZ".toList)
 val LETTER = ALT(LOWERCASE, UPPERCASE)
-val DIGITS = RANGE("0123456789".toList)
-val NONZERODIGITS = RANGE("123456789".toList)
+val DIGIT = RANGE("0123456789".toList)
+val NONZERODIGIT = RANGE("123456789".toList)
 
-val IDENT = SEQ(LETTER, STAR(ALT(LETTER,DIGITS)))
-val NUMBER = ALT(SEQ(NONZERODIGITS, STAR(DIGITS)), "0")
+val IDENT = SEQ(LETTER, STAR(ALT(LETTER,DIGIT)))
+val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")
 val WHITESPACE = RANGE(" \n".toList)
 val WHITESPACES = PLUS(WHITESPACE)
 
-val ALL = ALT(ALT(LETTER, DIGITS), WHITESPACE)
-
+val ALL = ALT(ALT(LETTER, DIGIT), WHITESPACE)
 val COMMENT = SEQ(SEQ("/*", NOT(SEQ(SEQ(STAR(ALL), "*/"), STAR(ALL)))), "*/")
 
+
+// for classifying the strings that have been recognised
 abstract class Token
 
 case object T_WHITESPACE extends Token
+case object T_COMMENT extends Token
 case class T_IDENT(s: String) extends Token
 case class T_OP(s: String) extends Token
 case class T_NUM(n: Int) extends Token
 case class T_KEYWORD(s: String) extends Token
-case object T_COMMENT extends Token
-
-
-// an example list of rules
-type Rule = (Rexp, List[Char] => Token)
-
-val rules: List[Rule]= 
-  List(("if", (s) => T_KEYWORD(s.mkString)),
-       ("then", (s) => T_KEYWORD(s.mkString)),
-       ("else", (s) => T_KEYWORD(s.mkString)),
-       ("+", (s) => T_OP(s.mkString)),
-       (IDENT, (s) => T_IDENT(s.mkString)),
-       (NUMBER, (s) => T_NUM(s.mkString.toInt)),
-       (WHITESPACES, (s) => T_WHITESPACE))
 
 
-def error (s: String) = throw new IllegalArgumentException ("Could not lex " + s)
-
-def munch(r: Rexp, action: List[Char] => Token, s: List[Char], t: List[Char]) : Option[(List[Char], Token)] = 
-  s match {
-    case Nil if (nullable(r)) => Some(Nil, action(t))
-    case Nil => None
-    case c::s if (zeroable(der (c, r)) && nullable(r)) => Some(c::s, action(t))
-    case c::s if (zeroable(der (c, r))) => None
-    case c::s => munch(der (c, r), action, s, t ::: List(c))
-  }
-
-def lex_one (rs: List[Rule], s: List[Char]) : (List[Char], Token) = {
- val somes = rs.map { (r) => munch(r._1, r._2, s, Nil) } .flatten
- if (somes == Nil) error(s.mkString) else (somes sortBy (_._1.length) head)
-}
-
-def lex_all (rs: List[Rule], s: List[Char]) : List[Token] = s match {
-  case Nil => Nil
-  case _ => lex_one(rs, s) match {
-    case (rest, t) => t :: lex_all(rs, rest) 
-  }
-}
+// an example list of lexing rules
+type Rule = (Rexp, List[Char] => Token)
 
 val rules: List[Rule]= 
   List(("if", (s) => T_KEYWORD(s.mkString)),
@@ -152,25 +109,34 @@
        (WHITESPACES, (s) => T_WHITESPACE),
        (COMMENT, (s) => T_COMMENT))
 
-println(lex_all(rules, "/*ifff if */ hhjj /*34 */".toList))
 
+def error (s: String) = throw new IllegalArgumentException ("Cannot tokenize: " + s)
 
-munch(COMMENT, (s) => T_COMMENT , "/*ifff if */ hhjj /*34 */".toList, Nil)
-val COMMENT2 = NOT(SEQ(SEQ(STAR(ALL), "*/"), STAR(ALL)))
-
-der('/', COMMENT)
-zeroable(der('/', COMMENT))
-zeroable(der('a', COMMENT2))
+def munch(r: Rexp, action: List[Char] => Token, s: List[Char], t: List[Char]) : Option[(List[Char], Token)] = 
+  s match {
+    case Nil if (nullable(r)) => Some(Nil, action(t))
+    case Nil => None
+    case c::s if (no_more(der (c, r)) && nullable(r)) => Some(c::s, action(t))
+    case c::s if (no_more(der (c, r))) => None
+    case c::s => munch(der (c, r), action, s, t ::: List(c))
+  }
 
-matcher(COMMENT2, "ifff if 34")
-munch(COMMENT2, "ifff if 34".toList, Nil)
-starts_with(COMMENT2, 'i')
-lex_all(regs, "ifff if 34".toList)
-lex_all(regs, "ifff $ if 34".toList)
+def one_token (rs: List[Rule], s: List[Char]) : (List[Char], Token) = {
+ val somes = rs.map { (r) => munch(r._1, r._2, s, Nil) } .flatten
+ if (somes == Nil) error(s.mkString) else (somes sortBy (_._1.length) head)
+}
 
-println(lex_all(rules, "/* if true then */ then 42 else +".toList))
-println(lex_all(rules, "if true then then 42 else +".toList))
-println(lex_all(rules, "ifff if     34 34".toList))
-println(lex_all(rules, "ifff $ if 34".toList))
+def tokenize (rs: List[Rule], s: List[Char]) : List[Token] = s match {
+  case Nil => Nil
+  case _ => one_token(rs, s) match {
+    case (rest, token) => token :: tokenize(rs, rest) 
+  }
+}
 
-
+// examples
+println(tokenize(rules, "if true then then 42 else +".toList))
+println(tokenize(rules, "if+true+then+then+42+else +".toList))
+println(tokenize(rules, "ifff if     34 34".toList))
+println(tokenize(rules, "/*ifff if */ hhjj /*34 */".toList))
+println(tokenize(rules, "/* if true then */ then 42 else +".toList))
+//println(tokenize(rules, "ifff $ if 34".toList)) // causes an error because of the symbol $
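
Assuming the definitions of regexp3.scala above are in scope (e.g. the lines
below are appended to that file), a worked trace of tokenize on a small input
shows how ties between rules are resolved: sortBy is stable, so when two
rules munch prefixes of equal length, the rule listed first wins.

  // a hand trace of tokenize(rules, "if 42".toList):
  //
  //   one_token: the "if" rule and IDENT both munch "if" and leave " 42";
  //              the tie is kept in rule order, so T_KEYWORD("if") wins
  //   one_token: WHITESPACES munches " "   => T_WHITESPACE
  //   one_token: NUMBER munches "42"       => T_NUM(42)
  //
  println(tokenize(rules, "if 42".toList))
  // expected: List(T_KEYWORD(if), T_WHITESPACE, T_NUM(42))
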