--- a/regexp2.scala Wed Oct 10 14:08:49 2012 +0100
+++ b/regexp2.scala Thu Oct 11 10:58:18 2012 +0100
@@ -1,4 +1,5 @@
-// regular expressions
+
+// regular expressions including NOT
abstract class Rexp
case object NULL extends Rexp
@@ -32,15 +33,15 @@
}
// tests whether a regular expression
-// recognises nothing
-def zeroable (r: Rexp) : Boolean = r match {
+// cannot recognise any more strings
+def no_more (r: Rexp) : Boolean = r match {
case NULL => true
case EMPTY => false
case CHAR(_) => false
- case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
- case SEQ(r1, r2) => if (nullable(r1)) (zeroable(r1) && zeroable(r2)) else zeroable(r1)
+ case ALT(r1, r2) => no_more(r1) && no_more(r2)
+ case SEQ(r1, r2) => if (nullable(r1)) (no_more(r1) && no_more(r2)) else no_more(r1)
case STAR(_) => false
- case NOT(r) => !(zeroable(r))
+ case NOT(r) => !(no_more(r))
}
@@ -57,15 +58,6 @@
case NOT(r) => NOT(der (c, r))
}
-// derivative w.r.t. a string (iterates der)
-def ders (s: List[Char], r: Rexp) : Rexp = s match {
- case Nil => r
- case c::s => ders(s, der(c, r))
-}
-
-// main matcher function
-def matcher(r: Rexp, s: String) : Boolean = nullable(ders(s.toList, r))
-
// regular expression for specifying
// ranges of characters
@@ -83,66 +75,49 @@
val LOWERCASE = RANGE("abcdefghijklmnopqrstuvwxyz".toList)
val UPPERCASE = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZ".toList)
val LETTER = ALT(LOWERCASE, UPPERCASE)
-val DIGITS = RANGE("0123456789".toList)
-val NONZERODIGITS = RANGE("123456789".toList)
+val DIGIT = RANGE("0123456789".toList)
+val NONZERODIGIT = RANGE("123456789".toList)
-val IDENT = SEQ(LETTER, STAR(ALT(LETTER,DIGITS)))
-val NUMBER = ALT(SEQ(NONZERODIGITS, STAR(DIGITS)), "0")
+val IDENT = SEQ(LETTER, STAR(ALT(LETTER,DIGIT)))
+val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")
val WHITESPACE = RANGE(" \n".toList)
val WHITESPACES = PLUS(WHITESPACE)
-val ALL = ALT(ALT(LETTER, DIGITS), WHITESPACE)
-
+val ALL = ALT(ALT(LETTER, DIGIT), WHITESPACE)
val COMMENT = SEQ(SEQ("/*", NOT(SEQ(SEQ(STAR(ALL), "*/"), STAR(ALL)))), "*/")
-println(matcher(NUMBER, "0"))
-println(matcher(NUMBER, "01"))
-println(matcher(NUMBER, "123450"))
-
-println(matcher(SEQ(STAR("a"), STAR("b")), "bbaaa"))
-println(matcher(ALT(STAR("a"), STAR("b")), ""))
-println(matcher("abc", ""))
-println(matcher(STAR(ALT(EMPTY, "a")), ""))
-println(matcher(STAR(EMPTY), "a"))
-println(matcher("cab","cab"))
-println(matcher(STAR("a"),"aaa"))
-println(matcher("cab" ,"cab"))
-println(matcher(STAR("a"),"aaa"))
-
-println(matcher(COMMENT, "/* */"))
-println(matcher(COMMENT, "/* foobar comment */"))
-println(matcher(COMMENT, "/* test */ test */"))
// an example list of regular expressions
-val regs: List[Rexp]= List("if", "then", "else", "+", IDENT, NUMBER, WHITESPACES)
+val regs: List[Rexp]= List("if", "then", "else", "+", IDENT, NUMBER, WHITESPACES, COMMENT)
+
def error (s: String) = throw new IllegalArgumentException ("Could not lex " + s)
-def munch(r: Rexp, s: List[Char], t: List[Char]) : Option[(List[Char], List[Char])] = s match {
- case Nil if (nullable(r)) => Some(Nil, t)
- case Nil => None
- case c::s if (zeroable(der (c, r)) && nullable(r)) => Some(c::s, t)
- case c::s if (zeroable(der (c, r))) => None
- case c::s => munch(der (c, r), s, t ::: List(c))
-}
+def munch(r: Rexp, s: List[Char], t: List[Char]) : Option[(List[Char], List[Char])] =
+ s match {
+ case Nil if (nullable(r)) => Some(Nil, t)
+ case Nil => None
+ case c::s if (no_more(der (c, r)) && nullable(r)) => Some(c::s, t)
+ case c::s if (no_more(der (c, r))) => None
+ case c::s => munch(der (c, r), s, t ::: List(c))
+ }
-def lex_one (regs: List[Rexp], s: List[Char]) : (List[Char], List[Char]) = {
+def one_string (regs: List[Rexp], s: List[Char]) : (List[Char], List[Char]) = {
val somes = regs.map { munch(_, s, Nil) } .flatten
if (somes == Nil) error(s.mkString) else (somes sortBy (_._1.length) head)
}
-def lex_all (regs: List[Rexp], s: List[Char]) : List[String] = s match {
+def tokenize (regs: List[Rexp], s: List[Char]) : List[String] = s match {
case Nil => Nil
- case _ => lex_one(regs, s) match {
- case (rest, s) => s.mkString :: lex_all(regs, rest)
+ case _ => one_string(regs, s) match {
+ case (rest, s) => s.mkString :: tokenize(regs, rest)
}
}
-val regs: List[Rexp]= List("if", "then", "else", "+", IDENT, NUMBER, WHITESPACES)
-
-println(lex_all(regs, "if true then 42 else +".toList))
-println(lex_all(regs, "ifff if 34 34".toList))
-println(lex_all(regs, "ifff +if+ 34 34".toList))
-println(lex_all(regs, "1+x+3+4+foo".toList))
-println(lex_all(regs, "ifff $ if 34".toList))
-
+// examples of tokenization
+println(tokenize(regs, "if true then then 42 else +".toList))
+println(tokenize(regs, "if+true+then+then+42+else +".toList))
+println(tokenize(regs, "ifff if 34 34".toList))
+println(tokenize(regs, "/*ifff if */ hhjj /*34 */".toList))
+println(tokenize(regs, "/* if true then */ then 42 else +".toList))
+//println(tokenize(regs, "ifff $ if 34".toList)) // causes an error: no regular expression in regs matches the symbol $