scala/regexp3.scala
changeset 93 4794759139ea
parent 92 e85600529ca5
child 94 9ea667baf097
equal deleted inserted replaced
92:e85600529ca5 93:4794759139ea
     1 
       
     2 // regular expressions including NOT
       
     3 abstract class Rexp
       
     4 
       
     5 case object NULL extends Rexp
       
     6 case object EMPTY extends Rexp
       
     7 case class CHAR(c: Char) extends Rexp
       
     8 case class ALT(r1: Rexp, r2: Rexp) extends Rexp
       
     9 case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
       
    10 case class STAR(r: Rexp) extends Rexp
       
    11 case class NOT(r: Rexp) extends Rexp
       
    12 
       
    13 
       
    14 // some convenience for typing in regular expressions
       
    15 def charlist2rexp(s : List[Char]) : Rexp = s match {
       
    16   case Nil => EMPTY
       
    17   case c::Nil => CHAR(c)
       
    18   case c::s => SEQ(CHAR(c), charlist2rexp(s))
       
    19 }
       
    20 implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)
       
    21 
       
    22 
       
    23 // nullable function: tests whether the regular 
       
    24 // expression can recognise the empty string
       
    25 def nullable (r: Rexp) : Boolean = r match {
       
    26   case NULL => false
       
    27   case EMPTY => true
       
    28   case CHAR(_) => false
       
    29   case ALT(r1, r2) => nullable(r1) || nullable(r2)
       
    30   case SEQ(r1, r2) => nullable(r1) && nullable(r2)
       
    31   case STAR(_) => true
       
    32   case NOT(r) => !(nullable(r))
       
    33 }
       
    34 
       
    35 // tests whether a regular expression 
       
    36 // cannot recognise more
       
    37 def no_more (r: Rexp) : Boolean = r match {
       
    38   case NULL => true
       
    39   case EMPTY => false
       
    40   case CHAR(_) => false
       
    41   case ALT(r1, r2) => no_more(r1) && no_more(r2)
       
    42   case SEQ(r1, r2) => if (nullable(r1)) (no_more(r1) && no_more(r2)) else no_more(r1)
       
    43   case STAR(_) => false
       
    44   case NOT(r) => !(no_more(r))
       
    45 }
       
    46 
       
    47 
       
    48 // derivative of a regular expression w.r.t. a character
       
    49 def der (c: Char, r: Rexp) : Rexp = r match {
       
    50   case NULL => NULL
       
    51   case EMPTY => NULL  case CHAR(d) => if (c == d) EMPTY else NULL
       
    52   case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
       
    53   case SEQ(r1, r2) => 
       
    54     if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
       
    55     else SEQ(der(c, r1), r2)
       
    56   case STAR(r) => SEQ(der(c, r), STAR(r))
       
    57   case NOT(r) => NOT(der (c, r))
       
    58 }
       
    59 
       
    60 // regular expression for specifying 
       
    61 // ranges of characters
       
    62 def RANGE(s : List[Char]) : Rexp = s match {
       
    63   case Nil => NULL
       
    64   case c::Nil => CHAR(c)
       
    65   case c::s => ALT(CHAR(c), RANGE(s))
       
    66 }
       
    67 
       
    68 // one or more
       
    69 def PLUS(r: Rexp) = SEQ(r, STAR(r))
       
    70 
       
// some regular expressions
val LOWERCASE = RANGE("abcdefghijklmnopqrstuvwxyz".toList)  // one lower-case letter
val UPPERCASE = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZ".toList)  // one upper-case letter
val LETTER = ALT(LOWERCASE, UPPERCASE)                      // any single letter
val DIGIT = RANGE("0123456789".toList)                      // 0-9
val NONZERODIGIT = RANGE("123456789".toList)                // 1-9 (for numbers without a leading zero)

// identifiers: a letter followed by any mix of letters and digits
val IDENT = SEQ(LETTER, STAR(ALT(LETTER,DIGIT)))
// numbers: either "0" alone, or a non-zero digit followed by digits
val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")
// whitespace: a blank or a newline; WHITESPACES is one or more of them
val WHITESPACE = RANGE(" \n".toList)
val WHITESPACES = PLUS(WHITESPACE)

// ALL: the characters allowed inside a comment body
// (letters, digits, whitespace — notably neither '*' nor '/')
val ALL = ALT(ALT(LETTER, DIGIT), WHITESPACE)
// comments: "/*" then NOT(ALL* "*/" ALL*) then "*/", i.e. the body
// must not itself be of the form <something> "*/" <something> over ALL
val COMMENT = SEQ(SEQ("/*", NOT(SEQ(SEQ(STAR(ALL), "*/"), STAR(ALL)))), "*/")
       
    85 
       
    86 
       
    87 // for classifying the strings that have been recognised
       
    88 abstract class Token
       
    89 
       
    90 case object T_WHITESPACE extends Token
       
    91 case object T_COMMENT extends Token
       
    92 case class T_IDENT(s: String) extends Token
       
    93 case class T_OP(s: String) extends Token
       
    94 case class T_NUM(n: Int) extends Token
       
    95 case class T_KEYWORD(s: String) extends Token
       
    96 
       
    97 
       
    98 // an example list of syntactic rules
       
    99 type Rule = (Rexp, List[Char] => Token)
       
   100 
       
   101 val rules: List[Rule]= 
       
   102   List(("if", (s) => T_KEYWORD(s.mkString)),
       
   103        ("then", (s) => T_KEYWORD(s.mkString)),
       
   104        ("else", (s) => T_KEYWORD(s.mkString)),
       
   105        ("+", (s) => T_OP(s.mkString)),
       
   106        (IDENT, (s) => T_IDENT(s.mkString)),
       
   107        (NUMBER, (s) => T_NUM(s.mkString.toInt)),
       
   108        (WHITESPACES, (s) => T_WHITESPACE),
       
   109        (COMMENT, (s) => T_COMMENT))
       
   110 
       
   111 
       
   112 def error (s: String) = throw new IllegalArgumentException ("Cannot tokenize: " + s)
       
   113 
       
   114 def munch(r: Rexp, action: List[Char] => Token, s: List[Char], t: List[Char]) : Option[(List[Char], Token)] = 
       
   115   s match {
       
   116     case Nil if (nullable(r)) => Some(Nil, action(t))
       
   117     case Nil => None
       
   118     case c::s if (no_more(der (c, r)) && nullable(r)) => Some(c::s, action(t))
       
   119     case c::s if (no_more(der (c, r))) => None
       
   120     case c::s => munch(der (c, r), action, s, t ::: List(c))
       
   121   }
       
   122 
       
   123 def one_token (rs: List[Rule], s: List[Char]) : (List[Char], Token) = {
       
   124  val somes = rs.map { (r) => munch(r._1, r._2, s, Nil) } .flatten
       
   125  if (somes == Nil) error(s.mkString) else (somes sortBy (_._1.length) head)
       
   126 }
       
   127 
       
   128 def tokenize (rs: List[Rule], s: List[Char]) : List[Token] = s match {
       
   129   case Nil => Nil
       
   130   case _ => one_token(rs, s) match {
       
   131     case (rest, token) => token :: tokenize(rs, rest) 
       
   132   }
       
   133 }
       
   134 
       
// examples
println(tokenize(rules, "if true then then 42 else +".toList))
println(tokenize(rules, "if+true+then+then+42+else +".toList))   // '+' recognised as an operator token
println(tokenize(rules, "ifff if     34 34".toList))             // maximal munch: "ifff" is an identifier, not keyword "if"
println(tokenize(rules, "/*ifff if */ hhjj /*34 */".toList))     // each comment becomes a single T_COMMENT
println(tokenize(rules, "/* if true then */ then 42 else +".toList))
//println(tokenize(rules, "ifff $ if 34".toList)) // causes an error because of the symbol $