parser.scala
changeset 61 a80f0cf17f91
parent 54 485f38b530ab

:load matcher.scala
// regular expressions including NOT
abstract class Rexp

case object NULL extends Rexp
case object EMPTY extends Rexp
case class CHAR(c: Char) extends Rexp
case class ALT(r1: Rexp, r2: Rexp) extends Rexp
case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
case class STAR(r: Rexp) extends Rexp
case class NOT(r: Rexp) extends Rexp

// some convenience for typing in regular expressions
def charlist2rexp(s: List[Char]) : Rexp = s match {
  case Nil => EMPTY
  case c::Nil => CHAR(c)
  case c::s => SEQ(CHAR(c), charlist2rexp(s))
}
implicit def string2rexp(s: String) : Rexp = charlist2rexp(s.toList)
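// for example, the string "ab" is implicitly converted to
// SEQ(CHAR('a'), CHAR('b')), and "" to EMPTY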
       
// nullable function: tests whether the regular
// expression can recognise the empty string
def nullable(r: Rexp) : Boolean = r match {
  case NULL => false
  case EMPTY => true
  case CHAR(_) => false
  case ALT(r1, r2) => nullable(r1) || nullable(r2)
  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
  case STAR(_) => true
  case NOT(r) => !(nullable(r))
}
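// sanity checks, each following directly from the clauses above:
// nullable(EMPTY) == true, nullable("ab") == false, nullable(STAR("ab")) == true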
       
// tests whether a regular expression
// cannot recognise more
def no_more(r: Rexp) : Boolean = r match {
  case NULL => true
  case EMPTY => false
  case CHAR(_) => false
  case ALT(r1, r2) => no_more(r1) && no_more(r2)
  case SEQ(r1, r2) => if (nullable(r1)) (no_more(r1) && no_more(r2)) else no_more(r1)
  case STAR(_) => false
  case NOT(r) => !(no_more(r))
}
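// e.g. no_more(der('b', "ab")) == true: once 'b' has been read first,
// no continuation of the input can make "ab" match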
       
// derivative of a regular expression w.r.t. a character
def der(c: Char, r: Rexp) : Rexp = r match {
  case NULL => NULL
  case EMPTY => NULL
  case CHAR(d) => if (c == d) EMPTY else NULL
  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
  case SEQ(r1, r2) =>
    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
    else SEQ(der(c, r1), r2)
  case STAR(r) => SEQ(der(c, r), STAR(r))
  case NOT(r) => NOT(der(c, r))
}
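
// matcher.scala (loaded above) presumably provides a matcher built on
// der/nullable; a minimal sketch of that standard construction, assuming
// nothing beyond the definitions above:
def matches(r: Rexp, s: String) : Boolean =
  nullable(s.toList.foldLeft(r)((r, c) => der(c, r)))

// matches("ab", "ab") == true, matches(NOT("ab"), "aa") == true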
       
// regular expression for specifying
// ranges of characters
def RANGE(s: List[Char]) : Rexp = s match {
  case Nil => NULL
  case c::Nil => CHAR(c)
  case c::s => ALT(CHAR(c), RANGE(s))
}

// one or more
def PLUS(r: Rexp) = SEQ(r, STAR(r))
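// e.g. RANGE("+-*".toList) == ALT(CHAR('+'), ALT(CHAR('-'), CHAR('*'))),
// and PLUS(CHAR('a')) matches "a", "aa", "aaa", ...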
       
// some regular expressions
val DIGIT = RANGE("0123456789".toList)
val NONZERODIGIT = RANGE("123456789".toList)

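// the hunk does not show the definitions of NUMBER and LPAREN, which are
// used below; plausible reconstructions (hedged: NUMBER is assumed to be a
// nonzero digit followed by digits, or a single "0"; LPAREN mirrors RPAREN):
val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")
val LPAREN = CHAR('(')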
val RPAREN = CHAR(')')
val WHITESPACE = PLUS(RANGE(" \n".toList))
val OPS = RANGE("+-*".toList)

// for classifying the strings that have been recognised
abstract class Token

case object T_WHITESPACE extends Token
case object T_NUM extends Token
case class T_OP(s: String) extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case class NT(s: String) extends Token

type Rule = (Rexp, List[Char] => Token)

def error(s: String) = throw new IllegalArgumentException("Cannot tokenize: " + s)

// maximal munch: keeps consuming characters as long as the regular
// expression can still match more input; returns the remaining input
// together with the token for the consumed prefix
def munch(r: Rexp, action: List[Char] => Token, s: List[Char], t: List[Char]) : Option[(List[Char], Token)] =
  s match {
    case Nil if (nullable(r)) => Some(Nil, action(t))
    case Nil => None
    case c::s if (no_more(der(c, r)) && nullable(r)) => Some(c::s, action(t))
    case c::s if (no_more(der(c, r))) => None
    case c::s => munch(der(c, r), action, s, t ::: List(c))
  }
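
// e.g., with the NUMBER reconstruction above,
// munch(NUMBER, (s) => T_NUM, "23+4".toList, Nil)
// should evaluate to Some(("+4".toList, T_NUM))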
       
// tries each lexing rule; among the successful munches picks the one
// that leaves the shortest rest, i.e. the longest match wins
def one_token(rs: List[Rule], s: List[Char]) : (List[Char], Token) = {
  val somes = rs.map { (r) => munch(r._1, r._2, s, Nil) }.flatten
  if (somes == Nil) error(s.mkString) else somes.sortBy(_._1.length).head
}
       
def tokenize(rs: List[Rule], s: List[Char]) : List[Token] = s match {
  case Nil => Nil
  case _ => one_token(rs, s) match {
    case (rest, token) => token :: tokenize(rs, rest)
  }
}

// the main tokenizer: tokenizes the string and drops whitespace tokens
def tokenizer(rs: List[Rule], s: String) : List[Token] =
  tokenize(rs, s.toList).filterNot(_ match {
    case T_WHITESPACE => true
    case _ => false
  })

// lexing rules for arithmetic expressions
val lexing_rules: List[Rule] =
  List((NUMBER, (s) => T_NUM),
       (WHITESPACE, (s) => T_WHITESPACE),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (OPS, (s) => T_OP(s.mkString)))

// examples
println(tokenizer(lexing_rules, "2 + 3 * 4 + 1"))
println(tokenizer(lexing_rules, "(2 + 3) * (4 + 1)"))
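
// the first call should print (assuming the NUMBER reconstruction above):
// List(T_NUM, T_OP(+), T_NUM, T_OP(*), T_NUM, T_OP(+), T_NUM)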
       
type Grammar = List[(String, List[Token])]

// grammar for arithmetic expressions
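// (the hunk omits the grammar value, the list-rewriting helper `replace`,
// and the head of `parse`; from the call in `parser` below the head is
// presumably something like
//   def parse(g: Grammar, ts: List[Token]) : Boolean = ts match {
//     case List(NT("E")) => true   // assumed start symbol
//     case _ => {
// only the tail of `parse` survives:)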
    val tss = for ((lhs, rhs) <- g) yield replace(ts, rhs, List(NT(lhs)))
    tss.flatten.exists(parse(g, _))
  }
}

def parser(g: Grammar, rs: List[Rule], s: String) = {
  println("\n")
  parse(g, tokenizer(rs, s))
}
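
// usage, assuming the elided grammar value is in scope under the
// hypothetical name `grammar`:
// parser(grammar, lexing_rules, "2 + 3 * 4 + 1")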