// progs/token2.scala
//
// A tokenizer for the While language, based on derivatives of regular
// expressions and the maximal munch rule.
import scala.language.implicitConversions
import scala.language.reflectiveCalls
import scala.util._
import scala.annotation.tailrec

// (basic) regular expressions
sealed abstract class Rexp

case object NULL extends Rexp
case object EMPTY extends Rexp
case class CHAR(c: Char) extends Rexp
case class ALT(r1: Rexp, r2: Rexp) extends Rexp
case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
case class STAR(r: Rexp) extends Rexp

// turns a string into a sequence of CHARs
def charlist2rexp(s: List[Char]) : Rexp = s match {
  case Nil => EMPTY
  case c::Nil => CHAR(c)
  case c::s => SEQ(CHAR(c), charlist2rexp(s))
}
       
implicit def string2rexp(s: String) : Rexp = charlist2rexp(s.toList)

// operator syntax: | for ALT, ~ for SEQ, % for STAR
implicit def RexpOps(r: Rexp) = new {
  def | (s: Rexp) = ALT(r, s)
  def % = STAR(r)
  def ~ (s: Rexp) = SEQ(r, s)
}

implicit def stringOps(s: String) = new {
  def | (r: Rexp) = ALT(s, r)
  def | (r: String) = ALT(s, r)
  def % = STAR(s)
  def ~ (r: Rexp) = SEQ(s, r)
  def ~ (r: String) = SEQ(s, r)
}
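
// Illustrative only (ex_rexp is a hypothetical example, not part of the
// lexer): the implicits above let regular expressions be written infix.
val ex_rexp: Rexp = ("a" | "b") ~ "c".%
assert(ex_rexp == SEQ(ALT(CHAR('a'), CHAR('b')), STAR(CHAR('c'))))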
       
// alternative over all characters of a string
def Range(s: List[Char]) : Rexp = s match {
  case Nil => NULL
  case c::Nil => CHAR(c)
  case c::s => ALT(CHAR(c), Range(s))
}
def RANGE(s: String) = Range(s.toList)

// one or more repetitions
def PLUS(r: Rexp) = SEQ(r, STAR(r))
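
// Sanity checks (illustrative): RANGE unfolds into nested ALTs; PLUS
// requires at least one occurrence.
assert(RANGE("ab") == ALT(CHAR('a'), CHAR('b')))
assert(PLUS(CHAR('a')) == SEQ(CHAR('a'), STAR(CHAR('a'))))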
       
// the regular expressions for the While language
val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
val DIGIT = RANGE("0123456789")
val ID = SYM ~ (SYM | DIGIT).%
val NUM = PLUS(DIGIT)
val KEYWORD : Rexp = "skip" | "while" | "do" | "if" | "then" | "else" | "read" | "write"
val SEMI: Rexp = ";"
val OP: Rexp = ":=" | "=" | "-" | "+" | "*" | "!=" | "<" | ">"
val WHITESPACE = PLUS(RANGE(" \n"))
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val BEGIN: Rexp = "{"
val END: Rexp = "}"
       
// the tokens of the While language
abstract class Token
case object T_WHITESPACE extends Token
case object T_SEMI extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case object T_BEGIN extends Token
case object T_END extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(s: String) extends Token
case class T_KWD(s: String) extends Token
case class T_ERR(s: String) extends Token // special error token

type TokenFun = String => Token
type LexRules = List[(Rexp, TokenFun)]
       
val lexing_rules: LexRules =
  List((KEYWORD, (s) => T_KWD(s)),
       (ID, (s) => T_ID(s)),
       (OP, (s) => T_OP(s)),
       (NUM, (s) => T_NUM(s)),
       (SEMI, (s) => T_SEMI),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (BEGIN, (s) => T_BEGIN),
       (END, (s) => T_END),
       (WHITESPACE, (s) => T_WHITESPACE))
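
// The order of the rules matters: munch below breaks ties between equally
// long matches by taking the first rule whose derivative is nullable, so
// KEYWORD must come before ID.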
       
// tests whether a regular expression can match the empty string
def nullable (r: Rexp) : Boolean = r match {
  case NULL => false
  case EMPTY => true
  case CHAR(_) => false
  case ALT(r1, r2) => nullable(r1) || nullable(r2)
  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
  case STAR(_) => true
}
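
// Illustrative checks: a STAR always matches the empty string, a
// sequence only if both components do.
assert(nullable(STAR(CHAR('a'))))
assert(!nullable(SEQ(EMPTY, CHAR('a'))))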
       
// tests whether a regular expression cannot match any string at all
def zeroable (r: Rexp) : Boolean = r match {
  case NULL => true
  case EMPTY => false
  case CHAR(_) => false
  case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
  case SEQ(r1, r2) => zeroable(r1) || zeroable(r2)
  case STAR(_) => false
}
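
// Illustrative checks: a sequence containing NULL matches nothing;
// STAR(NULL) still matches the empty string, so it is not zeroable.
assert(zeroable(SEQ(NULL, CHAR('a'))))
assert(!zeroable(STAR(NULL)))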
       
// the derivative of a regular expression w.r.t. a character
def der (c: Char, r: Rexp) : Rexp = r match {
  case NULL => NULL
  case EMPTY => NULL
  case CHAR(d) => if (c == d) EMPTY else NULL
  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
  case SEQ(r1, r2) =>
    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
    else SEQ(der(c, r1), r2)
  case STAR(r) => SEQ(der(c, r), STAR(r))
}
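
// Worked example (illustrative): the derivative of "ab" w.r.t. 'a' is a
// regular expression that matches exactly "b".
assert(der('a', SEQ(CHAR('a'), CHAR('b'))) == SEQ(EMPTY, CHAR('b')))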
       
// returns the position and token function of
// the last Some-element in the list
def last(stack: List[Option[TokenFun]]) : Option[(Int, TokenFun)] = stack match {
  case Nil => None
  case None::stack => last(stack)
  case Some(tf)::stack => Some((1 + stack.length, tf))
}
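
// Illustrative check with a hypothetical dummy token function: the last
// Some sits at position 2, i.e. the longest match so far is 2 characters.
val dummy_tf: TokenFun = (s) => T_ERR(s)
assert(last(List(None, Some(dummy_tf), None)) == Some((2, dummy_tf)))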
       
// calculates derivatives until all of them are zeroable
@tailrec
def munch(cs: List[Char],
          rs: LexRules,
          stack: List[Option[TokenFun]]) : Option[(Int, TokenFun)] = (cs, rs) match {
  case (_, Nil) => last(stack)
  case (Nil, _) => last(stack)
  case (c::cs, rs) => {
    // take the derivative of every remaining rule w.r.t. c
    val ds = rs.map({case (r, tf) => (der(c, r), tf)})
    // keep only the rules that can still match something
    val rs_nzero = ds.filterNot({case (r, _) => zeroable(r)})
    // the rules that match the input consumed so far
    val rs_nulls = ds.filter({case (r, _) => nullable(r)})
    val opt = Try(Some(rs_nulls.head._2)) getOrElse None
    munch(cs, rs_nzero, opt::stack)
  }
}
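
// Maximal munch (illustrative; ex_len and ex_tf are hypothetical names):
// on "if2" the longer identifier match wins over the keyword prefix "if".
val Some((ex_len, ex_tf)) = munch("if2".toList, lexing_rules, Nil)
assert(ex_len == 3 && ex_tf("if2") == T_ID("if2"))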
       
// iterates the munching function and returns a Token list
def tokenize(s: String, rs: LexRules) : List[Token] = munch(s.toList, rs, Nil) match {
  case None if (s == "") => Nil
  case None => List(T_ERR(s"Lexing error: $s"))
  case Some((n, tf)) => {
    val (head, tail) = s.splitAt(n)
    tf(head)::tokenize(tail, rs)
  }
}
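
// Illustrative check: "if" is both a keyword and a well-formed identifier;
// since KEYWORD comes first in lexing_rules, the keyword token wins.
assert(tokenize("if", lexing_rules) == List(T_KWD("if")))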
       
// a test program for the While language
val test_prog = """
start := XXX;
x := start;
y := start;
z := start;
while 0 < x do {
 while 0 < y do {
  while 0 < z do {
    z := z - 1
  };
  z := start;
  y := y - 1
 };
 y := start;
 x := x - 1
};
write x;
write y;
write z
"""

println(tokenize(test_prog, lexing_rules).mkString("\n"))