progs/token2.scala
changeset 367 04127a5aad23
parent 165 66b699c80479
child 368 a9911966c0df

import scala.language.implicitConversions
import scala.language.reflectiveCalls
import scala.annotation.tailrec

abstract class Rexp
case object NULL extends Rexp
case object EMPTY extends Rexp
case class CHAR(c: Char) extends Rexp
case class ALT(r1: Rexp, r2: Rexp) extends Rexp
case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
case class STAR(r: Rexp) extends Rexp
case class RECD(x: String, r: Rexp) extends Rexp
case class CRANGE(cs: String) extends Rexp
case class PLUS(r: Rexp) extends Rexp

abstract class Val
case object Empty extends Val
case class Chr(c: Char) extends Val
case class Seq(v1: Val, v2: Val) extends Val
case class Left(v: Val) extends Val
case class Right(v: Val) extends Val
case class Stars(vs: List[Val]) extends Val
case class Rec(x: String, v: Val) extends Val

// some convenience for typing in regular expressions
def charlist2rexp(s : List[Char]): Rexp = s match {
  case Nil => EMPTY
  case c::Nil => CHAR(c)
  case c::s => SEQ(CHAR(c), charlist2rexp(s))
}
implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)

implicit def RexpOps(r: Rexp) = new {
  def | (s: Rexp) = ALT(r, s)
  def % = STAR(r)
  def ~ (s: Rexp) = SEQ(r, s)
}

implicit def stringOps(s: String) = new {
  def | (r: Rexp) = ALT(s, r)
  def | (r: String) = ALT(s, r)
  def % = STAR(s)
  def ~ (r: Rexp) = SEQ(s, r)
  def ~ (r: String) = SEQ(s, r)
  def $ (r: Rexp) = RECD(s, r)
}
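
// with these implicits a regular expression can be written almost like a
// string pattern; a small added illustration (the value name is made up):
//   ("a" | "ab") ~ "c"  ==  SEQ(ALT(CHAR('a'), SEQ(CHAR('a'), CHAR('b'))), CHAR('c'))
val example_rexp : Rexp = ("a" | "ab") ~ "c"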

// nullable function: tests whether the regular
// expression can recognise the empty string
def nullable (r: Rexp) : Boolean = r match {
  case NULL => false
  case EMPTY => true
  case CHAR(_) => false
  case ALT(r1, r2) => nullable(r1) || nullable(r2)
  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
  case STAR(_) => true
  case RECD(_, r) => nullable(r)
  case CRANGE(_) => false
  case PLUS(r) => nullable(r)
}
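
// two added sanity checks for nullable
assert(nullable(STAR(CHAR('a'))))           // STAR always matches the empty string
assert(!nullable(SEQ(CHAR('a'), EMPTY)))    // a SEQ is nullable only if both parts are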

// derivative of a regular expression w.r.t. a character
def der (c: Char, r: Rexp) : Rexp = r match {
  case NULL => NULL
  case EMPTY => NULL
  case CHAR(d) => if (c == d) EMPTY else NULL
  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
  case SEQ(r1, r2) =>
    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
    else SEQ(der(c, r1), r2)
  case STAR(r) => SEQ(der(c, r), STAR(r))
  case RECD(_, r1) => der(c, r1)
  case CRANGE(cs) => if (cs.contains(c)) EMPTY else NULL
  case PLUS(r) => SEQ(der(c, r), STAR(r))
}

// derivative w.r.t. a string (iterates der)
def ders (s: List[Char], r: Rexp) : Rexp = s match {
  case Nil => r
  case c::s => ders(s, der(c, r))
}
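
// a matcher falls out directly from ders and nullable: take the derivative
// w.r.t. every character and test nullable at the end (the helper name
// `matches` is only an added illustration)
def matches(r: Rexp, s: String) : Boolean = nullable(ders(s.toList, r))

assert(matches("while", "while"))
assert(!matches("while", "whil"))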

// extracts a string from a value
def flatten(v: Val) : String = v match {
  case Empty => ""
  case Chr(c) => c.toString
  case Left(v) => flatten(v)
  case Right(v) => flatten(v)
  case Seq(v1, v2) => flatten(v1) + flatten(v2)
  case Stars(vs) => vs.map(flatten).mkString
  case Rec(_, v) => flatten(v)
}

// extracts an environment from a value
def env(v: Val) : List[(String, String)] = v match {
  case Empty => Nil
  case Chr(c) => Nil
  case Left(v) => env(v)
  case Right(v) => env(v)
  case Seq(v1, v2) => env(v1) ::: env(v2)
  case Stars(vs) => vs.flatMap(env)
  case Rec(x, v) => (x, flatten(v))::env(v)
}

// injection part
def mkeps(r: Rexp) : Val = r match {
  case EMPTY => Empty
  case ALT(r1, r2) =>
    if (nullable(r1)) Left(mkeps(r1)) else Right(mkeps(r2))
  case SEQ(r1, r2) => Seq(mkeps(r1), mkeps(r2))
  case STAR(r) => Stars(Nil)
  case RECD(x, r) => Rec(x, mkeps(r))
  case PLUS(r) => Stars(List(mkeps(r)))
}

def inj(r: Rexp, c: Char, v: Val) : Val = (r, v) match {
  case (STAR(r), Seq(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
  case (SEQ(r1, r2), Seq(v1, v2)) => Seq(inj(r1, c, v1), v2)
  case (SEQ(r1, r2), Left(Seq(v1, v2))) => Seq(inj(r1, c, v1), v2)
  case (SEQ(r1, r2), Right(v2)) => Seq(mkeps(r1), inj(r2, c, v2))
  case (ALT(r1, r2), Left(v1)) => Left(inj(r1, c, v1))
  case (ALT(r1, r2), Right(v2)) => Right(inj(r2, c, v2))
  case (CHAR(_), Empty) => Chr(c)
  case (CRANGE(_), Empty) => Chr(c)
  case (RECD(x, r1), _) => Rec(x, inj(r1, c, v))
  case (PLUS(r), Seq(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
}

// main lexing function (produces a value)
def lex(r: Rexp, s: List[Char]) : Val = s match {
  case Nil => if (nullable(r)) mkeps(r) else throw new Exception("Not matched")
  case c::cs => inj(r, c, lex(der(c, r), cs))
}

def lexing(r: Rexp, s: String) : Val = lex(r, s.toList)

lexing(("ab" | "ab") ~ ("b" | EMPTY), "ab")
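
// RECD attaches a name to a sub-expression and env reads the match back out;
// a small added check (the tag "x" is just an example):
//   lexing("x" $ "ab", "ab")  ==  Rec("x", Seq(Chr('a'), Chr('b')))
assert(env(lexing("x" $ "ab", "ab")) == List(("x", "ab")))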
       
// some "rectification" functions for simplification
def F_ID(v: Val): Val = v
def F_RIGHT(f: Val => Val) = (v:Val) => Right(f(v))
def F_LEFT(f: Val => Val) = (v:Val) => Left(f(v))
def F_ALT(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
  case Right(v) => Right(f2(v))
  case Left(v) => Left(f1(v))
}
def F_SEQ(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
  case Seq(v1, v2) => Seq(f1(v1), f2(v2))
}
def F_SEQ_Empty1(f1: Val => Val, f2: Val => Val) =
  (v:Val) => Seq(f1(Empty), f2(v))
def F_SEQ_Empty2(f1: Val => Val, f2: Val => Val) =
  (v:Val) => Seq(f1(v), f2(Empty))
def F_RECD(f: Val => Val) = (v:Val) => v match {
  case Rec(x, v) => Rec(x, f(v))
}
def F_ERROR(v: Val): Val = throw new Exception("error")

// simplification of regular expressions returning also a
// rectification function; no simplification under STAR
def simp(r: Rexp): (Rexp, Val => Val) = r match {
  case ALT(r1, r2) => {
    val (r1s, f1s) = simp(r1)
    val (r2s, f2s) = simp(r2)
    (r1s, r2s) match {
      case (NULL, _) => (r2s, F_RIGHT(f2s))
      case (_, NULL) => (r1s, F_LEFT(f1s))
      case _ => if (r1s == r2s) (r1s, F_LEFT(f1s))
                else (ALT (r1s, r2s), F_ALT(f1s, f2s))
    }
  }
  case SEQ(r1, r2) => {
    val (r1s, f1s) = simp(r1)
    val (r2s, f2s) = simp(r2)
    (r1s, r2s) match {
      case (NULL, _) => (NULL, F_ERROR)
      case (_, NULL) => (NULL, F_ERROR)
      case (EMPTY, _) => (r2s, F_SEQ_Empty1(f1s, f2s))
      case (_, EMPTY) => (r1s, F_SEQ_Empty2(f1s, f2s))
      case _ => (SEQ(r1s,r2s), F_SEQ(f1s, f2s))
    }
  }
  case RECD(x, r1) => {
    val (r1s, f1s) = simp(r1)
    (RECD(x, r1s), F_RECD(f1s))
  }
  case r => (r, F_ID)
}
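
// an added check of the simplification: an ALT with a NULL branch collapses
// to the other branch (the rectification function is ignored here)
assert(simp(ALT(NULL, CHAR('a')))._1 == CHAR('a'))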
       
def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
  case Nil => if (nullable(r)) mkeps(r) else throw new Exception("Not matched")
  case c::cs => {
    val (r_simp, f_simp) = simp(der(c, r))
    inj(r, c, f_simp(lex_simp(r_simp, cs)))
  }
}

def lexing_simp(r: Rexp, s: String) : Val = lex_simp(r, s.toList)

lexing_simp(("a" | "ab") ~ ("b" | ""), "ab")

// Lexing Rules for a Small While Language

val SYM = CRANGE("abcdefghijklmnopqrstuvwxyz")
val DIGIT = CRANGE("0123456789")
val ID = SYM ~ (SYM | DIGIT).%
val NUM = PLUS(DIGIT)
val KEYWORD : Rexp = "skip" | "while" | "do" | "if" | "then" | "else" | "read" | "write" | "true" | "false"
val SEMI: Rexp = ";"
val OP: Rexp = ":=" | "==" | "-" | "+" | "*" | "!=" | "<" | ">" | "<=" | ">=" | "%" | "/"
val WHITESPACE = PLUS(" " | "\n" | "\t")
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val BEGIN: Rexp = "{"
val END: Rexp = "}"
val STRING: Rexp = "\"" ~ SYM.% ~ "\""

val WHILE_REGS = (("k" $ KEYWORD) |
                  ("i" $ ID) |
                  ("o" $ OP) |
                  ("n" $ NUM) |
                  ("s" $ SEMI) |
                  ("str" $ STRING) |
                  ("p" $ (LPAREN | RPAREN)) |
                  ("b" $ (BEGIN | END)) |
                  ("w" $ WHITESPACE)).%

// filters out all white spaces
def tokenise(r: Rexp, s: String) =
  env(lexing_simp(r, s)).filterNot { (s) => s._1 == "w"}.mkString("\n")
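
// for example, tokenise(WHILE_REGS, "read n") should print the tagged pairs
// (k,read) and (i,n) on separate lines, with the whitespace entry filtered out
println(tokenise(WHILE_REGS, "read n"))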
       

//   Testing
//============

def time[T](code: => T) = {
  val start = System.nanoTime()
  val result = code
  val end = System.nanoTime()
  println((end - start)/1.0e9)
  result
}

val r1 = ("a" | "ab") ~ ("bcd" | "c")
println(lexing(r1, "abcd"))

val r2 = ("" | "a") ~ ("ab" | "b")
println(lexing(r2, "ab"))

// Two Simple While Tests
//========================
println("prog0 test")

val prog0 = """read n"""
println(env(lexing_simp(WHILE_REGS, prog0)))

println("prog1 test")

val prog1 = """read  n; write (n)"""
println(env(lexing_simp(WHILE_REGS, prog1)))

// Big Test
//==========

val prog2 = """
write "fib";
read n;
minus1 := 0;
minus2 := 1;
while n > 0 do {
  temp := minus2;
  minus2 := minus1 + minus2;
  minus1 := temp;
  n := n - 1
};
write "result";
write minus2
"""

println("Tokens")
println(tokenise(WHILE_REGS, prog2))

for (i <- 1 to 120 by 10) {
  print(i.toString + ":  ")
  time(lexing_simp(WHILE_REGS, prog2 * i))
}