// progs/token2.scala
// A tokenizer based on the derivatives of regular expressions
// author: Christian Urban <christian dot urban at kcl dot ac dot uk>

import scala.language.implicitConversions
import scala.language.reflectiveCalls
import scala.util._
import scala.annotation.tailrec

// regular expressions
sealed abstract class Rexp

case object NULL extends Rexp                       // matches nothing
case object EMPTY extends Rexp                      // matches the empty string
case class CHAR(c: Char) extends Rexp               // matches the character c
case class ALT(r1: Rexp, r2: Rexp) extends Rexp     // alternative
case class SEQ(r1: Rexp, r2: Rexp) extends Rexp     // sequence
case class STAR(r: Rexp) extends Rexp               // star / iteration

// translates a list of characters into a Rexp matching exactly that string
def charlist2rexp(s : List[Char]) : Rexp = s match {
  case Nil => EMPTY
  case c::Nil => CHAR(c)
  case c::s => SEQ(CHAR(c), charlist2rexp(s))
}
implicit def string2rexp(s : String) : Rexp = charlist2rexp(s.toList)
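
// With string2rexp in scope, plain strings can be used wherever a Rexp
// is expected. A quick sanity check (illustrative, not in the original):
assert(string2rexp("") == EMPTY)
assert(string2rexp("ab") == SEQ(CHAR('a'), CHAR('b')))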


implicit def RexpOps(r: Rexp) = new {
  def | (s: Rexp) = ALT(r, s)
  def % = STAR(r)
  def ~ (s: Rexp) = SEQ(r, s)
}

implicit def stringOps(s: String) = new {
  def | (r: Rexp) = ALT(s, r)
  def | (r: String) = ALT(s, r)
  def % = STAR(s)
  def ~ (r: Rexp) = SEQ(s, r)
  def ~ (r: String) = SEQ(s, r)
}
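
// Example (a hypothetical value, not in the original file): the wrappers
// above give an infix syntax for building regular expressions.
val r_example: Rexp = ("a" | "b") ~ "c".%
assert(r_example == SEQ(ALT(CHAR('a'), CHAR('b')), STAR(CHAR('c'))))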

// a regular expression matching any single character from the list
def Range(s : List[Char]) : Rexp = s match {
  case Nil => NULL
  case c::Nil => CHAR(c)
  case c::s => ALT(CHAR(c), Range(s))
}
def RANGE(s: String) = Range(s.toList)

def PLUS(r: Rexp) = SEQ(r, STAR(r))
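
// RANGE turns a string of characters into an alternative, and PLUS(r)
// matches one or more copies of r. For instance (illustrative):
assert(RANGE("ab") == ALT(CHAR('a'), CHAR('b')))
assert(PLUS(CHAR('a')) == SEQ(CHAR('a'), STAR(CHAR('a'))))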

val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
val DIGIT = RANGE("0123456789")
val ID = SYM ~ (SYM | DIGIT).% 
val NUM = PLUS(DIGIT)
val KEYWORD : Rexp = "skip" | "while" | "do" | "if" | "then" | "else" | "read" | "write" 
val SEMI: Rexp = ";"
val OP: Rexp = ":=" | "=" | "-" | "+" | "*" | "!=" | "<" | ">"
val WHITESPACE = PLUS(RANGE(" \n"))
val RPAREN: Rexp = ")"
val LPAREN: Rexp = "("
val BEGIN: Rexp = "{"
val END: Rexp = "}"

abstract class Token
case object T_WHITESPACE extends Token
case object T_SEMI extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case object T_BEGIN extends Token
case object T_END extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case class T_NUM(s: String) extends Token
case class T_KWD(s: String) extends Token
case class T_ERR(s: String) extends Token // special error token

type TokenFun = String => Token
type LexRules = List[(Rexp, TokenFun)]
val lexing_rules: LexRules = 
  List((KEYWORD, (s) => T_KWD(s)),
       (ID, (s) => T_ID(s)),
       (OP, (s) => T_OP(s)),
       (NUM, (s) => T_NUM(s)),
       (SEMI, (s) => T_SEMI),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (BEGIN, (s) => T_BEGIN),
       (END, (s) => T_END),
       (WHITESPACE, (s) => T_WHITESPACE))
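
// Note that the order of the rules encodes priority: KEYWORD precedes
// ID, so "while" lexes as T_KWD("while") rather than T_ID("while")
// whenever both rules match a prefix of the same length.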


// nullable: can the regular expression match the empty string?
def nullable (r: Rexp) : Boolean = r match {
  case NULL => false
  case EMPTY => true
  case CHAR(_) => false
  case ALT(r1, r2) => nullable(r1) || nullable(r2)
  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
  case STAR(_) => true
}
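
// Sanity checks for nullable (illustrative): EMPTY and STAR(r) accept
// the empty string; CHAR(c) and NUM do not.
assert(nullable(EMPTY) && nullable(STAR(NULL)))
assert(!nullable(CHAR('a')) && !nullable(NUM))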

// zeroable: does the regular expression match no string at all?
def zeroable (r: Rexp) : Boolean = r match {
  case NULL => true
  case EMPTY => false
  case CHAR(_) => false
  case ALT(r1, r2) => zeroable(r1) && zeroable(r2)
  case SEQ(r1, r2) => zeroable(r1) || zeroable(r2)
  case STAR(_) => false
}
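
// zeroable identifies expressions that match no string at all, which is
// what lets the lexer prune dead rules. For example (illustrative):
assert(zeroable(SEQ(NULL, CHAR('a'))))
assert(!zeroable(ALT(NULL, EMPTY)))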

// the derivative of a regular expression w.r.t. a character
def der (c: Char, r: Rexp) : Rexp = r match {
  case NULL => NULL
  case EMPTY => NULL  
  case CHAR(d) => if (c == d) EMPTY else NULL
  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
  case SEQ(r1, r2) => 
    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
    else SEQ(der(c, r1), r2)
  case STAR(r) => SEQ(der(c, r), STAR(r))
}
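
// Iterating der over a whole string gives a matcher: r accepts s exactly
// when the derivative of r by s is nullable. These two helpers are a
// sketch for illustration and are not used by the lexer below.
def ders(s: List[Char], r: Rexp): Rexp = s match {
  case Nil => r
  case c::cs => ders(cs, der(c, r))
}

def matches(r: Rexp, s: String): Boolean = nullable(ders(s.toList, r))

assert(matches(NUM, "42") && !matches(NUM, "4a"))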



// munch implements maximal munch: it keeps taking derivatives of all
// live rules until every one is zeroable, remembering the last position
// (and token function) at which some rule was nullable, i.e. the longest
// prefix matched by any rule
@tailrec
def munch(s: List[Char], 
          pos: Int, 
          rs: LexRules, 
          last: Option[(Int, TokenFun)]): Option[(Int, TokenFun)] = rs match {
  case Nil => last
  case _ if (s.length <= pos) => last
  case rs => {
    // take the derivative of every live rule by the current character
    val ders = rs.map({case (r, tf) => (der(s(pos), r), tf)})
    // rules that can still match an extension of the current prefix
    val rs_nzero = ders.filterNot({case (r, _) => zeroable(r)})
    // rules that match exactly the prefix read so far
    val rs_nulls = ders.filter({case (r, _) => nullable(r)})
    val new_last = if (rs_nulls.nonEmpty) Some((pos, rs_nulls.head._2)) else last
    munch(s, 1 + pos, rs_nzero, new_last)
  }
}
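
// A small illustration of maximal munch (hypothetical example): on the
// input "if2" the KEYWORD rule dies at the character '2', but ID stays
// alive, so munch reports the last position at which ID was nullable and
// tokenize below emits T_ID("if2") rather than T_KWD("if").
assert(munch("if2".toList, 0, lexing_rules, None).map(_._1) == Some(2))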

// iterates the munching function and returns a Token list
def tokenize(s: String, rs: LexRules) : List[Token] = munch(s.toList, 0, rs, None) match {
  case None if (s == "") => Nil
  case None => List(T_ERR(s"Lexing error: $s"))
  case Some((n, tf)) => {
    val (head, tail) = s.splitAt(n + 1)
    tf(head)::tokenize(tail, rs)
  }
}
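
// An illustrative run (not in the original file): keywords, whitespace,
// identifiers and semicolons are recognised and tagged in one pass.
assert(tokenize("read n;", lexing_rules) ==
       List(T_KWD("read"), T_WHITESPACE, T_ID("n"), T_SEMI))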

val test_prog = """
start := XXX;
x := start;
y := start;
z := start;
while 0 < x do {
 while 0 < y do {
  while 0 < z do {
    z := z - 1
  };
  z := start;
  y := y - 1
 };     
 y := start;
 x := x - 1
};
write x;
write y;
write z
"""

println(tokenize(test_prog, lexing_rules).mkString("\n"))