// progs/mllex.scala
// author: Christian Urban <urbanc@in.tum.de>

// this file is meant to be used in the Scala REPL, after loading matcher.scala
:load matcher.scala


// some regular expressions
val KEYWORDS =  ALTS(List("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=",
  "=>", "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
  "case", "datatype", "do", "else", "end", "eqtype", "exception", "fn",
  "fun", "functor", "handle", "if", "in", "include", "infix", "infixr",
  "let", "local", "nonfix", "of", "op", "open", "orelse", "raise", "rec",
  "sharing", "sig", "signature", "struct", "structure", "then", "type",
  "val", "where", "while", "with", "withtype"))

val DIGITS = RANGE("0123456789")
val NONZERODIGITS = RANGE("123456789")

val POSITIVES = ALT(SEQ(NONZERODIGITS, STAR(DIGITS)), "0")
val INTEGERS = ALT(SEQ("~", POSITIVES), POSITIVES)
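// (SML writes negative numbers with a leading ~, hence the ~ case in INTEGERS)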

val ALL = ALT(KEYWORDS, INTEGERS)

val COMMENT = SEQS("/*", NOT(SEQS(STAR(ALL), "*/", STAR(ALL))), "*/")
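// (a comment is "/*", followed by a body that must not contain a closing "*/",
//  followed by "*/"; the NOT(...) part expresses the "must not contain" condition)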



val LPAREN = CHAR('(')
val RPAREN = CHAR(')')
val WHITESPACE = PLUS(RANGE(" \n".toList))
val OPS = RANGE("+-*".toList)

// for classifying the strings that have been recognised
abstract class Token
case object T_WHITESPACE extends Token
case object T_NUM extends Token
case class T_OP(s: String) extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case class T_NT(s: String, rhs: List[Token]) extends Token
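// (T_NT is used by the parser below: a nonterminal together with the
//  right-hand side it was reduced from)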

def tokenizer(rs: List[Rule[Token]], s: String) : List[Token] = 
  tokenize(rs, s.toList).filterNot {
    case T_WHITESPACE => true
    case _ => false
  }
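
// (tokenize and Rule are expected to be provided by matcher.scala; the
//  wrapper above just removes all whitespace tokens from the result)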



// lexing rules for arithmetic expressions
// (NUMBER is not defined above, so natural numbers are used for it here)
val NUMBER = POSITIVES

val lexing_rules: List[Rule[Token]] = 
  List((NUMBER, (s) => T_NUM),
       (WHITESPACE, (s) => T_WHITESPACE),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (OPS, (s) => T_OP(s.mkString)))
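
// for example, tokenizer(lexing_rules, "2 + 3") should yield
// List(T_NUM, T_OP("+"), T_NUM)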

// lex a whole file with the given rules (tokenize_file is assumed to be
// provided by matcher.scala); here it is called with an empty list of rules
tokenize_file(Nil, "nominal_library.ML")




type Grammar = List[(String, List[Token])]

// grammar for arithmetic expressions
val grammar = 
  List ("E" -> List(T_NUM),
        "E" -> List(T_NT("E", Nil), T_OP("+"), T_NT("E", Nil)),
        "E" -> List(T_NT("E", Nil), T_OP("-"), T_NT("E", Nil)),
        "E" -> List(T_NT("E", Nil), T_OP("*"), T_NT("E", Nil)),    
        "E" -> List(T_LPAREN, T_NT("E", Nil), T_RPAREN))

def startsWith[A](ts1: List[A], ts2: List[A]) : Boolean = (ts1, ts2) match {
  case (_, Nil) => true
  case (T_NT(e, _)::ts1,T_NT(f, _)::ts2) => (e == f) && startsWith(ts1, ts2)
  case (t1::ts1, t2::ts2) => (t1 == t2) && startsWith(ts1, ts2)
  case _ => false
}
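
// startsWith checks whether ts2 is a prefix of ts1; nonterminals are
// compared by name only, their recorded right-hand sides are ignored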

def chop[A](ts1: List[A], prefix: List[A], ts2: List[A]) : Option[(List[A], List[A])] = 
  ts1 match {
    case Nil => None
    case t::ts => 
      if (startsWith(ts1, prefix)) Some((ts2.reverse, ts1.drop(prefix.length)))
      else chop(ts, prefix, t::ts2)
  }
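
// chop splits ts1 around the first occurrence of prefix and returns the
// parts before and after it (the accumulator ts2 collects what has been
// scanned so far, in reverse)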

// examples
chop(List(1,2,3,4,5,6,7,8,9), List(4,5), Nil)   // => Some((List(1, 2, 3), List(6, 7, 8, 9)))
chop(List(1,2,3,4,5,6,7,8,9), List(3,5), Nil)   // => None

def replace[A](ts: List[A], out: List[A], in: List [A]) = 
  chop(ts, out, Nil) match {
    case None => None
    case Some((before, after)) => Some(before ::: in ::: after)
  }  
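
// replace substitutes the first occurrence of out in ts by in, returning
// None if out does not occur in ts at all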

def parse1(g: Grammar, ts: List[Token]) : Boolean = ts match {
  case List(T_NT("E", tree)) => { println(tree); true }
  case _ => {
    val tss = for ((lhs, rhs) <- g) yield replace(ts, rhs, List(T_NT(lhs, rhs)))
    tss.flatten.exists(parse1(g, _))
  }
}
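
// parse1 is a brute-force bottom-up parser: it repeatedly replaces a
// right-hand side of the grammar by its left-hand side nonterminal and
// succeeds if this process can reduce the token list to a single E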
 

println() ; parse1(grammar, tokenizer(lexing_rules, "2 + 3 * 4 + 1"))
println() ; parse1(grammar, tokenizer(lexing_rules, "(2 + 3) * (4 + 1)"))
println() ; parse1(grammar, tokenizer(lexing_rules, "(2 + 3) * 4 (4 + 1)"))