--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/parser1.scala Wed Nov 14 08:45:30 2012 +0000
@@ -0,0 +1,88 @@
+:load matcher.scala
+
+// regular expressions for numbers, parentheses, operators and whitespace
+val DIGIT = RANGE("0123456789".toList)
+val NONZERODIGIT = RANGE("123456789".toList)
+
+val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")
+val LPAREN = CHAR('(')
+val RPAREN = CHAR(')')
+val WHITESPACE = PLUS(RANGE(" \n".toList))
+val OPS = RANGE("+-*".toList)
+
+// for classifying the strings that have been recognised
+abstract class Token
+case object T_WHITESPACE extends Token
+case object T_NUM extends Token
+case class T_OP(s: String) extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case class T_NT(s: String, rhs: List[Token]) extends Token
+
+// tokenizes a string according to the lexing rules and throws
+// away all whitespace tokens
+def tokenizer(rs: List[Rule[Token]], s: String) : List[Token] =
+  tokenize(rs, s.toList).filterNot(_ == T_WHITESPACE)
+
+
+
+// lexing rules for arithmetic expressions
+val lexing_rules: List[Rule[Token]] =
+ List((NUMBER, (s) => T_NUM),
+ (WHITESPACE, (s) => T_WHITESPACE),
+ (LPAREN, (s) => T_LPAREN),
+ (RPAREN, (s) => T_RPAREN),
+ (OPS, (s) => T_OP(s.mkString)))
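+
+// a quick sanity check of the lexer (assuming tokenize from matcher.scala
+// performs longest-match lexing as used above); "2 + 3" should come out as
+// List(T_NUM, T_OP("+"), T_NUM), with the whitespace tokens filtered away
+tokenizer(lexing_rules, "2 + 3")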
+
+
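+// a grammar is a list of productions: a nonterminal name on the
+// left-hand side and a sequence of tokens on the right-hand side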
+type Grammar = List[(String, List[Token])]
+
+// grammar for arithmetic expressions
+val grammar: Grammar =
+ List ("E" -> List(T_NUM),
+ "E" -> List(T_NT("E", Nil), T_OP("+"), T_NT("E", Nil)),
+ "E" -> List(T_NT("E", Nil), T_OP("-"), T_NT("E", Nil)),
+ "E" -> List(T_NT("E", Nil), T_OP("*"), T_NT("E", Nil)),
+ "E" -> List(T_LPAREN, T_NT("E", Nil), T_RPAREN))
+
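+// does ts1 start with the prefix ts2? nonterminals are compared
+// by name only, so the subtrees stored inside a T_NT are ignored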
+def startsWith[A](ts1: List[A], ts2: List[A]) : Boolean = (ts1, ts2) match {
+ case (_, Nil) => true
+ case (T_NT(e, _)::ts1,T_NT(f, _)::ts2) => (e == f) && startsWith(ts1, ts2)
+ case (t1::ts1, t2::ts2) => (t1 == t2) && startsWith(ts1, ts2)
+ case _ => false
+}
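+
+// for example (the subtree inside a T_NT does not matter for the comparison)
+startsWith(List(T_NT("E", List(T_NUM)), T_OP("+")), List(T_NT("E", Nil)))  // true
+startsWith(List(T_NUM, T_OP("+")), List(T_OP("+")))                        // false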
+
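+// searches for the first occurrence of prefix inside ts1 and, if found,
+// returns the elements before it and the elements after it (ts2 is an
+// accumulator for the part already scanned, kept in reverse order)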
+def chop[A](ts1: List[A], prefix: List[A], ts2: List[A]) : Option[(List[A], List[A])] =
+ ts1 match {
+ case Nil => None
+ case t::ts =>
+      if (startsWith(ts1, prefix)) Some((ts2.reverse, ts1.drop(prefix.length)))
+ else chop(ts, prefix, t::ts2)
+ }
+
+// examples
+chop(List(1,2,3,4,5,6,7,8,9), List(4,5), Nil)   // => Some((List(1, 2, 3), List(6, 7, 8, 9)))
+chop(List(1,2,3,4,5,6,7,8,9), List(3,5), Nil)   // => None
+
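+// replaces the first occurrence of the sequence out inside ts by in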
+def replace[A](ts: List[A], out: List[A], in: List[A]) : Option[List[A]] =
+ chop(ts, out, Nil) match {
+ case None => None
+ case Some((before, after)) => Some(before ::: in ::: after)
+ }
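+
+// for example, replacing the sequence 4,5 by a single 0
+replace(List(1,2,3,4,5,6), List(4,5), List(0))  // => Some(List(1, 2, 3, 0, 6))
+replace(List(1,2,3,4,5,6), List(7), List(0))    // => None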
+
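+// a rather brute-force bottom-up parser: it repeatedly replaces the
+// right-hand side of some production by its nonterminal until, with
+// luck, only the start symbol E is left; it returns true if some
+// sequence of replacements succeeds and false otherwise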
+def parse1(g: Grammar, ts: List[Token]) : Boolean = ts match {
+ case List(T_NT("E", tree)) => { println(tree); true }
+ case _ => {
+ val tss = for ((lhs, rhs) <- g) yield replace(ts, rhs, List(T_NT(lhs, rhs)))
+ tss.flatten.exists(parse1(g, _))
+ }
+}
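+
+// for example "2 + 3" reduces as follows (writing E for T_NT("E", ...)):
+//
+//   T_NUM, T_OP(+), T_NUM
+//   E,     T_OP(+), T_NUM      by E -> NUM
+//   E,     T_OP(+), E          by E -> NUM
+//   E                          by E -> E + E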
+
+
+println() ; parse1(grammar, tokenizer(lexing_rules, "2 + 3 * 4 + 1"))        // parses (true)
+println() ; parse1(grammar, tokenizer(lexing_rules, "(2 + 3) * (4 + 1)"))    // parses (true)
+println() ; parse1(grammar, tokenizer(lexing_rules, "(2 + 3) * 4 (4 + 1)"))  // does not parse (false)
+
+
+