:load matcher.scala


// some regular expressions
val KEYWORDS = ALTS(List("#", "(", ")", ",", "->", "...", ":", ":>", ";", "=",
  "=>", "[", "]", "_", "{", "|", "}", "abstype", "and", "andalso", "as",
  "case", "datatype", "do", "else", "end", "eqtype", "exception", "fn",
  "fun", "functor", "handle", "if", "in", "include", "infix", "infixr",
  "let", "local", "nonfix", "of", "op", "open", "orelse", "raise", "rec",
  "sharing", "sig", "signature", "struct", "structure", "then", "type",
  "val", "where", "while", "with", "withtype"))

val DIGITS = RANGE("0123456789")
val NONZERODIGITS = RANGE("123456789")

val POSITIVES = ALT(SEQ(NONZERODIGITS, STAR(DIGITS)), "0")
val INTEGERS = ALT(SEQ("~", POSITIVES), POSITIVES)
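
// POSITIVES accepts "0" and "42" but not "042" (no leading zeros);
// INTEGERS additionally accepts "~42", with ~ being SML's negation
// sign. Assuming matcher.scala provides a whole-string test, say
// matcher(r: Rexp, s: String): Boolean, this can be checked with
//
//   matcher(POSITIVES, "042")   // false
//   matcher(INTEGERS, "~42")    // true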

val ALL = ALTS(KEYWORDS, INTEGERS)

val COMMENT = SEQS("/*", NOT(SEQS(STAR(ALL), "*/", STAR(ALL))), "*/")
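
// COMMENT matches /* ... */ comments whose body cannot be split into
// token sequences around a closing */; the complement operator NOT
// expresses that the body contains no premature end of the comment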


val LPAREN = CHAR('(')
val RPAREN = CHAR(')')
val WHITESPACE = PLUS(RANGE(" \n".toList))
val OPS = RANGE("+-*".toList)
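
// (RANGE and PLUS come from matcher.scala: RANGE matches one
// character from the given set, PLUS stands for one or more
// repetitions)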

// for classifying the strings that have been recognised
abstract class Token
case object T_WHITESPACE extends Token
case object T_NUM extends Token
case class T_OP(s: String) extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case class T_NT(s: String, rhs: List[Token]) extends Token
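
// T_NT represents a nonterminal during parsing; its rhs field
// records the tokens the nonterminal replaced and therefore doubles
// as a parse tree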

// the tokenizer drops all whitespace tokens
def tokenizer(rs: List[Rule[Token]], s: String) : List[Token] =
  tokenize(rs, s.toList).filterNot(_ == T_WHITESPACE)



// lexing rules for arithmetic expressions
val lexing_rules: List[Rule[Token]] =
  List((POSITIVES, (s) => T_NUM),
       (WHITESPACE, (s) => T_WHITESPACE),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (OPS, (s) => T_OP(s.mkString)))
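
// a quick check of the lexer: the whitespace tokens are filtered out
// (expected output: List(T_NUM, T_OP(+), T_NUM))
println(tokenizer(lexing_rules, "1 + 2"))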


// tokenizing a whole file: this would need the external file
// nominal_library.ML and a rule list covering ML syntax (an empty
// rule list cannot tokenize a non-empty file), so the call is left
// commented out
//tokenize_file(Nil, "nominal_library.ML")



type Grammar = List[(String, List[Token])]
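
// a grammar rule maps a nonterminal, given by its name, to a
// right-hand side, given as a list of tokens; nonterminals on
// right-hand sides are wrapped in T_NT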

// grammar for arithmetic expressions
val grammar =
  List("E" -> List(T_NUM),
       "E" -> List(T_NT("E", Nil), T_OP("+"), T_NT("E", Nil)),
       "E" -> List(T_NT("E", Nil), T_OP("-"), T_NT("E", Nil)),
       "E" -> List(T_NT("E", Nil), T_OP("*"), T_NT("E", Nil)),
       "E" -> List(T_LPAREN, T_NT("E", Nil), T_RPAREN))
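
// note that this grammar is ambiguous (it fixes no precedence or
// associativity for +, - and *); parse1 below merely searches for
// some successful reduction sequence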

// tests whether ts2 is a prefix of ts1; nonterminal tokens are
// compared by name only, ignoring their stored subtrees
def startsWith[A](ts1: List[A], ts2: List[A]) : Boolean = (ts1, ts2) match {
  case (_, Nil) => true
  case (T_NT(e, _)::ts1, T_NT(f, _)::ts2) => (e == f) && startsWith(ts1, ts2)
  case (t1::ts1, t2::ts2) => (t1 == t2) && startsWith(ts1, ts2)
  case _ => false
}
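
// examples
startsWith(List(1,2,3), List(1,2))   // => true
startsWith(List(1,2,3), List(2))     // => false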

// splits ts1 at the first occurrence of prefix, returning the parts
// before and after it (ts2 accumulates the already-scanned part in
// reverse)
def chop[A](ts1: List[A], prefix: List[A], ts2: List[A]) : Option[(List[A], List[A])] =
  ts1 match {
    case Nil => None
    case t::ts =>
      if (startsWith(ts1, prefix)) Some((ts2.reverse, ts1.drop(prefix.length)))
      else chop(ts, prefix, t::ts2)
  }

// examples
chop(List(1,2,3,4,5,6,7,8,9), List(4,5), Nil)   // => Some((List(1,2,3), List(6,7,8,9)))
chop(List(1,2,3,4,5,6,7,8,9), List(3,5), Nil)   // => None

// replaces the first occurrence of out in ts by in
def replace[A](ts: List[A], out: List[A], in: List[A]) =
  chop(ts, out, Nil) match {
    case None => None
    case Some((before, after)) => Some(before ::: in ::: after)
  }
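
// example
replace(List(1,2,3,4), List(2,3), List(9))   // => Some(List(1, 9, 4))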

// a naive nondeterministic bottom-up parser: keep replacing some
// occurrence of a right-hand side by its nonterminal until only the
// start symbol E is left; on success the stored subtree is printed
def parse1(g: Grammar, ts: List[Token]) : Boolean = ts match {
  case List(T_NT("E", tree)) => { println(tree); true }
  case _ => {
    val tss = for ((lhs, rhs) <- g) yield replace(ts, rhs, List(T_NT(lhs, rhs)))
    tss.flatten.exists(parse1(g, _))
  }
}
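
// one possible reduction sequence for the input 2 + 3:
//
//   T_NUM  T_OP(+)  T_NUM
//   => T_NT(E)  T_OP(+)  T_NUM     (by E -> T_NUM)
//   => T_NT(E)  T_OP(+)  T_NT(E)   (by E -> T_NUM)
//   => T_NT(E)                     (by E -> E + E)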


// tests: the first two expressions parse (parse1 prints the stored
// subtree and returns true); the third is not well-formed, so parse1
// returns false
println() ; parse1(grammar, tokenizer(lexing_rules, "2 + 3 * 4 + 1"))
println() ; parse1(grammar, tokenizer(lexing_rules, "(2 + 3) * (4 + 1)"))
println() ; parse1(grammar, tokenizer(lexing_rules, "(2 + 3) * 4 (4 + 1)"))