// parser2.scala

:load matcher.scala
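
// matcher.scala is assumed to provide the regular expression
// constructors CHAR, RANGE, ALT, SEQ, STAR and PLUS (together with an
// implicit conversion from strings to regular expressions), the type
// Rule[T] pairing a regular expression with an action that turns the
// matched characters into a token, and the tokenize function used below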
       
// some regular expressions
val LETTER = RANGE("abcdefghijklmnopqrstuvwxyz".toList)
val ID = PLUS(LETTER)

val DIGIT = RANGE("0123456789".toList)
val NONZERODIGIT = RANGE("123456789".toList)
val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")

val LPAREN = CHAR('(')
val RPAREN = CHAR(')')

val WHITESPACE = PLUS(RANGE(" \n".toList))
val OPS = RANGE("+-*".toList)
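
// note that NUMBER matches either a non-zero digit followed by
// arbitrary digits, or the single digit 0, so numerals with leading
// zeros are not matched as a single number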
       
// for classifying the strings that have been recognised
abstract class Token
case object T_WHITESPACE extends Token
case class T_NUM(s: String) extends Token
case class T_ID(s: String) extends Token
case class T_OP(s: String) extends Token
case object T_LPAREN extends Token
case object T_RPAREN extends Token
case object T_IF extends Token
case object T_THEN extends Token
case object T_ELSE extends Token

// tokenize a string according to the rules and drop all whitespace tokens
def tokenizer(rs: List[Rule[Token]], s: String) : List[Token] =
  tokenize(rs, s.toList).filterNot(_ match {
    case T_WHITESPACE => true
    case _ => false
  })

// lexing rules for arithmetic expressions
val lexing_rules: List[Rule[Token]] =
  List(("if", (s) => T_IF),
       ("then", (s) => T_THEN),
       ("else", (s) => T_ELSE),
       (NUMBER, (s) => T_NUM(s.mkString)),
       (ID, (s) => T_ID(s.mkString)),
       (WHITESPACE, (s) => T_WHITESPACE),
       (LPAREN, (s) => T_LPAREN),
       (RPAREN, (s) => T_RPAREN),
       (OPS, (s) => T_OP(s.mkString)))
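
// the keyword rules for "if", "then" and "else" are listed before the
// ID rule, so keywords are not lexed as identifiers; a small sanity
// check (the expected output assumes the longest-match behaviour of
// tokenize in matcher.scala):
println(tokenizer(lexing_rules, "if foo then 42"))
// expected: List(T_IF, T_ID("foo"), T_THEN, T_NUM("42"))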
       
// parse trees
abstract class ParseTree
case class Leaf(t: Token) extends ParseTree
case class Branch(pts: List[ParseTree]) extends ParseTree

def combine(pt1: ParseTree, pt2: ParseTree) = pt1 match {
  case Leaf(t) => Branch(List(Leaf(t), pt2))
  case Branch(pts) => Branch(pts ++ List(pt2))
}
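
// combine flattens to the left: a Leaf is promoted to a two-element
// Branch, while an existing Branch has the new tree appended; in this
// way a chain of ~s below yields one flat Branch, for example
//
//   combine(combine(Leaf(a), Leaf(b)), Leaf(c))
//     ==> Branch(List(Leaf(a), Leaf(b), Leaf(c)))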
       
// parser combinators
abstract class Parser {
  def parse(ts: List[Token]): Set[(ParseTree, List[Token])]

  def parse_all(ts: List[Token]) : Set[ParseTree] =
    for ((head, tail) <- parse(ts); if (tail == Nil)) yield head

  def || (right : => Parser) : Parser = new AltParser(this, right)
  def ~ (right : => Parser) : Parser = new SeqParser(this, right)
}
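
// a parser maps a token list to the set of all ways it can consume a
// prefix of it: each pair holds the parse tree for the consumed prefix
// and the unconsumed rest; parse_all keeps only the parses that consume
// the whole input; the by-name (=>) arguments of || and ~ are needed so
// that the mutually recursive lazy vals E, T and F below can be defined
// without evaluating each other too early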
       
class AltParser(p: => Parser, q: => Parser) extends Parser {
  def parse(ts: List[Token]) = p.parse(ts) ++ q.parse(ts)
}

class SeqParser(p: => Parser, q: => Parser) extends Parser {
  def parse(ts: List[Token]) =
    for ((head1, tail1) <- p.parse(ts);
         (head2, tail2) <- q.parse(tail1)) yield (combine(head1, head2), tail2)
}

class ListParser(ps: => List[Parser]) extends Parser {
  def parse(ts: List[Token]) = ps match {
    case Nil => Set()
    case p::Nil => p.parse(ts)
    case p::rest =>
      for ((head1, tail1) <- p.parse(ts);
           (head2, tail2) <- new ListParser(rest).parse(tail1))
      yield (Branch(List(head1, head2)), tail2)
  }
}
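
// note that ListParser pairs up its results from the right: a list of
// n parsers produces nested two-element Branches rather than one flat
// n-element Branch, unlike a chain of ~s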
       
// parser accepting a single, given token
case class TokParser(tok: Token) extends Parser {
  def parse(ts: List[Token]) = ts match {
    case t::ts if (t == tok) => Set((Leaf(t), ts))
    case _ => Set()
  }
}

implicit def token2tparser(t: Token) : Parser = TokParser(t)
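
// the implicit conversion lets a plain token be used wherever a parser
// is expected, so one can write, for instance,
//
//   T_LPAREN ~ NumParser ~ T_RPAREN
//
// instead of TokParser(T_LPAREN) ~ NumParser ~ TokParser(T_RPAREN)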
       
// parsers accepting arbitrary identifier and number tokens
case object IdParser extends Parser {
  def parse(ts: List[Token]) = ts match {
    case T_ID(s)::ts => Set((Leaf(T_ID(s)), ts))
    case _ => Set()
  }
}

case object NumParser extends Parser {
  def parse(ts: List[Token]) = ts match {
    case T_NUM(s)::ts => Set((Leaf(T_NUM(s)), ts))
    case _ => Set()
  }
}
       
// grammar for arithmetic expressions
lazy val E: Parser = (T ~ T_OP("+") ~ E) || T  // start symbol
lazy val T: Parser = (F ~ T_OP("*") ~ T) || F
lazy val F: Parser = (T_LPAREN ~ E ~ T_RPAREN) || NumParser
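
// the grammar is right-recursive (E appears only to the right of +,
// T only to the right of *): a left-recursive rule such as E ::= E + T
// would make E.parse call itself on the same input and never terminate;
// the layering E -> T -> F also gives * higher precedence than +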
       
println(E.parse_all(tokenizer(lexing_rules, "1 + 2 + 3")))
       
// evaluator for the parse trees of E; it is deliberately partial and
// matches exactly the tree shapes the grammar above can produce: any
// other tree raises a MatchError
def eval(t: ParseTree) : Int = t match {
  case Leaf(T_NUM(n)) => n.toInt
  case Branch(List(t1, Leaf(T_OP("+")), t2)) => eval(t1) + eval(t2)
  case Branch(List(t1, Leaf(T_OP("*")), t2)) => eval(t1) * eval(t2)
  case Branch(List(Leaf(T_LPAREN), t, Leaf(T_RPAREN))) => eval(t)
}
       
// evaluating all complete parses; each expression below should have a
// unique parse tree, hence a one-element result set
(E.parse_all(tokenizer(lexing_rules, "1 + 2 + 3"))).map(eval(_))   // expected: Set(6)
(E.parse_all(tokenizer(lexing_rules, "1 + 2 * 3"))).map(eval(_))   // expected: Set(7)
(E.parse_all(tokenizer(lexing_rules, "(1 + 2) * 3"))).map(eval(_)) // expected: Set(9)
       
// grammar for if-expressions, with and without an else-branch
lazy val EXPR: Parser =
  new ListParser(List(T_IF, EXPR, T_THEN, EXPR)) ||
  new ListParser(List(T_IF, EXPR, T_THEN, EXPR, T_ELSE, EXPR)) ||
  IdParser
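
// this grammar has the classic dangling-else ambiguity: in the second
// example below the else-branch can attach to the inner or to the outer
// if, so parse_all should return two distinct parse trees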
       
println(EXPR.parse_all(tokenizer(lexing_rules, "if a then b else c")))
println(EXPR.parse_all(tokenizer(lexing_rules, "if a then if x then y else c")))