Term_grammar.scala
changeset 64 2d625418c011
equal deleted inserted replaced
63:dff4b062a8a9 64:2d625418c011
       
//:load matcher.scala
//:load parser3.scala

// some regular expressions
// NOTE(review): RANGE/PLUS/ALT/SEQ/STAR/CHAR are presumably regular-expression
// constructors from matcher.scala (loaded above) — not visible in this file.

// a single lower-case letter; an identifier is one or more letters
val LETTER = RANGE("abcdefghijklmnopqrstuvwxyz")
val ID = PLUS(LETTER)

// a number is either the single digit "0", or a non-zero digit followed by
// any digits — this disallows leading zeros such as "007"
val DIGIT = RANGE("0123456789")
val NONZERODIGIT = RANGE("123456789")
val NUMBER = ALT(SEQ(NONZERODIGIT, STAR(DIGIT)), "0")

// parentheses for grouping sub-expressions
val LPAREN = CHAR('(')
val RPAREN = CHAR(')')

// one or more blanks/newlines; the single-character binary operators
val WHITESPACE = PLUS(RANGE(" \n"))
val OPS = RANGE("+-*")
       
    18 // for classifying the strings that have been lexed
       
    19 abstract class Token
       
    20 
       
    21 case object T_WHITESPACE extends Token
       
    22 case class T_NUM(s: String) extends Token
       
    23 case class T_OP(s: String) extends Token
       
    24 case object T_LPAREN extends Token
       
    25 case object T_RPAREN extends Token
       
    26 
       
    27 
       
    28 // lexing rules for arithmetic expressions
       
    29 val lexing_rules: List[Rule[Token]]= 
       
    30   List((NUMBER, (s) => T_NUM(s.mkString)),
       
    31        (WHITESPACE, (s) => T_WHITESPACE),
       
    32        (LPAREN, (s) => T_LPAREN),
       
    33        (RPAREN, (s) => T_RPAREN),
       
    34        (OPS, (s) => T_OP(s.mkString)))
       
    35 
       
    36 val Tk = Tokenizer(lexing_rules, List(T_WHITESPACE))
       
    37 
       
    38 
       
    39 case class TokParser(tok: Token) extends Parser[List[Token], Token] {
       
    40   def parse(ts: List[Token]) = ts match {
       
    41     case t::ts if (t == tok) => Set((t, ts)) 
       
    42     case _ => Set ()
       
    43   }
       
    44 }
       
    45 implicit def token2tparser(t: Token) = TokParser(t)
       
    46 
       
    47 case object NumParser extends Parser[List[Token], Int] {
       
    48   def parse(ts: List[Token]) = ts match {
       
    49     case T_NUM(s)::ts => Set((s.toInt, ts)) 
       
    50     case _ => Set ()
       
    51   }
       
    52 }
       
    53 
       
// Grammar (right-recursive so the top-down combinators terminate):
//   E ::= T "+" E | T          addition, lowest precedence
//   T ::= F "*" T | F          multiplication binds tighter than "+"
//   F ::= "(" E ")" | number
// `==>` applies the semantic action to the parse result; `~>`/`<~` keep only
// the right/left side, discarding the parentheses tokens.
// NOTE(review): OPS lexes '-' as well, but no production here consumes
// T_OP("-"), so any input containing '-' yields no parse — confirm intended.
lazy val E: Parser[List[Token], Int] = (T ~ T_OP("+") ~ E) ==> { case ((x, y), z) => x + z } || T  
lazy val T: Parser[List[Token], Int] = (F ~ T_OP("*") ~ T) ==> { case ((x, y), z) => x * z } || F
lazy val F: Parser[List[Token], Int] = (T_LPAREN ~> E <~ T_RPAREN) || NumParser
       
    57    
       
    58 println(E.parse_all(Tk.fromString("1 + 2 + 3")))
       
    59 println(E.parse_all(Tk.fromString("1 + 2 * 3")))
       
    60 println(E.parse_all(Tk.fromString("(1 + 2) * 3")))
       
    61 println(E.parse_all(Tk.fromString("(14 + 2) * (3 + 2)")))
       
    62