//:load matcher.scala
//:load parser3.scala
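
// The loaded files are assumed to provide the Rule and Tokenizer
// definitions and a Parser class with the combinators ~ (sequence),
// ==> (semantic action) and || (alternative) used below.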

// The tokens: this toy language has a single token, for the digit 1.
abstract class Token
case object T_ONE extends Token

// A single lexing rule: the string "1" is lexed to the token T_ONE.
val lexing_rules : List[Rule[Token]] =
  List(("1", (s: List[Char]) => T_ONE))

val T = Tokenizer(lexing_rules)
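
// T.fromString presumably runs the lexer from matcher.scala over a
// string, so "111" would be lexed to List(T_ONE, T_ONE, T_ONE).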

// A parser that accepts a single given token at the head of the token
// list; a parse result is a set of (value, remaining input) pairs, so
// the empty set signals failure.
case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t::rest if (t == tok) => Set((t, rest))
    case _ => Set()
  }
}
implicit def token2tokparser(t: Token) : Parser[List[Token], Token] = TokParser(t)
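
// The implicit conversion lets a token such as T_ONE be used directly
// where a parser is expected, as in T_ONE ~ Su below.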

// A parser for the empty string: it consumes no tokens and succeeds.
case object EmpParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = Set(("", ts))
}

// The grammar S -> "1" S | epsilon; the ==> action flattens each
// parse tree into a string of 1s.
lazy val Su: Parser[List[Token], String] =
  (T_ONE ~ Su) ==> { case (x, y) => "1" + y } || EmpParser
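
// For instance, Su.parse_all(T.fromString("11")) should evaluate to
// Set("11"), assuming parse_all (from parser3.scala) keeps only the
// parses that consume the entire input.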

// Runs the code i times and returns the average time per run in seconds.
def time_needed[T](i: Int, code: => T) = {
  val start = System.nanoTime()
  for (j <- 1 to i) code
  val end = System.nanoTime()
  (end - start)/(i * 1.0e9)
}
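
// For example, time_needed(10, test(100)) would average ten runs of
// parsing a string of a hundred 1s.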

// Parses a string of i ones; the result is discarded because only
// the timing is of interest.
def test(i: Int) = {
  val result = Su.parse_all(T.fromString("1" * i))
  //print(result.size + " ")
}

// Prints, for inputs of increasing length, the length and the time
// (in seconds) needed to parse them, one pair per line.
for (i <- 1 to 1000 by 50) {
  print(i + " ")
  print("%.5f".format(time_needed(1, test(i))))
  print("\n")
}