//:load matcher.scala
//:load parser3.scala
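// Measures how long the parser combinators need to parse strings of 1s
// of increasing length; assumes matcher.scala (Rule, Tokenizer) and
// parser3.scala (the parser combinators) have been loaded.

// tokens: the language consists of a single token, T_ONE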
abstract class Token
case object T_ONE extends Token
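// lexing rule: the string "1" is turned into the token T_ONE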
val lexing_rules: List[Rule[Token]] =
  List(("1", (s: List[Char]) => T_ONE))
val T = Tokenizer(lexing_rules)
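// a parser that accepts exactly the given token at the head of the input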
case class TokParser(tok: Token) extends Parser[List[Token], Token] {
  def parse(ts: List[Token]) = ts match {
    case t :: ts if t == tok => Set((t, ts))
    case _ => Set()
  }
}
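// implicit conversion: lets a Token be written directly in grammar rules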
implicit def token2tokparser(t: Token): TokParser = TokParser(t)
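// parser for the empty string: always succeeds, consuming no tokens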
case object EmpParser extends Parser[List[Token], String] {
  def parse(ts: List[Token]) = Set(("", ts))
}
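// grammar: Su -> "1" Su | "", collecting the recognized 1s into a string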
lazy val Su: Parser[List[Token], String] =
  ((T_ONE ~ Su) ==> { case (x, y) => "1" + y }) || EmpParser
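// average time (in seconds) of i evaluations of code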
def time_needed[T](i: Int, code: => T) = {
  val start = System.nanoTime()
  for (j <- 1 to i) code
  val end = System.nanoTime()
  (end - start) / (i * 1.0e9)
}
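// parse a string of i 1s with Su (the result itself is discarded)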
def test(i: Int) = {
  val result = Su.parse_all(T.fromString("1" * i))
  //print(result.size + " ")
}
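// print the parse time for inputs of length 1, 51, 101, ..., 951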
for (i <- 1 to 1000 by 50) {
  print(i + " ")
  print("%.5f".format(time_needed(1, test(i))))
  print("\n")
}