scala/S_grammar-token.scala
changeset 93 4794759139ea
parent 92 e85600529ca5
child 94 9ea667baf097
--- a/scala/S_grammar-token.scala	Sat Jun 15 09:11:11 2013 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,57 +0,0 @@
-//:load matcher.scala
-//:load parser3.scala
-
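-// the tokens of the grammar: here only the single token T_ONE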
-abstract class Token
-case object T_ONE extends Token
-
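-// the only lexing rule: the string "1" is lexed to the token T_ONE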
-val lexing_rules: List[Rule[Token]] =
-  List(("1", (s: List[Char]) => T_ONE))
-
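-// the tokenizer built from these rules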
-val T = Tokenizer(lexing_rules)
-
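-// a parser that accepts exactly the given token at the front of the token sequence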
-case class TokParser(tok: Token) extends Parser[List[Token], Token] {
-  def parse(ts: List[Token]) = ts match {
-    case t::ts if (t == tok) => Set((t, ts))
-    case _ => Set()
-  }
-}
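-// implicit conversion, so that tokens can be used directly as parsers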
-implicit def token2tokparser(t: Token): Parser[List[Token], Token] = TokParser(t)
-
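-// the parser for the empty string: always succeeds without consuming any input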
-case object EmpParser extends Parser[List[Token], String] {
-  def parse(ts: List[Token]) = Set(("", ts))
-}
-
-
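-// the grammar S ::= "1" S | ε; collects the parsed 1s into a string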
-lazy val Su: Parser[List[Token], String] =
-  (T_ONE ~ Su) ==> { case (_, y) => "1" + y } || EmpParser
-
-
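-// runs code i times and returns the average running time per run in seconds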
-def time_needed[T](i: Int, code: => T) = {
-  val start = System.nanoTime()
-  for (j <- 1 to i) code
-  val end = System.nanoTime()
-  (end - start)/(i * 1.0e9)
-}
-
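-// builds the token sequence for a string of i 1s and parses it with Su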
-def test(i: Int) = {
-  val result = Su.parse_all(T.fromString("1" * i))
-  //print(result.size + " ")
-}
-
-
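-// for input lengths 1, 51, 101, ..., 951, print the length and the time for one run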
-for (i <- 1 to 1000 by 50) {
-  print(i + " ")
-  print("%.5f".format(time_needed(1, test(i))))
-  print("\n")
-}
-