# HG changeset patch
# User Christian Urban
# Date 1525512660 -3600
# Node ID 352d15782d350bf8f5ff38e2a59e8ce7c285fe0b
# Parent  c6ba1d17aaf3220183c1767e2cbba218a192d475
updated

diff -r c6ba1d17aaf3 -r 352d15782d35 progs/token.scala
--- a/progs/token.scala	Thu Apr 05 02:33:30 2018 +0100
+++ b/progs/token.scala	Sat May 05 10:31:00 2018 +0100
@@ -5,20 +5,32 @@
case object ZERO extends Rexp
case object ONE extends Rexp
case class CHAR(c: Char) extends Rexp
-case class ALT(r1: Rexp, r2: Rexp) extends Rexp
+case class ALTS(rs: List[Rexp]) extends Rexp
case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
case class STAR(r: Rexp) extends Rexp
-case class RECD[A](f: String => A, r: Rexp) extends Rexp
+// ALT is now an abbreviation
+def ALT(r1: Rexp, r2: Rexp) = ALTS(List(r1, r2))
+
+// Proj is now used instead of Left and Right
abstract class Val
case object Empty extends Val
case class Chr(c: Char) extends Val
case class Sequ(v1: Val, v2: Val) extends Val
-case class Left(v: Val) extends Val
-case class Right(v: Val) extends Val
+case class Proj(n: Int, v: Val) extends Val
case class Stars(vs: List[Val]) extends Val
-case class Rec[A](f: String => A, v: Val) extends Val
-
+case class Rec(s: String, v: Val) extends Val
+
+// for manipulating projections
+def IncProj(m: Int, v: Val) = v match {
+  case Proj(n, v) => Proj(n + m, v)
+}
+
+def DecProj(m: Int, v: Val) = v match {
+  case Proj(n, v) => Proj(n - m, v)
+}
+
+
// some convenience for typing in regular expressions
def charlist2rexp(s : List[Char]): Rexp = s match {
  case Nil => ONE
@@ -39,19 +51,40 @@
  def % = STAR(s)
  def ~ (r: Rexp) = SEQ(s, r)
  def ~ (r: String) = SEQ(s, r)
-  def $ (r: Rexp) = RECD(s, r)
}

+// string of a regular expression - for testing purposes
+def string(r: Rexp) : String = r match {
+  case ZERO => "0"
+  case ONE => "1"
+  case CHAR(c) => c.toString
+  case ALTS(rs) => rs.map(string).mkString("[", "|", "]")
+  case SEQ(CHAR(c), CHAR(d)) => s"${c}${d}"
+  case SEQ(ONE, CHAR(c)) => s"1${c}"
+  case SEQ(r1, r2) => s"(${string(r1)} ~ ${string(r2)})"
+  case STAR(r) => s"(${string(r)})*"
+}
+
+// size of a regular expression - for testing purposes
+def size(r: Rexp) : Int = r match {
+  case ZERO => 1
+  case ONE => 1
+  case CHAR(_) => 1
+  case ALTS(rs) => 1 + rs.map(size).sum
+  case SEQ(r1, r2) => 1 + size(r1) + size(r2)
+  case STAR(r) => 1 + size(r)
+}
+
+
// nullable function: tests whether the regular
// expression can recognise the empty string
def nullable (r: Rexp) : Boolean = r match {
  case ZERO => false
  case ONE => true
  case CHAR(_) => false
-  case ALT(r1, r2) => nullable(r1) || nullable(r2)
+  case ALTS(rs) => rs.exists(nullable)
  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
  case STAR(_) => true
-  case RECD(_, r1) => nullable(r1)
}

// derivative of a regular expression w.r.t. a character
@@ -59,12 +92,11 @@
  case ZERO => ZERO
  case ONE => ZERO
  case CHAR(d) => if (c == d) ONE else ZERO
-  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
+  case ALTS(rs) => ALTS(rs.map(der(c, _)))
  case SEQ(r1, r2) =>
    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
    else SEQ(der(c, r1), r2)
  case STAR(r) => SEQ(der(c, r), STAR(r))
-  case RECD(_, r1) => der(c, r1)
}
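
// Illustrative sketch (not part of the original patch): with ALT as a
// two-element ALTS, the derivative simply maps der over all alternatives.
// Assuming the implicit string-to-Rexp conversions defined in this file,
// the hypothetical value der_ex below is ALTS(List(SEQ(ONE, CHAR('b')), ZERO)),
// which string prints as "[1b|0]"; it is not nullable, because the
// remaining input "b" still has to be consumed.
val der_ex = der('a', ALT("ab", "b"))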

// derivative w.r.t. a string (iterates der)
@@ -77,66 +109,74 @@
def flatten(v: Val) : String = v match {
  case Empty => ""
  case Chr(c) => c.toString
-  case Left(v) => flatten(v)
-  case Right(v) => flatten(v)
+  case Proj(_, v) => flatten(v)
  case Sequ(v1, v2) => flatten(v1) ++ flatten(v2)
  case Stars(vs) => vs.map(flatten).mkString
-  case Rec(_, v) => flatten(v)
-}
-
-// extracts an environment from a value
-def env[A](v: Val) : List[A] = v match {
-  case Empty => Nil
-  case Chr(c) => Nil
-  case Left(v) => env(v)
-  case Right(v) => env(v)
-  case Sequ(v1, v2) => env(v1) ::: env(v2)
-  case Stars(vs) => vs.flatMap(env)
-  case Rec(f, v) => ((f:String => A)(flatten(v)))::env(v)
-}
-
-// injection part
-def mkeps(r: Rexp) : Val = r match {
-  case ONE => Empty
-  case ALT(r1, r2) =>
-    if (nullable(r1)) Left(mkeps(r1)) else Right(mkeps(r2))
-  case SEQ(r1, r2) => Sequ(mkeps(r1), mkeps(r2))
-  case STAR(r) => Stars(Nil)
-  case RECD(x, r) => Rec(x, mkeps(r))
}

+// mkeps
+def mkeps(r: Rexp) : Val = r match {
+  case ONE => Empty
+  case ALTS(r1::rs) =>
+    if (nullable(r1)) Proj(0, mkeps(r1))
+    else IncProj(1, mkeps(ALTS(rs)))
+  case SEQ(r1, r2) => Sequ(mkeps(r1), mkeps(r2))
+  case STAR(r) => Stars(Nil)
+}
+
+// injection
def inj(r: Rexp, c: Char, v: Val) : Val = (r, v) match {
  case (STAR(r), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
  case (SEQ(r1, r2), Sequ(v1, v2)) => Sequ(inj(r1, c, v1), v2)
-  case (SEQ(r1, r2), Left(Sequ(v1, v2))) => Sequ(inj(r1, c, v1), v2)
-  case (SEQ(r1, r2), Right(v2)) => Sequ(mkeps(r1), inj(r2, c, v2))
-  case (ALT(r1, r2), Left(v1)) => Left(inj(r1, c, v1))
-  case (ALT(r1, r2), Right(v2)) => Right(inj(r2, c, v2))
+  case (SEQ(r1, r2), Proj(0, Sequ(v1, v2))) => Sequ(inj(r1, c, v1), v2)
+  case (SEQ(r1, r2), Proj(1, v2)) => Sequ(mkeps(r1), inj(r2, c, v2))
+  case (ALTS(rs), Proj(n, v1)) => Proj(n, inj(rs(n), c, v1))
  case (CHAR(d), Empty) => Chr(c)
-  case (RECD(x, r1), _) => Rec(x, inj(r1, c, v))
}

// main lexing function (produces a value)
+// - does not simplify
def lex(r: Rexp, s: List[Char]) : Val = s match {
-  case Nil => if (nullable(r)) mkeps(r)
-              else throw new Exception("Not matched")
+  case Nil => {
+    //println(s"Size of the last regex: ${size(r)}")
+    if (nullable(r)) mkeps(r) else throw new Exception("Not matched")
+  }
  case c::cs => inj(r, c, lex(der(c, r), cs))
}

def lexing(r: Rexp, s: String) : Val = lex(r, s.toList)

+lexing("a" | ZERO, "a")
+lexing(ZERO | "a", "a")
lexing(("ab" | "a") ~ ("b" | ONE), "ab")
+
+// removing duplicate regular expressions
+val unit = (ZERO, F_ERROR(_))
+
+def dups2(xs: List[(Rexp, Val => Val)],
+          acc: List[(Rexp, Val => Val)] = Nil) : List[(Rexp, Val => Val)] = xs match {
+  case Nil => acc
+  case (x, y)::xs =>
+    if (acc.map(_._1).contains(x)) dups2(xs, acc :+ unit)
+    else dups2(xs, acc :+ (x, y))
+}
+
+def dups(xs: List[(Rexp, Val => Val)]) : List[(Rexp, Val => Val)] = {
+  val out = dups2(xs)
+  //if (out != xs) {
+  //  println()
+  //  println(s"Input ${string(ALTS(xs.map(_._1)))}")
+  //  println(s"Output ${string(ALTS(out.map(_._1)))}")
+  //}
+  out
+}
+
+
// some "rectification" functions for simplification
def F_ID(v: Val): Val = v
-def F_RIGHT(f: Val => Val) = (v:Val) => Right(f(v))
-def F_LEFT(f: Val => Val) = (v:Val) => Left(f(v))
-def F_ALT(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
-  case Right(v) => Right(f2(v))
-  case Left(v) => Left(f1(v))
-}
def F_SEQ(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
  case Sequ(v1, v2) => Sequ(f1(v1), f2(v2))
}
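
// Illustrative sketch (not part of the original patch): on a nullable ALTS,
// mkeps picks the first nullable alternative and records its position via
// Proj and IncProj; the hypothetical value mkeps_ex below is Proj(1, Empty),
// since alternative 0 (ZERO) is not nullable and alternative 1 (ONE) is.
val mkeps_ex = mkeps(ALTS(List(ZERO, ONE, CHAR('a'))))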

@@ -144,22 +184,54 @@
  (v:Val) => Sequ(f1(Empty), f2(v))
def F_SEQ_Empty2(f1: Val => Val, f2: Val => Val) =
  (v:Val) => Sequ(f1(v), f2(Empty))
-def F_RECD(f: Val => Val) = (v:Val) => v match {
-  case Rec(x, v) => Rec(x, f(v))
+def F_ERROR(v: Val): Val = throw new Exception("error")
+def F_PRINT(v: Val): Val = {
+  println(s"value is ${v}")
+  throw new Exception("caught error")
}
-def F_ERROR(v: Val): Val = throw new Exception("error")
+
+def flats(rs: List[Rexp], seen: Set[Rexp]) : (List[Rexp], Val => Val) = {
+  //println(s"I am flats: ${string(ALTS(rs))}")
+  //println(s"The size of seen is ${seen.size}, ${seen.map(string)}")
+  rs match {
+    case Nil => (Nil, F_ERROR)
+    case r::rs1 if seen.contains(simp(r)._1) => {
+      //println(s" I remove ${string(r)}")
+      val (rs2, f) = flats(rs1, seen)
+      (rs2, (v:Val) => IncProj(1, f(v)))
+    }
+    case ZERO::rs1 => {
+      val (rs2, f) = flats(rs1, seen)
+      (rs2, (v:Val) => IncProj(1, f(v)))
+    }
+    case ALTS(rs0)::rs1 => {
+      val (rss, f1) = flats(rs0, seen)
+      val (rs2, f2) = flats(rs1, rss.toSet ++ seen)
+      (rss:::rs2, (v:Val) => v match {
+        case Proj(n, vn) =>
+          if (n < rss.length) Proj(0, f1(Proj(n, vn)))
+          else IncProj(1, f2(Proj(n - rss.length, vn)))
+      })
+    }
+    case r1::rs2 => {
+      val (r1s, f1) = simp(r1)
+      val (rs3, f2) = flats(rs2, seen + r1s)
+      (r1s::rs3, (v:Val) => v match {
+        case Proj(0, vn) => Proj(0, f1(vn))
+        case Proj(n, vn) => IncProj(1, f2(Proj(n - 1, vn)))
+      })
+    }
+}}

// simplification of regular expressions returning also a
// rectification function; no simplification under STAR
def simp(r: Rexp): (Rexp, Val => Val) = r match {
-  case ALT(r1, r2) => {
-    val (r1s, f1s) = simp(r1)
-    val (r2s, f2s) = simp(r2)
-    (r1s, r2s) match {
-      case (ZERO, _) => (r2s, F_RIGHT(f2s))
-      case (_, ZERO) => (r1s, F_LEFT(f1s))
-      case _ => if (r1s == r2s) (r1s, F_LEFT(f1s))
-                else (ALT (r1s, r2s), F_ALT(f1s, f2s))
+  case ALTS(rs) => {
+    val (rs_s, fs_s) = flats(rs, Set())
+    rs_s match {
+      case Nil => (ZERO, F_ERROR)
+      case r::Nil => (r, (v:Val) => fs_s(Proj(0, v)))
+      case rs_sd => (ALTS(rs_sd), fs_s)
    }
  }
  case SEQ(r1, r2) => {
@@ -170,134 +242,157 @@
      case (_, ZERO) => (ZERO, F_ERROR)
      case (ONE, _) => (r2s, F_SEQ_Empty1(f1s, f2s))
      case (_, ONE) => (r1s, F_SEQ_Empty2(f1s, f2s))
+      case (ALTS(rs), r2s) => (ALTS(rs.map(SEQ(_, r2s))),
+        (v:Val) => v match {
+          case Proj(n, Sequ(v1, v2)) => Sequ(f1s(Proj(n, v1)), f2s(v2))
+        }
+      )
      case _ => (SEQ(r1s,r2s), F_SEQ(f1s, f2s))
    }
  }
-  case RECD(x, r1) => {
-    val (r1s, f1s) = simp(r1)
-    (RECD(x, r1s), F_RECD(f1s))
-  }
  case r => (r, F_ID)
}

def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
-  case Nil => if (nullable(r)) mkeps(r) else throw new Exception("Not matched")
+  case Nil => {
+    //println(s"Size of the last regex: ${size(r)}")
+    //println(s"${string(r)}")
+    if (nullable(r)) mkeps(r) else throw new Exception("Not matched")
+  }
  case c::cs => {
-    val (r_simp, f_simp) = simp(der(c, r))
-    inj(r, c, f_simp(lex_simp(r_simp, cs)))
+    val rd = der(c, r)
+    val (r_simp, f_simp) = simp(rd)
+    //println(s"BEFORE ${string(rd)}")
+    //println(s"AFTER ${string(r_simp)}")
+    val rec = lex_simp(r_simp, cs)
+    inj(r, c, f_simp(rec))
  }
}

def lexing_simp(r: Rexp, s: String) : Val = lex_simp(r, s.toList)
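
// Illustrative sketch (not part of the original patch): simp uses flats to
// flatten nested alternatives, drop ZEROs and duplicates, and returns a
// rectification function mapping values of the simplified regex back to the
// original one; the hypothetical value simp_ex below is expected to be
// ALTS(List(CHAR('a'), CHAR('b'))).
val (simp_ex, simp_f) = simp(ALTS(List(ZERO, "a", ALTS(List("b", "a")))))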

-lexing_simp(("a" | "ab") ~ ("b" | ""), "ab")
-// Lexing Rules for a Small While Language
+lexing_simp("ab" | "aa", "ab")
+lexing_simp("ab" | "aa", "aa")
-def PLUS(r: Rexp) = r ~ r.%
+lexing(STAR("a" | "aa"), "aaaaa")
+lexing_simp(STAR("a" | "aa"), "aaaaa")
-val SYM = "a" | "b" | "c" | "d" | "e" | "f" | "g" | "h" | "i" | "j" | "k" | "l" | "m" | "n" | "o" | "p" | "q" | "r" | "s" | "t" | "u" | "v" | "w" | "x" | "y" | "z"
-val DIGIT = "0" | "1" | "2" | "3" | "4" | "5" | "6" | "7" | "8" | "9"
-val ID = SYM ~ (SYM | DIGIT).%
-val NUM = PLUS(DIGIT)
-val KEYWORD : Rexp = "skip" | "while" | "do" | "if" | "then" | "else" | "read" | "write" | "true" | "false"
-val SEMI: Rexp = ";"
-val OP: Rexp = ":=" | "==" | "-" | "+" | "*" | "!=" | "<" | ">" | "<=" | ">=" | "%" | "/"
-val WHITESPACE = PLUS(" " | "\n" | "\t")
-val RPAREN: Rexp = ")"
-val LPAREN: Rexp = "("
-val BEGIN: Rexp = "{"
-val END: Rexp = "}"
-val STRING: Rexp = "\"" ~ SYM.% ~ "\""
+lexing(STAR("a" | "aa"), "aaaaaaaaaaa")
+lexing_simp(STAR("a" | "aa"), "aaaaaaaaaaa")
+
+lexing_simp(STAR("a" | "aa"), "a" * 2)
+lexing_simp(STAR("a" | "aa"), "a" * 3)
+lexing_simp(STAR("a" | "aa"), "a" * 4)
-val WHILE_REGS = (("k" $ KEYWORD) |
-                  ("i" $ ID) |
-                  ("o" $ OP) |
-                  ("n" $ NUM) |
-                  ("s" $ SEMI) |
-                  ("str" $ STRING) |
-                  ("p" $ (LPAREN | RPAREN)) |
-                  ("b" $ (BEGIN | END)) |
-                  ("w" $ WHITESPACE)).%
+lexing_simp(STAR("a" | "aa"), "a" * 20)
+lexing_simp(STAR("a" | "aa"), "a" * 2000)
+
+lexing(ALTS(List("aa", "aa", "aa", "ab", "ab")), "ab")
+lexing_simp(ALTS(List("aa", "aa", "aa", "ab", "ab")), "ab")
+
+lexing(ALTS(List(("aa" | "ab" | "aa"), "aa", "ab", "ab")), "ab")
+lexing_simp(ALTS(List(("aa" | "ab" | "aa"), "aa", "ab", "ab")), "ab")
+
+lexing(ALTS(List(ZERO, ZERO, ONE, "aa", ZERO, "aa", "aa")), "aa")
+lexing_simp(ALTS(List(ZERO, ZERO, ONE, "aa", ZERO, "aa", "aa")), "aa")
+
+lexing_simp(ONE | ZERO, "")
+lexing_simp(ZERO | ONE, "")
+
+lexing("a" | ZERO, "a")
+lexing_simp("a" | ZERO, "a")
+lexing(ZERO | "a", "a")
+lexing_simp(ZERO | "a", "a")
+
+lexing(ALTS(List(ZERO, ZERO, ONE, "a", ZERO, "a")), "a")
+lexing_simp(ALTS(List(ZERO, ZERO, ONE, "a", ZERO, "a")), "a")
+lexing(ALTS(List("a")), "a")
+lexing_simp(ALTS(List("a")), "a")
+
+lexing_simp(("a" | ZERO) | ZERO, "a")
+lexing_simp("a" | (ZERO | ZERO), "a")
+lexing_simp(ZERO | ("a" | ZERO), "a")
+
+
+lexing_simp("abc", "abc")
+
+lexing_simp("abc" | ONE, "abc")
+
+lexing(("a" | "ab") ~ ("b" | ""), "ab")
+lexing_simp(("a" | "ab") ~ ("b" | ""), "ab")
+lexing_simp(("ba" | "c" | "ab"), "ab")
+
+lexing(ALTS(List(ALTS(Nil), "c", "ab")), "ab")
+lexing_simp(ALTS(List(ALTS(Nil), "c", "ab")), "ab")
+
+lexing(ALTS(List(ALTS("ab"::Nil), "c", "ab")), "ab")
+lexing_simp(ALTS(List(ALTS("ab"::Nil), "c", "ab")), "ab")
+
+lexing(ALTS(List(ALTS(List("a","ab")), "c", "ab")), "ab")
+lexing_simp(ALTS(List(ALTS(List("a","ab")), "c", "ab")), "ab")
+
+lexing(ALTS(List(ALTS(List("ab","a")), "c", "ab")), "ab")
+lexing_simp(ALTS(List(ALTS(List("ab","a")), "c", "ab")), "ab")
+
+lexing(ALTS(List(ALTS(List("ab","a","a")), "c", "ab")), "ab")
+lexing_simp(ALTS(List(ALTS(List("ab","a","a")), "c", "ab")), "ab")
+
+lexing(ALTS(List(ALTS(List("a","ab","a")), "c", "ab")), "ab")
+lexing_simp(ALTS(List(ALTS(List("a","ab","a")), "c", "ab")), "ab")
+
+lexing(ALTS(List(ALTS(List("b","a","ab")), "c", "ab")), "ab")
+lexing_simp(ALTS(List(ALTS(List("b","a","ab")), "c", "ab")), "ab")
+
+
+lexing_simp(ALTS(List("ba", "c", "ab")), "ab")
+
+lexing(ALTS(List("a", "ab", "a")), "ab")
+lexing_simp(ALTS(List("a", "ab", "a")), "ab")
+
+lexing(STAR("a" | "aa"), "aa")
+lexing_simp(STAR("a" | "aa"), "aa")
-// extracts an environment from a value
-def env(v: Val) : List[Token] = v match {
-  case Empty => Nil
-  case Chr(c) => Nil
-  case Left(v) => env(v)
-  case Right(v) => env(v)
-  case Sequ(v1, v2) => env(v1) ::: env(v2)
-  case Stars(vs) => vs.flatMap(env)
-  case Rec(f, v) => (f(flatten(v)))::env(v)
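
// Illustrative helper (not part of the original patch) capturing the pattern
// of the test calls above: on strings that match, the simplifying and plain
// lexers are expected to return exactly the same value,
// e.g. agree(STAR("a" | "aa"), "aaaaa") should be true.
def agree(r: Rexp, s: String) : Boolean =
  lexing(r, s) == lexing_simp(r, s)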

+def enum(n: Int, s: String) : Set[Rexp] = n match {
+  case 0 => Set(ZERO, ONE) ++ s.toSet.map(CHAR)
+  case n => {
+    val rs = enum(n - 1, s)
+    rs ++
+    (for (r1 <- rs; r2 <- rs) yield ALT(r1, r2)) ++
+    (for (r1 <- rs; r2 <- rs) yield SEQ(r1, r2)) ++
+    (for (r1 <- rs) yield STAR(r1))
+  }
}

-// Testing
-//============
-
-def time[T](code: => T) = {
-  val start = System.nanoTime()
-  val result = code
-  val end = System.nanoTime()
-  println((end - start)/1.0e9)
-  result
+def strs(n: Int, cs: String) : Set[String] = {
+  if (n == 0) Set("")
+  else {
+    val ss = strs(n - 1, cs)
+    ss ++
+    (for (s <- ss; c <- cs.toList) yield c + s)
+  }
}

-val r1 = ("a" | "ab") ~ ("bcd" | "c")
-println(lexing(r1, "abcd"))
+import scala.util.Try
-val r2 = ("" | "a") ~ ("ab" | "b")
-println(lexing(r2, "ab"))
+def tests(n: Int, m: Int, s: String) = {
+  val rs = enum(n, s)
+  val ss = strs(m, s)
+  println(s"cases generated: ${rs.size} regexes and ${ss.size} strings")
+  for (r1 <- rs.par; s1 <- ss.par) yield {
+    val res1 = Try(Some(lexing(r1, s1))).getOrElse(None)
+    val res2 = Try(Some(lexing_simp(r1, s1))).getOrElse(None)
+    if (res1 != res2) println(s"Disagree on ${r1} and ${s1}")
+    if (res1 != res2) Some((r1, s1)) else None
+  }
+}
+
+println("Testing")
+println(tests(2,7,"abc"))

-// Two Simple While Tests
-//========================
-println("prog0 test")
-val prog0 = """read n"""
-println(env(lexing_simp(WHILE_REGS, prog0)))
-println("prog1 test")
-val prog1 = """read n; write (n)"""
-println(env(lexing_simp(WHILE_REGS, prog1)))
-
-// Bigger Test
-//=============
-val prog2 = """
-write "fib";
-read n;
-minus1 := 0;
-minus2 := 1;
-while n > 0 do {
-  temp := minus2;
-  minus2 := minus1 + minus2;
-  minus1 := temp;
-  n := n - 1
-};
-write "result";
-write minus2
-"""
-println("Tokens")
-println(env(lexing_simp(WHILE_REGS, prog2)))
-println(env(lexing_simp(WHILE_REGS, prog2)).filterNot{_._1 == "w"}.mkString("\n"))
-// some more timing tests with
-// i copies of the program
-for (i <- 1 to 21 by 10) {
-  print(i.toString + ": ")
-  time(lexing_simp(WHILE_REGS, prog2 * i))
-}
abstract class Token
case class KeyToken(s: String) extends Token
case class IdToken(s: String) extends Token

// List[(String, String)] => List[Token]
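
// Illustrative sketch (not part of the original patch): iterating der
// followed by simp keeps the derivatives small, which is what makes lex_simp
// feasible on inputs like "a" * 2000 above; for instance,
// size(ders_simp(STAR("a" | "aa"), ("a" * 20).toList)) is expected to stay
// small, whereas the size of the unsimplified derivative grows with the input.
def ders_simp(r: Rexp, s: List[Char]) : Rexp = s match {
  case Nil => r
  case c::cs => ders_simp(simp(der(c, r))._1, cs)
}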