# HG changeset patch
# User Christian Urban
# Date 1569969625 -3600
# Node ID 064afa8fc1d91c8b380fabac281be70fc467477e
# Parent 1062a9512e7999091a93abc128716ec43e65c3f5
updated

diff -r 1062a9512e79 -r 064afa8fc1d9 progs/detokenise.scala
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/progs/detokenise.scala	Tue Oct 01 23:40:25 2019 +0100
@@ -0,0 +1,31 @@
+// A simple de-tokeniser for the tokens produced by tokenise.scala
+//==================================================
+
+
+object Delexer {
+
+import java.io._
+
+abstract class Token extends Serializable
+case object T_SEMI extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case class T_ID(s: String) extends Token
+case class T_OP(s: String) extends Token
+case class T_NUM(n: Int) extends Token
+case class T_KWD(s: String) extends Token
+case class T_STR(s: String) extends Token
+
+def deserialise[T](fname: String) : T = {
+  val in = new ObjectInputStream(new FileInputStream(fname))
+  val data = in.readObject.asInstanceOf[T]
+  in.close
+  data
+}
+
+def main(args: Array[String]) = {
+  println("TEST\n" ++ deserialise[List[Token]]("/tmp/nflx").mkString)
+}
+
+
+}
\ No newline at end of file
diff -r 1062a9512e79 -r 064afa8fc1d9 progs/lexer.scala
--- a/progs/lexer.scala	Tue Oct 01 15:00:09 2019 +0100
+++ b/progs/lexer.scala	Tue Oct 01 23:40:25 2019 +0100
@@ -1,8 +1,11 @@
-// A Simple Lexer according to Sulzmann & Lu
+// A simple lexer inspired by the work of Sulzmann & Lu
+//==================================================
+
 
 import scala.language.implicitConversions
 import scala.language.reflectiveCalls
 
+// regular expressions including records
 abstract class Rexp
 case object ZERO extends Rexp
 case object ONE extends Rexp
@@ -11,7 +14,8 @@
 case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
 case class STAR(r: Rexp) extends Rexp
 case class RECD(x: String, r: Rexp) extends Rexp
-
+
+// values
 abstract class Val
 case object Empty extends Val
 case class Chr(c: Char) extends Val
@@ -45,12 +49,7 @@
   def $ (r: Rexp) = RECD(s, r)
 }
 
-// A test for more conveninet syntax
-val re : Rexp = ("ab" | "a") ~ ("b" | ONE)
-
-// the nullable function: tests whether the regular
-// expression can recognise the empty string
-def nullable (r: Rexp) : Boolean = r match {
+def nullable(r: Rexp) : Boolean = r match {
   case ZERO => false
   case ONE => true
   case CHAR(_) => false
@@ -60,8 +59,7 @@
   case RECD(_, r1) => nullable(r1)
 }
 
-// the derivative of a regular expression w.r.t. a character
-def der (c: Char, r: Rexp) : Rexp = r match {
+def der(c: Char, r: Rexp) : Rexp = r match {
   case ZERO => ZERO
   case ONE => ZERO
   case CHAR(d) => if (c == d) ONE else ZERO
@@ -73,11 +71,6 @@
   case RECD(_, r1) => der(c, r1)
 }
 
-// the derivative w.r.t. a string (iterates der)
-def ders (s: List[Char], r: Rexp) : Rexp = s match {
-  case Nil => r
-  case c::s => ders(s, der(c, r))
-}
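+// editor's sketch (not in the original patch): the derivatives can be used
+// for matching directly; this reinstates the removed example regex and
+// checks it against "ab" (accepted) and "aa" (rejected); the names
+// re_ab, re_ab_test1 and re_ab_test2 are illustrative only
+val re_ab : Rexp = ("ab" | "a") ~ ("b" | ONE)
+val re_ab_test1 = nullable(der('b', der('a', re_ab)))   // true: "ab" is accepted
+val re_ab_test2 = nullable(der('a', der('a', re_ab)))   // false: "aa" is rejected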
 
 // extracts a string from value
 def flatten(v: Val) : String = v match {
@@ -90,8 +83,9 @@
   case Rec(_, v) => flatten(v)
 }
 
+
 // extracts an environment from a value;
-// used for lexing a string
+// used for tokenising a string
 def env(v: Val) : List[(String, String)] = v match {
   case Empty => Nil
   case Chr(c) => Nil
@@ -102,10 +96,8 @@
   case Rec(x, v) => (x, flatten(v))::env(v)
 }
 
-// The Injection Part of the Lexer
+// The Injection Part of the lexer
 
-// calculates a value for how a nullable regex
-// matches the empty string
 def mkeps(r: Rexp) : Val = r match {
   case ONE => Empty
   case ALT(r1, r2) =>
@@ -115,7 +107,6 @@
   case RECD(x, r) => Rec(x, mkeps(r))
 }
 
-// injects back a character into a value
def inj(r: Rexp, c: Char, v: Val) : Val = (r, v) match {
   case (STAR(r), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
   case (SEQ(r1, r2), Sequ(v1, v2)) => Sequ(inj(r1, c, v1), v2)
@@ -127,19 +118,6 @@
   case (RECD(x, r1), _) => Rec(x, inj(r1, c, v))
 }
 
-// the main lexing function (produces a value)
-def lex(r: Rexp, s: List[Char]) : Val = s match {
-  case Nil => if (nullable(r)) mkeps(r)
-              else throw new Exception("Not matched")
-  case c::cs => inj(r, c, lex(der(c, r), cs))
-}
-
-def lexing(r: Rexp, s: String) : Val = lex(r, s.toList)
-
-// a simple test for extracting an environment
-val re1 : Rexp = ("first" $ ("a" | "ab")) ~ ("second" $ ("b" | ONE))
-env(lexing(re1, "ab"))
-
 // some "rectification" functions for simplification
 def F_ID(v: Val): Val = v
 def F_RIGHT(f: Val => Val) = (v:Val) => Right(f(v))
@@ -160,8 +138,6 @@
 }
 def F_ERROR(v: Val): Val = throw new Exception("error")
 
-// simplification of regular expressions returns now also
-// an rectification function; no simplification under STAR
 def simp(r: Rexp): (Rexp, Val => Val) = r match {
   case ALT(r1, r2) => {
     val (r1s, f1s) = simp(r1)
@@ -193,33 +169,39 @@
 
 // lexing functions including simplification
 def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
-  case Nil => if (nullable(r)) mkeps(r) else throw new Exception("Not matched")
+  case Nil => if (nullable(r)) mkeps(r) else
+    { throw new Exception("lexing error") }
   case c::cs => {
     val (r_simp, f_simp) = simp(der(c, r))
     inj(r, c, f_simp(lex_simp(r_simp, cs)))
   }
 }
 
-def lexing_simp(r: Rexp, s: String) : Val = lex_simp(r, s.toList)
+def lexing_simp(r: Rexp, s: String) =
+  env(lex_simp(r, s.toList))
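+
+// editor's sketch (adapting the test removed below to lexing_simp):
+// records tag submatches and POSIX lexing prefers the longer first
+// submatch, so re1_test should be List((first,ab), (second,)),
+// i.e. "first" matched "ab" and "second" the empty string
+val re1 : Rexp = ("first" $ ("a" | "ab")) ~ ("second" $ ("b" | ONE))
+val re1_test = lexing_simp(re1, "ab")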
"*" | "!=" | "<" | ">" val WHITESPACE = PLUS(" " | "\n" | "\t") -val RPAREN: Rexp = ")" -val LPAREN: Rexp = "(" -val BEGIN: Rexp = "{" -val END: Rexp = "}" +val RPAREN: Rexp = "{" +val LPAREN: Rexp = "}" val STRING: Rexp = "\"" ~ SYM.% ~ "\"" @@ -230,45 +212,37 @@ ("s" $ SEMI) | ("str" $ STRING) | ("p" $ (LPAREN | RPAREN)) | - ("b" $ (BEGIN | END)) | ("w" $ WHITESPACE)).% -// Testing -//============ - -def time[T](code: => T) = { - val start = System.nanoTime() - val result = code - val end = System.nanoTime() - println((end - start)/1.0e9) - result -} - -val r1 = ("a" | "ab") ~ ("bcd" | "c") -println(lexing(r1, "abcd")) - -val r2 = ("" | "a") ~ ("ab" | "b") -println(lexing(r2, "ab")) - // Two Simple While Tests //======================== -println("prog0 test") -val prog0 = """read if""" -println(env(lexing_simp(WHILE_REGS, prog0))) +println("test: read n") -println("prog1 test") +val prog0 = """read n""" +println(lexing_simp(WHILE_REGS, prog0)) -val prog1 = """read n; write (n)""" -println(env(lexing_simp(WHILE_REGS, prog1))) +println("test: read n; write n ") + +val prog1 = """read n; write n""" +println(lexing_simp(WHILE_REGS, prog1)) -// Bigger Test -//============= +// Bigger Tests +//============== + +// escapes strings and prints them out as "", "\n" and so on +def esc(raw: String): String = { + import scala.reflect.runtime.universe._ + Literal(Constant(raw)).toString +} + +def escape(tks: List[(String, String)]) = + tks.map{ case (s1, s2) => (s1, esc(s2))} val prog2 = """ -write "fib"; +write "Fib"; read n; minus1 := 0; minus2 := 1; @@ -278,36 +252,33 @@ minus1 := temp; n := n - 1 }; -write "result"; -write minus2 -""" - -println("Tokens") -println(env(lexing_simp(WHILE_REGS, prog2))) -println(env(lexing_simp(WHILE_REGS, prog2)).filterNot{_._1 == "w"}.mkString("\n")) - -// some more timing tests with -// i copies of the program - -for (i <- 0 to 20 by 10) { - print(i.toString + ": ") - time(lexing_simp(WHILE_REGS, prog2 * i)) -} - - -val fib = """ -write "Fib"; -read n; -minus1 := 0; -minus2 := 1; -while n > 0 do { -temp := minus2; -minus2 := minus1 + minus2; -minus1 := temp; -n := n - 1 -}; write "Result"; write minus2 """ -println(env(lexing_simp(WHILE_REGS, prog2)).filterNot{_._1 == "w"}) +println("lexing Fib") +println(escape(lexing_simp(WHILE_REGS, prog2)).mkString("\n")) + + + +val prog3 = """ +start := 1000; +x := start; +y := start; +z := start; +while 0 < x do { + while 0 < y do { + while 0 < z do { + z := z - 1 + }; + z := start; + y := y - 1 + }; + y := start; + x := x - 1 +} +""" + +println("lexing Loops") +println(escape(lexing_simp(WHILE_REGS, prog3)).mkString("\n")) + diff -r 1062a9512e79 -r 064afa8fc1d9 progs/tokenise.scala --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/progs/tokenise.scala Tue Oct 01 23:40:25 2019 +0100 @@ -0,0 +1,302 @@ +// A simple lexer inspired by work of Sulzmann & Lu +//================================================== + + +object Lexer { + +import scala.language.implicitConversions +import scala.language.reflectiveCalls + +// regular expressions including records +abstract class Rexp +case object ZERO extends Rexp +case object ONE extends Rexp +case class CHAR(c: Char) extends Rexp +case class ALT(r1: Rexp, r2: Rexp) extends Rexp +case class SEQ(r1: Rexp, r2: Rexp) extends Rexp +case class STAR(r: Rexp) extends Rexp +case class RECD(x: String, r: Rexp) extends Rexp + +// values +abstract class Val +case object Empty extends Val +case class Chr(c: Char) extends Val +case class Sequ(v1: Val, v2: Val) extends Val +case class Left(v: Val) 
diff -r 1062a9512e79 -r 064afa8fc1d9 progs/tokenise.scala
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/progs/tokenise.scala	Tue Oct 01 23:40:25 2019 +0100
@@ -0,0 +1,302 @@
+// A simple lexer inspired by the work of Sulzmann & Lu
+//==================================================
+
+
+object Lexer {
+
+import scala.language.implicitConversions
+import scala.language.reflectiveCalls
+
+// regular expressions including records
+abstract class Rexp
+case object ZERO extends Rexp
+case object ONE extends Rexp
+case class CHAR(c: Char) extends Rexp
+case class ALT(r1: Rexp, r2: Rexp) extends Rexp
+case class SEQ(r1: Rexp, r2: Rexp) extends Rexp
+case class STAR(r: Rexp) extends Rexp
+case class RECD(x: String, r: Rexp) extends Rexp
+
+// values
+abstract class Val
+case object Empty extends Val
+case class Chr(c: Char) extends Val
+case class Sequ(v1: Val, v2: Val) extends Val
+case class Left(v: Val) extends Val
+case class Right(v: Val) extends Val
+case class Stars(vs: List[Val]) extends Val
+case class Rec(x: String, v: Val) extends Val
+
+// some convenience for typing in regular expressions
+def charlist2rexp(s : List[Char]): Rexp = s match {
+  case Nil => ONE
+  case c::Nil => CHAR(c)
+  case c::s => SEQ(CHAR(c), charlist2rexp(s))
+}
+implicit def string2rexp(s : String) : Rexp =
+  charlist2rexp(s.toList)
+
+implicit def RexpOps(r: Rexp) = new {
+  def | (s: Rexp) = ALT(r, s)
+  def % = STAR(r)
+  def ~ (s: Rexp) = SEQ(r, s)
+}
+
+implicit def stringOps(s: String) = new {
+  def | (r: Rexp) = ALT(s, r)
+  def | (r: String) = ALT(s, r)
+  def % = STAR(s)
+  def ~ (r: Rexp) = SEQ(s, r)
+  def ~ (r: String) = SEQ(s, r)
+  def $ (r: Rexp) = RECD(s, r)
+}
+
+def nullable(r: Rexp) : Boolean = r match {
+  case ZERO => false
+  case ONE => true
+  case CHAR(_) => false
+  case ALT(r1, r2) => nullable(r1) || nullable(r2)
+  case SEQ(r1, r2) => nullable(r1) && nullable(r2)
+  case STAR(_) => true
+  case RECD(_, r1) => nullable(r1)
+}
+
+def der(c: Char, r: Rexp) : Rexp = r match {
+  case ZERO => ZERO
+  case ONE => ZERO
+  case CHAR(d) => if (c == d) ONE else ZERO
+  case ALT(r1, r2) => ALT(der(c, r1), der(c, r2))
+  case SEQ(r1, r2) =>
+    if (nullable(r1)) ALT(SEQ(der(c, r1), r2), der(c, r2))
+    else SEQ(der(c, r1), r2)
+  case STAR(r) => SEQ(der(c, r), STAR(r))
+  case RECD(_, r1) => der(c, r1)
+}
+
+
+// extracts a string from value
+def flatten(v: Val) : String = v match {
+  case Empty => ""
+  case Chr(c) => c.toString
+  case Left(v) => flatten(v)
+  case Right(v) => flatten(v)
+  case Sequ(v1, v2) => flatten(v1) + flatten(v2)
+  case Stars(vs) => vs.map(flatten).mkString
+  case Rec(_, v) => flatten(v)
+}
+
+
+// extracts an environment from a value;
+// used for tokenising a string
+def env(v: Val) : List[(String, String)] = v match {
+  case Empty => Nil
+  case Chr(c) => Nil
+  case Left(v) => env(v)
+  case Right(v) => env(v)
+  case Sequ(v1, v2) => env(v1) ::: env(v2)
+  case Stars(vs) => vs.flatMap(env)
+  case Rec(x, v) => (x, flatten(v))::env(v)
+}
+
+// The Injection Part of the lexer
+
+def mkeps(r: Rexp) : Val = r match {
+  case ONE => Empty
+  case ALT(r1, r2) =>
+    if (nullable(r1)) Left(mkeps(r1)) else Right(mkeps(r2))
+  case SEQ(r1, r2) => Sequ(mkeps(r1), mkeps(r2))
+  case STAR(r) => Stars(Nil)
+  case RECD(x, r) => Rec(x, mkeps(r))
+}
+
+def inj(r: Rexp, c: Char, v: Val) : Val = (r, v) match {
+  case (STAR(r), Sequ(v1, Stars(vs))) => Stars(inj(r, c, v1)::vs)
+  case (SEQ(r1, r2), Sequ(v1, v2)) => Sequ(inj(r1, c, v1), v2)
+  case (SEQ(r1, r2), Left(Sequ(v1, v2))) => Sequ(inj(r1, c, v1), v2)
+  case (SEQ(r1, r2), Right(v2)) => Sequ(mkeps(r1), inj(r2, c, v2))
+  case (ALT(r1, r2), Left(v1)) => Left(inj(r1, c, v1))
+  case (ALT(r1, r2), Right(v2)) => Right(inj(r2, c, v2))
+  case (CHAR(d), Empty) => Chr(c)
+  case (RECD(x, r1), _) => Rec(x, inj(r1, c, v))
+}
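+
+// editor's sketch (not in the original file): a handy invariant is that
+// injection puts the consumed character back, i.e.
+// flatten(inj(r, c, v)) == c.toString + flatten(v); here the value
+// Sequ(Empty, Chr('b')) is a value for der('a', "ab"), i.e. for ONE ~ "b"
+val inj_test = flatten(inj(("ab" : Rexp), 'a', Sequ(Empty, Chr('b'))))  // "ab"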
+
+// some "rectification" functions for simplification
+def F_ID(v: Val): Val = v
+def F_RIGHT(f: Val => Val) = (v:Val) => Right(f(v))
+def F_LEFT(f: Val => Val) = (v:Val) => Left(f(v))
+def F_ALT(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
+  case Right(v) => Right(f2(v))
+  case Left(v) => Left(f1(v))
+}
+def F_SEQ(f1: Val => Val, f2: Val => Val) = (v:Val) => v match {
+  case Sequ(v1, v2) => Sequ(f1(v1), f2(v2))
+}
+def F_SEQ_Empty1(f1: Val => Val, f2: Val => Val) =
+  (v:Val) => Sequ(f1(Empty), f2(v))
+def F_SEQ_Empty2(f1: Val => Val, f2: Val => Val) =
+  (v:Val) => Sequ(f1(v), f2(Empty))
+def F_RECD(f: Val => Val) = (v:Val) => v match {
+  case Rec(x, v) => Rec(x, f(v))
+}
+def F_ERROR(v: Val): Val = throw new Exception("error")
+
+def simp(r: Rexp): (Rexp, Val => Val) = r match {
+  case ALT(r1, r2) => {
+    val (r1s, f1s) = simp(r1)
+    val (r2s, f2s) = simp(r2)
+    (r1s, r2s) match {
+      case (ZERO, _) => (r2s, F_RIGHT(f2s))
+      case (_, ZERO) => (r1s, F_LEFT(f1s))
+      case _ => if (r1s == r2s) (r1s, F_LEFT(f1s))
+                else (ALT (r1s, r2s), F_ALT(f1s, f2s))
+    }
+  }
+  case SEQ(r1, r2) => {
+    val (r1s, f1s) = simp(r1)
+    val (r2s, f2s) = simp(r2)
+    (r1s, r2s) match {
+      case (ZERO, _) => (ZERO, F_ERROR)
+      case (_, ZERO) => (ZERO, F_ERROR)
+      case (ONE, _) => (r2s, F_SEQ_Empty1(f1s, f2s))
+      case (_, ONE) => (r1s, F_SEQ_Empty2(f1s, f2s))
+      case _ => (SEQ(r1s,r2s), F_SEQ(f1s, f2s))
+    }
+  }
+  case RECD(x, r1) => {
+    val (r1s, f1s) = simp(r1)
+    (RECD(x, r1s), F_RECD(f1s))
+  }
+  case r => (r, F_ID)
+}
+
+// lexing functions including simplification
+def lex_simp(r: Rexp, s: List[Char]) : Val = s match {
+  case Nil => if (nullable(r)) mkeps(r) else
+    { throw new Exception("lexing error") }
+  case c::cs => {
+    val (r_simp, f_simp) = simp(der(c, r))
+    inj(r, c, f_simp(lex_simp(r_simp, cs)))
+  }
+}
+
+def lexing_simp(r: Rexp, s: String) =
+  env(lex_simp(r, s.toList))
+
+
+// The Lexing Rules for the While Language
+
+def PLUS(r: Rexp) = r ~ r.%
+
+def Range(s : List[Char]) : Rexp = s match {
+  case Nil => ZERO
+  case c::Nil => CHAR(c)
+  case c::s => ALT(CHAR(c), Range(s))
+}
+def RANGE(s: String) = Range(s.toList)
+
+val SYM = RANGE("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz_")
+val DIGIT = RANGE("0123456789")
+val ID = SYM ~ (SYM | DIGIT).%
+val NUM = PLUS(DIGIT)
+val KEYWORD : Rexp = "skip" | "while" | "do" | "if" | "then" | "else" | "read" | "write"
+val SEMI: Rexp = ";"
+val OP: Rexp = ":=" | "=" | "-" | "+" | "*" | "!=" | "<" | ">"
+val WHITESPACE = PLUS(" " | "\n" | "\t")
+val RPAREN: Rexp = "}"
+val LPAREN: Rexp = "{"
+val STRING: Rexp = "\"" ~ SYM.% ~ "\""
+
+
+val WHILE_REGS = (("k" $ KEYWORD) |
+                  ("i" $ ID) |
+                  ("o" $ OP) |
+                  ("n" $ NUM) |
+                  ("s" $ SEMI) |
+                  ("str" $ STRING) |
+                  ("p" $ (LPAREN | RPAREN)) |
+                  ("w" $ WHITESPACE)).%
+
+
+// escapes strings and prints them out as "", "\n" and so on
+def esc(raw: String): String = {
+  import scala.reflect.runtime.universe._
+  Literal(Constant(raw)).toString
+}
+
+def escape(tks: List[(String, String)]) =
+  tks.map{ case (s1, s2) => (s1, esc(s2))}
+
+val prog2 = """
+write "Fib";
+read n;
+minus1 := 0;
+minus2 := 1;
+while n > 0 do {
+  temp := minus2;
+  minus2 := minus1 + minus2;
+  minus1 := temp;
+  n := n - 1
+};
+write "Result";
+write minus2
+"""
+
+val prog3 = """
+start := 1000;
+x := start;
+y := start;
+z := start;
+while 0 < x do {
+  while 0 < y do {
+    while 0 < z do {
+      z := z - 1
+    };
+    z := start;
+    y := y - 1
+  };
+  y := start;
+  x := x - 1
+}
+"""
+
+// Generating tokens for the WHILE language
+
+import java.io._
+
+abstract class Token extends Serializable
+case object T_SEMI extends Token
+case object T_LPAREN extends Token
+case object T_RPAREN extends Token
+case class T_ID(s: String) extends Token
+case class T_OP(s: String) extends Token
+case class T_NUM(n: Int) extends Token
+case class T_KWD(s: String) extends Token
+case class T_STR(s: String) extends Token
+
+val token : PartialFunction[(String, String), Token] = {
+  case ("s", _) => T_SEMI
+  case ("p", "{") => T_LPAREN
+  case ("p", "}") => T_RPAREN
+  case ("i", s) => T_ID(s)
+  case ("o", s) => T_OP(s)
+  case ("n", s) => T_NUM(s.toInt)
+  case ("k", s) => T_KWD(s)
+  case ("str", s) => T_STR(s)
+}
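+
+// editor's sketch (not in the original file): token is deliberately
+// partial, with no case for whitespace pairs ("w", _), so the collect
+// in tokenise below silently drops them; for example token_test
+// gives List(T_KWD(read), T_ID(n))
+val token_test = List(("k", "read"), ("w", " "), ("i", "n")).collect(token)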
+
+def tokenise(s: String) : List[Token] =
+  lexing_simp(WHILE_REGS, s).collect(token)
+
+
+def serialise[T](fname: String, data: T) = {
+  val out = new ObjectOutputStream(new FileOutputStream(fname))
+  out.writeObject(data)
+  out.close
+}
+
+def main(args: Array[String]) = {
+  serialise("/tmp/nflx", tokenise(prog3))
+}
+
+
+}
\ No newline at end of file
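
Taken together, the two new files form a round trip: tokenise.scala serialises the token list of prog3 to /tmp/nflx, and detokenise.scala reads it back. A hypothetical REPL check (editor's sketch, assuming both files are compiled together so the Lexer token classes are on the classpath):

  Lexer.main(Array())     // writes the tokens of prog3 to /tmp/nflx
  Delexer.main(Array())   // prints TEST followed by the de-serialised tokens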