ProgTutorial/Parsing.thy
changeset 563 50d3059de9c6
parent 559 ffa5c4ec9611
child 565 cecd7a941885
--- a/ProgTutorial/Parsing.thy	Tue May 14 11:10:53 2019 +0200
+++ b/ProgTutorial/Parsing.thy	Tue May 14 13:39:31 2019 +0200
@@ -476,7 +476,7 @@
 *}
   
 ML %grayML{*type 'a parser = Token.T list -> 'a * Token.T list*}
-
+ML "Thy_Header.get_keywords'"
 text {*
   {\bf REDO!!}
 
@@ -496,14 +496,14 @@
   Token} for keywords and @{ML_ind Command in Token} for commands). Some
   token parsers take into account the kind of tokens. The first example shows
   how to generate a token list out of a string using the function 
-  @{ML_ind scan in Outer_Syntax}. It is given the argument 
+  @{ML_ind explode in Token}. It is given the argument 
   @{ML "Position.none"} since,
   at the moment, we are not interested in generating precise error
   messages. The following code
 
 
-@{ML_response_fake [display,gray] "Outer_Syntax.scan 
-  (Keyword.get_lexicons ()) Position.none \"hello world\"" 
+@{ML_response_fake [display,gray] "Token.explode 
+  (Thy_Header.get_keywords' @{context}) Position.none \"hello world\"" 
 "[Token (\<dots>,(Ident, \"hello\"),\<dots>), 
  Token (\<dots>,(Space, \" \"),\<dots>), 
  Token (\<dots>,(Ident, \"world\"),\<dots>)]"}
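As a quick cross-check, the tokens can be mapped back to their text; a minimal sketch,
assuming @{ML_ind content_of in Token} is available:

ML \<open>
  (* sketch: recover the string content of each token produced above *)
  map Token.content_of
    (Token.explode (Thy_Header.get_keywords' @{context}) Position.none "hello world")
\<close>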
@@ -513,16 +513,19 @@
   other syntactic category. The second indicates a space.
 
   We can easily change what is recognised as a keyword with the function
-  @{ML_ind define in Keyword}. For example calling it with 
+  @{ML_ind add_keywords in Thy_Header}. For example, calling it with 
 *}
 
-ML %grayML{*val _ = Keyword.define ("hello", NONE) *}
+
+
+setup {* Thy_Header.add_keywords [(("hello", Position.none), Keyword.no_spec)] *}
+
 
 text {*
   then lexing @{text [quotes] "hello world"} will produce
 
-  @{ML_response_fake [display,gray] "Outer_Syntax.scan 
-  (Keyword.get_lexicons ()) Position.none \"hello world\"" 
+  @{ML_response_fake [display,gray] "Token.explode 
+  (Thy_Header.get_keywords' @{context}) Position.none \"hello world\"" 
 "[Token (\<dots>,(Keyword, \"hello\"),\<dots>), 
  Token (\<dots>,(Space, \" \"),\<dots>), 
  Token (\<dots>,(Ident, \"world\"),\<dots>)]"}
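To double-check that the keyword setup above took effect, the keyword table can also be
queried directly; a small sketch, assuming @{ML_ind is_keyword in Keyword} is available
in this Isabelle version:

ML \<open>
  (* sketch: after the setup above, "hello" should count as a keyword *)
  Keyword.is_keyword (Thy_Header.get_keywords' @{context}) "hello"
\<close>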
@@ -534,7 +537,7 @@
 
 @{ML_response_fake [display,gray]
 "let
-   val input = Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none \"hello world\"
+   val input = Token.explode (Thy_Header.get_keywords' @{context}) Position.none \"hello world\"
 in
    filter Token.is_proper input
 end" 
@@ -544,8 +547,9 @@
 *}
 
 ML %grayML{*fun filtered_input str = 
-  filter Token.is_proper (Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none str) *}
+  filter Token.is_proper (Token.explode (Thy_Header.get_keywords' @{context}) Position.none str) *}
 
+ML \<open>filtered_input "inductive | for"\<close>
 text {* 
   If you now parse
 
@@ -557,17 +561,9 @@
 
   you obtain a list consisting of only one command and two keyword tokens.
   If you want to see which keywords and commands are currently known to Isabelle, 
-  use the function @{ML_ind get_lexicons in Keyword}:
+  use the function @{ML_ind get_keywords' in Thy_Header}.
 
-@{ML_response_fake [display,gray] 
-"let 
-  val (keywords, commands) = Keyword.get_lexicons ()
-in 
-  (Scan.dest_lexicon commands, Scan.dest_lexicon keywords)
-end" 
-"([\"}\", \"{\", \<dots>], [\"\<rightleftharpoons>\", \"\<leftharpoondown>\", \<dots>])"}
-
-  You might have to adjust the @{ML_ind default_print_depth} in order to
+  You might have to adjust the @{text ML_print_depth} option in order to
   see the complete list.
 
   The parser @{ML_ind "$$$" in Parse} parses a single keyword. For example:
@@ -671,7 +667,7 @@
 *}
 
 ML %grayML{*fun filtered_input' str = 
-       filter Token.is_proper (Outer_Syntax.scan (Keyword.get_lexicons ()) (Position.line 7) str) *}
+       filter Token.is_proper (Token.explode (Thy_Header.get_keywords' @{context}) (Position.line 7) str) *}
 
 text {*
   where we pretend the parsed string starts on line 7. An example is
@@ -760,7 +756,7 @@
 
 @{ML_response_fake [display,gray]
 "let 
-  val input = Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none \"foo\"
+  val input = Token.explode (Thy_Header.get_keywords' @{context}) Position.none \"foo\"
 in 
   Parse.term input
 end"
@@ -776,7 +772,7 @@
 
 @{ML_response_fake [display,gray]
 "let 
-  val input = Outer_Syntax.scan (Keyword.get_lexicons ()) (Position.line 42) \"foo\"
+  val input = Token.explode (Thy_Header.get_keywords' @{context}) (Position.line 42) \"foo\"
 in 
   YXML.parse (fst (Parse.term input))
 end"
@@ -821,7 +817,7 @@
 *}
 
 ML %linenosgray{*val spec_parser = 
-     Parse.fixes -- 
+     Parse.vars -- 
      Scan.optional 
        (Parse.$$$ "where" |--
           Parse.!!! 
@@ -852,27 +848,36 @@
    [((even0,\<dots>),\<dots>),
     ((evenS,\<dots>),\<dots>),
     ((oddS,\<dots>),\<dots>)]), [])"}
+*}
 
+ML \<open>let
+  val input = filtered_input 
+        "foo::\"int \<Rightarrow> bool\" and bar::nat (\"BAR\" 100) and blonk"
+in
+  parse Parse.vars input
+end\<close>
+
+text {*
   As you see, the result is a pair consisting of a list of
   variables with optional type-annotation and syntax-annotation, and a list of
   rules where every rule has optionally a name and an attribute.
 
-  The function @{ML_ind "fixes" in Parse} in Line 2 of the parser reads an 
+  The function @{ML_ind "vars" in Parse} in Line 2 of the parser reads an 
   \isacommand{and}-separated 
   list of variables that can include optional type annotations and syntax translations. 
   For example:\footnote{Note that in the code we need to write 
   @{text "\\\"int \<Rightarrow> bool\\\""} in order to properly escape the double quotes
   in the compound type.}
 
-@{ML_response [display,gray]
+@{ML_response_fake [display,gray]
 "let
   val input = filtered_input 
         \"foo::\\\"int \<Rightarrow> bool\\\" and bar::nat (\\\"BAR\\\" 100) and blonk\"
 in
-  parse Parse.fixes input
+  parse Parse.vars input
 end"
 "([(foo, SOME \<dots>, NoSyn), 
-  (bar, SOME \<dots>, Mixfix (\"BAR\", [], 100)), 
+  (bar, SOME \<dots>, Mixfix (Source {text=\"BAR\",...}, [], 100, \<dots>)), 
   (blonk, NONE, NoSyn)],[])"}  
 *}
 
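To extract just the variable names from the result of @{ML_ind vars in Parse}, the parsed
triples can be post-processed; a minimal sketch, reusing the @{ML parse} and
@{ML filtered_input} helpers defined earlier in this section:

ML \<open>
  (* sketch: keep only the bindings, dropping type and syntax annotations *)
  let
    val input = filtered_input "foo::\"int \<Rightarrow> bool\" and bar::nat and blonk"
    val (vars, _) = parse Parse.vars input
  in
    map #1 vars
  end
\<close>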
@@ -915,15 +920,15 @@
 
 text_raw {*
   \begin{exercise}
-  Have a look at how the parser @{ML Parse_Spec.where_alt_specs} is implemented
+  Have a look at how the parser @{ML Parse_Spec.where_multi_specs} is implemented
   in file @{ML_file "Pure/Isar/parse_spec.ML"}. This parser corresponds
   to the ``where-part'' of the introduction rules given above. Below
-  we paraphrase the code of @{ML_ind where_alt_specs in Parse_Spec} adapted to our
+  we paraphrase the code of @{ML_ind where_multi_specs in Parse_Spec} adapted to our
   purposes. 
   \begin{isabelle}
 *}
 ML %linenosgray{*val spec_parser' = 
-     Parse.fixes -- 
+     Parse.vars -- 
      Scan.optional
      (Parse.$$$ "where" |-- 
         Parse.!!! 
@@ -981,7 +986,7 @@
 ML %grayML{*let
   val do_nothing = Scan.succeed (Local_Theory.background_theory I)
 in
-  Outer_Syntax.local_theory @{command_spec "foobar"} 
+  Outer_Syntax.local_theory @{command_keyword "foobar"} 
     "description of foobar" 
       do_nothing
 end *}
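Note that the antiquotation @{text "@{command_keyword \"foobar\"}"} only succeeds if the
name is already declared as a command keyword in the theory header; a minimal sketch of
such a header, with the theory name chosen for illustration:

theory Command
imports Main
keywords "foobar" :: thy_decl
begin

ML \<open>
let
  val do_nothing = Scan.succeed (Local_Theory.background_theory I)
in
  Outer_Syntax.local_theory @{command_keyword "foobar"}
    "description of foobar"
      do_nothing
end
\<close>

end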
@@ -1026,7 +1031,7 @@
   fun trace_prop str = 
     Local_Theory.background_theory (fn ctxt => (tracing str; ctxt))
 in
-  Outer_Syntax.local_theory @{command_spec "foobar_trace"} 
+  Outer_Syntax.local_theory @{command_keyword "foobar_trace"} 
     "traces a proposition" 
       (Parse.prop >> trace_prop)
 end *}
@@ -1069,7 +1074,7 @@
       Proof.theorem NONE (K I) [[(prop, [])]] ctxt
     end
 in
-  Outer_Syntax.local_theory_to_proof @{command_spec "foobar_goal"}
+  Outer_Syntax.local_theory_to_proof @{command_keyword "foobar_goal"}
     "proves a proposition" 
       (Parse.prop >> goal_prop)
 end *}
@@ -1135,8 +1140,9 @@
    fun after_qed thm_name thms lthy =
         Local_Theory.note (thm_name, (flat thms)) lthy |> snd
 
-   fun setup_proof (thm_name, {text, ...}) lthy =
+   fun setup_proof (thm_name, src) lthy =
      let
+       val text = Input.source_content src
        val trm = Code_Runtime.value lthy result_cookie ("", text)
      in
        Proof.theorem NONE (after_qed thm_name) [[(trm, [])]] lthy
@@ -1144,7 +1150,7 @@
 
    val parser = Parse_Spec.opt_thm_name ":" -- Parse.ML_source
 in
-   Outer_Syntax.local_theory_to_proof @{command_spec "foobar_prove"}
+   Outer_Syntax.local_theory_to_proof @{command_keyword "foobar_prove"}
      "proving a proposition" 
        (parser >> setup_proof)
 end*}
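The revised @{text setup_proof} function obtains the text of the ML source via
@{ML_ind source_content in Input}; a minimal sketch of that conversion in isolation,
assuming @{ML "Input.string"} for building a source value from a plain string:

ML \<open>
  (* sketch: source_content recovers the text carried by an Input.source *)
  Input.source_content (Input.string "some ML text")
\<close>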
@@ -1179,161 +1185,16 @@
   @{text "> "}~@{thm TrueI}
   \end{isabelle}
 
-  While this is everything you have to do for a new command when using jEdit, 
-  things are not as simple when using Emacs and ProofGeneral. We explain the details 
-  next.
 *}
 
 
-section {* Proof-General and Keyword Files *}
-
-text {*
-  In order to use a new command in Emacs and Proof-General, you need a keyword
-  file that can be loaded by ProofGeneral. To keep things simple we take as
-  a running example the command \isacommand{foobar} from the previous section. 
-
-  A keyword file can be generated with the command-line:
-
-  @{text [display] "$ isabelle keywords -k foobar some_log_files"}
-
-  The option @{text "-k foobar"} indicates which postfix the name of the keyword file 
-  will be assigned. In the case above the file will be named @{text
-  "isar-keywords-foobar.el"}. This command requires log files to be
-  present (in order to extract the keywords from them). To generate these log
-  files, you first need to package the code above into a separate theory file named
-  @{text "Command.thy"}, say---see Figure~\ref{fig:commandtheory} for the
-  complete code.
-
-
-  %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-  \begin{figure}[t]
-  \begin{graybox}\small
-  \isacommand{theory}~@{text Command}\\
-  \isacommand{imports}~@{text Main}\\
-  \isacommand{keywords} @{text [quotes] "foobar"} @{text "::"} @{text "thy_decl"}\\
-  \isacommand{begin}\\
-  \isacommand{ML}~@{text "\<verbopen>"}\\
-  @{ML
-"let
-  val do_nothing = Scan.succeed (Local_Theory.background_theory I)
-in
-  Outer_Syntax.local_theory @{command_spec \"foobar\"}
-    \"description of foobar\" 
-       do_nothing
-end"}\\
-  @{text "\<verbclose>"}\\
-  \isacommand{end}
-  \end{graybox}
-  \caption{This file can be used to generate a log file. This log file in turn can
-  be used to generate a keyword file containing the command \isacommand{foobar}.
-  \label{fig:commandtheory}}
-  \end{figure}
-  %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
-
-
-  For our purposes it is sufficient to use the log files of the theories
-  @{text "Pure"}, @{text "HOL"} and @{text "Pure-ProofGeneral"}, as well as
-  the log file for the theory @{text "Command.thy"}, which contains the new
-  \isacommand{foobar}-command. If you target other logics besides HOL, such
-  as Nominal or ZF, then you need to adapt the log files appropriately.
-  
-  @{text Pure} and @{text HOL} are usually compiled during the installation of
-  Isabelle. So log files for them should be already available. If not, then
-  they can be easily compiled with the build-script from the Isabelle
-  distribution.
-
-  @{text [display] 
-"$ ./build -m \"Pure\"
-$ ./build -m \"HOL\""}
-  
-  The @{text "Pure-ProofGeneral"} theory needs to be compiled with:
-
-  @{text [display] "$ ./build -m \"Pure-ProofGeneral\" \"Pure\""}
-
-  For the theory @{text "Command.thy"}, you first need to create a ``managed'' subdirectory 
-  with:
- 
-  @{text [display] "$ isabelle mkroot -d FoobarCommand"}
-
-  This generates a directory containing the files: 
-  
-  @{text [display] 
-"./FoobarCommand/ROOT
-./FoobarCommand/document
-./FoobarCommand/document/root.tex"}
- 
-  You need to copy the file @{text "Command.thy"} into the directory @{text "FoobarCommand"}
-  and add the line 
-
-  @{text [display] "no_document use_thy \"Command\";"} 
-  
-  to the file @{text "./FoobarCommand/ROOT"}. You can now compile the theory by just typing:
-
-  @{text [display] "$ isabelle build"}
-
-  If the compilation succeeds, you have finally created all the necessary log files. 
-  They are stored in the directory 
-  
-  @{text [display]  "~/.isabelle/heaps/Isabelle2012/polyml-5.2.1_x86-linux/log"}
-
-  or something similar depending on your Isabelle distribution and architecture.
-  One quick way to assign a shell variable to this directory is by typing
-
-  @{text [display] "$ ISABELLE_LOGS=\"$(isabelle getenv -b ISABELLE_OUTPUT)\"/log"}
- 
-  on the Unix prompt. If you now type @{text "ls $ISABELLE_LOGS"}, then the 
-  directory should include the files:
-
-  @{text [display] 
-"Pure.gz
-HOL.gz
-Pure-ProofGeneral.gz
-HOL-FoobarCommand.gz"} 
-
-  From them you can create the keyword files. Assuming the name 
-  of the directory is in  @{text "$ISABELLE_LOGS"},
-  then the Unix command for creating the keyword file is:
-
-@{text [display]
-"$ isabelle keywords -k foobar 
-   $ISABELLE_LOGS/{Pure.gz,HOL.gz,Pure-ProofGeneral.gz,HOL-FoobarCommand.gz}"}
-
-  The result is the file @{text "isar-keywords-foobar.el"}. It should contain
-  the string @{text "foobar"} twice.\footnote{To see whether things are fine,
-  check that @{text "grep foobar"} on this file returns something non-empty.}
-  This keyword file needs to be copied into the directory @{text
-  "~/.isabelle/etc"}. To make ProofGeneral aware of it, you have to start
-  Isabelle with the option @{text "-k foobar"}, that is:
-
-
-  @{text [display] "$ isabelle emacs -k foobar a_theory_file"}
-
-  If you now build a theory on top of @{text "Command.thy"}, 
-  then you can now use the command \isacommand{foobar}
-  in Proof-General
-
-  A similar procedure has to be done with any 
-  other new command, and also any new keyword that is introduced with 
-  the function @{ML_ind define in Keyword}. For example:
-*}
-
-ML %grayML{*val _ = Keyword.define ("blink", NONE) *}
-
-
-text {*
-  Also if the kind of a command changes, from @{text "thy_decl"} to 
-  @{text "thy_goal"} say,  you need to recreate the keyword file.
-*}
-
-
-
-
+(*
 text {*
   {\bf TBD below}
 
  *}
 
-
+*)
 
 
 
@@ -1485,7 +1346,7 @@
 
 method_setup %gray foo = 
  {* Scan.succeed
-      (K (SIMPLE_METHOD ((etac @{thm conjE} THEN' rtac @{thm conjI}) 1))) *}
+      (fn ctxt => (SIMPLE_METHOD ((eresolve_tac ctxt [@{thm conjE}] THEN' resolve_tac ctxt [@{thm conjI}]) 1))) *}
          "foo method for conjE and conjI"
 
 text {*
@@ -1514,7 +1375,7 @@
 oops
 
 method_setup joker = {* 
-  Scan.lift (Scan.succeed (fn ctxt => Method.cheating ctxt true)) *} {* bla *}
+  Scan.lift (Scan.succeed (fn ctxt => Method.cheating true)) *} {* bla *}
 
 lemma "False"
 apply(joker)
@@ -1522,9 +1383,9 @@
 
 text {* if true is set then always works *}
 
-ML {* atac *} 
+ML {* assume_tac *} 
 
-method_setup first_atac = {* Scan.lift (Scan.succeed (K (SIMPLE_METHOD (atac 1)))) *} {* bla *}
+method_setup first_atac = {* Scan.lift (Scan.succeed (fn ctxt => (SIMPLE_METHOD (assume_tac ctxt 1)))) *} {* bla *}
 
 ML {* HEADGOAL *}
 
@@ -1532,7 +1393,7 @@
 apply(first_atac)
 oops
 
-method_setup my_atac = {* Scan.lift (Scan.succeed (K (SIMPLE_METHOD' atac))) *} {* bla *}
+method_setup my_atac = {* Scan.lift (Scan.succeed (fn ctxt => (SIMPLE_METHOD' (assume_tac ctxt)))) *} {* bla *}
 
 lemma "A \<Longrightarrow> A"
 apply(my_atac)
@@ -1554,7 +1415,7 @@
 ML {* resolve_tac *}
 
 method_setup myrule =
-  {* Scan.lift (Scan.succeed (K (METHOD (fn thms => resolve_tac thms 1)))) *}
+  {* Scan.lift (Scan.succeed (fn ctxt => (METHOD (fn thms => resolve_tac ctxt thms 1)))) *}
   {* bla *}
 
 lemma