ProgTutorial/Parsing.thy
changeset 563 50d3059de9c6
parent 559 ffa5c4ec9611
child 565 cecd7a941885
equal deleted inserted replaced
562:daf404920ab9 563:50d3059de9c6
   474   Most of the time, however, Isabelle developers have to deal with parsing
   474   Most of the time, however, Isabelle developers have to deal with parsing
   475   tokens, not strings.  These token parsers have the type:
   475   tokens, not strings.  These token parsers have the type:
   476 *}
   476 *}
   477   
   477   
   478 ML %grayML{*type 'a parser = Token.T list -> 'a * Token.T list*}
   478 ML %grayML{*type 'a parser = Token.T list -> 'a * Token.T list*}
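
        text {*
          A parser of this type consumes a prefix of a token list and returns a result
          together with the unconsumed tokens. For instance, the predefined parser
          @{ML_ind nat in Parse}, which is not discussed further here and only serves as
          a first illustration, has type @{text "int parser"}.
        *}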
   479 
   479 ML "Thy_Header.get_keywords'"
   480 text {*
   480 text {*
   481   {\bf REDO!!}
   481   {\bf REDO!!}
   482 
   482 
   483 
   483 
   484   The reason for using token parsers is that theory syntax, as well as the
   484   The reason for using token parsers is that theory syntax, as well as the
   494   The structure @{ML_struct  Token} defines several kinds of tokens (for
   494   The structure @{ML_struct  Token} defines several kinds of tokens (for
   495   example @{ML_ind Ident in Token} for identifiers, @{ML Keyword in
   495   example @{ML_ind Ident in Token} for identifiers, @{ML Keyword in
   496   Token} for keywords and @{ML_ind Command in Token} for commands). Some
   496   Token} for keywords and @{ML_ind Command in Token} for commands). Some
   497   token parsers take into account the kind of tokens. The first example shows
   497   token parsers take into account the kind of tokens. The first example shows
   498   how to generate a token list out of a string using the function 
   498   how to generate a token list out of a string using the function 
   499   @{ML_ind scan in Outer_Syntax}. It is given the argument 
   499   @{ML_ind explode in Token}. It is given the argument 
   500   @{ML "Position.none"} since,
   500   @{ML "Position.none"} since,
   501   at the moment, we are not interested in generating precise error
   501   at the moment, we are not interested in generating precise error
   502   messages. The following code
   502   messages. The following code
   503 
   503 
   504 
   504 
   505 @{ML_response_fake [display,gray] "Outer_Syntax.scan 
   505 @{ML_response_fake [display,gray] "Token.explode 
   506   (Keyword.get_lexicons ()) Position.none \"hello world\"" 
   506   (Thy_Header.get_keywords' @{context}) Position.none \"hello world\"" 
   507 "[Token (\<dots>,(Ident, \"hello\"),\<dots>), 
   507 "[Token (\<dots>,(Ident, \"hello\"),\<dots>), 
   508  Token (\<dots>,(Space, \" \"),\<dots>), 
   508  Token (\<dots>,(Space, \" \"),\<dots>), 
   509  Token (\<dots>,(Ident, \"world\"),\<dots>)]"}
   509  Token (\<dots>,(Ident, \"world\"),\<dots>)]"}
   510 
   510 
   511   produces three tokens where the first and the last are identifiers, since
   511   produces three tokens where the first and the last are identifiers, since
   512   @{text [quotes] "hello"} and @{text [quotes] "world"} do not match any
   512   @{text [quotes] "hello"} and @{text [quotes] "world"} do not match any
   513   other syntactic category. The second indicates a space.
   513   other syntactic category. The second indicates a space.
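
          If you are only interested in the text a token carries, you can extract it
          with the function @{ML_ind content_of in Token}. A small sketch (the output
          shown is indicative only and not checked):

        @{ML_response_fake [display,gray]
        "map Token.content_of
          (Token.explode (Thy_Header.get_keywords' @{context}) Position.none \"hello world\")"
        "[\"hello\", \" \", \"world\"]"}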
   514 
   514 
   515   We can easily change what is recognised as a keyword with the function
   515   We can easily change what is recognised as a keyword with the function
    516   @{ML_ind define in Keyword}. For example, calling it with 
    516   @{ML_ind add_keywords in Thy_Header}. For example, calling it with 
   517 *}
   517 *}
   518 
   518 
   519 ML %grayML{*val _ = Keyword.define ("hello", NONE) *}
   519 
       
   520 
       
   521 setup {* Thy_Header.add_keywords [(("hello", Position.none),Keyword.no_spec)] *}
       
   522 
   520 
   523 
   521 text {*
   524 text {*
   522   then lexing @{text [quotes] "hello world"} will produce
   525   then lexing @{text [quotes] "hello world"} will produce
   523 
   526 
   524   @{ML_response_fake [display,gray] "Outer_Syntax.scan 
   527   @{ML_response_fake [display,gray] "Token.explode 
   525   (Keyword.get_lexicons ()) Position.none \"hello world\"" 
   528   (Thy_Header.get_keywords' @{context}) Position.none \"hello world\"" 
   526 "[Token (\<dots>,(Keyword, \"hello\"),\<dots>), 
   529 "[Token (\<dots>,(Keyword, \"hello\"),\<dots>), 
   527  Token (\<dots>,(Space, \" \"),\<dots>), 
   530  Token (\<dots>,(Space, \" \"),\<dots>), 
   528  Token (\<dots>,(Ident, \"world\"),\<dots>)]"}
   531  Token (\<dots>,(Ident, \"world\"),\<dots>)]"}
   529 
   532 
   530   Many parsing functions later on will require white space, comments and the like
   533   Many parsing functions later on will require white space, comments and the like
   532   functions @{ML filter} and @{ML_ind is_proper in Token} to do this. 
   535   functions @{ML filter} and @{ML_ind is_proper in Token} to do this. 
   533   For example:
   536   For example:
   534 
   537 
   535 @{ML_response_fake [display,gray]
   538 @{ML_response_fake [display,gray]
   536 "let
   539 "let
   537    val input = Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none \"hello world\"
   540    val input = Token.explode (Thy_Header.get_keywords' @{context}) Position.none \"hello world\"
   538 in
   541 in
   539    filter Token.is_proper input
   542    filter Token.is_proper input
   540 end" 
   543 end" 
   541 "[Token (\<dots>,(Ident, \"hello\"), \<dots>), Token (\<dots>,(Ident, \"world\"), \<dots>)]"}
   544 "[Token (\<dots>,(Ident, \"hello\"), \<dots>), Token (\<dots>,(Ident, \"world\"), \<dots>)]"}
   542 
   545 
   543   For convenience we define the function:
   546   For convenience we define the function:
   544 *}
   547 *}
   545 
   548 
   546 ML %grayML{*fun filtered_input str = 
   549 ML %grayML{*fun filtered_input str = 
   547   filter Token.is_proper (Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none str) *}
   550   filter Token.is_proper (Token.explode (Thy_Header.get_keywords' @{context}) Position.none str) *}
   548 
   551 
       
   552 ML \<open>filtered_input "inductive | for"\<close>
   549 text {* 
   553 text {* 
   550   If you now parse
   554   If you now parse
   551 
   555 
   552 @{ML_response_fake [display,gray] 
   556 @{ML_response_fake [display,gray] 
   553 "filtered_input \"inductive | for\"" 
   557 "filtered_input \"inductive | for\"" 
   555  Token (\<dots>,(Keyword, \"|\"),\<dots>), 
   559  Token (\<dots>,(Keyword, \"|\"),\<dots>), 
   556  Token (\<dots>,(Keyword, \"for\"),\<dots>)]"}
   560  Token (\<dots>,(Keyword, \"for\"),\<dots>)]"}
   557 
   561 
   558   you obtain a list consisting of only one command and two keyword tokens.
   562   you obtain a list consisting of only one command and two keyword tokens.
   559   If you want to see which keywords and commands are currently known to Isabelle, 
   563   If you want to see which keywords and commands are currently known to Isabelle, 
   560   use the function @{ML_ind get_lexicons in Keyword}:
   564   use the function @{ML_ind get_keywords' in Thy_Header}:
   561 
   565 
   562 @{ML_response_fake [display,gray] 
   566   You might have to adjust the @{text ML_print_depth} in order to
   563 "let 
       
   564   val (keywords, commands) = Keyword.get_lexicons ()
       
   565 in 
       
   566   (Scan.dest_lexicon commands, Scan.dest_lexicon keywords)
       
   567 end" 
       
   568 "([\"}\", \"{\", \<dots>], [\"\<rightleftharpoons>\", \"\<leftharpoondown>\", \<dots>])"}
       
   569 
       
   570   You might have to adjust the @{ML_ind default_print_depth} in order to
       
   571   see the complete list.
   567   see the complete list.
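
          For example, the names of all outer syntax commands known in the current
          context can be obtained with the following sketch (it assumes the functions
          @{ML_ind major_keywords in Keyword} and @{ML_ind dest_lexicon in Scan} are
          available for taking a keyword table apart):

        @{ML_response_fake [display,gray]
        "Scan.dest_lexicon (Keyword.major_keywords (Thy_Header.get_keywords' @{context}))"
        "[\<dots>]"}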
   572 
   568 
   573   The parser @{ML_ind "$$$" in Parse} parses a single keyword. For example:
   569   The parser @{ML_ind "$$$" in Parse} parses a single keyword. For example:
   574 
   570 
   575 @{ML_response [display,gray]
   571 @{ML_response [display,gray]
   669   and then thread this information up the ``processing chain''. To see this,
   665   and then thread this information up the ``processing chain''. To see this,
   670   modify the function @{ML filtered_input}, described earlier, as follows 
   666   modify the function @{ML filtered_input}, described earlier, as follows 
   671 *}
   667 *}
   672 
   668 
   673 ML %grayML{*fun filtered_input' str = 
   669 ML %grayML{*fun filtered_input' str = 
   674        filter Token.is_proper (Outer_Syntax.scan (Keyword.get_lexicons ()) (Position.line 7) str) *}
   670        filter Token.is_proper (Token.explode (Thy_Header.get_keywords' @{context}) (Position.line 7) str) *}
   675 
   671 
   676 text {*
   672 text {*
   677   where we pretend the parsed string starts on line 7. An example is
   673   where we pretend the parsed string starts on line 7. An example is
   678 
   674 
   679 @{ML_response_fake [display,gray]
   675 @{ML_response_fake [display,gray]
   758   for terms and  types: you can just call the predefined parsers. Terms can 
   754   for terms and  types: you can just call the predefined parsers. Terms can 
   759   be parsed using the function @{ML_ind term in Parse}. For example:
   755   be parsed using the function @{ML_ind term in Parse}. For example:
   760 
   756 
   761 @{ML_response_fake [display,gray]
   757 @{ML_response_fake [display,gray]
   762 "let 
   758 "let 
   763   val input = Outer_Syntax.scan (Keyword.get_lexicons ()) Position.none \"foo\"
   759   val input = Token.explode (Thy_Header.get_keywords' @{context}) Position.none \"foo\"
   764 in 
   760 in 
   765   Parse.term input
   761   Parse.term input
   766 end"
   762 end"
   767 "(\"<markup>\", [])"}
   763 "(\"<markup>\", [])"}
   768 
   764 
   774   The result of the decoding is an XML-tree. You can see better what is going on if
   770   The result of the decoding is an XML-tree. You can see better what is going on if
   775   you replace @{ML Position.none} by @{ML "Position.line 42"}, say:
   771   you replace @{ML Position.none} by @{ML "Position.line 42"}, say:
   776 
   772 
   777 @{ML_response_fake [display,gray]
   773 @{ML_response_fake [display,gray]
   778 "let 
   774 "let 
   779   val input = Outer_Syntax.scan (Keyword.get_lexicons ()) (Position.line 42) \"foo\"
   775   val input = Token.explode (Thy_Header.get_keywords' @{context}) (Position.line 42) \"foo\"
   780 in 
   776 in 
   781   YXML.parse (fst (Parse.term input))
   777   YXML.parse (fst (Parse.term input))
   782 end"
   778 end"
   783 "Elem (\"token\", [(\"line\", \"42\"), (\"end_line\", \"42\")], [XML.Text \"foo\"])"}
   779 "Elem (\"token\", [(\"line\", \"42\"), (\"end_line\", \"42\")], [XML.Text \"foo\"])"}
   784 
   780 
   819 text {*
   815 text {*
   820   For this we are going to use the parser:
   816   For this we are going to use the parser:
   821 *}
   817 *}
   822 
   818 
   823 ML %linenosgray{*val spec_parser = 
   819 ML %linenosgray{*val spec_parser = 
   824      Parse.fixes -- 
   820      Parse.vars -- 
   825      Scan.optional 
   821      Scan.optional 
   826        (Parse.$$$ "where" |--
   822        (Parse.$$$ "where" |--
   827           Parse.!!! 
   823           Parse.!!! 
   828             (Parse.enum1 "|" 
   824             (Parse.enum1 "|" 
   829                (Parse_Spec.opt_thm_name ":" -- Parse.prop))) []*}
   825                (Parse_Spec.opt_thm_name ":" -- Parse.prop))) []*}
   850 end"
   846 end"
   851 "(([(even, NONE, NoSyn), (odd, NONE, NoSyn)],
   847 "(([(even, NONE, NoSyn), (odd, NONE, NoSyn)],
   852    [((even0,\<dots>),\<dots>),
   848    [((even0,\<dots>),\<dots>),
   853     ((evenS,\<dots>),\<dots>),
   849     ((evenS,\<dots>),\<dots>),
   854     ((oddS,\<dots>),\<dots>)]), [])"}
   850     ((oddS,\<dots>),\<dots>)]), [])"}
   855 
   851 *}
       
   852 
       
   853 ML \<open>let
       
   854   val input = filtered_input 
       
   855         "foo::\"int \<Rightarrow> bool\" and bar::nat (\"BAR\" 100) and blonk"
       
   856 in
       
   857   parse Parse.vars input
       
   858 end\<close>
       
   859 
       
   860 text {*
   856   As you see, the result is a pair consisting of a list of
   861   As you see, the result is a pair consisting of a list of
   857   variables with optional type-annotation and syntax-annotation, and a list of
   862   variables with optional type-annotation and syntax-annotation, and a list of
    858   rules where every rule optionally has a name and an attribute.
    863   rules where every rule optionally has a name and an attribute.
   859 
   864 
   860   The function @{ML_ind "fixes" in Parse} in Line 2 of the parser reads an 
   865   The function @{ML_ind "vars" in Parse} in Line 2 of the parser reads an 
   861   \isacommand{and}-separated 
   866   \isacommand{and}-separated 
   862   list of variables that can include optional type annotations and syntax translations. 
   867   list of variables that can include optional type annotations and syntax translations. 
   863   For example:\footnote{Note that in the code we need to write 
   868   For example:\footnote{Note that in the code we need to write 
   864   @{text "\\\"int \<Rightarrow> bool\\\""} in order to properly escape the double quotes
   869   @{text "\\\"int \<Rightarrow> bool\\\""} in order to properly escape the double quotes
   865   in the compound type.}
   870   in the compound type.}
   866 
   871 
   867 @{ML_response [display,gray]
   872 @{ML_response_fake [display,gray]
   868 "let
   873 "let
   869   val input = filtered_input 
   874   val input = filtered_input 
   870         \"foo::\\\"int \<Rightarrow> bool\\\" and bar::nat (\\\"BAR\\\" 100) and blonk\"
   875         \"foo::\\\"int \<Rightarrow> bool\\\" and bar::nat (\\\"BAR\\\" 100) and blonk\"
   871 in
   876 in
   872   parse Parse.fixes input
   877   parse Parse.vars input
   873 end"
   878 end"
   874 "([(foo, SOME \<dots>, NoSyn), 
   879 "([(foo, SOME \<dots>, NoSyn), 
   875   (bar, SOME \<dots>, Mixfix (\"BAR\", [], 100)), 
   880   (bar, SOME \<dots>, Mixfix (Source {text=\"BAR\",...}, [], 100, \<dots>)), 
   876   (blonk, NONE, NoSyn)],[])"}  
   881   (blonk, NONE, NoSyn)],[])"}  
   877 *}
   882 *}
   878 
   883 
   879 text {*
   884 text {*
   880   Whenever types are given, they are stored in the @{ML SOME}s. The types are
   885   Whenever types are given, they are stored in the @{ML SOME}s. The types are
   913   \end{readmore}
   918   \end{readmore}
   914 *}
   919 *}
   915 
   920 
   916 text_raw {*
   921 text_raw {*
   917   \begin{exercise}
   922   \begin{exercise}
   918   Have a look at how the parser @{ML Parse_Spec.where_alt_specs} is implemented
   923   Have a look at how the parser @{ML Parse_Spec.where_multi_specs} is implemented
   919   in file @{ML_file "Pure/Isar/parse_spec.ML"}. This parser corresponds
   924   in file @{ML_file "Pure/Isar/parse_spec.ML"}. This parser corresponds
   920   to the ``where-part'' of the introduction rules given above. Below
   925   to the ``where-part'' of the introduction rules given above. Below
   921   we paraphrase the code of @{ML_ind where_alt_specs in Parse_Spec} adapted to our
   926   we paraphrase the code of @{ML_ind where_multi_specs in Parse_Spec} adapted to our
   922   purposes. 
   927   purposes. 
   923   \begin{isabelle}
   928   \begin{isabelle}
   924 *}
   929 *}
   925 ML %linenosgray{*val spec_parser' = 
   930 ML %linenosgray{*val spec_parser' = 
   926      Parse.fixes -- 
   931      Parse.vars -- 
   927      Scan.optional
   932      Scan.optional
   928      (Parse.$$$ "where" |-- 
   933      (Parse.$$$ "where" |-- 
   929         Parse.!!! 
   934         Parse.!!! 
   930           (Parse.enum1 "|" 
   935           (Parse.enum1 "|" 
   931              ((Parse_Spec.opt_thm_name ":" -- Parse.prop) --| 
   936              ((Parse_Spec.opt_thm_name ":" -- Parse.prop) --| 
   979 *}
   984 *}
   980 
   985 
   981 ML %grayML{*let
   986 ML %grayML{*let
   982   val do_nothing = Scan.succeed (Local_Theory.background_theory I)
   987   val do_nothing = Scan.succeed (Local_Theory.background_theory I)
   983 in
   988 in
   984   Outer_Syntax.local_theory @{command_spec "foobar"} 
   989   Outer_Syntax.local_theory @{command_keyword "foobar"} 
   985     "description of foobar" 
   990     "description of foobar" 
   986       do_nothing
   991       do_nothing
   987 end *}
   992 end *}
   988 
   993 
   989 text {*
   994 text {*
  1024 
  1029 
  1025 ML %grayML{*let
  1030 ML %grayML{*let
  1026   fun trace_prop str = 
  1031   fun trace_prop str = 
  1027     Local_Theory.background_theory (fn ctxt => (tracing str; ctxt))
  1032     Local_Theory.background_theory (fn ctxt => (tracing str; ctxt))
  1028 in
  1033 in
  1029   Outer_Syntax.local_theory @{command_spec "foobar_trace"} 
  1034   Outer_Syntax.local_theory @{command_keyword "foobar_trace"} 
  1030     "traces a proposition" 
  1035     "traces a proposition" 
  1031       (Parse.prop >> trace_prop)
  1036       (Parse.prop >> trace_prop)
  1032 end *}
  1037 end *}
  1033 
  1038 
  1034 text {*
  1039 text {*
  1067       val prop = Syntax.read_prop ctxt str
  1072       val prop = Syntax.read_prop ctxt str
  1068     in
  1073     in
  1069       Proof.theorem NONE (K I) [[(prop, [])]] ctxt
  1074       Proof.theorem NONE (K I) [[(prop, [])]] ctxt
  1070     end
  1075     end
  1071 in
  1076 in
  1072   Outer_Syntax.local_theory_to_proof @{command_spec "foobar_goal"}
  1077   Outer_Syntax.local_theory_to_proof @{command_keyword "foobar_goal"}
  1073     "proves a proposition" 
  1078     "proves a proposition" 
  1074       (Parse.prop >> goal_prop)
  1079       (Parse.prop >> goal_prop)
  1075 end *}
  1080 end *}
  1076 
  1081 
  1077 text {*
  1082 text {*
  1133 
  1138 
  1134 ML %linenosgray{*let
  1139 ML %linenosgray{*let
  1135    fun after_qed thm_name thms lthy =
  1140    fun after_qed thm_name thms lthy =
  1136         Local_Theory.note (thm_name, (flat thms)) lthy |> snd
  1141         Local_Theory.note (thm_name, (flat thms)) lthy |> snd
  1137 
  1142 
  1138    fun setup_proof (thm_name, {text, ...}) lthy =
  1143    fun setup_proof (thm_name, src) lthy =
  1139      let
  1144      let
       
  1145        val text = Input.source_content src
  1140        val trm = Code_Runtime.value lthy result_cookie ("", text)
  1146        val trm = Code_Runtime.value lthy result_cookie ("", text)
  1141      in
  1147      in
  1142        Proof.theorem NONE (after_qed thm_name) [[(trm, [])]] lthy
  1148        Proof.theorem NONE (after_qed thm_name) [[(trm, [])]] lthy
  1143      end
  1149      end
  1144 
  1150 
  1145    val parser = Parse_Spec.opt_thm_name ":" -- Parse.ML_source
  1151    val parser = Parse_Spec.opt_thm_name ":" -- Parse.ML_source
  1146 in
  1152 in
  1147    Outer_Syntax.local_theory_to_proof @{command_spec "foobar_prove"}
  1153    Outer_Syntax.local_theory_to_proof @{command_keyword "foobar_prove"}
  1148      "proving a proposition" 
  1154      "proving a proposition" 
  1149        (parser >> setup_proof)
  1155        (parser >> setup_proof)
  1150 end*}
  1156 end*}
  1151 
  1157 
  1152 text {*
  1158 text {*
  1177   \begin{isabelle}
  1183   \begin{isabelle}
  1178   \isacommand{thm}~@{text "test"}\\
  1184   \isacommand{thm}~@{text "test"}\\
  1179   @{text "> "}~@{thm TrueI}
  1185   @{text "> "}~@{thm TrueI}
  1180   \end{isabelle}
  1186   \end{isabelle}
  1181 
  1187 
  1182   While this is everything you have to do for a new command when using jEdit, 
  1188 *}
  1183   things are not as simple when using Emacs and ProofGeneral. We explain the details 
  1189 
  1184   next.
  1190 
  1185 *}
  1191 (*
  1186 
       
  1187 
       
  1188 section {* Proof-General and Keyword Files *}
       
  1189 
       
  1190 text {*
       
  1191   In order to use a new command in Emacs and Proof-General, you need a keyword
       
  1192   file that can be loaded by ProofGeneral. To keep things simple we take as
       
  1193   a running example the command \isacommand{foobar} from the previous section. 
       
  1194 
       
  1195   A keyword file can be generated with the command-line:
       
  1196 
       
  1197   @{text [display] "$ isabelle keywords -k foobar some_log_files"}
       
  1198 
       
  1199   The option @{text "-k foobar"} indicates which postfix the name of the keyword file 
       
  1200   will be assigned. In the case above the file will be named @{text
       
  1201   "isar-keywords-foobar.el"}. This command requires log files to be
       
  1202   present (in order to extract the keywords from them). To generate these log
       
  1203   files, you first need to package the code above into a separate theory file named
       
  1204   @{text "Command.thy"}, say---see Figure~\ref{fig:commandtheory} for the
       
  1205   complete code.
       
  1206 
       
  1207 
       
  1208   %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
       
  1209   \begin{figure}[t]
       
  1210   \begin{graybox}\small
       
  1211   \isacommand{theory}~@{text Command}\\
       
  1212   \isacommand{imports}~@{text Main}\\
       
  1213   \isacommand{keywords} @{text [quotes] "foobar"} @{text "::"} @{text "thy_decl"}\\
       
  1214   \isacommand{begin}\\
       
  1215   \isacommand{ML}~@{text "\<verbopen>"}\\
       
  1216   @{ML
       
  1217 "let
       
  1218   val do_nothing = Scan.succeed (Local_Theory.background_theory I)
       
  1219 in
       
  1220   Outer_Syntax.local_theory @{command_spec \"foobar\"}
       
  1221     \"description of foobar\" 
       
  1222        do_nothing
       
  1223 end"}\\
       
  1224   @{text "\<verbclose>"}\\
       
  1225   \isacommand{end}
       
  1226   \end{graybox}
       
  1227   \caption{This file can be used to generate a log file. This log file in turn can
       
  1228   be used to generate a keyword file containing the command \isacommand{foobar}.
       
  1229   \label{fig:commandtheory}}
       
  1230   \end{figure}
       
  1231   %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
       
  1232 
       
  1233 
       
  1234   For our purposes it is sufficient to use the log files of the theories
       
  1235   @{text "Pure"}, @{text "HOL"} and @{text "Pure-ProofGeneral"}, as well as
       
  1236   the log file for the theory @{text "Command.thy"}, which contains the new
       
  1237   \isacommand{foobar}-command. If you target other logics besides HOL, such
       
  1238   as Nominal or ZF, then you need to adapt the log files appropriately.
       
  1239   
       
  1240   @{text Pure} and @{text HOL} are usually compiled during the installation of
       
  1241   Isabelle. So log files for them should be already available. If not, then
       
  1242   they can be easily compiled with the build-script from the Isabelle
       
  1243   distribution.
       
  1244 
       
  1245   @{text [display] 
       
  1246 "$ ./build -m \"Pure\"
       
  1247 $ ./build -m \"HOL\""}
       
  1248   
       
  1249   The @{text "Pure-ProofGeneral"} theory needs to be compiled with:
       
  1250 
       
  1251   @{text [display] "$ ./build -m \"Pure-ProofGeneral\" \"Pure\""}
       
  1252 
       
  1253   For the theory @{text "Command.thy"}, you first need to create a ``managed'' subdirectory 
       
  1254   with:
       
  1255  
       
  1256   @{text [display] "$ isabelle mkroot -d FoobarCommand"}
       
  1257 
       
  1258   This generates a directory containing the files: 
       
  1259   
       
  1260   @{text [display] 
       
  1261 "./FoobarCommand/ROOT
       
  1262 ./FoobarCommand/document
       
  1263 ./FoobarCommand/document/root.tex"}
       
  1264  
       
  1265   You need to copy the file @{text "Command.thy"} into the directory @{text "FoobarCommand"}
       
  1266   and add the line 
       
  1267 
       
  1268   @{text [display] "no_document use_thy \"Command\";"} 
       
  1269   
       
  1270   to the file @{text "./FoobarCommand/ROOT"}. You can now compile the theory by just typing:
       
  1271 
       
  1272   @{text [display] "$ isabelle build"}
       
  1273 
       
  1274   If the compilation succeeds, you have finally created all the necessary log files. 
       
  1275   They are stored in the directory 
       
  1276   
       
  1277   @{text [display]  "~/.isabelle/heaps/Isabelle2012/polyml-5.2.1_x86-linux/log"}
       
  1278 
       
  1279   or something similar depending on your Isabelle distribution and architecture.
       
  1280   One quick way to assign a shell variable to this directory is by typing
       
  1281 
       
  1282   @{text [display] "$ ISABELLE_LOGS=\"$(isabelle getenv -b ISABELLE_OUTPUT)\"/log"}
       
  1283  
       
  1284   on the Unix prompt. If you now type @{text "ls $ISABELLE_LOGS"}, then the 
       
  1285   directory should include the files:
       
  1286 
       
  1287   @{text [display] 
       
  1288 "Pure.gz
       
  1289 HOL.gz
       
  1290 Pure-ProofGeneral.gz
       
  1291 HOL-FoobarCommand.gz"} 
       
  1292 
       
  1293   From them you can create the keyword files. Assuming the name 
       
  1294   of the directory is in  @{text "$ISABELLE_LOGS"},
       
  1295   then the Unix command for creating the keyword file is:
       
  1296 
       
  1297 @{text [display]
       
  1298 "$ isabelle keywords -k foobar 
       
  1299    $ISABELLE_LOGS/{Pure.gz,HOL.gz,Pure-ProofGeneral.gz,HOL-FoobarCommand.gz}"}
       
  1300 
       
  1301   The result is the file @{text "isar-keywords-foobar.el"}. It should contain
       
  1302   the string @{text "foobar"} twice.\footnote{To see whether things are fine,
       
  1303   check that @{text "grep foobar"} on this file returns something non-empty.}
       
  1304   This keyword file needs to be copied into the directory @{text
       
  1305   "~/.isabelle/etc"}. To make ProofGeneral aware of it, you have to start
       
  1306   Isabelle with the option @{text "-k foobar"}, that is:
       
  1307 
       
  1308 
       
  1309   @{text [display] "$ isabelle emacs -k foobar a_theory_file"}
       
  1310 
       
  1311   If you now build a theory on top of @{text "Command.thy"}, 
       
  1312   then you can now use the command \isacommand{foobar}
       
  1313   in Proof-General
       
  1314 
       
  1315   A similar procedure has to be done with any 
       
  1316   other new command, and also any new keyword that is introduced with 
       
  1317   the function @{ML_ind define in Keyword}. For example:
       
  1318 *}
       
  1319 
       
  1320 ML %grayML{*val _ = Keyword.define ("blink", NONE) *}
       
  1321 
       
  1322 
       
  1323 text {*
       
  1324   Also if the kind of a command changes, from @{text "thy_decl"} to 
       
  1325   @{text "thy_goal"} say,  you need to recreate the keyword file.
       
  1326 *}
       
  1327 
       
  1328 
       
  1329 
       
  1330 
       
  1331 text {*
  1192 text {*
  1332   {\bf TBD below}
  1193   {\bf TBD below}
  1333 
  1194 
  1334  *}
  1195  *}
  1335 
  1196 
  1336 
  1197 *)
  1337 
  1198 
  1338 
  1199 
  1339 
  1200 
  1340 (*
  1201 (*
  1341 ML {*
  1202 ML {*
  1483   An example of a very simple method is:
  1344   An example of a very simple method is:
  1484 *}
  1345 *}
  1485 
  1346 
  1486 method_setup %gray foo = 
  1347 method_setup %gray foo = 
  1487  {* Scan.succeed
  1348  {* Scan.succeed
  1488       (K (SIMPLE_METHOD ((etac @{thm conjE} THEN' rtac @{thm conjI}) 1))) *}
  1349       (fn ctxt => (SIMPLE_METHOD ((eresolve_tac ctxt [@{thm conjE}] THEN' resolve_tac ctxt [@{thm conjI}]) 1))) *}
  1489          "foo method for conjE and conjI"
  1350          "foo method for conjE and conjI"
  1490 
  1351 
  1491 text {*
  1352 text {*
  1492   It defines the method @{text foo}, which takes no arguments (therefore the
  1353   It defines the method @{text foo}, which takes no arguments (therefore the
  1493   parser @{ML Scan.succeed}) and only applies a single tactic, namely the tactic which 
  1354   parser @{ML Scan.succeed}) and only applies a single tactic, namely the tactic which 
  1512 lemma "True"
  1373 lemma "True"
  1513 apply(test)
  1374 apply(test)
  1514 oops
  1375 oops
  1515 
  1376 
  1516 method_setup joker = {* 
  1377 method_setup joker = {* 
  1517   Scan.lift (Scan.succeed (fn ctxt => Method.cheating ctxt true)) *} {* bla *}
  1378   Scan.lift (Scan.succeed (fn ctxt => Method.cheating true)) *} {* bla *}
  1518 
  1379 
  1519 lemma "False"
  1380 lemma "False"
  1520 apply(joker)
  1381 apply(joker)
  1521 oops
  1382 oops
  1522 
  1383 
  1523 text {* if true is set then always works *}
  1384 text {* if true is set then always works *}
  1524 
  1385 
  1525 ML {* atac *} 
  1386 ML {* assume_tac *} 
  1526 
  1387 
  1527 method_setup first_atac = {* Scan.lift (Scan.succeed (K (SIMPLE_METHOD (atac 1)))) *} {* bla *}
  1388 method_setup first_atac = {* Scan.lift (Scan.succeed (fn ctxt => (SIMPLE_METHOD (assume_tac ctxt 1)))) *} {* bla *}
  1528 
  1389 
  1529 ML {* HEADGOAL *}
  1390 ML {* HEADGOAL *}
  1530 
  1391 
  1531 lemma "A \<Longrightarrow> A"
  1392 lemma "A \<Longrightarrow> A"
  1532 apply(first_atac)
  1393 apply(first_atac)
  1533 oops
  1394 oops
  1534 
  1395 
  1535 method_setup my_atac = {* Scan.lift (Scan.succeed (K (SIMPLE_METHOD' atac))) *} {* bla *}
  1396 method_setup my_atac = {* Scan.lift (Scan.succeed (fn ctxt => (SIMPLE_METHOD' (assume_tac ctxt)))) *} {* bla *}
  1536 
  1397 
  1537 lemma "A \<Longrightarrow> A"
  1398 lemma "A \<Longrightarrow> A"
  1538 apply(my_atac)
  1399 apply(my_atac)
  1539 oops
  1400 oops
  1540 
  1401 
  1552 *)
  1413 *)
  1553 
  1414 
  1554 ML {* resolve_tac *}
  1415 ML {* resolve_tac *}
  1555 
  1416 
  1556 method_setup myrule =
  1417 method_setup myrule =
  1557   {* Scan.lift (Scan.succeed (K (METHOD (fn thms => resolve_tac thms 1)))) *}
  1418   {* Scan.lift (Scan.succeed (fn ctxt => (METHOD (fn thms => resolve_tac ctxt thms 1)))) *}
  1558   {* bla *}
  1419   {* bla *}
  1559 
  1420 
  1560 lemma
  1421 lemma
  1561   assumes a: "A \<Longrightarrow> B \<Longrightarrow> C"
  1422   assumes a: "A \<Longrightarrow> B \<Longrightarrow> C"
  1562   shows "C"
  1423   shows "C"