--- a/ProgTutorial/Essential.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/Essential.thy Thu Mar 13 17:16:49 2014 +0000
@@ -609,9 +609,9 @@
term constructor is defined (@{text "List"}) and also in which datatype
(@{text "list"}). Even worse, some constants have a name involving
type-classes. Consider for example the constants for @{term "zero"} and
- \mbox{@{text "(op *)"}}:
-
- @{ML_response [display,gray] "(@{term \"0::nat\"}, @{term \"(op *)\"})"
+ \mbox{@{term "times"}}:
+
+ @{ML_response [display,gray] "(@{term \"0::nat\"}, @{term \"times\"})"
"(Const (\"Groups.zero_class.zero\", \<dots>),
Const (\"Groups.times_class.times\", \<dots>))"}
@@ -2133,7 +2133,7 @@
*}
setup %gray{*
- Term_Style.setup "my_strip_allq" (Scan.succeed (K strip_allq))
+ Term_Style.setup @{binding "my_strip_allq"} (Scan.succeed (K strip_allq))
*}
text {*
@@ -2185,7 +2185,7 @@
val parser = Scan.repeat Args.name
fun action xs = K (rename_allq xs #> strip_allq)
in
- Term_Style.setup "my_strip_allq2" (parser >> action)
+ Term_Style.setup @{binding "my_strip_allq2"} (parser >> action)
end *}
text {*
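
  (A minimal sketch of the updated, binding-based interface: the style name my_id_style and the
  identity transformation are placeholders, not part of the tutorial. The parsed argument is
  ignored via K, exactly as for my_strip_allq above; the style itself is just the identity on terms.

    setup %gray{*
      Term_Style.setup @{binding "my_id_style"} (Scan.succeed (K I))
    *}
  )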
--- a/ProgTutorial/First_Steps.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/First_Steps.thy Thu Mar 13 17:16:49 2014 +0000
@@ -862,7 +862,7 @@
ML %linenosgray{*val term_pat_setup =
let
- val parser = Args.context -- Scan.lift Args.name_source
+ val parser = Args.context -- Scan.lift Args.name_inner_syntax
fun term_pat (ctxt, str) =
str |> Proof_Context.read_term_pattern ctxt
@@ -895,7 +895,7 @@
ML %grayML{*val type_pat_setup =
let
- val parser = Args.context -- Scan.lift Args.name_source
+ val parser = Args.context -- Scan.lift Args.name_inner_syntax
fun typ_pat (ctxt, str) =
let
--- a/ProgTutorial/Helper/Command/Command.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/Helper/Command/Command.thy Thu Mar 13 17:16:49 2014 +0000
@@ -7,7 +7,6 @@
ML {*
let
val do_nothing = Scan.succeed (Local_Theory.background_theory I)
- val kind = Keyword.thy_decl
in
Outer_Syntax.local_theory @{command_spec "foobar"}
"description of foobar"
@@ -20,7 +19,6 @@
fun trace_prop str =
Local_Theory.background_theory (fn lthy => (tracing str; lthy))
val trace_prop_parser = Parse.prop >> trace_prop
- val kind = Keyword.thy_decl
in
Outer_Syntax.local_theory @{command_spec "foobar_trace"}
"traces a proposition"
@@ -37,7 +35,6 @@
Proof.theorem NONE (K I) [[(prop,[])]] lthy
end;
val prove_prop_parser = Parse.prop >> prove_prop
- val kind = Keyword.thy_goal
in
Outer_Syntax.local_theory_to_proof @{command_spec "foobar_goal"}
"proving a proposition"
@@ -58,9 +55,9 @@
fun after_qed thm_name thms lthy =
Local_Theory.note (thm_name, (flat thms)) lthy |> snd
- fun setup_proof (thm_name, (txt, pos)) lthy =
+ fun setup_proof (thm_name, {text, ...}) lthy =
let
- val trm = Code_Runtime.value lthy result_cookie ("", txt)
+ val trm = Code_Runtime.value lthy result_cookie ("", text)
in
Proof.theorem NONE (after_qed thm_name) [[(trm,[])]] lthy
end
--- a/ProgTutorial/Intro.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/Intro.thy Thu Mar 13 17:16:49 2014 +0000
@@ -296,7 +296,7 @@
\item {\bf Jeremy Dawson} wrote the first version of chapter \ref{chp:parsing}
about parsing.
- \item {\bf Florian Haftmann} helped with maintaining recipe \ref{rec:callml}.
+ %%\item {\bf Florian Haftmann} helped with maintaining recipe \ref{rec:callml}.
\item {\bf Armin Heller} helped with recipe \ref{rec:sat}.
@@ -306,7 +306,7 @@
\item {\bf Alexander Krauss} wrote a very early version of the ``first-steps''
chapter and also contributed the material on @{ML_funct Named_Thms}.
- \item {\bf Tobias Nipkow} contributed recipe \ref{rec:callml}.
+ %%\item {\bf Tobias Nipkow} contributed recipe \ref{rec:callml}.
\item {\bf Michael Norrish} proofread parts of the text.
--- a/ProgTutorial/Package/Ind_Interface.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/Package/Ind_Interface.thy Thu Mar 13 17:16:49 2014 +0000
@@ -119,9 +119,9 @@
parse spec_parser input
end"
"(([(even, NONE, NoSyn), (odd, NONE, NoSyn)],
- [((even0,\<dots>), \"\\^E\\^Ftoken\\^Eeven 0\\^E\\^F\\^E\"),
- ((evenS,\<dots>), \"\\^E\\^Ftoken\\^Eodd n \<Longrightarrow> even (Suc n)\\^E\\^F\\^E\"),
- ((oddS,\<dots>), \"\\^E\\^Ftoken\\^Eeven n \<Longrightarrow> odd (Suc n)\\^E\\^F\\^E\")]), [])"}
+ [((even0,\<dots>), \<dots>),
+ ((evenS,\<dots>), \<dots>),
+ ((oddS,\<dots>), \<dots>)]), [])"}
then we get back the specifications of the predicates (with type and syntax annotations),
and specifications of the introduction rules. This is all the information we
--- a/ProgTutorial/Parsing.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/Parsing.thy Thu Mar 13 17:16:49 2014 +0000
@@ -726,7 +726,7 @@
(*
ML {*
let
- val parser = Args.context -- Scan.lift Args.name_source
+ val parser = Args.context -- Scan.lift Args.name_inner_syntax
fun term_pat (ctxt, str) =
str |> Syntax.read_prop ctxt
@@ -752,23 +752,19 @@
for terms and types: you can just call the predefined parsers. Terms can
be parsed using the function @{ML_ind term in Parse}. For example:
-@{ML_response [display,gray]
+@{ML_response_fake [display,gray]
"let
val input = Outer_Syntax.scan Position.none \"foo\"
in
Parse.term input
end"
-"(\"\\^E\\^Ftoken\\^Efoo\\^E\\^F\\^E\", [])"}
+"(\"<markup>\", [])"}
+
The function @{ML_ind prop in Parse} is similar, except that it gives a different
   error message when parsing fails. As you can see, the parser returns not just
- the parsed string, but also some encoded information. You can decode the
- information with the function @{ML_ind parse in YXML} in @{ML_struct YXML}. For example
-
- @{ML_response_fake [display,gray]
- "YXML.parse \"\\^E\\^Ftoken\\^Efoo\\^E\\^F\\^E\""
- "Text \"\\^E\\^Ftoken\\^Efoo\\^E\\^F\\^E\""}
-
+ the parsed string, but also some markup information. You can decode the
+ information with the function @{ML_ind parse in YXML} in @{ML_struct YXML}.
The result of the decoding is an XML-tree. You can see better what is going on if
you replace @{ML Position.none} by @{ML "Position.line 42"}, say:
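
  (For reference, the decoding step under the new markup scheme might look as follows; this is a
  sketch only, where the string handed to YXML.parse is whatever marked-up output Parse.term
  returns, and the exact encoding is version-specific.

    let
      (* scan with an explicit position so the markup carries position information *)
      val (markup_str, _) =
            Parse.term (Outer_Syntax.scan (Position.line 42) "foo")
    in
      YXML.parse markup_str
    end

  The result is an XML-tree whose position information reflects line 42.)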
@@ -847,9 +843,9 @@
parse spec_parser input
end"
"(([(even, NONE, NoSyn), (odd, NONE, NoSyn)],
- [((even0,\<dots>), \"\\^E\\^Ftoken\\^Eeven 0\\^E\\^F\\^E\"),
- ((evenS,\<dots>), \"\\^E\\^Ftoken\\^Eodd n \<Longrightarrow> even (Suc n)\\^E\\^F\\^E\"),
- ((oddS,\<dots>), \"\\^E\\^Ftoken\\^Eeven n \<Longrightarrow> odd (Suc n)\\^E\\^F\\^E\")]), [])"}
+ [((even0,\<dots>),\<dots>),
+ ((evenS,\<dots>),\<dots>),
+ ((oddS,\<dots>),\<dots>)]), [])"}
As you see, the result is a pair consisting of a list of
variables with optional type-annotation and syntax-annotation, and a list of
@@ -869,8 +865,8 @@
in
parse Parse.fixes input
end"
-"([(foo, SOME \"\\^E\\^Ftoken\\^Eint \<Rightarrow> bool\\^E\\^F\\^E\", NoSyn),
- (bar, SOME \"\\^E\\^Ftoken\\^Enat\\^E\\^F\\^E\", Mixfix (\"BAR\", [], 100)),
+"([(foo, SOME \<dots>, NoSyn),
+ (bar, SOME \<dots>, Mixfix (\"BAR\", [], 100)),
(blonk, NONE, NoSyn)],[])"}
*}
@@ -897,8 +893,8 @@
val input = filtered_input \"foo_lemma[intro,dest!]:\"
val ((name, attrib), _) = parse (Parse_Spec.thm_name \":\") input
in
- (name, map Args.dest_src attrib)
-end" "(foo_lemma, [((\"intro\", []), \<dots>), ((\"dest\", [\<dots>]), \<dots>)])"}
+ (name, map Args.name_of_src attrib)
+end" "(foo_lemma, [(\"intro\", \<dots>), (\"dest\", \<dots>)])"}
The function @{ML_ind opt_thm_name in Parse_Spec} is the ``optional'' variant of
@{ML_ind thm_name in Parse_Spec}. Theorem names can contain attributes. The name
@@ -1133,9 +1129,9 @@
fun after_qed thm_name thms lthy =
Local_Theory.note (thm_name, (flat thms)) lthy |> snd
- fun setup_proof (thm_name, (txt, pos)) lthy =
+ fun setup_proof (thm_name, {text, ...}) lthy =
let
- val trm = Code_Runtime.value lthy result_cookie ("", txt)
+ val trm = Code_Runtime.value lthy result_cookie ("", text)
in
Proof.theorem NONE (after_qed thm_name) [[(trm, [])]] lthy
end
--- a/ProgTutorial/Recipes/Antiquotes.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/Recipes/Antiquotes.thy Thu Mar 13 17:16:49 2014 +0000
@@ -35,7 +35,7 @@
The code is checked by sending the ML-expression @{text [quotes] "val _ =
a_piece_of_code"} to the ML-compiler (i.e.~the function @{ML
- "ML_Context.eval_text_in"} in Line 4 below). The complete code of the
+ "ML_Context.eval_source_in"} in Line 7 below). The complete code of the
document antiquotation is as follows:
*}
@@ -43,8 +43,12 @@
ML%linenosgray{*fun ml_val code_txt = "val _ = " ^ code_txt
fun output_ml {context = ctxt, ...} code_txt =
- (ML_Context.eval_text_in (SOME ctxt) false Position.none (ml_val code_txt);
+let
+ val srcpos = {delimited = false, text = (ml_val code_txt), pos = Position.none}
+in
+ (ML_Context.eval_source_in (SOME ctxt) false srcpos;
Thy_Output.output ctxt (map Pretty.str (space_explode "\n" code_txt)))
+end
val ml_checked_setup = Thy_Output.antiquotation @{binding "ML_checked"} (Scan.lift Args.name) output_ml*}
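
  (Once set up, the antiquotation is used in document text roughly as follows; a sketch, where the
  ML expression is arbitrary and only has to compile.

    text {* @{ML_checked "2 + 3"} *}

  If the expression does not compile, the ML_Context.eval_source_in call above fails and document
  preparation stops with an error.)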
@@ -76,8 +80,12 @@
*}
ML%linenosgray{*fun output_ml {context = ctxt, ...} (code_txt, pos) =
- (ML_Context.eval_text_in (SOME ctxt) false pos (ml_val code_txt);
+let
+ val srcpos = {delimited = false, pos = pos, text = ml_val code_txt}
+in
+ (ML_Context.eval_source_in (SOME ctxt) false srcpos;
Thy_Output.output ctxt (map Pretty.str (space_explode "\n" code_txt)))
+end
val ml_checked_setup2 = Thy_Output.antiquotation @{binding "ML_checked2"}
(Scan.lift (Parse.position Args.name)) output_ml *}
@@ -132,7 +140,11 @@
*}
ML %linenosgray{*fun output_ml_resp {context = ctxt, ...} ((code_txt, pat), pos) =
- (ML_Context.eval_text_in (SOME ctxt) false pos (ml_pat (code_txt, pat));
+ (let
+ val srcpos = {delimited = false, text = ml_pat (code_txt, pat), pos = pos}
+ in
+ ML_Context.eval_source_in (SOME ctxt) false srcpos
+ end;
let
val code_output = space_explode "\n" code_txt
val resp_output = add_resp (space_explode "\n" pat)
@@ -140,6 +152,7 @@
Thy_Output.output ctxt (map Pretty.str (code_output @ resp_output))
end)
+
val ml_resp_setup = Thy_Output.antiquotation @{binding "ML_resp"}
(Scan.lift (Parse.position (Args.name -- Args.name)))
output_ml_resp*}
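
  (A use of this antiquotation supplies the code and the expected response as two string
  arguments; a sketch only, with arbitrary example values.

    text {* @{ML_resp "true andalso false" "false"} *}
  )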
--- a/ProgTutorial/Recipes/CallML.thy Sun Dec 15 23:49:05 2013 +0000
+++ /dev/null Thu Jan 01 00:00:00 1970 +0000
@@ -1,145 +0,0 @@
-(*<*)
-theory CallML
-imports "~~/src/HOL/Number_Theory/Primes"
- "~~/src/HOL/Library/Code_Target_Numeral"
-begin
-(*>*)
-
-section {* Calling ML Functions from within HOL \label{rec:callml} *}
-
-text{*
- {\bf Problem:}
- How to call ML functions from within HOL?\smallskip
-
- {\bf Solution:} This can be achieved with \isacommand{code\_const}
- and \isacommand{code\_reflect}.\smallskip
-
-
- To make it clear we mean here calling unverified ML functions from within
- HOL! The motivation is the paradigm of \emph{result checking}: rather than
- verifying some complicated algorithm, have the algorithm produce an easily
- checkable certificate. For example, instead of verifying an algorithm for
- testing non-primality, have an algorithm that produces a factor as a witness to
- non-primality.
-
- The algorithm is an ML function finding a factor of a number. We first
- declare its type:
-*}
-
-consts factor :: "nat \<Rightarrow> nat"
-
-text{*
- Its definition will be given below in ML. But the whole point is that
- we can prove non-primality via @{const factor}, no matter what its
- actual definition is:
-*}
-
-lemma factor_non_prime:
- "let k = factor n in k \<noteq> 1 \<and> k \<noteq> n \<and> k dvd n \<Longrightarrow> \<not> prime n"
- by (auto simp: prime_nat_def Let_def)
-
-text{*
- Note that the premise is executable once we have defined
- @{const factor}. Here is a trivial definition in ML:
-*}
-
-ML %grayML{*fun factor n = if n = 4 then 2 else 1*}
-
-text{*
- Of course this trivial definition of @{const factor} could have been given
- directly in HOL rather than ML. But by going to the ML level, all of ML is
- at our disposal, including arrays and references, features that are less
- easily emulated in HOL. In fact, we could even call some external software
- from ML, e.g.\ a computer algebra system.
-
- It should be noted, however, that in this example you need to import the
- theory @{theory Code_Target_Numeral} in order to force the HOL-type @{typ nat} to
- be implemented by the ML-type @{text "int"}.
-
- \begin{graybox}\small
- \isacommand{theory}~@{text CallML}\\
- \isacommand{imports}~@{text Main} @{text [quotes] "~~/src/HOL/Library/Code_Target_Numeral"}\\
- ...
- \end{graybox}
-
-
- Thus the ML implementation of
- @{const factor} must be and is of type @{text "int -> int"}. Now it is time
- to connect the two levels:
-*}
-
-definition factor_integer :: "integer \<Rightarrow> integer"
-where
- [code del]: "factor_integer = integer_of_nat \<circ> factor \<circ> nat_of_integer"
-
-lemma [code]:
- "factor = nat_of_integer \<circ> factor_integer \<circ> integer_of_nat"
- by (simp add: factor_integer_def fun_eq_iff)
-
-code_const factor_integer (SML "factor")
-code_reserved SML factor
-
-text{*
- The result of this declaration is that the HOL-function @{const factor}
- is executable and command
-*}
-
-value "factor 4"
-
-text{*
- yields the expected result @{text 2}. Similarly we can prove that
- @{text 4} is not prime:
-*}
-
-lemma "\<not> prime (4::nat)"
- apply(rule factor_non_prime)
- apply eval
- done
-
-text{*
- Note, however, the command \isacommand{code\_const} cannot check that the ML function
- has the required type. Therefore in the worst case a type mismatch will be detected by
- the ML-compiler when we try to evaluate an expression involving @{const
- factor}. It could also happen that @{const factor} is (accidentally)
- redefined on the ML level later on. But remember that we do not assume
- anything about @{const factor} on the HOL-level. Hence no definition of
- @{const factor} can ever lead to an incorrect proof. Of course ``wrong''
- definitions can lead to compile time or run time exceptions, or to failed
- proofs.
-
- The above example was easy because we forced Isabelle (via the inclusion of the
- theory @{theory
- Code_Target_Numeral}) to implement @{typ nat} by @{text int}, a predefined
- ML-type. By default, Isabelle implements, for example, the HOL-type
- @{text list} by the corresponding ML-type. Thus the following variation
- on @{const factor} also works:
-*}
-
-consts factor2 :: "nat \<Rightarrow> nat list" (*<*)(*>*)
-ML %grayML{*fun factor2 n = if n = 4 then [2] else []*}(*<*)(*>*)
-
-definition factor2_integer :: "integer \<Rightarrow> integer list"
-where
- [code del]: "factor2_integer = map integer_of_nat \<circ> factor2 \<circ> nat_of_integer"
-
-lemma [code]:
- "factor2 = map nat_of_integer \<circ> factor2_integer \<circ> integer_of_nat"
- by (simp add: factor2_integer_def fun_eq_iff comp_def)
-
-code_const factor2_integer (SML "factor2")
-code_reserved SML factor2
-
-text{*
- The first line declares the type of @{const factor2}; the second
- gives its implementation in ML; the third makes it executable
- in HOL, and the last is just a test. In this way, you can easily
- interface with ML-functions whose types involve
- @{text bool}, @{text int}, @{text list}, @{text option} and pairs,
- only. If you have arbitrary tuples, for example, then you have to code
- them as nested pairs.
-*}
-
-value "factor2 4"
-
-end
-(*>*)
--- a/ProgTutorial/Recipes/Sat.thy Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/Recipes/Sat.thy Thu Mar 13 17:16:49 2014 +0000
@@ -97,15 +97,15 @@
@{text [quotes] "dpll"}.
There are also two tactics that make use of SAT solvers. One
- is the tactic @{ML sat_tac in sat}. For example
+ is the tactic @{ML sat_tac in SAT}. For example
*}
lemma "True"
-apply(tactic {* sat.sat_tac @{context} 1 *})
+apply(tactic {* SAT.sat_tac @{context} 1 *})
done
text {*
- However, for proving anything more exciting using @{ML "sat_tac" in sat} you
+ However, for proving anything more exciting using @{ML "sat_tac" in SAT} you
have to use a SAT solver that can produce a proof. The internal
   one is not usable for this.
@@ -113,7 +113,7 @@
The interface for the external SAT solvers is implemented
in @{ML_file "HOL/Tools/sat_solver.ML"}. This file contains also a simple
SAT solver based on the DPLL algorithm. The tactics for SAT solvers are
- implemented in @{ML_file "HOL/Tools/sat_funcs.ML"}. Functions concerning
+ implemented in @{ML_file "HOL/Tools/sat.ML"}. Functions concerning
propositional formulas are implemented in @{ML_file
"HOL/Tools/prop_logic.ML"}. The tables used in the translation function are
implemented in @{ML_file "Pure/General/table.ML"}.
--- a/ProgTutorial/antiquote_setup.ML Sun Dec 15 23:49:05 2013 +0000
+++ b/ProgTutorial/antiquote_setup.ML Thu Mar 13 17:16:49 2014 +0000
@@ -33,7 +33,8 @@
(* eval function *)
fun eval_fn ctxt exp =
- ML_Context.eval_text_in (SOME ctxt) false Position.none exp
+ ML_Context.eval_source_in (SOME ctxt) false
+ {delimited = false, text = exp, pos = Position.none}
(* checks and prints a possibly open expressions, no index *)
fun output_ml {context = ctxt, ...} (txt, (vs, stru)) =
--- a/ROOT Sun Dec 15 23:49:05 2013 +0000
+++ b/ROOT Thu Mar 13 17:16:49 2014 +0000
@@ -23,7 +23,6 @@
"Recipes/Antiquotes"
"Recipes/TimeLimit"
"Recipes/Timing"
- "Recipes/CallML"
"Recipes/ExternalSolver"
"Recipes/Oracle"
"Recipes/Sat"
@@ -56,7 +55,6 @@
"Recipes/Antiquotes"
"Recipes/TimeLimit"
"Recipes/Timing"
- "Recipes/CallML"
"Recipes/ExternalSolver"
"Recipes/Oracle"
"Recipes/Sat"
Binary file progtutorial.pdf has changed