# HG changeset patch
# User Christian Urban
# Date 1319631549 -3600
# Node ID 141751cab5b2515e296f655222facc4456ca0b44
# Parent  0fb910f62bf9db6050bd75ac7ead2c57bbcdd4e0
# Parent  683fb6e468b1f5adca366cbf40b534b388769394
merged

diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/Advanced.thy
--- a/ProgTutorial/Advanced.thy	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/Advanced.thy	Wed Oct 26 13:19:09 2011 +0100
@@ -127,8 +127,8 @@
 
 section {* Contexts (TBD) *}
 
-ML{*ProofContext.debug*}
-ML{*ProofContext.verbose*}
+ML{*Proof_Context.debug*}
+ML{*Proof_Context.verbose*}
 
 section {* Local Theories (TBD) *}
diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/Appendix.thy
--- a/ProgTutorial/Appendix.thy	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/Appendix.thy	Wed Oct 26 13:19:09 2011 +0100
@@ -21,7 +21,7 @@
 
   \begin{itemize}
   \item translations/print translations;
-  @{ML "ProofContext.print_syntax"}
+  @{ML "Proof_Context.print_syntax"}
 
   \item user space type systems (in the form that already exists)
diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/Base.thy
--- a/ProgTutorial/Base.thy	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/Base.thy	Wed Oct 26 13:19:09 2011 +0100
@@ -54,7 +54,7 @@
 let
   val pre = implode ["\n", "local_setup ", "{", "*", "\n"]
   val post = implode ["\n", "*", "}", "\n"]
-  val _ = write_file (enclose pre post txt) (ProofContext.theory_of lthy)
+  val _ = write_file (enclose pre post txt) (Proof_Context.theory_of lthy)
 in
   lthy
 end
diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/First_Steps.thy
--- a/ProgTutorial/First_Steps.thy	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/First_Steps.thy	Wed Oct 26 13:19:09 2011 +0100
@@ -18,7 +18,7 @@
   ``We will most likely never realize the full importance of painting the Tower,\\
   that it is the essential element in the conservation of metal works and the\\
   more meticulous the paint job, the longer the tower shall endure.''} \\[1ex]
-  Gustave Eiffel, In his book {\em The 300-Meter Tower}.\footnote{The Eiffel Tower has been
+  Gustave Eiffel, in his book {\em The 300-Meter Tower}.\footnote{The Eiffel Tower has been
   re-painted 18 times since its initial construction, an average of once every
   seven years. It takes more than one year for a team of 25 painters to paint the
   tower from top to bottom.}
@@ -284,7 +284,7 @@
   Pretty.block (Pretty.commas (map (pretty_thm_no_vars ctxt) thms))*}
 
 text {*
-  Printing functions for types are
+  Printing functions for @{ML_type typ} are
 *}
 
 ML{*fun pretty_typ ctxt ty = Syntax.pretty_typ ctxt ty
@@ -292,7 +292,7 @@
   Pretty.block (Pretty.commas (map (pretty_typ ctxt) tys))*}
 
 text {*
-  respectively ctypes
+  respectively @{ML_type ctyp}
 *}
 
 ML{*fun pretty_ctyp ctxt cty = pretty_typ ctxt (typ_of cty)
@@ -753,11 +753,11 @@
   Another important antiquotation is @{text "@{context}"}. (What the
   difference between a theory and a context is will be described in Chapter
   \ref{chp:advanced}.) A context is for example needed in order to use the
-  function @{ML print_abbrevs in ProofContext} that list of all currently
+  function @{ML print_abbrevs in Proof_Context} that list of all currently
   defined abbreviations.
 
   @{ML_response_fake [display, gray]
-  "ProofContext.print_abbrevs @{context}"
+  "Proof_Context.print_abbrevs @{context}"
 "Code_Evaluation.valtermify \<equiv> \<lambda>x. (x, \<lambda>u. Code_Evaluation.termify x)
 INTER \<equiv> INFI
 Inter \<equiv> Inf
@@ -846,7 +846,7 @@
   val parser = Args.context -- Scan.lift Args.name_source
 
   fun term_pat (ctxt, str) =
-     str |> ProofContext.read_term_pattern ctxt
+     str |> Proof_Context.read_term_pattern ctxt
          |> ML_Syntax.print_term
         |> ML_Syntax.atomic
 in
@@ -862,7 +862,7 @@
 text {*
   The parser in Line 2 provides us with a context and a string; this string
   is transformed into a term using the function @{ML_ind read_term_pattern in
-  ProofContext} (Line 5); the next two lines transform the term into a string
+  Proof_Context} (Line 5); the next two lines transform the term into a string
   so that the ML-system can understand it. (All these functions will be
   explained in more detail in later sections.) An example for this
   antiquotation is:
diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/Package/Ind_Code.thy
--- a/ProgTutorial/Package/Ind_Code.thy	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/Package/Ind_Code.thy	Wed Oct 26 13:19:09 2011 +0100
@@ -136,7 +136,7 @@
 
 ML %linenosgray{*fun defns rules preds prednames mxs arg_typss lthy =
 let
-  val thy = ProofContext.theory_of lthy
+  val thy = Proof_Context.theory_of lthy
   val orules = map (Object_Logic.atomize_term thy) rules
   val defs = map (defn_aux lthy orules preds) (preds ~~ arg_typss)
 in
@@ -148,7 +148,7 @@
   meta-quanti\-fications. In Line 4, we transform these introduction rules
   into the object logic (since definitions cannot be stated with
   meta-connectives). To do this transformation we have to obtain the theory
-  behind the local theory using the function @{ML_ind theory_of in ProofContext}
+  behind the local theory using the function @{ML_ind theory_of in Proof_Context}
   (Line 3); with this theory we can use the function @{ML_ind atomize_term in
   Object_Logic} to make the transformation (Line 4).
   The call to @{ML defn_aux} in Line 5 produces all right-hand sides of the
@@ -341,7 +341,7 @@
 in
   Goal.prove lthy' [] [prem] goal
     (fn {prems, ...} => ind_tac defs prems cnewpreds)
-  |> singleton (ProofContext.export lthy' lthy)
+  |> singleton (Proof_Context.export lthy' lthy)
 end *}
 
 text {*
@@ -405,7 +405,7 @@
   val Ps = replicate (length preds) "P"
   val (newprednames, lthy') = Variable.variant_fixes Ps lthy
 
-  val thy = ProofContext.theory_of lthy'
+  val thy = Proof_Context.theory_of lthy'
   val tyss' = map (fn tys => tys ---> HOLogic.boolT) arg_tyss
   val newpreds = map Free (newprednames ~~ tyss')
 
@@ -415,7 +415,7 @@
 in
   map (prove_ind lthy' defs srules cnewpreds)
     (preds ~~ newpreds ~~ arg_tyss)
-  |> ProofContext.export lthy' lthy
+  |> Proof_Context.export lthy' lthy
 end*}
 
 text {*
diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/Package/simple_inductive_package.ML
--- a/ProgTutorial/Package/simple_inductive_package.ML	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/Package/simple_inductive_package.ML	Wed Oct 26 13:19:09 2011 +0100
@@ -49,7 +49,7 @@
 (* @chunk definitions *)
 fun definitions rules params preds prednames syns arg_typess lthy =
 let
-  val thy = ProofContext.theory_of lthy
+  val thy = Proof_Context.theory_of lthy
   val orules = map (Object_Logic.atomize_term thy) rules
   val defs = map (defs_aux lthy orules preds params) (preds ~~ arg_typess)
 
@@ -78,7 +78,7 @@
   val Ps = replicate (length preds) "P"
   val (newprednames, lthy3) = Variable.variant_fixes Ps lthy2
 
-  val thy = ProofContext.theory_of lthy3
+  val thy = Proof_Context.theory_of lthy3
   val Tss' = map (fn Ts => Ts ---> HOLogic.boolT) Tss
   val newpreds = map Free (newprednames ~~ Tss')
 
@@ -97,7 +97,7 @@
     in
       Goal.prove lthy4 [] [prem] goal
        (fn {prems, ...} => induction_tac defs prems cnewpreds)
-       |> singleton (ProofContext.export lthy4 lthy1)
+       |> singleton (Proof_Context.export lthy4 lthy1)
     end
 in
   map prove_induction (preds ~~ newpreds ~~ Tss)
@@ -152,7 +152,7 @@
     fun prove_intro (i, goal) =
      Goal.prove lthy2 [] [] goal
       (fn {context, ...} => introductions_tac defs rules preds i context)
-      |> singleton (ProofContext.export lthy2 lthy1)
+      |> singleton (Proof_Context.export lthy2 lthy1)
 in
   map_index prove_intro rules
 end
diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/Tactical.thy
--- a/ProgTutorial/Tactical.thy	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/Tactical.thy	Wed Oct 26 13:19:09 2011 +0100
@@ -13,6 +13,13 @@
 chapter {* Tactical Reasoning\label{chp:tactical} *}
 
 text {*
+  \begin{flushright}
+  {\em ``The most effective debugging tool is still careful thought,\\
+  coupled with judiciously placed print statements.''} \\[1ex]
+  Brian Kernighan, in {\em Unix for Beginners}, 1979
+  \end{flushright}
+
+  \medskip
   One of the main reason for descending to the ML-level of Isabelle is to
   be able to implement automatic proof procedures. Such proof procedures can
   considerably lessen the burden of manual reasoning. They are centred around
@@ -1581,7 +1588,7 @@
 \begin{figure}[p]
 \begin{boxedminipage}{\textwidth}
 \begin{isabelle} *}
-types prm = "(nat \<times> nat) list"
+type_synonym prm = "(nat \<times> nat) list"
 consts perm :: "prm \<Rightarrow> 'a \<Rightarrow> 'a"  ("_ \<bullet> _" [80,80] 80)
 
 overloading
@@ -2472,7 +2479,7 @@
   val xs = strip_abs_vars (term_of rhs)
   val (_, ctxt') = Variable.add_fixes (map fst xs) ctxt
 
-  val thy = ProofContext.theory_of ctxt'
+  val thy = Proof_Context.theory_of ctxt'
   val cxs = map (cterm_of thy o Free) xs
   val new_lhs = Drule.list_comb (lhs, cxs)
 
@@ -2480,7 +2487,7 @@
     | get_conv (_::xs) = Conv.fun_conv (get_conv xs)
 in
   get_conv xs new_lhs |>
-  singleton (ProofContext.export ctxt' ctxt)
+  singleton (Proof_Context.export ctxt' ctxt)
 end*}
 
 text {*
diff -r 683fb6e468b1 -r 141751cab5b2 ProgTutorial/antiquote_setup.ML
--- a/ProgTutorial/antiquote_setup.ML	Wed Oct 19 21:57:22 2011 +0100
+++ b/ProgTutorial/antiquote_setup.ML	Wed Oct 26 13:19:09 2011 +0100
@@ -81,13 +81,13 @@
 (* checks and expression agains a result pattern *)
 fun output_response {context = ctxt, ...} (lhs, pat) =
   (eval_fn ctxt (ml_pat (lhs, pat));
-   write_file_ml_blk lhs (ProofContext.theory_of ctxt);
+   write_file_ml_blk lhs (Proof_Context.theory_of ctxt);
    output ctxt ((prefix_lines "" lhs) @ (prefix_lines "> " pat)))
 
 (* checks the expressions, but does not check it against a result pattern *)
 fun output_response_fake {context = ctxt, ...} (lhs, pat) =
   (eval_fn ctxt (ml_val [] NONE lhs);
-   write_file_ml_blk lhs (ProofContext.theory_of ctxt);
+   write_file_ml_blk lhs (Proof_Context.theory_of ctxt);
    output ctxt ((split_lines lhs) @ (prefix_lines "> " pat)))
 
 (* checks the expressions, but does not check it against a result pattern *)
diff -r 683fb6e468b1 -r 141751cab5b2 progtutorial.pdf
Binary file progtutorial.pdf has changed
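
For readers tracing the renaming above, the following Isabelle/ML snippet is an
illustrative sketch and is not part of the changeset. It only shows the pattern
that recurs throughout the patched files: obtain the background theory of a
context with Proof_Context.theory_of, prove a goal in an auxiliary context, and
export the result back with Proof_Context.export. The function name export_refl
and the goal "x = x" are made up for illustration; only the Proof_Context
operations themselves are taken from the hunks above.

ML{*
(* illustrative sketch only -- not part of the changeset above *)
fun export_refl ctxt =
let
  (* fix a fresh variable x, obtaining an auxiliary context ctxt' *)
  val ([x], ctxt') = Variable.variant_fixes ["x"] ctxt
  (* the background theory of the auxiliary context *)
  val thy = Proof_Context.theory_of ctxt'
  val _ = tracing ("proving in theory " ^ Context.theory_name thy)
  (* hypothetical goal: x = x for the fixed variable *)
  val goal = HOLogic.mk_Trueprop
               (HOLogic.mk_eq (Free (x, @{typ nat}), Free (x, @{typ nat})))
in
  Goal.prove ctxt' [] [] goal (fn _ => rtac @{thm refl} 1)
  |> singleton (Proof_Context.export ctxt' ctxt)
end
*}

Evaluating export_refl @{context} in a HOL theory generalises the fixed
variable again on export and returns a theorem of the form ?x = ?x.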