--- a/ProgTutorial/Base.thy Sun Aug 16 22:14:36 2009 +0200
+++ b/ProgTutorial/Base.thy Mon Aug 17 20:57:32 2009 +0200
@@ -10,6 +10,43 @@
(* to have a special tag for text enclosed in ML *)
ML {*
+(* FIXME ref *)
+val file_name = ref (NONE : string option)
+
+fun write_file txt =
+ case !file_name of
+ NONE => () (* error "No open file" *)
+ | SOME name =>
+ (let
+ val stream = TextIO.openAppend name
+ in
+ TextIO.output (stream, txt);
+ TextIO.flushOut stream;
+ TextIO.closeOut stream
+ end)
+*}
+
+ML {*
+fun write_file_blk txt =
+let
+ val pre = implode ["\n", "ML ", "{", "*", "\n"]
+ val post = implode ["\n", "*", "}", "\n"]
+in
+ write_file (enclose pre post txt)
+end
+*}
+
+ML {*
+fun open_file name =
+ (tracing ("Opened File: " ^ name);
+ file_name := SOME name)
+
+fun open_file_prelude name txt =
+ (open_file name; write_file (txt ^ "\n"))
+*}
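+
+(* A hypothetical usage sketch (the name "Demo_Code.thy" is made up; this is
+   not executed anywhere):
+
+     open_file_prelude "Demo_Code.thy" "theory Demo_Code\nimports Main\nbegin";
+     write_file_blk "val two = 1 + 1";
+     write_file "\nend\n"
+
+   This would create Demo_Code.thy containing the given theory header, one
+   ML block with the given text, and a closing "end". *)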
+
+
+ML {*
fun propagate_env (context as Context.Proof lthy) =
Context.Proof (LocalTheory.map_contexts (ML_Env.inherit context) lthy)
@@ -17,21 +54,27 @@
fun propagate_env_prf prf = Proof.map_contexts
(Context.proof_map (ML_Env.inherit (Context.Proof (Proof.context_of prf)))) prf
+*}
+ML {*
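+(* Variant of the "ML" command: besides evaluating the ML text, it also
+   appends each code block to the currently open file via write_file_blk. *)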
val _ =
OuterSyntax.command "ML" "eval ML text within theory"
(OuterKeyword.tag "TutorialML" OuterKeyword.thy_decl)
- (OuterParse.ML_source >> (fn (txt, pos) =>
+ (OuterParse.position OuterParse.text >> (fn (txt, pos) =>
Toplevel.generic_theory
- (ML_Context.exec (fn () => ML_Context.eval true pos txt) #> propagate_env)))
+ (ML_Context.exec (fn () => (write_file_blk txt; ML_Context.eval true pos txt)) #> propagate_env)))
+*}
+ML {*
val _ =
OuterSyntax.command "ML_prf" "ML text within proof"
(OuterKeyword.tag "TutorialML" OuterKeyword.prf_decl)
(OuterParse.ML_source >> (fn (txt, pos) =>
Toplevel.proof (Proof.map_context (Context.proof_map
(ML_Context.exec (fn () => ML_Context.eval true pos txt))) #> propagate_env_prf)))
+*}
+ML {*
val _ =
OuterSyntax.command "ML_val" "diagnostic ML text"
(OuterKeyword.tag "TutorialML" OuterKeyword.diag)
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/ProgTutorial/FirstSteps.ML Mon Aug 17 20:57:32 2009 +0200
@@ -0,0 +1,5 @@
+(* *)
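+(* Opens the generated file FirstSteps_Code.thy and writes its theory header;
+   subsequent "ML" blocks of the chapter are appended to it. *)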
+
+open_file_prelude
+"FirstSteps_Code.thy"
+(cat_lines ["theory FirstSteps", "imports Base", "begin"])
\ No newline at end of file
--- a/ProgTutorial/FirstSteps.thy Sun Aug 16 22:14:36 2009 +0200
+++ b/ProgTutorial/FirstSteps.thy Mon Aug 17 20:57:32 2009 +0200
@@ -1,7 +1,10 @@
theory FirstSteps
imports Base
+uses "FirstSteps.ML"
begin
+
+
chapter {* First Steps *}
text {*
@@ -165,7 +168,7 @@
will cause that all tracing information is printed into the file @{text "foo.bar"}.
You can print out error messages with the function @{ML [index] error}; for
- example:\footnote{FIXME: unwanted pagebreak}
+ example:
@{ML_response_fake [display,gray] "if 0=1 then true else (error \"foo\")"
"Exception- ERROR \"foo\" raised
@@ -194,46 +197,60 @@
*)
text {*
- Most often you want to inspect data of Isabelle's most basic datastructures,
- namely @{ML_type term}, @{ML_type cterm} or @{ML_type thm}. Isabelle
- contains elaborate pretty-printing functions for printing them (see Section
- \ref{sec:pretty}), but for quick-and-dirty solutions they are far too
- unwieldy. One way to transform a term into a string is to use the function
- @{ML [index] string_of_term in Syntax}.
-
+ Most often you want to inspect data of Isabelle's most basic data
+ structures, namely @{ML_type term}, @{ML_type cterm} or @{ML_type
+ thm}. Isabelle contains elaborate pretty-printing functions for printing
+ them (see Section \ref{sec:pretty}), but for quick-and-dirty solutions they
+ are a bit unwieldy. One way to transform a term into a string is to use the
+ function @{ML [index] string_of_term in Syntax} in structure @{ML_struct
+ Syntax}, which for convenience we bind to the toplevel.
+*}
+
+ML{*val string_of_term = Syntax.string_of_term*}
+
+text {*
+ It can now be used as follows:
@{ML_response_fake [display,gray]
- "Syntax.string_of_term @{context} @{term \"1::nat\"}"
+ "string_of_term @{context} @{term \"1::nat\"}"
"\"\\^E\\^Fterm\\^E\\^E\\^Fconst\\^Fname=HOL.one_class.one\\^E1\\^E\\^F\\^E\\^E\\^F\\^E\""}
- This produces a string with some additional information encoded in it. The string
- can be properly printed by using either the function @{ML [index] writeln} or
- @{ML [index] tracing}:
+ This produces a string corresponding to the term @{term [show_types] "1::nat"} with some
+ additional information encoded in it. The string can be properly printed by
+ using either the function @{ML [index] writeln} or @{ML [index] tracing}:
@{ML_response_fake [display,gray]
- "writeln (Syntax.string_of_term @{context} @{term \"1::nat\"})"
+ "writeln (string_of_term @{context} @{term \"1::nat\"})"
"\"1\""}
or
@{ML_response_fake [display,gray]
- "tracing (Syntax.string_of_term @{context} @{term \"1::nat\"})"
+ "tracing (string_of_term @{context} @{term \"1::nat\"})"
"\"1\""}
+ If there is more than one @{ML_type term} to be printed, you can use the
+ function @{ML [index] commas} to separate them.
+*}
+
+ML{*fun string_of_terms ctxt ts =
+ commas (map (string_of_term ctxt) ts)*}
+
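+text {*
+ As a quick illustration (the response below shows the expected output):
+
+ @{ML_response_fake [display,gray]
+ "tracing (string_of_terms @{context} [@{term \"1::nat\"}, @{term \"x::nat\"}])"
+ "1, x"}
+*}
+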
+text {*
A @{ML_type cterm} can be transformed into a string by the following function.
*}
ML{*fun string_of_cterm ctxt t =
- Syntax.string_of_term ctxt (term_of t)*}
+ string_of_term ctxt (term_of t)*}
text {*
In this example the function @{ML [index] term_of} extracts the @{ML_type term} from
- a @{ML_type cterm}. If there are more than one @{ML_type cterm}s to be
- printed, you can use the function @{ML [index] commas} to separate them.
+ a @{ML_type cterm}. More than one @{ML_type cterm} can again be printed
+ with @{ML [index] commas}.
*}
-ML{*fun string_of_cterms ctxt ts =
- commas (map (string_of_cterm ctxt) ts)*}
+ML{*fun string_of_cterms ctxt cts =
+ commas (map (string_of_cterm ctxt) cts)*}
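+
+text {*
+ For instance (a small sketch; the response is the expected output):
+
+ @{ML_response_fake [display,gray]
+ "tracing (string_of_cterms @{context} [@{cterm \"True\"}, @{cterm \"False\"}])"
+ "True, False"}
+*}
+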
text {*
The easiest way to get the string of a theorem is to transform it
@@ -251,10 +268,10 @@
"tracing (string_of_thm @{context} @{thm conjI})"
"\<lbrakk>?P; ?Q\<rbrakk> \<Longrightarrow> ?P \<and> ?Q"}
- In order to improve the readability of theorems we convert
- these schematic variables into free variables using the
- function @{ML [index] import in Variable}. This is similar
- to the attribute @{text "[no_vars]"} from Isabelle's user-level.
+ In order to improve the readability of theorems we convert these schematic
+ variables into free variables using the function @{ML [index] import in
+ Variable}. This is similar to the theorem attribute @{text "[no_vars]"} from
+ Isabelle's user-level.
*}
ML{*fun no_vars ctxt thm =
@@ -409,7 +426,7 @@
val ctxt = @{context}
in
apply_fresh_args t ctxt
- |> Syntax.string_of_term ctxt
+ |> string_of_term ctxt
|> tracing
end"
"P z za zb"}
@@ -434,7 +451,7 @@
val ctxt = @{context}
in
apply_fresh_args t ctxt
- |> Syntax.string_of_term ctxt
+ |> string_of_term ctxt
|> tracing
end"
"za z zb"}
@@ -599,20 +616,59 @@
(fn x => (x, x))
#-> (fn x => fn y => x + y)*}
+
+text {*
+ When using combinators for writing functions in waterfall fashion, it is
+ sometimes necessary to do some ``plumbing'' in order to fit functions
+ together. We have already seen such plumbing in the function @{ML
+ apply_fresh_args}, where @{ML curry} is needed so that the function @{ML
+ list_comb}, which works over pairs, fits with the combinator @{ML "|>"}. Such
+ plumbing is also needed in situations where a function operates over lists,
+ but one calculates only with a single entity. An example is the function
+ @{ML [index] check_terms in Syntax}, whose purpose is to type-check a list
+ of terms.
+
+ @{ML_response_fake [display, gray]
+ "let
+ val ctxt = @{context}
+in
+ map (Syntax.parse_term ctxt) [\"m + n\", \"m - (n::nat)\"]
+ |> Syntax.check_terms ctxt
+ |> string_of_terms ctxt
+ |> tracing
+end"
+ "m + n, m - n"}
+*}
+
text {*
-
- (FIXME: find a good exercise for combinators)
-
+ In this example we obtain two terms with appropriate typing. However, if you
+ have only a single term, then @{ML check_terms in Syntax} needs to be
+ adapted. This can be done with the ``plumbing'' function @{ML
+ singleton}.\footnote{There is in fact already a function @{ML check_term in
+ Syntax}, which, however, is defined in terms of @{ML singleton} and @{ML
+ check_terms in Syntax}.} For example
+
+ @{ML_response_fake [display, gray]
+ "let
+ val ctxt = @{context}
+in
+ Syntax.parse_term ctxt \"m - (n::nat)\"
+ |> singleton (Syntax.check_terms ctxt)
+ |> string_of_term ctxt
+ |> tracing
+end"
+ "m - n"}
+
\begin{readmore}
The most frequently used combinators are defined in the files @{ML_file
"Pure/library.ML"}
and @{ML_file "Pure/General/basics.ML"}. Also \isccite{sec:ML-linear-trans}
contains further information about combinators.
\end{readmore}
-
- (FIXME: say something about calling conventions)
-
- (FIXME: say something about singleton)
+
+ (FIXME: find a good exercise for combinators)
+
+ (FIXME: say something about calling conventions)
*}
@@ -831,13 +887,10 @@
val v2 = Var ((\"x1\", 3), @{typ bool})
val v3 = Free (\"x\", @{typ bool})
in
- map (Syntax.string_of_term @{context}) [v1, v2, v3]
- |> cat_lines
+ string_of_terms @{context} [v1, v2, v3]
|> tracing
end"
-"?x3
-?x1.3
-x"}
+"?x3, ?x1.3, x"}
When constructing terms, you are usually concerned with free variables (as mentioned earlier,
you cannot construct schematic variables using the antiquotation @{text "@{term \<dots>}"}).
@@ -864,7 +917,7 @@
"let
val omega = Free (\"x\", @{typ nat}) $ Free (\"x\", @{typ nat})
in
- tracing (Syntax.string_of_term @{context} omega)
+ tracing (string_of_term @{context} omega)
end"
"x x"}
@@ -1085,7 +1138,7 @@
val (vrs, trm) = strip_alls @{term \"\<forall>x y. x = (y::bool)\"}
in
subst_bounds (rev vrs, trm)
- |> Syntax.string_of_term @{context}
+ |> string_of_term @{context}
|> tracing
end"
"x = y"}
@@ -1104,7 +1157,7 @@
"let
val eq = HOLogic.mk_eq (@{term \"True\"}, @{term \"False\"})
in
- tracing (Syntax.string_of_term @{context} eq)
+ tracing (string_of_term @{context} eq)
end"
"True = False"}
*}
@@ -1516,7 +1569,7 @@
then let val (params,t) = strip_assums_all([], nth prems (i - 1))
in subst_bounds(map Free params, t) end
else error ("Not enough premises for prem" ^ string_of_int i ^
- " in propositon: " ^ Syntax.string_of_term ctxt t)
+ " in propositon: " ^ string_of_term ctxt t)
end;
*}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/ProgTutorial/Parsing.ML Mon Aug 17 20:57:32 2009 +0200
@@ -0,0 +1,7 @@
+(* *)
+
+open_file_prelude
+"Parsing_Code.thy"
+(cat_lines ["theory Parsing",
+ "imports Base \"Package/Simple_Inductive_Package\"",
+ "begin"])
\ No newline at end of file
--- a/ProgTutorial/Parsing.thy Sun Aug 16 22:14:36 2009 +0200
+++ b/ProgTutorial/Parsing.thy Mon Aug 17 20:57:32 2009 +0200
@@ -1,5 +1,6 @@
theory Parsing
imports Base "Package/Simple_Inductive_Package"
+uses "Parsing.ML"
begin
chapter {* Parsing *}
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/ProgTutorial/Tactical.ML Mon Aug 17 20:57:32 2009 +0200
@@ -0,0 +1,7 @@
+(* *)
+
+open_file_prelude
+"Tactical_Code.thy"
+(cat_lines ["theory Tactical",
+ "imports Base FirstSteps",
+ "begin"])
\ No newline at end of file