author Christian Urban <urbanc@in.tum.de>
Wed, 29 Oct 2008 13:58:36 +0100
changeset 43 02f76f1b6e7b
parent 42 cd612b489504
child 44 dee4b3e66dfe
added positions to anti-quotations; removed old antiquotation_setup; tuned the text a bit
CookBook/Base.thy
CookBook/FirstSteps.thy
CookBook/Intro.thy
CookBook/Parsing.thy
CookBook/Recipes/NamedThms.thy
CookBook/antiquote_setup.ML
CookBook/antiquote_setup_plus.ML
CookBook/document/root.tex
cookbook.pdf
--- a/CookBook/Base.thy	Mon Oct 27 18:48:52 2008 +0100
+++ b/CookBook/Base.thy	Wed Oct 29 13:58:36 2008 +0100
@@ -3,7 +3,6 @@
 uses
   "antiquote_setup.ML"
   "chunks.ML"
-  "antiquote_setup_plus.ML"
 begin
 
 end
--- a/CookBook/FirstSteps.thy	Mon Oct 27 18:48:52 2008 +0100
+++ b/CookBook/FirstSteps.thy	Wed Oct 29 13:58:36 2008 +0100
@@ -8,7 +8,7 @@
 text {*
 
   Isabelle programming is done in Standard ML.
-  Just like lemmas and proofs, code in Isabelle is part of a 
+  Just like lemmas and proofs, SML code in Isabelle is part of a 
   theory. If you want to follow the code written in this chapter, we 
   assume you are working inside the theory defined by
 
@@ -65,9 +65,6 @@
   \ldots
   \end{tabular}
   \end{center}
-
-  Note that from now on we are going to drop the 
-  \isacommand{ML} \isa{\isacharverbatimopen \ldots \isacharverbatimclose}. 
   
 *}
 
@@ -88,25 +85,26 @@
 
   @{ML [display] "warning (makestring 1)"} 
 
-  However this only works if the type of what is converted is monomorphic and not 
+  However, this only works if the type of what is converted is monomorphic and is not 
   a function.
 
-  The funtion warning should only be used for testing purposes, because any
+  The function @{ML "warning"} should only be used for testing purposes, because any
  output this function generates will be overwritten as soon as an error is
  raised. Therefore, for printing anything more serious and elaborate, the
   function @{ML tracing} should be used. This function writes all output into
-  a separate buffer.
+  a separate tracing buffer.
 
   @{ML [display] "tracing \"foo\""}
 
   It is also possible to redirect the channel where the @{ML_text "foo"} is 
   printed to a separate file, e.g. to prevent Proof General from choking on massive 
  amounts of trace output. This redirection can be achieved using the code
+*}
 
-@{ML_decl [display]
-"val strip_specials =
+ML {*
+val strip_specials =
 let
-  fun strip (\"\\^A\" :: _ :: cs) = strip cs
+  fun strip ("\^A" :: _ :: cs) = strip cs
     | strip (c :: cs) = c :: strip cs
     | strip [] = [];
 in implode o strip o explode end;
@@ -114,10 +112,13 @@
 fun redirect_tracing stream =
  Output.tracing_fn := (fn s =>
     (TextIO.output (stream, (strip_specials s));
-     TextIO.output (stream, \"\\n\");
-     TextIO.flushOut stream));"}
+     TextIO.output (stream, "\n");
+     TextIO.flushOut stream));
+*}
 
-  Calling @{ML_text "redirect_tracing"} with @{ML "(TextIO.openOut \"foo.bar\")"} 
+text {*
+
+  Calling @{ML "redirect_tracing"} with @{ML "(TextIO.openOut \"foo.bar\")"} 
  will cause all tracing information to be printed into the file @{ML_text "foo.bar"}.
 
 *}
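
A minimal usage sketch of the function above (assuming the current directory is writable; "foo.bar" is just the placeholder file name used in the text):

ML {*
(* sketch: open an output stream and route all subsequent tracing output into it *)
val stream = TextIO.openOut "foo.bar";
val _ = redirect_tracing stream;
val _ = tracing "this message ends up in foo.bar rather than in the tracing buffer";
*}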
@@ -141,14 +142,18 @@
 
  Note, however, that antiquotations are statically scoped, that is, the value is
  determined at ``compile-time'', not ``run-time''. For example, the function
+*}
 
-  @{ML_decl [display]
-  "fun not_current_thyname () = Context.theory_name @{theory}"}
+ML {*
+fun not_current_thyname () = Context.theory_name @{theory}
+*}
+
+text {*
 
  does \emph{not} return the name of the current theory if it is run in a 
   different theory. Instead, the code above defines the constant function 
   that always returns the string @{ML_text "FirstSteps"}, no matter where the
-  function is called. Operationally speaking,  @{text "@{theory}"} is 
+  function is called. Operationally speaking, the antiquotation @{text "@{theory}"} is 
   \emph{not} replaced with code that will look up the current theory in 
   some data structure and return it. Instead, it is literally
   replaced with the value representing the theory name.
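
For contrast, a version that receives the theory as an explicit argument inspects it at run-time rather than at compile-time (a sketch; the function name is made up):

ML {*
(* the theory is only looked at when the function is applied *)
fun current_thyname thy = Context.theory_name thy
*}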
@@ -158,7 +163,20 @@
   @{ML [display] "@{thm allI}"}
   @{ML [display] "@{simpset}"}
 
-  In the course of this introduction, we will learn more about 
+  While antiquotations have many uses, they were introduced to avoid explicit
+  bindings for theorems such as
+*}
+
+ML {*
+val allI = thm "allI"
+*}
+
+text {*
+  These bindings were difficult to maintain and could also be overwritten
+  by bindings introduced by the user. This had the potential to break
+  definitional packages. This additional layer of maintenance complexity
+  can be avoided by using antiquotations, since they are ``linked'' statically
+  at compile time.  In the course of this introduction, we will learn more about 
   these antiquotations: they greatly simplify Isabelle programming since one
   can directly access all kinds of logical elements from ML.
 
@@ -257,20 +275,28 @@
   dynamically. For example, a function that returns the implication 
  @{text "\<And>(x::\<tau>). P x \<Longrightarrow> Q x"} taking @{term P}, @{term Q} and the type @{term "\<tau>"}
   as arguments can only be written as
+*}
 
-@{ML_decl [display]
-"fun make_imp P Q tau =
+ML {*
+fun make_imp P Q tau =
   let
-    val x = Free (\"x\",tau)
+    val x = Free ("x",tau)
   in Logic.all x (Logic.mk_implies (HOLogic.mk_Trueprop (P $ x),
                                     HOLogic.mk_Trueprop (Q $ x)))
-  end"}
+  end
+*}
+
+text {*
 
   The reason is that one cannot pass the arguments @{term P}, @{term Q} and 
   @{term "tau"} into an antiquotation. For example the following does not work.
+*}
 
-  @{ML_decl [display] "fun make_wrong_imp P Q tau = @{prop \"\<And>x. P x \<Longrightarrow> Q x\"}"} 
+ML {*
+fun make_wrong_imp P Q tau = @{prop "\<And>x. P x \<Longrightarrow> Q x"} 
+*}
 
+text {*
  To see this, apply @{text "@{term S}"}, @{text "@{term T}"} and @{text "@{typ nat}"} 
   to both functions. 
 
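
Following that suggestion, a small evaluation sketch (S and T are arbitrary free variables):

ML {*
(* make_imp builds the implication from its actual arguments ... *)
val t1 = make_imp @{term S} @{term T} @{typ nat};
(* ... whereas make_wrong_imp ignores them and returns the term fixed at compile-time *)
val t2 = make_wrong_imp @{term S} @{term T} @{typ nat};
*}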
--- a/CookBook/Intro.thy	Mon Oct 27 18:48:52 2008 +0100
+++ b/CookBook/Intro.thy	Wed Oct 29 13:58:36 2008 +0100
@@ -28,8 +28,8 @@
 
 text {*
   
-  The following documentation about Isabelle programming already exist (they are
-  included in the distribution of Isabelle):
+  The following documentation about Isabelle programming already exists (and is
+  part of the distribution of Isabelle):
 
   \begin{description}
   \item[The Implementation Manual] describes Isabelle
--- a/CookBook/Parsing.thy	Mon Oct 27 18:48:52 2008 +0100
+++ b/CookBook/Parsing.thy	Wed Oct 29 13:58:36 2008 +0100
@@ -54,8 +54,6 @@
   will raise the exception @{ML_text "FAIL"}.
   There are three exceptions used in the parsing combinators:
 
-  (FIXME: do the exception need to be explained, because the user cannot use them from ``outside''?)
-
   \begin{itemize}
   \item @{ML_text "FAIL"} is used to indicate that alternative routes of parsing 
   might be explored. 
@@ -65,11 +63,12 @@
   It is used for example in the function @{ML "(op !!)"} (see below).
   \end{itemize}
 
+  (FIXME: do the exceptions need to be explained, because the user cannot use them from ``outside''?)
 
   Slightly more general than the parser @{ML "(op $$)"} is the function @{ML Scan.one}, in that it 
   takes a predicate as argument and then parses exactly one item from the input list 
-  satisfying this prediate. For example the following parser either consumes an @{ML_text "h"}
-  or a @{ML_text "w"}:
+  satisfying this predicate. For example, the following parser either consumes an 
+  @{ML_text [quotes] "h"} or a @{ML_text [quotes] "w"}:
 
 @{ML_response [display] 
 "let 
@@ -134,7 +133,7 @@
  "((\"h\", [\"e\", \"l\", \"l\", \"o\"]), (\"x\", [\"w\", \"o\", \"r\", \"l\", \"d\"]))"}
 
   The function @{ML "(op !!)"} helps to produce appropriate error messages
-  during parsing. For example if one wants to parse @{ML_text p} immediately 
+  during parsing. For example if one wants to parse that @{ML_text p} is immediately 
   followed by @{ML_text q}, or start a completely different parser @{ML_text r},
   one might write
 
@@ -148,7 +147,8 @@
   is present in the input, but not @{ML_text q}. That means @{ML_open "(p -- q)" for p q} will fail 
   and the 
  alternative parser @{ML_text r} will be tried. However, in many circumstances this will be the wrong
-  parser for the input ``p-followed-by-q'' and therefore will fail. The error message is then caused by the
+  parser for the input ``p-followed-by-q'' and therefore will also fail. The error message is then 
+  caused by the
  failure of @{ML_text r}, not by the absence of @{ML_text q} in the input. This kind of situation
  can be avoided by using the function @{ML "(op !!)"}. This function aborts the whole process of
  parsing in case of failure and issues an error message. For example, if we invoke the parser
@@ -178,7 +178,7 @@
   
   Returning to our example of parsing @{ML_open "(p -- q) || r" for p q r}. If we want
   to generate the correct error message for p-followed-by-q, then
-  we have to write
+  we have to write, for example
 *}
 
 ML {* 
@@ -239,7 +239,7 @@
 "Scan.lift (($$ \"h\") -- ($$ \"e\")) (1,(explode \"hello\"))"
 "((\"h\", \"e\"), (1, [\"l\", \"l\", \"o\"]))"}
 
-  (FIXME: In which situations is this useful?) 
+  (FIXME: In which situations is this useful? Give examples.) 
 *}
 
 section {* Parsing Theory Syntax *}
@@ -247,7 +247,7 @@
 text {*
   Most of the time, however, Isabelle developers have to deal with parsing tokens, not strings.
   This is because the parsers for the theory syntax, as well as the parsers for the 
-  argument syntax of proof methods and attributes, use the type 
+  argument syntax of proof methods and attributes use the type 
   @{ML_type OuterParse.token} (which is identical to the type @{ML_type OuterLex.token}).
 
   \begin{readmore}
@@ -257,10 +257,6 @@
   \end{readmore}
 *}
 
-ML {* OuterLex.mk_text "hello" *}
-
-ML {* print_depth 50 *}
-
 ML {* 
   let open OuterLex in 
   OuterSyntax.scan Position.none "for" 
@@ -268,8 +264,6 @@
 
 *}
 
-ML {* map OuterLex.mk_text (explode "hello") *} 
-
 ML {*
 
 fun my_scan lex pos str =
@@ -282,21 +276,10 @@
 *}
 
 ML {*
-let val toks = my_scan (["hello"], []) Position.none "hello"
+let val toks = my_scan (["hello"], []) Position.none "hello foo bar"
 in (OuterParse.$$$ "hello") toks end
 *}
 
-text {* 
-
-  @{ML_response_fake [display] "map OuterLex.mk_text (explode \"hello\")"  
-       "[Token (\<dots>), Token (\<dots>), Token (\<dots>), Token (\<dots>), Token (\<dots>)]"} 
-
-*}
-
-ML {*
-  OuterLex.mk_text "hello"
-*}
-
 ML {*
 
   let val input = [OuterLex.mk_text "hello", OuterLex.mk_text "world"];
@@ -304,7 +287,11 @@
 
 *}
 
-
+text {*
+  explain @{ML "OuterParse.enum1"}, @{ML "OuterParse.prop"}, 
+   @{ML "OuterParse.opt_target"}, @{ML "OuterParse.fixes"}, 
+   @{ML "OuterParse.!!!"} and @{ML "OuterParse.for_fixes"} 
+*}
 
 chapter {* Parsing *}
 
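
A self-contained evaluation sketch for the @{ML Scan.one} parser discussed earlier (mirroring the "h"-or-"w" example above):

ML {*
(* Scan.one consumes exactly one element of the input satisfying the predicate *)
Scan.one (fn x => x = "h" orelse x = "w") (explode "hello")
*}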
--- a/CookBook/Recipes/NamedThms.thy	Mon Oct 27 18:48:52 2008 +0100
+++ b/CookBook/Recipes/NamedThms.thy	Wed Oct 29 13:58:36 2008 +0100
@@ -25,7 +25,7 @@
 
 text {*
   This code declares a context data slot where the theorems are stored,
-  an attribute @{attribute foo} (with the usual @{text add} and @{text del} options
+  an attribute @{ML_text foo} (with the usual @{ML_text add} and @{ML_text del} options
  for adding and deleting theorems) and an internal ML interface to retrieve and 
   modify the theorems.
 
@@ -41,7 +41,7 @@
 thm foo
 
 text {* 
-  In an ML-context the rules marked with @{text "foo"} an be retrieved
+  In an ML-context the rules marked with @{ML_text "foo"} can be retrieved
   using
 *}
 
--- a/CookBook/antiquote_setup.ML	Mon Oct 27 18:48:52 2008 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,213 +0,0 @@
-(*  Title:      Doc/antiquote_setup.ML
-    ID:         $Id: antiquote_setup.ML,v 1.32 2008/09/29 10:31:56 haftmann Exp $
-    Author:     Makarius
-
-Auxiliary antiquotations for the Isabelle manuals.
-*)
-
-structure AntiquoteSetup: sig end =
-struct
-
-structure O = ThyOutput;
-
-
-(* misc utils *)
-
-val clean_string = translate_string
-  (fn "_" => "\\_"
-    | "<" => "$<$"
-    | ">" => "$>$"
-    | "#" => "\\#"
-    | "{" => "\\{"
-    | "}" => "\\}"
-    | c => c);
-
-fun clean_name "\\<dots>" = "dots"
-  | clean_name ".." = "ddot"
-  | clean_name "." = "dot"
-  | clean_name "_" = "underscore"
-  | clean_name "{" = "braceleft"
-  | clean_name "}" = "braceright"
-  | clean_name s = s |> translate_string (fn "_" => "-" | c => c);
-
-val str_of_source = space_implode " " o map OuterLex.unparse o #2 o #1 o Args.dest_src;
-
-fun tweak_line s =
-  if ! O.display then s else Symbol.strip_blanks s;
-
-val pretty_text = Pretty.chunks o map Pretty.str o map tweak_line o Library.split_lines;
-
-fun pretty_term ctxt t = Syntax.pretty_term (Variable.auto_fixes t ctxt) t;
-fun pretty_thm ctxt = pretty_term ctxt o Thm.full_prop_of;
-
-
-(* verbatim text *)
-
-val verbatim = space_implode "\\verb,|," o map (enclose "\\verb|" "|") o space_explode "|";
-
-val _ = O.add_commands
- [("verbatim", O.args (Scan.lift Args.name) (fn _ => fn _ =>
-      split_lines #> map verbatim #> space_implode "\\isasep\\isanewline%\n"))];
-
-
-(* ML text *)
-
-local
-
-fun ml_val (txt1, "") = "fn _ => (" ^ txt1 ^ ");"
-  | ml_val (txt1, txt2) = "fn _ => (" ^ txt1 ^ ": " ^ txt2 ^ ");";
-
-fun ml_type (txt1, "") = "val _ = NONE : (" ^ txt1 ^ ") option;"
-  | ml_type (txt1, txt2) = "val _ = [NONE : (" ^ txt1 ^ ") option, NONE : (" ^ txt2 ^ ") option];";
-
-fun ml_exn (txt1, "") = "fn _ => (" ^ txt1 ^ ": exn);"
-  | ml_exn (txt1, txt2) = "fn _ => (" ^ txt1 ^ ": " ^ txt2 ^ " -> exn);";
-
-fun ml_structure (txt, _) = "functor DUMMY_FUNCTOR() = struct structure DUMMY = " ^ txt ^ " end;"
-
-fun ml_functor _ = "val _ = ();";  (*no check!*)
-
-fun index_ml kind ml src ctxt (txt1, txt2) =
-  let
-    val txt = if txt2 = "" then txt1 else
-      if kind = "type" then txt1 ^ " = " ^ txt2
-      else if kind = "exception" then txt1 ^ " of " ^ txt2
-      else txt1 ^ ": " ^ txt2;
-    val txt' = if kind = "" then txt else kind ^ " " ^ txt;
-    val _ = writeln (ml (txt1, txt2));
-    val _ = ML_Context.eval_in (SOME ctxt) false Position.none (ml (txt1, txt2));
-  in
-    "\\indexml" ^ kind ^ enclose "{" "}" (clean_string txt1) ^
-    ((if ! O.source then str_of_source src else txt')
-    |> (if ! O.quotes then quote else I)
-    |> (if ! O.display then enclose "\\begin{alltt}\\isastyleminor\n" "\n\\end{alltt}"
-        else split_lines #> map verbatim #> space_implode "\\isasep\\isanewline%\n"))
-  end;
-
-fun output_ml ctxt src =
-  if ! O.display then enclose "\\begin{alltt}\\isastyleminor\n" "\n\\end{alltt}"
-  else
-    split_lines
-    #> map (space_implode "\\verb,|," o map (enclose "\\verb|" "|") o space_explode "|")
-    #> space_implode "\\isasep\\isanewline%\n";
-
-fun ml_args x = O.args (Scan.lift (Args.name -- Scan.optional (Args.colon |-- Args.name) "")) x;
-
-in
-
-val _ = O.add_commands
- [("index_ML", ml_args (index_ml "" ml_val)),
-  ("index_ML_type", ml_args (index_ml "type" ml_type)),
-  ("index_ML_exn", ml_args (index_ml "exception" ml_exn)),
-  ("index_ML_structure", ml_args (index_ml "structure" ml_structure)),
-  ("index_ML_functor", ml_args (index_ml "functor" ml_functor)),
-  ("ML_functor", O.args (Scan.lift Args.name) output_ml),
-  ("ML_text", O.args (Scan.lift Args.name) output_ml)];
-
-end;
-
-
-(* theorems with names *)
-
-local
-
-fun output_named_thms src ctxt xs =
-  map (apfst (pretty_thm ctxt)) xs        (*always pretty in order to exhibit errors!*)
-  |> (if ! O.quotes then map (apfst Pretty.quote) else I)
-  |> (if ! O.display then
-    map (fn (p, name) =>
-      Output.output (Pretty.string_of (Pretty.indent (! O.indent) p)) ^
-      "\\rulename{" ^ Output.output (Pretty.str_of (pretty_text name)) ^ "}")
-    #> space_implode "\\par\\smallskip%\n"
-    #> enclose "\\begin{isabelle}%\n" "%\n\\end{isabelle}"
-  else
-    map (fn (p, name) =>
-      Output.output (Pretty.str_of p) ^
-      "\\rulename{" ^ Output.output (Pretty.str_of (pretty_text name)) ^ "}")
-    #> space_implode "\\par\\smallskip%\n"
-    #> enclose "\\isa{" "}");
-
-in
-
-val _ = O.add_commands
- [("named_thms", O.args (Scan.repeat (Attrib.thm --
-      Scan.lift (Args.parens Args.name))) output_named_thms)];
-
-end;
-
-
-(* theory files *)
-
-val _ = O.add_commands
- [("thy_file", O.args (Scan.lift Args.name) (O.output (fn _ => fn name =>
-      (ThyLoad.check_thy Path.current name; Pretty.str name))))];
-
-
-(* Isabelle entities (with index) *)
-
-local
-
-fun no_check _ _ = true;
-
-fun thy_check intern defined ctxt =
-  let val thy = ProofContext.theory_of ctxt
-  in defined thy o intern thy end;
-
-fun check_tool name =
-  File.exists (Path.append (Path.explode "~~/lib/Tools") (Path.basic name));
-
-val arg = enclose "{" "}" o clean_string;
-
-fun output_entity check markup index kind ctxt (logic, name) =
-  let
-    val hyper_name = "{" ^ NameSpace.append kind (NameSpace.append logic (clean_name name)) ^ "}";
-    val hyper =
-      enclose ("\\hyperlink" ^ hyper_name ^ "{") "}" #>
-      index = SOME true ? enclose ("\\hypertarget" ^ hyper_name ^ "{") "}";
-    val idx =
-      (case index of
-        NONE => ""
-      | SOME is_def =>
-          "\\index" ^ (if is_def then "def" else "ref") ^ arg logic ^ arg kind ^ arg name);
-  in
-    if check ctxt name then
-      idx ^
-      (Output.output name
-        |> (if markup = "" then I else enclose ("\\" ^ markup ^ "{") "}")
-        |> (if ! O.quotes then quote else I)
-        |> (if ! O.display then enclose "\\begin{isabelle}%\n" "%\n\\end{isabelle}"
-            else hyper o enclose "\\mbox{\\isa{" "}}"))
-    else error ("Bad " ^ kind ^ " " ^ quote name)
-  end;
-
-fun entity check markup index kind =
-  O.args (Scan.lift (Scan.optional (Args.parens Args.name) "" -- Args.name))
-    (K (output_entity check markup index kind));
-
-fun entity_antiqs check markup kind =
- [(kind, entity check markup NONE kind),
-  (kind ^ "_def", entity check markup (SOME true) kind),
-  (kind ^ "_ref", entity check markup (SOME false) kind)];
-
-in
-
-val _ = O.add_commands
- (entity_antiqs no_check "" "syntax" @
-  entity_antiqs (K (is_some o OuterKeyword.command_keyword)) "isacommand" "command" @
-  entity_antiqs (K OuterKeyword.is_keyword) "isakeyword" "keyword" @
-  entity_antiqs (K OuterKeyword.is_keyword) "isakeyword" "element" @
-  entity_antiqs (thy_check Method.intern Method.defined) "" "method" @
-  entity_antiqs (thy_check Attrib.intern Attrib.defined) "" "attribute" @
-  entity_antiqs no_check "" "fact" @
-  entity_antiqs no_check "" "variable" @
-  entity_antiqs no_check "" "case" @
-  entity_antiqs (K ThyOutput.defined_command) "" "antiquotation" @
-  entity_antiqs (fn _ => fn name => is_some (OS.Process.getEnv name)) "isatt" "setting" @
-  entity_antiqs no_check "isatt" "executable" @
-  entity_antiqs (K check_tool) "isatt" "tool" @
-  entity_antiqs (K (File.exists o Path.explode)) "isatt" "file" @
-  entity_antiqs (K ThyInfo.known_thy) "" "theory");
-
-end;
-
-end;
--- a/CookBook/antiquote_setup_plus.ML	Mon Oct 27 18:48:52 2008 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,69 +0,0 @@
-(* Auxiliary antiquotations for the Cookbook. *)
-
-structure AntiquoteSetup: sig end =
-struct
-
-val str_of_source = space_implode " " o map OuterLex.unparse o #2 o #1 o Args.dest_src;
-
-fun ml_val_open (ys, xs) txt = 
-  let fun ml_val_open_aux ys txt = 
-          "fn " ^ (case ys of [] => "_" | _ => enclose "(" ")" (commas ys)) ^ " => (" ^ txt ^ ")";
-  in
-    (case xs of
-       [] => ml_val_open_aux ys txt
-     | _  => ml_val_open_aux ys ("let open " ^ (space_implode " " xs) ^ " in " ^ txt ^ " end"))
-  end;
-
-fun ml_val txt = ml_val_open ([],[]) txt;
-
-fun ml_pat (rhs, pat) =
-  let val pat' = implode (map (fn "\\<dots>" => "_" | s => s) (Symbol.explode pat))
-  in
-    "val " ^ pat' ^ " = " ^ rhs
-  end;
-
-fun ml_struct txt = "functor DUMMY_FUNCTOR() = struct structure DUMMY = " ^ txt ^ " end";
-fun ml_type txt = "val _ = NONE : (" ^ txt ^ ") option";
-fun ml_decl txt = txt
-
-fun output_ml_open ml src ctxt (txt,ovars) =
-  (ML_Context.eval_in (SOME ctxt) false Position.none (ml ovars txt);
-  ThyOutput.output_list (fn _ => fn s => Pretty.str s) src ctxt (space_explode "\n" txt))
-
-fun output_ml ml src ctxt txt =
-  output_ml_open (K ml) src ctxt (txt,()) 
-
-fun add_response_indicator txt =
-  map (fn s => "> " ^ s) (space_explode "\n" txt)
-
-fun output_ml_response ml src ctxt (lhs,pat) = 
-  (ML_Context.eval_in (SOME ctxt) false Position.none (ml (lhs,pat));
-  let val txt = (space_explode "\n" lhs) @ (add_response_indicator pat)
-  in ThyOutput.output_list (fn _ => fn s => Pretty.str s) src ctxt txt end)
-
-fun output_ml_response_fake ml src ctxt (lhs,pat) = 
-  (ML_Context.eval_in (SOME ctxt) false Position.none (ml lhs);
-  let val txt = (space_explode "\n" lhs) @ (add_response_indicator pat)
-  in ThyOutput.output_list (fn _ => fn s => Pretty.str s) src ctxt txt end)
-
-fun check_file_exists ctxt name =
-  if File.exists (Path.append (Path.explode ("~~/src")) (Path.explode name)) then ()
-  else error ("Source file " ^ quote name ^ " does not exist.")
-
-val _ = ThyOutput.add_commands
-  [("ML_open", ThyOutput.args (Scan.lift (Args.name --
-      (Scan.optional (Args.$$$ "for" |-- OuterParse.!!! (Scan.repeat1 Args.name)) [] --
-       Scan.optional (Args.parens (OuterParse.list1 Args.name)) []))) (output_ml_open ml_val_open)),
-   ("ML_file", ThyOutput.args (Scan.lift Args.name)
-      (ThyOutput.output (fn _ => fn s => (check_file_exists s; Pretty.str s)))),
-   ("ML_text", ThyOutput.args (Scan.lift Args.name)
-      (ThyOutput.output (fn _ => fn s => Pretty.str s))),
-   ("ML", ThyOutput.args (Scan.lift Args.name) (output_ml ml_val)),
-   ("ML_decl", ThyOutput.args (Scan.lift Args.name) (output_ml ml_decl)),
-   ("ML_response", ThyOutput.args (Scan.lift (Args.name -- Args.name)) (output_ml_response ml_pat)),
-   ("ML_response_fake", 
-           ThyOutput.args (Scan.lift (Args.name -- Args.name)) (output_ml_response_fake ml_val)),
-   ("ML_struct", ThyOutput.args (Scan.lift Args.name) (output_ml ml_struct)),
-   ("ML_type", ThyOutput.args (Scan.lift Args.name) (output_ml ml_type))];
-   
-end;
--- a/CookBook/document/root.tex	Mon Oct 27 18:48:52 2008 +0100
+++ b/CookBook/document/root.tex	Wed Oct 29 13:58:36 2008 +0100
@@ -49,6 +49,7 @@
 % a table environment with proper indentation
 \newenvironment{mytable}{\begin{trivlist}\item\begin{tabular}{@{\hspace{2ex}}l}}{\end{tabular}\end{trivlist}}
 
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
 \hyphenation{Isabelle}
 
 \begin{document}
Binary file cookbook.pdf has changed