# HG changeset patch
# User Christian Urban
# Date 1239145179 -3600
# Node ID 8def5082432010e633afe782a77d1b0f3385ef19
# Parent  abc7f90188af7ccd236ad90cff708a555a93c7e2
added material about OuterKeyword.keyword and OuterParse.reserved

diff -r abc7f90188af -r 8def50824320 ProgTutorial/FirstSteps.thy
--- a/ProgTutorial/FirstSteps.thy	Tue Apr 07 17:04:39 2009 +0100
+++ b/ProgTutorial/FirstSteps.thy	Tue Apr 07 23:59:39 2009 +0100
@@ -919,7 +919,7 @@
 "Const (\"op =\", \"int \<Rightarrow> int \<Rightarrow> bool\")
    $ Free (\"a\", \"int\") $ Const (\"HOL.one_class.one\", \"int\")"}
 
-  (FIXME: readmore about types)
+  (FIXME: a readmore about types)
 
 *}
 
@@ -1114,7 +1114,7 @@
   (FIXME: handy functions working on theorems, like
   @{ML ObjectLogic.rulify} and so on)
 
-  (FIXME how to add case-names to goal states - maybe in the
+  (FIXME: how to add case-names to goal states - maybe in the
   next section)
 *}
 
diff -r abc7f90188af -r 8def50824320 ProgTutorial/Parsing.thy
--- a/ProgTutorial/Parsing.thy	Tue Apr 07 17:04:39 2009 +0100
+++ b/ProgTutorial/Parsing.thy	Tue Apr 07 23:59:39 2009 +0100
@@ -2,7 +2,6 @@
 imports Base "Package/Simple_Inductive_Package"
 begin
 
-
 chapter {* Parsing *}
 
 text {*
@@ -359,9 +358,7 @@
 text {*
   The reason for using token parsers is that theory syntax, as well as the
   parsers for the arguments of proof methods, use the type @{ML_type
-  OuterLex.token} (which is identical to the type @{ML_type
-  OuterParse.token}). However, there are also handy parsers for
-  ML-expressions and ML-files.
+  OuterLex.token}.
 
   \begin{readmore}
   The parser functions for the theory syntax are contained in the structure
@@ -369,17 +366,17 @@
   The definition for tokens is in the file @{ML_file "Pure/Isar/outer_lex.ML"}.
   \end{readmore}
 
-  The structure @{ML_struct OuterLex} defines several kinds of tokens (for example
-  @{ML "Ident" in OuterLex} for identifiers, @{ML "Keyword" in OuterLex} for keywords and
-  @{ML "Command" in OuterLex} for commands). Some token parsers take into account the
-  kind of tokens.
-*}
+  The structure @{ML_struct OuterLex} defines several kinds of tokens (for
+  example @{ML "Ident" in OuterLex} for identifiers, @{ML "Keyword" in
+  OuterLex} for keywords and @{ML "Command" in OuterLex} for commands). Some
+  token parsers take into account the kind of tokens. The first example shows
+  how to generate a token list out of a string using the function @{ML
+  "OuterSyntax.scan"}. It is given the argument @{ML "Position.none"} since,
+  at the moment, we are not interested in generating precise error
+  messages. The following code\footnote{Note that because of a possible bug in
+  the PolyML runtime system, the result is printed as @{text [quotes] "?"},
+  instead of the tokens.}
 
-text {*
-  The first example shows how to generate a token list out of a string using
-  the function @{ML "OuterSyntax.scan"}. It is given the argument @{ML "Position.none"}
-  since, at the moment, we are not interested in generating
-  precise error messages. The following code
 
   @{ML_response_fake [display,gray] "OuterSyntax.scan Position.none \"hello world\""
 "[Token (\<dots>,(Ident, \"hello\"),\<dots>),
@@ -388,9 +385,21 @@
 
   produces three tokens where the first and the last are identifiers, since
   @{text [quotes] "hello"} and @{text [quotes] "world"} do not match any
-  other syntactic category.\footnote{Note that because of a possible bug in
-  the PolyML runtime system, the result is printed as @{text [quotes] "?"}, instead of
-  the tokens.} The second indicates a space.
+  other syntactic category. The second indicates a space.
+
+  We can easily change what is recognised as a keyword with
+  @{ML OuterKeyword.keyword}. For example calling this function
+*}
+
+ML{*val _ = OuterKeyword.keyword "hello"*}
+
+text {*
+  then lexing @{text [quotes] "hello world"} will produce
+
+  @{ML_response_fake [display,gray] "OuterSyntax.scan Position.none \"hello world\""
+"[Token (\<dots>,(Keyword, \"hello\"),\<dots>),
+ Token (\<dots>,(Space, \" \"),\<dots>),
+ Token (\<dots>,(Ident, \"world\"),\<dots>)]"}
 
   Many parsing functions later on will require spaces, comments and the like
   to have already been filtered out. So from now on we are going to use the
@@ -405,14 +414,12 @@
 "[Token (\<dots>,(Ident, \"hello\"), \<dots>),
  Token (\<dots>,(Ident, \"world\"), \<dots>)]"}
 
   For convenience we define the function:
-
 *}
 
 ML{*fun filtered_input str =
   filter OuterLex.is_proper (OuterSyntax.scan Position.none str) *}
 
-text {*
-
+text {*
   If you now parse
   @{ML_response_fake [display,gray]
@@ -445,6 +452,18 @@
 end"
 "((\"where\",\<dots>), (\"|\",\<dots>))"}
 
+  Any non-keyword string can be parsed with the function @{ML OuterParse.reserved}.
+  For example:
+
+  @{ML_response [display,gray]
+"let
+  val p = OuterParse.reserved \"bar\"
+  val input = filtered_input \"bar\"
+in
+  p input
+end"
+  "(\"bar\",[])"}
+
   Like before, you can sequentially connect parsers with @{ML "--"}. For example:
 
   @{ML_response [display,gray]
@@ -485,7 +504,6 @@
 "([\"in\", \"in\", \"in\"], [])"}
 
   The following function will help to run examples.
-
 *}
 
 ML{*fun parse p input = Scan.finite OuterLex.stopper (Scan.error p) input *}
@@ -556,6 +574,10 @@
 
 *}
 
+text {*
+  (FIXME: there are also handy parsers for ML-expressions and ML-files)
+*}
+
 section {* Context Parser (TBD) *}
 
 text {*
@@ -901,10 +923,15 @@
 
   If you now build a theory on top of @{text "Command.thy"},
   then the command \isacommand{foobar} can be used.
-  Similarly with any other new command.
+  Similarly with any other new command, and also any new keyword that is
+  introduced with
+*}
 
-  At the moment \isacommand{foobar} is not very useful. Let us refine it a bit
+ML{*val _ = OuterKeyword.keyword "blink" *}
+
+text {*
+  At the moment the command \isacommand{foobar} is not very useful. Let us refine
+  it a bit
   next by letting it take a proposition as argument and printing this
   proposition inside the tracing buffer.
 
@@ -1002,7 +1029,7 @@
   \isacommand{done}
   \end{isabelle}
 
-  (FIXME read a name and show how to store theorems)
+  (FIXME: read a name and show how to store theorems)
 *}
 
 section {* Methods (TBD) *}
diff -r abc7f90188af -r 8def50824320 ProgTutorial/Tactical.thy
--- a/ProgTutorial/Tactical.thy	Tue Apr 07 17:04:39 2009 +0100
+++ b/ProgTutorial/Tactical.thy	Tue Apr 07 23:59:39 2009 +0100
@@ -1484,7 +1484,7 @@
 text {*
   where the second argument specifies the pattern and the right-hand side
   contains the code of the simproc (we have to use @{ML K} since we ignoring
-  an argument about morphisms\footnote{FIXME: what does the morphism do?}).
+  an argument about morphisms.
   After this, the simplifier is aware of the simproc and you can test whether
   it fires on the lemma:
 *}
diff -r abc7f90188af -r 8def50824320 progtutorial.pdf
Binary file progtutorial.pdf has changed
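The sketch below combines the two functions documented by this changeset, @{ML OuterKeyword.keyword} and @{ML OuterParse.reserved}, in a single ML block. It is an illustrative sketch only, not part of the changeset: it assumes the Isabelle 2009-era Pure sources (including @{ML OuterParse.$$$}) together with the helpers filtered_input and parse defined in Parsing.thy above, and the names "foo" and "bar" as well as the results quoted in the comments are made up for the example.

ML{*(* Illustrative sketch (editor's example, not from the changeset):
   register a new keyword, then parse a keyword followed by a
   non-keyword word.  Assumes filtered_input and parse from
   Parsing.thy; "foo" and "bar" are arbitrary example strings. *)

(* after this call the lexer classifies "foo" as a keyword *)
val _ = OuterKeyword.keyword "foo"

(* "foo" is consumed as a keyword, "bar" as a non-keyword via reserved *)
val foo_then_bar = OuterParse.$$$ "foo" -- OuterParse.reserved "bar"

(* expected to yield (("foo", "bar"), []) *)
val result = parse foo_then_bar (filtered_input "foo bar")*}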