1.1 --- a/NEWS Mon May 17 18:59:59 2010 -0700
1.2 +++ b/NEWS Tue May 18 06:28:42 2010 -0700
1.3 @@ -506,14 +506,18 @@
1.4 context actually works, but under normal circumstances one needs to
1.5 pass the proper local context through the code!
1.6
1.7 -* Renamed some important ML structures, while keeping the old names as
1.8 -legacy aliases for some time:
1.9 -
1.10 +* Renamed some important ML structures, while keeping the old names
1.11 +for some time as aliases within the structure Legacy:
1.12 +
1.13 + OuterKeyword ~> Keyword
1.14 + OuterLex ~> Token
1.15 + OuterParse ~> Parse
1.16 OuterSyntax ~> Outer_Syntax
1.17 - OuterKeyword ~> Keyword
1.18 - OuterParse ~> Parse
1.19 SpecParse ~> Parse_Spec
1.20
1.21 +Note that "open Legacy" simplifies porting of sources, but forgetting
1.22 +to remove it will complicate porting again in the future.
1.23 +
1.24
1.25 *** System ***
1.26
2.1 --- a/doc-src/antiquote_setup.ML Mon May 17 18:59:59 2010 -0700
2.2 +++ b/doc-src/antiquote_setup.ML Tue May 18 06:28:42 2010 -0700
2.3 @@ -166,9 +166,9 @@
2.4 in
2.5
2.6 val _ = entity_antiqs no_check "" "syntax";
2.7 -val _ = entity_antiqs (K (is_some o OuterKeyword.command_keyword)) "isacommand" "command";
2.8 -val _ = entity_antiqs (K OuterKeyword.is_keyword) "isakeyword" "keyword";
2.9 -val _ = entity_antiqs (K OuterKeyword.is_keyword) "isakeyword" "element";
2.10 +val _ = entity_antiqs (K (is_some o Keyword.command_keyword)) "isacommand" "command";
2.11 +val _ = entity_antiqs (K Keyword.is_keyword) "isakeyword" "keyword";
2.12 +val _ = entity_antiqs (K Keyword.is_keyword) "isakeyword" "element";
2.13 val _ = entity_antiqs (thy_check Method.intern Method.defined) "" "method";
2.14 val _ = entity_antiqs (thy_check Attrib.intern Attrib.defined) "" "attribute";
2.15 val _ = entity_antiqs no_check "" "fact";
3.1 --- a/doc-src/rail.ML Mon May 17 18:59:59 2010 -0700
3.2 +++ b/doc-src/rail.ML Tue May 18 06:28:42 2010 -0700
3.3 @@ -59,9 +59,9 @@
3.4 (* [(kind, (markup, check, style))*)
3.5 Symtab.make [
3.6 ("syntax", ("", no_check, true)),
3.7 - ("command", ("isacommand", K (is_some o OuterKeyword.command_keyword), true)),
3.8 - ("keyword", ("isakeyword", K OuterKeyword.is_keyword, true)),
3.9 - ("element", ("isakeyword", K OuterKeyword.is_keyword, true)),
3.10 + ("command", ("isacommand", K (is_some o Keyword.command_keyword), true)),
3.11 + ("keyword", ("isakeyword", K Keyword.is_keyword, true)),
3.12 + ("element", ("isakeyword", K Keyword.is_keyword, true)),
3.13 ("method", ("", thy_check Method.intern Method.defined, true)),
3.14 ("attribute", ("", thy_check Attrib.intern Attrib.defined, true)),
3.15 ("fact", ("", no_check, true)),
3.16 @@ -473,7 +473,7 @@
3.17 |> parse
3.18 |> print;
3.19
3.20 -val _ = ThyOutput.antiquotation "rail" (Scan.lift ( OuterParse.position Args.name ))
3.21 +val _ = ThyOutput.antiquotation "rail" (Scan.lift (Parse.position Args.name))
3.22 (fn {context = ctxt,...} => fn txt => process txt ctxt);
3.23
3.24 end;
4.1 --- a/src/FOL/ex/Iff_Oracle.thy Mon May 17 18:59:59 2010 -0700
4.2 +++ b/src/FOL/ex/Iff_Oracle.thy Tue May 18 06:28:42 2010 -0700
4.3 @@ -52,7 +52,7 @@
4.4 subsection {* Oracle as proof method *}
4.5
4.6 method_setup iff = {*
4.7 - Scan.lift OuterParse.nat >> (fn n => fn ctxt =>
4.8 + Scan.lift Parse.nat >> (fn n => fn ctxt =>
4.9 SIMPLE_METHOD
4.10 (HEADGOAL (Tactic.rtac (iff_oracle (ProofContext.theory_of ctxt, n)))
4.11 handle Fail _ => no_tac))
5.1 --- a/src/HOL/Boogie/Tools/boogie_commands.ML Mon May 17 18:59:59 2010 -0700
5.2 +++ b/src/HOL/Boogie/Tools/boogie_commands.ML Tue May 18 06:28:42 2010 -0700
5.3 @@ -263,31 +263,31 @@
5.4 fun scan_arg f = Args.parens f
5.5 fun scan_opt n = Scan.optional (scan_arg (Args.$$$ n >> K true)) false
5.6
5.7 -val vc_offsets = Scan.optional (Args.bracks (OuterParse.list1
5.8 - (OuterParse.string --| Args.colon -- OuterParse.nat))) []
5.9 +val vc_offsets = Scan.optional (Args.bracks (Parse.list1
5.10 + (Parse.string --| Args.colon -- Parse.nat))) []
5.11
5.12 val _ =
5.13 - OuterSyntax.command "boogie_open"
5.14 + Outer_Syntax.command "boogie_open"
5.15 "Open a new Boogie environment and load a Boogie-generated .b2i file."
5.16 - OuterKeyword.thy_decl
5.17 - (scan_opt "quiet" -- OuterParse.name -- vc_offsets >>
5.18 + Keyword.thy_decl
5.19 + (scan_opt "quiet" -- Parse.name -- vc_offsets >>
5.20 (Toplevel.theory o boogie_open))
5.21
5.22
5.23 -val vc_name = OuterParse.name
5.24 +val vc_name = Parse.name
5.25
5.26 -val vc_label = OuterParse.name
5.27 +val vc_label = Parse.name
5.28 val vc_labels = Scan.repeat1 vc_label
5.29
5.30 val vc_paths =
5.31 - OuterParse.nat -- (Args.$$$ "-" |-- OuterParse.nat) >> (op upto) ||
5.32 - OuterParse.nat >> single
5.33 + Parse.nat -- (Args.$$$ "-" |-- Parse.nat) >> (op upto) ||
5.34 + Parse.nat >> single
5.35
5.36 val vc_opts =
5.37 scan_arg
5.38 (scan_val "assertion" vc_label >> VC_Single ||
5.39 scan_val "examine" vc_labels >> VC_Examine ||
5.40 - scan_val "take" ((OuterParse.list vc_paths >> flat) -- Scan.option (
5.41 + scan_val "take" ((Parse.list vc_paths >> flat) -- Scan.option (
5.42 scan_val "without" vc_labels >> pair false ||
5.43 scan_val "and_also" vc_labels >> pair true) >> VC_Take) ||
5.44 scan_val "only" vc_labels >> VC_Only ||
5.45 @@ -295,9 +295,9 @@
5.46 Scan.succeed VC_Complete
5.47
5.48 val _ =
5.49 - OuterSyntax.command "boogie_vc"
5.50 + Outer_Syntax.command "boogie_vc"
5.51 "Enter into proof mode for a specific verification condition."
5.52 - OuterKeyword.thy_goal
5.53 + Keyword.thy_goal
5.54 (vc_name -- vc_opts >> (fn args =>
5.55 (Toplevel.print o Toplevel.theory_to_proof (boogie_vc args))))
5.56
5.57 @@ -305,7 +305,7 @@
5.58 val quick_timeout = 5
5.59 val default_timeout = 20
5.60
5.61 -fun timeout name = Scan.optional (scan_val name OuterParse.nat)
5.62 +fun timeout name = Scan.optional (scan_val name Parse.nat)
5.63
5.64 val status_test =
5.65 scan_arg (
5.66 @@ -328,18 +328,18 @@
5.67 f (Toplevel.theory_of state))
5.68
5.69 val _ =
5.70 - OuterSyntax.improper_command "boogie_status"
5.71 + Outer_Syntax.improper_command "boogie_status"
5.72 "Show the name and state of all loaded verification conditions."
5.73 - OuterKeyword.diag
5.74 + Keyword.diag
5.75 (status_test >> status_cmd ||
5.76 status_vc >> status_cmd ||
5.77 Scan.succeed (status_cmd boogie_status))
5.78
5.79
5.80 val _ =
5.81 - OuterSyntax.command "boogie_end"
5.82 + Outer_Syntax.command "boogie_end"
5.83 "Close the current Boogie environment."
5.84 - OuterKeyword.thy_decl
5.85 + Keyword.thy_decl
5.86 (Scan.succeed (Toplevel.theory boogie_end))
5.87
5.88
6.1 --- a/src/HOL/Decision_Procs/Approximation.thy Mon May 17 18:59:59 2010 -0700
6.2 +++ b/src/HOL/Decision_Procs/Approximation.thy Tue May 18 06:28:42 2010 -0700
6.3 @@ -3310,13 +3310,13 @@
6.4 by auto
6.5
6.6 method_setup approximation = {*
6.7 - Scan.lift (OuterParse.nat)
6.8 + Scan.lift Parse.nat
6.9 --
6.10 Scan.optional (Scan.lift (Args.$$$ "splitting" |-- Args.colon)
6.11 - |-- OuterParse.and_list' (free --| Scan.lift (Args.$$$ "=") -- Scan.lift OuterParse.nat)) []
6.12 + |-- Parse.and_list' (free --| Scan.lift (Args.$$$ "=") -- Scan.lift Parse.nat)) []
6.13 --
6.14 Scan.option (Scan.lift (Args.$$$ "taylor" |-- Args.colon)
6.15 - |-- (free |-- Scan.lift (Args.$$$ "=") |-- Scan.lift OuterParse.nat))
6.16 + |-- (free |-- Scan.lift (Args.$$$ "=") |-- Scan.lift Parse.nat))
6.17 >>
6.18 (fn ((prec, splitting), taylor) => fn ctxt =>
6.19 SIMPLE_METHOD' (fn i =>
7.1 --- a/src/HOL/Import/import_syntax.ML Mon May 17 18:59:59 2010 -0700
7.2 +++ b/src/HOL/Import/import_syntax.ML Tue May 18 06:28:42 2010 -0700
7.3 @@ -4,26 +4,23 @@
7.4
7.5 signature HOL4_IMPORT_SYNTAX =
7.6 sig
7.7 - type token = OuterLex.token
7.8 - type command = token list -> (theory -> theory) * token list
7.9 + val import_segment: (theory -> theory) parser
7.10 + val import_theory : (theory -> theory) parser
7.11 + val end_import : (theory -> theory) parser
7.12 +
7.13 + val setup_theory : (theory -> theory) parser
7.14 + val end_setup : (theory -> theory) parser
7.15 +
7.16 + val thm_maps : (theory -> theory) parser
7.17 + val ignore_thms : (theory -> theory) parser
7.18 + val type_maps : (theory -> theory) parser
7.19 + val def_maps : (theory -> theory) parser
7.20 + val const_maps : (theory -> theory) parser
7.21 + val const_moves : (theory -> theory) parser
7.22 + val const_renames : (theory -> theory) parser
7.23
7.24 - val import_segment: token list -> (theory -> theory) * token list
7.25 - val import_theory : token list -> (theory -> theory) * token list
7.26 - val end_import : token list -> (theory -> theory) * token list
7.27 -
7.28 - val setup_theory : token list -> (theory -> theory) * token list
7.29 - val end_setup : token list -> (theory -> theory) * token list
7.30 -
7.31 - val thm_maps : token list -> (theory -> theory) * token list
7.32 - val ignore_thms : token list -> (theory -> theory) * token list
7.33 - val type_maps : token list -> (theory -> theory) * token list
7.34 - val def_maps : token list -> (theory -> theory) * token list
7.35 - val const_maps : token list -> (theory -> theory) * token list
7.36 - val const_moves : token list -> (theory -> theory) * token list
7.37 - val const_renames : token list -> (theory -> theory) * token list
7.38 -
7.39 - val skip_import_dir : token list -> (theory -> theory) * token list
7.40 - val skip_import : token list -> (theory -> theory) * token list
7.41 + val skip_import_dir : (theory -> theory) parser
7.42 + val skip_import : (theory -> theory) parser
7.43
7.44 val setup : unit -> unit
7.45 end
7.46 @@ -31,28 +28,23 @@
7.47 structure HOL4ImportSyntax :> HOL4_IMPORT_SYNTAX =
7.48 struct
7.49
7.50 -type token = OuterLex.token
7.51 -type command = token list -> (theory -> theory) * token list
7.52 -
7.53 -local structure P = OuterParse and K = OuterKeyword in
7.54 -
7.55 (* Parsers *)
7.56
7.57 -val import_segment = P.name >> set_import_segment
7.58 +val import_segment = Parse.name >> set_import_segment
7.59
7.60
7.61 -val import_theory = P.name >> (fn thyname =>
7.62 +val import_theory = Parse.name >> (fn thyname =>
7.63 fn thy =>
7.64 thy |> set_generating_thy thyname
7.65 |> Sign.add_path thyname
7.66 |> add_dump (";setup_theory " ^ thyname))
7.67
7.68 fun do_skip_import_dir s = (ImportRecorder.set_skip_import_dir (SOME s); I)
7.69 -val skip_import_dir : command = P.string >> do_skip_import_dir
7.70 +val skip_import_dir = Parse.string >> do_skip_import_dir
7.71
7.72 val lower = String.map Char.toLower
7.73 fun do_skip_import s = (ImportRecorder.set_skip_import (case lower s of "on" => true | "off" => false | _ => Scan.fail ()); I)
7.74 -val skip_import : command = P.short_ident >> do_skip_import
7.75 +val skip_import = Parse.short_ident >> do_skip_import
7.76
7.77 fun end_import toks =
7.78 Scan.succeed
7.79 @@ -84,7 +76,7 @@
7.80 |> add_dump ";end_setup"
7.81 end) toks
7.82
7.83 -val ignore_thms = Scan.repeat1 P.name
7.84 +val ignore_thms = Scan.repeat1 Parse.name
7.85 >> (fn ignored =>
7.86 fn thy =>
7.87 let
7.88 @@ -93,7 +85,7 @@
7.89 Library.foldl (fn (thy,thmname) => ignore_hol4 thyname thmname thy) (thy,ignored)
7.90 end)
7.91
7.92 -val thm_maps = Scan.repeat1 (P.name --| P.$$$ ">" -- P.xname)
7.93 +val thm_maps = Scan.repeat1 (Parse.name --| Parse.$$$ ">" -- Parse.xname)
7.94 >> (fn thmmaps =>
7.95 fn thy =>
7.96 let
7.97 @@ -102,7 +94,7 @@
7.98 Library.foldl (fn (thy,(hol,isa)) => add_hol4_mapping thyname hol isa thy) (thy,thmmaps)
7.99 end)
7.100
7.101 -val type_maps = Scan.repeat1 (P.name --| P.$$$ ">" -- P.xname)
7.102 +val type_maps = Scan.repeat1 (Parse.name --| Parse.$$$ ">" -- Parse.xname)
7.103 >> (fn typmaps =>
7.104 fn thy =>
7.105 let
7.106 @@ -111,7 +103,7 @@
7.107 Library.foldl (fn (thy,(hol,isa)) => add_hol4_type_mapping thyname hol false isa thy) (thy,typmaps)
7.108 end)
7.109
7.110 -val def_maps = Scan.repeat1 (P.name --| P.$$$ ">" -- P.xname)
7.111 +val def_maps = Scan.repeat1 (Parse.name --| Parse.$$$ ">" -- Parse.xname)
7.112 >> (fn defmaps =>
7.113 fn thy =>
7.114 let
7.115 @@ -120,7 +112,7 @@
7.116 Library.foldl (fn (thy,(hol,isa)) => add_defmap thyname hol isa thy) (thy,defmaps)
7.117 end)
7.118
7.119 -val const_renames = Scan.repeat1 (P.name --| P.$$$ ">" -- P.name)
7.120 +val const_renames = Scan.repeat1 (Parse.name --| Parse.$$$ ">" -- Parse.name)
7.121 >> (fn renames =>
7.122 fn thy =>
7.123 let
7.124 @@ -129,7 +121,7 @@
7.125 Library.foldl (fn (thy,(hol,isa)) => add_hol4_const_renaming thyname hol isa thy) (thy,renames)
7.126 end)
7.127
7.128 -val const_maps = Scan.repeat1 (P.name --| P.$$$ ">" -- P.xname -- Scan.option (P.$$$ "::" |-- P.typ))
7.129 +val const_maps = Scan.repeat1 (Parse.name --| Parse.$$$ ">" -- Parse.xname -- Scan.option (Parse.$$$ "::" |-- Parse.typ))
7.130 >> (fn constmaps =>
7.131 fn thy =>
7.132 let
7.133 @@ -139,7 +131,7 @@
7.134 | (thy,((hol,isa),SOME ty)) => add_hol4_const_wt_mapping thyname hol false isa (Syntax.read_typ_global thy ty) thy) (thy,constmaps)
7.135 end)
7.136
7.137 -val const_moves = Scan.repeat1 (P.name --| P.$$$ ">" -- P.xname -- Scan.option (P.$$$ "::" |-- P.typ))
7.138 +val const_moves = Scan.repeat1 (Parse.name --| Parse.$$$ ">" -- Parse.xname -- Scan.option (Parse.$$$ "::" |-- Parse.typ))
7.139 >> (fn constmaps =>
7.140 fn thy =>
7.141 let
7.142 @@ -160,18 +152,18 @@
7.143 (Scan.make_lexicon (map Symbol.explode ["import_segment","ignore_thms","import","end",">","::","const_maps","const_moves","thm_maps","const_renames","type_maps","def_maps"]),
7.144 Scan.empty_lexicon)
7.145 val get_lexes = fn () => !lexes
7.146 - val token_source = OuterLex.source {do_recover = NONE} get_lexes Position.start symb_source
7.147 - val token_list = filter_out (OuterLex.is_kind OuterLex.Space) (Source.exhaust token_source)
7.148 - val import_segmentP = OuterParse.$$$ "import_segment" |-- import_segment
7.149 - val type_mapsP = OuterParse.$$$ "type_maps" |-- type_maps
7.150 - val def_mapsP = OuterParse.$$$ "def_maps" |-- def_maps
7.151 - val const_mapsP = OuterParse.$$$ "const_maps" |-- const_maps
7.152 - val const_movesP = OuterParse.$$$ "const_moves" |-- const_moves
7.153 - val const_renamesP = OuterParse.$$$ "const_renames" |-- const_renames
7.154 - val ignore_thmsP = OuterParse.$$$ "ignore_thms" |-- ignore_thms
7.155 - val thm_mapsP = OuterParse.$$$ "thm_maps" |-- thm_maps
7.156 + val token_source = Token.source {do_recover = NONE} get_lexes Position.start symb_source
7.157 + val token_list = filter_out (Token.is_kind Token.Space) (Source.exhaust token_source)
7.158 + val import_segmentP = Parse.$$$ "import_segment" |-- import_segment
7.159 + val type_mapsP = Parse.$$$ "type_maps" |-- type_maps
7.160 + val def_mapsP = Parse.$$$ "def_maps" |-- def_maps
7.161 + val const_mapsP = Parse.$$$ "const_maps" |-- const_maps
7.162 + val const_movesP = Parse.$$$ "const_moves" |-- const_moves
7.163 + val const_renamesP = Parse.$$$ "const_renames" |-- const_renames
7.164 + val ignore_thmsP = Parse.$$$ "ignore_thms" |-- ignore_thms
7.165 + val thm_mapsP = Parse.$$$ "thm_maps" |-- thm_maps
7.166 val parser = type_mapsP || def_mapsP || const_mapsP || const_movesP || const_renamesP || thm_mapsP || ignore_thmsP || import_segmentP
7.167 - val importP = OuterParse.$$$ "import" |-- Scan.repeat parser --| OuterParse.$$$ "end"
7.168 + val importP = Parse.$$$ "import" |-- Scan.repeat parser --| Parse.$$$ "end"
7.169 fun apply [] thy = thy
7.170 | apply (f::fs) thy = apply fs (f thy)
7.171 in
7.172 @@ -194,7 +186,7 @@
7.173 NONE => error "Import data already cleared - forgotten a setup_theory?"
7.174 | SOME _ => ImportData.put NONE thy
7.175
7.176 -val setup_theory = P.name
7.177 +val setup_theory = Parse.name
7.178 >>
7.179 (fn thyname =>
7.180 fn thy =>
7.181 @@ -218,64 +210,62 @@
7.182 |> Sign.parent_path
7.183 end) toks
7.184
7.185 -val set_dump = P.string -- P.string >> setup_dump
7.186 +val set_dump = Parse.string -- Parse.string >> setup_dump
7.187 fun fl_dump toks = Scan.succeed flush_dump toks
7.188 -val append_dump = (P.verbatim || P.string) >> add_dump
7.189 +val append_dump = (Parse.verbatim || Parse.string) >> add_dump
7.190
7.191 fun setup () =
7.192 - (OuterKeyword.keyword ">";
7.193 - OuterSyntax.command "import_segment"
7.194 + (Keyword.keyword ">";
7.195 + Outer_Syntax.command "import_segment"
7.196 "Set import segment name"
7.197 - K.thy_decl (import_segment >> Toplevel.theory);
7.198 - OuterSyntax.command "import_theory"
7.199 + Keyword.thy_decl (import_segment >> Toplevel.theory);
7.200 + Outer_Syntax.command "import_theory"
7.201 "Set default HOL4 theory name"
7.202 - K.thy_decl (import_theory >> Toplevel.theory);
7.203 - OuterSyntax.command "end_import"
7.204 + Keyword.thy_decl (import_theory >> Toplevel.theory);
7.205 + Outer_Syntax.command "end_import"
7.206 "End HOL4 import"
7.207 - K.thy_decl (end_import >> Toplevel.theory);
7.208 - OuterSyntax.command "skip_import_dir"
7.209 + Keyword.thy_decl (end_import >> Toplevel.theory);
7.210 + Outer_Syntax.command "skip_import_dir"
7.211 "Sets caching directory for skipping importing"
7.212 - K.thy_decl (skip_import_dir >> Toplevel.theory);
7.213 - OuterSyntax.command "skip_import"
7.214 + Keyword.thy_decl (skip_import_dir >> Toplevel.theory);
7.215 + Outer_Syntax.command "skip_import"
7.216 "Switches skipping importing on or off"
7.217 - K.thy_decl (skip_import >> Toplevel.theory);
7.218 - OuterSyntax.command "setup_theory"
7.219 + Keyword.thy_decl (skip_import >> Toplevel.theory);
7.220 + Outer_Syntax.command "setup_theory"
7.221 "Setup HOL4 theory replaying"
7.222 - K.thy_decl (setup_theory >> Toplevel.theory);
7.223 - OuterSyntax.command "end_setup"
7.224 + Keyword.thy_decl (setup_theory >> Toplevel.theory);
7.225 + Outer_Syntax.command "end_setup"
7.226 "End HOL4 setup"
7.227 - K.thy_decl (end_setup >> Toplevel.theory);
7.228 - OuterSyntax.command "setup_dump"
7.229 + Keyword.thy_decl (end_setup >> Toplevel.theory);
7.230 + Outer_Syntax.command "setup_dump"
7.231 "Setup the dump file name"
7.232 - K.thy_decl (set_dump >> Toplevel.theory);
7.233 - OuterSyntax.command "append_dump"
7.234 + Keyword.thy_decl (set_dump >> Toplevel.theory);
7.235 + Outer_Syntax.command "append_dump"
7.236 "Add text to dump file"
7.237 - K.thy_decl (append_dump >> Toplevel.theory);
7.238 - OuterSyntax.command "flush_dump"
7.239 + Keyword.thy_decl (append_dump >> Toplevel.theory);
7.240 + Outer_Syntax.command "flush_dump"
7.241 "Write the dump to file"
7.242 - K.thy_decl (fl_dump >> Toplevel.theory);
7.243 - OuterSyntax.command "ignore_thms"
7.244 + Keyword.thy_decl (fl_dump >> Toplevel.theory);
7.245 + Outer_Syntax.command "ignore_thms"
7.246 "Theorems to ignore in next HOL4 theory import"
7.247 - K.thy_decl (ignore_thms >> Toplevel.theory);
7.248 - OuterSyntax.command "type_maps"
7.249 + Keyword.thy_decl (ignore_thms >> Toplevel.theory);
7.250 + Outer_Syntax.command "type_maps"
7.251 "Map HOL4 type names to existing Isabelle/HOL type names"
7.252 - K.thy_decl (type_maps >> Toplevel.theory);
7.253 - OuterSyntax.command "def_maps"
7.254 + Keyword.thy_decl (type_maps >> Toplevel.theory);
7.255 + Outer_Syntax.command "def_maps"
7.256 "Map HOL4 constant names to their primitive definitions"
7.257 - K.thy_decl (def_maps >> Toplevel.theory);
7.258 - OuterSyntax.command "thm_maps"
7.259 + Keyword.thy_decl (def_maps >> Toplevel.theory);
7.260 + Outer_Syntax.command "thm_maps"
7.261 "Map HOL4 theorem names to existing Isabelle/HOL theorem names"
7.262 - K.thy_decl (thm_maps >> Toplevel.theory);
7.263 - OuterSyntax.command "const_renames"
7.264 + Keyword.thy_decl (thm_maps >> Toplevel.theory);
7.265 + Outer_Syntax.command "const_renames"
7.266 "Rename HOL4 const names"
7.267 - K.thy_decl (const_renames >> Toplevel.theory);
7.268 - OuterSyntax.command "const_moves"
7.269 + Keyword.thy_decl (const_renames >> Toplevel.theory);
7.270 + Outer_Syntax.command "const_moves"
7.271 "Rename HOL4 const names to other HOL4 constants"
7.272 - K.thy_decl (const_moves >> Toplevel.theory);
7.273 - OuterSyntax.command "const_maps"
7.274 + Keyword.thy_decl (const_moves >> Toplevel.theory);
7.275 + Outer_Syntax.command "const_maps"
7.276 "Map HOL4 const names to existing Isabelle/HOL const names"
7.277 - K.thy_decl (const_maps >> Toplevel.theory));
7.278 + Keyword.thy_decl (const_maps >> Toplevel.theory));
7.279
7.280 end
7.281 -
7.282 -end
8.1 --- a/src/HOL/Import/proof_kernel.ML Mon May 17 18:59:59 2010 -0700
8.2 +++ b/src/HOL/Import/proof_kernel.ML Tue May 18 06:28:42 2010 -0700
8.3 @@ -189,7 +189,7 @@
8.4 else Delimfix (Syntax.escape c)
8.5
8.6 fun quotename c =
8.7 - if Syntax.is_identifier c andalso not (OuterKeyword.is_keyword c) then c else quote c
8.8 + if Syntax.is_identifier c andalso not (Keyword.is_keyword c) then c else quote c
8.9
8.10 fun simple_smart_string_of_cterm ct =
8.11 let
9.1 --- a/src/HOL/Library/Sum_Of_Squares/sos_wrapper.ML Mon May 17 18:59:59 2010 -0700
9.2 +++ b/src/HOL/Library/Sum_Of_Squares/sos_wrapper.ML Tue May 18 06:28:42 2010 -0700
9.3 @@ -144,11 +144,11 @@
9.4
9.5 val setup =
9.6 Method.setup @{binding sos}
9.7 - (Scan.lift (Scan.option OuterParse.xname)
9.8 + (Scan.lift (Scan.option Parse.xname)
9.9 >> (sos_solver print_cert o Sum_Of_Squares.Prover o call_solver))
9.10 "prove universal problems over the reals using sums of squares" #>
9.11 Method.setup @{binding sos_cert}
9.12 - (Scan.lift OuterParse.string
9.13 + (Scan.lift Parse.string
9.14 >> (fn cert => fn ctxt =>
9.15 sos_solver ignore
9.16 (Sum_Of_Squares.Certificate (PositivstellensatzTools.cert_to_pss_tree ctxt cert)) ctxt))
10.1 --- a/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Mon May 17 18:59:59 2010 -0700
10.2 +++ b/src/HOL/Mirabelle/Tools/mirabelle_sledgehammer.ML Tue May 18 06:28:42 2010 -0700
10.3 @@ -330,14 +330,14 @@
10.4
10.5 fun thms_of_name ctxt name =
10.6 let
10.7 - val lex = OuterKeyword.get_lexicons
10.8 + val lex = Keyword.get_lexicons
10.9 val get = maps (ProofContext.get_fact ctxt o fst)
10.10 in
10.11 Source.of_string name
10.12 |> Symbol.source {do_recover=false}
10.13 - |> OuterLex.source {do_recover=SOME false} lex Position.start
10.14 - |> OuterLex.source_proper
10.15 - |> Source.source OuterLex.stopper (Parse_Spec.xthms1 >> get) NONE
10.16 + |> Token.source {do_recover=SOME false} lex Position.start
10.17 + |> Token.source_proper
10.18 + |> Source.source Token.stopper (Parse_Spec.xthms1 >> get) NONE
10.19 |> Source.exhaust
10.20 end
10.21
11.1 --- a/src/HOL/Nominal/nominal_atoms.ML Mon May 17 18:59:59 2010 -0700
11.2 +++ b/src/HOL/Nominal/nominal_atoms.ML Tue May 18 06:28:42 2010 -0700
11.3 @@ -1004,10 +1004,10 @@
11.4
11.5
11.6 (* syntax und parsing *)
11.7 -structure P = OuterParse and K = OuterKeyword;
11.8 +structure P = Parse and K = Keyword;
11.9
11.10 val _ =
11.11 - OuterSyntax.command "atom_decl" "Declare new kinds of atoms" K.thy_decl
11.12 - (Scan.repeat1 P.name >> (Toplevel.theory o create_nom_typedecls));
11.13 + Outer_Syntax.command "atom_decl" "declare new kinds of atoms" Keyword.thy_decl
11.14 + (Scan.repeat1 Parse.name >> (Toplevel.theory o create_nom_typedecls));
11.15
11.16 end;
12.1 --- a/src/HOL/Nominal/nominal_datatype.ML Mon May 17 18:59:59 2010 -0700
12.2 +++ b/src/HOL/Nominal/nominal_datatype.ML Tue May 18 06:28:42 2010 -0700
12.3 @@ -2076,11 +2076,10 @@
12.4
12.5 (* FIXME: The following stuff should be exported by Datatype *)
12.6
12.7 -local structure P = OuterParse and K = OuterKeyword in
12.8 -
12.9 val datatype_decl =
12.10 - Scan.option (P.$$$ "(" |-- P.name --| P.$$$ ")") -- P.type_args -- P.name -- P.opt_mixfix --
12.11 - (P.$$$ "=" |-- P.enum1 "|" (P.name -- Scan.repeat P.typ -- P.opt_mixfix));
12.12 + Scan.option (Parse.$$$ "(" |-- Parse.name --| Parse.$$$ ")") --
12.13 + Parse.type_args -- Parse.name -- Parse.opt_mixfix --
12.14 + (Parse.$$$ "=" |-- Parse.enum1 "|" (Parse.name -- Scan.repeat Parse.typ -- Parse.opt_mixfix));
12.15
12.16 fun mk_datatype args =
12.17 let
12.18 @@ -2090,9 +2089,7 @@
12.19 in add_nominal_datatype Datatype.default_config names specs end;
12.20
12.21 val _ =
12.22 - OuterSyntax.command "nominal_datatype" "define inductive datatypes" K.thy_decl
12.23 - (P.and_list1 datatype_decl >> (Toplevel.theory o mk_datatype));
12.24 -
12.25 -end;
12.26 + Outer_Syntax.command "nominal_datatype" "define inductive datatypes" Keyword.thy_decl
12.27 + (Parse.and_list1 datatype_decl >> (Toplevel.theory o mk_datatype));
12.28
12.29 end
13.1 --- a/src/HOL/Nominal/nominal_induct.ML Mon May 17 18:59:59 2010 -0700
13.2 +++ b/src/HOL/Nominal/nominal_induct.ML Tue May 18 06:28:42 2010 -0700
13.3 @@ -138,8 +138,6 @@
13.4
13.5 local
13.6
13.7 -structure P = OuterParse;
13.8 -
13.9 val avoidingN = "avoiding";
13.10 val fixingN = "arbitrary"; (* to be consistent with induct; hopefully this changes again *)
13.11 val ruleN = "rule";
13.12 @@ -165,14 +163,14 @@
13.13 Scan.repeat (unless_more_args free)) [];
13.14
13.15 val fixing = Scan.optional (Scan.lift (Args.$$$ fixingN -- Args.colon) |--
13.16 - P.and_list' (Scan.repeat (unless_more_args free))) [];
13.17 + Parse.and_list' (Scan.repeat (unless_more_args free))) [];
13.18
13.19 val rule_spec = Scan.lift (Args.$$$ "rule" -- Args.colon) |-- Attrib.thms;
13.20
13.21 in
13.22
13.23 val nominal_induct_method =
13.24 - Args.mode Induct.no_simpN -- (P.and_list' (Scan.repeat (unless_more_args def_inst)) --
13.25 + Args.mode Induct.no_simpN -- (Parse.and_list' (Scan.repeat (unless_more_args def_inst)) --
13.26 avoiding -- fixing -- rule_spec) >>
13.27 (fn (no_simp, (((x, y), z), w)) => fn ctxt =>
13.28 RAW_METHOD_CASES (fn facts =>
14.1 --- a/src/HOL/Nominal/nominal_inductive.ML Mon May 17 18:59:59 2010 -0700
14.2 +++ b/src/HOL/Nominal/nominal_inductive.ML Tue May 18 06:28:42 2010 -0700
14.3 @@ -672,23 +672,20 @@
14.4
14.5 (* outer syntax *)
14.6
14.7 -local structure P = OuterParse and K = OuterKeyword in
14.8 -
14.9 -val _ = OuterKeyword.keyword "avoids";
14.10 +val _ = Keyword.keyword "avoids";
14.11
14.12 val _ =
14.13 - OuterSyntax.local_theory_to_proof "nominal_inductive"
14.14 - "prove equivariance and strong induction theorem for inductive predicate involving nominal datatypes" K.thy_goal
14.15 - (P.xname -- Scan.optional (P.$$$ "avoids" |-- P.and_list1 (P.name --
14.16 - (P.$$$ ":" |-- Scan.repeat1 P.name))) [] >> (fn (name, avoids) =>
14.17 + Outer_Syntax.local_theory_to_proof "nominal_inductive"
14.18 + "prove equivariance and strong induction theorem for inductive predicate involving nominal datatypes"
14.19 + Keyword.thy_goal
14.20 + (Parse.xname -- Scan.optional (Parse.$$$ "avoids" |-- Parse.and_list1 (Parse.name --
14.21 + (Parse.$$$ ":" |-- Scan.repeat1 Parse.name))) [] >> (fn (name, avoids) =>
14.22 prove_strong_ind name avoids));
14.23
14.24 val _ =
14.25 - OuterSyntax.local_theory "equivariance"
14.26 - "prove equivariance for inductive predicate involving nominal datatypes" K.thy_decl
14.27 - (P.xname -- Scan.optional (P.$$$ "[" |-- P.list1 P.name --| P.$$$ "]") [] >>
14.28 + Outer_Syntax.local_theory "equivariance"
14.29 + "prove equivariance for inductive predicate involving nominal datatypes" Keyword.thy_decl
14.30 + (Parse.xname -- Scan.optional (Parse.$$$ "[" |-- Parse.list1 Parse.name --| Parse.$$$ "]") [] >>
14.31 (fn (name, atoms) => prove_eqvt name atoms));
14.32
14.33 -end;
14.34 -
14.35 end
15.1 --- a/src/HOL/Nominal/nominal_inductive2.ML Mon May 17 18:59:59 2010 -0700
15.2 +++ b/src/HOL/Nominal/nominal_inductive2.ML Tue May 18 06:28:42 2010 -0700
15.3 @@ -485,17 +485,14 @@
15.4
15.5 (* outer syntax *)
15.6
15.7 -local structure P = OuterParse and K = OuterKeyword in
15.8 -
15.9 val _ =
15.10 - OuterSyntax.local_theory_to_proof "nominal_inductive2"
15.11 - "prove strong induction theorem for inductive predicate involving nominal datatypes" K.thy_goal
15.12 - (P.xname --
15.13 - Scan.option (P.$$$ "(" |-- P.!!! (P.name --| P.$$$ ")")) --
15.14 - (Scan.optional (P.$$$ "avoids" |-- P.enum1 "|" (P.name --
15.15 - (P.$$$ ":" |-- P.and_list1 P.term))) []) >> (fn ((name, rule_name), avoids) =>
15.16 + Outer_Syntax.local_theory_to_proof "nominal_inductive2"
15.17 + "prove strong induction theorem for inductive predicate involving nominal datatypes"
15.18 + Keyword.thy_goal
15.19 + (Parse.xname --
15.20 + Scan.option (Parse.$$$ "(" |-- Parse.!!! (Parse.name --| Parse.$$$ ")")) --
15.21 + (Scan.optional (Parse.$$$ "avoids" |-- Parse.enum1 "|" (Parse.name --
15.22 + (Parse.$$$ ":" |-- Parse.and_list1 Parse.term))) []) >> (fn ((name, rule_name), avoids) =>
15.23 prove_strong_ind name rule_name avoids));
15.24
15.25 -end;
15.26 -
15.27 end
16.1 --- a/src/HOL/Nominal/nominal_primrec.ML Mon May 17 18:59:59 2010 -0700
16.2 +++ b/src/HOL/Nominal/nominal_primrec.ML Tue May 18 06:28:42 2010 -0700
16.3 @@ -393,28 +393,24 @@
16.4
16.5 (* outer syntax *)
16.6
16.7 -local structure P = OuterParse in
16.8 +val freshness_context = Parse.reserved "freshness_context";
16.9 +val invariant = Parse.reserved "invariant";
16.10
16.11 -val freshness_context = P.reserved "freshness_context";
16.12 -val invariant = P.reserved "invariant";
16.13 +fun unless_flag scan = Scan.unless ((freshness_context || invariant) -- Parse.$$$ ":") scan;
16.14
16.15 -fun unless_flag scan = Scan.unless ((freshness_context || invariant) -- P.$$$ ":") scan;
16.16 -
16.17 -val parser1 = (freshness_context -- P.$$$ ":") |-- unless_flag P.term >> SOME;
16.18 -val parser2 = (invariant -- P.$$$ ":") |--
16.19 - (Scan.repeat1 (unless_flag P.term) >> SOME) -- Scan.optional parser1 NONE ||
16.20 +val parser1 = (freshness_context -- Parse.$$$ ":") |-- unless_flag Parse.term >> SOME;
16.21 +val parser2 = (invariant -- Parse.$$$ ":") |--
16.22 + (Scan.repeat1 (unless_flag Parse.term) >> SOME) -- Scan.optional parser1 NONE ||
16.23 (parser1 >> pair NONE);
16.24 val options =
16.25 - Scan.optional (P.$$$ "(" |-- P.!!! (parser2 --| P.$$$ ")")) (NONE, NONE);
16.26 + Scan.optional (Parse.$$$ "(" |-- Parse.!!! (parser2 --| Parse.$$$ ")")) (NONE, NONE);
16.27
16.28 val _ =
16.29 - OuterSyntax.local_theory_to_proof "nominal_primrec"
16.30 - "define primitive recursive functions on nominal datatypes" OuterKeyword.thy_goal
16.31 - (options -- P.fixes -- P.for_fixes -- Parse_Spec.where_alt_specs
16.32 + Outer_Syntax.local_theory_to_proof "nominal_primrec"
16.33 + "define primitive recursive functions on nominal datatypes" Keyword.thy_goal
16.34 + (options -- Parse.fixes -- Parse.for_fixes -- Parse_Spec.where_alt_specs
16.35 >> (fn ((((invs, fctxt), fixes), params), specs) =>
16.36 add_primrec_cmd invs fctxt fixes params specs));
16.37
16.38 end;
16.39
16.40 -end;
16.41 -
17.1 --- a/src/HOL/Orderings.thy Mon May 17 18:59:59 2010 -0700
17.2 +++ b/src/HOL/Orderings.thy Tue May 18 06:28:42 2010 -0700
17.3 @@ -422,8 +422,8 @@
17.4 (** Diagnostic command **)
17.5
17.6 val _ =
17.7 - OuterSyntax.improper_command "print_orders"
17.8 - "print order structures available to transitivity reasoner" OuterKeyword.diag
17.9 + Outer_Syntax.improper_command "print_orders"
17.10 + "print order structures available to transitivity reasoner" Keyword.diag
17.11 (Scan.succeed (Toplevel.no_timing o Toplevel.unknown_context o
17.12 Toplevel.keep (print_structures o Toplevel.context_of)));
17.13
18.1 --- a/src/HOL/Statespace/state_space.ML Mon May 17 18:59:59 2010 -0700
18.2 +++ b/src/HOL/Statespace/state_space.ML Tue May 18 06:28:42 2010 -0700
18.3 @@ -32,10 +32,9 @@
18.4 (string * typ) list -> theory -> theory
18.5
18.6 val statespace_decl :
18.7 - OuterParse.token list ->
18.8 ((string list * bstring) *
18.9 ((string list * xstring * (bstring * bstring) list) list *
18.10 - (bstring * string) list)) * OuterParse.token list
18.11 + (bstring * string) list)) parser
18.12
18.13
18.14 val neq_x_y : Proof.context -> term -> term -> thm option
18.15 @@ -668,37 +667,33 @@
18.16
18.17 (*** outer syntax *)
18.18
18.19 -local structure P = OuterParse and K = OuterKeyword in
18.20 +val type_insts =
18.21 + Parse.typ >> single ||
18.22 + Parse.$$$ "(" |-- Parse.!!! (Parse.list1 Parse.typ --| Parse.$$$ ")")
18.23
18.24 -val type_insts =
18.25 - P.typ >> single ||
18.26 - P.$$$ "(" |-- P.!!! (P.list1 P.typ --| P.$$$ ")")
18.27 +val comp = Parse.name -- (Parse.$$$ "::" |-- Parse.!!! Parse.typ);
18.28 +fun plus1_unless test scan =
18.29 + scan ::: Scan.repeat (Parse.$$$ "+" |-- Scan.unless test (Parse.!!! scan));
18.30
18.31 -val comp = P.name -- (P.$$$ "::" |-- P.!!! P.typ);
18.32 -fun plus1_unless test scan =
18.33 - scan ::: Scan.repeat (P.$$$ "+" |-- Scan.unless test (P.!!! scan));
18.34 +val mapsto = Parse.$$$ "=";
18.35 +val rename = Parse.name -- (mapsto |-- Parse.name);
18.36 +val renames = Scan.optional (Parse.$$$ "[" |-- Parse.!!! (Parse.list1 rename --| Parse.$$$ "]")) [];
18.37
18.38 -val mapsto = P.$$$ "=";
18.39 -val rename = P.name -- (mapsto |-- P.name);
18.40 -val renames = Scan.optional (P.$$$ "[" |-- P.!!! (P.list1 rename --| P.$$$ "]")) [];
18.41
18.42 -
18.43 -val parent = ((type_insts -- P.xname) || (P.xname >> pair [])) -- renames
18.44 +val parent = ((type_insts -- Parse.xname) || (Parse.xname >> pair [])) -- renames
18.45 >> (fn ((insts,name),renames) => (insts,name,renames))
18.46
18.47
18.48 val statespace_decl =
18.49 - P.type_args -- P.name --
18.50 - (P.$$$ "=" |--
18.51 + Parse.type_args -- Parse.name --
18.52 + (Parse.$$$ "=" |--
18.53 ((Scan.repeat1 comp >> pair []) ||
18.54 (plus1_unless comp parent --
18.55 - Scan.optional (P.$$$ "+" |-- P.!!! (Scan.repeat1 comp)) [])))
18.56 + Scan.optional (Parse.$$$ "+" |-- Parse.!!! (Scan.repeat1 comp)) [])))
18.57
18.58 val statespace_command =
18.59 - OuterSyntax.command "statespace" "define state space" K.thy_decl
18.60 + Outer_Syntax.command "statespace" "define state space" Keyword.thy_decl
18.61 (statespace_decl >> (fn ((args,name),(parents,comps)) =>
18.62 Toplevel.theory (define_statespace args name parents comps)))
18.63
18.64 -end;
18.65 -
18.66 end;
18.67 \ No newline at end of file
19.1 --- a/src/HOL/Tools/Datatype/datatype.ML Mon May 17 18:59:59 2010 -0700
19.2 +++ b/src/HOL/Tools/Datatype/datatype.ML Tue May 18 06:28:42 2010 -0700
19.3 @@ -721,8 +721,6 @@
19.4
19.5 local
19.6
19.7 -structure P = OuterParse and K = OuterKeyword
19.8 -
19.9 fun prep_datatype_decls args =
19.10 let
19.11 val names = map
19.12 @@ -732,15 +730,16 @@
19.13 in (names, specs) end;
19.14
19.15 val parse_datatype_decl =
19.16 - (Scan.option (P.$$$ "(" |-- P.name --| P.$$$ ")") -- P.type_args -- P.binding -- P.opt_mixfix --
19.17 - (P.$$$ "=" |-- P.enum1 "|" (P.binding -- Scan.repeat P.typ -- P.opt_mixfix)));
19.18 + (Scan.option (Parse.$$$ "(" |-- Parse.name --| Parse.$$$ ")") --
19.19 + Parse.type_args -- Parse.binding -- Parse.opt_mixfix --
19.20 + (Parse.$$$ "=" |-- Parse.enum1 "|" (Parse.binding -- Scan.repeat Parse.typ -- Parse.opt_mixfix)));
19.21
19.22 -val parse_datatype_decls = P.and_list1 parse_datatype_decl >> prep_datatype_decls;
19.23 +val parse_datatype_decls = Parse.and_list1 parse_datatype_decl >> prep_datatype_decls;
19.24
19.25 in
19.26
19.27 val _ =
19.28 - OuterSyntax.command "datatype" "define inductive datatypes" K.thy_decl
19.29 + Outer_Syntax.command "datatype" "define inductive datatypes" Keyword.thy_decl
19.30 (parse_datatype_decls >> (fn (names, specs) => Toplevel.theory (datatype_cmd names specs)));
19.31
19.32 end;
20.1 --- a/src/HOL/Tools/Datatype/datatype_data.ML Mon May 17 18:59:59 2010 -0700
20.2 +++ b/src/HOL/Tools/Datatype/datatype_data.ML Tue May 18 06:28:42 2010 -0700
20.3 @@ -455,18 +455,11 @@
20.4
20.5 (* outer syntax *)
20.6
20.7 -local
20.8 -
20.9 -structure P = OuterParse and K = OuterKeyword
20.10 -
20.11 -in
20.12 -
20.13 val _ =
20.14 - OuterSyntax.command "rep_datatype" "represent existing types inductively" K.thy_goal
20.15 - (Scan.option (P.$$$ "(" |-- Scan.repeat1 P.name --| P.$$$ ")") -- Scan.repeat1 P.term
20.16 - >> (fn (alt_names, ts) => Toplevel.print
20.17 - o Toplevel.theory_to_proof (rep_datatype_cmd alt_names ts)));
20.18 + Outer_Syntax.command "rep_datatype" "represent existing types inductively" Keyword.thy_goal
20.19 + (Scan.option (Parse.$$$ "(" |-- Scan.repeat1 Parse.name --| Parse.$$$ ")") --
20.20 + Scan.repeat1 Parse.term
20.21 + >> (fn (alt_names, ts) =>
20.22 + Toplevel.print o Toplevel.theory_to_proof (rep_datatype_cmd alt_names ts)));
20.23
20.24 end;
20.25 -
20.26 -end;
21.1 --- a/src/HOL/Tools/Function/fun.ML Mon May 17 18:59:59 2010 -0700
21.2 +++ b/src/HOL/Tools/Function/fun.ML Tue May 18 06:28:42 2010 -0700
21.3 @@ -159,13 +159,10 @@
21.4
21.5
21.6
21.7 -local structure P = OuterParse and K = OuterKeyword in
21.8 -
21.9 val _ =
21.10 - OuterSyntax.local_theory "fun" "define general recursive functions (short version)" K.thy_decl
21.11 + Outer_Syntax.local_theory "fun" "define general recursive functions (short version)"
21.12 + Keyword.thy_decl
21.13 (function_parser fun_config
21.14 - >> (fn ((config, fixes), statements) => add_fun_cmd fixes statements config));
21.15 + >> (fn ((config, fixes), statements) => add_fun_cmd fixes statements config))
21.16
21.17 end
21.18 -
21.19 -end
22.1 --- a/src/HOL/Tools/Function/function.ML Mon May 17 18:59:59 2010 -0700
22.2 +++ b/src/HOL/Tools/Function/function.ML Tue May 18 06:28:42 2010 -0700
22.3 @@ -274,20 +274,19 @@
22.4 fun get_info ctxt t = Item_Net.retrieve (get_function ctxt) t
22.5 |> the_single |> snd
22.6
22.7 +
22.8 (* outer syntax *)
22.9
22.10 -local structure P = OuterParse and K = OuterKeyword in
22.11 -
22.12 val _ =
22.13 - OuterSyntax.local_theory_to_proof "function" "define general recursive functions" K.thy_goal
22.14 + Outer_Syntax.local_theory_to_proof "function" "define general recursive functions"
22.15 + Keyword.thy_goal
22.16 (function_parser default_config
22.17 >> (fn ((config, fixes), statements) => function_cmd fixes statements config))
22.18
22.19 val _ =
22.20 - OuterSyntax.local_theory_to_proof "termination" "prove termination of a recursive function" K.thy_goal
22.21 - (Scan.option P.term >> termination_cmd)
22.22 + Outer_Syntax.local_theory_to_proof "termination" "prove termination of a recursive function"
22.23 + Keyword.thy_goal
22.24 + (Scan.option Parse.term >> termination_cmd)
22.25 +
22.26
22.27 end
22.28 -
22.29 -
22.30 -end
23.1 --- a/src/HOL/Tools/Function/function_common.ML Mon May 17 18:59:59 2010 -0700
23.2 +++ b/src/HOL/Tools/Function/function_common.ML Tue May 18 06:28:42 2010 -0700
23.3 @@ -341,21 +341,19 @@
23.4
23.5
23.6 local
23.7 - structure P = OuterParse and K = OuterKeyword
23.8 -
23.9 - val option_parser = P.group "option"
23.10 - ((P.reserved "sequential" >> K Sequential)
23.11 - || ((P.reserved "default" |-- P.term) >> Default)
23.12 - || (P.reserved "domintros" >> K DomIntros)
23.13 - || (P.reserved "no_partials" >> K No_Partials)
23.14 - || (P.reserved "tailrec" >> K Tailrec))
23.15 + val option_parser = Parse.group "option"
23.16 + ((Parse.reserved "sequential" >> K Sequential)
23.17 + || ((Parse.reserved "default" |-- Parse.term) >> Default)
23.18 + || (Parse.reserved "domintros" >> K DomIntros)
23.19 + || (Parse.reserved "no_partials" >> K No_Partials)
23.20 + || (Parse.reserved "tailrec" >> K Tailrec))
23.21
23.22 fun config_parser default =
23.23 - (Scan.optional (P.$$$ "(" |-- P.!!! (P.list1 option_parser) --| P.$$$ ")") [])
23.24 + (Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Parse.list1 option_parser) --| Parse.$$$ ")") [])
23.25 >> (fn opts => fold apply_opt opts default)
23.26 in
23.27 fun function_parser default_cfg =
23.28 - config_parser default_cfg -- P.fixes -- Parse_Spec.where_alt_specs
23.29 + config_parser default_cfg -- Parse.fixes -- Parse_Spec.where_alt_specs
23.30 end
23.31
23.32
24.1 --- a/src/HOL/Tools/Nitpick/nitpick_isar.ML Mon May 17 18:59:59 2010 -0700
24.2 +++ b/src/HOL/Tools/Nitpick/nitpick_isar.ML Tue May 18 06:28:42 2010 -0700
24.3 @@ -24,8 +24,6 @@
24.4 open Nitpick_Nut
24.5 open Nitpick
24.6
24.7 -structure K = OuterKeyword and P = OuterParse
24.8 -
24.9 val auto = Unsynchronized.ref false;
24.10
24.11 val _ =
24.12 @@ -289,14 +287,14 @@
24.13 extract_params (ProofContext.init_global thy) false (default_raw_params thy)
24.14 o map (apsnd single)
24.15
24.16 -val parse_key = Scan.repeat1 P.typ_group >> space_implode " "
24.17 +val parse_key = Scan.repeat1 Parse.typ_group >> space_implode " "
24.18 val parse_value =
24.19 - Scan.repeat1 (P.minus >> single
24.20 - || Scan.repeat1 (Scan.unless P.minus P.name)
24.21 - || P.$$$ "," |-- P.number >> prefix "," >> single) >> flat
24.22 -val parse_param = parse_key -- Scan.optional (P.$$$ "=" |-- parse_value) []
24.23 + Scan.repeat1 (Parse.minus >> single
24.24 + || Scan.repeat1 (Scan.unless Parse.minus Parse.name)
24.25 + || Parse.$$$ "," |-- Parse.number >> prefix "," >> single) >> flat
24.26 +val parse_param = parse_key -- Scan.optional (Parse.$$$ "=" |-- parse_value) []
24.27 val parse_params =
24.28 - Scan.optional (P.$$$ "[" |-- P.list parse_param --| P.$$$ "]") []
24.29 + Scan.optional (Parse.$$$ "[" |-- Parse.list parse_param --| Parse.$$$ "]") []
24.30
24.31 fun handle_exceptions ctxt f x =
24.32 f x
24.33 @@ -375,15 +373,15 @@
24.34 |> sort_strings |> cat_lines)))))
24.35
24.36 val parse_nitpick_command =
24.37 - (parse_params -- Scan.optional P.nat 1) #>> nitpick_trans
24.38 + (parse_params -- Scan.optional Parse.nat 1) #>> nitpick_trans
24.39 val parse_nitpick_params_command = parse_params #>> nitpick_params_trans
24.40
24.41 -val _ = OuterSyntax.improper_command "nitpick"
24.42 +val _ = Outer_Syntax.improper_command "nitpick"
24.43 "try to find a counterexample for a given subgoal using Nitpick"
24.44 - K.diag parse_nitpick_command
24.45 -val _ = OuterSyntax.command "nitpick_params"
24.46 + Keyword.diag parse_nitpick_command
24.47 +val _ = Outer_Syntax.command "nitpick_params"
24.48 "set and display the default parameters for Nitpick"
24.49 - K.thy_decl parse_nitpick_params_command
24.50 + Keyword.thy_decl parse_nitpick_params_command
24.51
24.52 fun auto_nitpick state =
24.53 if not (!auto) then (false, state) else pick_nits [] true 1 0 state
25.1 --- a/src/HOL/Tools/Predicate_Compile/predicate_compile.ML Mon May 17 18:59:59 2010 -0700
25.2 +++ b/src/HOL/Tools/Predicate_Compile/predicate_compile.ML Tue May 18 06:28:42 2010 -0700
25.3 @@ -192,8 +192,6 @@
25.4
25.5 val setup = Predicate_Compile_Core.setup
25.6
25.7 -local structure P = OuterParse
25.8 -in
25.9
25.10 (* Parser for mode annotations *)
25.11
25.12 @@ -207,15 +205,15 @@
25.13 (parse_mode_tuple_expr --| Args.$$$ "=>" -- parse_mode_expr >> Fun || parse_mode_tuple_expr) xs
25.14
25.15 val mode_and_opt_proposal = parse_mode_expr --
25.16 - Scan.optional (Args.$$$ "as" |-- P.xname >> SOME) NONE
25.17 + Scan.optional (Args.$$$ "as" |-- Parse.xname >> SOME) NONE
25.18
25.19 val opt_modes =
25.20 - Scan.optional (P.$$$ "(" |-- Args.$$$ "modes" |-- P.$$$ ":" |--
25.21 - P.enum "," mode_and_opt_proposal --| P.$$$ ")" >> SOME) NONE
25.22 + Scan.optional (Parse.$$$ "(" |-- Args.$$$ "modes" |-- Parse.$$$ ":" |--
25.23 + Parse.enum "," mode_and_opt_proposal --| Parse.$$$ ")" >> SOME) NONE
25.24
25.25 val opt_expected_modes =
25.26 - Scan.optional (P.$$$ "(" |-- Args.$$$ "expected_modes" |-- P.$$$ ":" |--
25.27 - P.enum "," parse_mode_expr --| P.$$$ ")" >> SOME) NONE
25.28 + Scan.optional (Parse.$$$ "(" |-- Args.$$$ "expected_modes" |-- Parse.$$$ ":" |--
25.29 + Parse.enum "," parse_mode_expr --| Parse.$$$ ")" >> SOME) NONE
25.30
25.31 (* Parser for options *)
25.32
25.33 @@ -224,46 +222,46 @@
25.34 val scan_bool_option = foldl1 (op ||) (map Args.$$$ bool_options)
25.35 val scan_compilation = foldl1 (op ||) (map (fn (s, c) => Args.$$$ s >> K c) compilation_names)
25.36 in
25.37 - Scan.optional (P.$$$ "[" |-- Scan.optional scan_compilation Pred
25.38 - -- P.enum "," scan_bool_option --| P.$$$ "]")
25.39 + Scan.optional (Parse.$$$ "[" |-- Scan.optional scan_compilation Pred
25.40 + -- Parse.enum "," scan_bool_option --| Parse.$$$ "]")
25.41 (Pred, [])
25.42 end
25.43
25.44 -val opt_print_modes = Scan.optional (P.$$$ "(" |-- P.!!! (Scan.repeat1 P.xname --| P.$$$ ")")) [];
25.45 +val opt_print_modes =
25.46 + Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.xname --| Parse.$$$ ")")) [];
25.47
25.48 -val opt_mode = (P.$$$ "_" >> K NONE) || (parse_mode_expr >> SOME)
25.49 +val opt_mode = (Parse.$$$ "_" >> K NONE) || (parse_mode_expr >> SOME)
25.50
25.51 -val opt_param_modes = Scan.optional (P.$$$ "[" |-- Args.$$$ "mode" |-- P.$$$ ":" |--
25.52 - P.enum ", " opt_mode --| P.$$$ "]" >> SOME) NONE
25.53 +val opt_param_modes = Scan.optional (Parse.$$$ "[" |-- Args.$$$ "mode" |-- Parse.$$$ ":" |--
25.54 + Parse.enum ", " opt_mode --| Parse.$$$ "]" >> SOME) NONE
25.55
25.56 val stats = Scan.optional (Args.$$$ "stats" >> K true) false
25.57
25.58 val value_options =
25.59 let
25.60 - val expected_values = Scan.optional (Args.$$$ "expected" |-- P.term >> SOME) NONE
25.61 + val expected_values = Scan.optional (Args.$$$ "expected" |-- Parse.term >> SOME) NONE
25.62 val scan_compilation =
25.63 Scan.optional
25.64 (foldl1 (op ||)
25.65 - (map (fn (s, c) => Args.$$$ s -- P.enum "," P.int >> (fn (_, ps) => (c, ps)))
25.66 + (map (fn (s, c) => Args.$$$ s -- Parse.enum "," Parse.int >> (fn (_, ps) => (c, ps)))
25.67 compilation_names))
25.68 (Pred, [])
25.69 in
25.70 - Scan.optional (P.$$$ "[" |-- (expected_values -- stats) -- scan_compilation --| P.$$$ "]")
25.71 + Scan.optional
25.72 + (Parse.$$$ "[" |-- (expected_values -- stats) -- scan_compilation --| Parse.$$$ "]")
25.73 ((NONE, false), (Pred, []))
25.74 end
25.75
25.76 (* code_pred command and values command *)
25.77
25.78 -val _ = OuterSyntax.local_theory_to_proof "code_pred"
25.79 +val _ = Outer_Syntax.local_theory_to_proof "code_pred"
25.80 "prove equations for predicate specified by intro/elim rules"
25.81 - OuterKeyword.thy_goal
25.82 - (opt_expected_modes -- opt_modes -- scan_options -- P.term_group >> code_pred_cmd)
25.83 + Keyword.thy_goal
25.84 + (opt_expected_modes -- opt_modes -- scan_options -- Parse.term_group >> code_pred_cmd)
25.85
25.86 -val _ = OuterSyntax.improper_command "values" "enumerate and print comprehensions" OuterKeyword.diag
25.87 - (opt_print_modes -- opt_param_modes -- value_options -- Scan.optional P.nat ~1 -- P.term
25.88 +val _ = Outer_Syntax.improper_command "values" "enumerate and print comprehensions" Keyword.diag
25.89 + (opt_print_modes -- opt_param_modes -- value_options -- Scan.optional Parse.nat ~1 -- Parse.term
25.90 >> (fn ((((print_modes, param_modes), options), k), t) => Toplevel.keep
25.91 (Predicate_Compile_Core.values_cmd print_modes param_modes options k t)));
25.92
25.93 end
25.94 -
25.95 -end
26.1 --- a/src/HOL/Tools/Quotient/quotient_def.ML Mon May 17 18:59:59 2010 -0700
26.2 +++ b/src/HOL/Tools/Quotient/quotient_def.ML Tue May 18 06:28:42 2010 -0700
26.3 @@ -91,20 +91,15 @@
26.4 quotient_def ((NONE, NoSyn), (Attrib.empty_binding,
26.5 (Quotient_Term.quotient_lift_const qtys (b, t) ctxt, t))) ctxt
26.6
26.7 -local
26.8 - structure P = OuterParse;
26.9 -in
26.10 -
26.11 -val quotdef_decl = (P.binding >> SOME) -- P.opt_mixfix' --| P.$$$ "where"
26.12 +val quotdef_decl = (Parse.binding >> SOME) -- Parse.opt_mixfix' --| Parse.$$$ "where"
26.13
26.14 val quotdef_parser =
26.15 Scan.optional quotdef_decl (NONE, NoSyn) --
26.16 - P.!!! (Parse_Spec.opt_thm_name ":" -- (P.term --| P.$$$ "is" -- P.term))
26.17 -end
26.18 + Parse.!!! (Parse_Spec.opt_thm_name ":" -- (Parse.term --| Parse.$$$ "is" -- Parse.term))
26.19
26.20 val _ =
26.21 - OuterSyntax.local_theory "quotient_definition"
26.22 + Outer_Syntax.local_theory "quotient_definition"
26.23 "definition for constants over the quotient type"
26.24 - OuterKeyword.thy_decl (quotdef_parser >> (snd oo quotdef_cmd))
26.25 + Keyword.thy_decl (quotdef_parser >> (snd oo quotdef_cmd))
26.26
26.27 end; (* structure *)
27.1 --- a/src/HOL/Tools/Quotient/quotient_info.ML Mon May 17 18:59:59 2010 -0700
27.2 +++ b/src/HOL/Tools/Quotient/quotient_info.ML Tue May 18 06:28:42 2010 -0700
27.3 @@ -91,9 +91,9 @@
27.4
27.5 val maps_attr_parser =
27.6 Args.context -- Scan.lift
27.7 - ((Args.name --| OuterParse.$$$ "=") --
27.8 - (OuterParse.$$$ "(" |-- Args.name --| OuterParse.$$$ "," --
27.9 - Args.name --| OuterParse.$$$ ")"))
27.10 + ((Args.name --| Parse.$$$ "=") --
27.11 + (Parse.$$$ "(" |-- Args.name --| Parse.$$$ "," --
27.12 + Args.name --| Parse.$$$ ")"))
27.13
27.14 val _ = Context.>> (Context.map_theory
27.15 (Attrib.setup @{binding "map"} (maps_attr_parser >> maps_attribute)
27.16 @@ -278,8 +278,8 @@
27.17 (* setup of the printing commands *)
27.18
27.19 fun improper_command (pp_fn, cmd_name, descr_str) =
27.20 - OuterSyntax.improper_command cmd_name descr_str
27.21 - OuterKeyword.diag (Scan.succeed (Toplevel.keep (pp_fn o Toplevel.context_of)))
27.22 + Outer_Syntax.improper_command cmd_name descr_str
27.23 + Keyword.diag (Scan.succeed (Toplevel.keep (pp_fn o Toplevel.context_of)))
27.24
27.25 val _ = map improper_command
27.26 [(print_mapsinfo, "print_quotmaps", "prints out all map functions"),
28.1 --- a/src/HOL/Tools/Quotient/quotient_typ.ML Mon May 17 18:59:59 2010 -0700
28.2 +++ b/src/HOL/Tools/Quotient/quotient_typ.ML Tue May 18 06:28:42 2010 -0700
28.3 @@ -299,16 +299,16 @@
28.4 end
28.5
28.6 val quotspec_parser =
28.7 - OuterParse.and_list1
28.8 - ((OuterParse.type_args -- OuterParse.binding) --
28.9 - OuterParse.opt_mixfix -- (OuterParse.$$$ "=" |-- OuterParse.typ) --
28.10 - (OuterParse.$$$ "/" |-- OuterParse.term))
28.11 + Parse.and_list1
28.12 + ((Parse.type_args -- Parse.binding) --
28.13 + Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.typ) --
28.14 + (Parse.$$$ "/" |-- Parse.term))
28.15
28.16 -val _ = OuterKeyword.keyword "/"
28.17 +val _ = Keyword.keyword "/"
28.18
28.19 val _ =
28.20 - OuterSyntax.local_theory_to_proof "quotient_type"
28.21 + Outer_Syntax.local_theory_to_proof "quotient_type"
28.22 "quotient type definitions (require equivalence proofs)"
28.23 - OuterKeyword.thy_goal (quotspec_parser >> quotient_type_cmd)
28.24 + Keyword.thy_goal (quotspec_parser >> quotient_type_cmd)
28.25
28.26 end; (* structure *)
29.1 --- a/src/HOL/Tools/SMT/smt_solver.ML Mon May 17 18:59:59 2010 -0700
29.2 +++ b/src/HOL/Tools/SMT/smt_solver.ML Tue May 18 06:28:42 2010 -0700
29.3 @@ -311,14 +311,14 @@
29.4
29.5 val setup =
29.6 Attrib.setup (Binding.name "smt_solver")
29.7 - (Scan.lift (OuterParse.$$$ "=" |-- Args.name) >>
29.8 + (Scan.lift (Parse.$$$ "=" |-- Args.name) >>
29.9 (Thm.declaration_attribute o K o select_solver))
29.10 "SMT solver configuration" #>
29.11 setup_timeout #>
29.12 setup_trace #>
29.13 setup_fixed_certificates #>
29.14 Attrib.setup (Binding.name "smt_certificates")
29.15 - (Scan.lift (OuterParse.$$$ "=" |-- Args.name) >>
29.16 + (Scan.lift (Parse.$$$ "=" |-- Args.name) >>
29.17 (Thm.declaration_attribute o K o select_certificates))
29.18 "SMT certificates" #>
29.19 Method.setup (Binding.name "smt") smt_method
29.20 @@ -353,9 +353,9 @@
29.21 Pretty.big_list "Solver-specific settings:" infos])
29.22 end
29.23
29.24 -val _ = OuterSyntax.improper_command "smt_status"
29.25 - "Show the available SMT solvers and the currently selected solver."
29.26 - OuterKeyword.diag
29.27 +val _ =
29.28 + Outer_Syntax.improper_command "smt_status"
29.29 + "show the available SMT solvers and the currently selected solver" Keyword.diag
29.30 (Scan.succeed (Toplevel.no_timing o Toplevel.keep (fn state =>
29.31 print_setup (Context.Proof (Toplevel.context_of state)))))
29.32
30.1 --- a/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Mon May 17 18:59:59 2010 -0700
30.2 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_isar.ML Tue May 18 06:28:42 2010 -0700
30.3 @@ -24,8 +24,6 @@
30.4 open ATP_Systems
30.5 open Sledgehammer_Fact_Minimizer
30.6
30.7 -structure K = OuterKeyword and P = OuterParse
30.8 -
30.9 (** Proof method used in Isar proofs **)
30.10
30.11 val neg_clausify_setup =
30.12 @@ -36,7 +34,7 @@
30.13 (** Attribute for converting a theorem into clauses **)
30.14
30.15 val parse_clausify_attribute : attribute context_parser =
30.16 - Scan.lift OuterParse.nat
30.17 + Scan.lift Parse.nat
30.18 >> (fn i => Thm.rule_attribute (fn context => fn th =>
30.19 let val thy = Context.theory_of context in
30.20 Meson.make_meta_clause (nth (cnf_axiom thy th) i)
30.21 @@ -321,13 +319,13 @@
30.22 params |> map string_for_raw_param
30.23 |> sort_strings |> cat_lines)))))
30.24
30.25 -val parse_key = Scan.repeat1 P.typ_group >> space_implode " "
30.26 -val parse_value = Scan.repeat1 P.xname
30.27 -val parse_param = parse_key -- Scan.optional (P.$$$ "=" |-- parse_value) []
30.28 -val parse_params = Scan.optional (Args.bracks (P.list parse_param)) []
30.29 +val parse_key = Scan.repeat1 Parse.typ_group >> space_implode " "
30.30 +val parse_value = Scan.repeat1 Parse.xname
30.31 +val parse_param = parse_key -- Scan.optional (Parse.$$$ "=" |-- parse_value) []
30.32 +val parse_params = Scan.optional (Args.bracks (Parse.list parse_param)) []
30.33 val parse_fact_refs =
30.34 - Scan.repeat1 (Scan.unless (P.name -- Args.colon)
30.35 - (P.xname -- Scan.option Attrib.thm_sel)
30.36 + Scan.repeat1 (Scan.unless (Parse.name -- Args.colon)
30.37 + (Parse.xname -- Scan.option Attrib.thm_sel)
30.38 >> (fn (name, interval) =>
30.39 Facts.Named ((name, Position.none), interval)))
30.40 val parse_relevance_chunk =
30.41 @@ -340,18 +338,18 @@
30.42 >> merge_relevance_overrides))
30.43 (add_to_relevance_override [])
30.44 val parse_sledgehammer_command =
30.45 - (Scan.optional P.short_ident runN -- parse_params -- parse_relevance_override
30.46 - -- Scan.option P.nat) #>> sledgehammer_trans
30.47 + (Scan.optional Parse.short_ident runN -- parse_params -- parse_relevance_override
30.48 + -- Scan.option Parse.nat) #>> sledgehammer_trans
30.49 val parse_sledgehammer_params_command =
30.50 parse_params #>> sledgehammer_params_trans
30.51
30.52 val _ =
30.53 - OuterSyntax.improper_command sledgehammerN
30.54 - "search for first-order proof using automatic theorem provers" K.diag
30.55 + Outer_Syntax.improper_command sledgehammerN
30.56 + "search for first-order proof using automatic theorem provers" Keyword.diag
30.57 parse_sledgehammer_command
30.58 val _ =
30.59 - OuterSyntax.command sledgehammer_paramsN
30.60 - "set and display the default parameters for Sledgehammer" K.thy_decl
30.61 + Outer_Syntax.command sledgehammer_paramsN
30.62 + "set and display the default parameters for Sledgehammer" Keyword.thy_decl
30.63 parse_sledgehammer_params_command
30.64
30.65 val setup =
31.1 --- a/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML Mon May 17 18:59:59 2010 -0700
31.2 +++ b/src/HOL/Tools/Sledgehammer/sledgehammer_util.ML Tue May 18 06:28:42 2010 -0700
31.3 @@ -102,7 +102,7 @@
31.4 let val s = unyxml y in
31.5 y |> ((not (is_long_identifier (perhaps (try (unprefix "'")) s)) andalso
31.6 not (is_long_identifier (perhaps (try (unprefix "?")) s))) orelse
31.7 - OuterKeyword.is_keyword s) ? quote
31.8 + Keyword.is_keyword s) ? quote
31.9 end
31.10
31.11 fun monomorphic_term subst t =
32.1 --- a/src/HOL/Tools/choice_specification.ML Mon May 17 18:59:59 2010 -0700
32.2 +++ b/src/HOL/Tools/choice_specification.ML Tue May 18 06:28:42 2010 -0700
32.3 @@ -232,33 +232,28 @@
32.4
32.5 (* outer syntax *)
32.6
32.7 -local structure P = OuterParse and K = OuterKeyword in
32.8 -
32.9 -val opt_name = Scan.optional (P.name --| P.$$$ ":") ""
32.10 -val opt_overloaded = P.opt_keyword "overloaded";
32.11 +val opt_name = Scan.optional (Parse.name --| Parse.$$$ ":") ""
32.12 +val opt_overloaded = Parse.opt_keyword "overloaded"
32.13
32.14 val specification_decl =
32.15 - P.$$$ "(" |-- Scan.repeat1 (opt_name -- P.term -- opt_overloaded) --| P.$$$ ")" --
32.16 - Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- P.prop)
32.17 + Parse.$$$ "(" |-- Scan.repeat1 (opt_name -- Parse.term -- opt_overloaded) --| Parse.$$$ ")" --
32.18 + Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- Parse.prop)
32.19
32.20 val _ =
32.21 - OuterSyntax.command "specification" "define constants by specification" K.thy_goal
32.22 + Outer_Syntax.command "specification" "define constants by specification" Keyword.thy_goal
32.23 (specification_decl >> (fn (cos,alt_props) =>
32.24 Toplevel.print o (Toplevel.theory_to_proof
32.25 (process_spec NONE cos alt_props))))
32.26
32.27 val ax_specification_decl =
32.28 - P.name --
32.29 - (P.$$$ "(" |-- Scan.repeat1 (opt_name -- P.term -- opt_overloaded) --| P.$$$ ")" --
32.30 - Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- P.prop))
32.31 + Parse.name --
32.32 + (Parse.$$$ "(" |-- Scan.repeat1 (opt_name -- Parse.term -- opt_overloaded) --| Parse.$$$ ")" --
32.33 + Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- Parse.prop))
32.34
32.35 val _ =
32.36 - OuterSyntax.command "ax_specification" "define constants by specification" K.thy_goal
32.37 + Outer_Syntax.command "ax_specification" "define constants by specification" Keyword.thy_goal
32.38 (ax_specification_decl >> (fn (axname,(cos,alt_props)) =>
32.39 Toplevel.print o (Toplevel.theory_to_proof
32.40 (process_spec (SOME axname) cos alt_props))))
32.41
32.42 end
32.43 -
32.44 -
32.45 -end
33.1 --- a/src/HOL/Tools/inductive.ML Mon May 17 18:59:59 2010 -0700
33.2 +++ b/src/HOL/Tools/inductive.ML Tue May 18 06:28:42 2010 -0700
33.3 @@ -83,8 +83,7 @@
33.4 (binding * string option * mixfix) list ->
33.5 (Attrib.binding * string) list -> (Facts.ref * Attrib.src list) list ->
33.6 bool -> local_theory -> inductive_result * local_theory
33.7 - val gen_ind_decl: add_ind_def -> bool ->
33.8 - OuterParse.token list -> (bool -> local_theory -> local_theory) * OuterParse.token list
33.9 + val gen_ind_decl: add_ind_def -> bool -> (bool -> local_theory -> local_theory) parser
33.10 end;
33.11
33.12 structure Inductive: INDUCTIVE =
33.13 @@ -971,32 +970,28 @@
33.14
33.15 (* outer syntax *)
33.16
33.17 -local structure P = OuterParse and K = OuterKeyword in
33.18 -
33.19 -val _ = OuterKeyword.keyword "monos";
33.20 +val _ = Keyword.keyword "monos";
33.21
33.22 fun gen_ind_decl mk_def coind =
33.23 - P.fixes -- P.for_fixes --
33.24 + Parse.fixes -- Parse.for_fixes --
33.25 Scan.optional Parse_Spec.where_alt_specs [] --
33.26 - Scan.optional (P.$$$ "monos" |-- P.!!! Parse_Spec.xthms1) []
33.27 + Scan.optional (Parse.$$$ "monos" |-- Parse.!!! Parse_Spec.xthms1) []
33.28 >> (fn (((preds, params), specs), monos) =>
33.29 (snd oo gen_add_inductive mk_def true coind preds params specs monos));
33.30
33.31 val ind_decl = gen_ind_decl add_ind_def;
33.32
33.33 val _ =
33.34 - OuterSyntax.local_theory' "inductive" "define inductive predicates" K.thy_decl
33.35 + Outer_Syntax.local_theory' "inductive" "define inductive predicates" Keyword.thy_decl
33.36 (ind_decl false);
33.37
33.38 val _ =
33.39 - OuterSyntax.local_theory' "coinductive" "define coinductive predicates" K.thy_decl
33.40 + Outer_Syntax.local_theory' "coinductive" "define coinductive predicates" Keyword.thy_decl
33.41 (ind_decl true);
33.42
33.43 val _ =
33.44 - OuterSyntax.local_theory "inductive_cases"
33.45 - "create simplified instances of elimination rules (improper)" K.thy_script
33.46 - (P.and_list1 Parse_Spec.specs >> (snd oo inductive_cases));
33.47 + Outer_Syntax.local_theory "inductive_cases"
33.48 + "create simplified instances of elimination rules (improper)" Keyword.thy_script
33.49 + (Parse.and_list1 Parse_Spec.specs >> (snd oo inductive_cases));
33.50
33.51 end;
33.52 -
33.53 -end;
34.1 --- a/src/HOL/Tools/inductive_codegen.ML Mon May 17 18:59:59 2010 -0700
34.2 +++ b/src/HOL/Tools/inductive_codegen.ML Tue May 18 06:28:42 2010 -0700
34.3 @@ -775,7 +775,7 @@
34.4 add_codegen "inductive" inductive_codegen #>
34.5 Attrib.setup @{binding code_ind}
34.6 (Scan.lift (Scan.option (Args.$$$ "target" |-- Args.colon |-- Args.name) --
34.7 - Scan.option (Args.$$$ "params" |-- Args.colon |-- OuterParse.nat) >> uncurry add))
34.8 + Scan.option (Args.$$$ "params" |-- Args.colon |-- Parse.nat) >> uncurry add))
34.9 "introduction rules for executable predicates";
34.10
34.11 (**** Quickcheck involving inductive predicates ****)
35.1 --- a/src/HOL/Tools/inductive_set.ML Mon May 17 18:59:59 2010 -0700
35.2 +++ b/src/HOL/Tools/inductive_set.ML Tue May 18 06:28:42 2010 -0700
35.3 @@ -562,18 +562,14 @@
35.4
35.5 (* outer syntax *)
35.6
35.7 -local structure P = OuterParse and K = OuterKeyword in
35.8 -
35.9 val ind_set_decl = Inductive.gen_ind_decl add_ind_set_def;
35.10
35.11 val _ =
35.12 - OuterSyntax.local_theory' "inductive_set" "define inductive sets" K.thy_decl
35.13 + Outer_Syntax.local_theory' "inductive_set" "define inductive sets" Keyword.thy_decl
35.14 (ind_set_decl false);
35.15
35.16 val _ =
35.17 - OuterSyntax.local_theory' "coinductive_set" "define coinductive sets" K.thy_decl
35.18 + Outer_Syntax.local_theory' "coinductive_set" "define coinductive sets" Keyword.thy_decl
35.19 (ind_set_decl true);
35.20
35.21 end;
35.22 -
35.23 -end;
36.1 --- a/src/HOL/Tools/primrec.ML Mon May 17 18:59:59 2010 -0700
36.2 +++ b/src/HOL/Tools/primrec.ML Tue May 18 06:28:42 2010 -0700
36.3 @@ -307,29 +307,26 @@
36.4
36.5 (* outer syntax *)
36.6
36.7 -local structure P = OuterParse and K = OuterKeyword in
36.8 -
36.9 val opt_unchecked_name =
36.10 - Scan.optional (P.$$$ "(" |-- P.!!!
36.11 - (((P.$$$ "unchecked" >> K true) -- Scan.optional P.name "" ||
36.12 - P.name >> pair false) --| P.$$$ ")")) (false, "");
36.13 + Scan.optional (Parse.$$$ "(" |-- Parse.!!!
36.14 + (((Parse.$$$ "unchecked" >> K true) -- Scan.optional Parse.name "" ||
36.15 + Parse.name >> pair false) --| Parse.$$$ ")")) (false, "");
36.16
36.17 val old_primrec_decl =
36.18 opt_unchecked_name --
36.19 - Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- P.prop);
36.20 + Scan.repeat1 ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- Parse.prop);
36.21
36.22 -val primrec_decl = P.opt_target -- P.fixes -- Parse_Spec.where_alt_specs;
36.23 +val primrec_decl = Parse.opt_target -- Parse.fixes -- Parse_Spec.where_alt_specs;
36.24
36.25 val _ =
36.26 - OuterSyntax.command "primrec" "define primitive recursive functions on datatypes" K.thy_decl
36.27 + Outer_Syntax.command "primrec" "define primitive recursive functions on datatypes"
36.28 + Keyword.thy_decl
36.29 ((primrec_decl >> (fn ((opt_target, fixes), specs) =>
36.30 Toplevel.local_theory opt_target (add_primrec_cmd fixes specs #> snd)))
36.31 || (old_primrec_decl >> (fn ((unchecked, alt_name), eqns) =>
36.32 Toplevel.theory (snd o
36.33 (if unchecked then OldPrimrec.add_primrec_unchecked else OldPrimrec.add_primrec)
36.34 - alt_name (map P.triple_swap eqns) o
36.35 + alt_name (map Parse.triple_swap eqns) o
36.36 tap (fn _ => legacy_feature "Old variant of 'primrec' command -- use new syntax instead")))));
36.37
36.38 end;
36.39 -
36.40 -end;
37.1 --- a/src/HOL/Tools/recdef.ML Mon May 17 18:59:59 2010 -0700
37.2 +++ b/src/HOL/Tools/recdef.ML Tue May 18 06:28:42 2010 -0700
37.3 @@ -289,40 +289,39 @@
37.4
37.5 (* outer syntax *)
37.6
37.7 -local structure P = OuterParse and K = OuterKeyword in
37.8 -
37.9 -val _ = List.app OuterKeyword.keyword ["permissive", "congs", "hints"];
37.10 +val _ = List.app Keyword.keyword ["permissive", "congs", "hints"];
37.11
37.12 val hints =
37.13 - P.$$$ "(" |-- P.!!! (P.position (P.$$$ "hints" -- Args.parse) --| P.$$$ ")") >> Args.src;
37.14 + Parse.$$$ "(" |--
37.15 + Parse.!!! (Parse.position (Parse.$$$ "hints" -- Args.parse) --| Parse.$$$ ")") >> Args.src;
37.16
37.17 val recdef_decl =
37.18 - Scan.optional (P.$$$ "(" -- P.!!! (P.$$$ "permissive" -- P.$$$ ")") >> K false) true --
37.19 - P.name -- P.term -- Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- P.prop)
37.20 + Scan.optional
37.21 + (Parse.$$$ "(" -- Parse.!!! (Parse.$$$ "permissive" -- Parse.$$$ ")") >> K false) true --
37.22 + Parse.name -- Parse.term -- Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- Parse.prop)
37.23 -- Scan.option hints
37.24 - >> (fn ((((p, f), R), eqs), src) => #1 o add_recdef p f R (map P.triple_swap eqs) src);
37.25 + >> (fn ((((p, f), R), eqs), src) => #1 o add_recdef p f R (map Parse.triple_swap eqs) src);
37.26
37.27 val _ =
37.28 - OuterSyntax.command "recdef" "define general recursive functions (TFL)" K.thy_decl
37.29 + Outer_Syntax.command "recdef" "define general recursive functions (TFL)" Keyword.thy_decl
37.30 (recdef_decl >> Toplevel.theory);
37.31
37.32
37.33 val defer_recdef_decl =
37.34 - P.name -- Scan.repeat1 P.prop --
37.35 - Scan.optional (P.$$$ "(" |-- P.$$$ "congs" |-- P.!!! (Parse_Spec.xthms1 --| P.$$$ ")")) []
37.36 + Parse.name -- Scan.repeat1 Parse.prop --
37.37 + Scan.optional
37.38 + (Parse.$$$ "(" |-- Parse.$$$ "congs" |-- Parse.!!! (Parse_Spec.xthms1 --| Parse.$$$ ")")) []
37.39 >> (fn ((f, eqs), congs) => #1 o defer_recdef f eqs congs);
37.40
37.41 val _ =
37.42 - OuterSyntax.command "defer_recdef" "defer general recursive functions (TFL)" K.thy_decl
37.43 + Outer_Syntax.command "defer_recdef" "defer general recursive functions (TFL)" Keyword.thy_decl
37.44 (defer_recdef_decl >> Toplevel.theory);
37.45
37.46 val _ =
37.47 - OuterSyntax.local_theory_to_proof' "recdef_tc" "recommence proof of termination condition (TFL)"
37.48 - K.thy_goal
37.49 - ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- P.xname --
37.50 - Scan.option (P.$$$ "(" |-- P.nat --| P.$$$ ")")
37.51 + Outer_Syntax.local_theory_to_proof' "recdef_tc" "recommence proof of termination condition (TFL)"
37.52 + Keyword.thy_goal
37.53 + ((Parse_Spec.opt_thm_name ":" >> apfst Binding.name_of) -- Parse.xname --
37.54 + Scan.option (Parse.$$$ "(" |-- Parse.nat --| Parse.$$$ ")")
37.55 >> (fn ((thm_name, name), i) => recdef_tc thm_name name i));
37.56
37.57 end;
37.58 -
37.59 -end;
38.1 --- a/src/HOL/Tools/record.ML Mon May 17 18:59:59 2010 -0700
38.2 +++ b/src/HOL/Tools/record.ML Tue May 18 06:28:42 2010 -0700
38.3 @@ -2460,17 +2460,14 @@
38.4
38.5 (* outer syntax *)
38.6
38.7 -local structure P = OuterParse and K = OuterKeyword in
38.8 -
38.9 val _ =
38.10 - OuterSyntax.command "record" "define extensible record" K.thy_decl
38.11 - (P.type_args_constrained -- P.binding --
38.12 - (P.$$$ "=" |-- Scan.option (P.typ --| P.$$$ "+") -- Scan.repeat1 P.const_binding)
38.13 + Outer_Syntax.command "record" "define extensible record" Keyword.thy_decl
38.14 + (Parse.type_args_constrained -- Parse.binding --
38.15 + (Parse.$$$ "=" |-- Scan.option (Parse.typ --| Parse.$$$ "+") --
38.16 + Scan.repeat1 Parse.const_binding)
38.17 >> (fn (x, (y, z)) => Toplevel.theory (add_record_cmd false x y z)));
38.18
38.19 end;
38.20
38.21 -end;
38.22 -
38.23 structure Basic_Record: BASIC_RECORD = Record;
38.24 open Basic_Record;
39.1 --- a/src/HOL/Tools/refute_isar.ML Mon May 17 18:59:59 2010 -0700
39.2 +++ b/src/HOL/Tools/refute_isar.ML Tue May 18 06:28:42 2010 -0700
39.3 @@ -12,19 +12,16 @@
39.4
39.5 (*optional list of arguments of the form [name1=value1, name2=value2, ...]*)
39.6
39.7 -val scan_parm =
39.8 - OuterParse.name
39.9 - -- (Scan.optional (OuterParse.$$$ "=" |-- OuterParse.name) "true")
39.10 -val scan_parms = Scan.optional
39.11 - (OuterParse.$$$ "[" |-- OuterParse.list scan_parm --| OuterParse.$$$ "]") [];
39.12 +val scan_parm = Parse.name -- (Scan.optional (Parse.$$$ "=" |-- Parse.name) "true")
39.13 +val scan_parms = Scan.optional (Parse.$$$ "[" |-- Parse.list scan_parm --| Parse.$$$ "]") [];
39.14
39.15
39.16 (* 'refute' command *)
39.17
39.18 val _ =
39.19 - OuterSyntax.improper_command "refute"
39.20 - "try to find a model that refutes a given subgoal" OuterKeyword.diag
39.21 - (scan_parms -- Scan.optional OuterParse.nat 1 >>
39.22 + Outer_Syntax.improper_command "refute"
39.23 + "try to find a model that refutes a given subgoal" Keyword.diag
39.24 + (scan_parms -- Scan.optional Parse.nat 1 >>
39.25 (fn (parms, i) =>
39.26 Toplevel.keep (fn state =>
39.27 let
39.28 @@ -36,8 +33,8 @@
39.29 (* 'refute_params' command *)
39.30
39.31 val _ =
39.32 - OuterSyntax.command "refute_params"
39.33 - "show/store default parameters for the 'refute' command" OuterKeyword.thy_decl
39.34 + Outer_Syntax.command "refute_params"
39.35 + "show/store default parameters for the 'refute' command" Keyword.thy_decl
39.36 (scan_parms >> (fn parms =>
39.37 Toplevel.theory (fn thy =>
39.38 let
40.1 --- a/src/HOL/Tools/split_rule.ML Mon May 17 18:59:59 2010 -0700
40.2 +++ b/src/HOL/Tools/split_rule.ML Tue May 18 06:28:42 2010 -0700
40.3 @@ -135,7 +135,7 @@
40.4 Attrib.setup @{binding split_format}
40.5 (Scan.lift
40.6 (Args.parens (Args.$$$ "complete") >> K (Thm.rule_attribute (K complete_split_rule)) ||
40.7 - OuterParse.and_list1 (Scan.repeat Args.name_source)
40.8 + Parse.and_list1 (Scan.repeat Args.name_source)
40.9 >> (fn xss => Thm.rule_attribute (fn context =>
40.10 split_rule_goal (Context.proof_of context) xss))))
40.11 "split pair-typed subterms in premises, or function arguments" #>
41.1 --- a/src/HOL/Tools/typedef.ML Mon May 17 18:59:59 2010 -0700
41.2 +++ b/src/HOL/Tools/typedef.ML Tue May 18 06:28:42 2010 -0700
41.3 @@ -296,22 +296,19 @@
41.4
41.5 (** outer syntax **)
41.6
41.7 -local structure P = OuterParse in
41.8 -
41.9 -val _ = OuterKeyword.keyword "morphisms";
41.10 +val _ = Keyword.keyword "morphisms";
41.11
41.12 val _ =
41.13 - OuterSyntax.local_theory_to_proof "typedef" "HOL type definition (requires non-emptiness proof)"
41.14 - OuterKeyword.thy_goal
41.15 - (Scan.optional (P.$$$ "(" |--
41.16 - ((P.$$$ "open" >> K false) -- Scan.option P.binding ||
41.17 - P.binding >> (fn s => (true, SOME s))) --| P.$$$ ")") (true, NONE) --
41.18 - (P.type_args_constrained -- P.binding) -- P.opt_mixfix -- (P.$$$ "=" |-- P.term) --
41.19 - Scan.option (P.$$$ "morphisms" |-- P.!!! (P.binding -- P.binding))
41.20 + Outer_Syntax.local_theory_to_proof "typedef" "HOL type definition (requires non-emptiness proof)"
41.21 + Keyword.thy_goal
41.22 + (Scan.optional (Parse.$$$ "(" |--
41.23 + ((Parse.$$$ "open" >> K false) -- Scan.option Parse.binding ||
41.24 + Parse.binding >> (fn s => (true, SOME s))) --| Parse.$$$ ")") (true, NONE) --
41.25 + (Parse.type_args_constrained -- Parse.binding) --
41.26 + Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.term) --
41.27 + Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding))
41.28 >> (fn ((((((def, opt_name), (args, t)), mx), A), morphs)) =>
41.29 typedef_cmd ((def, the_default t opt_name), (t, args, mx), A, morphs)));
41.30
41.31 end;
41.32
41.33 -end;
41.34 -
42.1 --- a/src/HOLCF/IOA/meta_theory/automaton.ML Mon May 17 18:59:59 2010 -0700
42.2 +++ b/src/HOLCF/IOA/meta_theory/automaton.ML Tue May 18 06:28:42 2010 -0700
42.3 @@ -485,52 +485,50 @@
42.4
42.5 (* outer syntax *)
42.6
42.7 -local structure P = OuterParse and K = OuterKeyword in
42.8 -
42.9 -val _ = List.app OuterKeyword.keyword ["signature", "actions", "inputs",
42.10 +val _ = List.app Keyword.keyword ["signature", "actions", "inputs",
42.11 "outputs", "internals", "states", "initially", "transitions", "pre",
42.12 "post", "transrel", ":=", "compose", "hide_action", "in", "restrict", "to",
42.13 "rename"];
42.14
42.15 -val actionlist = P.list1 P.term;
42.16 -val inputslist = P.$$$ "inputs" |-- P.!!! actionlist;
42.17 -val outputslist = P.$$$ "outputs" |-- P.!!! actionlist;
42.18 -val internalslist = P.$$$ "internals" |-- P.!!! actionlist;
42.19 -val stateslist = P.$$$ "states" |-- P.!!! (Scan.repeat1 (P.name --| P.$$$ "::" -- P.typ));
42.20 -val initial = P.$$$ "initially" |-- P.!!! P.term;
42.21 -val assign_list = P.list1 (P.name --| P.$$$ ":=" -- P.!!! P.term);
42.22 -val pre = P.$$$ "pre" |-- P.!!! P.term;
42.23 -val post = P.$$$ "post" |-- P.!!! assign_list;
42.24 +val actionlist = Parse.list1 Parse.term;
42.25 +val inputslist = Parse.$$$ "inputs" |-- Parse.!!! actionlist;
42.26 +val outputslist = Parse.$$$ "outputs" |-- Parse.!!! actionlist;
42.27 +val internalslist = Parse.$$$ "internals" |-- Parse.!!! actionlist;
42.28 +val stateslist =
42.29 + Parse.$$$ "states" |-- Parse.!!! (Scan.repeat1 (Parse.name --| Parse.$$$ "::" -- Parse.typ));
42.30 +val initial = Parse.$$$ "initially" |-- Parse.!!! Parse.term;
42.31 +val assign_list = Parse.list1 (Parse.name --| Parse.$$$ ":=" -- Parse.!!! Parse.term);
42.32 +val pre = Parse.$$$ "pre" |-- Parse.!!! Parse.term;
42.33 +val post = Parse.$$$ "post" |-- Parse.!!! assign_list;
42.34 val pre1 = (pre -- (Scan.optional post [])) >> mk_trans_of_prepost;
42.35 val post1 = ((Scan.optional pre "True") -- post) >> mk_trans_of_prepost;
42.36 -val transrel = (P.$$$ "transrel" |-- P.!!! P.term) >> mk_trans_of_rel;
42.37 -val transition = P.term -- (transrel || pre1 || post1);
42.38 -val translist = P.$$$ "transitions" |-- P.!!! (Scan.repeat1 transition);
42.39 +val transrel = (Parse.$$$ "transrel" |-- Parse.!!! Parse.term) >> mk_trans_of_rel;
42.40 +val transition = Parse.term -- (transrel || pre1 || post1);
42.41 +val translist = Parse.$$$ "transitions" |-- Parse.!!! (Scan.repeat1 transition);
42.42
42.43 val ioa_decl =
42.44 - (P.name -- (P.$$$ "=" |--
42.45 - (P.$$$ "signature" |--
42.46 - P.!!! (P.$$$ "actions" |--
42.47 - (P.typ --
42.48 + (Parse.name -- (Parse.$$$ "=" |--
42.49 + (Parse.$$$ "signature" |--
42.50 + Parse.!!! (Parse.$$$ "actions" |--
42.51 + (Parse.typ --
42.52 (Scan.optional inputslist []) --
42.53 (Scan.optional outputslist []) --
42.54 (Scan.optional internalslist []) --
42.55 stateslist --
42.56 (Scan.optional initial "True") --
42.57 translist))))) >> mk_ioa_decl ||
42.58 - (P.name -- (P.$$$ "=" |-- (P.$$$ "compose" |-- P.!!! (P.list1 P.name))))
42.59 + (Parse.name -- (Parse.$$$ "=" |-- (Parse.$$$ "compose" |-- Parse.!!! (Parse.list1 Parse.name))))
42.60 >> mk_composition_decl ||
42.61 - (P.name -- (P.$$$ "=" |-- (P.$$$ "hide_action" |--
42.62 - P.!!! (P.list1 P.term -- (P.$$$ "in" |-- P.name))))) >> mk_hiding_decl ||
42.63 - (P.name -- (P.$$$ "=" |-- (P.$$$ "restrict" |--
42.64 - P.!!! (P.name -- (P.$$$ "to" |-- P.list1 P.term))))) >> mk_restriction_decl ||
42.65 - (P.name -- (P.$$$ "=" |-- (P.$$$ "rename" |-- (P.!!! (P.name -- (P.$$$ "to" |-- P.term))))))
42.66 + (Parse.name -- (Parse.$$$ "=" |-- (Parse.$$$ "hide_action" |--
42.67 + Parse.!!! (Parse.list1 Parse.term -- (Parse.$$$ "in" |-- Parse.name))))) >> mk_hiding_decl ||
42.68 + (Parse.name -- (Parse.$$$ "=" |-- (Parse.$$$ "restrict" |--
42.69 + Parse.!!! (Parse.name -- (Parse.$$$ "to" |-- Parse.list1 Parse.term))))) >> mk_restriction_decl ||
42.70 + (Parse.name -- (Parse.$$$ "=" |--
42.71 + (Parse.$$$ "rename" |-- (Parse.!!! (Parse.name -- (Parse.$$$ "to" |-- Parse.term))))))
42.72 >> mk_rename_decl;
42.73
42.74 val _ =
42.75 - OuterSyntax.command "automaton" "define Lynch/Vaandrager-style I/O automaton" K.thy_decl
42.76 + Outer_Syntax.command "automaton" "define Lynch/Vaandrager-style I/O automaton" Keyword.thy_decl
42.77 (ioa_decl >> Toplevel.theory);
42.78
42.79 end;
42.80 -
42.81 -end;
43.1 --- a/src/HOLCF/Tools/Domain/domain_extender.ML Mon May 17 18:59:59 2010 -0700
43.2 +++ b/src/HOLCF/Tools/Domain/domain_extender.ML Tue May 18 06:28:42 2010 -0700
43.3 @@ -224,27 +224,25 @@
43.4
43.5 (** outer syntax **)
43.6
43.7 -local structure P = OuterParse and K = OuterKeyword in
43.8 -
43.9 -val _ = OuterKeyword.keyword "lazy";
43.10 +val _ = Keyword.keyword "lazy";
43.11
43.12 val dest_decl : (bool * binding option * string) parser =
43.13 - P.$$$ "(" |-- Scan.optional (P.$$$ "lazy" >> K true) false --
43.14 - (P.binding >> SOME) -- (P.$$$ "::" |-- P.typ) --| P.$$$ ")" >> P.triple1
43.15 - || P.$$$ "(" |-- P.$$$ "lazy" |-- P.typ --| P.$$$ ")"
43.16 + Parse.$$$ "(" |-- Scan.optional (Parse.$$$ "lazy" >> K true) false --
43.17 + (Parse.binding >> SOME) -- (Parse.$$$ "::" |-- Parse.typ) --| Parse.$$$ ")" >> Parse.triple1
43.18 + || Parse.$$$ "(" |-- Parse.$$$ "lazy" |-- Parse.typ --| Parse.$$$ ")"
43.19 >> (fn t => (true,NONE,t))
43.20 - || P.typ >> (fn t => (false,NONE,t));
43.21 + || Parse.typ >> (fn t => (false,NONE,t));
43.22
43.23 val cons_decl =
43.24 - P.binding -- Scan.repeat dest_decl -- P.opt_mixfix;
43.25 + Parse.binding -- Scan.repeat dest_decl -- Parse.opt_mixfix;
43.26
43.27 val domain_decl =
43.28 - (P.type_args_constrained -- P.binding -- P.opt_mixfix) --
43.29 - (P.$$$ "=" |-- P.enum1 "|" cons_decl);
43.30 + (Parse.type_args_constrained -- Parse.binding -- Parse.opt_mixfix) --
43.31 + (Parse.$$$ "=" |-- Parse.enum1 "|" cons_decl);
43.32
43.33 val domains_decl =
43.34 - Scan.option (P.$$$ "(" |-- P.binding --| P.$$$ ")") --
43.35 - P.and_list1 domain_decl;
43.36 + Scan.option (Parse.$$$ "(" |-- Parse.binding --| Parse.$$$ ")") --
43.37 + Parse.and_list1 domain_decl;
43.38
43.39 fun mk_domain
43.40 (definitional : bool)
43.41 @@ -267,13 +265,11 @@
43.42 end;
43.43
43.44 val _ =
43.45 - OuterSyntax.command "domain" "define recursive domains (HOLCF)"
43.46 - K.thy_decl (domains_decl >> (Toplevel.theory o mk_domain false));
43.47 + Outer_Syntax.command "domain" "define recursive domains (HOLCF)"
43.48 + Keyword.thy_decl (domains_decl >> (Toplevel.theory o mk_domain false));
43.49
43.50 val _ =
43.51 - OuterSyntax.command "new_domain" "define recursive domains (HOLCF)"
43.52 - K.thy_decl (domains_decl >> (Toplevel.theory o mk_domain true));
43.53 + Outer_Syntax.command "new_domain" "define recursive domains (HOLCF)"
43.54 + Keyword.thy_decl (domains_decl >> (Toplevel.theory o mk_domain true));
43.55
43.56 end;
43.57 -
43.58 -end;
44.1 --- a/src/HOLCF/Tools/Domain/domain_isomorphism.ML Mon May 17 18:59:59 2010 -0700
44.2 +++ b/src/HOLCF/Tools/Domain/domain_isomorphism.ML Tue May 18 06:28:42 2010 -0700
44.3 @@ -707,21 +707,20 @@
44.4
44.5 local
44.6
44.7 -structure P = OuterParse and K = OuterKeyword
44.8 -
44.9 val parse_domain_iso :
44.10 (string list * binding * mixfix * string * (binding * binding) option)
44.11 parser =
44.12 - (P.type_args -- P.binding -- P.opt_mixfix -- (P.$$$ "=" |-- P.typ) --
44.13 - Scan.option (P.$$$ "morphisms" |-- P.!!! (P.binding -- P.binding)))
44.14 + (Parse.type_args -- Parse.binding -- Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.typ) --
44.15 + Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding)))
44.16 >> (fn ((((vs, t), mx), rhs), morphs) => (vs, t, mx, rhs, morphs));
44.17
44.18 -val parse_domain_isos = P.and_list1 parse_domain_iso;
44.19 +val parse_domain_isos = Parse.and_list1 parse_domain_iso;
44.20
44.21 in
44.22
44.23 val _ =
44.24 - OuterSyntax.command "domain_isomorphism" "define domain isomorphisms (HOLCF)" K.thy_decl
44.25 + Outer_Syntax.command "domain_isomorphism" "define domain isomorphisms (HOLCF)"
44.26 + Keyword.thy_decl
44.27 (parse_domain_isos >> (Toplevel.theory o domain_isomorphism_cmd));
44.28
44.29 end;
45.1 --- a/src/HOLCF/Tools/cont_consts.ML Mon May 17 18:59:59 2010 -0700
45.2 +++ b/src/HOLCF/Tools/cont_consts.ML Tue May 18 06:28:42 2010 -0700
45.3 @@ -93,12 +93,8 @@
45.4
45.5 (* outer syntax *)
45.6
45.7 -local structure P = OuterParse and K = OuterKeyword in
45.8 -
45.9 val _ =
45.10 - OuterSyntax.command "consts" "declare constants (HOLCF)" K.thy_decl
45.11 - (Scan.repeat1 P.const_binding >> (Toplevel.theory o add_consts_cmd));
45.12 + Outer_Syntax.command "consts" "declare constants (HOLCF)" Keyword.thy_decl
45.13 + (Scan.repeat1 Parse.const_binding >> (Toplevel.theory o add_consts_cmd));
45.14
45.15 end;
45.16 -
45.17 -end;
46.1 --- a/src/HOLCF/Tools/fixrec.ML Mon May 17 18:59:59 2010 -0700
46.2 +++ b/src/HOLCF/Tools/fixrec.ML Tue May 18 06:28:42 2010 -0700
46.3 @@ -443,16 +443,14 @@
46.4 (******************************** Parsers ********************************)
46.5 (*************************************************************************)
46.6
46.7 -local structure P = OuterParse and K = OuterKeyword in
46.8 +val _ =
46.9 + Outer_Syntax.local_theory "fixrec" "define recursive functions (HOLCF)" Keyword.thy_decl
46.10 + ((Parse.opt_keyword "permissive" >> not) -- Parse.fixes -- Parse_Spec.where_alt_specs
46.11 + >> (fn ((strict, fixes), specs) => add_fixrec_cmd strict fixes specs));
46.12
46.13 -val _ = OuterSyntax.local_theory "fixrec" "define recursive functions (HOLCF)" K.thy_decl
46.14 - ((P.opt_keyword "permissive" >> not) -- P.fixes -- Parse_Spec.where_alt_specs
46.15 - >> (fn ((strict, fixes), specs) => add_fixrec_cmd strict fixes specs));
46.16 -
46.17 -val _ = OuterSyntax.command "fixpat" "define rewrites for fixrec functions" K.thy_decl
46.18 - (Parse_Spec.specs >> (Toplevel.theory o add_fixpat_cmd));
46.19 -
46.20 -end;
46.21 +val _ =
46.22 + Outer_Syntax.command "fixpat" "define rewrites for fixrec functions" Keyword.thy_decl
46.23 + (Parse_Spec.specs >> (Toplevel.theory o add_fixpat_cmd));
46.24
46.25 val setup =
46.26 Attrib.setup @{binding fixrec_simp}
47.1 --- a/src/HOLCF/Tools/pcpodef.ML Mon May 17 18:59:59 2010 -0700
47.2 +++ b/src/HOLCF/Tools/pcpodef.ML Tue May 18 06:28:42 2010 -0700
47.3 @@ -355,29 +355,29 @@
47.4
47.5 (** outer syntax **)
47.6
47.7 -local structure P = OuterParse and K = OuterKeyword in
47.8 -
47.9 val typedef_proof_decl =
47.10 - Scan.optional (P.$$$ "(" |--
47.11 - ((P.$$$ "open" >> K false) -- Scan.option P.binding || P.binding >> (fn s => (true, SOME s)))
47.12 - --| P.$$$ ")") (true, NONE) --
47.13 - (P.type_args_constrained -- P.binding) -- P.opt_mixfix -- (P.$$$ "=" |-- P.term) --
47.14 - Scan.option (P.$$$ "morphisms" |-- P.!!! (P.binding -- P.binding));
47.15 + Scan.optional (Parse.$$$ "(" |--
47.16 + ((Parse.$$$ "open" >> K false) -- Scan.option Parse.binding ||
47.17 + Parse.binding >> (fn s => (true, SOME s)))
47.18 + --| Parse.$$$ ")") (true, NONE) --
47.19 + (Parse.type_args_constrained -- Parse.binding) -- Parse.opt_mixfix --
47.20 + (Parse.$$$ "=" |-- Parse.term) --
47.21 + Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding));
47.22
47.23 fun mk_pcpodef_proof pcpo ((((((def, opt_name), (args, t)), mx), A), morphs)) =
47.24 (if pcpo then pcpodef_proof_cmd else cpodef_proof_cmd)
47.25 ((def, the_default t opt_name), (t, args, mx), A, morphs);
47.26
47.27 val _ =
47.28 - OuterSyntax.command "pcpodef" "HOLCF type definition (requires admissibility proof)" K.thy_goal
47.29 + Outer_Syntax.command "pcpodef" "HOLCF type definition (requires admissibility proof)"
47.30 + Keyword.thy_goal
47.31 (typedef_proof_decl >>
47.32 (Toplevel.print oo (Toplevel.theory_to_proof o mk_pcpodef_proof true)));
47.33
47.34 val _ =
47.35 - OuterSyntax.command "cpodef" "HOLCF type definition (requires admissibility proof)" K.thy_goal
47.36 + Outer_Syntax.command "cpodef" "HOLCF type definition (requires admissibility proof)"
47.37 + Keyword.thy_goal
47.38 (typedef_proof_decl >>
47.39 (Toplevel.print oo (Toplevel.theory_to_proof o mk_pcpodef_proof false)));
47.40
47.41 end;
47.42 -
47.43 -end;
48.1 --- a/src/HOLCF/Tools/repdef.ML Mon May 17 18:59:59 2010 -0700
48.2 +++ b/src/HOLCF/Tools/repdef.ML Tue May 18 06:28:42 2010 -0700
48.3 @@ -168,23 +168,21 @@
48.4
48.5 (** outer syntax **)
48.6
48.7 -local structure P = OuterParse and K = OuterKeyword in
48.8 -
48.9 val repdef_decl =
48.10 - Scan.optional (P.$$$ "(" |--
48.11 - ((P.$$$ "open" >> K false) -- Scan.option P.binding || P.binding >> (fn s => (true, SOME s)))
48.12 - --| P.$$$ ")") (true, NONE) --
48.13 - (P.type_args_constrained -- P.binding) -- P.opt_mixfix -- (P.$$$ "=" |-- P.term) --
48.14 - Scan.option (P.$$$ "morphisms" |-- P.!!! (P.binding -- P.binding));
48.15 + Scan.optional (Parse.$$$ "(" |--
48.16 + ((Parse.$$$ "open" >> K false) -- Scan.option Parse.binding ||
48.17 + Parse.binding >> (fn s => (true, SOME s)))
48.18 + --| Parse.$$$ ")") (true, NONE) --
48.19 + (Parse.type_args_constrained -- Parse.binding) --
48.20 + Parse.opt_mixfix -- (Parse.$$$ "=" |-- Parse.term) --
48.21 + Scan.option (Parse.$$$ "morphisms" |-- Parse.!!! (Parse.binding -- Parse.binding));
48.22
48.23 fun mk_repdef ((((((def, opt_name), (args, t)), mx), A), morphs)) =
48.24 repdef_cmd ((def, the_default t opt_name), (t, args, mx), A, morphs);
48.25
48.26 val _ =
48.27 - OuterSyntax.command "repdef" "HOLCF definition of representable domains" K.thy_decl
48.28 + Outer_Syntax.command "repdef" "HOLCF definition of representable domains" Keyword.thy_decl
48.29 (repdef_decl >>
48.30 (Toplevel.print oo (Toplevel.theory o mk_repdef)));
48.31
48.32 end;
48.33 -
48.34 -end;
49.1 --- a/src/Provers/blast.ML Mon May 17 18:59:59 2010 -0700
49.2 +++ b/src/Provers/blast.ML Tue May 18 06:28:42 2010 -0700
49.3 @@ -1309,7 +1309,7 @@
49.4 val setup =
49.5 setup_depth_limit #>
49.6 Method.setup @{binding blast}
49.7 - (Scan.lift (Scan.option OuterParse.nat) --| Method.sections Data.cla_modifiers >>
49.8 + (Scan.lift (Scan.option Parse.nat) --| Method.sections Data.cla_modifiers >>
49.9 (fn NONE => Data.cla_meth' blast_tac
49.10 | SOME lim => Data.cla_meth' (fn cs => depth_tac cs lim)))
49.11 "classical tableau prover";
50.1 --- a/src/Provers/clasimp.ML Mon May 17 18:59:59 2010 -0700
50.2 +++ b/src/Provers/clasimp.ML Tue May 18 06:28:42 2010 -0700
50.3 @@ -275,7 +275,7 @@
50.4 Method.sections clasimp_modifiers >> K (clasimp_meth' tac);
50.5
50.6 val auto_method =
50.7 - Scan.lift (Scan.option (OuterParse.nat -- OuterParse.nat)) --|
50.8 + Scan.lift (Scan.option (Parse.nat -- Parse.nat)) --|
50.9 Method.sections clasimp_modifiers >>
50.10 (fn NONE => clasimp_meth (CHANGED_PROP o auto_tac)
50.11 | SOME (m, n) => clasimp_meth (CHANGED_PROP o (fn css => mk_auto_tac css m n)));
51.1 --- a/src/Provers/classical.ML Mon May 17 18:59:59 2010 -0700
51.2 +++ b/src/Provers/classical.ML Tue May 18 06:28:42 2010 -0700
51.3 @@ -1015,8 +1015,8 @@
51.4 (** outer syntax **)
51.5
51.6 val _ =
51.7 - OuterSyntax.improper_command "print_claset" "print context of Classical Reasoner"
51.8 - OuterKeyword.diag
51.9 + Outer_Syntax.improper_command "print_claset" "print context of Classical Reasoner"
51.10 + Keyword.diag
51.11 (Scan.succeed (Toplevel.no_timing o Toplevel.unknown_context o
51.12 Toplevel.keep (print_cs o claset_of o Toplevel.context_of)));
51.13
52.1 --- a/src/Pure/General/scan.scala Mon May 17 18:59:59 2010 -0700
52.2 +++ b/src/Pure/General/scan.scala Tue May 18 06:28:42 2010 -0700
52.3 @@ -258,47 +258,44 @@
52.4
52.5 /* outer syntax tokens */
52.6
52.7 - def token(symbols: Symbol.Interpretation, is_command: String => Boolean):
52.8 - Parser[Outer_Lex.Token] =
52.9 + def token(symbols: Symbol.Interpretation, is_command: String => Boolean): Parser[Token] =
52.10 {
52.11 - import Outer_Lex.Token_Kind, Outer_Lex.Token
52.12 -
52.13 val id = one(symbols.is_letter) ~ many(symbols.is_letdig) ^^ { case x ~ y => x + y }
52.14 val nat = many1(symbols.is_digit)
52.15 val id_nat = id ~ opt("." ~ nat) ^^ { case x ~ Some(y ~ z) => x + y + z case x ~ None => x }
52.16
52.17 val ident = id ~ rep("." ~> id) ^^
52.18 - { case x ~ Nil => Token(Token_Kind.IDENT, x)
52.19 - case x ~ ys => Token(Token_Kind.LONG_IDENT, (x :: ys).mkString(".")) }
52.20 + { case x ~ Nil => Token(Token.Kind.IDENT, x)
52.21 + case x ~ ys => Token(Token.Kind.LONG_IDENT, (x :: ys).mkString(".")) }
52.22
52.23 - val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.VAR, x + y) }
52.24 - val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token_Kind.TYPE_IDENT, x + y) }
52.25 - val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token_Kind.TYPE_VAR, x + y) }
52.26 - val nat_ = nat ^^ (x => Token(Token_Kind.NAT, x))
52.27 + val var_ = "?" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.VAR, x + y) }
52.28 + val type_ident = "'" ~ id ^^ { case x ~ y => Token(Token.Kind.TYPE_IDENT, x + y) }
52.29 + val type_var = "?'" ~ id_nat ^^ { case x ~ y => Token(Token.Kind.TYPE_VAR, x + y) }
52.30 + val nat_ = nat ^^ (x => Token(Token.Kind.NAT, x))
52.31
52.32 val sym_ident =
52.33 (many1(symbols.is_symbolic_char) |
52.34 one(sym => symbols.is_symbolic(sym) & Symbol.is_wellformed(sym))) ^^
52.35 - (x => Token(Token_Kind.SYM_IDENT, x))
52.36 + (x => Token(Token.Kind.SYM_IDENT, x))
52.37
52.38 - val space = many1(symbols.is_blank) ^^ (x => Token(Token_Kind.SPACE, x))
52.39 + val space = many1(symbols.is_blank) ^^ (x => Token(Token.Kind.SPACE, x))
52.40
52.41 - val string = quoted("\"") ^^ (x => Token(Token_Kind.STRING, x))
52.42 - val alt_string = quoted("`") ^^ (x => Token(Token_Kind.ALT_STRING, x))
52.43 + val string = quoted("\"") ^^ (x => Token(Token.Kind.STRING, x))
52.44 + val alt_string = quoted("`") ^^ (x => Token(Token.Kind.ALT_STRING, x))
52.45
52.46 val junk = many1(sym => !(symbols.is_blank(sym)))
52.47 val bad_delimiter =
52.48 - ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token_Kind.BAD_INPUT, x + y) }
52.49 - val bad = junk ^^ (x => Token(Token_Kind.BAD_INPUT, x))
52.50 + ("\"" | "`" | "{*" | "(*") ~ junk ^^ { case x ~ y => Token(Token.Kind.BAD_INPUT, x + y) }
52.51 + val bad = junk ^^ (x => Token(Token.Kind.BAD_INPUT, x))
52.52
52.53
52.54 /* tokens */
52.55
52.56 - (space | (string | (alt_string | (verbatim ^^ (x => Token(Token_Kind.VERBATIM, x)) |
52.57 - comment ^^ (x => Token(Token_Kind.COMMENT, x)))))) |
52.58 + (space | (string | (alt_string | (verbatim ^^ (x => Token(Token.Kind.VERBATIM, x)) |
52.59 + comment ^^ (x => Token(Token.Kind.COMMENT, x)))))) |
52.60 bad_delimiter |
52.61 ((ident | (var_ | (type_ident | (type_var | (nat_ | sym_ident))))) |||
52.62 - keyword ^^ (x => Token(if (is_command(x)) Token_Kind.COMMAND else Token_Kind.KEYWORD, x))) |
52.63 + keyword ^^ (x => Token(if (is_command(x)) Token.Kind.COMMAND else Token.Kind.KEYWORD, x))) |
52.64 bad
52.65 }
52.66 }
53.1 --- a/src/Pure/General/symbol_pos.ML Mon May 17 18:59:59 2010 -0700
53.2 +++ b/src/Pure/General/symbol_pos.ML Tue May 18 06:28:42 2010 -0700
53.3 @@ -4,17 +4,12 @@
53.4 Symbols with explicit position information.
53.5 *)
53.6
53.7 -signature BASIC_SYMBOL_POS =
53.8 +signature SYMBOL_POS =
53.9 sig
53.10 type T = Symbol.symbol * Position.T
53.11 val symbol: T -> Symbol.symbol
53.12 val $$$ : Symbol.symbol -> T list -> T list * T list
53.13 val ~$$$ : Symbol.symbol -> T list -> T list * T list
53.14 -end
53.15 -
53.16 -signature SYMBOL_POS =
53.17 -sig
53.18 - include BASIC_SYMBOL_POS
53.19 val content: T list -> string
53.20 val untabify_content: T list -> string
53.21 val is_eof: T -> bool
53.22 @@ -185,5 +180,10 @@
53.23
53.24 end;
53.25
53.26 -structure Basic_Symbol_Pos: BASIC_SYMBOL_POS = Symbol_Pos; (*not open by default*)
53.27 +structure Basic_Symbol_Pos = (*not open by default*)
53.28 +struct
53.29 + val symbol = Symbol_Pos.symbol;
53.30 + val $$$ = Symbol_Pos.$$$;
53.31 + val ~$$$ = Symbol_Pos.~$$$;
53.32 +end;
53.33
54.1 --- a/src/Pure/IsaMakefile Mon May 17 18:59:59 2010 -0700
54.2 +++ b/src/Pure/IsaMakefile Tue May 18 06:28:42 2010 -0700
54.3 @@ -67,24 +67,25 @@
54.4 Isar/expression.ML Isar/isar_cmd.ML Isar/isar_document.ML \
54.5 Isar/isar_syn.ML Isar/keyword.ML Isar/local_defs.ML \
54.6 Isar/local_syntax.ML Isar/local_theory.ML Isar/locale.ML \
54.7 - Isar/method.ML Isar/object_logic.ML Isar/obtain.ML Isar/outer_lex.ML \
54.8 + Isar/method.ML Isar/object_logic.ML Isar/obtain.ML \
54.9 Isar/outer_syntax.ML Isar/overloading.ML Isar/parse.ML \
54.10 Isar/parse_spec.ML Isar/parse_value.ML Isar/proof.ML \
54.11 Isar/proof_context.ML Isar/proof_display.ML Isar/proof_node.ML \
54.12 Isar/rule_cases.ML Isar/rule_insts.ML Isar/runtime.ML \
54.13 Isar/skip_proof.ML Isar/spec_rules.ML Isar/specification.ML \
54.14 - Isar/theory_target.ML Isar/toplevel.ML Isar/typedecl.ML \
54.15 - ML/ml_antiquote.ML ML/ml_compiler.ML ML/ml_compiler_polyml-5.3.ML \
54.16 - ML/ml_context.ML ML/ml_env.ML ML/ml_lex.ML ML/ml_parse.ML \
54.17 - ML/ml_syntax.ML ML/ml_thms.ML ML-Systems/install_pp_polyml.ML \
54.18 - ML-Systems/install_pp_polyml-5.3.ML ML-Systems/use_context.ML \
54.19 - Proof/extraction.ML Proof/proof_rewrite_rules.ML \
54.20 - Proof/proof_syntax.ML Proof/proofchecker.ML Proof/reconstruct.ML \
54.21 - ProofGeneral/pgip.ML ProofGeneral/pgip_input.ML \
54.22 - ProofGeneral/pgip_isabelle.ML ProofGeneral/pgip_markup.ML \
54.23 - ProofGeneral/pgip_output.ML ProofGeneral/pgip_parser.ML \
54.24 - ProofGeneral/pgip_tests.ML ProofGeneral/pgip_types.ML \
54.25 - ProofGeneral/preferences.ML ProofGeneral/proof_general_emacs.ML \
54.26 + Isar/theory_target.ML Isar/token.ML Isar/toplevel.ML \
54.27 + Isar/typedecl.ML ML/ml_antiquote.ML ML/ml_compiler.ML \
54.28 + ML/ml_compiler_polyml-5.3.ML ML/ml_context.ML ML/ml_env.ML \
54.29 + ML/ml_lex.ML ML/ml_parse.ML ML/ml_syntax.ML ML/ml_thms.ML \
54.30 + ML-Systems/install_pp_polyml.ML ML-Systems/install_pp_polyml-5.3.ML \
54.31 + ML-Systems/use_context.ML Proof/extraction.ML \
54.32 + Proof/proof_rewrite_rules.ML Proof/proof_syntax.ML \
54.33 + Proof/proofchecker.ML Proof/reconstruct.ML ProofGeneral/pgip.ML \
54.34 + ProofGeneral/pgip_input.ML ProofGeneral/pgip_isabelle.ML \
54.35 + ProofGeneral/pgip_markup.ML ProofGeneral/pgip_output.ML \
54.36 + ProofGeneral/pgip_parser.ML ProofGeneral/pgip_tests.ML \
54.37 + ProofGeneral/pgip_types.ML ProofGeneral/preferences.ML \
54.38 + ProofGeneral/proof_general_emacs.ML \
54.39 ProofGeneral/proof_general_pgip.ML Pure.thy ROOT.ML Syntax/ast.ML \
54.40 Syntax/lexicon.ML Syntax/mixfix.ML Syntax/parser.ML \
54.41 Syntax/printer.ML Syntax/simple_syntax.ML Syntax/syn_ext.ML \
55.1 --- a/src/Pure/Isar/args.ML Mon May 17 18:59:59 2010 -0700
55.2 +++ b/src/Pure/Isar/args.ML Tue May 18 06:28:42 2010 -0700
55.3 @@ -7,10 +7,9 @@
55.4
55.5 signature ARGS =
55.6 sig
55.7 - type token = OuterLex.token
55.8 type src
55.9 - val src: (string * token list) * Position.T -> src
55.10 - val dest_src: src -> (string * token list) * Position.T
55.11 + val src: (string * Token.T list) * Position.T -> src
55.12 + val dest_src: src -> (string * Token.T list) * Position.T
55.13 val pretty_src: Proof.context -> src -> Pretty.T
55.14 val map_name: (string -> string) -> src -> src
55.15 val morph_values: morphism -> src -> src
55.16 @@ -57,8 +56,8 @@
55.17 val const: bool -> string context_parser
55.18 val const_proper: bool -> string context_parser
55.19 val goal_spec: ((int -> tactic) -> tactic) context_parser
55.20 - val parse: token list parser
55.21 - val parse1: (string -> bool) -> token list parser
55.22 + val parse: Token.T list parser
55.23 + val parse1: (string -> bool) -> Token.T list parser
55.24 val attribs: (string -> string) -> src list parser
55.25 val opt_attribs: (string -> string) -> src list parser
55.26 val thm_name: (string -> string) -> string -> (binding * src list) parser
55.27 @@ -70,15 +69,9 @@
55.28 structure Args: ARGS =
55.29 struct
55.30
55.31 -structure T = OuterLex;
55.32 -
55.33 -type token = T.token;
55.34 -
55.35 -
55.36 -
55.37 (** datatype src **)
55.38
55.39 -datatype src = Src of (string * token list) * Position.T;
55.40 +datatype src = Src of (string * Token.T list) * Position.T;
55.41
55.42 val src = Src;
55.43 fun dest_src (Src src) = src;
55.44 @@ -87,12 +80,12 @@
55.45 let
55.46 val prt_thm = Pretty.backquote o Display.pretty_thm ctxt;
55.47 fun prt arg =
55.48 - (case T.get_value arg of
55.49 - SOME (T.Text s) => Pretty.str (quote s)
55.50 - | SOME (T.Typ T) => Syntax.pretty_typ ctxt T
55.51 - | SOME (T.Term t) => Syntax.pretty_term ctxt t
55.52 - | SOME (T.Fact ths) => Pretty.enclose "(" ")" (Pretty.breaks (map prt_thm ths))
55.53 - | _ => Pretty.str (T.unparse arg));
55.54 + (case Token.get_value arg of
55.55 + SOME (Token.Text s) => Pretty.str (quote s)
55.56 + | SOME (Token.Typ T) => Syntax.pretty_typ ctxt T
55.57 + | SOME (Token.Term t) => Syntax.pretty_term ctxt t
55.58 + | SOME (Token.Fact ths) => Pretty.enclose "(" ")" (Pretty.breaks (map prt_thm ths))
55.59 + | _ => Pretty.str (Token.unparse arg));
55.60 val (s, args) = #1 (dest_src src);
55.61 in Pretty.block (Pretty.breaks (Pretty.str s :: map prt args)) end;
55.62
55.63 @@ -102,15 +95,15 @@
55.64
55.65 (* values *)
55.66
55.67 -fun morph_values phi = map_args (T.map_value
55.68 - (fn T.Text s => T.Text s
55.69 - | T.Typ T => T.Typ (Morphism.typ phi T)
55.70 - | T.Term t => T.Term (Morphism.term phi t)
55.71 - | T.Fact ths => T.Fact (Morphism.fact phi ths)
55.72 - | T.Attribute att => T.Attribute (Morphism.transform phi att)));
55.73 +fun morph_values phi = map_args (Token.map_value
55.74 + (fn Token.Text s => Token.Text s
55.75 + | Token.Typ T => Token.Typ (Morphism.typ phi T)
55.76 + | Token.Term t => Token.Term (Morphism.term phi t)
55.77 + | Token.Fact ths => Token.Fact (Morphism.fact phi ths)
55.78 + | Token.Attribute att => Token.Attribute (Morphism.transform phi att)));
55.79
55.80 -val assignable = map_args T.assignable;
55.81 -val closure = map_args T.closure;
55.82 +val assignable = map_args Token.assignable;
55.83 +val closure = map_args Token.closure;
55.84
55.85
55.86
55.87 @@ -134,7 +127,7 @@
55.88 val alt_string = token Parse.alt_string;
55.89 val symbolic = token Parse.keyword_ident_or_symbolic;
55.90
55.91 -fun $$$ x = (ident >> T.content_of || Parse.keyword)
55.92 +fun $$$ x = (ident >> Token.content_of || Parse.keyword)
55.93 :|-- (fn y => if x = y then Scan.succeed x else Scan.fail);
55.94
55.95
55.96 @@ -153,39 +146,40 @@
55.97 fun mode s = Scan.lift (Scan.optional (parens ($$$ s) >> K true) false);
55.98 fun maybe scan = $$$ "_" >> K NONE || scan >> SOME;
55.99
55.100 -val name_source = named >> T.source_of;
55.101 -val name_source_position = named >> T.source_position_of;
55.102 +val name_source = named >> Token.source_of;
55.103 +val name_source_position = named >> Token.source_position_of;
55.104
55.105 -val name = named >> T.content_of;
55.106 +val name = named >> Token.content_of;
55.107 val binding = Parse.position name >> Binding.make;
55.108 -val alt_name = alt_string >> T.content_of;
55.109 -val symbol = symbolic >> T.content_of;
55.110 +val alt_name = alt_string >> Token.content_of;
55.111 +val symbol = symbolic >> Token.content_of;
55.112 val liberal_name = symbol || name;
55.113
55.114 -val var = (ident >> T.content_of) :|-- (fn x =>
55.115 +val var = (ident >> Token.content_of) :|-- (fn x =>
55.116 (case Lexicon.read_variable x of SOME v => Scan.succeed v | NONE => Scan.fail));
55.117
55.118
55.119 (* values *)
55.120
55.121 fun value dest = Scan.some (fn arg =>
55.122 - (case T.get_value arg of SOME v => (SOME (dest v) handle Match => NONE) | NONE => NONE));
55.123 + (case Token.get_value arg of SOME v => (SOME (dest v) handle Match => NONE) | NONE => NONE));
55.124
55.125 fun evaluate mk eval arg =
55.126 - let val x = eval arg in (T.assign (SOME (mk x)) arg; x) end;
55.127 + let val x = eval arg in (Token.assign (SOME (mk x)) arg; x) end;
55.128
55.129 -val internal_text = value (fn T.Text s => s);
55.130 -val internal_typ = value (fn T.Typ T => T);
55.131 -val internal_term = value (fn T.Term t => t);
55.132 -val internal_fact = value (fn T.Fact ths => ths);
55.133 -val internal_attribute = value (fn T.Attribute att => att);
55.134 +val internal_text = value (fn Token.Text s => s);
55.135 +val internal_typ = value (fn Token.Typ T => T);
55.136 +val internal_term = value (fn Token.Term t => t);
55.137 +val internal_fact = value (fn Token.Fact ths => ths);
55.138 +val internal_attribute = value (fn Token.Attribute att => att);
55.139
55.140 -fun named_text intern = internal_text || named >> evaluate T.Text (intern o T.content_of);
55.141 -fun named_typ readT = internal_typ || named >> evaluate T.Typ (readT o T.source_of);
55.142 -fun named_term read = internal_term || named >> evaluate T.Term (read o T.source_of);
55.143 -fun named_fact get = internal_fact || named >> evaluate T.Fact (get o T.content_of) ||
55.144 - alt_string >> evaluate T.Fact (get o T.source_of);
55.145 -fun named_attribute att = internal_attribute || named >> evaluate T.Attribute (att o T.content_of);
55.146 +fun named_text intern = internal_text || named >> evaluate Token.Text (intern o Token.content_of);
55.147 +fun named_typ readT = internal_typ || named >> evaluate Token.Typ (readT o Token.source_of);
55.148 +fun named_term read = internal_term || named >> evaluate Token.Term (read o Token.source_of);
55.149 +fun named_fact get = internal_fact || named >> evaluate Token.Fact (get o Token.content_of) ||
55.150 + alt_string >> evaluate Token.Fact (get o Token.source_of);
55.151 +fun named_attribute att =
55.152 + internal_attribute || named >> evaluate Token.Attribute (att o Token.content_of);
55.153
55.154
55.155 (* terms and types *)
55.156 @@ -243,7 +237,7 @@
55.157 (token (Parse.$$$ l) ::: Parse.!!! (args true @@@ (token (Parse.$$$ r) >> single))) x;
55.158 in (args, args1) end;
55.159
55.160 -val parse = #1 (parse_args T.ident_or_symbolic) false;
55.161 +val parse = #1 (parse_args Token.ident_or_symbolic) false;
55.162 fun parse1 is_symid = #2 (parse_args is_symid) false;
55.163
55.164
55.165 @@ -252,7 +246,7 @@
55.166 fun attribs intern =
55.167 let
55.168 val attrib_name = internal_text || (symbolic || named)
55.169 - >> evaluate T.Text (intern o T.content_of);
55.170 + >> evaluate Token.Text (intern o Token.content_of);
55.171 val attrib = Parse.position (attrib_name -- Parse.!!! parse) >> src;
55.172 in $$$ "[" |-- Parse.!!! (Parse.list attrib --| $$$ "]") end;
55.173
55.174 @@ -273,11 +267,11 @@
55.175 (** syntax wrapper **)
55.176
55.177 fun syntax kind scan (Src ((s, args), pos)) st =
55.178 - (case Scan.error (Scan.finite' T.stopper (Scan.option scan)) (st, args) of
55.179 + (case Scan.error (Scan.finite' Token.stopper (Scan.option scan)) (st, args) of
55.180 (SOME x, (st', [])) => (x, st')
55.181 | (_, (_, args')) =>
55.182 error (kind ^ " " ^ quote s ^ Position.str_of pos ^ ": bad arguments\n " ^
55.183 - space_implode " " (map T.unparse args')));
55.184 + space_implode " " (map Token.unparse args')));
55.185
55.186 fun context_syntax kind scan src = apsnd Context.the_proof o syntax kind scan src o Context.Proof;
55.187
56.1 --- a/src/Pure/Isar/attrib.ML Mon May 17 18:59:59 2010 -0700
56.2 +++ b/src/Pure/Isar/attrib.ML Tue May 18 06:28:42 2010 -0700
56.3 @@ -47,9 +47,6 @@
56.4 structure Attrib: ATTRIB =
56.5 struct
56.6
56.7 -structure T = OuterLex;
56.8 -
56.9 -
56.10 (* source and bindings *)
56.11
56.12 type src = Args.src;
56.13 @@ -216,7 +213,7 @@
56.14
56.15 (* internal *)
56.16
56.17 -fun internal att = Args.src (("Pure.attribute", [T.mk_attribute att]), Position.none);
56.18 +fun internal att = Args.src (("Pure.attribute", [Token.mk_attribute att]), Position.none);
56.19
56.20
56.21 (* rule composition *)
57.1 --- a/src/Pure/Isar/keyword.ML Mon May 17 18:59:59 2010 -0700
57.2 +++ b/src/Pure/Isar/keyword.ML Tue May 18 06:28:42 2010 -0700
57.3 @@ -210,7 +210,4 @@
57.4
57.5 end;
57.6
57.7 -(*legacy alias*)
57.8 -structure OuterKeyword = Keyword;
57.9
57.10 -
58.1 --- a/src/Pure/Isar/method.ML Mon May 17 18:59:59 2010 -0700
58.2 +++ b/src/Pure/Isar/method.ML Tue May 18 06:28:42 2010 -0700
58.3 @@ -411,7 +411,7 @@
58.4 (* outer parser *)
58.5
58.6 fun is_symid_meth s =
58.7 - s <> "|" andalso s <> "?" andalso s <> "+" andalso OuterLex.ident_or_symbolic s;
58.8 + s <> "|" andalso s <> "?" andalso s <> "+" andalso Token.ident_or_symbolic s;
58.9
58.10 local
58.11
59.1 --- a/src/Pure/Isar/outer_lex.ML Mon May 17 18:59:59 2010 -0700
59.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
59.3 @@ -1,390 +0,0 @@
59.4 -(* Title: Pure/Isar/outer_lex.ML
59.5 - Author: Markus Wenzel, TU Muenchen
59.6 -
59.7 -Outer lexical syntax for Isabelle/Isar.
59.8 -*)
59.9 -
59.10 -signature OUTER_LEX =
59.11 -sig
59.12 - datatype token_kind =
59.13 - Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
59.14 - Nat | String | AltString | Verbatim | Space | Comment | InternalValue |
59.15 - Malformed | Error of string | Sync | EOF
59.16 - datatype value =
59.17 - Text of string | Typ of typ | Term of term | Fact of thm list |
59.18 - Attribute of morphism -> attribute
59.19 - type token
59.20 - val str_of_kind: token_kind -> string
59.21 - val position_of: token -> Position.T
59.22 - val end_position_of: token -> Position.T
59.23 - val pos_of: token -> string
59.24 - val eof: token
59.25 - val is_eof: token -> bool
59.26 - val not_eof: token -> bool
59.27 - val not_sync: token -> bool
59.28 - val stopper: token Scan.stopper
59.29 - val kind_of: token -> token_kind
59.30 - val is_kind: token_kind -> token -> bool
59.31 - val keyword_with: (string -> bool) -> token -> bool
59.32 - val ident_with: (string -> bool) -> token -> bool
59.33 - val is_proper: token -> bool
59.34 - val is_semicolon: token -> bool
59.35 - val is_comment: token -> bool
59.36 - val is_begin_ignore: token -> bool
59.37 - val is_end_ignore: token -> bool
59.38 - val is_blank: token -> bool
59.39 - val is_newline: token -> bool
59.40 - val source_of: token -> string
59.41 - val source_position_of: token -> Symbol_Pos.text * Position.T
59.42 - val content_of: token -> string
59.43 - val unparse: token -> string
59.44 - val text_of: token -> string * string
59.45 - val get_value: token -> value option
59.46 - val map_value: (value -> value) -> token -> token
59.47 - val mk_text: string -> token
59.48 - val mk_typ: typ -> token
59.49 - val mk_term: term -> token
59.50 - val mk_fact: thm list -> token
59.51 - val mk_attribute: (morphism -> attribute) -> token
59.52 - val assignable: token -> token
59.53 - val assign: value option -> token -> unit
59.54 - val closure: token -> token
59.55 - val ident_or_symbolic: string -> bool
59.56 - val !!! : string -> (Symbol_Pos.T list -> 'a) -> Symbol_Pos.T list -> 'a
59.57 - val source_proper: (token, 'a) Source.source -> (token, (token, 'a) Source.source) Source.source
59.58 - val source': {do_recover: bool Option.option} -> (unit -> Scan.lexicon * Scan.lexicon) ->
59.59 - (Symbol_Pos.T, 'a) Source.source -> (token, (Symbol_Pos.T, 'a) Source.source) Source.source
59.60 - val source: {do_recover: bool Option.option} -> (unit -> Scan.lexicon * Scan.lexicon) ->
59.61 - Position.T -> (Symbol.symbol, 'a) Source.source -> (token,
59.62 - (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
59.63 - val read_antiq: Scan.lexicon -> (token list -> 'a * token list) ->
59.64 - Symbol_Pos.T list * Position.T -> 'a
59.65 -end;
59.66 -
59.67 -structure OuterLex: OUTER_LEX =
59.68 -struct
59.69 -
59.70 -(** tokens **)
59.71 -
59.72 -(* token values *)
59.73 -
59.74 -(*The value slot assigns an (optional) internal value to a token,
59.75 - usually as a side-effect of special scanner setup (see also
59.76 - args.ML). Note that an assignable ref designates an intermediate
59.77 - state of internalization -- it is NOT meant to persist.*)
59.78 -
59.79 -datatype value =
59.80 - Text of string |
59.81 - Typ of typ |
59.82 - Term of term |
59.83 - Fact of thm list |
59.84 - Attribute of morphism -> attribute;
59.85 -
59.86 -datatype slot =
59.87 - Slot |
59.88 - Value of value option |
59.89 - Assignable of value option Unsynchronized.ref;
59.90 -
59.91 -
59.92 -(* datatype token *)
59.93 -
59.94 -datatype token_kind =
59.95 - Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
59.96 - Nat | String | AltString | Verbatim | Space | Comment | InternalValue |
59.97 - Malformed | Error of string | Sync | EOF;
59.98 -
59.99 -datatype token = Token of (Symbol_Pos.text * Position.range) * (token_kind * string) * slot;
59.100 -
59.101 -val str_of_kind =
59.102 - fn Command => "command"
59.103 - | Keyword => "keyword"
59.104 - | Ident => "identifier"
59.105 - | LongIdent => "long identifier"
59.106 - | SymIdent => "symbolic identifier"
59.107 - | Var => "schematic variable"
59.108 - | TypeIdent => "type variable"
59.109 - | TypeVar => "schematic type variable"
59.110 - | Nat => "number"
59.111 - | String => "string"
59.112 - | AltString => "back-quoted string"
59.113 - | Verbatim => "verbatim text"
59.114 - | Space => "white space"
59.115 - | Comment => "comment text"
59.116 - | InternalValue => "internal value"
59.117 - | Malformed => "malformed symbolic character"
59.118 - | Error _ => "bad input"
59.119 - | Sync => "sync marker"
59.120 - | EOF => "end-of-file";
59.121 -
59.122 -
59.123 -(* position *)
59.124 -
59.125 -fun position_of (Token ((_, (pos, _)), _, _)) = pos;
59.126 -fun end_position_of (Token ((_, (_, pos)), _, _)) = pos;
59.127 -
59.128 -val pos_of = Position.str_of o position_of;
59.129 -
59.130 -
59.131 -(* control tokens *)
59.132 -
59.133 -fun mk_eof pos = Token (("", (pos, Position.none)), (EOF, ""), Slot);
59.134 -val eof = mk_eof Position.none;
59.135 -
59.136 -fun is_eof (Token (_, (EOF, _), _)) = true
59.137 - | is_eof _ = false;
59.138 -
59.139 -val not_eof = not o is_eof;
59.140 -
59.141 -fun not_sync (Token (_, (Sync, _), _)) = false
59.142 - | not_sync _ = true;
59.143 -
59.144 -val stopper =
59.145 - Scan.stopper (fn [] => eof | toks => mk_eof (end_position_of (List.last toks))) is_eof;
59.146 -
59.147 -
59.148 -(* kind of token *)
59.149 -
59.150 -fun kind_of (Token (_, (k, _), _)) = k;
59.151 -fun is_kind k (Token (_, (k', _), _)) = k = k';
59.152 -
59.153 -fun keyword_with pred (Token (_, (Keyword, x), _)) = pred x
59.154 - | keyword_with _ _ = false;
59.155 -
59.156 -fun ident_with pred (Token (_, (Ident, x), _)) = pred x
59.157 - | ident_with _ _ = false;
59.158 -
59.159 -fun is_proper (Token (_, (Space, _), _)) = false
59.160 - | is_proper (Token (_, (Comment, _), _)) = false
59.161 - | is_proper _ = true;
59.162 -
59.163 -fun is_semicolon (Token (_, (Keyword, ";"), _)) = true
59.164 - | is_semicolon _ = false;
59.165 -
59.166 -fun is_comment (Token (_, (Comment, _), _)) = true
59.167 - | is_comment _ = false;
59.168 -
59.169 -fun is_begin_ignore (Token (_, (Comment, "<"), _)) = true
59.170 - | is_begin_ignore _ = false;
59.171 -
59.172 -fun is_end_ignore (Token (_, (Comment, ">"), _)) = true
59.173 - | is_end_ignore _ = false;
59.174 -
59.175 -
59.176 -(* blanks and newlines -- space tokens obey lines *)
59.177 -
59.178 -fun is_blank (Token (_, (Space, x), _)) = not (String.isSuffix "\n" x)
59.179 - | is_blank _ = false;
59.180 -
59.181 -fun is_newline (Token (_, (Space, x), _)) = String.isSuffix "\n" x
59.182 - | is_newline _ = false;
59.183 -
59.184 -
59.185 -(* token content *)
59.186 -
59.187 -fun source_of (Token ((source, (pos, _)), _, _)) =
59.188 - YXML.string_of (XML.Elem (Markup.tokenN, Position.properties_of pos, [XML.Text source]));
59.189 -
59.190 -fun source_position_of (Token ((source, (pos, _)), _, _)) = (source, pos);
59.191 -
59.192 -fun content_of (Token (_, (_, x), _)) = x;
59.193 -
59.194 -
59.195 -(* unparse *)
59.196 -
59.197 -fun escape q =
59.198 - implode o map (fn s => if s = q orelse s = "\\" then "\\" ^ s else s) o Symbol.explode;
59.199 -
59.200 -fun unparse (Token (_, (kind, x), _)) =
59.201 - (case kind of
59.202 - String => x |> quote o escape "\""
59.203 - | AltString => x |> enclose "`" "`" o escape "`"
59.204 - | Verbatim => x |> enclose "{*" "*}"
59.205 - | Comment => x |> enclose "(*" "*)"
59.206 - | Malformed => space_implode " " (explode x)
59.207 - | Sync => ""
59.208 - | EOF => ""
59.209 - | _ => x);
59.210 -
59.211 -fun text_of tok =
59.212 - if is_semicolon tok then ("terminator", "")
59.213 - else
59.214 - let
59.215 - val k = str_of_kind (kind_of tok);
59.216 - val s = unparse tok
59.217 - handle ERROR _ => Symbol.separate_chars (content_of tok);
59.218 - in
59.219 - if s = "" then (k, "")
59.220 - else if size s < 40 andalso not (exists_string (fn c => c = "\n") s) then (k ^ " " ^ s, "")
59.221 - else (k, s)
59.222 - end;
59.223 -
59.224 -
59.225 -
59.226 -(** associated values **)
59.227 -
59.228 -(* access values *)
59.229 -
59.230 -fun get_value (Token (_, _, Value v)) = v
59.231 - | get_value _ = NONE;
59.232 -
59.233 -fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v)))
59.234 - | map_value _ tok = tok;
59.235 -
59.236 -
59.237 -(* make values *)
59.238 -
59.239 -fun mk_value k v = Token ((k, Position.no_range), (InternalValue, k), Value (SOME v));
59.240 -
59.241 -val mk_text = mk_value "<text>" o Text;
59.242 -val mk_typ = mk_value "<typ>" o Typ;
59.243 -val mk_term = mk_value "<term>" o Term;
59.244 -val mk_fact = mk_value "<fact>" o Fact;
59.245 -val mk_attribute = mk_value "<attribute>" o Attribute;
59.246 -
59.247 -
59.248 -(* static binding *)
59.249 -
59.250 -(*1st stage: make empty slots assignable*)
59.251 -fun assignable (Token (x, y, Slot)) = Token (x, y, Assignable (Unsynchronized.ref NONE))
59.252 - | assignable tok = tok;
59.253 -
59.254 -(*2nd stage: assign values as side-effect of scanning*)
59.255 -fun assign v (Token (_, _, Assignable r)) = r := v
59.256 - | assign _ _ = ();
59.257 -
59.258 -(*3rd stage: static closure of final values*)
59.259 -fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v)
59.260 - | closure tok = tok;
59.261 -
59.262 -
59.263 -
59.264 -(** scanners **)
59.265 -
59.266 -open Basic_Symbol_Pos;
59.267 -
59.268 -fun !!! msg = Symbol_Pos.!!! ("Outer lexical error: " ^ msg);
59.269 -
59.270 -
59.271 -(* scan symbolic idents *)
59.272 -
59.273 -val is_sym_char = member (op =) (explode "!#$%&*+-/<=>?@^_|~");
59.274 -
59.275 -val scan_symid =
59.276 - Scan.many1 (is_sym_char o symbol) ||
59.277 - Scan.one (Symbol.is_symbolic o symbol) >> single;
59.278 -
59.279 -fun is_symid str =
59.280 - (case try Symbol.explode str of
59.281 - SOME [s] => Symbol.is_symbolic s orelse is_sym_char s
59.282 - | SOME ss => forall is_sym_char ss
59.283 - | _ => false);
59.284 -
59.285 -fun ident_or_symbolic "begin" = false
59.286 - | ident_or_symbolic ":" = true
59.287 - | ident_or_symbolic "::" = true
59.288 - | ident_or_symbolic s = Syntax.is_identifier s orelse is_symid s;
59.289 -
59.290 -
59.291 -(* scan verbatim text *)
59.292 -
59.293 -val scan_verb =
59.294 - $$$ "*" --| Scan.ahead (~$$$ "}") ||
59.295 - Scan.one (fn (s, _) => s <> "*" andalso Symbol.is_regular s) >> single;
59.296 -
59.297 -val scan_verbatim =
59.298 - (Symbol_Pos.scan_pos --| $$$ "{" --| $$$ "*") -- !!! "missing end of verbatim text"
59.299 - (Symbol_Pos.change_prompt
59.300 - ((Scan.repeat scan_verb >> flat) -- ($$$ "*" |-- $$$ "}" |-- Symbol_Pos.scan_pos)));
59.301 -
59.302 -
59.303 -(* scan space *)
59.304 -
59.305 -fun is_space s = Symbol.is_blank s andalso s <> "\n";
59.306 -
59.307 -val scan_space =
59.308 - Scan.many1 (is_space o symbol) @@@ Scan.optional ($$$ "\n") [] ||
59.309 - Scan.many (is_space o symbol) @@@ $$$ "\n";
59.310 -
59.311 -
59.312 -(* scan comment *)
59.313 -
59.314 -val scan_comment =
59.315 - Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body !!! -- Symbol_Pos.scan_pos);
59.316 -
59.317 -
59.318 -(* scan malformed symbols *)
59.319 -
59.320 -val scan_malformed =
59.321 - $$$ Symbol.malformed |--
59.322 - Symbol_Pos.change_prompt (Scan.many (Symbol.is_regular o symbol))
59.323 - --| Scan.option ($$$ Symbol.end_malformed);
59.324 -
59.325 -
59.326 -
59.327 -(** token sources **)
59.328 -
59.329 -fun source_proper src = src |> Source.filter is_proper;
59.330 -
59.331 -local
59.332 -
59.333 -fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2;
59.334 -
59.335 -fun token k ss =
59.336 - Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.untabify_content ss), Slot);
59.337 -
59.338 -fun token_range k (pos1, (ss, pos2)) =
59.339 - Token (Symbol_Pos.implode_range pos1 pos2 ss, (k, Symbol_Pos.untabify_content ss), Slot);
59.340 -
59.341 -fun scan (lex1, lex2) = !!! "bad input"
59.342 - (Symbol_Pos.scan_string >> token_range String ||
59.343 - Symbol_Pos.scan_alt_string >> token_range AltString ||
59.344 - scan_verbatim >> token_range Verbatim ||
59.345 - scan_comment >> token_range Comment ||
59.346 - scan_space >> token Space ||
59.347 - scan_malformed >> token Malformed ||
59.348 - Scan.one (Symbol.is_sync o symbol) >> (token Sync o single) ||
59.349 - (Scan.max token_leq
59.350 - (Scan.max token_leq
59.351 - (Scan.literal lex2 >> pair Command)
59.352 - (Scan.literal lex1 >> pair Keyword))
59.353 - (Syntax.scan_longid >> pair LongIdent ||
59.354 - Syntax.scan_id >> pair Ident ||
59.355 - Syntax.scan_var >> pair Var ||
59.356 - Syntax.scan_tid >> pair TypeIdent ||
59.357 - Syntax.scan_tvar >> pair TypeVar ||
59.358 - Syntax.scan_nat >> pair Nat ||
59.359 - scan_symid >> pair SymIdent) >> uncurry token));
59.360 -
59.361 -fun recover msg =
59.362 - Scan.many ((Symbol.is_regular andf (not o Symbol.is_blank)) o symbol)
59.363 - >> (single o token (Error msg));
59.364 -
59.365 -in
59.366 -
59.367 -fun source' {do_recover} get_lex =
59.368 - Source.source Symbol_Pos.stopper (Scan.bulk (fn xs => scan (get_lex ()) xs))
59.369 - (Option.map (rpair recover) do_recover);
59.370 -
59.371 -fun source do_recover get_lex pos src =
59.372 - Symbol_Pos.source pos src
59.373 - |> source' do_recover get_lex;
59.374 -
59.375 -end;
59.376 -
59.377 -
59.378 -(* read_antiq *)
59.379 -
59.380 -fun read_antiq lex scan (syms, pos) =
59.381 - let
59.382 - fun err msg = cat_error msg ("Malformed antiquotation" ^ Position.str_of pos ^ ":\n" ^
59.383 - "@{" ^ Symbol_Pos.content syms ^ "}");
59.384 -
59.385 - val res =
59.386 - Source.of_list syms
59.387 - |> source' {do_recover = NONE} (K (lex, Scan.empty_lexicon))
59.388 - |> source_proper
59.389 - |> Source.source stopper (Scan.error (Scan.bulk scan)) NONE
59.390 - |> Source.exhaust;
59.391 - in (case res of [x] => x | _ => err "") handle ERROR msg => err msg end;
59.392 -
59.393 -end;
60.1 --- a/src/Pure/Isar/outer_lex.scala Mon May 17 18:59:59 2010 -0700
60.2 +++ /dev/null Thu Jan 01 00:00:00 1970 +0000
60.3 @@ -1,94 +0,0 @@
60.4 -/* Title: Pure/Isar/outer_lex.scala
60.5 - Author: Makarius
60.6 -
60.7 -Outer lexical syntax for Isabelle/Isar.
60.8 -*/
60.9 -
60.10 -package isabelle
60.11 -
60.12 -
60.13 -object Outer_Lex
60.14 -{
60.15 - /* tokens */
60.16 -
60.17 - object Token_Kind extends Enumeration
60.18 - {
60.19 - val COMMAND = Value("command")
60.20 - val KEYWORD = Value("keyword")
60.21 - val IDENT = Value("identifier")
60.22 - val LONG_IDENT = Value("long identifier")
60.23 - val SYM_IDENT = Value("symbolic identifier")
60.24 - val VAR = Value("schematic variable")
60.25 - val TYPE_IDENT = Value("type variable")
60.26 - val TYPE_VAR = Value("schematic type variable")
60.27 - val NAT = Value("number")
60.28 - val STRING = Value("string")
60.29 - val ALT_STRING = Value("back-quoted string")
60.30 - val VERBATIM = Value("verbatim text")
60.31 - val SPACE = Value("white space")
60.32 - val COMMENT = Value("comment text")
60.33 - val BAD_INPUT = Value("bad input")
60.34 - val UNPARSED = Value("unparsed input")
60.35 - }
60.36 -
60.37 - sealed case class Token(val kind: Token_Kind.Value, val source: String)
60.38 - {
60.39 - def is_command: Boolean = kind == Token_Kind.COMMAND
60.40 - def is_delimited: Boolean =
60.41 - kind == Token_Kind.STRING ||
60.42 - kind == Token_Kind.ALT_STRING ||
60.43 - kind == Token_Kind.VERBATIM ||
60.44 - kind == Token_Kind.COMMENT
60.45 - def is_name: Boolean =
60.46 - kind == Token_Kind.IDENT ||
60.47 - kind == Token_Kind.SYM_IDENT ||
60.48 - kind == Token_Kind.STRING ||
60.49 - kind == Token_Kind.NAT
60.50 - def is_xname: Boolean = is_name || kind == Token_Kind.LONG_IDENT
60.51 - def is_text: Boolean = is_xname || kind == Token_Kind.VERBATIM
60.52 - def is_space: Boolean = kind == Token_Kind.SPACE
60.53 - def is_comment: Boolean = kind == Token_Kind.COMMENT
60.54 - def is_ignored: Boolean = is_space || is_comment
60.55 - def is_unparsed: Boolean = kind == Token_Kind.UNPARSED
60.56 -
60.57 - def content: String =
60.58 - if (kind == Token_Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
60.59 - else if (kind == Token_Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
60.60 - else if (kind == Token_Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
60.61 - else if (kind == Token_Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
60.62 - else source
60.63 -
60.64 - def text: (String, String) =
60.65 - if (kind == Token_Kind.COMMAND && source == ";") ("terminator", "")
60.66 - else (kind.toString, source)
60.67 - }
60.68 -
60.69 -
60.70 - /* token reader */
60.71 -
60.72 - class Line_Position(val line: Int) extends scala.util.parsing.input.Position
60.73 - {
60.74 - def column = 0
60.75 - def lineContents = ""
60.76 - override def toString = line.toString
60.77 -
60.78 - def advance(token: Token): Line_Position =
60.79 - {
60.80 - var n = 0
60.81 - for (c <- token.content if c == '\n') n += 1
60.82 - if (n == 0) this else new Line_Position(line + n)
60.83 - }
60.84 - }
60.85 -
60.86 - abstract class Reader extends scala.util.parsing.input.Reader[Token]
60.87 -
60.88 - private class Token_Reader(tokens: List[Token], val pos: Line_Position) extends Reader
60.89 - {
60.90 - def first = tokens.head
60.91 - def rest = new Token_Reader(tokens.tail, pos.advance(first))
60.92 - def atEnd = tokens.isEmpty
60.93 - }
60.94 -
60.95 - def reader(tokens: List[Token]): Reader = new Token_Reader(tokens, new Line_Position(1))
60.96 -}
60.97 -
61.1 --- a/src/Pure/Isar/outer_syntax.ML Mon May 17 18:59:59 2010 -0700
61.2 +++ b/src/Pure/Isar/outer_syntax.ML Tue May 18 06:28:42 2010 -0700
61.3 @@ -25,7 +25,7 @@
61.4 val local_theory_to_proof: string -> string -> Keyword.T ->
61.5 (local_theory -> Proof.state) parser -> unit
61.6 val print_outer_syntax: unit -> unit
61.7 - val scan: Position.T -> string -> OuterLex.token list
61.8 + val scan: Position.T -> string -> Token.T list
61.9 val parse: Position.T -> string -> Toplevel.transition list
61.10 val process_file: Path.T -> theory -> theory
61.11 type isar
61.12 @@ -37,11 +37,6 @@
61.13 structure Outer_Syntax: OUTER_SYNTAX =
61.14 struct
61.15
61.16 -structure T = OuterLex;
61.17 -type 'a parser = 'a Parse.parser;
61.18 -
61.19 -
61.20 -
61.21 (** outer syntax **)
61.22
61.23 (* command parsers *)
61.24 @@ -171,17 +166,17 @@
61.25 fun toplevel_source term do_recover cmd src =
61.26 let
61.27 val no_terminator =
61.28 - Scan.unless Parse.semicolon (Scan.one (T.not_sync andf T.not_eof));
61.29 + Scan.unless Parse.semicolon (Scan.one (Token.not_sync andf Token.not_eof));
61.30 fun recover int =
61.31 (int, fn _ => Scan.prompt "recover# " (Scan.repeat no_terminator) >> K [NONE]);
61.32 in
61.33 src
61.34 - |> T.source_proper
61.35 - |> Source.source T.stopper
61.36 + |> Token.source_proper
61.37 + |> Source.source Token.stopper
61.38 (Scan.bulk (Parse.$$$ "--" -- Parse.!!! Parse.doc_source >> K NONE || Parse.not_eof >> SOME))
61.39 (Option.map recover do_recover)
61.40 |> Source.map_filter I
61.41 - |> Source.source T.stopper
61.42 + |> Source.source Token.stopper
61.43 (Scan.bulk (fn xs => Parse.!!! (parse_command term (cmd ())) xs))
61.44 (Option.map recover do_recover)
61.45 |> Source.map_filter I
61.46 @@ -193,13 +188,13 @@
61.47 fun scan pos str =
61.48 Source.of_string str
61.49 |> Symbol.source {do_recover = false}
61.50 - |> T.source {do_recover = SOME false} Keyword.get_lexicons pos
61.51 + |> Token.source {do_recover = SOME false} Keyword.get_lexicons pos
61.52 |> Source.exhaust;
61.53
61.54 fun parse pos str =
61.55 Source.of_string str
61.56 |> Symbol.source {do_recover = false}
61.57 - |> T.source {do_recover = SOME false} Keyword.get_lexicons pos
61.58 + |> Token.source {do_recover = SOME false} Keyword.get_lexicons pos
61.59 |> toplevel_source false NONE get_command
61.60 |> Source.exhaust;
61.61
61.62 @@ -222,7 +217,7 @@
61.63
61.64 type isar =
61.65 (Toplevel.transition, (Toplevel.transition option,
61.66 - (OuterLex.token, (OuterLex.token option, (OuterLex.token, (OuterLex.token,
61.67 + (Token.T, (Token.T option, (Token.T, (Token.T,
61.68 (Symbol_Pos.T, Position.T * (Symbol.symbol, (string, unit) Source.source)
61.69 Source.source) Source.source) Source.source) Source.source)
61.70 Source.source) Source.source) Source.source) Source.source;
61.71 @@ -230,7 +225,7 @@
61.72 fun isar term : isar =
61.73 Source.tty
61.74 |> Symbol.source {do_recover = true}
61.75 - |> T.source {do_recover = SOME true} Keyword.get_lexicons Position.none
61.76 + |> Token.source {do_recover = SOME true} Keyword.get_lexicons Position.none
61.77 |> toplevel_source term (SOME true) get_command;
61.78
61.79
61.80 @@ -297,6 +292,3 @@
61.81
61.82 end;
61.83
61.84 -(*legacy alias*)
61.85 -structure OuterSyntax = Outer_Syntax;
61.86 -
62.1 --- a/src/Pure/Isar/outer_syntax.scala Mon May 17 18:59:59 2010 -0700
62.2 +++ b/src/Pure/Isar/outer_syntax.scala Tue May 18 06:28:42 2010 -0700
62.3 @@ -39,7 +39,7 @@
62.4
62.5 /* tokenize */
62.6
62.7 - def scan(input: Reader[Char]): List[Outer_Lex.Token] =
62.8 + def scan(input: Reader[Char]): List[Token] =
62.9 {
62.10 import lexicon._
62.11
62.12 @@ -49,6 +49,6 @@
62.13 }
62.14 }
62.15
62.16 - def scan(input: CharSequence): List[Outer_Lex.Token] =
62.17 + def scan(input: CharSequence): List[Token] =
62.18 scan(new CharSequenceReader(input))
62.19 }
63.1 --- a/src/Pure/Isar/parse.ML Mon May 17 18:59:59 2010 -0700
63.2 +++ b/src/Pure/Isar/parse.ML Tue May 18 06:28:42 2010 -0700
63.3 @@ -6,17 +6,16 @@
63.4
63.5 signature PARSE =
63.6 sig
63.7 - type token = OuterLex.token
63.8 - type 'a parser = token list -> 'a * token list
63.9 - type 'a context_parser = Context.generic * token list -> 'a * (Context.generic * token list)
63.10 - val group: string -> (token list -> 'a) -> token list -> 'a
63.11 - val !!! : (token list -> 'a) -> token list -> 'a
63.12 - val !!!! : (token list -> 'a) -> token list -> 'a
63.13 + type 'a parser = Token.T list -> 'a * Token.T list
63.14 + type 'a context_parser = Context.generic * Token.T list -> 'a * (Context.generic * Token.T list)
63.15 + val group: string -> (Token.T list -> 'a) -> Token.T list -> 'a
63.16 + val !!! : (Token.T list -> 'a) -> Token.T list -> 'a
63.17 + val !!!! : (Token.T list -> 'a) -> Token.T list -> 'a
63.18 val triple1: ('a * 'b) * 'c -> 'a * 'b * 'c
63.19 val triple2: 'a * ('b * 'c) -> 'a * 'b * 'c
63.20 val triple_swap: ('a * 'b) * 'c -> ('a * 'c) * 'b
63.21 - val not_eof: token parser
63.22 - val position: (token list -> 'a * 'b) -> token list -> ('a * Position.T) * 'b
63.23 + val not_eof: Token.T parser
63.24 + val position: (Token.T list -> 'a * 'b) -> Token.T list -> ('a * Position.T) * 'b
63.25 val command: string parser
63.26 val keyword: string parser
63.27 val short_ident: string parser
63.28 @@ -98,11 +97,8 @@
63.29 structure Parse: PARSE =
63.30 struct
63.31
63.32 -structure T = OuterLex;
63.33 -type token = T.token;
63.34 -
63.35 -type 'a parser = token list -> 'a * token list;
63.36 -type 'a context_parser = Context.generic * token list -> 'a * (Context.generic * token list);
63.37 +type 'a parser = Token.T list -> 'a * Token.T list;
63.38 +type 'a context_parser = Context.generic * Token.T list -> 'a * (Context.generic * Token.T list);
63.39
63.40
63.41 (** error handling **)
63.42 @@ -112,9 +108,11 @@
63.43 fun fail_with s = Scan.fail_with
63.44 (fn [] => s ^ " expected (past end-of-file!)"
63.45 | (tok :: _) =>
63.46 - (case T.text_of tok of
63.47 - (txt, "") => s ^ " expected,\nbut " ^ txt ^ T.pos_of tok ^ " was found"
63.48 - | (txt1, txt2) => s ^ " expected,\nbut " ^ txt1 ^ T.pos_of tok ^ " was found:\n" ^ txt2));
63.49 + (case Token.text_of tok of
63.50 + (txt, "") =>
63.51 + s ^ " expected,\nbut " ^ txt ^ Token.pos_of tok ^ " was found"
63.52 + | (txt1, txt2) =>
63.53 + s ^ " expected,\nbut " ^ txt1 ^ Token.pos_of tok ^ " was found:\n" ^ txt2));
63.54
63.55 fun group s scan = scan || fail_with s;
63.56
63.57 @@ -124,7 +122,7 @@
63.58 fun cut kind scan =
63.59 let
63.60 fun get_pos [] = " (past end-of-file!)"
63.61 - | get_pos (tok :: _) = T.pos_of tok;
63.62 + | get_pos (tok :: _) = Token.pos_of tok;
63.63
63.64 fun err (toks, NONE) = kind ^ get_pos toks
63.65 | err (toks, SOME msg) =
63.66 @@ -149,42 +147,42 @@
63.67 (* tokens *)
63.68
63.69 fun RESET_VALUE atom = (*required for all primitive parsers*)
63.70 - Scan.ahead (Scan.one (K true)) -- atom >> (fn (arg, x) => (T.assign NONE arg; x));
63.71 + Scan.ahead (Scan.one (K true)) -- atom >> (fn (arg, x) => (Token.assign NONE arg; x));
63.72
63.73
63.74 -val not_eof = RESET_VALUE (Scan.one T.not_eof);
63.75 +val not_eof = RESET_VALUE (Scan.one Token.not_eof);
63.76
63.77 -fun position scan = (Scan.ahead not_eof >> T.position_of) -- scan >> Library.swap;
63.78 -fun source_position atom = Scan.ahead atom |-- not_eof >> T.source_position_of;
63.79 -fun inner_syntax atom = Scan.ahead atom |-- not_eof >> T.source_of;
63.80 +fun position scan = (Scan.ahead not_eof >> Token.position_of) -- scan >> Library.swap;
63.81 +fun source_position atom = Scan.ahead atom |-- not_eof >> Token.source_position_of;
63.82 +fun inner_syntax atom = Scan.ahead atom |-- not_eof >> Token.source_of;
63.83
63.84 fun kind k =
63.85 - group (T.str_of_kind k) (RESET_VALUE (Scan.one (T.is_kind k) >> T.content_of));
63.86 + group (Token.str_of_kind k) (RESET_VALUE (Scan.one (Token.is_kind k) >> Token.content_of));
63.87
63.88 -val command = kind T.Command;
63.89 -val keyword = kind T.Keyword;
63.90 -val short_ident = kind T.Ident;
63.91 -val long_ident = kind T.LongIdent;
63.92 -val sym_ident = kind T.SymIdent;
63.93 -val term_var = kind T.Var;
63.94 -val type_ident = kind T.TypeIdent;
63.95 -val type_var = kind T.TypeVar;
63.96 -val number = kind T.Nat;
63.97 -val string = kind T.String;
63.98 -val alt_string = kind T.AltString;
63.99 -val verbatim = kind T.Verbatim;
63.100 -val sync = kind T.Sync;
63.101 -val eof = kind T.EOF;
63.102 +val command = kind Token.Command;
63.103 +val keyword = kind Token.Keyword;
63.104 +val short_ident = kind Token.Ident;
63.105 +val long_ident = kind Token.LongIdent;
63.106 +val sym_ident = kind Token.SymIdent;
63.107 +val term_var = kind Token.Var;
63.108 +val type_ident = kind Token.TypeIdent;
63.109 +val type_var = kind Token.TypeVar;
63.110 +val number = kind Token.Nat;
63.111 +val string = kind Token.String;
63.112 +val alt_string = kind Token.AltString;
63.113 +val verbatim = kind Token.Verbatim;
63.114 +val sync = kind Token.Sync;
63.115 +val eof = kind Token.EOF;
63.116
63.117 -fun keyword_with pred = RESET_VALUE (Scan.one (T.keyword_with pred) >> T.content_of);
63.118 -val keyword_ident_or_symbolic = keyword_with T.ident_or_symbolic;
63.119 +fun keyword_with pred = RESET_VALUE (Scan.one (Token.keyword_with pred) >> Token.content_of);
63.120 +val keyword_ident_or_symbolic = keyword_with Token.ident_or_symbolic;
63.121
63.122 fun $$$ x =
63.123 - group (T.str_of_kind T.Keyword ^ " " ^ quote x) (keyword_with (fn y => x = y));
63.124 + group (Token.str_of_kind Token.Keyword ^ " " ^ quote x) (keyword_with (fn y => x = y));
63.125
63.126 fun reserved x =
63.127 group ("reserved identifier " ^ quote x)
63.128 - (RESET_VALUE (Scan.one (T.ident_with (fn y => x = y)) >> T.content_of));
63.129 + (RESET_VALUE (Scan.one (Token.ident_with (fn y => x = y)) >> Token.content_of));
63.130
63.131 val semicolon = $$$ ";";
63.132
63.133 @@ -341,6 +339,3 @@
63.134 type 'a parser = 'a Parse.parser;
63.135 type 'a context_parser = 'a Parse.context_parser;
63.136
63.137 -(*legacy alias*)
63.138 -structure OuterParse = Parse;
63.139 -
64.1 --- a/src/Pure/Isar/parse.scala Mon May 17 18:59:59 2010 -0700
64.2 +++ b/src/Pure/Isar/parse.scala Tue May 18 06:28:42 2010 -0700
64.3 @@ -15,7 +15,7 @@
64.4
64.5 trait Parser extends Parsers
64.6 {
64.7 - type Elem = Outer_Lex.Token
64.8 + type Elem = Token
64.9
64.10 def filter_proper = true
64.11
64.12 @@ -50,8 +50,8 @@
64.13 token(s, pred) ^^ (_.content)
64.14
64.15 def keyword(name: String): Parser[String] =
64.16 - atom(Outer_Lex.Token_Kind.KEYWORD.toString + " \"" + name + "\"",
64.17 - tok => tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == name)
64.18 + atom(Token.Kind.KEYWORD.toString + " \"" + name + "\"",
64.19 + tok => tok.kind == Token.Kind.KEYWORD && tok.content == name)
64.20
64.21 def name: Parser[String] = atom("name declaration", _.is_name)
64.22 def xname: Parser[String] = atom("name reference", _.is_xname)
64.23 @@ -62,16 +62,16 @@
64.24
64.25 private def tag_name: Parser[String] =
64.26 atom("tag name", tok =>
64.27 - tok.kind == Outer_Lex.Token_Kind.IDENT ||
64.28 - tok.kind == Outer_Lex.Token_Kind.STRING)
64.29 + tok.kind == Token.Kind.IDENT ||
64.30 + tok.kind == Token.Kind.STRING)
64.31
64.32 def tags: Parser[List[String]] = rep(keyword("%") ~> tag_name)
64.33
64.34
64.35 /* wrappers */
64.36
64.37 - def parse[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = p(in)
64.38 - def parse_all[T](p: Parser[T], in: Outer_Lex.Reader): ParseResult[T] = parse(phrase(p), in)
64.39 + def parse[T](p: Parser[T], in: Token.Reader): ParseResult[T] = p(in)
64.40 + def parse_all[T](p: Parser[T], in: Token.Reader): ParseResult[T] = parse(phrase(p), in)
64.41 }
64.42 }
64.43
65.1 --- a/src/Pure/Isar/parse_spec.ML Mon May 17 18:59:59 2010 -0700
65.2 +++ b/src/Pure/Isar/parse_spec.ML Tue May 18 06:28:42 2010 -0700
65.3 @@ -162,6 +162,3 @@
65.4
65.5 end;
65.6
65.7 -(*legacy alias*)
65.8 -structure SpecParse = Parse_Spec;
65.9 -
66.1 --- a/src/Pure/Isar/rule_insts.ML Mon May 17 18:59:59 2010 -0700
66.2 +++ b/src/Pure/Isar/rule_insts.ML Tue May 18 06:28:42 2010 -0700
66.3 @@ -29,9 +29,6 @@
66.4 structure RuleInsts: RULE_INSTS =
66.5 struct
66.6
66.7 -structure T = OuterLex;
66.8 -
66.9 -
66.10 (** reading instantiations **)
66.11
66.12 local
66.13 @@ -100,11 +97,11 @@
66.14
66.15 val (type_insts, term_insts) = List.partition (is_tvar o fst) mixed_insts;
66.16 val internal_insts = term_insts |> map_filter
66.17 - (fn (xi, T.Term t) => SOME (xi, t)
66.18 - | (_, T.Text _) => NONE
66.19 + (fn (xi, Token.Term t) => SOME (xi, t)
66.20 + | (_, Token.Text _) => NONE
66.21 | (xi, _) => error_var "Term argument expected for " xi);
66.22 val external_insts = term_insts |> map_filter
66.23 - (fn (xi, T.Text s) => SOME (xi, s) | _ => NONE);
66.24 + (fn (xi, Token.Text s) => SOME (xi, s) | _ => NONE);
66.25
66.26
66.27 (* mixed type instantiations *)
66.28 @@ -114,8 +111,8 @@
66.29 val S = the_sort tvars xi;
66.30 val T =
66.31 (case arg of
66.32 - T.Text s => Syntax.read_typ ctxt s
66.33 - | T.Typ T => T
66.34 + Token.Text s => Syntax.read_typ ctxt s
66.35 + | Token.Typ T => T
66.36 | _ => error_var "Type argument expected for " xi);
66.37 in
66.38 if Sign.of_sort thy (T, S) then ((xi, S), T)
66.39 @@ -172,9 +169,9 @@
66.40 val _ = (*assign internalized values*)
66.41 mixed_insts |> List.app (fn (arg, (xi, _)) =>
66.42 if is_tvar xi then
66.43 - T.assign (SOME (T.Typ (the (AList.lookup (op =) type_insts xi)))) arg
66.44 + Token.assign (SOME (Token.Typ (the (AList.lookup (op =) type_insts xi)))) arg
66.45 else
66.46 - T.assign (SOME (T.Term (the (AList.lookup (op =) term_insts xi)))) arg);
66.47 + Token.assign (SOME (Token.Term (the (AList.lookup (op =) term_insts xi)))) arg);
66.48 in
66.49 Drule.instantiate insts thm |> Rule_Cases.save thm
66.50 end;
66.51 @@ -197,7 +194,7 @@
66.52
66.53 fun read_instantiate ctxt args thm =
66.54 read_instantiate_mixed (ctxt |> ProofContext.set_mode ProofContext.mode_schematic) (* FIXME !? *)
66.55 - (map (fn (x, y) => (T.eof, (x, T.Text y))) args) thm;
66.56 + (map (fn (x, y) => (Token.eof, (x, Token.Text y))) args) thm;
66.57
66.58 fun instantiate_tac ctxt args = PRIMITIVE (read_instantiate ctxt args);
66.59
66.60 @@ -210,9 +207,9 @@
66.61 local
66.62
66.63 val value =
66.64 - Args.internal_typ >> T.Typ ||
66.65 - Args.internal_term >> T.Term ||
66.66 - Args.name_source >> T.Text;
66.67 + Args.internal_typ >> Token.Typ ||
66.68 + Args.internal_term >> Token.Term ||
66.69 + Args.name_source >> Token.Text;
66.70
66.71 val inst = Args.var -- (Args.$$$ "=" |-- Scan.ahead Parse.not_eof -- value)
66.72 >> (fn (xi, (a, v)) => (a, (xi, v)));
66.73 @@ -233,8 +230,8 @@
66.74 local
66.75
66.76 val value =
66.77 - Args.internal_term >> T.Term ||
66.78 - Args.name_source >> T.Text;
66.79 + Args.internal_term >> Token.Term ||
66.80 + Args.name_source >> Token.Text;
66.81
66.82 val inst = Scan.ahead Parse.not_eof -- Args.maybe value;
66.83 val concl = Args.$$$ "concl" -- Args.colon;
67.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
67.2 +++ b/src/Pure/Isar/token.ML Tue May 18 06:28:42 2010 -0700
67.3 @@ -0,0 +1,389 @@
67.4 +(* Title: Pure/Isar/token.ML
67.5 + Author: Markus Wenzel, TU Muenchen
67.6 +
67.7 +Outer token syntax for Isabelle/Isar.
67.8 +*)
67.9 +
67.10 +signature TOKEN =
67.11 +sig
67.12 + datatype kind =
67.13 + Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
67.14 + Nat | String | AltString | Verbatim | Space | Comment | InternalValue |
67.15 + Malformed | Error of string | Sync | EOF
67.16 + datatype value =
67.17 + Text of string | Typ of typ | Term of term | Fact of thm list |
67.18 + Attribute of morphism -> attribute
67.19 + type T
67.20 + val str_of_kind: kind -> string
67.21 + val position_of: T -> Position.T
67.22 + val end_position_of: T -> Position.T
67.23 + val pos_of: T -> string
67.24 + val eof: T
67.25 + val is_eof: T -> bool
67.26 + val not_eof: T -> bool
67.27 + val not_sync: T -> bool
67.28 + val stopper: T Scan.stopper
67.29 + val kind_of: T -> kind
67.30 + val is_kind: kind -> T -> bool
67.31 + val keyword_with: (string -> bool) -> T -> bool
67.32 + val ident_with: (string -> bool) -> T -> bool
67.33 + val is_proper: T -> bool
67.34 + val is_semicolon: T -> bool
67.35 + val is_comment: T -> bool
67.36 + val is_begin_ignore: T -> bool
67.37 + val is_end_ignore: T -> bool
67.38 + val is_blank: T -> bool
67.39 + val is_newline: T -> bool
67.40 + val source_of: T -> string
67.41 + val source_position_of: T -> Symbol_Pos.text * Position.T
67.42 + val content_of: T -> string
67.43 + val unparse: T -> string
67.44 + val text_of: T -> string * string
67.45 + val get_value: T -> value option
67.46 + val map_value: (value -> value) -> T -> T
67.47 + val mk_text: string -> T
67.48 + val mk_typ: typ -> T
67.49 + val mk_term: term -> T
67.50 + val mk_fact: thm list -> T
67.51 + val mk_attribute: (morphism -> attribute) -> T
67.52 + val assignable: T -> T
67.53 + val assign: value option -> T -> unit
67.54 + val closure: T -> T
67.55 + val ident_or_symbolic: string -> bool
67.56 + val !!! : string -> (Symbol_Pos.T list -> 'a) -> Symbol_Pos.T list -> 'a
67.57 + val source_proper: (T, 'a) Source.source -> (T, (T, 'a) Source.source) Source.source
67.58 + val source': {do_recover: bool Option.option} -> (unit -> Scan.lexicon * Scan.lexicon) ->
67.59 + (Symbol_Pos.T, 'a) Source.source -> (T, (Symbol_Pos.T, 'a) Source.source) Source.source
67.60 + val source: {do_recover: bool Option.option} -> (unit -> Scan.lexicon * Scan.lexicon) ->
67.61 + Position.T -> (Symbol.symbol, 'a) Source.source -> (T,
67.62 + (Symbol_Pos.T, Position.T * (Symbol.symbol, 'a) Source.source) Source.source) Source.source
67.63 + val read_antiq: Scan.lexicon -> (T list -> 'a * T list) -> Symbol_Pos.T list * Position.T -> 'a
67.64 +end;
67.65 +
67.66 +structure Token: TOKEN =
67.67 +struct
67.68 +
67.69 +(** tokens **)
67.70 +
67.71 +(* token values *)
67.72 +
67.73 +(*The value slot assigns an (optional) internal value to a token,
67.74 + usually as a side-effect of special scanner setup (see also
67.75 + args.ML). Note that an assignable ref designates an intermediate
67.76 + state of internalization -- it is NOT meant to persist.*)
67.77 +
67.78 +datatype value =
67.79 + Text of string |
67.80 + Typ of typ |
67.81 + Term of term |
67.82 + Fact of thm list |
67.83 + Attribute of morphism -> attribute;
67.84 +
67.85 +datatype slot =
67.86 + Slot |
67.87 + Value of value option |
67.88 + Assignable of value option Unsynchronized.ref;
67.89 +
67.90 +
67.91 +(* datatype token *)
67.92 +
67.93 +datatype kind =
67.94 + Command | Keyword | Ident | LongIdent | SymIdent | Var | TypeIdent | TypeVar |
67.95 + Nat | String | AltString | Verbatim | Space | Comment | InternalValue |
67.96 + Malformed | Error of string | Sync | EOF;
67.97 +
67.98 +datatype T = Token of (Symbol_Pos.text * Position.range) * (kind * string) * slot;
67.99 +
67.100 +val str_of_kind =
67.101 + fn Command => "command"
67.102 + | Keyword => "keyword"
67.103 + | Ident => "identifier"
67.104 + | LongIdent => "long identifier"
67.105 + | SymIdent => "symbolic identifier"
67.106 + | Var => "schematic variable"
67.107 + | TypeIdent => "type variable"
67.108 + | TypeVar => "schematic type variable"
67.109 + | Nat => "number"
67.110 + | String => "string"
67.111 + | AltString => "back-quoted string"
67.112 + | Verbatim => "verbatim text"
67.113 + | Space => "white space"
67.114 + | Comment => "comment text"
67.115 + | InternalValue => "internal value"
67.116 + | Malformed => "malformed symbolic character"
67.117 + | Error _ => "bad input"
67.118 + | Sync => "sync marker"
67.119 + | EOF => "end-of-file";
67.120 +
67.121 +
67.122 +(* position *)
67.123 +
67.124 +fun position_of (Token ((_, (pos, _)), _, _)) = pos;
67.125 +fun end_position_of (Token ((_, (_, pos)), _, _)) = pos;
67.126 +
67.127 +val pos_of = Position.str_of o position_of;
67.128 +
67.129 +
67.130 +(* control tokens *)
67.131 +
67.132 +fun mk_eof pos = Token (("", (pos, Position.none)), (EOF, ""), Slot);
67.133 +val eof = mk_eof Position.none;
67.134 +
67.135 +fun is_eof (Token (_, (EOF, _), _)) = true
67.136 + | is_eof _ = false;
67.137 +
67.138 +val not_eof = not o is_eof;
67.139 +
67.140 +fun not_sync (Token (_, (Sync, _), _)) = false
67.141 + | not_sync _ = true;
67.142 +
67.143 +val stopper =
67.144 + Scan.stopper (fn [] => eof | toks => mk_eof (end_position_of (List.last toks))) is_eof;
67.145 +
67.146 +
67.147 +(* kind of token *)
67.148 +
67.149 +fun kind_of (Token (_, (k, _), _)) = k;
67.150 +fun is_kind k (Token (_, (k', _), _)) = k = k';
67.151 +
67.152 +fun keyword_with pred (Token (_, (Keyword, x), _)) = pred x
67.153 + | keyword_with _ _ = false;
67.154 +
67.155 +fun ident_with pred (Token (_, (Ident, x), _)) = pred x
67.156 + | ident_with _ _ = false;
67.157 +
67.158 +fun is_proper (Token (_, (Space, _), _)) = false
67.159 + | is_proper (Token (_, (Comment, _), _)) = false
67.160 + | is_proper _ = true;
67.161 +
67.162 +fun is_semicolon (Token (_, (Keyword, ";"), _)) = true
67.163 + | is_semicolon _ = false;
67.164 +
67.165 +fun is_comment (Token (_, (Comment, _), _)) = true
67.166 + | is_comment _ = false;
67.167 +
67.168 +fun is_begin_ignore (Token (_, (Comment, "<"), _)) = true
67.169 + | is_begin_ignore _ = false;
67.170 +
67.171 +fun is_end_ignore (Token (_, (Comment, ">"), _)) = true
67.172 + | is_end_ignore _ = false;
67.173 +
67.174 +
67.175 +(* blanks and newlines -- space tokens obey lines *)
67.176 +
67.177 +fun is_blank (Token (_, (Space, x), _)) = not (String.isSuffix "\n" x)
67.178 + | is_blank _ = false;
67.179 +
67.180 +fun is_newline (Token (_, (Space, x), _)) = String.isSuffix "\n" x
67.181 + | is_newline _ = false;
67.182 +
67.183 +
67.184 +(* token content *)
67.185 +
67.186 +fun source_of (Token ((source, (pos, _)), _, _)) =
67.187 + YXML.string_of (XML.Elem (Markup.tokenN, Position.properties_of pos, [XML.Text source]));
67.188 +
67.189 +fun source_position_of (Token ((source, (pos, _)), _, _)) = (source, pos);
67.190 +
67.191 +fun content_of (Token (_, (_, x), _)) = x;
67.192 +
67.193 +
67.194 +(* unparse *)
67.195 +
67.196 +fun escape q =
67.197 + implode o map (fn s => if s = q orelse s = "\\" then "\\" ^ s else s) o Symbol.explode;
67.198 +
67.199 +fun unparse (Token (_, (kind, x), _)) =
67.200 + (case kind of
67.201 + String => x |> quote o escape "\""
67.202 + | AltString => x |> enclose "`" "`" o escape "`"
67.203 + | Verbatim => x |> enclose "{*" "*}"
67.204 + | Comment => x |> enclose "(*" "*)"
67.205 + | Malformed => space_implode " " (explode x)
67.206 + | Sync => ""
67.207 + | EOF => ""
67.208 + | _ => x);
67.209 +
67.210 +fun text_of tok =
67.211 + if is_semicolon tok then ("terminator", "")
67.212 + else
67.213 + let
67.214 + val k = str_of_kind (kind_of tok);
67.215 + val s = unparse tok
67.216 + handle ERROR _ => Symbol.separate_chars (content_of tok);
67.217 + in
67.218 + if s = "" then (k, "")
67.219 + else if size s < 40 andalso not (exists_string (fn c => c = "\n") s) then (k ^ " " ^ s, "")
67.220 + else (k, s)
67.221 + end;
67.222 +
67.223 +
67.224 +
67.225 +(** associated values **)
67.226 +
67.227 +(* access values *)
67.228 +
67.229 +fun get_value (Token (_, _, Value v)) = v
67.230 + | get_value _ = NONE;
67.231 +
67.232 +fun map_value f (Token (x, y, Value (SOME v))) = Token (x, y, Value (SOME (f v)))
67.233 + | map_value _ tok = tok;
67.234 +
67.235 +
67.236 +(* make values *)
67.237 +
67.238 +fun mk_value k v = Token ((k, Position.no_range), (InternalValue, k), Value (SOME v));
67.239 +
67.240 +val mk_text = mk_value "<text>" o Text;
67.241 +val mk_typ = mk_value "<typ>" o Typ;
67.242 +val mk_term = mk_value "<term>" o Term;
67.243 +val mk_fact = mk_value "<fact>" o Fact;
67.244 +val mk_attribute = mk_value "<attribute>" o Attribute;
67.245 +
67.246 +
67.247 +(* static binding *)
67.248 +
67.249 +(*1st stage: make empty slots assignable*)
67.250 +fun assignable (Token (x, y, Slot)) = Token (x, y, Assignable (Unsynchronized.ref NONE))
67.251 + | assignable tok = tok;
67.252 +
67.253 +(*2nd stage: assign values as side-effect of scanning*)
67.254 +fun assign v (Token (_, _, Assignable r)) = r := v
67.255 + | assign _ _ = ();
67.256 +
67.257 +(*3rd stage: static closure of final values*)
67.258 +fun closure (Token (x, y, Assignable (Unsynchronized.ref v))) = Token (x, y, Value v)
67.259 + | closure tok = tok;
67.260 +
67.261 +
67.262 +
67.263 +(** scanners **)
67.264 +
67.265 +open Basic_Symbol_Pos;
67.266 +
67.267 +fun !!! msg = Symbol_Pos.!!! ("Outer lexical error: " ^ msg);
67.268 +
67.269 +
67.270 +(* scan symbolic idents *)
67.271 +
67.272 +val is_sym_char = member (op =) (explode "!#$%&*+-/<=>?@^_|~");
67.273 +
67.274 +val scan_symid =
67.275 + Scan.many1 (is_sym_char o symbol) ||
67.276 + Scan.one (Symbol.is_symbolic o symbol) >> single;
67.277 +
67.278 +fun is_symid str =
67.279 + (case try Symbol.explode str of
67.280 + SOME [s] => Symbol.is_symbolic s orelse is_sym_char s
67.281 + | SOME ss => forall is_sym_char ss
67.282 + | _ => false);
67.283 +
67.284 +fun ident_or_symbolic "begin" = false
67.285 + | ident_or_symbolic ":" = true
67.286 + | ident_or_symbolic "::" = true
67.287 + | ident_or_symbolic s = Syntax.is_identifier s orelse is_symid s;
67.288 +
67.289 +
67.290 +(* scan verbatim text *)
67.291 +
67.292 +val scan_verb =
67.293 + $$$ "*" --| Scan.ahead (~$$$ "}") ||
67.294 + Scan.one (fn (s, _) => s <> "*" andalso Symbol.is_regular s) >> single;
67.295 +
67.296 +val scan_verbatim =
67.297 + (Symbol_Pos.scan_pos --| $$$ "{" --| $$$ "*") -- !!! "missing end of verbatim text"
67.298 + (Symbol_Pos.change_prompt
67.299 + ((Scan.repeat scan_verb >> flat) -- ($$$ "*" |-- $$$ "}" |-- Symbol_Pos.scan_pos)));
67.300 +
67.301 +
67.302 +(* scan space *)
67.303 +
67.304 +fun is_space s = Symbol.is_blank s andalso s <> "\n";
67.305 +
67.306 +val scan_space =
67.307 + Scan.many1 (is_space o symbol) @@@ Scan.optional ($$$ "\n") [] ||
67.308 + Scan.many (is_space o symbol) @@@ $$$ "\n";
67.309 +
67.310 +
67.311 +(* scan comment *)
67.312 +
67.313 +val scan_comment =
67.314 + Symbol_Pos.scan_pos -- (Symbol_Pos.scan_comment_body !!! -- Symbol_Pos.scan_pos);
67.315 +
67.316 +
67.317 +(* scan malformed symbols *)
67.318 +
67.319 +val scan_malformed =
67.320 + $$$ Symbol.malformed |--
67.321 + Symbol_Pos.change_prompt (Scan.many (Symbol.is_regular o symbol))
67.322 + --| Scan.option ($$$ Symbol.end_malformed);
67.323 +
67.324 +
67.325 +
67.326 +(** token sources **)
67.327 +
67.328 +fun source_proper src = src |> Source.filter is_proper;
67.329 +
67.330 +local
67.331 +
67.332 +fun token_leq ((_, syms1), (_, syms2)) = length syms1 <= length syms2;
67.333 +
67.334 +fun token k ss =
67.335 + Token ((Symbol_Pos.implode ss, Symbol_Pos.range ss), (k, Symbol_Pos.untabify_content ss), Slot);
67.336 +
67.337 +fun token_range k (pos1, (ss, pos2)) =
67.338 + Token (Symbol_Pos.implode_range pos1 pos2 ss, (k, Symbol_Pos.untabify_content ss), Slot);
67.339 +
67.340 +fun scan (lex1, lex2) = !!! "bad input"
67.341 + (Symbol_Pos.scan_string >> token_range String ||
67.342 + Symbol_Pos.scan_alt_string >> token_range AltString ||
67.343 + scan_verbatim >> token_range Verbatim ||
67.344 + scan_comment >> token_range Comment ||
67.345 + scan_space >> token Space ||
67.346 + scan_malformed >> token Malformed ||
67.347 + Scan.one (Symbol.is_sync o symbol) >> (token Sync o single) ||
67.348 + (Scan.max token_leq
67.349 + (Scan.max token_leq
67.350 + (Scan.literal lex2 >> pair Command)
67.351 + (Scan.literal lex1 >> pair Keyword))
67.352 + (Syntax.scan_longid >> pair LongIdent ||
67.353 + Syntax.scan_id >> pair Ident ||
67.354 + Syntax.scan_var >> pair Var ||
67.355 + Syntax.scan_tid >> pair TypeIdent ||
67.356 + Syntax.scan_tvar >> pair TypeVar ||
67.357 + Syntax.scan_nat >> pair Nat ||
67.358 + scan_symid >> pair SymIdent) >> uncurry token));
67.359 +
67.360 +fun recover msg =
67.361 + Scan.many ((Symbol.is_regular andf (not o Symbol.is_blank)) o symbol)
67.362 + >> (single o token (Error msg));
67.363 +
67.364 +in
67.365 +
67.366 +fun source' {do_recover} get_lex =
67.367 + Source.source Symbol_Pos.stopper (Scan.bulk (fn xs => scan (get_lex ()) xs))
67.368 + (Option.map (rpair recover) do_recover);
67.369 +
67.370 +fun source do_recover get_lex pos src =
67.371 + Symbol_Pos.source pos src
67.372 + |> source' do_recover get_lex;
67.373 +
67.374 +end;
67.375 +
67.376 +
67.377 +(* read_antiq *)
67.378 +
67.379 +fun read_antiq lex scan (syms, pos) =
67.380 + let
67.381 + fun err msg = cat_error msg ("Malformed antiquotation" ^ Position.str_of pos ^ ":\n" ^
67.382 + "@{" ^ Symbol_Pos.content syms ^ "}");
67.383 +
67.384 + val res =
67.385 + Source.of_list syms
67.386 + |> source' {do_recover = NONE} (K (lex, Scan.empty_lexicon))
67.387 + |> source_proper
67.388 + |> Source.source stopper (Scan.error (Scan.bulk scan)) NONE
67.389 + |> Source.exhaust;
67.390 + in (case res of [x] => x | _ => err "") handle ERROR msg => err msg end;
67.391 +
67.392 +end;
68.1 --- /dev/null Thu Jan 01 00:00:00 1970 +0000
68.2 +++ b/src/Pure/Isar/token.scala Tue May 18 06:28:42 2010 -0700
68.3 @@ -0,0 +1,95 @@
68.4 +/* Title: Pure/Isar/token.scala
68.5 + Author: Makarius
68.6 +
68.7 +Outer token syntax for Isabelle/Isar.
68.8 +*/
68.9 +
68.10 +package isabelle
68.11 +
68.12 +
68.13 +object Token
68.14 +{
68.15 + /* tokens */
68.16 +
68.17 + object Kind extends Enumeration
68.18 + {
68.19 + val COMMAND = Value("command")
68.20 + val KEYWORD = Value("keyword")
68.21 + val IDENT = Value("identifier")
68.22 + val LONG_IDENT = Value("long identifier")
68.23 + val SYM_IDENT = Value("symbolic identifier")
68.24 + val VAR = Value("schematic variable")
68.25 + val TYPE_IDENT = Value("type variable")
68.26 + val TYPE_VAR = Value("schematic type variable")
68.27 + val NAT = Value("number")
68.28 + val STRING = Value("string")
68.29 + val ALT_STRING = Value("back-quoted string")
68.30 + val VERBATIM = Value("verbatim text")
68.31 + val SPACE = Value("white space")
68.32 + val COMMENT = Value("comment text")
68.33 + val BAD_INPUT = Value("bad input")
68.34 + val UNPARSED = Value("unparsed input")
68.35 + }
68.36 +
68.37 +
68.38 + /* token reader */
68.39 +
68.40 + class Line_Position(val line: Int) extends scala.util.parsing.input.Position
68.41 + {
68.42 + def column = 0
68.43 + def lineContents = ""
68.44 + override def toString = line.toString
68.45 +
68.46 + def advance(token: Token): Line_Position =
68.47 + {
68.48 + var n = 0
68.49 + for (c <- token.content if c == '\n') n += 1
68.50 + if (n == 0) this else new Line_Position(line + n)
68.51 + }
68.52 + }
68.53 +
68.54 + abstract class Reader extends scala.util.parsing.input.Reader[Token]
68.55 +
68.56 + private class Token_Reader(tokens: List[Token], val pos: Line_Position) extends Reader
68.57 + {
68.58 + def first = tokens.head
68.59 + def rest = new Token_Reader(tokens.tail, pos.advance(first))
68.60 + def atEnd = tokens.isEmpty
68.61 + }
68.62 +
68.63 + def reader(tokens: List[Token]): Reader = new Token_Reader(tokens, new Line_Position(1))
68.64 +}
68.65 +
68.66 +
68.67 +sealed case class Token(val kind: Token.Kind.Value, val source: String)
68.68 +{
68.69 + def is_command: Boolean = kind == Token.Kind.COMMAND
68.70 + def is_delimited: Boolean =
68.71 + kind == Token.Kind.STRING ||
68.72 + kind == Token.Kind.ALT_STRING ||
68.73 + kind == Token.Kind.VERBATIM ||
68.74 + kind == Token.Kind.COMMENT
68.75 + def is_name: Boolean =
68.76 + kind == Token.Kind.IDENT ||
68.77 + kind == Token.Kind.SYM_IDENT ||
68.78 + kind == Token.Kind.STRING ||
68.79 + kind == Token.Kind.NAT
68.80 + def is_xname: Boolean = is_name || kind == Token.Kind.LONG_IDENT
68.81 + def is_text: Boolean = is_xname || kind == Token.Kind.VERBATIM
68.82 + def is_space: Boolean = kind == Token.Kind.SPACE
68.83 + def is_comment: Boolean = kind == Token.Kind.COMMENT
68.84 + def is_ignored: Boolean = is_space || is_comment
68.85 + def is_unparsed: Boolean = kind == Token.Kind.UNPARSED
68.86 +
68.87 + def content: String =
68.88 + if (kind == Token.Kind.STRING) Scan.Lexicon.empty.quoted_content("\"", source)
68.89 + else if (kind == Token.Kind.ALT_STRING) Scan.Lexicon.empty.quoted_content("`", source)
68.90 + else if (kind == Token.Kind.VERBATIM) Scan.Lexicon.empty.verbatim_content(source)
68.91 + else if (kind == Token.Kind.COMMENT) Scan.Lexicon.empty.comment_content(source)
68.92 + else source
68.93 +
68.94 + def text: (String, String) =
68.95 + if (kind == Token.Kind.COMMAND && source == ";") ("terminator", "")
68.96 + else (kind.toString, source)
68.97 +}
68.98 +
69.1 --- a/src/Pure/ML/ml_context.ML Mon May 17 18:59:59 2010 -0700
69.2 +++ b/src/Pure/ML/ml_context.ML Tue May 18 06:28:42 2010 -0700
69.3 @@ -112,8 +112,6 @@
69.4
69.5 local
69.6
69.7 -structure T = OuterLex;
69.8 -
69.9 val antiq =
69.10 Parse.!!! (Parse.position Parse.xname -- Args.parse --| Scan.ahead Parse.eof)
69.11 >> (fn ((x, pos), y) => Args.src ((x, y), pos));
69.12 @@ -144,7 +142,8 @@
69.13 | expand (Antiquote.Antiq (ss, range)) (scope, background) =
69.14 let
69.15 val context = Stack.top scope;
69.16 - val (f, context') = antiquotation (T.read_antiq lex antiq (ss, #1 range)) context;
69.17 + val (f, context') =
69.18 + antiquotation (Token.read_antiq lex antiq (ss, #1 range)) context;
69.19 val (decl, background') = f background;
69.20 val decl' = decl #> pairself (ML_Lex.tokenize #> map (ML_Lex.set_range range));
69.21 in (decl', (Stack.map_top (K context') scope, background')) end
70.1 --- a/src/Pure/PIDE/document.scala Mon May 17 18:59:59 2010 -0700
70.2 +++ b/src/Pure/PIDE/document.scala Tue May 18 06:28:42 2010 -0700
70.3 @@ -46,7 +46,7 @@
70.4 /* unparsed dummy commands */
70.5
70.6 def unparsed(source: String) =
70.7 - new Command(null, List(Outer_Lex.Token(Outer_Lex.Token_Kind.UNPARSED, source)))
70.8 + new Command(null, List(Token(Token.Kind.UNPARSED, source)))
70.9
70.10 def is_unparsed(command: Command) = command.id == null
70.11
71.1 --- a/src/Pure/ROOT.ML Mon May 17 18:59:59 2010 -0700
71.2 +++ b/src/Pure/ROOT.ML Tue May 18 06:28:42 2010 -0700
71.3 @@ -167,7 +167,7 @@
71.4 use "Proof/proofchecker.ML";
71.5
71.6 (*outer syntax*)
71.7 -use "Isar/outer_lex.ML";
71.8 +use "Isar/token.ML";
71.9 use "Isar/keyword.ML";
71.10 use "Isar/parse.ML";
71.11 use "Isar/parse_value.ML";
71.12 @@ -292,3 +292,23 @@
71.13
71.14 use "pure_setup.ML";
71.15
71.16 +
71.17 +(* legacy aliases *)
71.18 +
71.19 +structure Legacy =
71.20 +struct
71.21 +
71.22 +structure OuterKeyword = Keyword;
71.23 +
71.24 +structure OuterLex =
71.25 +struct
71.26 + open Token;
71.27 + type token = T;
71.28 +end;
71.29 +
71.30 +structure OuterParse = Parse;
71.31 +structure OuterSyntax = Outer_Syntax;
71.32 +structure SpecParse = Parse_Spec;
71.33 +
71.34 +end;
71.35 +
72.1 --- a/src/Pure/Thy/latex.ML Mon May 17 18:59:59 2010 -0700
72.2 +++ b/src/Pure/Thy/latex.ML Tue May 18 06:28:42 2010 -0700
72.3 @@ -9,7 +9,7 @@
72.4 val output_known_symbols: (string -> bool) * (string -> bool) ->
72.5 Symbol.symbol list -> string
72.6 val output_symbols: Symbol.symbol list -> string
72.7 - val output_basic: OuterLex.token -> string
72.8 + val output_basic: Token.T -> string
72.9 val output_markup: string -> string -> string
72.10 val output_markup_env: string -> string -> string
72.11 val output_verbatim: string -> string
72.12 @@ -99,24 +99,22 @@
72.13
72.14 (* token output *)
72.15
72.16 -structure T = OuterLex;
72.17 -
72.18 -val invisible_token = T.keyword_with (fn s => s = ";") orf T.is_kind T.Comment;
72.19 +val invisible_token = Token.keyword_with (fn s => s = ";") orf Token.is_kind Token.Comment;
72.20
72.21 fun output_basic tok =
72.22 - let val s = T.content_of tok in
72.23 + let val s = Token.content_of tok in
72.24 if invisible_token tok then ""
72.25 - else if T.is_kind T.Command tok then
72.26 + else if Token.is_kind Token.Command tok then
72.27 "\\isacommand{" ^ output_syms s ^ "}"
72.28 - else if T.is_kind T.Keyword tok andalso Syntax.is_ascii_identifier s then
72.29 + else if Token.is_kind Token.Keyword tok andalso Syntax.is_ascii_identifier s then
72.30 "\\isakeyword{" ^ output_syms s ^ "}"
72.31 - else if T.is_kind T.String tok then
72.32 + else if Token.is_kind Token.String tok then
72.33 enclose "{\\isachardoublequoteopen}" "{\\isachardoublequoteclose}" (output_syms s)
72.34 - else if T.is_kind T.AltString tok then
72.35 + else if Token.is_kind Token.AltString tok then
72.36 enclose "{\\isacharbackquoteopen}" "{\\isacharbackquoteclose}" (output_syms s)
72.37 - else if T.is_kind T.Verbatim tok then
72.38 + else if Token.is_kind Token.Verbatim tok then
72.39 let
72.40 - val (txt, pos) = T.source_position_of tok;
72.41 + val (txt, pos) = Token.source_position_of tok;
72.42 val ants = Antiquote.read (Symbol_Pos.explode (txt, pos), pos);
72.43 val out = implode (map output_syms_antiq ants);
72.44 in enclose "{\\isacharverbatimopen}" "{\\isacharverbatimclose}" out end
73.1 --- a/src/Pure/Thy/thy_header.ML Mon May 17 18:59:59 2010 -0700
73.2 +++ b/src/Pure/Thy/thy_header.ML Tue May 18 06:28:42 2010 -0700
73.3 @@ -6,17 +6,13 @@
73.4
73.5 signature THY_HEADER =
73.6 sig
73.7 - val args: OuterLex.token list ->
73.8 - (string * string list * (string * bool) list) * OuterLex.token list
73.9 + val args: Token.T list -> (string * string list * (string * bool) list) * Token.T list
73.10 val read: Position.T -> (string, 'a) Source.source -> string * string list * (string * bool) list
73.11 end;
73.12
73.13 structure Thy_Header: THY_HEADER =
73.14 struct
73.15
73.16 -structure T = OuterLex;
73.17 -
73.18 -
73.19 (* keywords *)
73.20
73.21 val headerN = "header";
73.22 @@ -58,9 +54,9 @@
73.23 let val res =
73.24 src
73.25 |> Symbol.source {do_recover = false}
73.26 - |> T.source {do_recover = NONE} (fn () => (header_lexicon, Scan.empty_lexicon)) pos
73.27 - |> T.source_proper
73.28 - |> Source.source T.stopper (Scan.single (Scan.error (Parse.!!! header))) NONE
73.29 + |> Token.source {do_recover = NONE} (fn () => (header_lexicon, Scan.empty_lexicon)) pos
73.30 + |> Token.source_proper
73.31 + |> Source.source Token.stopper (Scan.single (Scan.error (Parse.!!! header))) NONE
73.32 |> Source.get_single;
73.33 in
73.34 (case res of SOME (x, _) => x
74.1 --- a/src/Pure/Thy/thy_header.scala Mon May 17 18:59:59 2010 -0700
74.2 +++ b/src/Pure/Thy/thy_header.scala Tue May 18 06:28:42 2010 -0700
74.3 @@ -59,14 +59,14 @@
74.4 def read(file: File): Header =
74.5 {
74.6 val token = lexicon.token(symbols, _ => false)
74.7 - val toks = new mutable.ListBuffer[Outer_Lex.Token]
74.8 + val toks = new mutable.ListBuffer[Token]
74.9
74.10 def scan_to_begin(in: Reader[Char])
74.11 {
74.12 token(in) match {
74.13 case lexicon.Success(tok, rest) =>
74.14 toks += tok
74.15 - if (!(tok.kind == Outer_Lex.Token_Kind.KEYWORD && tok.content == BEGIN))
74.16 + if (!(tok.kind == Token.Kind.KEYWORD && tok.content == BEGIN))
74.17 scan_to_begin(rest)
74.18 case _ =>
74.19 }
74.20 @@ -74,7 +74,7 @@
74.21 val reader = Scan.byte_reader(file)
74.22 try { scan_to_begin(reader) } finally { reader.close }
74.23
74.24 - parse(commit(header), Outer_Lex.reader(toks.toList)) match {
74.25 + parse(commit(header), Token.reader(toks.toList)) match {
74.26 case Success(result, _) => result
74.27 case bad => error(bad.toString)
74.28 }
75.1 --- a/src/Pure/Thy/thy_output.ML Mon May 17 18:59:59 2010 -0700
75.2 +++ b/src/Pure/Thy/thy_output.ML Tue May 18 06:28:42 2010 -0700
75.3 @@ -24,7 +24,7 @@
75.4 val modes: string list Unsynchronized.ref
75.5 val eval_antiquote: Scan.lexicon -> Toplevel.state -> Symbol_Pos.text * Position.T -> string
75.6 val present_thy: Scan.lexicon -> (string -> string list) -> (markup -> string -> bool) ->
75.7 - (Toplevel.transition * Toplevel.state) list -> (OuterLex.token, 'a) Source.source -> Buffer.T
75.8 + (Toplevel.transition * Toplevel.state) list -> (Token.T, 'a) Source.source -> Buffer.T
75.9 val pretty_text: string -> Pretty.T
75.10 val pretty_term: Proof.context -> term -> Pretty.T
75.11 val pretty_thm: Proof.context -> thm -> Pretty.T
75.12 @@ -36,9 +36,6 @@
75.13 structure ThyOutput: THY_OUTPUT =
75.14 struct
75.15
75.16 -structure T = OuterLex;
75.17 -
75.18 -
75.19 (** global options **)
75.20
75.21 val display = Unsynchronized.ref false;
75.22 @@ -154,7 +151,7 @@
75.23 let
75.24 fun expand (Antiquote.Text ss) = Symbol_Pos.content ss
75.25 | expand (Antiquote.Antiq (ss, (pos, _))) =
75.26 - let val (opts, src) = T.read_antiq lex antiq (ss, pos) in
75.27 + let val (opts, src) = Token.read_antiq lex antiq (ss, pos) in
75.28 options opts (fn () => command src state) (); (*preview errors!*)
75.29 PrintMode.with_modes (! modes @ Latex.modes)
75.30 (Output.no_warnings_CRITICAL (options opts (fn () => command src state))) ()
75.31 @@ -178,7 +175,7 @@
75.32
75.33 datatype token =
75.34 NoToken
75.35 - | BasicToken of T.token
75.36 + | BasicToken of Token.T
75.37 | MarkupToken of string * (string * Position.T)
75.38 | MarkupEnvToken of string * (string * Position.T)
75.39 | VerbatimToken of string * Position.T;
75.40 @@ -195,10 +192,10 @@
75.41 fun basic_token pred (BasicToken tok) = pred tok
75.42 | basic_token _ _ = false;
75.43
75.44 -val improper_token = basic_token (not o T.is_proper);
75.45 -val comment_token = basic_token T.is_comment;
75.46 -val blank_token = basic_token T.is_blank;
75.47 -val newline_token = basic_token T.is_newline;
75.48 +val improper_token = basic_token (not o Token.is_proper);
75.49 +val comment_token = basic_token Token.is_comment;
75.50 +val blank_token = basic_token Token.is_blank;
75.51 +val newline_token = basic_token Token.is_newline;
75.52
75.53
75.54 (* command spans *)
75.55 @@ -303,19 +300,19 @@
75.56 local
75.57
75.58 val space_proper =
75.59 - Scan.one T.is_blank -- Scan.many T.is_comment -- Scan.one T.is_proper;
75.60 + Scan.one Token.is_blank -- Scan.many Token.is_comment -- Scan.one Token.is_proper;
75.61
75.62 -val is_improper = not o (T.is_proper orf T.is_begin_ignore orf T.is_end_ignore);
75.63 +val is_improper = not o (Token.is_proper orf Token.is_begin_ignore orf Token.is_end_ignore);
75.64 val improper = Scan.many is_improper;
75.65 val improper_end = Scan.repeat (Scan.unless space_proper (Scan.one is_improper));
75.66 -val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one T.is_blank));
75.67 +val blank_end = Scan.repeat (Scan.unless space_proper (Scan.one Token.is_blank));
75.68
75.69 -val opt_newline = Scan.option (Scan.one T.is_newline);
75.70 +val opt_newline = Scan.option (Scan.one Token.is_newline);
75.71
75.72 val ignore =
75.73 - Scan.depend (fn d => opt_newline |-- Scan.one T.is_begin_ignore
75.74 + Scan.depend (fn d => opt_newline |-- Scan.one Token.is_begin_ignore
75.75 >> pair (d + 1)) ||
75.76 - Scan.depend (fn d => Scan.one T.is_end_ignore --|
75.77 + Scan.depend (fn d => Scan.one Token.is_end_ignore --|
75.78 (if d = 0 then Scan.fail_with (K "Bad nesting of meta-comments") else opt_newline)
75.79 >> pair (d - 1));
75.80
75.81 @@ -336,18 +333,19 @@
75.82
75.83 fun markup mark mk flag = Scan.peek (fn d =>
75.84 improper |--
75.85 - Parse.position (Scan.one (T.is_kind T.Command andf is_markup mark o T.content_of)) --
75.86 + Parse.position
75.87 + (Scan.one (Token.is_kind Token.Command andf is_markup mark o Token.content_of)) --
75.88 Scan.repeat tag --
75.89 Parse.!!!! ((improper -- locale -- improper) |-- Parse.doc_source --| improper_end)
75.90 >> (fn (((tok, pos), tags), txt) =>
75.91 - let val name = T.content_of tok
75.92 + let val name = Token.content_of tok
75.93 in (SOME (name, pos, tags), (mk (name, txt), (flag, d))) end));
75.94
75.95 val command = Scan.peek (fn d =>
75.96 - Parse.position (Scan.one (T.is_kind T.Command)) --
75.97 + Parse.position (Scan.one (Token.is_kind Token.Command)) --
75.98 Scan.repeat tag
75.99 >> (fn ((tok, pos), tags) =>
75.100 - let val name = T.content_of tok
75.101 + let val name = Token.content_of tok
75.102 in (SOME (name, pos, tags), (BasicToken tok, (Latex.markup_false, d))) end));
75.103
75.104 val cmt = Scan.peek (fn d =>
75.105 @@ -367,8 +365,8 @@
75.106
75.107 (* spans *)
75.108
75.109 - val is_eof = fn (_, (BasicToken x, _)) => T.is_eof x | _ => false;
75.110 - val stopper = Scan.stopper (K (NONE, (BasicToken T.eof, ("", 0)))) is_eof;
75.111 + val is_eof = fn (_, (BasicToken x, _)) => Token.is_eof x | _ => false;
75.112 + val stopper = Scan.stopper (K (NONE, (BasicToken Token.eof, ("", 0)))) is_eof;
75.113
75.114 val cmd = Scan.one (is_some o fst);
75.115 val non_cmd = Scan.one (is_none o fst andf not o is_eof) >> #2;
75.116 @@ -390,8 +388,8 @@
75.117
75.118 val spans =
75.119 src
75.120 - |> Source.filter (not o T.is_semicolon)
75.121 - |> Source.source' 0 T.stopper (Scan.error (Scan.bulk token)) NONE
75.122 + |> Source.filter (not o Token.is_semicolon)
75.123 + |> Source.source' 0 Token.stopper (Scan.error (Scan.bulk token)) NONE
75.124 |> Source.source stopper (Scan.error (Scan.bulk span)) NONE
75.125 |> Source.exhaust;
75.126
75.127 @@ -490,7 +488,7 @@
75.128
75.129 (* default output *)
75.130
75.131 -val str_of_source = space_implode " " o map T.unparse o #2 o #1 o Args.dest_src;
75.132 +val str_of_source = space_implode " " o map Token.unparse o #2 o #1 o Args.dest_src;
75.133
75.134 fun maybe_pretty_source pretty src xs =
75.135 map pretty xs (*always pretty in order to exhibit errors!*)
76.1 --- a/src/Pure/Thy/thy_syntax.ML Mon May 17 18:59:59 2010 -0700
76.2 +++ b/src/Pure/Thy/thy_syntax.ML Tue May 18 06:28:42 2010 -0700
76.3 @@ -7,18 +7,17 @@
76.4 signature THY_SYNTAX =
76.5 sig
76.6 val token_source: Scan.lexicon * Scan.lexicon -> Position.T -> (string, 'a) Source.source ->
76.7 - (OuterLex.token, (Symbol_Pos.T, Position.T * (Symbol.symbol, (string, 'a)
76.8 + (Token.T, (Symbol_Pos.T, Position.T * (Symbol.symbol, (string, 'a)
76.9 Source.source) Source.source) Source.source) Source.source
76.10 - val parse_tokens: Scan.lexicon * Scan.lexicon -> Position.T -> string -> OuterLex.token list
76.11 - val present_token: OuterLex.token -> output
76.12 - val report_token: OuterLex.token -> unit
76.13 + val parse_tokens: Scan.lexicon * Scan.lexicon -> Position.T -> string -> Token.T list
76.14 + val present_token: Token.T -> output
76.15 + val report_token: Token.T -> unit
76.16 datatype span_kind = Command of string | Ignored | Malformed
76.17 type span
76.18 val span_kind: span -> span_kind
76.19 - val span_content: span -> OuterLex.token list
76.20 + val span_content: span -> Token.T list
76.21 val span_range: span -> Position.range
76.22 - val span_source: (OuterLex.token, 'a) Source.source ->
76.23 - (span, (OuterLex.token, 'a) Source.source) Source.source
76.24 + val span_source: (Token.T, 'a) Source.source -> (span, (Token.T, 'a) Source.source) Source.source
76.25 val parse_spans: Scan.lexicon * Scan.lexicon -> Position.T -> string -> span list
76.26 val present_span: span -> output
76.27 val report_span: span -> unit
76.28 @@ -29,16 +28,13 @@
76.29 structure ThySyntax: THY_SYNTAX =
76.30 struct
76.31
76.32 -structure T = OuterLex;
76.33 -
76.34 -
76.35 (** tokens **)
76.36
76.37 (* parse *)
76.38
76.39 fun token_source lexs pos src =
76.40 Symbol.source {do_recover = true} src
76.41 - |> T.source {do_recover = SOME false} (K lexs) pos;
76.42 + |> Token.source {do_recover = SOME false} (K lexs) pos;
76.43
76.44 fun parse_tokens lexs pos str =
76.45 Source.of_string str
76.46 @@ -51,33 +47,33 @@
76.47 local
76.48
76.49 val token_kind_markup =
76.50 - fn T.Command => (Markup.commandN, [])
76.51 - | T.Keyword => (Markup.keywordN, [])
76.52 - | T.Ident => Markup.ident
76.53 - | T.LongIdent => Markup.ident
76.54 - | T.SymIdent => Markup.ident
76.55 - | T.Var => Markup.var
76.56 - | T.TypeIdent => Markup.tfree
76.57 - | T.TypeVar => Markup.tvar
76.58 - | T.Nat => Markup.ident
76.59 - | T.String => Markup.string
76.60 - | T.AltString => Markup.altstring
76.61 - | T.Verbatim => Markup.verbatim
76.62 - | T.Space => Markup.none
76.63 - | T.Comment => Markup.comment
76.64 - | T.InternalValue => Markup.none
76.65 - | T.Malformed => Markup.malformed
76.66 - | T.Error _ => Markup.malformed
76.67 - | T.Sync => Markup.control
76.68 - | T.EOF => Markup.control;
76.69 + fn Token.Command => (Markup.commandN, [])
76.70 + | Token.Keyword => (Markup.keywordN, [])
76.71 + | Token.Ident => Markup.ident
76.72 + | Token.LongIdent => Markup.ident
76.73 + | Token.SymIdent => Markup.ident
76.74 + | Token.Var => Markup.var
76.75 + | Token.TypeIdent => Markup.tfree
76.76 + | Token.TypeVar => Markup.tvar
76.77 + | Token.Nat => Markup.ident
76.78 + | Token.String => Markup.string
76.79 + | Token.AltString => Markup.altstring
76.80 + | Token.Verbatim => Markup.verbatim
76.81 + | Token.Space => Markup.none
76.82 + | Token.Comment => Markup.comment
76.83 + | Token.InternalValue => Markup.none
76.84 + | Token.Malformed => Markup.malformed
76.85 + | Token.Error _ => Markup.malformed
76.86 + | Token.Sync => Markup.control
76.87 + | Token.EOF => Markup.control;
76.88
76.89 in
76.90
76.91 fun present_token tok =
76.92 - Markup.enclose (token_kind_markup (T.kind_of tok)) (Output.output (T.unparse tok));
76.93 + Markup.enclose (token_kind_markup (Token.kind_of tok)) (Output.output (Token.unparse tok));
76.94
76.95 fun report_token tok =
76.96 - Position.report (token_kind_markup (T.kind_of tok)) (T.position_of tok);
76.97 + Position.report (token_kind_markup (Token.kind_of tok)) (Token.position_of tok);
76.98
76.99 end;
76.100
76.101 @@ -88,7 +84,7 @@
76.102 (* type span *)
76.103
76.104 datatype span_kind = Command of string | Ignored | Malformed;
76.105 -datatype span = Span of span_kind * OuterLex.token list;
76.106 +datatype span = Span of span_kind * Token.T list;
76.107
76.108 fun span_kind (Span (k, _)) = k;
76.109 fun span_content (Span (_, toks)) = toks;
76.110 @@ -98,8 +94,8 @@
76.111 [] => (Position.none, Position.none)
76.112 | toks =>
76.113 let
76.114 - val start_pos = T.position_of (hd toks);
76.115 - val end_pos = T.end_position_of (List.last toks);
76.116 + val start_pos = Token.position_of (hd toks);
76.117 + val end_pos = Token.end_position_of (List.last toks);
76.118 in (start_pos, end_pos) end);
76.119
76.120
76.121 @@ -107,7 +103,7 @@
76.122
76.123 local
76.124
76.125 -val is_whitespace = T.is_kind T.Space orf T.is_kind T.Comment;
76.126 +val is_whitespace = Token.is_kind Token.Space orf Token.is_kind Token.Comment;
76.127
76.128 val body =
76.129 Scan.unless (Scan.many is_whitespace -- Scan.ahead (Parse.command || Parse.eof)) Parse.not_eof;
76.130 @@ -120,7 +116,7 @@
76.131
76.132 in
76.133
76.134 -fun span_source src = Source.source T.stopper (Scan.bulk span) NONE src;
76.135 +fun span_source src = Source.source Token.stopper (Scan.bulk span) NONE src;
76.136
76.137 end;
76.138
77.1 --- a/src/Pure/Thy/thy_syntax.scala Mon May 17 18:59:59 2010 -0700
77.2 +++ b/src/Pure/Thy/thy_syntax.scala Tue May 18 06:28:42 2010 -0700
77.3 @@ -22,13 +22,13 @@
77.4 }
77.5 }
77.6
77.7 - type Span = List[Outer_Lex.Token]
77.8 + type Span = List[Token]
77.9
77.10 - def parse_spans(toks: List[Outer_Lex.Token]): List[Span] =
77.11 + def parse_spans(toks: List[Token]): List[Span] =
77.12 {
77.13 import parser._
77.14
77.15 - parse(rep(command_span), Outer_Lex.reader(toks)) match {
77.16 + parse(rep(command_span), Token.reader(toks)) match {
77.17 case Success(spans, rest) if rest.atEnd => spans
77.18 case bad => error(bad.toString)
77.19 }
78.1 --- a/src/Pure/build-jars Mon May 17 18:59:59 2010 -0700
78.2 +++ b/src/Pure/build-jars Tue May 18 06:28:42 2010 -0700
78.3 @@ -34,9 +34,9 @@
78.4 General/yxml.scala
78.5 Isar/isar_document.scala
78.6 Isar/keyword.scala
78.7 - Isar/outer_lex.scala
78.8 Isar/outer_syntax.scala
78.9 Isar/parse.scala
78.10 + Isar/token.scala
78.11 PIDE/change.scala
78.12 PIDE/command.scala
78.13 PIDE/document.scala
79.1 --- a/src/Tools/Code/code_eval.ML Mon May 17 18:59:59 2010 -0700
79.2 +++ b/src/Tools/Code/code_eval.ML Tue May 18 06:28:42 2010 -0700
79.3 @@ -204,26 +204,24 @@
79.4
79.5 local
79.6
79.7 -structure P = OuterParse
79.8 -and K = OuterKeyword
79.9 -
79.10 val datatypesK = "datatypes";
79.11 val functionsK = "functions";
79.12 val fileK = "file";
79.13 val andK = "and"
79.14
79.15 -val _ = List.app K.keyword [datatypesK, functionsK];
79.16 +val _ = List.app Keyword.keyword [datatypesK, functionsK];
79.17
79.18 -val parse_datatype = (P.name --| P.$$$ "=" -- (P.term ::: (Scan.repeat (P.$$$ "|" |-- P.term))));
79.19 +val parse_datatype =
79.20 + Parse.name --| Parse.$$$ "=" -- (Parse.term ::: (Scan.repeat (Parse.$$$ "|" |-- Parse.term)));
79.21
79.22 in
79.23
79.24 val _ =
79.25 - OuterSyntax.command "code_reflect" "enrich runtime environment with generated code"
79.26 - K.thy_decl (P.name -- Scan.optional (P.$$$ datatypesK |-- (parse_datatype
79.27 - ::: Scan.repeat (P.$$$ andK |-- parse_datatype))) []
79.28 - -- Scan.optional (P.$$$ functionsK |-- Scan.repeat1 P.name) []
79.29 - -- Scan.option (P.$$$ fileK |-- P.name)
79.30 + Outer_Syntax.command "code_reflect" "enrich runtime environment with generated code"
79.31 + Keyword.thy_decl (Parse.name -- Scan.optional (Parse.$$$ datatypesK |-- (parse_datatype
79.32 + ::: Scan.repeat (Parse.$$$ andK |-- parse_datatype))) []
79.33 + -- Scan.optional (Parse.$$$ functionsK |-- Scan.repeat1 Parse.name) []
79.34 + -- Scan.option (Parse.$$$ fileK |-- Parse.name)
79.35 >> (fn (((module_name, raw_datatypes), raw_functions), some_file) => Toplevel.theory
79.36 (code_reflect_cmd raw_datatypes raw_functions module_name some_file)));
79.37
80.1 --- a/src/Tools/Code/code_haskell.ML Mon May 17 18:59:59 2010 -0700
80.2 +++ b/src/Tools/Code/code_haskell.ML Tue May 18 06:28:42 2010 -0700
80.3 @@ -469,8 +469,8 @@
80.4 serialize_haskell module_prefix module_name string_classes));
80.5
80.6 val _ =
80.7 - OuterSyntax.command "code_monad" "define code syntax for monads" OuterKeyword.thy_decl (
80.8 - OuterParse.term_group -- OuterParse.name >> (fn (raw_bind, target) =>
80.9 + Outer_Syntax.command "code_monad" "define code syntax for monads" Keyword.thy_decl (
80.10 + Parse.term_group -- Parse.name >> (fn (raw_bind, target) =>
80.11 Toplevel.theory (add_monad target raw_bind))
80.12 );
80.13
81.1 --- a/src/Tools/Code/code_preproc.ML Mon May 17 18:59:59 2010 -0700
81.2 +++ b/src/Tools/Code/code_preproc.ML Tue May 18 06:28:42 2010 -0700
81.3 @@ -479,8 +479,8 @@
81.4 end;
81.5
81.6 val _ =
81.7 - OuterSyntax.improper_command "print_codeproc" "print code preprocessor setup"
81.8 - OuterKeyword.diag (Scan.succeed
81.9 + Outer_Syntax.improper_command "print_codeproc" "print code preprocessor setup"
81.10 + Keyword.diag (Scan.succeed
81.11 (Toplevel.no_timing o Toplevel.unknown_theory o Toplevel.keep
81.12 (print_codeproc o Toplevel.theory_of)));
81.13
82.1 --- a/src/Tools/Code/code_printer.ML Mon May 17 18:59:59 2010 -0700
82.2 +++ b/src/Tools/Code/code_printer.ML Tue May 18 06:28:42 2010 -0700
82.3 @@ -72,9 +72,9 @@
82.4 val parse_infix: ('a -> 'b) -> lrx * int -> string
82.5 -> int * ((fixity -> 'b -> Pretty.T)
82.6 -> fixity -> 'a list -> Pretty.T)
82.7 - val parse_syntax: ('a -> 'b) -> OuterParse.token list
82.8 + val parse_syntax: ('a -> 'b) -> Token.T list
82.9 -> (int * ((fixity -> 'b -> Pretty.T)
82.10 - -> fixity -> 'a list -> Pretty.T)) option * OuterParse.token list
82.11 + -> fixity -> 'a list -> Pretty.T)) option * Token.T list
82.12 val simple_const_syntax: simple_const_syntax -> proto_const_syntax
82.13 val activate_const_syntax: theory -> literals
82.14 -> proto_const_syntax -> Code_Thingol.naming -> const_syntax * Code_Thingol.naming
82.15 @@ -329,15 +329,15 @@
82.16
82.17 fun parse_syntax prep_arg xs =
82.18 Scan.option ((
82.19 - ((OuterParse.$$$ infixK >> K X)
82.20 - || (OuterParse.$$$ infixlK >> K L)
82.21 - || (OuterParse.$$$ infixrK >> K R))
82.22 - -- OuterParse.nat >> parse_infix prep_arg
82.23 + ((Parse.$$$ infixK >> K X)
82.24 + || (Parse.$$$ infixlK >> K L)
82.25 + || (Parse.$$$ infixrK >> K R))
82.26 + -- Parse.nat >> parse_infix prep_arg
82.27 || Scan.succeed (parse_mixfix prep_arg))
82.28 - -- OuterParse.string
82.29 + -- Parse.string
82.30 >> (fn (parse, s) => parse s)) xs;
82.31
82.32 -val _ = List.app OuterKeyword.keyword [infixK, infixlK, infixrK];
82.33 +val _ = List.app Keyword.keyword [infixK, infixlK, infixrK];
82.34
82.35
82.36 (** module name spaces **)
83.1 --- a/src/Tools/Code/code_target.ML Mon May 17 18:59:59 2010 -0700
83.2 +++ b/src/Tools/Code/code_target.ML Tue May 18 06:28:42 2010 -0700
83.3 @@ -19,14 +19,13 @@
83.4
83.5 type destination
83.6 type serialization
83.7 - val parse_args: (OuterLex.token list -> 'a * OuterLex.token list)
83.8 - -> OuterLex.token list -> 'a
83.9 + val parse_args: 'a parser -> Token.T list -> 'a
83.10 val stmt_names_of_destination: destination -> string list
83.11 val mk_serialization: string -> ('a -> unit) option
83.12 -> (Path.T option -> 'a -> unit)
83.13 -> ('a -> string * string option list)
83.14 -> 'a -> serialization
83.15 - val serialize: theory -> string -> int option -> string option -> OuterLex.token list
83.16 + val serialize: theory -> string -> int option -> string option -> Token.T list
83.17 -> Code_Thingol.naming -> Code_Thingol.program -> string list -> serialization
83.18 val serialize_custom: theory -> string * (serializer * literals)
83.19 -> Code_Thingol.naming -> Code_Thingol.program -> string list -> string * string option list
83.20 @@ -105,7 +104,7 @@
83.21
83.22 type serializer =
83.23 string option (*module name*)
83.24 - -> OuterLex.token list (*arguments*)
83.25 + -> Token.T list (*arguments*)
83.26 -> (string -> string) (*labelled_name*)
83.27 -> string list (*reserved symbols*)
83.28 -> (string * Pretty.T) list (*includes*)
83.29 @@ -464,9 +463,6 @@
83.30
83.31 local
83.32
83.33 -structure P = OuterParse
83.34 -and K = OuterKeyword
83.35 -
83.36 fun zip_list (x::xs) f g =
83.37 f
83.38 #-> (fn y =>
83.39 @@ -474,9 +470,9 @@
83.40 #-> (fn xys => pair ((x, y) :: xys)));
83.41
83.42 fun parse_multi_syntax parse_thing parse_syntax =
83.43 - P.and_list1 parse_thing
83.44 - #-> (fn things => Scan.repeat1 (P.$$$ "(" |-- P.name --
83.45 - (zip_list things parse_syntax (P.$$$ "and")) --| P.$$$ ")"));
83.46 + Parse.and_list1 parse_thing
83.47 + #-> (fn things => Scan.repeat1 (Parse.$$$ "(" |-- Parse.name --
83.48 + (zip_list things parse_syntax (Parse.$$$ "and")) --| Parse.$$$ ")"));
83.49
83.50 in
83.51
83.52 @@ -498,7 +494,7 @@
83.53 val allow_abort_cmd = gen_allow_abort Code.read_const;
83.54
83.55 fun parse_args f args =
83.56 - case Scan.read OuterLex.stopper f args
83.57 + case Scan.read Token.stopper f args
83.58 of SOME x => x
83.59 | NONE => error "Bad serializer arguments";
83.60
83.61 @@ -508,75 +504,79 @@
83.62 val (inK, module_nameK, fileK) = ("in", "module_name", "file");
83.63
83.64 val code_exprP =
83.65 - (Scan.repeat1 P.term_group
83.66 - -- Scan.repeat (P.$$$ inK |-- P.name
83.67 - -- Scan.option (P.$$$ module_nameK |-- P.name)
83.68 - -- Scan.option (P.$$$ fileK |-- P.name)
83.69 - -- Scan.optional (P.$$$ "(" |-- Args.parse --| P.$$$ ")") []
83.70 + (Scan.repeat1 Parse.term_group
83.71 + -- Scan.repeat (Parse.$$$ inK |-- Parse.name
83.72 + -- Scan.option (Parse.$$$ module_nameK |-- Parse.name)
83.73 + -- Scan.option (Parse.$$$ fileK |-- Parse.name)
83.74 + -- Scan.optional (Parse.$$$ "(" |-- Args.parse --| Parse.$$$ ")") []
83.75 ) >> (fn (raw_cs, seris) => export_code_cmd raw_cs seris));
83.76
83.77 -val _ = List.app OuterKeyword.keyword [inK, module_nameK, fileK];
83.78 +val _ = List.app Keyword.keyword [inK, module_nameK, fileK];
83.79
83.80 val _ =
83.81 - OuterSyntax.command "code_class" "define code syntax for class" K.thy_decl (
83.82 - parse_multi_syntax P.xname (Scan.option P.string)
83.83 + Outer_Syntax.command "code_class" "define code syntax for class" Keyword.thy_decl (
83.84 + parse_multi_syntax Parse.xname (Scan.option Parse.string)
83.85 >> (Toplevel.theory oo fold) (fn (target, syns) =>
83.86 fold (fn (raw_class, syn) => add_syntax_class_cmd target raw_class syn) syns)
83.87 );
83.88
83.89 val _ =
83.90 - OuterSyntax.command "code_instance" "define code syntax for instance" K.thy_decl (
83.91 - parse_multi_syntax (P.xname --| P.$$$ "::" -- P.xname) (Scan.option (P.minus >> K ()))
83.92 + Outer_Syntax.command "code_instance" "define code syntax for instance" Keyword.thy_decl (
83.93 + parse_multi_syntax (Parse.xname --| Parse.$$$ "::" -- Parse.xname)
83.94 + (Scan.option (Parse.minus >> K ()))
83.95 >> (Toplevel.theory oo fold) (fn (target, syns) =>
83.96 fold (fn (raw_inst, add_del) => add_syntax_inst_cmd target raw_inst add_del) syns)
83.97 );
83.98
83.99 val _ =
83.100 - OuterSyntax.command "code_type" "define code syntax for type constructor" K.thy_decl (
83.101 - parse_multi_syntax P.xname (Code_Printer.parse_syntax I)
83.102 + Outer_Syntax.command "code_type" "define code syntax for type constructor" Keyword.thy_decl (
83.103 + parse_multi_syntax Parse.xname (Code_Printer.parse_syntax I)
83.104 >> (Toplevel.theory oo fold) (fn (target, syns) =>
83.105 fold (fn (raw_tyco, syn) => add_syntax_tyco_cmd target raw_tyco syn) syns)
83.106 );
83.107
83.108 val _ =
83.109 - OuterSyntax.command "code_const" "define code syntax for constant" K.thy_decl (
83.110 - parse_multi_syntax P.term_group (Code_Printer.parse_syntax fst)
83.111 + Outer_Syntax.command "code_const" "define code syntax for constant" Keyword.thy_decl (
83.112 + parse_multi_syntax Parse.term_group (Code_Printer.parse_syntax fst)
83.113 >> (Toplevel.theory oo fold) (fn (target, syns) =>
83.114 fold (fn (raw_const, syn) => add_syntax_const_simple_cmd target raw_const syn) syns)
83.115 );
83.116
83.117 val _ =
83.118 - OuterSyntax.command "code_reserved" "declare words as reserved for target language" K.thy_decl (
83.119 - P.name -- Scan.repeat1 P.name
83.120 + Outer_Syntax.command "code_reserved" "declare words as reserved for target language"
83.121 + Keyword.thy_decl (
83.122 + Parse.name -- Scan.repeat1 Parse.name
83.123 >> (fn (target, reserveds) => (Toplevel.theory o fold (add_reserved target)) reserveds)
83.124 );
83.125
83.126 val _ =
83.127 - OuterSyntax.command "code_include" "declare piece of code to be included in generated code" K.thy_decl (
83.128 - P.name -- P.name -- (P.text :|-- (fn "-" => Scan.succeed NONE
83.129 - | s => Scan.optional (P.$$$ "attach" |-- Scan.repeat1 P.term) [] >> pair s >> SOME))
83.130 + Outer_Syntax.command "code_include" "declare piece of code to be included in generated code"
83.131 + Keyword.thy_decl (
83.132 + Parse.name -- Parse.name -- (Parse.text :|-- (fn "-" => Scan.succeed NONE
83.133 + | s => Scan.optional (Parse.$$$ "attach" |-- Scan.repeat1 Parse.term) [] >> pair s >> SOME))
83.134 >> (fn ((target, name), content_consts) =>
83.135 (Toplevel.theory o add_include_cmd target) (name, content_consts))
83.136 );
83.137
83.138 val _ =
83.139 - OuterSyntax.command "code_modulename" "alias module to other name" K.thy_decl (
83.140 - P.name -- Scan.repeat1 (P.name -- P.name)
83.141 + Outer_Syntax.command "code_modulename" "alias module to other name" Keyword.thy_decl (
83.142 + Parse.name -- Scan.repeat1 (Parse.name -- Parse.name)
83.143 >> (fn (target, modlnames) => (Toplevel.theory o fold (add_module_alias target)) modlnames)
83.144 );
83.145
83.146 val _ =
83.147 - OuterSyntax.command "code_abort" "permit constant to be implemented as program abort" K.thy_decl (
83.148 - Scan.repeat1 P.term_group >> (Toplevel.theory o fold allow_abort_cmd)
83.149 + Outer_Syntax.command "code_abort" "permit constant to be implemented as program abort"
83.150 + Keyword.thy_decl (
83.151 + Scan.repeat1 Parse.term_group >> (Toplevel.theory o fold allow_abort_cmd)
83.152 );
83.153
83.154 val _ =
83.155 - OuterSyntax.command "export_code" "generate executable code for constants"
83.156 - K.diag (P.!!! code_exprP >> (fn f => Toplevel.keep (f o Toplevel.theory_of)));
83.157 + Outer_Syntax.command "export_code" "generate executable code for constants"
83.158 + Keyword.diag (Parse.!!! code_exprP >> (fn f => Toplevel.keep (f o Toplevel.theory_of)));
83.159
83.160 fun shell_command thyname cmd = Toplevel.program (fn _ =>
83.161 - (use_thy thyname; case Scan.read OuterLex.stopper (P.!!! code_exprP)
83.162 - ((filter OuterLex.is_proper o OuterSyntax.scan Position.none) cmd)
83.163 + (use_thy thyname; case Scan.read Token.stopper (Parse.!!! code_exprP)
83.164 + ((filter Token.is_proper o Outer_Syntax.scan Position.none) cmd)
83.165 of SOME f => (writeln "Now generating code..."; f (theory thyname))
83.166 | NONE => error ("Bad directive " ^ quote cmd)))
83.167 handle Runtime.TOPLEVEL_ERROR => OS.Process.exit OS.Process.failure;
84.1 --- a/src/Tools/Code/code_thingol.ML Mon May 17 18:59:59 2010 -0700
84.2 +++ b/src/Tools/Code/code_thingol.ML Tue May 18 06:28:42 2010 -0700
84.3 @@ -915,23 +915,21 @@
84.4
84.5 local
84.6
84.7 -structure P = OuterParse
84.8 -and K = OuterKeyword
84.9 -
84.10 fun code_thms_cmd thy = code_thms thy o op @ o read_const_exprs thy;
84.11 fun code_deps_cmd thy = code_deps thy o op @ o read_const_exprs thy;
84.12
84.13 in
84.14
84.15 val _ =
84.16 - OuterSyntax.improper_command "code_thms" "print system of code equations for code" OuterKeyword.diag
84.17 - (Scan.repeat1 P.term_group
84.18 + Outer_Syntax.improper_command "code_thms" "print system of code equations for code" Keyword.diag
84.19 + (Scan.repeat1 Parse.term_group
84.20 >> (fn cs => Toplevel.no_timing o Toplevel.unknown_theory
84.21 o Toplevel.keep ((fn thy => code_thms_cmd thy cs) o Toplevel.theory_of)));
84.22
84.23 val _ =
84.24 - OuterSyntax.improper_command "code_deps" "visualize dependencies of code equations for code" OuterKeyword.diag
84.25 - (Scan.repeat1 P.term_group
84.26 + Outer_Syntax.improper_command "code_deps" "visualize dependencies of code equations for code"
84.27 + Keyword.diag
84.28 + (Scan.repeat1 Parse.term_group
84.29 >> (fn cs => Toplevel.no_timing o Toplevel.unknown_theory
84.30 o Toplevel.keep ((fn thy => code_deps_cmd thy cs) o Toplevel.theory_of)));
84.31
85.1 --- a/src/Tools/WWW_Find/find_theorems.ML Mon May 17 18:59:59 2010 -0700
85.2 +++ b/src/Tools/WWW_Find/find_theorems.ML Tue May 18 06:28:42 2010 -0700
85.3 @@ -158,21 +158,17 @@
85.4
85.5 end;
85.6
85.7 -structure P = OuterParse
85.8 - and K = OuterKeyword
85.9 - and FT = Find_Theorems;
85.10 -
85.11 val criterion =
85.12 - P.reserved "name" |-- P.!!! (P.$$$ ":" |-- P.xname) >> Find_Theorems.Name ||
85.13 - P.reserved "intro" >> K Find_Theorems.Intro ||
85.14 - P.reserved "elim" >> K Find_Theorems.Elim ||
85.15 - P.reserved "dest" >> K Find_Theorems.Dest ||
85.16 - P.reserved "solves" >> K Find_Theorems.Solves ||
85.17 - P.reserved "simp" |-- P.!!! (P.$$$ ":" |-- P.term) >> Find_Theorems.Simp ||
85.18 - P.term >> Find_Theorems.Pattern;
85.19 + Parse.reserved "name" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.xname) >> Find_Theorems.Name ||
85.20 + Parse.reserved "intro" >> K Find_Theorems.Intro ||
85.21 + Parse.reserved "elim" >> K Find_Theorems.Elim ||
85.22 + Parse.reserved "dest" >> K Find_Theorems.Dest ||
85.23 + Parse.reserved "solves" >> K Find_Theorems.Solves ||
85.24 + Parse.reserved "simp" |-- Parse.!!! (Parse.$$$ ":" |-- Parse.term) >> Find_Theorems.Simp ||
85.25 + Parse.term >> Find_Theorems.Pattern;
85.26
85.27 val parse_query =
85.28 - Scan.repeat (((Scan.option P.minus >> is_none) -- criterion));
85.29 + Scan.repeat (((Scan.option Parse.minus >> is_none) -- criterion));
85.30
85.31 fun app_index f xs = fold_index (fn x => K (f x)) xs ();
85.32
85.33 @@ -194,8 +190,8 @@
85.34 fun get_query () =
85.35 query
85.36 |> (fn s => s ^ ";")
85.37 - |> OuterSyntax.scan Position.start
85.38 - |> filter OuterLex.is_proper
85.39 + |> Outer_Syntax.scan Position.start
85.40 + |> filter Token.is_proper
85.41 |> Scan.error parse_query
85.42 |> fst;
85.43
86.1 --- a/src/Tools/WWW_Find/unicode_symbols.ML Mon May 17 18:59:59 2010 -0700
86.2 +++ b/src/Tools/WWW_Find/unicode_symbols.ML Tue May 18 06:28:42 2010 -0700
86.3 @@ -114,8 +114,6 @@
86.4
86.5 local (* Parser *)
86.6
86.7 -structure P = OuterParse;
86.8 -
86.9 fun $$$ kw = Scan.one (is_keyword kw) >> str_of_token;
86.10 val hex_code = Scan.one is_hex_code >> int_of_code;
86.11 val ascii_sym = Scan.one is_ascii_sym >> str_of_token;
86.12 @@ -129,7 +127,7 @@
86.13
86.14 in
86.15
86.16 -val line = (symbol -- unicode --| font -- abbr) >> P.triple1;
86.17 +val line = (symbol -- unicode --| font -- abbr) >> Parse.triple1;
86.18
86.19 val symbols_path =
86.20 [getenv "ISABELLE_HOME", "etc", "symbols"]
87.1 --- a/src/Tools/eqsubst.ML Mon May 17 18:59:59 2010 -0700
87.2 +++ b/src/Tools/eqsubst.ML Tue May 18 06:28:42 2010 -0700
87.3 @@ -556,7 +556,7 @@
87.4 (Scan.succeed false);
87.5
87.6 val ith_syntax =
87.7 - Scan.optional (Args.parens (Scan.repeat OuterParse.nat)) [0];
87.8 + Scan.optional (Args.parens (Scan.repeat Parse.nat)) [0];
87.9
87.10 (* combination method that takes a flag (true indicates that subst
87.11 should be done to an assumption, false = apply to the conclusion of
88.1 --- a/src/Tools/induct.ML Mon May 17 18:59:59 2010 -0700
88.2 +++ b/src/Tools/induct.ML Tue May 18 06:28:42 2010 -0700
88.3 @@ -254,8 +254,8 @@
88.4 end;
88.5
88.6 val _ =
88.7 - OuterSyntax.improper_command "print_induct_rules" "print induction and cases rules"
88.8 - OuterKeyword.diag (Scan.succeed (Toplevel.no_timing o Toplevel.unknown_context o
88.9 + Outer_Syntax.improper_command "print_induct_rules" "print induction and cases rules"
88.10 + Keyword.diag (Scan.succeed (Toplevel.no_timing o Toplevel.unknown_context o
88.11 Toplevel.keep (print_rules o Toplevel.context_of)));
88.12
88.13
88.14 @@ -845,8 +845,6 @@
88.15
88.16 (** concrete syntax **)
88.17
88.18 -structure P = OuterParse;
88.19 -
88.20 val arbitraryN = "arbitrary";
88.21 val takingN = "taking";
88.22 val ruleN = "rule";
88.23 @@ -892,7 +890,7 @@
88.24 Args.$$$ predN || Args.$$$ setN || Args.$$$ ruleN) -- Args.colon)) scan;
88.25
88.26 val arbitrary = Scan.optional (Scan.lift (Args.$$$ arbitraryN -- Args.colon) |--
88.27 - P.and_list1' (Scan.repeat (unless_more_args free))) [];
88.28 + Parse.and_list1' (Scan.repeat (unless_more_args free))) [];
88.29
88.30 val taking = Scan.optional (Scan.lift (Args.$$$ takingN -- Args.colon) |--
88.31 Scan.repeat1 (unless_more_args inst)) [];
88.32 @@ -902,7 +900,7 @@
88.33 val cases_setup =
88.34 Method.setup @{binding cases}
88.35 (Args.mode no_simpN --
88.36 - (P.and_list' (Scan.repeat (unless_more_args inst)) -- Scan.option cases_rule) >>
88.37 + (Parse.and_list' (Scan.repeat (unless_more_args inst)) -- Scan.option cases_rule) >>
88.38 (fn (no_simp, (insts, opt_rule)) => fn ctxt =>
88.39 METHOD_CASES (fn facts => Seq.DETERM (HEADGOAL
88.40 (cases_tac ctxt (not no_simp) insts opt_rule facts)))))
88.41 @@ -910,11 +908,12 @@
88.42
88.43 val induct_setup =
88.44 Method.setup @{binding induct}
88.45 - (Args.mode no_simpN -- (P.and_list' (Scan.repeat (unless_more_args def_inst)) --
88.46 + (Args.mode no_simpN -- (Parse.and_list' (Scan.repeat (unless_more_args def_inst)) --
88.47 (arbitrary -- taking -- Scan.option induct_rule)) >>
88.48 (fn (no_simp, (insts, ((arbitrary, taking), opt_rule))) => fn ctxt =>
88.49 RAW_METHOD_CASES (fn facts =>
88.50 - Seq.DETERM (HEADGOAL (induct_tac ctxt (not no_simp) insts arbitrary taking opt_rule facts)))))
88.51 + Seq.DETERM
88.52 + (HEADGOAL (induct_tac ctxt (not no_simp) insts arbitrary taking opt_rule facts)))))
88.53 "induction on types or predicates/sets";
88.54
88.55 val coinduct_setup =
89.1 --- a/src/Tools/induct_tacs.ML Mon May 17 18:59:59 2010 -0700
89.2 +++ b/src/Tools/induct_tacs.ML Tue May 18 06:28:42 2010 -0700
89.3 @@ -116,8 +116,7 @@
89.4 val opt_rules = Scan.option (rule_spec |-- Attrib.thms);
89.5
89.6 val varss =
89.7 - OuterParse.and_list'
89.8 - (Scan.repeat (Scan.unless rule_spec (Scan.lift (Args.maybe Args.name_source))));
89.9 + Parse.and_list' (Scan.repeat (Scan.unless rule_spec (Scan.lift (Args.maybe Args.name_source))));
89.10
89.11 in
89.12
90.1 --- a/src/Tools/intuitionistic.ML Mon May 17 18:59:59 2010 -0700
90.2 +++ b/src/Tools/intuitionistic.ML Tue May 18 06:28:42 2010 -0700
90.3 @@ -71,7 +71,7 @@
90.4 val destN = "dest";
90.5
90.6 fun modifier name kind kind' att =
90.7 - Args.$$$ name |-- (kind >> K NONE || kind' |-- OuterParse.nat --| Args.colon >> SOME)
90.8 + Args.$$$ name |-- (kind >> K NONE || kind' |-- Parse.nat --| Args.colon >> SOME)
90.9 >> (pair (I: Proof.context -> Proof.context) o att);
90.10
90.11 val modifiers =
90.12 @@ -87,7 +87,7 @@
90.13
90.14 fun method_setup name =
90.15 Method.setup name
90.16 - (Scan.lift (Scan.option OuterParse.nat) --| Method.sections modifiers >>
90.17 + (Scan.lift (Scan.option Parse.nat) --| Method.sections modifiers >>
90.18 (fn opt_lim => fn ctxt =>
90.19 SIMPLE_METHOD' (Object_Logic.atomize_prems_tac THEN' prover_tac ctxt opt_lim)))
90.20 "intuitionistic proof search";
91.1 --- a/src/Tools/nbe.ML Mon May 17 18:59:59 2010 -0700
91.2 +++ b/src/Tools/nbe.ML Tue May 18 06:28:42 2010 -0700
91.3 @@ -625,15 +625,12 @@
91.4
91.5 val setup = Value.add_evaluator ("nbe", norm o ProofContext.theory_of);
91.6
91.7 -local structure P = OuterParse and K = OuterKeyword in
91.8 -
91.9 -val opt_modes = Scan.optional (P.$$$ "(" |-- P.!!! (Scan.repeat1 P.xname --| P.$$$ ")")) [];
91.10 +val opt_modes =
91.11 + Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.xname --| Parse.$$$ ")")) [];
91.12
91.13 val _ =
91.14 - OuterSyntax.improper_command "normal_form" "normalize term by evaluation" K.diag
91.15 - (opt_modes -- P.term >> (Toplevel.keep o norm_print_term_cmd));
91.16 -
91.17 -end;
91.18 + Outer_Syntax.improper_command "normal_form" "normalize term by evaluation" Keyword.diag
91.19 + (opt_modes -- Parse.term >> (Toplevel.keep o norm_print_term_cmd));
91.20
91.21 end;
91.22
91.23 \ No newline at end of file
92.1 --- a/src/Tools/quickcheck.ML Mon May 17 18:59:59 2010 -0700
92.2 +++ b/src/Tools/quickcheck.ML Tue May 18 06:28:42 2010 -0700
92.3 @@ -315,27 +315,25 @@
92.4 test_goal quiet report generator_name size iterations default_type no_assms insts i assms state
92.5 end;
92.6
92.7 -fun quickcheck args i state = fst (gen_quickcheck args i state)
92.8 +fun quickcheck args i state = fst (gen_quickcheck args i state);
92.9
92.10 fun quickcheck_cmd args i state =
92.11 gen_quickcheck args i (Toplevel.proof_of state)
92.12 |> Pretty.writeln o pretty_counterex_and_reports (Toplevel.context_of state);
92.13
92.14 -local structure P = OuterParse and K = OuterKeyword in
92.15 +val parse_arg = Parse.name -- (Scan.optional (Parse.$$$ "=" |-- Parse.name) "true");
92.16
92.17 -val parse_arg = P.name -- (Scan.optional (P.$$$ "=" |-- P.name) "true")
92.18 -
92.19 -val parse_args = P.$$$ "[" |-- P.list1 parse_arg --| P.$$$ "]"
92.20 +val parse_args = Parse.$$$ "[" |-- Parse.list1 parse_arg --| Parse.$$$ "]"
92.21 || Scan.succeed [];
92.22
92.23 -val _ = OuterSyntax.command "quickcheck_params" "set parameters for random testing" K.thy_decl
92.24 - (parse_args >> (fn args => Toplevel.theory (quickcheck_params_cmd args)));
92.25 +val _ =
92.26 + Outer_Syntax.command "quickcheck_params" "set parameters for random testing" Keyword.thy_decl
92.27 + (parse_args >> (fn args => Toplevel.theory (quickcheck_params_cmd args)));
92.28
92.29 -val _ = OuterSyntax.improper_command "quickcheck" "try to find counterexample for subgoal" K.diag
92.30 - (parse_args -- Scan.optional P.nat 1
92.31 - >> (fn (args, i) => Toplevel.no_timing o Toplevel.keep (quickcheck_cmd args i)));
92.32 -
92.33 -end; (*local*)
92.34 +val _ =
92.35 + Outer_Syntax.improper_command "quickcheck" "try to find counterexample for subgoal" Keyword.diag
92.36 + (parse_args -- Scan.optional Parse.nat 1
92.37 + >> (fn (args, i) => Toplevel.no_timing o Toplevel.keep (quickcheck_cmd args i)));
92.38
92.39 end;
92.40
93.1 --- a/src/Tools/value.ML Mon May 17 18:59:59 2010 -0700
93.2 +++ b/src/Tools/value.ML Tue May 18 06:28:42 2010 -0700
93.3 @@ -52,15 +52,13 @@
93.4 Pretty.str "::", Pretty.brk 1, Pretty.quote (Syntax.pretty_typ ctxt' ty')]) ();
93.5 in Pretty.writeln p end;
93.6
93.7 -local structure P = OuterParse and K = OuterKeyword in
93.8 +val opt_modes =
93.9 + Scan.optional (Parse.$$$ "(" |-- Parse.!!! (Scan.repeat1 Parse.xname --| Parse.$$$ ")")) [];
93.10
93.11 -val opt_modes = Scan.optional (P.$$$ "(" |-- P.!!! (Scan.repeat1 P.xname --| P.$$$ ")")) [];
93.12 -
93.13 -val _ = OuterSyntax.improper_command "value" "evaluate and print term" K.diag
93.14 - (Scan.option (P.$$$ "[" |-- P.xname --| P.$$$ "]") -- opt_modes -- P.term
93.15 - >> (fn ((some_name, modes), t) => Toplevel.no_timing o Toplevel.keep
93.16 - (value_cmd some_name modes t)));
93.17 -
93.18 -end; (*local*)
93.19 +val _ =
93.20 + Outer_Syntax.improper_command "value" "evaluate and print term" Keyword.diag
93.21 + (Scan.option (Parse.$$$ "[" |-- Parse.xname --| Parse.$$$ "]") -- opt_modes -- Parse.term
93.22 + >> (fn ((some_name, modes), t) => Toplevel.no_timing o Toplevel.keep
93.23 + (value_cmd some_name modes t)));
93.24
93.25 end;
94.1 --- a/src/ZF/Tools/datatype_package.ML Mon May 17 18:59:59 2010 -0700
94.2 +++ b/src/ZF/Tools/datatype_package.ML Tue May 18 06:28:42 2010 -0700
94.3 @@ -419,29 +419,26 @@
94.4
94.5 (* outer syntax *)
94.6
94.7 -local structure P = OuterParse and K = OuterKeyword in
94.8 -
94.9 fun mk_datatype ((((dom, dts), monos), type_intrs), type_elims) =
94.10 #1 o add_datatype (dom, map fst dts) (map snd dts) (monos, type_intrs, type_elims);
94.11
94.12 val con_decl =
94.13 - P.name -- Scan.optional (P.$$$ "(" |-- P.list1 P.term --| P.$$$ ")") [] -- P.opt_mixfix
94.14 - >> P.triple1;
94.15 + Parse.name -- Scan.optional (Parse.$$$ "(" |-- Parse.list1 Parse.term --| Parse.$$$ ")") [] --
94.16 + Parse.opt_mixfix >> Parse.triple1;
94.17
94.18 val datatype_decl =
94.19 - (Scan.optional ((P.$$$ "\<subseteq>" || P.$$$ "<=") |-- P.!!! P.term) "") --
94.20 - P.and_list1 (P.term -- (P.$$$ "=" |-- P.enum1 "|" con_decl)) --
94.21 - Scan.optional (P.$$$ "monos" |-- P.!!! Parse_Spec.xthms1) [] --
94.22 - Scan.optional (P.$$$ "type_intros" |-- P.!!! Parse_Spec.xthms1) [] --
94.23 - Scan.optional (P.$$$ "type_elims" |-- P.!!! Parse_Spec.xthms1) []
94.24 + (Scan.optional ((Parse.$$$ "\<subseteq>" || Parse.$$$ "<=") |-- Parse.!!! Parse.term) "") --
94.25 + Parse.and_list1 (Parse.term -- (Parse.$$$ "=" |-- Parse.enum1 "|" con_decl)) --
94.26 + Scan.optional (Parse.$$$ "monos" |-- Parse.!!! Parse_Spec.xthms1) [] --
94.27 + Scan.optional (Parse.$$$ "type_intros" |-- Parse.!!! Parse_Spec.xthms1) [] --
94.28 + Scan.optional (Parse.$$$ "type_elims" |-- Parse.!!! Parse_Spec.xthms1) []
94.29 >> (Toplevel.theory o mk_datatype);
94.30
94.31 val coind_prefix = if coind then "co" else "";
94.32
94.33 -val _ = OuterSyntax.command (coind_prefix ^ "datatype")
94.34 - ("define " ^ coind_prefix ^ "datatype") K.thy_decl datatype_decl;
94.35 +val _ =
94.36 + Outer_Syntax.command (coind_prefix ^ "datatype")
94.37 + ("define " ^ coind_prefix ^ "datatype") Keyword.thy_decl datatype_decl;
94.38
94.39 end;
94.40
94.41 -end;
94.42 -
95.1 --- a/src/ZF/Tools/ind_cases.ML Mon May 17 18:59:59 2010 -0700
95.2 +++ b/src/ZF/Tools/ind_cases.ML Tue May 18 06:28:42 2010 -0700
95.3 @@ -64,15 +64,11 @@
95.4
95.5 (* outer syntax *)
95.6
95.7 -local structure P = OuterParse and K = OuterKeyword in
95.8 -
95.9 val _ =
95.10 - OuterSyntax.command "inductive_cases"
95.11 - "create simplified instances of elimination rules (improper)" K.thy_script
95.12 - (P.and_list1 (Parse_Spec.opt_thm_name ":" -- Scan.repeat1 P.prop)
95.13 + Outer_Syntax.command "inductive_cases"
95.14 + "create simplified instances of elimination rules (improper)" Keyword.thy_script
95.15 + (Parse.and_list1 (Parse_Spec.opt_thm_name ":" -- Scan.repeat1 Parse.prop)
95.16 >> (Toplevel.theory o inductive_cases));
95.17
95.18 end;
95.19
95.20 -end;
95.21 -
96.1 --- a/src/ZF/Tools/induct_tacs.ML Mon May 17 18:59:59 2010 -0700
96.2 +++ b/src/ZF/Tools/induct_tacs.ML Tue May 18 06:28:42 2010 -0700
96.3 @@ -186,25 +186,20 @@
96.4
96.5 (* outer syntax *)
96.6
96.7 -local structure P = OuterParse and K = OuterKeyword in
96.8 -
96.9 -val _ = List.app OuterKeyword.keyword ["elimination", "induction", "case_eqns", "recursor_eqns"];
96.10 +val _ = List.app Keyword.keyword ["elimination", "induction", "case_eqns", "recursor_eqns"];
96.11
96.12 val rep_datatype_decl =
96.13 - (P.$$$ "elimination" |-- P.!!! Parse_Spec.xthm) --
96.14 - (P.$$$ "induction" |-- P.!!! Parse_Spec.xthm) --
96.15 - (P.$$$ "case_eqns" |-- P.!!! Parse_Spec.xthms1) --
96.16 - Scan.optional (P.$$$ "recursor_eqns" |-- P.!!! Parse_Spec.xthms1) []
96.17 + (Parse.$$$ "elimination" |-- Parse.!!! Parse_Spec.xthm) --
96.18 + (Parse.$$$ "induction" |-- Parse.!!! Parse_Spec.xthm) --
96.19 + (Parse.$$$ "case_eqns" |-- Parse.!!! Parse_Spec.xthms1) --
96.20 + Scan.optional (Parse.$$$ "recursor_eqns" |-- Parse.!!! Parse_Spec.xthms1) []
96.21 >> (fn (((x, y), z), w) => rep_datatype x y z w);
96.22
96.23 val _ =
96.24 - OuterSyntax.command "rep_datatype" "represent existing set inductively" K.thy_decl
96.25 + Outer_Syntax.command "rep_datatype" "represent existing set inductively" Keyword.thy_decl
96.26 (rep_datatype_decl >> Toplevel.theory);
96.27
96.28 end;
96.29
96.30 -end;
96.31 -
96.32 -
96.33 val exhaust_tac = DatatypeTactics.exhaust_tac;
96.34 val induct_tac = DatatypeTactics.induct_tac;
97.1 --- a/src/ZF/Tools/inductive_package.ML Mon May 17 18:59:59 2010 -0700
97.2 +++ b/src/ZF/Tools/inductive_package.ML Tue May 18 06:28:42 2010 -0700
97.3 @@ -576,29 +576,26 @@
97.4
97.5 (* outer syntax *)
97.6
97.7 -local structure P = OuterParse and K = OuterKeyword in
97.8 -
97.9 -val _ = List.app OuterKeyword.keyword
97.10 +val _ = List.app Keyword.keyword
97.11 ["domains", "intros", "monos", "con_defs", "type_intros", "type_elims"];
97.12
97.13 fun mk_ind (((((doms, intrs), monos), con_defs), type_intrs), type_elims) =
97.14 - #1 o add_inductive doms (map P.triple_swap intrs) (monos, con_defs, type_intrs, type_elims);
97.15 + #1 o add_inductive doms (map Parse.triple_swap intrs) (monos, con_defs, type_intrs, type_elims);
97.16
97.17 val ind_decl =
97.18 - (P.$$$ "domains" |-- P.!!! (P.enum1 "+" P.term --
97.19 - ((P.$$$ "\<subseteq>" || P.$$$ "<=") |-- P.term))) --
97.20 - (P.$$$ "intros" |--
97.21 - P.!!! (Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- P.prop))) --
97.22 - Scan.optional (P.$$$ "monos" |-- P.!!! Parse_Spec.xthms1) [] --
97.23 - Scan.optional (P.$$$ "con_defs" |-- P.!!! Parse_Spec.xthms1) [] --
97.24 - Scan.optional (P.$$$ "type_intros" |-- P.!!! Parse_Spec.xthms1) [] --
97.25 - Scan.optional (P.$$$ "type_elims" |-- P.!!! Parse_Spec.xthms1) []
97.26 + (Parse.$$$ "domains" |-- Parse.!!! (Parse.enum1 "+" Parse.term --
97.27 + ((Parse.$$$ "\<subseteq>" || Parse.$$$ "<=") |-- Parse.term))) --
97.28 + (Parse.$$$ "intros" |--
97.29 + Parse.!!! (Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- Parse.prop))) --
97.30 + Scan.optional (Parse.$$$ "monos" |-- Parse.!!! Parse_Spec.xthms1) [] --
97.31 + Scan.optional (Parse.$$$ "con_defs" |-- Parse.!!! Parse_Spec.xthms1) [] --
97.32 + Scan.optional (Parse.$$$ "type_intros" |-- Parse.!!! Parse_Spec.xthms1) [] --
97.33 + Scan.optional (Parse.$$$ "type_elims" |-- Parse.!!! Parse_Spec.xthms1) []
97.34 >> (Toplevel.theory o mk_ind);
97.35
97.36 -val _ = OuterSyntax.command (co_prefix ^ "inductive")
97.37 - ("define " ^ co_prefix ^ "inductive sets") K.thy_decl ind_decl;
97.38 +val _ =
97.39 + Outer_Syntax.command (co_prefix ^ "inductive")
97.40 + ("define " ^ co_prefix ^ "inductive sets") Keyword.thy_decl ind_decl;
97.41
97.42 end;
97.43
97.44 -end;
97.45 -
98.1 --- a/src/ZF/Tools/primrec_package.ML Mon May 17 18:59:59 2010 -0700
98.2 +++ b/src/ZF/Tools/primrec_package.ML Tue May 18 06:28:42 2010 -0700
98.3 @@ -194,12 +194,11 @@
98.4
98.5 (* outer syntax *)
98.6
98.7 -structure P = OuterParse and K = OuterKeyword;
98.8 -
98.9 val _ =
98.10 - OuterSyntax.command "primrec" "define primitive recursive functions on datatypes" K.thy_decl
98.11 - (Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- P.prop)
98.12 - >> (Toplevel.theory o (#1 oo (add_primrec o map P.triple_swap))));
98.13 + Outer_Syntax.command "primrec" "define primitive recursive functions on datatypes"
98.14 + Keyword.thy_decl
98.15 + (Scan.repeat1 (Parse_Spec.opt_thm_name ":" -- Parse.prop)
98.16 + >> (Toplevel.theory o (#1 oo (add_primrec o map Parse.triple_swap))));
98.17
98.18 end;
98.19
99.1 --- a/src/ZF/Tools/typechk.ML Mon May 17 18:59:59 2010 -0700
99.2 +++ b/src/ZF/Tools/typechk.ML Tue May 18 06:28:42 2010 -0700
99.3 @@ -118,7 +118,7 @@
99.4 "ZF type-checking";
99.5
99.6 val _ =
99.7 - OuterSyntax.improper_command "print_tcset" "print context of ZF typecheck" OuterKeyword.diag
99.8 + Outer_Syntax.improper_command "print_tcset" "print context of ZF typecheck" Keyword.diag
99.9 (Scan.succeed (Toplevel.no_timing o Toplevel.unknown_context o
99.10 Toplevel.keep (print_tcset o Toplevel.context_of)));
99.11