1.1 --- a/src/Tools/isac/BridgeJEdit/Calculation.thy Mon Dec 14 15:04:10 2020 +0100
1.2 +++ b/src/Tools/isac/BridgeJEdit/Calculation.thy Tue Dec 15 15:10:38 2020 +0100
1.3 @@ -12,15 +12,13 @@
1.4 (** )"HOL-Word.Word"( ** ) trial on ERROR in definition sdiv below *)
1.5 keywords
1.6 "Example" :: thy_load ("str") (* "spark_vc" :: thy_goal *)
1.7 - and "Problem" :: thy_goal (* root-problem + recursively in Solution *)
1.8 + and "Problem" :: thy_decl (* root-problem + recursively in Solution;
1.9 + "spark_vc" :: thy_goal *)
1.10 and "Specification" "Model" "References" "Solution" (* structure only *)
1.11 and"Given" "Find" "Relate" "Where" (* await input of term *)
1.12 and "RTheory" (* await input of string; "R" distingues "Problem".."RProblem" *)
1.13 and "RProblem" "RMethod" (* await input of string list *)
1.14 "Tactic" (* optional for each step 0..end of calculation *)
1.15 -(*//-------------------- investigate Outer_Syntax.local_theory_to_proof -------------------\\*)
1.16 - and"global_interpretationC" :: thy_goal
1.17 -(*\\-------------------- investigate Outer_Syntax.local_theory_to_proof -------------------//*)
1.18 begin
1.19
1.20 (**)ML_file parseC.sml(**)
1.21 @@ -74,6 +72,9 @@
1.22 fun merge (_, ctxt) = ctxt
1.23 )
1.24 \<close> ML \<open>
1.25 +(*
1.26 +store_and_show: Formalise.T list -> theory -> theory;
1.27 +*)
1.28 fun store_and_show formalise thy =
1.29 let
1.30 (**)val file_read_correct = case formalise of xxx as
1.31 @@ -94,14 +95,32 @@
1.32 |> Ctxt_Data.put var_type_ctxt
1.33 end;
1.34 \<close> ML \<open>
1.35 +store_and_show: Formalise.T list -> theory -> theory;
1.36 +\<close> ML \<open>
1.37 +\<close> ML \<open>
1.38 fun load_formalisation parser (files, _) thy =
1.39 let
1.40 val ([{lines, pos, ...}: Token.file], thy') = files thy;
1.41 in
1.42 store_and_show
1.43 - (ParseC.read_out_formalise (fst (parser (fst (ParseC.scan' pos (cat_lines lines))))))
1.44 + (ParseC.read_out_formalise (fst (parser (fst (ParseC.tokenize_formalise pos (cat_lines lines))))))
1.45 thy'
1.46 end;
1.47 +fun load_formalisation parse (files, _) thy =
1.48 + let
1.49 + val ([{lines, pos, ...}: Token.file], thy') = files thy;
1.50 + val formalise = lines
1.51 + |> cat_lines
1.52 + |> ParseC.tokenize_formalise pos
1.53 + |> fst
1.54 + |> parse
1.55 + |> fst
1.56 + |> ParseC.read_out_formalise
1.57 + |> store_and_show
1.58 + val _ = @{print} {a = "### load_formalisation \<rightarrow>", formalise = formalise}
1.59 + in
1.60 + formalise thy'
1.61 + end;
1.62 \<close> ML \<open>
1.63 \<close>
1.64
1.65 @@ -651,18 +670,18 @@
1.66 else ((inthy, inpbl, Method.id_empty), [])
1.67 \<close> ML \<open>
1.68 (*
1.69 -val prove_vc: ParseC.headline_string -> Proof.context -> Proof.state
1.70 +val prove_vc: ParseC.problem -> Proof.context -> Proof.state
1.71 *)
1.72 -fun prove_vc (*vc_name*)headline lthy =
1.73 +fun prove_vc (*vc_name*)problem lthy =
1.74 let
1.75 - val _ = @{print} {a = "//--- Spark_Commands.prove_vc", headline = headline}
1.76 + val _ = @{print} {a = "//--- Spark_Commands.prove_vc", headline = problem}
1.77 val thy = Proof_Context.theory_of lthy;
1.78 val vc_name = "procedure_g_c_d_4" (*fixed for spark_open \<open>greatest_common_divisor/g_c_d\<close>*)
1.79 val (ctxt, stmt) = get_vc thy vc_name
1.80 (*//--------------------------------- new code --------------------------------\\*)
1.81 - val refs'' = ParseC.read_out_headline headline
1.82 + val {thy_id, pbl_id, (*givens, wheres, ..*)...} = ParseC.read_out_problem problem
1.83 val refs' = Refs_Data.get thy
1.84 - val (refs as (_, pbl_id, _), o_model) = prefer_input refs'' refs' (OMod_Data.get thy)
1.85 + val (refs as (_, pbl_id, _), o_model) = prefer_input (thy_id, pbl_id) refs' (OMod_Data.get thy)
1.86 val i_model = I_Model.initPIDE pbl_id
1.87 (*
1.88 TODO: present Specification = i_model () + refs via PIDE
1.89 @@ -677,6 +696,8 @@
1.90 (Binding.name vc_name, []) [] ctxt stmt false lthy
1.91 end;
1.92 \<close> ML \<open>
1.93 +prove_vc: ParseC.problem -> Proof.context -> Proof.state
1.94 +\<close> ML \<open>
1.95 \<close>
1.96
1.97 section \<open>setup command_keyword \<close>
1.98 @@ -690,9 +711,9 @@
1.99 val _ =
1.100 Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>Problem\<close>
1.101 "start a Specification, either from scratch or from preceding 'Example'"
1.102 - (ParseC.problem_headline >> prove_vc);
1.103 + (ParseC.problem >> prove_vc);
1.104 \<close> ML \<open>
1.105 -prove_vc\<close>
1.106 +\<close>
1.107
1.108 subsection \<open>test runs with Example\<close>
1.109 subsubsection \<open>with new code\<close>
1.110 @@ -709,7 +730,7 @@
1.111 text \<open>
1.112 The sequence of code below is copied from SPARK/Examples/Gcd/Greatest_Common_Divisor.thy.
1.113 But here the code works only partially (see ERROR at end).
1.114 - Thus it is advisable to run tests in Greatest_Common_Divisor.thy.
1.115 + Thus it is advisable to run tests in Test_Some.thy, where "Example" has "Biegelinie".
1.116
1.117 spark_proof_functions is required for proof below..
1.118 \<close>
1.119 @@ -749,54 +770,27 @@
1.120 ( *\\------------------- outcomment before creating session Isac ----------------------------//*)
1.121
1.122
1.123 -section \<open>adapt spark_vc to Problem\<close>
1.124 +section \<open>adapt spark_vc to Problem using Naproche as model\<close>
1.125
1.126 subsection \<open>survey on steps of investigation\<close>
1.127 text \<open>
1.128 -1.prove_vc: string -> Proof.context -> Proof.state
1.129 - ? where does ^^^^^^^^^^^^^ come from:
1.130 - fun get_vc: (case SPARK_VCs.lookup_vc .. of SOME (ctxt, (_, proved, ctxt', stmt)) => ...
1.131 -
1.132 + We stopped at step 3.4 while going down from Outer_Syntax.local_theory_to_proof into a proof.
1.133 + Now we go the other way: Naproche checks the input via the Isabelle/server
1.134 + and outputs highlighting, semantic annotations and errors via PIDE ---
1.135 + and we investigate the output.
1.136 \<close>
1.137
1.138 -subsection \<open>notes and testbed for spark_vcs\<close>
1.139 +subsection \<open>notes and testbed for Naproche in preliminary spark_vcs\<close>
1.140 ML \<open>
1.141 -\<close> ML \<open> (* Outer_Syntax.local_theory_to_proof..global_interpretationC *)
1.142 -val interpretation_args_with_defs =
1.143 - Parse.!!! Parse_Spec.locale_expression --
1.144 - (Scan.optional (\<^keyword>\<open>defines\<close> |-- Parse.and_list1 (Parse_Spec.opt_thm_name ":"
1.145 - -- ((Parse.binding -- Parse.opt_mixfix') --| \<^keyword>\<open>=\<close> -- Parse.term))) ([]));
1.146 \<close> ML \<open>
1.147 - Outer_Syntax.local_theory_to_proof \<^command_keyword>\<open>global_interpretationC\<close>
1.148 - "prove interpretation of locale expression into global theory"
1.149 - (interpretation_args_with_defs >> (fn (expr, defs) =>
1.150 - Interpretation.global_interpretation_cmd expr defs));
1.151 \<close> ML \<open>
1.152 - (interpretation_args_with_defs >> (fn (expr, defs) =>
1.153 - Interpretation.global_interpretation_cmd expr defs))
1.154 -: Token.T list -> (local_theory -> Proof.state) * Token.T list;
1.155 -\<close> ML \<open>
1.156 -\<close> ML \<open> (* Outer_Syntax.local_theory_to_proof..Problem *)
1.157 - (ParseC.problem_headline >> prove_vc);
1.158 -\<close> ML \<open>
1.159 -Parse.name: string parser;
1.160 -Parse.name: Token.T list -> string * Token.T list;
1.161 -\<close> ML \<open>
1.162 -SPARK_Commands.prove_vc: string -> Proof.context -> Proof.state
1.163 -\<close> ML \<open>(*-------- new -----------*)
1.164 -ParseC.problem_headline: ParseC.problem_headline parser;
1.165 -ParseC.problem_headline: Token.T list -> ParseC.problem_headline * Token.T list;
1.166 -\<close> ML \<open>
1.167 -prove_vc: ParseC.problem_headline -> Proof.context -> Proof.state
1.168 \<close> ML \<open>
1.169 \<close>
1.170
1.171 subsection \<open>testbed for Problem\<close>
1.172 ML \<open>
1.173 \<close> ML \<open>
1.174 -val headline = "(\"Biegelinie\", [\"Biegelinien\"])";
1.175 -val toks = ParseC.tokenize headline;
1.176 -ParseC.problem_headline toks;
1.177 +\<close> ML \<open>
1.178 \<close> ML \<open>
1.179 \<close>
1.180
2.1 --- a/src/Tools/isac/BridgeJEdit/parseC.sml Mon Dec 14 15:04:10 2020 +0100
2.2 +++ b/src/Tools/isac/BridgeJEdit/parseC.sml Tue Dec 15 15:10:38 2020 +0100
2.3 @@ -11,14 +11,15 @@
2.4 val problem: problem parser
2.5 type problem_headline
2.6 val problem_headline: problem_headline parser
2.7 +
2.8 (* exclude "Problem" from parsing * )
2.9 val read_out_headline: headline_string * Token.T list -> ThyC.id * Problem.id
2.10 ( * exclude "Problem" from parsing *)
2.11 val read_out_headline: problem_headline -> ThyC.id * Problem.id
2.12 (* exclude "Problem" from parsing *)
2.13 + val read_out_problem: problem -> P_Spec.record
2.14
2.15 - (*RENAME TO tokenize_formalise..*)
2.16 - val scan': Position.T -> string -> Fdl_Lexer.T list * Fdl_Lexer.chars
2.17 + val tokenize_formalise: Position.T -> string -> Fdl_Lexer.T list * Fdl_Lexer.chars
2.18
2.19 type form_model = TermC.as_string list;
2.20 type form_refs = (((((string * ThyC.id) * string) * Problem.id) * string) * Method.id) * string
2.21 @@ -105,19 +106,42 @@
2.22 (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") --
2.23 Parse.$$$ ")"
2.24
2.25 +(* exclude "Problem" from parsing * )
2.26 fun string_of_headline ((((left, thy_id: ThyC.id), comm), pbl_id), right) =
2.27 left ^ thy_id ^ comm ^ Problem.id_to_string pbl_id ^ right
2.28 -(** )
2.29 fun read_out_headline ((((("(", thy_id), ","), pbl_id), ")"), []) = (thy_id, pbl_id)
2.30 | read_out_headline (hdl, toks) =
2.31 raise ERROR ("read_out_headline NOT for: (" ^ string_of_headline hdl ^ ", " ^
2.32 string_of_toks toks ^ ")")
2.33 -( **)
2.34 +( * exclude "Problem" from parsing *)
2.35 fun read_out_headline ((((("(", thy_id), ","), pbl_id), ")")) = (thy_id, pbl_id)
2.36 - | read_out_headline (hdl) =
2.37 - raise ERROR ("read_out_headline NOT for: (" ^ string_of_headline hdl ^ ", __" ^ ")")
2.38 + | read_out_headline arg =
2.39 + (@{print} {a = "read_out_headline with", arg = arg}; raise ERROR "read_out_headline")
2.40 (**)
2.41
2.42 +(* used if at least "Problem (thy_id, pbl_id)" has been input *)
2.43 +fun read_out_problem ((((((
2.44 + "(", thy_id), ","), pbl_id), ")"), (((((
2.45 + _(*"Specification"*), _(*":"*)), (((((((((((((((
2.46 + _(*"Model"*), (("", ""), "")), _(*":"*)),
2.47 + _(*"Given"*)), _(*":"*)), givens),
2.48 + _(*"Where"*)), _(*":"*)), wheres),
2.49 + _(*"Find"*)), _(*":"*)), find),
2.50 + _(*"Relate"*)), _(*":"*)), relates), ((
2.51 + _(*"References"*), _(*":"*)), ((((((((
2.52 + _(*"RTheory"*), _(*":"*)), rthy_id),
2.53 + _(*"RProblem"*)), _(*":"*)), rpbl_id),
2.54 + _(*"RMethod"*)), _(*":"*)), rmet_id)))),
2.55 + _(*"Solution"*)), _(*":"*)),
2.56 + _(*[]*))),
2.57 + _(*""*)) =
2.58 + {thy_id = thy_id, pbl_id = pbl_id,
2.59 + givens = givens, wheres = wheres, find = find, relates = relates,
2.60 + rthy_id = rthy_id, rpbl_id = rpbl_id, rmet_id = rmet_id}
2.61 + | read_out_problem arg =
2.62 + (@{print} {a = "read_out_problem called with", arg = arg};
2.63 + raise ERROR "read_out_problem called with")
2.64 +
2.65
2.66 (*** Specification ***)
2.67
2.68 @@ -280,8 +304,7 @@
2.69 ) --|
2.70 Fdl_Lexer.whitespace) );
2.71
2.72 -(*fun tokenize_formalise pos str = KEEP IDENTIFIERS CLOSE TO SPARK..*)
2.73 -fun scan' pos str =
2.74 +fun tokenize_formalise pos str =
2.75 (Scan.finite Fdl_Lexer.char_stopper
2.76 (Scan.error scan) (Fdl_Lexer.explode_pos str pos));
2.77
3.1 --- a/src/Tools/isac/Specify/p-spec.sml Mon Dec 14 15:04:10 2020 +0100
3.2 +++ b/src/Tools/isac/Specify/p-spec.sml Tue Dec 15 15:10:38 2020 +0100
3.3 @@ -7,6 +7,7 @@
3.4
3.5 signature PRESENTATION_SPECIFICATION =
3.6 sig
3.7 + type record
3.8 (*/------- rename -------\*)
3.9 datatype iitem =
3.10 Find of TermC.as_string list
3.11 @@ -45,6 +46,11 @@
3.12 struct
3.13 (**)
3.14
3.15 + type record = {thy_id: ThyC.id, pbl_id: Problem.id, (* headline of Problem *)
3.16 + givens: TermC.as_string list, wheres: TermC.as_string list, (* Model.T *)
3.17 + find: TermC.as_string, relates: TermC.as_string list,
3.18 + rthy_id: ThyC.id, rpbl_id: Problem.id, rmet_id: Method.id} (* References.T *)
3.19 +
3.20 fun flattup2 (a, (b ,c, d, e)) = (a, b, c, d, e)
3.21
3.22 (** handle an input P_Specific'action **)
3.23 @@ -246,4 +252,4 @@
3.24 end
3.25 | input_icalhd _ (_, _, _, _(*Met*), _) = raise ERROR "input_icalhd Met not impl."
3.26
3.27 -(**)end (**)
3.28 \ No newline at end of file
3.29 +(**)end (**)
4.1 --- a/test/Pure/Isar/Test_Parse_Isac.thy Mon Dec 14 15:04:10 2020 +0100
4.2 +++ b/test/Pure/Isar/Test_Parse_Isac.thy Tue Dec 15 15:10:38 2020 +0100
4.3 @@ -72,8 +72,11 @@
4.4 subsection \<open>The goal bypassing cartouches, terms shortened\<close>
4.5 ML \<open>
4.6 \<close> ML \<open>
4.7 +val problem_headline_str_WITH_Problem =
4.8 + "Problem ( \"Biegelinie\" , [\"Biegelinien\"] )"
4.9 +\<close> ML \<open>
4.10 val problem_headline_str =
4.11 - "Problem ( \"Biegelinie\" , [\"Biegelinien\"] )"
4.12 + "( \"Biegelinie\" , [\"Biegelinien\"] )"
4.13 \<close> ML \<open>
4.14 val model_str = (
4.15 "Model:" ^
4.16 @@ -171,6 +174,38 @@
4.17 steps_nonrec_str
4.18 )
4.19 \<close> ML \<open>
4.20 +val problem_whole_str_WITH_Problem = (
4.21 + problem_headline_str_WITH_Problem ^
4.22 + "Specification:" ^
4.23 + model_str ^
4.24 + references_str ^
4.25 + "Solution:" ^
4.26 + steps_nonrec_str
4.27 +)
4.28 +\<close> ML \<open>
4.29 +if problem_whole_str_WITH_Problem =
4.30 + "Problem ( \"Biegelinie\" , [\"Biegelinien\"] )" ^
4.31 + "Specification:" ^
4.32 + "Model:" ^
4.33 + "Given: \"Traegerlaenge L\", \"Streckenlast q_0\"" ^
4.34 + "Where: \"q_0 ist_integrierbar_auf {| 0, L |}\", \"0 < L\"" ^
4.35 + "Find: \"Biegelinie y\"" ^
4.36 + "Relate: \"Randbedingungen [ Q_0 = q_0 * L, M_b L = 0, y_0 = 0, d_d x y_0 = 0 ]\"" ^
4.37 + "References:" ^
4.38 + "RTheory: \"Biegelinie\"" ^
4.39 + "RProblem: [\"Biegelinien\"]" ^
4.40 + "RMethod: [\"Integrieren\", \"KonstanteBestimmen2\"]" ^
4.41 + "Solution:" ^
4.42 + "\"[c = -1 * L * q_0 / -1, c_2 = -1 * L ^ 2 * q_0 / 2, c_3 = 0, c_4 = 0]\"" ^
4.43 + "\"y x = c_4 + c_3 * x + 1 / (-1 * EI)\"" ^
4.44 + "Tactic Substitute \"c_4 + c_3 * x + 1 / (-1 * EI)\" \"[c, c_1, c_2, c_4]\"" ^
4.45 + "\"y x = 0 + 0 * x + 1 / (-1 * EI)\"" ^
4.46 + "Tactic Rewrite_Set_Inst \"[(bdv, x)]\" \"make_ratpoly_in\"" ^
4.47 + "\"y x = -6 * q_0 * L ^ 2 / (-24 * EI)\"" ^
4.48 + "Tactic Check_Postcond [\"Biegelinien\", \"xxx\"]" ^
4.49 + "\"y x = -6 * q_0 * L ^ 2 / (-24 * EI)\""
4.50 +then () else error "problem_whole_str CHANGED"
4.51 +\<close> ML \<open>
4.52 val problem_whole_str = (
4.53 problem_headline_str ^
4.54 "Specification:" ^
4.55 @@ -181,7 +216,7 @@
4.56 )
4.57 \<close> ML \<open>
4.58 if problem_whole_str =
4.59 - "Problem ( \"Biegelinie\" , [\"Biegelinien\"] )" ^
4.60 + "( \"Biegelinie\" , [\"Biegelinien\"] )" ^
4.61 "Specification:" ^
4.62 "Model:" ^
4.63 "Given: \"Traegerlaenge L\", \"Streckenlast q_0\"" ^
4.64 @@ -218,7 +253,7 @@
4.65 section \<open>Problem headline\<close>
4.66 ML \<open>
4.67 \<close> ML \<open>
4.68 -val toks = tokenize problem_headline_str
4.69 +val toks_WITH_Problem = tokenize problem_headline_str_WITH_Problem
4.70 \<close> ML \<open>
4.71 Thy_Header.get_keywords' @{context}
4.72 (*
4.73 @@ -234,14 +269,26 @@
4.74 :
4.75 *)
4.76 \<close> ML \<open>
4.77 -val problem_headline = Parse.$$$ "Problem" --
4.78 +val problem_headline_WITH_Problem = Parse.$$$ "Problem" --
4.79 Parse.$$$ "(" --
4.80 Parse.string -- Parse.$$$ "," --
4.81 (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") --
4.82 Parse.$$$ ")"
4.83 \<close> ML \<open>
4.84 +case problem_headline_WITH_Problem toks_WITH_Problem of
4.85 + (((((("Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), [])
4.86 + => () | _ => error "parse problem_headline CHANGED"
4.87 +\<close> ML \<open>
4.88 +val problem_headline = (*Parse.$$$ "Problem" --*)
4.89 + Parse.$$$ "(" --
4.90 + Parse.string -- Parse.$$$ "," --
4.91 + (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") --
4.92 + Parse.$$$ ")"
4.93 +\<close> ML \<open>
4.94 +val toks = tokenize problem_headline_str
4.95 +\<close> ML \<open>
4.96 case problem_headline toks of
4.97 - (((((("Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), [])
4.98 + ((((("(", "Biegelinie"), ","), ["Biegelinien"]), ")"), [])
4.99 => () | _ => error "parse problem_headline CHANGED"
4.100 \<close> ML \<open>
4.101 \<close>
4.102 @@ -639,7 +686,7 @@
4.103 subsection \<open>type problem\<close>
4.104 ML \<open>
4.105 \<close> ML \<open> (*taken from "val problem =" below*)
4.106 -type problem =
4.107 +type problem_WITH_Problem =
4.108 (((((((string * string) * string) * string) * string list) * string) *
4.109 (((((string * string) *
4.110 (((((((((((((((string * ((string * string) * string)) * string) * string) * string) *
4.111 @@ -651,7 +698,24 @@
4.112 string) * string) * string list)) *
4.113 string)
4.114 \<close> ML \<open>
4.115 -val problem_dummy =
4.116 +(* compare case parse problem toks of *)
4.117 +type problem = ((((((
4.118 + string * string) * string) * string list) * string) * (((((
4.119 + string * string) * (((((((((((((((
4.120 + string * ((string * string) * string)) * string) *
4.121 + string) * string) * string list) *
4.122 + string) * string) * string list) *
4.123 + string) * string) * string) *
4.124 + string) * string) * string list) * ((
4.125 + string * string) * ((((((((
4.126 + string * string) * string) *
4.127 + string) * string) * string list) *
4.128 + string) * string) * string list)))) *
4.129 + string) * string) *
4.130 + string list)) *
4.131 + string)
4.132 +\<close> ML \<open>
4.133 +val problem_dummy_WITH_Problem =
4.134 ((((((("", ""), ""), ""), []: string list), ""),
4.135 ((((("", ""),
4.136 ((((((((((((((("", (("", ""), "")), ""), ""), ""),
4.137 @@ -663,19 +727,35 @@
4.138 ""), ""), []: string list)),
4.139 "");
4.140 \<close> ML \<open>
4.141 -problem_dummy: problem
4.142 +(* compare case parse problem toks of *)
4.143 +val problem_dummy = ((((((
4.144 + "", ""), ""), []: string list), ""), (((((
4.145 + "", ""), (((((((((((((((
4.146 + "", (("", ""), "")), ""),
4.147 + ""), ""), []: string list),
4.148 + ""), ""), []: string list),
4.149 + ""), ""), ""),
4.150 + ""), ""), []: string list), ((
4.151 + "", ""), ((((((((
4.152 + "", ""), ""),
4.153 + ""), ""), []: string list),
4.154 + ""), ""), []: string list)))),
4.155 + ""), ""),
4.156 + []: string list)),
4.157 + "")
4.158 \<close> ML \<open>
4.159 \<close> ML \<open> (*type cartouche_problem*)
4.160 -type cartouche_problem = string * problem;
4.161 +type cartouche_problem = string * problem_WITH_Problem;
4.162 \<close> ML \<open>
4.163 -val cartouche_problem_dummy = ("", problem_dummy);
4.164 +val cartouche_problem_dummy = ("", problem_dummy_WITH_Problem);
4.165 \<close> ML \<open>
4.166 \<close>
4.167
4.168 subsection \<open>whole Problem\<close>
4.169 ML \<open>
4.170 \<close> ML \<open>
4.171 -val toks = tokenize (problem_headline_str ^ specification_str ^ solution_str)
4.172 +val toks_WITH_Problem = tokenize (problem_headline_str_WITH_Problem ^ specification_str ^ solution_str);
4.173 +val toks = tokenize (problem_headline_str ^ specification_str ^ solution_str);
4.174 \<close>
4.175 declare [[ML_print_depth = 999]] (* ..for getting complete <Output> *)
4.176 ML \<open>
4.177 @@ -691,23 +771,25 @@
4.178 (("", ""), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
4.179 ""), ""), [])
4.180 \<close> ML \<open>
4.181 +val problem_WITH_Problem =
4.182 + problem_headline_WITH_Problem --
4.183 + (Scan.optional body) body_dummy --
4.184 + (Scan.optional Parse.term) ""
4.185 +\<close> ML \<open>
4.186 +problem_WITH_Problem: problem_WITH_Problem parser
4.187 +\<close> ML \<open>
4.188 val problem =
4.189 problem_headline --
4.190 (Scan.optional body) body_dummy --
4.191 (Scan.optional Parse.term) ""
4.192 \<close> ML \<open>
4.193 -problem: problem parser (*GOON*)
4.194 -\<close> ML \<open>
4.195 -
4.196 +problem: problem parser
4.197 \<close> ML \<open>
4.198 \<close>
4.199 declare [[ML_print_depth = 999]] (* ..for getting complete <Output> *)
4.200 ML \<open>
4.201 -specification
4.202 \<close> ML \<open>
4.203 -parse problem toks
4.204 -\<close> ML \<open>
4.205 -case parse problem toks of ((((((((
4.206 +case parse problem_WITH_Problem toks_WITH_Problem of ((((((((
4.207 "Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.208 "Specification", ":"), (((((((((((((((
4.209 "Model", (("", ""), "")), ":"),
4.210 @@ -727,8 +809,66 @@
4.211 _(*"EXEC IMMEDIATELY step_term: <markup> (, )"*)])),
4.212 ""),
4.213 [])
4.214 + => () | _ => error "parse problem_WITH_Problem (whole) CHANGED"
4.215 +\<close> ML \<open>
4.216 +case parse problem toks of (((((((
4.217 + "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.218 + "Specification", ":"), (((((((((((((((
4.219 + "Model", (("", ""), "")), ":"),
4.220 + "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
4.221 + "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
4.222 + "Find"), ":"), _(*"<markup>"*)),
4.223 + "Relate"), ":"), [_(*"<markup>"*)]), ((
4.224 + "References", ":"), ((((((((
4.225 + "RTheory", ":"), "Biegelinie"),
4.226 + "RProblem"), ":"), ["Biegelinien"]),
4.227 + "RMethod"), ":"), ["Integrieren", "KonstanteBestimmen2"])))),
4.228 + "Solution"), ":"),
4.229 + [_(*"EXEC IMMEDIATELY step_term: <markup> (, )"*),
4.230 + _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Substitute <markup> <markup>)"*),
4.231 + _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Rewrite_Set_Inst <markup> make_ratpoly_in)"*),
4.232 + _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Check_Postcond xxxBiegelinien)"*),
4.233 + _(*"EXEC IMMEDIATELY step_term: <markup> (, )"*)])),
4.234 + ""),
4.235 +[])
4.236 => () | _ => error "parse problem (whole) CHANGED"
4.237 \<close> ML \<open>
4.238 +parse problem toks
4.239 +\<close> ML \<open>
4.240 +(* compare case parse problem toks of *)
4.241 +type problem = ((((((
4.242 + string * string) * string) * string list) * string) * (((((
4.243 + string * string) * (((((((((((((((
4.244 + string * ((string * string) * string)) * string) *
4.245 + string) * string) * string list) *
4.246 + string) * string) * string list) *
4.247 + string) * string) * string) *
4.248 + string) * string) * string list) * ((
4.249 + string * string) * ((((((((
4.250 + string * string) * string) *
4.251 + string) * string) * string list) *
4.252 + string) * string) * string list)))) *
4.253 + string) * string) *
4.254 + string list)) *
4.255 + string)
4.256 +\<close> ML \<open>
4.257 +(* compare case parse problem toks of *)
4.258 +val problem_dummy = ((((((
4.259 + "", ""), ""), []: string list), ""), (((((
4.260 + "", ""), (((((((((((((((
4.261 + "", (("", ""), "")), ""),
4.262 + ""), ""), []: string list),
4.263 + ""), ""), []: string list),
4.264 + ""), ""), ""),
4.265 + ""), ""), []: string list), ((
4.266 + "", ""), ((((((((
4.267 + "", ""), ""),
4.268 + ""), ""), []: string list),
4.269 + ""), ""), []: string list)))),
4.270 + ""), ""),
4.271 + []: string list)),
4.272 + "")
4.273 +\<close> ML \<open>
4.274 \<close>
4.275
4.276 subsection \<open>Problem collapsed in variants\<close>
4.277 @@ -739,11 +879,11 @@
4.278 subsubsection \<open>start calculation: headline only\<close>
4.279 ML \<open>
4.280 \<close> ML \<open>
4.281 -val toks = tokenize problem_headline_str
4.282 +val toks = tokenize problem_headline_str_WITH_Problem
4.283 \<close> ML \<open>
4.284 -case problem_headline toks of
4.285 +case problem_headline_WITH_Problem toks of
4.286 (((((("Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), [])
4.287 -=> () | _ => error "parse problem_headline_str CHANGED"
4.288 +=> () | _ => error "parse problem_headline_str_WITH_Problem CHANGED"
4.289 \<close> ML \<open>
4.290 \<close>
4.291
4.292 @@ -751,6 +891,28 @@
4.293 text \<open>
4.294 the Model only has Descriptors and the References are empty
4.295 \<close> ML \<open>
4.296 +val toks_WITH_Problem = tokenize (
4.297 + problem_headline_str_WITH_Problem ^
4.298 + "Specification:" ^
4.299 + model_empty_str ^ (* <<<----- empty *)
4.300 + "References:" ^ (* <<<----- collapsed *)
4.301 + "Solution:"
4.302 +)
4.303 +\<close> ML \<open>
4.304 +case parse problem_WITH_Problem toks_WITH_Problem of ((((((((
4.305 + "Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.306 + "Specification", ":"), (((((((((((((((
4.307 + "Model", (("", ""), "")), ":"),
4.308 + "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
4.309 + "Where"), ":"), [_(*"<markup>"*)]),
4.310 + "Find"), ":"), _(*"<markup>"*)),
4.311 + "Relate"), ":"), [_(*"<markup>"*)]), ((
4.312 + "References", ":"), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
4.313 + "Solution"), ":"), [])),
4.314 + ""),
4.315 +[])
4.316 +=> () | _ => error "parse enter Specification WITH_Problem CHANGED"
4.317 +\<close> ML \<open>
4.318 val toks = tokenize (
4.319 problem_headline_str ^
4.320 "Specification:" ^
4.321 @@ -759,8 +921,8 @@
4.322 "Solution:"
4.323 )
4.324 \<close> ML \<open>
4.325 -case parse problem toks of ((((((((
4.326 - "Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.327 +case parse problem toks of (((((((
4.328 + "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.329 "Specification", ":"), (((((((((((((((
4.330 "Model", (("", ""), "")), ":"),
4.331 "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
4.332 @@ -773,22 +935,41 @@
4.333 [])
4.334 => () | _ => error "parse enter Specification CHANGED"
4.335 \<close> ML \<open>
4.336 +\<close> ML \<open> (* this also works with Problem-headline only*)
4.337 +val toks = tokenize problem_headline_str;
4.338 +\<close> ML \<open>
4.339 +parse problem toks
4.340 +\<close> ML \<open>
4.341 +case parse problem toks of (((((((
4.342 + "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.343 + "", ""), (((((((((((((((
4.344 + "", (("", ""), "")), ""),
4.345 + ""), ""), []),
4.346 + ""), ""), []),
4.347 + ""), ""), ""),
4.348 + ""), ""), []), ((
4.349 + "", ""), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
4.350 + ""), ""), [])),
4.351 + ""),
4.352 +[])
4.353 +=> () | _ => error "parse BEFORE enter Specification CHANGED"
4.354 +\<close> ML \<open>
4.355 \<close>
4.356
4.357 subsubsection \<open>finish Specification\<close>
4.358 ML \<open>
4.359 \<close> ML \<open>
4.360 -val toks = tokenize (
4.361 - problem_headline_str ^
4.362 +val toks_WITH_Problem = tokenize (
4.363 + problem_headline_str_WITH_Problem ^
4.364 "Specification:" ^
4.365 model_str ^
4.366 references_str ^
4.367 "Solution:"
4.368 )
4.369 \<close> ML \<open>
4.370 -parse problem toks
4.371 +parse problem_WITH_Problem toks_WITH_Problem
4.372 \<close> ML \<open>
4.373 -case parse problem toks of ((((((((
4.374 +case parse problem_WITH_Problem toks_WITH_Problem of ((((((((
4.375 "Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.376 "Specification", ":"), (((((((((((((((
4.377 "Model", (("", ""), "")), ":"),
4.378 @@ -806,22 +987,89 @@
4.379 [])
4.380 => () | _ => error "parse finish specification CHANGED"
4.381 \<close> ML \<open>
4.382 +val toks = tokenize (
4.383 + problem_headline_str ^
4.384 + "Specification:" ^
4.385 + model_str ^
4.386 + references_str ^
4.387 + "Solution:"
4.388 +)
4.389 +\<close> ML \<open>
4.390 +case parse problem toks of ( ((((((
4.391 + "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.392 + "Specification", ":"), (((((((((((((((
4.393 + "Model", (("", ""), "")), ":"),
4.394 + "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
4.395 + "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
4.396 + "Find"), ":"), _(*"<markup>"*)),
4.397 + "Relate"), ":"), [_(*"<markup>"*)]), ((
4.398 + "References", ":"), ((((((((
4.399 + "RTheory", ":"), "Biegelinie"),
4.400 + "RProblem"), ":"), ["Biegelinien"]),
4.401 + "RMethod"), ":"), ["Integrieren", "KonstanteBestimmen2"])))),
4.402 + "Solution"), ":"),
4.403 + [])),
4.404 + ""),
4.405 + [])
4.406 + => () | _ => error "parse finish specification CHANGED"
4.407 +\<close> ML \<open>
4.408 +(*
4.409 + val read_out_problem: problem -> P_Spec.record
4.410 +*)
4.411 +fun read_out_problem ((((((
4.412 + "(", thy_id), ","), pbl_id), ")"), (((((
4.413 + "Specification", ":"), (((((((((((((((
4.414 + "Model", (("", ""), "")), ":"),
4.415 + "Given"), ":"), givens),
4.416 + "Where"), ":"), wheres),
4.417 + "Find"), ":"), find),
4.418 + "Relate"), ":"), relates), ((
4.419 + "References", ":"), ((((((((
4.420 + "RTheory", ":"), rthy_id),
4.421 + "RProblem"), ":"), rpbl_id),
4.422 + "RMethod"), ":"), rmet_id)))),
4.423 + "Solution"), ":"),
4.424 + [])),
4.425 + "") =
4.426 + {thy_id = thy_id, pbl_id = pbl_id,
4.427 + givens = givens, wheres = wheres, find = find, relates = relates,
4.428 + rthy_id = rthy_id, rpbl_id = rpbl_id, rmet_id = rmet_id}
4.429 + | read_out_problem arg =
4.430 + (@{print} {a = "read_out_problem called with", arg = arg};
4.431 + raise ERROR "read_out_problem called with")
4.432 +\<close> ML \<open>
4.433 +(*P_Spec.record*)
4.434 +type record = {thy_id: string, pbl_id: string list, (* headline of Problem *)
4.435 + givens: string list, wheres: string list, (* Model.T *)
4.436 + find: string, relates: string list,
4.437 + rthy_id: string, rpbl_id: string list, rmet_id: string list} (* References.T *)
4.438 +\<close> ML \<open>
4.439 +read_out_problem: problem -> record
4.440 +\<close> ML \<open>
4.441 +case parse problem toks |> fst |> read_out_problem of
4.442 + {thy_id = "Biegelinie", pbl_id = ["Biegelinien"],
4.443 + givens = [_(*"<markup>"*), _(*"<markup>"*)], wheres = [_(*"<markup>"*), _(*"<markup>"*)],
4.444 + find = _(*"<markup>"*), relates = [_(*"<markup>"*)],
4.445 + rthy_id = "Biegelinie", rpbl_id = ["Biegelinien"], rmet_id = ["Integrieren", "KonstanteBestimmen2"]
4.446 + } => ()
4.447 +| _ => error "read_out_problem CHANGED"
4.448 +\<close> ML \<open>
4.449 \<close>
4.450
4.451 subsubsection \<open>enter Solution\<close>
4.452 text \<open>
4.453 Specification collapsed or expanded
4.454 \<close> ML \<open>
4.455 -\<close> ML \<open> val str = (problem_headline_str ^ " Specification: Solution:")
4.456 +\<close> ML \<open> val str = (problem_headline_str_WITH_Problem ^ " Specification: Solution:")
4.457 \<close> ML \<open>
4.458 if str = "Problem ( \"Biegelinie\" , [\"Biegelinien\"] ) Specification: Solution:"
4.459 then () else error "Specification collapsed, Solution CHANGED"
4.460 \<close> ML \<open>
4.461 -val toks1 = tokenize str
4.462 +val toks1_WITH_Problem = tokenize str
4.463 \<close> ML \<open>
4.464 -parse problem toks1
4.465 +parse problem_WITH_Problem toks1_WITH_Problem
4.466 \<close> ML \<open>
4.467 -case parse problem toks1 of ((((((((
4.468 +case parse problem_WITH_Problem toks1_WITH_Problem of ((((((((
4.469 "Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
4.470 "Specification", ":"), ((((((((((((((("", (("", ""), "")), ""), ""), ""), []), ""), ""), []), ""), ""), ""), ""), ""), []), (("", ""), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
4.471 "Solution"), ":"), [])),
4.472 @@ -834,13 +1082,13 @@
4.473 subsubsection \<open>Problem with final_result, collapsed\<close>
4.474 ML \<open>
4.475 \<close> ML \<open>
4.476 -val str = (problem_headline_str ^ final_result_str)
4.477 +val str = (problem_headline_str_WITH_Problem ^ final_result_str)
4.478 \<close> ML \<open>
4.479 -val toks = tokenize str
4.480 +val toks_WITH_Problem = tokenize str
4.481 \<close> ML \<open>
4.482 -parse problem toks
4.483 +parse problem_WITH_Problem toks_WITH_Problem
4.484 \<close> ML \<open>
4.485 -case parse problem toks of ((((((((
4.486 +case parse problem_WITH_Problem toks_WITH_Problem of ((((((((
4.487 "Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"),
4.488 ((((("", ""),
4.489 ((((((((((((((("", (("", ""), "")), ""), ""), ""), []), ""), ""), []), ""), ""), ""), ""), ""), []),
4.490 @@ -857,8 +1105,8 @@
4.491 text \<open>
4.492 we make steps close to recursive version according to subsequent subsubsection.
4.493 \<close> ML \<open>
4.494 -val toks = tokenize (
4.495 - problem_headline_str ^
4.496 +val toks_WITH_Problem = tokenize (
4.497 + problem_headline_str_WITH_Problem ^
4.498 "Specification:" ^
4.499 "Solution:" ^
4.500 steps_nonrec_str
4.501 @@ -878,7 +1126,7 @@
4.502 declare [[ML_print_depth = 999]] (* ..for finding the type of steps_nonrec, exec_step_subproblem *)
4.503 ML \<open>
4.504 \<close> ML \<open> (* copy <Output> from val problem_nonrec = ..*)
4.505 -type problem_nonrec =
4.506 +type problem_nonrec_WITH_Problem =
4.507 (((((((((string * string) * string) * string) * string list) * string) *
4.508 ((string * string) *
4.509 (((((((((((((((string * ((string * string) * string)) * string) * string) * string) * string list) * string) * string) * string list) * string) *
4.510 @@ -887,7 +1135,7 @@
4.511 * string) * string) * string list)
4.512 (* ? repeat ^^^^^^^^^^^^*)
4.513 \<close> ML \<open>
4.514 -val empty_problem_nonrec =
4.515 +val empty_problem_nonrec_WITH_Problem =
4.516 ((((((((("", ""), ""), ""), []), ""),
4.517 (("", ""),
4.518 ((((((((((((((("", (("", ""), "")), ""), ""), ""), []), ""), ""), []), ""),
4.519 @@ -895,17 +1143,17 @@
4.520 (("", ""), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))))
4.521 , ""), ""), [])
4.522 \<close> ML \<open>
4.523 -val problem_nonrec =
4.524 - problem_headline --
4.525 +val problem_nonrec_WITH_Problem =
4.526 + problem_headline_WITH_Problem --
4.527 specification --
4.528 Parse.$$$ "Solution" -- Parse.$$$ ":" -- steps_nonrec
4.529 :
4.530 - problem_nonrec parser
4.531 + problem_nonrec_WITH_Problem parser
4.532 (* ^^^^^^^*)
4.533 \<close> ML \<open>
4.534 -parse problem_nonrec toks
4.535 +parse problem_nonrec_WITH_Problem toks_WITH_Problem
4.536 \<close> ML \<open>
4.537 -case parse problem_nonrec toks of ((((((((((
4.538 +case parse problem_nonrec_WITH_Problem toks_WITH_Problem of ((((((((((
4.539 "Problem", "("), "Biegelinie"), ","), ["Biegelinien"]), ")"),
4.540 ((
4.541 "Specification", ":"),
4.542 @@ -1018,30 +1266,30 @@
4.543 (or even ?after executing Specification?).
4.544
4.545 \<close> ML \<open>
4.546 -val toks = tokenize (
4.547 - problem_headline_str ^
4.548 +val toks_WITH_Problem = tokenize (
4.549 + problem_headline_str_WITH_Problem ^
4.550 "Specification:" ^
4.551 "Solution:" ^
4.552 steps_subproblem_str
4.553 )
4.554 \<close> ML \<open> (* finding the type of exec_subproblem: *)
4.555 -fun problem xxx =
4.556 - problem_headline --
4.557 +fun problem_WITH_Problem xxx =
4.558 + problem_headline_WITH_Problem --
4.559 specification --
4.560 Parse.$$$ "Solution" -- Parse.$$$ ":" -- (steps xxx)
4.561 and steps xxx =
4.562 Scan.repeat (
4.563 (Parse.term -- (Scan.optional tactic ("", "")) >> exec_step_term) ||
4.564 - (problem xxx >> xxx)
4.565 + (problem_WITH_Problem xxx >> xxx)
4.566 )
4.567 \<close> ML \<open>
4.568 -problem
4.569 +problem_WITH_Problem
4.570 \<close> ML \<open> (* copy from <Output> abstracted ..*)
4.571 -problem : (problem_nonrec -> string) -> Token.T list -> problem_nonrec * Token.T list;
4.572 +problem_WITH_Problem : (problem_nonrec_WITH_Problem -> string) -> Token.T list -> problem_nonrec_WITH_Problem * Token.T list;
4.573 (* xxx : ^^^^^^^^^^^^^^^^^^^^^^^^^^ vvvvvv*)
4.574 -problem : (problem_nonrec -> string) -> problem_nonrec parser
4.575 +problem_WITH_Problem : (problem_nonrec_WITH_Problem -> string) -> problem_nonrec_WITH_Problem parser
4.576 \<close> ML \<open> (* compose arguments from empty_problem_nonrec ..*)
4.577 -fun subproblem_msg
4.578 +fun subproblem_msg_WITH_Problem
4.579 (((((((((s1, s2), s3), s4), strs0), s5),
4.580 ((s6, s7),
4.581 (((((((((((((((s11, ((s12, s13), s14)), s15), s16), s17), strs18), s19), s20), strs21), s22),
4.582 @@ -1055,13 +1303,13 @@
4.583 " ((" ^ s31 ^ ", " ^ s32 ^ "), ((((((((" ^ s30 ^ ", " ^ s33 ^ "), " ^ s34 ^ "), " ^ s35 ^ "), " ^ s36 ^ "), " ^ string_of_strs strs37 ^ "), " ^ s38 ^ "), " ^ s39 ^ "), " ^ string_of_strs strs40 ^ ")))))\n" ^
4.584 " , " ^ s51 ^ "), " ^ s61 ^ "), " ^ string_of_strs strs71 ^ ")"
4.585 \<close> ML \<open>
4.586 -fun exec_subproblem
4.587 +fun exec_subproblem_WITH_Problem
4.588 (((((((((s1, s2), s3), s4), strs0), s5),
4.589 ((s6, s7),
4.590 (((((((((((((((s11, ((s12, s13), s14)), s15), s16), s17), strs18), s19), s20), strs21), s22),
4.591 s23), s24), s25), s26), strs27),
4.592 ((s31, s32), ((((((((s30, s33), s34), s35), s36), strs37), s38), s39), strs40)))))
4.593 - , s51), s61), strs71) = subproblem_msg
4.594 + , s51), s61), strs71) = subproblem_msg_WITH_Problem
4.595 (((((((((s1, s2), s3), s4), strs0), s5),
4.596 ((s6, s7),
4.597 (((((((((((((((s11, ((s12, s13), s14)), s15), s16), s17), strs18), s19), s20), strs21), s22),
4.598 @@ -1069,23 +1317,23 @@
4.599 ((s31, s32), ((((((((s30, s33), s34), s35), s36), strs37), s38), s39), strs40)))))
4.600 , s51), s61), strs71)
4.601 \<close> ML \<open>
4.602 -exec_subproblem : problem_nonrec -> string
4.603 +exec_subproblem_WITH_Problem : problem_nonrec_WITH_Problem -> string
4.604 \<close> ML \<open>
4.605 \<close> ML \<open> (* exec_subproblem has right? type in problem *)
4.606 -fun problem _ =
4.607 - problem_headline --
4.608 +fun problem_WITH_Problem _ =
4.609 + problem_headline_WITH_Problem --
4.610 specification --
4.611 Parse.$$$ "Solution" -- Parse.$$$ ":" -- (steps 1)
4.612 and steps _ =
4.613 Scan.repeat (
4.614 (
4.615 ((Parse.term -- (Scan.optional tactic ("", ""))) >> exec_step_term) ||
4.616 - (problem 1 >> exec_subproblem)
4.617 + (problem_WITH_Problem 1 >> exec_subproblem_WITH_Problem)
4.618 (* ^^^^^^^^^^^^^^^*)
4.619 )
4.620 )
4.621 \<close> ML \<open>
4.622 -problem : int -> problem_nonrec parser
4.623 +problem_WITH_Problem : int -> problem_nonrec_WITH_Problem parser
4.624 \<close> ML \<open>
4.625 steps : int -> string list parser
4.626 \<close> ML \<open> (* BUT: omitting the parsers' arguments does NOT work:
5.1 --- a/test/Tools/isac/Test_Some.thy Mon Dec 14 15:04:10 2020 +0100
5.2 +++ b/test/Tools/isac/Test_Some.thy Tue Dec 15 15:10:38 2020 +0100
5.3 @@ -115,8 +115,9 @@
5.4 ERROR There is no verification condition "Biegelinie".
5.5 ..WE ADAPT PRELIMINARILY TO THAT..
5.6 *)
5.7 -spark_open \<open>~~/src/HOL/SPARK/Examples/Gcd/greatest_common_divisor/g_c_d\<close>
5.8 -Problem procedure_g_c_d_4
5.9 +spark_open \<open>~~/src/HOL/SPARK/Examples/Gcd/greatest_common_divisor/g_c_d\<close> (*Problem adds to spark*)
5.10 +
5.11 +Problem (*procedure_g_c_d_4*)("Biegelinie", ["Biegelinien"])
5.12 (* WE GET DATA TRANSFERRED FROM Example ABOVE, OK..
5.13
5.14 {a = "prove_vc", o_model =
5.15 @@ -144,7 +145,7 @@
5.16 (*
5.17 AT ABOVE "ML" WE HAVE:
5.18 Bad context for command "ML"\<^here> -- using reset state
5.19 - ..CAUSED BY INSUFFICIENT SEQUENCE OF SPARK keywords,
5.20 + ..CAUSED BY INSUFFICIENT SEQUENCE OF SPARK keywords here in this test sequence,
5.21 BUT spark_open + Problem ABOVE SHOWS NO ERROR -- THAT IS WHAT WE WANT DURING ADAPTATION!
5.22 *)
5.23 \<close>