eliminate SPARK; as an example replaced by Outer_Syntax.command..problem
author wneuper <Walther.Neuper@jku.at>
date Wed, 24 Aug 2022 12:37:07 +0200
changeset 60536 5038589d3033
parent 60535 d5c670beaba4
child 60537 f0305aeb010b
eliminate SPARK; as an example replaced by Outer_Syntax.command..problem
TODO.md
src/Tools/isac/BridgeJEdit/BridgeJEdit.thy
src/Tools/isac/BridgeJEdit/Calculation.thy
src/Tools/isac/BridgeJEdit/Calculation_OLD.thy
src/Tools/isac/BridgeJEdit/SPARK_FDL.thy
src/Tools/isac/BridgeJEdit/fdl_lexer.ML
src/Tools/isac/BridgeJEdit/fdl_parser.ML
src/Tools/isac/BridgeJEdit/parseC-OLD.sml
src/Tools/isac/BridgeJEdit/preliminary-OLD.sml
test/Tools/isac/BridgeJEdit/parseC.sml
test/Tools/isac/BridgeJEdit/preliminary.sml
     1.1 --- a/TODO.md	Tue Aug 23 18:05:08 2022 +0200
     1.2 +++ b/TODO.md	Wed Aug 24 12:37:07 2022 +0200
     1.3 @@ -60,8 +60,6 @@
     1.4  
     1.5  ***** priority of WN items is top down, most urgent/simple on top
     1.6  
     1.7 -* WN: eliminate SPARK; as an example replaced by Outer_Syntax.command..problem
     1.8 -
     1.9  * WN: polish naming in BaseDefinitions/eval-def.sml
    1.10  * WN: Step_Specify.initialisePIDE: remove hdl in return-value, replace Step_Specify.nxt_specify_init_calc
    1.11        ? which uses initialisePIDE !?
     2.1 --- a/src/Tools/isac/BridgeJEdit/BridgeJEdit.thy	Tue Aug 23 18:05:08 2022 +0200
     2.2 +++ b/src/Tools/isac/BridgeJEdit/BridgeJEdit.thy	Wed Aug 24 12:37:07 2022 +0200
     2.3 @@ -6,7 +6,7 @@
     2.4  *)
     2.5  
     2.6  theory BridgeJEdit
     2.7 -  imports Calculation_OLD VSCode_Example
     2.8 +  imports VSCode_Example
     2.9  begin
    2.10  
    2.11  ML \<open>
     3.1 --- a/src/Tools/isac/BridgeJEdit/Calculation.thy	Tue Aug 23 18:05:08 2022 +0200
     3.2 +++ b/src/Tools/isac/BridgeJEdit/Calculation.thy	Wed Aug 24 12:37:07 2022 +0200
     3.3 @@ -9,7 +9,6 @@
     3.4    imports
     3.5    (**)"$ISABELLE_ISAC/Knowledge/Build_Thydata"   (*remove after devel.of BridgeJEdit*)
     3.6    (**)"$ISABELLE_ISAC/MathEngine/MathEngine"
     3.7 -  (** )"SPARK_FDL"                             ( * remove after devel.of BridgeJEdit*)
     3.8  keywords "Example" :: thy_decl
     3.9  and "Specification" "Model" "References" "Solution" 
    3.10  
     4.1 --- a/src/Tools/isac/BridgeJEdit/Calculation_OLD.thy	Tue Aug 23 18:05:08 2022 +0200
     4.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     4.3 @@ -1,222 +0,0 @@
     4.4 -(*  Title:      src/Tools/isac/BridgeJEdit/Calculation_OLD.thy
     4.5 -    Author:     Walther Neuper, JKU Linz
     4.6 -    (c) due to copyright terms
     4.7 -
     4.8 -Outer syntax for Isabelle/Isac's Calculation.
     4.9 -First trials with SPARK..
    4.10 -*)
    4.11 -
    4.12 -theory Calculation_OLD
    4.13 -imports
    4.14 -(**)"$ISABELLE_ISAC/Knowledge/Build_Thydata"   (*remove after devel.of BridgeJEdit*)
    4.15 -(**)"$ISABELLE_ISAC/MathEngine/MathEngine"
    4.16 -(**)"SPARK_FDL"                             (*remove after devel.of BridgeJEdit*)
    4.17 -keywords
    4.18 -  "ExampleSPARK" :: thy_load (*(isac_example)~(spark_vcg) ..would involve registering in Isabelle/Scala*)
    4.19 -  and(**)"Problem" :: thy_decl (*root-problem + recursively in Solution *)
    4.20 -  and "Specification" "Model" "References" :: diag (* structure only *)
    4.21 -  and "Solution" :: thy_decl (* structure only *)
    4.22 -  and "Where" "Given" "Find" "Relate" :: prf_decl (* await input of term *)
    4.23 -(*and "Where" (* only output, preliminarily *)*)
     4.25 -  and "RTheory" :: thy_decl (* await input of string; "R" distinguishes "Problem".."RProblem" *)
    4.25 -  and "RProblem" "RMethod" :: thy_decl (* await input of string list *)
    4.26 -  and "Tactic" (* optional for each step 0..end of calculation *)
    4.27 -begin
    4.28 -(** )declare [[ML_print_depth = 99999]]( **)
    4.29 -
    4.30 -ML_file "parseC-OLD.sml"
    4.31 -ML_file "preliminary-OLD.sml"
    4.32 -
    4.33 -
    4.34 -section \<open>new code for Outer_Syntax ExampleSPARK, Problem, ...\<close>
    4.35 -text \<open>
    4.36 -  code for ExampleSPARK, Problem shifted into structure Preliminary.
    4.37 -\<close>
    4.38 -
    4.39 -subsubsection \<open>cp from Pure.thy\<close>
    4.40 -ML \<open>
    4.41 -\<close> ML \<open> (* for "from" ~~ "Given:" *)
    4.42 -(* original *)
    4.43 -val facts = Parse.and_list1 Parse.thms1;
    4.44 -facts: (Facts.ref * Token.src list) list list parser;
    4.45 -\<close> ML \<open>
    4.46 -val facts =
    4.47 -  (writeln "####-## from parser";
    4.48 -    Parse.$$$ ":" |-- Parse.and_list1 Parse.thms1)(** ) --| Parse.$$$ "Where"( **);
    4.49 -facts: (Facts.ref * Token.src list) list list parser;
    4.50 -\<close> ML \<open>
    4.51 -\<close> ML \<open>
    4.52 -\<close>
    4.53 -
    4.54 -subsubsection \<open>tools to analyse parsing in Outer_Syntax >> \<close>
    4.55 -ML \<open>
    4.56 -\<close> text \<open> (*original basics.ML*)
    4.57 -op |>> : ('a * 'b) * ('a -> 'c) -> 'c * 'b;
    4.58 -fun (x, y) |>> f = (f x, y);
    4.59 -\<close> text \<open> (*original scan.ML*)
    4.60 -op >> : ('a -> 'b * 'c) * ('b -> 'd) -> 'a -> 'd * 'c;                              (*---vvv*)
    4.61 -fun (scan >> f) xs = scan xs |>> f;
    4.62 -\<close> ML \<open>
    4.63 -\<close> ML \<open> (*cp.from originals*)
    4.64 -infix 3 @>>;
    4.65 -fun (scan @>> f) xs = scan xs |>> f;
    4.66 -op @>> : ('a        -> 'b * 'c)        * ('b -> 'd) -> 'a        -> 'd * 'c;        (*---^^^*)
    4.67 -\<close> ML \<open> (*appl.to parser*)
    4.68 -op @>> : (Token.src -> 'b * Token.src) * ('b -> 'd) -> Token.src -> 'd * Token.src; (*---^^^*)
    4.69 -\<close> ML \<open>
    4.70 -\<close> ML \<open> (*add analysis*)
    4.71 -\<close> text \<open>
    4.72 -datatype T = Pos of (int * int * int) * Properties.T;
    4.73 -
    4.74 -fun s_to_string (Pos ((line, offset, end_offset), _)) = ("Pos ((" ^
    4.75 -  string_of_int line ^ ", " ^ string_of_int offset ^ ", " ^ string_of_int end_offset ^ "), [_])")
    4.76 -
    4.77 -s_to_string: src -> string
    4.78 -\<close> text \<open>
    4.79 -datatype T = Token of (Symbol_Pos.text * Position.range) * (kind * string) * slot
    4.80 -\<close> ML \<open>
    4.81 -Token.pos_of;        (*^^^^^^^^^^^^^^^*) 
    4.82 -Token.range_of;                         (*^^^^^^^^^^^^^*) 
    4.83 -Token.unparse;                                            (*^^^^^^^^^^^^^*) 
    4.84 -\<close> ML \<open>
    4.85 -fun string_of_tok (tok: Token.T) = "\n" ^ @{make_string} tok
    4.86 -\<close> ML \<open>
    4.87 -fun string_of_toks toks = fold (curry op ^) (map string_of_tok toks |> rev |> separate ", ") "";
    4.88 -string_of_toks:    Token.src -> string;
    4.89 -\<close> ML \<open>
    4.90 -fun (scan @>> f) xs = (@{print}{a = "_ >> ", toks = xs(*GIVES\<rightarrow>"?"*)}; scan xs) |>> f;
    4.91 -fun (scan @>> f) xs = (writeln ("toks= " ^ @{make_string} xs);      scan xs) |>> f;
    4.92 -op @>> : (Token.src -> 'b * Token.src) * ('b -> 'd) -> Token.src -> 'd * Token.src; (*---^^^*)
    4.93 -\<close> ML \<open>
    4.94 -op @>> : (Token.T list -> 'a * 'b) * ('a -> 'c) -> Token.T list -> 'c * 'b
    4.95 -\<close> ML \<open>
    4.96 -\<close> ML \<open> (*Scan < Token in bootstrap, thus we take Token.s_to_string as argument of >> *)
    4.97 -infix 3 @@>>;
    4.98 -\<close> ML \<open>
    4.99 -fun ((write, scan) @@>> f) xs = (write xs;                             scan xs) |>> f;
   4.100 -op @@>> : (('a -> 'b) * ('a -> 'c * 'd)) * ('c -> 'e) -> 'a -> 'e * 'd
   4.101 -\<close> ML \<open>
   4.102 -\<close>
   4.103 -
   4.104 -subsubsection \<open>TODO\<close>
   4.105 -ML \<open>
   4.106 -\<close> ML \<open>
   4.107 -\<close> ML \<open>
   4.108 -\<close> ML \<open>
   4.109 -\<close>
   4.110 -
   4.111 -section \<open>setup command_keyword + preliminary test\<close>
   4.112 -ML \<open>                                            
   4.113 -\<close> ML \<open>
   4.114 -val _ =                                                  
   4.115 -  Outer_Syntax.command \<^command_keyword>\<open>ExampleSPARK\<close> "start a Calculation from a Formalise-file"
   4.116 -    (Resources.provide_parse_file -- Parse.parname
   4.117 -      >> (Toplevel.theory o           (*vvvvvvvvvvvvvvvvvvvv--- HACK makes test independent from session Isac*)
   4.118 -        (Preliminary_OLD.load_formalisation @{theory Biegelinie} ParseC_OLD.formalisation)) );
   4.119 -\<close> ML \<open>
   4.120 -val _ =
   4.121 -  Outer_Syntax.command \<^command_keyword>\<open>Problem\<close>
   4.122 -    "start a Specification, either from scratch or from preceding 'Example'"
   4.123 -    ((@{print}, ParseC_OLD.problem_headline)
   4.124 -       @@>> (Toplevel.theory o Preliminary_OLD.init_specify));
   4.125 -\<close> ML \<open>
   4.126 -Preliminary_OLD.init_specify: ParseC_OLD.problem_headline -> theory -> theory;
   4.127 -(Toplevel.theory o Preliminary_OLD.init_specify)
   4.128 -  : ParseC_OLD.problem_headline -> Toplevel.transition -> Toplevel.transition;
   4.129 -\<close> ML \<open>
   4.130 -(**)                   Preliminary_OLD.init_specify: ParseC_OLD.problem_headline -> theory -> theory;(**)
   4.131 -(*                                                                          ^^^^^^    ^^^^^^*)
   4.132 -(**)(Toplevel.theory o Preliminary_OLD.init_specify):
   4.133 -      ParseC_OLD.problem_headline -> Toplevel.transition -> Toplevel.transition;(**)
   4.134 -(*                               ^^^^^^^^^^^^^^^^^^^    ^^^^^^^^^^^^^^^^^^^*)
   4.135 -\<close> ML \<open>
   4.136 -fun dummy (_(*":"*): string) thy =
   4.137 -  let
   4.138 -    val refs' = Refs_Data.get thy
   4.139 -    val o_model = OMod_Data.get thy
   4.140 -    val i_model = IMod_Data.get thy
   4.141 -    val _ =
   4.142 -      @{print} {a = "### dummy Specification", refs = refs', o_model = o_model, i_model = i_model}
   4.143 -  in thy end;
   4.144 -\<close> ML \<open>
   4.145 -val _ =
   4.146 -  Outer_Syntax.command \<^command_keyword>\<open>Specification\<close> "Specification dummy"
   4.147 -    ((@{print}, Parse.$$$ ":") @@>> (Toplevel.theory o dummy))
   4.148 -\<close> ML \<open>
   4.149 -fun dummy2 _ (thy: theory) = thy
   4.150 -\<close> ML \<open> (*or*)
   4.151 -fun dummy2 _ (ctxt: Proof.context) = ctxt
   4.152 -\<close> ML \<open>
   4.153 -val _ =
   4.154 -  Outer_Syntax.command \<^command_keyword>\<open>Model\<close> "Model dummy"
   4.155 -    ((@{print}, Parse.$$$ ":") @@>> (Toplevel.theory o dummy))
   4.156 -\<close> ML \<open>
   4.157 -val _ =
   4.158 -  Outer_Syntax.command \<^command_keyword>\<open>Given\<close> "input Given items to the Model of a Specification"
   4.159 -(*                                (facts    >> (Toplevel.proof o Proof.from_thmss_cmd));*)
   4.160 -    ((@{print}, facts) @@>> (Toplevel.proof o Proof.from_thmss_cmd));
   4.161 -val _ =
   4.162 -  Outer_Syntax.command \<^command_keyword>\<open>Where\<close> "input Find items to the Model of a Specification"
   4.163 -    (facts @>> (Toplevel.proof o Proof.from_thmss_cmd));
   4.164 -val _ =
   4.165 -  Outer_Syntax.command \<^command_keyword>\<open>Find\<close> "input Find items to the Model of a Specification"
   4.166 -    (facts @>> (Toplevel.proof o Proof.from_thmss_cmd));
   4.167 -val _ =
   4.168 -  Outer_Syntax.command \<^command_keyword>\<open>Relate\<close> "input Relate items to the Model of a Specification"
   4.169 -    (facts @>> (Toplevel.proof o Proof.from_thmss_cmd));
   4.170 -\<close> ML \<open>
   4.171 -(Toplevel.proof o Proof.from_thmss_cmd)
   4.172 -:
   4.173 -  (Facts.ref * Token.src list) list list -> Toplevel.transition -> Toplevel.transition
   4.174 -(*                                          ^^^^^^^^^^^^^^^^^^^    ^^^^^^^^^^^^^^^^^^^*)
   4.175 -\<close> ML \<open>
   4.176 -val _ =
   4.177 -  Outer_Syntax.command @{command_keyword References} "References dummy"
   4.178 -    (Parse.$$$ ":" >> (fn _ => Toplevel.keep (fn _ => ignore (writeln ""))));
   4.179 -val _ =
   4.180 -  Outer_Syntax.command @{command_keyword RTheory} "RTheory dummy"
   4.181 -    (Parse.$$$ ":" -- Parse.string 
   4.182 -      >> (fn _ => Toplevel.keep (fn _ => ignore (writeln ""))));
   4.183 -val _ =
   4.184 -  Outer_Syntax.command @{command_keyword RProblem} "RProblem dummy"
   4.185 -   (Parse.$$$ ":" -- (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]")
   4.186 -    >> (fn _ => Toplevel.keep (fn _ => ignore (writeln ""))));
   4.187 -val _ =
   4.188 -  Outer_Syntax.command @{command_keyword RMethod} "RMethod dummy"
   4.189 -   (Parse.$$$ ":" -- (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]")
   4.190 -    >> (fn _ => Toplevel.keep (fn _ => ignore (writeln ""))));
   4.191 -val _ =
   4.192 -  Outer_Syntax.command @{command_keyword Solution} "Solution dummy"
   4.193 -    (Parse.$$$ ":" >> (fn _ => Toplevel.keep (fn _ => ignore (writeln ""))));
   4.194 -\<close> ML \<open>
   4.195 -\<close> ML \<open>
   4.196 -\<close>
   4.197 -
   4.198 -subsection \<open>runs with test-Example\<close>
   4.199 -(**)
   4.200 -ExampleSPARK \<open>../Examples/exp_Statics_Biegel_Timischl_7-70.str\<close>
   4.201 -(**)
   4.202 -Problem ("Biegelinie", ["Biegelinien"])
   4.203 -(*1 collapse* )
   4.204 -  Specification:
   4.205 -  (*2 collapse*)
   4.206 -    Model:
   4.207 -      Given: "Traegerlaenge " "Streckenlast " (*Bad context for command "Given"\<^here> -- using reset state*)
   4.208 -      Where: "q_0 ist_integrierbar_auf {| 0, L |}" "0 < L"
   4.209 -      Find: "Biegelinie "
   4.210 -      Relate: "Randbedingungen "
   4.211 -    References:
   4.212 -      (*3 collapse*)
   4.213 -      RTheory: ""                           (*Bad context for command "RTheory"\<^here> -- using reset state*)
   4.214 -      RProblem: ["", ""]
   4.215 -      RMethod: ["", ""]
   4.216 -      (*3 collapse*)
   4.217 -  (*2 collapse*)
   4.218 -  Solution:
   4.219 -( * 1 collapse*)
   4.220 -(*
   4.221 -  TODO: compare click on above Given: "Traegerlaenge ", "Streckenlast " 
    4.222 -  with click on from \<open>0 < d\<close> in SPARK/../Greatest_Common_Divisor.thy
   4.223 -*)
   4.224 -
   4.225 -end
   4.226 \ No newline at end of file
     5.1 --- a/src/Tools/isac/BridgeJEdit/SPARK_FDL.thy	Tue Aug 23 18:05:08 2022 +0200
     5.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     5.3 @@ -1,8 +0,0 @@
     5.4 -theory SPARK_FDL
     5.5 -  imports Complex_Main
     5.6 -begin
     5.7 -
     5.8 -ML_file fdl_lexer.ML  (*replace with native Isabelle*)
     5.9 -ML_file fdl_parser.ML (*replace with native Isabelle*)
    5.10 -
    5.11 -end
    5.12 \ No newline at end of file
     6.1 --- a/src/Tools/isac/BridgeJEdit/fdl_lexer.ML	Tue Aug 23 18:05:08 2022 +0200
     6.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     6.3 @@ -1,362 +0,0 @@
     6.4 -(*  Title:      HOL/SPARK/Tools/fdl_lexer.ML
     6.5 -    Author:     Stefan Berghofer
     6.6 -    Copyright:  secunet Security Networks AG
     6.7 -
     6.8 -Lexical analyzer for fdl files.
     6.9 -*)
    6.10 -
    6.11 -signature FDL_LEXER =
    6.12 -sig
    6.13 -  datatype kind = Keyword | Ident | Long_Ident | Traceability | Number | Comment | EOF
    6.14 -  datatype T = Token of kind * string * Position.T;
    6.15 -  type chars
    6.16 -  type banner
    6.17 -  type date
    6.18 -  type time
    6.19 -  val tokenize: (chars -> 'a * chars) -> (chars -> T * chars) ->
    6.20 -    Position.T -> string -> 'a * T list
    6.21 -  val position_of: T -> Position.T
    6.22 -  val pos_of: T -> string
    6.23 -  val is_eof: T -> bool
    6.24 -  val stopper: T Scan.stopper
    6.25 -  val kind_of: T -> kind
    6.26 -  val content_of: T -> string
    6.27 -  val unparse: T -> string
    6.28 -  val is_proper: T -> bool
    6.29 -  val is_digit: string -> bool
    6.30 -  val is_char_eof: string * 'a -> bool
    6.31 -  val c_comment: chars -> T * chars
    6.32 -  val curly_comment: chars -> T * chars
    6.33 -  val percent_comment: chars -> T * chars
    6.34 -  val vcg_header: chars -> (banner * (date * time) option) * chars
    6.35 -  val siv_header: chars ->
    6.36 -    (banner * (date * time) * (date * time) * (string * string)) * chars (*.. NOT [], but ..
    6.37 -    (^^^^^^^^^^^^^^^ header ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^)   ^^^^^ ..trailing line=13..117 *)
    6.38 -  val explode_pos: string -> Position.T -> chars
    6.39 -  val leq_token: T * T -> bool
    6.40 -  val lexicon: Scan.lexicon
    6.41 -  val identifier: chars -> chars * chars
    6.42 -  val long_identifier: chars -> chars * chars
    6.43 -  val banner: chars -> (string * string * string) * chars
    6.44 -  val date: chars -> (string * string * string) * chars;
    6.45 -  val whitespace: chars -> chars * chars
    6.46 -  val whitespace1: chars -> chars * chars
    6.47 -  val keyword: string -> chars -> chars * chars
    6.48 -  val number: chars -> chars * chars
    6.49 -  val any: chars -> (string * Position.T) * chars
    6.50 -  val any_line: chars -> string * chars;
    6.51 -  val !!! : string -> (chars -> 'a) -> chars -> 'a
    6.52 -  val $$$ : string -> chars -> chars * chars
    6.53 -  val scan: (chars -> 'a * chars) -> (chars -> T * chars) -> chars -> ('a * T list) * chars;
    6.54 -  val char_stopper: (string * Position.T) Scan.stopper
    6.55 -  val make_token: kind -> chars -> T
    6.56 -end;
    6.57 -
    6.58 -structure Fdl_Lexer(**): FDL_LEXER(**) =
    6.59 -struct
    6.60 -
    6.61 -(** tokens **)
    6.62 -
    6.63 -datatype kind = Keyword | Ident | Long_Ident | Traceability | Number | Comment | EOF;
    6.64 -
    6.65 -datatype T = Token of kind * string * Position.T;
    6.66 -(*for Formalise.T     ^^^^ NOT required?*)
    6.67 -
    6.68 -fun make_token k xs = Token (k, implode (map fst xs),
    6.69 -  case xs of [] => Position.none | (_, p) :: _ => p);
    6.70 -
    6.71 -fun kind_of (Token (k, _, _)) = k;
    6.72 -
    6.73 -fun is_proper (Token (Comment, _, _)) = false
    6.74 -  | is_proper _ = true;
    6.75 -
    6.76 -fun content_of (Token (_, s, _)) = s;
    6.77 -
    6.78 -fun unparse (Token (Traceability, s, _)) = "For " ^ s ^ ":"
    6.79 -  | unparse (Token (_, s, _)) = s;
    6.80 -
    6.81 -fun position_of (Token (_, _, pos)) = pos;
    6.82 -
    6.83 -val pos_of = Position.here o position_of;
    6.84 -
    6.85 -fun is_eof (Token (EOF, _, _)) = true
    6.86 -  | is_eof _ = false;
    6.87 -
    6.88 -fun mk_eof pos = Token (EOF, "", pos);
    6.89 -val eof = mk_eof Position.none;
    6.90 -
    6.91 -val stopper =
    6.92 -  Scan.stopper (fn [] => eof | toks => mk_eof (position_of (List.last toks))) is_eof;
    6.93 -
    6.94 -fun leq_token (Token (_, s, _), Token (_, s', _)) = size s <= size s';
    6.95 -
    6.96 -
    6.97 -(** split up a string into a list of characters (with positions) **)
    6.98 -
    6.99 -type chars = (string * Position.T) list;
   6.100 -
   6.101 -fun is_char_eof ("", _) = true
   6.102 -  | is_char_eof _ = false;
   6.103 -
   6.104 -val char_stopper = Scan.stopper (K ("", Position.none)) is_char_eof;
   6.105 -
   6.106 -fun symbol (x : string, _ : Position.T) = x;
   6.107 -
   6.108 -(* convert string s to chars ("", pos) *)
   6.109 -fun explode_pos s pos = fst (fold_map (fn x => fn pos =>
   6.110 -  ((x, pos), Position.symbol x pos)) (raw_explode s) pos);
   6.111 -
   6.112 -
   6.113 -(** scanners **)
   6.114 -
   6.115 -val any = Scan.one (not o Scan.is_stopper char_stopper);
   6.116 -
   6.117 -fun prfx [] = Scan.succeed []
   6.118 -  | prfx (x :: xs) = Scan.one (equal x o symbol) ::: prfx xs;
   6.119 -
   6.120 -val $$$ = prfx o raw_explode;
   6.121 -
   6.122 -val lexicon = Scan.make_lexicon (map raw_explode
   6.123 -  ["rule_family",
   6.124 -   "For",
   6.125 -   ":",
   6.126 -   "[",
   6.127 -   "]",
   6.128 -   "(",
   6.129 -   ")",
   6.130 -   ",",
   6.131 -   "&",
   6.132 -   ";",
   6.133 -   "=",
   6.134 -   ".",
   6.135 -   "..",
   6.136 -   "requires",
   6.137 -   "may_be_replaced_by",
   6.138 -   "may_be_deduced",
   6.139 -   "may_be_deduced_from",
   6.140 -   "are_interchangeable",
   6.141 -   "if",
   6.142 -   "end",
   6.143 -   "function",
   6.144 -   "procedure",
   6.145 -   "type",
   6.146 -   "var",
   6.147 -   "const",
   6.148 -   "array",
   6.149 -   "record",
   6.150 -   ":=",
   6.151 -   "of",
   6.152 -   "**",
   6.153 -   "*",
   6.154 -   "/",
   6.155 -   "div",
   6.156 -   "mod",
   6.157 -   "+",
   6.158 -   "-",
   6.159 -   "<>",
   6.160 -   "<",
   6.161 -   ">",
   6.162 -   "<=",
   6.163 -   ">=",
   6.164 -   "<->",
   6.165 -   "->",
   6.166 -   "not",
   6.167 -   "and",
   6.168 -   "or",
   6.169 -   "for_some",
   6.170 -   "for_all",
   6.171 -   "***",
   6.172 -   "!!!",
   6.173 -   "element",
   6.174 -   "update",
   6.175 -   "pending"]);
   6.176 -
   6.177 -fun keyword s = Scan.literal lexicon :|--
   6.178 -  (fn xs => if map symbol xs = raw_explode s then Scan.succeed xs else Scan.fail);
   6.179 -
   6.180 -fun is_digit x = "0" <= x andalso x <= "9";
   6.181 -fun is_alpha x = "a" <= x andalso x <= "z" orelse "A" <= x andalso x <= "Z";
   6.182 -val is_underscore = equal "_";
   6.183 -val is_tilde = equal "~";
   6.184 -val is_newline = equal "\n";
   6.185 -val is_tab = equal "\t";
   6.186 -val is_space = equal " ";
   6.187 -val is_whitespace = is_space orf is_tab orf is_newline;
   6.188 -val is_whitespace' = is_space orf is_tab;
   6.189 -
   6.190 -val number = Scan.many1 (is_digit o symbol);
   6.191 -
   6.192 -val identifier =
   6.193 -  Scan.one (is_alpha o symbol) :::
   6.194 -  Scan.many
   6.195 -    ((is_alpha orf is_digit orf is_underscore) o symbol) @@@
   6.196 -   Scan.optional (Scan.one (is_tilde o symbol) >> single) [];
   6.197 -
   6.198 -val long_identifier =
   6.199 -  identifier @@@ Scan.repeats1 ($$$ "." @@@ identifier);
   6.200 -
   6.201 -val whitespace = Scan.many (is_whitespace o symbol);
   6.202 -val whitespace1 = Scan.many1 (is_whitespace o symbol);
   6.203 -val whitespace' = Scan.many (is_whitespace' o symbol);
   6.204 -val newline = Scan.one (is_newline o symbol);
   6.205 -
   6.206 -fun beginning n cs =
   6.207 -  let
   6.208 -    val drop_blanks = drop_suffix is_whitespace;
   6.209 -    val all_cs = drop_blanks cs;
   6.210 -    val dots = if length all_cs > n then " ..." else "";
   6.211 -  in
   6.212 -    (drop_blanks (take n all_cs)
   6.213 -      |> map (fn c => if is_whitespace c then " " else c)
   6.214 -      |> implode) ^ dots
   6.215 -  end;
   6.216 -
   6.217 -fun !!! text scan =
   6.218 -  let
   6.219 -    fun get_pos [] = " (end-of-input)"
   6.220 -      | get_pos ((_, pos) :: _) = Position.here pos;
   6.221 -
   6.222 -    fun err (syms, msg) = fn () =>
   6.223 -      text ^ get_pos syms ^ " at " ^ beginning 10 (map symbol syms) ^
   6.224 -        (case msg of NONE => "" | SOME m => "\n" ^ m ());
   6.225 -  in Scan.!! err scan end;
   6.226 -
   6.227 -val any_line' =
   6.228 -  Scan.many (not o (Scan.is_stopper char_stopper orf (is_newline o symbol)));
   6.229 -
   6.230 -val any_line = whitespace' |-- any_line' --|
   6.231 -  newline >> (implode o map symbol);
   6.232 -
   6.233 -fun gen_comment a b = $$$ a |-- !!! "unclosed comment"
   6.234 -  (Scan.repeat (Scan.unless ($$$ b) any) --| $$$ b) >> make_token Comment;
   6.235 -
   6.236 -val c_comment = gen_comment "/*" "*/";
   6.237 -fun c_comment chars = 
   6.238 -  let
   6.239 -(** )val _ = @{print} {a = "//--- ###I Fdl_Lexer.c_comment called with", 
   6.240 -      b_lenght_chars = length chars, chars = chars};( **)
   6.241 -    val xxx as (redex, toks) = (gen_comment "/*" "*/") chars
   6.242 -(** )val _ = @{print} {z = "\\--- ###I Fdl_Lexer.c_comment \<rightarrow>",
   6.243 -      b_lenght_chars = length toks, res = xxx};( **)
   6.244 -  in xxx end;
   6.245 -val curly_comment = gen_comment "{" "}";
   6.246 -
   6.247 -val percent_comment = $$$ "%" |-- any_line' >> make_token Comment;
   6.248 -
   6.249 -fun repeatn 0 _ = Scan.succeed []
   6.250 -  | repeatn n p = Scan.one p ::: repeatn (n-1) p;
   6.251 -
   6.252 -
   6.253 -(** header of *.vcg and *.siv files (see simplifier/load_provenance.pro) **)
   6.254 -
   6.255 -type banner = string * string * string;
   6.256 -type date = string * string * string;
   6.257 -type time = string * string * string * string option;
   6.258 -
   6.259 -val asterisks = Scan.repeat1 (Scan.one (equal "*" o symbol));
   6.260 -
   6.261 -fun alphan n = repeatn n (is_alpha o symbol) >> (implode o map symbol);
   6.262 -fun digitn n = repeatn n (is_digit o symbol) >> (implode o map symbol);
   6.263 -
   6.264 -val time =
   6.265 -  digitn 2 --| $$$ ":" -- digitn 2 --| $$$ ":" -- digitn 2 --
   6.266 -  Scan.option ($$$ "." |-- digitn 2) >>
   6.267 -    (fn (((hr, mi), s), ms) => (hr, mi, s, ms));
   6.268 -
   6.269 -val date =
   6.270 -  digitn 2 --| $$$ "-" -- alphan 3 --| $$$ "-" -- digitn 4 >>
   6.271 -    (fn ((d, m), y) => (d, m, y));
   6.272 -
   6.273 -val banner = 
   6.274 -  whitespace' |-- asterisks --| whitespace' --| newline :|-- (fn xs =>
   6.275 -    (any_line -- any_line -- any_line >>
   6.276 -(*WN:^^^^^^^^    ^^^^^^^^    ^^^^^^^^   3 lines ending with \n *)
   6.277 -       (fn ((l1, l2), l3) => (l1, l2, l3))) --|
   6.278 -    whitespace' --| prfx (map symbol xs) --| whitespace' --| newline);
   6.279 -
   6.280 -val vcg_header = banner -- Scan.option (whitespace |--
   6.281 -  $$$ "DATE :" |-- whitespace |-- date --| whitespace --|
   6.282 -  Scan.option ($$$ "TIME :" -- whitespace) -- time);
   6.283 -
   6.284 -val siv_header = banner --| whitespace --
   6.285 -  ($$$ "CREATED" |-- whitespace |-- (date --| $$$ "," --| whitespace -- time)) --|
   6.286 -  whitespace --
   6.287 -  ($$$ "SIMPLIFIED" |-- whitespace |-- (date --| $$$ "," --| whitespace -- time)) --|
   6.288 -  newline --| newline -- (any_line -- any_line) >>
   6.289 -    (fn (((b, c), s), ls) => (b, c, s, ls));
   6.290 -
   6.291 -(*WN: print ---vvvvv*)
   6.292 -fun siv_header chars =
   6.293 -  let
   6.294 -(** )val _ = @{print} {a = "//--- ###I Fdl_Lexer.siv_header called with", 
   6.295 -      b_lenght_chars = length chars, chars = chars};( **)
   6.296 -    val xxx as (redex, toks) = 
   6.297 -      (
   6.298 -        banner --| whitespace --
   6.299 -        ($$$ "CREATED" |-- whitespace |-- (date --| $$$ "," --| whitespace -- time)) --|
   6.300 -        whitespace --
   6.301 -        ($$$ "SIMPLIFIED" |-- whitespace |-- (date --| $$$ "," --| whitespace -- time)) --|
   6.302 -        newline --| newline -- (any_line -- any_line) >>
   6.303 -          (fn (((b, c), s), ls) => (b, c, s, ls))
   6.304 -      ) chars;
   6.305 -(** )val _ = @{print} {z = "\\--- ###I Fdl_Lexer.siv_header \<rightarrow>",
   6.306 -      b_lenght_chars = length toks, res = xxx};( **)
   6.307 -  in xxx end;
   6.308 -
   6.309 -(** the main tokenizer **)
   6.310 -
   6.311 -fun scan header comment =
   6.312 -  !!! "bad header" header --| whitespace --
   6.313 -  Scan.repeat (Scan.unless (Scan.one is_char_eof)
   6.314 -    (!!! "bad input"
   6.315 -       (   comment
   6.316 -        || (keyword "For" -- whitespace1) |--
   6.317 -              Scan.repeat1 (Scan.unless (keyword ":") any) --|
   6.318 -              keyword ":" >> make_token Traceability
   6.319 -        || Scan.max leq_token
   6.320 -             (Scan.literal lexicon >> make_token Keyword)
   6.321 -             (   long_identifier >> make_token Long_Ident
   6.322 -              || identifier >> make_token Ident)
   6.323 -        || number >> make_token Number) --|
   6.324 -     whitespace) );
   6.325 -fun scan header comment chars =
   6.326 -  let
   6.327 -    (** )val _ = @{print} {a = "//--- ###I Fdl_Lexer.scan called with", 
   6.328 -      b_lenght_chars = length chars, chars = chars};( **)
   6.329 -    val xxx as (redex, toks) = 
   6.330 -      (!!! "bad header" header --| whitespace --
   6.331 -      Scan.repeat (Scan.unless (Scan.one is_char_eof)
   6.332 -        (!!! "bad input"
   6.333 -           (   comment
   6.334 -            || (keyword "For" -- whitespace1) |--
   6.335 -                  Scan.repeat1 (Scan.unless (keyword ":") any) --|
   6.336 -                  keyword ":" >> make_token Traceability             
   6.337 -            || Scan.max leq_token
   6.338 -                 (Scan.literal lexicon >> make_token Keyword)
   6.339 -                 (   long_identifier >> make_token Long_Ident
   6.340 -                  || identifier >> make_token Ident)
   6.341 -            || number >> make_token Number) --|
   6.342 -         whitespace))) chars
   6.343 -    (** )val _ = @{print} {z = "\\--- ###I Fdl_Lexer.scan \<rightarrow>",
   6.344 -      b_lenght_chars = length toks, res = xxx};( **)
   6.345 -  in xxx end;
   6.346 -
   6.347 -fun tokenize header comment pos s =
   6.348 -  fst (Scan.finite char_stopper
   6.349 -    (Scan.error (scan header comment)) (explode_pos s pos));
   6.350 -
   6.351 -fun tokenize header(*= Fdl_Lexer.siv_header*) comment pos s =
   6.352 -((** )@{print} {a = "//--- ###I Fdl_Lexer.tokenize called with", header = header,
   6.353 -    comment = comment, pos = pos, s = s};( **)
   6.354 -  let
   6.355 -(** )val _ = @{print} {a = "###I Fdl_Lexer.tokenize: explode_pos", res = explode_pos s pos};( **)
   6.356 -(*convert string s to chars ("", pos) is ---vvvvvvvvvvv                  ^^^^^^^^^^^*)
   6.357 -    val xxx as (redex, toks) =
   6.358 -      fst (Scan.finite char_stopper
   6.359 -        (Scan.error (scan header comment)) (explode_pos s pos))
   6.360 -(** )val _ = @{print} {a = "\\--- ###I Fdl_Lexer.tokenize \<rightarrow>",
   6.361 -      b_lenght_chars = length toks, res = xxx};( **)
   6.362 -  in xxx end
   6.363 -);
   6.364 -
   6.365 -end;
     7.1 --- a/src/Tools/isac/BridgeJEdit/fdl_parser.ML	Tue Aug 23 18:05:08 2022 +0200
     7.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     7.3 @@ -1,415 +0,0 @@
     7.4 -(*  Title:      HOL/SPARK/Tools/fdl_parser.ML
     7.5 -    Author:     Stefan Berghofer
     7.6 -    Copyright:  secunet Security Networks AG
     7.7 -
     7.8 -Parser for fdl files.
     7.9 -*)
    7.10 -
    7.11 -signature FDL_PARSER =
    7.12 -sig
    7.13 -  datatype expr =
    7.14 -      Ident of string
    7.15 -    | Number of int
    7.16 -    | Quantifier of string * string list * string * expr
    7.17 -    | Funct of string * expr list
    7.18 -    | Element of expr * expr list
    7.19 -    | Update of expr * expr list * expr
    7.20 -    | Record of string * (string * expr) list
    7.21 -    | Array of string * expr option *
    7.22 -        ((expr * expr option) list list * expr) list;
    7.23 -
    7.24 -  datatype fdl_type =
    7.25 -      Basic_Type of string
    7.26 -    | Enum_Type of string list
    7.27 -    | Array_Type of string list * string
    7.28 -    | Record_Type of (string list * string) list
    7.29 -    | Pending_Type;
    7.30 -
    7.31 -  datatype fdl_rule =
    7.32 -      Inference_Rule of expr list * expr
    7.33 -    | Substitution_Rule of expr list * expr * expr;
    7.34 -
    7.35 -  type rules =
    7.36 -    ((string * int) * fdl_rule) list *
    7.37 -    (string * (expr * (string * string) list) list) list;
    7.38 -  val empty_rules: rules
    7.39 -
    7.40 -  type vcs = (string * (string *
    7.41 -    ((string * expr) list * (string * expr) list)) list) list;
    7.42 -  val empty_vcs: (string * string) * vcs
    7.43 -
    7.44 -  type 'a tab = 'a Symtab.table * (string * 'a) list;
    7.45 -
    7.46 -  val lookup: 'a tab -> string -> 'a option;
    7.47 -  val update: string * 'a -> 'a tab -> 'a tab;
    7.48 -  val items: 'a tab -> (string * 'a) list;
    7.49 -
    7.50 -  type decls =
    7.51 -    {types: fdl_type tab,
    7.52 -     vars: string tab,
    7.53 -     consts: string tab,
    7.54 -     funs: (string list * string) tab};
    7.55 -  val empty_decls: decls
    7.56 -
    7.57 -  val parse_vcs: (Fdl_Lexer.chars -> 'a * Fdl_Lexer.chars) -> Position.T ->
    7.58 -    string -> 'a * ((string * string) * vcs);
    7.59 -
    7.60 -  val parse_declarations:  Position.T -> string -> (string * string) * decls;
    7.61 -
    7.62 -  val parse_rules: Position.T -> string -> rules;
    7.63 -  val vcs: Fdl_Lexer.T list ->
    7.64 -    ((string * string) * (string * (string * ((string * expr) list * (string * expr) list)) list) list) * Fdl_Lexer.T list;
    7.65 -  val !!! : (Fdl_Lexer.T list -> 'a) -> Fdl_Lexer.T list -> 'a;
    7.66 -  val $$$ : string -> Fdl_Lexer.T list -> string * Fdl_Lexer.T list
    7.67 -  val group: string -> ('a -> 'b) -> 'a -> 'b
    7.68 -  val identifier: Fdl_Lexer.T list -> string * Fdl_Lexer.T list
    7.69 -  val enum1: string -> (Fdl_Lexer.T list -> 'a * Fdl_Lexer.T list) -> Fdl_Lexer.T list ->
    7.70 -    'a list * Fdl_Lexer.T list
    7.71 -  val list1: (Fdl_Lexer.T list -> 'a * Fdl_Lexer.T list) -> Fdl_Lexer.T list ->
    7.72 -    'a list * Fdl_Lexer.T list
    7.73 -end;
    7.74 -
    7.75 -structure Fdl_Parser(**): FDL_PARSER(**) =
    7.76 -struct
    7.77 -
    7.78 -(** error handling **)
    7.79 -
    7.80 -fun beginning n cs =
    7.81 -  let val dots = if length cs > n then " ..." else "";
    7.82 -  in
    7.83 -    space_implode " " (take n cs) ^ dots
    7.84 -  end;
    7.85 -
    7.86 -fun !!! scan =
    7.87 -  let
    7.88 -    fun get_pos [] = " (end-of-input)"
    7.89 -      | get_pos (tok :: _) = Fdl_Lexer.pos_of tok;
    7.90 -
    7.91 -    fun err (syms, msg) = fn () =>
    7.92 -      "Syntax error" ^ get_pos syms ^ " at " ^
    7.93 -        beginning 10 (map Fdl_Lexer.unparse syms) ^
    7.94 -        (case msg of NONE => "" | SOME m => "\n" ^ m () ^ " expected");
    7.95 -  in Scan.!! err scan end;
    7.96 -
    7.97 -fun parse_all p =
    7.98 -  Scan.repeat (Scan.unless (Scan.one Fdl_Lexer.is_eof) (!!! p));
    7.99 -
   7.100 -
   7.101 -(** parsers **)
   7.102 -
   7.103 -fun group s p = p || Scan.fail_with (K (fn () => s));
   7.104 -
   7.105 -fun $$$ s = group s
   7.106 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Keyword andalso
   7.107 -     Fdl_Lexer.content_of t = s) >> K s);
   7.108 -
   7.109 -val identifier = group "identifier"
   7.110 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Ident) >>
   7.111 -     Fdl_Lexer.content_of);
   7.112 -
   7.113 -val long_identifier = group "long identifier"
   7.114 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Long_Ident) >>
   7.115 -     Fdl_Lexer.content_of);
   7.116 -
   7.117 -fun the_identifier s = group s
   7.118 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Ident andalso
   7.119 -     Fdl_Lexer.content_of t = s) >> K s);
   7.120 -
   7.121 -fun prfx_identifier g s = group g
   7.122 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Ident andalso
   7.123 -     can (unprefix s) (Fdl_Lexer.content_of t)) >>
   7.124 -     (unprefix s o Fdl_Lexer.content_of));
   7.125 -
   7.126 -val mk_identifier = prfx_identifier "identifier \"mk__...\"" "mk__";
   7.127 -val hyp_identifier = prfx_identifier "hypothesis identifer" "H";
   7.128 -val concl_identifier = prfx_identifier "conclusion identifier" "C";
   7.129 -
   7.130 -val number = group "number"
   7.131 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Number) >>
   7.132 -     (the o Int.fromString o Fdl_Lexer.content_of));
   7.133 -
   7.134 -val traceability = group "traceability information"
   7.135 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Traceability) >>
   7.136 -     Fdl_Lexer.content_of);
   7.137 -
   7.138 -fun enum1 sep scan = scan ::: Scan.repeat ($$$ sep |-- !!! scan);
   7.139 -fun enum sep scan = enum1 sep scan || Scan.succeed [];
   7.140 -
   7.141 -fun list1 scan = enum1 "," scan;
   7.142 -fun list scan = enum "," scan;
   7.143 -
   7.144 -
   7.145 -(* expressions, see section 4.4 of SPARK Proof Manual *)
   7.146 -
   7.147 -datatype expr =
   7.148 -    Ident of string
   7.149 -  | Number of int
   7.150 -  | Quantifier of string * string list * string * expr
   7.151 -  | Funct of string * expr list
   7.152 -  | Element of expr * expr list
   7.153 -  | Update of expr * expr list * expr
   7.154 -  | Record of string * (string * expr) list
   7.155 -  | Array of string * expr option *
   7.156 -      ((expr * expr option) list list * expr) list;
   7.157 -
   7.158 -fun unop (f, x) = Funct (f, [x]);
   7.159 -
   7.160 -fun binop p q = p :|-- (fn x => Scan.optional
   7.161 -  (q -- !!! p >> (fn (f, y) => Funct (f, [x, y]))) x);
   7.162 -
   7.163 -(* left-associative *)
   7.164 -fun binops opp argp =
   7.165 -  argp -- Scan.repeat (opp -- !!! argp) >> (fn (x, fys) =>
   7.166 -    fold (fn (f, y) => fn x => Funct (f, [x, y])) fys x);
   7.167 -
   7.168 -(* right-associative *)
   7.169 -fun binops' f p = enum1 f p >> foldr1 (fn (x, y) => Funct (f, [x, y]));
   7.170 -
   7.171 -val multiplying_operator = $$$ "*" || $$$ "/" || $$$ "div" || $$$ "mod";
   7.172 -
   7.173 -val adding_operator = $$$ "+" || $$$ "-";
   7.174 -
   7.175 -val relational_operator =
   7.176 -     $$$ "=" || $$$ "<>"
   7.177 -  || $$$ "<" || $$$ ">"
   7.178 -  || $$$ "<="|| $$$ ">=";
   7.179 -
   7.180 -val quantification_kind = $$$ "for_all" || $$$ "for_some";
   7.181 -
   7.182 -val quantification_generator =
   7.183 -  list1 identifier --| $$$ ":" -- identifier;
   7.184 -
   7.185 -fun opt_parens p = $$$ "(" |-- p --| $$$ ")" || p;
   7.186 -
   7.187 -fun expression xs = group "expression"
   7.188 -  (binop disjunction ($$$ "->" || $$$ "<->")) xs
   7.189 -
   7.190 -and disjunction xs = binops' "or" conjunction xs
   7.191 -
   7.192 -and conjunction xs = binops' "and" negation xs
   7.193 -
   7.194 -and negation xs =
   7.195 -  (   $$$ "not" -- !!! relation >> unop
   7.196 -   || relation) xs
   7.197 -
   7.198 -and relation xs = binop sum relational_operator xs
   7.199 -
   7.200 -and sum xs = binops adding_operator term xs
   7.201 -
   7.202 -and term xs = binops multiplying_operator factor xs
   7.203 -
   7.204 -and factor xs =
   7.205 -  (   $$$ "+" |-- !!! primary
   7.206 -   || $$$ "-" -- !!! primary >> unop
   7.207 -   || binop primary ($$$ "**")) xs
   7.208 -
   7.209 -and primary xs = group "primary"
   7.210 -  (   number >> Number
   7.211 -   || $$$ "(" |-- !!! (expression --| $$$ ")")
   7.212 -   || quantified_expression
   7.213 -   || function_designator
   7.214 -   || identifier >> Ident) xs
   7.215 -
   7.216 -and quantified_expression xs = (quantification_kind --
   7.217 -  !!! ($$$ "(" |-- quantification_generator --|  $$$ "," --
   7.218 -    expression --| $$$ ")") >> (fn (q, ((xs, T), e)) =>
   7.219 -      Quantifier (q, xs, T, e))) xs
   7.220 -
   7.221 -and function_designator xs =
   7.222 -  (   mk_identifier --| $$$ "(" :|--
   7.223 -        (fn s => record_args s || array_args s) --| $$$ ")"
   7.224 -   || $$$ "element" |-- !!! ($$$ "(" |-- expression --| $$$ "," --| $$$ "[" --
   7.225 -        list1 expression --| $$$ "]" --| $$$ ")") >> Element
   7.226 -   || $$$ "update" |-- !!! ($$$ "(" |-- expression --| $$$ "," --| $$$ "[" --
   7.227 -        list1 expression --| $$$ "]" --| $$$ "," -- expression --| $$$ ")") >>
   7.228 -          (fn ((A, xs), x) => Update (A, xs, x))
   7.229 -   || identifier --| $$$ "(" -- !!! (list1 expression --| $$$ ")") >> Funct) xs
   7.230 -
   7.231 -and record_args s xs =
   7.232 -  (list1 (identifier --| $$$ ":=" -- !!! expression) >> (pair s #> Record)) xs
   7.233 -
   7.234 -and array_args s xs =
   7.235 -  (   array_associations >> (fn assocs => Array (s, NONE, assocs))
   7.236 -   || expression -- Scan.optional ($$$ "," |-- !!! array_associations) [] >>
   7.237 -        (fn (default, assocs) => Array (s, SOME default, assocs))) xs
   7.238 -
   7.239 -and array_associations xs =
   7.240 -  (list1 (opt_parens (enum1 "&" ($$$ "[" |--
   7.241 -     !!! (list1 (expression -- Scan.option ($$$ ".." |-- !!! expression)) --|
   7.242 -       $$$ "]"))) --| $$$ ":=" -- expression)) xs;
   7.243 -
   7.244 -
   7.245 -(* verification conditions *)
   7.246 -
   7.247 -type vcs = (string * (string *
   7.248 -  ((string * expr) list * (string * expr) list)) list) list;
   7.249 -
   7.250 -val vc =
   7.251 -  identifier --| $$$ "." -- !!!
   7.252 -    (   $$$ "***" |-- !!! (the_identifier "true" --| $$$ ".") >>
   7.253 -          (Ident #> pair "1" #> single #> pair [])
   7.254 -     || $$$ "!!!" |-- !!! (the_identifier "false" --| $$$ ".") >>
   7.255 -          (Ident #> pair "1" #> single #> pair [])
   7.256 -     || Scan.repeat1 (hyp_identifier --
   7.257 -          !!! ($$$ ":" |-- expression --| $$$ ".")) --| $$$ "->" --
   7.258 -        Scan.repeat1 (concl_identifier --
   7.259 -          !!! ($$$ ":" |-- expression --| $$$ ".")));
   7.260 -
   7.261 -val subprogram_kind = $$$ "function" || $$$ "procedure";
   7.262 -
   7.263 -val vcs =
   7.264 -  subprogram_kind -- (long_identifier || identifier) --
   7.265 -  parse_all (traceability -- !!! (Scan.repeat1 vc));
   7.266 -val empty_vcs = (("e_procedure", "e_G_C_D"), []: vcs)
   7.267 -
   7.268 -fun parse_vcs header pos s =
   7.269 -(*line_1*)s |>
   7.270 -(*line_2*)Fdl_Lexer.tokenize header Fdl_Lexer.c_comment pos ||>
   7.271 -(*line_3*)filter Fdl_Lexer.is_proper ||>
   7.272 -(*line_4*)Scan.finite Fdl_Lexer.stopper (Scan.error (!!! vcs)) ||>
   7.273 -(*line_5*)fst;
   7.274 -(*            vvvvvv-------- = Fdl_Lexer.siv_header*)
   7.275 -fun parse_vcs header pos s =
   7.276 -((** )@{print} {a = "//--- ###I Fdl_Parser.parse_vcs", header = header, pos = pos, s = s};( **)
   7.277 -  let
   7.278 -    val line_2 = Fdl_Lexer.tokenize header Fdl_Lexer.c_comment pos s
   7.279 -(** )val _ = @{print} {a = "###I Fdl_Parser.parse_vcs: ", line_2 = line_2};( **)
   7.280 -    val line_3 = apsnd (filter Fdl_Lexer.is_proper) line_2
   7.281 -    val line_4 = apsnd (Scan.finite Fdl_Lexer.stopper (Scan.error (!!! vcs))) line_3
   7.282 -    val line_5 = apsnd fst line_4
   7.283 -(** )val _ = @{print} {z = "\\--- ###I Fdl_Parser.parse_vcs \<rightarrow>", res = line_5};( **)
   7.284 -  in line_5 end
   7.285 -);
   7.286 -
   7.287 -
   7.288 -(* fdl declarations, see section 4.3 of SPARK Proof Manual *)
   7.289 -
   7.290 -datatype fdl_type =
   7.291 -    Basic_Type of string
   7.292 -  | Enum_Type of string list
   7.293 -  | Array_Type of string list * string
   7.294 -  | Record_Type of (string list * string) list
   7.295 -  | Pending_Type;
   7.296 -
   7.297 -(* also store items in a list to preserve order *)
   7.298 -type 'a tab = 'a Symtab.table * (string * 'a) list;
   7.299 -
   7.300 -fun lookup ((tab, _) : 'a tab) = Symtab.lookup tab;
   7.301 -fun update decl (tab, items) = (Symtab.update_new decl tab, decl :: items);
   7.302 -fun items ((_, items) : 'a tab) = rev items;
   7.303 -
   7.304 -type decls =
   7.305 -  {types: fdl_type tab,
   7.306 -   vars: string tab,
   7.307 -   consts: string tab,
   7.308 -   funs: (string list * string) tab};
   7.309 -
   7.310 -val empty_decls : decls =
   7.311 -  {types = (Symtab.empty, []), vars = (Symtab.empty, []),
   7.312 -   consts = (Symtab.empty, []), funs = (Symtab.empty, [])};
   7.313 -
   7.314 -fun add_type_decl decl {types, vars, consts, funs} =
   7.315 -  {types = update decl types,
   7.316 -   vars = vars, consts = consts, funs = funs}
   7.317 -  handle Symtab.DUP s => error ("Duplicate type " ^ s);
   7.318 -
   7.319 -fun add_var_decl (vs, ty) {types, vars, consts, funs} =
   7.320 -  {types = types,
   7.321 -   vars = fold (update o rpair ty) vs vars,
   7.322 -   consts = consts, funs = funs}
   7.323 -  handle Symtab.DUP s => error ("Duplicate variable " ^ s);
   7.324 -
   7.325 -fun add_const_decl decl {types, vars, consts, funs} =
   7.326 -  {types = types, vars = vars,
   7.327 -   consts = update decl consts,
   7.328 -   funs = funs}
   7.329 -  handle Symtab.DUP s => error ("Duplicate constant " ^ s);
   7.330 -
   7.331 -fun add_fun_decl decl {types, vars, consts, funs} =
   7.332 -  {types = types, vars = vars, consts = consts,
   7.333 -   funs = update decl funs}
   7.334 -  handle Symtab.DUP s => error ("Duplicate function " ^ s);
   7.335 -
   7.336 -fun type_decl x = ($$$ "type" |-- !!! (identifier --| $$$ "=" --
   7.337 -  (   identifier >> Basic_Type
   7.338 -   || $$$ "(" |-- !!! (list1 identifier --| $$$ ")") >> Enum_Type
   7.339 -   || $$$ "array" |-- !!! ($$$ "[" |-- list1 identifier --| $$$ "]" --|
   7.340 -        $$$ "of" -- identifier) >> Array_Type
   7.341 -   || $$$ "record" |-- !!! (enum1 ";"
   7.342 -        (list1 identifier -- !!! ($$$ ":" |-- identifier)) --|
   7.343 -           $$$ "end") >> Record_Type
   7.344 -   || $$$ "pending" >> K Pending_Type)) >> add_type_decl) x;
   7.345 -
   7.346 -fun const_decl x = ($$$ "const" |-- !!! (identifier --| $$$ ":" -- identifier --|
   7.347 -  $$$ "=" --| $$$ "pending") >> add_const_decl) x;
   7.348 -
   7.349 -fun var_decl x = ($$$ "var" |-- !!! (list1 identifier --| $$$ ":" -- identifier) >>
   7.350 -  add_var_decl) x;
   7.351 -
   7.352 -fun fun_decl x = ($$$ "function" |-- !!! (identifier --
   7.353 -  (Scan.optional ($$$ "(" |-- !!! (list1 identifier --| $$$ ")")) [] --|
   7.354 -   $$$ ":" -- identifier)) >> add_fun_decl) x;
   7.355 -
   7.356 -fun declarations x =
   7.357 - (the_identifier "title" |-- subprogram_kind -- identifier --| $$$ ";" --
   7.358 -  (Scan.repeat ((type_decl || const_decl || var_decl || fun_decl) --|
   7.359 -     !!! ($$$ ";")) >> (fn ds => fold I ds empty_decls)) --|
   7.360 -  $$$ "end" --| $$$ ";") x;
   7.361 -
   7.362 -fun parse_declarations pos s =
   7.363 -  s |>
   7.364 -  Fdl_Lexer.tokenize (Scan.succeed ()) Fdl_Lexer.curly_comment pos |>
   7.365 -  snd |> filter Fdl_Lexer.is_proper |>
   7.366 -  Scan.finite Fdl_Lexer.stopper (Scan.error (!!! declarations)) |>
   7.367 -  fst;
   7.368 -
   7.369 -
   7.370 -(* rules, see section 5 of SPADE Proof Checker Rules Manual *)
   7.371 -
   7.372 -datatype fdl_rule =
   7.373 -    Inference_Rule of expr list * expr
   7.374 -  | Substitution_Rule of expr list * expr * expr;
   7.375 -
   7.376 -type rules =
   7.377 -  ((string * int) * fdl_rule) list *
   7.378 -  (string * (expr * (string * string) list) list) list;
   7.379 -val empty_rules =
   7.380 -  ([]: ((string * int) * fdl_rule) list,
   7.381 -   []: (string * (expr * (string * string) list) list) list);
   7.382 -
   7.383 -val condition_list = $$$ "[" |-- list expression --| $$$ "]";
   7.384 -val if_condition_list = $$$ "if" |-- !!! condition_list;
   7.385 -
   7.386 -val rule =
   7.387 -  identifier -- !!! ($$$ "(" |-- number --| $$$ ")" --| $$$ ":" --
   7.388 -  (expression :|-- (fn e =>
   7.389 -        $$$ "may_be_deduced" >> K (Inference_Rule ([], e))
   7.390 -     || $$$ "may_be_deduced_from" |--
   7.391 -          !!! condition_list >> (Inference_Rule o rpair e)
   7.392 -     || $$$ "may_be_replaced_by" |-- !!! (expression --
   7.393 -          Scan.optional if_condition_list []) >> (fn (e', cs) =>
   7.394 -            Substitution_Rule (cs, e, e'))
   7.395 -     || $$$ "&" |-- !!! (expression --| $$$ "are_interchangeable" --
   7.396 -          Scan.optional if_condition_list []) >> (fn (e', cs) =>
   7.397 -            Substitution_Rule (cs, e, e')))) --| $$$ ".") >>
   7.398 -    (fn (id, (n, rl)) => ((id, n), rl));
   7.399 -
   7.400 -val rule_family = 
   7.401 -  $$$ "rule_family" |-- identifier --| $$$ ":" --
   7.402 -  enum1 "&" (expression -- !!! ($$$ "requires" |-- $$$ "[" |--
   7.403 -    list (identifier -- !!! ($$$ ":" |-- identifier)) --| $$$ "]")) --|
   7.404 -  $$$ ".";
   7.405 -
   7.406 -val rules =
   7.407 -  parse_all (rule >> (apfst o cons) || rule_family >> (apsnd o cons)) >>
   7.408 -  (fn rls => fold_rev I rls ([], []));
   7.409 -
   7.410 -fun parse_rules pos s =
   7.411 -  s |>
   7.412 -  Fdl_Lexer.tokenize (Scan.succeed ())
   7.413 -    (Fdl_Lexer.c_comment || Fdl_Lexer.percent_comment) pos |>
   7.414 -  snd |> filter Fdl_Lexer.is_proper |>
   7.415 -  Scan.finite Fdl_Lexer.stopper (Scan.error (!!! rules)) |>
   7.416 -  fst;
   7.417 -
   7.418 -end;
     8.1 --- a/src/Tools/isac/BridgeJEdit/parseC-OLD.sml	Tue Aug 23 18:05:08 2022 +0200
     8.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     8.3 @@ -1,387 +0,0 @@
     8.4 -(*  Title:      Tools/isac/BridgeJEdit/parseC-OLD.sml
     8.5 -    Author:     Walther Neuper, JKU Linz
     8.6 -    (c) due to copyright terms
     8.7 -
     8.8 -Outer token syntax for Isabelle/Isac via .
     8.9 -*)
    8.10 -
    8.11 -signature PARSE_C_OLD =
    8.12 -sig
    8.13 -  type problem
    8.14 -  val problem: problem parser
    8.15 -  type problem_headline
    8.16 -  val problem_headline: problem_headline parser
    8.17 -
    8.18 -(* exclude "Problem" from parsing * )
    8.19 -  val read_out_headline: headline_string * Token.src -> ThyC.id * Problem.id
    8.20 -( * exclude "Problem" from parsing *)
    8.21 -  val read_out_headline: problem_headline -> ThyC.id * Problem.id
    8.22 -(* exclude "Problem" from parsing *)
    8.23 -  val read_out_problem: problem -> P_Spec.record
    8.24 -
    8.25 -  val tokenize_formalise: Position.T -> string -> Fdl_Lexer.T list * Fdl_Lexer.chars
    8.26 -
    8.27 -  type form_model = TermC.as_string list;
    8.28 -  type form_refs = (((((string * ThyC.id) * string) * Problem.id) * string) * MethodC.id) * string
    8.29 -  type form_model_refs = 
    8.30 -    (((((string * string) * form_model) * string) * form_refs) * string) * string
    8.31 -
    8.32 -  val read_out_formalise: form_model_refs -> Formalise.T list
    8.33 -  (*RENAME TO parse_formalise..*)
    8.34 -  val formalisation: Fdl_Lexer.T list -> form_model_refs * Fdl_Lexer.T list;
    8.35 -
    8.36 -\<^isac_test>\<open>
    8.37 -  val tokenize: string -> Token.src
    8.38 -  val string_of_toks: Token.src -> string
    8.39 -  val parse: (Token.src -> 'a * Token.src) -> Token.src -> 'a * Token.src
    8.40 -
    8.41 -  (*Model*)
    8.42 -  type model
    8.43 -  val model: model parser
    8.44 -
    8.45 -  (*References*)
    8.46 -  type refs
    8.47 -  type model_refs_dummy = model * ((string * string) * refs)
    8.48 -  val model_refs_dummy: model_refs_dummy
    8.49 -  val references: ((string * string) * refs) parser
    8.50 -  val refs_dummy: refs
    8.51 -
    8.52 -  (*Specification*)
    8.53 -  type specification = (string * string) * model_refs_dummy
    8.54 -  val specification: specification parser
    8.55 -
    8.56 -  (*Tactics *)
    8.57 -  val check_postcond: (string * string list) parser
    8.58 -  val rewrite_set_inst: ((string * string) * string) parser
    8.59 -  val substitute: ((string * string) * string) parser
    8.60 -  val exec_check_postcond: string * string list -> string
    8.61 -  val exec_rewrite_set_inst: (string * string) * string -> string
    8.62 -  val exec_substitute: (string * string) * string -> string
    8.63 -
    8.64 -  val tactic: Token.src -> (string * string) * Token.src
    8.65 -
    8.66 -  (*Steps*)
    8.67 -  val steps: (string * (string * string)) list parser
    8.68 -  val exec_step_term: string * (string * string) -> string
    8.69 -  val steps_subpbl: string list parser
    8.70 -
    8.71 -  (*Problem*)
    8.72 -  type body
    8.73 -  val body: body parser
    8.74 -  val body_dummy: body
    8.75 -
    8.76 -  val tokenize_string: Fdl_Lexer.chars -> Fdl_Lexer.T * Fdl_Lexer.chars
    8.77 -  val scan: Fdl_Lexer.chars -> Fdl_Lexer.T list * Fdl_Lexer.chars
    8.78 -  val parse_form_refs: Fdl_Lexer.T list -> form_refs * Fdl_Lexer.T list;
    8.79 -  val parse_form_model: Fdl_Lexer.T list -> string list * Fdl_Lexer.T list;
    8.80 -  val parse_string         : Fdl_Lexer.T list -> string * Fdl_Lexer.T list;
    8.81 -\<close>
    8.82 -end
    8.83 -
    8.84 -(**)
    8.85 -structure ParseC_OLD(**): PARSE_C_OLD(**) =
    8.86 -struct
    8.87 -(**)
    8.88 -
    8.89 -(**** Tools ****)
    8.90 -
    8.91 -\<^isac_test>\<open>
    8.92 -fun tokenize str =
    8.93 -  filter Token.is_proper (Token.explode (Thy_Header.get_keywords' @{context}) Position.none str)
    8.94 -
    8.95 -fun parse parser toks = Scan.finite Token.stopper (Scan.error parser) toks
    8.96 -
    8.97 -fun string_of_toks toks = fold (curry op ^) (map Token.unparse toks |> rev |> separate ", ") "";
    8.98 -\<close>
    8.99 -
   8.100 -(**** parse Calculation ****)
   8.101 -
   8.102 -(*** Problem headline ***)
   8.103 -
   8.104 -(* exclude "Problem" from parsing * )
   8.105 -type problem_headline = (((((string * string) * string) * string) * string list) * string)
   8.106 -( * exclude "Problem" from parsing *)
   8.107 -type problem_headline = (((string * ThyC.id) * string) * Problem.id) * string
   8.108 -(* exclude "Problem" from parsing *)
   8.109 -val problem_headline = (*Parse.$$$ "Problem" --*)
   8.110 -  Parse.$$$ "(" --
   8.111 -    Parse.string -- Parse.$$$ "," --
   8.112 -      (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") --
   8.113 -  Parse.$$$ ")"
   8.114 -
   8.115 -(* exclude "Problem" from parsing * )
   8.116 -fun string_of_headline ((((left, thy_id: ThyC.id), comm), pbl_id), right) =
   8.117 -      left ^ thy_id ^ comm ^ Problem.id_to_string pbl_id ^ right
   8.118 -fun read_out_headline ((((("(", thy_id), ","), pbl_id), ")"), []) = (thy_id, pbl_id)
   8.119 -  | read_out_headline (hdl, toks) =
   8.120 -      raise ERROR ("read_out_headline NOT for: (" ^ string_of_headline hdl ^ ", " ^
   8.121 -        string_of_toks toks ^ ")")
   8.122 -( * exclude "Problem" from parsing *)
   8.123 -fun read_out_headline ((((("(", thy_id), ","), pbl_id), ")")) = (thy_id, pbl_id)
   8.124 -  | read_out_headline arg =
   8.125 -      (@{print} {a = "read_out_headline with", arg = arg}; raise ERROR "read_out_headline")
   8.126 -(**)
   8.127 -
   8.128 -(* used if at least "Problem (thy_id, pbl_id)" has been input *)
   8.129 -fun read_out_problem ((((((
   8.130 -    "(", thy_id), ","), pbl_id), ")"), (((((
   8.131 -      _(*"Specification"*), _(*":"*)), (((((((((((((((
   8.132 -        _(*"Model"*), (("", ""), "")), _(*":"*)),
   8.133 -          _(*"Given"*)), _(*":"*)), givens),
   8.134 -          _(*"Where"*)), _(*":"*)), wheres),
   8.135 -          _(*"Find"*)), _(*":"*)), find),
   8.136 -          _(*"Relate"*)), _(*":"*)), relates), ((
   8.137 -        _(*"References"*), _(*":"*)), ((((((((
   8.138 -          _(*"RTheory"*), _(*":"*)), rthy_id),
   8.139 -          _(*"RProblem"*)), _(*":"*)), rpbl_id),
   8.140 -          _(*"RMethod"*)), _(*":"*)), rmet_id)))),
   8.141 -        _(*"Solution"*)), _(*":"*)),
   8.142 -          _(*[]*))),
   8.143 -       _(*""*)) =
   8.144 -    {thy_id = thy_id, pbl_id = pbl_id,
   8.145 -      givens = givens, wheres = wheres, find = find, relates = relates,
   8.146 -      rthy_id = rthy_id, rpbl_id = rpbl_id, rmet_id = rmet_id}
   8.147 -  | read_out_problem arg =
   8.148 -    (@{print} {a = "read_out_problem called with", arg = arg};
   8.149 -      raise ERROR "read_out_problem called with")
   8.150 -
   8.151 -
   8.152 -(*** Specification ***)
   8.153 -
   8.154 -(** Model **)
   8.155 -
   8.156 -type model =
   8.157 -  ((((((((((((((string * ((string * string) * string)) * string) * string) * string) *
   8.158 -      string list) * string) * string) * string list) * string) * string) * string) *
   8.159 -         string) * string) * string list);
   8.160 -val model = (
   8.161 -  Parse.command_name "Model" --
   8.162 -    (Scan.optional (*.. show, whether the Model refers to RProblem or to RMethod *)
   8.163 -      (Parse.$$$ "(" --
   8.164 -        (Parse.command_name "RProblem" || Parse.command_name "RMethod") -- Parse.$$$ ")")
   8.165 -      (("", ""), "") ) --
   8.166 -    Parse.$$$ ":" --
   8.167 -    Parse.command_name "Given" -- Parse.$$$ ":" -- Parse.list1 Parse.term --
   8.168 -    Parse.command_name "Where" -- Parse.$$$ ":" -- Parse.list Parse.term --
   8.169 -    Parse.command_name "Find" -- Parse.$$$ ":" -- Parse.term --
   8.170 -    Parse.command_name "Relate" -- Parse.$$$ ":" -- Parse.list Parse.term
   8.171 -)
   8.172 -
   8.173 -(** References **)
   8.174 -
   8.175 -type refs =
   8.176 -  ((((((((string * string) * string) * string) * string) * string list) * string) * string) * string list)
   8.177 -val refs_dummy =
   8.178 -  (((((((("", ""), ""), ""), ""), []), ""), ""), [])
   8.179 -
   8.180 -val references = (
   8.181 -  Parse.command_name "References" -- Parse.$$$ ":" (**) --
   8.182 -    (Scan.optional 
   8.183 -      (Parse.command_name "RTheory" -- Parse.$$$ ":" -- Parse.string (**) --
   8.184 -      Parse.command_name "RProblem" -- Parse.$$$ ":" (**) --
   8.185 -        (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") (**) --
   8.186 -      Parse.command_name "RMethod" -- Parse.$$$ ":" (**) --
   8.187 -        (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]")
   8.188 -      )
   8.189 -      refs_dummy
   8.190 -    ))
   8.191 -
   8.192 -(** compose Specification **)
   8.193 -
   8.194 -type model_refs_dummy = (model * ((string * string) * refs))
   8.195 -val model_refs_dummy = ((((((((((((((("", (("", ""), "")), ""), ""), ""),
   8.196 -    []), ""), ""), []), ""), ""), ""), ""), ""),
   8.197 -       []),
   8.198 -      (("", ""), (((((((("", ""), ""), ""), ""), []),
   8.199 -        ""), ""), [])))
   8.200 -
   8.201 -type specification = (string * string) * model_refs_dummy
   8.202 -val specification = (
   8.203 -  (Parse.command_name "Specification" -- Parse.$$$ ":") --
   8.204 -    (Scan.optional
   8.205 -      (model -- references)
   8.206 -    )
   8.207 -    model_refs_dummy
   8.208 -    );
   8.209 -
   8.210 -
   8.211 -(*** Solution ***)
   8.212 -(** Tactics **)
   8.213 -
   8.214 -val substitute = Parse.reserved "Substitute" -- Parse.term -- Parse.term;
   8.215 -val rewrite_set_inst = Parse.reserved "Rewrite_Set_Inst" -- Parse.term -- Parse.name;
   8.216 -val check_postcond = Parse.reserved "Check_Postcond" --
   8.217 -  (Parse.$$$ "[" |-- Parse.list1 Parse.name --| Parse.$$$ "]");
   8.218 -
   8.219 -(* see test/../Test_Parsers.thy || requires arguments of equal type *)
   8.220 -fun exec_substitute ((str, tm), bdv) =
   8.221 -  "EXEC IMMEDIATELY: " ^ str ^ " " ^ (*UnparseC.term*) tm ^ " " ^ (*UnparseC.term*) bdv;
   8.222 -fun exec_rewrite_set_inst ((str, tm), id) =
   8.223 -  "EXEC IMMEDIATELY: " ^ str ^ " " ^ (*UnparseC.term*) tm ^ " " ^ (*Problem.id_to_string*) id;
   8.224 -fun exec_check_postcond (str, path) =
   8.225 -  "EXEC IMMEDIATELY: " ^ str ^ " " ^ (*Problem.id_to_string*)fold (curry op ^) path "";
   8.226 -
   8.227 -val tactic = ( (* incomplete list of tactics *)
   8.228 -  Parse.$$$ "Tactic" --
   8.229 -    (substitute >> exec_substitute ||
   8.230 -     rewrite_set_inst >> exec_rewrite_set_inst ||
   8.231 -     check_postcond >> exec_check_postcond
   8.232 -    )
   8.233 -  )
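          (* A minimal sketch (the value name two_tactics is hypothetical, not part of this
             changeset) of the type equalisation noted in the comments above and below:
             Scan.|| only joins parsers with the same result type, so each tactic parser is
             first mapped to a plain string via >> before the alternatives are combined:
               val two_tactics =
                 substitute >> exec_substitute ||       (* both alternatives now yield *)
                 check_postcond >> exec_check_postcond  (* a string, so || type-checks *)
          *)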
   8.234 -
   8.235 -(** Step of term + tactic **)
   8.236 -
   8.237 -\<^isac_test>\<open>
   8.238 -val steps = (Scan.repeat (Parse.term -- (Scan.optional tactic ("", ""))))
   8.239 -\<close>
   8.240 -
   8.241 -fun exec_step_term (tm, (tac1, tac2)) =
   8.242 -  "EXEC IMMEDIATELY step_term: " ^ (*UnparseC.term*) tm ^ " (" ^ tac1 ^ ", " ^ tac2 ^ ")";
   8.243 -\<^isac_test>\<open>
   8.244 -val steps_subpbl =
   8.245 -  Scan.repeat (
   8.246 -    ((Parse.term -- (Scan.optional tactic ("", ""))) >> exec_step_term) (** ) ||
   8.247 -    (problem >> exec_subproblem) ( **)
   8.248 -  )
   8.249 -\<close>
   8.250 -
   8.251 -(** Body **)
   8.252 -
   8.253 -type body = (((((string * string) *
   8.254 -  (((((((((((((((string * ((string * string) * string)) * string) * string) * string) * string list) * string) * string) * string list) * string) * string) * string) * string) * string) * string list) *
   8.255 -    ((string * string) * ((((((((string * string) * string) * string) * string) * string list) * string) * string) * string list)))) *
   8.256 -        string) * string) * string list)
   8.257 -val body_dummy = ((((("", ""),
   8.258 -  ((((((((((((((("", (("", ""), "")), ""), ""), ""), []), ""), ""), []), ""), ""), ""), ""), ""), []),
   8.259 -    (("", ""), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
   8.260 -      ""), ""), [])
   8.261 -val body =
   8.262 -  specification --
   8.263 -  Parse.command_name "Solution" -- Parse.$$$ ":" --
   8.264 -  (*/----- this will become "and steps".. *)
   8.265 -    (Scan.repeat (
   8.266 -      ((Parse.term -- (Scan.optional tactic ("", ""))) >> exec_step_term)  (** ) ||
   8.267 -       (problem >> exec_subproblem)  ( *exec_* make types equal for both args of ||*)
   8.268 -    ))
   8.269 -  (*\----- ..this will become "and steps"
   8.270 -    see Test_Parse_Isac subsubsection \<open>trials on recursion stopped\<close> *)
   8.271 -
   8.272 -
   8.273 -(*** Problem ***)
   8.274 -(* exclude "Problem" from parsing * )
   8.275 -type problem =
   8.276 -  (((((((string * string) * string) * string) * string list) * string) *
   8.277 -    (((((string * string) *
   8.278 -      (((((((((((((((string * ((string * string) * string)) * string) * string) * string) *
   8.279 -        string list) * string) * string) * string list) * string) *
   8.280 -        string) * string) * string) * string) * string list) *
   8.281 -        ((string * string) *
   8.282 -          ((((((((string * string) * string) * string) * string) * string list) * string) *
   8.283 -            string) * string list)))) *
   8.284 -      string) * string) * string list)) *
   8.285 -    string)
   8.286 -( * exclude "Problem" from parsing *)
   8.287 -type problem =
   8.288 -  (((((string * string) * string) * string list) * string) *
   8.289 -    (((((string * string) *
   8.290 -      (((((((((((((((string * ((string * string) * string)) * string) * string) * string) *
   8.291 -        string list) * string) * string) * string list) * string) *
   8.292 -        string) * string) * string) * string) * string list) *
   8.293 -        ((string * string) *
   8.294 -          ((((((((string * string) * string) * string) * string) * string list) * string)
   8.295 -            * string) * string list)))) *
   8.296 -      string) * string) * string list)) *
   8.297 -    string
   8.298 -(* exclude "Problem" from parsing *)
   8.299 -val problem =
   8.300 -  problem_headline --
   8.301 -    (Scan.optional body) body_dummy --
   8.302 -  (Scan.optional Parse.term) ""
   8.303 -
   8.304 -
   8.305 -(**** parse Formalise ****)
   8.306 -
   8.307 -(*** Tokenizer, from HOL/SPARK/Tools/fdl_lexer.ML ***)
   8.308 -
   8.309 -val tokenize_string = Fdl_Lexer.$$$ "\"" |-- Fdl_Lexer.!!! "unclosed string" (*2*)
   8.310 -  (Scan.repeat (Scan.unless (Fdl_Lexer.$$$ "\"") Fdl_Lexer.any) --| Fdl_Lexer.$$$ "\"") >>
   8.311 -    Fdl_Lexer.make_token Fdl_Lexer.Comment(*!!!*);
   8.312 -
   8.313 -val scan =
   8.314 -  Scan.repeat (Scan.unless (Scan.one Fdl_Lexer.is_char_eof)
   8.315 -    (Fdl_Lexer.!!! "bad input"
   8.316 -       (   tokenize_string           (*.. this must be first *)
   8.317 -        ||Scan.max Fdl_Lexer.leq_token
   8.318 -             (Scan.literal Fdl_Lexer.lexicon >> Fdl_Lexer.make_token Fdl_Lexer.Keyword)
   8.319 -             (   Fdl_Lexer.long_identifier >> Fdl_Lexer.make_token Fdl_Lexer.Long_Ident
   8.320 -              || Fdl_Lexer.identifier >> Fdl_Lexer.make_token Fdl_Lexer.Ident)
   8.321 -       ) --|
   8.322 -     Fdl_Lexer.whitespace) );
   8.323 -
   8.324 -fun tokenize_formalise pos str =
   8.325 -  (Scan.finite Fdl_Lexer.char_stopper
   8.326 -    (Scan.error scan) (Fdl_Lexer.explode_pos str pos));
   8.327 -
   8.328 -
   8.329 -(*** the parser from HOL/SPARK/Tools/fdl_parser.ML ***)
   8.330 -
   8.331 -type form_model = TermC.as_string list;
   8.332 -type form_refs = (((((string * string) * string) * string list) * string) * string list) * string
   8.333 -type form_model_refs = ((((((string * string) * form_model) * string) *
   8.334 -  ((((((string * ThyC.id) * string) * Problem.id) * string) * MethodC.id) * string)) * 
   8.335 -    string) * string)
   8.336 -type formalise = (((((string * string) * form_model) * string) *
   8.337 -  ((((((string * ThyC.id) * string) * Problem.id) * string) * MethodC.id) * string)) *
   8.338 -    string) * string;
   8.339 -
   8.340 -val parse_string = Fdl_Parser.group "string"
   8.341 -  (Scan.one (fn t => Fdl_Lexer.kind_of t = Fdl_Lexer.Comment) >>
   8.342 -     Fdl_Lexer.content_of);
   8.343 -
   8.344 -val parse_form_model =
   8.345 -  (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
   8.346 -    Fdl_Parser.list1 parse_string --| 
   8.347 -  Fdl_Parser.$$$ "]"));
   8.348 -
   8.349 -val parse_form_refs = 
   8.350 -  Fdl_Parser.$$$ "(" --
   8.351 -    parse_string --                     (* "Biegelinie" *)
   8.352 -  Fdl_Parser.$$$ "," --
   8.353 -    (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
   8.354 -      Fdl_Parser.list1 parse_string --| (* ["Biegelinien"] *)
   8.355 -    Fdl_Parser.$$$ "]")) --
   8.356 -  Fdl_Parser.$$$ "," --
   8.357 -    (Fdl_Parser.$$$ "[" |-- Fdl_Parser.!!! (
   8.358 -      Fdl_Parser.list1 parse_string --| (* ["IntegrierenUndKonstanteBestimmen2"] *)
   8.359 -    Fdl_Parser.$$$ "]")) --
   8.360 -  Fdl_Parser.$$$ ")";
   8.361 -
   8.362 -(*val parse_formalise =   KEEP IDENTIFIERS CLOSE TO SPARK..*)
   8.363 -val formalisation =
   8.364 -  Fdl_Parser.!!! (
   8.365 -    Fdl_Parser.$$$ "[" --
   8.366 -      Fdl_Parser.$$$ "(" --
   8.367 -        parse_form_model --
   8.368 -      Fdl_Parser.$$$ "," --
   8.369 -        parse_form_refs --
   8.370 -      Fdl_Parser.$$$ ")" --
   8.371 -    Fdl_Parser.$$$ "]");
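          (* A minimal sketch of the concrete input accepted by "formalisation" (shortened
             from the exp_Statics_Biegel_Timischl_7-70.str example used in the tests):
               [(["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y"],
                 ("Biegelinie", ["Biegelinien"], ["IntegrierenUndKonstanteBestimmen2"]))]  *)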
   8.372 -
   8.373 -fun read_out_formalise ((((( (
   8.374 -  "[", 
   8.375 -    "("),
   8.376 -      form_model: TermC.as_string list),
   8.377 -    ","), ((((((
   8.378 -      "(", 
   8.379 -        thy_id: ThyC.id), 
   8.380 -      ","), 
   8.381 -        probl_id: Problem.id), 
   8.382 -      ","), 
   8.383 -        meth_id: MethodC.id), 
   8.384 -      ")")),
   8.385 -    ")"),
   8.386 -  "]") = [(form_model, (thy_id, probl_id, meth_id))]
   8.387 -| read_out_formalise _ =
   8.388 -  raise ERROR "read_out_formalise: WRONGLY parsed";
   8.389 -
   8.390 -(**)end(*struct*)
     9.1 --- a/src/Tools/isac/BridgeJEdit/preliminary-OLD.sml	Tue Aug 23 18:05:08 2022 +0200
     9.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
     9.3 @@ -1,126 +0,0 @@
     9.4 -(*  Title:      src/Tools/isac/BridgeJEdit/preliminary-OLD.sml
     9.5 -    Author:     Walther Neuper, JKU Linz
     9.6 -    (c) due to copyright terms
     9.7 -
     9.8 -Preliminary code for starting the transition Libisabelle -- PIDE.
     9.9 -Will be distributed to several structures, most of which do not yet exist.
    9.10 -*)
    9.11 -
    9.12 -signature PRELIMINARY_OLD =
    9.13 -sig
    9.14 -  (* for keyword ExampleSPARK *)
    9.15 -  val store_and_show: theory -> Formalise.T list -> theory -> theory;
    9.16 -  val load_formalisation: theory -> (Fdl_Lexer.T list -> ParseC_OLD.form_model_refs * 'a) ->
    9.17 -    (theory -> Token.file * theory) * string -> theory -> theory
    9.18 -  
    9.19 -  (* for keyword Problem*)
    9.20 -(**)val init_specify: ParseC_OLD.problem_headline -> theory -> theory(**)
    9.21 -(** )val init_specify: ParseC_OLD.problem -> theory -> theory( **)
    9.22 -end
    9.23 -
    9.24 -(** code for keyword ExampleSPARK **)
    9.25 -
    9.26 -structure Refs_Data = Theory_Data
    9.27 -(
    9.28 -  type T = References.T
    9.29 -  val empty : T = References.empty
    9.30 -  val extend = I
    9.31 -  fun merge (_, refs) = refs
    9.32 -);
    9.33 -structure OMod_Data = Theory_Data
    9.34 -(
    9.35 -  type T = O_Model.T
    9.36 -  val empty : T = []
    9.37 -  val extend = I
    9.38 -  fun merge (_, o_model) = o_model
    9.39 -);
    9.40 -structure IMod_Data = Theory_Data
    9.41 -(
    9.42 -  type T = I_Model.T
    9.43 -  val empty : T = []
    9.44 -  val extend = I
    9.45 -  fun merge (_, i_model) = i_model
    9.46 -);
    9.47 -structure Ctxt_Data = Theory_Data
    9.48 -(
    9.49 -  type T = Proof.context
    9.50 -  val empty : T = ContextC.empty
    9.51 -  val extend = I
    9.52 -  fun merge (_, ctxt) = ctxt
    9.53 -);
    9.54 -
    9.55 -(**)
    9.56 -structure Preliminary_OLD(**): PRELIMINARY_OLD(**) =
    9.57 -struct
    9.58 -(**)
    9.59 -
    9.60 -fun store_and_show HACKthy formalise thy =
    9.61 -  let
    9.62 -(**)val _(*file_read_correct*) = case formalise of xxx as
    9.63 -      [(["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y", "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", "FunktionsVariable x", "GleichungsVariablen [c, c_2, c_3, c_4]", "AbleitungBiegelinie dy"],
    9.64 -        ("Biegelinie", ["Biegelinien"], ["IntegrierenUndKonstanteBestimmen2"]))] => hd xxx
    9.65 -      | _ => error "magic failed: handling file exp_Statics_Biegel_Timischl_7-70.str failed"
    9.66 -(**)
    9.67 -    val formalise = hd formalise (*we expect only one Formalise.T in the list*)
    9.68 -    val (_(*hdlPIDE*), _, refs, o_model, var_type_ctxt) = Step_Specify.initialisePIDEHACK HACKthy formalise
    9.69 -  (*              ^^ TODO remove with cleanup in nxt_specify_init_calc *)
    9.70 -(*
    9.71 -  TODO: present "Problem hdlPIDE" via PIDE
    9.72 -*)
    9.73 -  in
    9.74 -    thy
    9.75 -      |> Refs_Data.put refs
    9.76 -      |> OMod_Data.put o_model
    9.77 -      |> Ctxt_Data.put var_type_ctxt
    9.78 -  end;
    9.79 -
    9.80 -(*
    9.81 -fun load_formalisation parser (files, _) thy =
    9.82 -  let
    9.83 -    val ([{lines, pos, ...}: Token.file], thy') = files thy;
    9.84 -  in
    9.85 -    store_and_show
    9.86 -      (ParseC_OLD.read_out_formalise (fst (parser (fst (ParseC_OLD.tokenize_formalise pos (cat_lines lines))))))
    9.87 -        thy'
    9.88 -  end;
    9.89 -*)
    9.90 -fun load_formalisation HACKthy parse (get_file: theory -> Token.file * theory, _: string) thy =
    9.91 -  let
    9.92 -    val (file, thy') = get_file thy;
    9.93 -    val formalise = #lines file
    9.94 -      |> cat_lines 
    9.95 -      |> ParseC_OLD.tokenize_formalise (#pos file)
    9.96 -      |> fst
    9.97 -      |> parse
    9.98 -      |> fst
    9.99 -      |> ParseC_OLD.read_out_formalise
   9.100 -      |> store_and_show HACKthy
   9.101 -(**)val _ = @{print} {a = "### load_formalisation \<rightarrow>", formalise = formalise}(**)
   9.102 -  in
   9.103 -    formalise thy'
   9.104 -  end;
   9.105 -
   9.106 -(*** code for keyword Problem ***)
   9.107 -
   9.108 -(*
   9.109 -val prefer_input: ThyC.id * Problem.id -> References.T -> O_Model.T -> References.T * O_Model.T
   9.110 -*)
   9.111 -fun prefer_input (inthy, inpbl : Problem.id) (thy, pbl, met_id) o_model =
   9.112 -  if inthy = thy andalso inpbl = pbl
   9.113 -  then ((thy, pbl, met_id) : References.T, o_model)
   9.114 -  else ((inthy, inpbl, MethodC.id_empty), [])
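          (* A behaviour sketch of prefer_input (values illustrative only): if the input
             references agree with those stored via ExampleSPARK, the stored method id and
             o_model are kept,
               prefer_input ("Biegelinie", ["Biegelinien"]) ("Biegelinie", ["Biegelinien"], met) o_mod
                 = (("Biegelinie", ["Biegelinien"], met), o_mod);
             otherwise specification starts afresh with MethodC.id_empty and an empty O_Model. *)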
   9.115 -
   9.116 -fun init_specify problem thy =
   9.117 -  let
   9.118 -    val _ = @{print} {a = "//--- Spark_Commands.init_specify", headline = problem}
   9.119 -    val (thy_id, pbl_id) = ParseC_OLD.read_out_headline problem (*how to handle Position?!?*)
   9.120 -    val refs' = Refs_Data.get thy
   9.121 -    val ((_, pbl_id, _), _(*o_model*)) =
   9.122 -      prefer_input (thy_id, pbl_id) refs' (OMod_Data.get thy)
   9.123 -    val i_model = I_Model.initPIDE pbl_id
   9.124 -(* TODO: present Specification = i_model + refs via PIDE, if specify is done explicitly. *)
   9.125 -  in
   9.126 -    IMod_Data.put i_model thy (* the Model-items with Descriptors only *)
   9.127 -  end;
   9.128 -(**)
   9.129 -(**)end(**)
    10.1 --- a/test/Tools/isac/BridgeJEdit/parseC.sml	Tue Aug 23 18:05:08 2022 +0200
    10.2 +++ b/test/Tools/isac/BridgeJEdit/parseC.sml	Wed Aug 24 12:37:07 2022 +0200
    10.3 @@ -6,19 +6,7 @@
    10.4  "-----------------------------------------------------------------------------------------------";
    10.5  "table of contents -----------------------------------------------------------------------------";
    10.6  "-----------------------------------------------------------------------------------------------";
    10.7 -"----------- complete Problem ------------------------------------------------------------------";
    10.8 -"----------- Problem headline ------------------------------------------------------------------";
    10.9 -"----------- Model -----------------------------------------------------------------------------";
   10.10 -"----------- Model-items -----------------------------------------------------------------------";
   10.11 -"----------- References ------------------------------------------------------------------------";
   10.12 -"----------- Specification: References + Model -------------------------------------------------";
   10.13 -"----------- Tactics ---------------------------------------------------------------------------";
   10.14 -"----------- steps -----------------------------------------------------------------------------";
   10.15 -"----------- body ------------------------------------------------------------------------------";
   10.16 -"----------- Problem ---------------------------------------------------------------------------";
   10.17 -"----------- fun read_out_problem --------------------------------------------------------------";
   10.18 -"----------- THERE ARE TESTS with INCOMPLETE WORK IN Test_Parse_Isac.thy -----------------------";
   10.19 -"----------- parser for exp_file.str -----------------------------------------------------------";
   10.20 +"----------- TODO ------------------------------------------------------------------------------";
   10.21  "-----------------------------------------------------------------------------------------------";
   10.22  "-----------------------------------------------------------------------------------------------";
   10.23  "-----------------------------------------------------------------------------------------------";
   10.24 @@ -37,548 +25,6 @@
   10.25  :
   10.26  *)
   10.27  
   10.28 -"----------- complete Problem ------------------------------------------------------------------";
   10.29 -"----------- complete Problem ------------------------------------------------------------------";
   10.30 -"----------- complete Problem ------------------------------------------------------------------";
   10.31 -(* copy from Isac's Java-Frontend:
   10.32 -
   10.33 -Problem ("Biegelinie", ["Biegelinien"])
   10.34 -  Specification:
   10.35 -    Model:
   10.36 -      Given: "Traegerlaenge L", "Streckenlast q_0"
   10.37 -      Where: "q_0 ist_integrierbar_auf {| 0, L |}", "0 < L"
   10.38 -      Find: "Biegelinie y"
   10.39 -      Relate: "Randbedingungen [ Q_0 = q_0 * L, M_b L = 0, y_0 = 0, d_d x y_0 = 0 ]"
   10.40 -    References:
   10.41 -      RTheory: "Biegelinie"
   10.42 -      RProblem: ["Biegelinien"]
   10.43 -      RMethod: ["Integrieren", "KonstanteBestimmen2"]
   10.44 -  Solution:
   10.45 -    Problem ("Biegelinie", ["vonBelastungZu", "Biegelinien"])
   10.46 -    "[Q x = c + -1 * q_0 * x, M_b x = c_2 + c * x + -1 * q_0 / 2 * x ^ 2,  y' x =  c_3 + 1 / (-1 * EI) * (c_2 * x + c / 2 * x ^ 2 + -1 * q_0 / 6 * x ^ 3),  y x =  c_4 + c_3 * x +  1 / (-1 * EI) *  (c_2 / 2 * x ^ 2 + c / 6 * x ^ 3 + -1 * q_0 / 24 * x ^ 4)]"
   10.47 -    Problem ("Biegelinie", ["setzeRandbedingungen", "Biegelinien"])
   10.48 -    "[L * q_0 = c, 0 = (2 * c_2 + 2 * L * c + -1 * L ^ 2 * q_0) / 2, 0 = c_4,  0 = c_3]"
   10.49 -    "solveSystem [L * q_0 = c, 0 = (2 * c_2 + 2 * L * c + -1 * L ^ 2 * q_0) / 2, 0 = c_4,  0 = c_3] [c, c_1, c_2, c_4]"
   10.50 -    "[c = -1 * L * q_0 / -1, c_2 = -1 * L ^ 2 * q_0 / 2, c_3 = 0, c_4 = 0]"
   10.51 -    "y x = c_4 + c_3 * x + 1 / (-1 * EI) * (c_2 / 2 * x ^ 2 + c / 6 * x ^ 3 + -1 * q_0 / 24 * x ^ 4)"
   10.52 -      Tactic Substitute "c_4 + c_3 * x +  1 / (-1 * EI) *  (c_2 / 2 * x ^ 2 + c / 6 * x ^ 3 + -1 * q_0 / 24 * x ^ 4)" "[c, c_1, c_2, c_4]"
   10.53 -    "y x = 0 + 0 * x + 1 / (-1 * EI) * (-1 * L ^ 2 * q_0 / 2 / 2 * x ^ 2 + -1 * L * q_0 / -1 / 6 * x ^ 3 +  -1 * q_0 / 24 * x ^ 4)"
   10.54 -      Tactic Rewrite_Set_Inst "[(bdv, x)]" "make_ratpoly_in" "y x"
   10.55 -    "y x = -6 * q_0 * L ^ 2 / (-24 * EI) * x ^ 2 + 4 * L * q_0 / (-24 * EI) * x ^ 3 + -1 * q_0 / (-24 * EI) * x ^ 4"
   10.56 -      Tactic Check_Postcond ["Biegelinien"]
   10.57 -"y x = -6 * q_0 * L ^ 2 / (-24 * EI) * x ^ 2 + 4 * L * q_0 / (-24 * EI) * x ^ 3 + -1 * q_0 / (-24 * EI) * x ^ 4"
   10.58 -*)
   10.59 -
   10.60 -"----------- Problem headline ------------------------------------------------------------------";
   10.61 -"----------- Problem headline ------------------------------------------------------------------";
   10.62 -"----------- Problem headline ------------------------------------------------------------------";
   10.63 -val problem_headline_str =
   10.64 -  "( \"Biegelinie\" , [\"Biegelinien\"] )";
   10.65 -
   10.66 -val toks = ParseC_OLD.tokenize problem_headline_str;
   10.67 -case ParseC_OLD.problem_headline toks of
   10.68 -  ((((("(", "Biegelinie"), ","), ["Biegelinien"]), ")"), [])
   10.69 -  => () | _ => error "TermC.parse problem_headline CHANGED"
   10.70 -
   10.71 -
   10.72 -"----------- Model-items -----------------------------------------------------------------------";
   10.73 -"----------- Model-items -----------------------------------------------------------------------";
   10.74 -"----------- Model-items -----------------------------------------------------------------------";
   10.75 -val given_comma_str = "Given: \"Traegerlaenge L\", \"Streckenlast q_0\"";
   10.76 -val given_str = "Given: \"Traegerlaenge L\"(*,*) \"Streckenlast q_0\"";
   10.77 -
   10.78 -val toks = ParseC_OLD.tokenize given_str;
   10.79 -(*
   10.80 -(*given_comma_str\<rightarrow>*)val toks =
   10.81 -   [Token (("Given", ({}, {})), (Command, "Given"), Slot), Token ((":", ({}, {})), (Keyword, ":"), Slot), Token (("Traegerlaenge L", ({}, {})), (String, "Traegerlaenge L"), Slot),
   10.82 -    Token ((",", ({}, {})), (Keyword, ","), Slot), Token (("Streckenlast q_0", ({}, {})), (String, "Streckenlast q_0"), Slot)]
   10.83 -(*given_str\<rightarrow>*)val toks =
   10.84 -   [Token (("Given", ({}, {})), (Command, "Given"), Slot), Token ((":", ({}, {})), (Keyword, ":"), Slot), Token (("Traegerlaenge L", ({}, {})), (String, "Traegerlaenge L"), Slot),
   10.85 -    Token (("Streckenlast q_0", ({}, {})), (String, "Streckenlast q_0"), Slot)]:
   10.86 -*)
   10.87 -val given_comma = Parse.command_name "Given" -- Parse.$$$ ":" -- Parse.list1 Parse.term;
   10.88 -val given = Parse.command_name "Given" -- Parse.$$$ ":" -- Parse.list1 Parse.term;
   10.89 -
   10.90 -case ParseC_OLD.parse given_comma (ParseC_OLD.tokenize given_comma_str) of
   10.91 -  ((("Given", ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
   10.92 -    []) => ()
   10.93 -  (* \<up> ^--------------------------------- is empty:     parsing OK*)
   10.94 -| _ => error "TermC.parse given_comma CHANGED";
   10.95 -
   10.96 -"----------- Parse.list1 DOES expect <,> between elements";
   10.97 -case ParseC_OLD.parse given (ParseC_OLD.tokenize given_str) of
   10.98 -  ((("Given", ":"), [_(*"<markup>"*)]),
   10.99 -    [_(*Token (("Streckenlast q_0", ({}, {})), (String, "Streckenlast q_0"), Slot)*)]) => ()
  10.100 -  (* \<up> \<up> \<up> \<up> \<up> \<up> \<up> \<up> \<up> \<up> \<up>  \<up> ^ is NOT empty: parsing NOT ok*)
  10.101 -| _ => error "TermC.parse given CHANGED";
  10.102 -
  10.103 -
  10.104 -"----------- Model -----------------------------------------------------------------------------";
  10.105 -"----------- Model -----------------------------------------------------------------------------";
  10.106 -"----------- Model -----------------------------------------------------------------------------";
  10.107 -val model_str = (
  10.108 -  "Model:" ^
  10.109 -         (* \<up> \<up> \<up>  \<up> *)
  10.110 -    "Given: \"Traegerlaenge L\", \"Streckenlast q_0\"" ^
  10.111 -    "Where: \"q_0 ist_integrierbar_auf {| 0, L |}\", \"0 < L\"" ^
  10.112 -    "Find: \"Biegelinie y\"" ^
  10.113 -    "Relate: \"Randbedingungen [ Q_0 = q_0 * L, M_b L = 0, y_0 = 0, d_d x y_0 = 0 ]\""
  10.114 -    );
  10.115 -val model_empty_str = ( (*Model before student's input*)
  10.116 -  "Model:" ^
  10.117 -         (* \<up> \<up> \<up>  \<up> *)
  10.118 -    "Given: \"Traegerlaenge \", \"Streckenlast \"" ^
  10.119 -    "Where: \"_ ist_integrierbar_auf {| |}\"" ^
  10.120 -    "Find: \"Biegelinie \"" ^
  10.121 -    "Relate: \"Randbedingungen [ ]\""
  10.122 -    );
  10.123 -val model_str_opt = ( (*Model explicitly referring to RProblem (default), not RMethod*)
  10.124 -  "Model ( RProblem ):" ^
  10.125 -       (* \<up> \<up> \<up>  \<up> *)
  10.126 -    "Given: \"Traegerlaenge L\", \"Streckenlast q_0\"" ^
  10.127 -    "Where: \"q_0 ist_integrierbar_auf {| 0, L |}\", \"0 < L\"" ^
  10.128 -    "Find: \"Biegelinie y\"" ^
  10.129 -    "Relate: \"Randbedingungen [ Q_0 = q_0 * L, M_b L = 0, y_0 = 0, d_d x y_0 = 0 ]\""
  10.130 -    );
  10.131 -
  10.132 -val toks = ParseC_OLD.tokenize model_str;
  10.133 -case ParseC_OLD.parse ParseC_OLD.model toks of
  10.134 -(((((((((((((((
  10.135 -  "Model", (("", ""), "")), ":"),
  10.136 -    "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.137 -    "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.138 -    "Find"), ":"), _(*"<markup>"*)),
  10.139 -    "Relate"), ":"), [_(*"<markup>"*)]), [])
  10.140 -=> () | _ => error "TermC.parse model CHANGED";
  10.141 -
  10.142 -"----------- Model before student's input";
  10.143 -val toks = ParseC_OLD.tokenize model_empty_str;
  10.144 -case ParseC_OLD.parse ParseC_OLD.model toks of
  10.145 -(((((((((((((((
  10.146 -  "Model", (("", ""), "")), ":"),
  10.147 -    "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.148 -    "Where"), ":"), [_(*"<markup>"*) (*//, _(*"<markup>"*) //*)]),
  10.149 -    "Find"), ":"), _(*"<markup>"*)),
  10.150 -    "Relate"), ":"), [_(*"<markup>"*)]), [])
  10.151 -=> () | _ => error "TermC.parse model_empty_str CHANGED";
  10.152 -
  10.153 -"----------- Model explicitly referring to RProblem (default), not RMethod";
  10.154 -val toks = ParseC_OLD.tokenize model_str_opt;
  10.155 -case ParseC_OLD.parse ParseC_OLD.model toks of
  10.156 -(((((((((((((((
  10.157 -  "Model", (("(", "RProblem"), ")")), ":"),
  10.158 -    "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.159 -    "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.160 -    "Find"), ":"), _(*"<markup>"*)),
  10.161 -    "Relate"), ":"), [_(*"<markup>"*)]), [])
  10.162 -=> () | _ => error "TermC.parse model_str_opt CHANGED";
  10.163 -
  10.164 -
  10.165 -"----------- References ------------------------------------------------------------------------";
  10.166 -"----------- References ------------------------------------------------------------------------";
  10.167 -"----------- References ------------------------------------------------------------------------";
  10.168 -val references_collapsed_str = (
  10.169 -  "References:"
  10.170 -  );
  10.171 -val references_str = (
  10.172 -  "References:"  ^
  10.173 -    "RTheory: \"Biegelinie\"" ^
  10.174 -    "RProblem: [\"Biegelinien\"]" ^
  10.175 -    "RMethod: [\"Integrieren\", \"KonstanteBestimmen2\"]"
  10.176 -    );
  10.177 -val references_empty_str = (
  10.178 -  "References:"  ^
  10.179 -    "RTheory: \"\"" ^
  10.180 -    "RProblem: [\"\"]" ^
  10.181 -    "RMethod: [\"\"]"
  10.182 -    );
  10.183 -
  10.184 -case ParseC_OLD.parse ParseC_OLD.references (ParseC_OLD.tokenize references_collapsed_str) of
  10.185 -  ((("References", ":"), (((((((("", ""), ""), ""), ""), []), ""), ""), [])), [])
  10.186 -  => () | _ => error "TermC.parse references_collapsed CHANGED";
  10.187 -
  10.188 -case ParseC_OLD.parse ParseC_OLD.references (ParseC_OLD.tokenize references_str) of (((
  10.189 -  "References", ":"),
  10.190 -     ((((((((
  10.191 -    "RTheory", ":"), "Biegelinie"),
  10.192 -    "RProblem"), ":"), ["Biegelinien"]),
  10.193 -    "RMethod"), ":"), ["Integrieren", "KonstanteBestimmen2"])),
  10.194 -   [])
  10.195 -  => () | _ => error "TermC.parse references CHANGED";
  10.196 -
  10.197 -case ParseC_OLD.parse ParseC_OLD.references (ParseC_OLD.tokenize references_empty_str) of (((
  10.198 -  "References", ":"), ((((((((
  10.199 -    "RTheory", ":"), ""),
  10.200 -    "RProblem"), ":"), [""]),
  10.201 -    "RMethod"), ":"), [""])),
  10.202 -   [])
  10.203 -  => () | _ => error "TermC.parse references_empty_str CHANGED"
  10.204 -
  10.205 -
  10.206 -"----------- Specification: References + Model -------------------------------------------------";
  10.207 -"----------- Specification: References + Model -------------------------------------------------";
  10.208 -"----------- Specification: References + Model -------------------------------------------------";
  10.209 -val specification_str = (
  10.210 -    "Specification:" ^
  10.211 -       model_str ^
  10.212 -       references_str
  10.213 -       );
  10.214 -val specification_collapsed_str = (
  10.215 -"  Specification:" (** ) ^
  10.216 -"    Model:"^
  10.217 -"      Given: \"Traegerlaenge L\", \"Streckenlast q_0\"" ^
  10.218 -"      Where: \"q_0 ist_integrierbar_auf {| 0, L |}\", \"0 < L\"" ^
  10.219 -"      Find: \"Biegelinie y\"" ^
  10.220 -"      Relate: \"Randbedingungen [ Q_0 = q_0 * L, M_b L = 0, y_0 = 0, d_d x y_0 = 0 ]\"" ^
  10.221 -"    References:" (**) ^
  10.222 -"      RTheory: \"Biegelinie\"" (**) ^
  10.223 -"      RProblem: [\"Biegelinien\"]" (**) ^
  10.224 -"      RMethod: [\"Integrieren\", \"KonstanteBestimmen2\"]" ( **)
  10.225 -);
  10.226 -
  10.227 -case ParseC_OLD.specification (ParseC_OLD.tokenize specification_str) of (((
  10.228 -  "Specification", ":"),
  10.229 -     (((((((((((((((
  10.230 -    "Model", (("", ""), "")), ":"),
  10.231 -      "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.232 -      "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.233 -      "Find"), ":"), _(*"<markup>"*)),
  10.234 -      "Relate"), ":"), [_(*"<markup>"*)]),
  10.235 -      ((
  10.236 -    "References", ":"), ((((((((
  10.237 -      "RTheory", ":"), "Biegelinie"),
  10.238 -      "RProblem"), ":"), ["Biegelinien"]),
  10.239 -      "RMethod"), ":"), ["Integrieren", "KonstanteBestimmen2"])))),
  10.240 -   [])
  10.241 -  => () | _ => error "TermC.parse specification (expanded) changed";
  10.242 -
  10.243 -"----------- Specification collapsed";
  10.244 -case ParseC_OLD.parse ParseC_OLD.specification (ParseC_OLD.tokenize specification_collapsed_str) of (((
  10.245 -  "Specification", ":"),
  10.246 -(*model*)((((((((((((((("", (("", ""), "")), ""), ""), ""), []), ""), ""), []), ""), ""), ""), ""), ""), []), (("", ""),
  10.247 -(*refs?*)(((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
  10.248 -   [])
  10.249 -  => () | _ => error "TermC.parse specification (collapsed) changed";
  10.250 -
  10.251 -
  10.252 -"----------- Tactics ---------------------------------------------------------------------------";
  10.253 -"----------- Tactics ---------------------------------------------------------------------------";
  10.254 -"----------- Tactics ---------------------------------------------------------------------------";
  10.255 -val tactic_Substitute_str =
  10.256 -  "Tactic Substitute \"c_4 + c_3 * x +  1 / (-1 * EI)\" \"[c, c_1, c_2, c_4]\"";
  10.257 -val tactic_Rewrite_Set_Inst_str =
  10.258 -  "Tactic Rewrite_Set_Inst \"[(bdv, x)]\" \"make_ratpoly_in\"";
  10.259 -val tactic_Check_Postcond_str =
  10.260 -  "Tactic Check_Postcond [\"Biegelinien\", \"xxx\"]";
  10.261 -val final_result_str = "\"y x = -6 * q_0 * L ^ 2 / (-24 * EI)\"";
  10.262 -
  10.263 -val toks1 = ParseC_OLD.tokenize "Substitute \"c_4 + c_3 * x +  1 / (-1 * EI)\" \"[c, c_1, c_2, c_4]\"";
  10.264 -case ParseC_OLD.substitute toks1 of ((("Substitute", _(*<markup>*)), _(*<markup>*)), [])
  10.265 -=> () | _ => error "TermC.parse Substitute CHANGED";
  10.266 -
  10.267 -val toks2 = ParseC_OLD.tokenize "Rewrite_Set_Inst \"[(bdv, x)]\" \"make_ratpoly_in\"";
  10.268 -case ParseC_OLD.rewrite_set_inst toks2 of
  10.269 -  ((("Rewrite_Set_Inst", _(*<markup>*)), "make_ratpoly_in"), [])
  10.270 -=> () | _ => error "TermC.parse Rewrite_Set_Inst CHANGED";
  10.271 -
  10.272 -val toks3 = ParseC_OLD.tokenize "Check_Postcond [\"Biegelinien\", \"xxx\"]";
  10.273 -case ParseC_OLD.check_postcond toks3 of (("Check_Postcond", ["Biegelinien", "xxx"]), [])
  10.274 -=> () | _ => error "TermC.parse Check_Postcond CHANGED";
  10.275 -
  10.276 -"----------- Tactics collected";
  10.277 -val toks1' = ParseC_OLD.tokenize tactic_Substitute_str;
  10.278 -val toks2' = ParseC_OLD.tokenize tactic_Rewrite_Set_Inst_str;
  10.279 -val toks3' = ParseC_OLD.tokenize tactic_Check_Postcond_str;
  10.280 -
  10.281 -ParseC_OLD.parse ParseC_OLD.tactic toks1'; (* = (("Tactic", "EXEC IMMEDIATELY: Substitute <markup> <markup>"), [])*)
  10.282 -ParseC_OLD.parse ParseC_OLD.tactic toks2'; (* = (("Tactic", "EXEC IMMEDIATELY: Rewrite_Set_Inst <markup> make_ratpoly_in"), [])*)
  10.283 -ParseC_OLD.parse ParseC_OLD.tactic toks3'; (* = (("Tactic", "EXEC IMMEDIATELY: Check_Postcond xxxBiegelinien"), [])*)
  10.284 -
  10.285 -case ParseC_OLD.parse ParseC_OLD.tactic (ParseC_OLD.tokenize tactic_Substitute_str) of
  10.286 -  (("Tactic", _(*"EXEC IMMEDIATELY: Substitute <markup> <markup>"*)), [])
  10.287 -=> () | _ => error "TermC.parse Tactic Substitute CHANGED";
  10.288 -
  10.289 -
  10.290 -"----------- steps -----------------------------------------------------------------------------";
  10.291 -"----------- steps -----------------------------------------------------------------------------";
  10.292 -"----------- steps -----------------------------------------------------------------------------";
  10.293 -val steps_1_str = "\"[c = -1 * L * q_0 / -1, c_2 = -1 * L ^ 2 * q_0 / 2, c_3 = 0, c_4 = 0]\"";
  10.294 -val steps_term_tac_str = (
  10.295 -  final_result_str ^
  10.296 -  "  Tactic Check_Postcond [\"Biegelinien\"]"
  10.297 -  );
  10.298 -val steps_nonrec_str = (
  10.299 -     "\"[c = -1 * L * q_0 / -1, c_2 = -1 * L ^ 2 * q_0 / 2, c_3 = 0, c_4 = 0]\"" (**) ^
  10.300 -     "\"y x = c_4 + c_3 * x + 1 / (-1 * EI)\"" (**) ^
  10.301 -        tactic_Substitute_str ^
  10.302 -     "\"y x = 0 + 0 * x + 1 / (-1 * EI)\"" (**) ^
  10.303 -        tactic_Rewrite_Set_Inst_str ^
  10.304 -     final_result_str (**) ^
  10.305 -        tactic_Check_Postcond_str ^
  10.306 -  final_result_str (**)
  10.307 -);
  10.308 -
  10.309 -val toks1 = ParseC_OLD.tokenize steps_1_str;
  10.310 -val toks2 = ParseC_OLD.tokenize steps_term_tac_str;
  10.311 -val toks3 = ParseC_OLD.tokenize steps_nonrec_str;
  10.312 -
  10.313 -"----------- simple version";
  10.314 -ParseC_OLD.parse ParseC_OLD.steps []; (* = ([], []): (string * (string * string)) list * Token.T list *)
  10.315 -ParseC_OLD.parse ParseC_OLD.steps toks1; (* = ([("<markup>", ("", ""))], [])*) 
  10.316 -ParseC_OLD.parse ParseC_OLD.steps toks2; (* = ([("<markup>", ("Tactic", "EXEC IMMEDIATELY: Check_Postcond Biegelinien"))], [])*)
  10.317 -ParseC_OLD.parse ParseC_OLD.steps toks3;
  10.318 -
  10.319 -case ParseC_OLD.parse ParseC_OLD.steps toks3 of
  10.320 -  ([(_(*"<markup>"*), ("", "")), 
  10.321 -    (_(*"<markup>"*), ("Tactic", _(*"EXEC IMMEDIATELY: Substitute <markup> <markup>"*))),
  10.322 -    (_(*"<markup>"*), ("Tactic", _(*"EXEC IMMEDIATELY: Rewrite_Set_Inst <markup> make_ratpoly_in"*))),
  10.323 -    (_(*"<markup>"*), ("Tactic", _(*"EXEC IMMEDIATELY: Check_Postcond Biegelinien"*))),
  10.324 -    (_(*"<markup>"*), ("", ""))],
  10.325 -  [])
  10.326 -=> () | _ => error "TermC.parse steps, simple version, CHANGED";
  10.327 -
  10.328 -"----------- version preparing subproblems";
  10.329 -case ParseC_OLD.parse ParseC_OLD.steps_subpbl toks3 of
  10.330 -   ([_(*"EXEC IMMEDIATELY step_term: <markup> (, )"*),
  10.331 -    _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Substitute <markup> <markup>)"*),
  10.332 -    _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Rewrite_Set_Inst <markup> make_ratpoly_in)"*),
  10.333 -    _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Check_Postcond xxxBiegelinien)"*),
  10.334 -    _(*"EXEC IMMEDIATELY step_term: <markup> (, )"*)],
  10.335 -    [])
  10.336 -=> () | _ => error "TermC.parse steps, with exec_step_term, CHANGED";
  10.337 -
  10.338 -
  10.339 -"----------- body ------------------------------------------------------------------------------";
  10.340 -"----------- body ------------------------------------------------------------------------------";
  10.341 -"----------- body ------------------------------------------------------------------------------";
  10.342 -val solution_str = (
  10.343 -  "Solution:" ^
  10.344 -    steps_nonrec_str
  10.345 -  );
  10.346 -val body_str = specification_str ^ solution_str;
  10.347 -
  10.348 -case ParseC_OLD.parse ParseC_OLD.body (ParseC_OLD.tokenize body_str) of ((((((
  10.349 -  "Specification", ":"), (((((((((((((((
  10.350 -    "Model", (("", ""), "")), ":"),
  10.351 -      "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.352 -      "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]), 
  10.353 -      "Find"), ":"), _(*"<markup>"*)),
  10.354 -      "Relate"), ":"), [_(*"<markup>"*)]), ((
  10.355 -    "References", ":"), ((((((((
  10.356 -      "RTheory", ":"), "Biegelinie"),
  10.357 -      "RProblem"), ":"), ["Biegelinien"]),
  10.358 -      "RMethod"), ":"), ["Integrieren", "KonstanteBestimmen2"])))),
  10.359 -  "Solution"), ":"),
  10.360 -     [_(*"EXEC IMMEDIATELY step_term: <markup> (, )"*),
  10.361 -      _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Substitute <markup> <markup>)"*),
  10.362 -      _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Rewrite_Set_Inst <markup> make_ratpoly_in)"*),
  10.363 -      _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Check_Postcond xxxBiegelinien)"*),
  10.364 -      _(*"EXEC IMMEDIATELY step_term: <markup> (, )"*)]),
  10.365 -    [])
  10.366 -=> () | _ => error "TermC.parse body CHANGED";
  10.367 -
  10.368 -
  10.369 -"----------- Problem ---------------------------------------------------------------------------";
  10.370 -"----------- Problem ---------------------------------------------------------------------------";
  10.371 -"----------- Problem ---------------------------------------------------------------------------";
  10.372 -"----------- whole Problem";
  10.373 -val toks = ParseC_OLD.tokenize (problem_headline_str ^ specification_str ^ solution_str);
  10.374 -
  10.375 -case ParseC_OLD.parse ParseC_OLD.problem toks of (((((((
  10.376 -  "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
  10.377 -    "Specification", ":"), (((((((((((((((
  10.378 -      "Model", (("", ""), "")), ":"),
  10.379 -        "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.380 -        "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.381 -        "Find"), ":"), _(*"<markup>"*)),
  10.382 -        "Relate"), ":"), [_(*"<markup>"*)]), ((
  10.383 -      "References", ":"), ((((((((
  10.384 -        "RTheory", ":"), "Biegelinie"),
  10.385 -        "RProblem"), ":"), ["Biegelinien"]),
  10.386 -        "RMethod"), ":"), ["Integrieren", "KonstanteBestimmen2"])))),
  10.387 -      "Solution"), ":"),
  10.388 -        [_(*"EXEC IMMEDIATELY step_term: <markup> (, )"*),
  10.389 -         _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Substitute <markup> <markup>)"*),
  10.390 -         _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Rewrite_Set_Inst <markup> make_ratpoly_in)"*),
  10.391 -         _(*"EXEC IMMEDIATELY step_term: <markup> (Tactic, EXEC IMMEDIATELY: Check_Postcond xxxBiegelinien)"*),
  10.392 -         _(*"EXEC IMMEDIATELY step_term: <markup> (, )"*)])),
  10.393 -  ""),
  10.394 -[]) 
  10.395 -  => () | _ => error "TermC.parse problem (whole) CHANGED"
  10.396 -
  10.397 -"----------- enter Specification";
  10.398 -val toks = ParseC_OLD.tokenize (
  10.399 -  problem_headline_str ^
  10.400 -    "Specification:" ^
  10.401 -      model_empty_str ^ (* <<<----- empty *)
  10.402 -      "References:" ^   (* <<<----- collapsed *)
  10.403 -    "Solution:"
  10.404 -  );
  10.405 -case ParseC_OLD.parse ParseC_OLD.problem toks of (((((((
  10.406 -  "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
  10.407 -    "Specification", ":"), (((((((((((((((
  10.408 -      "Model", (("", ""), "")), ":"),
  10.409 -        "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.410 -        "Where"), ":"), [_(*"<markup>"*)]),
  10.411 -        "Find"), ":"), _(*"<markup>"*)),
  10.412 -        "Relate"), ":"), [_(*"<markup>"*)]), ((
  10.413 -      "References", ":"), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
  10.414 -    "Solution"), ":"), [])),
  10.415 -  ""),
  10.416 -[])
  10.417 -=> () | _ => error "TermC.parse enter Specification CHANGED"
  10.418 -
  10.419 -"----------- Problem-headline only";
  10.420 -val toks = ParseC_OLD.tokenize problem_headline_str;
  10.421 -
  10.422 -case ParseC_OLD.parse ParseC_OLD.problem toks of (((((((
  10.423 -  "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
  10.424 -    "", ""), (((((((((((((((
  10.425 -      "", (("", ""), "")), ""),
  10.426 -        ""), ""), []),
  10.427 -        ""), ""), []),
  10.428 -        ""), ""), ""),
  10.429 -        ""), ""), []), ((
  10.430 -      "", ""), (((((((("", ""), ""), ""), ""), []), ""), ""), [])))),
  10.431 -    ""), ""), [])),
  10.432 -  ""),
  10.433 -[]) 
  10.434 -=> () | _ => error "TermC.parse BEFORE enter Specification CHANGED"
  10.435 -
  10.436 -"----------- finish Specification";
  10.437 -val toks = ParseC_OLD.tokenize (
  10.438 -  problem_headline_str ^
  10.439 -    "Specification:" ^
  10.440 -      model_str ^
  10.441 -      references_str ^
  10.442 -    "Solution:"
  10.443 -  );
  10.444 -case ParseC_OLD.parse ParseC_OLD.problem toks of ( ((((((
  10.445 -  "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
  10.446 -    "Specification", ":"), (((((((((((((((
  10.447 -      "Model", (("", ""), "")), ":"),
  10.448 -        "Given"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.449 -        "Where"), ":"), [_(*"<markup>"*), _(*"<markup>"*)]),
  10.450 -        "Find"), ":"), _(*"<markup>"*)),
  10.451 -        "Relate"), ":"), [_(*"<markup>"*)]), ((
  10.452 -      "References", ":"), ((((((((
  10.453 -        "RTheory", ":"), "Biegelinie"),
  10.454 -        "RProblem"), ":"), ["Biegelinien"]),
  10.455 -        "RMethod"), ":"), ["Integrieren", "KonstanteBestimmen2"])))),
  10.456 -      "Solution"), ":"),
  10.457 -        [])),
  10.458 -  ""),
  10.459 -[])
  10.460 -=> () | _ => error "TermC.parse finish specification CHANGED"
  10.461 -
  10.462 -"----------- Specification collapsed";
  10.463 -case ParseC_OLD.parse ParseC_OLD.problem (ParseC_OLD.tokenize problem_headline_str) of (((((((
  10.464 -  "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
  10.465 -    (*Specification*)"", ""), (((((((((((((((
  10.466 -      (*Model*)"", (("", ""), "")), ""), ""), ""), []), ""), ""), []), ""), ""), ""), ""), ""), []), (("", ""), ((((((((
  10.467 -      (*References*)"", ""), ""), ""), ""), []), ""), ""), [])))),
  10.468 -    (*Solution*)""), ""),
  10.469 -      [])),
  10.470 -  ""),
  10.471 -[])
  10.472 -=> () | _ => error "TermC.parse Specification collapsed CHANGED"
  10.473 -
  10.474 -"----------- Problem with final_result, all collapsed";
  10.475 -val toks = ParseC_OLD.tokenize (problem_headline_str ^ final_result_str);
  10.476 -case ParseC_OLD.parse ParseC_OLD.problem toks of (((((((
  10.477 -  "(", "Biegelinie"), ","), ["Biegelinien"]), ")"), (((((
  10.478 -    (*Specification*)"", ""), (((((((((((((((
  10.479 -      (*Model*)"", (("", ""), "")), ""), ""), ""), []), ""), ""), []), ""), ""), ""), ""), ""), []), (("", ""), ((((((((
  10.480 -      (*References*)"", ""), ""), ""), ""), []), ""), ""), [])))),
  10.481 -    (*Solution*)""), ""),
  10.482 -      [])),
  10.483 -  _(*"<markup>"*)),
  10.484 -[])
  10.485 -=> () | _ => error "TermC.parse Problem with final_result, all collapsed CHANGED"
  10.486 -
  10.487 -
  10.488 -"----------- fun read_out_problem --------------------------------------------------------------";
  10.489 -"----------- fun read_out_problem --------------------------------------------------------------";
  10.490 -"----------- fun read_out_problem --------------------------------------------------------------";
  10.491 -val toks = ParseC_OLD.tokenize (
  10.492 -  problem_headline_str ^
  10.493 -    "Specification:" ^
  10.494 -      model_str ^
  10.495 -      references_str ^
  10.496 -    "Solution:"
  10.497 -  );
  10.498 -case ParseC_OLD.parse ParseC_OLD.problem toks |> fst |> ParseC_OLD.read_out_problem of
  10.499 -  {thy_id = "Biegelinie", pbl_id = ["Biegelinien"],
  10.500 -   givens = [_(*"<markup>"*), _(*"<markup>"*)], wheres = [_(*"<markup>"*), _(*"<markup>"*)],
  10.501 -     find = _(*"<markup>"*), relates = [_(*"<markup>"*)],
  10.502 -   rthy_id = "Biegelinie", rpbl_id = ["Biegelinien"], rmet_id = ["Integrieren", "KonstanteBestimmen2"]
  10.503 -   } => ()
  10.504 -| _ => error "read_out_problem CHANGED"
  10.505 -
  10.506 -
  10.507 -"----------- THERE ARE TESTS with INCOMPLETE WORK IN Test_Parse_Isac.thy -----------------------";
  10.508 -"----------- THERE ARE TESTS with INCOMPLETE WORK IN Test_Parse_Isac.thy -----------------------";
  10.509 -"----------- THERE ARE TESTS with INCOMPLETE WORK IN Test_Parse_Isac.thy -----------------------";
  10.510 -(*TODO after Problem became recursive*)
  10.511 -
  10.512 -"----------- parser for exp_file.str -----------------------------------------------------------";
  10.513 -"----------- parser for exp_file.str -----------------------------------------------------------";
  10.514 -"----------- parser for exp_file.str -----------------------------------------------------------";
  10.515 -(*
  10.516 -  For the keyword Example, SPARK's lexer and parser are used. The respective Isac code was
  10.517 -  developed alongside the existing SPARK code, not by tests. Here are tests for code that uses
  10.518 -  Isabelle technologies and precedes the actual production code.
  10.519 -*)
  10.520 -val form_single =
  10.521 -  Parse.$$$ "(" --
  10.522 -    (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") (* Formalise.model *) --
  10.523 -  Parse.$$$ "," --
  10.524 -    Parse.$$$ "(" (* begin Formalise.spec *) --
  10.525 -      Parse.string -- (* ThyC.id, still not qualified *)
  10.526 -    Parse.$$$ "," --
  10.527 -      (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") (* Problem.id *) --
  10.528 -    Parse.$$$ "," --
  10.529 -      (Parse.$$$ "[" |-- Parse.list1 Parse.string --| Parse.$$$ "]") (* MethodC.id *) --
  10.530 -    Parse.$$$ ")" (* end Formalise.spec *) --
  10.531 -  Parse.$$$ ")";
  10.532 -
  10.533 -val form_single_str = (
  10.534 -  "(                                                                             " ^
  10.535 -  "  [\"Traegerlaenge L\", \"Streckenlast q_0\", \"Biegelinie y\",               " ^
  10.536 -	"    \"Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]\",     " ^
  10.537 -	"    \"FunktionsVariable x\", \"GleichungsVariablen [c, c_2, c_3, c_4]\",      " ^
  10.538 -  "    \"AbleitungBiegelinie dy\"],                                              " ^
  10.539 -  "  (\"Biegelinie\", [\"Biegelinien\"], [\"IntegrierenUndKonstanteBestimmen2\"])" ^
  10.540 -  ")");
  10.541 -
  10.542 -val toks = ParseC_OLD.tokenize form_single_str;
  10.543 -case ParseC_OLD.parse form_single toks of (((((((((((
  10.544 -  "(",
  10.545 -     ["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y",
  10.546 -       "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", "FunktionsVariable x",
  10.547 -       "GleichungsVariablen [c, c_2, c_3, c_4]", "AbleitungBiegelinie dy"]),
  10.548 -  ","),
  10.549 -    "("), "Biegelinie"), ","), ["Biegelinien"]), ","), ["IntegrierenUndKonstanteBestimmen2"]), ")"),
  10.550 -  ")"),
  10.551 -[])
  10.552 -=> () | _ => error "TermC.parse form_single toks CHANGED";
  10.553 -
  10.554 -(* Isac always takes a singleton here *)
  10.555 -val formalise = (Parse.$$$ "[" |-- Parse.list1 form_single --| Parse.$$$ "]");
  10.556 -type formalise =
  10.557 -  ((((((((((string * string list) * string) * string) * string) * string) * string list) *
  10.558 -    string) * string list) * string) * string) list;
  10.559 -formalise: formalise parser;
  10.560 -
  10.561 -val toks = ParseC_OLD.tokenize ("[" ^ form_single_str ^ "]");
  10.562 -case ParseC_OLD.parse formalise (ParseC_OLD.tokenize ("[" ^ form_single_str ^ "]")) of ([((((((((((
  10.563 -  "(",
  10.564 -    ["Traegerlaenge L", "Streckenlast q_0", "Biegelinie y",
  10.565 -      "Randbedingungen [y 0 = (0::real), y L = 0, M_b 0 = 0, M_b L = 0]", "FunktionsVariable x",
  10.566 -      "GleichungsVariablen [c, c_2, c_3, c_4]", "AbleitungBiegelinie dy"]),
  10.567 -  ","),
  10.568 -    "("), "Biegelinie"), ","), ["Biegelinien"]), ","), ["IntegrierenUndKonstanteBestimmen2"]), ")"),
  10.569 -  ")")],
  10.570 -[])
  10.571 -=> () | _ => error "TermC.parse formalise CHANGED";
  10.572 -
  10.573 +"----------- TODO ------------------------------------------------------------------------------";
  10.574 +"----------- TODO ------------------------------------------------------------------------------";
  10.575 +"----------- TODO ------------------------------------------------------------------------------";
    11.1 --- a/test/Tools/isac/BridgeJEdit/preliminary.sml	Tue Aug 23 18:05:08 2022 +0200
    11.2 +++ b/test/Tools/isac/BridgeJEdit/preliminary.sml	Wed Aug 24 12:37:07 2022 +0200
    11.3 @@ -14,14 +14,22 @@
    11.4  "----------- investigate Pure/context.ML THEORY_DATA -------------------------------------------";
    11.5  "----------- investigate Pure/context.ML THEORY_DATA -------------------------------------------";
    11.6  "----------- investigate Pure/context.ML THEORY_DATA -------------------------------------------";
    11.7 -(* Pure/context.ML
    11.8 +(* Pure/context.ML * )
    11.9  signature THEORY_DATA =
   11.10  sig
   11.11    type T
   11.12    val get: theory -> T
   11.13    val put: T -> theory -> theory
   11.14    val map: (T -> T) -> theory -> theory
   11.15 -end;*)
   11.16 +end;
   11.17 +( **)
   11.18 +structure Refs_Data = Theory_Data                             (*code for keyword ExampleSPARK*)
   11.19 +(
   11.20 +  type T = References.T
   11.21 +  val empty : T = References.empty
   11.22 +  val extend = I
   11.23 +  fun merge (_, refs) = refs
   11.24 +);
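           (* A minimal usage sketch of the Theory_Data instance above, restricted to names
              already used in this file (References.empty):
                val thy' = Refs_Data.put References.empty @{theory};
                val refs = Refs_Data.get thy';   (* = References.empty *)
           *)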
   11.25  (*
   11.26  Refs_Data.empty;                                    (*in Refs_Data defined workers are hidden*)
   11.27  Refs_Data.extend;                                   (*in Refs_Data defined workers are hidden*)